diff --git a/vendor/github.com/andybalholm/brotli/LICENSE b/vendor/github.com/andybalholm/brotli/LICENSE
new file mode 100644
index 0000000..33b7cdd
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/andybalholm/brotli/README.md b/vendor/github.com/andybalholm/brotli/README.md
new file mode 100644
index 0000000..1ea7fdb
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/README.md
@@ -0,0 +1,7 @@
+This package is a brotli compressor and decompressor implemented in Go.
+It was translated from the reference implementation (https://github.com/google/brotli)
+with the `c2go` tool at https://github.com/andybalholm/c2go.
+
+I am using it in production with https://github.com/andybalholm/redwood.
+
+API documentation is found at https://pkg.go.dev/github.com/andybalholm/brotli?tab=doc.
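+
+A minimal usage sketch (assuming the `NewWriter`/`NewReader` io wrappers
+documented at the link above; error handling is elided for brevity):
+
+```go
+package main
+
+import (
+	"bytes"
+	"io"
+	"os"
+
+	"github.com/andybalholm/brotli"
+)
+
+func main() {
+	var buf bytes.Buffer
+	w := brotli.NewWriter(&buf) // compress into an in-memory buffer
+	w.Write([]byte("hello, brotli"))
+	w.Close() // Close flushes any remaining buffered output
+
+	r := brotli.NewReader(&buf) // decompress it back
+	io.Copy(os.Stdout, r)
+}
+```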
diff --git a/vendor/github.com/andybalholm/brotli/backward_references.go b/vendor/github.com/andybalholm/brotli/backward_references.go
new file mode 100644
index 0000000..008c054
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/backward_references.go
@@ -0,0 +1,185 @@
+package brotli
+
+import (
+ "sync"
+)
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Function to find backward reference copies. */
+
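+/* computeDistanceCode maps a backward distance to its distance code: codes
+ 0..15 are the short codes of section 4 of the spec (the two hex constants
+ are nibble lookup tables giving the short code for distances within +/-3 of
+ the last two cached distances); any other distance is coded directly as
+ distance + numDistanceShortCodes - 1. */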
+func computeDistanceCode(distance uint, max_distance uint, dist_cache []int) uint {
+ if distance <= max_distance {
+ var distance_plus_3 uint = distance + 3
+ var offset0 uint = distance_plus_3 - uint(dist_cache[0])
+ var offset1 uint = distance_plus_3 - uint(dist_cache[1])
+ if distance == uint(dist_cache[0]) {
+ return 0
+ } else if distance == uint(dist_cache[1]) {
+ return 1
+ } else if offset0 < 7 {
+ return (0x9750468 >> (4 * offset0)) & 0xF
+ } else if offset1 < 7 {
+ return (0xFDB1ACE >> (4 * offset1)) & 0xF
+ } else if distance == uint(dist_cache[2]) {
+ return 2
+ } else if distance == uint(dist_cache[3]) {
+ return 3
+ }
+ }
+
+ return distance + numDistanceShortCodes - 1
+}
+
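+/* hasherSearchResultPool recycles hasherSearchResult structs across calls to
+ createBackwardReferences to avoid repeated small allocations. */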
+var hasherSearchResultPool sync.Pool
+
+func createBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, hasher hasherHandle, dist_cache []int, last_insert_len *uint, commands *[]command, num_literals *uint) {
+	/* Set maximum distance, see section 9.1. of the spec. */
+	var max_backward_limit uint = maxBackwardLimit(params.lgwin)
+ var insert_length uint = *last_insert_len
+ var pos_end uint = position + num_bytes
+ var store_end uint
+ if num_bytes >= hasher.StoreLookahead() {
+ store_end = position + num_bytes - hasher.StoreLookahead() + 1
+ } else {
+ store_end = position
+ }
+	/* Speed-up heuristics for random data. */
+	var random_heuristics_window_size uint = literalSpreeLengthForSparseSearch(params)
+	var apply_random_heuristics uint = position + random_heuristics_window_size
+	var gap uint = 0
+
+	/* Minimum score to accept a backward reference. */
+	const kMinScore uint = scoreBase + 100
+
+	hasher.PrepareDistanceCache(dist_cache)
+ sr2, _ := hasherSearchResultPool.Get().(*hasherSearchResult)
+ if sr2 == nil {
+ sr2 = &hasherSearchResult{}
+ }
+ sr, _ := hasherSearchResultPool.Get().(*hasherSearchResult)
+ if sr == nil {
+ sr = &hasherSearchResult{}
+ }
+
+ for position+hasher.HashTypeLength() < pos_end {
+ var max_length uint = pos_end - position
+ var max_distance uint = brotli_min_size_t(position, max_backward_limit)
+ sr.len = 0
+ sr.len_code_delta = 0
+ sr.distance = 0
+ sr.score = kMinScore
+		hasher.FindLongestMatch(&params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, sr)
+ if sr.score > kMinScore {
+ /* Found a match. Let's look for something even better ahead. */
+ var delayed_backward_references_in_row int = 0
+ max_length--
+ for ; ; max_length-- {
+ var cost_diff_lazy uint = 175
+ if params.quality < minQualityForExtensiveReferenceSearch {
+ sr2.len = brotli_min_size_t(sr.len-1, max_length)
+ } else {
+ sr2.len = 0
+ }
+ sr2.len_code_delta = 0
+ sr2.distance = 0
+ sr2.score = kMinScore
+ max_distance = brotli_min_size_t(position+1, max_backward_limit)
+				hasher.FindLongestMatch(&params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, sr2)
+ if sr2.score >= sr.score+cost_diff_lazy {
+ /* Ok, let's just write one byte for now and start a match from the
+ next byte. */
+ position++
+
+ insert_length++
+ *sr = *sr2
+ delayed_backward_references_in_row++
+ if delayed_backward_references_in_row < 4 && position+hasher.HashTypeLength() < pos_end {
+ continue
+ }
+ }
+
+ break
+ }
+
+ apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
+ max_distance = brotli_min_size_t(position, max_backward_limit)
+ {
+ /* The first 16 codes are special short-codes,
+ and the minimum offset is 1. */
+ var distance_code uint = computeDistanceCode(sr.distance, max_distance+gap, dist_cache)
+ if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
+ dist_cache[3] = dist_cache[2]
+ dist_cache[2] = dist_cache[1]
+ dist_cache[1] = dist_cache[0]
+ dist_cache[0] = int(sr.distance)
+ hasher.PrepareDistanceCache(dist_cache)
+ }
+
+			*commands = append(*commands, makeCommand(&params.dist, insert_length, sr.len, sr.len_code_delta, distance_code))
+ }
+
+ *num_literals += insert_length
+ insert_length = 0
+ /* Put the hash keys into the table, if there are enough bytes left.
+ Depending on the hasher implementation, it can push all positions
+ in the given range or only a subset of them.
+ Avoid hash poisoning with RLE data. */
+ {
+ var range_start uint = position + 2
+ var range_end uint = brotli_min_size_t(position+sr.len, store_end)
+ if sr.distance < sr.len>>2 {
+ range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
+ }
+
+ hasher.StoreRange(ringbuffer, ringbuffer_mask, range_start, range_end)
+ }
+
+ position += sr.len
+ } else {
+ insert_length++
+ position++
+
+			/* If we have not seen matches for a long time, we can skip some
+			   match lookups. Unsuccessful match lookups are very expensive,
+			   and this kind of heuristic speeds up compression quite a lot. */
+ if position > apply_random_heuristics {
+				/* Going through incompressible data, jump. */
+ if position > apply_random_heuristics+4*random_heuristics_window_size {
+ var kMargin uint = brotli_max_size_t(hasher.StoreLookahead()-1, 4)
+					/* It has been quite a long time since we saw a copy, so we assume
+					   that this data is not compressible, and store hashes less
+					   often. Hashes of incompressible data are less likely to turn
+					   out to be useful in the future, too, so we store fewer of
+					   them, so as not to flood the hash table and crowd out the
+					   hashes of good compressible data. */
+
+ var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
+ for ; position < pos_jump; position += 4 {
+ hasher.Store(ringbuffer, ringbuffer_mask, position)
+ insert_length += 4
+ }
+ } else {
+ var kMargin uint = brotli_max_size_t(hasher.StoreLookahead()-1, 2)
+ var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
+ for ; position < pos_jump; position += 2 {
+ hasher.Store(ringbuffer, ringbuffer_mask, position)
+ insert_length += 2
+ }
+ }
+ }
+ }
+ }
+
+ insert_length += pos_end - position
+ *last_insert_len = insert_length
+
+ hasherSearchResultPool.Put(sr)
+ hasherSearchResultPool.Put(sr2)
+}
diff --git a/vendor/github.com/andybalholm/brotli/backward_references_hq.go b/vendor/github.com/andybalholm/brotli/backward_references_hq.go
new file mode 100644
index 0000000..21629c1
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/backward_references_hq.go
@@ -0,0 +1,796 @@
+package brotli
+
+import "math"
+
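+/* zopfliNode is one node of the shortest-path search over positions.
+ length packs the copy length (low 25 bits) together with
+ (copy length + 9 - length code) in the high bits; dcode_insert_length packs
+ (distance short code + 1) in the top 5 bits (0 means no short code) with the
+ insert length in the low 27 bits. The nested struct u stands in for the C
+ union in the reference implementation. */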
+type zopfliNode struct {
+ length uint32
+ distance uint32
+ dcode_insert_length uint32
+ u struct {
+ cost float32
+ next uint32
+ shortcut uint32
+ }
+}
+
+const maxEffectiveDistanceAlphabetSize = 544
+
+const kInfinity float32 = 1.7e38 /* ~= 2 ^ 127 */
+
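+/* Taken together, these tables enumerate the 16 distance short codes of the
+ spec: short code j refers to
+ distance_cache[kDistanceCacheIndex[j]] + kDistanceCacheOffset[j]. */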
+var kDistanceCacheIndex = []uint32{0, 1, 2, 3, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1}
+
+var kDistanceCacheOffset = []int{0, 0, 0, 0, -1, 1, -2, 2, -3, 3, -1, 1, -2, 2, -3, 3}
+
+func initZopfliNodes(array []zopfliNode, length uint) {
+ var stub zopfliNode
+ var i uint
+ stub.length = 1
+ stub.distance = 0
+ stub.dcode_insert_length = 0
+ stub.u.cost = kInfinity
+ for i = 0; i < length; i++ {
+ array[i] = stub
+ }
+}
+
+func zopfliNodeCopyLength(self *zopfliNode) uint32 {
+ return self.length & 0x1FFFFFF
+}
+
+func zopfliNodeLengthCode(self *zopfliNode) uint32 {
+ var modifier uint32 = self.length >> 25
+ return zopfliNodeCopyLength(self) + 9 - modifier
+}
+
+func zopfliNodeCopyDistance(self *zopfliNode) uint32 {
+ return self.distance
+}
+
+func zopfliNodeDistanceCode(self *zopfliNode) uint32 {
+ var short_code uint32 = self.dcode_insert_length >> 27
+ if short_code == 0 {
+ return zopfliNodeCopyDistance(self) + numDistanceShortCodes - 1
+ } else {
+ return short_code - 1
+ }
+}
+
+func zopfliNodeCommandLength(self *zopfliNode) uint32 {
+ return zopfliNodeCopyLength(self) + (self.dcode_insert_length & 0x7FFFFFF)
+}
+
+/* Histogram based cost model for zopflification. */
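+/* literal_costs_ stores prefix sums of the estimated per-byte literal costs,
+ so the cost of any byte range is a single subtraction (see
+ zopfliCostModelGetLiteralCosts). */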
+type zopfliCostModel struct {
+ cost_cmd_ [numCommandSymbols]float32
+ cost_dist_ []float32
+ distance_histogram_size uint32
+ literal_costs_ []float32
+ min_cost_cmd_ float32
+ num_bytes_ uint
+}
+
+func initZopfliCostModel(self *zopfliCostModel, dist *distanceParams, num_bytes uint) {
+ var distance_histogram_size uint32 = dist.alphabet_size
+ if distance_histogram_size > maxEffectiveDistanceAlphabetSize {
+ distance_histogram_size = maxEffectiveDistanceAlphabetSize
+ }
+
+ self.num_bytes_ = num_bytes
+ self.literal_costs_ = make([]float32, (num_bytes + 2))
+ self.cost_dist_ = make([]float32, (dist.alphabet_size))
+ self.distance_histogram_size = distance_histogram_size
+}
+
+func cleanupZopfliCostModel(self *zopfliCostModel) {
+ self.literal_costs_ = nil
+ self.cost_dist_ = nil
+}
+
+func setCost(histogram []uint32, histogram_size uint, literal_histogram bool, cost []float32) {
+ var sum uint = 0
+ var missing_symbol_sum uint
+ var log2sum float32
+ var missing_symbol_cost float32
+ var i uint
+ for i = 0; i < histogram_size; i++ {
+ sum += uint(histogram[i])
+ }
+
+ log2sum = float32(fastLog2(sum))
+ missing_symbol_sum = sum
+ if !literal_histogram {
+ for i = 0; i < histogram_size; i++ {
+ if histogram[i] == 0 {
+ missing_symbol_sum++
+ }
+ }
+ }
+
+ missing_symbol_cost = float32(fastLog2(missing_symbol_sum)) + 2
+ for i = 0; i < histogram_size; i++ {
+ if histogram[i] == 0 {
+ cost[i] = missing_symbol_cost
+ continue
+ }
+
+ /* Shannon bits for this symbol. */
+ cost[i] = log2sum - float32(fastLog2(uint(histogram[i])))
+
+ /* Cannot be coded with less than 1 bit */
+ if cost[i] < 1 {
+ cost[i] = 1
+ }
+ }
+}
+
+func zopfliCostModelSetFromCommands(self *zopfliCostModel, position uint, ringbuffer []byte, ringbuffer_mask uint, commands []command, last_insert_len uint) {
+ var histogram_literal [numLiteralSymbols]uint32
+ var histogram_cmd [numCommandSymbols]uint32
+ var histogram_dist [maxEffectiveDistanceAlphabetSize]uint32
+ var cost_literal [numLiteralSymbols]float32
+ var pos uint = position - last_insert_len
+ var min_cost_cmd float32 = kInfinity
+ var cost_cmd []float32 = self.cost_cmd_[:]
+ var literal_costs []float32
+
+ histogram_literal = [numLiteralSymbols]uint32{}
+ histogram_cmd = [numCommandSymbols]uint32{}
+ histogram_dist = [maxEffectiveDistanceAlphabetSize]uint32{}
+
+ for i := range commands {
+ var inslength uint = uint(commands[i].insert_len_)
+ var copylength uint = uint(commandCopyLen(&commands[i]))
+ var distcode uint = uint(commands[i].dist_prefix_) & 0x3FF
+ var cmdcode uint = uint(commands[i].cmd_prefix_)
+ var j uint
+
+ histogram_cmd[cmdcode]++
+ if cmdcode >= 128 {
+ histogram_dist[distcode]++
+ }
+
+ for j = 0; j < inslength; j++ {
+ histogram_literal[ringbuffer[(pos+j)&ringbuffer_mask]]++
+ }
+
+ pos += inslength + copylength
+ }
+
+ setCost(histogram_literal[:], numLiteralSymbols, true, cost_literal[:])
+ setCost(histogram_cmd[:], numCommandSymbols, false, cost_cmd)
+ setCost(histogram_dist[:], uint(self.distance_histogram_size), false, self.cost_dist_)
+
+ for i := 0; i < numCommandSymbols; i++ {
+ min_cost_cmd = brotli_min_float(min_cost_cmd, cost_cmd[i])
+ }
+
+ self.min_cost_cmd_ = min_cost_cmd
+ {
+ literal_costs = self.literal_costs_
+ var literal_carry float32 = 0.0
+ num_bytes := int(self.num_bytes_)
+ literal_costs[0] = 0.0
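+		/* Compensated (Kahan-style) summation: literal_carry re-adds the
+		   low-order bits that were rounded away when accumulating into the
+		   float32 prefix sums. */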
+ for i := 0; i < num_bytes; i++ {
+ literal_carry += cost_literal[ringbuffer[(position+uint(i))&ringbuffer_mask]]
+ literal_costs[i+1] = literal_costs[i] + literal_carry
+ literal_carry -= literal_costs[i+1] - literal_costs[i]
+ }
+ }
+}
+
+func zopfliCostModelSetFromLiteralCosts(self *zopfliCostModel, position uint, ringbuffer []byte, ringbuffer_mask uint) {
+ var literal_costs []float32 = self.literal_costs_
+ var literal_carry float32 = 0.0
+ var cost_dist []float32 = self.cost_dist_
+ var cost_cmd []float32 = self.cost_cmd_[:]
+ var num_bytes uint = self.num_bytes_
+ var i uint
+ estimateBitCostsForLiterals(position, num_bytes, ringbuffer_mask, ringbuffer, literal_costs[1:])
+ literal_costs[0] = 0.0
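+	/* Same compensated summation as in zopfliCostModelSetFromCommands,
+	   accumulating the per-byte estimates into prefix sums. */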
+ for i = 0; i < num_bytes; i++ {
+ literal_carry += literal_costs[i+1]
+ literal_costs[i+1] = literal_costs[i] + literal_carry
+ literal_carry -= literal_costs[i+1] - literal_costs[i]
+ }
+
+ for i = 0; i < numCommandSymbols; i++ {
+ cost_cmd[i] = float32(fastLog2(uint(11 + uint32(i))))
+ }
+
+ for i = 0; uint32(i) < self.distance_histogram_size; i++ {
+ cost_dist[i] = float32(fastLog2(uint(20 + uint32(i))))
+ }
+
+ self.min_cost_cmd_ = float32(fastLog2(11))
+}
+
+func zopfliCostModelGetCommandCost(self *zopfliCostModel, cmdcode uint16) float32 {
+ return self.cost_cmd_[cmdcode]
+}
+
+func zopfliCostModelGetDistanceCost(self *zopfliCostModel, distcode uint) float32 {
+ return self.cost_dist_[distcode]
+}
+
+func zopfliCostModelGetLiteralCosts(self *zopfliCostModel, from uint, to uint) float32 {
+ return self.literal_costs_[to] - self.literal_costs_[from]
+}
+
+func zopfliCostModelGetMinCostCmd(self *zopfliCostModel) float32 {
+ return self.min_cost_cmd_
+}
+
+/* REQUIRES: len >= 2, start_pos <= pos */
+/* REQUIRES: cost < kInfinity, nodes[start_pos].cost < kInfinity */
+/* Maintains the "ZopfliNode array invariant". */
+func updateZopfliNode(nodes []zopfliNode, pos uint, start_pos uint, len uint, len_code uint, dist uint, short_code uint, cost float32) {
+ var next *zopfliNode = &nodes[pos+len]
+ next.length = uint32(len | (len+9-len_code)<<25)
+ next.distance = uint32(dist)
+ next.dcode_insert_length = uint32(short_code<<27 | (pos - start_pos))
+ next.u.cost = cost
+}
+
+type posData struct {
+ pos uint
+ distance_cache [4]int
+ costdiff float32
+ cost float32
+}
+
+/* Maintains the eight smallest cost differences together with their positions */
+type startPosQueue struct {
+ q_ [8]posData
+ idx_ uint
+}
+
+func initStartPosQueue(self *startPosQueue) {
+ self.idx_ = 0
+}
+
+func startPosQueueSize(self *startPosQueue) uint {
+ return brotli_min_size_t(self.idx_, 8)
+}
+
+func startPosQueuePush(self *startPosQueue, posdata *posData) {
+ var offset uint = ^(self.idx_) & 7
+ self.idx_++
+ var len uint = startPosQueueSize(self)
+ var i uint
+ var q []posData = self.q_[:]
+ q[offset] = *posdata
+
+ /* Restore the sorted order. In the list of |len| items at most |len - 1|
+ adjacent element comparisons / swaps are required. */
+ for i = 1; i < len; i++ {
+ if q[offset&7].costdiff > q[(offset+1)&7].costdiff {
+ var tmp posData = q[offset&7]
+ q[offset&7] = q[(offset+1)&7]
+ q[(offset+1)&7] = tmp
+ }
+
+ offset++
+ }
+}
+
+func startPosQueueAt(self *startPosQueue, k uint) *posData {
+ return &self.q_[(k-self.idx_)&7]
+}
+
+/* Returns the minimum possible copy length that can improve the cost of any */
+/* future position. */
+func computeMinimumCopyLength(start_cost float32, nodes []zopfliNode, num_bytes uint, pos uint) uint {
+ var min_cost float32 = start_cost
+ var len uint = 2
+ var next_len_bucket uint = 4
+ /* Compute the minimum possible cost of reaching any future position. */
+
+ var next_len_offset uint = 10
+ for pos+len <= num_bytes && nodes[pos+len].u.cost <= min_cost {
+ /* We already reached (pos + len) with no more cost than the minimum
+ possible cost of reaching anything from this pos, so there is no point in
+ looking for lengths <= len. */
+ len++
+
+ if len == next_len_offset {
+ /* We reached the next copy length code bucket, so we add one more
+ extra bit to the minimum cost. */
+ min_cost += 1.0
+
+ next_len_offset += next_len_bucket
+ next_len_bucket *= 2
+ }
+ }
+
+ return uint(len)
+}
+
+/* REQUIRES: nodes[pos].cost < kInfinity
+   REQUIRES: nodes[0..pos] satisfies the "ZopfliNode array invariant". */
+func computeDistanceShortcut(block_start uint, pos uint, max_backward_limit uint, gap uint, nodes []zopfliNode) uint32 {
+ var clen uint = uint(zopfliNodeCopyLength(&nodes[pos]))
+ var ilen uint = uint(nodes[pos].dcode_insert_length & 0x7FFFFFF)
+ var dist uint = uint(zopfliNodeCopyDistance(&nodes[pos]))
+
+ /* Since |block_start + pos| is the end position of the command, the copy part
+ starts from |block_start + pos - clen|. Distances that are greater than
+ this or greater than |max_backward_limit| + |gap| are static dictionary
+ references, and do not update the last distances.
+ Also distance code 0 (last distance) does not update the last distances. */
+ if pos == 0 {
+ return 0
+ } else if dist+clen <= block_start+pos+gap && dist <= max_backward_limit+gap && zopfliNodeDistanceCode(&nodes[pos]) > 0 {
+ return uint32(pos)
+ } else {
+ return nodes[pos-clen-ilen].u.shortcut
+ }
+}
+
+/* Fills in dist_cache[0..3] with the last four distances (as defined by
+ Section 4. of the Spec) that would be used at (block_start + pos) if we
+ used the shortest path of commands from block_start, computed from
+ nodes[0..pos]. The last four distances at block_start are in
+ starting_dist_cache[0..3].
+ REQUIRES: nodes[pos].cost < kInfinity
+   REQUIRES: nodes[0..pos] satisfies the "ZopfliNode array invariant". */
+func computeDistanceCache(pos uint, starting_dist_cache []int, nodes []zopfliNode, dist_cache []int) {
+ var idx int = 0
+ var p uint = uint(nodes[pos].u.shortcut)
+ for idx < 4 && p > 0 {
+ var ilen uint = uint(nodes[p].dcode_insert_length & 0x7FFFFFF)
+ var clen uint = uint(zopfliNodeCopyLength(&nodes[p]))
+ var dist uint = uint(zopfliNodeCopyDistance(&nodes[p]))
+ dist_cache[idx] = int(dist)
+ idx++
+
+ /* Because of prerequisite, p >= clen + ilen >= 2. */
+ p = uint(nodes[p-clen-ilen].u.shortcut)
+ }
+
+ for ; idx < 4; idx++ {
+ dist_cache[idx] = starting_dist_cache[0]
+ starting_dist_cache = starting_dist_cache[1:]
+ }
+}
+
+/* Maintains "ZopfliNode array invariant" and pushes node to the queue, if it
+ is eligible. */
+func evaluateNode(block_start uint, pos uint, max_backward_limit uint, gap uint, starting_dist_cache []int, model *zopfliCostModel, queue *startPosQueue, nodes []zopfliNode) {
+ /* Save cost, because ComputeDistanceCache invalidates it. */
+ var node_cost float32 = nodes[pos].u.cost
+ nodes[pos].u.shortcut = computeDistanceShortcut(block_start, pos, max_backward_limit, gap, nodes)
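+	/* Push only if the command path to pos is no more expensive than emitting
+	   everything up to pos as literals. */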
+ if node_cost <= zopfliCostModelGetLiteralCosts(model, 0, pos) {
+ var posdata posData
+ posdata.pos = pos
+ posdata.cost = node_cost
+ posdata.costdiff = node_cost - zopfliCostModelGetLiteralCosts(model, 0, pos)
+ computeDistanceCache(pos, starting_dist_cache, nodes, posdata.distance_cache[:])
+ startPosQueuePush(queue, &posdata)
+ }
+}
+
+/* Returns longest copy length. */
+func updateNodes(num_bytes uint, block_start uint, pos uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, max_backward_limit uint, starting_dist_cache []int, num_matches uint, matches []backwardMatch, model *zopfliCostModel, queue *startPosQueue, nodes []zopfliNode) uint {
+ var cur_ix uint = block_start + pos
+ var cur_ix_masked uint = cur_ix & ringbuffer_mask
+ var max_distance uint = brotli_min_size_t(cur_ix, max_backward_limit)
+ var max_len uint = num_bytes - pos
+ var max_zopfli_len uint = maxZopfliLen(params)
+ var max_iters uint = maxZopfliCandidates(params)
+ var min_len uint
+ var result uint = 0
+ var k uint
+ var gap uint = 0
+
+ evaluateNode(block_start, pos, max_backward_limit, gap, starting_dist_cache, model, queue, nodes)
+ {
+ var posdata *posData = startPosQueueAt(queue, 0)
+ var min_cost float32 = (posdata.cost + zopfliCostModelGetMinCostCmd(model) + zopfliCostModelGetLiteralCosts(model, posdata.pos, pos))
+ min_len = computeMinimumCopyLength(min_cost, nodes, num_bytes, pos)
+ }
+
+ /* Go over the command starting positions in order of increasing cost
+ difference. */
+ for k = 0; k < max_iters && k < startPosQueueSize(queue); k++ {
+ var posdata *posData = startPosQueueAt(queue, k)
+ var start uint = posdata.pos
+ var inscode uint16 = getInsertLengthCode(pos - start)
+ var start_costdiff float32 = posdata.costdiff
+ var base_cost float32 = start_costdiff + float32(getInsertExtra(inscode)) + zopfliCostModelGetLiteralCosts(model, 0, pos)
+ var best_len uint = min_len - 1
+ var j uint = 0
+ /* Look for last distance matches using the distance cache from this
+ starting position. */
+ for ; j < numDistanceShortCodes && best_len < max_len; j++ {
+ var idx uint = uint(kDistanceCacheIndex[j])
+ var backward uint = uint(posdata.distance_cache[idx] + kDistanceCacheOffset[j])
+ var prev_ix uint = cur_ix - backward
+ var len uint = 0
+ var continuation byte = ringbuffer[cur_ix_masked+best_len]
+ if cur_ix_masked+best_len > ringbuffer_mask {
+ break
+ }
+
+ if backward > max_distance+gap {
+ /* Word dictionary -> ignore. */
+ continue
+ }
+
+ if backward <= max_distance {
+ /* Regular backward reference. */
+ if prev_ix >= cur_ix {
+ continue
+ }
+
+ prev_ix &= ringbuffer_mask
+ if prev_ix+best_len > ringbuffer_mask || continuation != ringbuffer[prev_ix+best_len] {
+ continue
+ }
+
+ len = findMatchLengthWithLimit(ringbuffer[prev_ix:], ringbuffer[cur_ix_masked:], max_len)
+ } else {
+ continue
+ }
+ {
+ var dist_cost float32 = base_cost + zopfliCostModelGetDistanceCost(model, j)
+ var l uint
+ for l = best_len + 1; l <= len; l++ {
+ var copycode uint16 = getCopyLengthCode(l)
+ var cmdcode uint16 = combineLengthCodes(inscode, copycode, j == 0)
+ var tmp float32
+ if cmdcode < 128 {
+ tmp = base_cost
+ } else {
+ tmp = dist_cost
+ }
+ var cost float32 = tmp + float32(getCopyExtra(copycode)) + zopfliCostModelGetCommandCost(model, cmdcode)
+ if cost < nodes[pos+l].u.cost {
+ updateZopfliNode(nodes, pos, start, l, l, backward, j+1, cost)
+ result = brotli_max_size_t(result, l)
+ }
+
+ best_len = l
+ }
+ }
+ }
+
+ /* At higher iterations look only for new last distance matches, since
+ looking only for new command start positions with the same distances
+ does not help much. */
+ if k >= 2 {
+ continue
+ }
+ {
+ /* Loop through all possible copy lengths at this position. */
+ var len uint = min_len
+ for j = 0; j < num_matches; j++ {
+ var match backwardMatch = matches[j]
+ var dist uint = uint(match.distance)
+ var is_dictionary_match bool = (dist > max_distance+gap)
+ var dist_code uint = dist + numDistanceShortCodes - 1
+ var dist_symbol uint16
+ var distextra uint32
+ var distnumextra uint32
+ var dist_cost float32
+ var max_match_len uint
+ /* We already tried all possible last distance matches, so we can use
+ normal distance code here. */
+ prefixEncodeCopyDistance(dist_code, uint(params.dist.num_direct_distance_codes), uint(params.dist.distance_postfix_bits), &dist_symbol, &distextra)
+
+ distnumextra = uint32(dist_symbol) >> 10
+ dist_cost = base_cost + float32(distnumextra) + zopfliCostModelGetDistanceCost(model, uint(dist_symbol)&0x3FF)
+
+ /* Try all copy lengths up until the maximum copy length corresponding
+ to this distance. If the distance refers to the static dictionary, or
+ the maximum length is long enough, try only one maximum length. */
+ max_match_len = backwardMatchLength(&match)
+
+ if len < max_match_len && (is_dictionary_match || max_match_len > max_zopfli_len) {
+ len = max_match_len
+ }
+
+ for ; len <= max_match_len; len++ {
+ var len_code uint
+ if is_dictionary_match {
+ len_code = backwardMatchLengthCode(&match)
+ } else {
+ len_code = len
+ }
+ var copycode uint16 = getCopyLengthCode(len_code)
+ var cmdcode uint16 = combineLengthCodes(inscode, copycode, false)
+ var cost float32 = dist_cost + float32(getCopyExtra(copycode)) + zopfliCostModelGetCommandCost(model, cmdcode)
+ if cost < nodes[pos+len].u.cost {
+ updateZopfliNode(nodes, pos, start, uint(len), len_code, dist, 0, cost)
+ if len > result {
+ result = len
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return result
+}
+
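+/* Finds the last node reached by the shortest path, then walks backward,
+ recording each command's length in its predecessor's u.next field (so
+ nodes[0].u.next starts the forward chain) and counting the commands. */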
+func computeShortestPathFromNodes(num_bytes uint, nodes []zopfliNode) uint {
+ var index uint = num_bytes
+ var num_commands uint = 0
+ for nodes[index].dcode_insert_length&0x7FFFFFF == 0 && nodes[index].length == 1 {
+ index--
+ }
+ nodes[index].u.next = math.MaxUint32
+ for index != 0 {
+ var len uint = uint(zopfliNodeCommandLength(&nodes[index]))
+ index -= uint(len)
+ nodes[index].u.next = uint32(len)
+ num_commands++
+ }
+
+ return num_commands
+}
+
+/* REQUIRES: nodes != nil and len(nodes) >= num_bytes + 1 */
+func zopfliCreateCommands(num_bytes uint, block_start uint, nodes []zopfliNode, dist_cache []int, last_insert_len *uint, params *encoderParams, commands *[]command, num_literals *uint) {
+ var max_backward_limit uint = maxBackwardLimit(params.lgwin)
+ var pos uint = 0
+ var offset uint32 = nodes[0].u.next
+ var i uint
+ var gap uint = 0
+ for i = 0; offset != math.MaxUint32; i++ {
+ var next *zopfliNode = &nodes[uint32(pos)+offset]
+ var copy_length uint = uint(zopfliNodeCopyLength(next))
+ var insert_length uint = uint(next.dcode_insert_length & 0x7FFFFFF)
+ pos += insert_length
+ offset = next.u.next
+ if i == 0 {
+ insert_length += *last_insert_len
+ *last_insert_len = 0
+ }
+ {
+ var distance uint = uint(zopfliNodeCopyDistance(next))
+ var len_code uint = uint(zopfliNodeLengthCode(next))
+ var max_distance uint = brotli_min_size_t(block_start+pos, max_backward_limit)
+ var is_dictionary bool = (distance > max_distance+gap)
+ var dist_code uint = uint(zopfliNodeDistanceCode(next))
+			*commands = append(*commands, makeCommand(&params.dist, insert_length, copy_length, int(len_code)-int(copy_length), dist_code))
+
+ if !is_dictionary && dist_code > 0 {
+ dist_cache[3] = dist_cache[2]
+ dist_cache[2] = dist_cache[1]
+ dist_cache[1] = dist_cache[0]
+ dist_cache[0] = int(distance)
+ }
+ }
+
+ *num_literals += insert_length
+ pos += copy_length
+ }
+
+ *last_insert_len += num_bytes - pos
+}
+
+func zopfliIterate(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, gap uint, dist_cache []int, model *zopfliCostModel, num_matches []uint32, matches []backwardMatch, nodes []zopfliNode) uint {
+ var max_backward_limit uint = maxBackwardLimit(params.lgwin)
+ var max_zopfli_len uint = maxZopfliLen(params)
+ var queue startPosQueue
+ var cur_match_pos uint = 0
+ var i uint
+ nodes[0].length = 0
+ nodes[0].u.cost = 0
+ initStartPosQueue(&queue)
+ for i = 0; i+3 < num_bytes; i++ {
+ var skip uint = updateNodes(num_bytes, position, i, ringbuffer, ringbuffer_mask, params, max_backward_limit, dist_cache, uint(num_matches[i]), matches[cur_match_pos:], model, &queue, nodes)
+ if skip < longCopyQuickStep {
+ skip = 0
+ }
+ cur_match_pos += uint(num_matches[i])
+ if num_matches[i] == 1 && backwardMatchLength(&matches[cur_match_pos-1]) > max_zopfli_len {
+ skip = brotli_max_size_t(backwardMatchLength(&matches[cur_match_pos-1]), skip)
+ }
+
+ if skip > 1 {
+ skip--
+ for skip != 0 {
+ i++
+ if i+3 >= num_bytes {
+ break
+ }
+ evaluateNode(position, i, max_backward_limit, gap, dist_cache, model, &queue, nodes)
+ cur_match_pos += uint(num_matches[i])
+ skip--
+ }
+ }
+ }
+
+ return computeShortestPathFromNodes(num_bytes, nodes)
+}
+
+/* Computes the shortest path of commands from position to at most
+ position + num_bytes.
+
+   On return, the function's result is the number of commands found, and the
+   chain of nodes[i].u.next values gives the length of each command (copy
+   length plus insert length).
+ Note that the sum of the lengths of all commands can be less than num_bytes.
+
+ On return, the nodes[0..num_bytes] array will have the following
+ "ZopfliNode array invariant":
+ For each i in [1..num_bytes], if nodes[i].cost < kInfinity, then
+ (1) nodes[i].copy_length() >= 2
+ (2) nodes[i].command_length() <= i and
+ (3) nodes[i - nodes[i].command_length()].cost < kInfinity
+
+ REQUIRES: nodes != nil and len(nodes) >= num_bytes + 1 */
+func zopfliComputeShortestPath(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, dist_cache []int, hasher *h10, nodes []zopfliNode) uint {
+ var max_backward_limit uint = maxBackwardLimit(params.lgwin)
+ var max_zopfli_len uint = maxZopfliLen(params)
+ var model zopfliCostModel
+ var queue startPosQueue
+ var matches [2 * (maxNumMatchesH10 + 64)]backwardMatch
+ var store_end uint
+ if num_bytes >= hasher.StoreLookahead() {
+ store_end = position + num_bytes - hasher.StoreLookahead() + 1
+ } else {
+ store_end = position
+ }
+ var i uint
+ var gap uint = 0
+ var lz_matches_offset uint = 0
+ nodes[0].length = 0
+ nodes[0].u.cost = 0
+	initZopfliCostModel(&model, &params.dist, num_bytes)
+ zopfliCostModelSetFromLiteralCosts(&model, position, ringbuffer, ringbuffer_mask)
+ initStartPosQueue(&queue)
+ for i = 0; i+hasher.HashTypeLength()-1 < num_bytes; i++ {
+ var pos uint = position + i
+ var max_distance uint = brotli_min_size_t(pos, max_backward_limit)
+ var skip uint
+ var num_matches uint
+		num_matches = findAllMatchesH10(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, pos, num_bytes-i, max_distance, gap, params, matches[lz_matches_offset:])
+ if num_matches > 0 && backwardMatchLength(&matches[num_matches-1]) > max_zopfli_len {
+ matches[0] = matches[num_matches-1]
+ num_matches = 1
+ }
+
+ skip = updateNodes(num_bytes, position, i, ringbuffer, ringbuffer_mask, params, max_backward_limit, dist_cache, num_matches, matches[:], &model, &queue, nodes)
+ if skip < longCopyQuickStep {
+ skip = 0
+ }
+ if num_matches == 1 && backwardMatchLength(&matches[0]) > max_zopfli_len {
+ skip = brotli_max_size_t(backwardMatchLength(&matches[0]), skip)
+ }
+
+ if skip > 1 {
+ /* Add the tail of the copy to the hasher. */
+ hasher.StoreRange(ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+skip, store_end))
+
+ skip--
+ for skip != 0 {
+ i++
+ if i+hasher.HashTypeLength()-1 >= num_bytes {
+ break
+ }
+ evaluateNode(position, i, max_backward_limit, gap, dist_cache, &model, &queue, nodes)
+ skip--
+ }
+ }
+ }
+
+ cleanupZopfliCostModel(&model)
+ return computeShortestPathFromNodes(num_bytes, nodes)
+}
+
+func createZopfliBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, hasher *h10, dist_cache []int, last_insert_len *uint, commands *[]command, num_literals *uint) {
+ var nodes []zopfliNode
+ nodes = make([]zopfliNode, (num_bytes + 1))
+ initZopfliNodes(nodes, num_bytes+1)
+ zopfliComputeShortestPath(num_bytes, position, ringbuffer, ringbuffer_mask, params, dist_cache, hasher, nodes)
+ zopfliCreateCommands(num_bytes, position, nodes, dist_cache, last_insert_len, params, commands, num_literals)
+ nodes = nil
+}
+
+func createHqZopfliBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, hasher hasherHandle, dist_cache []int, last_insert_len *uint, commands *[]command, num_literals *uint) {
+ var max_backward_limit uint = maxBackwardLimit(params.lgwin)
+ var num_matches []uint32 = make([]uint32, num_bytes)
+ var matches_size uint = 4 * num_bytes
+ var store_end uint
+ if num_bytes >= hasher.StoreLookahead() {
+ store_end = position + num_bytes - hasher.StoreLookahead() + 1
+ } else {
+ store_end = position
+ }
+ var cur_match_pos uint = 0
+ var i uint
+ var orig_num_literals uint
+ var orig_last_insert_len uint
+ var orig_dist_cache [4]int
+ var orig_num_commands int
+ var model zopfliCostModel
+ var nodes []zopfliNode
+ var matches []backwardMatch = make([]backwardMatch, matches_size)
+ var gap uint = 0
+ var shadow_matches uint = 0
+ var new_array []backwardMatch
+ for i = 0; i+hasher.HashTypeLength()-1 < num_bytes; i++ {
+ var pos uint = position + i
+ var max_distance uint = brotli_min_size_t(pos, max_backward_limit)
+ var max_length uint = num_bytes - i
+ var num_found_matches uint
+ var cur_match_end uint
+ var j uint
+
+ /* Ensure that we have enough free slots. */
+ if matches_size < cur_match_pos+maxNumMatchesH10+shadow_matches {
+ var new_size uint = matches_size
+ if new_size == 0 {
+ new_size = cur_match_pos + maxNumMatchesH10 + shadow_matches
+ }
+
+ for new_size < cur_match_pos+maxNumMatchesH10+shadow_matches {
+ new_size *= 2
+ }
+
+ new_array = make([]backwardMatch, new_size)
+ if matches_size != 0 {
+ copy(new_array, matches[:matches_size])
+ }
+
+ matches = new_array
+ matches_size = new_size
+ }
+
+		num_found_matches = findAllMatchesH10(hasher.(*h10), &params.dictionary, ringbuffer, ringbuffer_mask, pos, max_length, max_distance, gap, params, matches[cur_match_pos+shadow_matches:])
+ cur_match_end = cur_match_pos + num_found_matches
+ for j = cur_match_pos; j+1 < cur_match_end; j++ {
+ assert(backwardMatchLength(&matches[j]) <= backwardMatchLength(&matches[j+1]))
+ }
+
+ num_matches[i] = uint32(num_found_matches)
+ if num_found_matches > 0 {
+ var match_len uint = backwardMatchLength(&matches[cur_match_end-1])
+ if match_len > maxZopfliLenQuality11 {
+ var skip uint = match_len - 1
+ matches[cur_match_pos] = matches[cur_match_end-1]
+ cur_match_pos++
+ num_matches[i] = 1
+
+ /* Add the tail of the copy to the hasher. */
+ hasher.StoreRange(ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+match_len, store_end))
+				for k := uint(1); k <= skip; k++ {
+					num_matches[i+k] = 0
+				}
+				i += skip
+ } else {
+ cur_match_pos = cur_match_end
+ }
+ }
+ }
+
+ orig_num_literals = *num_literals
+ orig_last_insert_len = *last_insert_len
+ copy(orig_dist_cache[:], dist_cache[:4])
+ orig_num_commands = len(*commands)
+ nodes = make([]zopfliNode, (num_bytes + 1))
+	initZopfliCostModel(&model, &params.dist, num_bytes)
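+	/* Two refinement passes: the first prices commands using literal-cost
+	   estimates, the second re-prices them using histograms of the commands
+	   produced by the first pass. */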
+ for i = 0; i < 2; i++ {
+ initZopfliNodes(nodes, num_bytes+1)
+ if i == 0 {
+ zopfliCostModelSetFromLiteralCosts(&model, position, ringbuffer, ringbuffer_mask)
+ } else {
+ zopfliCostModelSetFromCommands(&model, position, ringbuffer, ringbuffer_mask, (*commands)[orig_num_commands:], orig_last_insert_len)
+ }
+
+ *commands = (*commands)[:orig_num_commands]
+ *num_literals = orig_num_literals
+ *last_insert_len = orig_last_insert_len
+ copy(dist_cache, orig_dist_cache[:4])
+ zopfliIterate(num_bytes, position, ringbuffer, ringbuffer_mask, params, gap, dist_cache, &model, num_matches, matches, nodes)
+ zopfliCreateCommands(num_bytes, position, nodes, dist_cache, last_insert_len, params, commands, num_literals)
+ }
+
+ cleanupZopfliCostModel(&model)
+ nodes = nil
+ matches = nil
+ num_matches = nil
+}
diff --git a/vendor/github.com/andybalholm/brotli/bit_cost.go b/vendor/github.com/andybalholm/brotli/bit_cost.go
new file mode 100644
index 0000000..0005fc1
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/bit_cost.go
@@ -0,0 +1,436 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Functions to estimate the bit cost of Huffman trees. */
+func shannonEntropy(population []uint32, size uint, total *uint) float64 {
+ var sum uint = 0
+ var retval float64 = 0
+ var population_end []uint32 = population[size:]
+ var p uint
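+	/* c2go renders the C pointer comparison "population < population_end" as
+	   this capacity comparison; the loop consumes population until it reaches
+	   population_end. */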
+ for -cap(population) < -cap(population_end) {
+ p = uint(population[0])
+ population = population[1:]
+ sum += p
+ retval -= float64(p) * fastLog2(p)
+ }
+
+ if sum != 0 {
+ retval += float64(sum) * fastLog2(sum)
+ }
+ *total = sum
+ return retval
+}
+
+func bitsEntropy(population []uint32, size uint) float64 {
+ var sum uint
+ var retval float64 = shannonEntropy(population, size, &sum)
+ if retval < float64(sum) {
+ /* At least one bit per literal is needed. */
+ retval = float64(sum)
+ }
+
+ return retval
+}
+
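+/* Approximate base cost, in bits, of representing a histogram that contains
+ only one, two, three, or four distinct symbols. */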
+const kOneSymbolHistogramCost float64 = 12
+const kTwoSymbolHistogramCost float64 = 20
+const kThreeSymbolHistogramCost float64 = 28
+const kFourSymbolHistogramCost float64 = 37
+
+func populationCostLiteral(histogram *histogramLiteral) float64 {
+ var data_size uint = histogramDataSizeLiteral()
+ var count int = 0
+ var s [5]uint
+ var bits float64 = 0.0
+ var i uint
+ if histogram.total_count_ == 0 {
+ return kOneSymbolHistogramCost
+ }
+
+ for i = 0; i < data_size; i++ {
+ if histogram.data_[i] > 0 {
+ s[count] = i
+ count++
+ if count > 4 {
+ break
+ }
+ }
+ }
+
+ if count == 1 {
+ return kOneSymbolHistogramCost
+ }
+
+ if count == 2 {
+ return kTwoSymbolHistogramCost + float64(histogram.total_count_)
+ }
+
+ if count == 3 {
+ var histo0 uint32 = histogram.data_[s[0]]
+ var histo1 uint32 = histogram.data_[s[1]]
+ var histo2 uint32 = histogram.data_[s[2]]
+ var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2))
+ return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax)
+ }
+
+ if count == 4 {
+ var histo [4]uint32
+ var h23 uint32
+ var histomax uint32
+ for i = 0; i < 4; i++ {
+ histo[i] = histogram.data_[s[i]]
+ }
+
+ /* Sort */
+ for i = 0; i < 4; i++ {
+ var j uint
+ for j = i + 1; j < 4; j++ {
+ if histo[j] > histo[i] {
+ var tmp uint32 = histo[j]
+ histo[j] = histo[i]
+ histo[i] = tmp
+ }
+ }
+ }
+
+ h23 = histo[2] + histo[3]
+ histomax = brotli_max_uint32_t(h23, histo[0])
+ return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax)
+ }
+ {
+ var max_depth uint = 1
+ var depth_histo = [codeLengthCodes]uint32{0}
+ /* In this loop we compute the entropy of the histogram and simultaneously
+ build a simplified histogram of the code length codes where we use the
+ zero repeat code 17, but we don't use the non-zero repeat code 16. */
+
+ var log2total float64 = fastLog2(histogram.total_count_)
+ for i = 0; i < data_size; {
+ if histogram.data_[i] > 0 {
+ var log2p float64 = log2total - fastLog2(uint(histogram.data_[i]))
+ /* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
+ = log2(total_count) - log2(count(symbol)) */
+
+ var depth uint = uint(log2p + 0.5)
+ /* Approximate the bit depth by round(-log2(P(symbol))) */
+ bits += float64(histogram.data_[i]) * log2p
+
+ if depth > 15 {
+ depth = 15
+ }
+
+ if depth > max_depth {
+ max_depth = depth
+ }
+
+ depth_histo[depth]++
+ i++
+ } else {
+ var reps uint32 = 1
+ /* Compute the run length of zeros and add the appropriate number of 0
+ and 17 code length codes to the code length code histogram. */
+
+ var k uint
+ for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ {
+ reps++
+ }
+
+ i += uint(reps)
+ if i == data_size {
+ /* Don't add any cost for the last zero run, since these are encoded
+ only implicitly. */
+ break
+ }
+
+ if reps < 3 {
+ depth_histo[0] += reps
+ } else {
+ reps -= 2
+ for reps > 0 {
+ depth_histo[repeatZeroCodeLength]++
+
+ /* Add the 3 extra bits for the 17 code length code. */
+ bits += 3
+
+ reps >>= 3
+ }
+ }
+ }
+ }
+
+ /* Add the estimated encoding cost of the code length code histogram. */
+ bits += float64(18 + 2*max_depth)
+
+ /* Add the entropy of the code length code histogram. */
+ bits += bitsEntropy(depth_histo[:], codeLengthCodes)
+ }
+
+ return bits
+}
+
+func populationCostCommand(histogram *histogramCommand) float64 {
+ var data_size uint = histogramDataSizeCommand()
+ var count int = 0
+ var s [5]uint
+ var bits float64 = 0.0
+ var i uint
+ if histogram.total_count_ == 0 {
+ return kOneSymbolHistogramCost
+ }
+
+ for i = 0; i < data_size; i++ {
+ if histogram.data_[i] > 0 {
+ s[count] = i
+ count++
+ if count > 4 {
+ break
+ }
+ }
+ }
+
+ if count == 1 {
+ return kOneSymbolHistogramCost
+ }
+
+ if count == 2 {
+ return kTwoSymbolHistogramCost + float64(histogram.total_count_)
+ }
+
+ if count == 3 {
+ var histo0 uint32 = histogram.data_[s[0]]
+ var histo1 uint32 = histogram.data_[s[1]]
+ var histo2 uint32 = histogram.data_[s[2]]
+ var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2))
+ return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax)
+ }
+
+ if count == 4 {
+ var histo [4]uint32
+ var h23 uint32
+ var histomax uint32
+ for i = 0; i < 4; i++ {
+ histo[i] = histogram.data_[s[i]]
+ }
+
+ /* Sort */
+ for i = 0; i < 4; i++ {
+ var j uint
+ for j = i + 1; j < 4; j++ {
+ if histo[j] > histo[i] {
+ var tmp uint32 = histo[j]
+ histo[j] = histo[i]
+ histo[i] = tmp
+ }
+ }
+ }
+
+ h23 = histo[2] + histo[3]
+ histomax = brotli_max_uint32_t(h23, histo[0])
+ return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax)
+ }
+ {
+ var max_depth uint = 1
+ var depth_histo = [codeLengthCodes]uint32{0}
+ /* In this loop we compute the entropy of the histogram and simultaneously
+ build a simplified histogram of the code length codes where we use the
+ zero repeat code 17, but we don't use the non-zero repeat code 16. */
+
+ var log2total float64 = fastLog2(histogram.total_count_)
+ for i = 0; i < data_size; {
+ if histogram.data_[i] > 0 {
+ var log2p float64 = log2total - fastLog2(uint(histogram.data_[i]))
+ /* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
+ = log2(total_count) - log2(count(symbol)) */
+
+ var depth uint = uint(log2p + 0.5)
+ /* Approximate the bit depth by round(-log2(P(symbol))) */
+ bits += float64(histogram.data_[i]) * log2p
+
+ if depth > 15 {
+ depth = 15
+ }
+
+ if depth > max_depth {
+ max_depth = depth
+ }
+
+ depth_histo[depth]++
+ i++
+ } else {
+ var reps uint32 = 1
+ /* Compute the run length of zeros and add the appropriate number of 0
+ and 17 code length codes to the code length code histogram. */
+
+ var k uint
+ for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ {
+ reps++
+ }
+
+ i += uint(reps)
+ if i == data_size {
+ /* Don't add any cost for the last zero run, since these are encoded
+ only implicitly. */
+ break
+ }
+
+ if reps < 3 {
+ depth_histo[0] += reps
+ } else {
+ reps -= 2
+ for reps > 0 {
+ depth_histo[repeatZeroCodeLength]++
+
+ /* Add the 3 extra bits for the 17 code length code. */
+ bits += 3
+
+ reps >>= 3
+ }
+ }
+ }
+ }
+
+ /* Add the estimated encoding cost of the code length code histogram. */
+ bits += float64(18 + 2*max_depth)
+
+ /* Add the entropy of the code length code histogram. */
+ bits += bitsEntropy(depth_histo[:], codeLengthCodes)
+ }
+
+ return bits
+}
+
+func populationCostDistance(histogram *histogramDistance) float64 {
+ var data_size uint = histogramDataSizeDistance()
+ var count int = 0
+ var s [5]uint
+ var bits float64 = 0.0
+ var i uint
+ if histogram.total_count_ == 0 {
+ return kOneSymbolHistogramCost
+ }
+
+ for i = 0; i < data_size; i++ {
+ if histogram.data_[i] > 0 {
+ s[count] = i
+ count++
+ if count > 4 {
+ break
+ }
+ }
+ }
+
+ if count == 1 {
+ return kOneSymbolHistogramCost
+ }
+
+ if count == 2 {
+ return kTwoSymbolHistogramCost + float64(histogram.total_count_)
+ }
+
+ if count == 3 {
+ var histo0 uint32 = histogram.data_[s[0]]
+ var histo1 uint32 = histogram.data_[s[1]]
+ var histo2 uint32 = histogram.data_[s[2]]
+ var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2))
+ return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax)
+ }
+
+ if count == 4 {
+ var histo [4]uint32
+ var h23 uint32
+ var histomax uint32
+ for i = 0; i < 4; i++ {
+ histo[i] = histogram.data_[s[i]]
+ }
+
+ /* Sort */
+ for i = 0; i < 4; i++ {
+ var j uint
+ for j = i + 1; j < 4; j++ {
+ if histo[j] > histo[i] {
+ var tmp uint32 = histo[j]
+ histo[j] = histo[i]
+ histo[i] = tmp
+ }
+ }
+ }
+
+ h23 = histo[2] + histo[3]
+ histomax = brotli_max_uint32_t(h23, histo[0])
+ return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax)
+ }
+ {
+ var max_depth uint = 1
+ var depth_histo = [codeLengthCodes]uint32{0}
+ /* In this loop we compute the entropy of the histogram and simultaneously
+ build a simplified histogram of the code length codes where we use the
+ zero repeat code 17, but we don't use the non-zero repeat code 16. */
+
+ var log2total float64 = fastLog2(histogram.total_count_)
+ for i = 0; i < data_size; {
+ if histogram.data_[i] > 0 {
+ var log2p float64 = log2total - fastLog2(uint(histogram.data_[i]))
+ /* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
+ = log2(total_count) - log2(count(symbol)) */
+
+ var depth uint = uint(log2p + 0.5)
+ /* Approximate the bit depth by round(-log2(P(symbol))) */
+ bits += float64(histogram.data_[i]) * log2p
+
+ if depth > 15 {
+ depth = 15
+ }
+
+ if depth > max_depth {
+ max_depth = depth
+ }
+
+ depth_histo[depth]++
+ i++
+ } else {
+ var reps uint32 = 1
+ /* Compute the run length of zeros and add the appropriate number of 0
+ and 17 code length codes to the code length code histogram. */
+
+ var k uint
+ for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ {
+ reps++
+ }
+
+ i += uint(reps)
+ if i == data_size {
+ /* Don't add any cost for the last zero run, since these are encoded
+ only implicitly. */
+ break
+ }
+
+ if reps < 3 {
+ depth_histo[0] += reps
+ } else {
+ reps -= 2
+ for reps > 0 {
+ depth_histo[repeatZeroCodeLength]++
+
+ /* Add the 3 extra bits for the 17 code length code. */
+ bits += 3
+
+ reps >>= 3
+ }
+ }
+ }
+ }
+
+ /* Add the estimated encoding cost of the code length code histogram. */
+ bits += float64(18 + 2*max_depth)
+
+ /* Add the entropy of the code length code histogram. */
+ bits += bitsEntropy(depth_histo[:], codeLengthCodes)
+ }
+
+ return bits
+}
diff --git a/vendor/github.com/andybalholm/brotli/bit_reader.go b/vendor/github.com/andybalholm/brotli/bit_reader.go
new file mode 100644
index 0000000..fba8687
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/bit_reader.go
@@ -0,0 +1,266 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Bit reading helpers */
+
+const shortFillBitWindowRead = (8 >> 1)
+
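+/* kBitMask[n] has the low n bits set. */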
+var kBitMask = [33]uint32{
+ 0x00000000,
+ 0x00000001,
+ 0x00000003,
+ 0x00000007,
+ 0x0000000F,
+ 0x0000001F,
+ 0x0000003F,
+ 0x0000007F,
+ 0x000000FF,
+ 0x000001FF,
+ 0x000003FF,
+ 0x000007FF,
+ 0x00000FFF,
+ 0x00001FFF,
+ 0x00003FFF,
+ 0x00007FFF,
+ 0x0000FFFF,
+ 0x0001FFFF,
+ 0x0003FFFF,
+ 0x0007FFFF,
+ 0x000FFFFF,
+ 0x001FFFFF,
+ 0x003FFFFF,
+ 0x007FFFFF,
+ 0x00FFFFFF,
+ 0x01FFFFFF,
+ 0x03FFFFFF,
+ 0x07FFFFFF,
+ 0x0FFFFFFF,
+ 0x1FFFFFFF,
+ 0x3FFFFFFF,
+ 0x7FFFFFFF,
+ 0xFFFFFFFF,
+}
+
+func bitMask(n uint32) uint32 {
+ return kBitMask[n]
+}
+
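+/* bitReader buffers input bits: val_ holds prefetched bits, bit_pos_ is the
+ number of bits of val_ already consumed (so val_ >> bit_pos_ yields the
+ unread bits), and byte_pos indexes the next input byte to load. */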
+type bitReader struct {
+ val_ uint64
+ bit_pos_ uint32
+ input []byte
+ input_len uint
+ byte_pos uint
+}
+
+type bitReaderState struct {
+ val_ uint64
+ bit_pos_ uint32
+ input []byte
+ input_len uint
+ byte_pos uint
+}
+
+func bitReaderSaveState(from *bitReader, to *bitReaderState) {
+ to.val_ = from.val_
+ to.bit_pos_ = from.bit_pos_
+ to.input = from.input
+ to.input_len = from.input_len
+ to.byte_pos = from.byte_pos
+}
+
+func bitReaderRestoreState(to *bitReader, from *bitReaderState) {
+ to.val_ = from.val_
+ to.bit_pos_ = from.bit_pos_
+ to.input = from.input
+ to.input_len = from.input_len
+ to.byte_pos = from.byte_pos
+}
+
+func getAvailableBits(br *bitReader) uint32 {
+ return 64 - br.bit_pos_
+}
+
+/* Returns the number of unread bytes the bit reader still has buffered from
+   the BrotliInput, including whole bytes in br->val_. */
+func getRemainingBytes(br *bitReader) uint {
+ return uint(uint32(br.input_len-br.byte_pos) + (getAvailableBits(br) >> 3))
+}
+
+/* Checks if there are at least |num| bytes left in the input ring-buffer
+ (excluding the bits remaining in br->val_). */
+func checkInputAmount(br *bitReader, num uint) bool {
+ return br.input_len-br.byte_pos >= num
+}
+
+/* Guarantees that there are at least |n_bits| + 1 bits in accumulator.
+ Precondition: accumulator contains at least 1 bit.
+   |n_bits| should be in the range [1..24] for a regular build. For a portable
+   non-64-bit little-endian build, only 16 bits are safe to request. */
+func fillBitWindow(br *bitReader, n_bits uint32) {
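+	/* This Go port refills the accumulator 32 bits at a time whenever fewer
+	   than 32 bits remain; n_bits is kept for parity with the C macro but is
+	   not otherwise used. */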
+ if br.bit_pos_ >= 32 {
+ br.val_ >>= 32
+ br.bit_pos_ ^= 32 /* here same as -= 32 because of the if condition */
+ br.val_ |= (uint64(binary.LittleEndian.Uint32(br.input[br.byte_pos:]))) << 32
+ br.byte_pos += 4
+ }
+}
+
+/* Mostly like BrotliFillBitWindow, but guarantees only 16 bits and reads no
+ more than BROTLI_SHORT_FILL_BIT_WINDOW_READ bytes of input. */
+func fillBitWindow16(br *bitReader) {
+ fillBitWindow(br, 17)
+}
+
+/* Tries to pull one byte of input to accumulator.
+ Returns false if there is no input available. */
+func pullByte(br *bitReader) bool {
+ if br.byte_pos == br.input_len {
+ return false
+ }
+
+ br.val_ >>= 8
+ br.val_ |= (uint64(br.input[br.byte_pos])) << 56
+ br.bit_pos_ -= 8
+ br.byte_pos++
+ return true
+}
+
+/* Returns currently available bits.
+ The number of valid bits could be calculated by BrotliGetAvailableBits. */
+func getBitsUnmasked(br *bitReader) uint64 {
+ return br.val_ >> br.bit_pos_
+}
+
+/* Like BrotliGetBits, but does not mask the result.
+ The result contains at least 16 valid bits. */
+func get16BitsUnmasked(br *bitReader) uint32 {
+ fillBitWindow(br, 16)
+ return uint32(getBitsUnmasked(br))
+}
+
+/* Returns the specified number of bits from |br| without advancing bit
+ position. */
+func getBits(br *bitReader, n_bits uint32) uint32 {
+ fillBitWindow(br, n_bits)
+ return uint32(getBitsUnmasked(br)) & bitMask(n_bits)
+}
+
+/* Tries to peek the specified number of bits. Returns false if there
+   is not enough input. */
+func safeGetBits(br *bitReader, n_bits uint32, val *uint32) bool {
+ for getAvailableBits(br) < n_bits {
+ if !pullByte(br) {
+ return false
+ }
+ }
+
+ *val = uint32(getBitsUnmasked(br)) & bitMask(n_bits)
+ return true
+}
+
+/* Advances the bit pos by |n_bits|. */
+func dropBits(br *bitReader, n_bits uint32) {
+ br.bit_pos_ += n_bits
+}
+
+func bitReaderUnload(br *bitReader) {
+ var unused_bytes uint32 = getAvailableBits(br) >> 3
+ var unused_bits uint32 = unused_bytes << 3
+ br.byte_pos -= uint(unused_bytes)
+ if unused_bits == 64 {
+ br.val_ = 0
+ } else {
+ br.val_ <<= unused_bits
+ }
+
+ br.bit_pos_ += unused_bits
+}
+
+/* Reads the specified number of bits from |br| and advances the bit pos.
+ Precondition: accumulator MUST contain at least |n_bits|. */
+func takeBits(br *bitReader, n_bits uint32, val *uint32) {
+ *val = uint32(getBitsUnmasked(br)) & bitMask(n_bits)
+ dropBits(br, n_bits)
+}
+
+/* Reads the specified number of bits from |br| and advances the bit pos.
+ Assumes that there is enough input to perform BrotliFillBitWindow. */
+func readBits(br *bitReader, n_bits uint32) uint32 {
+ var val uint32
+ fillBitWindow(br, n_bits)
+ takeBits(br, n_bits, &val)
+ return val
+}
+
+/* Tries to read the specified number of bits. Returns false if there
+   is not enough input. |n_bits| MUST be positive. */
+func safeReadBits(br *bitReader, n_bits uint32, val *uint32) bool {
+ for getAvailableBits(br) < n_bits {
+ if !pullByte(br) {
+ return false
+ }
+ }
+
+ takeBits(br, n_bits, val)
+ return true
+}
+
+/* Advances the bit reader position to the next byte boundary and verifies
+ that any skipped bits are set to zero. */
+func bitReaderJumpToByteBoundary(br *bitReader) bool {
+ var pad_bits_count uint32 = getAvailableBits(br) & 0x7
+ var pad_bits uint32 = 0
+ if pad_bits_count != 0 {
+ takeBits(br, pad_bits_count, &pad_bits)
+ }
+
+ return pad_bits == 0
+}
+
+/* Copies remaining input bytes stored in the bit reader to the output. Value
+ |num| may not be larger than BrotliGetRemainingBytes. The bit reader must be
+ warmed up again after this. */
+func copyBytes(dest []byte, br *bitReader, num uint) {
+ for getAvailableBits(br) >= 8 && num > 0 {
+ dest[0] = byte(getBitsUnmasked(br))
+ dropBits(br, 8)
+ dest = dest[1:]
+ num--
+ }
+
+ copy(dest, br.input[br.byte_pos:][:num])
+ br.byte_pos += num
+}
+
+/* Initializes the BrotliBitReader fields. */
+func initBitReader(br *bitReader) {
+ br.val_ = 0
+ br.bit_pos_ = 64
+}
+
+/* Ensures that accumulator is not empty.
+   May consume up to sizeof(brotli_reg_t) - 1 bytes of input.
+   Returns false if data is required but there is no input available.
+   For BROTLI_ALIGNED_READ this function also prepares bit reader for aligned
+   reading. */
+func warmupBitReader(br *bitReader) bool {
+	/* Fixing alignment after unaligned BrotliFillWindow would result in
+	   accumulator overflow. If unalignment is caused by BrotliSafeReadBits,
+	   then there is enough space in the accumulator to fix alignment. */
+ if getAvailableBits(br) == 0 {
+ if !pullByte(br) {
+ return false
+ }
+ }
+
+ return true
+}
diff --git a/vendor/github.com/andybalholm/brotli/block_splitter.go b/vendor/github.com/andybalholm/brotli/block_splitter.go
new file mode 100644
index 0000000..978a131
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/block_splitter.go
@@ -0,0 +1,144 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Block split point selection utilities. */
+
+type blockSplit struct {
+ num_types uint
+ num_blocks uint
+ types []byte
+ lengths []uint32
+ types_alloc_size uint
+ lengths_alloc_size uint
+}
+
+const (
+ kMaxLiteralHistograms uint = 100
+ kMaxCommandHistograms uint = 50
+ kLiteralBlockSwitchCost float64 = 28.1
+ kCommandBlockSwitchCost float64 = 13.5
+ kDistanceBlockSwitchCost float64 = 14.6
+ kLiteralStrideLength uint = 70
+ kCommandStrideLength uint = 40
+ kSymbolsPerLiteralHistogram uint = 544
+ kSymbolsPerCommandHistogram uint = 530
+ kSymbolsPerDistanceHistogram uint = 544
+ kMinLengthForBlockSplitting uint = 128
+ kIterMulForRefining uint = 2
+ kMinItersForRefining uint = 100
+)
+
+func countLiterals(cmds []command) uint {
+ var total_length uint = 0
+ /* Count how many we have. */
+
+ for i := range cmds {
+ total_length += uint(cmds[i].insert_len_)
+ }
+
+ return total_length
+}
+
+func copyLiteralsToByteArray(cmds []command, data []byte, offset uint, mask uint, literals []byte) {
+ var pos uint = 0
+ var from_pos uint = offset & mask
+ for i := range cmds {
+ var insert_len uint = uint(cmds[i].insert_len_)
+ if from_pos+insert_len > mask {
+ var head_size uint = mask + 1 - from_pos
+ copy(literals[pos:], data[from_pos:][:head_size])
+ from_pos = 0
+ pos += head_size
+ insert_len -= head_size
+ }
+
+ if insert_len > 0 {
+ copy(literals[pos:], data[from_pos:][:insert_len])
+ pos += insert_len
+ }
+
+ from_pos = uint((uint32(from_pos+insert_len) + commandCopyLen(&cmds[i])) & uint32(mask))
+ }
+}
+
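+/* Multiplicative congruential PRNG; the multiplication wraps modulo 2^32. */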
+func myRand(seed *uint32) uint32 {
+ /* Initial seed should be 7. In this case, loop length is (1 << 29). */
+ *seed *= 16807
+
+ return *seed
+}
+
+func bitCost(count uint) float64 {
+ if count == 0 {
+ return -2.0
+ } else {
+ return fastLog2(count)
+ }
+}
+
+const histogramsPerBatch = 64
+
+const clustersPerBatch = 16
+
+func initBlockSplit(self *blockSplit) {
+ self.num_types = 0
+ self.num_blocks = 0
+ self.types = self.types[:0]
+ self.lengths = self.lengths[:0]
+ self.types_alloc_size = 0
+ self.lengths_alloc_size = 0
+}
+
+func splitBlock(cmds []command, data []byte, pos uint, mask uint, params *encoderParams, literal_split *blockSplit, insert_and_copy_split *blockSplit, dist_split *blockSplit) {
+ {
+ var literals_count uint = countLiterals(cmds)
+ var literals []byte = make([]byte, literals_count)
+
+ /* Create a contiguous array of literals. */
+ copyLiteralsToByteArray(cmds, data, pos, mask, literals)
+
+ /* Create the block split on the array of literals.
+ Literal histograms have alphabet size 256. */
+ splitByteVectorLiteral(literals, literals_count, kSymbolsPerLiteralHistogram, kMaxLiteralHistograms, kLiteralStrideLength, kLiteralBlockSwitchCost, params, literal_split)
+
+ literals = nil
+ }
+ {
+ var insert_and_copy_codes []uint16 = make([]uint16, len(cmds))
+ /* Compute prefix codes for commands. */
+
+ for i := range cmds {
+ insert_and_copy_codes[i] = cmds[i].cmd_prefix_
+ }
+
+ /* Create the block split on the array of command prefixes. */
+ splitByteVectorCommand(insert_and_copy_codes, kSymbolsPerCommandHistogram, kMaxCommandHistograms, kCommandStrideLength, kCommandBlockSwitchCost, params, insert_and_copy_split)
+
+ /* TODO: reuse for distances? */
+
+ insert_and_copy_codes = nil
+ }
+ {
+ var distance_prefixes []uint16 = make([]uint16, len(cmds))
+ var j uint = 0
+ /* Create a contiguous array of distance prefixes. */
+
+ for i := range cmds {
+ var cmd *command = &cmds[i]
+ if commandCopyLen(cmd) != 0 && cmd.cmd_prefix_ >= 128 {
+ distance_prefixes[j] = cmd.dist_prefix_ & 0x3FF
+ j++
+ }
+ }
+
+ /* Create the block split on the array of distance prefixes. */
+ splitByteVectorDistance(distance_prefixes, j, kSymbolsPerDistanceHistogram, kMaxCommandHistograms, kCommandStrideLength, kDistanceBlockSwitchCost, params, dist_split)
+
+ distance_prefixes = nil
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/block_splitter_command.go b/vendor/github.com/andybalholm/brotli/block_splitter_command.go
new file mode 100644
index 0000000..9dec13e
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/block_splitter_command.go
@@ -0,0 +1,434 @@
+package brotli
+
+import "math"
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+func initialEntropyCodesCommand(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramCommand) {
+ var seed uint32 = 7
+ var block_length uint = length / num_histograms
+ var i uint
+ clearHistogramsCommand(histograms, num_histograms)
+ for i = 0; i < num_histograms; i++ {
+ var pos uint = length * i / num_histograms
+ if i != 0 {
+ pos += uint(myRand(&seed) % uint32(block_length))
+ }
+
+ if pos+stride >= length {
+ pos = length - stride - 1
+ }
+
+ histogramAddVectorCommand(&histograms[i], data[pos:], stride)
+ }
+}
+
+func randomSampleCommand(seed *uint32, data []uint16, length uint, stride uint, sample *histogramCommand) {
+ var pos uint = 0
+ if stride >= length {
+ stride = length
+ } else {
+ pos = uint(myRand(seed) % uint32(length-stride+1))
+ }
+
+ histogramAddVectorCommand(sample, data[pos:], stride)
+}
+
+func refineEntropyCodesCommand(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramCommand) {
+ var iters uint = kIterMulForRefining*length/stride + kMinItersForRefining
+ var seed uint32 = 7
+ var iter uint
+ iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms
+ for iter = 0; iter < iters; iter++ {
+ var sample histogramCommand
+ histogramClearCommand(&sample)
+ randomSampleCommand(&seed, data, length, stride, &sample)
+ histogramAddHistogramCommand(&histograms[iter%num_histograms], &sample)
+ }
+}
+
+/* Assigns a block id from the range [0, num_histograms) to each data element
+ in data[0..length) and fills in block_id[0..length) with the assigned values.
+ Returns the number of blocks, i.e. one plus the number of block switches. */
+func findBlocksCommand(data []uint16, length uint, block_switch_bitcost float64, num_histograms uint, histograms []histogramCommand, insert_cost []float64, cost []float64, switch_signal []byte, block_id []byte) uint {
+ var data_size uint = histogramDataSizeCommand()
+ var bitmaplen uint = (num_histograms + 7) >> 3
+ var num_blocks uint = 1
+ var i uint
+ var j uint
+ assert(num_histograms <= 256)
+ if num_histograms <= 1 {
+ for i = 0; i < length; i++ {
+ block_id[i] = 0
+ }
+
+ return 1
+ }
+
+ for i := 0; i < int(data_size*num_histograms); i++ {
+ insert_cost[i] = 0
+ }
+ for i = 0; i < num_histograms; i++ {
+ insert_cost[i] = fastLog2(uint(uint32(histograms[i].total_count_)))
+ }
+
+ for i = data_size; i != 0; {
+ i--
+ for j = 0; j < num_histograms; j++ {
+ insert_cost[i*num_histograms+j] = insert_cost[j] - bitCost(uint(histograms[j].data_[i]))
+ }
+ }
+
+ for i := 0; i < int(num_histograms); i++ {
+ cost[i] = 0
+ }
+ for i := 0; i < int(length*bitmaplen); i++ {
+ switch_signal[i] = 0
+ }
+
+ /* After each iteration of this loop, cost[k] will contain the difference
+ between the minimum cost of arriving at the current byte position using
+ entropy code k, and the minimum cost of arriving at the current byte
+ position. This difference is capped at the block switch cost, and if it
+ reaches block switch cost, it means that when we trace back from the last
+ position, we need to switch here. */
+ for i = 0; i < length; i++ {
+ var byte_ix uint = i
+ var ix uint = byte_ix * bitmaplen
+ var insert_cost_ix uint = uint(data[byte_ix]) * num_histograms
+ var min_cost float64 = 1e99
+ var block_switch_cost float64 = block_switch_bitcost
+ var k uint
+ for k = 0; k < num_histograms; k++ {
+ /* We are coding the symbol in data[byte_ix] with entropy code k. */
+ cost[k] += insert_cost[insert_cost_ix+k]
+
+ if cost[k] < min_cost {
+ min_cost = cost[k]
+ block_id[byte_ix] = byte(k)
+ }
+ }
+
+ /* Lower the switch cost near the start, yielding more blocks at the beginning. */
+ if byte_ix < 2000 {
+ block_switch_cost *= 0.77 + 0.07*float64(byte_ix)/2000
+ }
+
+ for k = 0; k < num_histograms; k++ {
+ cost[k] -= min_cost
+ if cost[k] >= block_switch_cost {
+ var mask byte = byte(1 << (k & 7))
+ cost[k] = block_switch_cost
+ assert(k>>3 < bitmaplen)
+ switch_signal[ix+(k>>3)] |= mask
+ /* Trace back from the last position and switch at the marked places. */
+ }
+ }
+ }
+ {
+ var byte_ix uint = length - 1
+ var ix uint = byte_ix * bitmaplen
+ var cur_id byte = block_id[byte_ix]
+ for byte_ix > 0 {
+ var mask byte = byte(1 << (cur_id & 7))
+ assert(uint(cur_id)>>3 < bitmaplen)
+ byte_ix--
+ ix -= bitmaplen
+ if switch_signal[ix+uint(cur_id>>3)]&mask != 0 {
+ if cur_id != block_id[byte_ix] {
+ cur_id = block_id[byte_ix]
+ num_blocks++
+ }
+ }
+
+ block_id[byte_ix] = cur_id
+ }
+ }
+
+ return num_blocks
+}
+
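The comment inside findBlocksCommand describes a Viterbi-style pass whose per-histogram cost is capped at the switch cost; positions where the cap is hit are the only candidate switch points for the backward trace. A toy reduction of that scheme, with hypothetical fixed per-symbol costs instead of the vendored histograms:

package main

import "fmt"

func main() {
	// bitCost[k][sym]: cost of coding sym with entropy code k.
	bitCost := [2][2]float64{
		{1, 4}, // code 0 is cheap for symbol 0
		{4, 1}, // code 1 is cheap for symbol 1
	}
	data := []int{0, 0, 0, 1, 1, 1}
	const switchCost = 4.0

	var cost [2]float64
	best := make([]int, len(data))
	marked := make([][2]bool, len(data))

	// Forward pass: track each code's distance from the running optimum,
	// capped at switchCost, and mark positions where the cap is reached.
	for i, sym := range data {
		minCost := 1e99
		for k := 0; k < 2; k++ {
			cost[k] += bitCost[k][sym]
			if cost[k] < minCost {
				minCost = cost[k]
				best[i] = k
			}
		}
		for k := 0; k < 2; k++ {
			cost[k] -= minCost
			if cost[k] >= switchCost {
				cost[k] = switchCost
				marked[i][k] = true
			}
		}
	}

	// Backward trace: keep the current code unless a switch was marked.
	ids := make([]int, len(data))
	cur := best[len(data)-1]
	ids[len(data)-1] = cur
	for i := len(data) - 2; i >= 0; i-- {
		if marked[i][cur] && best[i] != cur {
			cur = best[i]
		}
		ids[i] = cur
	}
	fmt.Println(ids) // [0 0 0 1 1 1]: one switch, found by the traceback
}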
+var remapBlockIdsCommand_kInvalidId uint16 = 256
+
+func remapBlockIdsCommand(block_ids []byte, length uint, new_id []uint16, num_histograms uint) uint {
+ var next_id uint16 = 0
+ var i uint
+ for i = 0; i < num_histograms; i++ {
+ new_id[i] = remapBlockIdsCommand_kInvalidId
+ }
+
+ for i = 0; i < length; i++ {
+ assert(uint(block_ids[i]) < num_histograms)
+ if new_id[block_ids[i]] == remapBlockIdsCommand_kInvalidId {
+ new_id[block_ids[i]] = next_id
+ next_id++
+ }
+ }
+
+ for i = 0; i < length; i++ {
+ block_ids[i] = byte(new_id[block_ids[i]])
+ assert(uint(block_ids[i]) < num_histograms)
+ }
+
+ assert(uint(next_id) <= num_histograms)
+ return uint(next_id)
+}
+
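After findBlocksCommand, some of the num_histograms ids may end up unused; remapBlockIdsCommand renumbers the surviving ids densely, in order of first appearance, and returns the new histogram count. A standalone equivalent without the asserts:

package main

import "fmt"

// remap compacts sparse ids into a dense range ordered by first use,
// mirroring remapBlockIdsCommand.
func remap(ids []byte, numHistograms int) int {
	const invalid = 256
	newID := make([]uint16, numHistograms)
	for i := range newID {
		newID[i] = invalid
	}
	next := uint16(0)
	for _, id := range ids {
		if newID[id] == invalid {
			newID[id] = next
			next++
		}
	}
	for i, id := range ids {
		ids[i] = byte(newID[id])
	}
	return int(next)
}

func main() {
	ids := []byte{2, 2, 0, 0, 2}
	n := remap(ids, 3)
	fmt.Println(ids, n) // [0 0 1 1 0] 2: histogram 1 was never referenced
}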
+func buildBlockHistogramsCommand(data []uint16, length uint, block_ids []byte, num_histograms uint, histograms []histogramCommand) {
+ var i uint
+ clearHistogramsCommand(histograms, num_histograms)
+ for i = 0; i < length; i++ {
+ histogramAddCommand(&histograms[block_ids[i]], uint(data[i]))
+ }
+}
+
+var clusterBlocksCommand_kInvalidIndex uint32 = math.MaxUint32
+
+func clusterBlocksCommand(data []uint16, length uint, num_blocks uint, block_ids []byte, split *blockSplit) {
+ var histogram_symbols []uint32 = make([]uint32, num_blocks)
+ var block_lengths []uint32 = make([]uint32, num_blocks)
+ var expected_num_clusters uint = clustersPerBatch * (num_blocks + histogramsPerBatch - 1) / histogramsPerBatch
+ var all_histograms_size uint = 0
+ var all_histograms_capacity uint = expected_num_clusters
+ var all_histograms []histogramCommand = make([]histogramCommand, all_histograms_capacity)
+ var cluster_size_size uint = 0
+ var cluster_size_capacity uint = expected_num_clusters
+ var cluster_size []uint32 = make([]uint32, cluster_size_capacity)
+ var num_clusters uint = 0
+ var histograms []histogramCommand = make([]histogramCommand, brotli_min_size_t(num_blocks, histogramsPerBatch))
+ var max_num_pairs uint = histogramsPerBatch * histogramsPerBatch / 2
+ var pairs_capacity uint = max_num_pairs + 1
+ var pairs []histogramPair = make([]histogramPair, pairs_capacity)
+ var pos uint = 0
+ var clusters []uint32
+ var num_final_clusters uint
+ var new_index []uint32
+ var i uint
+ var sizes = [histogramsPerBatch]uint32{0}
+ var new_clusters = [histogramsPerBatch]uint32{0}
+ var symbols = [histogramsPerBatch]uint32{0}
+ var remap = [histogramsPerBatch]uint32{0}
+
+ for i := 0; i < int(num_blocks); i++ {
+ block_lengths[i] = 0
+ }
+ {
+ var block_idx uint = 0
+ for i = 0; i < length; i++ {
+ assert(block_idx < num_blocks)
+ block_lengths[block_idx]++
+ if i+1 == length || block_ids[i] != block_ids[i+1] {
+ block_idx++
+ }
+ }
+
+ assert(block_idx == num_blocks)
+ }
+
+ for i = 0; i < num_blocks; i += histogramsPerBatch {
+ var num_to_combine uint = brotli_min_size_t(num_blocks-i, histogramsPerBatch)
+ var num_new_clusters uint
+ var j uint
+ for j = 0; j < num_to_combine; j++ {
+ var k uint
+ histogramClearCommand(&histograms[j])
+ for k = 0; uint32(k) < block_lengths[i+j]; k++ {
+ histogramAddCommand(&histograms[j], uint(data[pos]))
+ pos++
+ }
+
+ histograms[j].bit_cost_ = populationCostCommand(&histograms[j])
+ new_clusters[j] = uint32(j)
+ symbols[j] = uint32(j)
+ sizes[j] = 1
+ }
+
+ num_new_clusters = histogramCombineCommand(histograms, sizes[:], symbols[:], new_clusters[:], pairs, num_to_combine, num_to_combine, histogramsPerBatch, max_num_pairs)
+ if all_histograms_capacity < (all_histograms_size + num_new_clusters) {
+ var _new_size uint
+ if all_histograms_capacity == 0 {
+ _new_size = all_histograms_size + num_new_clusters
+ } else {
+ _new_size = all_histograms_capacity
+ }
+ var new_array []histogramCommand
+ for _new_size < (all_histograms_size + num_new_clusters) {
+ _new_size *= 2
+ }
+ new_array = make([]histogramCommand, _new_size)
+ if all_histograms_capacity != 0 {
+ copy(new_array, all_histograms[:all_histograms_capacity])
+ }
+
+ all_histograms = new_array
+ all_histograms_capacity = _new_size
+ }
+
+ brotli_ensure_capacity_uint32_t(&cluster_size, &cluster_size_capacity, cluster_size_size+num_new_clusters)
+ for j = 0; j < num_new_clusters; j++ {
+ all_histograms[all_histograms_size] = histograms[new_clusters[j]]
+ all_histograms_size++
+ cluster_size[cluster_size_size] = sizes[new_clusters[j]]
+ cluster_size_size++
+ remap[new_clusters[j]] = uint32(j)
+ }
+
+ for j = 0; j < num_to_combine; j++ {
+ histogram_symbols[i+j] = uint32(num_clusters) + remap[symbols[j]]
+ }
+
+ num_clusters += num_new_clusters
+ assert(num_clusters == cluster_size_size)
+ assert(num_clusters == all_histograms_size)
+ }
+
+ histograms = nil
+
+ max_num_pairs = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters)
+ if pairs_capacity < max_num_pairs+1 {
+ pairs = nil
+ pairs = make([]histogramPair, (max_num_pairs + 1))
+ }
+
+ clusters = make([]uint32, num_clusters)
+ for i = 0; i < num_clusters; i++ {
+ clusters[i] = uint32(i)
+ }
+
+ num_final_clusters = histogramCombineCommand(all_histograms, cluster_size, histogram_symbols, clusters, pairs, num_clusters, num_blocks, maxNumberOfBlockTypes, max_num_pairs)
+ pairs = nil
+ cluster_size = nil
+
+ new_index = make([]uint32, num_clusters)
+ for i = 0; i < num_clusters; i++ {
+ new_index[i] = clusterBlocksCommand_kInvalidIndex
+ }
+ pos = 0
+ {
+ var next_index uint32 = 0
+ for i = 0; i < num_blocks; i++ {
+ var histo histogramCommand
+ var j uint
+ var best_out uint32
+ var best_bits float64
+ histogramClearCommand(&histo)
+ for j = 0; uint32(j) < block_lengths[i]; j++ {
+ histogramAddCommand(&histo, uint(data[pos]))
+ pos++
+ }
+
+ if i == 0 {
+ best_out = histogram_symbols[0]
+ } else {
+ best_out = histogram_symbols[i-1]
+ }
+ best_bits = histogramBitCostDistanceCommand(&histo, &all_histograms[best_out])
+ for j = 0; j < num_final_clusters; j++ {
+ var cur_bits float64 = histogramBitCostDistanceCommand(&histo, &all_histograms[clusters[j]])
+ if cur_bits < best_bits {
+ best_bits = cur_bits
+ best_out = clusters[j]
+ }
+ }
+
+ histogram_symbols[i] = best_out
+ if new_index[best_out] == clusterBlocksCommand_kInvalidIndex {
+ new_index[best_out] = next_index
+ next_index++
+ }
+ }
+ }
+
+ clusters = nil
+ all_histograms = nil
+ brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, num_blocks)
+ brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, num_blocks)
+ {
+ var cur_length uint32 = 0
+ var block_idx uint = 0
+ var max_type byte = 0
+ for i = 0; i < num_blocks; i++ {
+ cur_length += block_lengths[i]
+ if i+1 == num_blocks || histogram_symbols[i] != histogram_symbols[i+1] {
+ var id byte = byte(new_index[histogram_symbols[i]])
+ split.types[block_idx] = id
+ split.lengths[block_idx] = cur_length
+ max_type = brotli_max_uint8_t(max_type, id)
+ cur_length = 0
+ block_idx++
+ }
+ }
+
+ split.num_blocks = block_idx
+ split.num_types = uint(max_type) + 1
+ }
+
+ new_index = nil
+ block_lengths = nil
+ histogram_symbols = nil
+}
+
+func splitByteVectorCommand(data []uint16, literals_per_histogram uint, max_histograms uint, sampling_stride_length uint, block_switch_cost float64, params *encoderParams, split *blockSplit) {
+ length := uint(len(data))
+ var data_size uint = histogramDataSizeCommand()
+ var num_histograms uint = length/literals_per_histogram + 1
+ var histograms []histogramCommand
+ if num_histograms > max_histograms {
+ num_histograms = max_histograms
+ }
+
+ if length == 0 {
+ split.num_types = 1
+ return
+ } else if length < kMinLengthForBlockSplitting {
+ brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, split.num_blocks+1)
+ brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, split.num_blocks+1)
+ split.num_types = 1
+ split.types[split.num_blocks] = 0
+ split.lengths[split.num_blocks] = uint32(length)
+ split.num_blocks++
+ return
+ }
+
+ histograms = make([]histogramCommand, num_histograms)
+
+ /* Find good entropy codes. */
+ initialEntropyCodesCommand(data, length, sampling_stride_length, num_histograms, histograms)
+
+ refineEntropyCodesCommand(data, length, sampling_stride_length, num_histograms, histograms)
+ {
+ var block_ids []byte = make([]byte, length)
+ var num_blocks uint = 0
+ var bitmaplen uint = (num_histograms + 7) >> 3
+ var insert_cost []float64 = make([]float64, (data_size * num_histograms))
+ var cost []float64 = make([]float64, num_histograms)
+ var switch_signal []byte = make([]byte, (length * bitmaplen))
+ var new_id []uint16 = make([]uint16, num_histograms)
+ var iters uint
+ if params.quality < hqZopflificationQuality {
+ iters = 3
+ } else {
+ iters = 10
+ }
+ /* Find a good path through literals with the good entropy codes. */
+
+ var i uint
+ for i = 0; i < iters; i++ {
+ num_blocks = findBlocksCommand(data, length, block_switch_cost, num_histograms, histograms, insert_cost, cost, switch_signal, block_ids)
+ num_histograms = remapBlockIdsCommand(block_ids, length, new_id, num_histograms)
+ buildBlockHistogramsCommand(data, length, block_ids, num_histograms, histograms)
+ }
+
+ insert_cost = nil
+ cost = nil
+ switch_signal = nil
+ new_id = nil
+ histograms = nil
+ clusterBlocksCommand(data, length, num_blocks, block_ids, split)
+ block_ids = nil
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/block_splitter_distance.go b/vendor/github.com/andybalholm/brotli/block_splitter_distance.go
new file mode 100644
index 0000000..953530d
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/block_splitter_distance.go
@@ -0,0 +1,433 @@
+package brotli
+
+import "math"
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+func initialEntropyCodesDistance(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramDistance) {
+ var seed uint32 = 7
+ var block_length uint = length / num_histograms
+ var i uint
+ clearHistogramsDistance(histograms, num_histograms)
+ for i = 0; i < num_histograms; i++ {
+ var pos uint = length * i / num_histograms
+ if i != 0 {
+ pos += uint(myRand(&seed) % uint32(block_length))
+ }
+
+ if pos+stride >= length {
+ pos = length - stride - 1
+ }
+
+ histogramAddVectorDistance(&histograms[i], data[pos:], stride)
+ }
+}
+
+func randomSampleDistance(seed *uint32, data []uint16, length uint, stride uint, sample *histogramDistance) {
+ var pos uint = 0
+ if stride >= length {
+ stride = length
+ } else {
+ pos = uint(myRand(seed) % uint32(length-stride+1))
+ }
+
+ histogramAddVectorDistance(sample, data[pos:], stride)
+}
+
+func refineEntropyCodesDistance(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramDistance) {
+ var iters uint = kIterMulForRefining*length/stride + kMinItersForRefining
+ var seed uint32 = 7
+ var iter uint
+ iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms
+ for iter = 0; iter < iters; iter++ {
+ var sample histogramDistance
+ histogramClearDistance(&sample)
+ randomSampleDistance(&seed, data, length, stride, &sample)
+ histogramAddHistogramDistance(&histograms[iter%num_histograms], &sample)
+ }
+}
+
+/* Assigns a block id from the range [0, num_histograms) to each data element
+ in data[0..length) and fills in block_id[0..length) with the assigned values.
+ Returns the number of blocks, i.e. one plus the number of block switches. */
+func findBlocksDistance(data []uint16, length uint, block_switch_bitcost float64, num_histograms uint, histograms []histogramDistance, insert_cost []float64, cost []float64, switch_signal []byte, block_id []byte) uint {
+ var data_size uint = histogramDataSizeDistance()
+ var bitmaplen uint = (num_histograms + 7) >> 3
+ var num_blocks uint = 1
+ var i uint
+ var j uint
+ assert(num_histograms <= 256)
+ if num_histograms <= 1 {
+ for i = 0; i < length; i++ {
+ block_id[i] = 0
+ }
+
+ return 1
+ }
+
+ for i := 0; i < int(data_size*num_histograms); i++ {
+ insert_cost[i] = 0
+ }
+ for i = 0; i < num_histograms; i++ {
+ insert_cost[i] = fastLog2(uint(uint32(histograms[i].total_count_)))
+ }
+
+ for i = data_size; i != 0; {
+ i--
+ for j = 0; j < num_histograms; j++ {
+ insert_cost[i*num_histograms+j] = insert_cost[j] - bitCost(uint(histograms[j].data_[i]))
+ }
+ }
+
+ for i := 0; i < int(num_histograms); i++ {
+ cost[i] = 0
+ }
+ for i := 0; i < int(length*bitmaplen); i++ {
+ switch_signal[i] = 0
+ }
+
+ /* After each iteration of this loop, cost[k] will contain the difference
+ between the minimum cost of arriving at the current byte position using
+ entropy code k, and the minimum cost of arriving at the current byte
+ position. This difference is capped at the block switch cost, and if it
+ reaches block switch cost, it means that when we trace back from the last
+ position, we need to switch here. */
+ for i = 0; i < length; i++ {
+ var byte_ix uint = i
+ var ix uint = byte_ix * bitmaplen
+ var insert_cost_ix uint = uint(data[byte_ix]) * num_histograms
+ var min_cost float64 = 1e99
+ var block_switch_cost float64 = block_switch_bitcost
+ var k uint
+ for k = 0; k < num_histograms; k++ {
+ /* We are coding the symbol in data[byte_ix] with entropy code k. */
+ cost[k] += insert_cost[insert_cost_ix+k]
+
+ if cost[k] < min_cost {
+ min_cost = cost[k]
+ block_id[byte_ix] = byte(k)
+ }
+ }
+
+ /* Lower the switch cost near the start, yielding more blocks at the beginning. */
+ if byte_ix < 2000 {
+ block_switch_cost *= 0.77 + 0.07*float64(byte_ix)/2000
+ }
+
+ for k = 0; k < num_histograms; k++ {
+ cost[k] -= min_cost
+ if cost[k] >= block_switch_cost {
+ var mask byte = byte(1 << (k & 7))
+ cost[k] = block_switch_cost
+ assert(k>>3 < bitmaplen)
+ switch_signal[ix+(k>>3)] |= mask
+ /* Trace back from the last position and switch at the marked places. */
+ }
+ }
+ }
+ {
+ var byte_ix uint = length - 1
+ var ix uint = byte_ix * bitmaplen
+ var cur_id byte = block_id[byte_ix]
+ for byte_ix > 0 {
+ var mask byte = byte(1 << (cur_id & 7))
+ assert(uint(cur_id)>>3 < bitmaplen)
+ byte_ix--
+ ix -= bitmaplen
+ if switch_signal[ix+uint(cur_id>>3)]&mask != 0 {
+ if cur_id != block_id[byte_ix] {
+ cur_id = block_id[byte_ix]
+ num_blocks++
+ }
+ }
+
+ block_id[byte_ix] = cur_id
+ }
+ }
+
+ return num_blocks
+}
+
+var remapBlockIdsDistance_kInvalidId uint16 = 256
+
+func remapBlockIdsDistance(block_ids []byte, length uint, new_id []uint16, num_histograms uint) uint {
+ var next_id uint16 = 0
+ var i uint
+ for i = 0; i < num_histograms; i++ {
+ new_id[i] = remapBlockIdsDistance_kInvalidId
+ }
+
+ for i = 0; i < length; i++ {
+ assert(uint(block_ids[i]) < num_histograms)
+ if new_id[block_ids[i]] == remapBlockIdsDistance_kInvalidId {
+ new_id[block_ids[i]] = next_id
+ next_id++
+ }
+ }
+
+ for i = 0; i < length; i++ {
+ block_ids[i] = byte(new_id[block_ids[i]])
+ assert(uint(block_ids[i]) < num_histograms)
+ }
+
+ assert(uint(next_id) <= num_histograms)
+ return uint(next_id)
+}
+
+func buildBlockHistogramsDistance(data []uint16, length uint, block_ids []byte, num_histograms uint, histograms []histogramDistance) {
+ var i uint
+ clearHistogramsDistance(histograms, num_histograms)
+ for i = 0; i < length; i++ {
+ histogramAddDistance(&histograms[block_ids[i]], uint(data[i]))
+ }
+}
+
+var clusterBlocksDistance_kInvalidIndex uint32 = math.MaxUint32
+
+func clusterBlocksDistance(data []uint16, length uint, num_blocks uint, block_ids []byte, split *blockSplit) {
+ var histogram_symbols []uint32 = make([]uint32, num_blocks)
+ var block_lengths []uint32 = make([]uint32, num_blocks)
+ var expected_num_clusters uint = clustersPerBatch * (num_blocks + histogramsPerBatch - 1) / histogramsPerBatch
+ var all_histograms_size uint = 0
+ var all_histograms_capacity uint = expected_num_clusters
+ var all_histograms []histogramDistance = make([]histogramDistance, all_histograms_capacity)
+ var cluster_size_size uint = 0
+ var cluster_size_capacity uint = expected_num_clusters
+ var cluster_size []uint32 = make([]uint32, cluster_size_capacity)
+ var num_clusters uint = 0
+ var histograms []histogramDistance = make([]histogramDistance, brotli_min_size_t(num_blocks, histogramsPerBatch))
+ var max_num_pairs uint = histogramsPerBatch * histogramsPerBatch / 2
+ var pairs_capacity uint = max_num_pairs + 1
+ var pairs []histogramPair = make([]histogramPair, pairs_capacity)
+ var pos uint = 0
+ var clusters []uint32
+ var num_final_clusters uint
+ var new_index []uint32
+ var i uint
+ var sizes = [histogramsPerBatch]uint32{0}
+ var new_clusters = [histogramsPerBatch]uint32{0}
+ var symbols = [histogramsPerBatch]uint32{0}
+ var remap = [histogramsPerBatch]uint32{0}
+
+ for i := 0; i < int(num_blocks); i++ {
+ block_lengths[i] = 0
+ }
+ {
+ var block_idx uint = 0
+ for i = 0; i < length; i++ {
+ assert(block_idx < num_blocks)
+ block_lengths[block_idx]++
+ if i+1 == length || block_ids[i] != block_ids[i+1] {
+ block_idx++
+ }
+ }
+
+ assert(block_idx == num_blocks)
+ }
+
+ for i = 0; i < num_blocks; i += histogramsPerBatch {
+ var num_to_combine uint = brotli_min_size_t(num_blocks-i, histogramsPerBatch)
+ var num_new_clusters uint
+ var j uint
+ for j = 0; j < num_to_combine; j++ {
+ var k uint
+ histogramClearDistance(&histograms[j])
+ for k = 0; uint32(k) < block_lengths[i+j]; k++ {
+ histogramAddDistance(&histograms[j], uint(data[pos]))
+ pos++
+ }
+
+ histograms[j].bit_cost_ = populationCostDistance(&histograms[j])
+ new_clusters[j] = uint32(j)
+ symbols[j] = uint32(j)
+ sizes[j] = 1
+ }
+
+ num_new_clusters = histogramCombineDistance(histograms, sizes[:], symbols[:], new_clusters[:], pairs, num_to_combine, num_to_combine, histogramsPerBatch, max_num_pairs)
+ if all_histograms_capacity < (all_histograms_size + num_new_clusters) {
+ var _new_size uint
+ if all_histograms_capacity == 0 {
+ _new_size = all_histograms_size + num_new_clusters
+ } else {
+ _new_size = all_histograms_capacity
+ }
+ var new_array []histogramDistance
+ for _new_size < (all_histograms_size + num_new_clusters) {
+ _new_size *= 2
+ }
+ new_array = make([]histogramDistance, _new_size)
+ if all_histograms_capacity != 0 {
+ copy(new_array, all_histograms[:all_histograms_capacity])
+ }
+
+ all_histograms = new_array
+ all_histograms_capacity = _new_size
+ }
+
+ brotli_ensure_capacity_uint32_t(&cluster_size, &cluster_size_capacity, cluster_size_size+num_new_clusters)
+ for j = 0; j < num_new_clusters; j++ {
+ all_histograms[all_histograms_size] = histograms[new_clusters[j]]
+ all_histograms_size++
+ cluster_size[cluster_size_size] = sizes[new_clusters[j]]
+ cluster_size_size++
+ remap[new_clusters[j]] = uint32(j)
+ }
+
+ for j = 0; j < num_to_combine; j++ {
+ histogram_symbols[i+j] = uint32(num_clusters) + remap[symbols[j]]
+ }
+
+ num_clusters += num_new_clusters
+ assert(num_clusters == cluster_size_size)
+ assert(num_clusters == all_histograms_size)
+ }
+
+ histograms = nil
+
+ max_num_pairs = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters)
+ if pairs_capacity < max_num_pairs+1 {
+ pairs = nil
+ pairs = make([]histogramPair, (max_num_pairs + 1))
+ }
+
+ clusters = make([]uint32, num_clusters)
+ for i = 0; i < num_clusters; i++ {
+ clusters[i] = uint32(i)
+ }
+
+ num_final_clusters = histogramCombineDistance(all_histograms, cluster_size, histogram_symbols, clusters, pairs, num_clusters, num_blocks, maxNumberOfBlockTypes, max_num_pairs)
+ pairs = nil
+ cluster_size = nil
+
+ new_index = make([]uint32, num_clusters)
+ for i = 0; i < num_clusters; i++ {
+ new_index[i] = clusterBlocksDistance_kInvalidIndex
+ }
+ pos = 0
+ {
+ var next_index uint32 = 0
+ for i = 0; i < num_blocks; i++ {
+ var histo histogramDistance
+ var j uint
+ var best_out uint32
+ var best_bits float64
+ histogramClearDistance(&histo)
+ for j = 0; uint32(j) < block_lengths[i]; j++ {
+ histogramAddDistance(&histo, uint(data[pos]))
+ pos++
+ }
+
+ if i == 0 {
+ best_out = histogram_symbols[0]
+ } else {
+ best_out = histogram_symbols[i-1]
+ }
+ best_bits = histogramBitCostDistanceDistance(&histo, &all_histograms[best_out])
+ for j = 0; j < num_final_clusters; j++ {
+ var cur_bits float64 = histogramBitCostDistanceDistance(&histo, &all_histograms[clusters[j]])
+ if cur_bits < best_bits {
+ best_bits = cur_bits
+ best_out = clusters[j]
+ }
+ }
+
+ histogram_symbols[i] = best_out
+ if new_index[best_out] == clusterBlocksDistance_kInvalidIndex {
+ new_index[best_out] = next_index
+ next_index++
+ }
+ }
+ }
+
+ clusters = nil
+ all_histograms = nil
+ brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, num_blocks)
+ brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, num_blocks)
+ {
+ var cur_length uint32 = 0
+ var block_idx uint = 0
+ var max_type byte = 0
+ for i = 0; i < num_blocks; i++ {
+ cur_length += block_lengths[i]
+ if i+1 == num_blocks || histogram_symbols[i] != histogram_symbols[i+1] {
+ var id byte = byte(new_index[histogram_symbols[i]])
+ split.types[block_idx] = id
+ split.lengths[block_idx] = cur_length
+ max_type = brotli_max_uint8_t(max_type, id)
+ cur_length = 0
+ block_idx++
+ }
+ }
+
+ split.num_blocks = block_idx
+ split.num_types = uint(max_type) + 1
+ }
+
+ new_index = nil
+ block_lengths = nil
+ histogram_symbols = nil
+}
+
+func splitByteVectorDistance(data []uint16, length uint, literals_per_histogram uint, max_histograms uint, sampling_stride_length uint, block_switch_cost float64, params *encoderParams, split *blockSplit) {
+ var data_size uint = histogramDataSizeDistance()
+ var num_histograms uint = length/literals_per_histogram + 1
+ var histograms []histogramDistance
+ if num_histograms > max_histograms {
+ num_histograms = max_histograms
+ }
+
+ if length == 0 {
+ split.num_types = 1
+ return
+ } else if length < kMinLengthForBlockSplitting {
+ brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, split.num_blocks+1)
+ brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, split.num_blocks+1)
+ split.num_types = 1
+ split.types[split.num_blocks] = 0
+ split.lengths[split.num_blocks] = uint32(length)
+ split.num_blocks++
+ return
+ }
+
+ histograms = make([]histogramDistance, num_histograms)
+
+ /* Find good entropy codes. */
+ initialEntropyCodesDistance(data, length, sampling_stride_length, num_histograms, histograms)
+
+ refineEntropyCodesDistance(data, length, sampling_stride_length, num_histograms, histograms)
+ {
+ var block_ids []byte = make([]byte, length)
+ var num_blocks uint = 0
+ var bitmaplen uint = (num_histograms + 7) >> 3
+ var insert_cost []float64 = make([]float64, (data_size * num_histograms))
+ var cost []float64 = make([]float64, num_histograms)
+ var switch_signal []byte = make([]byte, (length * bitmaplen))
+ var new_id []uint16 = make([]uint16, num_histograms)
+ var iters uint
+ if params.quality < hqZopflificationQuality {
+ iters = 3
+ } else {
+ iters = 10
+ }
+ /* Find a good path through literals with the good entropy codes. */
+
+ var i uint
+ for i = 0; i < iters; i++ {
+ num_blocks = findBlocksDistance(data, length, block_switch_cost, num_histograms, histograms, insert_cost, cost, switch_signal, block_ids)
+ num_histograms = remapBlockIdsDistance(block_ids, length, new_id, num_histograms)
+ buildBlockHistogramsDistance(data, length, block_ids, num_histograms, histograms)
+ }
+
+ insert_cost = nil
+ cost = nil
+ switch_signal = nil
+ new_id = nil
+ histograms = nil
+ clusterBlocksDistance(data, length, num_blocks, block_ids, split)
+ block_ids = nil
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/block_splitter_literal.go b/vendor/github.com/andybalholm/brotli/block_splitter_literal.go
new file mode 100644
index 0000000..1c895cf
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/block_splitter_literal.go
@@ -0,0 +1,433 @@
+package brotli
+
+import "math"
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+func initialEntropyCodesLiteral(data []byte, length uint, stride uint, num_histograms uint, histograms []histogramLiteral) {
+ var seed uint32 = 7
+ var block_length uint = length / num_histograms
+ var i uint
+ clearHistogramsLiteral(histograms, num_histograms)
+ for i = 0; i < num_histograms; i++ {
+ var pos uint = length * i / num_histograms
+ if i != 0 {
+ pos += uint(myRand(&seed) % uint32(block_length))
+ }
+
+ if pos+stride >= length {
+ pos = length - stride - 1
+ }
+
+ histogramAddVectorLiteral(&histograms[i], data[pos:], stride)
+ }
+}
+
+func randomSampleLiteral(seed *uint32, data []byte, length uint, stride uint, sample *histogramLiteral) {
+ var pos uint = 0
+ if stride >= length {
+ stride = length
+ } else {
+ pos = uint(myRand(seed) % uint32(length-stride+1))
+ }
+
+ histogramAddVectorLiteral(sample, data[pos:], stride)
+}
+
+func refineEntropyCodesLiteral(data []byte, length uint, stride uint, num_histograms uint, histograms []histogramLiteral) {
+ var iters uint = kIterMulForRefining*length/stride + kMinItersForRefining
+ var seed uint32 = 7
+ var iter uint
+ iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms
+ for iter = 0; iter < iters; iter++ {
+ var sample histogramLiteral
+ histogramClearLiteral(&sample)
+ randomSampleLiteral(&seed, data, length, stride, &sample)
+ histogramAddHistogramLiteral(&histograms[iter%num_histograms], &sample)
+ }
+}
+
+/* Assigns a block id from the range [0, num_histograms) to each data element
+ in data[0..length) and fills in block_id[0..length) with the assigned values.
+ Returns the number of blocks, i.e. one plus the number of block switches. */
+func findBlocksLiteral(data []byte, length uint, block_switch_bitcost float64, num_histograms uint, histograms []histogramLiteral, insert_cost []float64, cost []float64, switch_signal []byte, block_id []byte) uint {
+ var data_size uint = histogramDataSizeLiteral()
+ var bitmaplen uint = (num_histograms + 7) >> 3
+ var num_blocks uint = 1
+ var i uint
+ var j uint
+ assert(num_histograms <= 256)
+ if num_histograms <= 1 {
+ for i = 0; i < length; i++ {
+ block_id[i] = 0
+ }
+
+ return 1
+ }
+
+ for i := 0; i < int(data_size*num_histograms); i++ {
+ insert_cost[i] = 0
+ }
+ for i = 0; i < num_histograms; i++ {
+ insert_cost[i] = fastLog2(uint(uint32(histograms[i].total_count_)))
+ }
+
+ for i = data_size; i != 0; {
+ i--
+ for j = 0; j < num_histograms; j++ {
+ insert_cost[i*num_histograms+j] = insert_cost[j] - bitCost(uint(histograms[j].data_[i]))
+ }
+ }
+
+ for i := 0; i < int(num_histograms); i++ {
+ cost[i] = 0
+ }
+ for i := 0; i < int(length*bitmaplen); i++ {
+ switch_signal[i] = 0
+ }
+
+ /* After each iteration of this loop, cost[k] will contain the difference
+ between the minimum cost of arriving at the current byte position using
+ entropy code k, and the minimum cost of arriving at the current byte
+ position. This difference is capped at the block switch cost, and if it
+ reaches block switch cost, it means that when we trace back from the last
+ position, we need to switch here. */
+ for i = 0; i < length; i++ {
+ var byte_ix uint = i
+ var ix uint = byte_ix * bitmaplen
+ var insert_cost_ix uint = uint(data[byte_ix]) * num_histograms
+ var min_cost float64 = 1e99
+ var block_switch_cost float64 = block_switch_bitcost
+ var k uint
+ for k = 0; k < num_histograms; k++ {
+ /* We are coding the symbol in data[byte_ix] with entropy code k. */
+ cost[k] += insert_cost[insert_cost_ix+k]
+
+ if cost[k] < min_cost {
+ min_cost = cost[k]
+ block_id[byte_ix] = byte(k)
+ }
+ }
+
+ /* Lower the switch cost near the start, yielding more blocks at the beginning. */
+ if byte_ix < 2000 {
+ block_switch_cost *= 0.77 + 0.07*float64(byte_ix)/2000
+ }
+
+ for k = 0; k < num_histograms; k++ {
+ cost[k] -= min_cost
+ if cost[k] >= block_switch_cost {
+ var mask byte = byte(1 << (k & 7))
+ cost[k] = block_switch_cost
+ assert(k>>3 < bitmaplen)
+ switch_signal[ix+(k>>3)] |= mask
+ /* Trace back from the last position and switch at the marked places. */
+ }
+ }
+ }
+ {
+ var byte_ix uint = length - 1
+ var ix uint = byte_ix * bitmaplen
+ var cur_id byte = block_id[byte_ix]
+ for byte_ix > 0 {
+ var mask byte = byte(1 << (cur_id & 7))
+ assert(uint(cur_id)>>3 < bitmaplen)
+ byte_ix--
+ ix -= bitmaplen
+ if switch_signal[ix+uint(cur_id>>3)]&mask != 0 {
+ if cur_id != block_id[byte_ix] {
+ cur_id = block_id[byte_ix]
+ num_blocks++
+ }
+ }
+
+ block_id[byte_ix] = cur_id
+ }
+ }
+
+ return num_blocks
+}
+
+var remapBlockIdsLiteral_kInvalidId uint16 = 256
+
+func remapBlockIdsLiteral(block_ids []byte, length uint, new_id []uint16, num_histograms uint) uint {
+ var next_id uint16 = 0
+ var i uint
+ for i = 0; i < num_histograms; i++ {
+ new_id[i] = remapBlockIdsLiteral_kInvalidId
+ }
+
+ for i = 0; i < length; i++ {
+ assert(uint(block_ids[i]) < num_histograms)
+ if new_id[block_ids[i]] == remapBlockIdsLiteral_kInvalidId {
+ new_id[block_ids[i]] = next_id
+ next_id++
+ }
+ }
+
+ for i = 0; i < length; i++ {
+ block_ids[i] = byte(new_id[block_ids[i]])
+ assert(uint(block_ids[i]) < num_histograms)
+ }
+
+ assert(uint(next_id) <= num_histograms)
+ return uint(next_id)
+}
+
+func buildBlockHistogramsLiteral(data []byte, length uint, block_ids []byte, num_histograms uint, histograms []histogramLiteral) {
+ var i uint
+ clearHistogramsLiteral(histograms, num_histograms)
+ for i = 0; i < length; i++ {
+ histogramAddLiteral(&histograms[block_ids[i]], uint(data[i]))
+ }
+}
+
+var clusterBlocksLiteral_kInvalidIndex uint32 = math.MaxUint32
+
+func clusterBlocksLiteral(data []byte, length uint, num_blocks uint, block_ids []byte, split *blockSplit) {
+ var histogram_symbols []uint32 = make([]uint32, num_blocks)
+ var block_lengths []uint32 = make([]uint32, num_blocks)
+ var expected_num_clusters uint = clustersPerBatch * (num_blocks + histogramsPerBatch - 1) / histogramsPerBatch
+ var all_histograms_size uint = 0
+ var all_histograms_capacity uint = expected_num_clusters
+ var all_histograms []histogramLiteral = make([]histogramLiteral, all_histograms_capacity)
+ var cluster_size_size uint = 0
+ var cluster_size_capacity uint = expected_num_clusters
+ var cluster_size []uint32 = make([]uint32, cluster_size_capacity)
+ var num_clusters uint = 0
+ var histograms []histogramLiteral = make([]histogramLiteral, brotli_min_size_t(num_blocks, histogramsPerBatch))
+ var max_num_pairs uint = histogramsPerBatch * histogramsPerBatch / 2
+ var pairs_capacity uint = max_num_pairs + 1
+ var pairs []histogramPair = make([]histogramPair, pairs_capacity)
+ var pos uint = 0
+ var clusters []uint32
+ var num_final_clusters uint
+ var new_index []uint32
+ var i uint
+ var sizes = [histogramsPerBatch]uint32{0}
+ var new_clusters = [histogramsPerBatch]uint32{0}
+ var symbols = [histogramsPerBatch]uint32{0}
+ var remap = [histogramsPerBatch]uint32{0}
+
+ for i := 0; i < int(num_blocks); i++ {
+ block_lengths[i] = 0
+ }
+ {
+ var block_idx uint = 0
+ for i = 0; i < length; i++ {
+ assert(block_idx < num_blocks)
+ block_lengths[block_idx]++
+ if i+1 == length || block_ids[i] != block_ids[i+1] {
+ block_idx++
+ }
+ }
+
+ assert(block_idx == num_blocks)
+ }
+
+ for i = 0; i < num_blocks; i += histogramsPerBatch {
+ var num_to_combine uint = brotli_min_size_t(num_blocks-i, histogramsPerBatch)
+ var num_new_clusters uint
+ var j uint
+ for j = 0; j < num_to_combine; j++ {
+ var k uint
+ histogramClearLiteral(&histograms[j])
+ for k = 0; uint32(k) < block_lengths[i+j]; k++ {
+ histogramAddLiteral(&histograms[j], uint(data[pos]))
+ pos++
+ }
+
+ histograms[j].bit_cost_ = populationCostLiteral(&histograms[j])
+ new_clusters[j] = uint32(j)
+ symbols[j] = uint32(j)
+ sizes[j] = 1
+ }
+
+ num_new_clusters = histogramCombineLiteral(histograms, sizes[:], symbols[:], new_clusters[:], pairs, num_to_combine, num_to_combine, histogramsPerBatch, max_num_pairs)
+ if all_histograms_capacity < (all_histograms_size + num_new_clusters) {
+ var _new_size uint
+ if all_histograms_capacity == 0 {
+ _new_size = all_histograms_size + num_new_clusters
+ } else {
+ _new_size = all_histograms_capacity
+ }
+ var new_array []histogramLiteral
+ for _new_size < (all_histograms_size + num_new_clusters) {
+ _new_size *= 2
+ }
+ new_array = make([]histogramLiteral, _new_size)
+ if all_histograms_capacity != 0 {
+ copy(new_array, all_histograms[:all_histograms_capacity])
+ }
+
+ all_histograms = new_array
+ all_histograms_capacity = _new_size
+ }
+
+ brotli_ensure_capacity_uint32_t(&cluster_size, &cluster_size_capacity, cluster_size_size+num_new_clusters)
+ for j = 0; j < num_new_clusters; j++ {
+ all_histograms[all_histograms_size] = histograms[new_clusters[j]]
+ all_histograms_size++
+ cluster_size[cluster_size_size] = sizes[new_clusters[j]]
+ cluster_size_size++
+ remap[new_clusters[j]] = uint32(j)
+ }
+
+ for j = 0; j < num_to_combine; j++ {
+ histogram_symbols[i+j] = uint32(num_clusters) + remap[symbols[j]]
+ }
+
+ num_clusters += num_new_clusters
+ assert(num_clusters == cluster_size_size)
+ assert(num_clusters == all_histograms_size)
+ }
+
+ histograms = nil
+
+ max_num_pairs = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters)
+ if pairs_capacity < max_num_pairs+1 {
+ pairs = nil
+ pairs = make([]histogramPair, (max_num_pairs + 1))
+ }
+
+ clusters = make([]uint32, num_clusters)
+ for i = 0; i < num_clusters; i++ {
+ clusters[i] = uint32(i)
+ }
+
+ num_final_clusters = histogramCombineLiteral(all_histograms, cluster_size, histogram_symbols, clusters, pairs, num_clusters, num_blocks, maxNumberOfBlockTypes, max_num_pairs)
+ pairs = nil
+ cluster_size = nil
+
+ new_index = make([]uint32, num_clusters)
+ for i = 0; i < num_clusters; i++ {
+ new_index[i] = clusterBlocksLiteral_kInvalidIndex
+ }
+ pos = 0
+ {
+ var next_index uint32 = 0
+ for i = 0; i < num_blocks; i++ {
+ var histo histogramLiteral
+ var j uint
+ var best_out uint32
+ var best_bits float64
+ histogramClearLiteral(&histo)
+ for j = 0; uint32(j) < block_lengths[i]; j++ {
+ histogramAddLiteral(&histo, uint(data[pos]))
+ pos++
+ }
+
+ if i == 0 {
+ best_out = histogram_symbols[0]
+ } else {
+ best_out = histogram_symbols[i-1]
+ }
+ best_bits = histogramBitCostDistanceLiteral(&histo, &all_histograms[best_out])
+ for j = 0; j < num_final_clusters; j++ {
+ var cur_bits float64 = histogramBitCostDistanceLiteral(&histo, &all_histograms[clusters[j]])
+ if cur_bits < best_bits {
+ best_bits = cur_bits
+ best_out = clusters[j]
+ }
+ }
+
+ histogram_symbols[i] = best_out
+ if new_index[best_out] == clusterBlocksLiteral_kInvalidIndex {
+ new_index[best_out] = next_index
+ next_index++
+ }
+ }
+ }
+
+ clusters = nil
+ all_histograms = nil
+ brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, num_blocks)
+ brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, num_blocks)
+ {
+ var cur_length uint32 = 0
+ var block_idx uint = 0
+ var max_type byte = 0
+ for i = 0; i < num_blocks; i++ {
+ cur_length += block_lengths[i]
+ if i+1 == num_blocks || histogram_symbols[i] != histogram_symbols[i+1] {
+ var id byte = byte(new_index[histogram_symbols[i]])
+ split.types[block_idx] = id
+ split.lengths[block_idx] = cur_length
+ max_type = brotli_max_uint8_t(max_type, id)
+ cur_length = 0
+ block_idx++
+ }
+ }
+
+ split.num_blocks = block_idx
+ split.num_types = uint(max_type) + 1
+ }
+
+ new_index = nil
+ block_lengths = nil
+ histogram_symbols = nil
+}
+
+func splitByteVectorLiteral(data []byte, length uint, literals_per_histogram uint, max_histograms uint, sampling_stride_length uint, block_switch_cost float64, params *encoderParams, split *blockSplit) {
+ var data_size uint = histogramDataSizeLiteral()
+ var num_histograms uint = length/literals_per_histogram + 1
+ var histograms []histogramLiteral
+ if num_histograms > max_histograms {
+ num_histograms = max_histograms
+ }
+
+ if length == 0 {
+ split.num_types = 1
+ return
+ } else if length < kMinLengthForBlockSplitting {
+ brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, split.num_blocks+1)
+ brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, split.num_blocks+1)
+ split.num_types = 1
+ split.types[split.num_blocks] = 0
+ split.lengths[split.num_blocks] = uint32(length)
+ split.num_blocks++
+ return
+ }
+
+ histograms = make([]histogramLiteral, num_histograms)
+
+ /* Find good entropy codes. */
+ initialEntropyCodesLiteral(data, length, sampling_stride_length, num_histograms, histograms)
+
+ refineEntropyCodesLiteral(data, length, sampling_stride_length, num_histograms, histograms)
+ {
+ var block_ids []byte = make([]byte, length)
+ var num_blocks uint = 0
+ var bitmaplen uint = (num_histograms + 7) >> 3
+ var insert_cost []float64 = make([]float64, (data_size * num_histograms))
+ var cost []float64 = make([]float64, num_histograms)
+ var switch_signal []byte = make([]byte, (length * bitmaplen))
+ var new_id []uint16 = make([]uint16, num_histograms)
+ var iters uint
+ if params.quality < hqZopflificationQuality {
+ iters = 3
+ } else {
+ iters = 10
+ }
+ /* Find a good path through literals with the good entropy codes. */
+
+ var i uint
+ for i = 0; i < iters; i++ {
+ num_blocks = findBlocksLiteral(data, length, block_switch_cost, num_histograms, histograms, insert_cost, cost, switch_signal, block_ids)
+ num_histograms = remapBlockIdsLiteral(block_ids, length, new_id, num_histograms)
+ buildBlockHistogramsLiteral(data, length, block_ids, num_histograms, histograms)
+ }
+
+ insert_cost = nil
+ cost = nil
+ switch_signal = nil
+ new_id = nil
+ histograms = nil
+ clusterBlocksLiteral(data, length, num_blocks, block_ids, split)
+ block_ids = nil
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/brotli_bit_stream.go b/vendor/github.com/andybalholm/brotli/brotli_bit_stream.go
new file mode 100644
index 0000000..7acfb18
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/brotli_bit_stream.go
@@ -0,0 +1,1300 @@
+package brotli
+
+import (
+ "math"
+ "sync"
+)
+
+const maxHuffmanTreeSize = (2*numCommandSymbols + 1)
+
+/* The maximum size of Huffman dictionary for distances assuming that
+ NPOSTFIX = 0 and NDIRECT = 0. */
+const maxSimpleDistanceAlphabetSize = 140
+
+/* Represents the range of values belonging to a prefix code:
+ [offset, offset + 2^nbits) */
+type prefixCodeRange struct {
+ offset uint32
+ nbits uint32
+}
+
+var kBlockLengthPrefixCode = [numBlockLenSymbols]prefixCodeRange{
+ prefixCodeRange{1, 2},
+ prefixCodeRange{5, 2},
+ prefixCodeRange{9, 2},
+ prefixCodeRange{13, 2},
+ prefixCodeRange{17, 3},
+ prefixCodeRange{25, 3},
+ prefixCodeRange{33, 3},
+ prefixCodeRange{41, 3},
+ prefixCodeRange{49, 4},
+ prefixCodeRange{65, 4},
+ prefixCodeRange{81, 4},
+ prefixCodeRange{97, 4},
+ prefixCodeRange{113, 5},
+ prefixCodeRange{145, 5},
+ prefixCodeRange{177, 5},
+ prefixCodeRange{209, 5},
+ prefixCodeRange{241, 6},
+ prefixCodeRange{305, 6},
+ prefixCodeRange{369, 7},
+ prefixCodeRange{497, 8},
+ prefixCodeRange{753, 9},
+ prefixCodeRange{1265, 10},
+ prefixCodeRange{2289, 11},
+ prefixCodeRange{4337, 12},
+ prefixCodeRange{8433, 13},
+ prefixCodeRange{16625, 24},
+}
+
+func blockLengthPrefixCode(len uint32) uint32 {
+ var code uint32
+ if len >= 177 {
+ if len >= 753 {
+ code = 20
+ } else {
+ code = 14
+ }
+ } else if len >= 41 {
+ code = 7
+ } else {
+ code = 0
+ }
+ for code < (numBlockLenSymbols-1) && len >= kBlockLengthPrefixCode[code+1].offset {
+ code++
+ }
+ return code
+}
+
+func getBlockLengthPrefixCode(len uint32, code *uint, n_extra *uint32, extra *uint32) {
+ *code = uint(blockLengthPrefixCode(len))
+ *n_extra = kBlockLengthPrefixCode[*code].nbits
+ *extra = len - kBlockLengthPrefixCode[*code].offset
+}
+
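The coarse jump in blockLengthPrefixCode only seeds the linear scan over kBlockLengthPrefixCode; the result is a prefix code plus nbits extra bits storing length - offset. A standalone check using a plain scan over the first rows of the table (copied from above, truncated here):

package main

import "fmt"

type rng struct{ offset, nbits uint32 }

var table = []rng{
	{1, 2}, {5, 2}, {9, 2}, {13, 2}, {17, 3}, {25, 3}, {33, 3}, {41, 3},
	{49, 4}, {65, 4}, {81, 4}, {97, 4}, {113, 5}, {145, 5}, {177, 5},
}

func main() {
	length := uint32(50)
	code := 0
	for code+1 < len(table) && length >= table[code+1].offset {
		code++
	}
	extra := length - table[code].offset
	fmt.Println(code, table[code].nbits, extra) // 8 4 1: code 8 plus 4 extra bits of value 1
}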
+type blockTypeCodeCalculator struct {
+ last_type uint
+ second_last_type uint
+}
+
+func initBlockTypeCodeCalculator(self *blockTypeCodeCalculator) {
+ self.last_type = 1
+ self.second_last_type = 0
+}
+
+func nextBlockTypeCode(calculator *blockTypeCodeCalculator, type_ byte) uint {
+ var type_code uint
+ if uint(type_) == calculator.last_type+1 {
+ type_code = 1
+ } else if uint(type_) == calculator.second_last_type {
+ type_code = 0
+ } else {
+ type_code = uint(type_) + 2
+ }
+ calculator.second_last_type = calculator.last_type
+ calculator.last_type = uint(type_)
+ return type_code
+}
+
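nextBlockTypeCode reserves the two shortest codes for the common transitions: code 1 for "last type plus one" and code 0 for a return to the second-to-last type; any other type t is sent as t + 2. Tracing three switches from the initial calculator state (last = 1, second-last = 0):

package main

import "fmt"

func main() {
	last, secondLast := uint(1), uint(0) // initial calculator state
	for _, t := range []uint{2, 0, 2} {
		var code uint
		switch {
		case t == last+1:
			code = 1 // "previous type plus one"
		case t == secondLast:
			code = 0 // "same as the second-to-last type"
		default:
			code = t + 2 // the type itself, offset by 2
		}
		secondLast, last = last, t
		fmt.Println(t, "->", code) // 2 -> 1, 0 -> 2, 2 -> 0
	}
}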
+/* |nibblesbits| represents the 2 bits to encode MNIBBLES (0-3)
+ REQUIRES: length > 0
+ REQUIRES: length <= (1 << 24) */
+func encodeMlen(length uint, bits *uint64, numbits *uint, nibblesbits *uint64) {
+ var lg uint
+ if length == 1 {
+ lg = 1
+ } else {
+ lg = uint(log2FloorNonZero(uint(uint32(length-1)))) + 1
+ }
+ var tmp uint
+ if lg < 16 {
+ tmp = 16
+ } else {
+ tmp = (lg + 3)
+ }
+ var mnibbles uint = tmp / 4
+ assert(length > 0)
+ assert(length <= 1<<24)
+ assert(lg <= 24)
+ *nibblesbits = uint64(mnibbles) - 4
+ *numbits = mnibbles * 4
+ *bits = uint64(length) - 1
+}
+
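encodeMlen packs MLEN - 1 into MNIBBLES 4-bit nibbles, never fewer than four; nibblesbits is MNIBBLES - 4, the two-bit field written before the length. A sketch of the same arithmetic, using math/bits.Len32 in place of the package's log2FloorNonZero (equivalent within the asserted range, since lg = 1 + floor(log2(length-1))):

package main

import (
	"fmt"
	"math/bits"
)

func encodeMlen(length uint) (mnibbles, numbits uint, value uint64) {
	lg := uint(1)
	if length > 1 {
		lg = uint(bits.Len32(uint32(length - 1))) // 1 + floor(log2(length-1))
	}
	tmp := uint(16)
	if lg >= 16 {
		tmp = lg + 3
	}
	mnibbles = tmp / 4
	return mnibbles, mnibbles * 4, uint64(length) - 1
}

func main() {
	fmt.Println(encodeMlen(1000))   // 4 16 999: 10 bits round up to 4 nibbles
	fmt.Println(encodeMlen(100000)) // 5 20 99999: 17 bits need 5 nibbles
}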
+func storeCommandExtra(cmd *command, storage_ix *uint, storage []byte) {
+ var copylen_code uint32 = commandCopyLenCode(cmd)
+ var inscode uint16 = getInsertLengthCode(uint(cmd.insert_len_))
+ var copycode uint16 = getCopyLengthCode(uint(copylen_code))
+ var insnumextra uint32 = getInsertExtra(inscode)
+ var insextraval uint64 = uint64(cmd.insert_len_) - uint64(getInsertBase(inscode))
+ var copyextraval uint64 = uint64(copylen_code) - uint64(getCopyBase(copycode))
+ var bits uint64 = copyextraval<<insnumextra | insextraval
+ writeBits(uint(insnumextra+getCopyExtra(copycode)), bits, storage_ix, storage)
+}
+
+/* Stores the compressed meta-block header.
+ REQUIRES: length > 0
+ REQUIRES: length <= (1 << 24) */
+func storeCompressedMetaBlockHeader(is_final_block bool, length uint, storage_ix *uint, storage []byte) {
+ var lenbits uint64
+ var nlenbits uint
+ var nibblesbits uint64
+ var is_final uint64
+ if is_final_block {
+ is_final = 1
+ } else {
+ is_final = 0
+ }
+
+ /* Write ISLAST bit. */
+ writeBits(1, is_final, storage_ix, storage)
+
+ /* Write ISEMPTY bit. */
+ if is_final_block {
+ writeBits(1, 0, storage_ix, storage)
+ }
+
+ encodeMlen(length, &lenbits, &nlenbits, &nibblesbits)
+ writeBits(2, nibblesbits, storage_ix, storage)
+ writeBits(nlenbits, lenbits, storage_ix, storage)
+
+ if !is_final_block {
+ /* Write ISUNCOMPRESSED bit. */
+ writeBits(1, 0, storage_ix, storage)
+ }
+}
+
+/* Stores the uncompressed meta-block header.
+ REQUIRES: length > 0
+ REQUIRES: length <= (1 << 24) */
+func storeUncompressedMetaBlockHeader(length uint, storage_ix *uint, storage []byte) {
+ var lenbits uint64
+ var nlenbits uint
+ var nibblesbits uint64
+
+ /* Write ISLAST bit.
+ Uncompressed block cannot be the last one, so set to 0. */
+ writeBits(1, 0, storage_ix, storage)
+
+ encodeMlen(length, &lenbits, &nlenbits, &nibblesbits)
+ writeBits(2, nibblesbits, storage_ix, storage)
+ writeBits(nlenbits, lenbits, storage_ix, storage)
+
+ /* Write ISUNCOMPRESSED bit. */
+ writeBits(1, 1, storage_ix, storage)
+}
+
+var storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder = [codeLengthCodes]byte{1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15}
+
+var storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeSymbols = [6]byte{0, 7, 3, 2, 1, 15}
+var storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeBitLengths = [6]byte{2, 4, 3, 2, 2, 4}
+
+func storeHuffmanTreeOfHuffmanTreeToBitMask(num_codes int, code_length_bitdepth []byte, storage_ix *uint, storage []byte) {
+ var skip_some uint = 0
+ var codes_to_store uint = codeLengthCodes
+ /* The bit lengths of the Huffman code over the code length alphabet
+ are compressed with the following static Huffman code:
+ Symbol Code
+ ------ ----
+ 0 00
+ 1 1110
+ 2 110
+ 3 01
+ 4 10
+ 5 1111 */
+
+ /* Throw away trailing zeros: */
+ if num_codes > 1 {
+ for ; codes_to_store > 0; codes_to_store-- {
+ if code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[codes_to_store-1]] != 0 {
+ break
+ }
+ }
+ }
+
+ if code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[0]] == 0 && code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[1]] == 0 {
+ skip_some = 2 /* skips two. */
+ if code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[2]] == 0 {
+ skip_some = 3 /* skips three. */
+ }
+ }
+
+ writeBits(2, uint64(skip_some), storage_ix, storage)
+ {
+ var i uint
+ for i = skip_some; i < codes_to_store; i++ {
+ var l uint = uint(code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[i]])
+ writeBits(uint(storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeBitLengths[l]), uint64(storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeSymbols[l]), storage_ix, storage)
+ }
+ }
+}
+
+func storeHuffmanTreeToBitMask(huffman_tree_size uint, huffman_tree []byte, huffman_tree_extra_bits []byte, code_length_bitdepth []byte, code_length_bitdepth_symbols []uint16, storage_ix *uint, storage []byte) {
+ var i uint
+ for i = 0; i < huffman_tree_size; i++ {
+ var ix uint = uint(huffman_tree[i])
+ writeBits(uint(code_length_bitdepth[ix]), uint64(code_length_bitdepth_symbols[ix]), storage_ix, storage)
+
+ /* Extra bits */
+ switch ix {
+ case repeatPreviousCodeLength:
+ writeBits(2, uint64(huffman_tree_extra_bits[i]), storage_ix, storage)
+
+ case repeatZeroCodeLength:
+ writeBits(3, uint64(huffman_tree_extra_bits[i]), storage_ix, storage)
+ }
+ }
+}
+
+func storeSimpleHuffmanTree(depths []byte, symbols []uint, num_symbols uint, max_bits uint, storage_ix *uint, storage []byte) {
+ /* value of 1 indicates a simple Huffman code */
+ writeBits(2, 1, storage_ix, storage)
+
+ writeBits(2, uint64(num_symbols)-1, storage_ix, storage) /* NSYM - 1 */
+ {
+ /* Sort */
+ var i uint
+ for i = 0; i < num_symbols; i++ {
+ var j uint
+ for j = i + 1; j < num_symbols; j++ {
+ if depths[symbols[j]] < depths[symbols[i]] {
+ var tmp uint = symbols[j]
+ symbols[j] = symbols[i]
+ symbols[i] = tmp
+ }
+ }
+ }
+ }
+
+ if num_symbols == 2 {
+ writeBits(max_bits, uint64(symbols[0]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[1]), storage_ix, storage)
+ } else if num_symbols == 3 {
+ writeBits(max_bits, uint64(symbols[0]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[1]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[2]), storage_ix, storage)
+ } else {
+ writeBits(max_bits, uint64(symbols[0]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[1]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[2]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[3]), storage_ix, storage)
+
+ /* tree-select */
+ var tmp int
+ if depths[symbols[0]] == 1 {
+ tmp = 1
+ } else {
+ tmp = 0
+ }
+ writeBits(1, uint64(tmp), storage_ix, storage)
+ }
+}
+
+/* num = alphabet size
+ depths = symbol depths */
+func storeHuffmanTree(depths []byte, num uint, tree []huffmanTree, storage_ix *uint, storage []byte) {
+ var huffman_tree [numCommandSymbols]byte
+ var huffman_tree_extra_bits [numCommandSymbols]byte
+ var huffman_tree_size uint = 0
+ var code_length_bitdepth = [codeLengthCodes]byte{0}
+ var code_length_bitdepth_symbols [codeLengthCodes]uint16
+ var huffman_tree_histogram = [codeLengthCodes]uint32{0}
+ var i uint
+ var num_codes int = 0
+ /* Write the Huffman tree into the brotli-representation.
+ The command alphabet is the largest, so this allocation will fit all
+ alphabets. */
+
+ var code uint = 0
+
+ assert(num <= numCommandSymbols)
+
+ writeHuffmanTree(depths, num, &huffman_tree_size, huffman_tree[:], huffman_tree_extra_bits[:])
+
+ /* Calculate the statistics of the Huffman tree in brotli-representation. */
+ for i = 0; i < huffman_tree_size; i++ {
+ huffman_tree_histogram[huffman_tree[i]]++
+ }
+
+ for i = 0; i < codeLengthCodes; i++ {
+ if huffman_tree_histogram[i] != 0 {
+ if num_codes == 0 {
+ code = i
+ num_codes = 1
+ } else if num_codes == 1 {
+ num_codes = 2
+ break
+ }
+ }
+ }
+
+ /* Calculate a second Huffman tree, used to compress the earlier
+ Huffman tree itself. */
+ createHuffmanTree(huffman_tree_histogram[:], codeLengthCodes, 5, tree, code_length_bitdepth[:])
+
+ convertBitDepthsToSymbols(code_length_bitdepth[:], codeLengthCodes, code_length_bitdepth_symbols[:])
+
+ /* Now, we have all the data, let's start storing it */
+ storeHuffmanTreeOfHuffmanTreeToBitMask(num_codes, code_length_bitdepth[:], storage_ix, storage)
+
+ if num_codes == 1 {
+ code_length_bitdepth[code] = 0
+ }
+
+ /* Store the real Huffman tree now. */
+ storeHuffmanTreeToBitMask(huffman_tree_size, huffman_tree[:], huffman_tree_extra_bits[:], code_length_bitdepth[:], code_length_bitdepth_symbols[:], storage_ix, storage)
+}
+
+/* Builds a Huffman tree from histogram[0:histogram_length] into
+ depth[0:histogram_length] and bits[0:histogram_length] and stores the
+ encoded tree to the bit stream. */
+func buildAndStoreHuffmanTree(histogram []uint32, histogram_length uint, alphabet_size uint, tree []huffmanTree, depth []byte, bits []uint16, storage_ix *uint, storage []byte) {
+ var count uint = 0
+ var s4 = [4]uint{0}
+ var i uint
+ var max_bits uint = 0
+ for i = 0; i < histogram_length; i++ {
+ if histogram[i] != 0 {
+ if count < 4 {
+ s4[count] = i
+ } else if count > 4 {
+ break
+ }
+
+ count++
+ }
+ }
+ {
+ var max_bits_counter uint = alphabet_size - 1
+ for max_bits_counter != 0 {
+ max_bits_counter >>= 1
+ max_bits++
+ }
+ }
+
+ if count <= 1 {
+ writeBits(4, 1, storage_ix, storage)
+ writeBits(max_bits, uint64(s4[0]), storage_ix, storage)
+ depth[s4[0]] = 0
+ bits[s4[0]] = 0
+ return
+ }
+
+ for i := 0; i < int(histogram_length); i++ {
+ depth[i] = 0
+ }
+ createHuffmanTree(histogram, histogram_length, 15, tree, depth)
+ convertBitDepthsToSymbols(depth, histogram_length, bits)
+
+ if count <= 4 {
+ storeSimpleHuffmanTree(depth, s4[:], count, max_bits, storage_ix, storage)
+ } else {
+ storeHuffmanTree(depth, histogram_length, tree, storage_ix, storage)
+ }
+}
+
+func sortHuffmanTree1(v0 huffmanTree, v1 huffmanTree) bool {
+ return v0.total_count_ < v1.total_count_
+}
+
+var huffmanTreePool sync.Pool
+
+func buildAndStoreHuffmanTreeFast(histogram []uint32, histogram_total uint, max_bits uint, depth []byte, bits []uint16, storage_ix *uint, storage []byte) {
+ var count uint = 0
+ var symbols = [4]uint{0}
+ var length uint = 0
+ var total uint = histogram_total
+ for total != 0 {
+ if histogram[length] != 0 {
+ if count < 4 {
+ symbols[count] = length
+ }
+
+ count++
+ total -= uint(histogram[length])
+ }
+
+ length++
+ }
+
+ if count <= 1 {
+ writeBits(4, 1, storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[0]), storage_ix, storage)
+ depth[symbols[0]] = 0
+ bits[symbols[0]] = 0
+ return
+ }
+
+ for i := 0; i < int(length); i++ {
+ depth[i] = 0
+ }
+ {
+ var max_tree_size uint = 2*length + 1
+ tree, _ := huffmanTreePool.Get().(*[]huffmanTree)
+ if tree == nil || cap(*tree) < int(max_tree_size) {
+ tmp := make([]huffmanTree, max_tree_size)
+ tree = &tmp
+ } else {
+ *tree = (*tree)[:max_tree_size]
+ }
+ var count_limit uint32
+ for count_limit = 1; ; count_limit *= 2 {
+ var node int = 0
+ var l uint
+ for l = length; l != 0; {
+ l--
+ if histogram[l] != 0 {
+ if histogram[l] >= count_limit {
+ initHuffmanTree(&(*tree)[node:][0], histogram[l], -1, int16(l))
+ } else {
+ initHuffmanTree(&(*tree)[node:][0], count_limit, -1, int16(l))
+ }
+
+ node++
+ }
+ }
+ {
+ var n int = node
+ var sentinel huffmanTree
+ var i int = 0 /* Points to the next leaf node. */
+ var j int = n + 1 /* Points to the next non-leaf node. */
+ var k int
+
+ sortHuffmanTreeItems(*tree, uint(n), huffmanTreeComparator(sortHuffmanTree1))
+
+ /* The nodes are:
+ [0, n): the sorted leaf nodes that we start with.
+ [n]: we add a sentinel here.
+ [n + 1, 2n): new parent nodes are added here, starting from
+ (n+1). These are naturally in ascending order.
+ [2n]: we add a sentinel at the end as well.
+ There will be (2n+1) elements at the end. */
+ initHuffmanTree(&sentinel, math.MaxUint32, -1, -1)
+
+ (*tree)[node] = sentinel
+ node++
+ (*tree)[node] = sentinel
+ node++
+
+ for k = n - 1; k > 0; k-- {
+ var left int
+ var right int
+ if (*tree)[i].total_count_ <= (*tree)[j].total_count_ {
+ left = i
+ i++
+ } else {
+ left = j
+ j++
+ }
+
+ if (*tree)[i].total_count_ <= (*tree)[j].total_count_ {
+ right = i
+ i++
+ } else {
+ right = j
+ j++
+ }
+
+ /* The sentinel node becomes the parent node. */
+ (*tree)[node-1].total_count_ = (*tree)[left].total_count_ + (*tree)[right].total_count_
+
+ (*tree)[node-1].index_left_ = int16(left)
+ (*tree)[node-1].index_right_or_value_ = int16(right)
+
+ /* Add back the last sentinel node. */
+ (*tree)[node] = sentinel
+ node++
+ }
+
+ if setDepth(2*n-1, *tree, depth, 14) {
+ /* The Huffman tree must pack into 14 bits of depth. If it does not,
+ the outer loop doubles count_limit to flatten the lowest counts
+ and retries. */
+ break
+ }
+ }
+ }
+
+ huffmanTreePool.Put(tree)
+ }
+
+ convertBitDepthsToSymbols(depth, length, bits)
+ if count <= 4 {
+ var i uint
+
+ /* value of 1 indicates a simple Huffman code */
+ writeBits(2, 1, storage_ix, storage)
+
+ writeBits(2, uint64(count)-1, storage_ix, storage) /* NSYM - 1 */
+
+ /* Sort the (at most 4) symbols by code length with a simple exchange sort. */
+ for i = 0; i < count; i++ {
+ var j uint
+ for j = i + 1; j < count; j++ {
+ if depth[symbols[j]] < depth[symbols[i]] {
+ var tmp uint = symbols[j]
+ symbols[j] = symbols[i]
+ symbols[i] = tmp
+ }
+ }
+ }
+
+ if count == 2 {
+ writeBits(max_bits, uint64(symbols[0]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[1]), storage_ix, storage)
+ } else if count == 3 {
+ writeBits(max_bits, uint64(symbols[0]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[1]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[2]), storage_ix, storage)
+ } else {
+ writeBits(max_bits, uint64(symbols[0]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[1]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[2]), storage_ix, storage)
+ writeBits(max_bits, uint64(symbols[3]), storage_ix, storage)
+
+ /* tree-select */
+ var tmp int
+ if depth[symbols[0]] == 1 {
+ tmp = 1
+ } else {
+ tmp = 0
+ }
+ writeBits(1, uint64(tmp), storage_ix, storage)
+ }
+ } else {
+ var previous_value byte = 8
+ var i uint
+
+ /* Complex Huffman Tree */
+ storeStaticCodeLengthCode(storage_ix, storage)
+
+ /* Actual RLE coding. */
+ for i = 0; i < length; {
+ var value byte = depth[i]
+ var reps uint = 1
+ var k uint
+ for k = i + 1; k < length && depth[k] == value; k++ {
+ reps++
+ }
+
+ i += reps
+ if value == 0 {
+ writeBits(uint(kZeroRepsDepth[reps]), kZeroRepsBits[reps], storage_ix, storage)
+ } else {
+ if previous_value != value {
+ writeBits(uint(kCodeLengthDepth[value]), uint64(kCodeLengthBits[value]), storage_ix, storage)
+ reps--
+ }
+
+ if reps < 3 {
+ for reps != 0 {
+ reps--
+ writeBits(uint(kCodeLengthDepth[value]), uint64(kCodeLengthBits[value]), storage_ix, storage)
+ }
+ } else {
+ reps -= 3
+ writeBits(uint(kNonZeroRepsDepth[reps]), kNonZeroRepsBits[reps], storage_ix, storage)
+ }
+
+ previous_value = value
+ }
+ }
+ }
+}
+
+func indexOf(v []byte, v_size uint, value byte) uint {
+ var i uint = 0
+ for ; i < v_size; i++ {
+ if v[i] == value {
+ return i
+ }
+ }
+
+ return i
+}
+
+func moveToFront(v []byte, index uint) {
+ var value byte = v[index]
+ var i uint
+ for i = index; i != 0; i-- {
+ v[i] = v[i-1]
+ }
+
+ v[0] = value
+}
+
+func moveToFrontTransform(v_in []uint32, v_size uint, v_out []uint32) {
+ var i uint
+ var mtf [256]byte
+ var max_value uint32
+ if v_size == 0 {
+ return
+ }
+
+ max_value = v_in[0]
+ for i = 1; i < v_size; i++ {
+ if v_in[i] > max_value {
+ max_value = v_in[i]
+ }
+ }
+
+ assert(max_value < 256)
+ for i = 0; uint32(i) <= max_value; i++ {
+ mtf[i] = byte(i)
+ }
+ {
+ var mtf_size uint = uint(max_value + 1)
+ for i = 0; i < v_size; i++ {
+ var index uint = indexOf(mtf[:], mtf_size, byte(v_in[i]))
+ assert(index < mtf_size)
+ v_out[i] = uint32(index)
+ moveToFront(mtf[:], index)
+ }
+ }
+}
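+
+/* For example, moveToFrontTransform({1, 1, 0, 2}, 4, out) sets
+ out = {1, 0, 1, 2}: the MTF list starts as [0 1 2]; each input value is
+ replaced by its current list position and then moved to the front. */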
+
+/* Finds runs of zeros in v[0..in_size) and replaces them with a prefix code of
+ the run length plus extra bits (the lower 9 bits are the prefix code and the
+ rest are the extra bits). Non-zero values in v[] are shifted by
+ *max_run_length_prefix. Will not create prefix codes bigger than the initial
+ value of *max_run_length_prefix. The prefix code of run length L is simply
+ Log2Floor(L) and the number of extra bits is the same as the prefix code. */
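+/* For example, with *max_run_length_prefix == 6, a run of 20 zeros gets
+ prefix code Log2Floor(20) == 4 (the "16 zeros" code) with 4 extra bits
+ holding 20 - 16 == 4, i.e. the packed value 4 + (4 << 9). */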
+func runLengthCodeZeros(in_size uint, v []uint32, out_size *uint, max_run_length_prefix *uint32) {
+ var max_reps uint32 = 0
+ var i uint
+ var max_prefix uint32
+ for i = 0; i < in_size; {
+ var reps uint32 = 0
+ for ; i < in_size && v[i] != 0; i++ {
+ }
+ for ; i < in_size && v[i] == 0; i++ {
+ reps++
+ }
+
+ max_reps = brotli_max_uint32_t(reps, max_reps)
+ }
+
+ if max_reps > 0 {
+ max_prefix = log2FloorNonZero(uint(max_reps))
+ } else {
+ max_prefix = 0
+ }
+ max_prefix = brotli_min_uint32_t(max_prefix, *max_run_length_prefix)
+ *max_run_length_prefix = max_prefix
+ *out_size = 0
+ for i = 0; i < in_size; {
+ assert(*out_size <= i)
+ if v[i] != 0 {
+ v[*out_size] = v[i] + *max_run_length_prefix
+ i++
+ (*out_size)++
+ } else {
+ var reps uint32 = 1
+ var k uint
+ for k = i + 1; k < in_size && v[k] == 0; k++ {
+ reps++
+ }
+
+ i += uint(reps)
+ for reps != 0 {
+ if reps < 2<<max_prefix {
+ var run_length_prefix uint32 = log2FloorNonZero(uint(reps))
+ var extra_bits uint32 = reps - (1 << run_length_prefix)
+ v[*out_size] = run_length_prefix + (extra_bits << 9)
+ (*out_size)++
+ break
+ } else {
+ var extra_bits uint32 = (1 << max_prefix) - 1
+ v[*out_size] = max_prefix + (extra_bits << 9)
+ reps -= (2 << max_prefix) - 1
+ (*out_size)++
+ }
+ }
+ }
+ }
+}
+
+const symbolBits = 9
+
+var encodeContextMap_kSymbolMask uint32 = (1 << symbolBits) - 1
+
+func encodeContextMap(context_map []uint32, context_map_size uint, num_clusters uint, tree []huffmanTree, storage_ix *uint, storage []byte) {
+ var i uint
+ var rle_symbols []uint32
+ var max_run_length_prefix uint32 = 6
+ var num_rle_symbols uint = 0
+ var histogram [maxContextMapSymbols]uint32
+ var depths [maxContextMapSymbols]byte
+ var bits [maxContextMapSymbols]uint16
+
+ storeVarLenUint8(num_clusters-1, storage_ix, storage)
+
+ if num_clusters == 1 {
+ return
+ }
+
+ rle_symbols = make([]uint32, context_map_size)
+ moveToFrontTransform(context_map, context_map_size, rle_symbols)
+ runLengthCodeZeros(context_map_size, rle_symbols, &num_rle_symbols, &max_run_length_prefix)
+ histogram = [maxContextMapSymbols]uint32{}
+ for i = 0; i < num_rle_symbols; i++ {
+ histogram[rle_symbols[i]&encodeContextMap_kSymbolMask]++
+ }
+ {
+ var use_rle bool = (max_run_length_prefix > 0)
+ writeSingleBit(use_rle, storage_ix, storage)
+ if use_rle {
+ writeBits(4, uint64(max_run_length_prefix)-1, storage_ix, storage)
+ }
+ }
+
+ buildAndStoreHuffmanTree(histogram[:], uint(uint32(num_clusters)+max_run_length_prefix), uint(uint32(num_clusters)+max_run_length_prefix), tree, depths[:], bits[:], storage_ix, storage)
+ for i = 0; i < num_rle_symbols; i++ {
+ var rle_symbol uint32 = rle_symbols[i] & encodeContextMap_kSymbolMask
+ var extra_bits_val uint32 = rle_symbols[i] >> symbolBits
+ writeBits(uint(depths[rle_symbol]), uint64(bits[rle_symbol]), storage_ix, storage)
+ if rle_symbol > 0 && rle_symbol <= max_run_length_prefix {
+ writeBits(uint(rle_symbol), uint64(extra_bits_val), storage_ix, storage)
+ }
+ }
+
+ writeBits(1, 1, storage_ix, storage) /* use move-to-front */
+ rle_symbols = nil
+}
+
+/* Stores the block switch command with index block_ix to the bit stream. */
+func storeBlockSwitch(code *blockSplitCode, block_len uint32, block_type byte, is_first_block bool, storage_ix *uint, storage []byte) {
+ var typecode uint = nextBlockTypeCode(&code.type_code_calculator, block_type)
+ var lencode uint
+ var len_nextra uint32
+ var len_extra uint32
+ if !is_first_block {
+ writeBits(uint(code.type_depths[typecode]), uint64(code.type_bits[typecode]), storage_ix, storage)
+ }
+
+ getBlockLengthPrefixCode(block_len, &lencode, &len_nextra, &len_extra)
+
+ writeBits(uint(code.length_depths[lencode]), uint64(code.length_bits[lencode]), storage_ix, storage)
+ writeBits(uint(len_nextra), uint64(len_extra), storage_ix, storage)
+}
+
+/* Builds a BlockSplitCode data structure from the block split given by the
+ vector of block types and block lengths and stores it to the bit stream. */
+func buildAndStoreBlockSplitCode(types []byte, lengths []uint32, num_blocks uint, num_types uint, tree []huffmanTree, code *blockSplitCode, storage_ix *uint, storage []byte) {
+ var type_histo [maxBlockTypeSymbols]uint32
+ var length_histo [numBlockLenSymbols]uint32
+ var i uint
+ var type_code_calculator blockTypeCodeCalculator
+ for i := 0; i < int(num_types+2); i++ {
+ type_histo[i] = 0
+ }
+ length_histo = [numBlockLenSymbols]uint32{}
+ initBlockTypeCodeCalculator(&type_code_calculator)
+ for i = 0; i < num_blocks; i++ {
+ var type_code uint = nextBlockTypeCode(&type_code_calculator, types[i])
+ if i != 0 {
+ type_histo[type_code]++
+ }
+ length_histo[blockLengthPrefixCode(lengths[i])]++
+ }
+
+ storeVarLenUint8(num_types-1, storage_ix, storage)
+ if num_types > 1 { /* TODO: else? could StoreBlockSwitch occur? */
+ buildAndStoreHuffmanTree(type_histo[0:], num_types+2, num_types+2, tree, code.type_depths[0:], code.type_bits[0:], storage_ix, storage)
+ buildAndStoreHuffmanTree(length_histo[0:], numBlockLenSymbols, numBlockLenSymbols, tree, code.length_depths[0:], code.length_bits[0:], storage_ix, storage)
+ storeBlockSwitch(code, lengths[0], types[0], true, storage_ix, storage)
+ }
+}
+
+/* Stores a context map where the histogram type is always the block type. */
+func storeTrivialContextMap(num_types uint, context_bits uint, tree []huffmanTree, storage_ix *uint, storage []byte) {
+ storeVarLenUint8(num_types-1, storage_ix, storage)
+ if num_types > 1 {
+ var repeat_code uint = context_bits - 1
+ var repeat_bits uint = (1 << repeat_code) - 1
+ var alphabet_size uint = num_types + repeat_code
+ var histogram [maxContextMapSymbols]uint32
+ var depths [maxContextMapSymbols]byte
+ var bits [maxContextMapSymbols]uint16
+ var i uint
+ for i := 0; i < int(alphabet_size); i++ {
+ histogram[i] = 0
+ }
+
+ /* Write RLEMAX. */
+ writeBits(1, 1, storage_ix, storage)
+
+ writeBits(4, uint64(repeat_code)-1, storage_ix, storage)
+ histogram[repeat_code] = uint32(num_types)
+ histogram[0] = 1
+ for i = context_bits; i < alphabet_size; i++ {
+ histogram[i] = 1
+ }
+
+ buildAndStoreHuffmanTree(histogram[:], alphabet_size, alphabet_size, tree, depths[:], bits[:], storage_ix, storage)
+ for i = 0; i < num_types; i++ {
+ var tmp uint
+ if i == 0 {
+ tmp = 0
+ } else {
+ tmp = i + context_bits - 1
+ }
+ var code uint = tmp
+ writeBits(uint(depths[code]), uint64(bits[code]), storage_ix, storage)
+ writeBits(uint(depths[repeat_code]), uint64(bits[repeat_code]), storage_ix, storage)
+ writeBits(repeat_code, uint64(repeat_bits), storage_ix, storage)
+ }
+
+ /* Write IMTF (inverse-move-to-front) bit. */
+ writeBits(1, 1, storage_ix, storage)
+ }
+}
+
+/* Manages the encoding of one block category (literal, command or distance). */
+type blockEncoder struct {
+ histogram_length_ uint
+ num_block_types_ uint
+ block_types_ []byte
+ block_lengths_ []uint32
+ num_blocks_ uint
+ block_split_code_ blockSplitCode
+ block_ix_ uint
+ block_len_ uint
+ entropy_ix_ uint
+ depths_ []byte
+ bits_ []uint16
+}
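+
+/* Typical lifecycle, as used by storeMetaBlock below: getBlockEncoder,
+ buildAndStoreBlockSwitchEntropyCodes, one of the buildAndStoreEntropyCodes*
+ functions, storeSymbol or storeSymbolWithContext per symbol, and finally
+ cleanupBlockEncoder to return the encoder to the pool. */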
+
+var blockEncoderPool sync.Pool
+
+func getBlockEncoder(histogram_length uint, num_block_types uint, block_types []byte, block_lengths []uint32, num_blocks uint) *blockEncoder {
+ self, _ := blockEncoderPool.Get().(*blockEncoder)
+
+ if self != nil {
+ self.block_ix_ = 0
+ self.entropy_ix_ = 0
+ self.depths_ = self.depths_[:0]
+ self.bits_ = self.bits_[:0]
+ } else {
+ self = &blockEncoder{}
+ }
+
+ self.histogram_length_ = histogram_length
+ self.num_block_types_ = num_block_types
+ self.block_types_ = block_types
+ self.block_lengths_ = block_lengths
+ self.num_blocks_ = num_blocks
+ initBlockTypeCodeCalculator(&self.block_split_code_.type_code_calculator)
+ if num_blocks == 0 {
+ self.block_len_ = 0
+ } else {
+ self.block_len_ = uint(block_lengths[0])
+ }
+
+ return self
+}
+
+func cleanupBlockEncoder(self *blockEncoder) {
+ blockEncoderPool.Put(self)
+}
+
+/* Creates entropy codes of block lengths and block types and stores them
+ to the bit stream. */
+func buildAndStoreBlockSwitchEntropyCodes(self *blockEncoder, tree []huffmanTree, storage_ix *uint, storage []byte) {
+ buildAndStoreBlockSplitCode(self.block_types_, self.block_lengths_, self.num_blocks_, self.num_block_types_, tree, &self.block_split_code_, storage_ix, storage)
+}
+
+/* Stores the next symbol with the entropy code of the current block type.
+ Updates the block type and block length at block boundaries. */
+func storeSymbol(self *blockEncoder, symbol uint, storage_ix *uint, storage []byte) {
+ if self.block_len_ == 0 {
+ self.block_ix_++
+ var block_ix uint = self.block_ix_
+ var block_len uint32 = self.block_lengths_[block_ix]
+ var block_type byte = self.block_types_[block_ix]
+ self.block_len_ = uint(block_len)
+ self.entropy_ix_ = uint(block_type) * self.histogram_length_
+ storeBlockSwitch(&self.block_split_code_, block_len, block_type, false, storage_ix, storage)
+ }
+
+ self.block_len_--
+ {
+ var ix uint = self.entropy_ix_ + symbol
+ writeBits(uint(self.depths_[ix]), uint64(self.bits_[ix]), storage_ix, storage)
+ }
+}
+
+/* Stores the next symbol with the entropy code of the current block type and
+ context value.
+ Updates the block type and block length at block boundaries. */
+func storeSymbolWithContext(self *blockEncoder, symbol uint, context uint, context_map []uint32, storage_ix *uint, storage []byte, context_bits uint) {
+ if self.block_len_ == 0 {
+ self.block_ix_++
+ var block_ix uint = self.block_ix_
+ var block_len uint32 = self.block_lengths_[block_ix]
+ var block_type byte = self.block_types_[block_ix]
+ self.block_len_ = uint(block_len)
+ self.entropy_ix_ = uint(block_type) << context_bits
+ storeBlockSwitch(&self.block_split_code_, block_len, block_type, false, storage_ix, storage)
+ }
+
+ self.block_len_--
+ {
+ var histo_ix uint = uint(context_map[self.entropy_ix_+context])
+ var ix uint = histo_ix*self.histogram_length_ + symbol
+ writeBits(uint(self.depths_[ix]), uint64(self.bits_[ix]), storage_ix, storage)
+ }
+}
+
+func buildAndStoreEntropyCodesLiteral(self *blockEncoder, histograms []histogramLiteral, histograms_size uint, alphabet_size uint, tree []huffmanTree, storage_ix *uint, storage []byte) {
+ var table_size uint = histograms_size * self.histogram_length_
+ if cap(self.depths_) < int(table_size) {
+ self.depths_ = make([]byte, table_size)
+ } else {
+ self.depths_ = self.depths_[:table_size]
+ }
+ if cap(self.bits_) < int(table_size) {
+ self.bits_ = make([]uint16, table_size)
+ } else {
+ self.bits_ = self.bits_[:table_size]
+ }
+ {
+ var i uint
+ for i = 0; i < histograms_size; i++ {
+ var ix uint = i * self.histogram_length_
+ buildAndStoreHuffmanTree(histograms[i].data_[0:], self.histogram_length_, alphabet_size, tree, self.depths_[ix:], self.bits_[ix:], storage_ix, storage)
+ }
+ }
+}
+
+func buildAndStoreEntropyCodesCommand(self *blockEncoder, histograms []histogramCommand, histograms_size uint, alphabet_size uint, tree []huffmanTree, storage_ix *uint, storage []byte) {
+ var table_size uint = histograms_size * self.histogram_length_
+ if cap(self.depths_) < int(table_size) {
+ self.depths_ = make([]byte, table_size)
+ } else {
+ self.depths_ = self.depths_[:table_size]
+ }
+ if cap(self.bits_) < int(table_size) {
+ self.bits_ = make([]uint16, table_size)
+ } else {
+ self.bits_ = self.bits_[:table_size]
+ }
+ {
+ var i uint
+ for i = 0; i < histograms_size; i++ {
+ var ix uint = i * self.histogram_length_
+ buildAndStoreHuffmanTree(histograms[i].data_[0:], self.histogram_length_, alphabet_size, tree, self.depths_[ix:], self.bits_[ix:], storage_ix, storage)
+ }
+ }
+}
+
+func buildAndStoreEntropyCodesDistance(self *blockEncoder, histograms []histogramDistance, histograms_size uint, alphabet_size uint, tree []huffmanTree, storage_ix *uint, storage []byte) {
+ var table_size uint = histograms_size * self.histogram_length_
+ if cap(self.depths_) < int(table_size) {
+ self.depths_ = make([]byte, table_size)
+ } else {
+ self.depths_ = self.depths_[:table_size]
+ }
+ if cap(self.bits_) < int(table_size) {
+ self.bits_ = make([]uint16, table_size)
+ } else {
+ self.bits_ = self.bits_[:table_size]
+ }
+ {
+ var i uint
+ for i = 0; i < histograms_size; i++ {
+ var ix uint = i * self.histogram_length_
+ buildAndStoreHuffmanTree(histograms[i].data_[0:], self.histogram_length_, alphabet_size, tree, self.depths_[ix:], self.bits_[ix:], storage_ix, storage)
+ }
+ }
+}
+
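+/* Rounds *storage_ix up to the next byte boundary and clears the byte the
+ next write lands in; e.g. a bit position of 13 becomes 16. */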
+func jumpToByteBoundary(storage_ix *uint, storage []byte) {
+ *storage_ix = (*storage_ix + 7) &^ 7
+ storage[*storage_ix>>3] = 0
+}
+
+func storeMetaBlock(input []byte, start_pos uint, length uint, mask uint, prev_byte byte, prev_byte2 byte, is_last bool, params *encoderParams, literal_context_mode int, commands []command, mb *metaBlockSplit, storage_ix *uint, storage []byte) {
+ var pos uint = start_pos
+ var i uint
+ var num_distance_symbols uint32 = params.dist.alphabet_size
+ var num_effective_distance_symbols uint32 = num_distance_symbols
+ var tree []huffmanTree
+ var literal_context_lut contextLUT = getContextLUT(literal_context_mode)
+ var dist *distanceParams = &params.dist
+ if params.large_window && num_effective_distance_symbols > numHistogramDistanceSymbols {
+ num_effective_distance_symbols = numHistogramDistanceSymbols
+ }
+
+ storeCompressedMetaBlockHeader(is_last, length, storage_ix, storage)
+
+ tree = make([]huffmanTree, maxHuffmanTreeSize)
+ literal_enc := getBlockEncoder(numLiteralSymbols, mb.literal_split.num_types, mb.literal_split.types, mb.literal_split.lengths, mb.literal_split.num_blocks)
+ command_enc := getBlockEncoder(numCommandSymbols, mb.command_split.num_types, mb.command_split.types, mb.command_split.lengths, mb.command_split.num_blocks)
+ distance_enc := getBlockEncoder(uint(num_effective_distance_symbols), mb.distance_split.num_types, mb.distance_split.types, mb.distance_split.lengths, mb.distance_split.num_blocks)
+
+ buildAndStoreBlockSwitchEntropyCodes(literal_enc, tree, storage_ix, storage)
+ buildAndStoreBlockSwitchEntropyCodes(command_enc, tree, storage_ix, storage)
+ buildAndStoreBlockSwitchEntropyCodes(distance_enc, tree, storage_ix, storage)
+
+ writeBits(2, uint64(dist.distance_postfix_bits), storage_ix, storage)
+ writeBits(4, uint64(dist.num_direct_distance_codes)>>dist.distance_postfix_bits, storage_ix, storage)
+ for i = 0; i < mb.literal_split.num_types; i++ {
+ writeBits(2, uint64(literal_context_mode), storage_ix, storage)
+ }
+
+ if mb.literal_context_map_size == 0 {
+ storeTrivialContextMap(mb.literal_histograms_size, literalContextBits, tree, storage_ix, storage)
+ } else {
+ encodeContextMap(mb.literal_context_map, mb.literal_context_map_size, mb.literal_histograms_size, tree, storage_ix, storage)
+ }
+
+ if mb.distance_context_map_size == 0 {
+ storeTrivialContextMap(mb.distance_histograms_size, distanceContextBits, tree, storage_ix, storage)
+ } else {
+ encodeContextMap(mb.distance_context_map, mb.distance_context_map_size, mb.distance_histograms_size, tree, storage_ix, storage)
+ }
+
+ buildAndStoreEntropyCodesLiteral(literal_enc, mb.literal_histograms, mb.literal_histograms_size, numLiteralSymbols, tree, storage_ix, storage)
+ buildAndStoreEntropyCodesCommand(command_enc, mb.command_histograms, mb.command_histograms_size, numCommandSymbols, tree, storage_ix, storage)
+ buildAndStoreEntropyCodesDistance(distance_enc, mb.distance_histograms, mb.distance_histograms_size, uint(num_distance_symbols), tree, storage_ix, storage)
+ tree = nil
+
+ for _, cmd := range commands {
+ var cmd_code uint = uint(cmd.cmd_prefix_)
+ storeSymbol(command_enc, cmd_code, storage_ix, storage)
+ storeCommandExtra(&cmd, storage_ix, storage)
+ if mb.literal_context_map_size == 0 {
+ var j uint
+ for j = uint(cmd.insert_len_); j != 0; j-- {
+ storeSymbol(literal_enc, uint(input[pos&mask]), storage_ix, storage)
+ pos++
+ }
+ } else {
+ var j uint
+ for j = uint(cmd.insert_len_); j != 0; j-- {
+ var context uint = uint(getContext(prev_byte, prev_byte2, literal_context_lut))
+ var literal byte = input[pos&mask]
+ storeSymbolWithContext(literal_enc, uint(literal), context, mb.literal_context_map, storage_ix, storage, literalContextBits)
+ prev_byte2 = prev_byte
+ prev_byte = literal
+ pos++
+ }
+ }
+
+ pos += uint(commandCopyLen(&cmd))
+ if commandCopyLen(&cmd) != 0 {
+ prev_byte2 = input[(pos-2)&mask]
+ prev_byte = input[(pos-1)&mask]
+ if cmd.cmd_prefix_ >= 128 {
+ var dist_code uint = uint(cmd.dist_prefix_) & 0x3FF
+ var distnumextra uint32 = uint32(cmd.dist_prefix_) >> 10
+ var distextra uint64 = uint64(cmd.dist_extra_)
+ if mb.distance_context_map_size == 0 {
+ storeSymbol(distance_enc, dist_code, storage_ix, storage)
+ } else {
+ var context uint = uint(commandDistanceContext(&cmd))
+ storeSymbolWithContext(distance_enc, dist_code, context, mb.distance_context_map, storage_ix, storage, distanceContextBits)
+ }
+
+ writeBits(uint(distnumextra), distextra, storage_ix, storage)
+ }
+ }
+ }
+
+ cleanupBlockEncoder(distance_enc)
+ cleanupBlockEncoder(command_enc)
+ cleanupBlockEncoder(literal_enc)
+ if is_last {
+ jumpToByteBoundary(storage_ix, storage)
+ }
+}
+
+func buildHistograms(input []byte, start_pos uint, mask uint, commands []command, lit_histo *histogramLiteral, cmd_histo *histogramCommand, dist_histo *histogramDistance) {
+ var pos uint = start_pos
+ for _, cmd := range commands {
+ var j uint
+ histogramAddCommand(cmd_histo, uint(cmd.cmd_prefix_))
+ for j = uint(cmd.insert_len_); j != 0; j-- {
+ histogramAddLiteral(lit_histo, uint(input[pos&mask]))
+ pos++
+ }
+
+ pos += uint(commandCopyLen(&cmd))
+ if commandCopyLen(&cmd) != 0 && cmd.cmd_prefix_ >= 128 {
+ histogramAddDistance(dist_histo, uint(cmd.dist_prefix_)&0x3FF)
+ }
+ }
+}
+
+func storeDataWithHuffmanCodes(input []byte, start_pos uint, mask uint, commands []command, lit_depth []byte, lit_bits []uint16, cmd_depth []byte, cmd_bits []uint16, dist_depth []byte, dist_bits []uint16, storage_ix *uint, storage []byte) {
+ var pos uint = start_pos
+ for _, cmd := range commands {
+ var cmd_code uint = uint(cmd.cmd_prefix_)
+ var j uint
+ writeBits(uint(cmd_depth[cmd_code]), uint64(cmd_bits[cmd_code]), storage_ix, storage)
+ storeCommandExtra(&cmd, storage_ix, storage)
+ for j = uint(cmd.insert_len_); j != 0; j-- {
+ var literal byte = input[pos&mask]
+ writeBits(uint(lit_depth[literal]), uint64(lit_bits[literal]), storage_ix, storage)
+ pos++
+ }
+
+ pos += uint(commandCopyLen(&cmd))
+ if commandCopyLen(&cmd) != 0 && cmd.cmd_prefix_ >= 128 {
+ var dist_code uint = uint(cmd.dist_prefix_) & 0x3FF
+ var distnumextra uint32 = uint32(cmd.dist_prefix_) >> 10
+ var distextra uint32 = cmd.dist_extra_
+ writeBits(uint(dist_depth[dist_code]), uint64(dist_bits[dist_code]), storage_ix, storage)
+ writeBits(uint(distnumextra), uint64(distextra), storage_ix, storage)
+ }
+ }
+}
+
+func storeMetaBlockTrivial(input []byte, start_pos uint, length uint, mask uint, is_last bool, params *encoderParams, commands []command, storage_ix *uint, storage []byte) {
+ var lit_histo histogramLiteral
+ var cmd_histo histogramCommand
+ var dist_histo histogramDistance
+ var lit_depth [numLiteralSymbols]byte
+ var lit_bits [numLiteralSymbols]uint16
+ var cmd_depth [numCommandSymbols]byte
+ var cmd_bits [numCommandSymbols]uint16
+ var dist_depth [maxSimpleDistanceAlphabetSize]byte
+ var dist_bits [maxSimpleDistanceAlphabetSize]uint16
+ var tree []huffmanTree
+ var num_distance_symbols uint32 = params.dist.alphabet_size
+
+ storeCompressedMetaBlockHeader(is_last, length, storage_ix, storage)
+
+ histogramClearLiteral(&lit_histo)
+ histogramClearCommand(&cmd_histo)
+ histogramClearDistance(&dist_histo)
+
+ buildHistograms(input, start_pos, mask, commands, &lit_histo, &cmd_histo, &dist_histo)
+
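+ /* No block splits, no contexts: thirteen zero header bits. */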
+ writeBits(13, 0, storage_ix, storage)
+
+ tree = make([]huffmanTree, maxHuffmanTreeSize)
+ buildAndStoreHuffmanTree(lit_histo.data_[:], numLiteralSymbols, numLiteralSymbols, tree, lit_depth[:], lit_bits[:], storage_ix, storage)
+ buildAndStoreHuffmanTree(cmd_histo.data_[:], numCommandSymbols, numCommandSymbols, tree, cmd_depth[:], cmd_bits[:], storage_ix, storage)
+ buildAndStoreHuffmanTree(dist_histo.data_[:], maxSimpleDistanceAlphabetSize, uint(num_distance_symbols), tree, dist_depth[:], dist_bits[:], storage_ix, storage)
+ tree = nil
+ storeDataWithHuffmanCodes(input, start_pos, mask, commands, lit_depth[:], lit_bits[:], cmd_depth[:], cmd_bits[:], dist_depth[:], dist_bits[:], storage_ix, storage)
+ if is_last {
+ jumpToByteBoundary(storage_ix, storage)
+ }
+}
+
+func storeMetaBlockFast(input []byte, start_pos uint, length uint, mask uint, is_last bool, params *encoderParams, commands []command, storage_ix *uint, storage []byte) {
+ var num_distance_symbols uint32 = params.dist.alphabet_size
+ var distance_alphabet_bits uint32 = log2FloorNonZero(uint(num_distance_symbols-1)) + 1
+
+ storeCompressedMetaBlockHeader(is_last, length, storage_ix, storage)
+
+ writeBits(13, 0, storage_ix, storage)
+
+ if len(commands) <= 128 {
+ var histogram = [numLiteralSymbols]uint32{0}
+ var pos uint = start_pos
+ var num_literals uint = 0
+ var lit_depth [numLiteralSymbols]byte
+ var lit_bits [numLiteralSymbols]uint16
+ for _, cmd := range commands {
+ var j uint
+ for j = uint(cmd.insert_len_); j != 0; j-- {
+ histogram[input[pos&mask]]++
+ pos++
+ }
+
+ num_literals += uint(cmd.insert_len_)
+ pos += uint(commandCopyLen(&cmd))
+ }
+
+ buildAndStoreHuffmanTreeFast(histogram[:], num_literals, /* max_bits = */
+ 8, lit_depth[:], lit_bits[:], storage_ix, storage)
+
+ storeStaticCommandHuffmanTree(storage_ix, storage)
+ storeStaticDistanceHuffmanTree(storage_ix, storage)
+ storeDataWithHuffmanCodes(input, start_pos, mask, commands, lit_depth[:], lit_bits[:], kStaticCommandCodeDepth[:], kStaticCommandCodeBits[:], kStaticDistanceCodeDepth[:], kStaticDistanceCodeBits[:], storage_ix, storage)
+ } else {
+ var lit_histo histogramLiteral
+ var cmd_histo histogramCommand
+ var dist_histo histogramDistance
+ var lit_depth [numLiteralSymbols]byte
+ var lit_bits [numLiteralSymbols]uint16
+ var cmd_depth [numCommandSymbols]byte
+ var cmd_bits [numCommandSymbols]uint16
+ var dist_depth [maxSimpleDistanceAlphabetSize]byte
+ var dist_bits [maxSimpleDistanceAlphabetSize]uint16
+ histogramClearLiteral(&lit_histo)
+ histogramClearCommand(&cmd_histo)
+ histogramClearDistance(&dist_histo)
+ buildHistograms(input, start_pos, mask, commands, &lit_histo, &cmd_histo, &dist_histo)
+ buildAndStoreHuffmanTreeFast(lit_histo.data_[:], lit_histo.total_count_, /* max_bits = */
+ 8, lit_depth[:], lit_bits[:], storage_ix, storage)
+
+ buildAndStoreHuffmanTreeFast(cmd_histo.data_[:], cmd_histo.total_count_, /* max_bits = */
+ 10, cmd_depth[:], cmd_bits[:], storage_ix, storage)
+
+ buildAndStoreHuffmanTreeFast(dist_histo.data_[:], dist_histo.total_count_, /* max_bits = */
+ uint(distance_alphabet_bits), dist_depth[:], dist_bits[:], storage_ix, storage)
+
+ storeDataWithHuffmanCodes(input, start_pos, mask, commands, lit_depth[:], lit_bits[:], cmd_depth[:], cmd_bits[:], dist_depth[:], dist_bits[:], storage_ix, storage)
+ }
+
+ if is_last {
+ jumpToByteBoundary(storage_ix, storage)
+ }
+}
+
+/* This is for storing uncompressed blocks (simple raw storage of
+ bytes-as-bytes). */
+func storeUncompressedMetaBlock(is_final_block bool, input []byte, position uint, mask uint, len uint, storage_ix *uint, storage []byte) {
+ var masked_pos uint = position & mask
+ storeUncompressedMetaBlockHeader(uint(len), storage_ix, storage)
+ jumpToByteBoundary(storage_ix, storage)
+
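+ /* The input slice may wrap around the ring buffer; if so, copy the tail
+ of the buffer first and the remainder from its start. */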
+ if masked_pos+len > mask+1 {
+ var len1 uint = mask + 1 - masked_pos
+ copy(storage[*storage_ix>>3:], input[masked_pos:][:len1])
+ *storage_ix += len1 << 3
+ len -= len1
+ masked_pos = 0
+ }
+
+ copy(storage[*storage_ix>>3:], input[masked_pos:][:len])
+ *storage_ix += uint(len << 3)
+
+ /* We need to clear the next 4 bytes so that subsequent
+ writeBits calls can OR bits into zeroed storage. */
+ writeBitsPrepareStorage(*storage_ix, storage)
+
+ /* Since the uncompressed block itself may not be the final block, add an
+ empty one after this. */
+ if is_final_block {
+ writeBits(1, 1, storage_ix, storage) /* islast */
+ writeBits(1, 1, storage_ix, storage) /* isempty */
+ jumpToByteBoundary(storage_ix, storage)
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/cluster.go b/vendor/github.com/andybalholm/brotli/cluster.go
new file mode 100644
index 0000000..df8a328
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/cluster.go
@@ -0,0 +1,30 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Functions for clustering similar histograms together. */
+
+type histogramPair struct {
+ idx1 uint32
+ idx2 uint32
+ cost_combo float64
+ cost_diff float64
+}
+
+func histogramPairIsLess(p1 *histogramPair, p2 *histogramPair) bool {
+ if p1.cost_diff != p2.cost_diff {
+ return p1.cost_diff > p2.cost_diff
+ }
+
+ return (p1.idx2 - p1.idx1) > (p2.idx2 - p2.idx1)
+}
+
+/* Returns entropy reduction of the context map when we combine two clusters. */
+func clusterCostDiff(size_a uint, size_b uint) float64 {
+ var size_c uint = size_a + size_b
+ return float64(size_a)*fastLog2(size_a) + float64(size_b)*fastLog2(size_b) - float64(size_c)*fastLog2(size_c)
+}
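+
+/* For example, merging two singleton clusters gives
+ 1*log2(1) + 1*log2(1) - 2*log2(2) = -2 bits. */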
diff --git a/vendor/github.com/andybalholm/brotli/cluster_command.go b/vendor/github.com/andybalholm/brotli/cluster_command.go
new file mode 100644
index 0000000..45b569b
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/cluster_command.go
@@ -0,0 +1,164 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if
+ it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. */
+func compareAndPushToQueueCommand(out []histogramCommand, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) {
+ var is_good_pair bool = false
+ var p histogramPair
+ p.idx2 = 0
+ p.idx1 = p.idx2
+ p.cost_combo = 0
+ p.cost_diff = p.cost_combo
+ if idx1 == idx2 {
+ return
+ }
+
+ if idx2 < idx1 {
+ var t uint32 = idx2
+ idx2 = idx1
+ idx1 = t
+ }
+
+ p.idx1 = idx1
+ p.idx2 = idx2
+ p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2]))
+ p.cost_diff -= out[idx1].bit_cost_
+ p.cost_diff -= out[idx2].bit_cost_
+
+ if out[idx1].total_count_ == 0 {
+ p.cost_combo = out[idx2].bit_cost_
+ is_good_pair = true
+ } else if out[idx2].total_count_ == 0 {
+ p.cost_combo = out[idx1].bit_cost_
+ is_good_pair = true
+ } else {
+ var threshold float64
+ if *num_pairs == 0 {
+ threshold = 1e99
+ } else {
+ threshold = brotli_max_double(0.0, pairs[0].cost_diff)
+ }
+ var combo histogramCommand = out[idx1]
+ var cost_combo float64
+ histogramAddHistogramCommand(&combo, &out[idx2])
+ cost_combo = populationCostCommand(&combo)
+ if cost_combo < threshold-p.cost_diff {
+ p.cost_combo = cost_combo
+ is_good_pair = true
+ }
+ }
+
+ if is_good_pair {
+ p.cost_diff += p.cost_combo
+ if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) {
+ /* Replace the top of the queue if needed. */
+ if *num_pairs < max_num_pairs {
+ pairs[*num_pairs] = pairs[0]
+ (*num_pairs)++
+ }
+
+ pairs[0] = p
+ } else if *num_pairs < max_num_pairs {
+ pairs[*num_pairs] = p
+ (*num_pairs)++
+ }
+ }
+}
+
+func histogramCombineCommand(out []histogramCommand, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint {
+ var cost_diff_threshold float64 = 0.0
+ var min_cluster_size uint = 1
+ var num_pairs uint = 0
+ {
+ /* We maintain a vector of histogram pairs, with the property that the pair
+ with the maximum bit cost reduction is the first. */
+ var idx1 uint
+ for idx1 = 0; idx1 < num_clusters; idx1++ {
+ var idx2 uint
+ for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ {
+ compareAndPushToQueueCommand(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs)
+ }
+ }
+ }
+
+ for num_clusters > min_cluster_size {
+ var best_idx1 uint32
+ var best_idx2 uint32
+ var i uint
+ if pairs[0].cost_diff >= cost_diff_threshold {
+ cost_diff_threshold = 1e99
+ min_cluster_size = max_clusters
+ continue
+ }
+
+ /* Take the best pair from the top of heap. */
+ best_idx1 = pairs[0].idx1
+
+ best_idx2 = pairs[0].idx2
+ histogramAddHistogramCommand(&out[best_idx1], &out[best_idx2])
+ out[best_idx1].bit_cost_ = pairs[0].cost_combo
+ cluster_size[best_idx1] += cluster_size[best_idx2]
+ for i = 0; i < symbols_size; i++ {
+ if symbols[i] == best_idx2 {
+ symbols[i] = best_idx1
+ }
+ }
+
+ for i = 0; i < num_clusters; i++ {
+ if clusters[i] == best_idx2 {
+ copy(clusters[i:], clusters[i+1:][:num_clusters-i-1])
+ break
+ }
+ }
+
+ num_clusters--
+ {
+ /* Remove pairs intersecting the just combined best pair. */
+ var copy_to_idx uint = 0
+ for i = 0; i < num_pairs; i++ {
+ var p *histogramPair = &pairs[i]
+ if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 {
+ /* Remove invalid pair from the queue. */
+ continue
+ }
+
+ if histogramPairIsLess(&pairs[0], p) {
+ /* Replace the top of the queue if needed. */
+ var front histogramPair = pairs[0]
+ pairs[0] = *p
+ pairs[copy_to_idx] = front
+ } else {
+ pairs[copy_to_idx] = *p
+ }
+
+ copy_to_idx++
+ }
+
+ num_pairs = copy_to_idx
+ }
+
+ /* Push new pairs formed with the combined histogram to the heap. */
+ for i = 0; i < num_clusters; i++ {
+ compareAndPushToQueueCommand(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs)
+ }
+ }
+
+ return num_clusters
+}
+
+/* What is the bit cost of moving histogram from cur_symbol to candidate. */
+func histogramBitCostDistanceCommand(histogram *histogramCommand, candidate *histogramCommand) float64 {
+ if histogram.total_count_ == 0 {
+ return 0.0
+ } else {
+ var tmp histogramCommand = *histogram
+ histogramAddHistogramCommand(&tmp, candidate)
+ return populationCostCommand(&tmp) - candidate.bit_cost_
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/cluster_distance.go b/vendor/github.com/andybalholm/brotli/cluster_distance.go
new file mode 100644
index 0000000..1aaa86e
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/cluster_distance.go
@@ -0,0 +1,326 @@
+package brotli
+
+import "math"
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if
+ it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. */
+func compareAndPushToQueueDistance(out []histogramDistance, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) {
+ var is_good_pair bool = false
+ var p histogramPair
+ p.idx2 = 0
+ p.idx1 = p.idx2
+ p.cost_combo = 0
+ p.cost_diff = p.cost_combo
+ if idx1 == idx2 {
+ return
+ }
+
+ if idx2 < idx1 {
+ var t uint32 = idx2
+ idx2 = idx1
+ idx1 = t
+ }
+
+ p.idx1 = idx1
+ p.idx2 = idx2
+ p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2]))
+ p.cost_diff -= out[idx1].bit_cost_
+ p.cost_diff -= out[idx2].bit_cost_
+
+ if out[idx1].total_count_ == 0 {
+ p.cost_combo = out[idx2].bit_cost_
+ is_good_pair = true
+ } else if out[idx2].total_count_ == 0 {
+ p.cost_combo = out[idx1].bit_cost_
+ is_good_pair = true
+ } else {
+ var threshold float64
+ if *num_pairs == 0 {
+ threshold = 1e99
+ } else {
+ threshold = brotli_max_double(0.0, pairs[0].cost_diff)
+ }
+ var combo histogramDistance = out[idx1]
+ var cost_combo float64
+ histogramAddHistogramDistance(&combo, &out[idx2])
+ cost_combo = populationCostDistance(&combo)
+ if cost_combo < threshold-p.cost_diff {
+ p.cost_combo = cost_combo
+ is_good_pair = true
+ }
+ }
+
+ if is_good_pair {
+ p.cost_diff += p.cost_combo
+ if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) {
+ /* Replace the top of the queue if needed. */
+ if *num_pairs < max_num_pairs {
+ pairs[*num_pairs] = pairs[0]
+ (*num_pairs)++
+ }
+
+ pairs[0] = p
+ } else if *num_pairs < max_num_pairs {
+ pairs[*num_pairs] = p
+ (*num_pairs)++
+ }
+ }
+}
+
+func histogramCombineDistance(out []histogramDistance, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint {
+ var cost_diff_threshold float64 = 0.0
+ var min_cluster_size uint = 1
+ var num_pairs uint = 0
+ {
+ /* We maintain a vector of histogram pairs, with the property that the pair
+ with the maximum bit cost reduction is the first. */
+ var idx1 uint
+ for idx1 = 0; idx1 < num_clusters; idx1++ {
+ var idx2 uint
+ for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ {
+ compareAndPushToQueueDistance(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs)
+ }
+ }
+ }
+
+ for num_clusters > min_cluster_size {
+ var best_idx1 uint32
+ var best_idx2 uint32
+ var i uint
+ if pairs[0].cost_diff >= cost_diff_threshold {
+ cost_diff_threshold = 1e99
+ min_cluster_size = max_clusters
+ continue
+ }
+
+ /* Take the best pair from the top of heap. */
+ best_idx1 = pairs[0].idx1
+
+ best_idx2 = pairs[0].idx2
+ histogramAddHistogramDistance(&out[best_idx1], &out[best_idx2])
+ out[best_idx1].bit_cost_ = pairs[0].cost_combo
+ cluster_size[best_idx1] += cluster_size[best_idx2]
+ for i = 0; i < symbols_size; i++ {
+ if symbols[i] == best_idx2 {
+ symbols[i] = best_idx1
+ }
+ }
+
+ for i = 0; i < num_clusters; i++ {
+ if clusters[i] == best_idx2 {
+ copy(clusters[i:], clusters[i+1:][:num_clusters-i-1])
+ break
+ }
+ }
+
+ num_clusters--
+ {
+ /* Remove pairs intersecting the just combined best pair. */
+ var copy_to_idx uint = 0
+ for i = 0; i < num_pairs; i++ {
+ var p *histogramPair = &pairs[i]
+ if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 {
+ /* Remove invalid pair from the queue. */
+ continue
+ }
+
+ if histogramPairIsLess(&pairs[0], p) {
+ /* Replace the top of the queue if needed. */
+ var front histogramPair = pairs[0]
+ pairs[0] = *p
+ pairs[copy_to_idx] = front
+ } else {
+ pairs[copy_to_idx] = *p
+ }
+
+ copy_to_idx++
+ }
+
+ num_pairs = copy_to_idx
+ }
+
+ /* Push new pairs formed with the combined histogram to the heap. */
+ for i = 0; i < num_clusters; i++ {
+ compareAndPushToQueueDistance(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs)
+ }
+ }
+
+ return num_clusters
+}
+
+/* What is the bit cost of moving histogram from cur_symbol to candidate. */
+func histogramBitCostDistanceDistance(histogram *histogramDistance, candidate *histogramDistance) float64 {
+ if histogram.total_count_ == 0 {
+ return 0.0
+ } else {
+ var tmp histogramDistance = *histogram
+ histogramAddHistogramDistance(&tmp, candidate)
+ return populationCostDistance(&tmp) - candidate.bit_cost_
+ }
+}
+
+/* Find the best 'out' histogram for each of the 'in' histograms.
+ When called, clusters[0..num_clusters) contains the unique values from
+ symbols[0..in_size), but this property is not preserved in this function.
+ Note: we assume that out[]->bit_cost_ is already up-to-date. */
+func histogramRemapDistance(in []histogramDistance, in_size uint, clusters []uint32, num_clusters uint, out []histogramDistance, symbols []uint32) {
+ var i uint
+ for i = 0; i < in_size; i++ {
+ var best_out uint32
+ if i == 0 {
+ best_out = symbols[0]
+ } else {
+ best_out = symbols[i-1]
+ }
+ var best_bits float64 = histogramBitCostDistanceDistance(&in[i], &out[best_out])
+ var j uint
+ for j = 0; j < num_clusters; j++ {
+ var cur_bits float64 = histogramBitCostDistanceDistance(&in[i], &out[clusters[j]])
+ if cur_bits < best_bits {
+ best_bits = cur_bits
+ best_out = clusters[j]
+ }
+ }
+
+ symbols[i] = best_out
+ }
+
+ /* Recompute each out based on raw and symbols. */
+ for i = 0; i < num_clusters; i++ {
+ histogramClearDistance(&out[clusters[i]])
+ }
+
+ for i = 0; i < in_size; i++ {
+ histogramAddHistogramDistance(&out[symbols[i]], &in[i])
+ }
+}
+
+/* Reorders elements of the out[0..length) array and changes values in
+ symbols[0..length) array in the following way:
+ * when called, symbols[] contains indexes into out[], and has N unique
+ values (possibly N < length)
+ * on return, symbols'[i] = f(symbols[i]) and
+ out'[symbols'[i]] = out[symbols[i]], for each 0 <= i < length,
+ where f is a bijection between the range of symbols[] and [0..N), and
+ the first occurrences of values in symbols'[i] come in consecutive
+ increasing order.
+ Returns N, the number of unique values in symbols[]. */
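+
+/* For example, symbols = {3, 1, 3, 0} becomes {0, 1, 0, 2} with N = 3, and
+ out[0..N) receives the old out[3], out[1] and out[0], in that order. */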
+
+var histogramReindexDistance_kInvalidIndex uint32 = math.MaxUint32
+
+func histogramReindexDistance(out []histogramDistance, symbols []uint32, length uint) uint {
+ var new_index []uint32 = make([]uint32, length)
+ var next_index uint32
+ var tmp []histogramDistance
+ var i uint
+ for i = 0; i < length; i++ {
+ new_index[i] = histogramReindexDistance_kInvalidIndex
+ }
+
+ next_index = 0
+ for i = 0; i < length; i++ {
+ if new_index[symbols[i]] == histogramReindexDistance_kInvalidIndex {
+ new_index[symbols[i]] = next_index
+ next_index++
+ }
+ }
+
+ /* TODO: by using the idea of "cycle sort" we could avoid allocating
+ tmp and halve the amount of copying. */
+ tmp = make([]histogramDistance, next_index)
+
+ next_index = 0
+ for i = 0; i < length; i++ {
+ if new_index[symbols[i]] == next_index {
+ tmp[next_index] = out[symbols[i]]
+ next_index++
+ }
+
+ symbols[i] = new_index[symbols[i]]
+ }
+
+ new_index = nil
+ for i = 0; uint32(i) < next_index; i++ {
+ out[i] = tmp[i]
+ }
+
+ tmp = nil
+ return uint(next_index)
+}
+
+func clusterHistogramsDistance(in []histogramDistance, in_size uint, max_histograms uint, out []histogramDistance, out_size *uint, histogram_symbols []uint32) {
+ var cluster_size []uint32 = make([]uint32, in_size)
+ var clusters []uint32 = make([]uint32, in_size)
+ var num_clusters uint = 0
+ var max_input_histograms uint = 64
+ var pairs_capacity uint = max_input_histograms * max_input_histograms / 2
+ var pairs []histogramPair = make([]histogramPair, (pairs_capacity + 1))
+ var i uint
+
+ /* For the first pass of clustering, we allow all pairs. */
+ for i = 0; i < in_size; i++ {
+ cluster_size[i] = 1
+ }
+
+ for i = 0; i < in_size; i++ {
+ out[i] = in[i]
+ out[i].bit_cost_ = populationCostDistance(&in[i])
+ histogram_symbols[i] = uint32(i)
+ }
+
+ for i = 0; i < in_size; i += max_input_histograms {
+ var num_to_combine uint = brotli_min_size_t(in_size-i, max_input_histograms)
+ var num_new_clusters uint
+ var j uint
+ for j = 0; j < num_to_combine; j++ {
+ clusters[num_clusters+j] = uint32(i + j)
+ }
+
+ num_new_clusters = histogramCombineDistance(out, cluster_size, histogram_symbols[i:], clusters[num_clusters:], pairs, num_to_combine, num_to_combine, max_histograms, pairs_capacity)
+ num_clusters += num_new_clusters
+ }
+ {
+ /* For the second pass, we limit the total number of histogram pairs.
+ After this limit is reached, we only keep searching for the best pair. */
+ var max_num_pairs uint = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters)
+ if pairs_capacity < (max_num_pairs + 1) {
+ var _new_size uint
+ if pairs_capacity == 0 {
+ _new_size = max_num_pairs + 1
+ } else {
+ _new_size = pairs_capacity
+ }
+ var new_array []histogramPair
+ for _new_size < (max_num_pairs + 1) {
+ _new_size *= 2
+ }
+ new_array = make([]histogramPair, _new_size)
+ if pairs_capacity != 0 {
+ copy(new_array, pairs[:pairs_capacity])
+ }
+
+ pairs = new_array
+ pairs_capacity = _new_size
+ }
+
+ /* Collapse similar histograms. */
+ num_clusters = histogramCombineDistance(out, cluster_size, histogram_symbols, clusters, pairs, num_clusters, in_size, max_histograms, max_num_pairs)
+ }
+
+ pairs = nil
+ cluster_size = nil
+
+ /* Find the optimal map from original histograms to the final ones. */
+ histogramRemapDistance(in, in_size, clusters, num_clusters, out, histogram_symbols)
+
+ clusters = nil
+
+ /* Convert the context map to a canonical form. */
+ *out_size = histogramReindexDistance(out, histogram_symbols, in_size)
+}
diff --git a/vendor/github.com/andybalholm/brotli/cluster_literal.go b/vendor/github.com/andybalholm/brotli/cluster_literal.go
new file mode 100644
index 0000000..6ba66f3
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/cluster_literal.go
@@ -0,0 +1,326 @@
+package brotli
+
+import "math"
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if
+ it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. */
+func compareAndPushToQueueLiteral(out []histogramLiteral, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) {
+ var is_good_pair bool = false
+ var p histogramPair
+ p.idx2 = 0
+ p.idx1 = p.idx2
+ p.cost_combo = 0
+ p.cost_diff = p.cost_combo
+ if idx1 == idx2 {
+ return
+ }
+
+ if idx2 < idx1 {
+ var t uint32 = idx2
+ idx2 = idx1
+ idx1 = t
+ }
+
+ p.idx1 = idx1
+ p.idx2 = idx2
+ p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2]))
+ p.cost_diff -= out[idx1].bit_cost_
+ p.cost_diff -= out[idx2].bit_cost_
+
+ if out[idx1].total_count_ == 0 {
+ p.cost_combo = out[idx2].bit_cost_
+ is_good_pair = true
+ } else if out[idx2].total_count_ == 0 {
+ p.cost_combo = out[idx1].bit_cost_
+ is_good_pair = true
+ } else {
+ var threshold float64
+ if *num_pairs == 0 {
+ threshold = 1e99
+ } else {
+ threshold = brotli_max_double(0.0, pairs[0].cost_diff)
+ }
+ var combo histogramLiteral = out[idx1]
+ var cost_combo float64
+ histogramAddHistogramLiteral(&combo, &out[idx2])
+ cost_combo = populationCostLiteral(&combo)
+ if cost_combo < threshold-p.cost_diff {
+ p.cost_combo = cost_combo
+ is_good_pair = true
+ }
+ }
+
+ if is_good_pair {
+ p.cost_diff += p.cost_combo
+ if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) {
+ /* Replace the top of the queue if needed. */
+ if *num_pairs < max_num_pairs {
+ pairs[*num_pairs] = pairs[0]
+ (*num_pairs)++
+ }
+
+ pairs[0] = p
+ } else if *num_pairs < max_num_pairs {
+ pairs[*num_pairs] = p
+ (*num_pairs)++
+ }
+ }
+}
+
+func histogramCombineLiteral(out []histogramLiteral, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint {
+ var cost_diff_threshold float64 = 0.0
+ var min_cluster_size uint = 1
+ var num_pairs uint = 0
+ {
+ /* We maintain a vector of histogram pairs, with the property that the pair
+ with the maximum bit cost reduction is the first. */
+ var idx1 uint
+ for idx1 = 0; idx1 < num_clusters; idx1++ {
+ var idx2 uint
+ for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ {
+ compareAndPushToQueueLiteral(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs)
+ }
+ }
+ }
+
+ for num_clusters > min_cluster_size {
+ var best_idx1 uint32
+ var best_idx2 uint32
+ var i uint
+ if pairs[0].cost_diff >= cost_diff_threshold {
+ cost_diff_threshold = 1e99
+ min_cluster_size = max_clusters
+ continue
+ }
+
+ /* Take the best pair from the top of heap. */
+ best_idx1 = pairs[0].idx1
+
+ best_idx2 = pairs[0].idx2
+ histogramAddHistogramLiteral(&out[best_idx1], &out[best_idx2])
+ out[best_idx1].bit_cost_ = pairs[0].cost_combo
+ cluster_size[best_idx1] += cluster_size[best_idx2]
+ for i = 0; i < symbols_size; i++ {
+ if symbols[i] == best_idx2 {
+ symbols[i] = best_idx1
+ }
+ }
+
+ for i = 0; i < num_clusters; i++ {
+ if clusters[i] == best_idx2 {
+ copy(clusters[i:], clusters[i+1:][:num_clusters-i-1])
+ break
+ }
+ }
+
+ num_clusters--
+ {
+ /* Remove pairs intersecting the just combined best pair. */
+ var copy_to_idx uint = 0
+ for i = 0; i < num_pairs; i++ {
+ var p *histogramPair = &pairs[i]
+ if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 {
+ /* Remove invalid pair from the queue. */
+ continue
+ }
+
+ if histogramPairIsLess(&pairs[0], p) {
+ /* Replace the top of the queue if needed. */
+ var front histogramPair = pairs[0]
+ pairs[0] = *p
+ pairs[copy_to_idx] = front
+ } else {
+ pairs[copy_to_idx] = *p
+ }
+
+ copy_to_idx++
+ }
+
+ num_pairs = copy_to_idx
+ }
+
+ /* Push new pairs formed with the combined histogram to the heap. */
+ for i = 0; i < num_clusters; i++ {
+ compareAndPushToQueueLiteral(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs)
+ }
+ }
+
+ return num_clusters
+}
+
+/* What is the bit cost of moving histogram from cur_symbol to candidate. */
+func histogramBitCostDistanceLiteral(histogram *histogramLiteral, candidate *histogramLiteral) float64 {
+ if histogram.total_count_ == 0 {
+ return 0.0
+ } else {
+ var tmp histogramLiteral = *histogram
+ histogramAddHistogramLiteral(&tmp, candidate)
+ return populationCostLiteral(&tmp) - candidate.bit_cost_
+ }
+}
+
+/* Find the best 'out' histogram for each of the 'in' histograms.
+ When called, clusters[0..num_clusters) contains the unique values from
+ symbols[0..in_size), but this property is not preserved in this function.
+ Note: we assume that out[]->bit_cost_ is already up-to-date. */
+func histogramRemapLiteral(in []histogramLiteral, in_size uint, clusters []uint32, num_clusters uint, out []histogramLiteral, symbols []uint32) {
+ var i uint
+ for i = 0; i < in_size; i++ {
+ var best_out uint32
+ if i == 0 {
+ best_out = symbols[0]
+ } else {
+ best_out = symbols[i-1]
+ }
+ var best_bits float64 = histogramBitCostDistanceLiteral(&in[i], &out[best_out])
+ var j uint
+ for j = 0; j < num_clusters; j++ {
+ var cur_bits float64 = histogramBitCostDistanceLiteral(&in[i], &out[clusters[j]])
+ if cur_bits < best_bits {
+ best_bits = cur_bits
+ best_out = clusters[j]
+ }
+ }
+
+ symbols[i] = best_out
+ }
+
+ /* Recompute each out based on raw and symbols. */
+ for i = 0; i < num_clusters; i++ {
+ histogramClearLiteral(&out[clusters[i]])
+ }
+
+ for i = 0; i < in_size; i++ {
+ histogramAddHistogramLiteral(&out[symbols[i]], &in[i])
+ }
+}
+
+/* Reorders elements of the out[0..length) array and changes values in
+ symbols[0..length) array in the following way:
+ * when called, symbols[] contains indexes into out[], and has N unique
+ values (possibly N < length)
+ * on return, symbols'[i] = f(symbols[i]) and
+ out'[symbols'[i]] = out[symbols[i]], for each 0 <= i < length,
+ where f is a bijection between the range of symbols[] and [0..N), and
+ the first occurrences of values in symbols'[i] come in consecutive
+ increasing order.
+ Returns N, the number of unique values in symbols[]. */
+
+var histogramReindexLiteral_kInvalidIndex uint32 = math.MaxUint32
+
+func histogramReindexLiteral(out []histogramLiteral, symbols []uint32, length uint) uint {
+ var new_index []uint32 = make([]uint32, length)
+ var next_index uint32
+ var tmp []histogramLiteral
+ var i uint
+ for i = 0; i < length; i++ {
+ new_index[i] = histogramReindexLiteral_kInvalidIndex
+ }
+
+ next_index = 0
+ for i = 0; i < length; i++ {
+ if new_index[symbols[i]] == histogramReindexLiteral_kInvalidIndex {
+ new_index[symbols[i]] = next_index
+ next_index++
+ }
+ }
+
+ /* TODO: by using the idea of "cycle sort" we could avoid allocating
+ tmp and halve the amount of copying. */
+ tmp = make([]histogramLiteral, next_index)
+
+ next_index = 0
+ for i = 0; i < length; i++ {
+ if new_index[symbols[i]] == next_index {
+ tmp[next_index] = out[symbols[i]]
+ next_index++
+ }
+
+ symbols[i] = new_index[symbols[i]]
+ }
+
+ new_index = nil
+ for i = 0; uint32(i) < next_index; i++ {
+ out[i] = tmp[i]
+ }
+
+ tmp = nil
+ return uint(next_index)
+}
+
+func clusterHistogramsLiteral(in []histogramLiteral, in_size uint, max_histograms uint, out []histogramLiteral, out_size *uint, histogram_symbols []uint32) {
+ var cluster_size []uint32 = make([]uint32, in_size)
+ var clusters []uint32 = make([]uint32, in_size)
+ var num_clusters uint = 0
+ var max_input_histograms uint = 64
+ var pairs_capacity uint = max_input_histograms * max_input_histograms / 2
+ var pairs []histogramPair = make([]histogramPair, (pairs_capacity + 1))
+ var i uint
+
+ /* For the first pass of clustering, we allow all pairs. */
+ for i = 0; i < in_size; i++ {
+ cluster_size[i] = 1
+ }
+
+ for i = 0; i < in_size; i++ {
+ out[i] = in[i]
+ out[i].bit_cost_ = populationCostLiteral(&in[i])
+ histogram_symbols[i] = uint32(i)
+ }
+
+ for i = 0; i < in_size; i += max_input_histograms {
+ var num_to_combine uint = brotli_min_size_t(in_size-i, max_input_histograms)
+ var num_new_clusters uint
+ var j uint
+ for j = 0; j < num_to_combine; j++ {
+ clusters[num_clusters+j] = uint32(i + j)
+ }
+
+ num_new_clusters = histogramCombineLiteral(out, cluster_size, histogram_symbols[i:], clusters[num_clusters:], pairs, num_to_combine, num_to_combine, max_histograms, pairs_capacity)
+ num_clusters += num_new_clusters
+ }
+ {
+ /* For the second pass, we limit the total number of histogram pairs.
+ After this limit is reached, we only keep searching for the best pair. */
+ var max_num_pairs uint = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters)
+ if pairs_capacity < (max_num_pairs + 1) {
+ var _new_size uint
+ if pairs_capacity == 0 {
+ _new_size = max_num_pairs + 1
+ } else {
+ _new_size = pairs_capacity
+ }
+ var new_array []histogramPair
+ for _new_size < (max_num_pairs + 1) {
+ _new_size *= 2
+ }
+ new_array = make([]histogramPair, _new_size)
+ if pairs_capacity != 0 {
+ copy(new_array, pairs[:pairs_capacity])
+ }
+
+ pairs = new_array
+ pairs_capacity = _new_size
+ }
+
+ /* Collapse similar histograms. */
+ num_clusters = histogramCombineLiteral(out, cluster_size, histogram_symbols, clusters, pairs, num_clusters, in_size, max_histograms, max_num_pairs)
+ }
+
+ pairs = nil
+ cluster_size = nil
+
+ /* Find the optimal map from original histograms to the final ones. */
+ histogramRemapLiteral(in, in_size, clusters, num_clusters, out, histogram_symbols)
+
+ clusters = nil
+
+ /* Convert the context map to a canonical form. */
+ *out_size = histogramReindexLiteral(out, histogram_symbols, in_size)
+}
diff --git a/vendor/github.com/andybalholm/brotli/command.go b/vendor/github.com/andybalholm/brotli/command.go
new file mode 100644
index 0000000..b1662a5
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/command.go
@@ -0,0 +1,254 @@
+package brotli
+
+var kInsBase = []uint32{
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 8,
+ 10,
+ 14,
+ 18,
+ 26,
+ 34,
+ 50,
+ 66,
+ 98,
+ 130,
+ 194,
+ 322,
+ 578,
+ 1090,
+ 2114,
+ 6210,
+ 22594,
+}
+
+var kInsExtra = []uint32{
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 2,
+ 2,
+ 3,
+ 3,
+ 4,
+ 4,
+ 5,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 12,
+ 14,
+ 24,
+}
+
+var kCopyBase = []uint32{
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 12,
+ 14,
+ 18,
+ 22,
+ 30,
+ 38,
+ 54,
+ 70,
+ 102,
+ 134,
+ 198,
+ 326,
+ 582,
+ 1094,
+ 2118,
+}
+
+var kCopyExtra = []uint32{
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 2,
+ 2,
+ 3,
+ 3,
+ 4,
+ 4,
+ 5,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 24,
+}
+
+func getInsertLengthCode(insertlen uint) uint16 {
+ if insertlen < 6 {
+ return uint16(insertlen)
+ } else if insertlen < 130 {
+ var nbits uint32 = log2FloorNonZero(insertlen-2) - 1
+ return uint16((nbits << 1) + uint32((insertlen-2)>>nbits) + 2)
+ } else if insertlen < 2114 {
+ return uint16(log2FloorNonZero(insertlen-66) + 10)
+ } else if insertlen < 6210 {
+ return 21
+ } else if insertlen < 22594 {
+ return 22
+ } else {
+ return 23
+ }
+}
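+
+/* For example, insertlen = 100 falls in the 98..129 bucket: nbits =
+ log2FloorNonZero(98) - 1 = 5, so the code is (5 << 1) + (98 >> 5) + 2 = 15,
+ matching kInsBase[15] = 98 with kInsExtra[15] = 5 extra bits. */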
+
+func getCopyLengthCode(copylen uint) uint16 {
+ if copylen < 10 {
+ return uint16(copylen - 2)
+ } else if copylen < 134 {
+ var nbits uint32 = log2FloorNonZero(copylen-6) - 1
+ return uint16((nbits << 1) + uint32((copylen-6)>>nbits) + 4)
+ } else if copylen < 2118 {
+ return uint16(log2FloorNonZero(copylen-70) + 12)
+ } else {
+ return 23
+ }
+}
+
+func combineLengthCodes(inscode uint16, copycode uint16, use_last_distance bool) uint16 {
+ var bits64 uint16 = uint16(copycode&0x7 | (inscode&0x7)<<3)
+ if use_last_distance && inscode < 8 && copycode < 16 {
+ if copycode < 8 {
+ return bits64
+ } else {
+ return bits64 | 64
+ }
+ } else {
+ /* Specification: 5 Encoding of ... (last table) */
+ /* offset = 2 * index, where index is in range [0..8] */
+ var offset uint32 = 2 * ((uint32(copycode) >> 3) + 3*(uint32(inscode)>>3))
+
+ /* All values in specification are K * 64,
+ where K = [2, 3, 6, 4, 5, 8, 7, 9, 10],
+ i + 1 = [1, 2, 3, 4, 5, 6, 7, 8, 9],
+ K - i - 1 = [1, 1, 3, 0, 0, 2, 0, 1, 2] = D.
+ All values in D require only 2 bits to encode.
+ Magic constant is shifted 6 bits left, to avoid final multiplication. */
+ offset = (offset << 5) + 0x40 + ((0x520D40 >> offset) & 0xC0)
+
+ return uint16(offset | uint32(bits64))
+ }
+}
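+
+/* Worked example of the magic-constant path: combineLengthCodes(7, 7, false)
+ gives bits64 == 63 and offset == 2*((7>>3) + 3*(7>>3)) == 0, so
+ (0<<5) + 0x40 + ((0x520D40>>0)&0xC0) == 128, i.e. K == 2 for the first
+ cell, and the combined code is 128 | 63 == 191. */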
+
+func getLengthCode(insertlen uint, copylen uint, use_last_distance bool, code *uint16) {
+ var inscode uint16 = getInsertLengthCode(insertlen)
+ var copycode uint16 = getCopyLengthCode(copylen)
+ *code = combineLengthCodes(inscode, copycode, use_last_distance)
+}
+
+func getInsertBase(inscode uint16) uint32 {
+ return kInsBase[inscode]
+}
+
+func getInsertExtra(inscode uint16) uint32 {
+ return kInsExtra[inscode]
+}
+
+func getCopyBase(copycode uint16) uint32 {
+ return kCopyBase[copycode]
+}
+
+func getCopyExtra(copycode uint16) uint32 {
+ return kCopyExtra[copycode]
+}
+
+type command struct {
+ insert_len_ uint32
+ copy_len_ uint32
+ dist_extra_ uint32
+ cmd_prefix_ uint16
+ dist_prefix_ uint16
+}
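+
+/* Field packing (derived from the accessors below): copy_len_ stores the copy
+ length in its low 25 bits and a copy-length-code delta in the high bits
+ (see commandCopyLen and commandCopyLenCode); dist_prefix_ stores the
+ distance code in its low 10 bits and the extra-bit count in the high
+ 6 bits (see commandRestoreDistanceCode). */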
+
+/* distance_code is e.g. 0 for same-as-last short code, or 16 for offset 1. */
+func makeCommand(dist *distanceParams, insertlen uint, copylen uint, copylen_code_delta int, distance_code uint) (cmd command) {
+ /* Don't rely on signed int representation, use honest casts. */
+ var delta uint32 = uint32(byte(int8(copylen_code_delta)))
+ cmd.insert_len_ = uint32(insertlen)
+ cmd.copy_len_ = uint32(uint32(copylen) | delta<<25)
+
+ /* The distance prefix and extra bits are stored in this Command as if
+ npostfix and ndirect were 0, they are only recomputed later after the
+ clustering if needed. */
+ prefixEncodeCopyDistance(distance_code, uint(dist.num_direct_distance_codes), uint(dist.distance_postfix_bits), &cmd.dist_prefix_, &cmd.dist_extra_)
+ getLengthCode(insertlen, uint(int(copylen)+copylen_code_delta), (cmd.dist_prefix_&0x3FF == 0), &cmd.cmd_prefix_)
+
+ return cmd
+}
+
+func makeInsertCommand(insertlen uint) (cmd command) {
+ cmd.insert_len_ = uint32(insertlen)
+ cmd.copy_len_ = 4 << 25
+ cmd.dist_extra_ = 0
+ cmd.dist_prefix_ = numDistanceShortCodes
+ getLengthCode(insertlen, 4, false, &cmd.cmd_prefix_)
+ return cmd
+}
+
+func commandRestoreDistanceCode(self *command, dist *distanceParams) uint32 {
+ if uint32(self.dist_prefix_&0x3FF) < numDistanceShortCodes+dist.num_direct_distance_codes {
+ return uint32(self.dist_prefix_) & 0x3FF
+ } else {
+ var dcode uint32 = uint32(self.dist_prefix_) & 0x3FF
+ var nbits uint32 = uint32(self.dist_prefix_) >> 10
+ var extra uint32 = self.dist_extra_
+ var postfix_mask uint32 = (1 << dist.distance_postfix_bits) - 1
+ var hcode uint32 = (dcode - dist.num_direct_distance_codes - numDistanceShortCodes) >> dist.distance_postfix_bits
+ var lcode uint32 = (dcode - dist.num_direct_distance_codes - numDistanceShortCodes) & postfix_mask
+ var offset uint32 = ((2 + (hcode & 1)) << nbits) - 4
+ return ((offset + extra) << dist.distance_postfix_bits) + lcode + dist.num_direct_distance_codes + numDistanceShortCodes
+ }
+}
+
+func commandDistanceContext(self *command) uint32 {
+ var r uint32 = uint32(self.cmd_prefix_) >> 6
+ var c uint32 = uint32(self.cmd_prefix_) & 7
+ if (r == 0 || r == 2 || r == 4 || r == 7) && (c <= 2) {
+ return c
+ }
+
+ return 3
+}
+
+func commandCopyLen(self *command) uint32 {
+ return self.copy_len_ & 0x1FFFFFF
+}
+
+func commandCopyLenCode(self *command) uint32 {
+ var modifier uint32 = self.copy_len_ >> 25
+ var delta int32 = int32(int8(byte(modifier | (modifier&0x40)<<1)))
+ return uint32(int32(self.copy_len_&0x1FFFFFF) + delta)
+}
diff --git a/vendor/github.com/andybalholm/brotli/compress_fragment.go b/vendor/github.com/andybalholm/brotli/compress_fragment.go
new file mode 100644
index 0000000..c9bd057
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/compress_fragment.go
@@ -0,0 +1,834 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2015 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Function for fast encoding of an input fragment, independently from the input
+ history. This function uses one-pass processing: when we find a backward
+ match, we immediately emit the corresponding command and literal codes to
+ the bit stream.
+
+ Adapted from the CompressFragment() function in
+ https://github.com/google/snappy/blob/master/snappy.cc */
+
+const maxDistance_compress_fragment = 262128
+
+func hash5(p []byte, shift uint) uint32 {
+ var h uint64 = (binary.LittleEndian.Uint64(p) << 24) * uint64(kHashMul32)
+ return uint32(h >> shift)
+}
+
+func hashBytesAtOffset5(v uint64, offset int, shift uint) uint32 {
+ assert(offset >= 0)
+ assert(offset <= 3)
+ {
+ var h uint64 = ((v >> uint(8*offset)) << 24) * uint64(kHashMul32)
+ return uint32(h >> shift)
+ }
+}
+
+func isMatch5(p1 []byte, p2 []byte) bool {
+ return binary.LittleEndian.Uint32(p1) == binary.LittleEndian.Uint32(p2) &&
+ p1[4] == p2[4]
+}
+
+/* Builds a literal prefix code into "depths" and "bits" based on the statistics
+ of the "input" string and stores it into the bit stream.
+ Note that the prefix code here is built from the pre-LZ77 input, therefore
+ we can only approximate the statistics of the actual literal stream.
+ Moreover, for long inputs we build a histogram from a sample of the input
+ and thus have to assign a non-zero depth for each literal.
+ Returns estimated compression ratio millibytes/char for encoding given input
+ with generated code. */
+func buildAndStoreLiteralPrefixCode(input []byte, input_size uint, depths []byte, bits []uint16, storage_ix *uint, storage []byte) uint {
+ var histogram = [256]uint32{0}
+ var histogram_total uint
+ var i uint
+ if input_size < 1<<15 {
+ for i = 0; i < input_size; i++ {
+ histogram[input[i]]++
+ }
+
+ histogram_total = input_size
+ for i = 0; i < 256; i++ {
+ /* We weigh the first 11 samples with weight 3 to account for the
+ balancing effect of the LZ77 phase on the histogram. */
+ var adjust uint32 = 2 * brotli_min_uint32_t(histogram[i], 11)
+ histogram[i] += adjust
+ histogram_total += uint(adjust)
+ }
+ } else {
+ const kSampleRate uint = 29
+ for i = 0; i < input_size; i += kSampleRate {
+ histogram[input[i]]++
+ }
+
+ histogram_total = (input_size + kSampleRate - 1) / kSampleRate
+ for i = 0; i < 256; i++ {
+ /* We add 1 to each population count to avoid 0 bit depths (since this is
+ only a sample and we don't know if the symbol appears or not), and we
+ weigh the first 11 samples with weight 3 to account for the balancing
+ effect of the LZ77 phase on the histogram (more frequent symbols are
+ more likely to be in backward references instead as literals). */
+ var adjust uint32 = 1 + 2*brotli_min_uint32_t(histogram[i], 11)
+ histogram[i] += adjust
+ histogram_total += uint(adjust)
+ }
+ }
+
+ buildAndStoreHuffmanTreeFast(histogram[:], histogram_total, /* max_bits = */
+ 8, depths, bits, storage_ix, storage)
+ {
+ var literal_ratio uint = 0
+ for i = 0; i < 256; i++ {
+ if histogram[i] != 0 {
+ literal_ratio += uint(histogram[i] * uint32(depths[i]))
+ }
+ }
+
+ /* Estimated encoding ratio, millibytes per symbol. */
+ return (literal_ratio * 125) / histogram_total
+ }
+}
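+
+/* Since one bit per symbol is 125 millibytes, the return value is just the
+ average code depth times 125: a flat 8-bit code gives 1000 millibytes/char
+ (no compression), and 980 is the threshold used by
+ shouldUseUncompressedMode below. */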
+
+/* Builds a command and distance prefix code (each 64 symbols) into "depth" and
+ "bits" based on "histogram" and stores it into the bit stream. */
+func buildAndStoreCommandPrefixCode1(histogram []uint32, depth []byte, bits []uint16, storage_ix *uint, storage []byte) {
+ var tree [129]huffmanTree
+ var cmd_depth = [numCommandSymbols]byte{0}
+ /* Tree size for building a tree over 64 symbols is 2 * 64 + 1. */
+
+ var cmd_bits [64]uint16
+
+ createHuffmanTree(histogram, 64, 15, tree[:], depth)
+ createHuffmanTree(histogram[64:], 64, 14, tree[:], depth[64:])
+
+ /* We have to jump through a few hoops here in order to compute
+ the command bits because the symbols are in a different order than in
+ the full alphabet. This looks complicated, but having the symbols
+ in this order in the command bits saves a few branches in the Emit*
+ functions. */
+ copy(cmd_depth[:], depth[:24])
+
+ copy(cmd_depth[24:][:], depth[40:][:8])
+ copy(cmd_depth[32:][:], depth[24:][:8])
+ copy(cmd_depth[40:][:], depth[48:][:8])
+ copy(cmd_depth[48:][:], depth[32:][:8])
+ copy(cmd_depth[56:][:], depth[56:][:8])
+ convertBitDepthsToSymbols(cmd_depth[:], 64, cmd_bits[:])
+ copy(bits, cmd_bits[:24])
+ copy(bits[24:], cmd_bits[32:][:8])
+ copy(bits[32:], cmd_bits[48:][:8])
+ copy(bits[40:], cmd_bits[24:][:8])
+ copy(bits[48:], cmd_bits[40:][:8])
+ copy(bits[56:], cmd_bits[56:][:8])
+ convertBitDepthsToSymbols(depth[64:], 64, bits[64:])
+ {
+ /* Create the bit length array for the full command alphabet. */
+ var i uint
+ for i := 0; i < int(64); i++ {
+ cmd_depth[i] = 0
+ } /* only 64 first values were used */
+ copy(cmd_depth[:], depth[:8])
+ copy(cmd_depth[64:][:], depth[8:][:8])
+ copy(cmd_depth[128:][:], depth[16:][:8])
+ copy(cmd_depth[192:][:], depth[24:][:8])
+ copy(cmd_depth[384:][:], depth[32:][:8])
+ for i = 0; i < 8; i++ {
+ cmd_depth[128+8*i] = depth[40+i]
+ cmd_depth[256+8*i] = depth[48+i]
+ cmd_depth[448+8*i] = depth[56+i]
+ }
+
+ storeHuffmanTree(cmd_depth[:], numCommandSymbols, tree[:], storage_ix, storage)
+ }
+
+ storeHuffmanTree(depth[64:], 64, tree[:], storage_ix, storage)
+}
+
+/* REQUIRES: insertlen < 6210 */
+func emitInsertLen1(insertlen uint, depth []byte, bits []uint16, histo []uint32, storage_ix *uint, storage []byte) {
+ if insertlen < 6 {
+ var code uint = insertlen + 40
+ writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage)
+ histo[code]++
+ } else if insertlen < 130 {
+ var tail uint = insertlen - 2
+ var nbits uint32 = log2FloorNonZero(tail) - 1
+ var prefix uint = tail >> nbits
+ var inscode uint = uint((nbits << 1) + uint32(prefix) + 42)
+ writeBits(uint(depth[inscode]), uint64(bits[inscode]), storage_ix, storage)
+ writeBits(uint(nbits), uint64(tail)-(uint64(prefix)<<nbits), storage_ix, storage)
+ histo[inscode]++
+ } else if insertlen < 2114 {
+ var tail uint = insertlen - 66
+ var nbits uint32 = log2FloorNonZero(tail)
+ var code uint = uint(nbits + 50)
+ writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage)
+ writeBits(uint(nbits), uint64(tail)-(uint64(uint(1))<<nbits), storage_ix, storage)
+ histo[code]++
+ } else {
+ writeBits(uint(depth[61]), uint64(bits[61]), storage_ix, storage)
+ writeBits(12, uint64(insertlen)-2114, storage_ix, storage)
+ histo[61]++
+ }
+}
+
+/* REQUIRES: insertlen >= 6210 */
+func emitLongInsertLen(insertlen uint, depth []byte, bits []uint16, histo []uint32, storage_ix *uint, storage []byte) {
+ if insertlen < 22594 {
+ writeBits(uint(depth[62]), uint64(bits[62]), storage_ix, storage)
+ writeBits(14, uint64(insertlen)-6210, storage_ix, storage)
+ histo[62]++
+ } else {
+ writeBits(uint(depth[63]), uint64(bits[63]), storage_ix, storage)
+ writeBits(24, uint64(insertlen)-22594, storage_ix, storage)
+ histo[63]++
+ }
+}
+
+func emitCopyLen1(copylen uint, depth []byte, bits []uint16, histo []uint32, storage_ix *uint, storage []byte) {
+ if copylen < 10 {
+ writeBits(uint(depth[copylen+14]), uint64(bits[copylen+14]), storage_ix, storage)
+ histo[copylen+14]++
+ } else if copylen < 134 {
+ var tail uint = copylen - 6
+ var nbits uint32 = log2FloorNonZero(tail) - 1
+ var prefix uint = tail >> nbits
+ var code uint = uint((nbits << 1) + uint32(prefix) + 20)
+ writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage)
+ writeBits(uint(nbits), uint64(tail)-(uint64(prefix)<<nbits), storage_ix, storage)
+ histo[code]++
+ } else if copylen < 2118 {
+ var tail uint = copylen - 70
+ var nbits uint32 = log2FloorNonZero(tail)
+ var code uint = uint(nbits + 12)
+ writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage)
+ writeBits(uint(nbits), uint64(tail)-(uint64(uint(1))<<nbits), storage_ix, storage)
+ histo[code]++
+ } else {
+ writeBits(uint(depth[39]), uint64(bits[39]), storage_ix, storage)
+ writeBits(24, uint64(copylen)-2118, storage_ix, storage)
+ histo[39]++
+ }
+}
+
+func emitCopyLenLastDistance1(copylen uint, depth []byte, bits []uint16, histo []uint32, storage_ix *uint, storage []byte) {
+ if copylen < 12 {
+ writeBits(uint(depth[copylen-4]), uint64(bits[copylen-4]), storage_ix, storage)
+ histo[copylen-4]++
+ } else if copylen < 72 {
+ var tail uint = copylen - 8
+ var nbits uint32 = log2FloorNonZero(tail) - 1
+ var prefix uint = tail >> nbits
+ var code uint = uint((nbits << 1) + uint32(prefix) + 4)
+ writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage)
+ writeBits(uint(nbits), uint64(tail)-(uint64(prefix)<<nbits), storage_ix, storage)
+ histo[code]++
+ } else if copylen < 136 {
+ var tail uint = copylen - 8
+ var code uint = (tail >> 5) + 30
+ writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage)
+ writeBits(5, uint64(tail)&31, storage_ix, storage)
+ writeBits(uint(depth[64]), uint64(bits[64]), storage_ix, storage)
+ histo[code]++
+ histo[64]++
+ } else if copylen < 2120 {
+ var tail uint = copylen - 72
+ var nbits uint32 = log2FloorNonZero(tail)
+ var code uint = uint(nbits + 28)
+ writeBits(uint(depth[code]), uint64(bits[code]), storage_ix, storage)
+ writeBits(uint(nbits), uint64(tail)-(uint64(uint(1))<<nbits), storage_ix, storage)
+ writeBits(uint(depth[64]), uint64(bits[64]), storage_ix, storage)
+ histo[code]++
+ histo[64]++
+ } else {
+ writeBits(uint(depth[39]), uint64(bits[39]), storage_ix, storage)
+ writeBits(24, uint64(copylen)-2120, storage_ix, storage)
+ writeBits(uint(depth[64]), uint64(bits[64]), storage_ix, storage)
+ histo[39]++
+ histo[64]++
+ }
+}
+
+func emitDistance1(distance uint, depth []byte, bits []uint16, histo []uint32, storage_ix *uint, storage []byte) {
+ var d uint = distance + 3
+ var nbits uint32 = log2FloorNonZero(d) - 1
+ var prefix uint = (d >> nbits) & 1
+ var offset uint = (2 + prefix) << nbits
+ var distcode uint = uint(2*(nbits-1) + uint32(prefix) + 80)
+ writeBits(uint(depth[distcode]), uint64(bits[distcode]), storage_ix, storage)
+ writeBits(uint(nbits), uint64(d)-uint64(offset), storage_ix, storage)
+ histo[distcode]++
+}
+
+func emitLiterals(input []byte, len uint, depth []byte, bits []uint16, storage_ix *uint, storage []byte) {
+ var j uint
+ for j = 0; j < len; j++ {
+ var lit byte = input[j]
+ writeBits(uint(depth[lit]), uint64(bits[lit]), storage_ix, storage)
+ }
+}
+
+/* REQUIRES: len <= 1 << 24. */
+func storeMetaBlockHeader1(len uint, is_uncompressed bool, storage_ix *uint, storage []byte) {
+ var nibbles uint = 6
+
+ /* ISLAST */
+ writeBits(1, 0, storage_ix, storage)
+
+ if len <= 1<<16 {
+ nibbles = 4
+ } else if len <= 1<<20 {
+ nibbles = 5
+ }
+
+ writeBits(2, uint64(nibbles)-4, storage_ix, storage)
+ writeBits(nibbles*4, uint64(len)-1, storage_ix, storage)
+
+ /* ISUNCOMPRESSED */
+ writeSingleBit(is_uncompressed, storage_ix, storage)
+}
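+
+/* Worked example: len == 70000 needs nibbles == 5, so the header is 1 bit
+ ISLAST == 0, 2 bits of MNIBBLES code == 1, 20 bits of MLEN - 1 == 69999,
+ and 1 bit of ISUNCOMPRESSED: 24 bits in total. */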
+
+func updateBits(n_bits uint, bits uint32, pos uint, array []byte) {
+ for n_bits > 0 {
+ var byte_pos uint = pos >> 3
+ var n_unchanged_bits uint = pos & 7
+ var n_changed_bits uint = brotli_min_size_t(n_bits, 8-n_unchanged_bits)
+ var total_bits uint = n_unchanged_bits + n_changed_bits
+ var mask uint32 = (^((1 << total_bits) - 1)) | ((1 << n_unchanged_bits) - 1)
+ var unchanged_bits uint32 = uint32(array[byte_pos]) & mask
+ var changed_bits uint32 = bits & ((1 << n_changed_bits) - 1)
+ array[byte_pos] = byte(changed_bits<<n_unchanged_bits | unchanged_bits)
+ n_bits -= n_changed_bits
+ bits >>= n_changed_bits
+ pos += n_changed_bits
+ }
+}
+
+func rewindBitPosition1(new_storage_ix uint, storage_ix *uint, storage []byte) {
+ var bitpos uint = new_storage_ix & 7
+ var mask uint = (1 << bitpos) - 1
+ storage[new_storage_ix>>3] &= byte(mask)
+ *storage_ix = new_storage_ix
+}
+
+var shouldMergeBlock_kSampleRate uint = 43
+
+func shouldMergeBlock(data []byte, len uint, depths []byte) bool {
+ var histo = [256]uint{0}
+ var i uint
+ for i = 0; i < len; i += shouldMergeBlock_kSampleRate {
+ histo[data[i]]++
+ }
+ {
+ var total uint = (len + shouldMergeBlock_kSampleRate - 1) / shouldMergeBlock_kSampleRate
+ var r float64 = (fastLog2(total)+0.5)*float64(total) + 200
+ for i = 0; i < 256; i++ {
+ r -= float64(histo[i]) * (float64(depths[i]) + fastLog2(histo[i]))
+ }
+
+ return r >= 0.0
+ }
+}
+
+func shouldUseUncompressedMode(metablock_start []byte, next_emit []byte, insertlen uint, literal_ratio uint) bool {
+ var compressed uint = uint(-cap(next_emit) + cap(metablock_start))
+ if compressed*50 > insertlen {
+ return false
+ } else {
+ return literal_ratio > 980
+ }
+}
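+
+/* Note: uint(-cap(next_emit) + cap(metablock_start)) above is the c2go
+ rendering of the C pointer difference next_emit - metablock_start; both
+ slices share one backing array, and cap() shrinks as a slice's start
+ advances, so the difference is the number of bytes already compressed. */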
+
+func emitUncompressedMetaBlock1(begin []byte, end []byte, storage_ix_start uint, storage_ix *uint, storage []byte) {
+ var len uint = uint(-cap(end) + cap(begin))
+ rewindBitPosition1(storage_ix_start, storage_ix, storage)
+ storeMetaBlockHeader1(uint(len), true, storage_ix, storage)
+ *storage_ix = (*storage_ix + 7) &^ 7
+ copy(storage[*storage_ix>>3:], begin[:len])
+ *storage_ix += uint(len << 3)
+ storage[*storage_ix>>3] = 0
+}
+
+var kCmdHistoSeed = [128]uint32{
+ 0,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 0,
+ 0,
+ 0,
+ 0,
+}
+
+var compressFragmentFastImpl_kFirstBlockSize uint = 3 << 15
+var compressFragmentFastImpl_kMergeBlockSize uint = 1 << 16
+
+func compressFragmentFastImpl(in []byte, input_size uint, is_last bool, table []int, table_bits uint, cmd_depth []byte, cmd_bits []uint16, cmd_code_numbits *uint, cmd_code []byte, storage_ix *uint, storage []byte) {
+ var cmd_histo [128]uint32
+ var ip_end int
+ var next_emit int = 0
+ var base_ip int = 0
+ var input int = 0
+ const kInputMarginBytes uint = windowGap
+ const kMinMatchLen uint = 5
+ var metablock_start int = input
+ var block_size uint = brotli_min_size_t(input_size, compressFragmentFastImpl_kFirstBlockSize)
+ var total_block_size uint = block_size
+ var mlen_storage_ix uint = *storage_ix + 3
+ var lit_depth [256]byte
+ var lit_bits [256]uint16
+ var literal_ratio uint
+ var ip int
+ var last_distance int
+ var shift uint = 64 - table_bits
+
+ /* "next_emit" is a pointer to the first byte that is not covered by a
+ previous copy. Bytes between "next_emit" and the start of the next copy or
+ the end of the input will be emitted as literal bytes. */
+
+ /* Save the start of the first block for position and distance computations.
+ */
+
+ /* Save the bit position of the MLEN field of the meta-block header, so that
+ we can update it later if we decide to extend this meta-block. */
+ storeMetaBlockHeader1(block_size, false, storage_ix, storage)
+
+ /* No block splits, no contexts. */
+ writeBits(13, 0, storage_ix, storage)
+
+ literal_ratio = buildAndStoreLiteralPrefixCode(in[input:], block_size, lit_depth[:], lit_bits[:], storage_ix, storage)
+ {
+ /* Store the pre-compressed command and distance prefix codes. */
+ var i uint
+ for i = 0; i+7 < *cmd_code_numbits; i += 8 {
+ writeBits(8, uint64(cmd_code[i>>3]), storage_ix, storage)
+ }
+ }
+
+ writeBits(*cmd_code_numbits&7, uint64(cmd_code[*cmd_code_numbits>>3]), storage_ix, storage)
+
+ /* Initialize the command and distance histograms. We will gather
+ statistics of command and distance codes during the processing
+ of this block and use it to update the command and distance
+ prefix codes for the next block. */
+emit_commands:
+ copy(cmd_histo[:], kCmdHistoSeed[:])
+
+ /* "ip" is the input pointer. */
+ ip = input
+
+ last_distance = -1
+ ip_end = int(uint(input) + block_size)
+
+ if block_size >= kInputMarginBytes {
+ var len_limit uint = brotli_min_size_t(block_size-kMinMatchLen, input_size-kInputMarginBytes)
+ var ip_limit int = int(uint(input) + len_limit)
+ /* For the last block, we need to keep a 16 bytes margin so that we can be
+ sure that all distances are at most window size - 16.
+ For all other blocks, we only need to keep a margin of 5 bytes so that
+ we don't go over the block size with a copy. */
+
+ var next_hash uint32
+ ip++
+ for next_hash = hash5(in[ip:], shift); ; {
+ var skip uint32 = 32
+ var next_ip int = ip
+ /* Step 1: Scan forward in the input looking for a 5-byte-long match.
+ If we get close to exhausting the input then goto emit_remainder.
+
+ Heuristic match skipping: If 32 bytes are scanned with no matches
+ found, start looking only at every other byte. If 32 more bytes are
+ scanned, look at every third byte, etc.. When a match is found,
+ immediately go back to looking at every byte. This is a small loss
+ (~5% performance, ~0.1% density) for compressible data due to more
+ bookkeeping, but for non-compressible data (such as JPEG) it's a huge
+ win since the compressor quickly "realizes" the data is incompressible
+ and doesn't bother looking for matches everywhere.
+
+ The "skip" variable keeps track of how many bytes there are since the
+ last match; dividing it by 32 (i.e. right-shifting by five) gives the
+ number of bytes to move ahead for each iteration. */
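+
+ /* Concretely: "skip" starts at 32 and grows by one per probe, so the
+ first 32 probes advance one byte each, the next 32 advance two bytes
+ each, and after 96 misses (skip == 128) the scan moves four bytes per
+ probe. */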
+
+ var candidate int
+ assert(next_emit < ip)
+
+ trawl:
+ for {
+ var hash uint32 = next_hash
+ var bytes_between_hash_lookups uint32 = skip >> 5
+ skip++
+ assert(hash == hash5(in[next_ip:], shift))
+ ip = next_ip
+ next_ip = int(uint32(ip) + bytes_between_hash_lookups)
+ if next_ip > ip_limit {
+ goto emit_remainder
+ }
+
+ next_hash = hash5(in[next_ip:], shift)
+ candidate = ip - last_distance
+ if isMatch5(in[ip:], in[candidate:]) {
+ if candidate < ip {
+ table[hash] = int(ip - base_ip)
+ break
+ }
+ }
+
+ candidate = base_ip + table[hash]
+ assert(candidate >= base_ip)
+ assert(candidate < ip)
+
+ table[hash] = int(ip - base_ip)
+ if isMatch5(in[ip:], in[candidate:]) {
+ break
+ }
+ }
+
+ /* Check copy distance. If candidate is not feasible, continue search.
+ Checking is done outside of hot loop to reduce overhead. */
+ if ip-candidate > maxDistance_compress_fragment {
+ goto trawl
+ }
+
+ /* Step 2: Emit the found match together with the literal bytes from
+ "next_emit" to the bit stream, and then see if we can find a next match
+ immediately afterwards. Repeat until we find no match for the input
+ without emitting some literal bytes. */
+ {
+ var base int = ip
+ /* > 0 */
+ var matched uint = 5 + findMatchLengthWithLimit(in[candidate+5:], in[ip+5:], uint(ip_end-ip)-5)
+ var distance int = int(base - candidate)
+ /* We have a 5-byte match at ip, and we need to emit bytes in
+ [next_emit, ip). */
+
+ var insert uint = uint(base - next_emit)
+ ip += int(matched)
+ if insert < 6210 {
+ emitInsertLen1(insert, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage)
+ } else if shouldUseUncompressedMode(in[metablock_start:], in[next_emit:], insert, literal_ratio) {
+ emitUncompressedMetaBlock1(in[metablock_start:], in[base:], mlen_storage_ix-3, storage_ix, storage)
+ input_size -= uint(base - input)
+ input = base
+ next_emit = input
+ goto next_block
+ } else {
+ emitLongInsertLen(insert, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage)
+ }
+
+ emitLiterals(in[next_emit:], insert, lit_depth[:], lit_bits[:], storage_ix, storage)
+ if distance == last_distance {
+ writeBits(uint(cmd_depth[64]), uint64(cmd_bits[64]), storage_ix, storage)
+ cmd_histo[64]++
+ } else {
+ emitDistance1(uint(distance), cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage)
+ last_distance = distance
+ }
+
+ emitCopyLenLastDistance1(matched, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage)
+
+ next_emit = ip
+ if ip >= ip_limit {
+ goto emit_remainder
+ }
+
+ /* We could immediately start working at ip now, but to improve
+ compression we first update "table" with the hashes of some positions
+ within the last copy. */
+ {
+ var input_bytes uint64 = binary.LittleEndian.Uint64(in[ip-3:])
+ var prev_hash uint32 = hashBytesAtOffset5(input_bytes, 0, shift)
+ var cur_hash uint32 = hashBytesAtOffset5(input_bytes, 3, shift)
+ table[prev_hash] = int(ip - base_ip - 3)
+ prev_hash = hashBytesAtOffset5(input_bytes, 1, shift)
+ table[prev_hash] = int(ip - base_ip - 2)
+ prev_hash = hashBytesAtOffset5(input_bytes, 2, shift)
+ table[prev_hash] = int(ip - base_ip - 1)
+
+ candidate = base_ip + table[cur_hash]
+ table[cur_hash] = int(ip - base_ip)
+ }
+ }
+
+ for isMatch5(in[ip:], in[candidate:]) {
+ var base int = ip
+ /* We have a 5-byte match at ip, and no need to emit any literal bytes
+ prior to ip. */
+
+ var matched uint = 5 + findMatchLengthWithLimit(in[candidate+5:], in[ip+5:], uint(ip_end-ip)-5)
+ if ip-candidate > maxDistance_compress_fragment {
+ break
+ }
+ ip += int(matched)
+ last_distance = int(base - candidate) /* > 0 */
+ emitCopyLen1(matched, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage)
+ emitDistance1(uint(last_distance), cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage)
+
+ next_emit = ip
+ if ip >= ip_limit {
+ goto emit_remainder
+ }
+
+ /* We could immediately start working at ip now, but to improve
+ compression we first update "table" with the hashes of some positions
+ within the last copy. */
+ {
+ var input_bytes uint64 = binary.LittleEndian.Uint64(in[ip-3:])
+ var prev_hash uint32 = hashBytesAtOffset5(input_bytes, 0, shift)
+ var cur_hash uint32 = hashBytesAtOffset5(input_bytes, 3, shift)
+ table[prev_hash] = int(ip - base_ip - 3)
+ prev_hash = hashBytesAtOffset5(input_bytes, 1, shift)
+ table[prev_hash] = int(ip - base_ip - 2)
+ prev_hash = hashBytesAtOffset5(input_bytes, 2, shift)
+ table[prev_hash] = int(ip - base_ip - 1)
+
+ candidate = base_ip + table[cur_hash]
+ table[cur_hash] = int(ip - base_ip)
+ }
+ }
+
+ ip++
+ next_hash = hash5(in[ip:], shift)
+ }
+ }
+
+emit_remainder:
+ assert(next_emit <= ip_end)
+ input += int(block_size)
+ input_size -= block_size
+ block_size = brotli_min_size_t(input_size, compressFragmentFastImpl_kMergeBlockSize)
+
+ /* Decide if we want to continue this meta-block instead of emitting the
+ last insert-only command. */
+ if input_size > 0 && total_block_size+block_size <= 1<<20 && shouldMergeBlock(in[input:], block_size, lit_depth[:]) {
+ assert(total_block_size > 1<<16)
+
+ /* Update the size of the current meta-block and continue emitting commands.
+ We can do this because the current size and the new size both have 5
+ nibbles. */
+ total_block_size += block_size
+
+ updateBits(20, uint32(total_block_size-1), mlen_storage_ix, storage)
+ goto emit_commands
+ }
+
+ /* Emit the remaining bytes as literals. */
+ if next_emit < ip_end {
+ var insert uint = uint(ip_end - next_emit)
+ if insert < 6210 {
+ emitInsertLen1(insert, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage)
+ emitLiterals(in[next_emit:], insert, lit_depth[:], lit_bits[:], storage_ix, storage)
+ } else if shouldUseUncompressedMode(in[metablock_start:], in[next_emit:], insert, literal_ratio) {
+ emitUncompressedMetaBlock1(in[metablock_start:], in[ip_end:], mlen_storage_ix-3, storage_ix, storage)
+ } else {
+ emitLongInsertLen(insert, cmd_depth, cmd_bits, cmd_histo[:], storage_ix, storage)
+ emitLiterals(in[next_emit:], insert, lit_depth[:], lit_bits[:], storage_ix, storage)
+ }
+ }
+
+ next_emit = ip_end
+
+ /* If we have more data, write a new meta-block header and prefix codes and
+ then continue emitting commands. */
+next_block:
+ if input_size > 0 {
+ metablock_start = input
+ block_size = brotli_min_size_t(input_size, compressFragmentFastImpl_kFirstBlockSize)
+ total_block_size = block_size
+
+ /* Save the bit position of the MLEN field of the meta-block header, so that
+ we can update it later if we decide to extend this meta-block. */
+ mlen_storage_ix = *storage_ix + 3
+
+ storeMetaBlockHeader1(block_size, false, storage_ix, storage)
+
+ /* No block splits, no contexts. */
+ writeBits(13, 0, storage_ix, storage)
+
+ literal_ratio = buildAndStoreLiteralPrefixCode(in[input:], block_size, lit_depth[:], lit_bits[:], storage_ix, storage)
+ buildAndStoreCommandPrefixCode1(cmd_histo[:], cmd_depth, cmd_bits, storage_ix, storage)
+ goto emit_commands
+ }
+
+ if !is_last {
+ /* If this is not the last block, update the command and distance prefix
+ codes for the next block and store the compressed forms. */
+ cmd_code[0] = 0
+
+ *cmd_code_numbits = 0
+ buildAndStoreCommandPrefixCode1(cmd_histo[:], cmd_depth, cmd_bits, cmd_code_numbits, cmd_code)
+ }
+}
+
+/* Compresses "input" string to the "*storage" buffer as one or more complete
+ meta-blocks, and updates the "*storage_ix" bit position.
+
+ If "is_last" is 1, emits an additional empty last meta-block.
+
+ "cmd_depth" and "cmd_bits" contain the command and distance prefix codes
+ (see comment in encode.h) used for the encoding of this input fragment.
+ If "is_last" is 0, they are updated to reflect the statistics
+ of this input fragment, to be used for the encoding of the next fragment.
+
+ "*cmd_code_numbits" is the number of bits of the compressed representation
+ of the command and distance prefix codes, and "cmd_code" is an array of
+ at least "(*cmd_code_numbits + 7) >> 3" size that contains the compressed
+ command and distance prefix codes. If "is_last" is 0, these are also
+ updated to represent the updated "cmd_depth" and "cmd_bits".
+
+ REQUIRES: "input_size" is greater than zero, or "is_last" is 1.
+ REQUIRES: "input_size" is less or equal to maximal metablock size (1 << 24).
+ REQUIRES: All elements in "table[0..table_size-1]" are initialized to zero.
+ REQUIRES: "table_size" is an odd (9, 11, 13, 15) power of two
+ OUTPUT: maximal copy distance <= |input_size|
+ OUTPUT: maximal copy distance <= BROTLI_MAX_BACKWARD_LIMIT(18) */
+func compressFragmentFast(input []byte, input_size uint, is_last bool, table []int, table_size uint, cmd_depth []byte, cmd_bits []uint16, cmd_code_numbits *uint, cmd_code []byte, storage_ix *uint, storage []byte) {
+ var initial_storage_ix uint = *storage_ix
+ var table_bits uint = uint(log2FloorNonZero(table_size))
+
+ if input_size == 0 {
+ assert(is_last)
+ writeBits(1, 1, storage_ix, storage) /* islast */
+ writeBits(1, 1, storage_ix, storage) /* isempty */
+ *storage_ix = (*storage_ix + 7) &^ 7
+ return
+ }
+
+ compressFragmentFastImpl(input, input_size, is_last, table, table_bits, cmd_depth, cmd_bits, cmd_code_numbits, cmd_code, storage_ix, storage)
+
+ /* If output is larger than single uncompressed block, rewrite it. */
+ if *storage_ix-initial_storage_ix > 31+(input_size<<3) {
+ emitUncompressedMetaBlock1(input, input[input_size:], initial_storage_ix, storage_ix, storage)
+ }
+
+ if is_last {
+ writeBits(1, 1, storage_ix, storage) /* islast */
+ writeBits(1, 1, storage_ix, storage) /* isempty */
+ *storage_ix = (*storage_ix + 7) &^ 7
+ }
+}
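+
+/* Illustrative call-site sketch (variable names are hypothetical; the real
+ encoder keeps these buffers in its state struct and seeds the command code
+ via initCommandPrefixCodes in encode.go). Per the REQUIRES above, "table"
+ must be zero-filled with an odd power-of-two size:
+
+ var storageIx uint
+ storage := make([]byte, 2*len(input)+512) // generous worst-case bound
+ table := make([]int, 1<<11)
+ var cmdDepth [128]byte
+ var cmdBits [128]uint16
+ var cmdCodeNumbits uint
+ var cmdCode [512]byte
+ initCommandPrefixCodes(cmdDepth[:], cmdBits[:], cmdCode[:], &cmdCodeNumbits)
+ compressFragmentFast(input, uint(len(input)), true, table, 1<<11,
+ cmdDepth[:], cmdBits[:], &cmdCodeNumbits, cmdCode[:], &storageIx, storage)
+*/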
diff --git a/vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go b/vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go
new file mode 100644
index 0000000..172dc7f
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go
@@ -0,0 +1,748 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2015 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Function for fast encoding of an input fragment, independently from the input
+ history. This function uses two-pass processing: in the first pass we save
+ the found backward matches and literal bytes into a buffer, and in the
+ second pass we emit them into the bit stream using prefix codes built based
+ on the actual command and literal byte histograms. */
+
+const kCompressFragmentTwoPassBlockSize uint = 1 << 17
+
+func hash1(p []byte, shift uint, length uint) uint32 {
+ var h uint64 = (binary.LittleEndian.Uint64(p) << ((8 - length) * 8)) * uint64(kHashMul32)
+ return uint32(h >> shift)
+}
+
+func hashBytesAtOffset(v uint64, offset uint, shift uint, length uint) uint32 {
+ assert(offset <= 8-length)
+ {
+ var h uint64 = ((v >> (8 * offset)) << ((8 - length) * 8)) * uint64(kHashMul32)
+ return uint32(h >> shift)
+ }
+}
+
+func isMatch1(p1 []byte, p2 []byte, length uint) bool {
+ if binary.LittleEndian.Uint32(p1) != binary.LittleEndian.Uint32(p2) {
+ return false
+ }
+ if length == 4 {
+ return true
+ }
+ return p1[4] == p2[4] && p1[5] == p2[5]
+}
+
+/* Builds a command and distance prefix code (each 64 symbols) into "depth" and
+ "bits" based on "histogram" and stores it into the bit stream. */
+func buildAndStoreCommandPrefixCode(histogram []uint32, depth []byte, bits []uint16, storage_ix *uint, storage []byte) {
+ var tree [129]huffmanTree
+ var cmd_depth = [numCommandSymbols]byte{0}
+ /* Tree size for building a tree over 64 symbols is 2 * 64 + 1. */
+
+ var cmd_bits [64]uint16
+ createHuffmanTree(histogram, 64, 15, tree[:], depth)
+ createHuffmanTree(histogram[64:], 64, 14, tree[:], depth[64:])
+
+ /* We have to jump through a few hoops here in order to compute
+ the command bits because the symbols are in a different order than in
+ the full alphabet. This looks complicated, but having the symbols
+ in this order in the command bits saves a few branches in the Emit*
+ functions. */
+ copy(cmd_depth[:], depth[24:][:24])
+
+ copy(cmd_depth[24:][:], depth[:8])
+ copy(cmd_depth[32:][:], depth[48:][:8])
+ copy(cmd_depth[40:][:], depth[8:][:8])
+ copy(cmd_depth[48:][:], depth[56:][:8])
+ copy(cmd_depth[56:][:], depth[16:][:8])
+ convertBitDepthsToSymbols(cmd_depth[:], 64, cmd_bits[:])
+ copy(bits, cmd_bits[24:][:8])
+ copy(bits[8:], cmd_bits[40:][:8])
+ copy(bits[16:], cmd_bits[56:][:8])
+ copy(bits[24:], cmd_bits[:24])
+ copy(bits[48:], cmd_bits[32:][:8])
+ copy(bits[56:], cmd_bits[48:][:8])
+ convertBitDepthsToSymbols(depth[64:], 64, bits[64:])
+ {
+ /* Create the bit length array for the full command alphabet. */
+ var i uint
+ for i := 0; i < int(64); i++ {
+ cmd_depth[i] = 0
+ } /* only 64 first values were used */
+ copy(cmd_depth[:], depth[24:][:8])
+ copy(cmd_depth[64:][:], depth[32:][:8])
+ copy(cmd_depth[128:][:], depth[40:][:8])
+ copy(cmd_depth[192:][:], depth[48:][:8])
+ copy(cmd_depth[384:][:], depth[56:][:8])
+ for i = 0; i < 8; i++ {
+ cmd_depth[128+8*i] = depth[i]
+ cmd_depth[256+8*i] = depth[8+i]
+ cmd_depth[448+8*i] = depth[16+i]
+ }
+
+ storeHuffmanTree(cmd_depth[:], numCommandSymbols, tree[:], storage_ix, storage)
+ }
+
+ storeHuffmanTree(depth[64:], 64, tree[:], storage_ix, storage)
+}
+
+func emitInsertLen(insertlen uint32, commands *[]uint32) {
+ if insertlen < 6 {
+ (*commands)[0] = insertlen
+ } else if insertlen < 130 {
+ var tail uint32 = insertlen - 2
+ var nbits uint32 = log2FloorNonZero(uint(tail)) - 1
+ var prefix uint32 = tail >> nbits
+ var inscode uint32 = (nbits << 1) + prefix + 2
+ var extra uint32 = tail - (prefix << nbits)
+ (*commands)[0] = inscode | extra<<8
+ } else if insertlen < 2114 {
+ var tail uint32 = insertlen - 66
+ var nbits uint32 = log2FloorNonZero(uint(tail))
+ var code uint32 = nbits + 10
+ var extra uint32 = tail - (1 << nbits)
+ (*commands)[0] = code | extra<<8
+ } else if insertlen < 6210 {
+ var extra uint32 = insertlen - 2114
+ (*commands)[0] = 21 | extra<<8
+ } else if insertlen < 22594 {
+ var extra uint32 = insertlen - 6210
+ (*commands)[0] = 22 | extra<<8
+ } else {
+ var extra uint32 = insertlen - 22594
+ (*commands)[0] = 23 | extra<<8
+ }
+
+ *commands = (*commands)[1:]
+}
+
+func emitCopyLen(copylen uint, commands *[]uint32) {
+ if copylen < 10 {
+ (*commands)[0] = uint32(copylen + 38)
+ } else if copylen < 134 {
+ var tail uint = copylen - 6
+ var nbits uint = uint(log2FloorNonZero(tail) - 1)
+ var prefix uint = tail >> nbits
+ var code uint = (nbits << 1) + prefix + 44
+ var extra uint = tail - (prefix << nbits)
+ (*commands)[0] = uint32(code | extra<<8)
+ } else if copylen < 2118 {
+ var tail uint = copylen - 70
+ var nbits uint = uint(log2FloorNonZero(tail))
+ var code uint = nbits + 52
+ var extra uint = tail - (uint(1) << nbits)
+ (*commands)[0] = uint32(code | extra<<8)
+ } else {
+ var extra uint = copylen - 2118
+ (*commands)[0] = uint32(63 | extra<<8)
+ }
+
+ *commands = (*commands)[1:]
+}
+
+func emitCopyLenLastDistance(copylen uint, commands *[]uint32) {
+ if copylen < 12 {
+ (*commands)[0] = uint32(copylen + 20)
+ *commands = (*commands)[1:]
+ } else if copylen < 72 {
+ var tail uint = copylen - 8
+ var nbits uint = uint(log2FloorNonZero(tail) - 1)
+ var prefix uint = tail >> nbits
+ var code uint = (nbits << 1) + prefix + 28
+ var extra uint = tail - (prefix << nbits)
+ (*commands)[0] = uint32(code | extra<<8)
+ *commands = (*commands)[1:]
+ } else if copylen < 136 {
+ var tail uint = copylen - 8
+ var code uint = (tail >> 5) + 54
+ var extra uint = tail & 31
+ (*commands)[0] = uint32(code | extra<<8)
+ *commands = (*commands)[1:]
+ (*commands)[0] = 64
+ *commands = (*commands)[1:]
+ } else if copylen < 2120 {
+ var tail uint = copylen - 72
+ var nbits uint = uint(log2FloorNonZero(tail))
+ var code uint = nbits + 52
+ var extra uint = tail - (uint(1) << nbits)
+ (*commands)[0] = uint32(code | extra<<8)
+ *commands = (*commands)[1:]
+ (*commands)[0] = 64
+ *commands = (*commands)[1:]
+ } else {
+ var extra uint = copylen - 2120
+ (*commands)[0] = uint32(63 | extra<<8)
+ *commands = (*commands)[1:]
+ (*commands)[0] = 64
+ *commands = (*commands)[1:]
+ }
+}
+
+func emitDistance(distance uint32, commands *[]uint32) {
+ var d uint32 = distance + 3
+ var nbits uint32 = log2FloorNonZero(uint(d)) - 1
+ var prefix uint32 = (d >> nbits) & 1
+ var offset uint32 = (2 + prefix) << nbits
+ var distcode uint32 = 2*(nbits-1) + prefix + 80
+ var extra uint32 = d - offset
+ (*commands)[0] = distcode | extra<<8
+ *commands = (*commands)[1:]
+}
+
+/* REQUIRES: len <= 1 << 24. */
+func storeMetaBlockHeader(len uint, is_uncompressed bool, storage_ix *uint, storage []byte) {
+ var nibbles uint = 6
+
+ /* ISLAST */
+ writeBits(1, 0, storage_ix, storage)
+
+ if len <= 1<<16 {
+ nibbles = 4
+ } else if len <= 1<<20 {
+ nibbles = 5
+ }
+
+ writeBits(2, uint64(nibbles)-4, storage_ix, storage)
+ writeBits(nibbles*4, uint64(len)-1, storage_ix, storage)
+
+ /* ISUNCOMPRESSED */
+ writeSingleBit(is_uncompressed, storage_ix, storage)
+}
+
+func createCommands(input []byte, block_size uint, input_size uint, base_ip_ptr []byte, table []int, table_bits uint, min_match uint, literals *[]byte, commands *[]uint32) {
+ var ip int = 0
+ var shift uint = 64 - table_bits
+ var ip_end int = int(block_size)
+ var base_ip int = -cap(base_ip_ptr) + cap(input)
+ var next_emit int = 0
+ var last_distance int = -1
+ /* "ip" is the input pointer. */
+
+ const kInputMarginBytes uint = windowGap
+
+ /* "next_emit" is a pointer to the first byte that is not covered by a
+ previous copy. Bytes between "next_emit" and the start of the next copy or
+ the end of the input will be emitted as literal bytes. */
+ if block_size >= kInputMarginBytes {
+ var len_limit uint = brotli_min_size_t(block_size-min_match, input_size-kInputMarginBytes)
+ var ip_limit int = int(len_limit)
+ /* For the last block, we need to keep a 16 bytes margin so that we can be
+ sure that all distances are at most window size - 16.
+ For all other blocks, we only need to keep a margin of 5 bytes so that
+ we don't go over the block size with a copy. */
+
+ var next_hash uint32
+ ip++
+ for next_hash = hash1(input[ip:], shift, min_match); ; {
+ var skip uint32 = 32
+ var next_ip int = ip
+ /* Step 1: Scan forward in the input looking for a 6-byte-long match.
+ If we get close to exhausting the input then goto emit_remainder.
+
+ Heuristic match skipping: If 32 bytes are scanned with no matches
+ found, start looking only at every other byte. If 32 more bytes are
+ scanned, look at every third byte, etc.. When a match is found,
+ immediately go back to looking at every byte. This is a small loss
+ (~5% performance, ~0.1% density) for compressible data due to more
+ bookkeeping, but for non-compressible data (such as JPEG) it's a huge
+ win since the compressor quickly "realizes" the data is incompressible
+ and doesn't bother looking for matches everywhere.
+
+ The "skip" variable keeps track of how many bytes there are since the
+ last match; dividing it by 32 (i.e. right-shifting by five) gives the
+ number of bytes to move ahead for each iteration. */
+
+ var candidate int
+
+ assert(next_emit < ip)
+
+ trawl:
+ for {
+ var hash uint32 = next_hash
+ var bytes_between_hash_lookups uint32 = skip >> 5
+ skip++
+ ip = next_ip
+ assert(hash == hash1(input[ip:], shift, min_match))
+ next_ip = int(uint32(ip) + bytes_between_hash_lookups)
+ if next_ip > ip_limit {
+ goto emit_remainder
+ }
+
+ next_hash = hash1(input[next_ip:], shift, min_match)
+ candidate = ip - last_distance
+ if isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) {
+ if candidate < ip {
+ table[hash] = int(ip - base_ip)
+ break
+ }
+ }
+
+ candidate = base_ip + table[hash]
+ assert(candidate >= base_ip)
+ assert(candidate < ip)
+
+ table[hash] = int(ip - base_ip)
+ if isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) {
+ break
+ }
+ }
+
+ /* Check copy distance. If candidate is not feasible, continue search.
+ Checking is done outside of hot loop to reduce overhead. */
+ if ip-candidate > maxDistance_compress_fragment {
+ goto trawl
+ }
+
+ /* Step 2: Emit the found match together with the literal bytes from
+ "next_emit", and then see if we can find a next match immediately
+ afterwards. Repeat until we find no match for the input
+ without emitting some literal bytes. */
+ {
+ var base int = ip
+ /* > 0 */
+ var matched uint = min_match + findMatchLengthWithLimit(base_ip_ptr[uint(candidate-base_ip)+min_match:], input[uint(ip)+min_match:], uint(ip_end-ip)-min_match)
+ var distance int = int(base - candidate)
+ /* We have a 6-byte match at ip, and we need to emit bytes in
+ [next_emit, ip). */
+
+ var insert int = int(base - next_emit)
+ ip += int(matched)
+ emitInsertLen(uint32(insert), commands)
+ copy(*literals, input[next_emit:][:uint(insert)])
+ *literals = (*literals)[insert:]
+ if distance == last_distance {
+ (*commands)[0] = 64
+ *commands = (*commands)[1:]
+ } else {
+ emitDistance(uint32(distance), commands)
+ last_distance = distance
+ }
+
+ emitCopyLenLastDistance(matched, commands)
+
+ next_emit = ip
+ if ip >= ip_limit {
+ goto emit_remainder
+ }
+ {
+ var input_bytes uint64
+ var cur_hash uint32
+ /* We could immediately start working at ip now, but to improve
+ compression we first update "table" with the hashes of some
+ positions within the last copy. */
+
+ var prev_hash uint32
+ if min_match == 4 {
+ input_bytes = binary.LittleEndian.Uint64(input[ip-3:])
+ cur_hash = hashBytesAtOffset(input_bytes, 3, shift, min_match)
+ prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 3)
+ prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 2)
+ prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 1)
+ } else {
+ input_bytes = binary.LittleEndian.Uint64(input[ip-5:])
+ prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 5)
+ prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 4)
+ prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 3)
+ input_bytes = binary.LittleEndian.Uint64(input[ip-2:])
+ cur_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
+ prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 2)
+ prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 1)
+ }
+
+ candidate = base_ip + table[cur_hash]
+ table[cur_hash] = int(ip - base_ip)
+ }
+ }
+
+ for ip-candidate <= maxDistance_compress_fragment && isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) {
+ var base int = ip
+ /* We have a 6-byte match at ip, and no need to emit any
+ literal bytes prior to ip. */
+
+ var matched uint = min_match + findMatchLengthWithLimit(base_ip_ptr[uint(candidate-base_ip)+min_match:], input[uint(ip)+min_match:], uint(ip_end-ip)-min_match)
+ ip += int(matched)
+ last_distance = int(base - candidate) /* > 0 */
+ emitCopyLen(matched, commands)
+ emitDistance(uint32(last_distance), commands)
+
+ next_emit = ip
+ if ip >= ip_limit {
+ goto emit_remainder
+ }
+ {
+ var input_bytes uint64
+ var cur_hash uint32
+ /* We could immediately start working at ip now, but to improve
+ compression we first update "table" with the hashes of some
+ positions within the last copy. */
+
+ var prev_hash uint32
+ if min_match == 4 {
+ input_bytes = binary.LittleEndian.Uint64(input[ip-3:])
+ cur_hash = hashBytesAtOffset(input_bytes, 3, shift, min_match)
+ prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 3)
+ prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 2)
+ prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 1)
+ } else {
+ input_bytes = binary.LittleEndian.Uint64(input[ip-5:])
+ prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 5)
+ prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 4)
+ prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 3)
+ input_bytes = binary.LittleEndian.Uint64(input[ip-2:])
+ cur_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
+ prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 2)
+ prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
+ table[prev_hash] = int(ip - base_ip - 1)
+ }
+
+ candidate = base_ip + table[cur_hash]
+ table[cur_hash] = int(ip - base_ip)
+ }
+ }
+
+ ip++
+ next_hash = hash1(input[ip:], shift, min_match)
+ }
+ }
+
+emit_remainder:
+ assert(next_emit <= ip_end)
+
+ /* Emit the remaining bytes as literals. */
+ if next_emit < ip_end {
+ var insert uint32 = uint32(ip_end - next_emit)
+ emitInsertLen(insert, commands)
+ copy(*literals, input[next_emit:][:insert])
+ *literals = (*literals)[insert:]
+ }
+}
+
+var storeCommands_kNumExtraBits = [128]uint32{
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 2,
+ 2,
+ 3,
+ 3,
+ 4,
+ 4,
+ 5,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 12,
+ 14,
+ 24,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 2,
+ 2,
+ 3,
+ 3,
+ 4,
+ 4,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 2,
+ 2,
+ 3,
+ 3,
+ 4,
+ 4,
+ 5,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 24,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 2,
+ 2,
+ 3,
+ 3,
+ 4,
+ 4,
+ 5,
+ 5,
+ 6,
+ 6,
+ 7,
+ 7,
+ 8,
+ 8,
+ 9,
+ 9,
+ 10,
+ 10,
+ 11,
+ 11,
+ 12,
+ 12,
+ 13,
+ 13,
+ 14,
+ 14,
+ 15,
+ 15,
+ 16,
+ 16,
+ 17,
+ 17,
+ 18,
+ 18,
+ 19,
+ 19,
+ 20,
+ 20,
+ 21,
+ 21,
+ 22,
+ 22,
+ 23,
+ 23,
+ 24,
+ 24,
+}
+var storeCommands_kInsertOffset = [24]uint32{
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 8,
+ 10,
+ 14,
+ 18,
+ 26,
+ 34,
+ 50,
+ 66,
+ 98,
+ 130,
+ 194,
+ 322,
+ 578,
+ 1090,
+ 2114,
+ 6210,
+ 22594,
+}
+
+func storeCommands(literals []byte, num_literals uint, commands []uint32, num_commands uint, storage_ix *uint, storage []byte) {
+ var lit_depths [256]byte
+ var lit_bits [256]uint16
+ var lit_histo = [256]uint32{0}
+ var cmd_depths = [128]byte{0}
+ var cmd_bits = [128]uint16{0}
+ var cmd_histo = [128]uint32{0}
+ var i uint
+ for i = 0; i < num_literals; i++ {
+ lit_histo[literals[i]]++
+ }
+
+ buildAndStoreHuffmanTreeFast(lit_histo[:], num_literals, /* max_bits = */
+ 8, lit_depths[:], lit_bits[:], storage_ix, storage)
+
+ for i = 0; i < num_commands; i++ {
+ var code uint32 = commands[i] & 0xFF
+ assert(code < 128)
+ cmd_histo[code]++
+ }
+
+ cmd_histo[1] += 1
+ cmd_histo[2] += 1
+ cmd_histo[64] += 1
+ cmd_histo[84] += 1
+ buildAndStoreCommandPrefixCode(cmd_histo[:], cmd_depths[:], cmd_bits[:], storage_ix, storage)
+
+ for i = 0; i < num_commands; i++ {
+ var cmd uint32 = commands[i]
+ var code uint32 = cmd & 0xFF
+ var extra uint32 = cmd >> 8
+ assert(code < 128)
+ writeBits(uint(cmd_depths[code]), uint64(cmd_bits[code]), storage_ix, storage)
+ writeBits(uint(storeCommands_kNumExtraBits[code]), uint64(extra), storage_ix, storage)
+ if code < 24 {
+ var insert uint32 = storeCommands_kInsertOffset[code] + extra
+ var j uint32
+ for j = 0; j < insert; j++ {
+ var lit byte = literals[0]
+ writeBits(uint(lit_depths[lit]), uint64(lit_bits[lit]), storage_ix, storage)
+ literals = literals[1:]
+ }
+ }
+ }
+}
+
+/* Acceptable loss for uncompressible speedup is 2% */
+const minRatio = 0.98
+
+const sampleRate = 43
+
+func shouldCompress(input []byte, input_size uint, num_literals uint) bool {
+ var corpus_size float64 = float64(input_size)
+ if float64(num_literals) < minRatio*corpus_size {
+ return true
+ } else {
+ var literal_histo = [256]uint32{0}
+ var max_total_bit_cost float64 = corpus_size * 8 * minRatio / sampleRate
+ var i uint
+ for i = 0; i < input_size; i += sampleRate {
+ literal_histo[input[i]]++
+ }
+
+ return bitsEntropy(literal_histo[:], 256) < max_total_bit_cost
+ }
+}
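+
+/* Worked example: a 1<<16 byte block gets a budget of
+ 65536 * 8 * 0.98 / 43, about 11949 bits, for the entropy of the sampled
+ literal histogram; anything denser is emitted as an uncompressed block. */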
+
+func rewindBitPosition(new_storage_ix uint, storage_ix *uint, storage []byte) {
+ var bitpos uint = new_storage_ix & 7
+ var mask uint = (1 << bitpos) - 1
+ storage[new_storage_ix>>3] &= byte(mask)
+ *storage_ix = new_storage_ix
+}
+
+func emitUncompressedMetaBlock(input []byte, input_size uint, storage_ix *uint, storage []byte) {
+ storeMetaBlockHeader(input_size, true, storage_ix, storage)
+ *storage_ix = (*storage_ix + 7) &^ 7
+ copy(storage[*storage_ix>>3:], input[:input_size])
+ *storage_ix += input_size << 3
+ storage[*storage_ix>>3] = 0
+}
+
+func compressFragmentTwoPassImpl(input []byte, input_size uint, is_last bool, command_buf []uint32, literal_buf []byte, table []int, table_bits uint, min_match uint, storage_ix *uint, storage []byte) {
+ /* Save the start of the first block for position and distance computations.
+ */
+ var base_ip []byte = input
+
+ for input_size > 0 {
+ var block_size uint = brotli_min_size_t(input_size, kCompressFragmentTwoPassBlockSize)
+ var commands []uint32 = command_buf
+ var literals []byte = literal_buf
+ var num_literals uint
+ createCommands(input, block_size, input_size, base_ip, table, table_bits, min_match, &literals, &commands)
+ num_literals = uint(-cap(literals) + cap(literal_buf))
+ if shouldCompress(input, block_size, num_literals) {
+ var num_commands uint = uint(-cap(commands) + cap(command_buf))
+ storeMetaBlockHeader(block_size, false, storage_ix, storage)
+
+ /* No block splits, no contexts. */
+ writeBits(13, 0, storage_ix, storage)
+
+ storeCommands(literal_buf, num_literals, command_buf, num_commands, storage_ix, storage)
+ } else {
+ /* Since we did not find many backward references and the entropy of
+ the data is close to 8 bits, we can simply emit an uncompressed block.
+ This makes compression speed of uncompressible data about 3x faster. */
+ emitUncompressedMetaBlock(input, block_size, storage_ix, storage)
+ }
+
+ input = input[block_size:]
+ input_size -= block_size
+ }
+}
+
+/* Compresses "input" string to the "*storage" buffer as one or more complete
+ meta-blocks, and updates the "*storage_ix" bit position.
+
+ If "is_last" is 1, emits an additional empty last meta-block.
+
+ REQUIRES: "input_size" is greater than zero, or "is_last" is 1.
+ REQUIRES: "input_size" is less or equal to maximal metablock size (1 << 24).
+ REQUIRES: "command_buf" and "literal_buf" point to at least
+ kCompressFragmentTwoPassBlockSize long arrays.
+ REQUIRES: All elements in "table[0..table_size-1]" are initialized to zero.
+ REQUIRES: "table_size" is a power of two
+ OUTPUT: maximal copy distance <= |input_size|
+ OUTPUT: maximal copy distance <= BROTLI_MAX_BACKWARD_LIMIT(18) */
+func compressFragmentTwoPass(input []byte, input_size uint, is_last bool, command_buf []uint32, literal_buf []byte, table []int, table_size uint, storage_ix *uint, storage []byte) {
+ var initial_storage_ix uint = *storage_ix
+ var table_bits uint = uint(log2FloorNonZero(table_size))
+ var min_match uint
+ if table_bits <= 15 {
+ min_match = 4
+ } else {
+ min_match = 6
+ }
+ compressFragmentTwoPassImpl(input, input_size, is_last, command_buf, literal_buf, table, table_bits, min_match, storage_ix, storage)
+
+ /* If output is larger than single uncompressed block, rewrite it. */
+ if *storage_ix-initial_storage_ix > 31+(input_size<<3) {
+ rewindBitPosition(initial_storage_ix, storage_ix, storage)
+ emitUncompressedMetaBlock(input, input_size, storage_ix, storage)
+ }
+
+ if is_last {
+ writeBits(1, 1, storage_ix, storage) /* islast */
+ writeBits(1, 1, storage_ix, storage) /* isempty */
+ *storage_ix = (*storage_ix + 7) &^ 7
+ }
+}
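+
+/* Illustrative call-site sketch (variable names are hypothetical), following
+ the REQUIRES above; table_bits == 14 <= 15 selects min_match == 4:
+
+ commandBuf := make([]uint32, kCompressFragmentTwoPassBlockSize)
+ literalBuf := make([]byte, kCompressFragmentTwoPassBlockSize)
+ table := make([]int, 1<<14) // zero-filled, power-of-two size
+ var storageIx uint
+ compressFragmentTwoPass(input, uint(len(input)), true, commandBuf,
+ literalBuf, table, 1<<14, &storageIx, storage)
+*/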
diff --git a/vendor/github.com/andybalholm/brotli/constants.go b/vendor/github.com/andybalholm/brotli/constants.go
new file mode 100644
index 0000000..a880dff
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/constants.go
@@ -0,0 +1,77 @@
+package brotli
+
+/* Copyright 2016 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Specification: 7.3. Encoding of the context map */
+const contextMapMaxRle = 16
+
+/* Specification: 2. Compressed representation overview */
+const maxNumberOfBlockTypes = 256
+
+/* Specification: 3.3. Alphabet sizes: insert-and-copy length */
+const numLiteralSymbols = 256
+
+const numCommandSymbols = 704
+
+const numBlockLenSymbols = 26
+
+const maxContextMapSymbols = (maxNumberOfBlockTypes + contextMapMaxRle)
+
+const maxBlockTypeSymbols = (maxNumberOfBlockTypes + 2)
+
+/* Specification: 3.5. Complex prefix codes */
+const repeatPreviousCodeLength = 16
+
+const repeatZeroCodeLength = 17
+
+const codeLengthCodes = (repeatZeroCodeLength + 1)
+
+/* "code length of 8 is repeated" */
+const initialRepeatedCodeLength = 8
+
+/* "Large Window Brotli" */
+const largeMaxDistanceBits = 62
+
+const largeMinWbits = 10
+
+const largeMaxWbits = 30
+
+/* Specification: 4. Encoding of distances */
+const numDistanceShortCodes = 16
+
+const maxNpostfix = 3
+
+const maxNdirect = 120
+
+const maxDistanceBits = 24
+
+func distanceAlphabetSize(NPOSTFIX uint, NDIRECT uint, MAXNBITS uint) uint {
+ return numDistanceShortCodes + NDIRECT + uint(MAXNBITS<<(NPOSTFIX+1))
+}
+
+/* numDistanceSymbols == 1128 */
+const numDistanceSymbols = 1128
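+
+/* 1128 == distanceAlphabetSize(maxNpostfix, maxNdirect, largeMaxDistanceBits)
+ == 16 + 120 + (62 << (3 + 1)). */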
+
+const maxDistance = 0x3FFFFFC
+
+const maxAllowedDistance = 0x7FFFFFFC
+
+/* 7.1. Context modes and context ID lookup for literals */
+/* "context IDs for literals are in the range of 0..63" */
+const literalContextBits = 6
+
+/* 7.2. Context ID for distances */
+const distanceContextBits = 2
+
+/* 9.1. Format of the Stream Header */
+/* Number of slack bytes for window size. Don't confuse
+ with BROTLI_NUM_DISTANCE_SHORT_CODES. */
+const windowGap = 16
+
+func maxBackwardLimit(W uint) uint {
+ return (uint(1) << W) - windowGap
+}
diff --git a/vendor/github.com/andybalholm/brotli/context.go b/vendor/github.com/andybalholm/brotli/context.go
new file mode 100644
index 0000000..884ff8a
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/context.go
@@ -0,0 +1,2176 @@
+package brotli
+
+/* Lookup table to map the previous two bytes to a context id.
+
+There are four different context modeling modes defined here:
+ contextLSB6: context id is the least significant 6 bits of the last byte,
+ contextMSB6: context id is the most significant 6 bits of the last byte,
+ contextUTF8: second-order context model tuned for UTF8-encoded text,
+ contextSigned: second-order context model tuned for signed integers.
+
+If |p1| and |p2| are the previous two bytes, and |mode| is current context
+mode, we calculate the context as:
+
+ context = ContextLut(mode)[p1] | ContextLut(mode)[p2 + 256].
+
+For contextUTF8 mode, if the previous two bytes are ASCII characters
+(i.e. < 128), this will be equivalent to
+
+ context = 4 * context1(p1) + context2(p2),
+
+where context1 is based on the previous byte in the following way:
+
+ 0 : non-ASCII control
+ 1 : \t, \n, \r
+ 2 : space
+ 3 : other punctuation
+ 4 : " '
+ 5 : %
+ 6 : ( < [ {
+ 7 : ) > ] }
+ 8 : , ; :
+ 9 : .
+ 10 : =
+ 11 : number
+ 12 : upper-case vowel
+ 13 : upper-case consonant
+ 14 : lower-case vowel
+ 15 : lower-case consonant
+
+and context2 is based on the second last byte:
+
+ 0 : control, space
+ 1 : punctuation
+ 2 : upper-case letter, number
+ 3 : lower-case letter
+
+If the last byte is ASCII, and the second last byte is not (in a valid UTF8
+stream it will be a continuation byte, value between 128 and 191), the
+context is the same as if the second last byte was an ASCII control or space.
+
+If the last byte is a UTF8 lead byte (value >= 192), then the next byte will
+be a continuation byte and the context id is 2 or 3 depending on the LSB of
+the last byte and to a lesser extent on the second last byte if it is ASCII.
+
+If the last byte is a UTF8 continuation byte, the second last byte can be:
+ - continuation byte: the next byte is probably ASCII or lead byte (assuming
+ 4-byte UTF8 characters are rare) and the context id is 0 or 1.
+ - lead byte (192 - 207): next byte is ASCII or lead byte, context is 0 or 1
+ - lead byte (208 - 255): next byte is continuation byte, context is 2 or 3
+
+The possible value combinations of the previous two bytes, the range of
+context ids and the type of the next byte is summarized in the table below:
+
+|--------\-----------------------------------------------------------------|
+| \ Last byte |
+| Second \---------------------------------------------------------------|
+| last byte \ ASCII | cont. byte | lead byte |
+| \ (0-127) | (128-191) | (192-) |
+|=============|===================|=====================|==================|
+| ASCII | next: ASCII/lead | not valid | next: cont. |
+| (0-127) | context: 4 - 63 | | context: 2 - 3 |
+|-------------|-------------------|---------------------|------------------|
+| cont. byte | next: ASCII/lead | next: ASCII/lead | next: cont. |
+| (128-191) | context: 4 - 63 | context: 0 - 1 | context: 2 - 3 |
+|-------------|-------------------|---------------------|------------------|
+| lead byte | not valid | next: ASCII/lead | not valid |
+| (192-207) | | context: 0 - 1 | |
+|-------------|-------------------|---------------------|------------------|
+| lead byte | not valid | next: cont. | not valid |
+| (208-) | | context: 2 - 3 | |
+|-------------|-------------------|---------------------|------------------|
+*/
+
+const (
+ contextLSB6 = 0
+ contextMSB6 = 1
+ contextUTF8 = 2
+ contextSigned = 3
+)
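+
+/* Worked example: in contextUTF8 mode with p1 == 'e' (lower-case vowel,
+ context1 == 14) and p2 == ' ' (control/space, context2 == 0), both ASCII,
+ the context id is 4*14 + 0 == 56. */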
+
+/* Common context lookup table for all context modes. */
+var kContextLookup = [2048]byte{
+ /* CONTEXT_LSB6, last byte. */
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 30,
+ 31,
+ 32,
+ 33,
+ 34,
+ 35,
+ 36,
+ 37,
+ 38,
+ 39,
+ 40,
+ 41,
+ 42,
+ 43,
+ 44,
+ 45,
+ 46,
+ 47,
+ 48,
+ 49,
+ 50,
+ 51,
+ 52,
+ 53,
+ 54,
+ 55,
+ 56,
+ 57,
+ 58,
+ 59,
+ 60,
+ 61,
+ 62,
+ 63,
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 30,
+ 31,
+ 32,
+ 33,
+ 34,
+ 35,
+ 36,
+ 37,
+ 38,
+ 39,
+ 40,
+ 41,
+ 42,
+ 43,
+ 44,
+ 45,
+ 46,
+ 47,
+ 48,
+ 49,
+ 50,
+ 51,
+ 52,
+ 53,
+ 54,
+ 55,
+ 56,
+ 57,
+ 58,
+ 59,
+ 60,
+ 61,
+ 62,
+ 63,
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 30,
+ 31,
+ 32,
+ 33,
+ 34,
+ 35,
+ 36,
+ 37,
+ 38,
+ 39,
+ 40,
+ 41,
+ 42,
+ 43,
+ 44,
+ 45,
+ 46,
+ 47,
+ 48,
+ 49,
+ 50,
+ 51,
+ 52,
+ 53,
+ 54,
+ 55,
+ 56,
+ 57,
+ 58,
+ 59,
+ 60,
+ 61,
+ 62,
+ 63,
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 30,
+ 31,
+ 32,
+ 33,
+ 34,
+ 35,
+ 36,
+ 37,
+ 38,
+ 39,
+ 40,
+ 41,
+ 42,
+ 43,
+ 44,
+ 45,
+ 46,
+ 47,
+ 48,
+ 49,
+ 50,
+ 51,
+ 52,
+ 53,
+ 54,
+ 55,
+ 56,
+ 57,
+ 58,
+ 59,
+ 60,
+ 61,
+ 62,
+ 63,
+
+ /* CONTEXT_LSB6, second last byte, */
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+
+ /* CONTEXT_MSB6, last byte. */
+ 0,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 1,
+ 1,
+ 2,
+ 2,
+ 2,
+ 2,
+ 3,
+ 3,
+ 3,
+ 3,
+ 4,
+ 4,
+ 4,
+ 4,
+ 5,
+ 5,
+ 5,
+ 5,
+ 6,
+ 6,
+ 6,
+ 6,
+ 7,
+ 7,
+ 7,
+ 7,
+ 8,
+ 8,
+ 8,
+ 8,
+ 9,
+ 9,
+ 9,
+ 9,
+ 10,
+ 10,
+ 10,
+ 10,
+ 11,
+ 11,
+ 11,
+ 11,
+ 12,
+ 12,
+ 12,
+ 12,
+ 13,
+ 13,
+ 13,
+ 13,
+ 14,
+ 14,
+ 14,
+ 14,
+ 15,
+ 15,
+ 15,
+ 15,
+ 16,
+ 16,
+ 16,
+ 16,
+ 17,
+ 17,
+ 17,
+ 17,
+ 18,
+ 18,
+ 18,
+ 18,
+ 19,
+ 19,
+ 19,
+ 19,
+ 20,
+ 20,
+ 20,
+ 20,
+ 21,
+ 21,
+ 21,
+ 21,
+ 22,
+ 22,
+ 22,
+ 22,
+ 23,
+ 23,
+ 23,
+ 23,
+ 24,
+ 24,
+ 24,
+ 24,
+ 25,
+ 25,
+ 25,
+ 25,
+ 26,
+ 26,
+ 26,
+ 26,
+ 27,
+ 27,
+ 27,
+ 27,
+ 28,
+ 28,
+ 28,
+ 28,
+ 29,
+ 29,
+ 29,
+ 29,
+ 30,
+ 30,
+ 30,
+ 30,
+ 31,
+ 31,
+ 31,
+ 31,
+ 32,
+ 32,
+ 32,
+ 32,
+ 33,
+ 33,
+ 33,
+ 33,
+ 34,
+ 34,
+ 34,
+ 34,
+ 35,
+ 35,
+ 35,
+ 35,
+ 36,
+ 36,
+ 36,
+ 36,
+ 37,
+ 37,
+ 37,
+ 37,
+ 38,
+ 38,
+ 38,
+ 38,
+ 39,
+ 39,
+ 39,
+ 39,
+ 40,
+ 40,
+ 40,
+ 40,
+ 41,
+ 41,
+ 41,
+ 41,
+ 42,
+ 42,
+ 42,
+ 42,
+ 43,
+ 43,
+ 43,
+ 43,
+ 44,
+ 44,
+ 44,
+ 44,
+ 45,
+ 45,
+ 45,
+ 45,
+ 46,
+ 46,
+ 46,
+ 46,
+ 47,
+ 47,
+ 47,
+ 47,
+ 48,
+ 48,
+ 48,
+ 48,
+ 49,
+ 49,
+ 49,
+ 49,
+ 50,
+ 50,
+ 50,
+ 50,
+ 51,
+ 51,
+ 51,
+ 51,
+ 52,
+ 52,
+ 52,
+ 52,
+ 53,
+ 53,
+ 53,
+ 53,
+ 54,
+ 54,
+ 54,
+ 54,
+ 55,
+ 55,
+ 55,
+ 55,
+ 56,
+ 56,
+ 56,
+ 56,
+ 57,
+ 57,
+ 57,
+ 57,
+ 58,
+ 58,
+ 58,
+ 58,
+ 59,
+ 59,
+ 59,
+ 59,
+ 60,
+ 60,
+ 60,
+ 60,
+ 61,
+ 61,
+ 61,
+ 61,
+ 62,
+ 62,
+ 62,
+ 62,
+ 63,
+ 63,
+ 63,
+ 63,
+
+ /* CONTEXT_MSB6, second last byte, */
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+
+ /* CONTEXT_UTF8, last byte. */
+ /* ASCII range. */
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4,
+ 4,
+ 0,
+ 0,
+ 4,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8,
+ 12,
+ 16,
+ 12,
+ 12,
+ 20,
+ 12,
+ 16,
+ 24,
+ 28,
+ 12,
+ 12,
+ 32,
+ 12,
+ 36,
+ 12,
+ 44,
+ 44,
+ 44,
+ 44,
+ 44,
+ 44,
+ 44,
+ 44,
+ 44,
+ 44,
+ 32,
+ 32,
+ 24,
+ 40,
+ 28,
+ 12,
+ 12,
+ 48,
+ 52,
+ 52,
+ 52,
+ 48,
+ 52,
+ 52,
+ 52,
+ 48,
+ 52,
+ 52,
+ 52,
+ 52,
+ 52,
+ 48,
+ 52,
+ 52,
+ 52,
+ 52,
+ 52,
+ 48,
+ 52,
+ 52,
+ 52,
+ 52,
+ 52,
+ 24,
+ 12,
+ 28,
+ 12,
+ 12,
+ 12,
+ 56,
+ 60,
+ 60,
+ 60,
+ 56,
+ 60,
+ 60,
+ 60,
+ 56,
+ 60,
+ 60,
+ 60,
+ 60,
+ 60,
+ 56,
+ 60,
+ 60,
+ 60,
+ 60,
+ 60,
+ 56,
+ 60,
+ 60,
+ 60,
+ 60,
+ 60,
+ 24,
+ 12,
+ 28,
+ 12,
+ 0,
+
+ /* UTF8 continuation byte range. */
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+
+ /* UTF8 lead byte range. */
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+ 2,
+ 3,
+
+ /* CONTEXT_UTF8 second last byte. */
+ /* ASCII range. */
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 1,
+ 1,
+ 1,
+ 1,
+ 0,
+
+ /* UTF8 continuation byte range. */
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+
+ /* UTF8 lead byte range. */
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+
+ /* CONTEXT_SIGNED, last byte, same as the above values shifted by 3 bits. */
+ 0,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 8,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 16,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 32,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 40,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 48,
+ 56,
+
+ /* CONTEXT_SIGNED, second last byte. */
+ 0,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 3,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 5,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 7,
+}
+
+type contextLUT []byte
+
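+/* Each context mode owns 512 consecutive bytes of kContextLookup (hence
+   mode << 9): the first 256 entries map p1 and the next 256 map p2. */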
+func getContextLUT(mode int) contextLUT {
+ return kContextLookup[mode<<9:]
+}
+
+func getContext(p1 byte, p2 byte, lut contextLUT) byte {
+ return lut[p1] | lut[256+int(p2)]
+}
diff --git a/vendor/github.com/andybalholm/brotli/decode.go b/vendor/github.com/andybalholm/brotli/decode.go
new file mode 100644
index 0000000..9d9513b
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/decode.go
@@ -0,0 +1,2581 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+const (
+ decoderResultError = 0
+ decoderResultSuccess = 1
+ decoderResultNeedsMoreInput = 2
+ decoderResultNeedsMoreOutput = 3
+)
+
+/**
+ * Error code for detailed logging / production debugging.
+ *
+ * See ::BrotliDecoderGetErrorCode and ::BROTLI_LAST_ERROR_CODE.
+ */
+const (
+ decoderNoError = 0
+ decoderSuccess = 1
+ decoderNeedsMoreInput = 2
+ decoderNeedsMoreOutput = 3
+ decoderErrorFormatExuberantNibble = -1
+ decoderErrorFormatReserved = -2
+ decoderErrorFormatExuberantMetaNibble = -3
+ decoderErrorFormatSimpleHuffmanAlphabet = -4
+ decoderErrorFormatSimpleHuffmanSame = -5
+ decoderErrorFormatClSpace = -6
+ decoderErrorFormatHuffmanSpace = -7
+ decoderErrorFormatContextMapRepeat = -8
+ decoderErrorFormatBlockLength1 = -9
+ decoderErrorFormatBlockLength2 = -10
+ decoderErrorFormatTransform = -11
+ decoderErrorFormatDictionary = -12
+ decoderErrorFormatWindowBits = -13
+ decoderErrorFormatPadding1 = -14
+ decoderErrorFormatPadding2 = -15
+ decoderErrorFormatDistance = -16
+ decoderErrorDictionaryNotSet = -19
+ decoderErrorInvalidArguments = -20
+ decoderErrorAllocContextModes = -21
+ decoderErrorAllocTreeGroups = -22
+ decoderErrorAllocContextMap = -25
+ decoderErrorAllocRingBuffer1 = -26
+ decoderErrorAllocRingBuffer2 = -27
+ decoderErrorAllocBlockTypeTrees = -30
+ decoderErrorUnreachable = -31
+)
+
+const huffmanTableBits = 8
+
+const huffmanTableMask = 0xFF
+
+/* We need the slack region for the following reasons:
+ - doing up to two 16-byte copies for fast backward copying
+ - inserting transformed dictionary word (5 prefix + 24 base + 8 suffix) */
+const kRingBufferWriteAheadSlack uint32 = 42
+
+var kCodeLengthCodeOrder = [codeLengthCodes]byte{1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15}
+
+/* Static prefix code for the complex code length code lengths. */
+var kCodeLengthPrefixLength = [16]byte{2, 2, 2, 3, 2, 2, 2, 4, 2, 2, 2, 3, 2, 2, 2, 4}
+
+var kCodeLengthPrefixValue = [16]byte{0, 4, 3, 2, 0, 4, 3, 1, 0, 4, 3, 2, 0, 4, 3, 5}
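+
+/* Usage sketch for the two tables above: peek 4 bits (LSB-first) as ix; the
+   low kCodeLengthPrefixLength[ix] bits are the code actually consumed and
+   kCodeLengthPrefixValue[ix] is the decoded code length. E.g. any ix whose
+   low two bits are "00" decodes to length 0 and consumes 2 bits. */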
+
+/* Saves error code and converts it to BrotliDecoderResult. */
+func saveErrorCode(s *Reader, e int) int {
+ s.error_code = int(e)
+ switch e {
+ case decoderSuccess:
+ return decoderResultSuccess
+
+ case decoderNeedsMoreInput:
+ return decoderResultNeedsMoreInput
+
+ case decoderNeedsMoreOutput:
+ return decoderResultNeedsMoreOutput
+
+ default:
+ return decoderResultError
+ }
+}
+
+/* Decodes WBITS by reading 1 - 7 bits, or 0x11 for "Large Window Brotli".
+ Precondition: bit-reader accumulator has at least 8 bits. */
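+/* Sketch of the encoding handled below (bits read LSB-first): a single "0"
+   bit means WBITS 16; "1" followed by three non-zero bits nnn means
+   17 + nnn (18..24); "1 000" followed by three bits mmm means 8 + mmm
+   (10..15) for mmm >= 2, large-window signalling for mmm == 1, and
+   WBITS 17 for mmm == 0. */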
+func decodeWindowBits(s *Reader, br *bitReader) int {
+ var n uint32
+ var large_window bool = s.large_window
+ s.large_window = false
+ takeBits(br, 1, &n)
+ if n == 0 {
+ s.window_bits = 16
+ return decoderSuccess
+ }
+
+ takeBits(br, 3, &n)
+ if n != 0 {
+ s.window_bits = 17 + n
+ return decoderSuccess
+ }
+
+ takeBits(br, 3, &n)
+ if n == 1 {
+ if large_window {
+ takeBits(br, 1, &n)
+ if n == 1 {
+ return decoderErrorFormatWindowBits
+ }
+
+ s.large_window = true
+ return decoderSuccess
+ } else {
+ return decoderErrorFormatWindowBits
+ }
+ }
+
+ if n != 0 {
+ s.window_bits = 8 + n
+ return decoderSuccess
+ }
+
+ s.window_bits = 17
+ return decoderSuccess
+}
+
+/* Decodes a number in the range [0..255], by reading 1 - 11 bits. */
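+/* Encoding sketch: "0" decodes to 0; "1 000" decodes to 1; otherwise
+   "1 nnn" (nnn = 1..7) is followed by nnn extra bits b and decodes to
+   (1 << nnn) + b, covering 2..255. */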
+func decodeVarLenUint8(s *Reader, br *bitReader, value *uint32) int {
+ var bits uint32
+ switch s.substate_decode_uint8 {
+ case stateDecodeUint8None:
+ if !safeReadBits(br, 1, &bits) {
+ return decoderNeedsMoreInput
+ }
+
+ if bits == 0 {
+ *value = 0
+ return decoderSuccess
+ }
+ fallthrough
+
+ /* Fall through. */
+ case stateDecodeUint8Short:
+ if !safeReadBits(br, 3, &bits) {
+ s.substate_decode_uint8 = stateDecodeUint8Short
+ return decoderNeedsMoreInput
+ }
+
+ if bits == 0 {
+ *value = 1
+ s.substate_decode_uint8 = stateDecodeUint8None
+ return decoderSuccess
+ }
+
+ /* Use output value as a temporary storage. It MUST be persisted. */
+ *value = bits
+ fallthrough
+
+ /* Fall through. */
+ case stateDecodeUint8Long:
+ if !safeReadBits(br, *value, &bits) {
+ s.substate_decode_uint8 = stateDecodeUint8Long
+ return decoderNeedsMoreInput
+ }
+
+ *value = (1 << *value) + bits
+ s.substate_decode_uint8 = stateDecodeUint8None
+ return decoderSuccess
+
+ default:
+ return decoderErrorUnreachable
+ }
+}
+
+/* Decodes a metablock length and flags by reading 2 - 31 bits. */
+func decodeMetaBlockLength(s *Reader, br *bitReader) int {
+ var bits uint32
+ var i int
+ for {
+ switch s.substate_metablock_header {
+ case stateMetablockHeaderNone:
+ if !safeReadBits(br, 1, &bits) {
+ return decoderNeedsMoreInput
+ }
+
+ if bits != 0 {
+ s.is_last_metablock = 1
+ } else {
+ s.is_last_metablock = 0
+ }
+ s.meta_block_remaining_len = 0
+ s.is_uncompressed = 0
+ s.is_metadata = 0
+ if s.is_last_metablock == 0 {
+ s.substate_metablock_header = stateMetablockHeaderNibbles
+ break
+ }
+
+ s.substate_metablock_header = stateMetablockHeaderEmpty
+ fallthrough
+
+ /* Fall through. */
+ case stateMetablockHeaderEmpty:
+ if !safeReadBits(br, 1, &bits) {
+ return decoderNeedsMoreInput
+ }
+
+ if bits != 0 {
+ s.substate_metablock_header = stateMetablockHeaderNone
+ return decoderSuccess
+ }
+
+ s.substate_metablock_header = stateMetablockHeaderNibbles
+ fallthrough
+
+ /* Fall through. */
+ case stateMetablockHeaderNibbles:
+ if !safeReadBits(br, 2, &bits) {
+ return decoderNeedsMoreInput
+ }
+
+ s.size_nibbles = uint(byte(bits + 4))
+ s.loop_counter = 0
+ if bits == 3 {
+ s.is_metadata = 1
+ s.substate_metablock_header = stateMetablockHeaderReserved
+ break
+ }
+
+ s.substate_metablock_header = stateMetablockHeaderSize
+ fallthrough
+
+ /* Fall through. */
+ case stateMetablockHeaderSize:
+ i = s.loop_counter
+
+ for ; i < int(s.size_nibbles); i++ {
+ if !safeReadBits(br, 4, &bits) {
+ s.loop_counter = i
+ return decoderNeedsMoreInput
+ }
+
+ if uint(i+1) == s.size_nibbles && s.size_nibbles > 4 && bits == 0 {
+ return decoderErrorFormatExuberantNibble
+ }
+
+ s.meta_block_remaining_len |= int(bits << uint(i*4))
+ }
+
+ s.substate_metablock_header = stateMetablockHeaderUncompressed
+ fallthrough
+
+ /* Fall through. */
+ case stateMetablockHeaderUncompressed:
+ if s.is_last_metablock == 0 {
+ if !safeReadBits(br, 1, &bits) {
+ return decoderNeedsMoreInput
+ }
+
+ if bits != 0 {
+ s.is_uncompressed = 1
+ } else {
+ s.is_uncompressed = 0
+ }
+ }
+
+ s.meta_block_remaining_len++
+ s.substate_metablock_header = stateMetablockHeaderNone
+ return decoderSuccess
+
+ case stateMetablockHeaderReserved:
+ if !safeReadBits(br, 1, &bits) {
+ return decoderNeedsMoreInput
+ }
+
+ if bits != 0 {
+ return decoderErrorFormatReserved
+ }
+
+ s.substate_metablock_header = stateMetablockHeaderBytes
+ fallthrough
+
+ /* Fall through. */
+ case stateMetablockHeaderBytes:
+ if !safeReadBits(br, 2, &bits) {
+ return decoderNeedsMoreInput
+ }
+
+ if bits == 0 {
+ s.substate_metablock_header = stateMetablockHeaderNone
+ return decoderSuccess
+ }
+
+ s.size_nibbles = uint(byte(bits))
+ s.substate_metablock_header = stateMetablockHeaderMetadata
+ fallthrough
+
+ /* Fall through. */
+ case stateMetablockHeaderMetadata:
+ i = s.loop_counter
+
+ for ; i < int(s.size_nibbles); i++ {
+ if !safeReadBits(br, 8, &bits) {
+ s.loop_counter = i
+ return decoderNeedsMoreInput
+ }
+
+ if uint(i+1) == s.size_nibbles && s.size_nibbles > 1 && bits == 0 {
+ return decoderErrorFormatExuberantMetaNibble
+ }
+
+ s.meta_block_remaining_len |= int(bits << uint(i*8))
+ }
+
+ s.meta_block_remaining_len++
+ s.substate_metablock_header = stateMetablockHeaderNone
+ return decoderSuccess
+
+ default:
+ return decoderErrorUnreachable
+ }
+ }
+}
+
+/* Decodes the Huffman code.
+ This method doesn't read data from the bit reader, BUT drops the amount of
+ bits that correspond to the decoded symbol.
+ bits MUST contain at least 15 (BROTLI_HUFFMAN_MAX_CODE_LENGTH) valid bits. */
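+/* Table layout sketch: the first 1 << huffmanTableBits entries form the
+   root table; a root entry with bits > huffmanTableBits is a link whose
+   value field is the offset, relative to that entry, of a second-level
+   table indexed by the remaining (bits - huffmanTableBits) input bits. */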
+func decodeSymbol(bits uint32, table []huffmanCode, br *bitReader) uint32 {
+ table = table[bits&huffmanTableMask:]
+ if table[0].bits > huffmanTableBits {
+ var nbits uint32 = uint32(table[0].bits) - huffmanTableBits
+ dropBits(br, huffmanTableBits)
+ table = table[uint32(table[0].value)+((bits>>huffmanTableBits)&bitMask(nbits)):]
+ }
+
+ dropBits(br, uint32(table[0].bits))
+ return uint32(table[0].value)
+}
+
+/* Reads and decodes the next Huffman code from bit-stream.
+ This method peeks 16 bits of input and drops 0 - 15 of them. */
+func readSymbol(table []huffmanCode, br *bitReader) uint32 {
+ return decodeSymbol(get16BitsUnmasked(br), table, br)
+}
+
+/* Same as DecodeSymbol, but it is known that less than 15 bits of input
+   are currently available. */
+func safeDecodeSymbol(table []huffmanCode, br *bitReader, result *uint32) bool {
+ var val uint32
+ var available_bits uint32 = getAvailableBits(br)
+ if available_bits == 0 {
+ if table[0].bits == 0 {
+ *result = uint32(table[0].value)
+ return true
+ }
+
+ return false /* No valid bits at all. */
+ }
+
+ val = uint32(getBitsUnmasked(br))
+ table = table[val&huffmanTableMask:]
+ if table[0].bits <= huffmanTableBits {
+ if uint32(table[0].bits) <= available_bits {
+ dropBits(br, uint32(table[0].bits))
+ *result = uint32(table[0].value)
+ return true
+ } else {
+ return false /* Not enough bits for the first level. */
+ }
+ }
+
+ if available_bits <= huffmanTableBits {
+ return false /* Not enough bits to move to the second level. */
+ }
+
+ /* Speculatively drop HUFFMAN_TABLE_BITS. */
+ val = (val & bitMask(uint32(table[0].bits))) >> huffmanTableBits
+
+ available_bits -= huffmanTableBits
+ table = table[uint32(table[0].value)+val:]
+ if available_bits < uint32(table[0].bits) {
+ return false /* Not enough bits for the second level. */
+ }
+
+ dropBits(br, huffmanTableBits+uint32(table[0].bits))
+ *result = uint32(table[0].value)
+ return true
+}
+
+func safeReadSymbol(table []huffmanCode, br *bitReader, result *uint32) bool {
+ var val uint32
+ if safeGetBits(br, 15, &val) {
+ *result = decodeSymbol(val, table, br)
+ return true
+ }
+
+ return safeDecodeSymbol(table, br, result)
+}
+
+/* Makes a look-up in first level Huffman table. Peeks 8 bits. */
+func preloadSymbol(safe int, table []huffmanCode, br *bitReader, bits *uint32, value *uint32) {
+ if safe != 0 {
+ return
+ }
+
+ table = table[getBits(br, huffmanTableBits):]
+ *bits = uint32(table[0].bits)
+ *value = uint32(table[0].value)
+}
+
+/* Decodes the next Huffman code using data prepared by PreloadSymbol.
+ Reads 0 - 15 bits. Also peeks 8 following bits. */
+func readPreloadedSymbol(table []huffmanCode, br *bitReader, bits *uint32, value *uint32) uint32 {
+ var result uint32 = *value
+ var ext []huffmanCode
+ if *bits > huffmanTableBits {
+ var val uint32 = get16BitsUnmasked(br)
+ ext = table[val&huffmanTableMask:][*value:]
+ var mask uint32 = bitMask((*bits - huffmanTableBits))
+ dropBits(br, huffmanTableBits)
+ ext = ext[(val>>huffmanTableBits)&mask:]
+ dropBits(br, uint32(ext[0].bits))
+ result = uint32(ext[0].value)
+ } else {
+ dropBits(br, *bits)
+ }
+
+ preloadSymbol(0, table, br, bits, value)
+ return result
+}
+
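+/* Despite the name, this returns the bit length of x: floor(log2(x)) + 1
+   for x > 0, and 0 for x == 0; e.g. log2Floor(7) == 3, log2Floor(8) == 4. */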
+func log2Floor(x uint32) uint32 {
+ var result uint32 = 0
+ for x != 0 {
+ x >>= 1
+ result++
+ }
+
+ return result
+}
+
+/* Reads (s->symbol + 1) symbols.
+ In total, 1..4 symbols are read, 1..11 bits each.
+ The list of symbols MUST NOT contain duplicates. */
+func readSimpleHuffmanSymbols(alphabet_size uint32, max_symbol uint32, s *Reader) int {
+ var br *bitReader = &s.br
+ var max_bits uint32 = log2Floor(alphabet_size - 1)
+ var i uint32 = s.sub_loop_counter
+ /* max_bits == 1..11; symbol == 0..3; 1..44 bits will be read. */
+
+ var num_symbols uint32 = s.symbol
+ for i <= num_symbols {
+ var v uint32
+ if !safeReadBits(br, max_bits, &v) {
+ s.sub_loop_counter = i
+ s.substate_huffman = stateHuffmanSimpleRead
+ return decoderNeedsMoreInput
+ }
+
+ if v >= max_symbol {
+ return decoderErrorFormatSimpleHuffmanAlphabet
+ }
+
+ s.symbols_lists_array[i] = uint16(v)
+ i++
+ }
+
+ for i = 0; i < num_symbols; i++ {
+ var k uint32 = i + 1
+ for ; k <= num_symbols; k++ {
+ if s.symbols_lists_array[i] == s.symbols_lists_array[k] {
+ return decoderErrorFormatSimpleHuffmanSame
+ }
+ }
+ }
+
+ return decoderSuccess
+}
+
+/* Process single decoded symbol code length:
+ A) reset the repeat variable
+ B) remember code length (if it is not 0)
+ C) extend corresponding index-chain
+ D) reduce the Huffman space
+ E) update the histogram */
+func processSingleCodeLength(code_len uint32, symbol *uint32, repeat *uint32, space *uint32, prev_code_len *uint32, symbol_lists symbolList, code_length_histo []uint16, next_symbol []int) {
+ *repeat = 0
+ if code_len != 0 { /* code_len == 1..15 */
+ symbolListPut(symbol_lists, next_symbol[code_len], uint16(*symbol))
+ next_symbol[code_len] = int(*symbol)
+ *prev_code_len = code_len
+ *space -= 32768 >> code_len
+ code_length_histo[code_len]++
+ }
+
+ (*symbol)++
+}
+
+/* Process repeated symbol code length.
+ A) Check if it is the extension of previous repeat sequence; if the decoded
+ value is not BROTLI_REPEAT_PREVIOUS_CODE_LENGTH, then it is a new
+ symbol-skip
+ B) Update repeat variable
+ C) Check if operation is feasible (fits alphabet)
+ D) For each symbol do the same operations as in ProcessSingleCodeLength
+
+ PRECONDITION: code_len == BROTLI_REPEAT_PREVIOUS_CODE_LENGTH or
+ code_len == BROTLI_REPEAT_ZERO_CODE_LENGTH */
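+/* A worked sketch of the update rule below for code 16 (extra_bits == 2):
+   a first "16" with extra bits 3 sets repeat = 3 + 3 = 6 and emits 6 copies
+   of the previous code length; an immediately following "16" with extra
+   bits 0 extends repeat to (6-2)*4 + 3 = 19, emitting 13 more. */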
+func processRepeatedCodeLength(code_len uint32, repeat_delta uint32, alphabet_size uint32, symbol *uint32, repeat *uint32, space *uint32, prev_code_len *uint32, repeat_code_len *uint32, symbol_lists symbolList, code_length_histo []uint16, next_symbol []int) {
+ var old_repeat uint32
+ var extra_bits uint32 = 3 /* for BROTLI_REPEAT_ZERO_CODE_LENGTH */
+ var new_len uint32 = 0 /* for BROTLI_REPEAT_ZERO_CODE_LENGTH */
+ if code_len == repeatPreviousCodeLength {
+ new_len = *prev_code_len
+ extra_bits = 2
+ }
+
+ if *repeat_code_len != new_len {
+ *repeat = 0
+ *repeat_code_len = new_len
+ }
+
+ old_repeat = *repeat
+ if *repeat > 0 {
+ *repeat -= 2
+ *repeat <<= extra_bits
+ }
+
+ *repeat += repeat_delta + 3
+ repeat_delta = *repeat - old_repeat
+ if *symbol+repeat_delta > alphabet_size {
+ *symbol = alphabet_size
+ *space = 0xFFFFF
+ return
+ }
+
+ if *repeat_code_len != 0 {
+ var last uint = uint(*symbol + repeat_delta)
+ var next int = next_symbol[*repeat_code_len]
+ for {
+ symbolListPut(symbol_lists, next, uint16(*symbol))
+ next = int(*symbol)
+ (*symbol)++
+ if (*symbol) == uint32(last) {
+ break
+ }
+ }
+
+ next_symbol[*repeat_code_len] = next
+ *space -= repeat_delta << (15 - *repeat_code_len)
+ code_length_histo[*repeat_code_len] = uint16(uint32(code_length_histo[*repeat_code_len]) + repeat_delta)
+ } else {
+ *symbol += repeat_delta
+ }
+}
+
+/* Reads and decodes symbol codelengths. */
+func readSymbolCodeLengths(alphabet_size uint32, s *Reader) int {
+ var br *bitReader = &s.br
+ var symbol uint32 = s.symbol
+ var repeat uint32 = s.repeat
+ var space uint32 = s.space
+ var prev_code_len uint32 = s.prev_code_len
+ var repeat_code_len uint32 = s.repeat_code_len
+ var symbol_lists symbolList = s.symbol_lists
+ var code_length_histo []uint16 = s.code_length_histo[:]
+ var next_symbol []int = s.next_symbol[:]
+ if !warmupBitReader(br) {
+ return decoderNeedsMoreInput
+ }
+ var p []huffmanCode
+ for symbol < alphabet_size && space > 0 {
+ p = s.table[:]
+ var code_len uint32
+ if !checkInputAmount(br, shortFillBitWindowRead) {
+ s.symbol = symbol
+ s.repeat = repeat
+ s.prev_code_len = prev_code_len
+ s.repeat_code_len = repeat_code_len
+ s.space = space
+ return decoderNeedsMoreInput
+ }
+
+ fillBitWindow16(br)
+ p = p[getBitsUnmasked(br)&uint64(bitMask(huffmanMaxCodeLengthCodeLength)):]
+ dropBits(br, uint32(p[0].bits)) /* Use 1..5 bits. */
+ code_len = uint32(p[0].value) /* code_len == 0..17 */
+ if code_len < repeatPreviousCodeLength {
+ processSingleCodeLength(code_len, &symbol, &repeat, &space, &prev_code_len, symbol_lists, code_length_histo, next_symbol)
+ } else { /* code_len == 16..17, extra_bits == 2..3 */
+ var extra_bits uint32
+ if code_len == repeatPreviousCodeLength {
+ extra_bits = 2
+ } else {
+ extra_bits = 3
+ }
+ var repeat_delta uint32 = uint32(getBitsUnmasked(br)) & bitMask(extra_bits)
+ dropBits(br, extra_bits)
+ processRepeatedCodeLength(code_len, repeat_delta, alphabet_size, &symbol, &repeat, &space, &prev_code_len, &repeat_code_len, symbol_lists, code_length_histo, next_symbol)
+ }
+ }
+
+ s.space = space
+ return decoderSuccess
+}
+
+func safeReadSymbolCodeLengths(alphabet_size uint32, s *Reader) int {
+ var br *bitReader = &s.br
+ var get_byte bool = false
+ var p []huffmanCode
+ for s.symbol < alphabet_size && s.space > 0 {
+ p = s.table[:]
+ var code_len uint32
+ var available_bits uint32
+ var bits uint32 = 0
+ if get_byte && !pullByte(br) {
+ return decoderNeedsMoreInput
+ }
+ get_byte = false
+ available_bits = getAvailableBits(br)
+ if available_bits != 0 {
+ bits = uint32(getBitsUnmasked(br))
+ }
+
+ p = p[bits&bitMask(huffmanMaxCodeLengthCodeLength):]
+ if uint32(p[0].bits) > available_bits {
+ get_byte = true
+ continue
+ }
+
+ code_len = uint32(p[0].value) /* code_len == 0..17 */
+ if code_len < repeatPreviousCodeLength {
+ dropBits(br, uint32(p[0].bits))
+ processSingleCodeLength(code_len, &s.symbol, &s.repeat, &s.space, &s.prev_code_len, s.symbol_lists, s.code_length_histo[:], s.next_symbol[:])
+ } else { /* code_len == 16..17, extra_bits == 2..3 */
+ var extra_bits uint32 = code_len - 14
+ var repeat_delta uint32 = (bits >> p[0].bits) & bitMask(extra_bits)
+ if available_bits < uint32(p[0].bits)+extra_bits {
+ get_byte = true
+ continue
+ }
+
+ dropBits(br, uint32(p[0].bits)+extra_bits)
+ processRepeatedCodeLength(code_len, repeat_delta, alphabet_size, &s.symbol, &s.repeat, &s.space, &s.prev_code_len, &s.repeat_code_len, s.symbol_lists, s.code_length_histo[:], s.next_symbol[:])
+ }
+ }
+
+ return decoderSuccess
+}
+
+/* Reads and decodes 15..18 codes using static prefix code.
+ Each code is 2..4 bits long. In total 30..72 bits are used. */
+func readCodeLengthCodeLengths(s *Reader) int {
+ var br *bitReader = &s.br
+ var num_codes uint32 = s.repeat
+ var space uint32 = s.space
+ var i uint32 = s.sub_loop_counter
+ for ; i < codeLengthCodes; i++ {
+ var code_len_idx byte = kCodeLengthCodeOrder[i]
+ var ix uint32
+ var v uint32
+ if !safeGetBits(br, 4, &ix) {
+ var available_bits uint32 = getAvailableBits(br)
+ if available_bits != 0 {
+ ix = uint32(getBitsUnmasked(br) & 0xF)
+ } else {
+ ix = 0
+ }
+
+ if uint32(kCodeLengthPrefixLength[ix]) > available_bits {
+ s.sub_loop_counter = i
+ s.repeat = num_codes
+ s.space = space
+ s.substate_huffman = stateHuffmanComplex
+ return decoderNeedsMoreInput
+ }
+ }
+
+ v = uint32(kCodeLengthPrefixValue[ix])
+ dropBits(br, uint32(kCodeLengthPrefixLength[ix]))
+ s.code_length_code_lengths[code_len_idx] = byte(v)
+ if v != 0 {
+ space = space - (32 >> v)
+ num_codes++
+ s.code_length_histo[v]++
+ if space-1 >= 32 {
+ /* space is 0 or wrapped around. */
+ break
+ }
+ }
+ }
+
+ if num_codes != 1 && space != 0 {
+ return decoderErrorFormatClSpace
+ }
+
+ return decoderSuccess
+}
+
+/* Decodes the Huffman tables.
+ There are 2 scenarios:
+ A) Huffman code contains only a few symbols (1..4). Those symbols are read
+ directly; their code lengths are defined by the number of symbols.
+ For this scenario 4 - 49 bits will be read.
+
+ B) 2-phase decoding:
+ B.1) Small Huffman table is decoded; it is specified with code lengths
+ encoded with predefined entropy code. 32 - 74 bits are used.
+ B.2) Decoded table is used to decode code lengths of symbols in resulting
+ Huffman table. In worst case 3520 bits are read. */
+func readHuffmanCode(alphabet_size uint32, max_symbol uint32, table []huffmanCode, opt_table_size *uint32, s *Reader) int {
+ var br *bitReader = &s.br
+
+ /* Unnecessary masking, but might be good for safety. */
+ alphabet_size &= 0x7FF
+
+ /* State machine. */
+ for {
+ switch s.substate_huffman {
+ case stateHuffmanNone:
+ if !safeReadBits(br, 2, &s.sub_loop_counter) {
+ return decoderNeedsMoreInput
+ }
+
+ /* The value is used as follows:
+ 1 for simple code;
+ 0 for no skipping, 2 skips 2 code lengths, 3 skips 3 code lengths */
+ if s.sub_loop_counter != 1 {
+ s.space = 32
+ s.repeat = 0 /* num_codes */
+ var i int
+ for i = 0; i <= huffmanMaxCodeLengthCodeLength; i++ {
+ s.code_length_histo[i] = 0
+ }
+
+ for i = 0; i < codeLengthCodes; i++ {
+ s.code_length_code_lengths[i] = 0
+ }
+
+ s.substate_huffman = stateHuffmanComplex
+ continue
+ }
+ fallthrough
+
+ /* Read symbols, codes & code lengths directly. */
+ case stateHuffmanSimpleSize:
+ if !safeReadBits(br, 2, &s.symbol) { /* num_symbols */
+ s.substate_huffman = stateHuffmanSimpleSize
+ return decoderNeedsMoreInput
+ }
+
+ s.sub_loop_counter = 0
+ fallthrough
+
+ case stateHuffmanSimpleRead:
+ {
+ var result int = readSimpleHuffmanSymbols(alphabet_size, max_symbol, s)
+ if result != decoderSuccess {
+ return result
+ }
+ }
+ fallthrough
+
+ case stateHuffmanSimpleBuild:
+ var table_size uint32
+ if s.symbol == 3 {
+ var bits uint32
+ if !safeReadBits(br, 1, &bits) {
+ s.substate_huffman = stateHuffmanSimpleBuild
+ return decoderNeedsMoreInput
+ }
+
+ s.symbol += bits
+ }
+
+ table_size = buildSimpleHuffmanTable(table, huffmanTableBits, s.symbols_lists_array[:], s.symbol)
+ if opt_table_size != nil {
+ *opt_table_size = table_size
+ }
+
+ s.substate_huffman = stateHuffmanNone
+ return decoderSuccess
+
+ /* Decode Huffman-coded code lengths. */
+ case stateHuffmanComplex:
+ {
+ var i uint32
+ var result int = readCodeLengthCodeLengths(s)
+ if result != decoderSuccess {
+ return result
+ }
+
+ buildCodeLengthsHuffmanTable(s.table[:], s.code_length_code_lengths[:], s.code_length_histo[:])
+ for i = 0; i < 16; i++ {
+ s.code_length_histo[i] = 0
+ }
+
+ for i = 0; i <= huffmanMaxCodeLength; i++ {
+ s.next_symbol[i] = int(i) - (huffmanMaxCodeLength + 1)
+ symbolListPut(s.symbol_lists, s.next_symbol[i], 0xFFFF)
+ }
+
+ s.symbol = 0
+ s.prev_code_len = initialRepeatedCodeLength
+ s.repeat = 0
+ s.repeat_code_len = 0
+ s.space = 32768
+ s.substate_huffman = stateHuffmanLengthSymbols
+ }
+ fallthrough
+
+ case stateHuffmanLengthSymbols:
+ var table_size uint32
+ var result int = readSymbolCodeLengths(max_symbol, s)
+ if result == decoderNeedsMoreInput {
+ result = safeReadSymbolCodeLengths(max_symbol, s)
+ }
+
+ if result != decoderSuccess {
+ return result
+ }
+
+ if s.space != 0 {
+ return decoderErrorFormatHuffmanSpace
+ }
+
+ table_size = buildHuffmanTable(table, huffmanTableBits, s.symbol_lists, s.code_length_histo[:])
+ if opt_table_size != nil {
+ *opt_table_size = table_size
+ }
+
+ s.substate_huffman = stateHuffmanNone
+ return decoderSuccess
+
+ default:
+ return decoderErrorUnreachable
+ }
+ }
+}
+
+/* Decodes a block length by reading 3..39 bits. */
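+/* The decoded symbol selects (offset, nbits) from kBlockLengthPrefixCode;
+   the length is offset plus nbits extra bits, so each symbol covers a
+   contiguous range of 1 << nbits lengths. */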
+func readBlockLength(table []huffmanCode, br *bitReader) uint32 {
+ var code uint32
+ var nbits uint32
+ code = readSymbol(table, br)
+ nbits = kBlockLengthPrefixCode[code].nbits /* nbits == 2..24 */
+ return kBlockLengthPrefixCode[code].offset + readBits(br, nbits)
+}
+
+/* WARNING: if state is not BROTLI_STATE_READ_BLOCK_LENGTH_NONE, then
+ reading can't be continued with ReadBlockLength. */
+func safeReadBlockLength(s *Reader, result *uint32, table []huffmanCode, br *bitReader) bool {
+ var index uint32
+ if s.substate_read_block_length == stateReadBlockLengthNone {
+ if !safeReadSymbol(table, br, &index) {
+ return false
+ }
+ } else {
+ index = s.block_length_index
+ }
+ {
+ var bits uint32 /* nbits == 2..24 */
+ var nbits uint32 = kBlockLengthPrefixCode[index].nbits
+ if !safeReadBits(br, nbits, &bits) {
+ s.block_length_index = index
+ s.substate_read_block_length = stateReadBlockLengthSuffix
+ return false
+ }
+
+ *result = kBlockLengthPrefixCode[index].offset + bits
+ s.substate_read_block_length = stateReadBlockLengthNone
+ return true
+ }
+}
+
+/* Transform:
+ 1) initialize list L with values 0, 1,... 255
+ 2) For each input element X:
+ 2.1) let Y = L[X]
+ 2.2) remove X-th element from L
+ 2.3) prepend Y to L
+ 2.4) append Y to output
+
+ In most cases max(Y) <= 7, so most of L remains intact.
+ To reduce the cost of initialization, we reuse L, remember the upper bound
+ of Y values, and reinitialize only first elements in L.
+
+ Most of input values are 0 and 1. To reduce number of branches, we replace
+ inner for loop with do-while. */
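+/* Worked example: with L initially [0, 1, 2, ...], input [1, 0, 2] decodes
+   to [1, 1, 2]: the first 1 moves value 1 to the front (L = [1, 0, 2, ...]),
+   the 0 then re-reads value 1 from the front, and the 2 picks value 2. */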
+func inverseMoveToFrontTransform(v []byte, v_len uint32, state *Reader) {
+ var mtf [256]byte
+ var i int
+ for i = 1; i < 256; i++ {
+ mtf[i] = byte(i)
+ }
+ var mtf_1 byte
+
+ /* Transform the input. */
+ for i = 0; uint32(i) < v_len; i++ {
+ var index int = int(v[i])
+ var value byte = mtf[index]
+ v[i] = value
+ mtf_1 = value
+ for index >= 1 {
+ index--
+ mtf[index+1] = mtf[index]
+ }
+
+ mtf[0] = mtf_1
+ }
+}
+
+/* Decodes a series of Huffman table using ReadHuffmanCode function. */
+func huffmanTreeGroupDecode(group *huffmanTreeGroup, s *Reader) int {
+ if s.substate_tree_group != stateTreeGroupLoop {
+ s.next = group.codes
+ s.htree_index = 0
+ s.substate_tree_group = stateTreeGroupLoop
+ }
+
+ for s.htree_index < int(group.num_htrees) {
+ var table_size uint32
+ var result int = readHuffmanCode(uint32(group.alphabet_size), uint32(group.max_symbol), s.next, &table_size, s)
+ if result != decoderSuccess {
+ return result
+ }
+ group.htrees[s.htree_index] = s.next
+ s.next = s.next[table_size:]
+ s.htree_index++
+ }
+
+ s.substate_tree_group = stateTreeGroupNone
+ return decoderSuccess
+}
+
+/* Decodes a context map.
+ Decoding is done in 4 phases:
+ 1) Read auxiliary information (6..16 bits) and allocate memory.
+ In case of trivial context map, decoding is finished at this phase.
+ 2) Decode Huffman table using ReadHuffmanCode function.
+ This table will be used for reading context map items.
+ 3) Read context map items; "0" values could be run-length encoded.
+ 4) Optionally, apply InverseMoveToFront transform to the resulting map. */
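+/* RLE sketch for phase 3: a decoded symbol c in 1..max_run_length_prefix
+   emits a run of (1 << c) + reps zeros, where reps is read as c extra bits;
+   e.g. c == 2 with extra bits 1 emits 5 zeros. */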
+func decodeContextMap(context_map_size uint32, num_htrees *uint32, context_map_arg *[]byte, s *Reader) int {
+ var br *bitReader = &s.br
+ var result int = decoderSuccess
+
+ switch int(s.substate_context_map) {
+ case stateContextMapNone:
+ result = decodeVarLenUint8(s, br, num_htrees)
+ if result != decoderSuccess {
+ return result
+ }
+
+ (*num_htrees)++
+ s.context_index = 0
+ *context_map_arg = make([]byte, uint(context_map_size))
+ if *context_map_arg == nil {
+ return decoderErrorAllocContextMap
+ }
+
+ if *num_htrees <= 1 {
+ for i := 0; i < int(context_map_size); i++ {
+ (*context_map_arg)[i] = 0
+ }
+ return decoderSuccess
+ }
+
+ s.substate_context_map = stateContextMapReadPrefix
+ fallthrough
+ /* Fall through. */
+ case stateContextMapReadPrefix:
+ {
+ var bits uint32
+
+ /* In the next stage ReadHuffmanCode uses at least 4 bits, so it is safe
+ to peek 4 bits ahead. */
+ if !safeGetBits(br, 5, &bits) {
+ return decoderNeedsMoreInput
+ }
+
+ if bits&1 != 0 { /* Use RLE for zeros. */
+ s.max_run_length_prefix = (bits >> 1) + 1
+ dropBits(br, 5)
+ } else {
+ s.max_run_length_prefix = 0
+ dropBits(br, 1)
+ }
+
+ s.substate_context_map = stateContextMapHuffman
+ }
+ fallthrough
+
+ /* Fall through. */
+ case stateContextMapHuffman:
+ {
+ var alphabet_size uint32 = *num_htrees + s.max_run_length_prefix
+ result = readHuffmanCode(alphabet_size, alphabet_size, s.context_map_table[:], nil, s)
+ if result != decoderSuccess {
+ return result
+ }
+ s.code = 0xFFFF
+ s.substate_context_map = stateContextMapDecode
+ }
+ fallthrough
+
+ /* Fall through. */
+ case stateContextMapDecode:
+ {
+ var context_index uint32 = s.context_index
+ var max_run_length_prefix uint32 = s.max_run_length_prefix
+ var context_map []byte = *context_map_arg
+ var code uint32 = s.code
+ var skip_preamble bool = (code != 0xFFFF)
+ for context_index < context_map_size || skip_preamble {
+ if !skip_preamble {
+ if !safeReadSymbol(s.context_map_table[:], br, &code) {
+ s.code = 0xFFFF
+ s.context_index = context_index
+ return decoderNeedsMoreInput
+ }
+
+ if code == 0 {
+ context_map[context_index] = 0
+ context_index++
+ continue
+ }
+
+ if code > max_run_length_prefix {
+ context_map[context_index] = byte(code - max_run_length_prefix)
+ context_index++
+ continue
+ }
+ } else {
+ skip_preamble = false
+ }
+
+ /* RLE sub-stage. */
+ {
+ var reps uint32
+ if !safeReadBits(br, code, &reps) {
+ s.code = code
+ s.context_index = context_index
+ return decoderNeedsMoreInput
+ }
+
+ reps += 1 << code
+ if context_index+reps > context_map_size {
+ return decoderErrorFormatContextMapRepeat
+ }
+
+ for {
+ context_map[context_index] = 0
+ context_index++
+ reps--
+ if reps == 0 {
+ break
+ }
+ }
+ }
+ }
+ }
+ fallthrough
+
+ case stateContextMapTransform:
+ var bits uint32
+ if !safeReadBits(br, 1, &bits) {
+ s.substate_context_map = stateContextMapTransform
+ return decoderNeedsMoreInput
+ }
+
+ if bits != 0 {
+ inverseMoveToFrontTransform(*context_map_arg, context_map_size, s)
+ }
+
+ s.substate_context_map = stateContextMapNone
+ return decoderSuccess
+
+ default:
+ return decoderErrorUnreachable
+ }
+}
+
+/* Decodes a command or literal and updates block type ring-buffer.
+ Reads 3..54 bits. */
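+/* Block-type code semantics, per the ring-buffer update below: code 0
+   repeats the second-to-last block type, code 1 means "last type + 1", and
+   code c >= 2 selects type c - 2; the result wraps modulo the number of
+   block types. */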
+func decodeBlockTypeAndLength(safe int, s *Reader, tree_type int) bool {
+ var max_block_type uint32 = s.num_block_types[tree_type]
+ type_tree := s.block_type_trees[tree_type*huffmanMaxSize258:]
+ len_tree := s.block_len_trees[tree_type*huffmanMaxSize26:]
+ var br *bitReader = &s.br
+ var ringbuffer []uint32 = s.block_type_rb[tree_type*2:]
+ var block_type uint32
+ if max_block_type <= 1 {
+ return false
+ }
+
+ /* Read 0..15 + 3..39 bits. */
+ if safe == 0 {
+ block_type = readSymbol(type_tree, br)
+ s.block_length[tree_type] = readBlockLength(len_tree, br)
+ } else {
+ var memento bitReaderState
+ bitReaderSaveState(br, &memento)
+ if !safeReadSymbol(type_tree, br, &block_type) {
+ return false
+ }
+ if !safeReadBlockLength(s, &s.block_length[tree_type], len_tree, br) {
+ s.substate_read_block_length = stateReadBlockLengthNone
+ bitReaderRestoreState(br, &memento)
+ return false
+ }
+ }
+
+ if block_type == 1 {
+ block_type = ringbuffer[1] + 1
+ } else if block_type == 0 {
+ block_type = ringbuffer[0]
+ } else {
+ block_type -= 2
+ }
+
+ if block_type >= max_block_type {
+ block_type -= max_block_type
+ }
+
+ ringbuffer[0] = ringbuffer[1]
+ ringbuffer[1] = block_type
+ return true
+}
+
+func detectTrivialLiteralBlockTypes(s *Reader) {
+ var i uint
+ for i = 0; i < 8; i++ {
+ s.trivial_literal_contexts[i] = 0
+ }
+ for i = 0; uint32(i) < s.num_block_types[0]; i++ {
+ var offset uint = i << literalContextBits
+ var error uint = 0
+ var sample uint = uint(s.context_map[offset])
+ var j uint
+ for j = 0; j < 1<<literalContextBits; j++ {
+ error |= uint(s.context_map[offset+j]) ^ sample
+ }
+
+ if error == 0 {
+ s.trivial_literal_contexts[i>>5] |= 1 << (i & 31)
+ }
+ }
+}
+
+func prepareLiteralDecoding(s *Reader) {
+ var context_mode byte
+ var trivial uint
+ var block_type uint32 = s.block_type_rb[1]
+ var context_offset uint32 = block_type << literalContextBits
+ s.context_map_slice = s.context_map[context_offset:]
+ trivial = uint(s.trivial_literal_contexts[block_type>>5])
+ s.trivial_literal_context = int((trivial >> (block_type & 31)) & 1)
+ s.literal_htree = []huffmanCode(s.literal_hgroup.htrees[s.context_map_slice[0]])
+ context_mode = s.context_modes[block_type] & 3
+ s.context_lookup = getContextLUT(int(context_mode))
+}
+
+/* Decodes the block type and updates the state for literal context.
+ Reads 3..54 bits. */
+func decodeLiteralBlockSwitchInternal(safe int, s *Reader) bool {
+ if !decodeBlockTypeAndLength(safe, s, 0) {
+ return false
+ }
+
+ prepareLiteralDecoding(s)
+ return true
+}
+
+func decodeLiteralBlockSwitch(s *Reader) {
+ decodeLiteralBlockSwitchInternal(0, s)
+}
+
+func safeDecodeLiteralBlockSwitch(s *Reader) bool {
+ return decodeLiteralBlockSwitchInternal(1, s)
+}
+
+/* Block switch for insert/copy length.
+ Reads 3..54 bits. */
+func decodeCommandBlockSwitchInternal(safe int, s *Reader) bool {
+ if !decodeBlockTypeAndLength(safe, s, 1) {
+ return false
+ }
+
+ s.htree_command = []huffmanCode(s.insert_copy_hgroup.htrees[s.block_type_rb[3]])
+ return true
+}
+
+func decodeCommandBlockSwitch(s *Reader) {
+ decodeCommandBlockSwitchInternal(0, s)
+}
+
+func safeDecodeCommandBlockSwitch(s *Reader) bool {
+ return decodeCommandBlockSwitchInternal(1, s)
+}
+
+/* Block switch for distance codes.
+ Reads 3..54 bits. */
+func decodeDistanceBlockSwitchInternal(safe int, s *Reader) bool {
+ if !decodeBlockTypeAndLength(safe, s, 2) {
+ return false
+ }
+
+ s.dist_context_map_slice = s.dist_context_map[s.block_type_rb[5]<<distanceContextBits:]
+ s.dist_htree_index = s.dist_context_map_slice[s.distance_context]
+ return true
+}
+
+func decodeDistanceBlockSwitch(s *Reader) {
+ decodeDistanceBlockSwitchInternal(0, s)
+}
+
+func safeDecodeDistanceBlockSwitch(s *Reader) bool {
+ return decodeDistanceBlockSwitchInternal(1, s)
+}
+
+func unwrittenBytes(s *Reader, wrap bool) uint {
+ var pos uint
+ if wrap && s.pos > s.ringbuffer_size {
+ pos = uint(s.ringbuffer_size)
+ } else {
+ pos = uint(s.pos)
+ }
+ var partial_pos_rb uint = (s.rb_roundtrips * uint(s.ringbuffer_size)) + pos
+ return partial_pos_rb - s.partial_pos_out
+}
+
+/* Dumps output.
+ Returns BROTLI_DECODER_NEEDS_MORE_OUTPUT only if there is more output to push
+ and either ring-buffer is as big as window size, or |force| is true. */
+func writeRingBuffer(s *Reader, available_out *uint, next_out *[]byte, total_out *uint, force bool) int {
+ start := s.ringbuffer[s.partial_pos_out&uint(s.ringbuffer_mask):]
+ var to_write uint = unwrittenBytes(s, true)
+ var num_written uint = *available_out
+ if num_written > to_write {
+ num_written = to_write
+ }
+
+ if s.meta_block_remaining_len < 0 {
+ return decoderErrorFormatBlockLength1
+ }
+
+ if next_out != nil && *next_out == nil {
+ *next_out = start
+ } else {
+ if next_out != nil {
+ copy(*next_out, start[:num_written])
+ *next_out = (*next_out)[num_written:]
+ }
+ }
+
+ *available_out -= num_written
+ s.partial_pos_out += num_written
+ if total_out != nil {
+ *total_out = s.partial_pos_out
+ }
+
+ if num_written < to_write {
+ if s.ringbuffer_size == 1<<s.window_bits || force {
+ return decoderNeedsMoreOutput
+ } else {
+ return decoderSuccess
+ }
+ }
+
+ /* Wrap ring buffer only if it has reached its maximal size. */
+ if s.ringbuffer_size == 1<<s.window_bits && s.pos >= s.ringbuffer_size {
+ s.pos -= s.ringbuffer_size
+ s.rb_roundtrips++
+ if uint(s.pos) != 0 {
+ s.should_wrap_ringbuffer = 1
+ } else {
+ s.should_wrap_ringbuffer = 0
+ }
+ }
+
+ return decoderSuccess
+}
+
+func wrapRingBuffer(s *Reader) {
+ if s.should_wrap_ringbuffer != 0 {
+ copy(s.ringbuffer, s.ringbuffer_end[:uint(s.pos)])
+ s.should_wrap_ringbuffer = 0
+ }
+}
+
+/* Allocates ring-buffer.
+
+ s->ringbuffer_size MUST be updated by BrotliCalculateRingBufferSize before
+ this function is called.
+
+ Last two bytes of ring-buffer are initialized to 0, so context calculation
+ could be done uniformly for the first two and all other positions. */
+func ensureRingBuffer(s *Reader) bool {
+ var old_ringbuffer []byte
+ if s.ringbuffer_size == s.new_ringbuffer_size {
+ return true
+ }
+ spaceNeeded := int(s.new_ringbuffer_size) + int(kRingBufferWriteAheadSlack)
+ if len(s.ringbuffer) < spaceNeeded {
+ old_ringbuffer = s.ringbuffer
+ s.ringbuffer = make([]byte, spaceNeeded)
+ }
+
+ s.ringbuffer[s.new_ringbuffer_size-2] = 0
+ s.ringbuffer[s.new_ringbuffer_size-1] = 0
+
+ if old_ringbuffer != nil {
+ copy(s.ringbuffer, old_ringbuffer[:uint(s.pos)])
+ }
+
+ s.ringbuffer_size = s.new_ringbuffer_size
+ s.ringbuffer_mask = s.new_ringbuffer_size - 1
+ s.ringbuffer_end = s.ringbuffer[s.ringbuffer_size:]
+
+ return true
+}
+
+func copyUncompressedBlockToOutput(available_out *uint, next_out *[]byte, total_out *uint, s *Reader) int {
+ /* TODO: avoid allocation for single uncompressed block. */
+ if !ensureRingBuffer(s) {
+ return decoderErrorAllocRingBuffer1
+ }
+
+ /* State machine */
+ for {
+ switch s.substate_uncompressed {
+ case stateUncompressedNone:
+ {
+ var nbytes int = int(getRemainingBytes(&s.br))
+ if nbytes > s.meta_block_remaining_len {
+ nbytes = s.meta_block_remaining_len
+ }
+
+ if s.pos+nbytes > s.ringbuffer_size {
+ nbytes = s.ringbuffer_size - s.pos
+ }
+
+ /* Copy remaining bytes from s->br.buf_ to ring-buffer. */
+ copyBytes(s.ringbuffer[s.pos:], &s.br, uint(nbytes))
+
+ s.pos += nbytes
+ s.meta_block_remaining_len -= nbytes
+ if s.pos < 1<<s.window_bits {
+ if s.meta_block_remaining_len == 0 {
+ return decoderSuccess
+ }
+
+ return decoderNeedsMoreInput
+ }
+
+ s.substate_uncompressed = stateUncompressedWrite
+ }
+ fallthrough
+
+ /* Fall through. */
+ case stateUncompressedWrite:
+ {
+ var result int = writeRingBuffer(s, available_out, next_out, total_out, false)
+ if result != decoderSuccess {
+ return result
+ }
+
+ if s.ringbuffer_size == 1<<s.window_bits {
+ s.max_distance = s.max_backward_distance
+ }
+
+ s.substate_uncompressed = stateUncompressedNone
+ }
+ }
+ }
+}
+
+/* Calculates the smallest feasible ring buffer.
+
+ If we know the data size is small, do not allocate more ring buffer
+ size than needed to reduce memory usage.
+
+ This method is called before the first non-empty non-metadata block is
+ processed. When this method is called, metablock size and flags MUST be
+ decoded. */
+func calculateRingBufferSize(s *Reader) {
+ var window_size int = 1 << s.window_bits
+ var new_ringbuffer_size int = window_size
+ var min_size int
+ /* We need at least 2 bytes of ring buffer size to get the last two
+ bytes for context from there. */
+ if s.ensure_ring_buffer_size != 0 {
+ min_size = int(s.ensure_ring_buffer_size)
+ } else {
+ min_size = 1024
+ }
+ var output_size int
+
+ /* If the maximum is already reached, no further extension is required. */
+ if s.ringbuffer_size == window_size {
+ return
+ }
+
+ /* Metadata blocks do not touch the ring buffer. */
+ if s.is_metadata != 0 {
+ return
+ }
+
+ if s.ringbuffer == nil {
+ output_size = 0
+ } else {
+ output_size = s.pos
+ }
+
+ output_size += s.meta_block_remaining_len
+ if min_size < output_size {
+ min_size = output_size
+ }
+
+ if !(s.canny_ringbuffer_allocation == 0) {
+ /* Reduce ring buffer size to save memory if possible.
+ Ring buffer size can be reduced if:
+ 1) memory allocated so far is smaller than needed,
+ 2) maximum size is not yet reached,
+ 3) input data is exhausted. */
+ for new_ringbuffer_size>>1 >= min_size {
+ new_ringbuffer_size >>= 1
+ }
+ }
+
+ s.new_ringbuffer_size = new_ringbuffer_size
+}
+
+/* Reads 1..256 2-bit context modes. */
+func readContextModes(s *Reader) int {
+ var br *bitReader = &s.br
+ var i int = s.loop_counter
+
+ for i < int(s.num_block_types[0]) {
+ var bits uint32
+ if !safeReadBits(br, 2, &bits) {
+ s.loop_counter = i
+ return decoderNeedsMoreInput
+ }
+
+ s.context_modes[i] = byte(bits)
+ i++
+ }
+
+ return decoderSuccess
+}
+
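+/* Distance short codes, as packed in the two constants below: codes 0..3
+   select one of the four most recent distances, codes 4..9 are the last
+   distance -1, +1, -2, +2, -3, +3, and codes 10..15 apply the same deltas
+   to the second-to-last distance. */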
+func takeDistanceFromRingBuffer(s *Reader) {
+ if s.distance_code == 0 {
+ s.dist_rb_idx--
+ s.distance_code = s.dist_rb[s.dist_rb_idx&3]
+
+ /* Compensate double distance-ring-buffer roll for dictionary items. */
+ s.distance_context = 1
+ } else {
+ var distance_code int = s.distance_code << 1
+ const kDistanceShortCodeIndexOffset uint32 = 0xAAAFFF1B
+ const kDistanceShortCodeValueOffset uint32 = 0xFA5FA500
+ var v int = (s.dist_rb_idx + int(kDistanceShortCodeIndexOffset>>uint(distance_code))) & 0x3
+ /* kDistanceShortCodeIndexOffset has 2-bit values from LSB:
+ 3, 2, 1, 0, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2 */
+
+ /* kDistanceShortCodeValueOffset has 2-bit values from LSB:
+ -0, 0,-0, 0,-1, 1,-2, 2,-3, 3,-1, 1,-2, 2,-3, 3 */
+ s.distance_code = s.dist_rb[v]
+
+ v = int(kDistanceShortCodeValueOffset>>uint(distance_code)) & 0x3
+ if distance_code&0x3 != 0 {
+ s.distance_code += v
+ } else {
+ s.distance_code -= v
+ if s.distance_code <= 0 {
+ /* A huge distance will cause a decode failure soon.
+ This is a little faster than failing here. */
+ s.distance_code = 0x7FFFFFFF
+ }
+ }
+ }
+}
+
+func safeReadBitsMaybeZero(br *bitReader, n_bits uint32, val *uint32) bool {
+ if n_bits != 0 {
+ return safeReadBits(br, n_bits, val)
+ } else {
+ *val = 0
+ return true
+ }
+}
+
+/* Precondition: s->distance_code < 0. */
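+/* For distance codes past the short and direct codes, the branch below
+   computes nbits = (distval >> 1) + 1 and
+   offset = ((2 + (distval & 1)) << nbits) - 4; e.g. with no postfix bits,
+   distval == 2 gives nbits == 2 and codes covering offsets 4..7 past the
+   direct codes. */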
+func readDistanceInternal(safe int, s *Reader, br *bitReader) bool {
+ var distval int
+ var memento bitReaderState
+ var distance_tree []huffmanCode = []huffmanCode(s.distance_hgroup.htrees[s.dist_htree_index])
+ if safe == 0 {
+ s.distance_code = int(readSymbol(distance_tree, br))
+ } else {
+ var code uint32
+ bitReaderSaveState(br, &memento)
+ if !safeReadSymbol(distance_tree, br, &code) {
+ return false
+ }
+
+ s.distance_code = int(code)
+ }
+
+ /* Convert the distance code to the actual distance by possibly
+ looking up past distances from the s->ringbuffer. */
+ s.distance_context = 0
+
+ if s.distance_code&^0xF == 0 {
+ takeDistanceFromRingBuffer(s)
+ s.block_length[2]--
+ return true
+ }
+
+ distval = s.distance_code - int(s.num_direct_distance_codes)
+ if distval >= 0 {
+ var nbits uint32
+ var postfix int
+ var offset int
+ if safe == 0 && (s.distance_postfix_bits == 0) {
+ nbits = (uint32(distval) >> 1) + 1
+ offset = ((2 + (distval & 1)) << nbits) - 4
+ s.distance_code = int(s.num_direct_distance_codes) + offset + int(readBits(br, nbits))
+ } else {
+ /* This branch also works well when s->distance_postfix_bits == 0. */
+ var bits uint32
+ postfix = distval & s.distance_postfix_mask
+ distval >>= s.distance_postfix_bits
+ nbits = (uint32(distval) >> 1) + 1
+ if safe != 0 {
+ if !safeReadBitsMaybeZero(br, nbits, &bits) {
+ s.distance_code = -1 /* Restore precondition. */
+ bitReaderRestoreState(br, &memento)
+ return false
+ }
+ } else {
+ bits = readBits(br, nbits)
+ }
+
+ offset = ((2 + (distval & 1)) << nbits) - 4
+ s.distance_code = int(s.num_direct_distance_codes) + ((offset + int(bits)) << s.distance_postfix_bits) + postfix
+ }
+ }
+
+ s.distance_code = s.distance_code - numDistanceShortCodes + 1
+ s.block_length[2]--
+ return true
+}
+
+func readDistance(s *Reader, br *bitReader) {
+ readDistanceInternal(0, s, br)
+}
+
+func safeReadDistance(s *Reader, br *bitReader) bool {
+ return readDistanceInternal(1, s, br)
+}
+
+func readCommandInternal(safe int, s *Reader, br *bitReader, insert_length *int) bool {
+ var cmd_code uint32
+ var insert_len_extra uint32 = 0
+ var copy_length uint32
+ var v cmdLutElement
+ var memento bitReaderState
+ if safe == 0 {
+ cmd_code = readSymbol(s.htree_command, br)
+ } else {
+ bitReaderSaveState(br, &memento)
+ if !safeReadSymbol(s.htree_command, br, &cmd_code) {
+ return false
+ }
+ }
+
+ v = kCmdLut[cmd_code]
+ s.distance_code = int(v.distance_code)
+ s.distance_context = int(v.context)
+ s.dist_htree_index = s.dist_context_map_slice[s.distance_context]
+ *insert_length = int(v.insert_len_offset)
+ if safe == 0 {
+ if v.insert_len_extra_bits != 0 {
+ insert_len_extra = readBits(br, uint32(v.insert_len_extra_bits))
+ }
+
+ copy_length = readBits(br, uint32(v.copy_len_extra_bits))
+ } else {
+ if !safeReadBitsMaybeZero(br, uint32(v.insert_len_extra_bits), &insert_len_extra) || !safeReadBitsMaybeZero(br, uint32(v.copy_len_extra_bits), ©_length) {
+ bitReaderRestoreState(br, &memento)
+ return false
+ }
+ }
+
+ s.copy_length = int(copy_length) + int(v.copy_len_offset)
+ s.block_length[1]--
+ *insert_length += int(insert_len_extra)
+ return true
+}
+
+func readCommand(s *Reader, br *bitReader, insert_length *int) {
+ readCommandInternal(0, s, br, insert_length)
+}
+
+func safeReadCommand(s *Reader, br *bitReader, insert_length *int) bool {
+ return readCommandInternal(1, s, br, insert_length)
+}
+
+func checkInputAmountMaybeSafe(safe int, br *bitReader, num uint) bool {
+ if safe != 0 {
+ return true
+ }
+
+ return checkInputAmount(br, num)
+}
+
+func processCommandsInternal(safe int, s *Reader) int {
+ var pos int = s.pos
+ var i int = s.loop_counter
+ var result int = decoderSuccess
+ var br *bitReader = &s.br
+ var hc []huffmanCode
+
+ if !checkInputAmountMaybeSafe(safe, br, 28) {
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+
+ if safe == 0 {
+ warmupBitReader(br)
+ }
+
+ /* Jump into state machine. */
+ if s.state == stateCommandBegin {
+ goto CommandBegin
+ } else if s.state == stateCommandInner {
+ goto CommandInner
+ } else if s.state == stateCommandPostDecodeLiterals {
+ goto CommandPostDecodeLiterals
+ } else if s.state == stateCommandPostWrapCopy {
+ goto CommandPostWrapCopy
+ } else {
+ return decoderErrorUnreachable
+ }
+
+CommandBegin:
+ if safe != 0 {
+ s.state = stateCommandBegin
+ }
+
+ if !checkInputAmountMaybeSafe(safe, br, 28) { /* 156 bits + 7 bytes */
+ s.state = stateCommandBegin
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+
+ if s.block_length[1] == 0 {
+ if safe != 0 {
+ if !safeDecodeCommandBlockSwitch(s) {
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+ } else {
+ decodeCommandBlockSwitch(s)
+ }
+
+ goto CommandBegin
+ }
+
+ /* Read the insert/copy length in the command. */
+ if safe != 0 {
+ if !safeReadCommand(s, br, &i) {
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+ } else {
+ readCommand(s, br, &i)
+ }
+
+ if i == 0 {
+ goto CommandPostDecodeLiterals
+ }
+
+ s.meta_block_remaining_len -= i
+
+CommandInner:
+ if safe != 0 {
+ s.state = stateCommandInner
+ }
+
+ /* Read the literals in the command. */
+ if s.trivial_literal_context != 0 {
+ var bits uint32
+ var value uint32
+ preloadSymbol(safe, s.literal_htree, br, &bits, &value)
+ for {
+ if !checkInputAmountMaybeSafe(safe, br, 28) { /* 162 bits + 7 bytes */
+ s.state = stateCommandInner
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+
+ if s.block_length[0] == 0 {
+ if safe != 0 {
+ if !safeDecodeLiteralBlockSwitch(s) {
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+ } else {
+ decodeLiteralBlockSwitch(s)
+ }
+
+ preloadSymbol(safe, s.literal_htree, br, &bits, &value)
+ if s.trivial_literal_context == 0 {
+ goto CommandInner
+ }
+ }
+
+ if safe == 0 {
+ s.ringbuffer[pos] = byte(readPreloadedSymbol(s.literal_htree, br, &bits, &value))
+ } else {
+ var literal uint32
+ if !safeReadSymbol(s.literal_htree, br, &literal) {
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+
+ s.ringbuffer[pos] = byte(literal)
+ }
+
+ s.block_length[0]--
+ pos++
+ if pos == s.ringbuffer_size {
+ s.state = stateCommandInnerWrite
+ i--
+ goto saveStateAndReturn
+ }
+ i--
+ if i == 0 {
+ break
+ }
+ }
+ } else {
+ var p1 byte = s.ringbuffer[(pos-1)&s.ringbuffer_mask]
+ var p2 byte = s.ringbuffer[(pos-2)&s.ringbuffer_mask]
+ for {
+ var context byte
+ if !checkInputAmountMaybeSafe(safe, br, 28) { /* 162 bits + 7 bytes */
+ s.state = stateCommandInner
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+
+ if s.block_length[0] == 0 {
+ if safe != 0 {
+ if !safeDecodeLiteralBlockSwitch(s) {
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+ } else {
+ decodeLiteralBlockSwitch(s)
+ }
+
+ if s.trivial_literal_context != 0 {
+ goto CommandInner
+ }
+ }
+
+ context = getContext(p1, p2, s.context_lookup)
+ hc = []huffmanCode(s.literal_hgroup.htrees[s.context_map_slice[context]])
+ p2 = p1
+ if safe == 0 {
+ p1 = byte(readSymbol(hc, br))
+ } else {
+ var literal uint32
+ if !safeReadSymbol(hc, br, &literal) {
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+
+ p1 = byte(literal)
+ }
+
+ s.ringbuffer[pos] = p1
+ s.block_length[0]--
+ pos++
+ if pos == s.ringbuffer_size {
+ s.state = stateCommandInnerWrite
+ i--
+ goto saveStateAndReturn
+ }
+ i--
+ if i == 0 {
+ break
+ }
+ }
+ }
+
+ if s.meta_block_remaining_len <= 0 {
+ s.state = stateMetablockDone
+ goto saveStateAndReturn
+ }
+
+CommandPostDecodeLiterals:
+ if safe != 0 {
+ s.state = stateCommandPostDecodeLiterals
+ }
+
+ if s.distance_code >= 0 {
+ /* Implicit distance case. */
+ if s.distance_code != 0 {
+ s.distance_context = 0
+ } else {
+ s.distance_context = 1
+ }
+
+ s.dist_rb_idx--
+ s.distance_code = s.dist_rb[s.dist_rb_idx&3]
+ } else {
+ /* Read distance code in the command, unless it was implicitly zero. */
+ if s.block_length[2] == 0 {
+ if safe != 0 {
+ if !safeDecodeDistanceBlockSwitch(s) {
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+ } else {
+ decodeDistanceBlockSwitch(s)
+ }
+ }
+
+ if safe != 0 {
+ if !safeReadDistance(s, br) {
+ result = decoderNeedsMoreInput
+ goto saveStateAndReturn
+ }
+ } else {
+ readDistance(s, br)
+ }
+ }
+
+ if s.max_distance != s.max_backward_distance {
+ if pos < s.max_backward_distance {
+ s.max_distance = pos
+ } else {
+ s.max_distance = s.max_backward_distance
+ }
+ }
+
+ i = s.copy_length
+
+ /* Apply the LZ77 back-reference copy, or a static dictionary reference
+ if the distance is larger than the maximum LZ77 distance. */
+ if s.distance_code > s.max_distance {
+ /* The maximum allowed distance is BROTLI_MAX_ALLOWED_DISTANCE = 0x7FFFFFFC.
+ With this choice, no signed overflow can occur after decoding
+ a special distance code (e.g., after adding 3 to the last distance). */
+ if s.distance_code > maxAllowedDistance {
+ return decoderErrorFormatDistance
+ }
+
+ if i >= minDictionaryWordLength && i <= maxDictionaryWordLength {
+ var address int = s.distance_code - s.max_distance - 1
+ var words *dictionary = s.dictionary
+ var trans *transforms = s.transforms
+ var offset int = int(s.dictionary.offsets_by_length[i])
+ var shift uint32 = uint32(s.dictionary.size_bits_by_length[i])
+ var mask int = int(bitMask(shift))
+ var word_idx int = address & mask
+ var transform_idx int = address >> shift
+
+ /* Compensate double distance-ring-buffer roll. */
+ s.dist_rb_idx += s.distance_context
+
+ offset += word_idx * i
+ if words.data == nil {
+ return decoderErrorDictionaryNotSet
+ }
+
+ if transform_idx < int(trans.num_transforms) {
+ word := words.data[offset:]
+ var len int = i
+ if transform_idx == int(trans.cutOffTransforms[0]) {
+ copy(s.ringbuffer[pos:], word[:uint(len)])
+ } else {
+ len = transformDictionaryWord(s.ringbuffer[pos:], word, int(len), trans, transform_idx)
+ }
+
+ pos += int(len)
+ s.meta_block_remaining_len -= int(len)
+ if pos >= s.ringbuffer_size {
+ s.state = stateCommandPostWrite1
+ goto saveStateAndReturn
+ }
+ } else {
+ return decoderErrorFormatTransform
+ }
+ } else {
+ return decoderErrorFormatDictionary
+ }
+ } else {
+ var src_start int = (pos - s.distance_code) & s.ringbuffer_mask
+ copy_dst := s.ringbuffer[pos:]
+ copy_src := s.ringbuffer[src_start:]
+ var dst_end int = pos + i
+ var src_end int = src_start + i
+
+ /* Update the recent distances cache. */
+ s.dist_rb[s.dist_rb_idx&3] = s.distance_code
+
+ s.dist_rb_idx++
+ s.meta_block_remaining_len -= i
+
+ /* There are 32+ bytes of slack in the ring-buffer allocation.
+ Also, we have 16 short codes that make these 16 bytes irrelevant
+ in the ring-buffer. Copy them over as a first guess. */
+ copy(copy_dst, copy_src[:16])
+
+ if src_end > pos && dst_end > src_start {
+ /* Regions intersect. */
+ goto CommandPostWrapCopy
+ }
+
+ if dst_end >= s.ringbuffer_size || src_end >= s.ringbuffer_size {
+ /* At least one region wraps. */
+ goto CommandPostWrapCopy
+ }
+
+ pos += i
+ if i > 16 {
+ if i > 32 {
+ copy(copy_dst[16:], copy_src[16:][:uint(i-16)])
+ } else {
+ /* This branch covers about 45% of cases.
+ A fixed-size short copy allows more compiler optimizations. */
+ copy(copy_dst[16:], copy_src[16:][:16])
+ }
+ }
+ }
+
+ if s.meta_block_remaining_len <= 0 {
+ /* Next metablock, if any. */
+ s.state = stateMetablockDone
+
+ goto saveStateAndReturn
+ } else {
+ goto CommandBegin
+ }
+CommandPostWrapCopy:
+ {
+ var wrap_guard int = s.ringbuffer_size - pos
+ for {
+ i--
+ if i < 0 {
+ break
+ }
+ s.ringbuffer[pos] = s.ringbuffer[(pos-s.distance_code)&s.ringbuffer_mask]
+ pos++
+ wrap_guard--
+ if wrap_guard == 0 {
+ s.state = stateCommandPostWrite2
+ goto saveStateAndReturn
+ }
+ }
+ }
+
+ if s.meta_block_remaining_len <= 0 {
+ /* Next metablock, if any. */
+ s.state = stateMetablockDone
+
+ goto saveStateAndReturn
+ } else {
+ goto CommandBegin
+ }
+
+saveStateAndReturn:
+ s.pos = pos
+ s.loop_counter = i
+ return result
+}
+
+func processCommands(s *Reader) int {
+ return processCommandsInternal(0, s)
+}
+
+func safeProcessCommands(s *Reader) int {
+ return processCommandsInternal(1, s)
+}
+
+/* Returns the maximum number of distance symbols which can only represent
+ distances not exceeding BROTLI_MAX_ALLOWED_DISTANCE. */
+
+var maxDistanceSymbol_bound = [maxNpostfix + 1]uint32{0, 4, 12, 28}
+var maxDistanceSymbol_diff = [maxNpostfix + 1]uint32{73, 126, 228, 424}
+
+func maxDistanceSymbol(ndirect uint32, npostfix uint32) uint32 {
+ var postfix uint32 = 1 << npostfix
+ if ndirect < maxDistanceSymbol_bound[npostfix] {
+ return ndirect + maxDistanceSymbol_diff[npostfix] + postfix
+ } else if ndirect > maxDistanceSymbol_bound[npostfix]+postfix {
+ return ndirect + maxDistanceSymbol_diff[npostfix]
+ } else {
+ return maxDistanceSymbol_bound[npostfix] + maxDistanceSymbol_diff[npostfix] + postfix
+ }
+}
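+
+/* Illustrative example (a sketch, not part of the upstream translation):
+ with the default stream parameters npostfix = 0 and ndirect = 0, neither
+ special branch applies, so the result is
+ maxDistanceSymbol_bound[0] + maxDistanceSymbol_diff[0] + (1 << 0) =
+ 0 + 73 + 1 = 74, i.e. only the first 74 distance symbols are guaranteed
+ to stay within BROTLI_MAX_ALLOWED_DISTANCE. */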
+
+/* Invariant: input stream is never overconsumed:
+ - invalid input implies that the whole stream is invalid -> any amount of
+ input could be read and discarded
+ - when the result is "needs more input", at least one more byte is REQUIRED
+ to complete decoding; all input data MUST be consumed by the decoder, so
+ the client can swap the input buffer
+ - when the result is "needs more output" the decoder MUST ensure that it
+ doesn't hold more than 7 bits in the bit reader; this saves the client
+ from swapping the input buffer ahead of time
+ - when the result is "success" the decoder MUST return all unused data back
+ to the input buffer; this is possible because the invariant is held on enter */
+func decoderDecompressStream(s *Reader, available_in *uint, next_in *[]byte, available_out *uint, next_out *[]byte) int {
+ var result int = decoderSuccess
+ var br *bitReader = &s.br
+
+ /* Do not try to process further in a case of unrecoverable error. */
+ if int(s.error_code) < 0 {
+ return decoderResultError
+ }
+
+ if *available_out != 0 && (next_out == nil || *next_out == nil) {
+ return saveErrorCode(s, decoderErrorInvalidArguments)
+ }
+
+ if *available_out == 0 {
+ next_out = nil
+ }
+ if s.buffer_length == 0 { /* Just connect bit reader to input stream. */
+ br.input_len = *available_in
+ br.input = *next_in
+ br.byte_pos = 0
+ } else {
+ /* At least one byte of input is required. More than one byte of input may
+ be required to complete the transaction -> reading more data must be
+ done in a loop -> do it in a main loop. */
+ result = decoderNeedsMoreInput
+
+ br.input = s.buffer.u8[:]
+ br.byte_pos = 0
+ }
+
+ /* State machine */
+ for {
+ if result != decoderSuccess {
+ /* Error, needs more input/output. */
+ if result == decoderNeedsMoreInput {
+ if s.ringbuffer != nil { /* Pro-actively push output. */
+ var intermediate_result int = writeRingBuffer(s, available_out, next_out, nil, true)
+
+ /* WriteRingBuffer checks s->meta_block_remaining_len validity. */
+ if int(intermediate_result) < 0 {
+ result = intermediate_result
+ break
+ }
+ }
+
+ if s.buffer_length != 0 { /* Used with internal buffer. */
+ if br.byte_pos == br.input_len {
+ /* Successfully finished read transaction.
+ Accumulator contains less than 8 bits, because internal buffer
+ is expanded byte-by-byte until it is enough to complete read. */
+ s.buffer_length = 0
+
+ /* Switch to input stream and restart. */
+ result = decoderSuccess
+
+ br.input_len = *available_in
+ br.input = *next_in
+ br.byte_pos = 0
+ continue
+ } else if *available_in != 0 {
+ /* Not enough data in buffer, but can take one more byte from
+ input stream. */
+ result = decoderSuccess
+
+ s.buffer.u8[s.buffer_length] = (*next_in)[0]
+ s.buffer_length++
+ br.input_len = uint(s.buffer_length)
+ *next_in = (*next_in)[1:]
+ (*available_in)--
+
+ /* Retry with more data in buffer. */
+ continue
+ }
+
+ /* Can't finish reading: the input stream doesn't contain
+ enough data to continue. */
+ break
+ } else {
+ /* Copy tail to internal buffer and return. */
+ *next_in = br.input[br.byte_pos:]
+
+ *available_in = br.input_len - br.byte_pos
+ for *available_in != 0 {
+ s.buffer.u8[s.buffer_length] = (*next_in)[0]
+ s.buffer_length++
+ *next_in = (*next_in)[1:]
+ (*available_in)--
+ }
+
+ break
+ }
+ }
+
+ /* Unreachable. */
+
+ /* Fail or needs more output. */
+ if s.buffer_length != 0 {
+ /* Just consumed the buffered input and produced some output. Otherwise
+ it would result in "needs more input". Reset internal buffer. */
+ s.buffer_length = 0
+ } else {
+ /* Using input stream in last iteration. When decoder switches to input
+ stream it has less than 8 bits in accumulator, so it is safe to
+ return unused accumulator bits there. */
+ bitReaderUnload(br)
+
+ *available_in = br.input_len - br.byte_pos
+ *next_in = br.input[br.byte_pos:]
+ }
+
+ break
+ }
+
+ switch s.state {
+ /* Prepare to the first read. */
+ case stateUninited:
+ if !warmupBitReader(br) {
+ result = decoderNeedsMoreInput
+ break
+ }
+
+ /* Decode window size. */
+ result = decodeWindowBits(s, br) /* Reads 1..8 bits. */
+ if result != decoderSuccess {
+ break
+ }
+
+ if s.large_window {
+ s.state = stateLargeWindowBits
+ break
+ }
+
+ s.state = stateInitialize
+
+ case stateLargeWindowBits:
+ if !safeReadBits(br, 6, &s.window_bits) {
+ result = decoderNeedsMoreInput
+ break
+ }
+
+ if s.window_bits < largeMinWbits || s.window_bits > largeMaxWbits {
+ result = decoderErrorFormatWindowBits
+ break
+ }
+
+ s.state = stateInitialize
+ fallthrough
+
+ /* Maximum distance, see section 9.1 of the spec. */
+ /* Fall through. */
+ case stateInitialize:
+ s.max_backward_distance = (1 << s.window_bits) - windowGap
+
+ /* Allocate memory for both block_type_trees and block_len_trees. */
+ s.block_type_trees = make([]huffmanCode, (3 * (huffmanMaxSize258 + huffmanMaxSize26)))
+
+ if s.block_type_trees == nil {
+ result = decoderErrorAllocBlockTypeTrees
+ break
+ }
+
+ s.block_len_trees = s.block_type_trees[3*huffmanMaxSize258:]
+
+ s.state = stateMetablockBegin
+ fallthrough
+
+ /* Fall through. */
+ case stateMetablockBegin:
+ decoderStateMetablockBegin(s)
+
+ s.state = stateMetablockHeader
+ fallthrough
+
+ /* Fall through. */
+ case stateMetablockHeader:
+ result = decodeMetaBlockLength(s, br)
+ /* Reads 2 - 31 bits. */
+ if result != decoderSuccess {
+ break
+ }
+
+ if s.is_metadata != 0 || s.is_uncompressed != 0 {
+ if !bitReaderJumpToByteBoundary(br) {
+ result = decoderErrorFormatPadding1
+ break
+ }
+ }
+
+ if s.is_metadata != 0 {
+ s.state = stateMetadata
+ break
+ }
+
+ if s.meta_block_remaining_len == 0 {
+ s.state = stateMetablockDone
+ break
+ }
+
+ calculateRingBufferSize(s)
+ if s.is_uncompressed != 0 {
+ s.state = stateUncompressed
+ break
+ }
+
+ s.loop_counter = 0
+ s.state = stateHuffmanCode0
+
+ case stateUncompressed:
+ result = copyUncompressedBlockToOutput(available_out, next_out, nil, s)
+ if result == decoderSuccess {
+ s.state = stateMetablockDone
+ }
+
+ case stateMetadata:
+ for ; s.meta_block_remaining_len > 0; s.meta_block_remaining_len-- {
+ var bits uint32
+
+ /* Read one byte and ignore it. */
+ if !safeReadBits(br, 8, &bits) {
+ result = decoderNeedsMoreInput
+ break
+ }
+ }
+
+ if result == decoderSuccess {
+ s.state = stateMetablockDone
+ }
+
+ case stateHuffmanCode0:
+ if s.loop_counter >= 3 {
+ s.state = stateMetablockHeader2
+ break
+ }
+
+ /* Reads 1..11 bits. */
+ result = decodeVarLenUint8(s, br, &s.num_block_types[s.loop_counter])
+
+ if result != decoderSuccess {
+ break
+ }
+
+ s.num_block_types[s.loop_counter]++
+ if s.num_block_types[s.loop_counter] < 2 {
+ s.loop_counter++
+ break
+ }
+
+ s.state = stateHuffmanCode1
+ fallthrough
+
+ case stateHuffmanCode1:
+ {
+ var alphabet_size uint32 = s.num_block_types[s.loop_counter] + 2
+ var tree_offset int = s.loop_counter * huffmanMaxSize258
+ result = readHuffmanCode(alphabet_size, alphabet_size, s.block_type_trees[tree_offset:], nil, s)
+ if result != decoderSuccess {
+ break
+ }
+ s.state = stateHuffmanCode2
+ }
+ fallthrough
+
+ case stateHuffmanCode2:
+ {
+ var alphabet_size uint32 = numBlockLenSymbols
+ var tree_offset int = s.loop_counter * huffmanMaxSize26
+ result = readHuffmanCode(alphabet_size, alphabet_size, s.block_len_trees[tree_offset:], nil, s)
+ if result != decoderSuccess {
+ break
+ }
+ s.state = stateHuffmanCode3
+ }
+ fallthrough
+
+ case stateHuffmanCode3:
+ var tree_offset int = s.loop_counter * huffmanMaxSize26
+ if !safeReadBlockLength(s, &s.block_length[s.loop_counter], s.block_len_trees[tree_offset:], br) {
+ result = decoderNeedsMoreInput
+ break
+ }
+
+ s.loop_counter++
+ s.state = stateHuffmanCode0
+
+ case stateMetablockHeader2:
+ {
+ var bits uint32
+ if !safeReadBits(br, 6, &bits) {
+ result = decoderNeedsMoreInput
+ break
+ }
+
+ s.distance_postfix_bits = bits & bitMask(2)
+ bits >>= 2
+ s.num_direct_distance_codes = numDistanceShortCodes + (bits << s.distance_postfix_bits)
+ s.distance_postfix_mask = int(bitMask(s.distance_postfix_bits))
+ s.context_modes = make([]byte, uint(s.num_block_types[0]))
+ if s.context_modes == nil {
+ result = decoderErrorAllocContextModes
+ break
+ }
+
+ s.loop_counter = 0
+ s.state = stateContextModes
+ }
+ fallthrough
+
+ case stateContextModes:
+ result = readContextModes(s)
+
+ if result != decoderSuccess {
+ break
+ }
+
+ s.state = stateContextMap1
+ fallthrough
+
+ case stateContextMap1:
+ result = decodeContextMap(s.num_block_types[0]<<literalContextBits, &s.num_literal_htrees, &s.context_map, s)
+
+ if result != decoderSuccess {
+ break
+ }
+
+ detectTrivialLiteralBlockTypes(s)
+ s.state = stateContextMap2
+ fallthrough
+
+ /* Fall through. */
+ case stateContextMap2:
+ {
+ var num_direct_codes uint32 = s.num_direct_distance_codes - numDistanceShortCodes
+ var num_distance_codes uint32
+ var max_distance_symbol uint32
+ if s.large_window {
+ num_distance_codes = uint32(distanceAlphabetSize(uint(s.distance_postfix_bits), uint(num_direct_codes), largeMaxDistanceBits))
+ max_distance_symbol = maxDistanceSymbol(num_direct_codes, s.distance_postfix_bits)
+ } else {
+ num_distance_codes = uint32(distanceAlphabetSize(uint(s.distance_postfix_bits), uint(num_direct_codes), maxDistanceBits))
+ max_distance_symbol = num_distance_codes
+ }
+
+ result = decodeContextMap(s.num_block_types[2]<<distanceContextBits, &s.num_dist_htrees, &s.dist_context_map, s)
+ if result != decoderSuccess {
+ break
+ }
+
+ decoderHuffmanTreeGroupInit(s, &s.literal_hgroup, numLiteralSymbols, numLiteralSymbols, s.num_literal_htrees)
+ decoderHuffmanTreeGroupInit(s, &s.insert_copy_hgroup, numCommandSymbols, numCommandSymbols, s.num_block_types[1])
+ decoderHuffmanTreeGroupInit(s, &s.dist_hgroup, uint32(max_distance_symbol), num_distance_codes, s.num_block_types[2])
+ }
+
+ s.loop_counter = 0
+ s.state = stateTreeGroup
+ fallthrough
+
+ /* Fall through. */
+ case stateTreeGroup:
+ var hgroup *huffmanTreeGroup = nil
+ switch s.loop_counter {
+ case 0:
+ hgroup = &s.literal_hgroup
+ case 1:
+ hgroup = &s.insert_copy_hgroup
+ case 2:
+ hgroup = &s.dist_hgroup
+ default:
+ return saveErrorCode(s, decoderErrorUnreachable)
+ }
+
+ result = huffmanTreeGroupDecode(hgroup, s)
+ if result != decoderSuccess {
+ break
+ }
+
+ s.loop_counter++
+ if s.loop_counter >= 3 {
+ prepareLiteralDecoding(s)
+ s.dist_context_map_slice = s.dist_context_map
+ s.htree_command = []huffmanCode(s.insert_copy_hgroup.htrees[0])
+ if !ensureRingBuffer(s) {
+ result = decoderErrorAllocRingBuffer2
+ break
+ }
+
+ s.state = stateCommandBegin
+ }
+
+ case stateCommandBegin, stateCommandInner, stateCommandPostDecodeLiterals, stateCommandPostWrapCopy:
+ result = processCommands(s)
+
+ if result == decoderNeedsMoreInput {
+ result = safeProcessCommands(s)
+ }
+
+ case stateCommandInnerWrite, stateCommandPostWrite1, stateCommandPostWrite2:
+ result = writeRingBuffer(s, available_out, next_out, nil, false)
+
+ if result != decoderSuccess {
+ break
+ }
+
+ wrapRingBuffer(s)
+ if s.ringbuffer_size == 1<<s.window_bits {
+ s.max_distance = s.max_backward_distance
+ }
+
+func remainingInputBlockSize(s *Writer) uint {
+ var delta uint64 = unprocessedInputSize(s)
+ var block_size uint = inputBlockSize(s)
+ if delta >= uint64(block_size) {
+ return 0
+ }
+ return block_size - uint(delta)
+}
+
+/* Wraps 64-bit input position to 32-bit ring-buffer position preserving
+ "not-a-first-lap" feature. */
+func wrapPosition(position uint64) uint32 {
+ var result uint32 = uint32(position)
+ var gb uint64 = position >> 30
+ if gb > 2 {
+ /* Wrap every 2 GiB; the first 3 GiB are continuous. */
+ result = result&((1<<30)-1) | (uint32((gb-1)&1)+1)<<30
+ }
+
+ return result
+}
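+
+/* Illustrative sketch (not part of the upstream translation): positions
+ below 3 GiB map to themselves; larger positions alternate between the
+ second and third GiB windows, preserving the "not-a-first-lap" bit:
+
+ wrapPosition(1<<29) == 1<<29 // identity below 3 GiB
+ wrapPosition((5<<30)|0x123) == 0x40000123 // wrapped into [1 GiB, 2 GiB)
+*/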
+
+func (s *Writer) getStorage(size int) []byte {
+ if len(s.storage) < size {
+ s.storage = make([]byte, size)
+ }
+
+ return s.storage
+}
+
+func hashTableSize(max_table_size uint, input_size uint) uint {
+ var htsize uint = 256
+ for htsize < max_table_size && htsize < input_size {
+ htsize <<= 1
+ }
+
+ return htsize
+}
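+
+/* Illustrative example (not part of the upstream translation): the result
+ is the smallest power of two >= min(max_table_size, input_size), floored
+ at 256; e.g. hashTableSize(1<<15, 5000) == 8192. */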
+
+func getHashTable(s *Writer, quality int, input_size uint, table_size *uint) []int {
+ var max_table_size uint = maxHashTableSize(quality)
+ var htsize uint = hashTableSize(max_table_size, input_size)
+ /* Use smaller hash table when input.size() is smaller, since we
+ fill the table, incurring O(hash table size) overhead for
+ compression, and if the input is short, we won't need that
+ many hash table entries anyway. */
+
+ var table []int
+ assert(max_table_size >= 256)
+ if quality == fastOnePassCompressionQuality {
+ /* Only odd shifts are supported by fast-one-pass. */
+ if htsize&0xAAAAA == 0 {
+ htsize <<= 1
+ }
+ }
+
+ if htsize <= uint(len(s.small_table_)) {
+ table = s.small_table_[:]
+ } else {
+ if htsize > s.large_table_size_ {
+ s.large_table_size_ = htsize
+ s.large_table_ = nil
+ s.large_table_ = make([]int, htsize)
+ }
+
+ table = s.large_table_
+ }
+
+ *table_size = htsize
+ for i := 0; i < int(htsize); i++ {
+ table[i] = 0
+ }
+ return table
+}
+
+func encodeWindowBits(lgwin int, large_window bool, last_bytes *uint16, last_bytes_bits *byte) {
+ if large_window {
+ *last_bytes = uint16((lgwin&0x3F)<<8 | 0x11)
+ *last_bytes_bits = 14
+ } else {
+ if lgwin == 16 {
+ *last_bytes = 0
+ *last_bytes_bits = 1
+ } else if lgwin == 17 {
+ *last_bytes = 1
+ *last_bytes_bits = 7
+ } else if lgwin > 17 {
+ *last_bytes = uint16((lgwin-17)<<1 | 0x01)
+ *last_bytes_bits = 4
+ } else {
+ *last_bytes = uint16((lgwin-8)<<4 | 0x01)
+ *last_bytes_bits = 7
+ }
+ }
+}
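+
+/* Illustrative example (not part of the upstream translation): this
+ mirrors the WBITS encoding of RFC 7932 section 9.1. For lgwin = 22 the
+ 4-bit pattern is (22-17)<<1 | 1 = 0b1011; lgwin = 16 is the single bit 0;
+ lgwin in 10..15 uses the 7-bit pattern (lgwin-8)<<4 | 1. */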
+
+/* Decide about the context map based on how well the UTF8 prefix of the
+ previous byte predicts the next byte. The prediction ability is
+ calculated as Shannon entropy. Here we need Shannon entropy instead of
+ 'BitsEntropy' since the prefix will be encoded with the remaining 6 bits
+ of the following byte, and BitsEntropy would assume that symbol to be
+ stored alone using Huffman coding. */
+
+var kStaticContextMapContinuation = [64]uint32{
+ 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+}
+var kStaticContextMapSimpleUTF8 = [64]uint32{
+ 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+}
+
+func chooseContextMap(quality int, bigram_histo []uint32, num_literal_contexts *uint, literal_context_map *[]uint32) {
+ var monogram_histo = [3]uint32{0}
+ var two_prefix_histo = [6]uint32{0}
+ var total uint
+ var i uint
+ var dummy uint
+ var entropy [4]float64
+ for i = 0; i < 9; i++ {
+ monogram_histo[i%3] += bigram_histo[i]
+ two_prefix_histo[i%6] += bigram_histo[i]
+ }
+
+ entropy[1] = shannonEntropy(monogram_histo[:], 3, &dummy)
+ entropy[2] = (shannonEntropy(two_prefix_histo[:], 3, &dummy) + shannonEntropy(two_prefix_histo[3:], 3, &dummy))
+ entropy[3] = 0
+ for i = 0; i < 3; i++ {
+ entropy[3] += shannonEntropy(bigram_histo[3*i:], 3, &dummy)
+ }
+
+ total = uint(monogram_histo[0] + monogram_histo[1] + monogram_histo[2])
+ assert(total != 0)
+ entropy[0] = 1.0 / float64(total)
+ entropy[1] *= entropy[0]
+ entropy[2] *= entropy[0]
+ entropy[3] *= entropy[0]
+
+ if quality < minQualityForHqContextModeling {
+ /* 3 context models is a bit slower, don't use it at lower qualities. */
+ entropy[3] = entropy[1] * 10
+ }
+
+ /* If expected savings by symbol are less than 0.2 bits, skip the
+ context modeling -- in exchange for faster decoding speed. */
+ if entropy[1]-entropy[2] < 0.2 && entropy[1]-entropy[3] < 0.2 {
+ *num_literal_contexts = 1
+ } else if entropy[2]-entropy[3] < 0.02 {
+ *num_literal_contexts = 2
+ *literal_context_map = kStaticContextMapSimpleUTF8[:]
+ } else {
+ *num_literal_contexts = 3
+ *literal_context_map = kStaticContextMapContinuation[:]
+ }
+}
+
+/* Decide if we want to use a more complex static context map containing 13
+ context values, based on the entropy reduction of histograms over the
+ first 5 bits of literals. */
+
+var kStaticContextMapComplexUTF8 = [64]uint32{
+ 11, 11, 12, 12, /* 0 special */
+ 0, 0, 0, 0, /* 4 lf */
+ 1, 1, 9, 9, /* 8 space */
+ 2, 2, 2, 2, /* !, first after space/lf and after something else. */
+ 1, 1, 1, 1, /* " */
+ 8, 3, 3, 3, /* % */
+ 1, 1, 1, 1, /* ({[ */
+ 2, 2, 2, 2, /* }]) */
+ 8, 4, 4, 4, /* :; */
+ 8, 7, 4, 4, /* . */
+ 8, 0, 0, 0, /* > */
+ 3, 3, 3, 3, /* [0..9] */
+ 5, 5, 10, 5, /* [A-Z] */
+ 5, 5, 10, 5,
+ 6, 6, 6, 6, /* [a-z] */
+ 6, 6, 6, 6,
+}
+
+func shouldUseComplexStaticContextMap(input []byte, start_pos uint, length uint, mask uint, quality int, size_hint uint, num_literal_contexts *uint, literal_context_map *[]uint32) bool {
+ /* Try the more complex static context map only for long data. */
+ if size_hint < 1<<20 {
+ return false
+ } else {
+ var end_pos uint = start_pos + length
+ var combined_histo = [32]uint32{0}
+ var context_histo = [13][32]uint32{[32]uint32{0}}
+ var total uint32 = 0
+ var entropy [3]float64
+ var dummy uint
+ var i uint
+ var utf8_lut contextLUT = getContextLUT(contextUTF8)
+ /* To make entropy calculations faster and to fit on the stack, we collect
+ histograms over the 5 most significant bits of literals: one histogram
+ without context, and 13 additional histograms, one per context value. */
+ for ; start_pos+64 <= end_pos; start_pos += 4096 {
+ var stride_end_pos uint = start_pos + 64
+ var prev2 byte = input[start_pos&mask]
+ var prev1 byte = input[(start_pos+1)&mask]
+ var pos uint
+
+ /* To speed up the analysis, we examine only 64-byte-long strides
+ at 4kB intervals. */
+ for pos = start_pos + 2; pos < stride_end_pos; pos++ {
+ var literal byte = input[pos&mask]
+ var context byte = byte(kStaticContextMapComplexUTF8[getContext(prev1, prev2, utf8_lut)])
+ total++
+ combined_histo[literal>>3]++
+ context_histo[context][literal>>3]++
+ prev2 = prev1
+ prev1 = literal
+ }
+ }
+
+ entropy[1] = shannonEntropy(combined_histo[:], 32, &dummy)
+ entropy[2] = 0
+ for i = 0; i < 13; i++ {
+ entropy[2] += shannonEntropy(context_histo[i][0:], 32, &dummy)
+ }
+
+ entropy[0] = 1.0 / float64(total)
+ entropy[1] *= entropy[0]
+ entropy[2] *= entropy[0]
+
+ /* The triggering heuristics below were tuned by compressing the individual
+ files of the silesia corpus. If we skip this kind of context modeling
+ for poorly compressible input (i.e. the entropy using context modeling
+ is above 60% of the maximal entropy) or when the expected savings per
+ symbol are less than 0.2 bits, then in every case where it triggers, the
+ final compression ratio is improved. Note however that this heuristic
+ might be too strict for some cases and could be tuned further. */
+ if entropy[2] > 3.0 || entropy[1]-entropy[2] < 0.2 {
+ return false
+ } else {
+ *num_literal_contexts = 13
+ *literal_context_map = kStaticContextMapComplexUTF8[:]
+ return true
+ }
+ }
+}
+
+func decideOverLiteralContextModeling(input []byte, start_pos uint, length uint, mask uint, quality int, size_hint uint, num_literal_contexts *uint, literal_context_map *[]uint32) {
+ if quality < minQualityForContextModeling || length < 64 {
+ return
+ } else if shouldUseComplexStaticContextMap(input, start_pos, length, mask, quality, size_hint, num_literal_contexts, literal_context_map) {
+ /* Context map was already set, nothing else to do. */
+ } else {
+ var end_pos uint = start_pos + length
+ /* Gather bi-gram data of the UTF8 byte prefixes. To speed up the
+ analysis of UTF8 data, we examine only 64-byte-long strides at 4kB
+ intervals. */
+
+ var bigram_prefix_histo = [9]uint32{0}
+ for ; start_pos+64 <= end_pos; start_pos += 4096 {
+ var lut = [4]int{0, 0, 1, 2}
+ var stride_end_pos uint = start_pos + 64
+ var prev int = lut[input[start_pos&mask]>>6] * 3
+ var pos uint
+ for pos = start_pos + 1; pos < stride_end_pos; pos++ {
+ var literal byte = input[pos&mask]
+ bigram_prefix_histo[prev+lut[literal>>6]]++
+ prev = lut[literal>>6] * 3
+ }
+ }
+
+ chooseContextMap(quality, bigram_prefix_histo[0:], num_literal_contexts, literal_context_map)
+ }
+}
+
+func shouldCompress_encode(data []byte, mask uint, last_flush_pos uint64, bytes uint, num_literals uint, num_commands uint) bool {
+ /* TODO: find more precise minimal block overhead. */
+ if bytes <= 2 {
+ return false
+ }
+ if num_commands < (bytes>>8)+2 {
+ if float64(num_literals) > 0.99*float64(bytes) {
+ var literal_histo = [256]uint32{0}
+ const kSampleRate uint32 = 13
+ const kMinEntropy float64 = 7.92
+ var bit_cost_threshold float64 = float64(bytes) * kMinEntropy / float64(kSampleRate)
+ var t uint = uint((uint32(bytes) + kSampleRate - 1) / kSampleRate)
+ var pos uint32 = uint32(last_flush_pos)
+ var i uint
+ for i = 0; i < t; i++ {
+ literal_histo[data[pos&uint32(mask)]]++
+ pos += kSampleRate
+ }
+
+ if bitsEntropy(literal_histo[:], 256) > bit_cost_threshold {
+ return false
+ }
+ }
+ }
+
+ return true
+}
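+
+/* Illustrative note (not part of the upstream translation): the sampling
+ loop above inspects every 13th byte, so a block is stored uncompressed
+ only when the entropy of roughly 8% of its literals already exceeds
+ kMinEntropy = 7.92 bits per byte, i.e. the data is essentially
+ incompressible. */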
+
+/* Chooses the literal context mode for a metablock */
+func chooseContextMode(params *encoderParams, data []byte, pos uint, mask uint, length uint) int {
+ /* We only consider a context mode other than CONTEXT_UTF8
+ at the highest qualities. */
+ if params.quality >= minQualityForHqBlockSplitting && !isMostlyUTF8(data, pos, mask, length, kMinUTF8Ratio) {
+ return contextSigned
+ }
+
+ return contextUTF8
+}
+
+func writeMetaBlockInternal(data []byte, mask uint, last_flush_pos uint64, bytes uint, is_last bool, literal_context_mode int, params *encoderParams, prev_byte byte, prev_byte2 byte, num_literals uint, commands []command, saved_dist_cache []int, dist_cache []int, storage_ix *uint, storage []byte) {
+ var wrapped_last_flush_pos uint32 = wrapPosition(last_flush_pos)
+ var last_bytes uint16
+ var last_bytes_bits byte
+ var literal_context_lut contextLUT = getContextLUT(literal_context_mode)
+ var block_params encoderParams = *params
+
+ if bytes == 0 {
+ /* Write the ISLAST and ISEMPTY bits. */
+ writeBits(2, 3, storage_ix, storage)
+
+ *storage_ix = (*storage_ix + 7) &^ 7
+ return
+ }
+
+ if !shouldCompress_encode(data, mask, last_flush_pos, bytes, num_literals, uint(len(commands))) {
+ /* Restore the distance cache, as its last update by
+ CreateBackwardReferences is now unused. */
+ copy(dist_cache, saved_dist_cache[:4])
+
+ storeUncompressedMetaBlock(is_last, data, uint(wrapped_last_flush_pos), mask, bytes, storage_ix, storage)
+ return
+ }
+
+ assert(*storage_ix <= 14)
+ last_bytes = uint16(storage[1])<<8 | uint16(storage[0])
+ last_bytes_bits = byte(*storage_ix)
+ if params.quality <= maxQualityForStaticEntropyCodes {
+ storeMetaBlockFast(data, uint(wrapped_last_flush_pos), bytes, mask, is_last, params, commands, storage_ix, storage)
+ } else if params.quality < minQualityForBlockSplit {
+ storeMetaBlockTrivial(data, uint(wrapped_last_flush_pos), bytes, mask, is_last, params, commands, storage_ix, storage)
+ } else {
+ mb := getMetaBlockSplit()
+ if params.quality < minQualityForHqBlockSplitting {
+ var num_literal_contexts uint = 1
+ var literal_context_map []uint32 = nil
+ if !params.disable_literal_context_modeling {
+ decideOverLiteralContextModeling(data, uint(wrapped_last_flush_pos), bytes, mask, params.quality, params.size_hint, &num_literal_contexts, &literal_context_map)
+ }
+
+ buildMetaBlockGreedy(data, uint(wrapped_last_flush_pos), mask, prev_byte, prev_byte2, literal_context_lut, num_literal_contexts, literal_context_map, commands, mb)
+ } else {
+ buildMetaBlock(data, uint(wrapped_last_flush_pos), mask, &block_params, prev_byte, prev_byte2, commands, literal_context_mode, mb)
+ }
+
+ if params.quality >= minQualityForOptimizeHistograms {
+ /* The number of distance symbols effectively used for distance
+ histograms. It might be less than distance alphabet size
+ for "Large Window Brotli" (32-bit). */
+ var num_effective_dist_codes uint32 = block_params.dist.alphabet_size
+ if num_effective_dist_codes > numHistogramDistanceSymbols {
+ num_effective_dist_codes = numHistogramDistanceSymbols
+ }
+
+ optimizeHistograms(num_effective_dist_codes, mb)
+ }
+
+ storeMetaBlock(data, uint(wrapped_last_flush_pos), bytes, mask, prev_byte, prev_byte2, is_last, &block_params, literal_context_mode, commands, mb, storage_ix, storage)
+ freeMetaBlockSplit(mb)
+ }
+
+ if bytes+4 < *storage_ix>>3 {
+ /* Restore the distance cache and last byte. */
+ copy(dist_cache, saved_dist_cache[:4])
+
+ storage[0] = byte(last_bytes)
+ storage[1] = byte(last_bytes >> 8)
+ *storage_ix = uint(last_bytes_bits)
+ storeUncompressedMetaBlock(is_last, data, uint(wrapped_last_flush_pos), mask, bytes, storage_ix, storage)
+ }
+}
+
+func chooseDistanceParams(params *encoderParams) {
+ var distance_postfix_bits uint32 = 0
+ var num_direct_distance_codes uint32 = 0
+
+ if params.quality >= minQualityForNonzeroDistanceParams {
+ var ndirect_msb uint32
+ if params.mode == modeFont {
+ distance_postfix_bits = 1
+ num_direct_distance_codes = 12
+ } else {
+ distance_postfix_bits = params.dist.distance_postfix_bits
+ num_direct_distance_codes = params.dist.num_direct_distance_codes
+ }
+
+ ndirect_msb = (num_direct_distance_codes >> distance_postfix_bits) & 0x0F
+ if distance_postfix_bits > maxNpostfix || num_direct_distance_codes > maxNdirect || ndirect_msb<<distance_postfix_bits != num_direct_distance_codes {
+ distance_postfix_bits = 0
+ num_direct_distance_codes = 0
+ }
+ }
+
+ initDistanceParams(params, distance_postfix_bits, num_direct_distance_codes)
+}
+
+func extendLastCommand(s *Writer, bytes *uint32, wrapped_last_processed_pos *uint32) {
+ var last_command *command = &s.commands[len(s.commands)-1]
+ var data []byte = s.ringbuffer_.buffer_
+ var mask uint32 = s.ringbuffer_.mask_
+ var max_backward_distance uint32 = ((uint32(1)) << s.params.lgwin) - windowGap
+ var last_copy_len uint32 = last_command.copy_len_ & 0x1FFFFFF
+ var last_processed_pos uint32 = *wrapped_last_processed_pos - last_copy_len
+ var max_distance uint32
+ if last_processed_pos < max_backward_distance {
+ max_distance = last_processed_pos
+ } else {
+ max_distance = max_backward_distance
+ }
+ var cmd_dist uint32 = uint32(s.dist_cache_[0])
+ var distance_code uint32 = commandRestoreDistanceCode(last_command, &s.params.dist)
+ if distance_code < numDistanceShortCodes || distance_code-(numDistanceShortCodes-1) == cmd_dist {
+ if cmd_dist <= max_distance {
+ for *bytes != 0 && data[*wrapped_last_processed_pos&mask] == data[(*wrapped_last_processed_pos-cmd_dist)&mask] {
+ last_command.copy_len_++
+ (*bytes)--
+ (*wrapped_last_processed_pos)++
+ }
+ }
+
+ /* The copy length is at most the metablock size, and thus expressible. */
+ getLengthCode(uint(last_command.insert_len_), uint(uint32(last_command.copy_len_&0x1FFFFFF)+(last_command.copy_len_>>25)), (last_command.dist_prefix_&0x3FF == 0), &last_command.cmd_prefix_)
+ }
+}
+
+/*
+ Processes the accumulated input data and writes
+ the new output meta-block to s.dst, if one has been
+ created (otherwise the processed input data is buffered internally).
+ If |is_last| or |force_flush| is true, an output meta-block is
+ always created. However, until |is_last| is true the encoder may retain up
+ to 7 bits of the last byte of output. To force the encoder to dump the
+ remaining bits, use WriteMetadata() to append an empty meta-data block.
+ Returns false if the size of the input data is larger than
+ inputBlockSize(s).
+*/
+func encodeData(s *Writer, is_last bool, force_flush bool) bool {
+ var delta uint64 = unprocessedInputSize(s)
+ var bytes uint32 = uint32(delta)
+ var wrapped_last_processed_pos uint32 = wrapPosition(s.last_processed_pos_)
+ var data []byte
+ var mask uint32
+ var literal_context_mode int
+
+ data = s.ringbuffer_.buffer_
+ mask = s.ringbuffer_.mask_
+
+ /* Adding more blocks after "last" block is forbidden. */
+ if s.is_last_block_emitted_ {
+ return false
+ }
+ if is_last {
+ s.is_last_block_emitted_ = true
+ }
+
+ if delta > uint64(inputBlockSize(s)) {
+ return false
+ }
+
+ if s.params.quality == fastTwoPassCompressionQuality {
+ if s.command_buf_ == nil || cap(s.command_buf_) < int(kCompressFragmentTwoPassBlockSize) {
+ s.command_buf_ = make([]uint32, kCompressFragmentTwoPassBlockSize)
+ s.literal_buf_ = make([]byte, kCompressFragmentTwoPassBlockSize)
+ } else {
+ s.command_buf_ = s.command_buf_[:kCompressFragmentTwoPassBlockSize]
+ s.literal_buf_ = s.literal_buf_[:kCompressFragmentTwoPassBlockSize]
+ }
+ }
+
+ if s.params.quality == fastOnePassCompressionQuality || s.params.quality == fastTwoPassCompressionQuality {
+ var storage []byte
+ var storage_ix uint = uint(s.last_bytes_bits_)
+ var table_size uint
+ var table []int
+
+ if delta == 0 && !is_last {
+ /* We have no new input data and we don't have to finish the stream, so
+ nothing to do. */
+ return true
+ }
+
+ storage = s.getStorage(int(2*bytes + 503))
+ storage[0] = byte(s.last_bytes_)
+ storage[1] = byte(s.last_bytes_ >> 8)
+ table = getHashTable(s, s.params.quality, uint(bytes), &table_size)
+ if s.params.quality == fastOnePassCompressionQuality {
+ compressFragmentFast(data[wrapped_last_processed_pos&mask:], uint(bytes), is_last, table, table_size, s.cmd_depths_[:], s.cmd_bits_[:], &s.cmd_code_numbits_, s.cmd_code_[:], &storage_ix, storage)
+ } else {
+ compressFragmentTwoPass(data[wrapped_last_processed_pos&mask:], uint(bytes), is_last, s.command_buf_, s.literal_buf_, table, table_size, &storage_ix, storage)
+ }
+
+ s.last_bytes_ = uint16(storage[storage_ix>>3])
+ s.last_bytes_bits_ = byte(storage_ix & 7)
+ updateLastProcessedPos(s)
+ s.writeOutput(storage[:storage_ix>>3])
+ return true
+ }
+ {
+ /* Theoretical max number of commands is 1 per 2 bytes. */
+ newsize := len(s.commands) + int(bytes)/2 + 1
+ if newsize > cap(s.commands) {
+ /* Reserve a bit more memory to allow merging with the next block
+ without reallocation, since reallocating would hurt speed. */
+ newsize += int(bytes/4) + 16
+
+ new_commands := make([]command, len(s.commands), newsize)
+ if s.commands != nil {
+ copy(new_commands, s.commands)
+ }
+
+ s.commands = new_commands
+ }
+ }
+
+ initOrStitchToPreviousBlock(&s.hasher_, data, uint(mask), &s.params, uint(wrapped_last_processed_pos), uint(bytes), is_last)
+
+ literal_context_mode = chooseContextMode(&s.params, data, uint(wrapPosition(s.last_flush_pos_)), uint(mask), uint(s.input_pos_-s.last_flush_pos_))
+
+ if len(s.commands) != 0 && s.last_insert_len_ == 0 {
+ extendLastCommand(s, &bytes, &wrapped_last_processed_pos)
+ }
+
+ if s.params.quality == zopflificationQuality {
+ assert(s.params.hasher.type_ == 10)
+ createZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_.(*h10), s.dist_cache_[:], &s.last_insert_len_, &s.commands, &s.num_literals_)
+ } else if s.params.quality == hqZopflificationQuality {
+ assert(s.params.hasher.type_ == 10)
+ createHqZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_, s.dist_cache_[:], &s.last_insert_len_, &s.commands, &s.num_literals_)
+ } else {
+ createBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_, s.dist_cache_[:], &s.last_insert_len_, &s.commands, &s.num_literals_)
+ }
+ {
+ var max_length uint = maxMetablockSize(&s.params)
+ var max_literals uint = max_length / 8
+ max_commands := int(max_length / 8)
+ var processed_bytes uint = uint(s.input_pos_ - s.last_flush_pos_)
+ var next_input_fits_metablock bool = (processed_bytes+inputBlockSize(s) <= max_length)
+ var should_flush bool = (s.params.quality < minQualityForBlockSplit && s.num_literals_+uint(len(s.commands)) >= maxNumDelayedSymbols)
+ /* If maximal possible additional block doesn't fit metablock, flush now. */
+ /* TODO: Postpone decision until next block arrives? */
+
+ /* If block splitting is not used, then flush as soon as enough
+ commands / literals have been produced. */
+ if !is_last && !force_flush && !should_flush && next_input_fits_metablock && s.num_literals_ < max_literals && len(s.commands) < max_commands {
+ /* Merge with next input block. Everything will happen later. */
+ if updateLastProcessedPos(s) {
+ hasherReset(s.hasher_)
+ }
+
+ return true
+ }
+ }
+
+ /* Create the last insert-only command. */
+ if s.last_insert_len_ > 0 {
+ s.commands = append(s.commands, makeInsertCommand(s.last_insert_len_))
+ s.num_literals_ += s.last_insert_len_
+ s.last_insert_len_ = 0
+ }
+
+ if !is_last && s.input_pos_ == s.last_flush_pos_ {
+ /* We have no new input data and we don't have to finish the stream, so
+ nothing to do. */
+ return true
+ }
+
+ assert(s.input_pos_ >= s.last_flush_pos_)
+ assert(s.input_pos_ > s.last_flush_pos_ || is_last)
+ assert(s.input_pos_-s.last_flush_pos_ <= 1<<24)
+ {
+ var metablock_size uint32 = uint32(s.input_pos_ - s.last_flush_pos_)
+ var storage []byte = s.getStorage(int(2*metablock_size + 503))
+ var storage_ix uint = uint(s.last_bytes_bits_)
+ storage[0] = byte(s.last_bytes_)
+ storage[1] = byte(s.last_bytes_ >> 8)
+ writeMetaBlockInternal(data, uint(mask), s.last_flush_pos_, uint(metablock_size), is_last, literal_context_mode, &s.params, s.prev_byte_, s.prev_byte2_, s.num_literals_, s.commands, s.saved_dist_cache_[:], s.dist_cache_[:], &storage_ix, storage)
+ s.last_bytes_ = uint16(storage[storage_ix>>3])
+ s.last_bytes_bits_ = byte(storage_ix & 7)
+ s.last_flush_pos_ = s.input_pos_
+ if updateLastProcessedPos(s) {
+ hasherReset(s.hasher_)
+ }
+
+ if s.last_flush_pos_ > 0 {
+ s.prev_byte_ = data[(uint32(s.last_flush_pos_)-1)&mask]
+ }
+
+ if s.last_flush_pos_ > 1 {
+ s.prev_byte2_ = data[uint32(s.last_flush_pos_-2)&mask]
+ }
+
+ s.commands = s.commands[:0]
+ s.num_literals_ = 0
+
+ /* Save the state of the distance cache in case we need to restore it for
+ emitting an uncompressed block. */
+ copy(s.saved_dist_cache_[:], s.dist_cache_[:])
+
+ s.writeOutput(storage[:storage_ix>>3])
+ return true
+ }
+}
+
+/* Dumps remaining output bits and metadata header to |header|.
+ Returns number of produced bytes.
+ REQUIRED: |header| should be 8-byte aligned and at least 16 bytes long.
+ REQUIRED: |block_size| <= (1 << 24). */
+func writeMetadataHeader(s *Writer, block_size uint, header []byte) uint {
+ storage_ix := uint(s.last_bytes_bits_)
+ header[0] = byte(s.last_bytes_)
+ header[1] = byte(s.last_bytes_ >> 8)
+ s.last_bytes_ = 0
+ s.last_bytes_bits_ = 0
+
+ writeBits(1, 0, &storage_ix, header)
+ writeBits(2, 3, &storage_ix, header)
+ writeBits(1, 0, &storage_ix, header)
+ if block_size == 0 {
+ writeBits(2, 0, &storage_ix, header)
+ } else {
+ var nbits uint32
+ if block_size == 1 {
+ nbits = 0
+ } else {
+ nbits = log2FloorNonZero(uint(uint32(block_size)-1)) + 1
+ }
+ var nbytes uint32 = (nbits + 7) / 8
+ writeBits(2, uint64(nbytes), &storage_ix, header)
+ writeBits(uint(8*nbytes), uint64(block_size)-1, &storage_ix, header)
+ }
+
+ return (storage_ix + 7) >> 3
+}
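+
+/* Illustrative example (not part of the upstream translation): for
+ block_size = 300, nbits = log2FloorNonZero(299) + 1 = 9, hence nbytes = 2,
+ and the header encodes MSKIPBYTES = 2 followed by the 16-bit value 299
+ (block_size - 1), matching the metadata meta-block layout of RFC 7932
+ section 9.2. */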
+
+func injectBytePaddingBlock(s *Writer) {
+ var seal uint32 = uint32(s.last_bytes_)
+ var seal_bits uint = uint(s.last_bytes_bits_)
+ s.last_bytes_ = 0
+ s.last_bytes_bits_ = 0
+
+ /* is_last = 0, data_nibbles = 11, reserved = 0, meta_nibbles = 00 */
+ seal |= 0x6 << seal_bits
+
+ seal_bits += 6
+
+ destination := s.tiny_buf_.u8[:]
+
+ destination[0] = byte(seal)
+ if seal_bits > 8 {
+ destination[1] = byte(seal >> 8)
+ }
+ if seal_bits > 16 {
+ destination[2] = byte(seal >> 16)
+ }
+ s.writeOutput(destination[:(seal_bits+7)>>3])
+}
+
+func checkFlushComplete(s *Writer) {
+ if s.stream_state_ == streamFlushRequested && s.err == nil {
+ s.stream_state_ = streamProcessing
+ }
+}
+
+func encoderCompressStreamFast(s *Writer, op int, available_in *uint, next_in *[]byte) bool {
+ var block_size_limit uint = uint(1) << s.params.lgwin
+ var buf_size uint = brotli_min_size_t(kCompressFragmentTwoPassBlockSize, brotli_min_size_t(*available_in, block_size_limit))
+ var command_buf []uint32 = nil
+ var literal_buf []byte = nil
+ if s.params.quality != fastOnePassCompressionQuality && s.params.quality != fastTwoPassCompressionQuality {
+ return false
+ }
+
+ if s.params.quality == fastTwoPassCompressionQuality {
+ if s.command_buf_ == nil || cap(s.command_buf_) < int(buf_size) {
+ s.command_buf_ = make([]uint32, buf_size)
+ s.literal_buf_ = make([]byte, buf_size)
+ } else {
+ s.command_buf_ = s.command_buf_[:buf_size]
+ s.literal_buf_ = s.literal_buf_[:buf_size]
+ }
+
+ command_buf = s.command_buf_
+ literal_buf = s.literal_buf_
+ }
+
+ for {
+ if s.stream_state_ == streamFlushRequested && s.last_bytes_bits_ != 0 {
+ injectBytePaddingBlock(s)
+ continue
+ }
+
+ /* Compress block only when stream is not
+ finished, there is no pending flush request, and there is either
+ additional input or pending operation. */
+ if s.stream_state_ == streamProcessing && (*available_in != 0 || op != int(operationProcess)) {
+ var block_size uint = brotli_min_size_t(block_size_limit, *available_in)
+ var is_last bool = (*available_in == block_size) && (op == int(operationFinish))
+ var force_flush bool = (*available_in == block_size) && (op == int(operationFlush))
+ var max_out_size uint = 2*block_size + 503
+ var storage []byte = nil
+ var storage_ix uint = uint(s.last_bytes_bits_)
+ var table_size uint
+ var table []int
+
+ if force_flush && block_size == 0 {
+ s.stream_state_ = streamFlushRequested
+ continue
+ }
+
+ storage = s.getStorage(int(max_out_size))
+
+ storage[0] = byte(s.last_bytes_)
+ storage[1] = byte(s.last_bytes_ >> 8)
+ table = getHashTable(s, s.params.quality, block_size, &table_size)
+
+ if s.params.quality == fastOnePassCompressionQuality {
+ compressFragmentFast(*next_in, block_size, is_last, table, table_size, s.cmd_depths_[:], s.cmd_bits_[:], &s.cmd_code_numbits_, s.cmd_code_[:], &storage_ix, storage)
+ } else {
+ compressFragmentTwoPass(*next_in, block_size, is_last, command_buf, literal_buf, table, table_size, &storage_ix, storage)
+ }
+
+ *next_in = (*next_in)[block_size:]
+ *available_in -= block_size
+ var out_bytes uint = storage_ix >> 3
+ s.writeOutput(storage[:out_bytes])
+
+ s.last_bytes_ = uint16(storage[storage_ix>>3])
+ s.last_bytes_bits_ = byte(storage_ix & 7)
+
+ if force_flush {
+ s.stream_state_ = streamFlushRequested
+ }
+ if is_last {
+ s.stream_state_ = streamFinished
+ }
+ continue
+ }
+
+ break
+ }
+
+ checkFlushComplete(s)
+ return true
+}
+
+func processMetadata(s *Writer, available_in *uint, next_in *[]byte) bool {
+ if *available_in > 1<<24 {
+ return false
+ }
+
+ /* Switch to metadata block workflow, if required. */
+ if s.stream_state_ == streamProcessing {
+ s.remaining_metadata_bytes_ = uint32(*available_in)
+ s.stream_state_ = streamMetadataHead
+ }
+
+ if s.stream_state_ != streamMetadataHead && s.stream_state_ != streamMetadataBody {
+ return false
+ }
+
+ for {
+ if s.stream_state_ == streamFlushRequested && s.last_bytes_bits_ != 0 {
+ injectBytePaddingBlock(s)
+ continue
+ }
+
+ if s.input_pos_ != s.last_flush_pos_ {
+ var result bool = encodeData(s, false, true)
+ if !result {
+ return false
+ }
+ continue
+ }
+
+ if s.stream_state_ == streamMetadataHead {
+ n := writeMetadataHeader(s, uint(s.remaining_metadata_bytes_), s.tiny_buf_.u8[:])
+ s.writeOutput(s.tiny_buf_.u8[:n])
+ s.stream_state_ = streamMetadataBody
+ continue
+ } else {
+ /* Exit workflow only when there is no more input and no more output.
+ Otherwise client may continue producing empty metadata blocks. */
+ if s.remaining_metadata_bytes_ == 0 {
+ s.remaining_metadata_bytes_ = math.MaxUint32
+ s.stream_state_ = streamProcessing
+ break
+ }
+
+ /* This guarantees progress in "TakeOutput" workflow. */
+ var c uint32 = brotli_min_uint32_t(s.remaining_metadata_bytes_, 16)
+ copy(s.tiny_buf_.u8[:], (*next_in)[:c])
+ *next_in = (*next_in)[c:]
+ *available_in -= uint(c)
+ s.remaining_metadata_bytes_ -= c
+ s.writeOutput(s.tiny_buf_.u8[:c])
+
+ continue
+ }
+ }
+
+ return true
+}
+
+func updateSizeHint(s *Writer, available_in uint) {
+ if s.params.size_hint == 0 {
+ var delta uint64 = unprocessedInputSize(s)
+ var tail uint64 = uint64(available_in)
+ var limit uint32 = 1 << 30
+ var total uint32
+ if (delta >= uint64(limit)) || (tail >= uint64(limit)) || ((delta + tail) >= uint64(limit)) {
+ total = limit
+ } else {
+ total = uint32(delta + tail)
+ }
+
+ s.params.size_hint = uint(total)
+ }
+}
+
+func encoderCompressStream(s *Writer, op int, available_in *uint, next_in *[]byte) bool {
+ if !ensureInitialized(s) {
+ return false
+ }
+
+ /* Unfinished metadata block; check requirements. */
+ if s.remaining_metadata_bytes_ != math.MaxUint32 {
+ if uint32(*available_in) != s.remaining_metadata_bytes_ {
+ return false
+ }
+ if op != int(operationEmitMetadata) {
+ return false
+ }
+ }
+
+ if op == int(operationEmitMetadata) {
+ updateSizeHint(s, 0) /* First data metablock might be emitted here. */
+ return processMetadata(s, available_in, next_in)
+ }
+
+ if s.stream_state_ == streamMetadataHead || s.stream_state_ == streamMetadataBody {
+ return false
+ }
+
+ if s.stream_state_ != streamProcessing && *available_in != 0 {
+ return false
+ }
+
+ if s.params.quality == fastOnePassCompressionQuality || s.params.quality == fastTwoPassCompressionQuality {
+ return encoderCompressStreamFast(s, op, available_in, next_in)
+ }
+
+ for {
+ var remaining_block_size uint = remainingInputBlockSize(s)
+
+ if remaining_block_size != 0 && *available_in != 0 {
+ var copy_input_size uint = brotli_min_size_t(remaining_block_size, *available_in)
+ copyInputToRingBuffer(s, copy_input_size, *next_in)
+ *next_in = (*next_in)[copy_input_size:]
+ *available_in -= copy_input_size
+ continue
+ }
+
+ if s.stream_state_ == streamFlushRequested && s.last_bytes_bits_ != 0 {
+ injectBytePaddingBlock(s)
+ continue
+ }
+
+ /* Compress data only when stream is not
+ finished and there is no pending flush request. */
+ if s.stream_state_ == streamProcessing {
+ if remaining_block_size == 0 || op != int(operationProcess) {
+ var is_last bool = ((*available_in == 0) && op == int(operationFinish))
+ var force_flush bool = ((*available_in == 0) && op == int(operationFlush))
+ var result bool
+ updateSizeHint(s, *available_in)
+ result = encodeData(s, is_last, force_flush)
+ if !result {
+ return false
+ }
+ if force_flush {
+ s.stream_state_ = streamFlushRequested
+ }
+ if is_last {
+ s.stream_state_ = streamFinished
+ }
+ continue
+ }
+ }
+
+ break
+ }
+
+ checkFlushComplete(s)
+ return true
+}
+
+func (w *Writer) writeOutput(data []byte) {
+ if w.err != nil {
+ return
+ }
+
+ _, w.err = w.dst.Write(data)
+ if w.err == nil {
+ checkFlushComplete(w)
+ }
+}
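+
+/* Illustrative usage sketch (not part of the vendored sources): the
+ encoder state machine above is normally driven through the package's
+ exported io.Writer wrapper:
+
+ package main
+
+ import (
+  "io"
+  "os"
+
+  "github.com/andybalholm/brotli"
+ )
+
+ func main() {
+  w := brotli.NewWriterLevel(os.Stdout, brotli.DefaultCompression)
+  defer w.Close() // Close emits the last meta-block and any trailing bits.
+  if _, err := io.Copy(w, os.Stdin); err != nil {
+   panic(err)
+  }
+ }
+*/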
diff --git a/vendor/github.com/andybalholm/brotli/encoder_dict.go b/vendor/github.com/andybalholm/brotli/encoder_dict.go
new file mode 100644
index 0000000..55c051c
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/encoder_dict.go
@@ -0,0 +1,22 @@
+package brotli
+
+/* Dictionary data (words and transforms) for 1 possible context */
+type encoderDictionary struct {
+ words *dictionary
+ cutoffTransformsCount uint32
+ cutoffTransforms uint64
+ hash_table []uint16
+ buckets []uint16
+ dict_words []dictWord
+}
+
+func initEncoderDictionary(dict *encoderDictionary) {
+ dict.words = getDictionary()
+
+ dict.hash_table = kStaticDictionaryHash[:]
+ dict.buckets = kStaticDictionaryBuckets[:]
+ dict.dict_words = kStaticDictionaryWords[:]
+
+ dict.cutoffTransformsCount = kCutoffTransformsCount
+ dict.cutoffTransforms = kCutoffTransforms
+}
diff --git a/vendor/github.com/andybalholm/brotli/entropy_encode.go b/vendor/github.com/andybalholm/brotli/entropy_encode.go
new file mode 100644
index 0000000..3f469a3
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/entropy_encode.go
@@ -0,0 +1,592 @@
+package brotli
+
+import "math"
+
+/* Copyright 2010 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Entropy encoding (Huffman) utilities. */
+
+/* A node of a Huffman tree. */
+type huffmanTree struct {
+ total_count_ uint32
+ index_left_ int16
+ index_right_or_value_ int16
+}
+
+func initHuffmanTree(self *huffmanTree, count uint32, left int16, right int16) {
+ self.total_count_ = count
+ self.index_left_ = left
+ self.index_right_or_value_ = right
+}
+
+/* Input size optimized Shell sort. */
+type huffmanTreeComparator func(huffmanTree, huffmanTree) bool
+
+var sortHuffmanTreeItems_gaps = []uint{132, 57, 23, 10, 4, 1}
+
+func sortHuffmanTreeItems(items []huffmanTree, n uint, comparator huffmanTreeComparator) {
+ if n < 13 {
+ /* Insertion sort. */
+ var i uint
+ for i = 1; i < n; i++ {
+ var tmp huffmanTree = items[i]
+ var k uint = i
+ var j uint = i - 1
+ for comparator(tmp, items[j]) {
+ items[k] = items[j]
+ k = j
+ if j == 0 {
+ break
+ }
+ j--
+ }
+
+ items[k] = tmp
+ }
+
+ return
+ } else {
+ var g int
+ if n < 57 {
+ g = 2
+ } else {
+ g = 0
+ }
+ for ; g < 6; g++ {
+ var gap uint = sortHuffmanTreeItems_gaps[g]
+ var i uint
+ for i = gap; i < n; i++ {
+ var j uint = i
+ var tmp huffmanTree = items[i]
+ for ; j >= gap && comparator(tmp, items[j-gap]); j -= gap {
+ items[j] = items[j-gap]
+ }
+
+ items[j] = tmp
+ }
+ }
+ }
+}
+
+/* Returns 1 if assignment of depths succeeded, otherwise 0. */
+func setDepth(p0 int, pool []huffmanTree, depth []byte, max_depth int) bool {
+ var stack [16]int
+ var level int = 0
+ var p int = p0
+ assert(max_depth <= 15)
+ stack[0] = -1
+ for {
+ if pool[p].index_left_ >= 0 {
+ level++
+ if level > max_depth {
+ return false
+ }
+ stack[level] = int(pool[p].index_right_or_value_)
+ p = int(pool[p].index_left_)
+ continue
+ } else {
+ depth[pool[p].index_right_or_value_] = byte(level)
+ }
+
+ for level >= 0 && stack[level] == -1 {
+ level--
+ }
+ if level < 0 {
+ return true
+ }
+ p = stack[level]
+ stack[level] = -1
+ }
+}
+
+/* Sort the root nodes, least popular first. */
+func sortHuffmanTree(v0 huffmanTree, v1 huffmanTree) bool {
+ if v0.total_count_ != v1.total_count_ {
+ return v0.total_count_ < v1.total_count_
+ }
+
+ return v0.index_right_or_value_ > v1.index_right_or_value_
+}
+
+/* This function will create a Huffman tree.
+
+ The catch here is that the tree cannot be arbitrarily deep.
+ Brotli specifies a maximum depth of 15 bits for "code trees"
+ and 7 bits for "code length code trees."
+
+ count_limit is the value that is faked as the minimum population count,
+ and this minimum is raised until the tree satisfies the maximum depth
+ requirement.
+
+ This algorithm does not perform well for very long data blocks,
+ especially when population counts are larger than 2**tree_limit, but
+ we are not planning to use this with extremely long blocks.
+
+ See http://en.wikipedia.org/wiki/Huffman_coding */
+func createHuffmanTree(data []uint32, length uint, tree_limit int, tree []huffmanTree, depth []byte) {
+ var count_limit uint32
+ var sentinel huffmanTree
+ initHuffmanTree(&sentinel, math.MaxUint32, -1, -1)
+
+ /* For block sizes below 64 kB, we never need to do a second iteration
+ of this loop. Probably all of our block sizes will be smaller than
+ that, so this loop is mostly of academic interest. If we actually
+ would need this, we would be better off with the Katajainen algorithm. */
+ for count_limit = 1; ; count_limit *= 2 {
+ var n uint = 0
+ var i uint
+ var j uint
+ var k uint
+ for i = length; i != 0; {
+ i--
+ if data[i] != 0 {
+ var count uint32 = brotli_max_uint32_t(data[i], count_limit)
+ initHuffmanTree(&tree[n], count, -1, int16(i))
+ n++
+ }
+ }
+
+ if n == 1 {
+ depth[tree[0].index_right_or_value_] = 1 /* Only one element. */
+ break
+ }
+
+ sortHuffmanTreeItems(tree, n, huffmanTreeComparator(sortHuffmanTree))
+
+ /* The nodes are:
+ [0, n): the sorted leaf nodes that we start with.
+ [n]: we add a sentinel here.
+ [n + 1, 2n): new parent nodes are added here, starting from
+ (n+1). These are naturally in ascending order.
+ [2n]: we add a sentinel at the end as well.
+ There will be (2n+1) elements at the end. */
+ tree[n] = sentinel
+
+ tree[n+1] = sentinel
+
+ i = 0 /* Points to the next leaf node. */
+ j = n + 1 /* Points to the next non-leaf node. */
+ for k = n - 1; k != 0; k-- {
+ var left uint
+ var right uint
+ if tree[i].total_count_ <= tree[j].total_count_ {
+ left = i
+ i++
+ } else {
+ left = j
+ j++
+ }
+
+ if tree[i].total_count_ <= tree[j].total_count_ {
+ right = i
+ i++
+ } else {
+ right = j
+ j++
+ }
+ {
+ /* The sentinel node becomes the parent node. */
+ var j_end uint = 2*n - k
+ tree[j_end].total_count_ = tree[left].total_count_ + tree[right].total_count_
+ tree[j_end].index_left_ = int16(left)
+ tree[j_end].index_right_or_value_ = int16(right)
+
+ /* Add back the last sentinel node. */
+ tree[j_end+1] = sentinel
+ }
+ }
+
+ if setDepth(int(2*n-1), tree[0:], depth, tree_limit) {
+ /* We need to pack the Huffman tree in tree_limit bits. If this was not
+ successful, add fake entities to the lowest values and retry. */
+ break
+ }
+ }
+}
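+
+/* Illustrative note (not part of the upstream translation): a
+ Fibonacci-like count distribution such as {1, 1, 2, 3, 5, 8, ...} yields
+ the deepest possible trees; when setDepth fails for tree_limit, doubling
+ count_limit treats the rarest symbols as more frequent, so the rebuilt
+ tree becomes shallower at a small cost in code length. */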
+
+func reverse(v []byte, start uint, end uint) {
+ end--
+ for start < end {
+ var tmp byte = v[start]
+ v[start] = v[end]
+ v[end] = tmp
+ start++
+ end--
+ }
+}
+
+func writeHuffmanTreeRepetitions(previous_value byte, value byte, repetitions uint, tree_size *uint, tree []byte, extra_bits_data []byte) {
+ assert(repetitions > 0)
+ if previous_value != value {
+ tree[*tree_size] = value
+ extra_bits_data[*tree_size] = 0
+ (*tree_size)++
+ repetitions--
+ }
+
+ if repetitions == 7 {
+ tree[*tree_size] = value
+ extra_bits_data[*tree_size] = 0
+ (*tree_size)++
+ repetitions--
+ }
+
+ if repetitions < 3 {
+ var i uint
+ for i = 0; i < repetitions; i++ {
+ tree[*tree_size] = value
+ extra_bits_data[*tree_size] = 0
+ (*tree_size)++
+ }
+ } else {
+ var start uint = *tree_size
+ repetitions -= 3
+ for {
+ tree[*tree_size] = repeatPreviousCodeLength
+ extra_bits_data[*tree_size] = byte(repetitions & 0x3)
+ (*tree_size)++
+ repetitions >>= 2
+ if repetitions == 0 {
+ break
+ }
+
+ repetitions--
+ }
+
+ reverse(tree, start, *tree_size)
+ reverse(extra_bits_data, start, *tree_size)
+ }
+}
+
+func writeHuffmanTreeRepetitionsZeros(repetitions uint, tree_size *uint, tree []byte, extra_bits_data []byte) {
+ if repetitions == 11 {
+ tree[*tree_size] = 0
+ extra_bits_data[*tree_size] = 0
+ (*tree_size)++
+ repetitions--
+ }
+
+ if repetitions < 3 {
+ var i uint
+ for i = 0; i < repetitions; i++ {
+ tree[*tree_size] = 0
+ extra_bits_data[*tree_size] = 0
+ (*tree_size)++
+ }
+ } else {
+ var start uint = *tree_size
+ repetitions -= 3
+ for {
+ tree[*tree_size] = repeatZeroCodeLength
+ extra_bits_data[*tree_size] = byte(repetitions & 0x7)
+ (*tree_size)++
+ repetitions >>= 3
+ if repetitions == 0 {
+ break
+ }
+
+ repetitions--
+ }
+
+ reverse(tree, start, *tree_size)
+ reverse(extra_bits_data, start, *tree_size)
+ }
+}
+
+/* Change the population counts so that the subsequent Huffman-tree
+ compression, especially its RLE part, is more likely to compress the
+ data efficiently.
+
+ length contains the size of the histogram.
+ counts contains the population counts.
+ good_for_rle is a buffer of at least |length| bytes. */
+func optimizeHuffmanCountsForRLE(length uint, counts []uint32, good_for_rle []byte) {
+ var nonzero_count uint = 0
+ var stride uint
+ var limit uint
+ var sum uint
+ var streak_limit uint = 1240
+ var i uint
+ /* Let's make the Huffman code more compatible with RLE encoding. */
+ for i = 0; i < length; i++ {
+ if counts[i] != 0 {
+ nonzero_count++
+ }
+ }
+
+ if nonzero_count < 16 {
+ return
+ }
+
+ for length != 0 && counts[length-1] == 0 {
+ length--
+ }
+
+ if length == 0 {
+ return /* All zeros. */
+ }
+
+ /* Now counts[0..length - 1] does not have trailing zeros. */
+ {
+ var nonzeros uint = 0
+ var smallest_nonzero uint32 = 1 << 30
+ for i = 0; i < length; i++ {
+ if counts[i] != 0 {
+ nonzeros++
+ if smallest_nonzero > counts[i] {
+ smallest_nonzero = counts[i]
+ }
+ }
+ }
+
+ if nonzeros < 5 {
+ /* Small histogram will model it well. */
+ return
+ }
+
+ if smallest_nonzero < 4 {
+ var zeros uint = length - nonzeros
+ if zeros < 6 {
+ for i = 1; i < length-1; i++ {
+ if counts[i-1] != 0 && counts[i] == 0 && counts[i+1] != 0 {
+ counts[i] = 1
+ }
+ }
+ }
+ }
+
+ if nonzeros < 28 {
+ return
+ }
+ }
+
+ /* 2) Let's mark all population counts that already can be encoded
+ with an RLE code. */
+ for i := 0; i < int(length); i++ {
+ good_for_rle[i] = 0
+ }
+ {
+ var symbol uint32 = counts[0]
+ /* Let's not spoil any of the existing good RLE codes.
+ Mark any sequence of 0's longer than 5 as good_for_rle.
+ Mark any sequence of non-0's longer than 7 as good_for_rle. */
+
+ var step uint = 0
+ for i = 0; i <= length; i++ {
+ if i == length || counts[i] != symbol {
+ if (symbol == 0 && step >= 5) || (symbol != 0 && step >= 7) {
+ var k uint
+ for k = 0; k < step; k++ {
+ good_for_rle[i-k-1] = 1
+ }
+ }
+
+ step = 1
+ if i != length {
+ symbol = counts[i]
+ }
+ } else {
+ step++
+ }
+ }
+ }
+
+ /* 3) Let's replace those population counts that lead to more RLE codes.
+ Math here is in 24.8 fixed point representation. */
+ stride = 0
+
+ limit = uint(256*(counts[0]+counts[1]+counts[2])/3 + 420)
+ sum = 0
+ for i = 0; i <= length; i++ {
+ if i == length || good_for_rle[i] != 0 || (i != 0 && good_for_rle[i-1] != 0) || (256*counts[i]-uint32(limit)+uint32(streak_limit)) >= uint32(2*streak_limit) {
+ if stride >= 4 || (stride >= 3 && sum == 0) {
+ var k uint
+ var count uint = (sum + stride/2) / stride
+ /* The stride must end, collapse what we have, if we have enough (4). */
+ if count == 0 {
+ count = 1
+ }
+
+ if sum == 0 {
+ /* Don't make an all zeros stride to be upgraded to ones. */
+ count = 0
+ }
+
+ for k = 0; k < stride; k++ {
+ /* We don't want to change the value at counts[i],
+ which already belongs to the next stride. Hence the -1. */
+ counts[i-k-1] = uint32(count)
+ }
+ }
+
+ stride = 0
+ sum = 0
+ if i < length-2 {
+ /* All interesting strides have a count of at least 4,
+ at least when non-zero. */
+ limit = uint(256*(counts[i]+counts[i+1]+counts[i+2])/3 + 420)
+ } else if i < length {
+ limit = uint(256 * counts[i])
+ } else {
+ limit = 0
+ }
+ }
+
+ stride++
+ if i != length {
+ sum += uint(counts[i])
+ if stride >= 4 {
+ limit = (256*sum + stride/2) / stride
+ }
+
+ if stride == 4 {
+ limit += 120
+ }
+ }
+ }
+}
+
+func decideOverRLEUse(depth []byte, length uint, use_rle_for_non_zero *bool, use_rle_for_zero *bool) {
+ var total_reps_zero uint = 0
+ var total_reps_non_zero uint = 0
+ var count_reps_zero uint = 1
+ var count_reps_non_zero uint = 1
+ var i uint
+ for i = 0; i < length; {
+ var value byte = depth[i]
+ var reps uint = 1
+ var k uint
+ for k = i + 1; k < length && depth[k] == value; k++ {
+ reps++
+ }
+
+ if reps >= 3 && value == 0 {
+ total_reps_zero += reps
+ count_reps_zero++
+ }
+
+ if reps >= 4 && value != 0 {
+ total_reps_non_zero += reps
+ count_reps_non_zero++
+ }
+
+ i += reps
+ }
+
+ *use_rle_for_non_zero = total_reps_non_zero > count_reps_non_zero*2
+ *use_rle_for_zero = total_reps_zero > count_reps_zero*2
+}
+
+/* Write a Huffman tree from bit depths into the bit-stream representation
+ of a Huffman tree. The generated Huffman tree is to be compressed once
+ more using a Huffman code. */
+func writeHuffmanTree(depth []byte, length uint, tree_size *uint, tree []byte, extra_bits_data []byte) {
+ var previous_value byte = initialRepeatedCodeLength
+ var i uint
+ var use_rle_for_non_zero bool = false
+ var use_rle_for_zero bool = false
+ var new_length uint = length
+ /* Throw away trailing zeros. */
+ for i = 0; i < length; i++ {
+ if depth[length-i-1] == 0 {
+ new_length--
+ } else {
+ break
+ }
+ }
+
+	/* First gather statistics on whether it is a good idea to use RLE. */
+ if length > 50 {
+ /* Find RLE coding for longer codes.
+ Shorter codes seem not to benefit from RLE. */
+ decideOverRLEUse(depth, new_length, &use_rle_for_non_zero, &use_rle_for_zero)
+ }
+
+ /* Actual RLE coding. */
+ for i = 0; i < new_length; {
+ var value byte = depth[i]
+ var reps uint = 1
+ if (value != 0 && use_rle_for_non_zero) || (value == 0 && use_rle_for_zero) {
+ var k uint
+ for k = i + 1; k < new_length && depth[k] == value; k++ {
+ reps++
+ }
+ }
+
+ if value == 0 {
+ writeHuffmanTreeRepetitionsZeros(reps, tree_size, tree, extra_bits_data)
+ } else {
+ writeHuffmanTreeRepetitions(previous_value, value, reps, tree_size, tree, extra_bits_data)
+ previous_value = value
+ }
+
+ i += reps
+ }
+}
+
+var reverseBits_kLut = [16]uint{
+ 0x00,
+ 0x08,
+ 0x04,
+ 0x0C,
+ 0x02,
+ 0x0A,
+ 0x06,
+ 0x0E,
+ 0x01,
+ 0x09,
+ 0x05,
+ 0x0D,
+ 0x03,
+ 0x0B,
+ 0x07,
+ 0x0F,
+}
+
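+/* reverseBits reverses the lowest num_bits bits of bits: for example,
+   reverseBits(3, 6) == 3 (0b110 -> 0b011). The low 4 bits are reversed via
+   the lookup table (accumulated 4 bits at a time for longer codes), and the
+   result is then shifted right so that exactly num_bits bits remain. */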
+func reverseBits(num_bits uint, bits uint16) uint16 {
+ var retval uint = reverseBits_kLut[bits&0x0F]
+ var i uint
+ for i = 4; i < num_bits; i += 4 {
+ retval <<= 4
+ bits = uint16(bits >> 4)
+ retval |= reverseBits_kLut[bits&0x0F]
+ }
+
+ retval >>= ((0 - num_bits) & 0x03)
+ return uint16(retval)
+}
+
+/* 0..15 are values for bits */
+const maxHuffmanBits = 16
+
+/* Get the actual bit values for a tree of bit depths. */
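+/* For example, depths {1, 2, 3, 3} yield the canonical codes 0, 10, 110 and
+   111, which are stored bit-reversed as 0b0, 0b01, 0b011 and 0b111 so that
+   they can be emitted LSB-first by the bit writer. */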
+func convertBitDepthsToSymbols(depth []byte, len uint, bits []uint16) {
+ var bl_count = [maxHuffmanBits]uint16{0}
+ var next_code [maxHuffmanBits]uint16
+ var i uint
+	/* In Brotli, all bit depths are in [1..15];
+	   a bit depth of 0 means that the symbol does not exist. */
+
+ var code int = 0
+ for i = 0; i < len; i++ {
+ bl_count[depth[i]]++
+ }
+
+ bl_count[0] = 0
+ next_code[0] = 0
+ for i = 1; i < maxHuffmanBits; i++ {
+ code = (code + int(bl_count[i-1])) << 1
+ next_code[i] = uint16(code)
+ }
+
+ for i = 0; i < len; i++ {
+ if depth[i] != 0 {
+ bits[i] = reverseBits(uint(depth[i]), next_code[depth[i]])
+ next_code[depth[i]]++
+ }
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/entropy_encode_static.go b/vendor/github.com/andybalholm/brotli/entropy_encode_static.go
new file mode 100644
index 0000000..5ddf3fc
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/entropy_encode_static.go
@@ -0,0 +1,4394 @@
+package brotli
+
+var kCodeLengthDepth = [18]byte{4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 0, 4, 4}
+
+var kStaticCommandCodeDepth = [numCommandSymbols]byte{
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 9,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+ 11,
+}
+
+var kStaticDistanceCodeDepth = [64]byte{
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+ 6,
+}
+
+var kCodeLengthBits = [18]uint32{0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 15, 31, 0, 11, 7}
+
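+/* Writes the static code-length code as a single pre-serialized 40-bit
+   pattern rather than encoding it symbol by symbol. */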
+func storeStaticCodeLengthCode(storage_ix *uint, storage []byte) {
+ writeBits(40, 0x0000FF55555554, storage_ix, storage)
+}
+
+var kZeroRepsBits = [numCommandSymbols]uint64{
+ 0x00000000,
+ 0x00000000,
+ 0x00000000,
+ 0x00000007,
+ 0x00000017,
+ 0x00000027,
+ 0x00000037,
+ 0x00000047,
+ 0x00000057,
+ 0x00000067,
+ 0x00000077,
+ 0x00000770,
+ 0x00000b87,
+ 0x00001387,
+ 0x00001b87,
+ 0x00002387,
+ 0x00002b87,
+ 0x00003387,
+ 0x00003b87,
+ 0x00000397,
+ 0x00000b97,
+ 0x00001397,
+ 0x00001b97,
+ 0x00002397,
+ 0x00002b97,
+ 0x00003397,
+ 0x00003b97,
+ 0x000003a7,
+ 0x00000ba7,
+ 0x000013a7,
+ 0x00001ba7,
+ 0x000023a7,
+ 0x00002ba7,
+ 0x000033a7,
+ 0x00003ba7,
+ 0x000003b7,
+ 0x00000bb7,
+ 0x000013b7,
+ 0x00001bb7,
+ 0x000023b7,
+ 0x00002bb7,
+ 0x000033b7,
+ 0x00003bb7,
+ 0x000003c7,
+ 0x00000bc7,
+ 0x000013c7,
+ 0x00001bc7,
+ 0x000023c7,
+ 0x00002bc7,
+ 0x000033c7,
+ 0x00003bc7,
+ 0x000003d7,
+ 0x00000bd7,
+ 0x000013d7,
+ 0x00001bd7,
+ 0x000023d7,
+ 0x00002bd7,
+ 0x000033d7,
+ 0x00003bd7,
+ 0x000003e7,
+ 0x00000be7,
+ 0x000013e7,
+ 0x00001be7,
+ 0x000023e7,
+ 0x00002be7,
+ 0x000033e7,
+ 0x00003be7,
+ 0x000003f7,
+ 0x00000bf7,
+ 0x000013f7,
+ 0x00001bf7,
+ 0x000023f7,
+ 0x00002bf7,
+ 0x000033f7,
+ 0x00003bf7,
+ 0x0001c387,
+ 0x0005c387,
+ 0x0009c387,
+ 0x000dc387,
+ 0x0011c387,
+ 0x0015c387,
+ 0x0019c387,
+ 0x001dc387,
+ 0x0001cb87,
+ 0x0005cb87,
+ 0x0009cb87,
+ 0x000dcb87,
+ 0x0011cb87,
+ 0x0015cb87,
+ 0x0019cb87,
+ 0x001dcb87,
+ 0x0001d387,
+ 0x0005d387,
+ 0x0009d387,
+ 0x000dd387,
+ 0x0011d387,
+ 0x0015d387,
+ 0x0019d387,
+ 0x001dd387,
+ 0x0001db87,
+ 0x0005db87,
+ 0x0009db87,
+ 0x000ddb87,
+ 0x0011db87,
+ 0x0015db87,
+ 0x0019db87,
+ 0x001ddb87,
+ 0x0001e387,
+ 0x0005e387,
+ 0x0009e387,
+ 0x000de387,
+ 0x0011e387,
+ 0x0015e387,
+ 0x0019e387,
+ 0x001de387,
+ 0x0001eb87,
+ 0x0005eb87,
+ 0x0009eb87,
+ 0x000deb87,
+ 0x0011eb87,
+ 0x0015eb87,
+ 0x0019eb87,
+ 0x001deb87,
+ 0x0001f387,
+ 0x0005f387,
+ 0x0009f387,
+ 0x000df387,
+ 0x0011f387,
+ 0x0015f387,
+ 0x0019f387,
+ 0x001df387,
+ 0x0001fb87,
+ 0x0005fb87,
+ 0x0009fb87,
+ 0x000dfb87,
+ 0x0011fb87,
+ 0x0015fb87,
+ 0x0019fb87,
+ 0x001dfb87,
+ 0x0001c397,
+ 0x0005c397,
+ 0x0009c397,
+ 0x000dc397,
+ 0x0011c397,
+ 0x0015c397,
+ 0x0019c397,
+ 0x001dc397,
+ 0x0001cb97,
+ 0x0005cb97,
+ 0x0009cb97,
+ 0x000dcb97,
+ 0x0011cb97,
+ 0x0015cb97,
+ 0x0019cb97,
+ 0x001dcb97,
+ 0x0001d397,
+ 0x0005d397,
+ 0x0009d397,
+ 0x000dd397,
+ 0x0011d397,
+ 0x0015d397,
+ 0x0019d397,
+ 0x001dd397,
+ 0x0001db97,
+ 0x0005db97,
+ 0x0009db97,
+ 0x000ddb97,
+ 0x0011db97,
+ 0x0015db97,
+ 0x0019db97,
+ 0x001ddb97,
+ 0x0001e397,
+ 0x0005e397,
+ 0x0009e397,
+ 0x000de397,
+ 0x0011e397,
+ 0x0015e397,
+ 0x0019e397,
+ 0x001de397,
+ 0x0001eb97,
+ 0x0005eb97,
+ 0x0009eb97,
+ 0x000deb97,
+ 0x0011eb97,
+ 0x0015eb97,
+ 0x0019eb97,
+ 0x001deb97,
+ 0x0001f397,
+ 0x0005f397,
+ 0x0009f397,
+ 0x000df397,
+ 0x0011f397,
+ 0x0015f397,
+ 0x0019f397,
+ 0x001df397,
+ 0x0001fb97,
+ 0x0005fb97,
+ 0x0009fb97,
+ 0x000dfb97,
+ 0x0011fb97,
+ 0x0015fb97,
+ 0x0019fb97,
+ 0x001dfb97,
+ 0x0001c3a7,
+ 0x0005c3a7,
+ 0x0009c3a7,
+ 0x000dc3a7,
+ 0x0011c3a7,
+ 0x0015c3a7,
+ 0x0019c3a7,
+ 0x001dc3a7,
+ 0x0001cba7,
+ 0x0005cba7,
+ 0x0009cba7,
+ 0x000dcba7,
+ 0x0011cba7,
+ 0x0015cba7,
+ 0x0019cba7,
+ 0x001dcba7,
+ 0x0001d3a7,
+ 0x0005d3a7,
+ 0x0009d3a7,
+ 0x000dd3a7,
+ 0x0011d3a7,
+ 0x0015d3a7,
+ 0x0019d3a7,
+ 0x001dd3a7,
+ 0x0001dba7,
+ 0x0005dba7,
+ 0x0009dba7,
+ 0x000ddba7,
+ 0x0011dba7,
+ 0x0015dba7,
+ 0x0019dba7,
+ 0x001ddba7,
+ 0x0001e3a7,
+ 0x0005e3a7,
+ 0x0009e3a7,
+ 0x000de3a7,
+ 0x0011e3a7,
+ 0x0015e3a7,
+ 0x0019e3a7,
+ 0x001de3a7,
+ 0x0001eba7,
+ 0x0005eba7,
+ 0x0009eba7,
+ 0x000deba7,
+ 0x0011eba7,
+ 0x0015eba7,
+ 0x0019eba7,
+ 0x001deba7,
+ 0x0001f3a7,
+ 0x0005f3a7,
+ 0x0009f3a7,
+ 0x000df3a7,
+ 0x0011f3a7,
+ 0x0015f3a7,
+ 0x0019f3a7,
+ 0x001df3a7,
+ 0x0001fba7,
+ 0x0005fba7,
+ 0x0009fba7,
+ 0x000dfba7,
+ 0x0011fba7,
+ 0x0015fba7,
+ 0x0019fba7,
+ 0x001dfba7,
+ 0x0001c3b7,
+ 0x0005c3b7,
+ 0x0009c3b7,
+ 0x000dc3b7,
+ 0x0011c3b7,
+ 0x0015c3b7,
+ 0x0019c3b7,
+ 0x001dc3b7,
+ 0x0001cbb7,
+ 0x0005cbb7,
+ 0x0009cbb7,
+ 0x000dcbb7,
+ 0x0011cbb7,
+ 0x0015cbb7,
+ 0x0019cbb7,
+ 0x001dcbb7,
+ 0x0001d3b7,
+ 0x0005d3b7,
+ 0x0009d3b7,
+ 0x000dd3b7,
+ 0x0011d3b7,
+ 0x0015d3b7,
+ 0x0019d3b7,
+ 0x001dd3b7,
+ 0x0001dbb7,
+ 0x0005dbb7,
+ 0x0009dbb7,
+ 0x000ddbb7,
+ 0x0011dbb7,
+ 0x0015dbb7,
+ 0x0019dbb7,
+ 0x001ddbb7,
+ 0x0001e3b7,
+ 0x0005e3b7,
+ 0x0009e3b7,
+ 0x000de3b7,
+ 0x0011e3b7,
+ 0x0015e3b7,
+ 0x0019e3b7,
+ 0x001de3b7,
+ 0x0001ebb7,
+ 0x0005ebb7,
+ 0x0009ebb7,
+ 0x000debb7,
+ 0x0011ebb7,
+ 0x0015ebb7,
+ 0x0019ebb7,
+ 0x001debb7,
+ 0x0001f3b7,
+ 0x0005f3b7,
+ 0x0009f3b7,
+ 0x000df3b7,
+ 0x0011f3b7,
+ 0x0015f3b7,
+ 0x0019f3b7,
+ 0x001df3b7,
+ 0x0001fbb7,
+ 0x0005fbb7,
+ 0x0009fbb7,
+ 0x000dfbb7,
+ 0x0011fbb7,
+ 0x0015fbb7,
+ 0x0019fbb7,
+ 0x001dfbb7,
+ 0x0001c3c7,
+ 0x0005c3c7,
+ 0x0009c3c7,
+ 0x000dc3c7,
+ 0x0011c3c7,
+ 0x0015c3c7,
+ 0x0019c3c7,
+ 0x001dc3c7,
+ 0x0001cbc7,
+ 0x0005cbc7,
+ 0x0009cbc7,
+ 0x000dcbc7,
+ 0x0011cbc7,
+ 0x0015cbc7,
+ 0x0019cbc7,
+ 0x001dcbc7,
+ 0x0001d3c7,
+ 0x0005d3c7,
+ 0x0009d3c7,
+ 0x000dd3c7,
+ 0x0011d3c7,
+ 0x0015d3c7,
+ 0x0019d3c7,
+ 0x001dd3c7,
+ 0x0001dbc7,
+ 0x0005dbc7,
+ 0x0009dbc7,
+ 0x000ddbc7,
+ 0x0011dbc7,
+ 0x0015dbc7,
+ 0x0019dbc7,
+ 0x001ddbc7,
+ 0x0001e3c7,
+ 0x0005e3c7,
+ 0x0009e3c7,
+ 0x000de3c7,
+ 0x0011e3c7,
+ 0x0015e3c7,
+ 0x0019e3c7,
+ 0x001de3c7,
+ 0x0001ebc7,
+ 0x0005ebc7,
+ 0x0009ebc7,
+ 0x000debc7,
+ 0x0011ebc7,
+ 0x0015ebc7,
+ 0x0019ebc7,
+ 0x001debc7,
+ 0x0001f3c7,
+ 0x0005f3c7,
+ 0x0009f3c7,
+ 0x000df3c7,
+ 0x0011f3c7,
+ 0x0015f3c7,
+ 0x0019f3c7,
+ 0x001df3c7,
+ 0x0001fbc7,
+ 0x0005fbc7,
+ 0x0009fbc7,
+ 0x000dfbc7,
+ 0x0011fbc7,
+ 0x0015fbc7,
+ 0x0019fbc7,
+ 0x001dfbc7,
+ 0x0001c3d7,
+ 0x0005c3d7,
+ 0x0009c3d7,
+ 0x000dc3d7,
+ 0x0011c3d7,
+ 0x0015c3d7,
+ 0x0019c3d7,
+ 0x001dc3d7,
+ 0x0001cbd7,
+ 0x0005cbd7,
+ 0x0009cbd7,
+ 0x000dcbd7,
+ 0x0011cbd7,
+ 0x0015cbd7,
+ 0x0019cbd7,
+ 0x001dcbd7,
+ 0x0001d3d7,
+ 0x0005d3d7,
+ 0x0009d3d7,
+ 0x000dd3d7,
+ 0x0011d3d7,
+ 0x0015d3d7,
+ 0x0019d3d7,
+ 0x001dd3d7,
+ 0x0001dbd7,
+ 0x0005dbd7,
+ 0x0009dbd7,
+ 0x000ddbd7,
+ 0x0011dbd7,
+ 0x0015dbd7,
+ 0x0019dbd7,
+ 0x001ddbd7,
+ 0x0001e3d7,
+ 0x0005e3d7,
+ 0x0009e3d7,
+ 0x000de3d7,
+ 0x0011e3d7,
+ 0x0015e3d7,
+ 0x0019e3d7,
+ 0x001de3d7,
+ 0x0001ebd7,
+ 0x0005ebd7,
+ 0x0009ebd7,
+ 0x000debd7,
+ 0x0011ebd7,
+ 0x0015ebd7,
+ 0x0019ebd7,
+ 0x001debd7,
+ 0x0001f3d7,
+ 0x0005f3d7,
+ 0x0009f3d7,
+ 0x000df3d7,
+ 0x0011f3d7,
+ 0x0015f3d7,
+ 0x0019f3d7,
+ 0x001df3d7,
+ 0x0001fbd7,
+ 0x0005fbd7,
+ 0x0009fbd7,
+ 0x000dfbd7,
+ 0x0011fbd7,
+ 0x0015fbd7,
+ 0x0019fbd7,
+ 0x001dfbd7,
+ 0x0001c3e7,
+ 0x0005c3e7,
+ 0x0009c3e7,
+ 0x000dc3e7,
+ 0x0011c3e7,
+ 0x0015c3e7,
+ 0x0019c3e7,
+ 0x001dc3e7,
+ 0x0001cbe7,
+ 0x0005cbe7,
+ 0x0009cbe7,
+ 0x000dcbe7,
+ 0x0011cbe7,
+ 0x0015cbe7,
+ 0x0019cbe7,
+ 0x001dcbe7,
+ 0x0001d3e7,
+ 0x0005d3e7,
+ 0x0009d3e7,
+ 0x000dd3e7,
+ 0x0011d3e7,
+ 0x0015d3e7,
+ 0x0019d3e7,
+ 0x001dd3e7,
+ 0x0001dbe7,
+ 0x0005dbe7,
+ 0x0009dbe7,
+ 0x000ddbe7,
+ 0x0011dbe7,
+ 0x0015dbe7,
+ 0x0019dbe7,
+ 0x001ddbe7,
+ 0x0001e3e7,
+ 0x0005e3e7,
+ 0x0009e3e7,
+ 0x000de3e7,
+ 0x0011e3e7,
+ 0x0015e3e7,
+ 0x0019e3e7,
+ 0x001de3e7,
+ 0x0001ebe7,
+ 0x0005ebe7,
+ 0x0009ebe7,
+ 0x000debe7,
+ 0x0011ebe7,
+ 0x0015ebe7,
+ 0x0019ebe7,
+ 0x001debe7,
+ 0x0001f3e7,
+ 0x0005f3e7,
+ 0x0009f3e7,
+ 0x000df3e7,
+ 0x0011f3e7,
+ 0x0015f3e7,
+ 0x0019f3e7,
+ 0x001df3e7,
+ 0x0001fbe7,
+ 0x0005fbe7,
+ 0x0009fbe7,
+ 0x000dfbe7,
+ 0x0011fbe7,
+ 0x0015fbe7,
+ 0x0019fbe7,
+ 0x001dfbe7,
+ 0x0001c3f7,
+ 0x0005c3f7,
+ 0x0009c3f7,
+ 0x000dc3f7,
+ 0x0011c3f7,
+ 0x0015c3f7,
+ 0x0019c3f7,
+ 0x001dc3f7,
+ 0x0001cbf7,
+ 0x0005cbf7,
+ 0x0009cbf7,
+ 0x000dcbf7,
+ 0x0011cbf7,
+ 0x0015cbf7,
+ 0x0019cbf7,
+ 0x001dcbf7,
+ 0x0001d3f7,
+ 0x0005d3f7,
+ 0x0009d3f7,
+ 0x000dd3f7,
+ 0x0011d3f7,
+ 0x0015d3f7,
+ 0x0019d3f7,
+ 0x001dd3f7,
+ 0x0001dbf7,
+ 0x0005dbf7,
+ 0x0009dbf7,
+ 0x000ddbf7,
+ 0x0011dbf7,
+ 0x0015dbf7,
+ 0x0019dbf7,
+ 0x001ddbf7,
+ 0x0001e3f7,
+ 0x0005e3f7,
+ 0x0009e3f7,
+ 0x000de3f7,
+ 0x0011e3f7,
+ 0x0015e3f7,
+ 0x0019e3f7,
+ 0x001de3f7,
+ 0x0001ebf7,
+ 0x0005ebf7,
+ 0x0009ebf7,
+ 0x000debf7,
+ 0x0011ebf7,
+ 0x0015ebf7,
+ 0x0019ebf7,
+ 0x001debf7,
+ 0x0001f3f7,
+ 0x0005f3f7,
+ 0x0009f3f7,
+ 0x000df3f7,
+ 0x0011f3f7,
+ 0x0015f3f7,
+ 0x0019f3f7,
+ 0x001df3f7,
+ 0x0001fbf7,
+ 0x0005fbf7,
+ 0x0009fbf7,
+ 0x000dfbf7,
+ 0x0011fbf7,
+ 0x0015fbf7,
+ 0x0019fbf7,
+ 0x001dfbf7,
+ 0x00e1c387,
+ 0x02e1c387,
+ 0x04e1c387,
+ 0x06e1c387,
+ 0x08e1c387,
+ 0x0ae1c387,
+ 0x0ce1c387,
+ 0x0ee1c387,
+ 0x00e5c387,
+ 0x02e5c387,
+ 0x04e5c387,
+ 0x06e5c387,
+ 0x08e5c387,
+ 0x0ae5c387,
+ 0x0ce5c387,
+ 0x0ee5c387,
+ 0x00e9c387,
+ 0x02e9c387,
+ 0x04e9c387,
+ 0x06e9c387,
+ 0x08e9c387,
+ 0x0ae9c387,
+ 0x0ce9c387,
+ 0x0ee9c387,
+ 0x00edc387,
+ 0x02edc387,
+ 0x04edc387,
+ 0x06edc387,
+ 0x08edc387,
+ 0x0aedc387,
+ 0x0cedc387,
+ 0x0eedc387,
+ 0x00f1c387,
+ 0x02f1c387,
+ 0x04f1c387,
+ 0x06f1c387,
+ 0x08f1c387,
+ 0x0af1c387,
+ 0x0cf1c387,
+ 0x0ef1c387,
+ 0x00f5c387,
+ 0x02f5c387,
+ 0x04f5c387,
+ 0x06f5c387,
+ 0x08f5c387,
+ 0x0af5c387,
+ 0x0cf5c387,
+ 0x0ef5c387,
+ 0x00f9c387,
+ 0x02f9c387,
+ 0x04f9c387,
+ 0x06f9c387,
+ 0x08f9c387,
+ 0x0af9c387,
+ 0x0cf9c387,
+ 0x0ef9c387,
+ 0x00fdc387,
+ 0x02fdc387,
+ 0x04fdc387,
+ 0x06fdc387,
+ 0x08fdc387,
+ 0x0afdc387,
+ 0x0cfdc387,
+ 0x0efdc387,
+ 0x00e1cb87,
+ 0x02e1cb87,
+ 0x04e1cb87,
+ 0x06e1cb87,
+ 0x08e1cb87,
+ 0x0ae1cb87,
+ 0x0ce1cb87,
+ 0x0ee1cb87,
+ 0x00e5cb87,
+ 0x02e5cb87,
+ 0x04e5cb87,
+ 0x06e5cb87,
+ 0x08e5cb87,
+ 0x0ae5cb87,
+ 0x0ce5cb87,
+ 0x0ee5cb87,
+ 0x00e9cb87,
+ 0x02e9cb87,
+ 0x04e9cb87,
+ 0x06e9cb87,
+ 0x08e9cb87,
+ 0x0ae9cb87,
+ 0x0ce9cb87,
+ 0x0ee9cb87,
+ 0x00edcb87,
+ 0x02edcb87,
+ 0x04edcb87,
+ 0x06edcb87,
+ 0x08edcb87,
+ 0x0aedcb87,
+ 0x0cedcb87,
+ 0x0eedcb87,
+ 0x00f1cb87,
+ 0x02f1cb87,
+ 0x04f1cb87,
+ 0x06f1cb87,
+ 0x08f1cb87,
+ 0x0af1cb87,
+ 0x0cf1cb87,
+ 0x0ef1cb87,
+ 0x00f5cb87,
+ 0x02f5cb87,
+ 0x04f5cb87,
+ 0x06f5cb87,
+ 0x08f5cb87,
+ 0x0af5cb87,
+ 0x0cf5cb87,
+ 0x0ef5cb87,
+ 0x00f9cb87,
+ 0x02f9cb87,
+ 0x04f9cb87,
+ 0x06f9cb87,
+ 0x08f9cb87,
+}
+
+var kZeroRepsDepth = [numCommandSymbols]uint32{
+ 0,
+ 4,
+ 8,
+ 7,
+ 7,
+ 7,
+ 7,
+ 7,
+ 7,
+ 7,
+ 7,
+ 11,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 14,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 21,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+ 28,
+}
+
+var kNonZeroRepsBits = [numCommandSymbols]uint64{
+ 0x0000000b,
+ 0x0000001b,
+ 0x0000002b,
+ 0x0000003b,
+ 0x000002cb,
+ 0x000006cb,
+ 0x00000acb,
+ 0x00000ecb,
+ 0x000002db,
+ 0x000006db,
+ 0x00000adb,
+ 0x00000edb,
+ 0x000002eb,
+ 0x000006eb,
+ 0x00000aeb,
+ 0x00000eeb,
+ 0x000002fb,
+ 0x000006fb,
+ 0x00000afb,
+ 0x00000efb,
+ 0x0000b2cb,
+ 0x0001b2cb,
+ 0x0002b2cb,
+ 0x0003b2cb,
+ 0x0000b6cb,
+ 0x0001b6cb,
+ 0x0002b6cb,
+ 0x0003b6cb,
+ 0x0000bacb,
+ 0x0001bacb,
+ 0x0002bacb,
+ 0x0003bacb,
+ 0x0000becb,
+ 0x0001becb,
+ 0x0002becb,
+ 0x0003becb,
+ 0x0000b2db,
+ 0x0001b2db,
+ 0x0002b2db,
+ 0x0003b2db,
+ 0x0000b6db,
+ 0x0001b6db,
+ 0x0002b6db,
+ 0x0003b6db,
+ 0x0000badb,
+ 0x0001badb,
+ 0x0002badb,
+ 0x0003badb,
+ 0x0000bedb,
+ 0x0001bedb,
+ 0x0002bedb,
+ 0x0003bedb,
+ 0x0000b2eb,
+ 0x0001b2eb,
+ 0x0002b2eb,
+ 0x0003b2eb,
+ 0x0000b6eb,
+ 0x0001b6eb,
+ 0x0002b6eb,
+ 0x0003b6eb,
+ 0x0000baeb,
+ 0x0001baeb,
+ 0x0002baeb,
+ 0x0003baeb,
+ 0x0000beeb,
+ 0x0001beeb,
+ 0x0002beeb,
+ 0x0003beeb,
+ 0x0000b2fb,
+ 0x0001b2fb,
+ 0x0002b2fb,
+ 0x0003b2fb,
+ 0x0000b6fb,
+ 0x0001b6fb,
+ 0x0002b6fb,
+ 0x0003b6fb,
+ 0x0000bafb,
+ 0x0001bafb,
+ 0x0002bafb,
+ 0x0003bafb,
+ 0x0000befb,
+ 0x0001befb,
+ 0x0002befb,
+ 0x0003befb,
+ 0x002cb2cb,
+ 0x006cb2cb,
+ 0x00acb2cb,
+ 0x00ecb2cb,
+ 0x002db2cb,
+ 0x006db2cb,
+ 0x00adb2cb,
+ 0x00edb2cb,
+ 0x002eb2cb,
+ 0x006eb2cb,
+ 0x00aeb2cb,
+ 0x00eeb2cb,
+ 0x002fb2cb,
+ 0x006fb2cb,
+ 0x00afb2cb,
+ 0x00efb2cb,
+ 0x002cb6cb,
+ 0x006cb6cb,
+ 0x00acb6cb,
+ 0x00ecb6cb,
+ 0x002db6cb,
+ 0x006db6cb,
+ 0x00adb6cb,
+ 0x00edb6cb,
+ 0x002eb6cb,
+ 0x006eb6cb,
+ 0x00aeb6cb,
+ 0x00eeb6cb,
+ 0x002fb6cb,
+ 0x006fb6cb,
+ 0x00afb6cb,
+ 0x00efb6cb,
+ 0x002cbacb,
+ 0x006cbacb,
+ 0x00acbacb,
+ 0x00ecbacb,
+ 0x002dbacb,
+ 0x006dbacb,
+ 0x00adbacb,
+ 0x00edbacb,
+ 0x002ebacb,
+ 0x006ebacb,
+ 0x00aebacb,
+ 0x00eebacb,
+ 0x002fbacb,
+ 0x006fbacb,
+ 0x00afbacb,
+ 0x00efbacb,
+ 0x002cbecb,
+ 0x006cbecb,
+ 0x00acbecb,
+ 0x00ecbecb,
+ 0x002dbecb,
+ 0x006dbecb,
+ 0x00adbecb,
+ 0x00edbecb,
+ 0x002ebecb,
+ 0x006ebecb,
+ 0x00aebecb,
+ 0x00eebecb,
+ 0x002fbecb,
+ 0x006fbecb,
+ 0x00afbecb,
+ 0x00efbecb,
+ 0x002cb2db,
+ 0x006cb2db,
+ 0x00acb2db,
+ 0x00ecb2db,
+ 0x002db2db,
+ 0x006db2db,
+ 0x00adb2db,
+ 0x00edb2db,
+ 0x002eb2db,
+ 0x006eb2db,
+ 0x00aeb2db,
+ 0x00eeb2db,
+ 0x002fb2db,
+ 0x006fb2db,
+ 0x00afb2db,
+ 0x00efb2db,
+ 0x002cb6db,
+ 0x006cb6db,
+ 0x00acb6db,
+ 0x00ecb6db,
+ 0x002db6db,
+ 0x006db6db,
+ 0x00adb6db,
+ 0x00edb6db,
+ 0x002eb6db,
+ 0x006eb6db,
+ 0x00aeb6db,
+ 0x00eeb6db,
+ 0x002fb6db,
+ 0x006fb6db,
+ 0x00afb6db,
+ 0x00efb6db,
+ 0x002cbadb,
+ 0x006cbadb,
+ 0x00acbadb,
+ 0x00ecbadb,
+ 0x002dbadb,
+ 0x006dbadb,
+ 0x00adbadb,
+ 0x00edbadb,
+ 0x002ebadb,
+ 0x006ebadb,
+ 0x00aebadb,
+ 0x00eebadb,
+ 0x002fbadb,
+ 0x006fbadb,
+ 0x00afbadb,
+ 0x00efbadb,
+ 0x002cbedb,
+ 0x006cbedb,
+ 0x00acbedb,
+ 0x00ecbedb,
+ 0x002dbedb,
+ 0x006dbedb,
+ 0x00adbedb,
+ 0x00edbedb,
+ 0x002ebedb,
+ 0x006ebedb,
+ 0x00aebedb,
+ 0x00eebedb,
+ 0x002fbedb,
+ 0x006fbedb,
+ 0x00afbedb,
+ 0x00efbedb,
+ 0x002cb2eb,
+ 0x006cb2eb,
+ 0x00acb2eb,
+ 0x00ecb2eb,
+ 0x002db2eb,
+ 0x006db2eb,
+ 0x00adb2eb,
+ 0x00edb2eb,
+ 0x002eb2eb,
+ 0x006eb2eb,
+ 0x00aeb2eb,
+ 0x00eeb2eb,
+ 0x002fb2eb,
+ 0x006fb2eb,
+ 0x00afb2eb,
+ 0x00efb2eb,
+ 0x002cb6eb,
+ 0x006cb6eb,
+ 0x00acb6eb,
+ 0x00ecb6eb,
+ 0x002db6eb,
+ 0x006db6eb,
+ 0x00adb6eb,
+ 0x00edb6eb,
+ 0x002eb6eb,
+ 0x006eb6eb,
+ 0x00aeb6eb,
+ 0x00eeb6eb,
+ 0x002fb6eb,
+ 0x006fb6eb,
+ 0x00afb6eb,
+ 0x00efb6eb,
+ 0x002cbaeb,
+ 0x006cbaeb,
+ 0x00acbaeb,
+ 0x00ecbaeb,
+ 0x002dbaeb,
+ 0x006dbaeb,
+ 0x00adbaeb,
+ 0x00edbaeb,
+ 0x002ebaeb,
+ 0x006ebaeb,
+ 0x00aebaeb,
+ 0x00eebaeb,
+ 0x002fbaeb,
+ 0x006fbaeb,
+ 0x00afbaeb,
+ 0x00efbaeb,
+ 0x002cbeeb,
+ 0x006cbeeb,
+ 0x00acbeeb,
+ 0x00ecbeeb,
+ 0x002dbeeb,
+ 0x006dbeeb,
+ 0x00adbeeb,
+ 0x00edbeeb,
+ 0x002ebeeb,
+ 0x006ebeeb,
+ 0x00aebeeb,
+ 0x00eebeeb,
+ 0x002fbeeb,
+ 0x006fbeeb,
+ 0x00afbeeb,
+ 0x00efbeeb,
+ 0x002cb2fb,
+ 0x006cb2fb,
+ 0x00acb2fb,
+ 0x00ecb2fb,
+ 0x002db2fb,
+ 0x006db2fb,
+ 0x00adb2fb,
+ 0x00edb2fb,
+ 0x002eb2fb,
+ 0x006eb2fb,
+ 0x00aeb2fb,
+ 0x00eeb2fb,
+ 0x002fb2fb,
+ 0x006fb2fb,
+ 0x00afb2fb,
+ 0x00efb2fb,
+ 0x002cb6fb,
+ 0x006cb6fb,
+ 0x00acb6fb,
+ 0x00ecb6fb,
+ 0x002db6fb,
+ 0x006db6fb,
+ 0x00adb6fb,
+ 0x00edb6fb,
+ 0x002eb6fb,
+ 0x006eb6fb,
+ 0x00aeb6fb,
+ 0x00eeb6fb,
+ 0x002fb6fb,
+ 0x006fb6fb,
+ 0x00afb6fb,
+ 0x00efb6fb,
+ 0x002cbafb,
+ 0x006cbafb,
+ 0x00acbafb,
+ 0x00ecbafb,
+ 0x002dbafb,
+ 0x006dbafb,
+ 0x00adbafb,
+ 0x00edbafb,
+ 0x002ebafb,
+ 0x006ebafb,
+ 0x00aebafb,
+ 0x00eebafb,
+ 0x002fbafb,
+ 0x006fbafb,
+ 0x00afbafb,
+ 0x00efbafb,
+ 0x002cbefb,
+ 0x006cbefb,
+ 0x00acbefb,
+ 0x00ecbefb,
+ 0x002dbefb,
+ 0x006dbefb,
+ 0x00adbefb,
+ 0x00edbefb,
+ 0x002ebefb,
+ 0x006ebefb,
+ 0x00aebefb,
+ 0x00eebefb,
+ 0x002fbefb,
+ 0x006fbefb,
+ 0x00afbefb,
+ 0x00efbefb,
+ 0x0b2cb2cb,
+ 0x1b2cb2cb,
+ 0x2b2cb2cb,
+ 0x3b2cb2cb,
+ 0x0b6cb2cb,
+ 0x1b6cb2cb,
+ 0x2b6cb2cb,
+ 0x3b6cb2cb,
+ 0x0bacb2cb,
+ 0x1bacb2cb,
+ 0x2bacb2cb,
+ 0x3bacb2cb,
+ 0x0becb2cb,
+ 0x1becb2cb,
+ 0x2becb2cb,
+ 0x3becb2cb,
+ 0x0b2db2cb,
+ 0x1b2db2cb,
+ 0x2b2db2cb,
+ 0x3b2db2cb,
+ 0x0b6db2cb,
+ 0x1b6db2cb,
+ 0x2b6db2cb,
+ 0x3b6db2cb,
+ 0x0badb2cb,
+ 0x1badb2cb,
+ 0x2badb2cb,
+ 0x3badb2cb,
+ 0x0bedb2cb,
+ 0x1bedb2cb,
+ 0x2bedb2cb,
+ 0x3bedb2cb,
+ 0x0b2eb2cb,
+ 0x1b2eb2cb,
+ 0x2b2eb2cb,
+ 0x3b2eb2cb,
+ 0x0b6eb2cb,
+ 0x1b6eb2cb,
+ 0x2b6eb2cb,
+ 0x3b6eb2cb,
+ 0x0baeb2cb,
+ 0x1baeb2cb,
+ 0x2baeb2cb,
+ 0x3baeb2cb,
+ 0x0beeb2cb,
+ 0x1beeb2cb,
+ 0x2beeb2cb,
+ 0x3beeb2cb,
+ 0x0b2fb2cb,
+ 0x1b2fb2cb,
+ 0x2b2fb2cb,
+ 0x3b2fb2cb,
+ 0x0b6fb2cb,
+ 0x1b6fb2cb,
+ 0x2b6fb2cb,
+ 0x3b6fb2cb,
+ 0x0bafb2cb,
+ 0x1bafb2cb,
+ 0x2bafb2cb,
+ 0x3bafb2cb,
+ 0x0befb2cb,
+ 0x1befb2cb,
+ 0x2befb2cb,
+ 0x3befb2cb,
+ 0x0b2cb6cb,
+ 0x1b2cb6cb,
+ 0x2b2cb6cb,
+ 0x3b2cb6cb,
+ 0x0b6cb6cb,
+ 0x1b6cb6cb,
+ 0x2b6cb6cb,
+ 0x3b6cb6cb,
+ 0x0bacb6cb,
+ 0x1bacb6cb,
+ 0x2bacb6cb,
+ 0x3bacb6cb,
+ 0x0becb6cb,
+ 0x1becb6cb,
+ 0x2becb6cb,
+ 0x3becb6cb,
+ 0x0b2db6cb,
+ 0x1b2db6cb,
+ 0x2b2db6cb,
+ 0x3b2db6cb,
+ 0x0b6db6cb,
+ 0x1b6db6cb,
+ 0x2b6db6cb,
+ 0x3b6db6cb,
+ 0x0badb6cb,
+ 0x1badb6cb,
+ 0x2badb6cb,
+ 0x3badb6cb,
+ 0x0bedb6cb,
+ 0x1bedb6cb,
+ 0x2bedb6cb,
+ 0x3bedb6cb,
+ 0x0b2eb6cb,
+ 0x1b2eb6cb,
+ 0x2b2eb6cb,
+ 0x3b2eb6cb,
+ 0x0b6eb6cb,
+ 0x1b6eb6cb,
+ 0x2b6eb6cb,
+ 0x3b6eb6cb,
+ 0x0baeb6cb,
+ 0x1baeb6cb,
+ 0x2baeb6cb,
+ 0x3baeb6cb,
+ 0x0beeb6cb,
+ 0x1beeb6cb,
+ 0x2beeb6cb,
+ 0x3beeb6cb,
+ 0x0b2fb6cb,
+ 0x1b2fb6cb,
+ 0x2b2fb6cb,
+ 0x3b2fb6cb,
+ 0x0b6fb6cb,
+ 0x1b6fb6cb,
+ 0x2b6fb6cb,
+ 0x3b6fb6cb,
+ 0x0bafb6cb,
+ 0x1bafb6cb,
+ 0x2bafb6cb,
+ 0x3bafb6cb,
+ 0x0befb6cb,
+ 0x1befb6cb,
+ 0x2befb6cb,
+ 0x3befb6cb,
+ 0x0b2cbacb,
+ 0x1b2cbacb,
+ 0x2b2cbacb,
+ 0x3b2cbacb,
+ 0x0b6cbacb,
+ 0x1b6cbacb,
+ 0x2b6cbacb,
+ 0x3b6cbacb,
+ 0x0bacbacb,
+ 0x1bacbacb,
+ 0x2bacbacb,
+ 0x3bacbacb,
+ 0x0becbacb,
+ 0x1becbacb,
+ 0x2becbacb,
+ 0x3becbacb,
+ 0x0b2dbacb,
+ 0x1b2dbacb,
+ 0x2b2dbacb,
+ 0x3b2dbacb,
+ 0x0b6dbacb,
+ 0x1b6dbacb,
+ 0x2b6dbacb,
+ 0x3b6dbacb,
+ 0x0badbacb,
+ 0x1badbacb,
+ 0x2badbacb,
+ 0x3badbacb,
+ 0x0bedbacb,
+ 0x1bedbacb,
+ 0x2bedbacb,
+ 0x3bedbacb,
+ 0x0b2ebacb,
+ 0x1b2ebacb,
+ 0x2b2ebacb,
+ 0x3b2ebacb,
+ 0x0b6ebacb,
+ 0x1b6ebacb,
+ 0x2b6ebacb,
+ 0x3b6ebacb,
+ 0x0baebacb,
+ 0x1baebacb,
+ 0x2baebacb,
+ 0x3baebacb,
+ 0x0beebacb,
+ 0x1beebacb,
+ 0x2beebacb,
+ 0x3beebacb,
+ 0x0b2fbacb,
+ 0x1b2fbacb,
+ 0x2b2fbacb,
+ 0x3b2fbacb,
+ 0x0b6fbacb,
+ 0x1b6fbacb,
+ 0x2b6fbacb,
+ 0x3b6fbacb,
+ 0x0bafbacb,
+ 0x1bafbacb,
+ 0x2bafbacb,
+ 0x3bafbacb,
+ 0x0befbacb,
+ 0x1befbacb,
+ 0x2befbacb,
+ 0x3befbacb,
+ 0x0b2cbecb,
+ 0x1b2cbecb,
+ 0x2b2cbecb,
+ 0x3b2cbecb,
+ 0x0b6cbecb,
+ 0x1b6cbecb,
+ 0x2b6cbecb,
+ 0x3b6cbecb,
+ 0x0bacbecb,
+ 0x1bacbecb,
+ 0x2bacbecb,
+ 0x3bacbecb,
+ 0x0becbecb,
+ 0x1becbecb,
+ 0x2becbecb,
+ 0x3becbecb,
+ 0x0b2dbecb,
+ 0x1b2dbecb,
+ 0x2b2dbecb,
+ 0x3b2dbecb,
+ 0x0b6dbecb,
+ 0x1b6dbecb,
+ 0x2b6dbecb,
+ 0x3b6dbecb,
+ 0x0badbecb,
+ 0x1badbecb,
+ 0x2badbecb,
+ 0x3badbecb,
+ 0x0bedbecb,
+ 0x1bedbecb,
+ 0x2bedbecb,
+ 0x3bedbecb,
+ 0x0b2ebecb,
+ 0x1b2ebecb,
+ 0x2b2ebecb,
+ 0x3b2ebecb,
+ 0x0b6ebecb,
+ 0x1b6ebecb,
+ 0x2b6ebecb,
+ 0x3b6ebecb,
+ 0x0baebecb,
+ 0x1baebecb,
+ 0x2baebecb,
+ 0x3baebecb,
+ 0x0beebecb,
+ 0x1beebecb,
+ 0x2beebecb,
+ 0x3beebecb,
+ 0x0b2fbecb,
+ 0x1b2fbecb,
+ 0x2b2fbecb,
+ 0x3b2fbecb,
+ 0x0b6fbecb,
+ 0x1b6fbecb,
+ 0x2b6fbecb,
+ 0x3b6fbecb,
+ 0x0bafbecb,
+ 0x1bafbecb,
+ 0x2bafbecb,
+ 0x3bafbecb,
+ 0x0befbecb,
+ 0x1befbecb,
+ 0x2befbecb,
+ 0x3befbecb,
+ 0x0b2cb2db,
+ 0x1b2cb2db,
+ 0x2b2cb2db,
+ 0x3b2cb2db,
+ 0x0b6cb2db,
+ 0x1b6cb2db,
+ 0x2b6cb2db,
+ 0x3b6cb2db,
+ 0x0bacb2db,
+ 0x1bacb2db,
+ 0x2bacb2db,
+ 0x3bacb2db,
+ 0x0becb2db,
+ 0x1becb2db,
+ 0x2becb2db,
+ 0x3becb2db,
+ 0x0b2db2db,
+ 0x1b2db2db,
+ 0x2b2db2db,
+ 0x3b2db2db,
+ 0x0b6db2db,
+ 0x1b6db2db,
+ 0x2b6db2db,
+ 0x3b6db2db,
+ 0x0badb2db,
+ 0x1badb2db,
+ 0x2badb2db,
+ 0x3badb2db,
+ 0x0bedb2db,
+ 0x1bedb2db,
+ 0x2bedb2db,
+ 0x3bedb2db,
+ 0x0b2eb2db,
+ 0x1b2eb2db,
+ 0x2b2eb2db,
+ 0x3b2eb2db,
+ 0x0b6eb2db,
+ 0x1b6eb2db,
+ 0x2b6eb2db,
+ 0x3b6eb2db,
+ 0x0baeb2db,
+ 0x1baeb2db,
+ 0x2baeb2db,
+ 0x3baeb2db,
+ 0x0beeb2db,
+ 0x1beeb2db,
+ 0x2beeb2db,
+ 0x3beeb2db,
+ 0x0b2fb2db,
+ 0x1b2fb2db,
+ 0x2b2fb2db,
+ 0x3b2fb2db,
+ 0x0b6fb2db,
+ 0x1b6fb2db,
+ 0x2b6fb2db,
+ 0x3b6fb2db,
+ 0x0bafb2db,
+ 0x1bafb2db,
+ 0x2bafb2db,
+ 0x3bafb2db,
+ 0x0befb2db,
+ 0x1befb2db,
+ 0x2befb2db,
+ 0x3befb2db,
+ 0x0b2cb6db,
+ 0x1b2cb6db,
+ 0x2b2cb6db,
+ 0x3b2cb6db,
+ 0x0b6cb6db,
+ 0x1b6cb6db,
+ 0x2b6cb6db,
+ 0x3b6cb6db,
+ 0x0bacb6db,
+ 0x1bacb6db,
+ 0x2bacb6db,
+ 0x3bacb6db,
+ 0x0becb6db,
+ 0x1becb6db,
+ 0x2becb6db,
+ 0x3becb6db,
+ 0x0b2db6db,
+ 0x1b2db6db,
+ 0x2b2db6db,
+ 0x3b2db6db,
+ 0x0b6db6db,
+ 0x1b6db6db,
+ 0x2b6db6db,
+ 0x3b6db6db,
+ 0x0badb6db,
+ 0x1badb6db,
+ 0x2badb6db,
+ 0x3badb6db,
+ 0x0bedb6db,
+ 0x1bedb6db,
+ 0x2bedb6db,
+ 0x3bedb6db,
+ 0x0b2eb6db,
+ 0x1b2eb6db,
+ 0x2b2eb6db,
+ 0x3b2eb6db,
+ 0x0b6eb6db,
+ 0x1b6eb6db,
+ 0x2b6eb6db,
+ 0x3b6eb6db,
+ 0x0baeb6db,
+ 0x1baeb6db,
+ 0x2baeb6db,
+ 0x3baeb6db,
+}
+
+var kNonZeroRepsDepth = [numCommandSymbols]uint32{
+ 6,
+ 6,
+ 6,
+ 6,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 12,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 18,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 24,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+ 30,
+}
+
+var kStaticCommandCodeBits = [numCommandSymbols]uint16{
+ 0,
+ 256,
+ 128,
+ 384,
+ 64,
+ 320,
+ 192,
+ 448,
+ 32,
+ 288,
+ 160,
+ 416,
+ 96,
+ 352,
+ 224,
+ 480,
+ 16,
+ 272,
+ 144,
+ 400,
+ 80,
+ 336,
+ 208,
+ 464,
+ 48,
+ 304,
+ 176,
+ 432,
+ 112,
+ 368,
+ 240,
+ 496,
+ 8,
+ 264,
+ 136,
+ 392,
+ 72,
+ 328,
+ 200,
+ 456,
+ 40,
+ 296,
+ 168,
+ 424,
+ 104,
+ 360,
+ 232,
+ 488,
+ 24,
+ 280,
+ 152,
+ 408,
+ 88,
+ 344,
+ 216,
+ 472,
+ 56,
+ 312,
+ 184,
+ 440,
+ 120,
+ 376,
+ 248,
+ 504,
+ 4,
+ 260,
+ 132,
+ 388,
+ 68,
+ 324,
+ 196,
+ 452,
+ 36,
+ 292,
+ 164,
+ 420,
+ 100,
+ 356,
+ 228,
+ 484,
+ 20,
+ 276,
+ 148,
+ 404,
+ 84,
+ 340,
+ 212,
+ 468,
+ 52,
+ 308,
+ 180,
+ 436,
+ 116,
+ 372,
+ 244,
+ 500,
+ 12,
+ 268,
+ 140,
+ 396,
+ 76,
+ 332,
+ 204,
+ 460,
+ 44,
+ 300,
+ 172,
+ 428,
+ 108,
+ 364,
+ 236,
+ 492,
+ 28,
+ 284,
+ 156,
+ 412,
+ 92,
+ 348,
+ 220,
+ 476,
+ 60,
+ 316,
+ 188,
+ 444,
+ 124,
+ 380,
+ 252,
+ 508,
+ 2,
+ 258,
+ 130,
+ 386,
+ 66,
+ 322,
+ 194,
+ 450,
+ 34,
+ 290,
+ 162,
+ 418,
+ 98,
+ 354,
+ 226,
+ 482,
+ 18,
+ 274,
+ 146,
+ 402,
+ 82,
+ 338,
+ 210,
+ 466,
+ 50,
+ 306,
+ 178,
+ 434,
+ 114,
+ 370,
+ 242,
+ 498,
+ 10,
+ 266,
+ 138,
+ 394,
+ 74,
+ 330,
+ 202,
+ 458,
+ 42,
+ 298,
+ 170,
+ 426,
+ 106,
+ 362,
+ 234,
+ 490,
+ 26,
+ 282,
+ 154,
+ 410,
+ 90,
+ 346,
+ 218,
+ 474,
+ 58,
+ 314,
+ 186,
+ 442,
+ 122,
+ 378,
+ 250,
+ 506,
+ 6,
+ 262,
+ 134,
+ 390,
+ 70,
+ 326,
+ 198,
+ 454,
+ 38,
+ 294,
+ 166,
+ 422,
+ 102,
+ 358,
+ 230,
+ 486,
+ 22,
+ 278,
+ 150,
+ 406,
+ 86,
+ 342,
+ 214,
+ 470,
+ 54,
+ 310,
+ 182,
+ 438,
+ 118,
+ 374,
+ 246,
+ 502,
+ 14,
+ 270,
+ 142,
+ 398,
+ 78,
+ 334,
+ 206,
+ 462,
+ 46,
+ 302,
+ 174,
+ 430,
+ 110,
+ 366,
+ 238,
+ 494,
+ 30,
+ 286,
+ 158,
+ 414,
+ 94,
+ 350,
+ 222,
+ 478,
+ 62,
+ 318,
+ 190,
+ 446,
+ 126,
+ 382,
+ 254,
+ 510,
+ 1,
+ 257,
+ 129,
+ 385,
+ 65,
+ 321,
+ 193,
+ 449,
+ 33,
+ 289,
+ 161,
+ 417,
+ 97,
+ 353,
+ 225,
+ 481,
+ 17,
+ 273,
+ 145,
+ 401,
+ 81,
+ 337,
+ 209,
+ 465,
+ 49,
+ 305,
+ 177,
+ 433,
+ 113,
+ 369,
+ 241,
+ 497,
+ 9,
+ 265,
+ 137,
+ 393,
+ 73,
+ 329,
+ 201,
+ 457,
+ 41,
+ 297,
+ 169,
+ 425,
+ 105,
+ 361,
+ 233,
+ 489,
+ 25,
+ 281,
+ 153,
+ 409,
+ 89,
+ 345,
+ 217,
+ 473,
+ 57,
+ 313,
+ 185,
+ 441,
+ 121,
+ 377,
+ 249,
+ 505,
+ 5,
+ 261,
+ 133,
+ 389,
+ 69,
+ 325,
+ 197,
+ 453,
+ 37,
+ 293,
+ 165,
+ 421,
+ 101,
+ 357,
+ 229,
+ 485,
+ 21,
+ 277,
+ 149,
+ 405,
+ 85,
+ 341,
+ 213,
+ 469,
+ 53,
+ 309,
+ 181,
+ 437,
+ 117,
+ 373,
+ 245,
+ 501,
+ 13,
+ 269,
+ 141,
+ 397,
+ 77,
+ 333,
+ 205,
+ 461,
+ 45,
+ 301,
+ 173,
+ 429,
+ 109,
+ 365,
+ 237,
+ 493,
+ 29,
+ 285,
+ 157,
+ 413,
+ 93,
+ 349,
+ 221,
+ 477,
+ 61,
+ 317,
+ 189,
+ 445,
+ 125,
+ 381,
+ 253,
+ 509,
+ 3,
+ 259,
+ 131,
+ 387,
+ 67,
+ 323,
+ 195,
+ 451,
+ 35,
+ 291,
+ 163,
+ 419,
+ 99,
+ 355,
+ 227,
+ 483,
+ 19,
+ 275,
+ 147,
+ 403,
+ 83,
+ 339,
+ 211,
+ 467,
+ 51,
+ 307,
+ 179,
+ 435,
+ 115,
+ 371,
+ 243,
+ 499,
+ 11,
+ 267,
+ 139,
+ 395,
+ 75,
+ 331,
+ 203,
+ 459,
+ 43,
+ 299,
+ 171,
+ 427,
+ 107,
+ 363,
+ 235,
+ 491,
+ 27,
+ 283,
+ 155,
+ 411,
+ 91,
+ 347,
+ 219,
+ 475,
+ 59,
+ 315,
+ 187,
+ 443,
+ 123,
+ 379,
+ 251,
+ 507,
+ 7,
+ 1031,
+ 519,
+ 1543,
+ 263,
+ 1287,
+ 775,
+ 1799,
+ 135,
+ 1159,
+ 647,
+ 1671,
+ 391,
+ 1415,
+ 903,
+ 1927,
+ 71,
+ 1095,
+ 583,
+ 1607,
+ 327,
+ 1351,
+ 839,
+ 1863,
+ 199,
+ 1223,
+ 711,
+ 1735,
+ 455,
+ 1479,
+ 967,
+ 1991,
+ 39,
+ 1063,
+ 551,
+ 1575,
+ 295,
+ 1319,
+ 807,
+ 1831,
+ 167,
+ 1191,
+ 679,
+ 1703,
+ 423,
+ 1447,
+ 935,
+ 1959,
+ 103,
+ 1127,
+ 615,
+ 1639,
+ 359,
+ 1383,
+ 871,
+ 1895,
+ 231,
+ 1255,
+ 743,
+ 1767,
+ 487,
+ 1511,
+ 999,
+ 2023,
+ 23,
+ 1047,
+ 535,
+ 1559,
+ 279,
+ 1303,
+ 791,
+ 1815,
+ 151,
+ 1175,
+ 663,
+ 1687,
+ 407,
+ 1431,
+ 919,
+ 1943,
+ 87,
+ 1111,
+ 599,
+ 1623,
+ 343,
+ 1367,
+ 855,
+ 1879,
+ 215,
+ 1239,
+ 727,
+ 1751,
+ 471,
+ 1495,
+ 983,
+ 2007,
+ 55,
+ 1079,
+ 567,
+ 1591,
+ 311,
+ 1335,
+ 823,
+ 1847,
+ 183,
+ 1207,
+ 695,
+ 1719,
+ 439,
+ 1463,
+ 951,
+ 1975,
+ 119,
+ 1143,
+ 631,
+ 1655,
+ 375,
+ 1399,
+ 887,
+ 1911,
+ 247,
+ 1271,
+ 759,
+ 1783,
+ 503,
+ 1527,
+ 1015,
+ 2039,
+ 15,
+ 1039,
+ 527,
+ 1551,
+ 271,
+ 1295,
+ 783,
+ 1807,
+ 143,
+ 1167,
+ 655,
+ 1679,
+ 399,
+ 1423,
+ 911,
+ 1935,
+ 79,
+ 1103,
+ 591,
+ 1615,
+ 335,
+ 1359,
+ 847,
+ 1871,
+ 207,
+ 1231,
+ 719,
+ 1743,
+ 463,
+ 1487,
+ 975,
+ 1999,
+ 47,
+ 1071,
+ 559,
+ 1583,
+ 303,
+ 1327,
+ 815,
+ 1839,
+ 175,
+ 1199,
+ 687,
+ 1711,
+ 431,
+ 1455,
+ 943,
+ 1967,
+ 111,
+ 1135,
+ 623,
+ 1647,
+ 367,
+ 1391,
+ 879,
+ 1903,
+ 239,
+ 1263,
+ 751,
+ 1775,
+ 495,
+ 1519,
+ 1007,
+ 2031,
+ 31,
+ 1055,
+ 543,
+ 1567,
+ 287,
+ 1311,
+ 799,
+ 1823,
+ 159,
+ 1183,
+ 671,
+ 1695,
+ 415,
+ 1439,
+ 927,
+ 1951,
+ 95,
+ 1119,
+ 607,
+ 1631,
+ 351,
+ 1375,
+ 863,
+ 1887,
+ 223,
+ 1247,
+ 735,
+ 1759,
+ 479,
+ 1503,
+ 991,
+ 2015,
+ 63,
+ 1087,
+ 575,
+ 1599,
+ 319,
+ 1343,
+ 831,
+ 1855,
+ 191,
+ 1215,
+ 703,
+ 1727,
+ 447,
+ 1471,
+ 959,
+ 1983,
+ 127,
+ 1151,
+ 639,
+ 1663,
+ 383,
+ 1407,
+ 895,
+ 1919,
+ 255,
+ 1279,
+ 767,
+ 1791,
+ 511,
+ 1535,
+ 1023,
+ 2047,
+}
+
+func storeStaticCommandHuffmanTree(storage_ix *uint, storage []byte) {
+ writeBits(56, 0x92624416307003, storage_ix, storage)
+ writeBits(3, 0x00000000, storage_ix, storage)
+}
+
+var kStaticDistanceCodeBits = [64]uint16{
+ 0,
+ 32,
+ 16,
+ 48,
+ 8,
+ 40,
+ 24,
+ 56,
+ 4,
+ 36,
+ 20,
+ 52,
+ 12,
+ 44,
+ 28,
+ 60,
+ 2,
+ 34,
+ 18,
+ 50,
+ 10,
+ 42,
+ 26,
+ 58,
+ 6,
+ 38,
+ 22,
+ 54,
+ 14,
+ 46,
+ 30,
+ 62,
+ 1,
+ 33,
+ 17,
+ 49,
+ 9,
+ 41,
+ 25,
+ 57,
+ 5,
+ 37,
+ 21,
+ 53,
+ 13,
+ 45,
+ 29,
+ 61,
+ 3,
+ 35,
+ 19,
+ 51,
+ 11,
+ 43,
+ 27,
+ 59,
+ 7,
+ 39,
+ 23,
+ 55,
+ 15,
+ 47,
+ 31,
+ 63,
+}
+
+func storeStaticDistanceHuffmanTree(storage_ix *uint, storage []byte) {
+ writeBits(28, 0x0369DC03, storage_ix, storage)
+}
diff --git a/vendor/github.com/andybalholm/brotli/fast_log.go b/vendor/github.com/andybalholm/brotli/fast_log.go
new file mode 100644
index 0000000..9d6607f
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/fast_log.go
@@ -0,0 +1,290 @@
+package brotli
+
+import (
+ "math"
+ "math/bits"
+)
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Utilities for fast computation of logarithms. */
+
+func log2FloorNonZero(n uint) uint32 {
+ return uint32(bits.Len(n)) - 1
+}
+
+/* A lookup table for small values of log2(int) to be used in entropy
+ computation.
+
+ ", ".join(["%.16ff" % x for x in [0.0]+[log2(x) for x in range(1, 256)]]) */
+var kLog2Table = []float32{
+ 0.0000000000000000,
+ 0.0000000000000000,
+ 1.0000000000000000,
+ 1.5849625007211563,
+ 2.0000000000000000,
+ 2.3219280948873622,
+ 2.5849625007211561,
+ 2.8073549220576042,
+ 3.0000000000000000,
+ 3.1699250014423126,
+ 3.3219280948873626,
+ 3.4594316186372978,
+ 3.5849625007211565,
+ 3.7004397181410922,
+ 3.8073549220576037,
+ 3.9068905956085187,
+ 4.0000000000000000,
+ 4.0874628412503400,
+ 4.1699250014423122,
+ 4.2479275134435852,
+ 4.3219280948873626,
+ 4.3923174227787607,
+ 4.4594316186372973,
+ 4.5235619560570131,
+ 4.5849625007211570,
+ 4.6438561897747244,
+ 4.7004397181410926,
+ 4.7548875021634691,
+ 4.8073549220576037,
+ 4.8579809951275728,
+ 4.9068905956085187,
+ 4.9541963103868758,
+ 5.0000000000000000,
+ 5.0443941193584534,
+ 5.0874628412503400,
+ 5.1292830169449664,
+ 5.1699250014423122,
+ 5.2094533656289501,
+ 5.2479275134435852,
+ 5.2854022188622487,
+ 5.3219280948873626,
+ 5.3575520046180838,
+ 5.3923174227787607,
+ 5.4262647547020979,
+ 5.4594316186372973,
+ 5.4918530963296748,
+ 5.5235619560570131,
+ 5.5545888516776376,
+ 5.5849625007211570,
+ 5.6147098441152083,
+ 5.6438561897747244,
+ 5.6724253419714961,
+ 5.7004397181410926,
+ 5.7279204545631996,
+ 5.7548875021634691,
+ 5.7813597135246599,
+ 5.8073549220576046,
+ 5.8328900141647422,
+ 5.8579809951275719,
+ 5.8826430493618416,
+ 5.9068905956085187,
+ 5.9307373375628867,
+ 5.9541963103868758,
+ 5.9772799234999168,
+ 6.0000000000000000,
+ 6.0223678130284544,
+ 6.0443941193584534,
+ 6.0660891904577721,
+ 6.0874628412503400,
+ 6.1085244567781700,
+ 6.1292830169449672,
+ 6.1497471195046822,
+ 6.1699250014423122,
+ 6.1898245588800176,
+ 6.2094533656289510,
+ 6.2288186904958804,
+ 6.2479275134435861,
+ 6.2667865406949019,
+ 6.2854022188622487,
+ 6.3037807481771031,
+ 6.3219280948873617,
+ 6.3398500028846252,
+ 6.3575520046180847,
+ 6.3750394313469254,
+ 6.3923174227787598,
+ 6.4093909361377026,
+ 6.4262647547020979,
+ 6.4429434958487288,
+ 6.4594316186372982,
+ 6.4757334309663976,
+ 6.4918530963296748,
+ 6.5077946401986964,
+ 6.5235619560570131,
+ 6.5391588111080319,
+ 6.5545888516776376,
+ 6.5698556083309478,
+ 6.5849625007211561,
+ 6.5999128421871278,
+ 6.6147098441152092,
+ 6.6293566200796095,
+ 6.6438561897747253,
+ 6.6582114827517955,
+ 6.6724253419714952,
+ 6.6865005271832185,
+ 6.7004397181410917,
+ 6.7142455176661224,
+ 6.7279204545631988,
+ 6.7414669864011465,
+ 6.7548875021634691,
+ 6.7681843247769260,
+ 6.7813597135246599,
+ 6.7944158663501062,
+ 6.8073549220576037,
+ 6.8201789624151887,
+ 6.8328900141647422,
+ 6.8454900509443757,
+ 6.8579809951275719,
+ 6.8703647195834048,
+ 6.8826430493618416,
+ 6.8948177633079437,
+ 6.9068905956085187,
+ 6.9188632372745955,
+ 6.9307373375628867,
+ 6.9425145053392399,
+ 6.9541963103868758,
+ 6.9657842846620879,
+ 6.9772799234999168,
+ 6.9886846867721664,
+ 7.0000000000000000,
+ 7.0112272554232540,
+ 7.0223678130284544,
+ 7.0334230015374501,
+ 7.0443941193584534,
+ 7.0552824355011898,
+ 7.0660891904577721,
+ 7.0768155970508317,
+ 7.0874628412503400,
+ 7.0980320829605272,
+ 7.1085244567781700,
+ 7.1189410727235076,
+ 7.1292830169449664,
+ 7.1395513523987937,
+ 7.1497471195046822,
+ 7.1598713367783891,
+ 7.1699250014423130,
+ 7.1799090900149345,
+ 7.1898245588800176,
+ 7.1996723448363644,
+ 7.2094533656289492,
+ 7.2191685204621621,
+ 7.2288186904958804,
+ 7.2384047393250794,
+ 7.2479275134435861,
+ 7.2573878426926521,
+ 7.2667865406949019,
+ 7.2761244052742384,
+ 7.2854022188622487,
+ 7.2946207488916270,
+ 7.3037807481771031,
+ 7.3128829552843557,
+ 7.3219280948873617,
+ 7.3309168781146177,
+ 7.3398500028846243,
+ 7.3487281542310781,
+ 7.3575520046180847,
+ 7.3663222142458151,
+ 7.3750394313469254,
+ 7.3837042924740528,
+ 7.3923174227787607,
+ 7.4008794362821844,
+ 7.4093909361377026,
+ 7.4178525148858991,
+ 7.4262647547020979,
+ 7.4346282276367255,
+ 7.4429434958487288,
+ 7.4512111118323299,
+ 7.4594316186372973,
+ 7.4676055500829976,
+ 7.4757334309663976,
+ 7.4838157772642564,
+ 7.4918530963296748,
+ 7.4998458870832057,
+ 7.5077946401986964,
+ 7.5156998382840436,
+ 7.5235619560570131,
+ 7.5313814605163119,
+ 7.5391588111080319,
+ 7.5468944598876373,
+ 7.5545888516776376,
+ 7.5622424242210728,
+ 7.5698556083309478,
+ 7.5774288280357487,
+ 7.5849625007211561,
+ 7.5924570372680806,
+ 7.5999128421871278,
+ 7.6073303137496113,
+ 7.6147098441152075,
+ 7.6220518194563764,
+ 7.6293566200796095,
+ 7.6366246205436488,
+ 7.6438561897747244,
+ 7.6510516911789290,
+ 7.6582114827517955,
+ 7.6653359171851765,
+ 7.6724253419714952,
+ 7.6794800995054464,
+ 7.6865005271832185,
+ 7.6934869574993252,
+ 7.7004397181410926,
+ 7.7073591320808825,
+ 7.7142455176661224,
+ 7.7210991887071856,
+ 7.7279204545631996,
+ 7.7347096202258392,
+ 7.7414669864011465,
+ 7.7481928495894596,
+ 7.7548875021634691,
+ 7.7615512324444795,
+ 7.7681843247769260,
+ 7.7747870596011737,
+ 7.7813597135246608,
+ 7.7879025593914317,
+ 7.7944158663501062,
+ 7.8008998999203047,
+ 7.8073549220576037,
+ 7.8137811912170374,
+ 7.8201789624151887,
+ 7.8265484872909159,
+ 7.8328900141647422,
+ 7.8392037880969445,
+ 7.8454900509443757,
+ 7.8517490414160571,
+ 7.8579809951275719,
+ 7.8641861446542798,
+ 7.8703647195834048,
+ 7.8765169465650002,
+ 7.8826430493618425,
+ 7.8887432488982601,
+ 7.8948177633079446,
+ 7.9008668079807496,
+ 7.9068905956085187,
+ 7.9128893362299619,
+ 7.9188632372745955,
+ 7.9248125036057813,
+ 7.9307373375628867,
+ 7.9366379390025719,
+ 7.9425145053392399,
+ 7.9483672315846778,
+ 7.9541963103868758,
+ 7.9600019320680806,
+ 7.9657842846620870,
+ 7.9715435539507720,
+ 7.9772799234999168,
+ 7.9829935746943104,
+ 7.9886846867721664,
+ 7.9943534368588578,
+}
+
+/* Faster logarithm for small integers, with the property of log2(0) == 0. */
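+/* (kLog2Table[0] is 0, so entropy computations can sum count*fastLog2(count)
+   terms without special-casing zero counts.) */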
+func fastLog2(v uint) float64 {
+ if v < uint(len(kLog2Table)) {
+ return float64(kLog2Table[v])
+ }
+
+ return math.Log2(float64(v))
+}
diff --git a/vendor/github.com/andybalholm/brotli/find_match_length.go b/vendor/github.com/andybalholm/brotli/find_match_length.go
new file mode 100644
index 0000000..09d2ae6
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/find_match_length.go
@@ -0,0 +1,45 @@
+package brotli
+
+import (
+ "encoding/binary"
+ "math/bits"
+ "runtime"
+)
+
+/* Copyright 2010 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Function to find maximal matching prefixes of strings. */
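+/* The word-at-a-time comparison below relies on little-endian loads: the
+   lowest-addressed byte lands in the least significant bits, so the number
+   of trailing zero bits of w1^w2, divided by 8, is the index of the first
+   differing byte. */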
+func findMatchLengthWithLimit(s1 []byte, s2 []byte, limit uint) uint {
+ var matched uint = 0
+	_, _ = s1[limit-1], s2[limit-1] // force a single bounds check up front
+ switch runtime.GOARCH {
+ case "amd64":
+		// Compare 8 bytes at a time.
+ for matched+8 <= limit {
+ w1 := binary.LittleEndian.Uint64(s1[matched:])
+ w2 := binary.LittleEndian.Uint64(s2[matched:])
+ if w1 != w2 {
+ return matched + uint(bits.TrailingZeros64(w1^w2)>>3)
+ }
+ matched += 8
+ }
+ case "386":
+		// Compare 4 bytes at a time.
+ for matched+4 <= limit {
+ w1 := binary.LittleEndian.Uint32(s1[matched:])
+ w2 := binary.LittleEndian.Uint32(s2[matched:])
+ if w1 != w2 {
+ return matched + uint(bits.TrailingZeros32(w1^w2)>>3)
+ }
+ matched += 4
+ }
+ }
+ for matched < limit && s1[matched] == s2[matched] {
+ matched++
+ }
+ return matched
+}
diff --git a/vendor/github.com/andybalholm/brotli/h10.go b/vendor/github.com/andybalholm/brotli/h10.go
new file mode 100644
index 0000000..5662fbb
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/h10.go
@@ -0,0 +1,287 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2016 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+func (*h10) HashTypeLength() uint {
+ return 4
+}
+
+func (*h10) StoreLookahead() uint {
+ return 128
+}
+
+func hashBytesH10(data []byte) uint32 {
+ var h uint32 = binary.LittleEndian.Uint32(data) * kHashMul32
+
+ /* The higher bits contain more mixture from the multiplication,
+ so we take our results from there. */
+ return h >> (32 - 17)
+}
+
+/* A (forgetful) hash table where each hash bucket contains a binary tree of
+ sequences whose first 4 bytes share the same hash code.
+   Each sequence is 128 bytes long and is identified by its starting
+ position in the input data. The binary tree is sorted by the lexicographic
+ order of the sequences, and it is also a max-heap with respect to the
+ starting positions. */
+type h10 struct {
+ hasherCommon
+ window_mask_ uint
+ buckets_ [1 << 17]uint32
+ invalid_pos_ uint32
+ forest []uint32
+}
+
+func (h *h10) Initialize(params *encoderParams) {
+ h.window_mask_ = (1 << params.lgwin) - 1
+ h.invalid_pos_ = uint32(0 - h.window_mask_)
+ var num_nodes uint = uint(1) << params.lgwin
+ h.forest = make([]uint32, 2*num_nodes)
+}
+
+func (h *h10) Prepare(one_shot bool, input_size uint, data []byte) {
+ var invalid_pos uint32 = h.invalid_pos_
+ var i uint32
+ for i = 0; i < 1<<17; i++ {
+ h.buckets_[i] = invalid_pos
+ }
+}
+
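+/* Each position owns a fixed pair of slots in the forest: index
+   2*(pos&window_mask_) holds its left child and 2*(pos&window_mask_)+1 its
+   right child, so the tree needs no separate node allocation. */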
+func leftChildIndexH10(self *h10, pos uint) uint {
+ return 2 * (pos & self.window_mask_)
+}
+
+func rightChildIndexH10(self *h10, pos uint) uint {
+ return 2*(pos&self.window_mask_) + 1
+}
+
+/* Stores the hash of the next 4 bytes and, in a single tree traversal,
+   searches the hash bucket's binary tree for matches while re-rooting it
+   at the current position.
+
+   If fewer than 128 bytes of data are available, the hash bucket of the
+   current position is searched for matches, but the state of the hash table
+   is not changed, since we cannot know the final sorting order of the
+   current (incomplete) sequence.
+
+ This function must be called with increasing cur_ix positions. */
+func storeAndFindMatchesH10(self *h10, data []byte, cur_ix uint, ring_buffer_mask uint, max_length uint, max_backward uint, best_len *uint, matches []backwardMatch) []backwardMatch {
+ var cur_ix_masked uint = cur_ix & ring_buffer_mask
+ var max_comp_len uint = brotli_min_size_t(max_length, 128)
+ var should_reroot_tree bool = (max_length >= 128)
+ var key uint32 = hashBytesH10(data[cur_ix_masked:])
+ var forest []uint32 = self.forest
+ var prev_ix uint = uint(self.buckets_[key])
+ var node_left uint = leftChildIndexH10(self, cur_ix)
+ var node_right uint = rightChildIndexH10(self, cur_ix)
+ var best_len_left uint = 0
+ var best_len_right uint = 0
+ var depth_remaining uint
+	/* node_left and node_right are the forest indices of the rightmost node
+	   of the left subtree and the leftmost node of the right subtree of the
+	   new root; best_len_left and best_len_right are the corresponding match
+	   lengths. All four are updated as we traverse and re-root the tree of
+	   the hash bucket. */
+ if should_reroot_tree {
+ self.buckets_[key] = uint32(cur_ix)
+ }
+
+ for depth_remaining = 64; ; depth_remaining-- {
+ var backward uint = cur_ix - prev_ix
+ var prev_ix_masked uint = prev_ix & ring_buffer_mask
+ if backward == 0 || backward > max_backward || depth_remaining == 0 {
+ if should_reroot_tree {
+ forest[node_left] = self.invalid_pos_
+ forest[node_right] = self.invalid_pos_
+ }
+
+ break
+ }
+ {
+ var cur_len uint = brotli_min_size_t(best_len_left, best_len_right)
+ var len uint
+ assert(cur_len <= 128)
+ len = cur_len + findMatchLengthWithLimit(data[cur_ix_masked+cur_len:], data[prev_ix_masked+cur_len:], max_length-cur_len)
+ if matches != nil && len > *best_len {
+ *best_len = uint(len)
+ initBackwardMatch(&matches[0], backward, uint(len))
+ matches = matches[1:]
+ }
+
+ if len >= max_comp_len {
+ if should_reroot_tree {
+ forest[node_left] = forest[leftChildIndexH10(self, prev_ix)]
+ forest[node_right] = forest[rightChildIndexH10(self, prev_ix)]
+ }
+
+ break
+ }
+
+ if data[cur_ix_masked+len] > data[prev_ix_masked+len] {
+ best_len_left = uint(len)
+ if should_reroot_tree {
+ forest[node_left] = uint32(prev_ix)
+ }
+
+ node_left = rightChildIndexH10(self, prev_ix)
+ prev_ix = uint(forest[node_left])
+ } else {
+ best_len_right = uint(len)
+ if should_reroot_tree {
+ forest[node_right] = uint32(prev_ix)
+ }
+
+ node_right = leftChildIndexH10(self, prev_ix)
+ prev_ix = uint(forest[node_right])
+ }
+ }
+ }
+
+ return matches
+}
+
+/* Finds all backward matches of &data[cur_ix & ring_buffer_mask] up to the
+ length of max_length and stores the position cur_ix in the hash table.
+
+ Sets *num_matches to the number of matches found, and stores the found
+ matches in matches[0] to matches[*num_matches - 1]. The matches will be
+ sorted by strictly increasing length and (non-strictly) increasing
+ distance. */
+func findAllMatchesH10(handle *h10, dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, cur_ix uint, max_length uint, max_backward uint, gap uint, params *encoderParams, matches []backwardMatch) uint {
+ var orig_matches []backwardMatch = matches
+ var cur_ix_masked uint = cur_ix & ring_buffer_mask
+ var best_len uint = 1
+ var short_match_max_backward uint
+ if params.quality != hqZopflificationQuality {
+ short_match_max_backward = 16
+ } else {
+ short_match_max_backward = 64
+ }
+ var stop uint = cur_ix - short_match_max_backward
+ var dict_matches [maxStaticDictionaryMatchLen + 1]uint32
+ var i uint
+ if cur_ix < short_match_max_backward {
+ stop = 0
+ }
+ for i = cur_ix - 1; i > stop && best_len <= 2; i-- {
+ var prev_ix uint = i
+ var backward uint = cur_ix - prev_ix
+ if backward > max_backward {
+ break
+ }
+
+ prev_ix &= ring_buffer_mask
+ if data[cur_ix_masked] != data[prev_ix] || data[cur_ix_masked+1] != data[prev_ix+1] {
+ continue
+ }
+ {
+ var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len > best_len {
+ best_len = uint(len)
+ initBackwardMatch(&matches[0], backward, uint(len))
+ matches = matches[1:]
+ }
+ }
+ }
+
+ if best_len < max_length {
+ matches = storeAndFindMatchesH10(handle, data, cur_ix, ring_buffer_mask, max_length, max_backward, &best_len, matches)
+ }
+
+ for i = 0; i <= maxStaticDictionaryMatchLen; i++ {
+ dict_matches[i] = kInvalidMatch
+ }
+ {
+ var minlen uint = brotli_max_size_t(4, best_len+1)
+ if findAllStaticDictionaryMatches(dictionary, data[cur_ix_masked:], minlen, max_length, dict_matches[0:]) {
+ var maxlen uint = brotli_min_size_t(maxStaticDictionaryMatchLen, max_length)
+ var l uint
+ for l = minlen; l <= maxlen; l++ {
+ var dict_id uint32 = dict_matches[l]
+ if dict_id < kInvalidMatch {
+ var distance uint = max_backward + gap + uint(dict_id>>5) + 1
+ if distance <= params.dist.max_distance {
+ initDictionaryBackwardMatch(&matches[0], distance, l, uint(dict_id&31))
+ matches = matches[1:]
+ }
+ }
+ }
+ }
+ }
+
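+	/* Each recorded match advanced the matches slice by one element, so the
+	   difference in capacity gives the number of matches written. */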
+ return uint(-cap(matches) + cap(orig_matches))
+}
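A note on the return value above: findAllMatchesH10 never keeps an explicit counter; it advances the `matches` slice with `matches = matches[1:]` after each write, so the capacity lost relative to `orig_matches` equals the number of matches emitted. A minimal standalone sketch of the idiom (the `match` type here is a stand-in, not the package's `backwardMatch`):

```go
package main

import "fmt"

// match is a stand-in for backwardMatch; only the counting idiom matters.
type match struct{ distance, length uint32 }

func main() {
	storage := make([]match, 8)
	orig := storage
	out := storage

	// Write each result into out[0], then advance the window, exactly as
	// the hasher does; every advance shrinks cap(out) by one.
	out[0] = match{distance: 42, length: 5}
	out = out[1:]
	out[0] = match{distance: 7, length: 6}
	out = out[1:]

	fmt.Println(cap(orig) - cap(out)) // 2 matches were written
}
```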
+
+/* Stores the hash of the next 4 bytes and re-roots the binary tree at the
+ current sequence, without returning any matches.
+ REQUIRES: ix + 128 <= end-of-current-block */
+func (h *h10) Store(data []byte, mask uint, ix uint) {
+ var max_backward uint = h.window_mask_ - windowGap + 1
+ /* Maximum distance is window size - 16, see section 9.1. of the spec. */
+ storeAndFindMatchesH10(h, data, ix, mask, 128, max_backward, nil, nil)
+}
+
+func (h *h10) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
+ var i uint = ix_start
+ var j uint = ix_start
+ if ix_start+63 <= ix_end {
+ i = ix_end - 63
+ }
+
+ if ix_start+512 <= i {
+ for ; j < i; j += 8 {
+ h.Store(data, mask, j)
+ }
+ }
+
+ for ; i < ix_end; i++ {
+ h.Store(data, mask, i)
+ }
+}
+
+func (h *h10) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
+ if num_bytes >= h.HashTypeLength()-1 && position >= 128 {
+ var i_start uint = position - 128 + 1
+ var i_end uint = brotli_min_size_t(position, i_start+num_bytes)
+ /* Store the last `128 - 1` positions in the hasher.
+ These could not be calculated before, since they require knowledge
+ of both the previous and the current block. */
+
+ var i uint
+ for i = i_start; i < i_end; i++ {
+ /* Maximum distance is window size - 16, see section 9.1. of the spec.
+ Furthermore, we have to make sure that we don't look further back
+ from the start of the next block than the window size, otherwise we
+ could access already overwritten areas of the ring-buffer. */
+ var max_backward uint = h.window_mask_ - brotli_max_size_t(windowGap-1, position-i)
+
+ /* We know that i + 128 <= position + num_bytes, i.e. the
+ end of the current block and that we have at least
+ 128 tail in the ring-buffer. */
+ storeAndFindMatchesH10(h, ringbuffer, i, ringbuffer_mask, 128, max_backward, nil, nil)
+ }
+ }
+}
+
+/* MAX_NUM_MATCHES == 64 + MAX_TREE_SEARCH_DEPTH */
+const maxNumMatchesH10 = 128
+
+func (*h10) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) {
+ panic("unimplemented")
+}
+
+func (*h10) PrepareDistanceCache(distance_cache []int) {
+ panic("unimplemented")
+}
diff --git a/vendor/github.com/andybalholm/brotli/h5.go b/vendor/github.com/andybalholm/brotli/h5.go
new file mode 100644
index 0000000..f391b73
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/h5.go
@@ -0,0 +1,214 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2010 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* A (forgetful) hash table to the data seen by the compressor, to
+ help create backward references to previous data.
+
+ This is a hash map of fixed size (bucket_size_) to a ring buffer of
+ fixed size (block_size_). The ring buffer contains the last block_size_
+ index positions of the given hash key in the compressed data. */
+func (*h5) HashTypeLength() uint {
+ return 4
+}
+
+func (*h5) StoreLookahead() uint {
+ return 4
+}
+
+/* HashBytes is the function that chooses the bucket to place the address in. */
+func hashBytesH5(data []byte, shift int) uint32 {
+ var h uint32 = binary.LittleEndian.Uint32(data) * kHashMul32
+
+ /* The higher bits contain more mixture from the multiplication,
+ so we take our results from there. */
+ return uint32(h >> uint(shift))
+}
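hashBytesH5 is a plain multiplicative hash: one odd-constant multiply, then keep the top `bucket_bits` bits, which are the best-mixed. A self-contained sketch of the same shape (the 14-bit table size is an arbitrary choice for the example):

```go
package main

import (
	"encoding/binary"
	"fmt"
)

const kHashMul32 uint32 = 0x1E35A7BD // the package's odd multiplier

// bucketFor mirrors hashBytesH5: multiply, then keep the top bits,
// where the multiplication has mixed the input most thoroughly.
func bucketFor(data []byte, bucketBits uint) uint32 {
	h := binary.LittleEndian.Uint32(data) * kHashMul32
	return h >> (32 - bucketBits)
}

func main() {
	fmt.Println(bucketFor([]byte("abcd"), 14))
	fmt.Println(bucketFor([]byte("abce"), 14)) // one byte differs, bucket scatters
}
```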
+
+type h5 struct {
+ hasherCommon
+ bucket_size_ uint
+ block_size_ uint
+ hash_shift_ int
+ block_mask_ uint32
+ num []uint16
+ buckets []uint32
+}
+
+func (h *h5) Initialize(params *encoderParams) {
+ h.hash_shift_ = 32 - h.params.bucket_bits
+ h.bucket_size_ = uint(1) << uint(h.params.bucket_bits)
+ h.block_size_ = uint(1) << uint(h.params.block_bits)
+ h.block_mask_ = uint32(h.block_size_ - 1)
+ h.num = make([]uint16, h.bucket_size_)
+ h.buckets = make([]uint32, h.block_size_*h.bucket_size_)
+}
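Initialize lays the table out as `bucket_size_` blocks of `block_size_` slots in one flat slice; `num[key]` counts stores into bucket `key` and, masked with `block_mask_`, doubles as the ring-buffer write position inside that block. A toy-sized illustration (bucket/block bits shrunk for readability; the offset arithmetic mirrors Store below):

```go
package main

import "fmt"

// Toy dimensions: 4 buckets of 8 slots, flattened into one slice, as in h5
// with bucket_bits=2 and block_bits=3.
const (
	bucketBits = 2
	blockBits  = 3
)

func main() {
	blockMask := uint32(1<<blockBits - 1)
	num := make([]uint16, 1<<bucketBits)
	buckets := make([]uint32, 1<<(bucketBits+blockBits))

	store := func(key, ix uint32) {
		minor := uint32(num[key]) & blockMask // ring position inside the block
		buckets[key<<blockBits+minor] = ix
		num[key]++
	}

	for ix := uint32(0); ix < 10; ix++ {
		store(1, ix) // 10 stores into an 8-slot block: the 2 oldest are lost
	}
	fmt.Println(buckets[1<<blockBits : 2<<blockBits]) // [8 9 2 3 4 5 6 7]
}
```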
+
+func (h *h5) Prepare(one_shot bool, input_size uint, data []byte) {
+ var num []uint16 = h.num
+ var partial_prepare_threshold uint = h.bucket_size_ >> 6
+ /* Partial preparation is 100 times slower (per socket). */
+ if one_shot && input_size <= partial_prepare_threshold {
+ var i uint
+ for i = 0; i < input_size; i++ {
+ var key uint32 = hashBytesH5(data[i:], h.hash_shift_)
+ num[key] = 0
+ }
+ } else {
+ for i := 0; i < int(h.bucket_size_); i++ {
+ num[i] = 0
+ }
+ }
+}
+
+/* Look at 4 bytes at &data[ix & mask].
+ Compute a hash from these, and store the value of ix at that position. */
+func (h *h5) Store(data []byte, mask uint, ix uint) {
+ var num []uint16 = h.num
+ var key uint32 = hashBytesH5(data[ix&mask:], h.hash_shift_)
+ var minor_ix uint = uint(num[key]) & uint(h.block_mask_)
+ var offset uint = minor_ix + uint(key<<uint(h.params.block_bits))
+ h.buckets[offset] = uint32(ix)
+ num[key]++
+}
+
+func (h *h5) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
+ var i uint
+ for i = ix_start; i < ix_end; i++ {
+ h.Store(data, mask, i)
+ }
+}
+
+func (h *h5) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
+ if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
+ /* Prepare the hashes for three last bytes of the last write.
+ These could not be calculated before, since they require knowledge
+ of both the previous and the current block. */
+ h.Store(ringbuffer, ringbuffer_mask, position-3)
+ h.Store(ringbuffer, ringbuffer_mask, position-2)
+ h.Store(ringbuffer, ringbuffer_mask, position-1)
+ }
+}
+
+func (h *h5) PrepareDistanceCache(distance_cache []int) {
+ prepareDistanceCache(distance_cache, h.params.num_last_distances_to_check)
+}
+
+/* Find a longest backward match of &data[cur_ix] up to the length of
+ max_length and stores the position cur_ix in the hash table.
+
+ REQUIRES: PrepareDistanceCacheH5 must be invoked for current distance cache
+ values; if this method is invoked repeatedly with the same distance
+ cache values, it is enough to invoke PrepareDistanceCacheH5 once.
+
+ Does not look for matches longer than max_length.
+ Does not look for matches further away than max_backward.
+ Writes the best match into |out|.
+ |out|->score is updated only if a better match is found. */
+func (h *h5) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) {
+ var num []uint16 = h.num
+ var buckets []uint32 = h.buckets
+ var cur_ix_masked uint = cur_ix & ring_buffer_mask
+ var min_score uint = out.score
+ var best_score uint = out.score
+ var best_len uint = out.len
+ var i uint
+ var bucket []uint32
+ /* Don't accept a short copy from far away. */
+ out.len = 0
+
+ out.len_code_delta = 0
+
+ /* Try last distance first. */
+ for i = 0; i < uint(h.params.num_last_distances_to_check); i++ {
+ var backward uint = uint(distance_cache[i])
+ var prev_ix uint = uint(cur_ix - backward)
+ if prev_ix >= cur_ix {
+ continue
+ }
+
+ if backward > max_backward {
+ continue
+ }
+
+ prev_ix &= ring_buffer_mask
+
+ if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] {
+ continue
+ }
+ {
+ var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len >= 3 || (len == 2 && i < 2) {
+ /* Comparing for >= 2 does not change the semantics, but just saves for
+ a few unnecessary binary logarithms in backward reference score,
+ since we are not interested in such short matches. */
+ var score uint = backwardReferenceScoreUsingLastDistance(uint(len))
+ if best_score < score {
+ if i != 0 {
+ score -= backwardReferencePenaltyUsingLastDistance(i)
+ }
+ if best_score < score {
+ best_score = score
+ best_len = uint(len)
+ out.len = best_len
+ out.distance = backward
+ out.score = best_score
+ }
+ }
+ }
+ }
+ }
+ {
+ var key uint32 = hashBytesH5(data[cur_ix_masked:], h.hash_shift_)
+ bucket = buckets[key<<uint(h.params.block_bits):]
+ var down uint
+ if uint(num[key]) > h.block_size_ {
+ down = uint(num[key]) - h.block_size_
+ } else {
+ down = 0
+ }
+ for i = uint(num[key]); i > down; {
+ var prev_ix uint
+ i--
+ prev_ix = uint(bucket[uint32(i)&h.block_mask_])
+ var backward uint = cur_ix - prev_ix
+ if backward > max_backward {
+ break
+ }
+
+ prev_ix &= ring_buffer_mask
+ if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] {
+ continue
+ }
+ {
+ var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len >= 4 {
+ /* Comparing for >= 3 does not change the semantics, but just saves
+ for a few unnecessary binary logarithms in backward reference
+ score, since we are not interested in such short matches. */
+ var score uint = backwardReferenceScore(uint(len), backward)
+ if best_score < score {
+ best_score = score
+ best_len = uint(len)
+ out.len = best_len
+ out.distance = backward
+ out.score = best_score
+ }
+ }
+ }
+ }
+
+ bucket[uint32(num[key])&h.block_mask_] = uint32(cur_ix)
+ num[key]++
+ }
+
+ if min_score == out.score {
+ searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false)
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/h6.go b/vendor/github.com/andybalholm/brotli/h6.go
new file mode 100644
index 0000000..80bb224
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/h6.go
@@ -0,0 +1,216 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2010 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* A (forgetful) hash table to the data seen by the compressor, to
+ help create backward references to previous data.
+
+ This is a hash map of fixed size (bucket_size_) to a ring buffer of
+ fixed size (block_size_). The ring buffer contains the last block_size_
+ index positions of the given hash key in the compressed data. */
+func (*h6) HashTypeLength() uint {
+ return 8
+}
+
+func (*h6) StoreLookahead() uint {
+ return 8
+}
+
+/* HashBytes is the function that chooses the bucket to place the address in. */
+func hashBytesH6(data []byte, mask uint64, shift int) uint32 {
+ var h uint64 = (binary.LittleEndian.Uint64(data) & mask) * kHashMul64Long
+
+ /* The higher bits contain more mixture from the multiplication,
+ so we take our results from there. */
+ return uint32(h >> uint(shift))
+}
+
+type h6 struct {
+ hasherCommon
+ bucket_size_ uint
+ block_size_ uint
+ hash_shift_ int
+ hash_mask_ uint64
+ block_mask_ uint32
+ num []uint16
+ buckets []uint32
+}
+
+func (h *h6) Initialize(params *encoderParams) {
+ h.hash_shift_ = 64 - h.params.bucket_bits
+ h.hash_mask_ = (^(uint64(0))) >> uint(64-8*h.params.hash_len)
+ h.bucket_size_ = uint(1) << uint(h.params.bucket_bits)
+ h.block_size_ = uint(1) << uint(h.params.block_bits)
+ h.block_mask_ = uint32(h.block_size_ - 1)
+ h.num = make([]uint16, h.bucket_size_)
+ h.buckets = make([]uint32, h.block_size_*h.bucket_size_)
+}
+
+func (h *h6) Prepare(one_shot bool, input_size uint, data []byte) {
+ var num []uint16 = h.num
+ var partial_prepare_threshold uint = h.bucket_size_ >> 6
+ /* Partial preparation is 100 times slower (per socket). */
+ if one_shot && input_size <= partial_prepare_threshold {
+ var i uint
+ for i = 0; i < input_size; i++ {
+ var key uint32 = hashBytesH6(data[i:], h.hash_mask_, h.hash_shift_)
+ num[key] = 0
+ }
+ } else {
+ for i := 0; i < int(h.bucket_size_); i++ {
+ num[i] = 0
+ }
+ }
+}
+
+/* Look at 4 bytes at &data[ix & mask].
+ Compute a hash from these, and store the value of ix at that position. */
+func (h *h6) Store(data []byte, mask uint, ix uint) {
+ var num []uint16 = h.num
+ var key uint32 = hashBytesH6(data[ix&mask:], h.hash_mask_, h.hash_shift_)
+ var minor_ix uint = uint(num[key]) & uint(h.block_mask_)
+ var offset uint = minor_ix + uint(key<<uint(h.params.block_bits))
+ h.buckets[offset] = uint32(ix)
+ num[key]++
+}
+
+func (h *h6) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
+ var i uint
+ for i = ix_start; i < ix_end; i++ {
+ h.Store(data, mask, i)
+ }
+}
+
+func (h *h6) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
+ if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
+ /* Prepare the hashes for three last bytes of the last write.
+ These could not be calculated before, since they require knowledge
+ of both the previous and the current block. */
+ h.Store(ringbuffer, ringbuffer_mask, position-3)
+ h.Store(ringbuffer, ringbuffer_mask, position-2)
+ h.Store(ringbuffer, ringbuffer_mask, position-1)
+ }
+}
+
+func (h *h6) PrepareDistanceCache(distance_cache []int) {
+ prepareDistanceCache(distance_cache, h.params.num_last_distances_to_check)
+}
+
+/* Find a longest backward match of &data[cur_ix] up to the length of
+ max_length and stores the position cur_ix in the hash table.
+
+ REQUIRES: PrepareDistanceCacheH6 must be invoked for current distance cache
+ values; if this method is invoked repeatedly with the same distance
+ cache values, it is enough to invoke PrepareDistanceCacheH6 once.
+
+ Does not look for matches longer than max_length.
+ Does not look for matches further away than max_backward.
+ Writes the best match into |out|.
+ |out|->score is updated only if a better match is found. */
+func (h *h6) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) {
+ var num []uint16 = h.num
+ var buckets []uint32 = h.buckets
+ var cur_ix_masked uint = cur_ix & ring_buffer_mask
+ var min_score uint = out.score
+ var best_score uint = out.score
+ var best_len uint = out.len
+ var i uint
+ var bucket []uint32
+ /* Don't accept a short copy from far away. */
+ out.len = 0
+
+ out.len_code_delta = 0
+
+ /* Try last distance first. */
+ for i = 0; i < uint(h.params.num_last_distances_to_check); i++ {
+ var backward uint = uint(distance_cache[i])
+ var prev_ix uint = uint(cur_ix - backward)
+ if prev_ix >= cur_ix {
+ continue
+ }
+
+ if backward > max_backward {
+ continue
+ }
+
+ prev_ix &= ring_buffer_mask
+
+ if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] {
+ continue
+ }
+ {
+ var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len >= 3 || (len == 2 && i < 2) {
+ /* Comparing for >= 2 does not change the semantics, but just saves for
+ a few unnecessary binary logarithms in backward reference score,
+ since we are not interested in such short matches. */
+ var score uint = backwardReferenceScoreUsingLastDistance(uint(len))
+ if best_score < score {
+ if i != 0 {
+ score -= backwardReferencePenaltyUsingLastDistance(i)
+ }
+ if best_score < score {
+ best_score = score
+ best_len = uint(len)
+ out.len = best_len
+ out.distance = backward
+ out.score = best_score
+ }
+ }
+ }
+ }
+ }
+ {
+ var key uint32 = hashBytesH6(data[cur_ix_masked:], h.hash_mask_, h.hash_shift_)
+ bucket = buckets[key<<uint(h.params.block_bits):]
+ var down uint
+ if uint(num[key]) > h.block_size_ {
+ down = uint(num[key]) - h.block_size_
+ } else {
+ down = 0
+ }
+ for i = uint(num[key]); i > down; {
+ var prev_ix uint
+ i--
+ prev_ix = uint(bucket[uint32(i)&h.block_mask_])
+ var backward uint = cur_ix - prev_ix
+ if backward > max_backward {
+ break
+ }
+
+ prev_ix &= ring_buffer_mask
+ if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] {
+ continue
+ }
+ {
+ var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len >= 4 {
+ /* Comparing for >= 3 does not change the semantics, but just saves
+ for a few unnecessary binary logarithms in backward reference
+ score, since we are not interested in such short matches. */
+ var score uint = backwardReferenceScore(uint(len), backward)
+ if best_score < score {
+ best_score = score
+ best_len = uint(len)
+ out.len = best_len
+ out.distance = backward
+ out.score = best_score
+ }
+ }
+ }
+ }
+
+ bucket[uint32(num[key])&h.block_mask_] = uint32(cur_ix)
+ num[key]++
+ }
+
+ if min_score == out.score {
+ searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false)
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/hash.go b/vendor/github.com/andybalholm/brotli/hash.go
new file mode 100644
index 0000000..00f812e
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/hash.go
@@ -0,0 +1,342 @@
+package brotli
+
+import (
+ "encoding/binary"
+ "fmt"
+)
+
+type hasherCommon struct {
+ params hasherParams
+ is_prepared_ bool
+ dict_num_lookups uint
+ dict_num_matches uint
+}
+
+func (h *hasherCommon) Common() *hasherCommon {
+ return h
+}
+
+type hasherHandle interface {
+ Common() *hasherCommon
+ Initialize(params *encoderParams)
+ Prepare(one_shot bool, input_size uint, data []byte)
+ StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint)
+ HashTypeLength() uint
+ StoreLookahead() uint
+ PrepareDistanceCache(distance_cache []int)
+ FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult)
+ StoreRange(data []byte, mask uint, ix_start uint, ix_end uint)
+ Store(data []byte, mask uint, ix uint)
+}
+
+const kCutoffTransformsCount uint32 = 10
+
+/* 0, 12, 27, 23, 42, 63, 56, 48, 59, 64 */
+/* 0+0, 4+8, 8+19, 12+11, 16+26, 20+43, 24+32, 28+20, 32+27, 36+28 */
+const kCutoffTransforms uint64 = 0x071B520ADA2D3200
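The packed constant can be checked by hand: each 6-bit field holds the per-cutoff addend, and `(cut << 2) + field` reproduces the transform ids listed in the comment. This is the same arithmetic testStaticDictionaryItem applies below:

```go
package main

import "fmt"

const kCutoffTransforms uint64 = 0x071B520ADA2D3200

func main() {
	// Each 6-bit field is the addend for one cutoff value; the ids printed
	// are 0, 12, 27, 23, 42, 63, 56, 48, 59, 64, matching the comment.
	for cut := uint(0); cut < 10; cut++ {
		field := (kCutoffTransforms >> (cut * 6)) & 0x3F
		fmt.Printf("cut=%d transform_id=%d\n", cut, (cut<<2)+uint(field))
	}
}
```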
+
+type hasherSearchResult struct {
+ len uint
+ distance uint
+ score uint
+ len_code_delta int
+}
+
+/* kHashMul32 multiplier has these properties:
+ * The multiplier must be odd. Otherwise we may lose the highest bit.
+ * No long streaks of ones or zeros.
+ * There is no effort to ensure that it is a prime, the oddity is enough
+ for this use.
+ * The number has been tuned heuristically against compression benchmarks. */
+const kHashMul32 uint32 = 0x1E35A7BD
+
+const kHashMul64 uint64 = 0x1E35A7BD1E35A7BD
+
+const kHashMul64Long uint64 = 0x1FE35A7BD3579BD3
+
+func hash14(data []byte) uint32 {
+ var h uint32 = binary.LittleEndian.Uint32(data) * kHashMul32
+
+ /* The higher bits contain more mixture from the multiplication,
+ so we take our results from there. */
+ return h >> (32 - 14)
+}
+
+func prepareDistanceCache(distance_cache []int, num_distances int) {
+ if num_distances > 4 {
+ var last_distance int = distance_cache[0]
+ distance_cache[4] = last_distance - 1
+ distance_cache[5] = last_distance + 1
+ distance_cache[6] = last_distance - 2
+ distance_cache[7] = last_distance + 2
+ distance_cache[8] = last_distance - 3
+ distance_cache[9] = last_distance + 3
+ if num_distances > 10 {
+ var next_last_distance int = distance_cache[1]
+ distance_cache[10] = next_last_distance - 1
+ distance_cache[11] = next_last_distance + 1
+ distance_cache[12] = next_last_distance - 2
+ distance_cache[13] = next_last_distance + 2
+ distance_cache[14] = next_last_distance - 3
+ distance_cache[15] = next_last_distance + 3
+ }
+ }
+}
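In effect prepareDistanceCache turns the 4 real last distances into up to 16 candidates by probing small offsets around the most recent ones. A quick trace of the slot layout:

```go
package main

import "fmt"

func main() {
	cache := make([]int, 16)
	copy(cache, []int{100, 90, 80, 70}) // the four real last distances

	last := cache[0]
	cache[4], cache[5] = last-1, last+1
	cache[6], cache[7] = last-2, last+2
	cache[8], cache[9] = last-3, last+3

	fmt.Println(cache[:10]) // [100 90 80 70 99 101 98 102 97 103]
}
```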
+
+const literalByteScore = 135
+
+const distanceBitPenalty = 30
+
+/* Score must be positive after applying maximal penalty. */
+const scoreBase = (distanceBitPenalty * 8 * 8)
+
+/* Usually, we always choose the longest backward reference. This function
+ allows for the exception of that rule.
+
+ If we choose a backward reference that is further away, it will
+ usually be coded with more bits. We approximate this by assuming
+ log2(distance). If the distance can be expressed in terms of the
+ last four distances, we use some heuristic constants to estimate
+ the bits cost. For the first up to four literals we use the bit
+ cost of the literals from the literal cost model, after that we
+ use the average bit cost of the cost model.
+
+ This function is used to sometimes discard a longer backward reference
+ when it is not much longer and the bit cost for encoding it is more
+ than the saved literals.
+
+ backward_reference_offset MUST be positive. */
+func backwardReferenceScore(copy_length uint, backward_reference_offset uint) uint {
+ return scoreBase + literalByteScore*uint(copy_length) - distanceBitPenalty*uint(log2FloorNonZero(backward_reference_offset))
+}
+
+func backwardReferenceScoreUsingLastDistance(copy_length uint) uint {
+ return literalByteScore*uint(copy_length) + scoreBase + 15
+}
+
+func backwardReferencePenaltyUsingLastDistance(distance_short_code uint) uint {
+ return uint(39) + ((0x1CA10 >> (distance_short_code & 0xE)) & 0xE)
+}
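The constants make the trade-off concrete: every copied byte earns 135 points, every bit of distance costs 30, and reusing the last distance gets a flat +15 bonus. A worked check (using math/bits for the floor-log2 that log2FloorNonZero computes):

```go
package main

import (
	"fmt"
	"math/bits"
)

const (
	literalByteScore   = 135
	distanceBitPenalty = 30
	scoreBase          = distanceBitPenalty * 8 * 8
)

func score(copyLen, backward uint) uint {
	// bits.Len(x)-1 is floor(log2 x), what log2FloorNonZero computes.
	return scoreBase + literalByteScore*copyLen -
		distanceBitPenalty*uint(bits.Len(backward)-1)
}

func scoreLastDistance(copyLen uint) uint {
	return literalByteScore*copyLen + scoreBase + 15
}

func main() {
	fmt.Println(score(8, 1024))       // 1920 + 1080 - 300 = 2700
	fmt.Println(scoreLastDistance(8)) // 1920 + 1080 + 15 = 3015
}
```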
+
+func testStaticDictionaryItem(dictionary *encoderDictionary, item uint, data []byte, max_length uint, max_backward uint, max_distance uint, out *hasherSearchResult) bool {
+ var len uint
+ var word_idx uint
+ var offset uint
+ var matchlen uint
+ var backward uint
+ var score uint
+ len = item & 0x1F
+ word_idx = item >> 5
+ offset = uint(dictionary.words.offsets_by_length[len]) + len*word_idx
+ if len > max_length {
+ return false
+ }
+
+ matchlen = findMatchLengthWithLimit(data, dictionary.words.data[offset:], uint(len))
+ if matchlen+uint(dictionary.cutoffTransformsCount) <= len || matchlen == 0 {
+ return false
+ }
+ {
+ var cut uint = len - matchlen
+ var transform_id uint = (cut << 2) + uint((dictionary.cutoffTransforms>>(cut*6))&0x3F)
+ backward = max_backward + 1 + word_idx + (transform_id << dictionary.words.size_bits_by_length[len])
+ }
+
+ if backward > max_distance {
+ return false
+ }
+
+ score = backwardReferenceScore(matchlen, backward)
+ if score < out.score {
+ return false
+ }
+
+ out.len = matchlen
+ out.len_code_delta = int(len) - int(matchlen)
+ out.distance = backward
+ out.score = score
+ return true
+}
+
+func searchInStaticDictionary(dictionary *encoderDictionary, handle hasherHandle, data []byte, max_length uint, max_backward uint, max_distance uint, out *hasherSearchResult, shallow bool) {
+ var key uint
+ var i uint
+ var self *hasherCommon = handle.Common()
+ if self.dict_num_matches < self.dict_num_lookups>>7 {
+ return
+ }
+
+ key = uint(hash14(data) << 1)
+ for i = 0; ; (func() { i++; key++ })() {
+ var tmp uint
+ if shallow {
+ tmp = 1
+ } else {
+ tmp = 2
+ }
+ if i >= tmp {
+ break
+ }
+ var item uint = uint(dictionary.hash_table[key])
+ self.dict_num_lookups++
+ if item != 0 {
+ var item_matches bool = testStaticDictionaryItem(dictionary, item, data, max_length, max_backward, max_distance, out)
+ if item_matches {
+ self.dict_num_matches++
+ }
+ }
+ }
+}
+
+type backwardMatch struct {
+ distance uint32
+ length_and_code uint32
+}
+
+func initBackwardMatch(self *backwardMatch, dist uint, len uint) {
+ self.distance = uint32(dist)
+ self.length_and_code = uint32(len << 5)
+}
+
+func initDictionaryBackwardMatch(self *backwardMatch, dist uint, len uint, len_code uint) {
+ self.distance = uint32(dist)
+ var tmp uint
+ if len == len_code {
+ tmp = 0
+ } else {
+ tmp = len_code
+ }
+ self.length_and_code = uint32(len<<5 | tmp)
+}
+
+func backwardMatchLength(self *backwardMatch) uint {
+ return uint(self.length_and_code >> 5)
+}
+
+func backwardMatchLengthCode(self *backwardMatch) uint {
+ var code uint = uint(self.length_and_code) & 31
+ if code != 0 {
+ return code
+ } else {
+ return backwardMatchLength(self)
+ }
+}
+
+func hasherReset(handle hasherHandle) {
+ if handle == nil {
+ return
+ }
+ handle.Common().is_prepared_ = false
+}
+
+func newHasher(typ int) hasherHandle {
+ switch typ {
+ case 2:
+ return &hashLongestMatchQuickly{
+ bucketBits: 16,
+ bucketSweep: 1,
+ hashLen: 5,
+ useDictionary: true,
+ }
+ case 3:
+ return &hashLongestMatchQuickly{
+ bucketBits: 16,
+ bucketSweep: 2,
+ hashLen: 5,
+ useDictionary: false,
+ }
+ case 4:
+ return &hashLongestMatchQuickly{
+ bucketBits: 17,
+ bucketSweep: 4,
+ hashLen: 5,
+ useDictionary: true,
+ }
+ case 5:
+ return new(h5)
+ case 6:
+ return new(h6)
+ case 10:
+ return new(h10)
+ case 35:
+ return &hashComposite{
+ ha: newHasher(3),
+ hb: &hashRolling{jump: 4},
+ }
+ case 40:
+ return &hashForgetfulChain{
+ bucketBits: 15,
+ numBanks: 1,
+ bankBits: 16,
+ numLastDistancesToCheck: 4,
+ }
+ case 41:
+ return &hashForgetfulChain{
+ bucketBits: 15,
+ numBanks: 1,
+ bankBits: 16,
+ numLastDistancesToCheck: 10,
+ }
+ case 42:
+ return &hashForgetfulChain{
+ bucketBits: 15,
+ numBanks: 512,
+ bankBits: 9,
+ numLastDistancesToCheck: 16,
+ }
+ case 54:
+ return &hashLongestMatchQuickly{
+ bucketBits: 20,
+ bucketSweep: 4,
+ hashLen: 7,
+ useDictionary: false,
+ }
+ case 55:
+ return &hashComposite{
+ ha: newHasher(54),
+ hb: &hashRolling{jump: 4},
+ }
+ case 65:
+ return &hashComposite{
+ ha: newHasher(6),
+ hb: &hashRolling{jump: 1},
+ }
+ }
+
+ panic(fmt.Sprintf("unknown hasher type: %d", typ))
+}
+
+func hasherSetup(handle *hasherHandle, params *encoderParams, data []byte, position uint, input_size uint, is_last bool) {
+ var self hasherHandle = nil
+ var common *hasherCommon = nil
+ var one_shot bool = (position == 0 && is_last)
+ if *handle == nil {
+ chooseHasher(params, &params.hasher)
+ self = newHasher(params.hasher.type_)
+
+ *handle = self
+ common = self.Common()
+ common.params = params.hasher
+ self.Initialize(params)
+ }
+
+ self = *handle
+ common = self.Common()
+ if !common.is_prepared_ {
+ self.Prepare(one_shot, input_size, data)
+
+ if position == 0 {
+ common.dict_num_lookups = 0
+ common.dict_num_matches = 0
+ }
+
+ common.is_prepared_ = true
+ }
+}
+
+func initOrStitchToPreviousBlock(handle *hasherHandle, data []byte, mask uint, params *encoderParams, position uint, input_size uint, is_last bool) {
+ var self hasherHandle
+ hasherSetup(handle, params, data, position, input_size, is_last)
+ self = *handle
+ self.StitchToPreviousBlock(input_size, position, data, mask)
+}
diff --git a/vendor/github.com/andybalholm/brotli/hash_composite.go b/vendor/github.com/andybalholm/brotli/hash_composite.go
new file mode 100644
index 0000000..a65fe2e
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/hash_composite.go
@@ -0,0 +1,93 @@
+package brotli
+
+/* Copyright 2018 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+func (h *hashComposite) HashTypeLength() uint {
+ var a uint = h.ha.HashTypeLength()
+ var b uint = h.hb.HashTypeLength()
+ if a > b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func (h *hashComposite) StoreLookahead() uint {
+ var a uint = h.ha.StoreLookahead()
+ var b uint = h.hb.StoreLookahead()
+ if a > b {
+ return a
+ } else {
+ return b
+ }
+}
+
+/* Composite hasher: allows combining two other hashers, HASHER_A
+ and HASHER_B. */
+type hashComposite struct {
+ hasherCommon
+ ha hasherHandle
+ hb hasherHandle
+ params *encoderParams
+}
+
+func (h *hashComposite) Initialize(params *encoderParams) {
+ h.params = params
+}
+
+/* TODO: Initialization of the hashers is deferred to Prepare (and params are
+ remembered here) because we don't get the one_shot and input_size params
+ here, which are needed to know their memory size. Instead, provide those
+ params to all hashers' Initialize. */
+func (h *hashComposite) Prepare(one_shot bool, input_size uint, data []byte) {
+ if h.ha == nil {
+ var common_a *hasherCommon
+ var common_b *hasherCommon
+
+ common_a = h.ha.Common()
+ common_a.params = h.params.hasher
+ common_a.is_prepared_ = false
+ common_a.dict_num_lookups = 0
+ common_a.dict_num_matches = 0
+ h.ha.Initialize(h.params)
+
+ common_b = h.hb.Common()
+ common_b.params = h.params.hasher
+ common_b.is_prepared_ = false
+ common_b.dict_num_lookups = 0
+ common_b.dict_num_matches = 0
+ h.hb.Initialize(h.params)
+ }
+
+ h.ha.Prepare(one_shot, input_size, data)
+ h.hb.Prepare(one_shot, input_size, data)
+}
+
+func (h *hashComposite) Store(data []byte, mask uint, ix uint) {
+ h.ha.Store(data, mask, ix)
+ h.hb.Store(data, mask, ix)
+}
+
+func (h *hashComposite) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
+ h.ha.StoreRange(data, mask, ix_start, ix_end)
+ h.hb.StoreRange(data, mask, ix_start, ix_end)
+}
+
+func (h *hashComposite) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
+ h.ha.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask)
+ h.hb.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask)
+}
+
+func (h *hashComposite) PrepareDistanceCache(distance_cache []int) {
+ h.ha.PrepareDistanceCache(distance_cache)
+ h.hb.PrepareDistanceCache(distance_cache)
+}
+
+func (h *hashComposite) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) {
+ h.ha.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
+ h.hb.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
+}
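Why FindLongestMatch can simply run both sub-hashers in sequence: each one only overwrites |out| when it strictly beats out.score, so chaining two searches over one shared result yields the best match of either. A sketch with a hypothetical finder type (not part of this package):

```go
package main

import "fmt"

type result struct{ len, distance, score uint }

// finder is a hypothetical stand-in for a hasher's FindLongestMatch.
type finder func(out *result)

func findBest(out *result, finders ...finder) {
	for _, f := range finders {
		f(out) // each finder respects, and may raise, out.score
	}
}

func main() {
	out := result{score: 100}
	findBest(&out,
		func(o *result) { // "ha": finds a high-scoring match
			if s := uint(250); s > o.score {
				*o = result{len: 6, distance: 40, score: s}
			}
		},
		func(o *result) { // "hb": its candidate scores lower, so no overwrite
			if s := uint(180); s > o.score {
				*o = result{len: 4, distance: 8, score: s}
			}
		},
	)
	fmt.Println(out) // {6 40 250}
}
```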
diff --git a/vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go b/vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go
new file mode 100644
index 0000000..306e46d
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go
@@ -0,0 +1,252 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2016 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+func (*hashForgetfulChain) HashTypeLength() uint {
+ return 4
+}
+
+func (*hashForgetfulChain) StoreLookahead() uint {
+ return 4
+}
+
+/* HashBytes is the function that chooses the bucket to place the address in.*/
+func (h *hashForgetfulChain) HashBytes(data []byte) uint {
+ var hash uint32 = binary.LittleEndian.Uint32(data) * kHashMul32
+
+ /* The higher bits contain more mixture from the multiplication,
+ so we take our results from there. */
+ return uint(hash >> (32 - h.bucketBits))
+}
+
+type slot struct {
+ delta uint16
+ next uint16
+}
+
+/* A (forgetful) hash table to the data seen by the compressor, to
+ help create backward references to previous data.
+
+ Hashes are stored in chains which are bucketed to groups. Group of chains
+ share a storage "bank". When more than "bank size" chain nodes are added,
+ oldest nodes are replaced; this way several chains may share a tail. */
+type hashForgetfulChain struct {
+ hasherCommon
+
+ bucketBits uint
+ numBanks uint
+ bankBits uint
+ numLastDistancesToCheck int
+
+ addr []uint32
+ head []uint16
+ tiny_hash [65536]byte
+ banks [][]slot
+ free_slot_idx []uint16
+ max_hops uint
+}
+
+func (h *hashForgetfulChain) Initialize(params *encoderParams) {
+ var q uint
+ if params.quality > 6 {
+ q = 7
+ } else {
+ q = 8
+ }
+ h.max_hops = q << uint(params.quality-4)
+
+ bankSize := 1 << h.bankBits
+ bucketSize := 1 << h.bucketBits
+
+ h.addr = make([]uint32, bucketSize)
+ h.head = make([]uint16, bucketSize)
+ h.banks = make([][]slot, h.numBanks)
+ for i := range h.banks {
+ h.banks[i] = make([]slot, bankSize)
+ }
+ h.free_slot_idx = make([]uint16, h.numBanks)
+}
+
+func (h *hashForgetfulChain) Prepare(one_shot bool, input_size uint, data []byte) {
+ var partial_prepare_threshold uint = (1 << h.bucketBits) >> 6
+ /* Partial preparation is 100 times slower (per socket). */
+ if one_shot && input_size <= partial_prepare_threshold {
+ var i uint
+ for i = 0; i < input_size; i++ {
+ var bucket uint = h.HashBytes(data[i:])
+
+ /* See InitEmpty comment. */
+ h.addr[bucket] = 0xCCCCCCCC
+
+ h.head[bucket] = 0xCCCC
+ }
+ } else {
+ /* Fill the |addr| array with the value 0xCCCCCCCC. Because of wrapping, the
+ position processed by the hasher never reaches 3GB + 64M; this causes all
+ new chains to be terminated after the first node. */
+ for i := range h.addr {
+ h.addr[i] = 0xCCCCCCCC
+ }
+
+ for i := range h.head {
+ h.head[i] = 0
+ }
+ }
+
+ h.tiny_hash = [65536]byte{}
+ for i := range h.free_slot_idx {
+ h.free_slot_idx[i] = 0
+ }
+}
+
+/* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend
+ node to corresponding chain; also update tiny_hash for current position. */
+func (h *hashForgetfulChain) Store(data []byte, mask uint, ix uint) {
+ var key uint = h.HashBytes(data[ix&mask:])
+ var bank uint = key & (h.numBanks - 1)
+ idx := uint(h.free_slot_idx[bank]) & ((1 << h.bankBits) - 1)
+ h.free_slot_idx[bank]++
+ var delta uint = ix - uint(h.addr[key])
+ h.tiny_hash[uint16(ix)] = byte(key)
+ if delta > 0xFFFF {
+ delta = 0xFFFF
+ }
+ h.banks[bank][idx].delta = uint16(delta)
+ h.banks[bank][idx].next = h.head[key]
+ h.addr[key] = uint32(ix)
+ h.head[key] = uint16(idx)
+}
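Chain nodes never store absolute positions, only 16-bit deltas; FindLongestMatch below recovers absolute backward distances by summing deltas as it hops. A toy walk with one hypothetical 4-slot bank:

```go
package main

import "fmt"

type slot struct{ delta, next uint16 }

func main() {
	// Hypothetical single 4-slot bank; one key stored at positions 10, 25, 100.
	bank := make([]slot, 4)
	var head uint16
	var addr uint32
	var freeIdx uint16

	store := func(ix uint32) {
		idx := freeIdx & 3 // slots are a ring: oldest nodes get reused
		freeIdx++
		bank[idx] = slot{delta: uint16(ix - addr), next: head}
		addr = ix
		head = idx
	}
	for _, ix := range []uint32{10, 25, 100} {
		store(ix)
	}

	// Match search from position 130: hop the chain, summing deltas.
	backward, s := 130-addr, head
	for hops := 0; hops < 3; hops++ {
		fmt.Println("candidate at backward distance", backward) // 30, 105, 120
		backward += uint32(bank[s].delta)
		s = bank[s].next
	}
}
```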
+
+func (h *hashForgetfulChain) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
+ var i uint
+ for i = ix_start; i < ix_end; i++ {
+ h.Store(data, mask, i)
+ }
+}
+
+func (h *hashForgetfulChain) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
+ if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
+ /* Prepare the hashes for three last bytes of the last write.
+ These could not be calculated before, since they require knowledge
+ of both the previous and the current block. */
+ h.Store(ringbuffer, ring_buffer_mask, position-3)
+ h.Store(ringbuffer, ring_buffer_mask, position-2)
+ h.Store(ringbuffer, ring_buffer_mask, position-1)
+ }
+}
+
+func (h *hashForgetfulChain) PrepareDistanceCache(distance_cache []int) {
+ prepareDistanceCache(distance_cache, h.numLastDistancesToCheck)
+}
+
+/* Find a longest backward match of &data[cur_ix] up to the length of
+ max_length and stores the position cur_ix in the hash table.
+
+ REQUIRES: PrepareDistanceCachehashForgetfulChain must be invoked for current distance cache
+ values; if this method is invoked repeatedly with the same distance
+ cache values, it is enough to invoke PrepareDistanceCachehashForgetfulChain once.
+
+ Does not look for matches longer than max_length.
+ Does not look for matches further away than max_backward.
+ Writes the best match into |out|.
+ |out|->score is updated only if a better match is found. */
+func (h *hashForgetfulChain) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) {
+ var cur_ix_masked uint = cur_ix & ring_buffer_mask
+ var min_score uint = out.score
+ var best_score uint = out.score
+ var best_len uint = out.len
+ var key uint = h.HashBytes(data[cur_ix_masked:])
+ var tiny_hash byte = byte(key)
+ /* Don't accept a short copy from far away. */
+ out.len = 0
+
+ out.len_code_delta = 0
+
+ /* Try last distance first. */
+ for i := 0; i < h.numLastDistancesToCheck; i++ {
+ var backward uint = uint(distance_cache[i])
+ var prev_ix uint = (cur_ix - backward)
+
+ /* For distance code 0 we want to consider 2-byte matches. */
+ if i > 0 && h.tiny_hash[uint16(prev_ix)] != tiny_hash {
+ continue
+ }
+ if prev_ix >= cur_ix || backward > max_backward {
+ continue
+ }
+
+ prev_ix &= ring_buffer_mask
+ {
+ var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len >= 2 {
+ var score uint = backwardReferenceScoreUsingLastDistance(uint(len))
+ if best_score < score {
+ if i != 0 {
+ score -= backwardReferencePenaltyUsingLastDistance(uint(i))
+ }
+ if best_score < score {
+ best_score = score
+ best_len = uint(len)
+ out.len = best_len
+ out.distance = backward
+ out.score = best_score
+ }
+ }
+ }
+ }
+ }
+ {
+ var bank uint = key & (h.numBanks - 1)
+ var backward uint = 0
+ var hops uint = h.max_hops
+ var delta uint = cur_ix - uint(h.addr[key])
+ var slot uint = uint(h.head[key])
+ for {
+ tmp6 := hops
+ hops--
+ if tmp6 == 0 {
+ break
+ }
+ var prev_ix uint
+ var last uint = slot
+ backward += delta
+ if backward > max_backward {
+ break
+ }
+ prev_ix = (cur_ix - backward) & ring_buffer_mask
+ slot = uint(h.banks[bank][last].next)
+ delta = uint(h.banks[bank][last].delta)
+ if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] {
+ continue
+ }
+ {
+ var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len >= 4 {
+ /* Comparing for >= 3 does not change the semantics, but just saves
+ for a few unnecessary binary logarithms in backward reference
+ score, since we are not interested in such short matches. */
+ var score uint = backwardReferenceScore(uint(len), backward)
+ if best_score < score {
+ best_score = score
+ best_len = uint(len)
+ out.len = best_len
+ out.distance = backward
+ out.score = best_score
+ }
+ }
+ }
+ }
+
+ h.Store(data, ring_buffer_mask, cur_ix)
+ }
+
+ if out.score == min_score {
+ searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false)
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go b/vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go
new file mode 100644
index 0000000..9375dc1
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go
@@ -0,0 +1,214 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2010 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* For BUCKET_SWEEP == 1, enabling the dictionary lookup makes compression
+ a little faster (0.5% - 1%) and it compresses 0.15% better on small text
+ and HTML inputs. */
+
+func (*hashLongestMatchQuickly) HashTypeLength() uint {
+ return 8
+}
+
+func (*hashLongestMatchQuickly) StoreLookahead() uint {
+ return 8
+}
+
+/* HashBytes is the function that chooses the bucket to place
+ the address in. The HashLongestMatch and hashLongestMatchQuickly
+ classes have separate, different implementations of hashing. */
+func (h *hashLongestMatchQuickly) HashBytes(data []byte) uint32 {
+ var hash uint64 = ((binary.LittleEndian.Uint64(data) << (64 - 8*h.hashLen)) * kHashMul64)
+
+ /* The higher bits contain more mixture from the multiplication,
+ so we take our results from there. */
+ return uint32(hash >> (64 - h.bucketBits))
+}
+
+/* A (forgetful) hash table to the data seen by the compressor, to
+ help create backward references to previous data.
+
+ This is a hash map of fixed size (1 << 16). Starting from the
+ given index, 1 buckets are used to store values of a key. */
+type hashLongestMatchQuickly struct {
+ hasherCommon
+
+ bucketBits uint
+ bucketSweep int
+ hashLen uint
+ useDictionary bool
+
+ buckets []uint32
+}
+
+func (h *hashLongestMatchQuickly) Initialize(params *encoderParams) {
+ h.buckets = make([]uint32, 1<<h.bucketBits+h.bucketSweep)
+}
+
+func (h *hashLongestMatchQuickly) Prepare(one_shot bool, input_size uint, data []byte) {
+ var partial_prepare_threshold uint = (4 << h.bucketBits) >> 7
+ /* Partial preparation is 100 times slower (per socket). */
+ if one_shot && input_size <= partial_prepare_threshold {
+ var i uint
+ for i = 0; i < input_size; i++ {
+ var key uint32 = h.HashBytes(data[i:])
+ for j := 0; j < h.bucketSweep; j++ {
+ h.buckets[key+uint32(j)] = 0
+ }
+ }
+ } else {
+ /* It is not strictly necessary to fill this buffer here, but
+ not filling will make the results of the compression stochastic
+ (but correct). This is because random data would cause the
+ system to find accidentally good backward references here and there. */
+ for i := range h.buckets {
+ h.buckets[i] = 0
+ }
+ }
+}
+
+/* Look at 5 bytes at &data[ix & mask].
+ Compute a hash from these, and store the value somewhere within
+ [ix .. ix+3]. */
+func (h *hashLongestMatchQuickly) Store(data []byte, mask uint, ix uint) {
+ var key uint32 = h.HashBytes(data[ix&mask:])
+ var off uint32 = uint32(ix>>3) % uint32(h.bucketSweep)
+ /* Wiggle the value with the bucket sweep range. */
+ h.buckets[key+off] = uint32(ix)
+}
+
+func (h *hashLongestMatchQuickly) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
+ var i uint
+ for i = ix_start; i < ix_end; i++ {
+ h.Store(data, mask, i)
+ }
+}
+
+func (h *hashLongestMatchQuickly) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
+ if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
+ /* Prepare the hashes for three last bytes of the last write.
+ These could not be calculated before, since they require knowledge
+ of both the previous and the current block. */
+ h.Store(ringbuffer, ringbuffer_mask, position-3)
+ h.Store(ringbuffer, ringbuffer_mask, position-2)
+ h.Store(ringbuffer, ringbuffer_mask, position-1)
+ }
+}
+
+func (*hashLongestMatchQuickly) PrepareDistanceCache(distance_cache []int) {
+}
+
+/* Find a longest backward match of &data[cur_ix & ring_buffer_mask]
+ up to the length of max_length and stores the position cur_ix in the
+ hash table.
+
+ Does not look for matches longer than max_length.
+ Does not look for matches further away than max_backward.
+ Writes the best match into |out|.
+ |out|->score is updated only if a better match is found. */
+func (h *hashLongestMatchQuickly) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) {
+ var best_len_in uint = out.len
+ var cur_ix_masked uint = cur_ix & ring_buffer_mask
+ var key uint32 = h.HashBytes(data[cur_ix_masked:])
+ var compare_char int = int(data[cur_ix_masked+best_len_in])
+ var min_score uint = out.score
+ var best_score uint = out.score
+ var best_len uint = best_len_in
+ var cached_backward uint = uint(distance_cache[0])
+ var prev_ix uint = cur_ix - cached_backward
+ var bucket []uint32
+ out.len_code_delta = 0
+ if prev_ix < cur_ix {
+ prev_ix &= uint(uint32(ring_buffer_mask))
+ if compare_char == int(data[prev_ix+best_len]) {
+ var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len >= 4 {
+ var score uint = backwardReferenceScoreUsingLastDistance(uint(len))
+ if best_score < score {
+ best_score = score
+ best_len = uint(len)
+ out.len = uint(len)
+ out.distance = cached_backward
+ out.score = best_score
+ compare_char = int(data[cur_ix_masked+best_len])
+ if h.bucketSweep == 1 {
+ h.buckets[key] = uint32(cur_ix)
+ return
+ }
+ }
+ }
+ }
+ }
+
+ if h.bucketSweep == 1 {
+ var backward uint
+ var len uint
+
+ /* Only one to look for, don't bother to prepare for a loop. */
+ prev_ix = uint(h.buckets[key])
+
+ h.buckets[key] = uint32(cur_ix)
+ backward = cur_ix - prev_ix
+ prev_ix &= uint(uint32(ring_buffer_mask))
+ if compare_char != int(data[prev_ix+best_len_in]) {
+ return
+ }
+
+ if backward == 0 || backward > max_backward {
+ return
+ }
+
+ len = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len >= 4 {
+ var score uint = backwardReferenceScore(uint(len), backward)
+ if best_score < score {
+ out.len = uint(len)
+ out.distance = backward
+ out.score = score
+ return
+ }
+ }
+ } else {
+ bucket = h.buckets[key:]
+ var i int
+ prev_ix = uint(bucket[0])
+ bucket = bucket[1:]
+ for i = 0; i < h.bucketSweep; (func() { i++; tmp3 := bucket; bucket = bucket[1:]; prev_ix = uint(tmp3[0]) })() {
+ var backward uint = cur_ix - prev_ix
+ var len uint
+ prev_ix &= uint(uint32(ring_buffer_mask))
+ if compare_char != int(data[prev_ix+best_len]) {
+ continue
+ }
+
+ if backward == 0 || backward > max_backward {
+ continue
+ }
+
+ len = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length)
+ if len >= 4 {
+ var score uint = backwardReferenceScore(uint(len), backward)
+ if best_score < score {
+ best_score = score
+ best_len = uint(len)
+ out.len = best_len
+ out.distance = backward
+ out.score = score
+ compare_char = int(data[cur_ix_masked+best_len])
+ }
+ }
+ }
+ }
+
+ if h.useDictionary && min_score == out.score {
+ searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, true)
+ }
+
+ h.buckets[key+uint32((cur_ix>>3)%uint(h.bucketSweep))] = uint32(cur_ix)
+}
diff --git a/vendor/github.com/andybalholm/brotli/hash_rolling.go b/vendor/github.com/andybalholm/brotli/hash_rolling.go
new file mode 100644
index 0000000..6630fc0
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/hash_rolling.go
@@ -0,0 +1,168 @@
+package brotli
+
+/* Copyright 2018 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* NOTE: this hasher does not search in the dictionary. It is used as
+ backup-hasher, the main hasher already searches in it. */
+
+const kRollingHashMul32 uint32 = 69069
+
+const kInvalidPosHashRolling uint32 = 0xffffffff
+
+/* This hasher uses a longer forward length, but returning a higher value here
+ will hurt compression by the main hasher when combined with a composite
+ hasher. The hasher tests for the forward length itself instead. */
+func (*hashRolling) HashTypeLength() uint {
+ return 4
+}
+
+func (*hashRolling) StoreLookahead() uint {
+ return 4
+}
+
+/* Computes a code from a single byte. A lookup table of 256 values could be
+ used, but simply adding 1 works about as well. */
+func (*hashRolling) HashByte(b byte) uint32 {
+ return uint32(b) + 1
+}
+
+func (h *hashRolling) HashRollingFunctionInitial(state uint32, add byte, factor uint32) uint32 {
+ return uint32(factor*state + h.HashByte(add))
+}
+
+func (h *hashRolling) HashRollingFunction(state uint32, add byte, rem byte, factor uint32, factor_remove uint32) uint32 {
+ return uint32(factor*state + h.HashByte(add) - factor_remove*h.HashByte(rem))
+}
+
+/* Rolling hash for long distance long string matches. Stores one position
+ per bucket, bucket key is computed over a long region. */
+type hashRolling struct {
+ hasherCommon
+
+ jump int
+
+ state uint32
+ table []uint32
+ next_ix uint
+ factor uint32
+ factor_remove uint32
+}
+
+func (h *hashRolling) Initialize(params *encoderParams) {
+ h.state = 0
+ h.next_ix = 0
+
+ h.factor = kRollingHashMul32
+
+ /* Compute the factor of the oldest byte to remove: factor**steps modulo
+ 0xffffffff (the multiplications rely on 32-bit overflow) */
+ h.factor_remove = 1
+
+ for i := 0; i < 32; i += h.jump {
+ h.factor_remove *= h.factor
+ }
+
+ h.table = make([]uint32, 16777216)
+ for i := 0; i < 16777216; i++ {
+ h.table[i] = kInvalidPosHashRolling
+ }
+}
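The loop above computes factor_remove = factor**(32/jump) mod 2^32: the weight that the oldest sampled byte carries in the polynomial hash, so HashRollingFunction can subtract it out in O(1). A sanity check with a shrunken 4-byte window and jump = 1:

```go
package main

import "fmt"

const factor uint32 = 69069

func hByte(b byte) uint32 { return uint32(b) + 1 }

// fromScratch is HashRollingFunctionInitial applied over a whole window.
func fromScratch(window []byte) uint32 {
	var state uint32
	for _, b := range window {
		state = factor*state + hByte(b)
	}
	return state
}

func main() {
	data := []byte("abcdefgh")
	const n = 4 // shrunken window; the real hasher samples 32 bytes

	// Weight of the byte that falls out of the window: factor**n mod 2^32.
	factorRemove := uint32(1)
	for i := 0; i < n; i++ {
		factorRemove *= factor
	}

	state := fromScratch(data[0:n])
	rolled := factor*state + hByte(data[n]) - factorRemove*hByte(data[0])
	fmt.Println(rolled == fromScratch(data[1:n+1])) // true
}
```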
+
+func (h *hashRolling) Prepare(one_shot bool, input_size uint, data []byte) {
+ /* Too small size, cannot use this hasher. */
+ if input_size < 32 {
+ return
+ }
+ h.state = 0
+ for i := 0; i < 32; i += h.jump {
+ h.state = h.HashRollingFunctionInitial(h.state, data[i], h.factor)
+ }
+}
+
+func (*hashRolling) Store(data []byte, mask uint, ix uint) {
+}
+
+func (*hashRolling) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
+}
+
+func (h *hashRolling) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
+ var position_masked uint
+ /* In this case we must re-initialize the hasher from scratch from the
+ current position. */
+
+ var available uint = num_bytes
+ if position&uint(h.jump-1) != 0 {
+ var diff uint = uint(h.jump) - (position & uint(h.jump-1))
+ if diff > available {
+ available = 0
+ } else {
+ available = available - diff
+ }
+ position += diff
+ }
+
+ position_masked = position & ring_buffer_mask
+
+ /* wrapping around ringbuffer not handled. */
+ if available > ring_buffer_mask-position_masked {
+ available = ring_buffer_mask - position_masked
+ }
+
+ h.Prepare(false, available, ringbuffer[position&ring_buffer_mask:])
+ h.next_ix = position
+}
+
+func (*hashRolling) PrepareDistanceCache(distance_cache []int) {
+}
+
+func (h *hashRolling) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) {
+ var cur_ix_masked uint = cur_ix & ring_buffer_mask
+ var pos uint = h.next_ix
+
+ if cur_ix&uint(h.jump-1) != 0 {
+ return
+ }
+
+ /* Not enough lookahead */
+ if max_length < 32 {
+ return
+ }
+
+ for pos = h.next_ix; pos <= cur_ix; pos += uint(h.jump) {
+ var code uint32 = h.state & ((16777216 * 64) - 1)
+ var rem byte = data[pos&ring_buffer_mask]
+ var add byte = data[(pos+32)&ring_buffer_mask]
+ var found_ix uint = uint(kInvalidPosHashRolling)
+
+ h.state = h.HashRollingFunction(h.state, add, rem, h.factor, h.factor_remove)
+
+ if code < 16777216 {
+ found_ix = uint(h.table[code])
+ h.table[code] = uint32(pos)
+ if pos == cur_ix && uint32(found_ix) != kInvalidPosHashRolling {
+ /* The cast to 32-bit makes backward distances up to 4GB work even
+ if cur_ix is above 4GB, despite using 32-bit values in the table. */
+ var backward uint = uint(uint32(cur_ix - found_ix))
+ if backward <= max_backward {
+ var found_ix_masked uint = found_ix & ring_buffer_mask
+ var len uint = findMatchLengthWithLimit(data[found_ix_masked:], data[cur_ix_masked:], max_length)
+ if len >= 4 && len > out.len {
+ var score uint = backwardReferenceScore(uint(len), backward)
+ if score > out.score {
+ out.len = uint(len)
+ out.distance = backward
+ out.score = score
+ out.len_code_delta = 0
+ }
+ }
+ }
+ }
+ }
+ }
+
+ h.next_ix = cur_ix + uint(h.jump)
+}
diff --git a/vendor/github.com/andybalholm/brotli/histogram.go b/vendor/github.com/andybalholm/brotli/histogram.go
new file mode 100644
index 0000000..0346622
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/histogram.go
@@ -0,0 +1,226 @@
+package brotli
+
+import "math"
+
+/* The distance symbols effectively used by "Large Window Brotli" (32-bit). */
+const numHistogramDistanceSymbols = 544
+
+type histogramLiteral struct {
+ data_ [numLiteralSymbols]uint32
+ total_count_ uint
+ bit_cost_ float64
+}
+
+func histogramClearLiteral(self *histogramLiteral) {
+ self.data_ = [numLiteralSymbols]uint32{}
+ self.total_count_ = 0
+ self.bit_cost_ = math.MaxFloat64
+}
+
+func clearHistogramsLiteral(array []histogramLiteral, length uint) {
+ var i uint
+ for i = 0; i < length; i++ {
+ histogramClearLiteral(&array[i:][0])
+ }
+}
+
+func histogramAddLiteral(self *histogramLiteral, val uint) {
+ self.data_[val]++
+ self.total_count_++
+}
+
+func histogramAddVectorLiteral(self *histogramLiteral, p []byte, n uint) {
+ self.total_count_ += n
+ n += 1
+ for {
+ n--
+ if n == 0 {
+ break
+ }
+ self.data_[p[0]]++
+ p = p[1:]
+ }
+}
+
+func histogramAddHistogramLiteral(self *histogramLiteral, v *histogramLiteral) {
+ var i uint
+ self.total_count_ += v.total_count_
+ for i = 0; i < numLiteralSymbols; i++ {
+ self.data_[i] += v.data_[i]
+ }
+}
+
+func histogramDataSizeLiteral() uint {
+ return numLiteralSymbols
+}
+
+type histogramCommand struct {
+ data_ [numCommandSymbols]uint32
+ total_count_ uint
+ bit_cost_ float64
+}
+
+func histogramClearCommand(self *histogramCommand) {
+ self.data_ = [numCommandSymbols]uint32{}
+ self.total_count_ = 0
+ self.bit_cost_ = math.MaxFloat64
+}
+
+func clearHistogramsCommand(array []histogramCommand, length uint) {
+ var i uint
+ for i = 0; i < length; i++ {
+ histogramClearCommand(&array[i:][0])
+ }
+}
+
+func histogramAddCommand(self *histogramCommand, val uint) {
+ self.data_[val]++
+ self.total_count_++
+}
+
+func histogramAddVectorCommand(self *histogramCommand, p []uint16, n uint) {
+ self.total_count_ += n
+ n += 1
+ for {
+ n--
+ if n == 0 {
+ break
+ }
+ self.data_[p[0]]++
+ p = p[1:]
+ }
+}
+
+func histogramAddHistogramCommand(self *histogramCommand, v *histogramCommand) {
+ var i uint
+ self.total_count_ += v.total_count_
+ for i = 0; i < numCommandSymbols; i++ {
+ self.data_[i] += v.data_[i]
+ }
+}
+
+func histogramDataSizeCommand() uint {
+ return numCommandSymbols
+}
+
+type histogramDistance struct {
+ data_ [numDistanceSymbols]uint32
+ total_count_ uint
+ bit_cost_ float64
+}
+
+func histogramClearDistance(self *histogramDistance) {
+ self.data_ = [numDistanceSymbols]uint32{}
+ self.total_count_ = 0
+ self.bit_cost_ = math.MaxFloat64
+}
+
+func clearHistogramsDistance(array []histogramDistance, length uint) {
+ var i uint
+ for i = 0; i < length; i++ {
+ histogramClearDistance(&array[i:][0])
+ }
+}
+
+func histogramAddDistance(self *histogramDistance, val uint) {
+ self.data_[val]++
+ self.total_count_++
+}
+
+func histogramAddVectorDistance(self *histogramDistance, p []uint16, n uint) {
+ self.total_count_ += n
+ n += 1
+ for {
+ n--
+ if n == 0 {
+ break
+ }
+ self.data_[p[0]]++
+ p = p[1:]
+ }
+}
+
+func histogramAddHistogramDistance(self *histogramDistance, v *histogramDistance) {
+ var i uint
+ self.total_count_ += v.total_count_
+ for i = 0; i < numDistanceSymbols; i++ {
+ self.data_[i] += v.data_[i]
+ }
+}
+
+func histogramDataSizeDistance() uint {
+ return numDistanceSymbols
+}
+
+type blockSplitIterator struct {
+ split_ *blockSplit
+ idx_ uint
+ type_ uint
+ length_ uint
+}
+
+func initBlockSplitIterator(self *blockSplitIterator, split *blockSplit) {
+ self.split_ = split
+ self.idx_ = 0
+ self.type_ = 0
+ if len(split.lengths) > 0 {
+ self.length_ = uint(split.lengths[0])
+ } else {
+ self.length_ = 0
+ }
+}
+
+func blockSplitIteratorNext(self *blockSplitIterator) {
+ if self.length_ == 0 {
+ self.idx_++
+ self.type_ = uint(self.split_.types[self.idx_])
+ self.length_ = uint(self.split_.lengths[self.idx_])
+ }
+
+ self.length_--
+}
+
+func buildHistogramsWithContext(cmds []command, literal_split *blockSplit, insert_and_copy_split *blockSplit, dist_split *blockSplit, ringbuffer []byte, start_pos uint, mask uint, prev_byte byte, prev_byte2 byte, context_modes []int, literal_histograms []histogramLiteral, insert_and_copy_histograms []histogramCommand, copy_dist_histograms []histogramDistance) {
+ var pos uint = start_pos
+ var literal_it blockSplitIterator
+ var insert_and_copy_it blockSplitIterator
+ var dist_it blockSplitIterator
+
+ initBlockSplitIterator(&literal_it, literal_split)
+ initBlockSplitIterator(&insert_and_copy_it, insert_and_copy_split)
+ initBlockSplitIterator(&dist_it, dist_split)
+ for i := range cmds {
+ var cmd *command = &cmds[i]
+ var j uint
+ blockSplitIteratorNext(&insert_and_copy_it)
+ histogramAddCommand(&insert_and_copy_histograms[insert_and_copy_it.type_], uint(cmd.cmd_prefix_))
+
+ /* TODO: unwrap iterator blocks. */
+ for j = uint(cmd.insert_len_); j != 0; j-- {
+ var context uint
+ blockSplitIteratorNext(&literal_it)
+ context = literal_it.type_
+ if context_modes != nil {
+ var lut contextLUT = getContextLUT(context_modes[context])
+ context = (context << literalContextBits) + uint(getContext(prev_byte, prev_byte2, lut))
+ }
+
+ histogramAddLiteral(&literal_histograms[context], uint(ringbuffer[pos&mask]))
+ prev_byte2 = prev_byte
+ prev_byte = ringbuffer[pos&mask]
+ pos++
+ }
+
+ pos += uint(commandCopyLen(cmd))
+ if commandCopyLen(cmd) != 0 {
+ prev_byte2 = ringbuffer[(pos-2)&mask]
+ prev_byte = ringbuffer[(pos-1)&mask]
+ if cmd.cmd_prefix_ >= 128 {
+ var context uint
+ blockSplitIteratorNext(&dist_it)
+ context = uint(uint32(dist_it.type_<<distanceContextBits) + commandDistanceContext(cmd))
+ histogramAddDistance(&copy_dist_histograms[context], uint(cmd.dist_prefix_)&0x3FF)
+ }
+ }
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/http.go b/vendor/github.com/andybalholm/brotli/http.go
new file mode 100644
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/http.go
+package brotli
+
+import (
+ "net/http"
+ "strings"
+)
+
+// negotiateContentEncoding returns the best offered content encoding for the
+// request's Accept-Encoding header. If two offers match with equal weight,
+// then the offer earlier in the list is preferred. If no offers are
+// acceptable, then "" is returned.
+func negotiateContentEncoding(r *http.Request, offers []string) string {
+ bestOffer := "identity"
+ bestQ := -1.0
+ specs := parseAccept(r.Header, "Accept-Encoding")
+ for _, offer := range offers {
+ for _, spec := range specs {
+ if spec.Q > bestQ &&
+ (spec.Value == "*" || spec.Value == offer) {
+ bestQ = spec.Q
+ bestOffer = offer
+ }
+ }
+ }
+ if bestQ == 0 {
+ bestOffer = ""
+ }
+ return bestOffer
+}
+
+// acceptSpec describes an Accept* header.
+type acceptSpec struct {
+ Value string
+ Q float64
+}
+
+// parseAccept parses Accept* headers.
+func parseAccept(header http.Header, key string) (specs []acceptSpec) {
+loop:
+ for _, s := range header[key] {
+ for {
+ var spec acceptSpec
+ spec.Value, s = expectTokenSlash(s)
+ if spec.Value == "" {
+ continue loop
+ }
+ spec.Q = 1.0
+ s = skipSpace(s)
+ if strings.HasPrefix(s, ";") {
+ s = skipSpace(s[1:])
+ if !strings.HasPrefix(s, "q=") {
+ continue loop
+ }
+ spec.Q, s = expectQuality(s[2:])
+ if spec.Q < 0.0 {
+ continue loop
+ }
+ }
+ specs = append(specs, spec)
+ s = skipSpace(s)
+ if !strings.HasPrefix(s, ",") {
+ continue loop
+ }
+ s = skipSpace(s[1:])
+ }
+ }
+ return
+}
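Putting parseAccept together with negotiateContentEncoding above: the specs extracted from a header like "gzip;q=0.8, br" drive a highest-q selection over the offered encodings. A standalone restatement of that selection rule (mirroring, not calling, the unexported functions):

```go
package main

import "fmt"

type acceptSpec struct {
	Value string
	Q     float64
}

// bestOffer restates negotiateContentEncoding's selection rule.
func bestOffer(specs []acceptSpec, offers []string) string {
	best, bestQ := "identity", -1.0
	for _, offer := range offers {
		for _, spec := range specs {
			if spec.Q > bestQ && (spec.Value == "*" || spec.Value == offer) {
				bestQ, best = spec.Q, offer
			}
		}
	}
	if bestQ == 0 {
		return "" // only explicitly rejected encodings matched
	}
	return best
}

func main() {
	// What parseAccept would extract from "gzip;q=0.8, br":
	specs := []acceptSpec{{Value: "gzip", Q: 0.8}, {Value: "br", Q: 1.0}}
	fmt.Println(bestOffer(specs, []string{"br", "gzip"})) // br
}
```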
+
+func skipSpace(s string) (rest string) {
+ i := 0
+ for ; i < len(s); i++ {
+ if octetTypes[s[i]]&isSpace == 0 {
+ break
+ }
+ }
+ return s[i:]
+}
+
+func expectTokenSlash(s string) (token, rest string) {
+ i := 0
+ for ; i < len(s); i++ {
+ b := s[i]
+ if (octetTypes[b]&isToken == 0) && b != '/' {
+ break
+ }
+ }
+ return s[:i], s[i:]
+}
+
+func expectQuality(s string) (q float64, rest string) {
+ switch {
+ case len(s) == 0:
+ return -1, ""
+ case s[0] == '0':
+ q = 0
+ case s[0] == '1':
+ q = 1
+ default:
+ return -1, ""
+ }
+ s = s[1:]
+ if !strings.HasPrefix(s, ".") {
+ return q, s
+ }
+ s = s[1:]
+ i := 0
+ n := 0
+ d := 1
+ for ; i < len(s); i++ {
+ b := s[i]
+ if b < '0' || b > '9' {
+ break
+ }
+ n = n*10 + int(b) - '0'
+ d *= 10
+ }
+ return q + float64(n)/float64(d), s[i:]
+}
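The fraction loop accumulates numerator and denominator digit by digit, so "q=0.8" yields 0 + 8/10. A minimal sketch of just that loop (the real function also handles the leading "0"/"1" and rejects other forms):

```go
package main

import "fmt"

// parseFraction mirrors expectQuality's digit loop for input that follows
// a leading "0.".
func parseFraction(s string) (q float64, rest string) {
	n, d, i := 0, 1, 0
	for ; i < len(s); i++ {
		b := s[i]
		if b < '0' || b > '9' {
			break
		}
		n = n*10 + int(b) - '0'
		d *= 10
	}
	return float64(n) / float64(d), s[i:]
}

func main() {
	q, rest := parseFraction("8, br") // the tail of "q=0.8, br"
	fmt.Println(q, rest)              // 0.8 ", br"
}
```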
+
+// Octet types from RFC 2616.
+var octetTypes [256]octetType
+
+type octetType byte
+
+const (
+ isToken octetType = 1 << iota
+ isSpace
+)
+
+func init() {
+ // OCTET = <any 8-bit sequence of data>
+ // CHAR = <any US-ASCII character (octets 0 - 127)>
+ // CTL = <any US-ASCII control character (octets 0 - 31) and DEL (127)>
+ // CR = <US-ASCII CR, carriage return (13)>
+ // LF = <US-ASCII LF, linefeed (10)>
+ // SP = <US-ASCII SP, space (32)>
+ // HT = <US-ASCII HT, horizontal-tab (9)>
+ // <"> = <US-ASCII double-quote mark (34)>
+ // CRLF = CR LF
+ // LWS = [CRLF] 1*( SP | HT )
+ // TEXT = <any OCTET except CTLs, but including LWS>
+ // separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <">
+ // | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT
+ // token = 1*<any CHAR except CTLs or separators>
+ // qdtext = <any TEXT except <">>
+
+ for c := 0; c < 256; c++ {
+ var t octetType
+ isCtl := c <= 31 || c == 127
+ isChar := 0 <= c && c <= 127
+ isSeparator := strings.ContainsRune(" \t\"(),/:;<=>?@[]\\{}", rune(c))
+ if strings.ContainsRune(" \t\r\n", rune(c)) {
+ t |= isSpace
+ }
+ if isChar && !isCtl && !isSeparator {
+ t |= isToken
+ }
+ octetTypes[c] = t
+ }
+}
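The token/quality parser above implements the RFC 2616 Accept-* grammar and feeds a simple q-value negotiation: the highest-q spec that names the offer (or `*`) wins, and q=0 marks an encoding as unacceptable. A standalone sketch of that selection rule (illustrative only; `negotiate` and the hard-coded specs are not part of the vendored package):

```go
package main

import "fmt"

type acceptSpec struct {
	Value string
	Q     float64
}

// negotiate mirrors the q-value selection logic above: scan offers in order,
// keep the one matched by the highest-q spec, and treat q=0 as a rejection.
func negotiate(specs []acceptSpec, offers []string) string {
	bestOffer := "identity" // assumed default, as in common negotiators
	bestQ := -1.0
	for _, offer := range offers {
		for _, spec := range specs {
			if spec.Q > bestQ && (spec.Value == "*" || spec.Value == offer) {
				bestQ = spec.Q
				bestOffer = offer
			}
		}
	}
	if bestQ == 0 {
		bestOffer = "" // everything on offer was explicitly rejected
	}
	return bestOffer
}

func main() {
	// Equivalent of "Accept-Encoding: br;q=1.0, gzip;q=0.8, *;q=0.1".
	specs := []acceptSpec{{"br", 1.0}, {"gzip", 0.8}, {"*", 0.1}}
	fmt.Println(negotiate(specs, []string{"gzip", "br"})) // prints: br
}
```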
diff --git a/vendor/github.com/andybalholm/brotli/huffman.go b/vendor/github.com/andybalholm/brotli/huffman.go
new file mode 100644
index 0000000..182f3d2
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/huffman.go
@@ -0,0 +1,653 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Utilities for building Huffman decoding tables. */
+
+const huffmanMaxCodeLength = 15
+
+/* Maximum possible Huffman table size for an alphabet size of (index * 32),
+ max code length 15 and root table bits 8. */
+var kMaxHuffmanTableSize = []uint16{
+ 256,
+ 402,
+ 436,
+ 468,
+ 500,
+ 534,
+ 566,
+ 598,
+ 630,
+ 662,
+ 694,
+ 726,
+ 758,
+ 790,
+ 822,
+ 854,
+ 886,
+ 920,
+ 952,
+ 984,
+ 1016,
+ 1048,
+ 1080,
+ 1112,
+ 1144,
+ 1176,
+ 1208,
+ 1240,
+ 1272,
+ 1304,
+ 1336,
+ 1368,
+ 1400,
+ 1432,
+ 1464,
+ 1496,
+ 1528,
+}
+
+/* BROTLI_NUM_BLOCK_LEN_SYMBOLS == 26 */
+const huffmanMaxSize26 = 396
+
+/* BROTLI_MAX_BLOCK_TYPE_SYMBOLS == 258 */
+const huffmanMaxSize258 = 632
+
+/* BROTLI_MAX_CONTEXT_MAP_SYMBOLS == 272 */
+const huffmanMaxSize272 = 646
+
+const huffmanMaxCodeLengthCodeLength = 5
+
+/* Do not create this struct directly - use the constructHuffmanCode
+ * constructor below! */
+type huffmanCode struct {
+ bits byte
+ value uint16
+}
+
+func constructHuffmanCode(bits byte, value uint16) huffmanCode {
+ var h huffmanCode
+ h.bits = bits
+ h.value = value
+ return h
+}
+
+/* Builds Huffman lookup table assuming code lengths are in symbol order. */
+
+/* Builds Huffman lookup table assuming code lengths are in symbol order.
+ Returns size of resulting table. */
+
+/* Builds a simple Huffman table. The |num_symbols| parameter is to be
+ interpreted as follows: 0 means 1 symbol, 1 means 2 symbols,
+ 2 means 3 symbols, 3 means 4 symbols with lengths [2, 2, 2, 2],
+ 4 means 4 symbols with lengths [1, 2, 3, 3]. */
+
+/* Contains a collection of Huffman trees with the same alphabet size. */
+/* max_symbol is needed due to simple codes since log2(alphabet_size) could be
+ greater than log2(max_symbol). */
+type huffmanTreeGroup struct {
+ htrees [][]huffmanCode
+ codes []huffmanCode
+ alphabet_size uint16
+ max_symbol uint16
+ num_htrees uint16
+}
+
+const reverseBitsMax = 8
+
+const reverseBitsBase = 0
+
+var kReverseBits = [1 << reverseBitsMax]byte{
+ 0x00,
+ 0x80,
+ 0x40,
+ 0xC0,
+ 0x20,
+ 0xA0,
+ 0x60,
+ 0xE0,
+ 0x10,
+ 0x90,
+ 0x50,
+ 0xD0,
+ 0x30,
+ 0xB0,
+ 0x70,
+ 0xF0,
+ 0x08,
+ 0x88,
+ 0x48,
+ 0xC8,
+ 0x28,
+ 0xA8,
+ 0x68,
+ 0xE8,
+ 0x18,
+ 0x98,
+ 0x58,
+ 0xD8,
+ 0x38,
+ 0xB8,
+ 0x78,
+ 0xF8,
+ 0x04,
+ 0x84,
+ 0x44,
+ 0xC4,
+ 0x24,
+ 0xA4,
+ 0x64,
+ 0xE4,
+ 0x14,
+ 0x94,
+ 0x54,
+ 0xD4,
+ 0x34,
+ 0xB4,
+ 0x74,
+ 0xF4,
+ 0x0C,
+ 0x8C,
+ 0x4C,
+ 0xCC,
+ 0x2C,
+ 0xAC,
+ 0x6C,
+ 0xEC,
+ 0x1C,
+ 0x9C,
+ 0x5C,
+ 0xDC,
+ 0x3C,
+ 0xBC,
+ 0x7C,
+ 0xFC,
+ 0x02,
+ 0x82,
+ 0x42,
+ 0xC2,
+ 0x22,
+ 0xA2,
+ 0x62,
+ 0xE2,
+ 0x12,
+ 0x92,
+ 0x52,
+ 0xD2,
+ 0x32,
+ 0xB2,
+ 0x72,
+ 0xF2,
+ 0x0A,
+ 0x8A,
+ 0x4A,
+ 0xCA,
+ 0x2A,
+ 0xAA,
+ 0x6A,
+ 0xEA,
+ 0x1A,
+ 0x9A,
+ 0x5A,
+ 0xDA,
+ 0x3A,
+ 0xBA,
+ 0x7A,
+ 0xFA,
+ 0x06,
+ 0x86,
+ 0x46,
+ 0xC6,
+ 0x26,
+ 0xA6,
+ 0x66,
+ 0xE6,
+ 0x16,
+ 0x96,
+ 0x56,
+ 0xD6,
+ 0x36,
+ 0xB6,
+ 0x76,
+ 0xF6,
+ 0x0E,
+ 0x8E,
+ 0x4E,
+ 0xCE,
+ 0x2E,
+ 0xAE,
+ 0x6E,
+ 0xEE,
+ 0x1E,
+ 0x9E,
+ 0x5E,
+ 0xDE,
+ 0x3E,
+ 0xBE,
+ 0x7E,
+ 0xFE,
+ 0x01,
+ 0x81,
+ 0x41,
+ 0xC1,
+ 0x21,
+ 0xA1,
+ 0x61,
+ 0xE1,
+ 0x11,
+ 0x91,
+ 0x51,
+ 0xD1,
+ 0x31,
+ 0xB1,
+ 0x71,
+ 0xF1,
+ 0x09,
+ 0x89,
+ 0x49,
+ 0xC9,
+ 0x29,
+ 0xA9,
+ 0x69,
+ 0xE9,
+ 0x19,
+ 0x99,
+ 0x59,
+ 0xD9,
+ 0x39,
+ 0xB9,
+ 0x79,
+ 0xF9,
+ 0x05,
+ 0x85,
+ 0x45,
+ 0xC5,
+ 0x25,
+ 0xA5,
+ 0x65,
+ 0xE5,
+ 0x15,
+ 0x95,
+ 0x55,
+ 0xD5,
+ 0x35,
+ 0xB5,
+ 0x75,
+ 0xF5,
+ 0x0D,
+ 0x8D,
+ 0x4D,
+ 0xCD,
+ 0x2D,
+ 0xAD,
+ 0x6D,
+ 0xED,
+ 0x1D,
+ 0x9D,
+ 0x5D,
+ 0xDD,
+ 0x3D,
+ 0xBD,
+ 0x7D,
+ 0xFD,
+ 0x03,
+ 0x83,
+ 0x43,
+ 0xC3,
+ 0x23,
+ 0xA3,
+ 0x63,
+ 0xE3,
+ 0x13,
+ 0x93,
+ 0x53,
+ 0xD3,
+ 0x33,
+ 0xB3,
+ 0x73,
+ 0xF3,
+ 0x0B,
+ 0x8B,
+ 0x4B,
+ 0xCB,
+ 0x2B,
+ 0xAB,
+ 0x6B,
+ 0xEB,
+ 0x1B,
+ 0x9B,
+ 0x5B,
+ 0xDB,
+ 0x3B,
+ 0xBB,
+ 0x7B,
+ 0xFB,
+ 0x07,
+ 0x87,
+ 0x47,
+ 0xC7,
+ 0x27,
+ 0xA7,
+ 0x67,
+ 0xE7,
+ 0x17,
+ 0x97,
+ 0x57,
+ 0xD7,
+ 0x37,
+ 0xB7,
+ 0x77,
+ 0xF7,
+ 0x0F,
+ 0x8F,
+ 0x4F,
+ 0xCF,
+ 0x2F,
+ 0xAF,
+ 0x6F,
+ 0xEF,
+ 0x1F,
+ 0x9F,
+ 0x5F,
+ 0xDF,
+ 0x3F,
+ 0xBF,
+ 0x7F,
+ 0xFF,
+}
+
+const reverseBitsLowest = (uint64(1) << (reverseBitsMax - 1 + reverseBitsBase))
+
+/* Returns reverse(num >> BROTLI_REVERSE_BITS_BASE, BROTLI_REVERSE_BITS_MAX),
+ where reverse(value, len) is the bit-wise reversal of the len least
+ significant bits of value. */
+func reverseBits8(num uint64) uint64 {
+ return uint64(kReverseBits[num])
+}
+
+/* Stores code in table[0], table[step], table[2*step], ..., table[end] */
+/* Assumes that end is an integer multiple of step */
+func replicateValue(table []huffmanCode, step int, end int, code huffmanCode) {
+ for {
+ end -= step
+ table[end] = code
+ if end <= 0 {
+ break
+ }
+ }
+}
+
+/* Returns the table width of the next 2nd level table. |count| is the histogram
+ of bit lengths for the remaining symbols, |len| is the code length of the
+ next processed symbol. */
+func nextTableBitSize(count []uint16, len int, root_bits int) int {
+ var left int = 1 << uint(len-root_bits)
+ for len < huffmanMaxCodeLength {
+ left -= int(count[len])
+ if left <= 0 {
+ break
+ }
+ len++
+ left <<= 1
+ }
+
+ return len - root_bits
+}
+
+func buildCodeLengthsHuffmanTable(table []huffmanCode, code_lengths []byte, count []uint16) {
+ var code huffmanCode /* current table entry */
+ var symbol int /* symbol index in original or sorted table */
+ var key uint64 /* prefix code */
+ var key_step uint64 /* prefix code addend */
+ var step int /* step size to replicate values in current table */
+ var table_size int /* size of current table */
+ var sorted [codeLengthCodes]int /* symbols sorted by code length */
+ var offset [huffmanMaxCodeLengthCodeLength + 1]int /* offsets in sorted table for each length */
+ var bits int
+ var bits_count int
+ assert(huffmanMaxCodeLengthCodeLength <= reverseBitsMax)
+
+ /* Generate offsets into sorted symbol table by code length. */
+ symbol = -1
+
+ bits = 1
+ var i int
+ for i = 0; i < huffmanMaxCodeLengthCodeLength; i++ {
+ symbol += int(count[bits])
+ offset[bits] = symbol
+ bits++
+ }
+
+ /* Symbols with code length 0 are placed after all other symbols. */
+ offset[0] = codeLengthCodes - 1
+
+ /* Sort symbols by length, by symbol order within each length. */
+ symbol = codeLengthCodes
+
+ for {
+ var i int
+ for i = 0; i < 6; i++ {
+ symbol--
+ sorted[offset[code_lengths[symbol]]] = symbol
+ offset[code_lengths[symbol]]--
+ }
+ if symbol == 0 {
+ break
+ }
+ }
+
+ table_size = 1 << huffmanMaxCodeLengthCodeLength
+
+ /* Special case: all symbols but one have 0 code length. */
+ if offset[0] == 0 {
+ code = constructHuffmanCode(0, uint16(sorted[0]))
+ for key = 0; key < uint64(table_size); key++ {
+ table[key] = code
+ }
+
+ return
+ }
+
+ /* Fill in table. */
+ key = 0
+
+ key_step = reverseBitsLowest
+ symbol = 0
+ bits = 1
+ step = 2
+ for {
+ for bits_count = int(count[bits]); bits_count != 0; bits_count-- {
+ code = constructHuffmanCode(byte(bits), uint16(sorted[symbol]))
+ symbol++
+ replicateValue(table[reverseBits8(key):], step, table_size, code)
+ key += key_step
+ }
+
+ step <<= 1
+ key_step >>= 1
+ bits++
+ if bits > huffmanMaxCodeLengthCodeLength {
+ break
+ }
+ }
+}
+
+func buildHuffmanTable(root_table []huffmanCode, root_bits int, symbol_lists symbolList, count []uint16) uint32 {
+ var code huffmanCode /* current table entry */
+ var table []huffmanCode /* next available space in table */
+ var len int /* current code length */
+ var symbol int /* symbol index in original or sorted table */
+ var key uint64 /* prefix code */
+ var key_step uint64 /* prefix code addend */
+ var sub_key uint64 /* 2nd level table prefix code */
+ var sub_key_step uint64 /* 2nd level table prefix code addend */
+ var step int /* step size to replicate values in current table */
+ var table_bits int /* key length of current table */
+ var table_size int /* size of current table */
+ var total_size int /* sum of root table size and 2nd level table sizes */
+ var max_length int = -1
+ var bits int
+ var bits_count int
+
+ assert(root_bits <= reverseBitsMax)
+ assert(huffmanMaxCodeLength-root_bits <= reverseBitsMax)
+
+ for symbolListGet(symbol_lists, max_length) == 0xFFFF {
+ max_length--
+ }
+ max_length += huffmanMaxCodeLength + 1
+
+ table = root_table
+ table_bits = root_bits
+ table_size = 1 << uint(table_bits)
+ total_size = table_size
+
+ /* Fill in the root table. Reduce the table size if possible,
+ and create the repetitions by memcpy. */
+ if table_bits > max_length {
+ table_bits = max_length
+ table_size = 1 << uint(table_bits)
+ }
+
+ key = 0
+ key_step = reverseBitsLowest
+ bits = 1
+ step = 2
+ for {
+ symbol = bits - (huffmanMaxCodeLength + 1)
+ for bits_count = int(count[bits]); bits_count != 0; bits_count-- {
+ symbol = int(symbolListGet(symbol_lists, symbol))
+ code = constructHuffmanCode(byte(bits), uint16(symbol))
+ replicateValue(table[reverseBits8(key):], step, table_size, code)
+ key += key_step
+ }
+
+ step <<= 1
+ key_step >>= 1
+ bits++
+ if bits > table_bits {
+ break
+ }
+ }
+
+ /* If root_bits != table_bits then replicate to fill the remaining slots. */
+ for total_size != table_size {
+ copy(table[table_size:], table[:uint(table_size)])
+ table_size <<= 1
+ }
+
+ /* Fill in 2nd level tables and add pointers to root table. */
+ key_step = reverseBitsLowest >> uint(root_bits-1)
+
+ sub_key = reverseBitsLowest << 1
+ sub_key_step = reverseBitsLowest
+ len = root_bits + 1
+ step = 2
+ for ; len <= max_length; len++ {
+ symbol = len - (huffmanMaxCodeLength + 1)
+ for ; count[len] != 0; count[len]-- {
+ if sub_key == reverseBitsLowest<<1 {
+ table = table[table_size:]
+ table_bits = nextTableBitSize(count, int(len), root_bits)
+ table_size = 1 << uint(table_bits)
+ total_size += table_size
+ sub_key = reverseBits8(key)
+ key += key_step
+ root_table[sub_key] = constructHuffmanCode(byte(table_bits+root_bits), uint16(uint64(uint(-cap(table)+cap(root_table)))-sub_key))
+ sub_key = 0
+ }
+
+ symbol = int(symbolListGet(symbol_lists, symbol))
+ code = constructHuffmanCode(byte(len-root_bits), uint16(symbol))
+ replicateValue(table[reverseBits8(sub_key):], step, table_size, code)
+ sub_key += sub_key_step
+ }
+
+ step <<= 1
+ sub_key_step >>= 1
+ }
+
+ return uint32(total_size)
+}
+
+func buildSimpleHuffmanTable(table []huffmanCode, root_bits int, val []uint16, num_symbols uint32) uint32 {
+ var table_size uint32 = 1
+ var goal_size uint32 = 1 << uint(root_bits)
+ switch num_symbols {
+ case 0:
+ table[0] = constructHuffmanCode(0, val[0])
+
+ case 1:
+ if val[1] > val[0] {
+ table[0] = constructHuffmanCode(1, val[0])
+ table[1] = constructHuffmanCode(1, val[1])
+ } else {
+ table[0] = constructHuffmanCode(1, val[1])
+ table[1] = constructHuffmanCode(1, val[0])
+ }
+
+ table_size = 2
+
+ case 2:
+ table[0] = constructHuffmanCode(1, val[0])
+ table[2] = constructHuffmanCode(1, val[0])
+ if val[2] > val[1] {
+ table[1] = constructHuffmanCode(2, val[1])
+ table[3] = constructHuffmanCode(2, val[2])
+ } else {
+ table[1] = constructHuffmanCode(2, val[2])
+ table[3] = constructHuffmanCode(2, val[1])
+ }
+
+ table_size = 4
+
+ case 3:
+ var i int
+ var k int
+ for i = 0; i < 3; i++ {
+ for k = i + 1; k < 4; k++ {
+ if val[k] < val[i] {
+ var t uint16 = val[k]
+ val[k] = val[i]
+ val[i] = t
+ }
+ }
+ }
+
+ table[0] = constructHuffmanCode(2, val[0])
+ table[2] = constructHuffmanCode(2, val[1])
+ table[1] = constructHuffmanCode(2, val[2])
+ table[3] = constructHuffmanCode(2, val[3])
+ table_size = 4
+
+ case 4:
+ if val[3] < val[2] {
+ var t uint16 = val[3]
+ val[3] = val[2]
+ val[2] = t
+ }
+
+ table[0] = constructHuffmanCode(1, val[0])
+ table[1] = constructHuffmanCode(2, val[1])
+ table[2] = constructHuffmanCode(1, val[0])
+ table[3] = constructHuffmanCode(3, val[2])
+ table[4] = constructHuffmanCode(1, val[0])
+ table[5] = constructHuffmanCode(2, val[1])
+ table[6] = constructHuffmanCode(1, val[0])
+ table[7] = constructHuffmanCode(3, val[3])
+ table_size = 8
+ }
+
+ for table_size != goal_size {
+ copy(table[table_size:], table[:uint(table_size)])
+ table_size <<= 1
+ }
+
+ return goal_size
+}
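buildHuffmanTable above indexes its tables with bit-reversed prefix keys, so replicateValue can stamp a code into every slot whose low bits match the reversed prefix. A standalone toy (assumed 3-bit, LSB-first table; `entry` and `replicate` are illustrative, not the vendored types) showing why a length-n code must occupy every 2^n-th slot:

```go
package main

import "fmt"

type entry struct {
	bits  int
	value string
}

// replicate fills every step-th slot of table with e, the same effect
// replicateValue achieves above (that version just walks backwards).
func replicate(table []entry, step int, e entry) {
	for i := 0; i < len(table); i += step {
		table[i] = e
	}
}

func main() {
	// Toy 3-bit lookup table read LSB-first: a 1-bit code occupies every
	// 2nd slot, a 2-bit code every 4th slot of its residue class, so any
	// trailing bits beyond the code length still hit the right entry.
	table := make([]entry, 8)
	replicate(table, 2, entry{1, "a"})     // code 0
	replicate(table[1:], 4, entry{2, "b"}) // code 01, read LSB-first
	replicate(table[3:], 4, entry{2, "c"}) // code 11, read LSB-first
	for i, e := range table {
		fmt.Printf("index %03b -> %q (%d bits)\n", i, e.value, e.bits)
	}
}
```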
diff --git a/vendor/github.com/andybalholm/brotli/literal_cost.go b/vendor/github.com/andybalholm/brotli/literal_cost.go
new file mode 100644
index 0000000..5a9ace9
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/literal_cost.go
@@ -0,0 +1,182 @@
+package brotli
+
+func utf8Position(last uint, c uint, clamp uint) uint {
+ if c < 128 {
+ return 0 /* Next one is the 'Byte 1' again. */
+ } else if c >= 192 { /* Next one is the 'Byte 2' of utf-8 encoding. */
+ return brotli_min_size_t(1, clamp)
+ } else {
+ /* Let's decide over the last byte if this ends the sequence. */
+ if last < 0xE0 {
+ return 0 /* Completed two or three byte coding. */
+ } else { /* Next one is the 'Byte 3' of utf-8 encoding. */
+ return brotli_min_size_t(2, clamp)
+ }
+ }
+}
+
+func decideMultiByteStatsLevel(pos uint, len uint, mask uint, data []byte) uint {
+ var counts = [3]uint{0}
+ var max_utf8 uint = 1 /* should be 2, but 1 compresses better. */
+ var last_c uint = 0
+ var i uint
+ for i = 0; i < len; i++ {
+ var c uint = uint(data[(pos+i)&mask])
+ counts[utf8Position(last_c, c, 2)]++
+ last_c = c
+ }
+
+ if counts[2] < 500 {
+ max_utf8 = 1
+ }
+
+ if counts[1]+counts[2] < 25 {
+ max_utf8 = 0
+ }
+
+ return max_utf8
+}
+
+func estimateBitCostsForLiteralsUTF8(pos uint, len uint, mask uint, data []byte, cost []float32) {
+ var max_utf8 uint = decideMultiByteStatsLevel(pos, uint(len), mask, data)
+ /* Bootstrap histograms. */
+ var histogram = [3][256]uint{[256]uint{0}}
+ var window_half uint = 495
+ var in_window uint = brotli_min_size_t(window_half, uint(len))
+ var in_window_utf8 = [3]uint{0}
+ /* max_utf8 is 0 (normal ASCII single byte modeling),
+ 1 (for 2-byte UTF-8 modeling), or 2 (for 3-byte UTF-8 modeling). */
+
+ var i uint
+ {
+ var last_c uint = 0
+ var utf8_pos uint = 0
+ for i = 0; i < in_window; i++ {
+ var c uint = uint(data[(pos+i)&mask])
+ histogram[utf8_pos][c]++
+ in_window_utf8[utf8_pos]++
+ utf8_pos = utf8Position(last_c, c, max_utf8)
+ last_c = c
+ }
+ }
+
+ /* Compute bit costs with sliding window. */
+ for i = 0; i < len; i++ {
+ if i >= window_half {
+ var c uint
+ var last_c uint
+ if i < window_half+1 {
+ c = 0
+ } else {
+ c = uint(data[(pos+i-window_half-1)&mask])
+ }
+ if i < window_half+2 {
+ last_c = 0
+ } else {
+ last_c = uint(data[(pos+i-window_half-2)&mask])
+ }
+ /* Remove a byte in the past. */
+
+ var utf8_pos2 uint = utf8Position(last_c, c, max_utf8)
+ histogram[utf8_pos2][data[(pos+i-window_half)&mask]]--
+ in_window_utf8[utf8_pos2]--
+ }
+
+ if i+window_half < len {
+ var c uint = uint(data[(pos+i+window_half-1)&mask])
+ var last_c uint = uint(data[(pos+i+window_half-2)&mask])
+ /* Add a byte in the future. */
+
+ var utf8_pos2 uint = utf8Position(last_c, c, max_utf8)
+ histogram[utf8_pos2][data[(pos+i+window_half)&mask]]++
+ in_window_utf8[utf8_pos2]++
+ }
+ {
+ var c uint
+ var last_c uint
+ if i < 1 {
+ c = 0
+ } else {
+ c = uint(data[(pos+i-1)&mask])
+ }
+ if i < 2 {
+ last_c = 0
+ } else {
+ last_c = uint(data[(pos+i-2)&mask])
+ }
+ var utf8_pos uint = utf8Position(last_c, c, max_utf8)
+ var masked_pos uint = (pos + i) & mask
+ var histo uint = histogram[utf8_pos][data[masked_pos]]
+ var lit_cost float64
+ if histo == 0 {
+ histo = 1
+ }
+
+ lit_cost = fastLog2(in_window_utf8[utf8_pos]) - fastLog2(histo)
+ lit_cost += 0.02905
+ if lit_cost < 1.0 {
+ lit_cost *= 0.5
+ lit_cost += 0.5
+ }
+
+ /* Make the first bytes more expensive -- seems to help, not sure why.
+ Perhaps because the entropy source is changing its properties
+ rapidly in the beginning of the file, perhaps because the beginning
+ of the data is a statistical "anomaly". */
+ if i < 2000 {
+ lit_cost += 0.7 - (float64(2000-i) / 2000.0 * 0.35)
+ }
+
+ cost[i] = float32(lit_cost)
+ }
+ }
+}
+
+func estimateBitCostsForLiterals(pos uint, len uint, mask uint, data []byte, cost []float32) {
+ if isMostlyUTF8(data, pos, mask, uint(len), kMinUTF8Ratio) {
+ estimateBitCostsForLiteralsUTF8(pos, uint(len), mask, data, cost)
+ return
+ } else {
+ var histogram = [256]uint{0}
+ var window_half uint = 2000
+ var in_window uint = brotli_min_size_t(window_half, uint(len))
+ var i uint
+ /* Bootstrap histogram. */
+ for i = 0; i < in_window; i++ {
+ histogram[data[(pos+i)&mask]]++
+ }
+
+ /* Compute bit costs with sliding window. */
+ for i = 0; i < len; i++ {
+ var histo uint
+ if i >= window_half {
+ /* Remove a byte in the past. */
+ histogram[data[(pos+i-window_half)&mask]]--
+
+ in_window--
+ }
+
+ if i+window_half < len {
+ /* Add a byte in the future. */
+ histogram[data[(pos+i+window_half)&mask]]++
+
+ in_window++
+ }
+
+ histo = histogram[data[(pos+i)&mask]]
+ if histo == 0 {
+ histo = 1
+ }
+ {
+ var lit_cost float64 = fastLog2(in_window) - fastLog2(histo)
+ lit_cost += 0.029
+ if lit_cost < 1.0 {
+ lit_cost *= 0.5
+ lit_cost += 0.5
+ }
+
+ cost[i] = float32(lit_cost)
+ }
+ }
+ }
+}
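Both estimators above price a literal at roughly log2(window/frequency) bits, clamping unseen bytes to a count of one, plus small tuning constants (0.029 and 0.02905) and a discount for costs below 1.0 bit. A standalone sketch of the core model (assumed simplification: one static histogram instead of the sliding window; all names are illustrative):

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	data := []byte("abracadabra")

	// Count byte frequencies over the whole "window".
	var histo [256]int
	for _, b := range data {
		histo[b]++
	}

	// A byte seen k times in a window of n bytes costs about log2(n/k) bits.
	for _, b := range []byte{'a', 'b', 'z'} {
		k := histo[b]
		if k == 0 {
			k = 1 // same clamp the estimator applies to unseen bytes
		}
		cost := math.Log2(float64(len(data))) - math.Log2(float64(k))
		fmt.Printf("%q: ~%.2f bits\n", b, cost)
	}
}
```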
diff --git a/vendor/github.com/andybalholm/brotli/memory.go b/vendor/github.com/andybalholm/brotli/memory.go
new file mode 100644
index 0000000..a07c705
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/memory.go
@@ -0,0 +1,66 @@
+package brotli
+
+/* Copyright 2016 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/*
+Dynamically grows a slice's capacity to at least the requested size r,
+doubling the current capacity c until it is large enough, and reusing the
+existing backing array when it already has sufficient capacity.
+*/
+func brotli_ensure_capacity_uint8_t(a *[]byte, c *uint, r uint) {
+ if *c < r {
+ var new_size uint = *c
+ if new_size == 0 {
+ new_size = r
+ }
+
+ for new_size < r {
+ new_size *= 2
+ }
+
+ if cap(*a) < int(new_size) {
+ var new_array []byte = make([]byte, new_size)
+ if *c != 0 {
+ copy(new_array, (*a)[:*c])
+ }
+
+ *a = new_array
+ } else {
+ *a = (*a)[:new_size]
+ }
+
+ *c = new_size
+ }
+}
+
+func brotli_ensure_capacity_uint32_t(a *[]uint32, c *uint, r uint) {
+ var new_array []uint32
+ if *c < r {
+ var new_size uint = *c
+ if new_size == 0 {
+ new_size = r
+ }
+
+ for new_size < r {
+ new_size *= 2
+ }
+
+ if cap(*a) < int(new_size) {
+ new_array = make([]uint32, new_size)
+ if *c != 0 {
+ copy(new_array, (*a)[:*c])
+ }
+
+ *a = new_array
+ } else {
+ *a = (*a)[:new_size]
+ }
+ *c = new_size
+ }
+}
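Both helpers grow capacity geometrically: start at the first request and double until the request fits, reusing the backing array when cap already suffices, which makes a sequence of appends amortized O(1) per element. A standalone sketch of the policy (the `grow` helper is illustrative):

```go
package main

import "fmt"

// grow returns the new capacity for request r given current capacity c,
// following the doubling rule used by the brotli_ensure_capacity helpers.
func grow(c, r uint) uint {
	if c >= r {
		return c
	}
	n := c
	if n == 0 {
		n = r
	}
	for n < r {
		n *= 2
	}
	return n
}

func main() {
	var c uint
	for _, r := range []uint{3, 5, 9, 17} {
		c = grow(c, r)
		fmt.Printf("request %2d -> capacity %2d\n", r, c)
	}
	// request 3 -> 3, request 5 -> 6, request 9 -> 12, request 17 -> 24
}
```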
diff --git a/vendor/github.com/andybalholm/brotli/metablock.go b/vendor/github.com/andybalholm/brotli/metablock.go
new file mode 100644
index 0000000..3014df8
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/metablock.go
@@ -0,0 +1,574 @@
+package brotli
+
+import (
+ "sync"
+)
+
+/* Copyright 2014 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Algorithms for distributing the literals and commands of a metablock between
+ block types and contexts. */
+
+type metaBlockSplit struct {
+ literal_split blockSplit
+ command_split blockSplit
+ distance_split blockSplit
+ literal_context_map []uint32
+ literal_context_map_size uint
+ distance_context_map []uint32
+ distance_context_map_size uint
+ literal_histograms []histogramLiteral
+ literal_histograms_size uint
+ command_histograms []histogramCommand
+ command_histograms_size uint
+ distance_histograms []histogramDistance
+ distance_histograms_size uint
+}
+
+var metaBlockPool sync.Pool
+
+func getMetaBlockSplit() *metaBlockSplit {
+ mb, _ := metaBlockPool.Get().(*metaBlockSplit)
+
+ if mb == nil {
+ mb = &metaBlockSplit{}
+ } else {
+ initBlockSplit(&mb.literal_split)
+ initBlockSplit(&mb.command_split)
+ initBlockSplit(&mb.distance_split)
+ mb.literal_context_map = mb.literal_context_map[:0]
+ mb.literal_context_map_size = 0
+ mb.distance_context_map = mb.distance_context_map[:0]
+ mb.distance_context_map_size = 0
+ mb.literal_histograms = mb.literal_histograms[:0]
+ mb.command_histograms = mb.command_histograms[:0]
+ mb.distance_histograms = mb.distance_histograms[:0]
+ }
+ return mb
+}
+
+func freeMetaBlockSplit(mb *metaBlockSplit) {
+ metaBlockPool.Put(mb)
+}
+
+func initDistanceParams(params *encoderParams, npostfix uint32, ndirect uint32) {
+ var dist_params *distanceParams = &params.dist
+ var alphabet_size uint32
+ var max_distance uint32
+
+ dist_params.distance_postfix_bits = npostfix
+ dist_params.num_direct_distance_codes = ndirect
+
+ alphabet_size = uint32(distanceAlphabetSize(uint(npostfix), uint(ndirect), maxDistanceBits))
+ max_distance = ndirect + (1 << (maxDistanceBits + npostfix + 2)) - (1 << (npostfix + 2))
+
+ if params.large_window {
+ var bound = [maxNpostfix + 1]uint32{0, 4, 12, 28}
+ var postfix uint32 = 1 << npostfix
+ alphabet_size = uint32(distanceAlphabetSize(uint(npostfix), uint(ndirect), largeMaxDistanceBits))
+
+ /* The maximum distance is set so that no distance symbol used can encode
+ a distance larger than BROTLI_MAX_ALLOWED_DISTANCE with all
+ its extra bits set. */
+ if ndirect < bound[npostfix] {
+ max_distance = maxAllowedDistance - (bound[npostfix] - ndirect)
+ } else if ndirect >= bound[npostfix]+postfix {
+ max_distance = (3 << 29) - 4 + (ndirect - bound[npostfix])
+ } else {
+ max_distance = maxAllowedDistance
+ }
+ }
+
+ dist_params.alphabet_size = alphabet_size
+ dist_params.max_distance = uint(max_distance)
+}
+
+func recomputeDistancePrefixes(cmds []command, orig_params *distanceParams, new_params *distanceParams) {
+ if orig_params.distance_postfix_bits == new_params.distance_postfix_bits && orig_params.num_direct_distance_codes == new_params.num_direct_distance_codes {
+ return
+ }
+
+ for i := range cmds {
+ var cmd *command = &cmds[i]
+ if commandCopyLen(cmd) != 0 && cmd.cmd_prefix_ >= 128 {
+ prefixEncodeCopyDistance(uint(commandRestoreDistanceCode(cmd, orig_params)), uint(new_params.num_direct_distance_codes), uint(new_params.distance_postfix_bits), &cmd.dist_prefix_, &cmd.dist_extra_)
+ }
+ }
+}
+
+func computeDistanceCost(cmds []command, orig_params *distanceParams, new_params *distanceParams, cost *float64) bool {
+ var equal_params bool = false
+ var dist_prefix uint16
+ var dist_extra uint32
+ var extra_bits float64 = 0.0
+ var histo histogramDistance
+ histogramClearDistance(&histo)
+
+ if orig_params.distance_postfix_bits == new_params.distance_postfix_bits && orig_params.num_direct_distance_codes == new_params.num_direct_distance_codes {
+ equal_params = true
+ }
+
+ for i := range cmds {
+ cmd := &cmds[i]
+ if commandCopyLen(cmd) != 0 && cmd.cmd_prefix_ >= 128 {
+ if equal_params {
+ dist_prefix = cmd.dist_prefix_
+ } else {
+ var distance uint32 = commandRestoreDistanceCode(cmd, orig_params)
+ if distance > uint32(new_params.max_distance) {
+ return false
+ }
+
+ prefixEncodeCopyDistance(uint(distance), uint(new_params.num_direct_distance_codes), uint(new_params.distance_postfix_bits), &dist_prefix, &dist_extra)
+ }
+
+ histogramAddDistance(&histo, uint(dist_prefix)&0x3FF)
+ extra_bits += float64(dist_prefix >> 10)
+ }
+ }
+
+ *cost = populationCostDistance(&histo) + extra_bits
+ return true
+}
+
+/* Histogram ids need to fit in one byte. */
+var buildMetaBlock_kMaxNumberOfHistograms uint = 256
+
+func buildMetaBlock(ringbuffer []byte, pos uint, mask uint, params *encoderParams, prev_byte byte, prev_byte2 byte, cmds []command, literal_context_mode int, mb *metaBlockSplit) {
+ var distance_histograms []histogramDistance
+ var literal_histograms []histogramLiteral
+ var literal_context_modes []int = nil
+ var literal_histograms_size uint
+ var distance_histograms_size uint
+ var i uint
+ var literal_context_multiplier uint = 1
+ var npostfix uint32
+ var ndirect_msb uint32 = 0
+ var check_orig bool = true
+ var best_dist_cost float64 = 1e99
+ var orig_params encoderParams = *params
+
+ var new_params encoderParams = *params
+
+ for npostfix = 0; npostfix <= maxNpostfix; npostfix++ {
+ for ; ndirect_msb < 16; ndirect_msb++ {
+ var ndirect uint32 = ndirect_msb << npostfix
+ var skip bool
+ var dist_cost float64
+ initDistanceParams(&new_params, npostfix, ndirect)
+ if npostfix == orig_params.dist.distance_postfix_bits && ndirect == orig_params.dist.num_direct_distance_codes {
+ check_orig = false
+ }
+
+ skip = !computeDistanceCost(cmds, &orig_params.dist, &new_params.dist, &dist_cost)
+ if skip || (dist_cost > best_dist_cost) {
+ break
+ }
+
+ best_dist_cost = dist_cost
+ params.dist = new_params.dist
+ }
+
+ if ndirect_msb > 0 {
+ ndirect_msb--
+ }
+ ndirect_msb /= 2
+ }
+
+ if check_orig {
+ var dist_cost float64
+ computeDistanceCost(cmds, &orig_params.dist, &orig_params.dist, &dist_cost)
+ if dist_cost < best_dist_cost {
+ /* NB: currently unused; uncomment when more param tuning is added. */
+ /* best_dist_cost = dist_cost; */
+ params.dist = orig_params.dist
+ }
+ }
+
+ recomputeDistancePrefixes(cmds, &orig_params.dist, ¶ms.dist)
+
+ splitBlock(cmds, ringbuffer, pos, mask, params, &mb.literal_split, &mb.command_split, &mb.distance_split)
+
+ if !params.disable_literal_context_modeling {
+ literal_context_multiplier = 1 << literalContextBits
+ literal_context_modes = make([]int, (mb.literal_split.num_types))
+ for i = 0; i < mb.literal_split.num_types; i++ {
+ literal_context_modes[i] = literal_context_mode
+ }
+ }
+
+ literal_histograms_size = mb.literal_split.num_types * literal_context_multiplier
+ literal_histograms = make([]histogramLiteral, literal_histograms_size)
+ clearHistogramsLiteral(literal_histograms, literal_histograms_size)
+
+ distance_histograms_size = mb.distance_split.num_types << distanceContextBits
+ distance_histograms = make([]histogramDistance, distance_histograms_size)
+ clearHistogramsDistance(distance_histograms, distance_histograms_size)
+
+ mb.command_histograms_size = mb.command_split.num_types
+ if cap(mb.command_histograms) < int(mb.command_histograms_size) {
+ mb.command_histograms = make([]histogramCommand, (mb.command_histograms_size))
+ } else {
+ mb.command_histograms = mb.command_histograms[:mb.command_histograms_size]
+ }
+ clearHistogramsCommand(mb.command_histograms, mb.command_histograms_size)
+
+ buildHistogramsWithContext(cmds, &mb.literal_split, &mb.command_split, &mb.distance_split, ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_modes, literal_histograms, mb.command_histograms, distance_histograms)
+ literal_context_modes = nil
+
+ mb.literal_context_map_size = mb.literal_split.num_types << literalContextBits
+ if cap(mb.literal_context_map) < int(mb.literal_context_map_size) {
+ mb.literal_context_map = make([]uint32, (mb.literal_context_map_size))
+ } else {
+ mb.literal_context_map = mb.literal_context_map[:mb.literal_context_map_size]
+ }
+
+ mb.literal_histograms_size = mb.literal_context_map_size
+ if cap(mb.literal_histograms) < int(mb.literal_histograms_size) {
+ mb.literal_histograms = make([]histogramLiteral, (mb.literal_histograms_size))
+ } else {
+ mb.literal_histograms = mb.literal_histograms[:mb.literal_histograms_size]
+ }
+
+ clusterHistogramsLiteral(literal_histograms, literal_histograms_size, buildMetaBlock_kMaxNumberOfHistograms, mb.literal_histograms, &mb.literal_histograms_size, mb.literal_context_map)
+ literal_histograms = nil
+
+ if params.disable_literal_context_modeling {
+ /* Distribute assignment to all contexts. */
+ for i = mb.literal_split.num_types; i != 0; {
+ var j uint = 0
+ i--
+ for ; j < 1<<literalContextBits; j++ {
+ mb.literal_context_map[(i<<literalContextBits)+j] = uint32(i)
+ }
+ }
+ }
+}
+
+/* Does one of three things:
+ (1) emits the current block with a new block type;
+ (2) emits the current block with the type of the second last block;
+ (3) merges the current block with the last block. */
+func contextBlockSplitterFinishBlock(self *contextBlockSplitter, is_final bool) {
+ var split *blockSplit = self.split_
+ var num_contexts uint = self.num_contexts_
+ var last_entropy []float64 = self.last_entropy_[:]
+ var histograms []histogramLiteral = self.histograms_
+
+ if self.block_size_ < self.min_block_size_ {
+ self.block_size_ = self.min_block_size_
+ }
+
+ if self.num_blocks_ == 0 {
+ var i uint
+
+ /* Create first block. */
+ split.lengths[0] = uint32(self.block_size_)
+
+ split.types[0] = 0
+
+ for i = 0; i < num_contexts; i++ {
+ last_entropy[i] = bitsEntropy(histograms[i].data_[:], self.alphabet_size_)
+ last_entropy[num_contexts+i] = last_entropy[i]
+ }
+
+ self.num_blocks_++
+ split.num_types++
+ self.curr_histogram_ix_ += num_contexts
+ if self.curr_histogram_ix_ < *self.histograms_size_ {
+ clearHistogramsLiteral(self.histograms_[self.curr_histogram_ix_:], self.num_contexts_)
+ }
+
+ self.block_size_ = 0
+ } else if self.block_size_ > 0 {
+ var entropy [maxStaticContexts]float64
+ var combined_histo []histogramLiteral = make([]histogramLiteral, (2 * num_contexts))
+ var combined_entropy [2 * maxStaticContexts]float64
+ var diff = [2]float64{0.0}
+ /* Try merging the set of histograms for the current block type with the
+ respective set of histograms for the last and second last block types.
+ Decide over the split based on the total reduction of entropy across
+ all contexts. */
+
+ var i uint
+ for i = 0; i < num_contexts; i++ {
+ var curr_histo_ix uint = self.curr_histogram_ix_ + i
+ var j uint
+ entropy[i] = bitsEntropy(histograms[curr_histo_ix].data_[:], self.alphabet_size_)
+ for j = 0; j < 2; j++ {
+ var jx uint = j*num_contexts + i
+ var last_histogram_ix uint = self.last_histogram_ix_[j] + i
+ combined_histo[jx] = histograms[curr_histo_ix]
+ histogramAddHistogramLiteral(&combined_histo[jx], &histograms[last_histogram_ix])
+ combined_entropy[jx] = bitsEntropy(combined_histo[jx].data_[0:], self.alphabet_size_)
+ diff[j] += combined_entropy[jx] - entropy[i] - last_entropy[jx]
+ }
+ }
+
+ if split.num_types < self.max_block_types_ && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ {
+ /* Create new block. */
+ split.lengths[self.num_blocks_] = uint32(self.block_size_)
+
+ split.types[self.num_blocks_] = byte(split.num_types)
+ self.last_histogram_ix_[1] = self.last_histogram_ix_[0]
+ self.last_histogram_ix_[0] = split.num_types * num_contexts
+ for i = 0; i < num_contexts; i++ {
+ last_entropy[num_contexts+i] = last_entropy[i]
+ last_entropy[i] = entropy[i]
+ }
+
+ self.num_blocks_++
+ split.num_types++
+ self.curr_histogram_ix_ += num_contexts
+ if self.curr_histogram_ix_ < *self.histograms_size_ {
+ clearHistogramsLiteral(self.histograms_[self.curr_histogram_ix_:], self.num_contexts_)
+ }
+
+ self.block_size_ = 0
+ self.merge_last_count_ = 0
+ self.target_block_size_ = self.min_block_size_
+ } else if diff[1] < diff[0]-20.0 {
+ split.lengths[self.num_blocks_] = uint32(self.block_size_)
+ split.types[self.num_blocks_] = split.types[self.num_blocks_-2]
+ /* Combine this block with second last block. */
+
+ var tmp uint = self.last_histogram_ix_[0]
+ self.last_histogram_ix_[0] = self.last_histogram_ix_[1]
+ self.last_histogram_ix_[1] = tmp
+ for i = 0; i < num_contexts; i++ {
+ histograms[self.last_histogram_ix_[0]+i] = combined_histo[num_contexts+i]
+ last_entropy[num_contexts+i] = last_entropy[i]
+ last_entropy[i] = combined_entropy[num_contexts+i]
+ histogramClearLiteral(&histograms[self.curr_histogram_ix_+i])
+ }
+
+ self.num_blocks_++
+ self.block_size_ = 0
+ self.merge_last_count_ = 0
+ self.target_block_size_ = self.min_block_size_
+ } else {
+ /* Combine this block with last block. */
+ split.lengths[self.num_blocks_-1] += uint32(self.block_size_)
+
+ for i = 0; i < num_contexts; i++ {
+ histograms[self.last_histogram_ix_[0]+i] = combined_histo[i]
+ last_entropy[i] = combined_entropy[i]
+ if split.num_types == 1 {
+ last_entropy[num_contexts+i] = last_entropy[i]
+ }
+
+ histogramClearLiteral(&histograms[self.curr_histogram_ix_+i])
+ }
+
+ self.block_size_ = 0
+ self.merge_last_count_++
+ if self.merge_last_count_ > 1 {
+ self.target_block_size_ += self.min_block_size_
+ }
+ }
+
+ combined_histo = nil
+ }
+
+ if is_final {
+ *self.histograms_size_ = split.num_types * num_contexts
+ split.num_blocks = self.num_blocks_
+ }
+}
+
+/* Adds the next symbol to the current block type and context. When the
+ current block reaches the target size, decides on merging the block. */
+func contextBlockSplitterAddSymbol(self *contextBlockSplitter, symbol uint, context uint) {
+ histogramAddLiteral(&self.histograms_[self.curr_histogram_ix_+context], symbol)
+ self.block_size_++
+ if self.block_size_ == self.target_block_size_ {
+ contextBlockSplitterFinishBlock(self, false) /* is_final = false */
+ }
+}
+
+func mapStaticContexts(num_contexts uint, static_context_map []uint32, mb *metaBlockSplit) {
+ var i uint
+ mb.literal_context_map_size = mb.literal_split.num_types << literalContextBits
+ if cap(mb.literal_context_map) < int(mb.literal_context_map_size) {
+ mb.literal_context_map = make([]uint32, (mb.literal_context_map_size))
+ } else {
+ mb.literal_context_map = mb.literal_context_map[:mb.literal_context_map_size]
+ }
+
+ for i = 0; i < mb.literal_split.num_types; i++ {
+ var offset uint32 = uint32(i * num_contexts)
+ var j uint
+ for j = 0; j < 1<<literalContextBits; j++ {
+ mb.literal_context_map[(i<<literalContextBits)+j] = offset + static_context_map[j]
+ }
+ }
+}
+
+func buildMetaBlockGreedyInternal(ringbuffer []byte, pos uint, mask uint, prev_byte byte, prev_byte2 byte, literal_context_lut contextLUT, num_contexts uint, static_context_map []uint32, commands []command, mb *metaBlockSplit) {
+ var lit_blocks struct {
+ plain blockSplitterLiteral
+ ctx contextBlockSplitter
+ }
+ var cmd_blocks blockSplitterCommand
+ var dist_blocks blockSplitterDistance
+ var num_literals uint = 0
+ for i := range commands {
+ num_literals += uint(commands[i].insert_len_)
+ }
+
+ if num_contexts == 1 {
+ initBlockSplitterLiteral(&lit_blocks.plain, 256, 512, 400.0, num_literals, &mb.literal_split, &mb.literal_histograms, &mb.literal_histograms_size)
+ } else {
+ initContextBlockSplitter(&lit_blocks.ctx, 256, num_contexts, 512, 400.0, num_literals, &mb.literal_split, &mb.literal_histograms, &mb.literal_histograms_size)
+ }
+
+ initBlockSplitterCommand(&cmd_blocks, numCommandSymbols, 1024, 500.0, uint(len(commands)), &mb.command_split, &mb.command_histograms, &mb.command_histograms_size)
+ initBlockSplitterDistance(&dist_blocks, 64, 512, 100.0, uint(len(commands)), &mb.distance_split, &mb.distance_histograms, &mb.distance_histograms_size)
+
+ for i := range commands {
+ var cmd *command = &commands[i]
+ var j uint
+ blockSplitterAddSymbolCommand(&cmd_blocks, uint(cmd.cmd_prefix_))
+ for j = uint(cmd.insert_len_); j != 0; j-- {
+ var literal byte = ringbuffer[pos&mask]
+ if num_contexts == 1 {
+ blockSplitterAddSymbolLiteral(&lit_blocks.plain, uint(literal))
+ } else {
+ var context uint = uint(getContext(prev_byte, prev_byte2, literal_context_lut))
+ contextBlockSplitterAddSymbol(&lit_blocks.ctx, uint(literal), uint(static_context_map[context]))
+ }
+
+ prev_byte2 = prev_byte
+ prev_byte = literal
+ pos++
+ }
+
+ pos += uint(commandCopyLen(cmd))
+ if commandCopyLen(cmd) != 0 {
+ prev_byte2 = ringbuffer[(pos-2)&mask]
+ prev_byte = ringbuffer[(pos-1)&mask]
+ if cmd.cmd_prefix_ >= 128 {
+ blockSplitterAddSymbolDistance(&dist_blocks, uint(cmd.dist_prefix_)&0x3FF)
+ }
+ }
+ }
+
+ if num_contexts == 1 {
+ blockSplitterFinishBlockLiteral(&lit_blocks.plain, true) /* is_final = true */
+ } else {
+ contextBlockSplitterFinishBlock(&lit_blocks.ctx, true) /* is_final = true */
+ }
+
+ blockSplitterFinishBlockCommand(&cmd_blocks, true) /* is_final = true */
+ blockSplitterFinishBlockDistance(&dist_blocks, true) /* is_final = true */
+
+ if num_contexts > 1 {
+ mapStaticContexts(num_contexts, static_context_map, mb)
+ }
+}
+
+func buildMetaBlockGreedy(ringbuffer []byte, pos uint, mask uint, prev_byte byte, prev_byte2 byte, literal_context_lut contextLUT, num_contexts uint, static_context_map []uint32, commands []command, mb *metaBlockSplit) {
+ if num_contexts == 1 {
+ buildMetaBlockGreedyInternal(ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_lut, 1, nil, commands, mb)
+ } else {
+ buildMetaBlockGreedyInternal(ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_lut, num_contexts, static_context_map, commands, mb)
+ }
+}
+
+func optimizeHistograms(num_distance_codes uint32, mb *metaBlockSplit) {
+ var good_for_rle [numCommandSymbols]byte
+ var i uint
+ for i = 0; i < mb.literal_histograms_size; i++ {
+ optimizeHuffmanCountsForRLE(256, mb.literal_histograms[i].data_[:], good_for_rle[:])
+ }
+
+ for i = 0; i < mb.command_histograms_size; i++ {
+ optimizeHuffmanCountsForRLE(numCommandSymbols, mb.command_histograms[i].data_[:], good_for_rle[:])
+ }
+
+ for i = 0; i < mb.distance_histograms_size; i++ {
+ optimizeHuffmanCountsForRLE(uint(num_distance_codes), mb.distance_histograms[i].data_[:], good_for_rle[:])
+ }
+}
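getMetaBlockSplit and freeMetaBlockSplit above recycle metaBlockSplit allocations through a sync.Pool, reslicing each field to length zero on reuse so the backing arrays survive across meta-blocks. The same pattern in miniature (standalone sketch; `scratch` and `get` are illustrative):

```go
package main

import (
	"fmt"
	"sync"
)

type scratch struct{ buf []int }

var pool sync.Pool

// get returns a pooled scratch object if one is available, resliced to
// length zero so its backing array (and capacity) is reused.
func get() *scratch {
	s, _ := pool.Get().(*scratch)
	if s == nil {
		return &scratch{}
	}
	s.buf = s.buf[:0] // keep capacity, drop contents
	return s
}

func main() {
	s := get()
	s.buf = append(s.buf, 1, 2, 3)
	pool.Put(s)
	t := get() // likely the same object; its capacity is then retained
	fmt.Println(len(t.buf), cap(t.buf) >= 3)
}
```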
diff --git a/vendor/github.com/andybalholm/brotli/metablock_command.go b/vendor/github.com/andybalholm/brotli/metablock_command.go
new file mode 100644
index 0000000..14c7b77
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/metablock_command.go
@@ -0,0 +1,165 @@
+package brotli
+
+/* Copyright 2015 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Greedy block splitter for one block category (literal, command or distance).
+ */
+type blockSplitterCommand struct {
+ alphabet_size_ uint
+ min_block_size_ uint
+ split_threshold_ float64
+ num_blocks_ uint
+ split_ *blockSplit
+ histograms_ []histogramCommand
+ histograms_size_ *uint
+ target_block_size_ uint
+ block_size_ uint
+ curr_histogram_ix_ uint
+ last_histogram_ix_ [2]uint
+ last_entropy_ [2]float64
+ merge_last_count_ uint
+}
+
+func initBlockSplitterCommand(self *blockSplitterCommand, alphabet_size uint, min_block_size uint, split_threshold float64, num_symbols uint, split *blockSplit, histograms *[]histogramCommand, histograms_size *uint) {
+ var max_num_blocks uint = num_symbols/min_block_size + 1
+ var max_num_types uint = brotli_min_size_t(max_num_blocks, maxNumberOfBlockTypes+1)
+ /* We have to allocate one more histogram than the maximum number of block
+ types for the current histogram when the meta-block is too big. */
+ self.alphabet_size_ = alphabet_size
+
+ self.min_block_size_ = min_block_size
+ self.split_threshold_ = split_threshold
+ self.num_blocks_ = 0
+ self.split_ = split
+ self.histograms_size_ = histograms_size
+ self.target_block_size_ = min_block_size
+ self.block_size_ = 0
+ self.curr_histogram_ix_ = 0
+ self.merge_last_count_ = 0
+ brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, max_num_blocks)
+ brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, max_num_blocks)
+ self.split_.num_blocks = max_num_blocks
+ *histograms_size = max_num_types
+ if histograms == nil || cap(*histograms) < int(*histograms_size) {
+ *histograms = make([]histogramCommand, (*histograms_size))
+ } else {
+ *histograms = (*histograms)[:*histograms_size]
+ }
+ self.histograms_ = *histograms
+
+ /* Clear only current histogram. */
+ histogramClearCommand(&self.histograms_[0])
+
+ self.last_histogram_ix_[1] = 0
+ self.last_histogram_ix_[0] = self.last_histogram_ix_[1]
+}
+
+/* Does one of three things:
+ (1) emits the current block with a new block type;
+ (2) emits the current block with the type of the second last block;
+ (3) merges the current block with the last block. */
+func blockSplitterFinishBlockCommand(self *blockSplitterCommand, is_final bool) {
+ var split *blockSplit = self.split_
+ var last_entropy []float64 = self.last_entropy_[:]
+ var histograms []histogramCommand = self.histograms_
+ self.block_size_ = brotli_max_size_t(self.block_size_, self.min_block_size_)
+ if self.num_blocks_ == 0 {
+ /* Create first block. */
+ split.lengths[0] = uint32(self.block_size_)
+
+ split.types[0] = 0
+ last_entropy[0] = bitsEntropy(histograms[0].data_[:], self.alphabet_size_)
+ last_entropy[1] = last_entropy[0]
+ self.num_blocks_++
+ split.num_types++
+ self.curr_histogram_ix_++
+ if self.curr_histogram_ix_ < *self.histograms_size_ {
+ histogramClearCommand(&histograms[self.curr_histogram_ix_])
+ }
+ self.block_size_ = 0
+ } else if self.block_size_ > 0 {
+ var entropy float64 = bitsEntropy(histograms[self.curr_histogram_ix_].data_[:], self.alphabet_size_)
+ var combined_histo [2]histogramCommand
+ var combined_entropy [2]float64
+ var diff [2]float64
+ var j uint
+ for j = 0; j < 2; j++ {
+ var last_histogram_ix uint = self.last_histogram_ix_[j]
+ combined_histo[j] = histograms[self.curr_histogram_ix_]
+ histogramAddHistogramCommand(&combined_histo[j], &histograms[last_histogram_ix])
+ combined_entropy[j] = bitsEntropy(combined_histo[j].data_[0:], self.alphabet_size_)
+ diff[j] = combined_entropy[j] - entropy - last_entropy[j]
+ }
+
+ if split.num_types < maxNumberOfBlockTypes && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ {
+ /* Create new block. */
+ split.lengths[self.num_blocks_] = uint32(self.block_size_)
+
+ split.types[self.num_blocks_] = byte(split.num_types)
+ self.last_histogram_ix_[1] = self.last_histogram_ix_[0]
+ self.last_histogram_ix_[0] = uint(byte(split.num_types))
+ last_entropy[1] = last_entropy[0]
+ last_entropy[0] = entropy
+ self.num_blocks_++
+ split.num_types++
+ self.curr_histogram_ix_++
+ if self.curr_histogram_ix_ < *self.histograms_size_ {
+ histogramClearCommand(&histograms[self.curr_histogram_ix_])
+ }
+ self.block_size_ = 0
+ self.merge_last_count_ = 0
+ self.target_block_size_ = self.min_block_size_
+ } else if diff[1] < diff[0]-20.0 {
+ split.lengths[self.num_blocks_] = uint32(self.block_size_)
+ split.types[self.num_blocks_] = split.types[self.num_blocks_-2]
+ /* Combine this block with second last block. */
+
+ var tmp uint = self.last_histogram_ix_[0]
+ self.last_histogram_ix_[0] = self.last_histogram_ix_[1]
+ self.last_histogram_ix_[1] = tmp
+ histograms[self.last_histogram_ix_[0]] = combined_histo[1]
+ last_entropy[1] = last_entropy[0]
+ last_entropy[0] = combined_entropy[1]
+ self.num_blocks_++
+ self.block_size_ = 0
+ histogramClearCommand(&histograms[self.curr_histogram_ix_])
+ self.merge_last_count_ = 0
+ self.target_block_size_ = self.min_block_size_
+ } else {
+ /* Combine this block with last block. */
+ split.lengths[self.num_blocks_-1] += uint32(self.block_size_)
+
+ histograms[self.last_histogram_ix_[0]] = combined_histo[0]
+ last_entropy[0] = combined_entropy[0]
+ if split.num_types == 1 {
+ last_entropy[1] = last_entropy[0]
+ }
+
+ self.block_size_ = 0
+ histogramClearCommand(&histograms[self.curr_histogram_ix_])
+ self.merge_last_count_++
+ if self.merge_last_count_ > 1 {
+ self.target_block_size_ += self.min_block_size_
+ }
+ }
+ }
+
+ if is_final {
+ *self.histograms_size_ = split.num_types
+ split.num_blocks = self.num_blocks_
+ }
+}
+
+/* Adds the next symbol to the current histogram. When the current histogram
+ reaches the target size, decides on merging the block. */
+func blockSplitterAddSymbolCommand(self *blockSplitterCommand, symbol uint) {
+ histogramAddCommand(&self.histograms_[self.curr_histogram_ix_], symbol)
+ self.block_size_++
+ if self.block_size_ == self.target_block_size_ {
+ blockSplitterFinishBlockCommand(self, false) /* is_final = false */
+ }
+}
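The finish-block decision above is driven by entropy deltas: diff[j] measures how many extra bits merging the current block into a previous one would cost compared with coding them separately, and a new block type is emitted only when both deltas exceed split_threshold_. A standalone sketch of that comparison (simplified total-bits helper; the histograms and the "penalty" framing are illustrative):

```go
package main

import (
	"fmt"
	"math"
)

// bits returns the total bits needed to code a histogram with its own
// entropy coder: -sum(count * log2(count/total)).
func bits(h []float64) float64 {
	var n, e float64
	for _, c := range h {
		n += c
	}
	for _, c := range h {
		if c > 0 {
			e -= c * math.Log2(c/n)
		}
	}
	return e
}

func main() {
	a := []float64{90, 10, 0, 0}        // block dominated by symbol 0
	b := []float64{0, 0, 10, 90}        // block dominated by symbol 3
	merged := []float64{90, 10, 10, 90} // the two blocks coded together

	diff := bits(merged) - bits(a) - bits(b)
	fmt.Printf("merge penalty: %.1f bits\n", diff)
	// A large penalty (here ~200 bits) is the case where the splitter
	// keeps the blocks separate and emits a new block type.
}
```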
diff --git a/vendor/github.com/andybalholm/brotli/metablock_distance.go b/vendor/github.com/andybalholm/brotli/metablock_distance.go
new file mode 100644
index 0000000..5110a81
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/metablock_distance.go
@@ -0,0 +1,165 @@
+package brotli
+
+/* Copyright 2015 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Greedy block splitter for one block category (literal, command or distance).
+ */
+type blockSplitterDistance struct {
+ alphabet_size_ uint
+ min_block_size_ uint
+ split_threshold_ float64
+ num_blocks_ uint
+ split_ *blockSplit
+ histograms_ []histogramDistance
+ histograms_size_ *uint
+ target_block_size_ uint
+ block_size_ uint
+ curr_histogram_ix_ uint
+ last_histogram_ix_ [2]uint
+ last_entropy_ [2]float64
+ merge_last_count_ uint
+}
+
+func initBlockSplitterDistance(self *blockSplitterDistance, alphabet_size uint, min_block_size uint, split_threshold float64, num_symbols uint, split *blockSplit, histograms *[]histogramDistance, histograms_size *uint) {
+ var max_num_blocks uint = num_symbols/min_block_size + 1
+ var max_num_types uint = brotli_min_size_t(max_num_blocks, maxNumberOfBlockTypes+1)
+ /* We have to allocate one more histogram than the maximum number of block
+ types for the current histogram when the meta-block is too big. */
+ self.alphabet_size_ = alphabet_size
+
+ self.min_block_size_ = min_block_size
+ self.split_threshold_ = split_threshold
+ self.num_blocks_ = 0
+ self.split_ = split
+ self.histograms_size_ = histograms_size
+ self.target_block_size_ = min_block_size
+ self.block_size_ = 0
+ self.curr_histogram_ix_ = 0
+ self.merge_last_count_ = 0
+ brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, max_num_blocks)
+ brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, max_num_blocks)
+ self.split_.num_blocks = max_num_blocks
+ *histograms_size = max_num_types
+ if histograms == nil || cap(*histograms) < int(*histograms_size) {
+ *histograms = make([]histogramDistance, *histograms_size)
+ } else {
+ *histograms = (*histograms)[:*histograms_size]
+ }
+ self.histograms_ = *histograms
+
+ /* Clear only current histogram. */
+ histogramClearDistance(&self.histograms_[0])
+
+ self.last_histogram_ix_[1] = 0
+ self.last_histogram_ix_[0] = self.last_histogram_ix_[1]
+}
+
+/* Does one of three things:
+ (1) emits the current block with a new block type;
+ (2) emits the current block with the type of the second last block;
+ (3) merges the current block with the last block. */
+func blockSplitterFinishBlockDistance(self *blockSplitterDistance, is_final bool) {
+ var split *blockSplit = self.split_
+ var last_entropy []float64 = self.last_entropy_[:]
+ var histograms []histogramDistance = self.histograms_
+ self.block_size_ = brotli_max_size_t(self.block_size_, self.min_block_size_)
+ if self.num_blocks_ == 0 {
+ /* Create first block. */
+ split.lengths[0] = uint32(self.block_size_)
+
+ split.types[0] = 0
+ last_entropy[0] = bitsEntropy(histograms[0].data_[:], self.alphabet_size_)
+ last_entropy[1] = last_entropy[0]
+ self.num_blocks_++
+ split.num_types++
+ self.curr_histogram_ix_++
+ if self.curr_histogram_ix_ < *self.histograms_size_ {
+ histogramClearDistance(&histograms[self.curr_histogram_ix_])
+ }
+ self.block_size_ = 0
+ } else if self.block_size_ > 0 {
+ var entropy float64 = bitsEntropy(histograms[self.curr_histogram_ix_].data_[:], self.alphabet_size_)
+ var combined_histo [2]histogramDistance
+ var combined_entropy [2]float64
+ var diff [2]float64
+ var j uint
+ for j = 0; j < 2; j++ {
+ var last_histogram_ix uint = self.last_histogram_ix_[j]
+ combined_histo[j] = histograms[self.curr_histogram_ix_]
+ histogramAddHistogramDistance(&combined_histo[j], &histograms[last_histogram_ix])
+ combined_entropy[j] = bitsEntropy(combined_histo[j].data_[0:], self.alphabet_size_)
+ diff[j] = combined_entropy[j] - entropy - last_entropy[j]
+ }
+
+ if split.num_types < maxNumberOfBlockTypes && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ {
+ /* Create new block. */
+ split.lengths[self.num_blocks_] = uint32(self.block_size_)
+
+ split.types[self.num_blocks_] = byte(split.num_types)
+ self.last_histogram_ix_[1] = self.last_histogram_ix_[0]
+ self.last_histogram_ix_[0] = uint(byte(split.num_types))
+ last_entropy[1] = last_entropy[0]
+ last_entropy[0] = entropy
+ self.num_blocks_++
+ split.num_types++
+ self.curr_histogram_ix_++
+ if self.curr_histogram_ix_ < *self.histograms_size_ {
+ histogramClearDistance(&histograms[self.curr_histogram_ix_])
+ }
+ self.block_size_ = 0
+ self.merge_last_count_ = 0
+ self.target_block_size_ = self.min_block_size_
+ } else if diff[1] < diff[0]-20.0 {
+ split.lengths[self.num_blocks_] = uint32(self.block_size_)
+ split.types[self.num_blocks_] = split.types[self.num_blocks_-2]
+ /* Combine this block with second last block. */
+
+ var tmp uint = self.last_histogram_ix_[0]
+ self.last_histogram_ix_[0] = self.last_histogram_ix_[1]
+ self.last_histogram_ix_[1] = tmp
+ histograms[self.last_histogram_ix_[0]] = combined_histo[1]
+ last_entropy[1] = last_entropy[0]
+ last_entropy[0] = combined_entropy[1]
+ self.num_blocks_++
+ self.block_size_ = 0
+ histogramClearDistance(&histograms[self.curr_histogram_ix_])
+ self.merge_last_count_ = 0
+ self.target_block_size_ = self.min_block_size_
+ } else {
+ /* Combine this block with last block. */
+ split.lengths[self.num_blocks_-1] += uint32(self.block_size_)
+
+ histograms[self.last_histogram_ix_[0]] = combined_histo[0]
+ last_entropy[0] = combined_entropy[0]
+ if split.num_types == 1 {
+ last_entropy[1] = last_entropy[0]
+ }
+
+ self.block_size_ = 0
+ histogramClearDistance(&histograms[self.curr_histogram_ix_])
+ self.merge_last_count_++
+ if self.merge_last_count_ > 1 {
+ self.target_block_size_ += self.min_block_size_
+ }
+ }
+ }
+
+ if is_final {
+ *self.histograms_size_ = split.num_types
+ split.num_blocks = self.num_blocks_
+ }
+}
+
+/* Adds the next symbol to the current histogram. When the current histogram
+ reaches the target size, decides on merging the block. */
+func blockSplitterAddSymbolDistance(self *blockSplitterDistance, symbol uint) {
+ histogramAddDistance(&self.histograms_[self.curr_histogram_ix_], symbol)
+ self.block_size_++
+ if self.block_size_ == self.target_block_size_ {
+ blockSplitterFinishBlockDistance(self, false) /* is_final = false */
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/metablock_literal.go b/vendor/github.com/andybalholm/brotli/metablock_literal.go
new file mode 100644
index 0000000..307f8da
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/metablock_literal.go
@@ -0,0 +1,165 @@
+package brotli
+
+/* Copyright 2015 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Greedy block splitter for one block category (literal, command or distance).
+ */
+type blockSplitterLiteral struct {
+ alphabet_size_ uint
+ min_block_size_ uint
+ split_threshold_ float64
+ num_blocks_ uint
+ split_ *blockSplit
+ histograms_ []histogramLiteral
+ histograms_size_ *uint
+ target_block_size_ uint
+ block_size_ uint
+ curr_histogram_ix_ uint
+ last_histogram_ix_ [2]uint
+ last_entropy_ [2]float64
+ merge_last_count_ uint
+}
+
+func initBlockSplitterLiteral(self *blockSplitterLiteral, alphabet_size uint, min_block_size uint, split_threshold float64, num_symbols uint, split *blockSplit, histograms *[]histogramLiteral, histograms_size *uint) {
+ var max_num_blocks uint = num_symbols/min_block_size + 1
+ var max_num_types uint = brotli_min_size_t(max_num_blocks, maxNumberOfBlockTypes+1)
+ /* We have to allocate one more histogram than the maximum number of block
+ types for the current histogram when the meta-block is too big. */
+ self.alphabet_size_ = alphabet_size
+
+ self.min_block_size_ = min_block_size
+ self.split_threshold_ = split_threshold
+ self.num_blocks_ = 0
+ self.split_ = split
+ self.histograms_size_ = histograms_size
+ self.target_block_size_ = min_block_size
+ self.block_size_ = 0
+ self.curr_histogram_ix_ = 0
+ self.merge_last_count_ = 0
+ brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, max_num_blocks)
+ brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, max_num_blocks)
+ self.split_.num_blocks = max_num_blocks
+ *histograms_size = max_num_types
+ if histograms == nil || cap(*histograms) < int(*histograms_size) {
+ *histograms = make([]histogramLiteral, *histograms_size)
+ } else {
+ *histograms = (*histograms)[:*histograms_size]
+ }
+ self.histograms_ = *histograms
+
+ /* Clear only current histogram. */
+ histogramClearLiteral(&self.histograms_[0])
+
+ self.last_histogram_ix_[1] = 0
+ self.last_histogram_ix_[0] = self.last_histogram_ix_[1]
+}
+
+/* Does one of three things:
+ (1) emits the current block with a new block type;
+ (2) emits the current block with the type of the second last block;
+ (3) merges the current block with the last block. */
+func blockSplitterFinishBlockLiteral(self *blockSplitterLiteral, is_final bool) {
+ var split *blockSplit = self.split_
+ var last_entropy []float64 = self.last_entropy_[:]
+ var histograms []histogramLiteral = self.histograms_
+ self.block_size_ = brotli_max_size_t(self.block_size_, self.min_block_size_)
+ if self.num_blocks_ == 0 {
+ /* Create first block. */
+ split.lengths[0] = uint32(self.block_size_)
+
+ split.types[0] = 0
+ last_entropy[0] = bitsEntropy(histograms[0].data_[:], self.alphabet_size_)
+ last_entropy[1] = last_entropy[0]
+ self.num_blocks_++
+ split.num_types++
+ self.curr_histogram_ix_++
+ if self.curr_histogram_ix_ < *self.histograms_size_ {
+ histogramClearLiteral(&histograms[self.curr_histogram_ix_])
+ }
+ self.block_size_ = 0
+ } else if self.block_size_ > 0 {
+ var entropy float64 = bitsEntropy(histograms[self.curr_histogram_ix_].data_[:], self.alphabet_size_)
+ var combined_histo [2]histogramLiteral
+ var combined_entropy [2]float64
+ var diff [2]float64
+ var j uint
+ for j = 0; j < 2; j++ {
+ var last_histogram_ix uint = self.last_histogram_ix_[j]
+ combined_histo[j] = histograms[self.curr_histogram_ix_]
+ histogramAddHistogramLiteral(&combined_histo[j], &histograms[last_histogram_ix])
+ combined_entropy[j] = bitsEntropy(combined_histo[j].data_[0:], self.alphabet_size_)
+ diff[j] = combined_entropy[j] - entropy - last_entropy[j]
+ }
+
+ if split.num_types < maxNumberOfBlockTypes && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ {
+ /* Create new block. */
+ split.lengths[self.num_blocks_] = uint32(self.block_size_)
+
+ split.types[self.num_blocks_] = byte(split.num_types)
+ self.last_histogram_ix_[1] = self.last_histogram_ix_[0]
+ self.last_histogram_ix_[0] = uint(byte(split.num_types))
+ last_entropy[1] = last_entropy[0]
+ last_entropy[0] = entropy
+ self.num_blocks_++
+ split.num_types++
+ self.curr_histogram_ix_++
+ if self.curr_histogram_ix_ < *self.histograms_size_ {
+ histogramClearLiteral(&histograms[self.curr_histogram_ix_])
+ }
+ self.block_size_ = 0
+ self.merge_last_count_ = 0
+ self.target_block_size_ = self.min_block_size_
+ } else if diff[1] < diff[0]-20.0 {
+ split.lengths[self.num_blocks_] = uint32(self.block_size_)
+ split.types[self.num_blocks_] = split.types[self.num_blocks_-2]
+ /* Combine this block with second last block. */
+
+ var tmp uint = self.last_histogram_ix_[0]
+ self.last_histogram_ix_[0] = self.last_histogram_ix_[1]
+ self.last_histogram_ix_[1] = tmp
+ histograms[self.last_histogram_ix_[0]] = combined_histo[1]
+ last_entropy[1] = last_entropy[0]
+ last_entropy[0] = combined_entropy[1]
+ self.num_blocks_++
+ self.block_size_ = 0
+ histogramClearLiteral(&histograms[self.curr_histogram_ix_])
+ self.merge_last_count_ = 0
+ self.target_block_size_ = self.min_block_size_
+ } else {
+ /* Combine this block with last block. */
+ split.lengths[self.num_blocks_-1] += uint32(self.block_size_)
+
+ histograms[self.last_histogram_ix_[0]] = combined_histo[0]
+ last_entropy[0] = combined_entropy[0]
+ if split.num_types == 1 {
+ last_entropy[1] = last_entropy[0]
+ }
+
+ self.block_size_ = 0
+ histogramClearLiteral(&histograms[self.curr_histogram_ix_])
+ self.merge_last_count_++
+ if self.merge_last_count_ > 1 {
+ self.target_block_size_ += self.min_block_size_
+ }
+ }
+ }
+
+ if is_final {
+ *self.histograms_size_ = split.num_types
+ split.num_blocks = self.num_blocks_
+ }
+}
+
+/* Adds the next symbol to the current histogram. When the current histogram
+ reaches the target size, decides on merging the block. */
+func blockSplitterAddSymbolLiteral(self *blockSplitterLiteral, symbol uint) {
+ histogramAddLiteral(&self.histograms_[self.curr_histogram_ix_], symbol)
+ self.block_size_++
+ if self.block_size_ == self.target_block_size_ {
+ blockSplitterFinishBlockLiteral(self, false) /* is_final = false */
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/params.go b/vendor/github.com/andybalholm/brotli/params.go
new file mode 100644
index 0000000..0a4c687
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/params.go
@@ -0,0 +1,37 @@
+package brotli
+
+/* Copyright 2017 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Parameters for the Brotli encoder with chosen quality levels. */
+type hasherParams struct {
+ type_ int
+ bucket_bits int
+ block_bits int
+ hash_len int
+ num_last_distances_to_check int
+}
+
+type distanceParams struct {
+ distance_postfix_bits uint32
+ num_direct_distance_codes uint32
+ alphabet_size uint32
+ max_distance uint
+}
+
+/* Encoding parameters */
+type encoderParams struct {
+ mode int
+ quality int
+ lgwin uint
+ lgblock int
+ size_hint uint
+ disable_literal_context_modeling bool
+ large_window bool
+ hasher hasherParams
+ dist distanceParams
+ dictionary encoderDictionary
+}
diff --git a/vendor/github.com/andybalholm/brotli/platform.go b/vendor/github.com/andybalholm/brotli/platform.go
new file mode 100644
index 0000000..4ebfb15
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/platform.go
@@ -0,0 +1,103 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+func brotli_min_double(a float64, b float64) float64 {
+ if a < b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_max_double(a float64, b float64) float64 {
+ if a > b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_min_float(a float32, b float32) float32 {
+ if a < b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_max_float(a float32, b float32) float32 {
+ if a > b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_min_int(a int, b int) int {
+ if a < b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_max_int(a int, b int) int {
+ if a > b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_min_size_t(a uint, b uint) uint {
+ if a < b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_max_size_t(a uint, b uint) uint {
+ if a > b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_min_uint32_t(a uint32, b uint32) uint32 {
+ if a < b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_max_uint32_t(a uint32, b uint32) uint32 {
+ if a > b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_min_uint8_t(a byte, b byte) byte {
+ if a < b {
+ return a
+ } else {
+ return b
+ }
+}
+
+func brotli_max_uint8_t(a byte, b byte) byte {
+ if a > b {
+ return a
+ } else {
+ return b
+ }
+}
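These per-type min/max helpers mirror the C macros the package was machine-translated from. On Go 1.18+, a single generic helper could express all of them; a sketch (illustrative only, not a suggested change to the vendored file):

```go
package main

import "fmt"

// ordered lists the element types the brotli helpers above cover.
type ordered interface {
	~int | ~uint | ~uint8 | ~uint32 | ~float32 | ~float64
}

// minVal replaces every brotli_min_* helper with one generic function.
func minVal[T ordered](a, b T) T {
	if a < b {
		return a
	}
	return b
}

func main() {
	fmt.Println(minVal(3, 5), minVal(uint8(200), uint8(100)), minVal(2.5, 1.5))
	// prints: 3 100 1.5
}
```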
diff --git a/vendor/github.com/andybalholm/brotli/prefix.go b/vendor/github.com/andybalholm/brotli/prefix.go
new file mode 100644
index 0000000..484df0d
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/prefix.go
@@ -0,0 +1,30 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Functions for encoding integers into prefix codes, the number of extra
+ bits, and the actual values of the extra bits. */
+
+/* Here distance_code is an intermediate code, i.e. one of the special codes or
+ the actual distance increased by BROTLI_NUM_DISTANCE_SHORT_CODES - 1. */
+func prefixEncodeCopyDistance(distance_code uint, num_direct_codes uint, postfix_bits uint, code *uint16, extra_bits *uint32) {
+ if distance_code < numDistanceShortCodes+num_direct_codes {
+ *code = uint16(distance_code)
+ *extra_bits = 0
+ return
+ } else {
+ var dist uint = (uint(1) << (postfix_bits + 2)) + (distance_code - numDistanceShortCodes - num_direct_codes)
+ var bucket uint = uint(log2FloorNonZero(dist) - 1)
+ var postfix_mask uint = (1 << postfix_bits) - 1
+ var postfix uint = dist & postfix_mask
+ var prefix uint = (dist >> bucket) & 1
+ var offset uint = (2 + prefix) << bucket
+ var nbits uint = bucket - postfix_bits
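+		/* Worked example, for illustration (postfix_bits = 0,
+		   num_direct_codes = 0): distance_code 16 gives dist = 4, bucket = 1,
+		   postfix = 0, prefix = 0, nbits = 1, so *code carries symbol 16 with
+		   one extra bit and *extra_bits = 0, which decodes back to distance 1. */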
+ *code = uint16(nbits<<10 | (numDistanceShortCodes + num_direct_codes + ((2*(nbits-1) + prefix) << postfix_bits) + postfix))
+ *extra_bits = uint32((dist - offset) >> postfix_bits)
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/prefix_dec.go b/vendor/github.com/andybalholm/brotli/prefix_dec.go
new file mode 100644
index 0000000..183f0d5
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/prefix_dec.go
@@ -0,0 +1,723 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+type cmdLutElement struct {
+ insert_len_extra_bits byte
+ copy_len_extra_bits byte
+ distance_code int8
+ context byte
+ insert_len_offset uint16
+ copy_len_offset uint16
+}
+
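+/* kCmdLut decomposes each of the numCommandSymbols insert-and-copy command
+   symbols into the extra-bit counts and base offsets for the insert and copy
+   lengths, the implicit distance code (0 reuses the last distance, -1 means
+   a distance code follows in the stream), and the distance context derived
+   from the copy length. */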
+var kCmdLut = [numCommandSymbols]cmdLutElement{
+ cmdLutElement{0x00, 0x00, 0, 0x00, 0x0000, 0x0002},
+ cmdLutElement{0x00, 0x00, 0, 0x01, 0x0000, 0x0003},
+ cmdLutElement{0x00, 0x00, 0, 0x02, 0x0000, 0x0004},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0005},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0006},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0007},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0008},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0009},
+ cmdLutElement{0x00, 0x00, 0, 0x00, 0x0001, 0x0002},
+ cmdLutElement{0x00, 0x00, 0, 0x01, 0x0001, 0x0003},
+ cmdLutElement{0x00, 0x00, 0, 0x02, 0x0001, 0x0004},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0005},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0006},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0007},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0008},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0009},
+ cmdLutElement{0x00, 0x00, 0, 0x00, 0x0002, 0x0002},
+ cmdLutElement{0x00, 0x00, 0, 0x01, 0x0002, 0x0003},
+ cmdLutElement{0x00, 0x00, 0, 0x02, 0x0002, 0x0004},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0005},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0006},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0007},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0008},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0009},
+ cmdLutElement{0x00, 0x00, 0, 0x00, 0x0003, 0x0002},
+ cmdLutElement{0x00, 0x00, 0, 0x01, 0x0003, 0x0003},
+ cmdLutElement{0x00, 0x00, 0, 0x02, 0x0003, 0x0004},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0005},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0006},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0007},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0008},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0009},
+ cmdLutElement{0x00, 0x00, 0, 0x00, 0x0004, 0x0002},
+ cmdLutElement{0x00, 0x00, 0, 0x01, 0x0004, 0x0003},
+ cmdLutElement{0x00, 0x00, 0, 0x02, 0x0004, 0x0004},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0005},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0006},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0007},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0008},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0009},
+ cmdLutElement{0x00, 0x00, 0, 0x00, 0x0005, 0x0002},
+ cmdLutElement{0x00, 0x00, 0, 0x01, 0x0005, 0x0003},
+ cmdLutElement{0x00, 0x00, 0, 0x02, 0x0005, 0x0004},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0005},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0006},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0007},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0008},
+ cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0009},
+ cmdLutElement{0x01, 0x00, 0, 0x00, 0x0006, 0x0002},
+ cmdLutElement{0x01, 0x00, 0, 0x01, 0x0006, 0x0003},
+ cmdLutElement{0x01, 0x00, 0, 0x02, 0x0006, 0x0004},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0005},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0006},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0007},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0008},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0009},
+ cmdLutElement{0x01, 0x00, 0, 0x00, 0x0008, 0x0002},
+ cmdLutElement{0x01, 0x00, 0, 0x01, 0x0008, 0x0003},
+ cmdLutElement{0x01, 0x00, 0, 0x02, 0x0008, 0x0004},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0005},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0006},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0007},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0008},
+ cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0009},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0000, 0x000a},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0000, 0x000c},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0000, 0x000e},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0000, 0x0012},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0000, 0x0016},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0000, 0x001e},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0000, 0x0026},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0000, 0x0036},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0001, 0x000a},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0001, 0x000c},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0001, 0x000e},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0001, 0x0012},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0001, 0x0016},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0001, 0x001e},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0001, 0x0026},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0001, 0x0036},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0002, 0x000a},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0002, 0x000c},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0002, 0x000e},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0002, 0x0012},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0002, 0x0016},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0002, 0x001e},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0002, 0x0026},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0002, 0x0036},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0003, 0x000a},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0003, 0x000c},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0003, 0x000e},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0003, 0x0012},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0003, 0x0016},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0003, 0x001e},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0003, 0x0026},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0003, 0x0036},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0004, 0x000a},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0004, 0x000c},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0004, 0x000e},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0004, 0x0012},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0004, 0x0016},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0004, 0x001e},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0004, 0x0026},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0004, 0x0036},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0005, 0x000a},
+ cmdLutElement{0x00, 0x01, 0, 0x03, 0x0005, 0x000c},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0005, 0x000e},
+ cmdLutElement{0x00, 0x02, 0, 0x03, 0x0005, 0x0012},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0005, 0x0016},
+ cmdLutElement{0x00, 0x03, 0, 0x03, 0x0005, 0x001e},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0005, 0x0026},
+ cmdLutElement{0x00, 0x04, 0, 0x03, 0x0005, 0x0036},
+ cmdLutElement{0x01, 0x01, 0, 0x03, 0x0006, 0x000a},
+ cmdLutElement{0x01, 0x01, 0, 0x03, 0x0006, 0x000c},
+ cmdLutElement{0x01, 0x02, 0, 0x03, 0x0006, 0x000e},
+ cmdLutElement{0x01, 0x02, 0, 0x03, 0x0006, 0x0012},
+ cmdLutElement{0x01, 0x03, 0, 0x03, 0x0006, 0x0016},
+ cmdLutElement{0x01, 0x03, 0, 0x03, 0x0006, 0x001e},
+ cmdLutElement{0x01, 0x04, 0, 0x03, 0x0006, 0x0026},
+ cmdLutElement{0x01, 0x04, 0, 0x03, 0x0006, 0x0036},
+ cmdLutElement{0x01, 0x01, 0, 0x03, 0x0008, 0x000a},
+ cmdLutElement{0x01, 0x01, 0, 0x03, 0x0008, 0x000c},
+ cmdLutElement{0x01, 0x02, 0, 0x03, 0x0008, 0x000e},
+ cmdLutElement{0x01, 0x02, 0, 0x03, 0x0008, 0x0012},
+ cmdLutElement{0x01, 0x03, 0, 0x03, 0x0008, 0x0016},
+ cmdLutElement{0x01, 0x03, 0, 0x03, 0x0008, 0x001e},
+ cmdLutElement{0x01, 0x04, 0, 0x03, 0x0008, 0x0026},
+ cmdLutElement{0x01, 0x04, 0, 0x03, 0x0008, 0x0036},
+ cmdLutElement{0x00, 0x00, -1, 0x00, 0x0000, 0x0002},
+ cmdLutElement{0x00, 0x00, -1, 0x01, 0x0000, 0x0003},
+ cmdLutElement{0x00, 0x00, -1, 0x02, 0x0000, 0x0004},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0005},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0006},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0007},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0008},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0009},
+ cmdLutElement{0x00, 0x00, -1, 0x00, 0x0001, 0x0002},
+ cmdLutElement{0x00, 0x00, -1, 0x01, 0x0001, 0x0003},
+ cmdLutElement{0x00, 0x00, -1, 0x02, 0x0001, 0x0004},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0005},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0006},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0007},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0008},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0009},
+ cmdLutElement{0x00, 0x00, -1, 0x00, 0x0002, 0x0002},
+ cmdLutElement{0x00, 0x00, -1, 0x01, 0x0002, 0x0003},
+ cmdLutElement{0x00, 0x00, -1, 0x02, 0x0002, 0x0004},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0005},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0006},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0007},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0008},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0009},
+ cmdLutElement{0x00, 0x00, -1, 0x00, 0x0003, 0x0002},
+ cmdLutElement{0x00, 0x00, -1, 0x01, 0x0003, 0x0003},
+ cmdLutElement{0x00, 0x00, -1, 0x02, 0x0003, 0x0004},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0005},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0006},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0007},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0008},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0009},
+ cmdLutElement{0x00, 0x00, -1, 0x00, 0x0004, 0x0002},
+ cmdLutElement{0x00, 0x00, -1, 0x01, 0x0004, 0x0003},
+ cmdLutElement{0x00, 0x00, -1, 0x02, 0x0004, 0x0004},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0005},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0006},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0007},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0008},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0009},
+ cmdLutElement{0x00, 0x00, -1, 0x00, 0x0005, 0x0002},
+ cmdLutElement{0x00, 0x00, -1, 0x01, 0x0005, 0x0003},
+ cmdLutElement{0x00, 0x00, -1, 0x02, 0x0005, 0x0004},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0005},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0006},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0007},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0008},
+ cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0009},
+ cmdLutElement{0x01, 0x00, -1, 0x00, 0x0006, 0x0002},
+ cmdLutElement{0x01, 0x00, -1, 0x01, 0x0006, 0x0003},
+ cmdLutElement{0x01, 0x00, -1, 0x02, 0x0006, 0x0004},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0005},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0006},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0007},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0008},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0009},
+ cmdLutElement{0x01, 0x00, -1, 0x00, 0x0008, 0x0002},
+ cmdLutElement{0x01, 0x00, -1, 0x01, 0x0008, 0x0003},
+ cmdLutElement{0x01, 0x00, -1, 0x02, 0x0008, 0x0004},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0005},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0006},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0007},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0008},
+ cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0009},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0000, 0x000a},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0000, 0x000c},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0000, 0x000e},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0000, 0x0012},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0000, 0x0016},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0000, 0x001e},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0000, 0x0026},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0000, 0x0036},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0001, 0x000a},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0001, 0x000c},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0001, 0x000e},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0001, 0x0012},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0001, 0x0016},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0001, 0x001e},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0001, 0x0026},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0001, 0x0036},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0002, 0x000a},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0002, 0x000c},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0002, 0x000e},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0002, 0x0012},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0002, 0x0016},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0002, 0x001e},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0002, 0x0026},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0002, 0x0036},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0003, 0x000a},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0003, 0x000c},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0003, 0x000e},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0003, 0x0012},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0003, 0x0016},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0003, 0x001e},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0003, 0x0026},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0003, 0x0036},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0004, 0x000a},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0004, 0x000c},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0004, 0x000e},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0004, 0x0012},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0004, 0x0016},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0004, 0x001e},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0004, 0x0026},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0004, 0x0036},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0005, 0x000a},
+ cmdLutElement{0x00, 0x01, -1, 0x03, 0x0005, 0x000c},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0005, 0x000e},
+ cmdLutElement{0x00, 0x02, -1, 0x03, 0x0005, 0x0012},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0005, 0x0016},
+ cmdLutElement{0x00, 0x03, -1, 0x03, 0x0005, 0x001e},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0005, 0x0026},
+ cmdLutElement{0x00, 0x04, -1, 0x03, 0x0005, 0x0036},
+ cmdLutElement{0x01, 0x01, -1, 0x03, 0x0006, 0x000a},
+ cmdLutElement{0x01, 0x01, -1, 0x03, 0x0006, 0x000c},
+ cmdLutElement{0x01, 0x02, -1, 0x03, 0x0006, 0x000e},
+ cmdLutElement{0x01, 0x02, -1, 0x03, 0x0006, 0x0012},
+ cmdLutElement{0x01, 0x03, -1, 0x03, 0x0006, 0x0016},
+ cmdLutElement{0x01, 0x03, -1, 0x03, 0x0006, 0x001e},
+ cmdLutElement{0x01, 0x04, -1, 0x03, 0x0006, 0x0026},
+ cmdLutElement{0x01, 0x04, -1, 0x03, 0x0006, 0x0036},
+ cmdLutElement{0x01, 0x01, -1, 0x03, 0x0008, 0x000a},
+ cmdLutElement{0x01, 0x01, -1, 0x03, 0x0008, 0x000c},
+ cmdLutElement{0x01, 0x02, -1, 0x03, 0x0008, 0x000e},
+ cmdLutElement{0x01, 0x02, -1, 0x03, 0x0008, 0x0012},
+ cmdLutElement{0x01, 0x03, -1, 0x03, 0x0008, 0x0016},
+ cmdLutElement{0x01, 0x03, -1, 0x03, 0x0008, 0x001e},
+ cmdLutElement{0x01, 0x04, -1, 0x03, 0x0008, 0x0026},
+ cmdLutElement{0x01, 0x04, -1, 0x03, 0x0008, 0x0036},
+ cmdLutElement{0x02, 0x00, -1, 0x00, 0x000a, 0x0002},
+ cmdLutElement{0x02, 0x00, -1, 0x01, 0x000a, 0x0003},
+ cmdLutElement{0x02, 0x00, -1, 0x02, 0x000a, 0x0004},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0005},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0006},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0007},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0008},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0009},
+ cmdLutElement{0x02, 0x00, -1, 0x00, 0x000e, 0x0002},
+ cmdLutElement{0x02, 0x00, -1, 0x01, 0x000e, 0x0003},
+ cmdLutElement{0x02, 0x00, -1, 0x02, 0x000e, 0x0004},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0005},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0006},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0007},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0008},
+ cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0009},
+ cmdLutElement{0x03, 0x00, -1, 0x00, 0x0012, 0x0002},
+ cmdLutElement{0x03, 0x00, -1, 0x01, 0x0012, 0x0003},
+ cmdLutElement{0x03, 0x00, -1, 0x02, 0x0012, 0x0004},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0005},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0006},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0007},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0008},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0009},
+ cmdLutElement{0x03, 0x00, -1, 0x00, 0x001a, 0x0002},
+ cmdLutElement{0x03, 0x00, -1, 0x01, 0x001a, 0x0003},
+ cmdLutElement{0x03, 0x00, -1, 0x02, 0x001a, 0x0004},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0005},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0006},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0007},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0008},
+ cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0009},
+ cmdLutElement{0x04, 0x00, -1, 0x00, 0x0022, 0x0002},
+ cmdLutElement{0x04, 0x00, -1, 0x01, 0x0022, 0x0003},
+ cmdLutElement{0x04, 0x00, -1, 0x02, 0x0022, 0x0004},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0005},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0006},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0007},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0008},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0009},
+ cmdLutElement{0x04, 0x00, -1, 0x00, 0x0032, 0x0002},
+ cmdLutElement{0x04, 0x00, -1, 0x01, 0x0032, 0x0003},
+ cmdLutElement{0x04, 0x00, -1, 0x02, 0x0032, 0x0004},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0005},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0006},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0007},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0008},
+ cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0009},
+ cmdLutElement{0x05, 0x00, -1, 0x00, 0x0042, 0x0002},
+ cmdLutElement{0x05, 0x00, -1, 0x01, 0x0042, 0x0003},
+ cmdLutElement{0x05, 0x00, -1, 0x02, 0x0042, 0x0004},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0005},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0006},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0007},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0008},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0009},
+ cmdLutElement{0x05, 0x00, -1, 0x00, 0x0062, 0x0002},
+ cmdLutElement{0x05, 0x00, -1, 0x01, 0x0062, 0x0003},
+ cmdLutElement{0x05, 0x00, -1, 0x02, 0x0062, 0x0004},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0005},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0006},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0007},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0008},
+ cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0009},
+ cmdLutElement{0x02, 0x01, -1, 0x03, 0x000a, 0x000a},
+ cmdLutElement{0x02, 0x01, -1, 0x03, 0x000a, 0x000c},
+ cmdLutElement{0x02, 0x02, -1, 0x03, 0x000a, 0x000e},
+ cmdLutElement{0x02, 0x02, -1, 0x03, 0x000a, 0x0012},
+ cmdLutElement{0x02, 0x03, -1, 0x03, 0x000a, 0x0016},
+ cmdLutElement{0x02, 0x03, -1, 0x03, 0x000a, 0x001e},
+ cmdLutElement{0x02, 0x04, -1, 0x03, 0x000a, 0x0026},
+ cmdLutElement{0x02, 0x04, -1, 0x03, 0x000a, 0x0036},
+ cmdLutElement{0x02, 0x01, -1, 0x03, 0x000e, 0x000a},
+ cmdLutElement{0x02, 0x01, -1, 0x03, 0x000e, 0x000c},
+ cmdLutElement{0x02, 0x02, -1, 0x03, 0x000e, 0x000e},
+ cmdLutElement{0x02, 0x02, -1, 0x03, 0x000e, 0x0012},
+ cmdLutElement{0x02, 0x03, -1, 0x03, 0x000e, 0x0016},
+ cmdLutElement{0x02, 0x03, -1, 0x03, 0x000e, 0x001e},
+ cmdLutElement{0x02, 0x04, -1, 0x03, 0x000e, 0x0026},
+ cmdLutElement{0x02, 0x04, -1, 0x03, 0x000e, 0x0036},
+ cmdLutElement{0x03, 0x01, -1, 0x03, 0x0012, 0x000a},
+ cmdLutElement{0x03, 0x01, -1, 0x03, 0x0012, 0x000c},
+ cmdLutElement{0x03, 0x02, -1, 0x03, 0x0012, 0x000e},
+ cmdLutElement{0x03, 0x02, -1, 0x03, 0x0012, 0x0012},
+ cmdLutElement{0x03, 0x03, -1, 0x03, 0x0012, 0x0016},
+ cmdLutElement{0x03, 0x03, -1, 0x03, 0x0012, 0x001e},
+ cmdLutElement{0x03, 0x04, -1, 0x03, 0x0012, 0x0026},
+ cmdLutElement{0x03, 0x04, -1, 0x03, 0x0012, 0x0036},
+ cmdLutElement{0x03, 0x01, -1, 0x03, 0x001a, 0x000a},
+ cmdLutElement{0x03, 0x01, -1, 0x03, 0x001a, 0x000c},
+ cmdLutElement{0x03, 0x02, -1, 0x03, 0x001a, 0x000e},
+ cmdLutElement{0x03, 0x02, -1, 0x03, 0x001a, 0x0012},
+ cmdLutElement{0x03, 0x03, -1, 0x03, 0x001a, 0x0016},
+ cmdLutElement{0x03, 0x03, -1, 0x03, 0x001a, 0x001e},
+ cmdLutElement{0x03, 0x04, -1, 0x03, 0x001a, 0x0026},
+ cmdLutElement{0x03, 0x04, -1, 0x03, 0x001a, 0x0036},
+ cmdLutElement{0x04, 0x01, -1, 0x03, 0x0022, 0x000a},
+ cmdLutElement{0x04, 0x01, -1, 0x03, 0x0022, 0x000c},
+ cmdLutElement{0x04, 0x02, -1, 0x03, 0x0022, 0x000e},
+ cmdLutElement{0x04, 0x02, -1, 0x03, 0x0022, 0x0012},
+ cmdLutElement{0x04, 0x03, -1, 0x03, 0x0022, 0x0016},
+ cmdLutElement{0x04, 0x03, -1, 0x03, 0x0022, 0x001e},
+ cmdLutElement{0x04, 0x04, -1, 0x03, 0x0022, 0x0026},
+ cmdLutElement{0x04, 0x04, -1, 0x03, 0x0022, 0x0036},
+ cmdLutElement{0x04, 0x01, -1, 0x03, 0x0032, 0x000a},
+ cmdLutElement{0x04, 0x01, -1, 0x03, 0x0032, 0x000c},
+ cmdLutElement{0x04, 0x02, -1, 0x03, 0x0032, 0x000e},
+ cmdLutElement{0x04, 0x02, -1, 0x03, 0x0032, 0x0012},
+ cmdLutElement{0x04, 0x03, -1, 0x03, 0x0032, 0x0016},
+ cmdLutElement{0x04, 0x03, -1, 0x03, 0x0032, 0x001e},
+ cmdLutElement{0x04, 0x04, -1, 0x03, 0x0032, 0x0026},
+ cmdLutElement{0x04, 0x04, -1, 0x03, 0x0032, 0x0036},
+ cmdLutElement{0x05, 0x01, -1, 0x03, 0x0042, 0x000a},
+ cmdLutElement{0x05, 0x01, -1, 0x03, 0x0042, 0x000c},
+ cmdLutElement{0x05, 0x02, -1, 0x03, 0x0042, 0x000e},
+ cmdLutElement{0x05, 0x02, -1, 0x03, 0x0042, 0x0012},
+ cmdLutElement{0x05, 0x03, -1, 0x03, 0x0042, 0x0016},
+ cmdLutElement{0x05, 0x03, -1, 0x03, 0x0042, 0x001e},
+ cmdLutElement{0x05, 0x04, -1, 0x03, 0x0042, 0x0026},
+ cmdLutElement{0x05, 0x04, -1, 0x03, 0x0042, 0x0036},
+ cmdLutElement{0x05, 0x01, -1, 0x03, 0x0062, 0x000a},
+ cmdLutElement{0x05, 0x01, -1, 0x03, 0x0062, 0x000c},
+ cmdLutElement{0x05, 0x02, -1, 0x03, 0x0062, 0x000e},
+ cmdLutElement{0x05, 0x02, -1, 0x03, 0x0062, 0x0012},
+ cmdLutElement{0x05, 0x03, -1, 0x03, 0x0062, 0x0016},
+ cmdLutElement{0x05, 0x03, -1, 0x03, 0x0062, 0x001e},
+ cmdLutElement{0x05, 0x04, -1, 0x03, 0x0062, 0x0026},
+ cmdLutElement{0x05, 0x04, -1, 0x03, 0x0062, 0x0036},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0000, 0x0046},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0000, 0x0066},
+ cmdLutElement{0x00, 0x06, -1, 0x03, 0x0000, 0x0086},
+ cmdLutElement{0x00, 0x07, -1, 0x03, 0x0000, 0x00c6},
+ cmdLutElement{0x00, 0x08, -1, 0x03, 0x0000, 0x0146},
+ cmdLutElement{0x00, 0x09, -1, 0x03, 0x0000, 0x0246},
+ cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0000, 0x0446},
+ cmdLutElement{0x00, 0x18, -1, 0x03, 0x0000, 0x0846},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0001, 0x0046},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0001, 0x0066},
+ cmdLutElement{0x00, 0x06, -1, 0x03, 0x0001, 0x0086},
+ cmdLutElement{0x00, 0x07, -1, 0x03, 0x0001, 0x00c6},
+ cmdLutElement{0x00, 0x08, -1, 0x03, 0x0001, 0x0146},
+ cmdLutElement{0x00, 0x09, -1, 0x03, 0x0001, 0x0246},
+ cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0001, 0x0446},
+ cmdLutElement{0x00, 0x18, -1, 0x03, 0x0001, 0x0846},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0002, 0x0046},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0002, 0x0066},
+ cmdLutElement{0x00, 0x06, -1, 0x03, 0x0002, 0x0086},
+ cmdLutElement{0x00, 0x07, -1, 0x03, 0x0002, 0x00c6},
+ cmdLutElement{0x00, 0x08, -1, 0x03, 0x0002, 0x0146},
+ cmdLutElement{0x00, 0x09, -1, 0x03, 0x0002, 0x0246},
+ cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0002, 0x0446},
+ cmdLutElement{0x00, 0x18, -1, 0x03, 0x0002, 0x0846},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0003, 0x0046},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0003, 0x0066},
+ cmdLutElement{0x00, 0x06, -1, 0x03, 0x0003, 0x0086},
+ cmdLutElement{0x00, 0x07, -1, 0x03, 0x0003, 0x00c6},
+ cmdLutElement{0x00, 0x08, -1, 0x03, 0x0003, 0x0146},
+ cmdLutElement{0x00, 0x09, -1, 0x03, 0x0003, 0x0246},
+ cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0003, 0x0446},
+ cmdLutElement{0x00, 0x18, -1, 0x03, 0x0003, 0x0846},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0004, 0x0046},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0004, 0x0066},
+ cmdLutElement{0x00, 0x06, -1, 0x03, 0x0004, 0x0086},
+ cmdLutElement{0x00, 0x07, -1, 0x03, 0x0004, 0x00c6},
+ cmdLutElement{0x00, 0x08, -1, 0x03, 0x0004, 0x0146},
+ cmdLutElement{0x00, 0x09, -1, 0x03, 0x0004, 0x0246},
+ cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0004, 0x0446},
+ cmdLutElement{0x00, 0x18, -1, 0x03, 0x0004, 0x0846},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0005, 0x0046},
+ cmdLutElement{0x00, 0x05, -1, 0x03, 0x0005, 0x0066},
+ cmdLutElement{0x00, 0x06, -1, 0x03, 0x0005, 0x0086},
+ cmdLutElement{0x00, 0x07, -1, 0x03, 0x0005, 0x00c6},
+ cmdLutElement{0x00, 0x08, -1, 0x03, 0x0005, 0x0146},
+ cmdLutElement{0x00, 0x09, -1, 0x03, 0x0005, 0x0246},
+ cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0005, 0x0446},
+ cmdLutElement{0x00, 0x18, -1, 0x03, 0x0005, 0x0846},
+ cmdLutElement{0x01, 0x05, -1, 0x03, 0x0006, 0x0046},
+ cmdLutElement{0x01, 0x05, -1, 0x03, 0x0006, 0x0066},
+ cmdLutElement{0x01, 0x06, -1, 0x03, 0x0006, 0x0086},
+ cmdLutElement{0x01, 0x07, -1, 0x03, 0x0006, 0x00c6},
+ cmdLutElement{0x01, 0x08, -1, 0x03, 0x0006, 0x0146},
+ cmdLutElement{0x01, 0x09, -1, 0x03, 0x0006, 0x0246},
+ cmdLutElement{0x01, 0x0a, -1, 0x03, 0x0006, 0x0446},
+ cmdLutElement{0x01, 0x18, -1, 0x03, 0x0006, 0x0846},
+ cmdLutElement{0x01, 0x05, -1, 0x03, 0x0008, 0x0046},
+ cmdLutElement{0x01, 0x05, -1, 0x03, 0x0008, 0x0066},
+ cmdLutElement{0x01, 0x06, -1, 0x03, 0x0008, 0x0086},
+ cmdLutElement{0x01, 0x07, -1, 0x03, 0x0008, 0x00c6},
+ cmdLutElement{0x01, 0x08, -1, 0x03, 0x0008, 0x0146},
+ cmdLutElement{0x01, 0x09, -1, 0x03, 0x0008, 0x0246},
+ cmdLutElement{0x01, 0x0a, -1, 0x03, 0x0008, 0x0446},
+ cmdLutElement{0x01, 0x18, -1, 0x03, 0x0008, 0x0846},
+ cmdLutElement{0x06, 0x00, -1, 0x00, 0x0082, 0x0002},
+ cmdLutElement{0x06, 0x00, -1, 0x01, 0x0082, 0x0003},
+ cmdLutElement{0x06, 0x00, -1, 0x02, 0x0082, 0x0004},
+ cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0005},
+ cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0006},
+ cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0007},
+ cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0008},
+ cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0009},
+ cmdLutElement{0x07, 0x00, -1, 0x00, 0x00c2, 0x0002},
+ cmdLutElement{0x07, 0x00, -1, 0x01, 0x00c2, 0x0003},
+ cmdLutElement{0x07, 0x00, -1, 0x02, 0x00c2, 0x0004},
+ cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0005},
+ cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0006},
+ cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0007},
+ cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0008},
+ cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0009},
+ cmdLutElement{0x08, 0x00, -1, 0x00, 0x0142, 0x0002},
+ cmdLutElement{0x08, 0x00, -1, 0x01, 0x0142, 0x0003},
+ cmdLutElement{0x08, 0x00, -1, 0x02, 0x0142, 0x0004},
+ cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0005},
+ cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0006},
+ cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0007},
+ cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0008},
+ cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0009},
+ cmdLutElement{0x09, 0x00, -1, 0x00, 0x0242, 0x0002},
+ cmdLutElement{0x09, 0x00, -1, 0x01, 0x0242, 0x0003},
+ cmdLutElement{0x09, 0x00, -1, 0x02, 0x0242, 0x0004},
+ cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0005},
+ cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0006},
+ cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0007},
+ cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0008},
+ cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0009},
+ cmdLutElement{0x0a, 0x00, -1, 0x00, 0x0442, 0x0002},
+ cmdLutElement{0x0a, 0x00, -1, 0x01, 0x0442, 0x0003},
+ cmdLutElement{0x0a, 0x00, -1, 0x02, 0x0442, 0x0004},
+ cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0005},
+ cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0006},
+ cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0007},
+ cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0008},
+ cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0009},
+ cmdLutElement{0x0c, 0x00, -1, 0x00, 0x0842, 0x0002},
+ cmdLutElement{0x0c, 0x00, -1, 0x01, 0x0842, 0x0003},
+ cmdLutElement{0x0c, 0x00, -1, 0x02, 0x0842, 0x0004},
+ cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0005},
+ cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0006},
+ cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0007},
+ cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0008},
+ cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0009},
+ cmdLutElement{0x0e, 0x00, -1, 0x00, 0x1842, 0x0002},
+ cmdLutElement{0x0e, 0x00, -1, 0x01, 0x1842, 0x0003},
+ cmdLutElement{0x0e, 0x00, -1, 0x02, 0x1842, 0x0004},
+ cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0005},
+ cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0006},
+ cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0007},
+ cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0008},
+ cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0009},
+ cmdLutElement{0x18, 0x00, -1, 0x00, 0x5842, 0x0002},
+ cmdLutElement{0x18, 0x00, -1, 0x01, 0x5842, 0x0003},
+ cmdLutElement{0x18, 0x00, -1, 0x02, 0x5842, 0x0004},
+ cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0005},
+ cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0006},
+ cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0007},
+ cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0008},
+ cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0009},
+ cmdLutElement{0x02, 0x05, -1, 0x03, 0x000a, 0x0046},
+ cmdLutElement{0x02, 0x05, -1, 0x03, 0x000a, 0x0066},
+ cmdLutElement{0x02, 0x06, -1, 0x03, 0x000a, 0x0086},
+ cmdLutElement{0x02, 0x07, -1, 0x03, 0x000a, 0x00c6},
+ cmdLutElement{0x02, 0x08, -1, 0x03, 0x000a, 0x0146},
+ cmdLutElement{0x02, 0x09, -1, 0x03, 0x000a, 0x0246},
+ cmdLutElement{0x02, 0x0a, -1, 0x03, 0x000a, 0x0446},
+ cmdLutElement{0x02, 0x18, -1, 0x03, 0x000a, 0x0846},
+ cmdLutElement{0x02, 0x05, -1, 0x03, 0x000e, 0x0046},
+ cmdLutElement{0x02, 0x05, -1, 0x03, 0x000e, 0x0066},
+ cmdLutElement{0x02, 0x06, -1, 0x03, 0x000e, 0x0086},
+ cmdLutElement{0x02, 0x07, -1, 0x03, 0x000e, 0x00c6},
+ cmdLutElement{0x02, 0x08, -1, 0x03, 0x000e, 0x0146},
+ cmdLutElement{0x02, 0x09, -1, 0x03, 0x000e, 0x0246},
+ cmdLutElement{0x02, 0x0a, -1, 0x03, 0x000e, 0x0446},
+ cmdLutElement{0x02, 0x18, -1, 0x03, 0x000e, 0x0846},
+ cmdLutElement{0x03, 0x05, -1, 0x03, 0x0012, 0x0046},
+ cmdLutElement{0x03, 0x05, -1, 0x03, 0x0012, 0x0066},
+ cmdLutElement{0x03, 0x06, -1, 0x03, 0x0012, 0x0086},
+ cmdLutElement{0x03, 0x07, -1, 0x03, 0x0012, 0x00c6},
+ cmdLutElement{0x03, 0x08, -1, 0x03, 0x0012, 0x0146},
+ cmdLutElement{0x03, 0x09, -1, 0x03, 0x0012, 0x0246},
+ cmdLutElement{0x03, 0x0a, -1, 0x03, 0x0012, 0x0446},
+ cmdLutElement{0x03, 0x18, -1, 0x03, 0x0012, 0x0846},
+ cmdLutElement{0x03, 0x05, -1, 0x03, 0x001a, 0x0046},
+ cmdLutElement{0x03, 0x05, -1, 0x03, 0x001a, 0x0066},
+ cmdLutElement{0x03, 0x06, -1, 0x03, 0x001a, 0x0086},
+ cmdLutElement{0x03, 0x07, -1, 0x03, 0x001a, 0x00c6},
+ cmdLutElement{0x03, 0x08, -1, 0x03, 0x001a, 0x0146},
+ cmdLutElement{0x03, 0x09, -1, 0x03, 0x001a, 0x0246},
+ cmdLutElement{0x03, 0x0a, -1, 0x03, 0x001a, 0x0446},
+ cmdLutElement{0x03, 0x18, -1, 0x03, 0x001a, 0x0846},
+ cmdLutElement{0x04, 0x05, -1, 0x03, 0x0022, 0x0046},
+ cmdLutElement{0x04, 0x05, -1, 0x03, 0x0022, 0x0066},
+ cmdLutElement{0x04, 0x06, -1, 0x03, 0x0022, 0x0086},
+ cmdLutElement{0x04, 0x07, -1, 0x03, 0x0022, 0x00c6},
+ cmdLutElement{0x04, 0x08, -1, 0x03, 0x0022, 0x0146},
+ cmdLutElement{0x04, 0x09, -1, 0x03, 0x0022, 0x0246},
+ cmdLutElement{0x04, 0x0a, -1, 0x03, 0x0022, 0x0446},
+ cmdLutElement{0x04, 0x18, -1, 0x03, 0x0022, 0x0846},
+ cmdLutElement{0x04, 0x05, -1, 0x03, 0x0032, 0x0046},
+ cmdLutElement{0x04, 0x05, -1, 0x03, 0x0032, 0x0066},
+ cmdLutElement{0x04, 0x06, -1, 0x03, 0x0032, 0x0086},
+ cmdLutElement{0x04, 0x07, -1, 0x03, 0x0032, 0x00c6},
+ cmdLutElement{0x04, 0x08, -1, 0x03, 0x0032, 0x0146},
+ cmdLutElement{0x04, 0x09, -1, 0x03, 0x0032, 0x0246},
+ cmdLutElement{0x04, 0x0a, -1, 0x03, 0x0032, 0x0446},
+ cmdLutElement{0x04, 0x18, -1, 0x03, 0x0032, 0x0846},
+ cmdLutElement{0x05, 0x05, -1, 0x03, 0x0042, 0x0046},
+ cmdLutElement{0x05, 0x05, -1, 0x03, 0x0042, 0x0066},
+ cmdLutElement{0x05, 0x06, -1, 0x03, 0x0042, 0x0086},
+ cmdLutElement{0x05, 0x07, -1, 0x03, 0x0042, 0x00c6},
+ cmdLutElement{0x05, 0x08, -1, 0x03, 0x0042, 0x0146},
+ cmdLutElement{0x05, 0x09, -1, 0x03, 0x0042, 0x0246},
+ cmdLutElement{0x05, 0x0a, -1, 0x03, 0x0042, 0x0446},
+ cmdLutElement{0x05, 0x18, -1, 0x03, 0x0042, 0x0846},
+ cmdLutElement{0x05, 0x05, -1, 0x03, 0x0062, 0x0046},
+ cmdLutElement{0x05, 0x05, -1, 0x03, 0x0062, 0x0066},
+ cmdLutElement{0x05, 0x06, -1, 0x03, 0x0062, 0x0086},
+ cmdLutElement{0x05, 0x07, -1, 0x03, 0x0062, 0x00c6},
+ cmdLutElement{0x05, 0x08, -1, 0x03, 0x0062, 0x0146},
+ cmdLutElement{0x05, 0x09, -1, 0x03, 0x0062, 0x0246},
+ cmdLutElement{0x05, 0x0a, -1, 0x03, 0x0062, 0x0446},
+ cmdLutElement{0x05, 0x18, -1, 0x03, 0x0062, 0x0846},
+ cmdLutElement{0x06, 0x01, -1, 0x03, 0x0082, 0x000a},
+ cmdLutElement{0x06, 0x01, -1, 0x03, 0x0082, 0x000c},
+ cmdLutElement{0x06, 0x02, -1, 0x03, 0x0082, 0x000e},
+ cmdLutElement{0x06, 0x02, -1, 0x03, 0x0082, 0x0012},
+ cmdLutElement{0x06, 0x03, -1, 0x03, 0x0082, 0x0016},
+ cmdLutElement{0x06, 0x03, -1, 0x03, 0x0082, 0x001e},
+ cmdLutElement{0x06, 0x04, -1, 0x03, 0x0082, 0x0026},
+ cmdLutElement{0x06, 0x04, -1, 0x03, 0x0082, 0x0036},
+ cmdLutElement{0x07, 0x01, -1, 0x03, 0x00c2, 0x000a},
+ cmdLutElement{0x07, 0x01, -1, 0x03, 0x00c2, 0x000c},
+ cmdLutElement{0x07, 0x02, -1, 0x03, 0x00c2, 0x000e},
+ cmdLutElement{0x07, 0x02, -1, 0x03, 0x00c2, 0x0012},
+ cmdLutElement{0x07, 0x03, -1, 0x03, 0x00c2, 0x0016},
+ cmdLutElement{0x07, 0x03, -1, 0x03, 0x00c2, 0x001e},
+ cmdLutElement{0x07, 0x04, -1, 0x03, 0x00c2, 0x0026},
+ cmdLutElement{0x07, 0x04, -1, 0x03, 0x00c2, 0x0036},
+ cmdLutElement{0x08, 0x01, -1, 0x03, 0x0142, 0x000a},
+ cmdLutElement{0x08, 0x01, -1, 0x03, 0x0142, 0x000c},
+ cmdLutElement{0x08, 0x02, -1, 0x03, 0x0142, 0x000e},
+ cmdLutElement{0x08, 0x02, -1, 0x03, 0x0142, 0x0012},
+ cmdLutElement{0x08, 0x03, -1, 0x03, 0x0142, 0x0016},
+ cmdLutElement{0x08, 0x03, -1, 0x03, 0x0142, 0x001e},
+ cmdLutElement{0x08, 0x04, -1, 0x03, 0x0142, 0x0026},
+ cmdLutElement{0x08, 0x04, -1, 0x03, 0x0142, 0x0036},
+ cmdLutElement{0x09, 0x01, -1, 0x03, 0x0242, 0x000a},
+ cmdLutElement{0x09, 0x01, -1, 0x03, 0x0242, 0x000c},
+ cmdLutElement{0x09, 0x02, -1, 0x03, 0x0242, 0x000e},
+ cmdLutElement{0x09, 0x02, -1, 0x03, 0x0242, 0x0012},
+ cmdLutElement{0x09, 0x03, -1, 0x03, 0x0242, 0x0016},
+ cmdLutElement{0x09, 0x03, -1, 0x03, 0x0242, 0x001e},
+ cmdLutElement{0x09, 0x04, -1, 0x03, 0x0242, 0x0026},
+ cmdLutElement{0x09, 0x04, -1, 0x03, 0x0242, 0x0036},
+ cmdLutElement{0x0a, 0x01, -1, 0x03, 0x0442, 0x000a},
+ cmdLutElement{0x0a, 0x01, -1, 0x03, 0x0442, 0x000c},
+ cmdLutElement{0x0a, 0x02, -1, 0x03, 0x0442, 0x000e},
+ cmdLutElement{0x0a, 0x02, -1, 0x03, 0x0442, 0x0012},
+ cmdLutElement{0x0a, 0x03, -1, 0x03, 0x0442, 0x0016},
+ cmdLutElement{0x0a, 0x03, -1, 0x03, 0x0442, 0x001e},
+ cmdLutElement{0x0a, 0x04, -1, 0x03, 0x0442, 0x0026},
+ cmdLutElement{0x0a, 0x04, -1, 0x03, 0x0442, 0x0036},
+ cmdLutElement{0x0c, 0x01, -1, 0x03, 0x0842, 0x000a},
+ cmdLutElement{0x0c, 0x01, -1, 0x03, 0x0842, 0x000c},
+ cmdLutElement{0x0c, 0x02, -1, 0x03, 0x0842, 0x000e},
+ cmdLutElement{0x0c, 0x02, -1, 0x03, 0x0842, 0x0012},
+ cmdLutElement{0x0c, 0x03, -1, 0x03, 0x0842, 0x0016},
+ cmdLutElement{0x0c, 0x03, -1, 0x03, 0x0842, 0x001e},
+ cmdLutElement{0x0c, 0x04, -1, 0x03, 0x0842, 0x0026},
+ cmdLutElement{0x0c, 0x04, -1, 0x03, 0x0842, 0x0036},
+ cmdLutElement{0x0e, 0x01, -1, 0x03, 0x1842, 0x000a},
+ cmdLutElement{0x0e, 0x01, -1, 0x03, 0x1842, 0x000c},
+ cmdLutElement{0x0e, 0x02, -1, 0x03, 0x1842, 0x000e},
+ cmdLutElement{0x0e, 0x02, -1, 0x03, 0x1842, 0x0012},
+ cmdLutElement{0x0e, 0x03, -1, 0x03, 0x1842, 0x0016},
+ cmdLutElement{0x0e, 0x03, -1, 0x03, 0x1842, 0x001e},
+ cmdLutElement{0x0e, 0x04, -1, 0x03, 0x1842, 0x0026},
+ cmdLutElement{0x0e, 0x04, -1, 0x03, 0x1842, 0x0036},
+ cmdLutElement{0x18, 0x01, -1, 0x03, 0x5842, 0x000a},
+ cmdLutElement{0x18, 0x01, -1, 0x03, 0x5842, 0x000c},
+ cmdLutElement{0x18, 0x02, -1, 0x03, 0x5842, 0x000e},
+ cmdLutElement{0x18, 0x02, -1, 0x03, 0x5842, 0x0012},
+ cmdLutElement{0x18, 0x03, -1, 0x03, 0x5842, 0x0016},
+ cmdLutElement{0x18, 0x03, -1, 0x03, 0x5842, 0x001e},
+ cmdLutElement{0x18, 0x04, -1, 0x03, 0x5842, 0x0026},
+ cmdLutElement{0x18, 0x04, -1, 0x03, 0x5842, 0x0036},
+ cmdLutElement{0x06, 0x05, -1, 0x03, 0x0082, 0x0046},
+ cmdLutElement{0x06, 0x05, -1, 0x03, 0x0082, 0x0066},
+ cmdLutElement{0x06, 0x06, -1, 0x03, 0x0082, 0x0086},
+ cmdLutElement{0x06, 0x07, -1, 0x03, 0x0082, 0x00c6},
+ cmdLutElement{0x06, 0x08, -1, 0x03, 0x0082, 0x0146},
+ cmdLutElement{0x06, 0x09, -1, 0x03, 0x0082, 0x0246},
+ cmdLutElement{0x06, 0x0a, -1, 0x03, 0x0082, 0x0446},
+ cmdLutElement{0x06, 0x18, -1, 0x03, 0x0082, 0x0846},
+ cmdLutElement{0x07, 0x05, -1, 0x03, 0x00c2, 0x0046},
+ cmdLutElement{0x07, 0x05, -1, 0x03, 0x00c2, 0x0066},
+ cmdLutElement{0x07, 0x06, -1, 0x03, 0x00c2, 0x0086},
+ cmdLutElement{0x07, 0x07, -1, 0x03, 0x00c2, 0x00c6},
+ cmdLutElement{0x07, 0x08, -1, 0x03, 0x00c2, 0x0146},
+ cmdLutElement{0x07, 0x09, -1, 0x03, 0x00c2, 0x0246},
+ cmdLutElement{0x07, 0x0a, -1, 0x03, 0x00c2, 0x0446},
+ cmdLutElement{0x07, 0x18, -1, 0x03, 0x00c2, 0x0846},
+ cmdLutElement{0x08, 0x05, -1, 0x03, 0x0142, 0x0046},
+ cmdLutElement{0x08, 0x05, -1, 0x03, 0x0142, 0x0066},
+ cmdLutElement{0x08, 0x06, -1, 0x03, 0x0142, 0x0086},
+ cmdLutElement{0x08, 0x07, -1, 0x03, 0x0142, 0x00c6},
+ cmdLutElement{0x08, 0x08, -1, 0x03, 0x0142, 0x0146},
+ cmdLutElement{0x08, 0x09, -1, 0x03, 0x0142, 0x0246},
+ cmdLutElement{0x08, 0x0a, -1, 0x03, 0x0142, 0x0446},
+ cmdLutElement{0x08, 0x18, -1, 0x03, 0x0142, 0x0846},
+ cmdLutElement{0x09, 0x05, -1, 0x03, 0x0242, 0x0046},
+ cmdLutElement{0x09, 0x05, -1, 0x03, 0x0242, 0x0066},
+ cmdLutElement{0x09, 0x06, -1, 0x03, 0x0242, 0x0086},
+ cmdLutElement{0x09, 0x07, -1, 0x03, 0x0242, 0x00c6},
+ cmdLutElement{0x09, 0x08, -1, 0x03, 0x0242, 0x0146},
+ cmdLutElement{0x09, 0x09, -1, 0x03, 0x0242, 0x0246},
+ cmdLutElement{0x09, 0x0a, -1, 0x03, 0x0242, 0x0446},
+ cmdLutElement{0x09, 0x18, -1, 0x03, 0x0242, 0x0846},
+ cmdLutElement{0x0a, 0x05, -1, 0x03, 0x0442, 0x0046},
+ cmdLutElement{0x0a, 0x05, -1, 0x03, 0x0442, 0x0066},
+ cmdLutElement{0x0a, 0x06, -1, 0x03, 0x0442, 0x0086},
+ cmdLutElement{0x0a, 0x07, -1, 0x03, 0x0442, 0x00c6},
+ cmdLutElement{0x0a, 0x08, -1, 0x03, 0x0442, 0x0146},
+ cmdLutElement{0x0a, 0x09, -1, 0x03, 0x0442, 0x0246},
+ cmdLutElement{0x0a, 0x0a, -1, 0x03, 0x0442, 0x0446},
+ cmdLutElement{0x0a, 0x18, -1, 0x03, 0x0442, 0x0846},
+ cmdLutElement{0x0c, 0x05, -1, 0x03, 0x0842, 0x0046},
+ cmdLutElement{0x0c, 0x05, -1, 0x03, 0x0842, 0x0066},
+ cmdLutElement{0x0c, 0x06, -1, 0x03, 0x0842, 0x0086},
+ cmdLutElement{0x0c, 0x07, -1, 0x03, 0x0842, 0x00c6},
+ cmdLutElement{0x0c, 0x08, -1, 0x03, 0x0842, 0x0146},
+ cmdLutElement{0x0c, 0x09, -1, 0x03, 0x0842, 0x0246},
+ cmdLutElement{0x0c, 0x0a, -1, 0x03, 0x0842, 0x0446},
+ cmdLutElement{0x0c, 0x18, -1, 0x03, 0x0842, 0x0846},
+ cmdLutElement{0x0e, 0x05, -1, 0x03, 0x1842, 0x0046},
+ cmdLutElement{0x0e, 0x05, -1, 0x03, 0x1842, 0x0066},
+ cmdLutElement{0x0e, 0x06, -1, 0x03, 0x1842, 0x0086},
+ cmdLutElement{0x0e, 0x07, -1, 0x03, 0x1842, 0x00c6},
+ cmdLutElement{0x0e, 0x08, -1, 0x03, 0x1842, 0x0146},
+ cmdLutElement{0x0e, 0x09, -1, 0x03, 0x1842, 0x0246},
+ cmdLutElement{0x0e, 0x0a, -1, 0x03, 0x1842, 0x0446},
+ cmdLutElement{0x0e, 0x18, -1, 0x03, 0x1842, 0x0846},
+ cmdLutElement{0x18, 0x05, -1, 0x03, 0x5842, 0x0046},
+ cmdLutElement{0x18, 0x05, -1, 0x03, 0x5842, 0x0066},
+ cmdLutElement{0x18, 0x06, -1, 0x03, 0x5842, 0x0086},
+ cmdLutElement{0x18, 0x07, -1, 0x03, 0x5842, 0x00c6},
+ cmdLutElement{0x18, 0x08, -1, 0x03, 0x5842, 0x0146},
+ cmdLutElement{0x18, 0x09, -1, 0x03, 0x5842, 0x0246},
+ cmdLutElement{0x18, 0x0a, -1, 0x03, 0x5842, 0x0446},
+ cmdLutElement{0x18, 0x18, -1, 0x03, 0x5842, 0x0846},
+}
diff --git a/vendor/github.com/andybalholm/brotli/quality.go b/vendor/github.com/andybalholm/brotli/quality.go
new file mode 100644
index 0000000..49709a3
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/quality.go
@@ -0,0 +1,196 @@
+package brotli
+
+const fastOnePassCompressionQuality = 0
+
+const fastTwoPassCompressionQuality = 1
+
+const zopflificationQuality = 10
+
+const hqZopflificationQuality = 11
+
+const maxQualityForStaticEntropyCodes = 2
+
+const minQualityForBlockSplit = 4
+
+const minQualityForNonzeroDistanceParams = 4
+
+const minQualityForOptimizeHistograms = 4
+
+const minQualityForExtensiveReferenceSearch = 5
+
+const minQualityForContextModeling = 5
+
+const minQualityForHqContextModeling = 7
+
+const minQualityForHqBlockSplitting = 10
+
+/* For qualities below MIN_QUALITY_FOR_BLOCK_SPLIT there is no block
+   splitting, so we buffer at most this many literals and commands. */
+const maxNumDelayedSymbols = 0x2FFF
+
+/* Returns hash-table size for quality levels 0 and 1. */
+func maxHashTableSize(quality int) uint {
+ if quality == fastOnePassCompressionQuality {
+ return 1 << 15
+ } else {
+ return 1 << 17
+ }
+}
+
+/* The maximum length for which the zopflification uses distinct distances. */
+const maxZopfliLenQuality10 = 150
+
+const maxZopfliLenQuality11 = 325
+
+/* Do not thoroughly search when a long copy is found. */
+const longCopyQuickStep = 16384
+
+func maxZopfliLen(params *encoderParams) uint {
+ if params.quality <= 10 {
+ return maxZopfliLenQuality10
+ } else {
+ return maxZopfliLenQuality11
+ }
+}
+
+/* Number of best candidates to evaluate to expand Zopfli chain. */
+func maxZopfliCandidates(params *encoderParams) uint {
+ if params.quality <= 10 {
+ return 1
+ } else {
+ return 5
+ }
+}
+
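+/* sanitizeParams clamps quality to [minQuality, maxQuality] and lgwin to the
+   supported window range, and turns off the large-window extension at the
+   lowest qualities. */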
+func sanitizeParams(params *encoderParams) {
+ params.quality = brotli_min_int(maxQuality, brotli_max_int(minQuality, params.quality))
+ if params.quality <= maxQualityForStaticEntropyCodes {
+ params.large_window = false
+ }
+
+ if params.lgwin < minWindowBits {
+ params.lgwin = minWindowBits
+ } else {
+ var max_lgwin int
+ if params.large_window {
+ max_lgwin = largeMaxWindowBits
+ } else {
+ max_lgwin = maxWindowBits
+ }
+ if params.lgwin > uint(max_lgwin) {
+ params.lgwin = uint(max_lgwin)
+ }
+ }
+}
+
+/* Returns optimized lg_block value. */
+func computeLgBlock(params *encoderParams) int {
+ var lgblock int = params.lgblock
+ if params.quality == fastOnePassCompressionQuality || params.quality == fastTwoPassCompressionQuality {
+ lgblock = int(params.lgwin)
+ } else if params.quality < minQualityForBlockSplit {
+ lgblock = 14
+ } else if lgblock == 0 {
+ lgblock = 16
+ if params.quality >= 9 && params.lgwin > uint(lgblock) {
+ lgblock = brotli_min_int(18, int(params.lgwin))
+ }
+ } else {
+ lgblock = brotli_min_int(maxInputBlockBits, brotli_max_int(minInputBlockBits, lgblock))
+ }
+
+ return lgblock
+}
+
+/* Returns log2 of the size of the main ring buffer area.
+   Allocate at least lgwin + 1 bits for the ring buffer so that the newly
+   added block fits there completely and we still get lgwin bits, and at
+   least read_block_size_bits + 1 bits because the copy tail length needs to
+   be smaller than the ring-buffer size. */
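+/* For example, lgwin = 22 with lgblock = 16 yields 23 bits, i.e. an 8 MiB
+   main ring buffer area, before the tail and slack regions are added. */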
+func computeRbBits(params *encoderParams) int {
+ return 1 + brotli_max_int(int(params.lgwin), params.lgblock)
+}
+
+func maxMetablockSize(params *encoderParams) uint {
+ var bits int = brotli_min_int(computeRbBits(params), maxInputBlockBits)
+ return uint(1) << uint(bits)
+}
+
+/* When searching for backward references, if we have not seen matches for a
+   long time, we can skip some match lookups. Unsuccessful match lookups are
+   very expensive, and this kind of heuristic speeds up compression quite a
+   lot. At first, strides of 8 bytes are taken and every second byte is put
+   into the hasher; after 4x more literals, the stride grows to 16 bytes and
+   every fourth byte is put into the hasher. Applied only to qualities 2 to
+   9. */
+func literalSpreeLengthForSparseSearch(params *encoderParams) uint {
+ if params.quality < 9 {
+ return 64
+ } else {
+ return 512
+ }
+}
+
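+/* chooseHasher picks the backward-reference hasher variant and its
+   parameters based on quality, window size, and input size hint; the type_
+   numbers correspond to the H* hashers of the reference implementation. */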
+func chooseHasher(params *encoderParams, hparams *hasherParams) {
+ if params.quality > 9 {
+ hparams.type_ = 10
+ } else if params.quality == 4 && params.size_hint >= 1<<20 {
+ hparams.type_ = 54
+ } else if params.quality < 5 {
+ hparams.type_ = params.quality
+ } else if params.lgwin <= 16 {
+ if params.quality < 7 {
+ hparams.type_ = 40
+ } else if params.quality < 9 {
+ hparams.type_ = 41
+ } else {
+ hparams.type_ = 42
+ }
+ } else if params.size_hint >= 1<<20 && params.lgwin >= 19 {
+ hparams.type_ = 6
+ hparams.block_bits = params.quality - 1
+ hparams.bucket_bits = 15
+ hparams.hash_len = 5
+ if params.quality < 7 {
+ hparams.num_last_distances_to_check = 4
+ } else if params.quality < 9 {
+ hparams.num_last_distances_to_check = 10
+ } else {
+ hparams.num_last_distances_to_check = 16
+ }
+ } else {
+ hparams.type_ = 5
+ hparams.block_bits = params.quality - 1
+ if params.quality < 7 {
+ hparams.bucket_bits = 14
+ } else {
+ hparams.bucket_bits = 15
+ }
+ if params.quality < 7 {
+ hparams.num_last_distances_to_check = 4
+ } else if params.quality < 9 {
+ hparams.num_last_distances_to_check = 10
+ } else {
+ hparams.num_last_distances_to_check = 16
+ }
+ }
+
+ if params.lgwin > 24 {
+		/* Different hashers for large-window brotli: not for qualities <= 2,
+		   which are too fast for a large window, and not for qualities >= 10,
+		   whose hasher already works well with a large window. So the changes
+		   are:
+		   H3 --> H35: for quality 3.
+		   H54 --> H55: for quality 4 with size hint > 1MB.
+		   H6 --> H65: for qualities 5, 6, 7, 8, 9. */
+ if hparams.type_ == 3 {
+ hparams.type_ = 35
+ }
+
+ if hparams.type_ == 54 {
+ hparams.type_ = 55
+ }
+
+ if hparams.type_ == 6 {
+ hparams.type_ = 65
+ }
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/reader.go b/vendor/github.com/andybalholm/brotli/reader.go
new file mode 100644
index 0000000..9419c79
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/reader.go
@@ -0,0 +1,108 @@
+package brotli
+
+import (
+ "errors"
+ "io"
+)
+
+type decodeError int
+
+func (err decodeError) Error() string {
+ return "brotli: " + string(decoderErrorString(int(err)))
+}
+
+var errExcessiveInput = errors.New("brotli: excessive input")
+var errInvalidState = errors.New("brotli: invalid state")
+
+// readBufSize is a "good" buffer size that avoids excessive round-trips
+// between the decoder and the underlying reader but doesn't waste too much
+// memory on buffering. It is arbitrarily chosen to be equal to the constant
+// used in io.Copy.
+const readBufSize = 32 * 1024
+
+// NewReader creates a new Reader reading the given reader.
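+// For illustration, a typical use decompresses a stream with io.Copy:
+//
+//	r := NewReader(compressed)
+//	if _, err := io.Copy(dst, r); err != nil {
+//		// handle decode or I/O error
+//	}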
+func NewReader(src io.Reader) *Reader {
+ r := new(Reader)
+ r.Reset(src)
+ return r
+}
+
+// Reset discards the Reader's state and makes it equivalent to the result
+// of NewReader called with src, permitting the Reader to be reused rather
+// than allocated anew. The returned error is always nil.
+func (r *Reader) Reset(src io.Reader) error {
+ if r.error_code < 0 {
+ // There was an unrecoverable error, leaving the Reader's state
+ // undefined. Clear out everything but the buffer.
+ *r = Reader{buf: r.buf}
+ }
+
+ decoderStateInit(r)
+ r.src = src
+ if r.buf == nil {
+ r.buf = make([]byte, readBufSize)
+ }
+ return nil
+}
+
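+// Read implements io.Reader. It refills the internal input buffer from the
+// wrapped reader whenever the decoder runs out of input, and returns as soon
+// as some decompressed data is available rather than blocking for more.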
+func (r *Reader) Read(p []byte) (n int, err error) {
+ if !decoderHasMoreOutput(r) && len(r.in) == 0 {
+ m, readErr := r.src.Read(r.buf)
+ if m == 0 {
+ // If readErr is `nil`, we just proxy underlying stream behavior.
+ return 0, readErr
+ }
+ r.in = r.buf[:m]
+ }
+
+ if len(p) == 0 {
+ return 0, nil
+ }
+
+ for {
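+		// Feed the buffered input to the decoder and record how much output
+		// it wrote into p.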
+		out_len := uint(len(p))
+		in_remaining := uint(len(r.in))
+		out_remaining := out_len
+		result := decoderDecompressStream(r, &in_remaining, &r.in, &out_remaining, &p)
+		n = int(out_len - out_remaining)
+
+ switch result {
+ case decoderResultSuccess:
+ if len(r.in) > 0 {
+ return n, errExcessiveInput
+ }
+ return n, nil
+ case decoderResultError:
+ return n, decodeError(decoderGetErrorCode(r))
+ case decoderResultNeedsMoreOutput:
+ if n == 0 {
+ return 0, io.ErrShortBuffer
+ }
+ return n, nil
+ case decoderNeedsMoreInput:
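+			// Continue below to refill r.in from r.src.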
+ }
+
+ if len(r.in) != 0 {
+ return 0, errInvalidState
+ }
+
+ // Calling r.src.Read may block. Don't block if we have data to return.
+ if n > 0 {
+ return n, nil
+ }
+
+ // Top off the buffer.
+ encN, err := r.src.Read(r.buf)
+ if encN == 0 {
+ // Not enough data to complete decoding.
+ if err == io.EOF {
+ return 0, io.ErrUnexpectedEOF
+ }
+ return 0, err
+ }
+ r.in = r.buf[:encN]
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/ringbuffer.go b/vendor/github.com/andybalholm/brotli/ringbuffer.go
new file mode 100644
index 0000000..1c8f86f
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/ringbuffer.go
@@ -0,0 +1,134 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* A ringBuffer(window_bits, tail_bits) contains `1 << window_bits' bytes of
+ data in a circular manner: writing a byte writes it to:
+ `position() % (1 << window_bits)'.
+ For convenience, the ringBuffer array contains another copy of the
+ first `1 << tail_bits' bytes:
+ buffer_[i] == buffer_[i + (1 << window_bits)], if i < (1 << tail_bits),
+ and another copy of the last two bytes:
+ buffer_[-1] == buffer_[(1 << window_bits) - 1] and
+ buffer_[-2] == buffer_[(1 << window_bits) - 2]. */
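+/* The mirrored tail lets copy loops that start near the end of the window
+   read up to `1 << tail_bits' bytes linearly instead of wrapping, and the
+   two prefix bytes serve reads that look just behind position zero. */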
+type ringBuffer struct {
+ size_ uint32
+ mask_ uint32
+ tail_size_ uint32
+ total_size_ uint32
+ cur_size_ uint32
+ pos_ uint32
+ data_ []byte
+ buffer_ []byte
+}
+
+func ringBufferInit(rb *ringBuffer) {
+ rb.pos_ = 0
+}
+
+func ringBufferSetup(params *encoderParams, rb *ringBuffer) {
+ var window_bits int = computeRbBits(params)
+ var tail_bits int = params.lgblock
+	rb.size_ = 1 << uint(window_bits)
+	rb.mask_ = (1 << uint(window_bits)) - 1
+	rb.tail_size_ = 1 << uint(tail_bits)
+	rb.total_size_ = rb.size_ + rb.tail_size_
+}
+
+const kSlackForEightByteHashingEverywhere uint = 7
+
+/* Allocates or re-allocates data_ to the given length + plus some slack
+ region before and after. Fills the slack regions with zeros. */
+func ringBufferInitBuffer(buflen uint32, rb *ringBuffer) {
+ var new_data []byte
+ var i uint
+ size := 2 + int(buflen) + int(kSlackForEightByteHashingEverywhere)
+ if cap(rb.data_) < size {
+ new_data = make([]byte, size)
+ } else {
+ new_data = rb.data_[:size]
+ }
+ if rb.data_ != nil {
+ copy(new_data, rb.data_[:2+rb.cur_size_+uint32(kSlackForEightByteHashingEverywhere)])
+ }
+
+ rb.data_ = new_data
+ rb.cur_size_ = buflen
+ rb.buffer_ = rb.data_[2:]
+	rb.data_[0] = 0
+	rb.data_[1] = 0
+ for i = 0; i < kSlackForEightByteHashingEverywhere; i++ {
+ rb.buffer_[rb.cur_size_+uint32(i)] = 0
+ }
+}
+
+func ringBufferWriteTail(bytes []byte, n uint, rb *ringBuffer) {
+ var masked_pos uint = uint(rb.pos_ & rb.mask_)
+ if uint32(masked_pos) < rb.tail_size_ {
+ /* Just fill the tail buffer with the beginning data. */
+ var p uint = uint(rb.size_ + uint32(masked_pos))
+ copy(rb.buffer_[p:], bytes[:brotli_min_size_t(n, uint(rb.tail_size_-uint32(masked_pos)))])
+ }
+}
+
+/* Push bytes into the ring buffer. */
+func ringBufferWrite(bytes []byte, n uint, rb *ringBuffer) {
+ if rb.pos_ == 0 && uint32(n) < rb.tail_size_ {
+ /* Special case for the first write: to process the first block, we don't
+ need to allocate the whole ring-buffer and we don't need the tail
+ either. However, we do this memory usage optimization only if the
+ first write is less than the tail size, which is also the input block
+ size, otherwise it is likely that other blocks will follow and we
+ will need to reallocate to the full size anyway. */
+ rb.pos_ = uint32(n)
+
+ ringBufferInitBuffer(rb.pos_, rb)
+ copy(rb.buffer_, bytes[:n])
+ return
+ }
+
+ if rb.cur_size_ < rb.total_size_ {
+ /* Lazily allocate the full buffer. */
+ ringBufferInitBuffer(rb.total_size_, rb)
+
+ /* Initialize the last two bytes to zero, so that we don't have to worry
+ later when we copy the last two bytes to the first two positions. */
+ rb.buffer_[rb.size_-2] = 0
+
+ rb.buffer_[rb.size_-1] = 0
+ }
+ {
+ var masked_pos uint = uint(rb.pos_ & rb.mask_)
+
+		/* The lengths of the writes are limited so that a single call to
+		   ringBufferWriteTail below is enough to keep the mirrored tail
+		   region up to date. */
+ ringBufferWriteTail(bytes, n, rb)
+
+ if uint32(masked_pos+n) <= rb.size_ {
+ /* A single write fits. */
+ copy(rb.buffer_[masked_pos:], bytes[:n])
+ } else {
+ /* Split into two writes.
+ Copy into the end of the buffer, including the tail buffer. */
+ copy(rb.buffer_[masked_pos:], bytes[:brotli_min_size_t(n, uint(rb.total_size_-uint32(masked_pos)))])
+
+ /* Copy into the beginning of the buffer */
+ copy(rb.buffer_, bytes[rb.size_-uint32(masked_pos):][:uint32(n)-(rb.size_-uint32(masked_pos))])
+ }
+ }
+ {
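+		/* Bit 31 of pos_ marks that the buffer has wrapped at least once; it
+		   is masked off for the position arithmetic and restored afterwards. */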
+ var not_first_lap bool = rb.pos_&(1<<31) != 0
+ var rb_pos_mask uint32 = (1 << 31) - 1
+ rb.data_[0] = rb.buffer_[rb.size_-2]
+ rb.data_[1] = rb.buffer_[rb.size_-1]
+		rb.pos_ = (rb.pos_ & rb_pos_mask) + (uint32(n) & rb_pos_mask)
+ if not_first_lap {
+ /* Wrap, but preserve not-a-first-lap feature. */
+ rb.pos_ |= 1 << 31
+ }
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/state.go b/vendor/github.com/andybalholm/brotli/state.go
new file mode 100644
index 0000000..38d753e
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/state.go
@@ -0,0 +1,294 @@
+package brotli
+
+import "io"
+
+/* Copyright 2015 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Brotli state for partial streaming decoding. */
+const (
+ stateUninited = iota
+ stateLargeWindowBits
+ stateInitialize
+ stateMetablockBegin
+ stateMetablockHeader
+ stateMetablockHeader2
+ stateContextModes
+ stateCommandBegin
+ stateCommandInner
+ stateCommandPostDecodeLiterals
+ stateCommandPostWrapCopy
+ stateUncompressed
+ stateMetadata
+ stateCommandInnerWrite
+ stateMetablockDone
+ stateCommandPostWrite1
+ stateCommandPostWrite2
+ stateHuffmanCode0
+ stateHuffmanCode1
+ stateHuffmanCode2
+ stateHuffmanCode3
+ stateContextMap1
+ stateContextMap2
+ stateTreeGroup
+ stateDone
+)
+
+const (
+ stateMetablockHeaderNone = iota
+ stateMetablockHeaderEmpty
+ stateMetablockHeaderNibbles
+ stateMetablockHeaderSize
+ stateMetablockHeaderUncompressed
+ stateMetablockHeaderReserved
+ stateMetablockHeaderBytes
+ stateMetablockHeaderMetadata
+)
+
+const (
+ stateUncompressedNone = iota
+ stateUncompressedWrite
+)
+
+const (
+ stateTreeGroupNone = iota
+ stateTreeGroupLoop
+)
+
+const (
+ stateContextMapNone = iota
+ stateContextMapReadPrefix
+ stateContextMapHuffman
+ stateContextMapDecode
+ stateContextMapTransform
+)
+
+const (
+ stateHuffmanNone = iota
+ stateHuffmanSimpleSize
+ stateHuffmanSimpleRead
+ stateHuffmanSimpleBuild
+ stateHuffmanComplex
+ stateHuffmanLengthSymbols
+)
+
+const (
+ stateDecodeUint8None = iota
+ stateDecodeUint8Short
+ stateDecodeUint8Long
+)
+
+const (
+ stateReadBlockLengthNone = iota
+ stateReadBlockLengthSuffix
+)
+
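+// Reader wraps an underlying io.Reader and decompresses a brotli stream
+// from it. It holds the complete decoder state, so decoding can be
+// suspended whenever more input is needed and resumed on the next Read.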
+type Reader struct {
+ src io.Reader
+ buf []byte // scratch space for reading from src
+ in []byte // current chunk to decode; usually aliases buf
+
+ state int
+ loop_counter int
+ br bitReader
+ buffer struct {
+ u64 uint64
+ u8 [8]byte
+ }
+ buffer_length uint32
+ pos int
+ max_backward_distance int
+ max_distance int
+ ringbuffer_size int
+ ringbuffer_mask int
+ dist_rb_idx int
+ dist_rb [4]int
+ error_code int
+ sub_loop_counter uint32
+ ringbuffer []byte
+ ringbuffer_end []byte
+ htree_command []huffmanCode
+ context_lookup []byte
+ context_map_slice []byte
+ dist_context_map_slice []byte
+ literal_hgroup huffmanTreeGroup
+ insert_copy_hgroup huffmanTreeGroup
+ distance_hgroup huffmanTreeGroup
+ block_type_trees []huffmanCode
+ block_len_trees []huffmanCode
+ trivial_literal_context int
+ distance_context int
+ meta_block_remaining_len int
+ block_length_index uint32
+ block_length [3]uint32
+ num_block_types [3]uint32
+ block_type_rb [6]uint32
+ distance_postfix_bits uint32
+ num_direct_distance_codes uint32
+ distance_postfix_mask int
+ num_dist_htrees uint32
+ dist_context_map []byte
+ literal_htree []huffmanCode
+ dist_htree_index byte
+ repeat_code_len uint32
+ prev_code_len uint32
+ copy_length int
+ distance_code int
+ rb_roundtrips uint
+ partial_pos_out uint
+ symbol uint32
+ repeat uint32
+ space uint32
+ table [32]huffmanCode
+ symbol_lists symbolList
+ symbols_lists_array [huffmanMaxCodeLength + 1 + numCommandSymbols]uint16
+ next_symbol [32]int
+ code_length_code_lengths [codeLengthCodes]byte
+ code_length_histo [16]uint16
+ htree_index int
+ next []huffmanCode
+ context_index uint32
+ max_run_length_prefix uint32
+ code uint32
+ context_map_table [huffmanMaxSize272]huffmanCode
+ substate_metablock_header int
+ substate_tree_group int
+ substate_context_map int
+ substate_uncompressed int
+ substate_huffman int
+ substate_decode_uint8 int
+ substate_read_block_length int
+ is_last_metablock uint
+ is_uncompressed uint
+ is_metadata uint
+ should_wrap_ringbuffer uint
+ canny_ringbuffer_allocation uint
+ large_window bool
+ size_nibbles uint
+ window_bits uint32
+ new_ringbuffer_size int
+ num_literal_htrees uint32
+ context_map []byte
+ context_modes []byte
+ dictionary *dictionary
+ transforms *transforms
+ trivial_literal_contexts [8]uint32
+}
+
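+/* decoderStateInit resets s to a pristine decoder state. It cannot fail;
+   the bool return is kept from the translated C API. */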
+func decoderStateInit(s *Reader) bool {
+ s.error_code = 0 /* BROTLI_DECODER_NO_ERROR */
+
+ initBitReader(&s.br)
+ s.state = stateUninited
+ s.large_window = false
+ s.substate_metablock_header = stateMetablockHeaderNone
+ s.substate_tree_group = stateTreeGroupNone
+ s.substate_context_map = stateContextMapNone
+ s.substate_uncompressed = stateUncompressedNone
+ s.substate_huffman = stateHuffmanNone
+ s.substate_decode_uint8 = stateDecodeUint8None
+ s.substate_read_block_length = stateReadBlockLengthNone
+
+ s.buffer_length = 0
+ s.loop_counter = 0
+ s.pos = 0
+ s.rb_roundtrips = 0
+ s.partial_pos_out = 0
+
+ s.block_type_trees = nil
+ s.block_len_trees = nil
+ s.ringbuffer_size = 0
+ s.new_ringbuffer_size = 0
+ s.ringbuffer_mask = 0
+
+ s.context_map = nil
+ s.context_modes = nil
+ s.dist_context_map = nil
+ s.context_map_slice = nil
+ s.dist_context_map_slice = nil
+
+ s.sub_loop_counter = 0
+
+ s.literal_hgroup.codes = nil
+ s.literal_hgroup.htrees = nil
+ s.insert_copy_hgroup.codes = nil
+ s.insert_copy_hgroup.htrees = nil
+ s.distance_hgroup.codes = nil
+ s.distance_hgroup.htrees = nil
+
+ s.is_last_metablock = 0
+ s.is_uncompressed = 0
+ s.is_metadata = 0
+ s.should_wrap_ringbuffer = 0
+ s.canny_ringbuffer_allocation = 1
+
+ s.window_bits = 0
+ s.max_distance = 0
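+ /* Initial distance ring buffer contents, as specified by RFC 7932. */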
+ s.dist_rb[0] = 16
+ s.dist_rb[1] = 15
+ s.dist_rb[2] = 11
+ s.dist_rb[3] = 4
+ s.dist_rb_idx = 0
+ s.block_type_trees = nil
+ s.block_len_trees = nil
+
+ s.symbol_lists.storage = s.symbols_lists_array[:]
+ s.symbol_lists.offset = huffmanMaxCodeLength + 1
+
+ s.dictionary = getDictionary()
+ s.transforms = getTransforms()
+
+ return true
+}
+
+func decoderStateMetablockBegin(s *Reader) {
+ s.meta_block_remaining_len = 0
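+ /* Defaults: one block type per category, with an effectively unlimited
+    block length of 1 << 24. */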
+ s.block_length[0] = 1 << 24
+ s.block_length[1] = 1 << 24
+ s.block_length[2] = 1 << 24
+ s.num_block_types[0] = 1
+ s.num_block_types[1] = 1
+ s.num_block_types[2] = 1
+ s.block_type_rb[0] = 1
+ s.block_type_rb[1] = 0
+ s.block_type_rb[2] = 1
+ s.block_type_rb[3] = 0
+ s.block_type_rb[4] = 1
+ s.block_type_rb[5] = 0
+ s.context_map = nil
+ s.context_modes = nil
+ s.dist_context_map = nil
+ s.context_map_slice = nil
+ s.literal_htree = nil
+ s.dist_context_map_slice = nil
+ s.dist_htree_index = 0
+ s.context_lookup = nil
+ s.literal_hgroup.codes = nil
+ s.literal_hgroup.htrees = nil
+ s.insert_copy_hgroup.codes = nil
+ s.insert_copy_hgroup.htrees = nil
+ s.distance_hgroup.codes = nil
+ s.distance_hgroup.htrees = nil
+}
+
+func decoderStateCleanupAfterMetablock(s *Reader) {
+ s.context_modes = nil
+ s.context_map = nil
+ s.dist_context_map = nil
+ s.literal_hgroup.htrees = nil
+ s.insert_copy_hgroup.htrees = nil
+ s.distance_hgroup.htrees = nil
+}
+
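+/* decoderHuffmanTreeGroupInit allocates code storage for a group of ntrees
+   Huffman trees; the per-tree table size is bounded by kMaxHuffmanTableSize
+   for the given alphabet size. */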
+func decoderHuffmanTreeGroupInit(s *Reader, group *huffmanTreeGroup, alphabet_size uint32, max_symbol uint32, ntrees uint32) bool {
+ var max_table_size uint = uint(kMaxHuffmanTableSize[(alphabet_size+31)>>5])
+ group.alphabet_size = uint16(alphabet_size)
+ group.max_symbol = uint16(max_symbol)
+ group.num_htrees = uint16(ntrees)
+ group.htrees = make([][]huffmanCode, ntrees)
+ group.codes = make([]huffmanCode, uint(ntrees)*max_table_size)
+ return group.codes != nil
+}
diff --git a/vendor/github.com/andybalholm/brotli/static_dict.go b/vendor/github.com/andybalholm/brotli/static_dict.go
new file mode 100644
index 0000000..bc05566
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/static_dict.go
@@ -0,0 +1,662 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Class to model the static dictionary. */
+
+const maxStaticDictionaryMatchLen = 37
+
+const kInvalidMatch uint32 = 0xFFFFFFF
+
+func hash(data []byte) uint32 {
+ var h uint32 = binary.LittleEndian.Uint32(data) * kDictHashMul32
+
+ /* The higher bits contain more mixture from the multiplication,
+ so we take our results from there. */
+ return h >> uint(32-kDictNumBits)
+}
+
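+/* addMatch records a candidate match for a given output length: the
+   transformed word id is packed into the high bits and the length code into
+   the low five bits, so taking the minimum keeps the cheapest encoding. */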
+func addMatch(distance uint, len uint, len_code uint, matches []uint32) {
+ var match uint32 = uint32((distance << 5) + len_code)
+ matches[len] = brotli_min_uint32_t(matches[len], match)
+}
+
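+/* dictMatchLength returns the length of the common prefix of data and
+   dictionary word number id of length len, capped at maxlen. */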
+func dictMatchLength(dict *dictionary, data []byte, id uint, len uint, maxlen uint) uint {
+ var offset uint = uint(dict.offsets_by_length[len]) + len*id
+ return findMatchLengthWithLimit(dict.data[offset:], data, brotli_min_size_t(uint(len), maxlen))
+}
+
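+/* isMatch reports whether dictionary word w, with its transform applied,
+   matches a prefix of data. */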
+func isMatch(d *dictionary, w dictWord, data []byte, max_length uint) bool {
+ if uint(w.len) > max_length {
+ return false
+ } else {
+ var offset uint = uint(d.offsets_by_length[w.len]) + uint(w.len)*uint(w.idx)
+ var dict []byte = d.data[offset:]
+ if w.transform == 0 {
+ /* Match against base dictionary word. */
+ return findMatchLengthWithLimit(dict, data, uint(w.len)) == uint(w.len)
+ } else if w.transform == 10 {
+ /* Match against uppercase first transform.
+ Note that there are only ASCII uppercase words in the lookup table. */
+ return dict[0] >= 'a' && dict[0] <= 'z' && (dict[0]^32) == data[0] && findMatchLengthWithLimit(dict[1:], data[1:], uint(w.len)-1) == uint(w.len-1)
+ } else {
+ /* Match against uppercase all transform.
+ Note that there are only ASCII uppercase words in the lookup table. */
+ var i uint
+ for i = 0; i < uint(w.len); i++ {
+ if dict[i] >= 'a' && dict[i] <= 'z' {
+ if (dict[i] ^ 32) != data[i] {
+ return false
+ }
+ } else {
+ if dict[i] != data[i] {
+ return false
+ }
+ }
+ }
+
+ return true
+ }
+ }
+}
+
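+/* findAllStaticDictionaryMatches looks up the hash bucket for data and
+   records every word+transform combination matching a prefix of data. The
+   numeric constants fed to addMatch are indices into the static transform
+   table. */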
+func findAllStaticDictionaryMatches(dict *encoderDictionary, data []byte, min_length uint, max_length uint, matches []uint32) bool {
+ var has_found_match bool = false
+ {
+ var offset uint = uint(dict.buckets[hash(data)])
+ var end bool = offset == 0
+ for !end {
+ w := dict.dict_words[offset]
+ offset++
+ var l uint = uint(w.len) & 0x1F
+ var n uint = uint(1) << dict.words.size_bits_by_length[l]
+ var id uint = uint(w.idx)
+ end = w.len&0x80 != 0
+ w.len = byte(l)
+ if w.transform == 0 {
+ var matchlen uint = dictMatchLength(dict.words, data, id, l, max_length)
+ var s []byte
+ var minlen uint
+ var maxlen uint
+ var len uint
+
+ /* Transform "" + BROTLI_TRANSFORM_IDENTITY + "" */
+ if matchlen == l {
+ addMatch(id, l, l, matches)
+ has_found_match = true
+ }
+
+ /* Transforms "" + BROTLI_TRANSFORM_OMIT_LAST_1 + "" and
+ "" + BROTLI_TRANSFORM_OMIT_LAST_1 + "ing " */
+ if matchlen >= l-1 {
+ addMatch(id+12*n, l-1, l, matches)
+ if l+2 < max_length && data[l-1] == 'i' && data[l] == 'n' && data[l+1] == 'g' && data[l+2] == ' ' {
+ addMatch(id+49*n, l+3, l, matches)
+ }
+
+ has_found_match = true
+ }
+
+ /* Transform "" + BROTLI_TRANSFORM_OMIT_LAST_# + "" (# = 2 .. 9) */
+ minlen = min_length
+
+ if l > 9 {
+ minlen = brotli_max_size_t(minlen, l-9)
+ }
+ maxlen = brotli_min_size_t(matchlen, l-2)
+ for len = minlen; len <= maxlen; len++ {
+ var cut uint = l - len
+ var transform_id uint = (cut << 2) + uint((dict.cutoffTransforms>>(cut*6))&0x3F)
+ addMatch(id+transform_id*n, uint(len), l, matches)
+ has_found_match = true
+ }
+
+ if matchlen < l || l+6 >= max_length {
+ continue
+ }
+
+ s = data[l:]
+
+ /* Transforms "" + BROTLI_TRANSFORM_IDENTITY + <suffix> */
+ if s[0] == ' ' {
+ addMatch(id+n, l+1, l, matches)
+ if s[1] == 'a' {
+ if s[2] == ' ' {
+ addMatch(id+28*n, l+3, l, matches)
+ } else if s[2] == 's' {
+ if s[3] == ' ' {
+ addMatch(id+46*n, l+4, l, matches)
+ }
+ } else if s[2] == 't' {
+ if s[3] == ' ' {
+ addMatch(id+60*n, l+4, l, matches)
+ }
+ } else if s[2] == 'n' {
+ if s[3] == 'd' && s[4] == ' ' {
+ addMatch(id+10*n, l+5, l, matches)
+ }
+ }
+ } else if s[1] == 'b' {
+ if s[2] == 'y' && s[3] == ' ' {
+ addMatch(id+38*n, l+4, l, matches)
+ }
+ } else if s[1] == 'i' {
+ if s[2] == 'n' {
+ if s[3] == ' ' {
+ addMatch(id+16*n, l+4, l, matches)
+ }
+ } else if s[2] == 's' {
+ if s[3] == ' ' {
+ addMatch(id+47*n, l+4, l, matches)
+ }
+ }
+ } else if s[1] == 'f' {
+ if s[2] == 'o' {
+ if s[3] == 'r' && s[4] == ' ' {
+ addMatch(id+25*n, l+5, l, matches)
+ }
+ } else if s[2] == 'r' {
+ if s[3] == 'o' && s[4] == 'm' && s[5] == ' ' {
+ addMatch(id+37*n, l+6, l, matches)
+ }
+ }
+ } else if s[1] == 'o' {
+ if s[2] == 'f' {
+ if s[3] == ' ' {
+ addMatch(id+8*n, l+4, l, matches)
+ }
+ } else if s[2] == 'n' {
+ if s[3] == ' ' {
+ addMatch(id+45*n, l+4, l, matches)
+ }
+ }
+ } else if s[1] == 'n' {
+ if s[2] == 'o' && s[3] == 't' && s[4] == ' ' {
+ addMatch(id+80*n, l+5, l, matches)
+ }
+ } else if s[1] == 't' {
+ if s[2] == 'h' {
+ if s[3] == 'e' {
+ if s[4] == ' ' {
+ addMatch(id+5*n, l+5, l, matches)
+ }
+ } else if s[3] == 'a' {
+ if s[4] == 't' && s[5] == ' ' {
+ addMatch(id+29*n, l+6, l, matches)
+ }
+ }
+ } else if s[2] == 'o' {
+ if s[3] == ' ' {
+ addMatch(id+17*n, l+4, l, matches)
+ }
+ }
+ } else if s[1] == 'w' {
+ if s[2] == 'i' && s[3] == 't' && s[4] == 'h' && s[5] == ' ' {
+ addMatch(id+35*n, l+6, l, matches)
+ }
+ }
+ } else if s[0] == '"' {
+ addMatch(id+19*n, l+1, l, matches)
+ if s[1] == '>' {
+ addMatch(id+21*n, l+2, l, matches)
+ }
+ } else if s[0] == '.' {
+ addMatch(id+20*n, l+1, l, matches)
+ if s[1] == ' ' {
+ addMatch(id+31*n, l+2, l, matches)
+ if s[2] == 'T' && s[3] == 'h' {
+ if s[4] == 'e' {
+ if s[5] == ' ' {
+ addMatch(id+43*n, l+6, l, matches)
+ }
+ } else if s[4] == 'i' {
+ if s[5] == 's' && s[6] == ' ' {
+ addMatch(id+75*n, l+7, l, matches)
+ }
+ }
+ }
+ }
+ } else if s[0] == ',' {
+ addMatch(id+76*n, l+1, l, matches)
+ if s[1] == ' ' {
+ addMatch(id+14*n, l+2, l, matches)
+ }
+ } else if s[0] == '\n' {
+ addMatch(id+22*n, l+1, l, matches)
+ if s[1] == '\t' {
+ addMatch(id+50*n, l+2, l, matches)
+ }
+ } else if s[0] == ']' {
+ addMatch(id+24*n, l+1, l, matches)
+ } else if s[0] == '\'' {
+ addMatch(id+36*n, l+1, l, matches)
+ } else if s[0] == ':' {
+ addMatch(id+51*n, l+1, l, matches)
+ } else if s[0] == '(' {
+ addMatch(id+57*n, l+1, l, matches)
+ } else if s[0] == '=' {
+ if s[1] == '"' {
+ addMatch(id+70*n, l+2, l, matches)
+ } else if s[1] == '\'' {
+ addMatch(id+86*n, l+2, l, matches)
+ }
+ } else if s[0] == 'a' {
+ if s[1] == 'l' && s[2] == ' ' {
+ addMatch(id+84*n, l+3, l, matches)
+ }
+ } else if s[0] == 'e' {
+ if s[1] == 'd' {
+ if s[2] == ' ' {
+ addMatch(id+53*n, l+3, l, matches)
+ }
+ } else if s[1] == 'r' {
+ if s[2] == ' ' {
+ addMatch(id+82*n, l+3, l, matches)
+ }
+ } else if s[1] == 's' {
+ if s[2] == 't' && s[3] == ' ' {
+ addMatch(id+95*n, l+4, l, matches)
+ }
+ }
+ } else if s[0] == 'f' {
+ if s[1] == 'u' && s[2] == 'l' && s[3] == ' ' {
+ addMatch(id+90*n, l+4, l, matches)
+ }
+ } else if s[0] == 'i' {
+ if s[1] == 'v' {
+ if s[2] == 'e' && s[3] == ' ' {
+ addMatch(id+92*n, l+4, l, matches)
+ }
+ } else if s[1] == 'z' {
+ if s[2] == 'e' && s[3] == ' ' {
+ addMatch(id+100*n, l+4, l, matches)
+ }
+ }
+ } else if s[0] == 'l' {
+ if s[1] == 'e' {
+ if s[2] == 's' && s[3] == 's' && s[4] == ' ' {
+ addMatch(id+93*n, l+5, l, matches)
+ }
+ } else if s[1] == 'y' {
+ if s[2] == ' ' {
+ addMatch(id+61*n, l+3, l, matches)
+ }
+ }
+ } else if s[0] == 'o' {
+ if s[1] == 'u' && s[2] == 's' && s[3] == ' ' {
+ addMatch(id+106*n, l+4, l, matches)
+ }
+ }
+ } else {
+ var is_all_caps bool = (w.transform != transformUppercaseFirst)
+ /* Set is_all_caps=0 for BROTLI_TRANSFORM_UPPERCASE_FIRST and
+ is_all_caps=1 otherwise (BROTLI_TRANSFORM_UPPERCASE_ALL)
+ transform. */
+
+ var s []byte
+ if !isMatch(dict.words, w, data, max_length) {
+ continue
+ }
+
+ /* Transform "" + kUppercase{First,All} + "" */
+ var tmp int
+ if is_all_caps {
+ tmp = 44
+ } else {
+ tmp = 9
+ }
+ addMatch(id+uint(tmp)*n, l, l, matches)
+
+ has_found_match = true
+ if l+1 >= max_length {
+ continue
+ }
+
+ /* Transforms "" + kUppercase{First,All} + <suffix> */
+ s = data[l:]
+
+ if s[0] == ' ' {
+ var tmp int
+ if is_all_caps {
+ tmp = 68
+ } else {
+ tmp = 4
+ }
+ addMatch(id+uint(tmp)*n, l+1, l, matches)
+ } else if s[0] == '"' {
+ var tmp int
+ if is_all_caps {
+ tmp = 87
+ } else {
+ tmp = 66
+ }
+ addMatch(id+uint(tmp)*n, l+1, l, matches)
+ if s[1] == '>' {
+ var tmp int
+ if is_all_caps {
+ tmp = 97
+ } else {
+ tmp = 69
+ }
+ addMatch(id+uint(tmp)*n, l+2, l, matches)
+ }
+ } else if s[0] == '.' {
+ var tmp int
+ if is_all_caps {
+ tmp = 101
+ } else {
+ tmp = 79
+ }
+ addMatch(id+uint(tmp)*n, l+1, l, matches)
+ if s[1] == ' ' {
+ var tmp int
+ if is_all_caps {
+ tmp = 114
+ } else {
+ tmp = 88
+ }
+ addMatch(id+uint(tmp)*n, l+2, l, matches)
+ }
+ } else if s[0] == ',' {
+ var tmp int
+ if is_all_caps {
+ tmp = 112
+ } else {
+ tmp = 99
+ }
+ addMatch(id+uint(tmp)*n, l+1, l, matches)
+ if s[1] == ' ' {
+ var tmp int
+ if is_all_caps {
+ tmp = 107
+ } else {
+ tmp = 58
+ }
+ addMatch(id+uint(tmp)*n, l+2, l, matches)
+ }
+ } else if s[0] == '\'' {
+ var tmp int
+ if is_all_caps {
+ tmp = 94
+ } else {
+ tmp = 74
+ }
+ addMatch(id+uint(tmp)*n, l+1, l, matches)
+ } else if s[0] == '(' {
+ var tmp int
+ if is_all_caps {
+ tmp = 113
+ } else {
+ tmp = 78
+ }
+ addMatch(id+uint(tmp)*n, l+1, l, matches)
+ } else if s[0] == '=' {
+ if s[1] == '"' {
+ var tmp int
+ if is_all_caps {
+ tmp = 105
+ } else {
+ tmp = 104
+ }
+ addMatch(id+uint(tmp)*n, l+2, l, matches)
+ } else if s[1] == '\'' {
+ var tmp int
+ if is_all_caps {
+ tmp = 116
+ } else {
+ tmp = 108
+ }
+ addMatch(id+uint(tmp)*n, l+2, l, matches)
+ }
+ }
+ }
+ }
+ }
+
+ /* Transforms with prefixes " " and "." */
+ if max_length >= 5 && (data[0] == ' ' || data[0] == '.') {
+ var is_space bool = (data[0] == ' ')
+ var offset uint = uint(dict.buckets[hash(data[1:])])
+ var end bool = offset == 0
+ for !end {
+ w := dict.dict_words[offset]
+ offset++
+ var l uint = uint(w.len) & 0x1F
+ var n uint = uint(1) << dict.words.size_bits_by_length[l]
+ var id uint = uint(w.idx)
+ end = w.len&0x80 != 0
+ w.len = byte(l)
+ if w.transform == 0 {
+ var s []byte
+ if !isMatch(dict.words, w, data[1:], max_length-1) {
+ continue
+ }
+
+ /* Transforms " " + BROTLI_TRANSFORM_IDENTITY + "" and
+ "." + BROTLI_TRANSFORM_IDENTITY + "" */
+ var tmp int
+ if is_space {
+ tmp = 6
+ } else {
+ tmp = 32
+ }
+ addMatch(id+uint(tmp)*n, l+1, l, matches)
+
+ has_found_match = true
+ if l+2 >= max_length {
+ continue
+ }
+
+ /* Transforms " " + BROTLI_TRANSFORM_IDENTITY + <suffix> and
+    "." + BROTLI_TRANSFORM_IDENTITY + <suffix> */
+ s = data[l+1:]
+
+ if s[0] == ' ' {
+ var tmp int
+ if is_space {
+ tmp = 2
+ } else {
+ tmp = 77
+ }
+ addMatch(id+uint(tmp)*n, l+2, l, matches)
+ } else if s[0] == '(' {
+ var tmp int
+ if is_space {
+ tmp = 89
+ } else {
+ tmp = 67
+ }
+ addMatch(id+uint(tmp)*n, l+2, l, matches)
+ } else if is_space {
+ if s[0] == ',' {
+ addMatch(id+103*n, l+2, l, matches)
+ if s[1] == ' ' {
+ addMatch(id+33*n, l+3, l, matches)
+ }
+ } else if s[0] == '.' {
+ addMatch(id+71*n, l+2, l, matches)
+ if s[1] == ' ' {
+ addMatch(id+52*n, l+3, l, matches)
+ }
+ } else if s[0] == '=' {
+ if s[1] == '"' {
+ addMatch(id+81*n, l+3, l, matches)
+ } else if s[1] == '\'' {
+ addMatch(id+98*n, l+3, l, matches)
+ }
+ }
+ }
+ } else if is_space {
+ var is_all_caps bool = (w.transform != transformUppercaseFirst)
+ /* Set is_all_caps=0 for BROTLI_TRANSFORM_UPPERCASE_FIRST and
+ is_all_caps=1 otherwise (BROTLI_TRANSFORM_UPPERCASE_ALL)
+ transform. */
+
+ var s []byte
+ if !isMatch(dict.words, w, data[1:], max_length-1) {
+ continue
+ }
+
+ /* Transforms " " + kUppercase{First,All} + "" */
+ var tmp int
+ if is_all_caps {
+ tmp = 85
+ } else {
+ tmp = 30
+ }
+ addMatch(id+uint(tmp)*n, l+1, l, matches)
+
+ has_found_match = true
+ if l+2 >= max_length {
+ continue
+ }
+
+ /* Transforms " " + kUppercase{First,All} + <suffix> */
+ s = data[l+1:]
+
+ if s[0] == ' ' {
+ var tmp int
+ if is_all_caps {
+ tmp = 83
+ } else {
+ tmp = 15
+ }
+ addMatch(id+uint(tmp)*n, l+2, l, matches)
+ } else if s[0] == ',' {
+ if !is_all_caps {
+ addMatch(id+109*n, l+2, l, matches)
+ }
+
+ if s[1] == ' ' {
+ var tmp int
+ if is_all_caps {
+ tmp = 111
+ } else {
+ tmp = 65
+ }
+ addMatch(id+uint(tmp)*n, l+3, l, matches)
+ }
+ } else if s[0] == '.' {
+ var tmp int
+ if is_all_caps {
+ tmp = 115
+ } else {
+ tmp = 96
+ }
+ addMatch(id+uint(tmp)*n, l+2, l, matches)
+ if s[1] == ' ' {
+ var tmp int
+ if is_all_caps {
+ tmp = 117
+ } else {
+ tmp = 91
+ }
+ addMatch(id+uint(tmp)*n, l+3, l, matches)
+ }
+ } else if s[0] == '=' {
+ if s[1] == '"' {
+ var tmp int
+ if is_all_caps {
+ tmp = 110
+ } else {
+ tmp = 118
+ }
+ addMatch(id+uint(tmp)*n, l+3, l, matches)
+ } else if s[1] == '\'' {
+ var tmp int
+ if is_all_caps {
+ tmp = 119
+ } else {
+ tmp = 120
+ }
+ addMatch(id+uint(tmp)*n, l+3, l, matches)
+ }
+ }
+ }
+ }
+ }
+
+ if max_length >= 6 {
+ /* Transforms with prefixes "e ", "s ", ", " and "\xC2\xA0" */
+ if (data[1] == ' ' && (data[0] == 'e' || data[0] == 's' || data[0] == ',')) || (data[0] == 0xC2 && data[1] == 0xA0) {
+ var offset uint = uint(dict.buckets[hash(data[2:])])
+ var end bool = offset == 0
+ for !end {
+ w := dict.dict_words[offset]
+ offset++
+ var l uint = uint(w.len) & 0x1F
+ var n uint = uint(1) << dict.words.size_bits_by_length[l]
+ var id uint = uint(w.idx)
+ end = w.len&0x80 != 0
+ w.len = byte(l)
+ if w.transform == 0 && isMatch(dict.words, w, data[2:], max_length-2) {
+ if data[0] == 0xC2 {
+ addMatch(id+102*n, l+2, l, matches)
+ has_found_match = true
+ } else if l+2 < max_length && data[l+2] == ' ' {
+ var t uint = 13
+ if data[0] == 'e' {
+ t = 18
+ } else if data[0] == 's' {
+ t = 7
+ }
+ addMatch(id+t*n, l+3, l, matches)
+ has_found_match = true
+ }
+ }
+ }
+ }
+ }
+
+ if max_length >= 9 {
+ /* Transforms with prefixes " the " and ".com/" */
+ if (data[0] == ' ' && data[1] == 't' && data[2] == 'h' && data[3] == 'e' && data[4] == ' ') || (data[0] == '.' && data[1] == 'c' && data[2] == 'o' && data[3] == 'm' && data[4] == '/') {
+ var offset uint = uint(dict.buckets[hash(data[5:])])
+ var end bool = offset == 0
+ for !end {
+ w := dict.dict_words[offset]
+ offset++
+ var l uint = uint(w.len) & 0x1F
+ var n uint = uint(1) << dict.words.size_bits_by_length[l]
+ var id uint = uint(w.idx)
+ end = w.len&0x80 != 0
+ w.len = byte(l)
+ if w.transform == 0 && isMatch(dict.words, w, data[5:], max_length-5) {
+ var tmp int
+ if data[0] == ' ' {
+ tmp = 41
+ } else {
+ tmp = 72
+ }
+ addMatch(id+uint(tmp)*n, l+5, l, matches)
+ has_found_match = true
+ if l+5 < max_length {
+ var s []byte = data[l+5:]
+ if data[0] == ' ' {
+ if l+8 < max_length && s[0] == ' ' && s[1] == 'o' && s[2] == 'f' && s[3] == ' ' {
+ addMatch(id+62*n, l+9, l, matches)
+ if l+12 < max_length && s[4] == 't' && s[5] == 'h' && s[6] == 'e' && s[7] == ' ' {
+ addMatch(id+73*n, l+13, l, matches)
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return has_found_match
+}
diff --git a/vendor/github.com/andybalholm/brotli/static_dict_lut.go b/vendor/github.com/andybalholm/brotli/static_dict_lut.go
new file mode 100644
index 0000000..b33963e
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/static_dict_lut.go
@@ -0,0 +1,75094 @@
+package brotli
+
+/* Copyright 2017 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Lookup table for static dictionary and transforms. */
+
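+/* In the bucket chains below, the high bit of len marks the last word of a
+   bucket and its low five bits hold the word length; transform selects one
+   of the static transforms and idx is the word's index within its length
+   class. */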
+type dictWord struct {
+ len byte
+ transform byte
+ idx uint16
+}
+
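+/* hash maps the first four bytes of data to one of 1<<kDictNumBits buckets. */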
+const kDictNumBits int = 15
+
+const kDictHashMul32 uint32 = 0x1E35A7BD
+
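+/* Each entry is the offset of a bucket's first word in dict_words; zero
+   marks an empty bucket. */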
+var kStaticDictionaryBuckets = [32768]uint16{
+ 1,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3,
+ 6,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20,
+ 0,
+ 0,
+ 0,
+ 21,
+ 0,
+ 22,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23,
+ 0,
+ 0,
+ 25,
+ 0,
+ 29,
+ 0,
+ 53,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 55,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 61,
+ 76,
+ 0,
+ 0,
+ 0,
+ 94,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 96,
+ 0,
+ 97,
+ 0,
+ 98,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 99,
+ 101,
+ 106,
+ 108,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 110,
+ 0,
+ 111,
+ 112,
+ 0,
+ 113,
+ 118,
+ 124,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 125,
+ 128,
+ 0,
+ 0,
+ 0,
+ 0,
+ 129,
+ 0,
+ 0,
+ 131,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 132,
+ 0,
+ 0,
+ 135,
+ 0,
+ 0,
+ 0,
+ 137,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 138,
+ 139,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 142,
+ 143,
+ 144,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 145,
+ 0,
+ 0,
+ 0,
+ 146,
+ 149,
+ 151,
+ 152,
+ 0,
+ 0,
+ 153,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 154,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 155,
+ 0,
+ 0,
+ 0,
+ 0,
+ 160,
+ 182,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 183,
+ 0,
+ 0,
+ 0,
+ 188,
+ 189,
+ 0,
+ 0,
+ 192,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 194,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 197,
+ 202,
+ 209,
+ 0,
+ 0,
+ 210,
+ 0,
+ 224,
+ 0,
+ 0,
+ 0,
+ 225,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 231,
+ 0,
+ 0,
+ 0,
+ 232,
+ 0,
+ 240,
+ 0,
+ 0,
+ 242,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 244,
+ 0,
+ 0,
+ 0,
+ 246,
+ 0,
+ 0,
+ 249,
+ 251,
+ 253,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 258,
+ 0,
+ 0,
+ 261,
+ 263,
+ 0,
+ 0,
+ 0,
+ 267,
+ 0,
+ 0,
+ 268,
+ 0,
+ 269,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 271,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 272,
+ 0,
+ 273,
+ 0,
+ 277,
+ 0,
+ 278,
+ 286,
+ 0,
+ 0,
+ 0,
+ 0,
+ 287,
+ 0,
+ 289,
+ 290,
+ 291,
+ 0,
+ 0,
+ 0,
+ 295,
+ 0,
+ 0,
+ 296,
+ 297,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 298,
+ 0,
+ 0,
+ 0,
+ 299,
+ 0,
+ 0,
+ 305,
+ 0,
+ 324,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 327,
+ 0,
+ 328,
+ 329,
+ 0,
+ 0,
+ 0,
+ 0,
+ 336,
+ 0,
+ 0,
+ 340,
+ 0,
+ 341,
+ 342,
+ 343,
+ 0,
+ 0,
+ 346,
+ 0,
+ 348,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 349,
+ 351,
+ 0,
+ 0,
+ 355,
+ 0,
+ 363,
+ 0,
+ 364,
+ 0,
+ 368,
+ 369,
+ 0,
+ 370,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 372,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 373,
+ 0,
+ 375,
+ 0,
+ 0,
+ 0,
+ 0,
+ 376,
+ 377,
+ 0,
+ 0,
+ 394,
+ 395,
+ 396,
+ 0,
+ 0,
+ 398,
+ 0,
+ 0,
+ 0,
+ 0,
+ 400,
+ 0,
+ 0,
+ 408,
+ 0,
+ 0,
+ 0,
+ 0,
+ 420,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 421,
+ 0,
+ 0,
+ 422,
+ 423,
+ 0,
+ 0,
+ 429,
+ 435,
+ 436,
+ 442,
+ 0,
+ 0,
+ 443,
+ 0,
+ 444,
+ 445,
+ 453,
+ 456,
+ 0,
+ 457,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 458,
+ 0,
+ 0,
+ 0,
+ 459,
+ 0,
+ 0,
+ 0,
+ 460,
+ 0,
+ 462,
+ 463,
+ 465,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 466,
+ 469,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 470,
+ 0,
+ 0,
+ 0,
+ 474,
+ 0,
+ 476,
+ 0,
+ 0,
+ 0,
+ 0,
+ 483,
+ 0,
+ 485,
+ 0,
+ 0,
+ 0,
+ 486,
+ 0,
+ 0,
+ 488,
+ 491,
+ 492,
+ 0,
+ 0,
+ 497,
+ 499,
+ 500,
+ 0,
+ 501,
+ 0,
+ 0,
+ 0,
+ 505,
+ 0,
+ 0,
+ 506,
+ 0,
+ 0,
+ 0,
+ 507,
+ 0,
+ 0,
+ 0,
+ 509,
+ 0,
+ 0,
+ 0,
+ 0,
+ 511,
+ 512,
+ 519,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 529,
+ 530,
+ 0,
+ 0,
+ 0,
+ 534,
+ 0,
+ 0,
+ 0,
+ 0,
+ 543,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 553,
+ 0,
+ 0,
+ 0,
+ 0,
+ 557,
+ 560,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 561,
+ 0,
+ 564,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 565,
+ 566,
+ 0,
+ 575,
+ 0,
+ 619,
+ 0,
+ 620,
+ 0,
+ 0,
+ 623,
+ 624,
+ 0,
+ 0,
+ 0,
+ 625,
+ 0,
+ 0,
+ 626,
+ 627,
+ 0,
+ 0,
+ 628,
+ 0,
+ 0,
+ 0,
+ 0,
+ 630,
+ 0,
+ 631,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 641,
+ 0,
+ 0,
+ 0,
+ 0,
+ 643,
+ 656,
+ 668,
+ 0,
+ 0,
+ 0,
+ 673,
+ 0,
+ 0,
+ 0,
+ 674,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 682,
+ 0,
+ 687,
+ 0,
+ 690,
+ 0,
+ 693,
+ 699,
+ 700,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 704,
+ 705,
+ 0,
+ 0,
+ 0,
+ 0,
+ 707,
+ 710,
+ 0,
+ 711,
+ 0,
+ 0,
+ 0,
+ 0,
+ 726,
+ 0,
+ 0,
+ 729,
+ 0,
+ 0,
+ 0,
+ 730,
+ 731,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 752,
+ 0,
+ 0,
+ 0,
+ 762,
+ 0,
+ 763,
+ 0,
+ 0,
+ 767,
+ 0,
+ 0,
+ 0,
+ 770,
+ 774,
+ 0,
+ 0,
+ 775,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 776,
+ 0,
+ 0,
+ 0,
+ 777,
+ 783,
+ 0,
+ 0,
+ 0,
+ 785,
+ 788,
+ 0,
+ 0,
+ 0,
+ 0,
+ 790,
+ 0,
+ 0,
+ 0,
+ 793,
+ 0,
+ 0,
+ 0,
+ 0,
+ 794,
+ 0,
+ 0,
+ 804,
+ 819,
+ 821,
+ 0,
+ 827,
+ 0,
+ 0,
+ 0,
+ 834,
+ 0,
+ 0,
+ 835,
+ 0,
+ 0,
+ 0,
+ 841,
+ 0,
+ 844,
+ 0,
+ 850,
+ 851,
+ 859,
+ 0,
+ 860,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 874,
+ 0,
+ 876,
+ 0,
+ 877,
+ 890,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 893,
+ 894,
+ 898,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 899,
+ 0,
+ 0,
+ 0,
+ 900,
+ 904,
+ 906,
+ 0,
+ 0,
+ 0,
+ 907,
+ 0,
+ 908,
+ 909,
+ 0,
+ 910,
+ 0,
+ 0,
+ 0,
+ 0,
+ 911,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 916,
+ 0,
+ 0,
+ 0,
+ 922,
+ 925,
+ 0,
+ 930,
+ 0,
+ 934,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 943,
+ 0,
+ 0,
+ 944,
+ 0,
+ 953,
+ 954,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 955,
+ 0,
+ 962,
+ 963,
+ 0,
+ 0,
+ 976,
+ 0,
+ 0,
+ 977,
+ 978,
+ 979,
+ 980,
+ 0,
+ 981,
+ 0,
+ 0,
+ 0,
+ 0,
+ 984,
+ 0,
+ 0,
+ 985,
+ 0,
+ 0,
+ 987,
+ 989,
+ 991,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 992,
+ 0,
+ 0,
+ 0,
+ 993,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 996,
+ 0,
+ 0,
+ 0,
+ 1000,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1002,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1005,
+ 1007,
+ 0,
+ 0,
+ 0,
+ 1009,
+ 0,
+ 0,
+ 0,
+ 1010,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1011,
+ 0,
+ 1012,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1014,
+ 1016,
+ 0,
+ 0,
+ 0,
+ 1020,
+ 0,
+ 1021,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1022,
+ 0,
+ 0,
+ 0,
+ 1024,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1025,
+ 0,
+ 0,
+ 1026,
+ 1027,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1031,
+ 0,
+ 1033,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1034,
+ 0,
+ 0,
+ 0,
+ 1037,
+ 1040,
+ 0,
+ 0,
+ 0,
+ 1042,
+ 1043,
+ 0,
+ 0,
+ 1053,
+ 0,
+ 1054,
+ 0,
+ 0,
+ 1057,
+ 0,
+ 0,
+ 0,
+ 1058,
+ 0,
+ 0,
+ 1060,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1061,
+ 0,
+ 0,
+ 1062,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1063,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1064,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1065,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1066,
+ 1067,
+ 0,
+ 0,
+ 0,
+ 1069,
+ 1070,
+ 1072,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1073,
+ 0,
+ 1075,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1080,
+ 1084,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1088,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1094,
+ 0,
+ 1095,
+ 0,
+ 1107,
+ 0,
+ 0,
+ 0,
+ 1112,
+ 1114,
+ 0,
+ 1119,
+ 0,
+ 1122,
+ 0,
+ 0,
+ 1126,
+ 0,
+ 1129,
+ 0,
+ 1130,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1132,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1144,
+ 0,
+ 0,
+ 1145,
+ 1146,
+ 0,
+ 1148,
+ 1149,
+ 0,
+ 0,
+ 1150,
+ 1151,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1152,
+ 0,
+ 1153,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1154,
+ 0,
+ 1163,
+ 0,
+ 0,
+ 0,
+ 1164,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1165,
+ 0,
+ 1167,
+ 0,
+ 1170,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1171,
+ 1172,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1173,
+ 1175,
+ 1177,
+ 0,
+ 1186,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1195,
+ 0,
+ 0,
+ 1221,
+ 0,
+ 0,
+ 1224,
+ 0,
+ 0,
+ 1227,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1228,
+ 1229,
+ 0,
+ 0,
+ 1230,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1231,
+ 0,
+ 0,
+ 0,
+ 1233,
+ 0,
+ 0,
+ 1243,
+ 1244,
+ 1246,
+ 1248,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1254,
+ 1255,
+ 1258,
+ 1259,
+ 0,
+ 0,
+ 0,
+ 1260,
+ 0,
+ 0,
+ 1261,
+ 0,
+ 0,
+ 0,
+ 1262,
+ 1264,
+ 0,
+ 0,
+ 1265,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1266,
+ 0,
+ 1267,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1273,
+ 1274,
+ 1276,
+ 1289,
+ 0,
+ 0,
+ 1291,
+ 1292,
+ 1293,
+ 0,
+ 0,
+ 1294,
+ 1295,
+ 1296,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1302,
+ 0,
+ 1304,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1311,
+ 1312,
+ 0,
+ 1314,
+ 0,
+ 1316,
+ 1320,
+ 1321,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1322,
+ 1323,
+ 1324,
+ 0,
+ 1335,
+ 0,
+ 1336,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1341,
+ 1342,
+ 0,
+ 1346,
+ 0,
+ 1357,
+ 0,
+ 0,
+ 0,
+ 1358,
+ 1360,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1361,
+ 0,
+ 0,
+ 0,
+ 1362,
+ 1365,
+ 0,
+ 1366,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1379,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1386,
+ 0,
+ 1388,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1395,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1403,
+ 0,
+ 1405,
+ 0,
+ 0,
+ 1407,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1408,
+ 1409,
+ 0,
+ 1410,
+ 0,
+ 0,
+ 0,
+ 1412,
+ 1413,
+ 1416,
+ 0,
+ 0,
+ 1429,
+ 1451,
+ 0,
+ 0,
+ 1454,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1455,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1456,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1459,
+ 1460,
+ 1461,
+ 1475,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1477,
+ 0,
+ 1480,
+ 0,
+ 1481,
+ 0,
+ 0,
+ 1486,
+ 0,
+ 0,
+ 1495,
+ 0,
+ 0,
+ 0,
+ 1496,
+ 0,
+ 0,
+ 1498,
+ 1499,
+ 1501,
+ 1520,
+ 1521,
+ 0,
+ 0,
+ 0,
+ 1526,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1528,
+ 1529,
+ 0,
+ 1533,
+ 1536,
+ 0,
+ 0,
+ 0,
+ 1537,
+ 1538,
+ 1549,
+ 0,
+ 1550,
+ 1558,
+ 1559,
+ 1572,
+ 0,
+ 1573,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1575,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1579,
+ 0,
+ 1599,
+ 0,
+ 1603,
+ 0,
+ 1604,
+ 0,
+ 1605,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1608,
+ 1610,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1611,
+ 0,
+ 1615,
+ 0,
+ 1616,
+ 1618,
+ 0,
+ 1619,
+ 0,
+ 0,
+ 1622,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1634,
+ 0,
+ 0,
+ 0,
+ 1635,
+ 0,
+ 0,
+ 0,
+ 1641,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1643,
+ 0,
+ 0,
+ 0,
+ 1650,
+ 0,
+ 0,
+ 1652,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1653,
+ 0,
+ 0,
+ 0,
+ 1654,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1655,
+ 0,
+ 1662,
+ 0,
+ 0,
+ 1663,
+ 1664,
+ 0,
+ 0,
+ 1668,
+ 0,
+ 0,
+ 1669,
+ 1670,
+ 0,
+ 1672,
+ 1673,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1674,
+ 0,
+ 0,
+ 0,
+ 1675,
+ 1676,
+ 1680,
+ 0,
+ 1682,
+ 0,
+ 0,
+ 1687,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1704,
+ 0,
+ 0,
+ 1705,
+ 0,
+ 0,
+ 1721,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1734,
+ 1735,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1737,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1739,
+ 0,
+ 0,
+ 1740,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1741,
+ 1743,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1745,
+ 0,
+ 0,
+ 0,
+ 1749,
+ 0,
+ 0,
+ 0,
+ 1751,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1760,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1765,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1784,
+ 0,
+ 1785,
+ 1787,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1788,
+ 1789,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1790,
+ 1791,
+ 1793,
+ 0,
+ 1798,
+ 1799,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1801,
+ 0,
+ 1803,
+ 1805,
+ 0,
+ 0,
+ 0,
+ 1806,
+ 1811,
+ 0,
+ 1812,
+ 1814,
+ 0,
+ 1821,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1822,
+ 1833,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1848,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1857,
+ 0,
+ 0,
+ 0,
+ 1859,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1861,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1866,
+ 0,
+ 1921,
+ 1925,
+ 0,
+ 0,
+ 0,
+ 1929,
+ 1930,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1931,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1932,
+ 0,
+ 0,
+ 0,
+ 1934,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1946,
+ 0,
+ 0,
+ 1948,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1950,
+ 0,
+ 1957,
+ 0,
+ 1958,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1965,
+ 1967,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1968,
+ 0,
+ 1969,
+ 0,
+ 1971,
+ 1972,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1973,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1975,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1976,
+ 1979,
+ 0,
+ 1982,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1984,
+ 1988,
+ 0,
+ 0,
+ 0,
+ 0,
+ 1990,
+ 2004,
+ 2008,
+ 0,
+ 0,
+ 0,
+ 2012,
+ 2013,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2015,
+ 0,
+ 2016,
+ 2017,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2021,
+ 0,
+ 0,
+ 2025,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2029,
+ 2036,
+ 2040,
+ 0,
+ 2042,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2043,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2045,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2046,
+ 2047,
+ 0,
+ 2048,
+ 2049,
+ 0,
+ 2059,
+ 0,
+ 0,
+ 2063,
+ 0,
+ 2064,
+ 2065,
+ 0,
+ 0,
+ 2066,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2069,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2070,
+ 0,
+ 2071,
+ 0,
+ 2072,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2080,
+ 2082,
+ 2083,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2085,
+ 0,
+ 2086,
+ 2088,
+ 2089,
+ 2105,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2107,
+ 0,
+ 0,
+ 2116,
+ 2117,
+ 0,
+ 2120,
+ 0,
+ 0,
+ 2122,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2123,
+ 0,
+ 0,
+ 2125,
+ 2127,
+ 2128,
+ 0,
+ 0,
+ 0,
+ 2130,
+ 0,
+ 0,
+ 0,
+ 2137,
+ 2139,
+ 2140,
+ 2141,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2144,
+ 2145,
+ 0,
+ 0,
+ 2146,
+ 2149,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2150,
+ 0,
+ 0,
+ 2151,
+ 2158,
+ 0,
+ 2159,
+ 0,
+ 2160,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2161,
+ 2162,
+ 0,
+ 0,
+ 2194,
+ 2202,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2205,
+ 2217,
+ 0,
+ 2220,
+ 0,
+ 2221,
+ 0,
+ 2222,
+ 2224,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2237,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2238,
+ 0,
+ 2239,
+ 2241,
+ 0,
+ 0,
+ 2242,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2243,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2252,
+ 0,
+ 0,
+ 2253,
+ 0,
+ 0,
+ 0,
+ 2257,
+ 2258,
+ 0,
+ 0,
+ 0,
+ 2260,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2262,
+ 0,
+ 2264,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2269,
+ 2270,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2271,
+ 0,
+ 2273,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2277,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2278,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2279,
+ 0,
+ 2280,
+ 0,
+ 2283,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2287,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2289,
+ 2290,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2291,
+ 0,
+ 2292,
+ 0,
+ 0,
+ 0,
+ 2293,
+ 2295,
+ 2296,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2298,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2303,
+ 0,
+ 2305,
+ 0,
+ 0,
+ 2306,
+ 0,
+ 2307,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2313,
+ 2314,
+ 2315,
+ 2316,
+ 0,
+ 0,
+ 2318,
+ 0,
+ 2319,
+ 0,
+ 2322,
+ 0,
+ 0,
+ 2323,
+ 0,
+ 2324,
+ 0,
+ 2326,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2335,
+ 0,
+ 2336,
+ 2338,
+ 2339,
+ 0,
+ 2340,
+ 0,
+ 0,
+ 0,
+ 2355,
+ 0,
+ 2375,
+ 0,
+ 2382,
+ 2386,
+ 0,
+ 2387,
+ 0,
+ 0,
+ 2394,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2395,
+ 0,
+ 2397,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2398,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2399,
+ 2402,
+ 2404,
+ 2408,
+ 2411,
+ 0,
+ 0,
+ 0,
+ 2413,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2415,
+ 0,
+ 0,
+ 2416,
+ 2417,
+ 2419,
+ 0,
+ 2420,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2425,
+ 0,
+ 0,
+ 0,
+ 2426,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2427,
+ 2428,
+ 0,
+ 2429,
+ 0,
+ 0,
+ 2430,
+ 2434,
+ 0,
+ 2436,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2441,
+ 2442,
+ 0,
+ 2445,
+ 0,
+ 0,
+ 2446,
+ 2457,
+ 0,
+ 2459,
+ 0,
+ 0,
+ 2462,
+ 0,
+ 2464,
+ 0,
+ 2477,
+ 0,
+ 2478,
+ 2486,
+ 0,
+ 0,
+ 0,
+ 2491,
+ 0,
+ 0,
+ 2493,
+ 0,
+ 0,
+ 2494,
+ 0,
+ 2495,
+ 0,
+ 2513,
+ 2523,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2524,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2528,
+ 2529,
+ 2530,
+ 0,
+ 0,
+ 2531,
+ 0,
+ 2533,
+ 0,
+ 0,
+ 2534,
+ 2535,
+ 0,
+ 2536,
+ 2537,
+ 0,
+ 2538,
+ 0,
+ 2539,
+ 2540,
+ 0,
+ 0,
+ 0,
+ 2545,
+ 2546,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2548,
+ 0,
+ 0,
+ 2549,
+ 0,
+ 2550,
+ 2555,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2557,
+ 0,
+ 2560,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2561,
+ 0,
+ 2576,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2577,
+ 2578,
+ 0,
+ 0,
+ 0,
+ 2579,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2580,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2581,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2583,
+ 0,
+ 2584,
+ 0,
+ 2588,
+ 2590,
+ 0,
+ 0,
+ 0,
+ 2591,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2593,
+ 2594,
+ 0,
+ 2595,
+ 0,
+ 2601,
+ 2602,
+ 0,
+ 0,
+ 2603,
+ 0,
+ 2605,
+ 0,
+ 0,
+ 0,
+ 2606,
+ 2607,
+ 2611,
+ 0,
+ 2615,
+ 0,
+ 0,
+ 0,
+ 2617,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2619,
+ 0,
+ 0,
+ 2620,
+ 0,
+ 0,
+ 0,
+ 2621,
+ 0,
+ 2623,
+ 0,
+ 2625,
+ 0,
+ 0,
+ 2628,
+ 2629,
+ 0,
+ 0,
+ 2635,
+ 2636,
+ 2637,
+ 0,
+ 0,
+ 2639,
+ 0,
+ 0,
+ 0,
+ 2642,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2643,
+ 0,
+ 2644,
+ 0,
+ 2649,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2655,
+ 2656,
+ 0,
+ 0,
+ 2657,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2658,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2659,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2664,
+ 2685,
+ 0,
+ 2687,
+ 0,
+ 2688,
+ 0,
+ 0,
+ 2689,
+ 0,
+ 0,
+ 2694,
+ 0,
+ 2695,
+ 0,
+ 0,
+ 2698,
+ 0,
+ 2701,
+ 2706,
+ 0,
+ 0,
+ 0,
+ 2707,
+ 0,
+ 2709,
+ 2710,
+ 2711,
+ 0,
+ 0,
+ 0,
+ 2720,
+ 2730,
+ 2735,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2738,
+ 2740,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2747,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2748,
+ 0,
+ 0,
+ 2749,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2750,
+ 0,
+ 0,
+ 2752,
+ 2754,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2758,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2762,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2763,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2764,
+ 2767,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2768,
+ 0,
+ 0,
+ 2770,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2771,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2772,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2773,
+ 2776,
+ 0,
+ 0,
+ 2783,
+ 0,
+ 0,
+ 2784,
+ 0,
+ 2789,
+ 0,
+ 2790,
+ 0,
+ 0,
+ 0,
+ 2792,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2793,
+ 2795,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2796,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2797,
+ 2799,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2803,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2806,
+ 0,
+ 2807,
+ 2808,
+ 2817,
+ 2819,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2821,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2822,
+ 2823,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2824,
+ 0,
+ 0,
+ 2828,
+ 0,
+ 2834,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2836,
+ 0,
+ 2838,
+ 0,
+ 0,
+ 2839,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2841,
+ 0,
+ 0,
+ 0,
+ 2842,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2843,
+ 2844,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2846,
+ 0,
+ 0,
+ 2847,
+ 0,
+ 2849,
+ 0,
+ 2853,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2857,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2858,
+ 0,
+ 2859,
+ 0,
+ 0,
+ 2860,
+ 0,
+ 2862,
+ 2868,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2875,
+ 0,
+ 2876,
+ 0,
+ 0,
+ 2877,
+ 2878,
+ 2884,
+ 2889,
+ 2890,
+ 0,
+ 0,
+ 2891,
+ 0,
+ 0,
+ 2892,
+ 0,
+ 0,
+ 0,
+ 2906,
+ 2912,
+ 0,
+ 2913,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2916,
+ 0,
+ 2934,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2935,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2939,
+ 0,
+ 2940,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2941,
+ 0,
+ 0,
+ 0,
+ 2946,
+ 0,
+ 2949,
+ 0,
+ 0,
+ 2950,
+ 2954,
+ 2955,
+ 0,
+ 0,
+ 0,
+ 2959,
+ 2961,
+ 0,
+ 0,
+ 2962,
+ 0,
+ 2963,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2964,
+ 2965,
+ 2966,
+ 2967,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2969,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2970,
+ 2975,
+ 0,
+ 2982,
+ 2983,
+ 2984,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2989,
+ 0,
+ 0,
+ 2990,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2991,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2998,
+ 0,
+ 3000,
+ 3001,
+ 0,
+ 0,
+ 3002,
+ 0,
+ 0,
+ 0,
+ 3003,
+ 0,
+ 0,
+ 3012,
+ 0,
+ 0,
+ 3022,
+ 0,
+ 0,
+ 3024,
+ 0,
+ 0,
+ 3025,
+ 3027,
+ 0,
+ 0,
+ 0,
+ 3030,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3034,
+ 3035,
+ 0,
+ 0,
+ 3036,
+ 0,
+ 3039,
+ 0,
+ 3049,
+ 0,
+ 0,
+ 3050,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3051,
+ 0,
+ 3053,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3057,
+ 0,
+ 3058,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3063,
+ 0,
+ 0,
+ 3073,
+ 3074,
+ 3078,
+ 3079,
+ 0,
+ 3080,
+ 3086,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3087,
+ 0,
+ 3092,
+ 0,
+ 3095,
+ 0,
+ 3099,
+ 0,
+ 0,
+ 0,
+ 3100,
+ 0,
+ 3101,
+ 3102,
+ 0,
+ 3122,
+ 0,
+ 0,
+ 0,
+ 3124,
+ 0,
+ 3125,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3132,
+ 3134,
+ 0,
+ 0,
+ 3136,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3147,
+ 0,
+ 0,
+ 3149,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3150,
+ 3151,
+ 3152,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3158,
+ 0,
+ 0,
+ 3160,
+ 0,
+ 0,
+ 3161,
+ 0,
+ 0,
+ 3162,
+ 0,
+ 3163,
+ 3166,
+ 3168,
+ 0,
+ 0,
+ 3169,
+ 3170,
+ 0,
+ 0,
+ 3171,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3182,
+ 0,
+ 3184,
+ 0,
+ 0,
+ 3188,
+ 0,
+ 0,
+ 3194,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3204,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3209,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3216,
+ 3217,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3219,
+ 0,
+ 0,
+ 3220,
+ 3222,
+ 0,
+ 3223,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3224,
+ 0,
+ 3225,
+ 3226,
+ 0,
+ 3228,
+ 3233,
+ 0,
+ 3239,
+ 3241,
+ 3242,
+ 0,
+ 0,
+ 3251,
+ 3252,
+ 3253,
+ 3255,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3260,
+ 0,
+ 0,
+ 3261,
+ 0,
+ 0,
+ 0,
+ 3267,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3271,
+ 0,
+ 0,
+ 0,
+ 3278,
+ 0,
+ 3282,
+ 0,
+ 0,
+ 0,
+ 3284,
+ 0,
+ 0,
+ 0,
+ 3285,
+ 3286,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3287,
+ 3292,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3294,
+ 3296,
+ 0,
+ 0,
+ 3299,
+ 3300,
+ 3301,
+ 0,
+ 3302,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3304,
+ 3306,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3308,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3311,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3312,
+ 3314,
+ 3315,
+ 0,
+ 3318,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3319,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3321,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3322,
+ 0,
+ 0,
+ 3324,
+ 3325,
+ 0,
+ 0,
+ 3326,
+ 0,
+ 0,
+ 3328,
+ 3329,
+ 3331,
+ 0,
+ 0,
+ 3335,
+ 0,
+ 0,
+ 3337,
+ 0,
+ 3338,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3343,
+ 3347,
+ 0,
+ 0,
+ 0,
+ 3348,
+ 0,
+ 0,
+ 3351,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3354,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3355,
+ 0,
+ 0,
+ 3365,
+ 3366,
+ 3367,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3368,
+ 3369,
+ 0,
+ 3370,
+ 0,
+ 0,
+ 3373,
+ 0,
+ 0,
+ 3376,
+ 0,
+ 0,
+ 3377,
+ 0,
+ 3379,
+ 3387,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3390,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3402,
+ 0,
+ 3403,
+ 3436,
+ 3437,
+ 3439,
+ 0,
+ 0,
+ 3441,
+ 0,
+ 0,
+ 0,
+ 3442,
+ 0,
+ 0,
+ 3449,
+ 0,
+ 0,
+ 0,
+ 3450,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3451,
+ 0,
+ 0,
+ 3452,
+ 0,
+ 3453,
+ 3456,
+ 0,
+ 3457,
+ 0,
+ 0,
+ 3458,
+ 0,
+ 3459,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3460,
+ 0,
+ 0,
+ 3469,
+ 3470,
+ 0,
+ 0,
+ 3475,
+ 0,
+ 0,
+ 0,
+ 3480,
+ 3487,
+ 3489,
+ 0,
+ 3490,
+ 0,
+ 0,
+ 3491,
+ 3499,
+ 0,
+ 3500,
+ 0,
+ 0,
+ 3501,
+ 0,
+ 0,
+ 0,
+ 3502,
+ 0,
+ 3514,
+ 0,
+ 0,
+ 0,
+ 3516,
+ 3517,
+ 0,
+ 0,
+ 0,
+ 3518,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3520,
+ 3521,
+ 3522,
+ 0,
+ 0,
+ 3526,
+ 3530,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3531,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3536,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3539,
+ 3541,
+ 0,
+ 0,
+ 3542,
+ 3544,
+ 0,
+ 3547,
+ 3548,
+ 0,
+ 0,
+ 3550,
+ 0,
+ 3553,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3554,
+ 0,
+ 3555,
+ 0,
+ 3558,
+ 0,
+ 3559,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3563,
+ 0,
+ 3581,
+ 0,
+ 0,
+ 0,
+ 3599,
+ 0,
+ 0,
+ 0,
+ 3600,
+ 0,
+ 3601,
+ 0,
+ 3602,
+ 3603,
+ 0,
+ 0,
+ 3606,
+ 3608,
+ 0,
+ 3610,
+ 3611,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3612,
+ 3616,
+ 3619,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3624,
+ 3628,
+ 0,
+ 3629,
+ 3634,
+ 3635,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3636,
+ 0,
+ 3637,
+ 0,
+ 0,
+ 3638,
+ 3651,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3652,
+ 3653,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3656,
+ 3657,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3658,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3659,
+ 0,
+ 3661,
+ 3663,
+ 3664,
+ 0,
+ 3665,
+ 0,
+ 3692,
+ 0,
+ 0,
+ 0,
+ 3694,
+ 3696,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3698,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3700,
+ 0,
+ 0,
+ 3701,
+ 0,
+ 0,
+ 0,
+ 3708,
+ 3709,
+ 0,
+ 0,
+ 0,
+ 3711,
+ 3712,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3723,
+ 0,
+ 3724,
+ 3725,
+ 0,
+ 0,
+ 3726,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3728,
+ 3729,
+ 0,
+ 3734,
+ 3735,
+ 3737,
+ 0,
+ 0,
+ 0,
+ 3743,
+ 0,
+ 3745,
+ 0,
+ 0,
+ 3746,
+ 0,
+ 0,
+ 3747,
+ 3748,
+ 0,
+ 3757,
+ 0,
+ 3759,
+ 3766,
+ 3767,
+ 0,
+ 3768,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3769,
+ 0,
+ 0,
+ 3771,
+ 0,
+ 3774,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3775,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3776,
+ 0,
+ 3777,
+ 3786,
+ 0,
+ 3788,
+ 3789,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3791,
+ 0,
+ 3811,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3814,
+ 3815,
+ 3816,
+ 3820,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3821,
+ 0,
+ 0,
+ 3825,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3835,
+ 0,
+ 0,
+ 3848,
+ 3849,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3850,
+ 3851,
+ 3853,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3859,
+ 0,
+ 3860,
+ 3862,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3863,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3873,
+ 0,
+ 3874,
+ 0,
+ 3875,
+ 3886,
+ 0,
+ 3887,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3892,
+ 3913,
+ 0,
+ 3914,
+ 0,
+ 0,
+ 0,
+ 3925,
+ 3931,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3934,
+ 3941,
+ 3942,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3943,
+ 0,
+ 0,
+ 0,
+ 3944,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3945,
+ 0,
+ 3947,
+ 0,
+ 0,
+ 0,
+ 3956,
+ 3957,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3958,
+ 0,
+ 3959,
+ 3965,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3966,
+ 0,
+ 0,
+ 0,
+ 3967,
+ 0,
+ 0,
+ 0,
+ 3968,
+ 3974,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3975,
+ 3977,
+ 3978,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3980,
+ 0,
+ 3985,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 3986,
+ 4011,
+ 0,
+ 0,
+ 4017,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4018,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4019,
+ 0,
+ 4023,
+ 0,
+ 0,
+ 0,
+ 4027,
+ 4028,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4031,
+ 4034,
+ 0,
+ 0,
+ 4035,
+ 4037,
+ 4039,
+ 4040,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4059,
+ 0,
+ 4060,
+ 4061,
+ 0,
+ 4062,
+ 4063,
+ 4066,
+ 0,
+ 0,
+ 4072,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4088,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4091,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4094,
+ 4095,
+ 0,
+ 0,
+ 4096,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4098,
+ 4099,
+ 0,
+ 0,
+ 0,
+ 4101,
+ 0,
+ 4104,
+ 0,
+ 0,
+ 0,
+ 4105,
+ 4108,
+ 0,
+ 4113,
+ 0,
+ 0,
+ 4115,
+ 4116,
+ 0,
+ 4126,
+ 0,
+ 0,
+ 4127,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4128,
+ 4132,
+ 4133,
+ 0,
+ 4134,
+ 0,
+ 0,
+ 0,
+ 4137,
+ 0,
+ 0,
+ 4141,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4144,
+ 4146,
+ 4147,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4148,
+ 0,
+ 0,
+ 4311,
+ 0,
+ 0,
+ 0,
+ 4314,
+ 4329,
+ 0,
+ 4331,
+ 4332,
+ 0,
+ 4333,
+ 0,
+ 4334,
+ 0,
+ 0,
+ 0,
+ 4335,
+ 0,
+ 4336,
+ 0,
+ 0,
+ 0,
+ 4337,
+ 0,
+ 0,
+ 0,
+ 4342,
+ 4345,
+ 4346,
+ 4350,
+ 0,
+ 4351,
+ 4352,
+ 0,
+ 4354,
+ 4355,
+ 0,
+ 0,
+ 4364,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4369,
+ 0,
+ 0,
+ 0,
+ 4373,
+ 0,
+ 4374,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4377,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4378,
+ 0,
+ 0,
+ 0,
+ 4380,
+ 0,
+ 0,
+ 0,
+ 4381,
+ 4382,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4384,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4385,
+ 0,
+ 0,
+ 0,
+ 4386,
+ 0,
+ 0,
+ 0,
+ 4391,
+ 4398,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4407,
+ 4409,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4410,
+ 0,
+ 0,
+ 4411,
+ 0,
+ 4414,
+ 4415,
+ 4418,
+ 0,
+ 4427,
+ 4428,
+ 4430,
+ 0,
+ 4431,
+ 0,
+ 4448,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4449,
+ 0,
+ 0,
+ 0,
+ 4451,
+ 4452,
+ 0,
+ 4453,
+ 4454,
+ 0,
+ 4456,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4459,
+ 0,
+ 4463,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4466,
+ 0,
+ 4467,
+ 0,
+ 4469,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4470,
+ 4471,
+ 0,
+ 4473,
+ 0,
+ 0,
+ 4475,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4477,
+ 4478,
+ 0,
+ 0,
+ 0,
+ 4479,
+ 4481,
+ 0,
+ 4482,
+ 0,
+ 4484,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4486,
+ 0,
+ 0,
+ 4488,
+ 0,
+ 0,
+ 4497,
+ 0,
+ 4508,
+ 0,
+ 0,
+ 4510,
+ 4511,
+ 0,
+ 4520,
+ 4523,
+ 0,
+ 4524,
+ 0,
+ 4525,
+ 0,
+ 4527,
+ 0,
+ 0,
+ 4528,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4530,
+ 0,
+ 4531,
+ 0,
+ 0,
+ 4532,
+ 0,
+ 0,
+ 0,
+ 4533,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4535,
+ 0,
+ 0,
+ 0,
+ 4536,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4541,
+ 4543,
+ 4544,
+ 4545,
+ 4547,
+ 0,
+ 4548,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4550,
+ 4551,
+ 0,
+ 4553,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4562,
+ 0,
+ 0,
+ 4571,
+ 0,
+ 0,
+ 0,
+ 4574,
+ 0,
+ 0,
+ 0,
+ 4575,
+ 0,
+ 4576,
+ 0,
+ 4577,
+ 0,
+ 0,
+ 0,
+ 4581,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4582,
+ 0,
+ 0,
+ 4586,
+ 0,
+ 0,
+ 0,
+ 4588,
+ 0,
+ 0,
+ 4597,
+ 0,
+ 4598,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4616,
+ 4617,
+ 0,
+ 4618,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4619,
+ 0,
+ 4620,
+ 0,
+ 0,
+ 4621,
+ 0,
+ 4624,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4625,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4657,
+ 0,
+ 4659,
+ 0,
+ 4667,
+ 0,
+ 0,
+ 0,
+ 4668,
+ 4670,
+ 0,
+ 4672,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4673,
+ 4676,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4687,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4697,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4699,
+ 0,
+ 4701,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4702,
+ 0,
+ 0,
+ 4706,
+ 0,
+ 0,
+ 4713,
+ 0,
+ 0,
+ 0,
+ 4714,
+ 4715,
+ 4716,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4717,
+ 0,
+ 0,
+ 4720,
+ 0,
+ 4721,
+ 4729,
+ 4735,
+ 0,
+ 0,
+ 0,
+ 4737,
+ 0,
+ 0,
+ 0,
+ 4739,
+ 0,
+ 0,
+ 0,
+ 4740,
+ 0,
+ 0,
+ 0,
+ 4741,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4742,
+ 0,
+ 4745,
+ 4746,
+ 4747,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4748,
+ 0,
+ 0,
+ 0,
+ 4749,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4751,
+ 4786,
+ 0,
+ 4787,
+ 0,
+ 4788,
+ 4796,
+ 0,
+ 0,
+ 4797,
+ 4798,
+ 0,
+ 4799,
+ 4806,
+ 4807,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4809,
+ 4810,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4811,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4812,
+ 0,
+ 4813,
+ 0,
+ 0,
+ 4815,
+ 0,
+ 4821,
+ 4822,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4823,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4824,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4826,
+ 0,
+ 0,
+ 0,
+ 4828,
+ 0,
+ 4829,
+ 0,
+ 0,
+ 0,
+ 4843,
+ 0,
+ 0,
+ 4847,
+ 0,
+ 4853,
+ 4855,
+ 4858,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4859,
+ 0,
+ 4864,
+ 0,
+ 0,
+ 4879,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4880,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4881,
+ 0,
+ 4882,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4883,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4884,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4886,
+ 4887,
+ 4888,
+ 4894,
+ 4896,
+ 0,
+ 4902,
+ 0,
+ 0,
+ 4905,
+ 0,
+ 0,
+ 4915,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4916,
+ 4917,
+ 4919,
+ 4921,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4926,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4927,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 4929,
+ 0,
+ 4930,
+ 4931,
+ 0,
+ 4938,
+ 0,
+ 4952,
+ 0,
+ 4953,
+ 4957,
+ 4960,
+ 4964,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5019,
+ 5020,
+ 5022,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5023,
+ 0,
+ 0,
+ 0,
+ 5024,
+ 0,
+ 0,
+ 0,
+ 5025,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5028,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5029,
+ 5030,
+ 5031,
+ 0,
+ 5033,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5034,
+ 5035,
+ 0,
+ 5036,
+ 0,
+ 0,
+ 5037,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5038,
+ 0,
+ 0,
+ 5039,
+ 0,
+ 0,
+ 0,
+ 5041,
+ 5042,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5044,
+ 5049,
+ 5054,
+ 0,
+ 5055,
+ 0,
+ 5057,
+ 0,
+ 0,
+ 0,
+ 5060,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5063,
+ 0,
+ 5064,
+ 5065,
+ 0,
+ 5067,
+ 0,
+ 0,
+ 0,
+ 5068,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5076,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5077,
+ 0,
+ 0,
+ 5078,
+ 5080,
+ 0,
+ 0,
+ 5083,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5085,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5098,
+ 5099,
+ 5101,
+ 5105,
+ 5107,
+ 0,
+ 5108,
+ 0,
+ 5109,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5110,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5117,
+ 5118,
+ 0,
+ 5121,
+ 0,
+ 5122,
+ 0,
+ 0,
+ 5130,
+ 0,
+ 0,
+ 0,
+ 5137,
+ 0,
+ 0,
+ 0,
+ 5148,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5151,
+ 5154,
+ 0,
+ 0,
+ 0,
+ 5155,
+ 0,
+ 0,
+ 5156,
+ 5159,
+ 5161,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5162,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5163,
+ 5164,
+ 0,
+ 5166,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5167,
+ 0,
+ 0,
+ 0,
+ 5172,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5178,
+ 5179,
+ 0,
+ 0,
+ 5190,
+ 0,
+ 0,
+ 5191,
+ 5192,
+ 5194,
+ 0,
+ 0,
+ 5198,
+ 5201,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5203,
+ 0,
+ 5206,
+ 5209,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5213,
+ 0,
+ 5214,
+ 5216,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5217,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5218,
+ 5219,
+ 0,
+ 5231,
+ 0,
+ 0,
+ 5244,
+ 5249,
+ 0,
+ 5254,
+ 0,
+ 5255,
+ 0,
+ 0,
+ 5257,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5258,
+ 0,
+ 5260,
+ 5270,
+ 0,
+ 5277,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5280,
+ 5281,
+ 5282,
+ 5283,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5284,
+ 0,
+ 5285,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5287,
+ 5288,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5289,
+ 5291,
+ 0,
+ 0,
+ 5294,
+ 0,
+ 0,
+ 5295,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5304,
+ 0,
+ 0,
+ 5306,
+ 5307,
+ 5308,
+ 0,
+ 5309,
+ 0,
+ 0,
+ 5310,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5311,
+ 5312,
+ 0,
+ 5313,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5316,
+ 0,
+ 0,
+ 0,
+ 5317,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5325,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5326,
+ 0,
+ 5327,
+ 5329,
+ 0,
+ 5332,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5338,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5340,
+ 0,
+ 0,
+ 5341,
+ 0,
+ 0,
+ 0,
+ 5342,
+ 0,
+ 5343,
+ 5344,
+ 0,
+ 0,
+ 5345,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5347,
+ 5348,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5349,
+ 0,
+ 5350,
+ 0,
+ 5354,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5358,
+ 0,
+ 0,
+ 5359,
+ 0,
+ 0,
+ 5361,
+ 0,
+ 0,
+ 5365,
+ 0,
+ 5367,
+ 0,
+ 5373,
+ 0,
+ 0,
+ 0,
+ 5379,
+ 0,
+ 0,
+ 0,
+ 5380,
+ 0,
+ 0,
+ 0,
+ 5382,
+ 0,
+ 5384,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5385,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5387,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5388,
+ 5390,
+ 5393,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5396,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5397,
+ 5402,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5403,
+ 0,
+ 0,
+ 0,
+ 5404,
+ 5405,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5406,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5410,
+ 0,
+ 0,
+ 5411,
+ 0,
+ 5415,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5416,
+ 5434,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5438,
+ 0,
+ 5440,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5441,
+ 5442,
+ 0,
+ 0,
+ 0,
+ 5443,
+ 5444,
+ 5447,
+ 0,
+ 0,
+ 5448,
+ 5449,
+ 5451,
+ 0,
+ 0,
+ 0,
+ 5456,
+ 5457,
+ 0,
+ 0,
+ 0,
+ 5459,
+ 0,
+ 0,
+ 0,
+ 5461,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5464,
+ 0,
+ 5466,
+ 0,
+ 0,
+ 5467,
+ 0,
+ 5470,
+ 0,
+ 0,
+ 5473,
+ 0,
+ 0,
+ 5474,
+ 0,
+ 0,
+ 5476,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5477,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5484,
+ 0,
+ 0,
+ 5485,
+ 5486,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5488,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5489,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5507,
+ 0,
+ 0,
+ 0,
+ 5510,
+ 0,
+ 5511,
+ 0,
+ 0,
+ 5512,
+ 0,
+ 0,
+ 0,
+ 5513,
+ 0,
+ 5515,
+ 0,
+ 0,
+ 5516,
+ 5517,
+ 0,
+ 5518,
+ 0,
+ 0,
+ 5522,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5534,
+ 5535,
+ 0,
+ 0,
+ 5536,
+ 0,
+ 5538,
+ 0,
+ 0,
+ 5543,
+ 0,
+ 5544,
+ 0,
+ 0,
+ 5545,
+ 0,
+ 5547,
+ 0,
+ 5557,
+ 0,
+ 0,
+ 5558,
+ 0,
+ 5560,
+ 5567,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5568,
+ 0,
+ 0,
+ 0,
+ 5571,
+ 5573,
+ 0,
+ 5574,
+ 0,
+ 5575,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5577,
+ 0,
+ 0,
+ 5598,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5600,
+ 5609,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5610,
+ 0,
+ 0,
+ 5612,
+ 0,
+ 5624,
+ 0,
+ 5625,
+ 0,
+ 0,
+ 0,
+ 5629,
+ 0,
+ 5641,
+ 0,
+ 5642,
+ 5643,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5651,
+ 0,
+ 0,
+ 0,
+ 5652,
+ 5653,
+ 0,
+ 5661,
+ 5662,
+ 5678,
+ 0,
+ 5679,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5685,
+ 5686,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5690,
+ 5692,
+ 0,
+ 5703,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5706,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5707,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5708,
+ 0,
+ 0,
+ 5709,
+ 0,
+ 5710,
+ 0,
+ 0,
+ 0,
+ 5712,
+ 0,
+ 5733,
+ 0,
+ 5734,
+ 5735,
+ 0,
+ 0,
+ 5744,
+ 5751,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5752,
+ 0,
+ 5754,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5757,
+ 5758,
+ 0,
+ 5760,
+ 5761,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5763,
+ 5764,
+ 5765,
+ 0,
+ 5766,
+ 0,
+ 5767,
+ 5768,
+ 0,
+ 5770,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5776,
+ 5780,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5782,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5784,
+ 0,
+ 0,
+ 5788,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5797,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5799,
+ 0,
+ 0,
+ 5801,
+ 0,
+ 0,
+ 0,
+ 5811,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5816,
+ 0,
+ 0,
+ 5827,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5830,
+ 5831,
+ 0,
+ 0,
+ 5832,
+ 0,
+ 0,
+ 5833,
+ 0,
+ 5835,
+ 5844,
+ 5845,
+ 0,
+ 5846,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5850,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5852,
+ 0,
+ 5855,
+ 5857,
+ 0,
+ 0,
+ 5859,
+ 0,
+ 5861,
+ 0,
+ 0,
+ 5863,
+ 0,
+ 5865,
+ 0,
+ 0,
+ 0,
+ 5873,
+ 5875,
+ 0,
+ 0,
+ 0,
+ 5877,
+ 0,
+ 5879,
+ 0,
+ 0,
+ 0,
+ 5888,
+ 0,
+ 0,
+ 5889,
+ 5891,
+ 0,
+ 5894,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5895,
+ 0,
+ 5897,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5907,
+ 0,
+ 5911,
+ 0,
+ 0,
+ 5912,
+ 0,
+ 5913,
+ 5922,
+ 5924,
+ 0,
+ 5927,
+ 5928,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5929,
+ 5930,
+ 0,
+ 5933,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5949,
+ 0,
+ 0,
+ 5951,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5953,
+ 0,
+ 0,
+ 5954,
+ 0,
+ 5959,
+ 5960,
+ 5961,
+ 0,
+ 5964,
+ 0,
+ 0,
+ 0,
+ 5976,
+ 5978,
+ 5987,
+ 5990,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 5991,
+ 0,
+ 5992,
+ 0,
+ 0,
+ 0,
+ 5994,
+ 5995,
+ 0,
+ 0,
+ 5996,
+ 0,
+ 0,
+ 6001,
+ 6003,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6007,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6008,
+ 0,
+ 0,
+ 6009,
+ 0,
+ 6010,
+ 0,
+ 0,
+ 0,
+ 6011,
+ 6015,
+ 0,
+ 6017,
+ 0,
+ 6019,
+ 0,
+ 6023,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6025,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6026,
+ 0,
+ 6030,
+ 0,
+ 0,
+ 6032,
+ 0,
+ 0,
+ 0,
+ 6033,
+ 6038,
+ 6040,
+ 0,
+ 0,
+ 0,
+ 6041,
+ 6045,
+ 0,
+ 0,
+ 6046,
+ 0,
+ 0,
+ 6053,
+ 0,
+ 0,
+ 6054,
+ 0,
+ 6055,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6057,
+ 0,
+ 6063,
+ 0,
+ 0,
+ 0,
+ 6064,
+ 0,
+ 6066,
+ 6071,
+ 6072,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6075,
+ 6076,
+ 0,
+ 0,
+ 6077,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6078,
+ 6079,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6080,
+ 0,
+ 6083,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6084,
+ 0,
+ 0,
+ 6088,
+ 0,
+ 6089,
+ 0,
+ 0,
+ 6093,
+ 6105,
+ 0,
+ 0,
+ 6107,
+ 0,
+ 6110,
+ 0,
+ 0,
+ 0,
+ 6111,
+ 6125,
+ 6126,
+ 0,
+ 0,
+ 0,
+ 6129,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6130,
+ 0,
+ 0,
+ 0,
+ 6131,
+ 6134,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6142,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6144,
+ 0,
+ 0,
+ 6146,
+ 6151,
+ 6153,
+ 0,
+ 6156,
+ 0,
+ 6163,
+ 0,
+ 6180,
+ 6181,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6182,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6184,
+ 6195,
+ 0,
+ 0,
+ 6206,
+ 0,
+ 6208,
+ 0,
+ 0,
+ 6212,
+ 6213,
+ 6214,
+ 0,
+ 6215,
+ 0,
+ 0,
+ 0,
+ 6228,
+ 0,
+ 0,
+ 0,
+ 6234,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6235,
+ 6240,
+ 0,
+ 6242,
+ 6243,
+ 6244,
+ 0,
+ 6250,
+ 6255,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6257,
+ 0,
+ 0,
+ 0,
+ 6258,
+ 6278,
+ 0,
+ 6284,
+ 0,
+ 0,
+ 0,
+ 6285,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6286,
+ 0,
+ 0,
+ 0,
+ 6320,
+ 0,
+ 0,
+ 6322,
+ 6332,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6334,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6335,
+ 0,
+ 0,
+ 6337,
+ 0,
+ 6338,
+ 0,
+ 6339,
+ 6340,
+ 0,
+ 0,
+ 6356,
+ 6357,
+ 6369,
+ 0,
+ 0,
+ 0,
+ 6370,
+ 6371,
+ 6372,
+ 0,
+ 6373,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6376,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6382,
+ 6383,
+ 6384,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6386,
+ 0,
+ 6389,
+ 6397,
+ 6400,
+ 6411,
+ 0,
+ 6414,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6415,
+ 6416,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6417,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6418,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6420,
+ 0,
+ 6421,
+ 6423,
+ 6425,
+ 0,
+ 6429,
+ 6430,
+ 0,
+ 6433,
+ 6438,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6439,
+ 6440,
+ 0,
+ 0,
+ 6441,
+ 0,
+ 0,
+ 6444,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6446,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6447,
+ 6448,
+ 0,
+ 0,
+ 6450,
+ 0,
+ 0,
+ 0,
+ 6454,
+ 0,
+ 0,
+ 6455,
+ 0,
+ 6461,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6462,
+ 0,
+ 0,
+ 6463,
+ 0,
+ 6464,
+ 0,
+ 6465,
+ 6467,
+ 0,
+ 0,
+ 0,
+ 6468,
+ 0,
+ 6479,
+ 6480,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6481,
+ 0,
+ 0,
+ 6485,
+ 6487,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6493,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6494,
+ 6495,
+ 6496,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6498,
+ 0,
+ 0,
+ 0,
+ 6507,
+ 6508,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6511,
+ 6512,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6513,
+ 0,
+ 0,
+ 0,
+ 6514,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6516,
+ 0,
+ 0,
+ 6517,
+ 6518,
+ 0,
+ 0,
+ 0,
+ 6519,
+ 6520,
+ 6521,
+ 0,
+ 6523,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6524,
+ 6528,
+ 0,
+ 6530,
+ 0,
+ 0,
+ 6532,
+ 0,
+ 6578,
+ 0,
+ 0,
+ 0,
+ 6583,
+ 0,
+ 6584,
+ 0,
+ 0,
+ 0,
+ 6587,
+ 0,
+ 0,
+ 0,
+ 6590,
+ 0,
+ 6591,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6592,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6593,
+ 6594,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6599,
+ 6600,
+ 0,
+ 0,
+ 6601,
+ 6602,
+ 6604,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6608,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6610,
+ 6611,
+ 0,
+ 6615,
+ 0,
+ 6616,
+ 6618,
+ 6620,
+ 0,
+ 6637,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6639,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6641,
+ 0,
+ 6642,
+ 0,
+ 0,
+ 0,
+ 6647,
+ 0,
+ 6660,
+ 6663,
+ 0,
+ 6664,
+ 0,
+ 6666,
+ 6669,
+ 0,
+ 6675,
+ 6676,
+ 6677,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6678,
+ 0,
+ 0,
+ 0,
+ 6679,
+ 0,
+ 6680,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6693,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6704,
+ 6705,
+ 6706,
+ 0,
+ 0,
+ 6711,
+ 6713,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6716,
+ 0,
+ 0,
+ 0,
+ 6717,
+ 0,
+ 6719,
+ 6724,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6725,
+ 6726,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6728,
+ 6729,
+ 6735,
+ 0,
+ 6737,
+ 6742,
+ 0,
+ 0,
+ 6743,
+ 6750,
+ 0,
+ 6751,
+ 0,
+ 0,
+ 6752,
+ 6753,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6754,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6756,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6763,
+ 0,
+ 0,
+ 6764,
+ 6765,
+ 0,
+ 0,
+ 0,
+ 6770,
+ 0,
+ 0,
+ 0,
+ 6776,
+ 6780,
+ 0,
+ 6781,
+ 0,
+ 0,
+ 0,
+ 6783,
+ 0,
+ 6784,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6785,
+ 0,
+ 0,
+ 0,
+ 6792,
+ 0,
+ 0,
+ 0,
+ 6793,
+ 0,
+ 0,
+ 6802,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6803,
+ 0,
+ 0,
+ 0,
+ 6804,
+ 0,
+ 0,
+ 0,
+ 6812,
+ 0,
+ 0,
+ 6823,
+ 0,
+ 6824,
+ 6839,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6852,
+ 0,
+ 0,
+ 6854,
+ 0,
+ 6856,
+ 6857,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6867,
+ 0,
+ 6868,
+ 6870,
+ 6872,
+ 0,
+ 0,
+ 0,
+ 6873,
+ 6874,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6875,
+ 0,
+ 0,
+ 6877,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6878,
+ 0,
+ 0,
+ 0,
+ 6879,
+ 0,
+ 6880,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6887,
+ 0,
+ 6888,
+ 6891,
+ 6893,
+ 0,
+ 6895,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6899,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6901,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6910,
+ 0,
+ 6911,
+ 0,
+ 0,
+ 6912,
+ 0,
+ 0,
+ 6913,
+ 6914,
+ 0,
+ 0,
+ 0,
+ 6915,
+ 0,
+ 0,
+ 0,
+ 6916,
+ 6919,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6924,
+ 0,
+ 6925,
+ 0,
+ 0,
+ 0,
+ 6926,
+ 6927,
+ 6928,
+ 0,
+ 6929,
+ 0,
+ 6930,
+ 0,
+ 0,
+ 6931,
+ 6935,
+ 0,
+ 6936,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6939,
+ 6940,
+ 6941,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6942,
+ 6948,
+ 6949,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6952,
+ 6954,
+ 6963,
+ 6965,
+ 6966,
+ 0,
+ 0,
+ 6967,
+ 6968,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6969,
+ 0,
+ 0,
+ 6970,
+ 6979,
+ 0,
+ 0,
+ 6980,
+ 0,
+ 0,
+ 6983,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6984,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6988,
+ 6990,
+ 6992,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 6995,
+ 0,
+ 0,
+ 0,
+ 7012,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7019,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7021,
+ 0,
+ 0,
+ 7022,
+ 7023,
+ 7028,
+ 0,
+ 7030,
+ 7033,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7038,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7039,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7046,
+ 0,
+ 7047,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7048,
+ 7052,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7054,
+ 0,
+ 7060,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7061,
+ 0,
+ 7065,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7067,
+ 7069,
+ 0,
+ 7070,
+ 7071,
+ 7072,
+ 0,
+ 0,
+ 7078,
+ 0,
+ 7080,
+ 7081,
+ 0,
+ 7083,
+ 0,
+ 0,
+ 0,
+ 7084,
+ 7087,
+ 7088,
+ 0,
+ 0,
+ 7090,
+ 0,
+ 7093,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7107,
+ 0,
+ 0,
+ 7108,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7110,
+ 0,
+ 7114,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7115,
+ 0,
+ 7116,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7117,
+ 0,
+ 0,
+ 7118,
+ 0,
+ 0,
+ 7124,
+ 0,
+ 7125,
+ 0,
+ 0,
+ 7126,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7128,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7129,
+ 0,
+ 7130,
+ 0,
+ 7132,
+ 7133,
+ 0,
+ 0,
+ 7134,
+ 0,
+ 0,
+ 7139,
+ 0,
+ 7148,
+ 7150,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7152,
+ 0,
+ 0,
+ 0,
+ 7153,
+ 7156,
+ 7157,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7158,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7163,
+ 7165,
+ 7169,
+ 0,
+ 7171,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7172,
+ 0,
+ 7173,
+ 7181,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7182,
+ 7185,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7187,
+ 0,
+ 7201,
+ 7204,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7206,
+ 7207,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7211,
+ 7216,
+ 0,
+ 7218,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7226,
+ 7228,
+ 7230,
+ 7232,
+ 7233,
+ 7235,
+ 7237,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7238,
+ 7241,
+ 0,
+ 7242,
+ 0,
+ 0,
+ 7247,
+ 0,
+ 0,
+ 0,
+ 7266,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7289,
+ 0,
+ 0,
+ 7290,
+ 7291,
+ 0,
+ 0,
+ 7292,
+ 0,
+ 7297,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7300,
+ 0,
+ 7301,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7302,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7305,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7307,
+ 0,
+ 7308,
+ 0,
+ 7310,
+ 0,
+ 7335,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7337,
+ 0,
+ 7343,
+ 7347,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7348,
+ 0,
+ 7349,
+ 7350,
+ 7352,
+ 7354,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7357,
+ 0,
+ 7358,
+ 7366,
+ 0,
+ 7367,
+ 7368,
+ 0,
+ 0,
+ 7373,
+ 0,
+ 0,
+ 0,
+ 7374,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7376,
+ 0,
+ 0,
+ 0,
+ 7377,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7378,
+ 0,
+ 7379,
+ 7380,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7383,
+ 0,
+ 0,
+ 7386,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7398,
+ 0,
+ 0,
+ 0,
+ 7399,
+ 7400,
+ 0,
+ 7401,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7402,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7405,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7406,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7421,
+ 7427,
+ 7429,
+ 0,
+ 0,
+ 0,
+ 7435,
+ 0,
+ 0,
+ 7436,
+ 0,
+ 0,
+ 0,
+ 7437,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7438,
+ 7443,
+ 0,
+ 7446,
+ 0,
+ 7448,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7456,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7457,
+ 0,
+ 0,
+ 7461,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7462,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7463,
+ 7466,
+ 7472,
+ 0,
+ 7476,
+ 0,
+ 0,
+ 7490,
+ 0,
+ 7491,
+ 0,
+ 0,
+ 7493,
+ 0,
+ 0,
+ 0,
+ 7498,
+ 7499,
+ 0,
+ 0,
+ 7508,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7512,
+ 0,
+ 0,
+ 0,
+ 7513,
+ 7514,
+ 7516,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7518,
+ 0,
+ 0,
+ 7519,
+ 7521,
+ 7522,
+ 0,
+ 0,
+ 0,
+ 7526,
+ 0,
+ 0,
+ 7529,
+ 0,
+ 0,
+ 7531,
+ 0,
+ 7536,
+ 0,
+ 7538,
+ 0,
+ 7539,
+ 0,
+ 0,
+ 7541,
+ 7542,
+ 7546,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7547,
+ 0,
+ 7548,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7550,
+ 0,
+ 0,
+ 7552,
+ 7553,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7554,
+ 7563,
+ 0,
+ 7573,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7574,
+ 7576,
+ 0,
+ 7578,
+ 7581,
+ 7583,
+ 0,
+ 0,
+ 0,
+ 7584,
+ 0,
+ 7587,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7589,
+ 0,
+ 0,
+ 0,
+ 7594,
+ 0,
+ 0,
+ 7595,
+ 0,
+ 0,
+ 7600,
+ 7602,
+ 7610,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7612,
+ 0,
+ 7613,
+ 7614,
+ 0,
+ 0,
+ 7615,
+ 0,
+ 0,
+ 7616,
+ 0,
+ 7620,
+ 0,
+ 7621,
+ 7622,
+ 0,
+ 7623,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7626,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7627,
+ 7629,
+ 7631,
+ 0,
+ 0,
+ 7633,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7639,
+ 0,
+ 7640,
+ 7642,
+ 0,
+ 0,
+ 7643,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7644,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7645,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7661,
+ 7662,
+ 7663,
+ 7665,
+ 0,
+ 7666,
+ 0,
+ 7667,
+ 0,
+ 7684,
+ 7688,
+ 7690,
+ 0,
+ 7691,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7692,
+ 0,
+ 0,
+ 7700,
+ 0,
+ 7707,
+ 0,
+ 7708,
+ 0,
+ 7709,
+ 0,
+ 7721,
+ 0,
+ 0,
+ 0,
+ 7722,
+ 0,
+ 7724,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7729,
+ 7731,
+ 0,
+ 7732,
+ 0,
+ 7733,
+ 7735,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7739,
+ 0,
+ 0,
+ 7741,
+ 7745,
+ 0,
+ 7748,
+ 0,
+ 0,
+ 0,
+ 7751,
+ 0,
+ 0,
+ 0,
+ 7752,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7753,
+ 0,
+ 0,
+ 7756,
+ 0,
+ 7757,
+ 0,
+ 7759,
+ 0,
+ 7760,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7761,
+ 7768,
+ 0,
+ 0,
+ 7769,
+ 0,
+ 0,
+ 7770,
+ 0,
+ 0,
+ 7771,
+ 0,
+ 0,
+ 7772,
+ 0,
+ 0,
+ 7773,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7778,
+ 7783,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7784,
+ 7785,
+ 0,
+ 7790,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7792,
+ 0,
+ 7798,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7799,
+ 0,
+ 7810,
+ 0,
+ 0,
+ 7813,
+ 0,
+ 7814,
+ 0,
+ 7816,
+ 0,
+ 7818,
+ 7824,
+ 7825,
+ 7826,
+ 0,
+ 7828,
+ 7830,
+ 0,
+ 0,
+ 0,
+ 7840,
+ 0,
+ 7842,
+ 0,
+ 7843,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7844,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7846,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7856,
+ 7857,
+ 7858,
+ 7862,
+ 0,
+ 7865,
+ 0,
+ 0,
+ 7866,
+ 0,
+ 0,
+ 7913,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7914,
+ 0,
+ 0,
+ 7915,
+ 7917,
+ 7918,
+ 7919,
+ 0,
+ 7920,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7921,
+ 7922,
+ 0,
+ 7924,
+ 0,
+ 0,
+ 7925,
+ 0,
+ 0,
+ 7927,
+ 0,
+ 7930,
+ 7935,
+ 0,
+ 0,
+ 7937,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7939,
+ 0,
+ 7940,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7941,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7945,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7949,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7950,
+ 0,
+ 7953,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7968,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7969,
+ 7972,
+ 7992,
+ 0,
+ 7993,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 7994,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8007,
+ 8008,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8010,
+ 0,
+ 0,
+ 0,
+ 8012,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8018,
+ 0,
+ 8028,
+ 8029,
+ 0,
+ 0,
+ 8030,
+ 0,
+ 0,
+ 8032,
+ 8033,
+ 0,
+ 0,
+ 8034,
+ 8036,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8037,
+ 0,
+ 0,
+ 0,
+ 8043,
+ 8052,
+ 8059,
+ 8060,
+ 0,
+ 0,
+ 8061,
+ 0,
+ 0,
+ 0,
+ 8062,
+ 0,
+ 8063,
+ 0,
+ 8064,
+ 0,
+ 8066,
+ 8068,
+ 0,
+ 0,
+ 0,
+ 8080,
+ 8081,
+ 0,
+ 8089,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8092,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8093,
+ 8110,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8111,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8112,
+ 8115,
+ 0,
+ 8117,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8120,
+ 8121,
+ 8122,
+ 8128,
+ 8129,
+ 8130,
+ 8131,
+ 0,
+ 0,
+ 8139,
+ 0,
+ 0,
+ 8144,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8145,
+ 8146,
+ 8153,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8154,
+ 0,
+ 8157,
+ 8160,
+ 8162,
+ 0,
+ 8164,
+ 8165,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8166,
+ 8167,
+ 0,
+ 0,
+ 8179,
+ 0,
+ 0,
+ 0,
+ 8185,
+ 0,
+ 0,
+ 0,
+ 8186,
+ 0,
+ 0,
+ 8187,
+ 0,
+ 0,
+ 0,
+ 8188,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8204,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8210,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8213,
+ 0,
+ 8214,
+ 0,
+ 0,
+ 8215,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8218,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8219,
+ 0,
+ 8221,
+ 0,
+ 0,
+ 8222,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8225,
+ 0,
+ 0,
+ 0,
+ 8233,
+ 0,
+ 0,
+ 8242,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8247,
+ 0,
+ 8248,
+ 8252,
+ 0,
+ 8256,
+ 8257,
+ 0,
+ 0,
+ 8261,
+ 0,
+ 8264,
+ 8265,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8267,
+ 0,
+ 0,
+ 0,
+ 8269,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8270,
+ 0,
+ 0,
+ 0,
+ 8278,
+ 0,
+ 8279,
+ 8283,
+ 0,
+ 0,
+ 8285,
+ 8286,
+ 8289,
+ 8292,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8293,
+ 8295,
+ 8299,
+ 8300,
+ 8301,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8304,
+ 8307,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8321,
+ 0,
+ 0,
+ 0,
+ 8322,
+ 8323,
+ 8325,
+ 8326,
+ 8327,
+ 0,
+ 0,
+ 8332,
+ 8338,
+ 0,
+ 0,
+ 8340,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8350,
+ 0,
+ 0,
+ 8351,
+ 0,
+ 8354,
+ 8355,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8360,
+ 8372,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8377,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8380,
+ 0,
+ 0,
+ 0,
+ 8383,
+ 0,
+ 8384,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8386,
+ 8392,
+ 0,
+ 0,
+ 8394,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8396,
+ 8397,
+ 0,
+ 8398,
+ 0,
+ 8399,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8400,
+ 0,
+ 8401,
+ 8410,
+ 8411,
+ 0,
+ 8412,
+ 8413,
+ 8422,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8423,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8424,
+ 0,
+ 0,
+ 8425,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8441,
+ 8442,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8443,
+ 0,
+ 0,
+ 8444,
+ 0,
+ 8447,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8451,
+ 0,
+ 8458,
+ 0,
+ 8462,
+ 0,
+ 0,
+ 8468,
+ 0,
+ 8469,
+ 0,
+ 0,
+ 0,
+ 8470,
+ 0,
+ 8473,
+ 8479,
+ 8480,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8481,
+ 8483,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8484,
+ 0,
+ 0,
+ 8490,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8491,
+ 8493,
+ 8494,
+ 0,
+ 8528,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8530,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8534,
+ 8538,
+ 8540,
+ 0,
+ 0,
+ 8541,
+ 0,
+ 0,
+ 8545,
+ 0,
+ 8557,
+ 0,
+ 0,
+ 8569,
+ 8570,
+ 0,
+ 0,
+ 8571,
+ 8574,
+ 8575,
+ 8579,
+ 0,
+ 8583,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8591,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8606,
+ 0,
+ 8607,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8608,
+ 0,
+ 0,
+ 8609,
+ 0,
+ 0,
+ 0,
+ 8610,
+ 0,
+ 0,
+ 0,
+ 8611,
+ 0,
+ 0,
+ 8613,
+ 8617,
+ 8621,
+ 0,
+ 0,
+ 8622,
+ 0,
+ 8623,
+ 0,
+ 8624,
+ 8625,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8637,
+ 8638,
+ 8639,
+ 8650,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8652,
+ 8654,
+ 8655,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8656,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8657,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8658,
+ 0,
+ 0,
+ 8659,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8660,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8661,
+ 8663,
+ 8664,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8665,
+ 0,
+ 8669,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8671,
+ 8674,
+ 0,
+ 8684,
+ 0,
+ 8686,
+ 0,
+ 0,
+ 0,
+ 8689,
+ 0,
+ 0,
+ 0,
+ 8690,
+ 0,
+ 8706,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8710,
+ 0,
+ 8711,
+ 8713,
+ 8714,
+ 8724,
+ 8727,
+ 8728,
+ 8733,
+ 8736,
+ 0,
+ 8737,
+ 8739,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8742,
+ 8743,
+ 8745,
+ 8754,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8756,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8757,
+ 8760,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8762,
+ 8763,
+ 8764,
+ 0,
+ 8766,
+ 8769,
+ 8770,
+ 8773,
+ 0,
+ 8774,
+ 0,
+ 8779,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8780,
+ 0,
+ 0,
+ 8781,
+ 0,
+ 0,
+ 8783,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8784,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8785,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8786,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8788,
+ 8790,
+ 0,
+ 0,
+ 0,
+ 8803,
+ 0,
+ 8813,
+ 8814,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8815,
+ 8816,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8818,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8822,
+ 8828,
+ 8829,
+ 0,
+ 8831,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8833,
+ 0,
+ 0,
+ 0,
+ 8834,
+ 0,
+ 0,
+ 0,
+ 8835,
+ 0,
+ 8836,
+ 0,
+ 0,
+ 0,
+ 8837,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8838,
+ 8839,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8840,
+ 0,
+ 0,
+ 0,
+ 8841,
+ 0,
+ 8842,
+ 0,
+ 0,
+ 0,
+ 8846,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8847,
+ 0,
+ 8848,
+ 0,
+ 0,
+ 8864,
+ 0,
+ 0,
+ 8866,
+ 0,
+ 0,
+ 8870,
+ 8872,
+ 0,
+ 0,
+ 8873,
+ 8874,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8875,
+ 0,
+ 8876,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8896,
+ 8900,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8901,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8904,
+ 0,
+ 8907,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8911,
+ 8912,
+ 8913,
+ 0,
+ 0,
+ 0,
+ 8914,
+ 0,
+ 8915,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8916,
+ 0,
+ 0,
+ 0,
+ 8929,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8930,
+ 0,
+ 8932,
+ 0,
+ 8943,
+ 0,
+ 0,
+ 0,
+ 8945,
+ 8947,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8949,
+ 0,
+ 8950,
+ 0,
+ 8954,
+ 8957,
+ 0,
+ 0,
+ 8970,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8971,
+ 0,
+ 8996,
+ 0,
+ 0,
+ 0,
+ 0,
+ 8997,
+ 9000,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9001,
+ 9002,
+ 0,
+ 9004,
+ 9009,
+ 9024,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9027,
+ 9082,
+ 0,
+ 0,
+ 9083,
+ 9089,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9090,
+ 0,
+ 0,
+ 0,
+ 9092,
+ 0,
+ 0,
+ 9093,
+ 0,
+ 9095,
+ 0,
+ 0,
+ 9096,
+ 9097,
+ 9101,
+ 9102,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9112,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9114,
+ 0,
+ 0,
+ 9120,
+ 0,
+ 9121,
+ 9122,
+ 0,
+ 0,
+ 0,
+ 9123,
+ 9124,
+ 0,
+ 0,
+ 9125,
+ 0,
+ 0,
+ 9126,
+ 0,
+ 9127,
+ 0,
+ 0,
+ 9129,
+ 9131,
+ 0,
+ 0,
+ 0,
+ 9132,
+ 0,
+ 0,
+ 9136,
+ 0,
+ 9144,
+ 0,
+ 0,
+ 9148,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9149,
+ 0,
+ 9152,
+ 9163,
+ 0,
+ 0,
+ 9165,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9166,
+ 0,
+ 9169,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9170,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9172,
+ 0,
+ 9174,
+ 9175,
+ 9176,
+ 0,
+ 9177,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9186,
+ 0,
+ 9187,
+ 0,
+ 0,
+ 0,
+ 9188,
+ 9189,
+ 0,
+ 0,
+ 9190,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9191,
+ 0,
+ 0,
+ 0,
+ 9193,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9197,
+ 9198,
+ 0,
+ 0,
+ 0,
+ 9208,
+ 9211,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9216,
+ 9217,
+ 0,
+ 9220,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9221,
+ 9222,
+ 9223,
+ 0,
+ 9224,
+ 9225,
+ 0,
+ 0,
+ 9227,
+ 0,
+ 9228,
+ 9229,
+ 0,
+ 0,
+ 9230,
+ 0,
+ 9232,
+ 0,
+ 9233,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9234,
+ 9235,
+ 0,
+ 0,
+ 9237,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9238,
+ 9240,
+ 0,
+ 0,
+ 9241,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9244,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9247,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9248,
+ 0,
+ 0,
+ 0,
+ 9249,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9250,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9251,
+ 0,
+ 0,
+ 9252,
+ 9255,
+ 0,
+ 0,
+ 0,
+ 9256,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9257,
+ 0,
+ 0,
+ 9258,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9259,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9262,
+ 9263,
+ 0,
+ 0,
+ 9265,
+ 9266,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9268,
+ 9271,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9273,
+ 0,
+ 0,
+ 0,
+ 9276,
+ 9277,
+ 9279,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9280,
+ 0,
+ 0,
+ 9293,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9297,
+ 9301,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9308,
+ 9309,
+ 9313,
+ 9321,
+ 9322,
+ 0,
+ 9326,
+ 9327,
+ 0,
+ 0,
+ 9477,
+ 0,
+ 9479,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9482,
+ 0,
+ 0,
+ 0,
+ 9483,
+ 0,
+ 9484,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9485,
+ 0,
+ 0,
+ 9486,
+ 0,
+ 0,
+ 0,
+ 9489,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9490,
+ 9491,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9493,
+ 0,
+ 9495,
+ 9496,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9500,
+ 0,
+ 9502,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9504,
+ 9507,
+ 0,
+ 9509,
+ 0,
+ 9511,
+ 0,
+ 0,
+ 9513,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9515,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9516,
+ 9517,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9532,
+ 0,
+ 0,
+ 9533,
+ 0,
+ 0,
+ 9538,
+ 0,
+ 9539,
+ 9540,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9541,
+ 0,
+ 0,
+ 0,
+ 9542,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9544,
+ 9545,
+ 0,
+ 9546,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9547,
+ 9548,
+ 0,
+ 0,
+ 0,
+ 9550,
+ 0,
+ 9557,
+ 0,
+ 9558,
+ 0,
+ 9561,
+ 0,
+ 9563,
+ 9570,
+ 0,
+ 9572,
+ 9574,
+ 9575,
+ 0,
+ 0,
+ 0,
+ 9577,
+ 9592,
+ 0,
+ 0,
+ 9596,
+ 0,
+ 0,
+ 0,
+ 9598,
+ 0,
+ 9600,
+ 0,
+ 9601,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9608,
+ 0,
+ 9638,
+ 9639,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9641,
+ 0,
+ 0,
+ 9643,
+ 9644,
+ 9645,
+ 9646,
+ 0,
+ 0,
+ 0,
+ 9648,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9650,
+ 9654,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9655,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9656,
+ 0,
+ 9657,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9658,
+ 0,
+ 0,
+ 9659,
+ 0,
+ 0,
+ 9664,
+ 0,
+ 0,
+ 9665,
+ 0,
+ 9667,
+ 9669,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9671,
+ 0,
+ 9673,
+ 9681,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9682,
+ 9683,
+ 9684,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9686,
+ 9698,
+ 0,
+ 0,
+ 9700,
+ 9701,
+ 9702,
+ 0,
+ 9703,
+ 9717,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9718,
+ 0,
+ 9726,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9727,
+ 0,
+ 0,
+ 0,
+ 9728,
+ 0,
+ 9742,
+ 0,
+ 9744,
+ 0,
+ 0,
+ 0,
+ 9750,
+ 0,
+ 9754,
+ 9755,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9756,
+ 0,
+ 9757,
+ 9768,
+ 0,
+ 9769,
+ 0,
+ 0,
+ 0,
+ 9770,
+ 9771,
+ 0,
+ 9773,
+ 0,
+ 9774,
+ 0,
+ 9775,
+ 0,
+ 0,
+ 0,
+ 9776,
+ 9777,
+ 9784,
+ 0,
+ 0,
+ 0,
+ 9786,
+ 0,
+ 9789,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9793,
+ 9794,
+ 0,
+ 0,
+ 0,
+ 9808,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9811,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9812,
+ 0,
+ 9820,
+ 0,
+ 9823,
+ 0,
+ 9828,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9830,
+ 0,
+ 0,
+ 9833,
+ 9836,
+ 0,
+ 0,
+ 0,
+ 9840,
+ 0,
+ 0,
+ 0,
+ 9841,
+ 0,
+ 0,
+ 9842,
+ 0,
+ 9845,
+ 0,
+ 0,
+ 0,
+ 9847,
+ 9848,
+ 0,
+ 0,
+ 9855,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9856,
+ 9863,
+ 9865,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9866,
+ 9867,
+ 9868,
+ 9873,
+ 9875,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9880,
+ 0,
+ 9886,
+ 0,
+ 0,
+ 0,
+ 9887,
+ 0,
+ 0,
+ 9891,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9906,
+ 9907,
+ 9908,
+ 0,
+ 0,
+ 0,
+ 9909,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9910,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9913,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9914,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9922,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9923,
+ 9925,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9930,
+ 0,
+ 0,
+ 0,
+ 9931,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9932,
+ 0,
+ 9939,
+ 0,
+ 0,
+ 9940,
+ 9962,
+ 9966,
+ 0,
+ 9969,
+ 9970,
+ 0,
+ 0,
+ 9974,
+ 0,
+ 9979,
+ 9981,
+ 9982,
+ 0,
+ 0,
+ 0,
+ 9985,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9987,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 9988,
+ 9993,
+ 0,
+ 0,
+ 9994,
+ 0,
+ 0,
+ 0,
+ 9997,
+ 0,
+ 10004,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10007,
+ 10019,
+ 10020,
+ 10022,
+ 0,
+ 0,
+ 0,
+ 10031,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10032,
+ 0,
+ 0,
+ 10034,
+ 0,
+ 10036,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10038,
+ 0,
+ 10039,
+ 10040,
+ 10041,
+ 10042,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10043,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10045,
+ 10054,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10055,
+ 0,
+ 0,
+ 10057,
+ 10058,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10059,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10060,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10063,
+ 0,
+ 10066,
+ 0,
+ 0,
+ 0,
+ 10070,
+ 0,
+ 10072,
+ 0,
+ 0,
+ 10076,
+ 10077,
+ 0,
+ 0,
+ 10084,
+ 0,
+ 10087,
+ 10090,
+ 10091,
+ 0,
+ 0,
+ 0,
+ 10094,
+ 10097,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10098,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10103,
+ 0,
+ 10104,
+ 0,
+ 10108,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10120,
+ 0,
+ 0,
+ 0,
+ 10122,
+ 0,
+ 0,
+ 10125,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10127,
+ 10128,
+ 0,
+ 0,
+ 10134,
+ 0,
+ 10135,
+ 10136,
+ 0,
+ 10137,
+ 0,
+ 0,
+ 10147,
+ 0,
+ 10149,
+ 10150,
+ 0,
+ 0,
+ 10156,
+ 0,
+ 10158,
+ 10159,
+ 10160,
+ 10168,
+ 0,
+ 0,
+ 10171,
+ 0,
+ 10173,
+ 0,
+ 0,
+ 0,
+ 10176,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10177,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10178,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10194,
+ 0,
+ 10202,
+ 0,
+ 0,
+ 10203,
+ 10204,
+ 0,
+ 10205,
+ 10206,
+ 0,
+ 10207,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10209,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10213,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10217,
+ 0,
+ 10229,
+ 0,
+ 10230,
+ 10231,
+ 0,
+ 0,
+ 10232,
+ 0,
+ 0,
+ 10237,
+ 10238,
+ 10244,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10250,
+ 0,
+ 10252,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10255,
+ 0,
+ 0,
+ 10257,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10258,
+ 0,
+ 10259,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10260,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10284,
+ 10288,
+ 10289,
+ 0,
+ 0,
+ 0,
+ 10290,
+ 0,
+ 10296,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10297,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10298,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10299,
+ 10303,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10306,
+ 0,
+ 0,
+ 0,
+ 10307,
+ 0,
+ 10308,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10311,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10315,
+ 10317,
+ 0,
+ 0,
+ 0,
+ 10318,
+ 10319,
+ 0,
+ 10321,
+ 0,
+ 10326,
+ 0,
+ 10328,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10329,
+ 0,
+ 0,
+ 10331,
+ 0,
+ 10332,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10334,
+ 0,
+ 0,
+ 10335,
+ 10338,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10339,
+ 10349,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10351,
+ 0,
+ 10353,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10362,
+ 0,
+ 10368,
+ 0,
+ 10369,
+ 0,
+ 0,
+ 0,
+ 10372,
+ 10373,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10374,
+ 0,
+ 0,
+ 0,
+ 10375,
+ 0,
+ 10376,
+ 0,
+ 0,
+ 10386,
+ 10388,
+ 10390,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10391,
+ 0,
+ 0,
+ 10392,
+ 10394,
+ 0,
+ 0,
+ 10396,
+ 0,
+ 10397,
+ 0,
+ 10403,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10404,
+ 0,
+ 10405,
+ 10410,
+ 0,
+ 0,
+ 10411,
+ 0,
+ 10412,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10421,
+ 10422,
+ 10423,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10425,
+ 0,
+ 0,
+ 10427,
+ 0,
+ 0,
+ 10430,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10432,
+ 0,
+ 10433,
+ 10434,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10436,
+ 10437,
+ 0,
+ 10438,
+ 0,
+ 10439,
+ 0,
+ 10444,
+ 10446,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10448,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10449,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10451,
+ 0,
+ 10453,
+ 0,
+ 0,
+ 0,
+ 10454,
+ 10457,
+ 0,
+ 0,
+ 10459,
+ 0,
+ 10469,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10472,
+ 10481,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10482,
+ 10483,
+ 0,
+ 10492,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10499,
+ 0,
+ 0,
+ 0,
+ 10502,
+ 0,
+ 0,
+ 10510,
+ 0,
+ 10521,
+ 10524,
+ 0,
+ 0,
+ 10525,
+ 10526,
+ 10528,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10530,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10533,
+ 0,
+ 10534,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10535,
+ 10536,
+ 0,
+ 0,
+ 10544,
+ 0,
+ 10553,
+ 10556,
+ 0,
+ 10557,
+ 10559,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10562,
+ 10563,
+ 10564,
+ 0,
+ 10565,
+ 0,
+ 0,
+ 0,
+ 10566,
+ 0,
+ 10567,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10575,
+ 0,
+ 0,
+ 10576,
+ 0,
+ 10578,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10585,
+ 10586,
+ 10587,
+ 10589,
+ 0,
+ 10590,
+ 0,
+ 0,
+ 10594,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10598,
+ 0,
+ 0,
+ 10601,
+ 0,
+ 0,
+ 0,
+ 10602,
+ 0,
+ 10603,
+ 0,
+ 10604,
+ 0,
+ 10605,
+ 0,
+ 0,
+ 10607,
+ 0,
+ 10626,
+ 0,
+ 10627,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10629,
+ 10630,
+ 10631,
+ 0,
+ 0,
+ 0,
+ 10646,
+ 0,
+ 0,
+ 0,
+ 10647,
+ 0,
+ 10650,
+ 0,
+ 10651,
+ 0,
+ 0,
+ 0,
+ 10652,
+ 10653,
+ 10655,
+ 0,
+ 10658,
+ 0,
+ 0,
+ 10659,
+ 0,
+ 10667,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10669,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10670,
+ 0,
+ 0,
+ 0,
+ 10671,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10672,
+ 10673,
+ 0,
+ 10674,
+ 0,
+ 0,
+ 0,
+ 10676,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10678,
+ 0,
+ 10682,
+ 0,
+ 0,
+ 10692,
+ 0,
+ 10697,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10698,
+ 0,
+ 0,
+ 0,
+ 10700,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10703,
+ 0,
+ 10704,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10705,
+ 0,
+ 10715,
+ 10718,
+ 10720,
+ 0,
+ 0,
+ 10722,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10723,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10726,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10727,
+ 10730,
+ 10743,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10744,
+ 0,
+ 0,
+ 10745,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10748,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10750,
+ 0,
+ 0,
+ 10752,
+ 10753,
+ 0,
+ 0,
+ 0,
+ 10756,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10758,
+ 0,
+ 0,
+ 0,
+ 10759,
+ 0,
+ 10769,
+ 0,
+ 0,
+ 10772,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10773,
+ 0,
+ 0,
+ 0,
+ 10777,
+ 0,
+ 0,
+ 10779,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10780,
+ 10784,
+ 0,
+ 0,
+ 0,
+ 10789,
+ 0,
+ 0,
+ 0,
+ 10791,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10795,
+ 0,
+ 0,
+ 10796,
+ 0,
+ 10808,
+ 0,
+ 10809,
+ 0,
+ 0,
+ 0,
+ 10810,
+ 0,
+ 0,
+ 0,
+ 10812,
+ 0,
+ 0,
+ 10814,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10815,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10816,
+ 10817,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10819,
+ 0,
+ 10820,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10821,
+ 10822,
+ 10823,
+ 0,
+ 10826,
+ 10849,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10850,
+ 0,
+ 0,
+ 10852,
+ 0,
+ 10853,
+ 0,
+ 0,
+ 10856,
+ 0,
+ 0,
+ 10857,
+ 10858,
+ 10859,
+ 10860,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10863,
+ 0,
+ 10866,
+ 10867,
+ 10872,
+ 10890,
+ 0,
+ 0,
+ 10891,
+ 10892,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10893,
+ 0,
+ 0,
+ 0,
+ 10896,
+ 10899,
+ 0,
+ 0,
+ 10900,
+ 10902,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10903,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10905,
+ 0,
+ 10906,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10908,
+ 10911,
+ 0,
+ 10912,
+ 0,
+ 0,
+ 10916,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10917,
+ 0,
+ 10918,
+ 0,
+ 0,
+ 0,
+ 10923,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10924,
+ 0,
+ 0,
+ 10928,
+ 10929,
+ 0,
+ 0,
+ 10930,
+ 0,
+ 0,
+ 0,
+ 10932,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10939,
+ 0,
+ 0,
+ 10945,
+ 0,
+ 0,
+ 0,
+ 10947,
+ 0,
+ 0,
+ 10948,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10958,
+ 0,
+ 10960,
+ 10962,
+ 0,
+ 0,
+ 10964,
+ 0,
+ 0,
+ 0,
+ 10966,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10967,
+ 0,
+ 0,
+ 0,
+ 10968,
+ 0,
+ 0,
+ 0,
+ 10973,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10975,
+ 0,
+ 0,
+ 0,
+ 10976,
+ 10978,
+ 0,
+ 0,
+ 10982,
+ 10984,
+ 10987,
+ 0,
+ 0,
+ 10988,
+ 0,
+ 10989,
+ 0,
+ 0,
+ 10991,
+ 0,
+ 0,
+ 0,
+ 0,
+ 10992,
+ 0,
+ 0,
+ 0,
+ 10993,
+ 0,
+ 10995,
+ 0,
+ 0,
+ 0,
+ 10996,
+ 10997,
+ 0,
+ 0,
+ 0,
+ 10998,
+ 0,
+ 10999,
+ 0,
+ 11001,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11010,
+ 11012,
+ 0,
+ 11013,
+ 11016,
+ 11017,
+ 0,
+ 0,
+ 11019,
+ 11020,
+ 11021,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11022,
+ 0,
+ 0,
+ 11023,
+ 11029,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11031,
+ 0,
+ 0,
+ 0,
+ 11034,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11055,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11056,
+ 11060,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11061,
+ 0,
+ 0,
+ 11064,
+ 11065,
+ 0,
+ 11066,
+ 0,
+ 11069,
+ 0,
+ 11085,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11086,
+ 0,
+ 0,
+ 0,
+ 11088,
+ 0,
+ 0,
+ 0,
+ 11094,
+ 0,
+ 0,
+ 0,
+ 11095,
+ 11096,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11097,
+ 11098,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11099,
+ 0,
+ 0,
+ 11102,
+ 11108,
+ 0,
+ 0,
+ 0,
+ 11109,
+ 0,
+ 11114,
+ 11119,
+ 0,
+ 11131,
+ 0,
+ 0,
+ 0,
+ 11142,
+ 0,
+ 0,
+ 11143,
+ 0,
+ 11146,
+ 0,
+ 11147,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11148,
+ 0,
+ 11149,
+ 11152,
+ 11153,
+ 11154,
+ 0,
+ 11156,
+ 0,
+ 11157,
+ 0,
+ 0,
+ 0,
+ 11158,
+ 0,
+ 0,
+ 11159,
+ 11160,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11163,
+ 0,
+ 0,
+ 11164,
+ 11166,
+ 0,
+ 0,
+ 0,
+ 11172,
+ 11174,
+ 0,
+ 0,
+ 0,
+ 11176,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11182,
+ 11183,
+ 0,
+ 0,
+ 0,
+ 11184,
+ 11187,
+ 0,
+ 0,
+ 11188,
+ 11189,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11194,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11200,
+ 11202,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11203,
+ 0,
+ 11204,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11205,
+ 0,
+ 0,
+ 0,
+ 11206,
+ 0,
+ 11207,
+ 0,
+ 0,
+ 11209,
+ 0,
+ 11211,
+ 0,
+ 11214,
+ 0,
+ 0,
+ 11231,
+ 0,
+ 0,
+ 0,
+ 11293,
+ 11295,
+ 0,
+ 0,
+ 11296,
+ 11297,
+ 11302,
+ 0,
+ 0,
+ 0,
+ 11307,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11309,
+ 11310,
+ 0,
+ 11311,
+ 0,
+ 0,
+ 0,
+ 11313,
+ 0,
+ 11314,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11334,
+ 0,
+ 11338,
+ 0,
+ 0,
+ 0,
+ 11339,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11340,
+ 0,
+ 11341,
+ 11342,
+ 0,
+ 11344,
+ 0,
+ 11345,
+ 0,
+ 0,
+ 0,
+ 11348,
+ 11349,
+ 0,
+ 0,
+ 11350,
+ 0,
+ 0,
+ 0,
+ 11355,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11356,
+ 0,
+ 11357,
+ 11370,
+ 0,
+ 0,
+ 11371,
+ 0,
+ 11374,
+ 11376,
+ 0,
+ 0,
+ 0,
+ 11377,
+ 0,
+ 0,
+ 11378,
+ 11383,
+ 0,
+ 11386,
+ 11399,
+ 0,
+ 11400,
+ 11406,
+ 0,
+ 0,
+ 0,
+ 11408,
+ 0,
+ 0,
+ 11409,
+ 11412,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11417,
+ 0,
+ 0,
+ 0,
+ 11418,
+ 0,
+ 11421,
+ 0,
+ 11426,
+ 11429,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11430,
+ 0,
+ 11437,
+ 0,
+ 11438,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11440,
+ 11453,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11454,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11455,
+ 0,
+ 0,
+ 11456,
+ 11460,
+ 11461,
+ 11463,
+ 0,
+ 11469,
+ 0,
+ 11473,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11474,
+ 0,
+ 0,
+ 0,
+ 11475,
+ 0,
+ 11476,
+ 11477,
+ 11480,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11481,
+ 0,
+ 0,
+ 11484,
+ 0,
+ 0,
+ 11487,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11497,
+ 0,
+ 0,
+ 11502,
+ 0,
+ 11509,
+ 0,
+ 0,
+ 11510,
+ 11511,
+ 11513,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11515,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11516,
+ 0,
+ 11520,
+ 11521,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11529,
+ 11530,
+ 11531,
+ 11534,
+ 0,
+ 0,
+ 11543,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11547,
+ 0,
+ 11548,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11552,
+ 11556,
+ 0,
+ 11557,
+ 0,
+ 0,
+ 11559,
+ 0,
+ 11560,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11561,
+ 0,
+ 0,
+ 11563,
+ 11564,
+ 0,
+ 11565,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11567,
+ 0,
+ 0,
+ 0,
+ 11569,
+ 0,
+ 11574,
+ 0,
+ 11575,
+ 0,
+ 0,
+ 0,
+ 11577,
+ 0,
+ 11578,
+ 0,
+ 0,
+ 0,
+ 11580,
+ 11581,
+ 0,
+ 0,
+ 0,
+ 11582,
+ 11584,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11587,
+ 0,
+ 11588,
+ 11591,
+ 0,
+ 11595,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11596,
+ 0,
+ 11597,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11598,
+ 11601,
+ 0,
+ 0,
+ 0,
+ 11602,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11603,
+ 11604,
+ 0,
+ 11606,
+ 0,
+ 0,
+ 11608,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11610,
+ 0,
+ 0,
+ 11611,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11613,
+ 0,
+ 11622,
+ 0,
+ 0,
+ 0,
+ 11623,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11625,
+ 0,
+ 0,
+ 11626,
+ 11627,
+ 11628,
+ 11630,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11639,
+ 0,
+ 0,
+ 11646,
+ 0,
+ 11648,
+ 11649,
+ 0,
+ 11650,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11651,
+ 0,
+ 0,
+ 11652,
+ 11653,
+ 11656,
+ 0,
+ 0,
+ 11677,
+ 11679,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11680,
+ 0,
+ 0,
+ 11681,
+ 0,
+ 11685,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11688,
+ 0,
+ 0,
+ 0,
+ 11716,
+ 0,
+ 11719,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11721,
+ 0,
+ 0,
+ 11724,
+ 11743,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11745,
+ 11748,
+ 11750,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11751,
+ 0,
+ 0,
+ 0,
+ 11752,
+ 11754,
+ 0,
+ 11755,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11759,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11760,
+ 0,
+ 0,
+ 0,
+ 11761,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11766,
+ 11767,
+ 0,
+ 11772,
+ 11773,
+ 0,
+ 11774,
+ 0,
+ 0,
+ 11775,
+ 0,
+ 11777,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11778,
+ 11780,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11783,
+ 0,
+ 11784,
+ 0,
+ 0,
+ 0,
+ 11785,
+ 0,
+ 0,
+ 0,
+ 11786,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11788,
+ 0,
+ 0,
+ 11789,
+ 11791,
+ 11792,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11795,
+ 11834,
+ 11835,
+ 11836,
+ 0,
+ 0,
+ 11837,
+ 0,
+ 0,
+ 0,
+ 11838,
+ 0,
+ 0,
+ 11846,
+ 11851,
+ 0,
+ 11852,
+ 0,
+ 11869,
+ 0,
+ 0,
+ 0,
+ 11871,
+ 0,
+ 0,
+ 0,
+ 11872,
+ 11874,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11875,
+ 0,
+ 11876,
+ 11877,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11883,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11884,
+ 0,
+ 11885,
+ 0,
+ 11886,
+ 0,
+ 0,
+ 11887,
+ 0,
+ 11894,
+ 11895,
+ 11897,
+ 11909,
+ 11910,
+ 0,
+ 11912,
+ 11918,
+ 0,
+ 0,
+ 11920,
+ 0,
+ 11922,
+ 11924,
+ 11927,
+ 11928,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11929,
+ 0,
+ 11934,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11941,
+ 11943,
+ 11944,
+ 0,
+ 11945,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11948,
+ 11949,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11953,
+ 0,
+ 11954,
+ 0,
+ 11955,
+ 0,
+ 11956,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11957,
+ 0,
+ 0,
+ 11959,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11961,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11978,
+ 0,
+ 0,
+ 0,
+ 11979,
+ 11980,
+ 11986,
+ 11987,
+ 0,
+ 11992,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 11993,
+ 0,
+ 0,
+ 0,
+ 11994,
+ 0,
+ 11999,
+ 12004,
+ 12005,
+ 12006,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12011,
+ 0,
+ 0,
+ 12012,
+ 12014,
+ 0,
+ 0,
+ 12015,
+ 0,
+ 0,
+ 12019,
+ 12028,
+ 0,
+ 0,
+ 12029,
+ 0,
+ 0,
+ 12032,
+ 12033,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12034,
+ 0,
+ 12041,
+ 12043,
+ 0,
+ 0,
+ 12044,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12046,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12054,
+ 12055,
+ 0,
+ 12056,
+ 0,
+ 0,
+ 0,
+ 12060,
+ 12064,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12065,
+ 12067,
+ 12068,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12074,
+ 0,
+ 0,
+ 0,
+ 12075,
+ 12076,
+ 0,
+ 0,
+ 0,
+ 12079,
+ 0,
+ 12081,
+ 12086,
+ 12087,
+ 0,
+ 0,
+ 12088,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12089,
+ 0,
+ 12092,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12097,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12098,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12102,
+ 12103,
+ 12104,
+ 12111,
+ 0,
+ 0,
+ 12114,
+ 12116,
+ 0,
+ 0,
+ 0,
+ 12118,
+ 0,
+ 0,
+ 0,
+ 12119,
+ 12120,
+ 12128,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12130,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12131,
+ 0,
+ 0,
+ 0,
+ 12132,
+ 12134,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12137,
+ 0,
+ 12139,
+ 0,
+ 12141,
+ 0,
+ 0,
+ 12142,
+ 0,
+ 0,
+ 0,
+ 12144,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12145,
+ 0,
+ 12148,
+ 0,
+ 12153,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12154,
+ 12171,
+ 12173,
+ 0,
+ 0,
+ 0,
+ 12175,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12178,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12183,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12184,
+ 0,
+ 0,
+ 0,
+ 12186,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12187,
+ 12188,
+ 0,
+ 0,
+ 12189,
+ 0,
+ 12196,
+ 0,
+ 12197,
+ 0,
+ 0,
+ 12198,
+ 0,
+ 12201,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12203,
+ 0,
+ 12209,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12210,
+ 12211,
+ 12212,
+ 12213,
+ 0,
+ 12217,
+ 12218,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12222,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12223,
+ 0,
+ 0,
+ 12229,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12233,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12234,
+ 0,
+ 0,
+ 12236,
+ 12242,
+ 0,
+ 0,
+ 0,
+ 12243,
+ 0,
+ 0,
+ 0,
+ 12244,
+ 12253,
+ 0,
+ 12254,
+ 12256,
+ 0,
+ 12257,
+ 0,
+ 0,
+ 12275,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12277,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12278,
+ 0,
+ 12289,
+ 0,
+ 0,
+ 12290,
+ 0,
+ 12292,
+ 12293,
+ 0,
+ 0,
+ 12294,
+ 0,
+ 12295,
+ 0,
+ 0,
+ 12296,
+ 0,
+ 12297,
+ 0,
+ 12298,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12301,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12309,
+ 0,
+ 12338,
+ 12340,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12341,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12342,
+ 12343,
+ 0,
+ 12344,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12345,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12346,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12348,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12350,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12351,
+ 0,
+ 12355,
+ 12356,
+ 12357,
+ 0,
+ 0,
+ 12367,
+ 12370,
+ 12371,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12372,
+ 12376,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12379,
+ 0,
+ 12382,
+ 0,
+ 12383,
+ 0,
+ 0,
+ 12384,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12393,
+ 0,
+ 0,
+ 12394,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12398,
+ 12403,
+ 0,
+ 0,
+ 12404,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12410,
+ 0,
+ 0,
+ 0,
+ 12411,
+ 0,
+ 0,
+ 0,
+ 12412,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12420,
+ 0,
+ 12421,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12423,
+ 0,
+ 12425,
+ 12429,
+ 0,
+ 0,
+ 0,
+ 12431,
+ 12432,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12434,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12435,
+ 12436,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12437,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12438,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12445,
+ 0,
+ 0,
+ 0,
+ 12450,
+ 12451,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12452,
+ 12475,
+ 0,
+ 0,
+ 12493,
+ 12494,
+ 0,
+ 0,
+ 0,
+ 12495,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12496,
+ 12502,
+ 12509,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12510,
+ 0,
+ 12512,
+ 12513,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12514,
+ 0,
+ 0,
+ 0,
+ 12515,
+ 0,
+ 12520,
+ 0,
+ 0,
+ 0,
+ 12524,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12527,
+ 0,
+ 0,
+ 0,
+ 12528,
+ 0,
+ 0,
+ 0,
+ 12529,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12530,
+ 0,
+ 12535,
+ 0,
+ 0,
+ 12536,
+ 0,
+ 12538,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12540,
+ 0,
+ 12548,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12550,
+ 0,
+ 0,
+ 0,
+ 12551,
+ 12552,
+ 0,
+ 0,
+ 0,
+ 12554,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12555,
+ 0,
+ 0,
+ 12562,
+ 0,
+ 12565,
+ 0,
+ 12566,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12569,
+ 0,
+ 0,
+ 0,
+ 12571,
+ 12574,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12577,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12578,
+ 12579,
+ 12603,
+ 0,
+ 12608,
+ 0,
+ 0,
+ 12611,
+ 0,
+ 12612,
+ 0,
+ 12615,
+ 0,
+ 12625,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12627,
+ 12646,
+ 0,
+ 12648,
+ 0,
+ 0,
+ 12657,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12670,
+ 0,
+ 0,
+ 12671,
+ 0,
+ 12673,
+ 12677,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12679,
+ 0,
+ 12681,
+ 0,
+ 12682,
+ 12693,
+ 0,
+ 12694,
+ 0,
+ 12697,
+ 0,
+ 12701,
+ 0,
+ 0,
+ 0,
+ 12703,
+ 12704,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12707,
+ 12737,
+ 0,
+ 0,
+ 12739,
+ 0,
+ 0,
+ 12740,
+ 0,
+ 0,
+ 12742,
+ 12743,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12745,
+ 0,
+ 12746,
+ 12747,
+ 0,
+ 12748,
+ 0,
+ 0,
+ 12759,
+ 12767,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12773,
+ 0,
+ 12774,
+ 12778,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12779,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12780,
+ 12793,
+ 0,
+ 12824,
+ 0,
+ 12825,
+ 0,
+ 12836,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12839,
+ 0,
+ 12842,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12843,
+ 12845,
+ 0,
+ 12846,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12847,
+ 0,
+ 0,
+ 12850,
+ 12852,
+ 12853,
+ 0,
+ 0,
+ 0,
+ 12854,
+ 0,
+ 0,
+ 0,
+ 12855,
+ 0,
+ 12856,
+ 0,
+ 12858,
+ 0,
+ 0,
+ 12859,
+ 0,
+ 12862,
+ 0,
+ 12863,
+ 0,
+ 0,
+ 12866,
+ 0,
+ 12869,
+ 12872,
+ 12873,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12875,
+ 0,
+ 12877,
+ 0,
+ 0,
+ 12878,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12884,
+ 12885,
+ 12888,
+ 0,
+ 12889,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12893,
+ 0,
+ 0,
+ 0,
+ 12895,
+ 12896,
+ 12898,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12902,
+ 0,
+ 12909,
+ 12910,
+ 0,
+ 12926,
+ 0,
+ 12928,
+ 0,
+ 0,
+ 0,
+ 12929,
+ 0,
+ 12930,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12931,
+ 0,
+ 12932,
+ 12933,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12934,
+ 0,
+ 12942,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12944,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12946,
+ 0,
+ 0,
+ 12948,
+ 0,
+ 0,
+ 12949,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12950,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12951,
+ 0,
+ 12952,
+ 0,
+ 12953,
+ 0,
+ 0,
+ 0,
+ 12954,
+ 12958,
+ 12959,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12960,
+ 12964,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12966,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12970,
+ 0,
+ 12971,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 12972,
+ 0,
+ 0,
+ 12982,
+ 0,
+ 0,
+ 0,
+ 12984,
+ 12985,
+ 0,
+ 12986,
+ 12996,
+ 12997,
+ 13001,
+ 13002,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13004,
+ 0,
+ 0,
+ 13005,
+ 0,
+ 0,
+ 13007,
+ 13009,
+ 0,
+ 13017,
+ 0,
+ 0,
+ 0,
+ 13020,
+ 0,
+ 13021,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13022,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13024,
+ 13027,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13028,
+ 0,
+ 0,
+ 13029,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13032,
+ 0,
+ 13037,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13040,
+ 0,
+ 0,
+ 13041,
+ 0,
+ 0,
+ 0,
+ 13043,
+ 13044,
+ 13046,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13047,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13049,
+ 13054,
+ 0,
+ 13056,
+ 0,
+ 0,
+ 13060,
+ 13061,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13067,
+ 0,
+ 0,
+ 13068,
+ 0,
+ 13071,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13077,
+ 13078,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13079,
+ 13080,
+ 13081,
+ 0,
+ 13082,
+ 0,
+ 0,
+ 0,
+ 13085,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13086,
+ 0,
+ 13087,
+ 13088,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13094,
+ 0,
+ 13099,
+ 0,
+ 13100,
+ 0,
+ 0,
+ 0,
+ 13101,
+ 0,
+ 13125,
+ 13126,
+ 13128,
+ 13129,
+ 0,
+ 0,
+ 13130,
+ 0,
+ 13131,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13134,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13150,
+ 0,
+ 13168,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13169,
+ 0,
+ 0,
+ 13170,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13174,
+ 0,
+ 0,
+ 0,
+ 13176,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13177,
+ 0,
+ 13178,
+ 13183,
+ 13187,
+ 0,
+ 0,
+ 0,
+ 13189,
+ 0,
+ 0,
+ 13190,
+ 0,
+ 0,
+ 13191,
+ 0,
+ 0,
+ 13206,
+ 0,
+ 0,
+ 0,
+ 13207,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13212,
+ 0,
+ 0,
+ 13219,
+ 13232,
+ 0,
+ 0,
+ 0,
+ 13241,
+ 0,
+ 13249,
+ 13253,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13255,
+ 13259,
+ 0,
+ 13260,
+ 13261,
+ 0,
+ 13262,
+ 0,
+ 13272,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13276,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13277,
+ 13299,
+ 0,
+ 0,
+ 13301,
+ 13302,
+ 0,
+ 0,
+ 13303,
+ 0,
+ 0,
+ 13305,
+ 0,
+ 13310,
+ 0,
+ 0,
+ 0,
+ 13311,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13325,
+ 0,
+ 13328,
+ 0,
+ 0,
+ 0,
+ 13329,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13330,
+ 0,
+ 0,
+ 13331,
+ 0,
+ 13335,
+ 0,
+ 0,
+ 13342,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13343,
+ 0,
+ 13354,
+ 0,
+ 13362,
+ 0,
+ 13366,
+ 13367,
+ 13369,
+ 0,
+ 0,
+ 13371,
+ 13372,
+ 0,
+ 13373,
+ 13374,
+ 0,
+ 13376,
+ 0,
+ 13380,
+ 13381,
+ 13386,
+ 0,
+ 13387,
+ 13388,
+ 0,
+ 13389,
+ 13391,
+ 13395,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13401,
+ 13409,
+ 0,
+ 13410,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13420,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13422,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13423,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13425,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13427,
+ 0,
+ 0,
+ 0,
+ 13428,
+ 0,
+ 0,
+ 13430,
+ 13438,
+ 0,
+ 13439,
+ 0,
+ 13445,
+ 0,
+ 13448,
+ 13449,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13451,
+ 0,
+ 13457,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13458,
+ 13459,
+ 0,
+ 13460,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13464,
+ 13465,
+ 13466,
+ 13470,
+ 0,
+ 13471,
+ 13472,
+ 13474,
+ 13475,
+ 0,
+ 13476,
+ 0,
+ 0,
+ 13478,
+ 13479,
+ 0,
+ 13481,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13487,
+ 0,
+ 13490,
+ 0,
+ 13493,
+ 0,
+ 0,
+ 13494,
+ 0,
+ 0,
+ 13495,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13496,
+ 13497,
+ 0,
+ 13500,
+ 0,
+ 0,
+ 13516,
+ 13522,
+ 0,
+ 0,
+ 13525,
+ 13528,
+ 0,
+ 0,
+ 0,
+ 13530,
+ 13535,
+ 0,
+ 13537,
+ 13539,
+ 0,
+ 13540,
+ 0,
+ 13543,
+ 0,
+ 13544,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13545,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13547,
+ 0,
+ 0,
+ 0,
+ 13549,
+ 13555,
+ 0,
+ 0,
+ 0,
+ 13556,
+ 13557,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13558,
+ 0,
+ 13563,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13564,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13566,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13569,
+ 0,
+ 0,
+ 13571,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13573,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13578,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13581,
+ 0,
+ 13586,
+ 0,
+ 13595,
+ 0,
+ 13600,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13601,
+ 13603,
+ 0,
+ 13604,
+ 13605,
+ 13606,
+ 13607,
+ 0,
+ 0,
+ 13617,
+ 13618,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13623,
+ 0,
+ 13625,
+ 13627,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13629,
+ 0,
+ 0,
+ 0,
+ 13634,
+ 0,
+ 0,
+ 0,
+ 13638,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13654,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13656,
+ 0,
+ 13659,
+ 0,
+ 0,
+ 13660,
+ 0,
+ 0,
+ 13662,
+ 0,
+ 0,
+ 0,
+ 13663,
+ 0,
+ 13664,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13668,
+ 0,
+ 13669,
+ 13671,
+ 0,
+ 0,
+ 13672,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13675,
+ 13685,
+ 0,
+ 13686,
+ 0,
+ 0,
+ 0,
+ 13687,
+ 0,
+ 0,
+ 0,
+ 13692,
+ 13694,
+ 13697,
+ 0,
+ 0,
+ 0,
+ 13702,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13705,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13707,
+ 0,
+ 0,
+ 0,
+ 13714,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13715,
+ 0,
+ 13716,
+ 13717,
+ 0,
+ 0,
+ 13719,
+ 13724,
+ 13730,
+ 13731,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13732,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13734,
+ 0,
+ 13736,
+ 0,
+ 0,
+ 13737,
+ 13738,
+ 13747,
+ 0,
+ 13751,
+ 0,
+ 0,
+ 13752,
+ 0,
+ 0,
+ 0,
+ 13753,
+ 0,
+ 13757,
+ 0,
+ 0,
+ 13762,
+ 13763,
+ 0,
+ 13764,
+ 13765,
+ 0,
+ 13766,
+ 0,
+ 0,
+ 13767,
+ 0,
+ 0,
+ 0,
+ 13768,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13769,
+ 0,
+ 0,
+ 13772,
+ 0,
+ 13775,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13776,
+ 13778,
+ 13787,
+ 0,
+ 0,
+ 0,
+ 13797,
+ 0,
+ 13798,
+ 0,
+ 13801,
+ 0,
+ 13804,
+ 13806,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13816,
+ 13817,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13834,
+ 0,
+ 13836,
+ 0,
+ 0,
+ 13838,
+ 0,
+ 0,
+ 13839,
+ 0,
+ 13840,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13842,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13843,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13845,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13858,
+ 0,
+ 0,
+ 13860,
+ 0,
+ 0,
+ 13861,
+ 0,
+ 0,
+ 13862,
+ 13863,
+ 0,
+ 13868,
+ 0,
+ 13869,
+ 13870,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13872,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13873,
+ 13878,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13886,
+ 0,
+ 13888,
+ 13889,
+ 13890,
+ 0,
+ 0,
+ 13891,
+ 13894,
+ 0,
+ 13897,
+ 13899,
+ 13900,
+ 13904,
+ 0,
+ 0,
+ 13906,
+ 0,
+ 0,
+ 0,
+ 13909,
+ 0,
+ 0,
+ 0,
+ 13910,
+ 0,
+ 0,
+ 0,
+ 13911,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13912,
+ 13917,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13918,
+ 0,
+ 13919,
+ 0,
+ 0,
+ 13920,
+ 0,
+ 0,
+ 0,
+ 13921,
+ 0,
+ 0,
+ 13922,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13924,
+ 0,
+ 13927,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13932,
+ 0,
+ 13933,
+ 0,
+ 13934,
+ 0,
+ 0,
+ 13935,
+ 0,
+ 13944,
+ 0,
+ 0,
+ 0,
+ 13954,
+ 0,
+ 0,
+ 13955,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13956,
+ 0,
+ 13957,
+ 0,
+ 13967,
+ 13969,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13970,
+ 13990,
+ 0,
+ 13991,
+ 13994,
+ 0,
+ 13995,
+ 0,
+ 0,
+ 0,
+ 0,
+ 13996,
+ 0,
+ 0,
+ 13999,
+ 0,
+ 0,
+ 0,
+ 14018,
+ 0,
+ 14019,
+ 0,
+ 14021,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14041,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14043,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14046,
+ 0,
+ 0,
+ 0,
+ 14048,
+ 14049,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14051,
+ 0,
+ 0,
+ 14052,
+ 14056,
+ 0,
+ 14063,
+ 0,
+ 14064,
+ 14066,
+ 0,
+ 0,
+ 14067,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14068,
+ 0,
+ 0,
+ 0,
+ 14072,
+ 0,
+ 14074,
+ 14075,
+ 0,
+ 14076,
+ 14079,
+ 14085,
+ 14086,
+ 14087,
+ 14093,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14095,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14096,
+ 14097,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14098,
+ 0,
+ 14102,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14103,
+ 0,
+ 0,
+ 0,
+ 14104,
+ 0,
+ 0,
+ 14105,
+ 0,
+ 0,
+ 0,
+ 14107,
+ 14108,
+ 0,
+ 0,
+ 14109,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14117,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14118,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14119,
+ 0,
+ 0,
+ 14120,
+ 0,
+ 0,
+ 14121,
+ 0,
+ 14122,
+ 14127,
+ 0,
+ 14128,
+ 14136,
+ 0,
+ 0,
+ 14138,
+ 0,
+ 14140,
+ 0,
+ 0,
+ 0,
+ 14141,
+ 14142,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14146,
+ 0,
+ 0,
+ 14149,
+ 0,
+ 14151,
+ 0,
+ 0,
+ 0,
+ 14152,
+ 0,
+ 0,
+ 14153,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14154,
+ 0,
+ 14156,
+ 14157,
+ 0,
+ 0,
+ 14159,
+ 0,
+ 14161,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14162,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14163,
+ 0,
+ 0,
+ 14173,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14174,
+ 0,
+ 0,
+ 14176,
+ 0,
+ 0,
+ 14178,
+ 0,
+ 0,
+ 14179,
+ 14181,
+ 0,
+ 0,
+ 14182,
+ 14185,
+ 14187,
+ 0,
+ 14190,
+ 0,
+ 0,
+ 14197,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14198,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14199,
+ 14200,
+ 0,
+ 0,
+ 0,
+ 14204,
+ 0,
+ 0,
+ 14208,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14231,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14234,
+ 0,
+ 0,
+ 14235,
+ 0,
+ 0,
+ 0,
+ 14240,
+ 14241,
+ 0,
+ 0,
+ 0,
+ 14246,
+ 0,
+ 0,
+ 0,
+ 14247,
+ 0,
+ 14250,
+ 0,
+ 0,
+ 14251,
+ 0,
+ 0,
+ 14254,
+ 0,
+ 0,
+ 14256,
+ 0,
+ 0,
+ 0,
+ 14260,
+ 0,
+ 14261,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14262,
+ 14267,
+ 14269,
+ 0,
+ 0,
+ 14277,
+ 0,
+ 0,
+ 14278,
+ 0,
+ 14279,
+ 14282,
+ 0,
+ 0,
+ 0,
+ 14283,
+ 0,
+ 0,
+ 0,
+ 14284,
+ 14285,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14286,
+ 0,
+ 0,
+ 0,
+ 14288,
+ 0,
+ 0,
+ 0,
+ 14289,
+ 0,
+ 14290,
+ 0,
+ 14293,
+ 14301,
+ 14302,
+ 14304,
+ 14305,
+ 0,
+ 14307,
+ 0,
+ 14308,
+ 14309,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14311,
+ 14312,
+ 0,
+ 0,
+ 14317,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14318,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14320,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14321,
+ 14322,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14326,
+ 14329,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14330,
+ 14331,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14332,
+ 0,
+ 0,
+ 0,
+ 14333,
+ 0,
+ 0,
+ 14337,
+ 14340,
+ 0,
+ 14341,
+ 0,
+ 0,
+ 14342,
+ 0,
+ 14345,
+ 14346,
+ 0,
+ 0,
+ 14347,
+ 0,
+ 14362,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14364,
+ 14365,
+ 14371,
+ 0,
+ 14373,
+ 0,
+ 0,
+ 14374,
+ 0,
+ 14379,
+ 0,
+ 14400,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14401,
+ 0,
+ 0,
+ 14405,
+ 0,
+ 14406,
+ 0,
+ 14408,
+ 14409,
+ 0,
+ 0,
+ 0,
+ 14417,
+ 0,
+ 0,
+ 14424,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14430,
+ 0,
+ 0,
+ 0,
+ 14431,
+ 0,
+ 0,
+ 14435,
+ 0,
+ 14440,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14442,
+ 0,
+ 0,
+ 14443,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14446,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14454,
+ 0,
+ 14457,
+ 0,
+ 14460,
+ 0,
+ 0,
+ 14466,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14467,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14469,
+ 0,
+ 14477,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14478,
+ 14482,
+ 0,
+ 0,
+ 0,
+ 14483,
+ 0,
+ 0,
+ 0,
+ 14485,
+ 14486,
+ 0,
+ 0,
+ 0,
+ 14487,
+ 14488,
+ 14489,
+ 14492,
+ 14493,
+ 14494,
+ 14495,
+ 14496,
+ 14497,
+ 0,
+ 14499,
+ 0,
+ 14501,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14502,
+ 0,
+ 14507,
+ 14512,
+ 14513,
+ 14514,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14515,
+ 14526,
+ 14530,
+ 0,
+ 14537,
+ 0,
+ 14544,
+ 0,
+ 14547,
+ 0,
+ 0,
+ 14548,
+ 14550,
+ 14551,
+ 0,
+ 0,
+ 14552,
+ 0,
+ 0,
+ 0,
+ 14553,
+ 0,
+ 14554,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14556,
+ 14564,
+ 0,
+ 0,
+ 14565,
+ 14566,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14568,
+ 0,
+ 0,
+ 14569,
+ 0,
+ 0,
+ 0,
+ 14571,
+ 14576,
+ 0,
+ 0,
+ 14577,
+ 14578,
+ 14579,
+ 0,
+ 0,
+ 14580,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14582,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14583,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14587,
+ 0,
+ 14588,
+ 0,
+ 0,
+ 14600,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14601,
+ 0,
+ 0,
+ 14604,
+ 14605,
+ 14611,
+ 0,
+ 14613,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14615,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14627,
+ 0,
+ 14628,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14631,
+ 0,
+ 14633,
+ 14634,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14635,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14636,
+ 0,
+ 0,
+ 14639,
+ 14642,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14644,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14645,
+ 14646,
+ 0,
+ 14653,
+ 0,
+ 0,
+ 14654,
+ 0,
+ 14658,
+ 0,
+ 14661,
+ 0,
+ 0,
+ 0,
+ 14665,
+ 0,
+ 0,
+ 0,
+ 14668,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14669,
+ 0,
+ 0,
+ 14670,
+ 0,
+ 0,
+ 0,
+ 14680,
+ 0,
+ 0,
+ 14681,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14682,
+ 14683,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14686,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14687,
+ 14697,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14699,
+ 14705,
+ 14711,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14712,
+ 0,
+ 0,
+ 0,
+ 14713,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14719,
+ 0,
+ 14720,
+ 14721,
+ 14726,
+ 0,
+ 0,
+ 0,
+ 14728,
+ 14729,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14731,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14733,
+ 14736,
+ 14737,
+ 0,
+ 0,
+ 14740,
+ 14742,
+ 0,
+ 0,
+ 0,
+ 14744,
+ 14753,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14755,
+ 14758,
+ 14760,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14761,
+ 14762,
+ 14765,
+ 14771,
+ 0,
+ 14772,
+ 0,
+ 14773,
+ 14774,
+ 0,
+ 0,
+ 14775,
+ 0,
+ 0,
+ 14776,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14777,
+ 0,
+ 14779,
+ 0,
+ 0,
+ 14782,
+ 0,
+ 0,
+ 14785,
+ 14786,
+ 14788,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14795,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14798,
+ 0,
+ 14803,
+ 14804,
+ 14806,
+ 0,
+ 0,
+ 0,
+ 14809,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14810,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14811,
+ 0,
+ 14812,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14815,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14816,
+ 0,
+ 14818,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14819,
+ 0,
+ 14820,
+ 0,
+ 14823,
+ 0,
+ 0,
+ 0,
+ 14824,
+ 0,
+ 0,
+ 14826,
+ 14827,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14830,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14833,
+ 0,
+ 14845,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14846,
+ 0,
+ 0,
+ 14847,
+ 14871,
+ 0,
+ 14873,
+ 0,
+ 14876,
+ 0,
+ 14877,
+ 14878,
+ 14880,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14881,
+ 0,
+ 14882,
+ 14894,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14895,
+ 0,
+ 14907,
+ 0,
+ 14908,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14911,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14920,
+ 0,
+ 0,
+ 14931,
+ 0,
+ 14932,
+ 14934,
+ 14935,
+ 0,
+ 0,
+ 14936,
+ 0,
+ 14945,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14947,
+ 0,
+ 0,
+ 14948,
+ 14949,
+ 14951,
+ 0,
+ 0,
+ 14952,
+ 0,
+ 0,
+ 0,
+ 14964,
+ 14973,
+ 0,
+ 0,
+ 14990,
+ 0,
+ 0,
+ 0,
+ 0,
+ 14995,
+ 0,
+ 0,
+ 14998,
+ 15001,
+ 0,
+ 0,
+ 15002,
+ 15020,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15021,
+ 0,
+ 15022,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15023,
+ 0,
+ 0,
+ 15025,
+ 15029,
+ 15033,
+ 0,
+ 0,
+ 0,
+ 15034,
+ 0,
+ 0,
+ 0,
+ 15035,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15043,
+ 15044,
+ 0,
+ 0,
+ 0,
+ 15045,
+ 15046,
+ 15048,
+ 15050,
+ 0,
+ 15065,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15066,
+ 0,
+ 0,
+ 15075,
+ 15082,
+ 15084,
+ 0,
+ 0,
+ 15085,
+ 15086,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15088,
+ 0,
+ 0,
+ 0,
+ 15089,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15094,
+ 0,
+ 15096,
+ 0,
+ 15097,
+ 0,
+ 15100,
+ 0,
+ 0,
+ 15102,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15105,
+ 0,
+ 0,
+ 15106,
+ 0,
+ 15109,
+ 15113,
+ 0,
+ 0,
+ 0,
+ 15115,
+ 0,
+ 15118,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15119,
+ 0,
+ 0,
+ 15120,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15123,
+ 15129,
+ 0,
+ 0,
+ 0,
+ 15130,
+ 0,
+ 15131,
+ 0,
+ 0,
+ 15134,
+ 0,
+ 15135,
+ 0,
+ 0,
+ 0,
+ 15137,
+ 15138,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15139,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15140,
+ 0,
+ 0,
+ 15154,
+ 15162,
+ 0,
+ 15169,
+ 15170,
+ 0,
+ 15175,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15177,
+ 0,
+ 15178,
+ 15179,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15183,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15185,
+ 15187,
+ 0,
+ 15194,
+ 15195,
+ 15196,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15204,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15206,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15207,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15213,
+ 0,
+ 15214,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15232,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15234,
+ 0,
+ 15238,
+ 15240,
+ 0,
+ 15248,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15250,
+ 15251,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15252,
+ 0,
+ 0,
+ 0,
+ 15255,
+ 15262,
+ 15266,
+ 0,
+ 0,
+ 0,
+ 15267,
+ 0,
+ 0,
+ 0,
+ 15277,
+ 15279,
+ 0,
+ 0,
+ 0,
+ 15280,
+ 15281,
+ 15282,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15285,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15289,
+ 0,
+ 0,
+ 15291,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15296,
+ 15297,
+ 0,
+ 0,
+ 15304,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15306,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15307,
+ 15308,
+ 0,
+ 15309,
+ 0,
+ 0,
+ 15311,
+ 0,
+ 0,
+ 15312,
+ 15313,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15314,
+ 15317,
+ 0,
+ 0,
+ 0,
+ 15318,
+ 15319,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15320,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15321,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15324,
+ 0,
+ 15325,
+ 15326,
+ 0,
+ 15330,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15334,
+ 0,
+ 15335,
+ 0,
+ 15341,
+ 0,
+ 0,
+ 15342,
+ 0,
+ 0,
+ 15343,
+ 15344,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15345,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15347,
+ 0,
+ 0,
+ 15348,
+ 15349,
+ 15350,
+ 0,
+ 15356,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15357,
+ 0,
+ 15358,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15359,
+ 15360,
+ 15364,
+ 0,
+ 15380,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15392,
+ 0,
+ 0,
+ 15393,
+ 0,
+ 15395,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15396,
+ 0,
+ 0,
+ 15397,
+ 15398,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15399,
+ 0,
+ 15400,
+ 0,
+ 0,
+ 0,
+ 15402,
+ 0,
+ 15405,
+ 15410,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15411,
+ 0,
+ 0,
+ 0,
+ 15412,
+ 0,
+ 15416,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15428,
+ 0,
+ 15435,
+ 0,
+ 0,
+ 15438,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15439,
+ 0,
+ 0,
+ 0,
+ 15440,
+ 0,
+ 0,
+ 0,
+ 15441,
+ 15449,
+ 15451,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15452,
+ 0,
+ 0,
+ 15455,
+ 0,
+ 0,
+ 0,
+ 15456,
+ 0,
+ 0,
+ 15458,
+ 0,
+ 15460,
+ 15461,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15462,
+ 15464,
+ 0,
+ 15465,
+ 0,
+ 0,
+ 15466,
+ 0,
+ 0,
+ 15467,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15468,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15481,
+ 0,
+ 0,
+ 15484,
+ 0,
+ 15485,
+ 15486,
+ 0,
+ 0,
+ 0,
+ 15487,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15488,
+ 0,
+ 15492,
+ 15498,
+ 0,
+ 0,
+ 0,
+ 15499,
+ 0,
+ 0,
+ 0,
+ 15500,
+ 0,
+ 15501,
+ 0,
+ 0,
+ 15512,
+ 0,
+ 15522,
+ 0,
+ 0,
+ 0,
+ 15524,
+ 0,
+ 15525,
+ 15526,
+ 0,
+ 0,
+ 15527,
+ 0,
+ 0,
+ 15545,
+ 15546,
+ 0,
+ 15548,
+ 15552,
+ 0,
+ 15553,
+ 0,
+ 0,
+ 0,
+ 15554,
+ 0,
+ 15555,
+ 0,
+ 15557,
+ 15565,
+ 15573,
+ 15577,
+ 15578,
+ 0,
+ 15582,
+ 0,
+ 15583,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15586,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15588,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15589,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15593,
+ 15594,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15595,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15596,
+ 0,
+ 0,
+ 0,
+ 15597,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15600,
+ 0,
+ 0,
+ 15601,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15602,
+ 15603,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15604,
+ 0,
+ 15609,
+ 0,
+ 0,
+ 15612,
+ 0,
+ 0,
+ 15613,
+ 0,
+ 0,
+ 15615,
+ 15617,
+ 15618,
+ 0,
+ 0,
+ 15620,
+ 0,
+ 15636,
+ 15637,
+ 0,
+ 0,
+ 15649,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15650,
+ 0,
+ 0,
+ 15651,
+ 0,
+ 0,
+ 0,
+ 15656,
+ 0,
+ 15658,
+ 0,
+ 0,
+ 0,
+ 15664,
+ 0,
+ 0,
+ 15665,
+ 0,
+ 0,
+ 15668,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15669,
+ 0,
+ 0,
+ 15674,
+ 0,
+ 0,
+ 15675,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15676,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15677,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15678,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15679,
+ 0,
+ 0,
+ 15681,
+ 0,
+ 15686,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15687,
+ 0,
+ 15688,
+ 0,
+ 0,
+ 15690,
+ 0,
+ 0,
+ 0,
+ 15697,
+ 0,
+ 15699,
+ 15700,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15701,
+ 0,
+ 15702,
+ 15703,
+ 0,
+ 15704,
+ 0,
+ 15705,
+ 0,
+ 15707,
+ 0,
+ 15709,
+ 0,
+ 15712,
+ 15716,
+ 0,
+ 15717,
+ 0,
+ 15718,
+ 15720,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15724,
+ 0,
+ 0,
+ 0,
+ 15725,
+ 0,
+ 15726,
+ 0,
+ 0,
+ 0,
+ 15740,
+ 0,
+ 15745,
+ 15746,
+ 0,
+ 0,
+ 15747,
+ 0,
+ 15748,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15749,
+ 0,
+ 0,
+ 0,
+ 15752,
+ 0,
+ 15753,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15759,
+ 0,
+ 0,
+ 0,
+ 15765,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15767,
+ 0,
+ 0,
+ 0,
+ 15771,
+ 0,
+ 0,
+ 15784,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15785,
+ 15790,
+ 15791,
+ 0,
+ 0,
+ 15792,
+ 0,
+ 0,
+ 0,
+ 15807,
+ 0,
+ 15811,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15818,
+ 0,
+ 0,
+ 0,
+ 15819,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15821,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15822,
+ 15824,
+ 0,
+ 0,
+ 15827,
+ 0,
+ 0,
+ 15829,
+ 15831,
+ 0,
+ 15832,
+ 0,
+ 0,
+ 15833,
+ 0,
+ 15835,
+ 15838,
+ 15839,
+ 15843,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15844,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15845,
+ 15851,
+ 15856,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15858,
+ 15860,
+ 0,
+ 15861,
+ 0,
+ 0,
+ 0,
+ 15864,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15865,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15866,
+ 0,
+ 15872,
+ 0,
+ 0,
+ 15876,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15877,
+ 15878,
+ 15883,
+ 15885,
+ 0,
+ 0,
+ 15888,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15889,
+ 15890,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15892,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15893,
+ 0,
+ 0,
+ 15894,
+ 0,
+ 0,
+ 0,
+ 15895,
+ 0,
+ 15896,
+ 15897,
+ 0,
+ 15898,
+ 15901,
+ 15902,
+ 0,
+ 15911,
+ 15915,
+ 0,
+ 15916,
+ 0,
+ 15924,
+ 15935,
+ 0,
+ 15937,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15950,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15958,
+ 0,
+ 0,
+ 0,
+ 15961,
+ 0,
+ 0,
+ 15966,
+ 0,
+ 15967,
+ 0,
+ 0,
+ 15977,
+ 0,
+ 0,
+ 15978,
+ 0,
+ 0,
+ 15981,
+ 15982,
+ 15983,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 15986,
+ 0,
+ 0,
+ 0,
+ 15990,
+ 0,
+ 15991,
+ 15995,
+ 15998,
+ 0,
+ 15999,
+ 0,
+ 16000,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16008,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16009,
+ 16011,
+ 0,
+ 16013,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16014,
+ 0,
+ 0,
+ 16015,
+ 16023,
+ 16024,
+ 16025,
+ 0,
+ 0,
+ 16026,
+ 0,
+ 16030,
+ 0,
+ 16032,
+ 0,
+ 16033,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16035,
+ 16036,
+ 16037,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16039,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16041,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16043,
+ 16044,
+ 0,
+ 0,
+ 16047,
+ 0,
+ 0,
+ 0,
+ 16048,
+ 0,
+ 0,
+ 16049,
+ 16050,
+ 16052,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16055,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16056,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16058,
+ 16060,
+ 16061,
+ 0,
+ 0,
+ 16063,
+ 0,
+ 0,
+ 16064,
+ 0,
+ 0,
+ 0,
+ 16067,
+ 16068,
+ 0,
+ 0,
+ 16069,
+ 16078,
+ 0,
+ 0,
+ 0,
+ 16079,
+ 0,
+ 0,
+ 0,
+ 16080,
+ 0,
+ 16081,
+ 0,
+ 0,
+ 0,
+ 16088,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16089,
+ 16093,
+ 0,
+ 16097,
+ 0,
+ 16103,
+ 0,
+ 16104,
+ 16105,
+ 0,
+ 0,
+ 16256,
+ 0,
+ 0,
+ 16259,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16260,
+ 16261,
+ 0,
+ 0,
+ 16262,
+ 0,
+ 0,
+ 16263,
+ 0,
+ 16268,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16269,
+ 0,
+ 0,
+ 16270,
+ 16273,
+ 0,
+ 16274,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16275,
+ 16276,
+ 16277,
+ 16280,
+ 0,
+ 0,
+ 0,
+ 16281,
+ 16284,
+ 0,
+ 0,
+ 0,
+ 16286,
+ 0,
+ 16289,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16290,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16291,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16292,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16293,
+ 16295,
+ 16297,
+ 0,
+ 16302,
+ 0,
+ 16304,
+ 0,
+ 16305,
+ 0,
+ 16306,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16307,
+ 16308,
+ 16312,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16313,
+ 16315,
+ 0,
+ 16318,
+ 0,
+ 0,
+ 0,
+ 16321,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16326,
+ 16333,
+ 16336,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16337,
+ 16340,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16345,
+ 0,
+ 0,
+ 16346,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16347,
+ 0,
+ 0,
+ 16348,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16349,
+ 0,
+ 0,
+ 0,
+ 16350,
+ 0,
+ 16357,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16359,
+ 16360,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16362,
+ 16363,
+ 16364,
+ 16365,
+ 0,
+ 0,
+ 16366,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16367,
+ 16368,
+ 0,
+ 16369,
+ 16374,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16376,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16378,
+ 16379,
+ 0,
+ 16380,
+ 0,
+ 0,
+ 0,
+ 16381,
+ 16383,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16390,
+ 0,
+ 0,
+ 0,
+ 16399,
+ 0,
+ 16402,
+ 16404,
+ 16406,
+ 16407,
+ 0,
+ 0,
+ 0,
+ 16409,
+ 16411,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16412,
+ 0,
+ 16413,
+ 16415,
+ 16423,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16424,
+ 0,
+ 0,
+ 0,
+ 16428,
+ 16434,
+ 16435,
+ 16449,
+ 0,
+ 16450,
+ 16451,
+ 0,
+ 0,
+ 0,
+ 16453,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16454,
+ 0,
+ 0,
+ 16456,
+ 16458,
+ 0,
+ 0,
+ 16459,
+ 0,
+ 0,
+ 16460,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16462,
+ 0,
+ 16463,
+ 0,
+ 0,
+ 16466,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16479,
+ 0,
+ 0,
+ 16480,
+ 0,
+ 16481,
+ 16484,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16485,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16489,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16491,
+ 0,
+ 0,
+ 16498,
+ 0,
+ 0,
+ 16503,
+ 0,
+ 16505,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16506,
+ 0,
+ 0,
+ 0,
+ 16508,
+ 16509,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16511,
+ 16513,
+ 0,
+ 0,
+ 0,
+ 16516,
+ 0,
+ 16517,
+ 0,
+ 16519,
+ 0,
+ 16529,
+ 0,
+ 0,
+ 16531,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16534,
+ 0,
+ 0,
+ 16541,
+ 16542,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16543,
+ 16547,
+ 16548,
+ 0,
+ 0,
+ 0,
+ 16551,
+ 0,
+ 16552,
+ 0,
+ 0,
+ 0,
+ 16553,
+ 0,
+ 0,
+ 16558,
+ 0,
+ 0,
+ 16562,
+ 16565,
+ 0,
+ 0,
+ 0,
+ 16570,
+ 0,
+ 0,
+ 0,
+ 16573,
+ 16585,
+ 0,
+ 0,
+ 0,
+ 16586,
+ 16587,
+ 16595,
+ 0,
+ 16596,
+ 0,
+ 16598,
+ 0,
+ 0,
+ 0,
+ 16600,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16601,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16603,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16604,
+ 16612,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16613,
+ 0,
+ 16618,
+ 0,
+ 0,
+ 0,
+ 16640,
+ 0,
+ 0,
+ 16641,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16645,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16646,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16651,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16653,
+ 16654,
+ 0,
+ 0,
+ 0,
+ 16655,
+ 0,
+ 0,
+ 16656,
+ 16667,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16671,
+ 0,
+ 16672,
+ 0,
+ 0,
+ 0,
+ 16673,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16676,
+ 0,
+ 16686,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16689,
+ 0,
+ 16690,
+ 0,
+ 16692,
+ 0,
+ 16693,
+ 0,
+ 16694,
+ 0,
+ 16696,
+ 0,
+ 0,
+ 0,
+ 16705,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16707,
+ 0,
+ 0,
+ 0,
+ 16709,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16711,
+ 0,
+ 16712,
+ 16713,
+ 0,
+ 0,
+ 0,
+ 16715,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16716,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16718,
+ 16724,
+ 0,
+ 0,
+ 16726,
+ 16727,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16728,
+ 0,
+ 16729,
+ 0,
+ 0,
+ 16730,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16731,
+ 0,
+ 0,
+ 0,
+ 16732,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16734,
+ 16738,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16743,
+ 0,
+ 0,
+ 16745,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16749,
+ 0,
+ 16752,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16756,
+ 0,
+ 0,
+ 16758,
+ 0,
+ 16759,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16760,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16762,
+ 0,
+ 16769,
+ 0,
+ 16770,
+ 0,
+ 16772,
+ 0,
+ 0,
+ 0,
+ 16777,
+ 16780,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16781,
+ 0,
+ 0,
+ 16782,
+ 0,
+ 16784,
+ 0,
+ 0,
+ 16785,
+ 16787,
+ 16792,
+ 0,
+ 0,
+ 16794,
+ 0,
+ 0,
+ 0,
+ 16798,
+ 0,
+ 0,
+ 16809,
+ 0,
+ 0,
+ 16814,
+ 16816,
+ 16817,
+ 0,
+ 16819,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16820,
+ 0,
+ 0,
+ 16836,
+ 16839,
+ 0,
+ 0,
+ 16841,
+ 16851,
+ 16857,
+ 0,
+ 0,
+ 16858,
+ 16859,
+ 0,
+ 0,
+ 16860,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16862,
+ 0,
+ 16863,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16864,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16876,
+ 0,
+ 16881,
+ 16882,
+ 0,
+ 16885,
+ 16886,
+ 0,
+ 16887,
+ 0,
+ 0,
+ 0,
+ 16889,
+ 16891,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16894,
+ 16895,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16897,
+ 0,
+ 16898,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16913,
+ 0,
+ 0,
+ 16924,
+ 16925,
+ 16926,
+ 0,
+ 0,
+ 16927,
+ 0,
+ 0,
+ 0,
+ 16937,
+ 16938,
+ 0,
+ 0,
+ 0,
+ 16940,
+ 16941,
+ 0,
+ 0,
+ 0,
+ 16942,
+ 16945,
+ 0,
+ 16946,
+ 16949,
+ 16950,
+ 0,
+ 0,
+ 0,
+ 16952,
+ 16955,
+ 0,
+ 0,
+ 0,
+ 16965,
+ 0,
+ 16969,
+ 0,
+ 0,
+ 16975,
+ 0,
+ 0,
+ 16976,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16978,
+ 0,
+ 0,
+ 16981,
+ 0,
+ 16983,
+ 16989,
+ 0,
+ 0,
+ 0,
+ 0,
+ 16990,
+ 0,
+ 0,
+ 16991,
+ 0,
+ 0,
+ 0,
+ 16993,
+ 0,
+ 16994,
+ 16996,
+ 17000,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17002,
+ 17004,
+ 0,
+ 17006,
+ 0,
+ 0,
+ 17007,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17008,
+ 17013,
+ 17014,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17021,
+ 0,
+ 17031,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17033,
+ 17036,
+ 0,
+ 17038,
+ 0,
+ 0,
+ 17039,
+ 0,
+ 17045,
+ 0,
+ 0,
+ 17046,
+ 17047,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17048,
+ 0,
+ 17049,
+ 17050,
+ 0,
+ 17051,
+ 17053,
+ 0,
+ 17054,
+ 0,
+ 17055,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17063,
+ 0,
+ 0,
+ 17064,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17065,
+ 0,
+ 0,
+ 17068,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17072,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17073,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17074,
+ 0,
+ 17080,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17081,
+ 17083,
+ 17084,
+ 0,
+ 0,
+ 0,
+ 17085,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17092,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17093,
+ 0,
+ 17095,
+ 17102,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17103,
+ 0,
+ 0,
+ 17105,
+ 0,
+ 17107,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17114,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17115,
+ 17125,
+ 17127,
+ 0,
+ 0,
+ 17128,
+ 0,
+ 0,
+ 0,
+ 17129,
+ 17130,
+ 0,
+ 17131,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17132,
+ 17135,
+ 17145,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17146,
+ 0,
+ 17147,
+ 0,
+ 17148,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17149,
+ 17150,
+ 0,
+ 17151,
+ 17153,
+ 0,
+ 17155,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17163,
+ 17171,
+ 0,
+ 17174,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17179,
+ 0,
+ 0,
+ 17182,
+ 17185,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17186,
+ 0,
+ 0,
+ 17188,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17189,
+ 17191,
+ 0,
+ 17194,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17195,
+ 17196,
+ 17203,
+ 17204,
+ 0,
+ 0,
+ 17205,
+ 17217,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17218,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17219,
+ 0,
+ 17220,
+ 0,
+ 17221,
+ 0,
+ 0,
+ 17230,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17236,
+ 0,
+ 17238,
+ 17239,
+ 0,
+ 0,
+ 0,
+ 17241,
+ 17244,
+ 0,
+ 0,
+ 17245,
+ 0,
+ 17248,
+ 0,
+ 0,
+ 17251,
+ 0,
+ 17252,
+ 0,
+ 0,
+ 17264,
+ 0,
+ 17266,
+ 0,
+ 0,
+ 0,
+ 17268,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17271,
+ 17272,
+ 0,
+ 17273,
+ 0,
+ 17295,
+ 0,
+ 17302,
+ 0,
+ 17305,
+ 0,
+ 0,
+ 0,
+ 17306,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17308,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17309,
+ 0,
+ 17310,
+ 17313,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17314,
+ 17315,
+ 0,
+ 17317,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17318,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17320,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17334,
+ 0,
+ 17344,
+ 17348,
+ 0,
+ 0,
+ 0,
+ 17350,
+ 17351,
+ 0,
+ 0,
+ 17353,
+ 0,
+ 0,
+ 17354,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17355,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17356,
+ 17357,
+ 0,
+ 0,
+ 17359,
+ 0,
+ 0,
+ 0,
+ 17371,
+ 0,
+ 17372,
+ 0,
+ 0,
+ 0,
+ 17393,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17394,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17395,
+ 0,
+ 0,
+ 17399,
+ 0,
+ 0,
+ 0,
+ 17401,
+ 17417,
+ 0,
+ 17418,
+ 0,
+ 17419,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17422,
+ 17423,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17424,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17428,
+ 17429,
+ 17433,
+ 0,
+ 0,
+ 0,
+ 17437,
+ 0,
+ 0,
+ 17441,
+ 0,
+ 0,
+ 17442,
+ 0,
+ 0,
+ 17453,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17454,
+ 17456,
+ 17462,
+ 0,
+ 0,
+ 17466,
+ 0,
+ 0,
+ 17468,
+ 0,
+ 0,
+ 17469,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17470,
+ 0,
+ 17475,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17479,
+ 0,
+ 0,
+ 0,
+ 17483,
+ 17484,
+ 0,
+ 17485,
+ 0,
+ 17486,
+ 0,
+ 17491,
+ 17492,
+ 0,
+ 0,
+ 17493,
+ 0,
+ 17494,
+ 17495,
+ 0,
+ 0,
+ 0,
+ 17496,
+ 0,
+ 0,
+ 0,
+ 17497,
+ 0,
+ 0,
+ 0,
+ 17502,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17503,
+ 0,
+ 17505,
+ 0,
+ 17507,
+ 0,
+ 0,
+ 0,
+ 17512,
+ 17513,
+ 17514,
+ 0,
+ 0,
+ 17515,
+ 0,
+ 0,
+ 0,
+ 17519,
+ 0,
+ 0,
+ 0,
+ 17522,
+ 0,
+ 0,
+ 17523,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17527,
+ 0,
+ 0,
+ 0,
+ 17528,
+ 0,
+ 0,
+ 0,
+ 17534,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17536,
+ 0,
+ 0,
+ 0,
+ 17539,
+ 0,
+ 17540,
+ 17543,
+ 17549,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17556,
+ 0,
+ 0,
+ 17558,
+ 0,
+ 17559,
+ 0,
+ 0,
+ 17560,
+ 0,
+ 0,
+ 0,
+ 17563,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17564,
+ 0,
+ 0,
+ 17565,
+ 17566,
+ 0,
+ 17567,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17569,
+ 17570,
+ 0,
+ 17575,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17581,
+ 0,
+ 0,
+ 0,
+ 17582,
+ 17583,
+ 0,
+ 17586,
+ 0,
+ 0,
+ 17587,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17588,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17596,
+ 17597,
+ 0,
+ 0,
+ 17598,
+ 17600,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17601,
+ 0,
+ 0,
+ 0,
+ 17604,
+ 0,
+ 0,
+ 17605,
+ 0,
+ 0,
+ 17607,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17612,
+ 0,
+ 0,
+ 17618,
+ 0,
+ 17621,
+ 17622,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17623,
+ 0,
+ 0,
+ 17624,
+ 0,
+ 0,
+ 17630,
+ 0,
+ 0,
+ 17631,
+ 17633,
+ 17634,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17635,
+ 0,
+ 0,
+ 17636,
+ 0,
+ 0,
+ 17637,
+ 0,
+ 17638,
+ 0,
+ 17640,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17641,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17643,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17645,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17646,
+ 17662,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17663,
+ 17664,
+ 0,
+ 17665,
+ 17666,
+ 0,
+ 0,
+ 0,
+ 17669,
+ 17671,
+ 17673,
+ 0,
+ 17679,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17684,
+ 0,
+ 0,
+ 0,
+ 17686,
+ 0,
+ 17714,
+ 0,
+ 0,
+ 17720,
+ 17722,
+ 17726,
+ 0,
+ 0,
+ 17728,
+ 0,
+ 0,
+ 17729,
+ 0,
+ 0,
+ 0,
+ 17732,
+ 0,
+ 17733,
+ 0,
+ 17734,
+ 0,
+ 0,
+ 0,
+ 17735,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17737,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17739,
+ 0,
+ 0,
+ 0,
+ 17741,
+ 17742,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17743,
+ 17744,
+ 17745,
+ 0,
+ 0,
+ 0,
+ 17749,
+ 0,
+ 17750,
+ 17751,
+ 17752,
+ 17754,
+ 17761,
+ 17762,
+ 0,
+ 17763,
+ 0,
+ 17766,
+ 0,
+ 17772,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17775,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17776,
+ 0,
+ 0,
+ 17777,
+ 0,
+ 0,
+ 17778,
+ 17779,
+ 0,
+ 17782,
+ 17783,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17784,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17821,
+ 0,
+ 0,
+ 0,
+ 17822,
+ 0,
+ 0,
+ 0,
+ 17823,
+ 17825,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17826,
+ 17831,
+ 17832,
+ 17833,
+ 0,
+ 0,
+ 17845,
+ 0,
+ 0,
+ 0,
+ 17846,
+ 0,
+ 0,
+ 0,
+ 17848,
+ 17850,
+ 17854,
+ 0,
+ 17855,
+ 0,
+ 0,
+ 17859,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17860,
+ 17861,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17870,
+ 17871,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17872,
+ 0,
+ 0,
+ 0,
+ 17879,
+ 0,
+ 0,
+ 0,
+ 17881,
+ 17883,
+ 0,
+ 17884,
+ 0,
+ 17885,
+ 0,
+ 0,
+ 17886,
+ 0,
+ 0,
+ 17887,
+ 17891,
+ 17953,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17954,
+ 0,
+ 0,
+ 17955,
+ 0,
+ 17968,
+ 0,
+ 0,
+ 17972,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17974,
+ 0,
+ 0,
+ 0,
+ 0,
+ 17976,
+ 17978,
+ 0,
+ 0,
+ 17983,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18003,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18007,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18009,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18010,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18012,
+ 0,
+ 0,
+ 18014,
+ 0,
+ 0,
+ 0,
+ 18015,
+ 0,
+ 0,
+ 0,
+ 18016,
+ 0,
+ 18017,
+ 0,
+ 0,
+ 0,
+ 18030,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18031,
+ 0,
+ 0,
+ 18036,
+ 18037,
+ 18038,
+ 0,
+ 0,
+ 18049,
+ 18056,
+ 0,
+ 18057,
+ 18058,
+ 0,
+ 18059,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18062,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18064,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18067,
+ 0,
+ 0,
+ 0,
+ 18068,
+ 0,
+ 0,
+ 18075,
+ 0,
+ 0,
+ 18078,
+ 18093,
+ 18094,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18097,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18098,
+ 18100,
+ 0,
+ 0,
+ 0,
+ 18108,
+ 0,
+ 18111,
+ 0,
+ 0,
+ 18112,
+ 0,
+ 18113,
+ 0,
+ 0,
+ 18115,
+ 18116,
+ 0,
+ 18118,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18121,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18123,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18124,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18125,
+ 18126,
+ 0,
+ 18127,
+ 0,
+ 0,
+ 18128,
+ 18135,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18150,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18151,
+ 18152,
+ 0,
+ 0,
+ 18156,
+ 18164,
+ 0,
+ 18166,
+ 18171,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18172,
+ 18183,
+ 0,
+ 18184,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18185,
+ 0,
+ 18187,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18188,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18189,
+ 0,
+ 0,
+ 18190,
+ 0,
+ 0,
+ 18191,
+ 18192,
+ 0,
+ 0,
+ 18194,
+ 18195,
+ 18196,
+ 0,
+ 0,
+ 0,
+ 18197,
+ 0,
+ 18203,
+ 0,
+ 18204,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18205,
+ 0,
+ 0,
+ 0,
+ 18207,
+ 18208,
+ 0,
+ 0,
+ 18214,
+ 0,
+ 0,
+ 0,
+ 18215,
+ 18216,
+ 0,
+ 0,
+ 0,
+ 18220,
+ 0,
+ 0,
+ 18222,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18223,
+ 0,
+ 18225,
+ 18231,
+ 0,
+ 18234,
+ 0,
+ 18235,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18240,
+ 0,
+ 0,
+ 18241,
+ 18242,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18243,
+ 18251,
+ 0,
+ 18253,
+ 0,
+ 18254,
+ 0,
+ 0,
+ 0,
+ 18266,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18269,
+ 18270,
+ 18271,
+ 18273,
+ 18281,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18282,
+ 0,
+ 18283,
+ 0,
+ 18284,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18285,
+ 0,
+ 18287,
+ 18289,
+ 0,
+ 0,
+ 18290,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18308,
+ 0,
+ 0,
+ 0,
+ 18310,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18311,
+ 0,
+ 18312,
+ 18313,
+ 0,
+ 18315,
+ 0,
+ 0,
+ 18316,
+ 18320,
+ 0,
+ 18331,
+ 0,
+ 18332,
+ 0,
+ 18336,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18337,
+ 0,
+ 18340,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18341,
+ 0,
+ 18344,
+ 18345,
+ 0,
+ 18346,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18348,
+ 0,
+ 18351,
+ 0,
+ 0,
+ 18356,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18357,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18367,
+ 0,
+ 0,
+ 0,
+ 18368,
+ 0,
+ 18369,
+ 0,
+ 18370,
+ 18371,
+ 0,
+ 0,
+ 0,
+ 18437,
+ 18444,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18445,
+ 18450,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18451,
+ 0,
+ 18452,
+ 0,
+ 0,
+ 0,
+ 18453,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18455,
+ 0,
+ 0,
+ 0,
+ 18456,
+ 0,
+ 18457,
+ 0,
+ 18460,
+ 0,
+ 0,
+ 18461,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18466,
+ 0,
+ 0,
+ 18467,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18473,
+ 0,
+ 0,
+ 0,
+ 18476,
+ 0,
+ 18477,
+ 0,
+ 0,
+ 0,
+ 18478,
+ 18479,
+ 18480,
+ 0,
+ 0,
+ 0,
+ 18485,
+ 0,
+ 0,
+ 0,
+ 18486,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18488,
+ 18490,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18491,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18495,
+ 0,
+ 0,
+ 18496,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18505,
+ 0,
+ 18521,
+ 0,
+ 18522,
+ 18523,
+ 0,
+ 0,
+ 0,
+ 18525,
+ 18526,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18527,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18532,
+ 18533,
+ 0,
+ 18534,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18535,
+ 18537,
+ 0,
+ 18538,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18540,
+ 18541,
+ 18542,
+ 18543,
+ 0,
+ 18546,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18553,
+ 18556,
+ 0,
+ 0,
+ 18558,
+ 0,
+ 0,
+ 18569,
+ 18571,
+ 0,
+ 0,
+ 0,
+ 18572,
+ 0,
+ 18574,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18586,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18588,
+ 0,
+ 0,
+ 18589,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18590,
+ 0,
+ 18592,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18594,
+ 0,
+ 0,
+ 0,
+ 18596,
+ 0,
+ 0,
+ 18597,
+ 18598,
+ 0,
+ 0,
+ 18601,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18602,
+ 0,
+ 0,
+ 0,
+ 18603,
+ 18604,
+ 0,
+ 18605,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18608,
+ 0,
+ 0,
+ 18611,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18612,
+ 0,
+ 18616,
+ 0,
+ 0,
+ 18617,
+ 18619,
+ 0,
+ 0,
+ 0,
+ 18628,
+ 0,
+ 0,
+ 0,
+ 18629,
+ 0,
+ 0,
+ 18630,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18631,
+ 0,
+ 18632,
+ 0,
+ 0,
+ 18635,
+ 18637,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18641,
+ 18643,
+ 18648,
+ 0,
+ 18652,
+ 0,
+ 0,
+ 18653,
+ 0,
+ 18655,
+ 18656,
+ 0,
+ 0,
+ 0,
+ 18657,
+ 0,
+ 0,
+ 18666,
+ 18674,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18677,
+ 18684,
+ 18685,
+ 0,
+ 0,
+ 18686,
+ 0,
+ 0,
+ 18690,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18695,
+ 18696,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18697,
+ 0,
+ 0,
+ 18700,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18702,
+ 0,
+ 18708,
+ 0,
+ 0,
+ 18709,
+ 0,
+ 18710,
+ 0,
+ 0,
+ 18711,
+ 0,
+ 18714,
+ 0,
+ 0,
+ 18718,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18719,
+ 0,
+ 0,
+ 18722,
+ 0,
+ 18726,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18731,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18739,
+ 18741,
+ 0,
+ 0,
+ 18742,
+ 0,
+ 18743,
+ 18744,
+ 18746,
+ 18748,
+ 0,
+ 18752,
+ 18753,
+ 0,
+ 0,
+ 18754,
+ 18763,
+ 0,
+ 18765,
+ 0,
+ 0,
+ 0,
+ 18766,
+ 0,
+ 0,
+ 0,
+ 18769,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18773,
+ 18778,
+ 18779,
+ 18781,
+ 0,
+ 0,
+ 18784,
+ 18787,
+ 0,
+ 18788,
+ 0,
+ 18793,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18795,
+ 0,
+ 0,
+ 18800,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18801,
+ 18804,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18806,
+ 0,
+ 0,
+ 0,
+ 18811,
+ 18815,
+ 18816,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18825,
+ 0,
+ 0,
+ 18827,
+ 18829,
+ 0,
+ 0,
+ 18830,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18831,
+ 0,
+ 0,
+ 18832,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18833,
+ 0,
+ 18840,
+ 0,
+ 18841,
+ 0,
+ 18842,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18843,
+ 0,
+ 18844,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18845,
+ 18846,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18848,
+ 0,
+ 0,
+ 0,
+ 18853,
+ 18860,
+ 0,
+ 0,
+ 18862,
+ 18866,
+ 0,
+ 0,
+ 18867,
+ 18869,
+ 0,
+ 0,
+ 18874,
+ 18881,
+ 18891,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18892,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18895,
+ 0,
+ 18896,
+ 0,
+ 0,
+ 0,
+ 18900,
+ 0,
+ 0,
+ 0,
+ 18901,
+ 0,
+ 18902,
+ 18915,
+ 18916,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18919,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18920,
+ 0,
+ 0,
+ 0,
+ 18921,
+ 18929,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18930,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18932,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18934,
+ 18942,
+ 0,
+ 0,
+ 0,
+ 18951,
+ 18957,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18958,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18959,
+ 18960,
+ 0,
+ 0,
+ 18961,
+ 0,
+ 0,
+ 18962,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18963,
+ 18964,
+ 0,
+ 0,
+ 0,
+ 18965,
+ 0,
+ 18967,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18968,
+ 0,
+ 18969,
+ 0,
+ 18970,
+ 18973,
+ 18976,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18977,
+ 0,
+ 0,
+ 0,
+ 18981,
+ 0,
+ 0,
+ 0,
+ 18990,
+ 0,
+ 18998,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 18999,
+ 19003,
+ 0,
+ 0,
+ 19005,
+ 0,
+ 0,
+ 0,
+ 19006,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19008,
+ 19011,
+ 0,
+ 0,
+ 19018,
+ 0,
+ 0,
+ 19019,
+ 0,
+ 19024,
+ 0,
+ 19031,
+ 19032,
+ 0,
+ 19039,
+ 0,
+ 19041,
+ 19050,
+ 0,
+ 0,
+ 0,
+ 19051,
+ 19055,
+ 19056,
+ 0,
+ 19059,
+ 19063,
+ 19064,
+ 0,
+ 0,
+ 19088,
+ 0,
+ 0,
+ 0,
+ 19093,
+ 19094,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19095,
+ 0,
+ 19096,
+ 0,
+ 0,
+ 0,
+ 19097,
+ 0,
+ 0,
+ 19098,
+ 0,
+ 19099,
+ 19100,
+ 0,
+ 0,
+ 19103,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19111,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19112,
+ 0,
+ 0,
+ 0,
+ 19116,
+ 19117,
+ 0,
+ 19121,
+ 19122,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19123,
+ 19124,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19125,
+ 19126,
+ 0,
+ 19128,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19129,
+ 19130,
+ 19131,
+ 19132,
+ 0,
+ 0,
+ 19146,
+ 0,
+ 0,
+ 19147,
+ 19156,
+ 19158,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19182,
+ 19185,
+ 0,
+ 0,
+ 19187,
+ 0,
+ 0,
+ 0,
+ 19193,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19194,
+ 0,
+ 19197,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19198,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19202,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19203,
+ 0,
+ 19205,
+ 19210,
+ 0,
+ 0,
+ 0,
+ 19213,
+ 0,
+ 19218,
+ 0,
+ 0,
+ 0,
+ 19223,
+ 19229,
+ 0,
+ 0,
+ 19230,
+ 0,
+ 0,
+ 19231,
+ 19232,
+ 19233,
+ 19239,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19240,
+ 0,
+ 19248,
+ 19249,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19254,
+ 0,
+ 19256,
+ 19258,
+ 19259,
+ 0,
+ 0,
+ 19261,
+ 0,
+ 19266,
+ 0,
+ 0,
+ 0,
+ 19272,
+ 0,
+ 19278,
+ 19281,
+ 19282,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19283,
+ 0,
+ 0,
+ 19284,
+ 0,
+ 0,
+ 19285,
+ 19287,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19288,
+ 19291,
+ 0,
+ 19292,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19297,
+ 0,
+ 19298,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19302,
+ 19303,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19304,
+ 19305,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19314,
+ 0,
+ 0,
+ 19315,
+ 0,
+ 0,
+ 19321,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19322,
+ 0,
+ 19333,
+ 0,
+ 19334,
+ 19335,
+ 0,
+ 19336,
+ 19337,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19346,
+ 0,
+ 0,
+ 19353,
+ 0,
+ 19354,
+ 19362,
+ 0,
+ 19366,
+ 19367,
+ 0,
+ 0,
+ 19369,
+ 0,
+ 19375,
+ 0,
+ 19377,
+ 19380,
+ 19388,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19389,
+ 19390,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19392,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19402,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19412,
+ 0,
+ 0,
+ 19413,
+ 19422,
+ 0,
+ 19424,
+ 0,
+ 0,
+ 0,
+ 19425,
+ 0,
+ 0,
+ 0,
+ 19428,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19431,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19432,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19448,
+ 19459,
+ 0,
+ 0,
+ 19461,
+ 0,
+ 19462,
+ 19463,
+ 0,
+ 19467,
+ 19474,
+ 19482,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19494,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19501,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19502,
+ 19504,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19505,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19506,
+ 19507,
+ 0,
+ 0,
+ 0,
+ 19508,
+ 0,
+ 0,
+ 19511,
+ 0,
+ 0,
+ 19514,
+ 0,
+ 19515,
+ 0,
+ 19516,
+ 0,
+ 19518,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19530,
+ 0,
+ 19537,
+ 19538,
+ 0,
+ 19543,
+ 19546,
+ 0,
+ 19547,
+ 19551,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19552,
+ 19553,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19555,
+ 0,
+ 0,
+ 19556,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19560,
+ 19561,
+ 0,
+ 0,
+ 19562,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19565,
+ 19567,
+ 0,
+ 19568,
+ 0,
+ 0,
+ 0,
+ 19569,
+ 19570,
+ 0,
+ 19578,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19580,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19581,
+ 19584,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19585,
+ 19586,
+ 0,
+ 0,
+ 0,
+ 19587,
+ 19588,
+ 0,
+ 19589,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19592,
+ 19593,
+ 19599,
+ 0,
+ 19600,
+ 0,
+ 0,
+ 19604,
+ 0,
+ 0,
+ 19605,
+ 0,
+ 19606,
+ 19608,
+ 19610,
+ 0,
+ 19613,
+ 19614,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19616,
+ 19617,
+ 0,
+ 0,
+ 19618,
+ 0,
+ 0,
+ 19619,
+ 0,
+ 0,
+ 0,
+ 19620,
+ 19621,
+ 19631,
+ 0,
+ 0,
+ 19632,
+ 19634,
+ 19636,
+ 0,
+ 19643,
+ 0,
+ 0,
+ 19644,
+ 19658,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19659,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19675,
+ 19677,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19679,
+ 0,
+ 19683,
+ 0,
+ 19684,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19687,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19688,
+ 19689,
+ 19692,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19695,
+ 19697,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19698,
+ 19699,
+ 0,
+ 0,
+ 19700,
+ 0,
+ 19702,
+ 0,
+ 0,
+ 19703,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19704,
+ 19708,
+ 0,
+ 19710,
+ 0,
+ 19713,
+ 0,
+ 0,
+ 0,
+ 19715,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19718,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19720,
+ 0,
+ 19722,
+ 0,
+ 0,
+ 19725,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19730,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19731,
+ 0,
+ 19734,
+ 19735,
+ 19739,
+ 0,
+ 0,
+ 19740,
+ 0,
+ 19741,
+ 0,
+ 0,
+ 0,
+ 19746,
+ 0,
+ 0,
+ 19747,
+ 0,
+ 19771,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19772,
+ 19775,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19778,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19779,
+ 0,
+ 0,
+ 19780,
+ 19790,
+ 0,
+ 19791,
+ 0,
+ 0,
+ 19792,
+ 0,
+ 0,
+ 0,
+ 19793,
+ 0,
+ 0,
+ 19796,
+ 19797,
+ 0,
+ 0,
+ 0,
+ 19799,
+ 0,
+ 0,
+ 0,
+ 19801,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19803,
+ 0,
+ 19804,
+ 0,
+ 19805,
+ 0,
+ 0,
+ 19807,
+ 0,
+ 0,
+ 0,
+ 19808,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19809,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19816,
+ 0,
+ 19821,
+ 0,
+ 19822,
+ 19830,
+ 19831,
+ 0,
+ 0,
+ 0,
+ 19833,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19838,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19839,
+ 0,
+ 0,
+ 19843,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19845,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19847,
+ 0,
+ 0,
+ 19848,
+ 0,
+ 19849,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19851,
+ 0,
+ 0,
+ 0,
+ 19854,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19864,
+ 0,
+ 19865,
+ 0,
+ 19866,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19868,
+ 0,
+ 0,
+ 19870,
+ 0,
+ 0,
+ 19871,
+ 0,
+ 0,
+ 19872,
+ 19873,
+ 19875,
+ 0,
+ 19880,
+ 19882,
+ 19884,
+ 0,
+ 0,
+ 19885,
+ 19886,
+ 19888,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19890,
+ 19892,
+ 19893,
+ 0,
+ 0,
+ 19894,
+ 0,
+ 0,
+ 0,
+ 19895,
+ 0,
+ 19896,
+ 19902,
+ 0,
+ 0,
+ 19903,
+ 0,
+ 0,
+ 19905,
+ 0,
+ 0,
+ 0,
+ 19906,
+ 0,
+ 19908,
+ 0,
+ 19909,
+ 19911,
+ 0,
+ 0,
+ 0,
+ 19913,
+ 19920,
+ 0,
+ 19938,
+ 19939,
+ 19940,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19942,
+ 0,
+ 19943,
+ 0,
+ 19945,
+ 0,
+ 0,
+ 0,
+ 19951,
+ 19952,
+ 19954,
+ 19960,
+ 0,
+ 19965,
+ 0,
+ 19971,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 19975,
+ 0,
+ 19976,
+ 0,
+ 19990,
+ 0,
+ 0,
+ 19991,
+ 0,
+ 19993,
+ 0,
+ 19995,
+ 0,
+ 0,
+ 0,
+ 19998,
+ 19999,
+ 20001,
+ 0,
+ 20003,
+ 20005,
+ 0,
+ 20011,
+ 20012,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20014,
+ 0,
+ 20020,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20021,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20023,
+ 20024,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20025,
+ 0,
+ 0,
+ 20027,
+ 0,
+ 0,
+ 20029,
+ 0,
+ 0,
+ 20032,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20044,
+ 20045,
+ 0,
+ 20048,
+ 20049,
+ 0,
+ 0,
+ 20050,
+ 0,
+ 20052,
+ 0,
+ 0,
+ 20054,
+ 20057,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20059,
+ 0,
+ 0,
+ 20061,
+ 0,
+ 20062,
+ 0,
+ 20064,
+ 0,
+ 0,
+ 20066,
+ 0,
+ 0,
+ 20067,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20069,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20070,
+ 20071,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20072,
+ 0,
+ 0,
+ 20073,
+ 20074,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20075,
+ 0,
+ 20078,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20080,
+ 0,
+ 20081,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20095,
+ 0,
+ 20098,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20107,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20112,
+ 0,
+ 0,
+ 0,
+ 20113,
+ 20114,
+ 0,
+ 0,
+ 0,
+ 20115,
+ 20123,
+ 20124,
+ 0,
+ 0,
+ 0,
+ 20131,
+ 20133,
+ 20134,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20136,
+ 0,
+ 0,
+ 20137,
+ 20138,
+ 20150,
+ 0,
+ 20152,
+ 0,
+ 0,
+ 0,
+ 20153,
+ 0,
+ 0,
+ 20154,
+ 0,
+ 0,
+ 0,
+ 20158,
+ 0,
+ 20163,
+ 0,
+ 0,
+ 20164,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20166,
+ 0,
+ 20168,
+ 0,
+ 20170,
+ 0,
+ 20175,
+ 0,
+ 0,
+ 20178,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20223,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20224,
+ 0,
+ 20226,
+ 0,
+ 0,
+ 20230,
+ 0,
+ 20231,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20232,
+ 0,
+ 0,
+ 20233,
+ 20234,
+ 0,
+ 20244,
+ 0,
+ 20247,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20249,
+ 0,
+ 0,
+ 0,
+ 20250,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20251,
+ 0,
+ 20253,
+ 0,
+ 20254,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20256,
+ 0,
+ 0,
+ 20264,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20266,
+ 0,
+ 0,
+ 0,
+ 20278,
+ 0,
+ 0,
+ 20279,
+ 20282,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20283,
+ 0,
+ 20284,
+ 0,
+ 20285,
+ 0,
+ 20287,
+ 20290,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20292,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20293,
+ 20297,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20299,
+ 0,
+ 20300,
+ 20303,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20307,
+ 0,
+ 0,
+ 20308,
+ 0,
+ 20309,
+ 0,
+ 20310,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20312,
+ 0,
+ 0,
+ 0,
+ 20314,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20315,
+ 20316,
+ 0,
+ 20322,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20339,
+ 0,
+ 0,
+ 0,
+ 20342,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20352,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20362,
+ 0,
+ 0,
+ 20365,
+ 0,
+ 20375,
+ 20377,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20378,
+ 20379,
+ 0,
+ 20380,
+ 0,
+ 0,
+ 20381,
+ 0,
+ 20382,
+ 0,
+ 20383,
+ 0,
+ 20388,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20390,
+ 20392,
+ 20393,
+ 0,
+ 0,
+ 20395,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20396,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20398,
+ 20415,
+ 0,
+ 0,
+ 0,
+ 20417,
+ 0,
+ 0,
+ 20420,
+ 0,
+ 0,
+ 20426,
+ 20428,
+ 0,
+ 20431,
+ 0,
+ 0,
+ 20432,
+ 0,
+ 20433,
+ 20434,
+ 20435,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20440,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20442,
+ 0,
+ 20443,
+ 0,
+ 20446,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20448,
+ 0,
+ 20451,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20452,
+ 20453,
+ 0,
+ 0,
+ 20454,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20457,
+ 0,
+ 20458,
+ 0,
+ 0,
+ 0,
+ 20465,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20469,
+ 0,
+ 0,
+ 0,
+ 20473,
+ 0,
+ 20476,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20477,
+ 0,
+ 0,
+ 20485,
+ 0,
+ 0,
+ 20486,
+ 0,
+ 0,
+ 20487,
+ 0,
+ 20496,
+ 0,
+ 20497,
+ 0,
+ 0,
+ 20498,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20499,
+ 20500,
+ 0,
+ 20501,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20520,
+ 20527,
+ 0,
+ 20529,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20539,
+ 0,
+ 0,
+ 20540,
+ 0,
+ 0,
+ 0,
+ 20543,
+ 0,
+ 0,
+ 0,
+ 20546,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20548,
+ 0,
+ 0,
+ 20563,
+ 0,
+ 0,
+ 20564,
+ 0,
+ 20566,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20589,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20590,
+ 0,
+ 0,
+ 20593,
+ 20594,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20595,
+ 0,
+ 20597,
+ 20598,
+ 0,
+ 0,
+ 0,
+ 20618,
+ 20620,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20621,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20627,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20628,
+ 0,
+ 0,
+ 0,
+ 20629,
+ 0,
+ 20630,
+ 0,
+ 0,
+ 20639,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20707,
+ 0,
+ 0,
+ 20709,
+ 0,
+ 0,
+ 0,
+ 20713,
+ 20714,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20724,
+ 20725,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20726,
+ 20728,
+ 20729,
+ 0,
+ 20733,
+ 0,
+ 20734,
+ 0,
+ 20735,
+ 20736,
+ 0,
+ 20737,
+ 0,
+ 0,
+ 20744,
+ 0,
+ 20745,
+ 0,
+ 20748,
+ 0,
+ 0,
+ 20749,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20750,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20754,
+ 0,
+ 0,
+ 0,
+ 20761,
+ 0,
+ 0,
+ 20763,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20766,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20767,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20768,
+ 0,
+ 20769,
+ 20777,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20785,
+ 0,
+ 0,
+ 0,
+ 20786,
+ 20795,
+ 20801,
+ 0,
+ 20802,
+ 0,
+ 20807,
+ 0,
+ 0,
+ 20808,
+ 0,
+ 0,
+ 20810,
+ 0,
+ 0,
+ 20811,
+ 0,
+ 20812,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20813,
+ 0,
+ 0,
+ 20818,
+ 20820,
+ 20821,
+ 0,
+ 0,
+ 0,
+ 20822,
+ 0,
+ 20823,
+ 0,
+ 0,
+ 0,
+ 20826,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20829,
+ 20830,
+ 20831,
+ 0,
+ 20832,
+ 20836,
+ 0,
+ 0,
+ 20839,
+ 0,
+ 0,
+ 20840,
+ 20842,
+ 0,
+ 20843,
+ 0,
+ 20844,
+ 0,
+ 20854,
+ 0,
+ 0,
+ 0,
+ 20855,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20856,
+ 0,
+ 0,
+ 0,
+ 20869,
+ 0,
+ 0,
+ 20871,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20873,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20876,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20880,
+ 0,
+ 0,
+ 20882,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20883,
+ 20884,
+ 0,
+ 0,
+ 20890,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20891,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20905,
+ 0,
+ 20906,
+ 20910,
+ 0,
+ 0,
+ 20912,
+ 20915,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20916,
+ 0,
+ 20917,
+ 0,
+ 20919,
+ 20920,
+ 20922,
+ 0,
+ 20927,
+ 0,
+ 20928,
+ 20929,
+ 20930,
+ 0,
+ 0,
+ 20935,
+ 0,
+ 0,
+ 20939,
+ 0,
+ 0,
+ 20941,
+ 0,
+ 0,
+ 0,
+ 20943,
+ 0,
+ 0,
+ 0,
+ 20946,
+ 20947,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20950,
+ 0,
+ 20954,
+ 0,
+ 0,
+ 20955,
+ 20964,
+ 0,
+ 0,
+ 20967,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20973,
+ 20975,
+ 0,
+ 0,
+ 0,
+ 20984,
+ 0,
+ 20987,
+ 20988,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 20989,
+ 0,
+ 0,
+ 0,
+ 20995,
+ 0,
+ 20998,
+ 0,
+ 20999,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21000,
+ 21001,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21008,
+ 0,
+ 21010,
+ 0,
+ 21016,
+ 0,
+ 0,
+ 0,
+ 21017,
+ 21018,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21021,
+ 21026,
+ 21027,
+ 21028,
+ 0,
+ 0,
+ 21029,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21030,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21031,
+ 21032,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21037,
+ 0,
+ 0,
+ 21038,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21039,
+ 0,
+ 21041,
+ 0,
+ 21046,
+ 21047,
+ 0,
+ 0,
+ 0,
+ 21049,
+ 21053,
+ 0,
+ 0,
+ 21057,
+ 21064,
+ 21065,
+ 0,
+ 0,
+ 21066,
+ 21067,
+ 0,
+ 0,
+ 0,
+ 21069,
+ 0,
+ 0,
+ 0,
+ 21071,
+ 21072,
+ 0,
+ 0,
+ 21073,
+ 0,
+ 21074,
+ 0,
+ 0,
+ 21078,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21079,
+ 0,
+ 0,
+ 21080,
+ 21081,
+ 0,
+ 0,
+ 21086,
+ 21087,
+ 0,
+ 21089,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21091,
+ 0,
+ 21093,
+ 0,
+ 21094,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21095,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21096,
+ 0,
+ 21098,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21099,
+ 0,
+ 0,
+ 21100,
+ 21101,
+ 21102,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21103,
+ 0,
+ 21104,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21105,
+ 21108,
+ 21109,
+ 0,
+ 0,
+ 21112,
+ 21113,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21115,
+ 21122,
+ 21123,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21125,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21129,
+ 21131,
+ 0,
+ 0,
+ 21134,
+ 0,
+ 0,
+ 0,
+ 21137,
+ 21142,
+ 0,
+ 21143,
+ 0,
+ 0,
+ 21144,
+ 0,
+ 21145,
+ 21146,
+ 0,
+ 21152,
+ 21154,
+ 21155,
+ 21156,
+ 0,
+ 0,
+ 0,
+ 21160,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21161,
+ 0,
+ 21164,
+ 0,
+ 21166,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21170,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21171,
+ 0,
+ 0,
+ 21172,
+ 0,
+ 21174,
+ 0,
+ 21175,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21176,
+ 21179,
+ 21188,
+ 0,
+ 0,
+ 0,
+ 21189,
+ 0,
+ 0,
+ 21190,
+ 0,
+ 0,
+ 0,
+ 21192,
+ 0,
+ 0,
+ 21193,
+ 0,
+ 0,
+ 0,
+ 21198,
+ 0,
+ 21212,
+ 0,
+ 0,
+ 21213,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21215,
+ 21216,
+ 0,
+ 0,
+ 21223,
+ 21225,
+ 0,
+ 21226,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21227,
+ 21228,
+ 0,
+ 0,
+ 21229,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21230,
+ 21236,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21237,
+ 0,
+ 0,
+ 21238,
+ 21239,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21256,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21257,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21259,
+ 0,
+ 0,
+ 0,
+ 21263,
+ 0,
+ 21272,
+ 0,
+ 21274,
+ 0,
+ 21282,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21283,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21294,
+ 0,
+ 0,
+ 21297,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21298,
+ 0,
+ 0,
+ 0,
+ 21299,
+ 0,
+ 21300,
+ 21302,
+ 0,
+ 21316,
+ 0,
+ 21318,
+ 21322,
+ 21323,
+ 0,
+ 21324,
+ 0,
+ 21326,
+ 0,
+ 0,
+ 0,
+ 21327,
+ 21328,
+ 0,
+ 0,
+ 0,
+ 21352,
+ 0,
+ 0,
+ 21354,
+ 21361,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21362,
+ 0,
+ 0,
+ 0,
+ 21363,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21366,
+ 0,
+ 0,
+ 21367,
+ 21372,
+ 21374,
+ 0,
+ 0,
+ 0,
+ 21375,
+ 21377,
+ 0,
+ 21378,
+ 0,
+ 0,
+ 0,
+ 21380,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21381,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21382,
+ 0,
+ 21383,
+ 0,
+ 0,
+ 21384,
+ 0,
+ 0,
+ 21385,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21389,
+ 21390,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21397,
+ 21398,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21399,
+ 0,
+ 21400,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21402,
+ 0,
+ 0,
+ 0,
+ 21403,
+ 21404,
+ 0,
+ 21405,
+ 21406,
+ 0,
+ 0,
+ 0,
+ 21407,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21408,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21409,
+ 0,
+ 21421,
+ 0,
+ 21422,
+ 0,
+ 0,
+ 0,
+ 21425,
+ 21428,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21429,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21433,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21434,
+ 0,
+ 21443,
+ 0,
+ 21444,
+ 21449,
+ 0,
+ 21452,
+ 0,
+ 21453,
+ 21454,
+ 0,
+ 0,
+ 0,
+ 21457,
+ 0,
+ 0,
+ 21458,
+ 0,
+ 0,
+ 0,
+ 21460,
+ 21461,
+ 0,
+ 0,
+ 21464,
+ 0,
+ 0,
+ 0,
+ 21473,
+ 21478,
+ 0,
+ 0,
+ 21479,
+ 0,
+ 0,
+ 21481,
+ 21483,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21484,
+ 0,
+ 0,
+ 21485,
+ 21486,
+ 0,
+ 0,
+ 21488,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21523,
+ 0,
+ 0,
+ 21525,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21526,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21529,
+ 21530,
+ 0,
+ 0,
+ 21531,
+ 0,
+ 0,
+ 21533,
+ 0,
+ 0,
+ 21539,
+ 21564,
+ 0,
+ 21567,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21575,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21577,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21591,
+ 0,
+ 0,
+ 21604,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21605,
+ 0,
+ 21606,
+ 0,
+ 0,
+ 21617,
+ 21618,
+ 21619,
+ 21620,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21623,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21631,
+ 0,
+ 21635,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21639,
+ 21646,
+ 21653,
+ 21662,
+ 0,
+ 0,
+ 21663,
+ 21664,
+ 0,
+ 21666,
+ 0,
+ 0,
+ 21667,
+ 0,
+ 21670,
+ 21672,
+ 21673,
+ 0,
+ 21674,
+ 21683,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21684,
+ 0,
+ 21694,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21695,
+ 21700,
+ 0,
+ 21703,
+ 0,
+ 21704,
+ 0,
+ 0,
+ 21709,
+ 0,
+ 0,
+ 0,
+ 21710,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21711,
+ 0,
+ 0,
+ 0,
+ 21712,
+ 0,
+ 21717,
+ 0,
+ 21730,
+ 0,
+ 0,
+ 0,
+ 21731,
+ 21733,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21737,
+ 21741,
+ 21742,
+ 0,
+ 21747,
+ 0,
+ 0,
+ 0,
+ 21749,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21750,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21752,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21753,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21755,
+ 21756,
+ 0,
+ 21757,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21760,
+ 0,
+ 0,
+ 21763,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21764,
+ 0,
+ 0,
+ 21766,
+ 0,
+ 0,
+ 21767,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21773,
+ 0,
+ 21774,
+ 0,
+ 0,
+ 21775,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21776,
+ 0,
+ 0,
+ 21777,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21780,
+ 21787,
+ 21788,
+ 21791,
+ 0,
+ 0,
+ 0,
+ 21797,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21805,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21806,
+ 0,
+ 21807,
+ 21809,
+ 0,
+ 21810,
+ 21811,
+ 0,
+ 21817,
+ 21819,
+ 21820,
+ 0,
+ 21823,
+ 0,
+ 21824,
+ 0,
+ 0,
+ 21825,
+ 0,
+ 0,
+ 21826,
+ 21832,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21833,
+ 21848,
+ 21849,
+ 0,
+ 0,
+ 21867,
+ 21870,
+ 21871,
+ 21873,
+ 0,
+ 0,
+ 0,
+ 21874,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21875,
+ 0,
+ 21878,
+ 0,
+ 0,
+ 0,
+ 21879,
+ 0,
+ 21881,
+ 21886,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21887,
+ 0,
+ 0,
+ 21888,
+ 21894,
+ 21895,
+ 21897,
+ 0,
+ 21901,
+ 0,
+ 21904,
+ 0,
+ 0,
+ 21906,
+ 0,
+ 0,
+ 0,
+ 21909,
+ 21910,
+ 21911,
+ 0,
+ 0,
+ 21912,
+ 0,
+ 0,
+ 21913,
+ 21914,
+ 21915,
+ 0,
+ 21919,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21921,
+ 0,
+ 0,
+ 21922,
+ 21933,
+ 21939,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21944,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21945,
+ 0,
+ 21947,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21949,
+ 0,
+ 0,
+ 0,
+ 21950,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21951,
+ 0,
+ 21952,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21954,
+ 21957,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21958,
+ 0,
+ 21959,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21962,
+ 21963,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21964,
+ 21965,
+ 0,
+ 0,
+ 21969,
+ 21970,
+ 0,
+ 0,
+ 0,
+ 21974,
+ 0,
+ 0,
+ 21980,
+ 21981,
+ 0,
+ 21982,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 21985,
+ 0,
+ 21988,
+ 0,
+ 21992,
+ 0,
+ 21999,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22001,
+ 0,
+ 22002,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22003,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22004,
+ 0,
+ 0,
+ 0,
+ 22008,
+ 0,
+ 22009,
+ 22015,
+ 0,
+ 0,
+ 22016,
+ 0,
+ 0,
+ 0,
+ 22017,
+ 22019,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22020,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22021,
+ 22037,
+ 0,
+ 22039,
+ 0,
+ 0,
+ 0,
+ 22040,
+ 0,
+ 0,
+ 0,
+ 22048,
+ 22049,
+ 0,
+ 0,
+ 22053,
+ 22055,
+ 22056,
+ 22059,
+ 0,
+ 0,
+ 22060,
+ 22061,
+ 0,
+ 0,
+ 22064,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22066,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22073,
+ 0,
+ 0,
+ 0,
+ 22074,
+ 22075,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22076,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22077,
+ 22084,
+ 22099,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22104,
+ 0,
+ 0,
+ 22107,
+ 0,
+ 22108,
+ 0,
+ 22109,
+ 0,
+ 22110,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22111,
+ 22119,
+ 0,
+ 22120,
+ 22122,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22125,
+ 0,
+ 0,
+ 0,
+ 22128,
+ 22129,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22141,
+ 0,
+ 0,
+ 0,
+ 22142,
+ 0,
+ 0,
+ 22144,
+ 22146,
+ 0,
+ 22148,
+ 22149,
+ 22151,
+ 22154,
+ 0,
+ 0,
+ 0,
+ 22162,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22164,
+ 22177,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22179,
+ 0,
+ 22182,
+ 22183,
+ 0,
+ 0,
+ 22184,
+ 22188,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22190,
+ 0,
+ 22194,
+ 22201,
+ 0,
+ 0,
+ 22208,
+ 0,
+ 22209,
+ 0,
+ 22212,
+ 0,
+ 0,
+ 22215,
+ 0,
+ 22223,
+ 22231,
+ 0,
+ 0,
+ 22232,
+ 0,
+ 22234,
+ 0,
+ 0,
+ 22235,
+ 22236,
+ 0,
+ 22237,
+ 0,
+ 22240,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22241,
+ 0,
+ 0,
+ 0,
+ 22242,
+ 22246,
+ 22247,
+ 0,
+ 0,
+ 0,
+ 22259,
+ 22268,
+ 0,
+ 22269,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22270,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22271,
+ 0,
+ 22272,
+ 0,
+ 22277,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22278,
+ 22280,
+ 22283,
+ 22286,
+ 0,
+ 0,
+ 22287,
+ 22289,
+ 0,
+ 0,
+ 22290,
+ 0,
+ 22293,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22295,
+ 0,
+ 22301,
+ 22302,
+ 0,
+ 0,
+ 0,
+ 22305,
+ 0,
+ 22308,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22315,
+ 0,
+ 0,
+ 0,
+ 22317,
+ 0,
+ 22334,
+ 0,
+ 0,
+ 0,
+ 22335,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22336,
+ 0,
+ 22338,
+ 22344,
+ 0,
+ 22347,
+ 22349,
+ 0,
+ 22350,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22357,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22358,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22359,
+ 22360,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22361,
+ 22366,
+ 0,
+ 0,
+ 22369,
+ 0,
+ 22370,
+ 22373,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22375,
+ 0,
+ 22377,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22378,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22381,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22382,
+ 0,
+ 22383,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22391,
+ 0,
+ 0,
+ 22392,
+ 22395,
+ 22396,
+ 22402,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22405,
+ 0,
+ 0,
+ 22406,
+ 0,
+ 0,
+ 22408,
+ 0,
+ 0,
+ 22409,
+ 22410,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22424,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22426,
+ 0,
+ 0,
+ 0,
+ 22427,
+ 0,
+ 22428,
+ 0,
+ 22432,
+ 0,
+ 22435,
+ 22442,
+ 22443,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22444,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22446,
+ 0,
+ 22454,
+ 0,
+ 22455,
+ 0,
+ 0,
+ 0,
+ 22465,
+ 0,
+ 22470,
+ 0,
+ 22471,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22472,
+ 22473,
+ 0,
+ 22487,
+ 0,
+ 0,
+ 0,
+ 22488,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22489,
+ 0,
+ 0,
+ 22499,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22514,
+ 0,
+ 0,
+ 22515,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22516,
+ 0,
+ 0,
+ 0,
+ 22517,
+ 22520,
+ 0,
+ 0,
+ 0,
+ 22534,
+ 0,
+ 0,
+ 22535,
+ 0,
+ 0,
+ 22536,
+ 0,
+ 22540,
+ 22553,
+ 0,
+ 22555,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22561,
+ 0,
+ 0,
+ 22562,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22566,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22567,
+ 22568,
+ 0,
+ 0,
+ 22575,
+ 0,
+ 22579,
+ 0,
+ 22582,
+ 22583,
+ 22585,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22586,
+ 0,
+ 0,
+ 22587,
+ 0,
+ 0,
+ 22590,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22591,
+ 0,
+ 22592,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22593,
+ 0,
+ 22602,
+ 0,
+ 0,
+ 22604,
+ 0,
+ 0,
+ 22609,
+ 0,
+ 0,
+ 22618,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22619,
+ 0,
+ 22624,
+ 22625,
+ 0,
+ 0,
+ 22638,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22639,
+ 0,
+ 0,
+ 22640,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22644,
+ 0,
+ 22645,
+ 22647,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22652,
+ 22653,
+ 0,
+ 0,
+ 0,
+ 22654,
+ 0,
+ 22655,
+ 0,
+ 0,
+ 0,
+ 22656,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22673,
+ 22675,
+ 22676,
+ 0,
+ 0,
+ 22678,
+ 22679,
+ 0,
+ 22691,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22693,
+ 0,
+ 0,
+ 22696,
+ 0,
+ 22699,
+ 22707,
+ 22708,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22718,
+ 0,
+ 22719,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22723,
+ 0,
+ 0,
+ 0,
+ 22724,
+ 22725,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22726,
+ 22728,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22729,
+ 0,
+ 0,
+ 22731,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22732,
+ 22735,
+ 22736,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22739,
+ 0,
+ 22749,
+ 0,
+ 0,
+ 22751,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22758,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22760,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22764,
+ 22765,
+ 22766,
+ 0,
+ 22768,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22769,
+ 22770,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22771,
+ 0,
+ 0,
+ 22772,
+ 22775,
+ 0,
+ 22776,
+ 22777,
+ 22780,
+ 0,
+ 0,
+ 22782,
+ 22784,
+ 0,
+ 22787,
+ 0,
+ 22789,
+ 22796,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22798,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22802,
+ 0,
+ 22803,
+ 22804,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22805,
+ 0,
+ 0,
+ 22810,
+ 22811,
+ 22814,
+ 22816,
+ 0,
+ 22825,
+ 22826,
+ 0,
+ 22831,
+ 22833,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22834,
+ 0,
+ 22836,
+ 22838,
+ 0,
+ 22839,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22840,
+ 0,
+ 22847,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22856,
+ 22857,
+ 0,
+ 22858,
+ 22859,
+ 0,
+ 0,
+ 22862,
+ 0,
+ 0,
+ 22864,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22865,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22866,
+ 0,
+ 22867,
+ 22868,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22869,
+ 0,
+ 22871,
+ 0,
+ 22872,
+ 0,
+ 22873,
+ 22881,
+ 22882,
+ 22884,
+ 22885,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22886,
+ 22887,
+ 0,
+ 22894,
+ 0,
+ 22895,
+ 0,
+ 0,
+ 0,
+ 22900,
+ 0,
+ 22901,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22904,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22905,
+ 22907,
+ 0,
+ 0,
+ 0,
+ 22915,
+ 22917,
+ 0,
+ 0,
+ 22918,
+ 0,
+ 0,
+ 0,
+ 22920,
+ 0,
+ 0,
+ 0,
+ 22929,
+ 22930,
+ 0,
+ 0,
+ 0,
+ 22941,
+ 22942,
+ 0,
+ 0,
+ 0,
+ 22943,
+ 0,
+ 0,
+ 0,
+ 22944,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22946,
+ 0,
+ 22947,
+ 0,
+ 0,
+ 22954,
+ 0,
+ 22956,
+ 0,
+ 0,
+ 22962,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22963,
+ 0,
+ 0,
+ 22964,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22965,
+ 0,
+ 22968,
+ 0,
+ 0,
+ 0,
+ 22969,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22970,
+ 0,
+ 22971,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22978,
+ 0,
+ 0,
+ 22979,
+ 0,
+ 22987,
+ 0,
+ 0,
+ 22989,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 22990,
+ 0,
+ 23005,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23006,
+ 23007,
+ 23008,
+ 0,
+ 0,
+ 23023,
+ 23024,
+ 23029,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23030,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23032,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23035,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23038,
+ 0,
+ 0,
+ 0,
+ 23048,
+ 0,
+ 23049,
+ 23052,
+ 23053,
+ 23060,
+ 23061,
+ 0,
+ 23063,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23067,
+ 23068,
+ 0,
+ 0,
+ 0,
+ 23069,
+ 23073,
+ 0,
+ 0,
+ 0,
+ 23127,
+ 0,
+ 23128,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23129,
+ 0,
+ 23138,
+ 23141,
+ 0,
+ 23149,
+ 0,
+ 0,
+ 23150,
+ 0,
+ 0,
+ 0,
+ 23152,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23154,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23157,
+ 23159,
+ 23160,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23180,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23181,
+ 0,
+ 0,
+ 23188,
+ 0,
+ 23189,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23195,
+ 0,
+ 0,
+ 23196,
+ 23199,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23202,
+ 0,
+ 23204,
+ 0,
+ 23207,
+ 0,
+ 23209,
+ 23210,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23227,
+ 23229,
+ 0,
+ 0,
+ 23230,
+ 23234,
+ 23238,
+ 0,
+ 0,
+ 0,
+ 23245,
+ 23246,
+ 23248,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23249,
+ 23254,
+ 0,
+ 0,
+ 0,
+ 23265,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23268,
+ 0,
+ 23276,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23277,
+ 0,
+ 23297,
+ 0,
+ 23298,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23299,
+ 0,
+ 23302,
+ 0,
+ 0,
+ 23303,
+ 23312,
+ 0,
+ 0,
+ 23314,
+ 0,
+ 23320,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23324,
+ 0,
+ 23325,
+ 0,
+ 23328,
+ 0,
+ 23334,
+ 0,
+ 0,
+ 0,
+ 23337,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23343,
+ 23344,
+ 23346,
+ 0,
+ 23348,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23353,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23355,
+ 0,
+ 23356,
+ 23358,
+ 0,
+ 0,
+ 0,
+ 23359,
+ 23360,
+ 0,
+ 23361,
+ 0,
+ 23367,
+ 0,
+ 23369,
+ 0,
+ 0,
+ 23373,
+ 0,
+ 23378,
+ 23379,
+ 0,
+ 23382,
+ 23383,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23387,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23388,
+ 23390,
+ 0,
+ 0,
+ 23393,
+ 23398,
+ 0,
+ 0,
+ 0,
+ 23399,
+ 0,
+ 0,
+ 0,
+ 23400,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23401,
+ 0,
+ 0,
+ 0,
+ 23415,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23416,
+ 0,
+ 23422,
+ 0,
+ 23443,
+ 23444,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23448,
+ 0,
+ 23454,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23456,
+ 0,
+ 0,
+ 23458,
+ 23464,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23465,
+ 0,
+ 0,
+ 0,
+ 23470,
+ 23471,
+ 0,
+ 0,
+ 23472,
+ 0,
+ 0,
+ 0,
+ 23473,
+ 23496,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23497,
+ 0,
+ 23499,
+ 0,
+ 0,
+ 23502,
+ 0,
+ 0,
+ 23503,
+ 0,
+ 0,
+ 23513,
+ 0,
+ 0,
+ 23515,
+ 0,
+ 0,
+ 0,
+ 23517,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23518,
+ 23519,
+ 23521,
+ 23524,
+ 0,
+ 23525,
+ 23528,
+ 23539,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23541,
+ 0,
+ 0,
+ 23544,
+ 0,
+ 0,
+ 23556,
+ 0,
+ 0,
+ 23557,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23559,
+ 0,
+ 23560,
+ 0,
+ 0,
+ 23561,
+ 0,
+ 0,
+ 23566,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23568,
+ 23569,
+ 23570,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23571,
+ 0,
+ 23574,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23575,
+ 0,
+ 23579,
+ 0,
+ 0,
+ 23581,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23587,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23596,
+ 23598,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23602,
+ 23606,
+ 0,
+ 0,
+ 23607,
+ 0,
+ 23608,
+ 0,
+ 0,
+ 0,
+ 23614,
+ 23616,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23618,
+ 0,
+ 0,
+ 23619,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23621,
+ 23626,
+ 0,
+ 23627,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23629,
+ 0,
+ 23630,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23634,
+ 0,
+ 23636,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23638,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23640,
+ 23667,
+ 0,
+ 23669,
+ 0,
+ 0,
+ 0,
+ 23681,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23682,
+ 0,
+ 23683,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23684,
+ 0,
+ 0,
+ 0,
+ 23685,
+ 23689,
+ 0,
+ 23693,
+ 23694,
+ 23700,
+ 0,
+ 23702,
+ 0,
+ 23709,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23712,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23714,
+ 0,
+ 0,
+ 23715,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23718,
+ 0,
+ 0,
+ 23720,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23722,
+ 0,
+ 0,
+ 0,
+ 23726,
+ 23729,
+ 0,
+ 23741,
+ 23746,
+ 0,
+ 23748,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23749,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23750,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23751,
+ 0,
+ 23753,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23757,
+ 23765,
+ 0,
+ 0,
+ 0,
+ 23770,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23771,
+ 0,
+ 23772,
+ 23781,
+ 0,
+ 0,
+ 23796,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23798,
+ 0,
+ 23799,
+ 0,
+ 0,
+ 0,
+ 23802,
+ 0,
+ 0,
+ 23806,
+ 0,
+ 23807,
+ 0,
+ 0,
+ 23808,
+ 0,
+ 23809,
+ 0,
+ 23819,
+ 0,
+ 0,
+ 0,
+ 23821,
+ 0,
+ 23827,
+ 0,
+ 0,
+ 0,
+ 23829,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23830,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23832,
+ 23833,
+ 23834,
+ 23835,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23837,
+ 23838,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23846,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23847,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23879,
+ 23881,
+ 0,
+ 0,
+ 23882,
+ 23883,
+ 23895,
+ 0,
+ 23899,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23901,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23902,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23903,
+ 23905,
+ 0,
+ 23906,
+ 0,
+ 23907,
+ 23918,
+ 23919,
+ 23920,
+ 0,
+ 23922,
+ 0,
+ 23924,
+ 0,
+ 23927,
+ 0,
+ 23934,
+ 0,
+ 23937,
+ 23941,
+ 0,
+ 23942,
+ 23946,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23955,
+ 23956,
+ 23958,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23959,
+ 0,
+ 23962,
+ 23965,
+ 0,
+ 23966,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23967,
+ 23968,
+ 0,
+ 0,
+ 23973,
+ 0,
+ 0,
+ 23974,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23975,
+ 0,
+ 23976,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23977,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23980,
+ 0,
+ 0,
+ 23984,
+ 0,
+ 23985,
+ 0,
+ 0,
+ 23987,
+ 0,
+ 0,
+ 23988,
+ 23990,
+ 23991,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23992,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23994,
+ 0,
+ 0,
+ 0,
+ 23998,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 23999,
+ 0,
+ 0,
+ 24003,
+ 0,
+ 24004,
+ 0,
+ 24006,
+ 0,
+ 0,
+ 0,
+ 24007,
+ 0,
+ 0,
+ 24008,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24009,
+ 0,
+ 0,
+ 24010,
+ 0,
+ 0,
+ 24011,
+ 0,
+ 0,
+ 24013,
+ 24014,
+ 0,
+ 0,
+ 24015,
+ 24016,
+ 24027,
+ 0,
+ 24028,
+ 24029,
+ 0,
+ 24030,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24033,
+ 24034,
+ 0,
+ 24035,
+ 0,
+ 0,
+ 24036,
+ 0,
+ 0,
+ 24044,
+ 0,
+ 24048,
+ 24049,
+ 24063,
+ 24067,
+ 0,
+ 24068,
+ 24070,
+ 0,
+ 0,
+ 24071,
+ 24078,
+ 24087,
+ 0,
+ 24090,
+ 0,
+ 0,
+ 0,
+ 24095,
+ 0,
+ 24098,
+ 24101,
+ 24104,
+ 24106,
+ 0,
+ 24107,
+ 0,
+ 0,
+ 0,
+ 24108,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24110,
+ 24111,
+ 0,
+ 24113,
+ 0,
+ 0,
+ 24115,
+ 24120,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24124,
+ 0,
+ 24125,
+ 0,
+ 24126,
+ 0,
+ 24127,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24135,
+ 0,
+ 0,
+ 24136,
+ 0,
+ 24137,
+ 24142,
+ 0,
+ 0,
+ 0,
+ 24146,
+ 0,
+ 0,
+ 24147,
+ 24149,
+ 24154,
+ 0,
+ 24163,
+ 0,
+ 0,
+ 0,
+ 24165,
+ 24166,
+ 24167,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24169,
+ 24170,
+ 24175,
+ 0,
+ 0,
+ 0,
+ 24178,
+ 0,
+ 0,
+ 24179,
+ 0,
+ 0,
+ 24181,
+ 0,
+ 24184,
+ 24197,
+ 0,
+ 24201,
+ 24204,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24206,
+ 24212,
+ 24220,
+ 0,
+ 0,
+ 0,
+ 24224,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24226,
+ 0,
+ 24234,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24235,
+ 0,
+ 24236,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24239,
+ 24240,
+ 24241,
+ 0,
+ 0,
+ 24248,
+ 0,
+ 0,
+ 24249,
+ 0,
+ 24251,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24253,
+ 0,
+ 24268,
+ 0,
+ 0,
+ 0,
+ 24269,
+ 0,
+ 24271,
+ 24272,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24273,
+ 0,
+ 0,
+ 24274,
+ 0,
+ 0,
+ 24279,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24280,
+ 0,
+ 24293,
+ 24294,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24296,
+ 0,
+ 0,
+ 24323,
+ 0,
+ 0,
+ 0,
+ 24329,
+ 24330,
+ 24331,
+ 24339,
+ 0,
+ 24351,
+ 0,
+ 0,
+ 24369,
+ 24370,
+ 0,
+ 0,
+ 0,
+ 24371,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24372,
+ 24373,
+ 24374,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24378,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24379,
+ 0,
+ 24381,
+ 0,
+ 24383,
+ 24389,
+ 0,
+ 24390,
+ 0,
+ 0,
+ 24394,
+ 24395,
+ 24400,
+ 0,
+ 0,
+ 0,
+ 24401,
+ 24402,
+ 0,
+ 24406,
+ 0,
+ 0,
+ 0,
+ 24411,
+ 0,
+ 0,
+ 0,
+ 24415,
+ 0,
+ 24416,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24417,
+ 0,
+ 24419,
+ 0,
+ 24422,
+ 0,
+ 24423,
+ 24428,
+ 0,
+ 24435,
+ 0,
+ 0,
+ 0,
+ 24439,
+ 0,
+ 0,
+ 0,
+ 24440,
+ 24442,
+ 24446,
+ 0,
+ 0,
+ 0,
+ 24447,
+ 24448,
+ 24449,
+ 24452,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24453,
+ 24457,
+ 0,
+ 0,
+ 24458,
+ 24459,
+ 24460,
+ 0,
+ 24465,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24470,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24471,
+ 0,
+ 24473,
+ 24474,
+ 24475,
+ 24476,
+ 0,
+ 24478,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24480,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24481,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24482,
+ 24485,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24486,
+ 0,
+ 0,
+ 0,
+ 24488,
+ 0,
+ 0,
+ 0,
+ 24494,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24497,
+ 0,
+ 0,
+ 24498,
+ 0,
+ 0,
+ 0,
+ 24499,
+ 24506,
+ 0,
+ 0,
+ 0,
+ 24507,
+ 0,
+ 0,
+ 24511,
+ 0,
+ 0,
+ 24513,
+ 24514,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24517,
+ 0,
+ 24518,
+ 0,
+ 24520,
+ 0,
+ 24521,
+ 24524,
+ 24525,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24527,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24528,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24537,
+ 24539,
+ 0,
+ 24540,
+ 0,
+ 0,
+ 0,
+ 24548,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24549,
+ 24550,
+ 0,
+ 0,
+ 0,
+ 24553,
+ 24554,
+ 0,
+ 24555,
+ 0,
+ 24556,
+ 0,
+ 24558,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24560,
+ 0,
+ 0,
+ 0,
+ 24561,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24562,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24567,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24569,
+ 0,
+ 0,
+ 0,
+ 24574,
+ 0,
+ 24575,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24577,
+ 24581,
+ 0,
+ 24584,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24585,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24586,
+ 0,
+ 0,
+ 24587,
+ 0,
+ 24588,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24590,
+ 24591,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24592,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24594,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24596,
+ 24597,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24602,
+ 24603,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24604,
+ 0,
+ 0,
+ 24605,
+ 0,
+ 24610,
+ 0,
+ 0,
+ 24611,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24612,
+ 24615,
+ 24616,
+ 24624,
+ 0,
+ 0,
+ 0,
+ 24627,
+ 0,
+ 24638,
+ 24639,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24640,
+ 0,
+ 0,
+ 0,
+ 24655,
+ 24656,
+ 24657,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24662,
+ 0,
+ 24663,
+ 24664,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24665,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24667,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24668,
+ 24669,
+ 0,
+ 24670,
+ 24674,
+ 0,
+ 0,
+ 0,
+ 24675,
+ 0,
+ 24678,
+ 0,
+ 0,
+ 24679,
+ 0,
+ 0,
+ 0,
+ 24681,
+ 0,
+ 24683,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24684,
+ 0,
+ 24685,
+ 0,
+ 0,
+ 24686,
+ 0,
+ 0,
+ 24688,
+ 24689,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24690,
+ 24691,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24697,
+ 0,
+ 24698,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24709,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24710,
+ 0,
+ 24712,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24713,
+ 24714,
+ 0,
+ 24715,
+ 0,
+ 24716,
+ 24718,
+ 0,
+ 24719,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24720,
+ 0,
+ 0,
+ 24725,
+ 0,
+ 0,
+ 24738,
+ 0,
+ 24749,
+ 24750,
+ 0,
+ 0,
+ 0,
+ 24752,
+ 0,
+ 0,
+ 0,
+ 24753,
+ 0,
+ 0,
+ 0,
+ 24758,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24762,
+ 0,
+ 24763,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24764,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24765,
+ 24767,
+ 24768,
+ 0,
+ 24772,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24773,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24777,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24785,
+ 0,
+ 24786,
+ 24788,
+ 0,
+ 0,
+ 0,
+ 24789,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24794,
+ 24798,
+ 0,
+ 24799,
+ 24800,
+ 0,
+ 0,
+ 0,
+ 24803,
+ 0,
+ 24804,
+ 24806,
+ 0,
+ 24807,
+ 0,
+ 0,
+ 0,
+ 24810,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24827,
+ 24828,
+ 0,
+ 24835,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24836,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24839,
+ 0,
+ 24843,
+ 24844,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24847,
+ 0,
+ 0,
+ 24848,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24849,
+ 0,
+ 24850,
+ 24851,
+ 0,
+ 0,
+ 0,
+ 24852,
+ 0,
+ 24853,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24854,
+ 0,
+ 24855,
+ 0,
+ 0,
+ 24868,
+ 0,
+ 0,
+ 0,
+ 24883,
+ 0,
+ 0,
+ 0,
+ 24884,
+ 0,
+ 24895,
+ 24897,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24899,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24900,
+ 0,
+ 24913,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24914,
+ 0,
+ 0,
+ 24917,
+ 24930,
+ 24931,
+ 0,
+ 0,
+ 0,
+ 24932,
+ 0,
+ 0,
+ 24939,
+ 0,
+ 0,
+ 24942,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24945,
+ 24950,
+ 0,
+ 24951,
+ 0,
+ 0,
+ 24953,
+ 0,
+ 0,
+ 0,
+ 24954,
+ 0,
+ 24959,
+ 0,
+ 0,
+ 0,
+ 24961,
+ 0,
+ 0,
+ 24962,
+ 0,
+ 24964,
+ 24968,
+ 24970,
+ 24972,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 24976,
+ 0,
+ 0,
+ 0,
+ 24977,
+ 0,
+ 24982,
+ 0,
+ 0,
+ 24983,
+ 0,
+ 0,
+ 24984,
+ 0,
+ 0,
+ 0,
+ 24993,
+ 0,
+ 0,
+ 0,
+ 24994,
+ 0,
+ 0,
+ 25001,
+ 0,
+ 0,
+ 0,
+ 25003,
+ 0,
+ 0,
+ 25018,
+ 0,
+ 0,
+ 25023,
+ 0,
+ 0,
+ 0,
+ 25034,
+ 0,
+ 0,
+ 25035,
+ 25036,
+ 0,
+ 25037,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25039,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25040,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25042,
+ 0,
+ 0,
+ 25043,
+ 25045,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25049,
+ 0,
+ 0,
+ 25051,
+ 0,
+ 25052,
+ 25053,
+ 0,
+ 0,
+ 25054,
+ 0,
+ 0,
+ 0,
+ 25055,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25057,
+ 25059,
+ 0,
+ 0,
+ 25060,
+ 25064,
+ 0,
+ 25065,
+ 25069,
+ 25070,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25072,
+ 0,
+ 25073,
+ 0,
+ 25090,
+ 0,
+ 0,
+ 25092,
+ 25093,
+ 25101,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25105,
+ 25108,
+ 0,
+ 0,
+ 25113,
+ 0,
+ 0,
+ 25115,
+ 25116,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25117,
+ 0,
+ 0,
+ 0,
+ 25120,
+ 25121,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25125,
+ 0,
+ 0,
+ 0,
+ 25126,
+ 0,
+ 25130,
+ 25134,
+ 0,
+ 25139,
+ 0,
+ 25143,
+ 0,
+ 0,
+ 0,
+ 25151,
+ 0,
+ 25161,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25163,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25174,
+ 0,
+ 25175,
+ 0,
+ 25207,
+ 0,
+ 0,
+ 0,
+ 25209,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25213,
+ 0,
+ 25219,
+ 0,
+ 25223,
+ 0,
+ 25225,
+ 0,
+ 0,
+ 0,
+ 25227,
+ 0,
+ 0,
+ 0,
+ 25228,
+ 0,
+ 0,
+ 0,
+ 25229,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25231,
+ 25233,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25237,
+ 25239,
+ 0,
+ 0,
+ 0,
+ 25243,
+ 0,
+ 0,
+ 0,
+ 25252,
+ 0,
+ 25257,
+ 25258,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25260,
+ 25265,
+ 0,
+ 25268,
+ 0,
+ 0,
+ 25273,
+ 25324,
+ 0,
+ 25325,
+ 0,
+ 25326,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25327,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25328,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25332,
+ 0,
+ 0,
+ 0,
+ 25333,
+ 0,
+ 0,
+ 0,
+ 25336,
+ 25337,
+ 25338,
+ 0,
+ 0,
+ 25343,
+ 0,
+ 25350,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25352,
+ 0,
+ 25354,
+ 0,
+ 25375,
+ 0,
+ 25379,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25384,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25386,
+ 0,
+ 25388,
+ 0,
+ 25390,
+ 0,
+ 0,
+ 25399,
+ 0,
+ 0,
+ 25401,
+ 0,
+ 0,
+ 0,
+ 25402,
+ 0,
+ 0,
+ 0,
+ 25407,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25413,
+ 25415,
+ 0,
+ 0,
+ 25417,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25419,
+ 0,
+ 0,
+ 0,
+ 25421,
+ 0,
+ 0,
+ 0,
+ 25424,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25433,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25435,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25436,
+ 0,
+ 0,
+ 0,
+ 25437,
+ 0,
+ 0,
+ 25440,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25442,
+ 0,
+ 0,
+ 25443,
+ 0,
+ 25446,
+ 0,
+ 0,
+ 25449,
+ 0,
+ 0,
+ 0,
+ 25450,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25452,
+ 0,
+ 25453,
+ 25454,
+ 25455,
+ 0,
+ 0,
+ 0,
+ 25456,
+ 0,
+ 25457,
+ 0,
+ 0,
+ 0,
+ 25459,
+ 0,
+ 25461,
+ 0,
+ 25468,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25469,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25471,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25474,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25475,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25477,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25483,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25484,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25485,
+ 0,
+ 25497,
+ 0,
+ 0,
+ 25498,
+ 0,
+ 25504,
+ 0,
+ 25510,
+ 0,
+ 25512,
+ 0,
+ 0,
+ 25513,
+ 25514,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25517,
+ 25518,
+ 25519,
+ 0,
+ 25520,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25521,
+ 0,
+ 25522,
+ 25527,
+ 25534,
+ 0,
+ 25536,
+ 0,
+ 25537,
+ 0,
+ 0,
+ 25548,
+ 25550,
+ 0,
+ 0,
+ 25551,
+ 0,
+ 25552,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25554,
+ 0,
+ 25555,
+ 0,
+ 25556,
+ 25557,
+ 25568,
+ 0,
+ 0,
+ 0,
+ 25570,
+ 25571,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25574,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25579,
+ 0,
+ 0,
+ 0,
+ 25581,
+ 0,
+ 0,
+ 0,
+ 25582,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25588,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25589,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25590,
+ 0,
+ 25591,
+ 25592,
+ 25593,
+ 0,
+ 25594,
+ 0,
+ 0,
+ 0,
+ 25596,
+ 0,
+ 25597,
+ 25615,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25618,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25619,
+ 25623,
+ 0,
+ 0,
+ 25629,
+ 0,
+ 0,
+ 25631,
+ 0,
+ 0,
+ 0,
+ 25635,
+ 25636,
+ 0,
+ 0,
+ 25649,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25654,
+ 0,
+ 0,
+ 0,
+ 25661,
+ 25663,
+ 0,
+ 0,
+ 25671,
+ 0,
+ 0,
+ 25678,
+ 25698,
+ 0,
+ 25699,
+ 25702,
+ 25703,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25704,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25706,
+ 0,
+ 0,
+ 25710,
+ 0,
+ 25711,
+ 0,
+ 25712,
+ 0,
+ 25715,
+ 25716,
+ 25717,
+ 0,
+ 0,
+ 25718,
+ 25728,
+ 25732,
+ 0,
+ 0,
+ 0,
+ 25734,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25737,
+ 0,
+ 0,
+ 25739,
+ 0,
+ 0,
+ 0,
+ 25740,
+ 0,
+ 25741,
+ 25745,
+ 0,
+ 25746,
+ 0,
+ 25748,
+ 25772,
+ 25778,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25780,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25781,
+ 0,
+ 25782,
+ 25784,
+ 25785,
+ 0,
+ 0,
+ 0,
+ 25789,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25797,
+ 25801,
+ 0,
+ 0,
+ 0,
+ 25808,
+ 25809,
+ 0,
+ 0,
+ 25811,
+ 25814,
+ 25815,
+ 0,
+ 0,
+ 25817,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25820,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25832,
+ 25833,
+ 0,
+ 0,
+ 0,
+ 25846,
+ 0,
+ 0,
+ 0,
+ 25847,
+ 25848,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25849,
+ 25850,
+ 0,
+ 0,
+ 25851,
+ 0,
+ 0,
+ 25852,
+ 0,
+ 25862,
+ 0,
+ 0,
+ 0,
+ 25863,
+ 25865,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25867,
+ 25868,
+ 0,
+ 25869,
+ 25874,
+ 0,
+ 25875,
+ 0,
+ 25876,
+ 25877,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25878,
+ 25902,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25903,
+ 25904,
+ 25905,
+ 0,
+ 0,
+ 0,
+ 25908,
+ 25909,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25910,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25912,
+ 0,
+ 25913,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25914,
+ 0,
+ 0,
+ 25916,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25917,
+ 25927,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25928,
+ 0,
+ 0,
+ 25930,
+ 0,
+ 0,
+ 0,
+ 25933,
+ 0,
+ 0,
+ 25938,
+ 25942,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25945,
+ 0,
+ 25950,
+ 0,
+ 25956,
+ 0,
+ 0,
+ 25961,
+ 25962,
+ 0,
+ 0,
+ 25963,
+ 0,
+ 25964,
+ 25965,
+ 25966,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25967,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25968,
+ 0,
+ 0,
+ 0,
+ 25969,
+ 25971,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25973,
+ 25975,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25978,
+ 0,
+ 25981,
+ 0,
+ 0,
+ 0,
+ 25982,
+ 0,
+ 0,
+ 0,
+ 25984,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 25993,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26002,
+ 0,
+ 0,
+ 0,
+ 26005,
+ 0,
+ 0,
+ 0,
+ 26006,
+ 26007,
+ 0,
+ 0,
+ 26014,
+ 26015,
+ 26016,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26017,
+ 26018,
+ 26020,
+ 0,
+ 26022,
+ 26023,
+ 0,
+ 0,
+ 0,
+ 26024,
+ 26028,
+ 0,
+ 26029,
+ 26033,
+ 26034,
+ 26044,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26046,
+ 0,
+ 0,
+ 26047,
+ 0,
+ 0,
+ 26049,
+ 0,
+ 26050,
+ 0,
+ 26051,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26053,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26054,
+ 26059,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26060,
+ 0,
+ 26066,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26067,
+ 0,
+ 26069,
+ 0,
+ 0,
+ 26071,
+ 0,
+ 0,
+ 0,
+ 26073,
+ 0,
+ 26074,
+ 26077,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26078,
+ 0,
+ 0,
+ 0,
+ 26079,
+ 0,
+ 26090,
+ 0,
+ 0,
+ 26094,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26095,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26096,
+ 26101,
+ 0,
+ 26107,
+ 26122,
+ 0,
+ 26124,
+ 0,
+ 0,
+ 26125,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26136,
+ 26141,
+ 26155,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26164,
+ 26166,
+ 0,
+ 0,
+ 0,
+ 26167,
+ 0,
+ 26170,
+ 26171,
+ 0,
+ 0,
+ 26172,
+ 0,
+ 0,
+ 26174,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26175,
+ 0,
+ 0,
+ 0,
+ 26176,
+ 26177,
+ 0,
+ 26321,
+ 26322,
+ 0,
+ 26323,
+ 0,
+ 0,
+ 26324,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26325,
+ 0,
+ 26331,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26335,
+ 0,
+ 0,
+ 0,
+ 26350,
+ 0,
+ 0,
+ 0,
+ 26379,
+ 0,
+ 0,
+ 26382,
+ 26383,
+ 26385,
+ 0,
+ 0,
+ 26392,
+ 26406,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26411,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26412,
+ 0,
+ 0,
+ 26420,
+ 0,
+ 0,
+ 26423,
+ 0,
+ 26424,
+ 26426,
+ 26432,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26435,
+ 0,
+ 26436,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26441,
+ 0,
+ 26444,
+ 0,
+ 0,
+ 0,
+ 26446,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26447,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26449,
+ 0,
+ 26450,
+ 26452,
+ 0,
+ 26453,
+ 26454,
+ 0,
+ 0,
+ 0,
+ 26455,
+ 0,
+ 0,
+ 0,
+ 26456,
+ 0,
+ 0,
+ 26458,
+ 0,
+ 0,
+ 26460,
+ 0,
+ 26463,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26464,
+ 26470,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26473,
+ 0,
+ 0,
+ 26474,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26475,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26477,
+ 0,
+ 26485,
+ 0,
+ 0,
+ 26486,
+ 0,
+ 26487,
+ 0,
+ 0,
+ 26488,
+ 26493,
+ 26494,
+ 0,
+ 0,
+ 26495,
+ 0,
+ 26497,
+ 26504,
+ 26506,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26507,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26509,
+ 0,
+ 0,
+ 26510,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26512,
+ 0,
+ 26513,
+ 26515,
+ 0,
+ 0,
+ 0,
+ 26518,
+ 0,
+ 0,
+ 0,
+ 26519,
+ 0,
+ 26524,
+ 26526,
+ 0,
+ 0,
+ 0,
+ 26527,
+ 0,
+ 26532,
+ 0,
+ 26533,
+ 26537,
+ 26558,
+ 0,
+ 0,
+ 0,
+ 26559,
+ 0,
+ 0,
+ 0,
+ 26571,
+ 0,
+ 0,
+ 26573,
+ 0,
+ 26588,
+ 0,
+ 26593,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26603,
+ 0,
+ 26604,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26606,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26607,
+ 26609,
+ 26611,
+ 26614,
+ 0,
+ 0,
+ 0,
+ 26616,
+ 26620,
+ 0,
+ 26621,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26627,
+ 0,
+ 26629,
+ 0,
+ 0,
+ 26630,
+ 0,
+ 0,
+ 26632,
+ 26643,
+ 0,
+ 0,
+ 0,
+ 26644,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26646,
+ 26647,
+ 0,
+ 0,
+ 0,
+ 26650,
+ 0,
+ 0,
+ 26656,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26663,
+ 26670,
+ 26671,
+ 0,
+ 0,
+ 0,
+ 26685,
+ 26686,
+ 26687,
+ 0,
+ 26689,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26744,
+ 0,
+ 26745,
+ 0,
+ 26747,
+ 26748,
+ 0,
+ 26749,
+ 26750,
+ 26751,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26752,
+ 26755,
+ 0,
+ 0,
+ 0,
+ 26756,
+ 26769,
+ 0,
+ 0,
+ 0,
+ 26774,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26775,
+ 0,
+ 26777,
+ 26778,
+ 0,
+ 26786,
+ 0,
+ 0,
+ 0,
+ 26787,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26788,
+ 0,
+ 0,
+ 26789,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26791,
+ 0,
+ 26792,
+ 26793,
+ 0,
+ 0,
+ 0,
+ 26794,
+ 0,
+ 26797,
+ 26798,
+ 0,
+ 0,
+ 0,
+ 26800,
+ 0,
+ 0,
+ 26803,
+ 0,
+ 26804,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26805,
+ 0,
+ 0,
+ 26808,
+ 0,
+ 0,
+ 26809,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26812,
+ 0,
+ 26825,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26826,
+ 0,
+ 0,
+ 26827,
+ 26829,
+ 26834,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26835,
+ 0,
+ 0,
+ 26849,
+ 0,
+ 26851,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26852,
+ 0,
+ 26853,
+ 26857,
+ 0,
+ 26858,
+ 0,
+ 26859,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26876,
+ 0,
+ 26878,
+ 26882,
+ 26883,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26890,
+ 26894,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26895,
+ 26896,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26900,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26911,
+ 26913,
+ 26914,
+ 26915,
+ 26916,
+ 26919,
+ 0,
+ 0,
+ 0,
+ 26921,
+ 26922,
+ 0,
+ 0,
+ 26925,
+ 0,
+ 0,
+ 0,
+ 26928,
+ 0,
+ 0,
+ 26929,
+ 26930,
+ 0,
+ 0,
+ 0,
+ 26931,
+ 0,
+ 26932,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26933,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26937,
+ 0,
+ 0,
+ 26943,
+ 0,
+ 0,
+ 26944,
+ 0,
+ 0,
+ 0,
+ 26946,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26956,
+ 0,
+ 26958,
+ 0,
+ 0,
+ 26963,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26965,
+ 0,
+ 26969,
+ 26970,
+ 26972,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26973,
+ 0,
+ 26974,
+ 0,
+ 26978,
+ 0,
+ 26980,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 26982,
+ 0,
+ 26986,
+ 26987,
+ 0,
+ 26990,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27003,
+ 27006,
+ 0,
+ 0,
+ 27007,
+ 27010,
+ 27012,
+ 27013,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27014,
+ 27015,
+ 27018,
+ 0,
+ 27019,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27025,
+ 0,
+ 0,
+ 0,
+ 27026,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27029,
+ 27030,
+ 27031,
+ 27034,
+ 0,
+ 0,
+ 27036,
+ 27037,
+ 0,
+ 0,
+ 0,
+ 27038,
+ 27042,
+ 0,
+ 0,
+ 0,
+ 27044,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27045,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27046,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27047,
+ 27049,
+ 0,
+ 27050,
+ 0,
+ 0,
+ 0,
+ 27051,
+ 27052,
+ 0,
+ 27055,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27056,
+ 27058,
+ 27059,
+ 0,
+ 27061,
+ 0,
+ 27064,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27069,
+ 0,
+ 0,
+ 27070,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27072,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27076,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27078,
+ 0,
+ 27079,
+ 0,
+ 0,
+ 0,
+ 27081,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27082,
+ 0,
+ 27083,
+ 27086,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27087,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27088,
+ 27090,
+ 0,
+ 27094,
+ 0,
+ 0,
+ 27095,
+ 0,
+ 27099,
+ 27102,
+ 0,
+ 0,
+ 0,
+ 27103,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27105,
+ 0,
+ 0,
+ 0,
+ 27106,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27107,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27108,
+ 27117,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27118,
+ 0,
+ 0,
+ 27124,
+ 0,
+ 27126,
+ 0,
+ 0,
+ 27130,
+ 27131,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27147,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27148,
+ 27149,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27150,
+ 27151,
+ 0,
+ 27152,
+ 0,
+ 27159,
+ 0,
+ 0,
+ 0,
+ 27164,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27175,
+ 0,
+ 27189,
+ 0,
+ 0,
+ 27191,
+ 0,
+ 27193,
+ 0,
+ 27195,
+ 0,
+ 27198,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27200,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27202,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27203,
+ 0,
+ 0,
+ 27204,
+ 0,
+ 0,
+ 27206,
+ 0,
+ 27207,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27209,
+ 0,
+ 0,
+ 0,
+ 27213,
+ 0,
+ 0,
+ 27216,
+ 27219,
+ 27220,
+ 27222,
+ 27223,
+ 0,
+ 27224,
+ 0,
+ 27225,
+ 27226,
+ 0,
+ 0,
+ 27233,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27235,
+ 0,
+ 27237,
+ 0,
+ 27238,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27239,
+ 0,
+ 27242,
+ 27243,
+ 0,
+ 27250,
+ 0,
+ 0,
+ 0,
+ 27251,
+ 0,
+ 27253,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27254,
+ 27255,
+ 27258,
+ 0,
+ 0,
+ 0,
+ 27259,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27267,
+ 0,
+ 27276,
+ 27278,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27296,
+ 27297,
+ 27301,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27302,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27312,
+ 27313,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27318,
+ 0,
+ 27320,
+ 0,
+ 27329,
+ 0,
+ 27330,
+ 27331,
+ 0,
+ 27332,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27340,
+ 0,
+ 0,
+ 0,
+ 27348,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27350,
+ 0,
+ 27351,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27355,
+ 0,
+ 0,
+ 27358,
+ 27359,
+ 27361,
+ 0,
+ 0,
+ 0,
+ 27365,
+ 0,
+ 27367,
+ 0,
+ 27376,
+ 27378,
+ 0,
+ 0,
+ 27379,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27396,
+ 0,
+ 27397,
+ 27404,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27408,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27453,
+ 0,
+ 0,
+ 0,
+ 27456,
+ 0,
+ 0,
+ 0,
+ 27458,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27459,
+ 0,
+ 0,
+ 0,
+ 27460,
+ 0,
+ 0,
+ 27461,
+ 0,
+ 27465,
+ 27467,
+ 0,
+ 0,
+ 27469,
+ 0,
+ 27470,
+ 0,
+ 27471,
+ 0,
+ 27477,
+ 27482,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27484,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27485,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27493,
+ 0,
+ 27494,
+ 27502,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27511,
+ 27532,
+ 0,
+ 0,
+ 0,
+ 27533,
+ 27545,
+ 0,
+ 0,
+ 0,
+ 27546,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27547,
+ 0,
+ 0,
+ 27549,
+ 27550,
+ 0,
+ 27551,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27555,
+ 0,
+ 0,
+ 27571,
+ 0,
+ 27573,
+ 27574,
+ 27575,
+ 27577,
+ 0,
+ 27578,
+ 0,
+ 0,
+ 27579,
+ 27585,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27586,
+ 0,
+ 0,
+ 27588,
+ 27589,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27596,
+ 0,
+ 0,
+ 27600,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27608,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27610,
+ 0,
+ 0,
+ 0,
+ 27618,
+ 0,
+ 0,
+ 27620,
+ 0,
+ 0,
+ 0,
+ 27631,
+ 0,
+ 0,
+ 27632,
+ 27634,
+ 0,
+ 27636,
+ 27638,
+ 0,
+ 0,
+ 0,
+ 27643,
+ 0,
+ 27644,
+ 27649,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27651,
+ 27660,
+ 0,
+ 27661,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27662,
+ 0,
+ 0,
+ 27664,
+ 0,
+ 27665,
+ 0,
+ 0,
+ 0,
+ 27669,
+ 0,
+ 27671,
+ 0,
+ 0,
+ 0,
+ 27673,
+ 27674,
+ 0,
+ 0,
+ 0,
+ 27682,
+ 0,
+ 0,
+ 0,
+ 27711,
+ 0,
+ 27712,
+ 27713,
+ 27719,
+ 27720,
+ 0,
+ 0,
+ 27728,
+ 0,
+ 27729,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27731,
+ 0,
+ 0,
+ 27732,
+ 0,
+ 27733,
+ 0,
+ 27738,
+ 0,
+ 0,
+ 0,
+ 27742,
+ 0,
+ 0,
+ 0,
+ 27743,
+ 27744,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27745,
+ 27746,
+ 0,
+ 0,
+ 0,
+ 27747,
+ 27748,
+ 27751,
+ 27752,
+ 0,
+ 0,
+ 0,
+ 27768,
+ 27770,
+ 0,
+ 0,
+ 0,
+ 27774,
+ 27775,
+ 0,
+ 27776,
+ 27777,
+ 0,
+ 0,
+ 27781,
+ 0,
+ 27784,
+ 0,
+ 27786,
+ 0,
+ 0,
+ 27791,
+ 0,
+ 27792,
+ 27793,
+ 27804,
+ 0,
+ 27812,
+ 27813,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27814,
+ 0,
+ 27825,
+ 0,
+ 27827,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27828,
+ 27861,
+ 27862,
+ 0,
+ 0,
+ 0,
+ 27864,
+ 0,
+ 0,
+ 0,
+ 27865,
+ 27884,
+ 0,
+ 27889,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27890,
+ 0,
+ 27891,
+ 0,
+ 0,
+ 0,
+ 27892,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27897,
+ 27898,
+ 0,
+ 0,
+ 27899,
+ 0,
+ 0,
+ 0,
+ 27901,
+ 27905,
+ 0,
+ 0,
+ 27920,
+ 0,
+ 0,
+ 27921,
+ 0,
+ 27922,
+ 0,
+ 0,
+ 0,
+ 27931,
+ 27934,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27941,
+ 0,
+ 27942,
+ 0,
+ 27945,
+ 0,
+ 27947,
+ 27954,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27960,
+ 27963,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 27964,
+ 27965,
+ 0,
+ 0,
+ 0,
+ 27967,
+ 0,
+ 27969,
+ 27975,
+ 0,
+ 27976,
+ 27977,
+ 0,
+ 27981,
+ 0,
+ 27983,
+ 28051,
+ 28052,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28056,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28058,
+ 28059,
+ 0,
+ 0,
+ 28061,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28063,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28066,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28069,
+ 28070,
+ 28072,
+ 0,
+ 28073,
+ 0,
+ 0,
+ 28074,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28075,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28078,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28085,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28086,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28088,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28090,
+ 0,
+ 28097,
+ 28114,
+ 28115,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28116,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28118,
+ 0,
+ 28129,
+ 0,
+ 28131,
+ 0,
+ 0,
+ 28135,
+ 0,
+ 0,
+ 0,
+ 28140,
+ 28141,
+ 0,
+ 0,
+ 0,
+ 28146,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28152,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28155,
+ 28157,
+ 28161,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28166,
+ 0,
+ 28167,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28172,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28173,
+ 0,
+ 0,
+ 28175,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28178,
+ 28188,
+ 0,
+ 28190,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28191,
+ 0,
+ 28193,
+ 28206,
+ 0,
+ 0,
+ 28207,
+ 28209,
+ 0,
+ 28211,
+ 0,
+ 28213,
+ 0,
+ 0,
+ 0,
+ 28215,
+ 28216,
+ 28217,
+ 0,
+ 28222,
+ 0,
+ 28223,
+ 28225,
+ 0,
+ 0,
+ 0,
+ 28226,
+ 0,
+ 28227,
+ 28229,
+ 28232,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28235,
+ 0,
+ 28241,
+ 0,
+ 0,
+ 28242,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28243,
+ 0,
+ 0,
+ 0,
+ 28245,
+ 0,
+ 0,
+ 0,
+ 28248,
+ 28250,
+ 0,
+ 28251,
+ 28252,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28253,
+ 0,
+ 0,
+ 28254,
+ 28255,
+ 0,
+ 0,
+ 28256,
+ 0,
+ 0,
+ 28258,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28259,
+ 0,
+ 0,
+ 28260,
+ 0,
+ 0,
+ 28261,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28262,
+ 28263,
+ 0,
+ 0,
+ 28264,
+ 0,
+ 0,
+ 0,
+ 28266,
+ 0,
+ 28268,
+ 28269,
+ 0,
+ 28270,
+ 28272,
+ 28274,
+ 0,
+ 28277,
+ 28278,
+ 0,
+ 0,
+ 0,
+ 28279,
+ 0,
+ 28280,
+ 28281,
+ 28283,
+ 0,
+ 28292,
+ 0,
+ 28294,
+ 0,
+ 28297,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28299,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28300,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28301,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28302,
+ 28303,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28304,
+ 0,
+ 0,
+ 28305,
+ 0,
+ 28312,
+ 0,
+ 28313,
+ 28314,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28315,
+ 0,
+ 0,
+ 0,
+ 28320,
+ 28321,
+ 0,
+ 0,
+ 28328,
+ 0,
+ 0,
+ 0,
+ 28329,
+ 28338,
+ 0,
+ 28339,
+ 0,
+ 0,
+ 28344,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28347,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28348,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28411,
+ 0,
+ 28412,
+ 28413,
+ 0,
+ 28416,
+ 0,
+ 0,
+ 0,
+ 28420,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28421,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28423,
+ 0,
+ 0,
+ 0,
+ 28424,
+ 0,
+ 0,
+ 28428,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28429,
+ 0,
+ 0,
+ 0,
+ 28431,
+ 28434,
+ 0,
+ 28458,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28464,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28465,
+ 0,
+ 28467,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28471,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28474,
+ 0,
+ 28480,
+ 0,
+ 28481,
+ 0,
+ 0,
+ 28485,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28486,
+ 28488,
+ 0,
+ 0,
+ 28489,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28492,
+ 0,
+ 0,
+ 0,
+ 28495,
+ 0,
+ 28497,
+ 0,
+ 28499,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28500,
+ 0,
+ 0,
+ 28502,
+ 28503,
+ 0,
+ 0,
+ 0,
+ 28508,
+ 0,
+ 0,
+ 0,
+ 28510,
+ 0,
+ 0,
+ 28512,
+ 28513,
+ 28514,
+ 28521,
+ 0,
+ 28526,
+ 0,
+ 28527,
+ 28528,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28529,
+ 0,
+ 0,
+ 28532,
+ 0,
+ 0,
+ 28537,
+ 28538,
+ 0,
+ 0,
+ 0,
+ 28539,
+ 0,
+ 28548,
+ 0,
+ 28553,
+ 28554,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28560,
+ 28563,
+ 0,
+ 0,
+ 28564,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28565,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28566,
+ 28568,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28569,
+ 0,
+ 0,
+ 0,
+ 28570,
+ 0,
+ 28572,
+ 28573,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28575,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28576,
+ 28581,
+ 28588,
+ 0,
+ 0,
+ 28589,
+ 0,
+ 0,
+ 0,
+ 28590,
+ 28595,
+ 0,
+ 28598,
+ 0,
+ 0,
+ 28601,
+ 0,
+ 0,
+ 28605,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28614,
+ 28615,
+ 28619,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28620,
+ 0,
+ 28626,
+ 0,
+ 0,
+ 28628,
+ 0,
+ 28631,
+ 0,
+ 28632,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28635,
+ 0,
+ 0,
+ 0,
+ 28637,
+ 28638,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28639,
+ 0,
+ 28643,
+ 0,
+ 0,
+ 28652,
+ 0,
+ 0,
+ 0,
+ 28662,
+ 0,
+ 28670,
+ 28671,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28672,
+ 28673,
+ 28675,
+ 28676,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28691,
+ 0,
+ 0,
+ 0,
+ 28695,
+ 0,
+ 0,
+ 0,
+ 28696,
+ 0,
+ 28697,
+ 28698,
+ 0,
+ 28705,
+ 0,
+ 28707,
+ 28708,
+ 28710,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28711,
+ 28728,
+ 0,
+ 0,
+ 0,
+ 28736,
+ 0,
+ 0,
+ 0,
+ 28737,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28738,
+ 0,
+ 28739,
+ 0,
+ 28741,
+ 0,
+ 0,
+ 28742,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28745,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28749,
+ 28750,
+ 28752,
+ 28754,
+ 28756,
+ 0,
+ 28757,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28759,
+ 28760,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28762,
+ 0,
+ 0,
+ 0,
+ 28764,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28766,
+ 0,
+ 28767,
+ 28768,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28769,
+ 28770,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28771,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28772,
+ 0,
+ 28773,
+ 0,
+ 28782,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28784,
+ 0,
+ 28785,
+ 0,
+ 28786,
+ 0,
+ 0,
+ 0,
+ 28787,
+ 0,
+ 0,
+ 0,
+ 28797,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28799,
+ 0,
+ 0,
+ 28801,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28802,
+ 0,
+ 28805,
+ 0,
+ 0,
+ 28806,
+ 0,
+ 0,
+ 28807,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28808,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28810,
+ 28812,
+ 0,
+ 0,
+ 28816,
+ 28819,
+ 0,
+ 0,
+ 28821,
+ 0,
+ 28826,
+ 0,
+ 0,
+ 0,
+ 28842,
+ 28852,
+ 0,
+ 0,
+ 28853,
+ 0,
+ 28854,
+ 28855,
+ 0,
+ 0,
+ 0,
+ 28857,
+ 0,
+ 0,
+ 0,
+ 28858,
+ 0,
+ 28867,
+ 28868,
+ 28869,
+ 0,
+ 0,
+ 0,
+ 28874,
+ 28880,
+ 28882,
+ 28890,
+ 28892,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28895,
+ 0,
+ 0,
+ 0,
+ 28898,
+ 28899,
+ 0,
+ 0,
+ 0,
+ 28900,
+ 0,
+ 0,
+ 28904,
+ 0,
+ 28906,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28907,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28908,
+ 0,
+ 0,
+ 0,
+ 28910,
+ 0,
+ 28914,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28915,
+ 28916,
+ 28919,
+ 0,
+ 0,
+ 28920,
+ 0,
+ 28921,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28924,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28926,
+ 28929,
+ 0,
+ 0,
+ 0,
+ 28930,
+ 0,
+ 28936,
+ 0,
+ 28939,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28942,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28956,
+ 0,
+ 0,
+ 0,
+ 28966,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28967,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28968,
+ 0,
+ 28971,
+ 0,
+ 28975,
+ 28976,
+ 0,
+ 28982,
+ 28983,
+ 0,
+ 0,
+ 28984,
+ 28989,
+ 28996,
+ 28997,
+ 28998,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 28999,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29000,
+ 0,
+ 29001,
+ 0,
+ 0,
+ 0,
+ 29009,
+ 0,
+ 0,
+ 29011,
+ 0,
+ 0,
+ 29021,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29024,
+ 0,
+ 29025,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29026,
+ 0,
+ 0,
+ 0,
+ 29036,
+ 0,
+ 0,
+ 0,
+ 29037,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29038,
+ 0,
+ 29045,
+ 0,
+ 29047,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29051,
+ 0,
+ 0,
+ 0,
+ 29054,
+ 29056,
+ 29062,
+ 0,
+ 29070,
+ 29082,
+ 0,
+ 0,
+ 0,
+ 29083,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29084,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29085,
+ 29088,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29090,
+ 29097,
+ 0,
+ 0,
+ 0,
+ 29103,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29105,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29107,
+ 0,
+ 29109,
+ 0,
+ 0,
+ 0,
+ 29115,
+ 0,
+ 0,
+ 29120,
+ 0,
+ 0,
+ 29138,
+ 29140,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29152,
+ 0,
+ 29160,
+ 29174,
+ 0,
+ 29176,
+ 0,
+ 0,
+ 29180,
+ 0,
+ 29181,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29228,
+ 0,
+ 0,
+ 29229,
+ 0,
+ 0,
+ 29230,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29234,
+ 0,
+ 0,
+ 0,
+ 29241,
+ 0,
+ 29245,
+ 0,
+ 29248,
+ 0,
+ 29250,
+ 29256,
+ 29280,
+ 0,
+ 29282,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29285,
+ 0,
+ 0,
+ 29286,
+ 29291,
+ 29292,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29294,
+ 0,
+ 29295,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29296,
+ 29297,
+ 29298,
+ 29300,
+ 0,
+ 29302,
+ 0,
+ 0,
+ 29304,
+ 29307,
+ 0,
+ 29312,
+ 0,
+ 0,
+ 0,
+ 29322,
+ 0,
+ 0,
+ 29323,
+ 0,
+ 0,
+ 29324,
+ 29326,
+ 29328,
+ 0,
+ 29335,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29338,
+ 29339,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29341,
+ 29343,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29344,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29345,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29346,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29347,
+ 29348,
+ 29349,
+ 0,
+ 0,
+ 29354,
+ 0,
+ 0,
+ 29355,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29357,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29364,
+ 0,
+ 29365,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29366,
+ 0,
+ 0,
+ 29368,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29378,
+ 0,
+ 29381,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29386,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29389,
+ 0,
+ 0,
+ 0,
+ 29390,
+ 0,
+ 0,
+ 29391,
+ 29397,
+ 0,
+ 29398,
+ 29412,
+ 29414,
+ 29418,
+ 29419,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29420,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29423,
+ 0,
+ 0,
+ 0,
+ 29435,
+ 0,
+ 0,
+ 0,
+ 29437,
+ 0,
+ 0,
+ 29439,
+ 0,
+ 29441,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29443,
+ 0,
+ 29446,
+ 29450,
+ 29452,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29456,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29461,
+ 0,
+ 0,
+ 0,
+ 29464,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29468,
+ 0,
+ 29473,
+ 0,
+ 0,
+ 0,
+ 29486,
+ 0,
+ 0,
+ 0,
+ 29490,
+ 0,
+ 0,
+ 0,
+ 29491,
+ 29492,
+ 0,
+ 0,
+ 29497,
+ 0,
+ 0,
+ 0,
+ 29498,
+ 0,
+ 29499,
+ 0,
+ 29502,
+ 29505,
+ 0,
+ 29509,
+ 0,
+ 0,
+ 0,
+ 29510,
+ 0,
+ 0,
+ 0,
+ 29512,
+ 0,
+ 0,
+ 0,
+ 29516,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29518,
+ 0,
+ 29519,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29520,
+ 29521,
+ 29529,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29530,
+ 0,
+ 0,
+ 29531,
+ 29538,
+ 0,
+ 29540,
+ 0,
+ 0,
+ 0,
+ 29542,
+ 0,
+ 29543,
+ 29544,
+ 29547,
+ 0,
+ 0,
+ 29548,
+ 0,
+ 0,
+ 0,
+ 29549,
+ 0,
+ 0,
+ 0,
+ 29550,
+ 0,
+ 0,
+ 29552,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29558,
+ 29561,
+ 0,
+ 29562,
+ 29564,
+ 0,
+ 0,
+ 29565,
+ 0,
+ 0,
+ 29566,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29578,
+ 29584,
+ 29586,
+ 29591,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29593,
+ 29594,
+ 0,
+ 0,
+ 29597,
+ 0,
+ 0,
+ 29613,
+ 0,
+ 29614,
+ 0,
+ 29615,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29616,
+ 29617,
+ 0,
+ 0,
+ 29625,
+ 0,
+ 0,
+ 0,
+ 29632,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29633,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29634,
+ 29635,
+ 29637,
+ 0,
+ 29638,
+ 0,
+ 29641,
+ 29643,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29644,
+ 0,
+ 29645,
+ 0,
+ 29649,
+ 0,
+ 0,
+ 0,
+ 29650,
+ 0,
+ 29653,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29656,
+ 29659,
+ 0,
+ 0,
+ 29660,
+ 0,
+ 0,
+ 0,
+ 29661,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29664,
+ 0,
+ 0,
+ 0,
+ 29671,
+ 29673,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29675,
+ 0,
+ 29677,
+ 29679,
+ 0,
+ 0,
+ 29684,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29685,
+ 0,
+ 0,
+ 0,
+ 29687,
+ 0,
+ 0,
+ 0,
+ 29688,
+ 0,
+ 29689,
+ 29690,
+ 29700,
+ 0,
+ 29701,
+ 0,
+ 0,
+ 0,
+ 29702,
+ 0,
+ 29706,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29720,
+ 0,
+ 29721,
+ 0,
+ 29727,
+ 0,
+ 29733,
+ 29734,
+ 0,
+ 29750,
+ 29761,
+ 0,
+ 29763,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29764,
+ 0,
+ 0,
+ 29765,
+ 0,
+ 0,
+ 0,
+ 29771,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29772,
+ 0,
+ 0,
+ 0,
+ 29773,
+ 29774,
+ 29775,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29822,
+ 0,
+ 0,
+ 0,
+ 29824,
+ 0,
+ 29825,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29827,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29829,
+ 0,
+ 29832,
+ 29834,
+ 0,
+ 0,
+ 29835,
+ 0,
+ 0,
+ 29837,
+ 29838,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29843,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29844,
+ 29845,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29849,
+ 0,
+ 0,
+ 29869,
+ 29872,
+ 29890,
+ 29905,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29907,
+ 29921,
+ 0,
+ 29922,
+ 0,
+ 0,
+ 29923,
+ 29926,
+ 29944,
+ 29946,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29947,
+ 29948,
+ 0,
+ 0,
+ 0,
+ 29951,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29953,
+ 0,
+ 0,
+ 29956,
+ 0,
+ 29957,
+ 0,
+ 0,
+ 29962,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29971,
+ 0,
+ 0,
+ 0,
+ 29972,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 29978,
+ 0,
+ 29979,
+ 29992,
+ 30007,
+ 30008,
+ 30010,
+ 0,
+ 0,
+ 0,
+ 30013,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30014,
+ 30016,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30017,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30023,
+ 30031,
+ 0,
+ 0,
+ 30033,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30034,
+ 0,
+ 30038,
+ 0,
+ 30039,
+ 0,
+ 30040,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30067,
+ 30068,
+ 0,
+ 0,
+ 0,
+ 30069,
+ 0,
+ 30072,
+ 0,
+ 0,
+ 0,
+ 30073,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30075,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30079,
+ 0,
+ 0,
+ 30080,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30082,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30084,
+ 30090,
+ 0,
+ 0,
+ 30091,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30098,
+ 30118,
+ 0,
+ 30119,
+ 0,
+ 30121,
+ 30130,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30131,
+ 30132,
+ 30133,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30135,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30136,
+ 0,
+ 0,
+ 30137,
+ 30138,
+ 0,
+ 0,
+ 0,
+ 30139,
+ 30146,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30147,
+ 0,
+ 0,
+ 30148,
+ 30151,
+ 0,
+ 0,
+ 0,
+ 30168,
+ 0,
+ 30172,
+ 30173,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30180,
+ 30181,
+ 0,
+ 30192,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30194,
+ 30196,
+ 0,
+ 0,
+ 30199,
+ 0,
+ 0,
+ 30202,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30203,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30213,
+ 0,
+ 0,
+ 0,
+ 30216,
+ 0,
+ 0,
+ 30217,
+ 0,
+ 0,
+ 0,
+ 30218,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30219,
+ 0,
+ 30220,
+ 0,
+ 30222,
+ 30227,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30231,
+ 0,
+ 0,
+ 30233,
+ 30235,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30238,
+ 0,
+ 30240,
+ 30243,
+ 30245,
+ 0,
+ 30250,
+ 30252,
+ 0,
+ 0,
+ 0,
+ 30269,
+ 0,
+ 0,
+ 30271,
+ 30272,
+ 0,
+ 0,
+ 0,
+ 30278,
+ 30280,
+ 0,
+ 0,
+ 30282,
+ 0,
+ 30284,
+ 0,
+ 30294,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30295,
+ 30296,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30298,
+ 30299,
+ 30302,
+ 30304,
+ 30306,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30316,
+ 30317,
+ 0,
+ 0,
+ 0,
+ 30318,
+ 0,
+ 0,
+ 0,
+ 30319,
+ 0,
+ 30320,
+ 30322,
+ 30326,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30327,
+ 0,
+ 30332,
+ 30348,
+ 30349,
+ 0,
+ 0,
+ 30356,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30357,
+ 0,
+ 30358,
+ 0,
+ 30359,
+ 30360,
+ 0,
+ 0,
+ 30365,
+ 30366,
+ 30378,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30379,
+ 0,
+ 0,
+ 30381,
+ 0,
+ 30385,
+ 0,
+ 30388,
+ 30397,
+ 0,
+ 0,
+ 0,
+ 30401,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30403,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30404,
+ 0,
+ 0,
+ 30405,
+ 0,
+ 30406,
+ 30408,
+ 0,
+ 30409,
+ 0,
+ 30410,
+ 0,
+ 0,
+ 0,
+ 30417,
+ 0,
+ 0,
+ 30418,
+ 30419,
+ 0,
+ 30420,
+ 0,
+ 30424,
+ 0,
+ 0,
+ 0,
+ 30427,
+ 30430,
+ 30432,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30433,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30436,
+ 0,
+ 30437,
+ 30438,
+ 0,
+ 30441,
+ 30442,
+ 0,
+ 0,
+ 0,
+ 30445,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30452,
+ 30456,
+ 30457,
+ 0,
+ 0,
+ 0,
+ 30458,
+ 0,
+ 30464,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30467,
+ 0,
+ 30469,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30477,
+ 0,
+ 0,
+ 30484,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30485,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30486,
+ 30487,
+ 30497,
+ 30498,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30505,
+ 0,
+ 30508,
+ 0,
+ 0,
+ 0,
+ 30509,
+ 30510,
+ 0,
+ 30514,
+ 30516,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30523,
+ 0,
+ 30524,
+ 0,
+ 30525,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30537,
+ 0,
+ 0,
+ 30538,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30553,
+ 0,
+ 0,
+ 30555,
+ 30556,
+ 30558,
+ 30559,
+ 30560,
+ 0,
+ 0,
+ 30561,
+ 0,
+ 30562,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30563,
+ 30570,
+ 30571,
+ 0,
+ 30586,
+ 30587,
+ 0,
+ 0,
+ 30590,
+ 0,
+ 0,
+ 30594,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30611,
+ 30612,
+ 30623,
+ 30634,
+ 0,
+ 0,
+ 30636,
+ 30640,
+ 30655,
+ 30656,
+ 0,
+ 30657,
+ 0,
+ 0,
+ 30658,
+ 30669,
+ 0,
+ 30670,
+ 0,
+ 30676,
+ 30678,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30679,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30695,
+ 0,
+ 0,
+ 30698,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30700,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30701,
+ 0,
+ 30702,
+ 30703,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30707,
+ 0,
+ 0,
+ 0,
+ 30709,
+ 0,
+ 0,
+ 30710,
+ 30719,
+ 30729,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30731,
+ 0,
+ 0,
+ 30733,
+ 0,
+ 0,
+ 0,
+ 30734,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30736,
+ 30737,
+ 0,
+ 0,
+ 0,
+ 30740,
+ 0,
+ 0,
+ 0,
+ 30743,
+ 0,
+ 30746,
+ 0,
+ 30747,
+ 30748,
+ 0,
+ 0,
+ 30751,
+ 30752,
+ 30753,
+ 0,
+ 0,
+ 0,
+ 30754,
+ 0,
+ 0,
+ 30760,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30763,
+ 0,
+ 30764,
+ 0,
+ 0,
+ 30766,
+ 0,
+ 30769,
+ 30770,
+ 30771,
+ 30774,
+ 30777,
+ 0,
+ 0,
+ 30779,
+ 30780,
+ 30781,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30790,
+ 0,
+ 0,
+ 0,
+ 30792,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30810,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30812,
+ 30819,
+ 0,
+ 0,
+ 30823,
+ 30824,
+ 0,
+ 30825,
+ 0,
+ 30827,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30828,
+ 0,
+ 0,
+ 30830,
+ 0,
+ 0,
+ 0,
+ 30834,
+ 0,
+ 30835,
+ 0,
+ 30837,
+ 30838,
+ 0,
+ 30845,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30846,
+ 30847,
+ 0,
+ 0,
+ 30849,
+ 0,
+ 30851,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30852,
+ 30858,
+ 0,
+ 0,
+ 30859,
+ 0,
+ 30865,
+ 0,
+ 0,
+ 30866,
+ 0,
+ 0,
+ 30868,
+ 0,
+ 0,
+ 30869,
+ 0,
+ 0,
+ 0,
+ 30881,
+ 30883,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30889,
+ 0,
+ 30891,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30894,
+ 0,
+ 30895,
+ 0,
+ 30897,
+ 0,
+ 30898,
+ 0,
+ 0,
+ 0,
+ 30904,
+ 30906,
+ 0,
+ 30909,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30910,
+ 0,
+ 0,
+ 0,
+ 30915,
+ 30933,
+ 30942,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30943,
+ 0,
+ 0,
+ 30945,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30946,
+ 0,
+ 0,
+ 30947,
+ 0,
+ 0,
+ 30955,
+ 30956,
+ 0,
+ 0,
+ 30960,
+ 0,
+ 0,
+ 30961,
+ 30962,
+ 30966,
+ 0,
+ 0,
+ 30969,
+ 30974,
+ 0,
+ 0,
+ 0,
+ 30976,
+ 0,
+ 0,
+ 30977,
+ 0,
+ 30978,
+ 30982,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 30994,
+ 30995,
+ 30998,
+ 0,
+ 31000,
+ 0,
+ 0,
+ 31001,
+ 0,
+ 0,
+ 31003,
+ 31005,
+ 0,
+ 0,
+ 31006,
+ 31011,
+ 0,
+ 0,
+ 31014,
+ 0,
+ 31016,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31018,
+ 0,
+ 0,
+ 31020,
+ 31023,
+ 31024,
+ 31025,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31027,
+ 31028,
+ 31029,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31032,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31036,
+ 31037,
+ 31038,
+ 0,
+ 0,
+ 0,
+ 31041,
+ 31043,
+ 31045,
+ 0,
+ 31047,
+ 0,
+ 0,
+ 0,
+ 31048,
+ 0,
+ 31049,
+ 0,
+ 0,
+ 0,
+ 31053,
+ 31054,
+ 31055,
+ 0,
+ 0,
+ 31063,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31066,
+ 0,
+ 31068,
+ 31071,
+ 0,
+ 0,
+ 0,
+ 31072,
+ 31073,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31075,
+ 0,
+ 0,
+ 31076,
+ 0,
+ 0,
+ 0,
+ 31077,
+ 31079,
+ 0,
+ 31080,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31087,
+ 0,
+ 31142,
+ 0,
+ 31144,
+ 0,
+ 0,
+ 31145,
+ 31146,
+ 31147,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31149,
+ 0,
+ 31151,
+ 31152,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31162,
+ 31171,
+ 31174,
+ 31175,
+ 0,
+ 0,
+ 0,
+ 31176,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31179,
+ 0,
+ 0,
+ 0,
+ 31186,
+ 0,
+ 0,
+ 0,
+ 31192,
+ 31195,
+ 0,
+ 0,
+ 31196,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31198,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31199,
+ 0,
+ 0,
+ 0,
+ 31205,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31211,
+ 31215,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31231,
+ 0,
+ 31232,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31233,
+ 31236,
+ 31253,
+ 0,
+ 31254,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31255,
+ 0,
+ 0,
+ 31257,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31258,
+ 31259,
+ 0,
+ 0,
+ 31260,
+ 0,
+ 31261,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31262,
+ 31263,
+ 0,
+ 0,
+ 31264,
+ 0,
+ 31266,
+ 0,
+ 31267,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31281,
+ 0,
+ 31282,
+ 0,
+ 31284,
+ 0,
+ 0,
+ 31285,
+ 31287,
+ 31288,
+ 0,
+ 0,
+ 31290,
+ 0,
+ 0,
+ 0,
+ 31292,
+ 31295,
+ 0,
+ 31299,
+ 0,
+ 31300,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31302,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31303,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31304,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31305,
+ 31308,
+ 31309,
+ 31315,
+ 0,
+ 31317,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31323,
+ 0,
+ 31324,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31325,
+ 31327,
+ 0,
+ 0,
+ 31331,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31333,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31336,
+ 0,
+ 0,
+ 31337,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31338,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31339,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31342,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31345,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31347,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31348,
+ 0,
+ 0,
+ 31350,
+ 31351,
+ 0,
+ 31352,
+ 0,
+ 0,
+ 31354,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31355,
+ 0,
+ 0,
+ 31356,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31363,
+ 0,
+ 31372,
+ 0,
+ 0,
+ 31373,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31376,
+ 0,
+ 31388,
+ 0,
+ 31389,
+ 0,
+ 31392,
+ 0,
+ 31401,
+ 0,
+ 31405,
+ 31407,
+ 31408,
+ 0,
+ 31409,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31413,
+ 31415,
+ 0,
+ 0,
+ 0,
+ 31416,
+ 31418,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31422,
+ 31423,
+ 0,
+ 0,
+ 31424,
+ 0,
+ 31425,
+ 31432,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31433,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31434,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31435,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31438,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31442,
+ 0,
+ 31444,
+ 0,
+ 31448,
+ 0,
+ 0,
+ 31451,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31452,
+ 0,
+ 31461,
+ 31465,
+ 0,
+ 0,
+ 31466,
+ 0,
+ 0,
+ 31467,
+ 0,
+ 0,
+ 31468,
+ 0,
+ 0,
+ 0,
+ 31469,
+ 31473,
+ 0,
+ 31476,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31489,
+ 31490,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31492,
+ 31493,
+ 31494,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31501,
+ 31504,
+ 31505,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31509,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31510,
+ 0,
+ 0,
+ 31511,
+ 0,
+ 0,
+ 31513,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31514,
+ 0,
+ 31522,
+ 31536,
+ 31539,
+ 31540,
+ 0,
+ 31541,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31546,
+ 31553,
+ 31559,
+ 0,
+ 0,
+ 0,
+ 31560,
+ 31561,
+ 31562,
+ 0,
+ 0,
+ 31564,
+ 31567,
+ 0,
+ 31569,
+ 0,
+ 0,
+ 0,
+ 31570,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31571,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31572,
+ 31574,
+ 31580,
+ 31581,
+ 0,
+ 0,
+ 31582,
+ 31584,
+ 31585,
+ 31586,
+ 31595,
+ 0,
+ 31596,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31597,
+ 0,
+ 31599,
+ 0,
+ 31600,
+ 31601,
+ 0,
+ 0,
+ 31603,
+ 31604,
+ 0,
+ 0,
+ 31608,
+ 31610,
+ 0,
+ 0,
+ 0,
+ 31611,
+ 0,
+ 31615,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31616,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31617,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31618,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31621,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31622,
+ 31625,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31627,
+ 0,
+ 31641,
+ 0,
+ 0,
+ 31642,
+ 0,
+ 0,
+ 31643,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31644,
+ 0,
+ 31646,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31648,
+ 0,
+ 0,
+ 0,
+ 31652,
+ 0,
+ 0,
+ 0,
+ 31657,
+ 0,
+ 0,
+ 31676,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 31689,
+ 31691,
+ 31692,
+ 0,
+ 31694,
+ 0,
+ 0,
+ 0,
+ 31696,
+ 0,
+ 31702,
+ 0,
+ 31703,
+ 0,
+}
+
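+/* Each entry appears to mirror the DictWord struct of the C reference
+   implementation: a word length (whose high bit marks the end of a hash
+   bucket), a transform id, and an index into the dictionary data. Entry 0 is
+   all zeros so that a 0 in the bucket table above can denote an empty
+   bucket. */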
+var kStaticDictionaryWords = [31705]dictWord{
+ dictWord{0, 0, 0},
+ dictWord{8, 0, 1002},
+ dictWord{136, 0, 1015},
+ dictWord{4, 0, 683},
+ dictWord{4, 10, 325},
+ dictWord{138, 10, 125},
+ dictWord{7, 11, 572},
+ dictWord{9, 11, 592},
+ dictWord{11, 11, 680},
+ dictWord{11, 11, 842},
+ dictWord{11, 11, 924},
+ dictWord{12, 11, 356},
+ dictWord{12, 11, 550},
+ dictWord{13, 11, 317},
+ dictWord{13, 11, 370},
+ dictWord{13, 11, 469},
+ dictWord{13, 11, 471},
+ dictWord{14, 11, 397},
+ dictWord{18, 11, 69},
+ dictWord{146, 11, 145},
+ dictWord{134, 0, 1265},
+ dictWord{136, 11, 534},
+ dictWord{134, 0, 1431},
+ dictWord{11, 0, 138},
+ dictWord{140, 0, 40},
+ dictWord{4, 0, 155},
+ dictWord{7, 0, 1689},
+ dictWord{4, 10, 718},
+ dictWord{135, 10, 1216},
+ dictWord{4, 0, 245},
+ dictWord{5, 0, 151},
+ dictWord{5, 0, 741},
+ dictWord{6, 0, 1147},
+ dictWord{7, 0, 498},
+ dictWord{7, 0, 870},
+ dictWord{7, 0, 1542},
+ dictWord{12, 0, 213},
+ dictWord{14, 0, 36},
+ dictWord{14, 0, 391},
+ dictWord{17, 0, 111},
+ dictWord{18, 0, 6},
+ dictWord{18, 0, 46},
+ dictWord{18, 0, 151},
+ dictWord{19, 0, 36},
+ dictWord{20, 0, 32},
+ dictWord{20, 0, 56},
+ dictWord{20, 0, 69},
+ dictWord{20, 0, 102},
+ dictWord{21, 0, 4},
+ dictWord{22, 0, 8},
+ dictWord{22, 0, 10},
+ dictWord{22, 0, 14},
+ dictWord{150, 0, 31},
+ dictWord{4, 0, 624},
+ dictWord{135, 0, 1752},
+ dictWord{5, 10, 124},
+ dictWord{5, 10, 144},
+ dictWord{6, 10, 548},
+ dictWord{7, 10, 15},
+ dictWord{7, 10, 153},
+ dictWord{137, 10, 629},
+ dictWord{6, 0, 503},
+ dictWord{9, 0, 586},
+ dictWord{13, 0, 468},
+ dictWord{14, 0, 66},
+ dictWord{16, 0, 58},
+ dictWord{7, 10, 1531},
+ dictWord{8, 10, 416},
+ dictWord{9, 10, 275},
+ dictWord{10, 10, 100},
+ dictWord{11, 10, 658},
+ dictWord{11, 10, 979},
+ dictWord{12, 10, 86},
+ dictWord{14, 10, 207},
+ dictWord{15, 10, 20},
+ dictWord{143, 10, 25},
+ dictWord{5, 0, 603},
+ dictWord{7, 0, 1212},
+ dictWord{9, 0, 565},
+ dictWord{14, 0, 301},
+ dictWord{5, 10, 915},
+ dictWord{6, 10, 1783},
+ dictWord{7, 10, 211},
+ dictWord{7, 10, 1353},
+ dictWord{9, 10, 83},
+ dictWord{10, 10, 376},
+ dictWord{10, 10, 431},
+ dictWord{11, 10, 543},
+ dictWord{12, 10, 664},
+ dictWord{13, 10, 280},
+ dictWord{13, 10, 428},
+ dictWord{14, 10, 128},
+ dictWord{17, 10, 52},
+ dictWord{145, 10, 81},
+ dictWord{4, 0, 492},
+ dictWord{133, 0, 451},
+ dictWord{135, 0, 835},
+ dictWord{141, 0, 70},
+ dictWord{132, 0, 539},
+ dictWord{7, 11, 748},
+ dictWord{139, 11, 700},
+ dictWord{7, 11, 1517},
+ dictWord{11, 11, 597},
+ dictWord{14, 11, 76},
+ dictWord{14, 11, 335},
+ dictWord{148, 11, 33},
+ dictWord{6, 0, 113},
+ dictWord{135, 0, 436},
+ dictWord{4, 10, 338},
+ dictWord{133, 10, 400},
+ dictWord{136, 0, 718},
+ dictWord{133, 11, 127},
+ dictWord{133, 11, 418},
+ dictWord{6, 0, 1505},
+ dictWord{7, 0, 520},
+ dictWord{6, 11, 198},
+ dictWord{11, 10, 892},
+ dictWord{140, 11, 83},
+ dictWord{4, 10, 221},
+ dictWord{5, 10, 659},
+ dictWord{5, 10, 989},
+ dictWord{7, 10, 697},
+ dictWord{7, 10, 1211},
+ dictWord{138, 10, 284},
+ dictWord{135, 0, 1070},
+ dictWord{5, 11, 276},
+ dictWord{6, 11, 55},
+ dictWord{135, 11, 1369},
+ dictWord{134, 0, 1515},
+ dictWord{6, 11, 1752},
+ dictWord{136, 11, 726},
+ dictWord{138, 10, 507},
+ dictWord{15, 0, 78},
+ dictWord{4, 10, 188},
+ dictWord{135, 10, 805},
+ dictWord{5, 10, 884},
+ dictWord{139, 10, 991},
+ dictWord{133, 11, 764},
+ dictWord{134, 10, 1653},
+ dictWord{6, 11, 309},
+ dictWord{7, 11, 331},
+ dictWord{138, 11, 550},
+ dictWord{135, 11, 1861},
+ dictWord{132, 11, 348},
+ dictWord{135, 11, 986},
+ dictWord{135, 11, 1573},
+ dictWord{12, 0, 610},
+ dictWord{13, 0, 431},
+ dictWord{144, 0, 59},
+ dictWord{9, 11, 799},
+ dictWord{140, 10, 166},
+ dictWord{134, 0, 1530},
+ dictWord{132, 0, 750},
+ dictWord{132, 0, 307},
+ dictWord{133, 0, 964},
+ dictWord{6, 11, 194},
+ dictWord{7, 11, 133},
+ dictWord{10, 11, 493},
+ dictWord{10, 11, 570},
+ dictWord{139, 11, 664},
+ dictWord{5, 11, 24},
+ dictWord{5, 11, 569},
+ dictWord{6, 11, 3},
+ dictWord{6, 11, 119},
+ dictWord{6, 11, 143},
+ dictWord{6, 11, 440},
+ dictWord{7, 11, 295},
+ dictWord{7, 11, 599},
+ dictWord{7, 11, 1686},
+ dictWord{7, 11, 1854},
+ dictWord{8, 11, 424},
+ dictWord{9, 11, 43},
+ dictWord{9, 11, 584},
+ dictWord{9, 11, 760},
+ dictWord{10, 11, 148},
+ dictWord{10, 11, 328},
+ dictWord{11, 11, 159},
+ dictWord{11, 11, 253},
+ dictWord{11, 11, 506},
+ dictWord{12, 11, 487},
+ dictWord{12, 11, 531},
+ dictWord{144, 11, 33},
+ dictWord{136, 10, 760},
+ dictWord{5, 11, 14},
+ dictWord{5, 11, 892},
+ dictWord{6, 11, 283},
+ dictWord{7, 11, 234},
+ dictWord{136, 11, 537},
+ dictWord{135, 11, 1251},
+ dictWord{4, 11, 126},
+ dictWord{8, 11, 635},
+ dictWord{147, 11, 34},
+ dictWord{4, 11, 316},
+ dictWord{135, 11, 1561},
+ dictWord{6, 0, 999},
+ dictWord{6, 0, 1310},
+ dictWord{137, 11, 861},
+ dictWord{4, 11, 64},
+ dictWord{5, 11, 352},
+ dictWord{5, 11, 720},
+ dictWord{6, 11, 368},
+ dictWord{139, 11, 359},
+ dictWord{4, 0, 75},
+ dictWord{5, 0, 180},
+ dictWord{6, 0, 500},
+ dictWord{7, 0, 58},
+ dictWord{7, 0, 710},
+ dictWord{10, 0, 645},
+ dictWord{136, 10, 770},
+ dictWord{133, 0, 649},
+ dictWord{6, 0, 276},
+ dictWord{7, 0, 282},
+ dictWord{7, 0, 879},
+ dictWord{7, 0, 924},
+ dictWord{8, 0, 459},
+ dictWord{9, 0, 599},
+ dictWord{9, 0, 754},
+ dictWord{11, 0, 574},
+ dictWord{12, 0, 128},
+ dictWord{12, 0, 494},
+ dictWord{13, 0, 52},
+ dictWord{13, 0, 301},
+ dictWord{15, 0, 30},
+ dictWord{143, 0, 132},
+ dictWord{132, 0, 200},
+ dictWord{4, 10, 89},
+ dictWord{5, 10, 489},
+ dictWord{6, 10, 315},
+ dictWord{7, 10, 553},
+ dictWord{7, 10, 1745},
+ dictWord{138, 10, 243},
+ dictWord{135, 11, 1050},
+ dictWord{7, 0, 1621},
+ dictWord{6, 10, 1658},
+ dictWord{9, 10, 3},
+ dictWord{10, 10, 154},
+ dictWord{11, 10, 641},
+ dictWord{13, 10, 85},
+ dictWord{13, 10, 201},
+ dictWord{141, 10, 346},
+ dictWord{6, 11, 175},
+ dictWord{137, 11, 289},
+ dictWord{5, 11, 432},
+ dictWord{133, 11, 913},
+ dictWord{6, 0, 225},
+ dictWord{137, 0, 211},
+ dictWord{7, 0, 718},
+ dictWord{8, 0, 687},
+ dictWord{139, 0, 374},
+ dictWord{4, 10, 166},
+ dictWord{133, 10, 505},
+ dictWord{9, 0, 110},
+ dictWord{134, 10, 1670},
+ dictWord{8, 0, 58},
+ dictWord{9, 0, 724},
+ dictWord{11, 0, 809},
+ dictWord{13, 0, 113},
+ dictWord{145, 0, 72},
+ dictWord{6, 0, 345},
+ dictWord{7, 0, 1247},
+ dictWord{144, 11, 82},
+ dictWord{5, 11, 931},
+ dictWord{134, 11, 1698},
+ dictWord{8, 0, 767},
+ dictWord{8, 0, 803},
+ dictWord{9, 0, 301},
+ dictWord{137, 0, 903},
+ dictWord{139, 0, 203},
+ dictWord{134, 0, 1154},
+ dictWord{7, 0, 1949},
+ dictWord{136, 0, 674},
+ dictWord{134, 0, 259},
+ dictWord{135, 0, 1275},
+ dictWord{5, 11, 774},
+ dictWord{6, 11, 1637},
+ dictWord{6, 11, 1686},
+ dictWord{134, 11, 1751},
+ dictWord{134, 0, 1231},
+ dictWord{7, 10, 445},
+ dictWord{8, 10, 307},
+ dictWord{8, 10, 704},
+ dictWord{10, 10, 41},
+ dictWord{10, 10, 439},
+ dictWord{11, 10, 237},
+ dictWord{11, 10, 622},
+ dictWord{140, 10, 201},
+ dictWord{136, 0, 254},
+ dictWord{6, 11, 260},
+ dictWord{135, 11, 1484},
+ dictWord{139, 0, 277},
+ dictWord{135, 10, 1977},
+ dictWord{4, 10, 189},
+ dictWord{5, 10, 713},
+ dictWord{6, 11, 573},
+ dictWord{136, 10, 57},
+ dictWord{138, 10, 371},
+ dictWord{132, 10, 552},
+ dictWord{134, 11, 344},
+ dictWord{133, 0, 248},
+ dictWord{9, 0, 800},
+ dictWord{10, 0, 693},
+ dictWord{11, 0, 482},
+ dictWord{11, 0, 734},
+ dictWord{11, 0, 789},
+ dictWord{134, 11, 240},
+ dictWord{4, 0, 116},
+ dictWord{5, 0, 95},
+ dictWord{5, 0, 445},
+ dictWord{7, 0, 1688},
+ dictWord{8, 0, 29},
+ dictWord{9, 0, 272},
+ dictWord{11, 0, 509},
+ dictWord{11, 0, 915},
+ dictWord{4, 11, 292},
+ dictWord{4, 11, 736},
+ dictWord{5, 11, 871},
+ dictWord{6, 11, 171},
+ dictWord{6, 11, 1689},
+ dictWord{7, 11, 1324},
+ dictWord{7, 11, 1944},
+ dictWord{9, 11, 415},
+ dictWord{9, 11, 580},
+ dictWord{14, 11, 230},
+ dictWord{146, 11, 68},
+ dictWord{7, 0, 490},
+ dictWord{13, 0, 100},
+ dictWord{143, 0, 75},
+ dictWord{135, 0, 1641},
+ dictWord{133, 0, 543},
+ dictWord{7, 11, 209},
+ dictWord{8, 11, 661},
+ dictWord{10, 11, 42},
+ dictWord{11, 11, 58},
+ dictWord{12, 11, 58},
+ dictWord{12, 11, 118},
+ dictWord{141, 11, 32},
+ dictWord{5, 0, 181},
+ dictWord{8, 0, 41},
+ dictWord{6, 11, 63},
+ dictWord{135, 11, 920},
+ dictWord{133, 0, 657},
+ dictWord{133, 11, 793},
+ dictWord{138, 0, 709},
+ dictWord{7, 0, 25},
+ dictWord{8, 0, 202},
+ dictWord{138, 0, 536},
+ dictWord{5, 11, 665},
+ dictWord{135, 10, 1788},
+ dictWord{145, 10, 49},
+ dictWord{9, 0, 423},
+ dictWord{140, 0, 89},
+ dictWord{5, 11, 67},
+ dictWord{6, 11, 62},
+ dictWord{6, 11, 374},
+ dictWord{135, 11, 1391},
+ dictWord{8, 0, 113},
+ dictWord{9, 0, 877},
+ dictWord{10, 0, 554},
+ dictWord{11, 0, 83},
+ dictWord{12, 0, 136},
+ dictWord{19, 0, 109},
+ dictWord{9, 11, 790},
+ dictWord{140, 11, 47},
+ dictWord{138, 10, 661},
+ dictWord{4, 0, 963},
+ dictWord{10, 0, 927},
+ dictWord{14, 0, 442},
+ dictWord{135, 10, 1945},
+ dictWord{133, 0, 976},
+ dictWord{132, 0, 206},
+ dictWord{4, 11, 391},
+ dictWord{135, 11, 1169},
+ dictWord{134, 0, 2002},
+ dictWord{6, 0, 696},
+ dictWord{134, 0, 1008},
+ dictWord{134, 0, 1170},
+ dictWord{132, 11, 271},
+ dictWord{7, 0, 13},
+ dictWord{8, 0, 226},
+ dictWord{10, 0, 537},
+ dictWord{11, 0, 570},
+ dictWord{11, 0, 605},
+ dictWord{11, 0, 799},
+ dictWord{11, 0, 804},
+ dictWord{12, 0, 85},
+ dictWord{12, 0, 516},
+ dictWord{12, 0, 623},
+ dictWord{13, 0, 112},
+ dictWord{13, 0, 361},
+ dictWord{14, 0, 77},
+ dictWord{14, 0, 78},
+ dictWord{17, 0, 28},
+ dictWord{19, 0, 110},
+ dictWord{140, 11, 314},
+ dictWord{132, 0, 769},
+ dictWord{134, 0, 1544},
+ dictWord{4, 0, 551},
+ dictWord{137, 0, 678},
+ dictWord{5, 10, 84},
+ dictWord{134, 10, 163},
+ dictWord{9, 0, 57},
+ dictWord{9, 0, 459},
+ dictWord{10, 0, 425},
+ dictWord{11, 0, 119},
+ dictWord{12, 0, 184},
+ dictWord{12, 0, 371},
+ dictWord{13, 0, 358},
+ dictWord{145, 0, 51},
+ dictWord{5, 0, 188},
+ dictWord{5, 0, 814},
+ dictWord{8, 0, 10},
+ dictWord{9, 0, 421},
+ dictWord{9, 0, 729},
+ dictWord{10, 0, 609},
+ dictWord{11, 0, 689},
+ dictWord{4, 11, 253},
+ dictWord{5, 10, 410},
+ dictWord{5, 11, 544},
+ dictWord{7, 11, 300},
+ dictWord{137, 11, 340},
+ dictWord{134, 0, 624},
+ dictWord{138, 11, 321},
+ dictWord{135, 0, 1941},
+ dictWord{18, 0, 130},
+ dictWord{5, 10, 322},
+ dictWord{8, 10, 186},
+ dictWord{9, 10, 262},
+ dictWord{10, 10, 187},
+ dictWord{142, 10, 208},
+ dictWord{5, 11, 53},
+ dictWord{5, 11, 541},
+ dictWord{6, 11, 94},
+ dictWord{6, 11, 499},
+ dictWord{7, 11, 230},
+ dictWord{139, 11, 321},
+ dictWord{133, 10, 227},
+ dictWord{4, 0, 378},
+ dictWord{4, 11, 920},
+ dictWord{5, 11, 25},
+ dictWord{5, 11, 790},
+ dictWord{6, 11, 457},
+ dictWord{135, 11, 853},
+ dictWord{137, 0, 269},
+ dictWord{132, 0, 528},
+ dictWord{134, 0, 1146},
+ dictWord{7, 10, 1395},
+ dictWord{8, 10, 486},
+ dictWord{9, 10, 236},
+ dictWord{9, 10, 878},
+ dictWord{10, 10, 218},
+ dictWord{11, 10, 95},
+ dictWord{19, 10, 17},
+ dictWord{147, 10, 31},
+ dictWord{7, 10, 2043},
+ dictWord{8, 10, 672},
+ dictWord{141, 10, 448},
+ dictWord{134, 0, 1105},
+ dictWord{134, 0, 1616},
+ dictWord{134, 11, 1765},
+ dictWord{140, 11, 163},
+ dictWord{5, 10, 412},
+ dictWord{133, 11, 822},
+ dictWord{132, 11, 634},
+ dictWord{6, 0, 656},
+ dictWord{134, 11, 1730},
+ dictWord{134, 0, 1940},
+ dictWord{5, 0, 104},
+ dictWord{6, 0, 173},
+ dictWord{135, 0, 1631},
+ dictWord{136, 10, 562},
+ dictWord{6, 11, 36},
+ dictWord{7, 11, 658},
+ dictWord{8, 11, 454},
+ dictWord{147, 11, 86},
+ dictWord{5, 0, 457},
+ dictWord{134, 10, 1771},
+ dictWord{7, 0, 810},
+ dictWord{8, 0, 138},
+ dictWord{8, 0, 342},
+ dictWord{9, 0, 84},
+ dictWord{10, 0, 193},
+ dictWord{11, 0, 883},
+ dictWord{140, 0, 359},
+ dictWord{9, 0, 620},
+ dictWord{135, 10, 1190},
+ dictWord{137, 10, 132},
+ dictWord{7, 11, 975},
+ dictWord{137, 11, 789},
+ dictWord{6, 0, 95},
+ dictWord{6, 0, 1934},
+ dictWord{136, 0, 967},
+ dictWord{141, 11, 335},
+ dictWord{6, 0, 406},
+ dictWord{10, 0, 409},
+ dictWord{10, 0, 447},
+ dictWord{11, 0, 44},
+ dictWord{140, 0, 100},
+ dictWord{4, 10, 317},
+ dictWord{135, 10, 1279},
+ dictWord{132, 0, 477},
+ dictWord{134, 0, 1268},
+ dictWord{6, 0, 1941},
+ dictWord{8, 0, 944},
+ dictWord{5, 10, 63},
+ dictWord{133, 10, 509},
+ dictWord{132, 0, 629},
+ dictWord{132, 11, 104},
+ dictWord{4, 0, 246},
+ dictWord{133, 0, 375},
+ dictWord{6, 0, 1636},
+ dictWord{132, 10, 288},
+ dictWord{135, 11, 1614},
+ dictWord{9, 0, 49},
+ dictWord{10, 0, 774},
+ dictWord{8, 10, 89},
+ dictWord{8, 10, 620},
+ dictWord{11, 10, 628},
+ dictWord{12, 10, 322},
+ dictWord{143, 10, 124},
+ dictWord{4, 0, 282},
+ dictWord{7, 0, 1034},
+ dictWord{11, 0, 398},
+ dictWord{11, 0, 634},
+ dictWord{12, 0, 1},
+ dictWord{12, 0, 79},
+ dictWord{12, 0, 544},
+ dictWord{14, 0, 237},
+ dictWord{17, 0, 10},
+ dictWord{146, 0, 20},
+ dictWord{132, 0, 824},
+ dictWord{7, 11, 45},
+ dictWord{9, 11, 542},
+ dictWord{9, 11, 566},
+ dictWord{138, 11, 728},
+ dictWord{5, 0, 118},
+ dictWord{5, 0, 499},
+ dictWord{6, 0, 476},
+ dictWord{6, 0, 665},
+ dictWord{6, 0, 1176},
+ dictWord{6, 0, 1196},
+ dictWord{7, 0, 600},
+ dictWord{7, 0, 888},
+ dictWord{135, 0, 1096},
+ dictWord{7, 0, 296},
+ dictWord{7, 0, 596},
+ dictWord{8, 0, 560},
+ dictWord{8, 0, 586},
+ dictWord{9, 0, 612},
+ dictWord{11, 0, 304},
+ dictWord{12, 0, 46},
+ dictWord{13, 0, 89},
+ dictWord{14, 0, 112},
+ dictWord{145, 0, 122},
+ dictWord{5, 0, 894},
+ dictWord{6, 0, 1772},
+ dictWord{9, 0, 1009},
+ dictWord{138, 10, 120},
+ dictWord{5, 11, 533},
+ dictWord{7, 11, 755},
+ dictWord{138, 11, 780},
+ dictWord{151, 10, 1},
+ dictWord{6, 0, 1474},
+ dictWord{7, 11, 87},
+ dictWord{142, 11, 288},
+ dictWord{139, 0, 366},
+ dictWord{137, 10, 461},
+ dictWord{7, 11, 988},
+ dictWord{7, 11, 1939},
+ dictWord{9, 11, 64},
+ dictWord{9, 11, 502},
+ dictWord{12, 11, 7},
+ dictWord{12, 11, 34},
+ dictWord{13, 11, 12},
+ dictWord{13, 11, 234},
+ dictWord{147, 11, 77},
+ dictWord{7, 0, 1599},
+ dictWord{7, 0, 1723},
+ dictWord{8, 0, 79},
+ dictWord{8, 0, 106},
+ dictWord{8, 0, 190},
+ dictWord{8, 0, 302},
+ dictWord{8, 0, 383},
+ dictWord{8, 0, 713},
+ dictWord{9, 0, 119},
+ dictWord{9, 0, 233},
+ dictWord{9, 0, 419},
+ dictWord{9, 0, 471},
+ dictWord{10, 0, 181},
+ dictWord{10, 0, 406},
+ dictWord{11, 0, 57},
+ dictWord{11, 0, 85},
+ dictWord{11, 0, 120},
+ dictWord{11, 0, 177},
+ dictWord{11, 0, 296},
+ dictWord{11, 0, 382},
+ dictWord{11, 0, 454},
+ dictWord{11, 0, 758},
+ dictWord{11, 0, 999},
+ dictWord{12, 0, 27},
+ dictWord{12, 0, 98},
+ dictWord{12, 0, 131},
+ dictWord{12, 0, 245},
+ dictWord{12, 0, 312},
+ dictWord{12, 0, 446},
+ dictWord{12, 0, 454},
+ dictWord{13, 0, 25},
+ dictWord{13, 0, 98},
+ dictWord{13, 0, 426},
+ dictWord{13, 0, 508},
+ dictWord{14, 0, 70},
+ dictWord{14, 0, 163},
+ dictWord{14, 0, 272},
+ dictWord{14, 0, 277},
+ dictWord{14, 0, 370},
+ dictWord{15, 0, 95},
+ dictWord{15, 0, 138},
+ dictWord{15, 0, 167},
+ dictWord{17, 0, 38},
+ dictWord{148, 0, 96},
+ dictWord{135, 10, 1346},
+ dictWord{10, 0, 200},
+ dictWord{19, 0, 2},
+ dictWord{151, 0, 22},
+ dictWord{135, 11, 141},
+ dictWord{134, 10, 85},
+ dictWord{134, 0, 1759},
+ dictWord{138, 0, 372},
+ dictWord{145, 0, 16},
+ dictWord{8, 0, 943},
+ dictWord{132, 11, 619},
+ dictWord{139, 11, 88},
+ dictWord{5, 11, 246},
+ dictWord{8, 11, 189},
+ dictWord{9, 11, 355},
+ dictWord{9, 11, 512},
+ dictWord{10, 11, 124},
+ dictWord{10, 11, 453},
+ dictWord{11, 11, 143},
+ dictWord{11, 11, 416},
+ dictWord{11, 11, 859},
+ dictWord{141, 11, 341},
+ dictWord{5, 0, 258},
+ dictWord{134, 0, 719},
+ dictWord{6, 0, 1798},
+ dictWord{6, 0, 1839},
+ dictWord{8, 0, 900},
+ dictWord{10, 0, 874},
+ dictWord{10, 0, 886},
+ dictWord{12, 0, 698},
+ dictWord{12, 0, 732},
+ dictWord{12, 0, 770},
+ dictWord{16, 0, 106},
+ dictWord{18, 0, 163},
+ dictWord{18, 0, 170},
+ dictWord{18, 0, 171},
+ dictWord{152, 0, 20},
+ dictWord{9, 0, 707},
+ dictWord{11, 0, 326},
+ dictWord{11, 0, 339},
+ dictWord{12, 0, 423},
+ dictWord{12, 0, 502},
+ dictWord{20, 0, 62},
+ dictWord{9, 11, 707},
+ dictWord{11, 11, 326},
+ dictWord{11, 11, 339},
+ dictWord{12, 11, 423},
+ dictWord{12, 11, 502},
+ dictWord{148, 11, 62},
+ dictWord{5, 0, 30},
+ dictWord{7, 0, 495},
+ dictWord{8, 0, 134},
+ dictWord{9, 0, 788},
+ dictWord{140, 0, 438},
+ dictWord{133, 11, 678},
+ dictWord{5, 10, 279},
+ dictWord{6, 10, 235},
+ dictWord{7, 10, 468},
+ dictWord{8, 10, 446},
+ dictWord{9, 10, 637},
+ dictWord{10, 10, 717},
+ dictWord{11, 10, 738},
+ dictWord{140, 10, 514},
+ dictWord{5, 11, 35},
+ dictWord{6, 11, 287},
+ dictWord{7, 11, 862},
+ dictWord{7, 11, 1886},
+ dictWord{138, 11, 179},
+ dictWord{7, 0, 1948},
+ dictWord{7, 0, 2004},
+ dictWord{132, 11, 517},
+ dictWord{5, 10, 17},
+ dictWord{6, 10, 371},
+ dictWord{137, 10, 528},
+ dictWord{4, 0, 115},
+ dictWord{5, 0, 669},
+ dictWord{6, 0, 407},
+ dictWord{8, 0, 311},
+ dictWord{11, 0, 10},
+ dictWord{141, 0, 5},
+ dictWord{137, 0, 381},
+ dictWord{5, 0, 50},
+ dictWord{6, 0, 439},
+ dictWord{7, 0, 780},
+ dictWord{135, 0, 1040},
+ dictWord{136, 11, 667},
+ dictWord{11, 11, 403},
+ dictWord{146, 11, 83},
+ dictWord{5, 0, 1},
+ dictWord{6, 0, 81},
+ dictWord{138, 0, 520},
+ dictWord{134, 0, 738},
+ dictWord{5, 0, 482},
+ dictWord{8, 0, 98},
+ dictWord{9, 0, 172},
+ dictWord{10, 0, 360},
+ dictWord{10, 0, 700},
+ dictWord{10, 0, 822},
+ dictWord{11, 0, 302},
+ dictWord{11, 0, 778},
+ dictWord{12, 0, 50},
+ dictWord{12, 0, 127},
+ dictWord{12, 0, 396},
+ dictWord{13, 0, 62},
+ dictWord{13, 0, 328},
+ dictWord{14, 0, 122},
+ dictWord{147, 0, 72},
+ dictWord{9, 11, 157},
+ dictWord{10, 11, 131},
+ dictWord{140, 11, 72},
+ dictWord{135, 11, 714},
+ dictWord{135, 11, 539},
+ dictWord{5, 0, 2},
+ dictWord{6, 0, 512},
+ dictWord{7, 0, 797},
+ dictWord{7, 0, 1494},
+ dictWord{8, 0, 253},
+ dictWord{8, 0, 589},
+ dictWord{9, 0, 77},
+ dictWord{10, 0, 1},
+ dictWord{10, 0, 129},
+ dictWord{10, 0, 225},
+ dictWord{11, 0, 118},
+ dictWord{11, 0, 226},
+ dictWord{11, 0, 251},
+ dictWord{11, 0, 430},
+ dictWord{11, 0, 701},
+ dictWord{11, 0, 974},
+ dictWord{11, 0, 982},
+ dictWord{12, 0, 64},
+ dictWord{12, 0, 260},
+ dictWord{12, 0, 488},
+ dictWord{140, 0, 690},
+ dictWord{5, 11, 394},
+ dictWord{7, 11, 367},
+ dictWord{7, 11, 487},
+ dictWord{7, 11, 857},
+ dictWord{7, 11, 1713},
+ dictWord{8, 11, 246},
+ dictWord{9, 11, 537},
+ dictWord{10, 11, 165},
+ dictWord{12, 11, 219},
+ dictWord{140, 11, 561},
+ dictWord{136, 0, 557},
+ dictWord{5, 10, 779},
+ dictWord{5, 10, 807},
+ dictWord{6, 10, 1655},
+ dictWord{134, 10, 1676},
+ dictWord{4, 10, 196},
+ dictWord{5, 10, 558},
+ dictWord{133, 10, 949},
+ dictWord{11, 11, 827},
+ dictWord{12, 11, 56},
+ dictWord{14, 11, 34},
+ dictWord{143, 11, 148},
+ dictWord{137, 0, 347},
+ dictWord{133, 0, 572},
+ dictWord{134, 0, 832},
+ dictWord{4, 0, 12},
+ dictWord{7, 0, 504},
+ dictWord{7, 0, 522},
+ dictWord{7, 0, 809},
+ dictWord{8, 0, 797},
+ dictWord{141, 0, 88},
+ dictWord{4, 10, 752},
+ dictWord{133, 11, 449},
+ dictWord{7, 11, 86},
+ dictWord{8, 11, 103},
+ dictWord{145, 11, 69},
+ dictWord{7, 11, 2028},
+ dictWord{138, 11, 641},
+ dictWord{5, 0, 528},
+ dictWord{6, 11, 1},
+ dictWord{142, 11, 2},
+ dictWord{134, 0, 861},
+ dictWord{10, 0, 294},
+ dictWord{4, 10, 227},
+ dictWord{5, 10, 159},
+ dictWord{5, 10, 409},
+ dictWord{7, 10, 80},
+ dictWord{10, 10, 479},
+ dictWord{12, 10, 418},
+ dictWord{14, 10, 50},
+ dictWord{14, 10, 249},
+ dictWord{142, 10, 295},
+ dictWord{7, 10, 1470},
+ dictWord{8, 10, 66},
+ dictWord{8, 10, 137},
+ dictWord{8, 10, 761},
+ dictWord{9, 10, 638},
+ dictWord{11, 10, 80},
+ dictWord{11, 10, 212},
+ dictWord{11, 10, 368},
+ dictWord{11, 10, 418},
+ dictWord{12, 10, 8},
+ dictWord{13, 10, 15},
+ dictWord{16, 10, 61},
+ dictWord{17, 10, 59},
+ dictWord{19, 10, 28},
+ dictWord{148, 10, 84},
+ dictWord{20, 0, 109},
+ dictWord{135, 11, 1148},
+ dictWord{6, 11, 277},
+ dictWord{7, 11, 1274},
+ dictWord{7, 11, 1386},
+ dictWord{7, 11, 1392},
+ dictWord{12, 11, 129},
+ dictWord{146, 11, 87},
+ dictWord{6, 11, 187},
+ dictWord{7, 11, 39},
+ dictWord{7, 11, 1203},
+ dictWord{8, 11, 380},
+ dictWord{8, 11, 542},
+ dictWord{14, 11, 117},
+ dictWord{149, 11, 28},
+ dictWord{134, 0, 1187},
+ dictWord{5, 0, 266},
+ dictWord{9, 0, 290},
+ dictWord{9, 0, 364},
+ dictWord{10, 0, 293},
+ dictWord{11, 0, 606},
+ dictWord{142, 0, 45},
+ dictWord{6, 11, 297},
+ dictWord{7, 11, 793},
+ dictWord{139, 11, 938},
+ dictWord{4, 0, 50},
+ dictWord{6, 0, 594},
+ dictWord{9, 0, 121},
+ dictWord{10, 0, 49},
+ dictWord{10, 0, 412},
+ dictWord{139, 0, 834},
+ dictWord{136, 0, 748},
+ dictWord{7, 11, 464},
+ dictWord{8, 11, 438},
+ dictWord{11, 11, 105},
+ dictWord{11, 11, 363},
+ dictWord{12, 11, 231},
+ dictWord{14, 11, 386},
+ dictWord{15, 11, 102},
+ dictWord{148, 11, 75},
+ dictWord{132, 0, 466},
+ dictWord{13, 0, 399},
+ dictWord{14, 0, 337},
+ dictWord{6, 10, 38},
+ dictWord{7, 10, 1220},
+ dictWord{8, 10, 185},
+ dictWord{8, 10, 256},
+ dictWord{9, 10, 22},
+ dictWord{9, 10, 331},
+ dictWord{10, 10, 738},
+ dictWord{11, 10, 205},
+ dictWord{11, 10, 540},
+ dictWord{11, 10, 746},
+ dictWord{13, 10, 465},
+ dictWord{142, 10, 194},
+ dictWord{9, 0, 378},
+ dictWord{141, 0, 162},
+ dictWord{137, 0, 519},
+ dictWord{4, 10, 159},
+ dictWord{6, 10, 115},
+ dictWord{7, 10, 252},
+ dictWord{7, 10, 257},
+ dictWord{7, 10, 1928},
+ dictWord{8, 10, 69},
+ dictWord{9, 10, 384},
+ dictWord{10, 10, 91},
+ dictWord{10, 10, 615},
+ dictWord{12, 10, 375},
+ dictWord{14, 10, 235},
+ dictWord{18, 10, 117},
+ dictWord{147, 10, 123},
+ dictWord{5, 11, 604},
+ dictWord{5, 10, 911},
+ dictWord{136, 10, 278},
+ dictWord{132, 0, 667},
+ dictWord{8, 0, 351},
+ dictWord{9, 0, 322},
+ dictWord{4, 10, 151},
+ dictWord{135, 10, 1567},
+ dictWord{134, 0, 902},
+ dictWord{133, 10, 990},
+ dictWord{12, 0, 180},
+ dictWord{5, 10, 194},
+ dictWord{7, 10, 1662},
+ dictWord{137, 10, 90},
+ dictWord{4, 0, 869},
+ dictWord{134, 0, 1996},
+ dictWord{134, 0, 813},
+ dictWord{133, 10, 425},
+ dictWord{137, 11, 761},
+ dictWord{132, 0, 260},
+ dictWord{133, 10, 971},
+ dictWord{5, 11, 20},
+ dictWord{6, 11, 298},
+ dictWord{7, 11, 659},
+ dictWord{7, 11, 1366},
+ dictWord{137, 11, 219},
+ dictWord{4, 0, 39},
+ dictWord{5, 0, 36},
+ dictWord{7, 0, 1843},
+ dictWord{8, 0, 407},
+ dictWord{11, 0, 144},
+ dictWord{140, 0, 523},
+ dictWord{4, 0, 510},
+ dictWord{10, 0, 587},
+ dictWord{139, 10, 752},
+ dictWord{7, 0, 29},
+ dictWord{7, 0, 66},
+ dictWord{7, 0, 1980},
+ dictWord{10, 0, 487},
+ dictWord{138, 0, 809},
+ dictWord{13, 0, 260},
+ dictWord{14, 0, 82},
+ dictWord{18, 0, 63},
+ dictWord{137, 10, 662},
+ dictWord{5, 10, 72},
+ dictWord{6, 10, 264},
+ dictWord{7, 10, 21},
+ dictWord{7, 10, 46},
+ dictWord{7, 10, 2013},
+ dictWord{8, 10, 215},
+ dictWord{8, 10, 513},
+ dictWord{10, 10, 266},
+ dictWord{139, 10, 22},
+ dictWord{134, 0, 570},
+ dictWord{6, 0, 565},
+ dictWord{7, 0, 1667},
+ dictWord{4, 11, 439},
+ dictWord{10, 10, 95},
+ dictWord{11, 10, 603},
+ dictWord{12, 11, 242},
+ dictWord{13, 10, 443},
+ dictWord{14, 10, 160},
+ dictWord{143, 10, 4},
+ dictWord{134, 0, 1464},
+ dictWord{134, 10, 431},
+ dictWord{9, 0, 372},
+ dictWord{15, 0, 2},
+ dictWord{19, 0, 10},
+ dictWord{19, 0, 18},
+ dictWord{5, 10, 874},
+ dictWord{6, 10, 1677},
+ dictWord{143, 10, 0},
+ dictWord{132, 0, 787},
+ dictWord{6, 0, 380},
+ dictWord{12, 0, 399},
+ dictWord{21, 0, 19},
+ dictWord{7, 10, 939},
+ dictWord{7, 10, 1172},
+ dictWord{7, 10, 1671},
+ dictWord{9, 10, 540},
+ dictWord{10, 10, 696},
+ dictWord{11, 10, 265},
+ dictWord{11, 10, 732},
+ dictWord{11, 10, 928},
+ dictWord{11, 10, 937},
+ dictWord{141, 10, 438},
+ dictWord{137, 0, 200},
+ dictWord{132, 11, 233},
+ dictWord{132, 0, 516},
+ dictWord{134, 11, 577},
+ dictWord{132, 0, 844},
+ dictWord{11, 0, 887},
+ dictWord{14, 0, 365},
+ dictWord{142, 0, 375},
+ dictWord{132, 11, 482},
+ dictWord{8, 0, 821},
+ dictWord{140, 0, 44},
+ dictWord{7, 0, 1655},
+ dictWord{136, 0, 305},
+ dictWord{5, 10, 682},
+ dictWord{135, 10, 1887},
+ dictWord{135, 11, 346},
+ dictWord{132, 10, 696},
+ dictWord{4, 0, 10},
+ dictWord{7, 0, 917},
+ dictWord{139, 0, 786},
+ dictWord{5, 11, 795},
+ dictWord{6, 11, 1741},
+ dictWord{8, 11, 417},
+ dictWord{137, 11, 782},
+ dictWord{4, 0, 1016},
+ dictWord{134, 0, 2031},
+ dictWord{5, 0, 684},
+ dictWord{4, 10, 726},
+ dictWord{133, 10, 630},
+ dictWord{6, 0, 1021},
+ dictWord{134, 0, 1480},
+ dictWord{8, 10, 802},
+ dictWord{136, 10, 838},
+ dictWord{134, 0, 27},
+ dictWord{134, 0, 395},
+ dictWord{135, 11, 622},
+ dictWord{7, 11, 625},
+ dictWord{135, 11, 1750},
+ dictWord{4, 11, 203},
+ dictWord{135, 11, 1936},
+ dictWord{6, 10, 118},
+ dictWord{7, 10, 215},
+ dictWord{7, 10, 1521},
+ dictWord{140, 10, 11},
+ dictWord{132, 0, 813},
+ dictWord{136, 0, 511},
+ dictWord{7, 10, 615},
+ dictWord{138, 10, 251},
+ dictWord{135, 10, 1044},
+ dictWord{145, 0, 56},
+ dictWord{133, 10, 225},
+ dictWord{6, 0, 342},
+ dictWord{6, 0, 496},
+ dictWord{8, 0, 275},
+ dictWord{137, 0, 206},
+ dictWord{4, 0, 909},
+ dictWord{133, 0, 940},
+ dictWord{132, 0, 891},
+ dictWord{7, 11, 311},
+ dictWord{9, 11, 308},
+ dictWord{140, 11, 255},
+ dictWord{4, 10, 370},
+ dictWord{5, 10, 756},
+ dictWord{135, 10, 1326},
+ dictWord{4, 0, 687},
+ dictWord{134, 0, 1596},
+ dictWord{134, 0, 1342},
+ dictWord{6, 10, 1662},
+ dictWord{7, 10, 48},
+ dictWord{8, 10, 771},
+ dictWord{10, 10, 116},
+ dictWord{13, 10, 104},
+ dictWord{14, 10, 105},
+ dictWord{14, 10, 184},
+ dictWord{15, 10, 168},
+ dictWord{19, 10, 92},
+ dictWord{148, 10, 68},
+ dictWord{138, 10, 209},
+ dictWord{4, 11, 400},
+ dictWord{5, 11, 267},
+ dictWord{135, 11, 232},
+ dictWord{151, 11, 12},
+ dictWord{6, 0, 41},
+ dictWord{141, 0, 160},
+ dictWord{141, 11, 314},
+ dictWord{134, 0, 1718},
+ dictWord{136, 0, 778},
+ dictWord{142, 11, 261},
+ dictWord{134, 0, 1610},
+ dictWord{133, 0, 115},
+ dictWord{132, 0, 294},
+ dictWord{14, 0, 314},
+ dictWord{132, 10, 120},
+ dictWord{132, 0, 983},
+ dictWord{5, 0, 193},
+ dictWord{140, 0, 178},
+ dictWord{138, 10, 429},
+ dictWord{5, 10, 820},
+ dictWord{135, 10, 931},
+ dictWord{6, 0, 994},
+ dictWord{6, 0, 1051},
+ dictWord{6, 0, 1439},
+ dictWord{7, 0, 174},
+ dictWord{133, 11, 732},
+ dictWord{4, 11, 100},
+ dictWord{7, 11, 679},
+ dictWord{8, 11, 313},
+ dictWord{138, 10, 199},
+ dictWord{6, 10, 151},
+ dictWord{6, 10, 1675},
+ dictWord{7, 10, 383},
+ dictWord{151, 10, 10},
+ dictWord{6, 0, 1796},
+ dictWord{8, 0, 848},
+ dictWord{8, 0, 867},
+ dictWord{8, 0, 907},
+ dictWord{10, 0, 855},
+ dictWord{140, 0, 703},
+ dictWord{140, 0, 221},
+ dictWord{4, 0, 122},
+ dictWord{5, 0, 796},
+ dictWord{5, 0, 952},
+ dictWord{6, 0, 1660},
+ dictWord{6, 0, 1671},
+ dictWord{8, 0, 567},
+ dictWord{9, 0, 687},
+ dictWord{9, 0, 742},
+ dictWord{10, 0, 686},
+ dictWord{11, 0, 682},
+ dictWord{11, 0, 909},
+ dictWord{140, 0, 281},
+ dictWord{5, 11, 362},
+ dictWord{5, 11, 443},
+ dictWord{6, 11, 318},
+ dictWord{7, 11, 1019},
+ dictWord{139, 11, 623},
+ dictWord{5, 11, 463},
+ dictWord{136, 11, 296},
+ dictWord{11, 0, 583},
+ dictWord{13, 0, 262},
+ dictWord{6, 10, 1624},
+ dictWord{12, 10, 422},
+ dictWord{142, 10, 360},
+ dictWord{5, 0, 179},
+ dictWord{7, 0, 1095},
+ dictWord{135, 0, 1213},
+ dictWord{4, 10, 43},
+ dictWord{4, 11, 454},
+ dictWord{5, 10, 344},
+ dictWord{133, 10, 357},
+ dictWord{4, 0, 66},
+ dictWord{7, 0, 722},
+ dictWord{135, 0, 904},
+ dictWord{134, 0, 773},
+ dictWord{7, 0, 352},
+ dictWord{133, 10, 888},
+ dictWord{5, 11, 48},
+ dictWord{5, 11, 404},
+ dictWord{6, 11, 557},
+ dictWord{7, 11, 458},
+ dictWord{8, 11, 597},
+ dictWord{10, 11, 455},
+ dictWord{10, 11, 606},
+ dictWord{11, 11, 49},
+ dictWord{11, 11, 548},
+ dictWord{12, 11, 476},
+ dictWord{13, 11, 18},
+ dictWord{141, 11, 450},
+ dictWord{134, 11, 418},
+ dictWord{132, 10, 711},
+ dictWord{5, 11, 442},
+ dictWord{135, 11, 1984},
+ dictWord{141, 0, 35},
+ dictWord{137, 0, 152},
+ dictWord{134, 0, 1197},
+ dictWord{135, 11, 1093},
+ dictWord{137, 11, 203},
+ dictWord{137, 10, 440},
+ dictWord{10, 0, 592},
+ dictWord{10, 0, 753},
+ dictWord{12, 0, 317},
+ dictWord{12, 0, 355},
+ dictWord{12, 0, 465},
+ dictWord{12, 0, 469},
+ dictWord{12, 0, 560},
+ dictWord{12, 0, 578},
+ dictWord{141, 0, 243},
+ dictWord{133, 0, 564},
+ dictWord{134, 0, 797},
+ dictWord{5, 10, 958},
+ dictWord{133, 10, 987},
+ dictWord{5, 11, 55},
+ dictWord{7, 11, 376},
+ dictWord{140, 11, 161},
+ dictWord{133, 11, 450},
+ dictWord{134, 0, 556},
+ dictWord{134, 0, 819},
+ dictWord{11, 10, 276},
+ dictWord{142, 10, 293},
+ dictWord{7, 0, 544},
+ dictWord{138, 0, 61},
+ dictWord{8, 0, 719},
+ dictWord{4, 10, 65},
+ dictWord{5, 10, 479},
+ dictWord{5, 10, 1004},
+ dictWord{7, 10, 1913},
+ dictWord{8, 10, 317},
+ dictWord{9, 10, 302},
+ dictWord{10, 10, 612},
+ dictWord{141, 10, 22},
+ dictWord{4, 0, 5},
+ dictWord{5, 0, 498},
+ dictWord{8, 0, 637},
+ dictWord{9, 0, 521},
+ dictWord{4, 11, 213},
+ dictWord{4, 10, 261},
+ dictWord{7, 11, 223},
+ dictWord{7, 10, 510},
+ dictWord{136, 11, 80},
+ dictWord{5, 0, 927},
+ dictWord{7, 0, 101},
+ dictWord{4, 10, 291},
+ dictWord{7, 11, 381},
+ dictWord{7, 11, 806},
+ dictWord{7, 11, 820},
+ dictWord{8, 11, 354},
+ dictWord{8, 11, 437},
+ dictWord{8, 11, 787},
+ dictWord{9, 10, 515},
+ dictWord{9, 11, 657},
+ dictWord{10, 11, 58},
+ dictWord{10, 11, 339},
+ dictWord{10, 11, 749},
+ dictWord{11, 11, 914},
+ dictWord{12, 10, 152},
+ dictWord{12, 11, 162},
+ dictWord{12, 10, 443},
+ dictWord{13, 11, 75},
+ dictWord{13, 10, 392},
+ dictWord{14, 11, 106},
+ dictWord{14, 11, 198},
+ dictWord{14, 11, 320},
+ dictWord{14, 10, 357},
+ dictWord{14, 11, 413},
+ dictWord{146, 11, 43},
+ dictWord{6, 0, 1153},
+ dictWord{7, 0, 1441},
+ dictWord{136, 11, 747},
+ dictWord{4, 0, 893},
+ dictWord{5, 0, 780},
+ dictWord{133, 0, 893},
+ dictWord{138, 11, 654},
+ dictWord{133, 11, 692},
+ dictWord{133, 0, 238},
+ dictWord{134, 11, 191},
+ dictWord{4, 10, 130},
+ dictWord{135, 10, 843},
+ dictWord{6, 0, 1296},
+ dictWord{5, 10, 42},
+ dictWord{5, 10, 879},
+ dictWord{7, 10, 245},
+ dictWord{7, 10, 324},
+ dictWord{7, 10, 1532},
+ dictWord{11, 10, 463},
+ dictWord{11, 10, 472},
+ dictWord{13, 10, 363},
+ dictWord{144, 10, 52},
+ dictWord{134, 0, 1729},
+ dictWord{6, 0, 1999},
+ dictWord{136, 0, 969},
+ dictWord{4, 10, 134},
+ dictWord{133, 10, 372},
+ dictWord{4, 0, 60},
+ dictWord{7, 0, 941},
+ dictWord{7, 0, 1800},
+ dictWord{8, 0, 314},
+ dictWord{9, 0, 700},
+ dictWord{139, 0, 487},
+ dictWord{134, 0, 1144},
+ dictWord{6, 11, 162},
+ dictWord{7, 11, 1960},
+ dictWord{136, 11, 831},
+ dictWord{132, 11, 706},
+ dictWord{135, 0, 1147},
+ dictWord{138, 11, 426},
+ dictWord{138, 11, 89},
+ dictWord{7, 0, 1853},
+ dictWord{138, 0, 437},
+ dictWord{136, 0, 419},
+ dictWord{135, 10, 1634},
+ dictWord{133, 0, 828},
+ dictWord{5, 0, 806},
+ dictWord{7, 0, 176},
+ dictWord{7, 0, 178},
+ dictWord{7, 0, 1240},
+ dictWord{7, 0, 1976},
+ dictWord{132, 10, 644},
+ dictWord{135, 11, 1877},
+ dictWord{5, 11, 420},
+ dictWord{135, 11, 1449},
+ dictWord{4, 0, 51},
+ dictWord{5, 0, 39},
+ dictWord{6, 0, 4},
+ dictWord{7, 0, 591},
+ dictWord{7, 0, 849},
+ dictWord{7, 0, 951},
+ dictWord{7, 0, 1613},
+ dictWord{7, 0, 1760},
+ dictWord{7, 0, 1988},
+ dictWord{9, 0, 434},
+ dictWord{10, 0, 754},
+ dictWord{11, 0, 25},
+ dictWord{139, 0, 37},
+ dictWord{10, 11, 57},
+ dictWord{138, 11, 277},
+ dictWord{135, 10, 540},
+ dictWord{132, 11, 204},
+ dictWord{135, 0, 159},
+ dictWord{139, 11, 231},
+ dictWord{133, 0, 902},
+ dictWord{7, 0, 928},
+ dictWord{7, 11, 366},
+ dictWord{9, 11, 287},
+ dictWord{12, 11, 199},
+ dictWord{12, 11, 556},
+ dictWord{140, 11, 577},
+ dictWord{6, 10, 623},
+ dictWord{136, 10, 789},
+ dictWord{4, 10, 908},
+ dictWord{5, 10, 359},
+ dictWord{5, 10, 508},
+ dictWord{6, 10, 1723},
+ dictWord{7, 10, 343},
+ dictWord{7, 10, 1996},
+ dictWord{135, 10, 2026},
+ dictWord{134, 0, 270},
+ dictWord{4, 10, 341},
+ dictWord{135, 10, 480},
+ dictWord{5, 11, 356},
+ dictWord{135, 11, 224},
+ dictWord{11, 11, 588},
+ dictWord{11, 11, 864},
+ dictWord{11, 11, 968},
+ dictWord{143, 11, 160},
+ dictWord{132, 0, 556},
+ dictWord{137, 0, 801},
+ dictWord{132, 0, 416},
+ dictWord{142, 0, 372},
+ dictWord{5, 0, 152},
+ dictWord{5, 0, 197},
+ dictWord{7, 0, 340},
+ dictWord{7, 0, 867},
+ dictWord{10, 0, 548},
+ dictWord{10, 0, 581},
+ dictWord{11, 0, 6},
+ dictWord{12, 0, 3},
+ dictWord{12, 0, 19},
+ dictWord{14, 0, 110},
+ dictWord{142, 0, 289},
+ dictWord{139, 0, 369},
+ dictWord{7, 11, 630},
+ dictWord{9, 11, 567},
+ dictWord{11, 11, 150},
+ dictWord{11, 11, 444},
+ dictWord{141, 11, 119},
+ dictWord{134, 11, 539},
+ dictWord{7, 10, 1995},
+ dictWord{8, 10, 299},
+ dictWord{11, 10, 890},
+ dictWord{140, 10, 674},
+ dictWord{7, 0, 34},
+ dictWord{7, 0, 190},
+ dictWord{8, 0, 28},
+ dictWord{8, 0, 141},
+ dictWord{8, 0, 444},
+ dictWord{8, 0, 811},
+ dictWord{9, 0, 468},
+ dictWord{11, 0, 334},
+ dictWord{12, 0, 24},
+ dictWord{12, 0, 386},
+ dictWord{140, 0, 576},
+ dictWord{133, 0, 757},
+ dictWord{7, 0, 1553},
+ dictWord{136, 0, 898},
+ dictWord{133, 0, 721},
+ dictWord{136, 0, 1012},
+ dictWord{4, 0, 789},
+ dictWord{5, 0, 647},
+ dictWord{135, 0, 1102},
+ dictWord{132, 0, 898},
+ dictWord{10, 0, 183},
+ dictWord{4, 10, 238},
+ dictWord{5, 10, 503},
+ dictWord{6, 10, 179},
+ dictWord{7, 10, 2003},
+ dictWord{8, 10, 381},
+ dictWord{8, 10, 473},
+ dictWord{9, 10, 149},
+ dictWord{10, 10, 788},
+ dictWord{15, 10, 45},
+ dictWord{15, 10, 86},
+ dictWord{20, 10, 110},
+ dictWord{150, 10, 57},
+ dictWord{9, 0, 136},
+ dictWord{19, 0, 107},
+ dictWord{4, 10, 121},
+ dictWord{5, 10, 156},
+ dictWord{5, 10, 349},
+ dictWord{10, 10, 605},
+ dictWord{142, 10, 342},
+ dictWord{4, 11, 235},
+ dictWord{135, 11, 255},
+ dictWord{4, 11, 194},
+ dictWord{5, 11, 584},
+ dictWord{6, 11, 384},
+ dictWord{7, 11, 583},
+ dictWord{10, 11, 761},
+ dictWord{11, 11, 760},
+ dictWord{139, 11, 851},
+ dictWord{6, 10, 80},
+ dictWord{6, 10, 1694},
+ dictWord{7, 10, 173},
+ dictWord{7, 10, 1974},
+ dictWord{9, 10, 547},
+ dictWord{10, 10, 730},
+ dictWord{14, 10, 18},
+ dictWord{150, 10, 39},
+ dictWord{4, 10, 923},
+ dictWord{134, 10, 1711},
+ dictWord{5, 0, 277},
+ dictWord{141, 0, 247},
+ dictWord{132, 0, 435},
+ dictWord{133, 11, 562},
+ dictWord{134, 0, 1311},
+ dictWord{5, 11, 191},
+ dictWord{137, 11, 271},
+ dictWord{132, 10, 595},
+ dictWord{7, 11, 1537},
+ dictWord{14, 11, 96},
+ dictWord{143, 11, 73},
+ dictWord{5, 0, 437},
+ dictWord{7, 0, 502},
+ dictWord{7, 0, 519},
+ dictWord{7, 0, 1122},
+ dictWord{7, 0, 1751},
+ dictWord{14, 0, 211},
+ dictWord{6, 10, 459},
+ dictWord{7, 10, 1753},
+ dictWord{7, 10, 1805},
+ dictWord{8, 10, 658},
+ dictWord{9, 10, 1},
+ dictWord{11, 10, 959},
+ dictWord{141, 10, 446},
+ dictWord{6, 0, 814},
+ dictWord{4, 11, 470},
+ dictWord{5, 11, 473},
+ dictWord{6, 11, 153},
+ dictWord{7, 11, 1503},
+ dictWord{7, 11, 1923},
+ dictWord{10, 11, 701},
+ dictWord{11, 11, 132},
+ dictWord{11, 11, 168},
+ dictWord{11, 11, 227},
+ dictWord{11, 11, 320},
+ dictWord{11, 11, 436},
+ dictWord{11, 11, 525},
+ dictWord{11, 11, 855},
+ dictWord{12, 11, 41},
+ dictWord{12, 11, 286},
+ dictWord{13, 11, 103},
+ dictWord{13, 11, 284},
+ dictWord{14, 11, 255},
+ dictWord{14, 11, 262},
+ dictWord{15, 11, 117},
+ dictWord{143, 11, 127},
+ dictWord{5, 0, 265},
+ dictWord{6, 0, 212},
+ dictWord{135, 0, 28},
+ dictWord{138, 0, 750},
+ dictWord{133, 11, 327},
+ dictWord{6, 11, 552},
+ dictWord{7, 11, 1754},
+ dictWord{137, 11, 604},
+ dictWord{134, 0, 2012},
+ dictWord{132, 0, 702},
+ dictWord{5, 11, 80},
+ dictWord{6, 11, 405},
+ dictWord{7, 11, 403},
+ dictWord{7, 11, 1502},
+ dictWord{7, 11, 1626},
+ dictWord{8, 11, 456},
+ dictWord{9, 11, 487},
+ dictWord{9, 11, 853},
+ dictWord{9, 11, 889},
+ dictWord{10, 11, 309},
+ dictWord{11, 11, 721},
+ dictWord{11, 11, 994},
+ dictWord{12, 11, 430},
+ dictWord{141, 11, 165},
+ dictWord{5, 0, 808},
+ dictWord{135, 0, 2045},
+ dictWord{5, 0, 166},
+ dictWord{8, 0, 739},
+ dictWord{140, 0, 511},
+ dictWord{134, 10, 490},
+ dictWord{4, 11, 453},
+ dictWord{5, 11, 887},
+ dictWord{6, 11, 535},
+ dictWord{8, 11, 6},
+ dictWord{136, 11, 543},
+ dictWord{4, 0, 119},
+ dictWord{5, 0, 170},
+ dictWord{5, 0, 447},
+ dictWord{7, 0, 1708},
+ dictWord{7, 0, 1889},
+ dictWord{9, 0, 357},
+ dictWord{9, 0, 719},
+ dictWord{12, 0, 486},
+ dictWord{140, 0, 596},
+ dictWord{137, 0, 500},
+ dictWord{7, 10, 250},
+ dictWord{136, 10, 507},
+ dictWord{132, 10, 158},
+ dictWord{6, 0, 809},
+ dictWord{134, 0, 1500},
+ dictWord{9, 0, 327},
+ dictWord{11, 0, 350},
+ dictWord{11, 0, 831},
+ dictWord{13, 0, 352},
+ dictWord{4, 10, 140},
+ dictWord{7, 10, 362},
+ dictWord{8, 10, 209},
+ dictWord{9, 10, 10},
+ dictWord{9, 10, 503},
+ dictWord{9, 10, 614},
+ dictWord{10, 10, 689},
+ dictWord{11, 10, 327},
+ dictWord{11, 10, 725},
+ dictWord{12, 10, 252},
+ dictWord{12, 10, 583},
+ dictWord{13, 10, 192},
+ dictWord{14, 10, 269},
+ dictWord{14, 10, 356},
+ dictWord{148, 10, 50},
+ dictWord{135, 11, 741},
+ dictWord{4, 0, 450},
+ dictWord{7, 0, 1158},
+ dictWord{19, 10, 1},
+ dictWord{19, 10, 26},
+ dictWord{150, 10, 9},
+ dictWord{6, 0, 597},
+ dictWord{135, 0, 1318},
+ dictWord{134, 0, 1602},
+ dictWord{6, 10, 228},
+ dictWord{7, 10, 1341},
+ dictWord{9, 10, 408},
+ dictWord{138, 10, 343},
+ dictWord{7, 0, 1375},
+ dictWord{7, 0, 1466},
+ dictWord{138, 0, 331},
+ dictWord{132, 0, 754},
+ dictWord{132, 10, 557},
+ dictWord{5, 11, 101},
+ dictWord{6, 11, 88},
+ dictWord{6, 11, 543},
+ dictWord{7, 11, 1677},
+ dictWord{9, 11, 100},
+ dictWord{10, 11, 677},
+ dictWord{14, 11, 169},
+ dictWord{14, 11, 302},
+ dictWord{14, 11, 313},
+ dictWord{15, 11, 48},
+ dictWord{143, 11, 84},
+ dictWord{134, 0, 1368},
+ dictWord{4, 11, 310},
+ dictWord{9, 11, 795},
+ dictWord{10, 11, 733},
+ dictWord{11, 11, 451},
+ dictWord{12, 11, 249},
+ dictWord{14, 11, 115},
+ dictWord{14, 11, 286},
+ dictWord{143, 11, 100},
+ dictWord{132, 10, 548},
+ dictWord{10, 0, 557},
+ dictWord{7, 10, 197},
+ dictWord{8, 10, 142},
+ dictWord{8, 10, 325},
+ dictWord{9, 10, 150},
+ dictWord{9, 10, 596},
+ dictWord{10, 10, 353},
+ dictWord{11, 10, 74},
+ dictWord{11, 10, 315},
+ dictWord{12, 10, 662},
+ dictWord{12, 10, 681},
+ dictWord{14, 10, 423},
+ dictWord{143, 10, 141},
+ dictWord{133, 11, 587},
+ dictWord{5, 0, 850},
+ dictWord{136, 0, 799},
+ dictWord{10, 0, 908},
+ dictWord{12, 0, 701},
+ dictWord{12, 0, 757},
+ dictWord{142, 0, 466},
+ dictWord{4, 0, 62},
+ dictWord{5, 0, 275},
+ dictWord{18, 0, 19},
+ dictWord{6, 10, 399},
+ dictWord{6, 10, 579},
+ dictWord{7, 10, 692},
+ dictWord{7, 10, 846},
+ dictWord{7, 10, 1015},
+ dictWord{7, 10, 1799},
+ dictWord{8, 10, 403},
+ dictWord{9, 10, 394},
+ dictWord{10, 10, 133},
+ dictWord{12, 10, 4},
+ dictWord{12, 10, 297},
+ dictWord{12, 10, 452},
+ dictWord{16, 10, 81},
+ dictWord{18, 10, 25},
+ dictWord{21, 10, 14},
+ dictWord{22, 10, 12},
+ dictWord{151, 10, 18},
+ dictWord{12, 0, 459},
+ dictWord{7, 10, 1546},
+ dictWord{11, 10, 299},
+ dictWord{142, 10, 407},
+ dictWord{132, 10, 177},
+ dictWord{132, 11, 498},
+ dictWord{7, 11, 217},
+ dictWord{8, 11, 140},
+ dictWord{138, 11, 610},
+ dictWord{5, 10, 411},
+ dictWord{135, 10, 653},
+ dictWord{134, 0, 1802},
+ dictWord{7, 10, 439},
+ dictWord{10, 10, 727},
+ dictWord{11, 10, 260},
+ dictWord{139, 10, 684},
+ dictWord{133, 11, 905},
+ dictWord{11, 11, 580},
+ dictWord{142, 11, 201},
+ dictWord{134, 0, 1397},
+ dictWord{5, 10, 208},
+ dictWord{7, 10, 753},
+ dictWord{135, 10, 1528},
+ dictWord{7, 0, 238},
+ dictWord{7, 0, 2033},
+ dictWord{8, 0, 120},
+ dictWord{8, 0, 188},
+ dictWord{8, 0, 659},
+ dictWord{9, 0, 598},
+ dictWord{10, 0, 466},
+ dictWord{12, 0, 342},
+ dictWord{12, 0, 588},
+ dictWord{13, 0, 503},
+ dictWord{14, 0, 246},
+ dictWord{143, 0, 92},
+ dictWord{135, 11, 1041},
+ dictWord{4, 11, 456},
+ dictWord{7, 11, 105},
+ dictWord{7, 11, 358},
+ dictWord{7, 11, 1637},
+ dictWord{8, 11, 643},
+ dictWord{139, 11, 483},
+ dictWord{6, 0, 1318},
+ dictWord{134, 0, 1324},
+ dictWord{4, 0, 201},
+ dictWord{7, 0, 1744},
+ dictWord{8, 0, 602},
+ dictWord{11, 0, 247},
+ dictWord{11, 0, 826},
+ dictWord{17, 0, 65},
+ dictWord{133, 10, 242},
+ dictWord{8, 0, 164},
+ dictWord{146, 0, 62},
+ dictWord{133, 10, 953},
+ dictWord{139, 10, 802},
+ dictWord{133, 0, 615},
+ dictWord{7, 11, 1566},
+ dictWord{8, 11, 269},
+ dictWord{9, 11, 212},
+ dictWord{9, 11, 718},
+ dictWord{14, 11, 15},
+ dictWord{14, 11, 132},
+ dictWord{142, 11, 227},
+ dictWord{133, 10, 290},
+ dictWord{132, 10, 380},
+ dictWord{5, 10, 52},
+ dictWord{7, 10, 277},
+ dictWord{9, 10, 368},
+ dictWord{139, 10, 791},
+ dictWord{135, 0, 1243},
+ dictWord{133, 11, 539},
+ dictWord{11, 11, 919},
+ dictWord{141, 11, 409},
+ dictWord{136, 0, 968},
+ dictWord{133, 11, 470},
+ dictWord{134, 0, 882},
+ dictWord{132, 0, 907},
+ dictWord{5, 0, 100},
+ dictWord{10, 0, 329},
+ dictWord{12, 0, 416},
+ dictWord{149, 0, 29},
+ dictWord{10, 10, 138},
+ dictWord{139, 10, 476},
+ dictWord{5, 10, 725},
+ dictWord{5, 10, 727},
+ dictWord{6, 11, 91},
+ dictWord{7, 11, 435},
+ dictWord{135, 10, 1811},
+ dictWord{4, 11, 16},
+ dictWord{5, 11, 316},
+ dictWord{5, 11, 842},
+ dictWord{6, 11, 370},
+ dictWord{6, 11, 1778},
+ dictWord{8, 11, 166},
+ dictWord{11, 11, 812},
+ dictWord{12, 11, 206},
+ dictWord{12, 11, 351},
+ dictWord{14, 11, 418},
+ dictWord{16, 11, 15},
+ dictWord{16, 11, 34},
+ dictWord{18, 11, 3},
+ dictWord{19, 11, 3},
+ dictWord{19, 11, 7},
+ dictWord{20, 11, 4},
+ dictWord{149, 11, 21},
+ dictWord{132, 0, 176},
+ dictWord{5, 0, 636},
+ dictWord{5, 0, 998},
+ dictWord{7, 0, 9},
+ dictWord{7, 0, 1508},
+ dictWord{8, 0, 26},
+ dictWord{9, 0, 317},
+ dictWord{9, 0, 358},
+ dictWord{10, 0, 210},
+ dictWord{10, 0, 292},
+ dictWord{10, 0, 533},
+ dictWord{11, 0, 555},
+ dictWord{12, 0, 526},
+ dictWord{12, 0, 607},
+ dictWord{13, 0, 263},
+ dictWord{13, 0, 459},
+ dictWord{142, 0, 271},
+ dictWord{6, 0, 256},
+ dictWord{8, 0, 265},
+ dictWord{4, 10, 38},
+ dictWord{7, 10, 307},
+ dictWord{7, 10, 999},
+ dictWord{7, 10, 1481},
+ dictWord{7, 10, 1732},
+ dictWord{7, 10, 1738},
+ dictWord{9, 10, 414},
+ dictWord{11, 10, 316},
+ dictWord{12, 10, 52},
+ dictWord{13, 10, 420},
+ dictWord{147, 10, 100},
+ dictWord{135, 10, 1296},
+ dictWord{4, 11, 611},
+ dictWord{133, 11, 606},
+ dictWord{4, 0, 643},
+ dictWord{142, 11, 21},
+ dictWord{133, 11, 715},
+ dictWord{133, 10, 723},
+ dictWord{6, 0, 610},
+ dictWord{135, 11, 597},
+ dictWord{10, 0, 127},
+ dictWord{141, 0, 27},
+ dictWord{6, 0, 1995},
+ dictWord{6, 0, 2001},
+ dictWord{8, 0, 119},
+ dictWord{136, 0, 973},
+ dictWord{4, 11, 149},
+ dictWord{138, 11, 368},
+ dictWord{12, 0, 522},
+ dictWord{4, 11, 154},
+ dictWord{5, 10, 109},
+ dictWord{6, 10, 1784},
+ dictWord{7, 11, 1134},
+ dictWord{7, 10, 1895},
+ dictWord{8, 11, 105},
+ dictWord{12, 10, 296},
+ dictWord{140, 10, 302},
+ dictWord{4, 11, 31},
+ dictWord{6, 11, 429},
+ dictWord{7, 11, 962},
+ dictWord{9, 11, 458},
+ dictWord{139, 11, 691},
+ dictWord{10, 0, 553},
+ dictWord{11, 0, 876},
+ dictWord{13, 0, 193},
+ dictWord{13, 0, 423},
+ dictWord{14, 0, 166},
+ dictWord{19, 0, 84},
+ dictWord{4, 11, 312},
+ dictWord{5, 10, 216},
+ dictWord{7, 10, 1879},
+ dictWord{9, 10, 141},
+ dictWord{9, 10, 270},
+ dictWord{9, 10, 679},
+ dictWord{10, 10, 159},
+ dictWord{11, 10, 197},
+ dictWord{12, 10, 538},
+ dictWord{12, 10, 559},
+ dictWord{14, 10, 144},
+ dictWord{14, 10, 167},
+ dictWord{143, 10, 67},
+ dictWord{134, 0, 1582},
+ dictWord{7, 0, 1578},
+ dictWord{135, 11, 1578},
+ dictWord{137, 10, 81},
+ dictWord{132, 11, 236},
+ dictWord{134, 10, 391},
+ dictWord{134, 0, 795},
+ dictWord{7, 10, 322},
+ dictWord{136, 10, 249},
+ dictWord{5, 11, 836},
+ dictWord{5, 11, 857},
+ dictWord{6, 11, 1680},
+ dictWord{7, 11, 59},
+ dictWord{147, 11, 53},
+ dictWord{135, 0, 432},
+ dictWord{10, 11, 68},
+ dictWord{139, 11, 494},
+ dictWord{4, 11, 81},
+ dictWord{139, 11, 867},
+ dictWord{7, 0, 126},
+ dictWord{136, 0, 84},
+ dictWord{142, 11, 280},
+ dictWord{5, 11, 282},
+ dictWord{8, 11, 650},
+ dictWord{9, 11, 295},
+ dictWord{9, 11, 907},
+ dictWord{138, 11, 443},
+ dictWord{136, 0, 790},
+ dictWord{5, 10, 632},
+ dictWord{138, 10, 526},
+ dictWord{6, 0, 64},
+ dictWord{12, 0, 377},
+ dictWord{13, 0, 309},
+ dictWord{14, 0, 141},
+ dictWord{14, 0, 429},
+ dictWord{14, 11, 141},
+ dictWord{142, 11, 429},
+ dictWord{134, 0, 1529},
+ dictWord{6, 0, 321},
+ dictWord{7, 0, 1857},
+ dictWord{9, 0, 530},
+ dictWord{19, 0, 99},
+ dictWord{7, 10, 948},
+ dictWord{7, 10, 1042},
+ dictWord{8, 10, 235},
+ dictWord{8, 10, 461},
+ dictWord{9, 10, 453},
+ dictWord{10, 10, 354},
+ dictWord{145, 10, 77},
+ dictWord{7, 0, 1104},
+ dictWord{11, 0, 269},
+ dictWord{11, 0, 539},
+ dictWord{11, 0, 627},
+ dictWord{11, 0, 706},
+ dictWord{11, 0, 975},
+ dictWord{12, 0, 248},
+ dictWord{12, 0, 434},
+ dictWord{12, 0, 600},
+ dictWord{12, 0, 622},
+ dictWord{13, 0, 297},
+ dictWord{13, 0, 485},
+ dictWord{14, 0, 69},
+ dictWord{14, 0, 409},
+ dictWord{143, 0, 108},
+ dictWord{4, 10, 362},
+ dictWord{7, 10, 52},
+ dictWord{7, 10, 303},
+ dictWord{10, 11, 70},
+ dictWord{12, 11, 26},
+ dictWord{14, 11, 17},
+ dictWord{14, 11, 178},
+ dictWord{15, 11, 34},
+ dictWord{149, 11, 12},
+ dictWord{11, 0, 977},
+ dictWord{141, 0, 507},
+ dictWord{9, 0, 34},
+ dictWord{139, 0, 484},
+ dictWord{5, 10, 196},
+ dictWord{6, 10, 486},
+ dictWord{7, 10, 212},
+ dictWord{8, 10, 309},
+ dictWord{136, 10, 346},
+ dictWord{6, 0, 1700},
+ dictWord{7, 0, 26},
+ dictWord{7, 0, 293},
+ dictWord{7, 0, 382},
+ dictWord{7, 0, 1026},
+ dictWord{7, 0, 1087},
+ dictWord{7, 0, 2027},
+ dictWord{8, 0, 24},
+ dictWord{8, 0, 114},
+ dictWord{8, 0, 252},
+ dictWord{8, 0, 727},
+ dictWord{8, 0, 729},
+ dictWord{9, 0, 30},
+ dictWord{9, 0, 199},
+ dictWord{9, 0, 231},
+ dictWord{9, 0, 251},
+ dictWord{9, 0, 334},
+ dictWord{9, 0, 361},
+ dictWord{9, 0, 712},
+ dictWord{10, 0, 55},
+ dictWord{10, 0, 60},
+ dictWord{10, 0, 232},
+ dictWord{10, 0, 332},
+ dictWord{10, 0, 384},
+ dictWord{10, 0, 396},
+ dictWord{10, 0, 504},
+ dictWord{10, 0, 542},
+ dictWord{10, 0, 652},
+ dictWord{11, 0, 20},
+ dictWord{11, 0, 48},
+ dictWord{11, 0, 207},
+ dictWord{11, 0, 291},
+ dictWord{11, 0, 298},
+ dictWord{11, 0, 342},
+ dictWord{11, 0, 365},
+ dictWord{11, 0, 394},
+ dictWord{11, 0, 620},
+ dictWord{11, 0, 705},
+ dictWord{11, 0, 1017},
+ dictWord{12, 0, 123},
+ dictWord{12, 0, 340},
+ dictWord{12, 0, 406},
+ dictWord{12, 0, 643},
+ dictWord{13, 0, 61},
+ dictWord{13, 0, 269},
+ dictWord{13, 0, 311},
+ dictWord{13, 0, 319},
+ dictWord{13, 0, 486},
+ dictWord{14, 0, 234},
+ dictWord{15, 0, 62},
+ dictWord{15, 0, 85},
+ dictWord{16, 0, 71},
+ dictWord{18, 0, 119},
+ dictWord{20, 0, 105},
+ dictWord{135, 10, 1912},
+ dictWord{4, 11, 71},
+ dictWord{5, 11, 376},
+ dictWord{7, 11, 119},
+ dictWord{138, 11, 665},
+ dictWord{10, 0, 918},
+ dictWord{10, 0, 926},
+ dictWord{4, 10, 686},
+ dictWord{136, 11, 55},
+ dictWord{138, 10, 625},
+ dictWord{136, 10, 706},
+ dictWord{132, 11, 479},
+ dictWord{4, 10, 30},
+ dictWord{133, 10, 43},
+ dictWord{6, 0, 379},
+ dictWord{7, 0, 270},
+ dictWord{8, 0, 176},
+ dictWord{8, 0, 183},
+ dictWord{9, 0, 432},
+ dictWord{9, 0, 661},
+ dictWord{12, 0, 247},
+ dictWord{12, 0, 617},
+ dictWord{18, 0, 125},
+ dictWord{7, 11, 607},
+ dictWord{8, 11, 99},
+ dictWord{152, 11, 4},
+ dictWord{5, 0, 792},
+ dictWord{133, 0, 900},
+ dictWord{4, 11, 612},
+ dictWord{133, 11, 561},
+ dictWord{4, 11, 41},
+ dictWord{4, 10, 220},
+ dictWord{5, 11, 74},
+ dictWord{7, 10, 1535},
+ dictWord{7, 11, 1627},
+ dictWord{11, 11, 871},
+ dictWord{140, 11, 619},
+ dictWord{135, 0, 1920},
+ dictWord{7, 11, 94},
+ dictWord{11, 11, 329},
+ dictWord{11, 11, 965},
+ dictWord{12, 11, 241},
+ dictWord{14, 11, 354},
+ dictWord{15, 11, 22},
+ dictWord{148, 11, 63},
+ dictWord{9, 11, 209},
+ dictWord{137, 11, 300},
+ dictWord{134, 0, 771},
+ dictWord{135, 0, 1979},
+ dictWord{4, 0, 901},
+ dictWord{133, 0, 776},
+ dictWord{142, 0, 254},
+ dictWord{133, 11, 98},
+ dictWord{9, 11, 16},
+ dictWord{141, 11, 386},
+ dictWord{133, 11, 984},
+ dictWord{4, 11, 182},
+ dictWord{6, 11, 205},
+ dictWord{135, 11, 220},
+ dictWord{7, 10, 1725},
+ dictWord{7, 10, 1774},
+ dictWord{138, 10, 393},
+ dictWord{5, 10, 263},
+ dictWord{134, 10, 414},
+ dictWord{4, 11, 42},
+ dictWord{9, 11, 205},
+ dictWord{9, 11, 786},
+ dictWord{138, 11, 659},
+ dictWord{14, 0, 140},
+ dictWord{148, 0, 41},
+ dictWord{8, 0, 440},
+ dictWord{10, 0, 359},
+ dictWord{6, 10, 178},
+ dictWord{6, 11, 289},
+ dictWord{6, 10, 1750},
+ dictWord{7, 11, 1670},
+ dictWord{9, 10, 690},
+ dictWord{10, 10, 155},
+ dictWord{10, 10, 373},
+ dictWord{11, 10, 698},
+ dictWord{12, 11, 57},
+ dictWord{13, 10, 155},
+ dictWord{20, 10, 93},
+ dictWord{151, 11, 4},
+ dictWord{4, 0, 37},
+ dictWord{5, 0, 334},
+ dictWord{7, 0, 1253},
+ dictWord{151, 11, 25},
+ dictWord{4, 0, 508},
+ dictWord{4, 11, 635},
+ dictWord{5, 10, 97},
+ dictWord{137, 10, 393},
+ dictWord{139, 11, 533},
+ dictWord{4, 0, 640},
+ dictWord{133, 0, 513},
+ dictWord{134, 10, 1639},
+ dictWord{132, 11, 371},
+ dictWord{4, 11, 272},
+ dictWord{7, 11, 836},
+ dictWord{7, 11, 1651},
+ dictWord{145, 11, 89},
+ dictWord{5, 11, 825},
+ dictWord{6, 11, 444},
+ dictWord{6, 11, 1640},
+ dictWord{136, 11, 308},
+ dictWord{4, 10, 191},
+ dictWord{7, 10, 934},
+ dictWord{8, 10, 647},
+ dictWord{145, 10, 97},
+ dictWord{12, 0, 246},
+ dictWord{15, 0, 162},
+ dictWord{19, 0, 64},
+ dictWord{20, 0, 8},
+ dictWord{20, 0, 95},
+ dictWord{22, 0, 24},
+ dictWord{152, 0, 17},
+ dictWord{4, 0, 533},
+ dictWord{5, 10, 165},
+ dictWord{9, 10, 346},
+ dictWord{138, 10, 655},
+ dictWord{5, 11, 737},
+ dictWord{139, 10, 885},
+ dictWord{133, 10, 877},
+ dictWord{8, 10, 128},
+ dictWord{139, 10, 179},
+ dictWord{137, 11, 307},
+ dictWord{140, 0, 752},
+ dictWord{133, 0, 920},
+ dictWord{135, 0, 1048},
+ dictWord{5, 0, 153},
+ dictWord{6, 0, 580},
+ dictWord{6, 10, 1663},
+ dictWord{7, 10, 132},
+ dictWord{7, 10, 1154},
+ dictWord{7, 10, 1415},
+ dictWord{7, 10, 1507},
+ dictWord{12, 10, 493},
+ dictWord{15, 10, 105},
+ dictWord{151, 10, 15},
+ dictWord{5, 10, 459},
+ dictWord{7, 10, 1073},
+ dictWord{8, 10, 241},
+ dictWord{136, 10, 334},
+ dictWord{138, 0, 391},
+ dictWord{135, 0, 1952},
+ dictWord{133, 11, 525},
+ dictWord{8, 11, 641},
+ dictWord{11, 11, 388},
+ dictWord{140, 11, 580},
+ dictWord{142, 0, 126},
+ dictWord{134, 0, 640},
+ dictWord{132, 0, 483},
+ dictWord{7, 0, 1616},
+ dictWord{9, 0, 69},
+ dictWord{6, 10, 324},
+ dictWord{6, 10, 520},
+ dictWord{7, 10, 338},
+ dictWord{7, 10, 1729},
+ dictWord{8, 10, 228},
+ dictWord{139, 10, 750},
+ dictWord{5, 11, 493},
+ dictWord{134, 11, 528},
+ dictWord{135, 0, 734},
+ dictWord{4, 11, 174},
+ dictWord{135, 11, 911},
+ dictWord{138, 0, 480},
+ dictWord{9, 0, 495},
+ dictWord{146, 0, 104},
+ dictWord{135, 10, 705},
+ dictWord{9, 0, 472},
+ dictWord{4, 10, 73},
+ dictWord{6, 10, 612},
+ dictWord{7, 10, 927},
+ dictWord{7, 10, 1330},
+ dictWord{7, 10, 1822},
+ dictWord{8, 10, 217},
+ dictWord{9, 10, 765},
+ dictWord{9, 10, 766},
+ dictWord{10, 10, 408},
+ dictWord{11, 10, 51},
+ dictWord{11, 10, 793},
+ dictWord{12, 10, 266},
+ dictWord{15, 10, 158},
+ dictWord{20, 10, 89},
+ dictWord{150, 10, 32},
+ dictWord{7, 11, 548},
+ dictWord{137, 11, 58},
+ dictWord{4, 11, 32},
+ dictWord{5, 11, 215},
+ dictWord{6, 11, 269},
+ dictWord{7, 11, 1782},
+ dictWord{7, 11, 1892},
+ dictWord{10, 11, 16},
+ dictWord{11, 11, 822},
+ dictWord{11, 11, 954},
+ dictWord{141, 11, 481},
+ dictWord{132, 0, 874},
+ dictWord{9, 0, 229},
+ dictWord{5, 10, 389},
+ dictWord{136, 10, 636},
+ dictWord{7, 11, 1749},
+ dictWord{136, 11, 477},
+ dictWord{134, 0, 948},
+ dictWord{5, 11, 308},
+ dictWord{135, 11, 1088},
+ dictWord{4, 0, 748},
+ dictWord{139, 0, 1009},
+ dictWord{136, 10, 21},
+ dictWord{6, 0, 555},
+ dictWord{135, 0, 485},
+ dictWord{5, 11, 126},
+ dictWord{8, 11, 297},
+ dictWord{9, 11, 366},
+ dictWord{9, 11, 445},
+ dictWord{12, 11, 53},
+ dictWord{12, 11, 374},
+ dictWord{141, 11, 492},
+ dictWord{7, 11, 1551},
+ dictWord{139, 11, 361},
+ dictWord{136, 0, 193},
+ dictWord{136, 0, 472},
+ dictWord{8, 0, 653},
+ dictWord{13, 0, 93},
+ dictWord{147, 0, 14},
+ dictWord{132, 0, 984},
+ dictWord{132, 11, 175},
+ dictWord{5, 0, 172},
+ dictWord{6, 0, 1971},
+ dictWord{132, 11, 685},
+ dictWord{149, 11, 8},
+ dictWord{133, 11, 797},
+ dictWord{13, 0, 83},
+ dictWord{5, 10, 189},
+ dictWord{7, 10, 442},
+ dictWord{7, 10, 443},
+ dictWord{8, 10, 281},
+ dictWord{12, 10, 174},
+ dictWord{141, 10, 261},
+ dictWord{134, 0, 1568},
+ dictWord{133, 11, 565},
+ dictWord{139, 0, 384},
+ dictWord{133, 0, 260},
+ dictWord{7, 0, 758},
+ dictWord{7, 0, 880},
+ dictWord{7, 0, 1359},
+ dictWord{9, 0, 164},
+ dictWord{9, 0, 167},
+ dictWord{10, 0, 156},
+ dictWord{10, 0, 588},
+ dictWord{12, 0, 101},
+ dictWord{14, 0, 48},
+ dictWord{15, 0, 70},
+ dictWord{6, 10, 2},
+ dictWord{7, 10, 1262},
+ dictWord{7, 10, 1737},
+ dictWord{8, 10, 22},
+ dictWord{8, 10, 270},
+ dictWord{8, 10, 612},
+ dictWord{9, 10, 312},
+ dictWord{9, 10, 436},
+ dictWord{10, 10, 311},
+ dictWord{10, 10, 623},
+ dictWord{11, 10, 72},
+ dictWord{11, 10, 330},
+ dictWord{11, 10, 455},
+ dictWord{12, 10, 321},
+ dictWord{12, 10, 504},
+ dictWord{12, 10, 530},
+ dictWord{12, 10, 543},
+ dictWord{13, 10, 17},
+ dictWord{13, 10, 156},
+ dictWord{13, 10, 334},
+ dictWord{17, 10, 60},
+ dictWord{148, 10, 64},
+ dictWord{4, 11, 252},
+ dictWord{7, 11, 1068},
+ dictWord{10, 11, 434},
+ dictWord{11, 11, 228},
+ dictWord{11, 11, 426},
+ dictWord{13, 11, 231},
+ dictWord{18, 11, 106},
+ dictWord{148, 11, 87},
+ dictWord{7, 10, 354},
+ dictWord{10, 10, 410},
+ dictWord{139, 10, 815},
+ dictWord{6, 0, 367},
+ dictWord{7, 10, 670},
+ dictWord{7, 10, 1327},
+ dictWord{8, 10, 411},
+ dictWord{8, 10, 435},
+ dictWord{9, 10, 653},
+ dictWord{9, 10, 740},
+ dictWord{10, 10, 385},
+ dictWord{11, 10, 222},
+ dictWord{11, 10, 324},
+ dictWord{11, 10, 829},
+ dictWord{140, 10, 611},
+ dictWord{7, 0, 1174},
+ dictWord{6, 10, 166},
+ dictWord{135, 10, 374},
+ dictWord{146, 0, 121},
+ dictWord{132, 0, 828},
+ dictWord{5, 11, 231},
+ dictWord{138, 11, 509},
+ dictWord{7, 11, 601},
+ dictWord{9, 11, 277},
+ dictWord{9, 11, 674},
+ dictWord{10, 11, 178},
+ dictWord{10, 11, 257},
+ dictWord{10, 11, 418},
+ dictWord{11, 11, 531},
+ dictWord{11, 11, 544},
+ dictWord{11, 11, 585},
+ dictWord{12, 11, 113},
+ dictWord{12, 11, 475},
+ dictWord{13, 11, 99},
+ dictWord{142, 11, 428},
+ dictWord{134, 0, 1541},
+ dictWord{135, 11, 1779},
+ dictWord{5, 0, 343},
+ dictWord{134, 10, 398},
+ dictWord{135, 10, 50},
+ dictWord{135, 11, 1683},
+ dictWord{4, 0, 440},
+ dictWord{7, 0, 57},
+ dictWord{8, 0, 167},
+ dictWord{8, 0, 375},
+ dictWord{9, 0, 82},
+ dictWord{9, 0, 561},
+ dictWord{9, 0, 744},
+ dictWord{10, 0, 620},
+ dictWord{137, 11, 744},
+ dictWord{134, 0, 926},
+ dictWord{6, 10, 517},
+ dictWord{7, 10, 1159},
+ dictWord{10, 10, 621},
+ dictWord{139, 10, 192},
+ dictWord{137, 0, 827},
+ dictWord{8, 0, 194},
+ dictWord{136, 0, 756},
+ dictWord{10, 10, 223},
+ dictWord{139, 10, 645},
+ dictWord{7, 10, 64},
+ dictWord{136, 10, 245},
+ dictWord{4, 11, 399},
+ dictWord{5, 11, 119},
+ dictWord{5, 11, 494},
+ dictWord{7, 11, 751},
+ dictWord{137, 11, 556},
+ dictWord{132, 0, 808},
+ dictWord{135, 0, 22},
+ dictWord{7, 10, 1763},
+ dictWord{140, 10, 310},
+ dictWord{5, 0, 639},
+ dictWord{7, 0, 1249},
+ dictWord{11, 0, 896},
+ dictWord{134, 11, 584},
+ dictWord{134, 0, 1614},
+ dictWord{135, 0, 860},
+ dictWord{135, 11, 1121},
+ dictWord{5, 10, 129},
+ dictWord{6, 10, 61},
+ dictWord{135, 10, 947},
+ dictWord{4, 0, 102},
+ dictWord{7, 0, 815},
+ dictWord{7, 0, 1699},
+ dictWord{139, 0, 964},
+ dictWord{13, 10, 505},
+ dictWord{141, 10, 506},
+ dictWord{139, 10, 1000},
+ dictWord{132, 11, 679},
+ dictWord{132, 0, 899},
+ dictWord{132, 0, 569},
+ dictWord{5, 11, 694},
+ dictWord{137, 11, 714},
+ dictWord{136, 0, 795},
+ dictWord{6, 0, 2045},
+ dictWord{139, 11, 7},
+ dictWord{6, 0, 52},
+ dictWord{9, 0, 104},
+ dictWord{9, 0, 559},
+ dictWord{12, 0, 308},
+ dictWord{147, 0, 87},
+ dictWord{4, 0, 301},
+ dictWord{132, 0, 604},
+ dictWord{133, 10, 637},
+ dictWord{136, 0, 779},
+ dictWord{5, 11, 143},
+ dictWord{5, 11, 769},
+ dictWord{6, 11, 1760},
+ dictWord{7, 11, 682},
+ dictWord{7, 11, 1992},
+ dictWord{136, 11, 736},
+ dictWord{137, 10, 590},
+ dictWord{147, 0, 32},
+ dictWord{137, 11, 527},
+ dictWord{5, 10, 280},
+ dictWord{135, 10, 1226},
+ dictWord{134, 0, 494},
+ dictWord{6, 0, 677},
+ dictWord{6, 0, 682},
+ dictWord{134, 0, 1044},
+ dictWord{133, 10, 281},
+ dictWord{135, 10, 1064},
+ dictWord{7, 0, 508},
+ dictWord{133, 11, 860},
+ dictWord{6, 11, 422},
+ dictWord{7, 11, 0},
+ dictWord{7, 11, 1544},
+ dictWord{9, 11, 577},
+ dictWord{11, 11, 990},
+ dictWord{12, 11, 141},
+ dictWord{12, 11, 453},
+ dictWord{13, 11, 47},
+ dictWord{141, 11, 266},
+ dictWord{134, 0, 1014},
+ dictWord{5, 11, 515},
+ dictWord{137, 11, 131},
+ dictWord{134, 0, 957},
+ dictWord{132, 11, 646},
+ dictWord{6, 0, 310},
+ dictWord{7, 0, 1849},
+ dictWord{8, 0, 72},
+ dictWord{8, 0, 272},
+ dictWord{8, 0, 431},
+ dictWord{9, 0, 12},
+ dictWord{9, 0, 376},
+ dictWord{10, 0, 563},
+ dictWord{10, 0, 630},
+ dictWord{10, 0, 796},
+ dictWord{10, 0, 810},
+ dictWord{11, 0, 367},
+ dictWord{11, 0, 599},
+ dictWord{11, 0, 686},
+ dictWord{140, 0, 672},
+ dictWord{7, 0, 570},
+ dictWord{4, 11, 396},
+ dictWord{7, 10, 120},
+ dictWord{7, 11, 728},
+ dictWord{8, 10, 489},
+ dictWord{9, 11, 117},
+ dictWord{9, 10, 319},
+ dictWord{10, 10, 820},
+ dictWord{11, 10, 1004},
+ dictWord{12, 10, 379},
+ dictWord{12, 10, 679},
+ dictWord{13, 10, 117},
+ dictWord{13, 11, 202},
+ dictWord{13, 10, 412},
+ dictWord{14, 10, 25},
+ dictWord{15, 10, 52},
+ dictWord{15, 10, 161},
+ dictWord{16, 10, 47},
+ dictWord{20, 11, 51},
+ dictWord{149, 10, 2},
+ dictWord{6, 11, 121},
+ dictWord{6, 11, 124},
+ dictWord{6, 11, 357},
+ dictWord{7, 11, 1138},
+ dictWord{7, 11, 1295},
+ dictWord{8, 11, 162},
+ dictWord{139, 11, 655},
+ dictWord{8, 0, 449},
+ dictWord{4, 10, 937},
+ dictWord{5, 10, 801},
+ dictWord{136, 11, 449},
+ dictWord{139, 11, 958},
+ dictWord{6, 0, 181},
+ dictWord{7, 0, 537},
+ dictWord{8, 0, 64},
+ dictWord{9, 0, 127},
+ dictWord{10, 0, 496},
+ dictWord{12, 0, 510},
+ dictWord{141, 0, 384},
+ dictWord{138, 11, 253},
+ dictWord{4, 0, 244},
+ dictWord{135, 0, 233},
+ dictWord{133, 11, 237},
+ dictWord{132, 10, 365},
+ dictWord{6, 0, 1650},
+ dictWord{10, 0, 702},
+ dictWord{139, 0, 245},
+ dictWord{5, 10, 7},
+ dictWord{139, 10, 774},
+ dictWord{13, 0, 463},
+ dictWord{20, 0, 49},
+ dictWord{13, 11, 463},
+ dictWord{148, 11, 49},
+ dictWord{4, 10, 734},
+ dictWord{5, 10, 662},
+ dictWord{134, 10, 430},
+ dictWord{4, 10, 746},
+ dictWord{135, 10, 1090},
+ dictWord{5, 10, 360},
+ dictWord{136, 10, 237},
+ dictWord{137, 0, 338},
+ dictWord{143, 11, 10},
+ dictWord{7, 11, 571},
+ dictWord{138, 11, 366},
+ dictWord{134, 0, 1279},
+ dictWord{9, 11, 513},
+ dictWord{10, 11, 22},
+ dictWord{10, 11, 39},
+ dictWord{12, 11, 122},
+ dictWord{140, 11, 187},
+ dictWord{133, 0, 896},
+ dictWord{146, 0, 178},
+ dictWord{134, 0, 695},
+ dictWord{137, 0, 808},
+ dictWord{134, 11, 587},
+ dictWord{7, 11, 107},
+ dictWord{7, 11, 838},
+ dictWord{8, 11, 550},
+ dictWord{138, 11, 401},
+ dictWord{7, 0, 1117},
+ dictWord{136, 0, 539},
+ dictWord{4, 10, 277},
+ dictWord{5, 10, 608},
+ dictWord{6, 10, 493},
+ dictWord{7, 10, 457},
+ dictWord{140, 10, 384},
+ dictWord{133, 11, 768},
+ dictWord{12, 0, 257},
+ dictWord{7, 10, 27},
+ dictWord{135, 10, 316},
+ dictWord{140, 0, 1003},
+ dictWord{4, 0, 207},
+ dictWord{5, 0, 586},
+ dictWord{5, 0, 676},
+ dictWord{6, 0, 448},
+ dictWord{8, 0, 244},
+ dictWord{11, 0, 1},
+ dictWord{13, 0, 3},
+ dictWord{16, 0, 54},
+ dictWord{17, 0, 4},
+ dictWord{18, 0, 13},
+ dictWord{133, 10, 552},
+ dictWord{4, 10, 401},
+ dictWord{137, 10, 264},
+ dictWord{5, 0, 516},
+ dictWord{7, 0, 1883},
+ dictWord{135, 11, 1883},
+ dictWord{12, 0, 960},
+ dictWord{132, 11, 894},
+ dictWord{5, 0, 4},
+ dictWord{5, 0, 810},
+ dictWord{6, 0, 13},
+ dictWord{6, 0, 538},
+ dictWord{6, 0, 1690},
+ dictWord{6, 0, 1726},
+ dictWord{7, 0, 499},
+ dictWord{7, 0, 1819},
+ dictWord{8, 0, 148},
+ dictWord{8, 0, 696},
+ dictWord{8, 0, 791},
+ dictWord{12, 0, 125},
+ dictWord{143, 0, 9},
+ dictWord{135, 0, 1268},
+ dictWord{11, 0, 30},
+ dictWord{14, 0, 315},
+ dictWord{9, 10, 543},
+ dictWord{10, 10, 524},
+ dictWord{12, 10, 524},
+ dictWord{16, 10, 18},
+ dictWord{20, 10, 26},
+ dictWord{148, 10, 65},
+ dictWord{6, 0, 748},
+ dictWord{4, 10, 205},
+ dictWord{5, 10, 623},
+ dictWord{7, 10, 104},
+ dictWord{136, 10, 519},
+ dictWord{11, 0, 542},
+ dictWord{139, 0, 852},
+ dictWord{140, 0, 6},
+ dictWord{132, 0, 848},
+ dictWord{7, 0, 1385},
+ dictWord{11, 0, 582},
+ dictWord{11, 0, 650},
+ dictWord{11, 0, 901},
+ dictWord{11, 0, 949},
+ dictWord{12, 0, 232},
+ dictWord{12, 0, 236},
+ dictWord{13, 0, 413},
+ dictWord{13, 0, 501},
+ dictWord{18, 0, 116},
+ dictWord{7, 10, 579},
+ dictWord{9, 10, 41},
+ dictWord{9, 10, 244},
+ dictWord{9, 10, 669},
+ dictWord{10, 10, 5},
+ dictWord{11, 10, 861},
+ dictWord{11, 10, 951},
+ dictWord{139, 10, 980},
+ dictWord{4, 0, 945},
+ dictWord{6, 0, 1811},
+ dictWord{6, 0, 1845},
+ dictWord{6, 0, 1853},
+ dictWord{6, 0, 1858},
+ dictWord{8, 0, 862},
+ dictWord{12, 0, 782},
+ dictWord{12, 0, 788},
+ dictWord{18, 0, 160},
+ dictWord{148, 0, 117},
+ dictWord{132, 10, 717},
+ dictWord{4, 0, 925},
+ dictWord{5, 0, 803},
+ dictWord{8, 0, 698},
+ dictWord{138, 0, 828},
+ dictWord{134, 0, 1416},
+ dictWord{132, 0, 610},
+ dictWord{139, 0, 992},
+ dictWord{6, 0, 878},
+ dictWord{134, 0, 1477},
+ dictWord{135, 0, 1847},
+ dictWord{138, 11, 531},
+ dictWord{137, 11, 539},
+ dictWord{134, 11, 272},
+ dictWord{133, 0, 383},
+ dictWord{134, 0, 1404},
+ dictWord{132, 10, 489},
+ dictWord{4, 11, 9},
+ dictWord{5, 11, 128},
+ dictWord{7, 11, 368},
+ dictWord{11, 11, 480},
+ dictWord{148, 11, 3},
+ dictWord{136, 0, 986},
+ dictWord{9, 0, 660},
+ dictWord{138, 0, 347},
+ dictWord{135, 10, 892},
+ dictWord{136, 11, 682},
+ dictWord{7, 0, 572},
+ dictWord{9, 0, 592},
+ dictWord{11, 0, 680},
+ dictWord{12, 0, 356},
+ dictWord{140, 0, 550},
+ dictWord{7, 0, 1411},
+ dictWord{138, 11, 527},
+ dictWord{4, 11, 2},
+ dictWord{7, 11, 545},
+ dictWord{135, 11, 894},
+ dictWord{137, 10, 473},
+ dictWord{11, 0, 64},
+ dictWord{7, 11, 481},
+ dictWord{7, 10, 819},
+ dictWord{9, 10, 26},
+ dictWord{9, 10, 392},
+ dictWord{9, 11, 792},
+ dictWord{10, 10, 152},
+ dictWord{10, 10, 226},
+ dictWord{12, 10, 276},
+ dictWord{12, 10, 426},
+ dictWord{12, 10, 589},
+ dictWord{13, 10, 460},
+ dictWord{15, 10, 97},
+ dictWord{19, 10, 48},
+ dictWord{148, 10, 104},
+ dictWord{135, 10, 51},
+ dictWord{136, 11, 445},
+ dictWord{136, 11, 646},
+ dictWord{135, 0, 606},
+ dictWord{132, 10, 674},
+ dictWord{6, 0, 1829},
+ dictWord{134, 0, 1830},
+ dictWord{132, 10, 770},
+ dictWord{5, 10, 79},
+ dictWord{7, 10, 1027},
+ dictWord{7, 10, 1477},
+ dictWord{139, 10, 52},
+ dictWord{5, 11, 530},
+ dictWord{142, 11, 113},
+ dictWord{134, 10, 1666},
+ dictWord{7, 0, 748},
+ dictWord{139, 0, 700},
+ dictWord{134, 10, 195},
+ dictWord{133, 10, 789},
+ dictWord{9, 0, 87},
+ dictWord{10, 0, 365},
+ dictWord{4, 10, 251},
+ dictWord{4, 10, 688},
+ dictWord{7, 10, 513},
+ dictWord{135, 10, 1284},
+ dictWord{136, 11, 111},
+ dictWord{133, 0, 127},
+ dictWord{6, 0, 198},
+ dictWord{140, 0, 83},
+ dictWord{133, 11, 556},
+ dictWord{133, 10, 889},
+ dictWord{4, 10, 160},
+ dictWord{5, 10, 330},
+ dictWord{7, 10, 1434},
+ dictWord{136, 10, 174},
+ dictWord{5, 0, 276},
+ dictWord{6, 0, 55},
+ dictWord{7, 0, 1369},
+ dictWord{138, 0, 864},
+ dictWord{8, 11, 16},
+ dictWord{140, 11, 568},
+ dictWord{6, 0, 1752},
+ dictWord{136, 0, 726},
+ dictWord{135, 0, 1066},
+ dictWord{133, 0, 764},
+ dictWord{6, 11, 186},
+ dictWord{137, 11, 426},
+ dictWord{11, 0, 683},
+ dictWord{139, 11, 683},
+ dictWord{6, 0, 309},
+ dictWord{7, 0, 331},
+ dictWord{138, 0, 550},
+ dictWord{133, 10, 374},
+ dictWord{6, 0, 1212},
+ dictWord{6, 0, 1852},
+ dictWord{7, 0, 1062},
+ dictWord{8, 0, 874},
+ dictWord{8, 0, 882},
+ dictWord{138, 0, 936},
+ dictWord{132, 11, 585},
+ dictWord{134, 0, 1364},
+ dictWord{7, 0, 986},
+ dictWord{133, 10, 731},
+ dictWord{6, 0, 723},
+ dictWord{6, 0, 1408},
+ dictWord{138, 0, 381},
+ dictWord{135, 0, 1573},
+ dictWord{134, 0, 1025},
+ dictWord{4, 10, 626},
+ dictWord{5, 10, 642},
+ dictWord{6, 10, 425},
+ dictWord{10, 10, 202},
+ dictWord{139, 10, 141},
+ dictWord{4, 11, 93},
+ dictWord{5, 11, 252},
+ dictWord{6, 11, 229},
+ dictWord{7, 11, 291},
+ dictWord{9, 11, 550},
+ dictWord{139, 11, 644},
+ dictWord{137, 11, 749},
+ dictWord{137, 11, 162},
+ dictWord{132, 11, 381},
+ dictWord{135, 0, 1559},
+ dictWord{6, 0, 194},
+ dictWord{7, 0, 133},
+ dictWord{10, 0, 493},
+ dictWord{10, 0, 570},
+ dictWord{139, 0, 664},
+ dictWord{5, 0, 24},
+ dictWord{5, 0, 569},
+ dictWord{6, 0, 3},
+ dictWord{6, 0, 119},
+ dictWord{6, 0, 143},
+ dictWord{6, 0, 440},
+ dictWord{7, 0, 295},
+ dictWord{7, 0, 599},
+ dictWord{7, 0, 1686},
+ dictWord{7, 0, 1854},
+ dictWord{8, 0, 424},
+ dictWord{9, 0, 43},
+ dictWord{9, 0, 584},
+ dictWord{9, 0, 760},
+ dictWord{10, 0, 148},
+ dictWord{10, 0, 328},
+ dictWord{11, 0, 159},
+ dictWord{11, 0, 253},
+ dictWord{11, 0, 506},
+ dictWord{12, 0, 487},
+ dictWord{140, 0, 531},
+ dictWord{6, 0, 661},
+ dictWord{134, 0, 1517},
+ dictWord{136, 10, 835},
+ dictWord{151, 10, 17},
+ dictWord{5, 0, 14},
+ dictWord{5, 0, 892},
+ dictWord{6, 0, 283},
+ dictWord{7, 0, 234},
+ dictWord{136, 0, 537},
+ dictWord{139, 0, 541},
+ dictWord{4, 0, 126},
+ dictWord{8, 0, 635},
+ dictWord{147, 0, 34},
+ dictWord{4, 0, 316},
+ dictWord{4, 0, 495},
+ dictWord{135, 0, 1561},
+ dictWord{4, 11, 187},
+ dictWord{5, 11, 184},
+ dictWord{5, 11, 690},
+ dictWord{7, 11, 1869},
+ dictWord{138, 11, 756},
+ dictWord{139, 11, 783},
+ dictWord{4, 0, 998},
+ dictWord{137, 0, 861},
+ dictWord{136, 0, 1009},
+ dictWord{139, 11, 292},
+ dictWord{5, 11, 21},
+ dictWord{6, 11, 77},
+ dictWord{6, 11, 157},
+ dictWord{7, 11, 974},
+ dictWord{7, 11, 1301},
+ dictWord{7, 11, 1339},
+ dictWord{7, 11, 1490},
+ dictWord{7, 11, 1873},
+ dictWord{137, 11, 628},
+ dictWord{7, 11, 1283},
+ dictWord{9, 11, 227},
+ dictWord{9, 11, 499},
+ dictWord{10, 11, 341},
+ dictWord{11, 11, 325},
+ dictWord{11, 11, 408},
+ dictWord{14, 11, 180},
+ dictWord{15, 11, 144},
+ dictWord{18, 11, 47},
+ dictWord{147, 11, 49},
+ dictWord{4, 0, 64},
+ dictWord{5, 0, 352},
+ dictWord{5, 0, 720},
+ dictWord{6, 0, 368},
+ dictWord{139, 0, 359},
+ dictWord{5, 10, 384},
+ dictWord{8, 10, 455},
+ dictWord{140, 10, 48},
+ dictWord{5, 10, 264},
+ dictWord{134, 10, 184},
+ dictWord{7, 0, 1577},
+ dictWord{10, 0, 304},
+ dictWord{10, 0, 549},
+ dictWord{12, 0, 365},
+ dictWord{13, 0, 220},
+ dictWord{13, 0, 240},
+ dictWord{142, 0, 33},
+ dictWord{134, 0, 1107},
+ dictWord{134, 0, 929},
+ dictWord{135, 0, 1142},
+ dictWord{6, 0, 175},
+ dictWord{137, 0, 289},
+ dictWord{5, 0, 432},
+ dictWord{133, 0, 913},
+ dictWord{6, 0, 279},
+ dictWord{7, 0, 219},
+ dictWord{5, 10, 633},
+ dictWord{135, 10, 1323},
+ dictWord{7, 0, 785},
+ dictWord{7, 10, 359},
+ dictWord{8, 10, 243},
+ dictWord{140, 10, 175},
+ dictWord{139, 0, 595},
+ dictWord{132, 10, 105},
+ dictWord{8, 11, 398},
+ dictWord{9, 11, 681},
+ dictWord{139, 11, 632},
+ dictWord{140, 0, 80},
+ dictWord{5, 0, 931},
+ dictWord{134, 0, 1698},
+ dictWord{142, 11, 241},
+ dictWord{134, 11, 20},
+ dictWord{134, 0, 1323},
+ dictWord{11, 0, 526},
+ dictWord{11, 0, 939},
+ dictWord{141, 0, 290},
+ dictWord{5, 0, 774},
+ dictWord{6, 0, 780},
+ dictWord{6, 0, 1637},
+ dictWord{6, 0, 1686},
+ dictWord{6, 0, 1751},
+ dictWord{8, 0, 559},
+ dictWord{141, 0, 109},
+ dictWord{141, 0, 127},
+ dictWord{7, 0, 1167},
+ dictWord{11, 0, 934},
+ dictWord{13, 0, 391},
+ dictWord{17, 0, 76},
+ dictWord{135, 11, 709},
+ dictWord{135, 0, 963},
+ dictWord{6, 0, 260},
+ dictWord{135, 0, 1484},
+ dictWord{134, 0, 573},
+ dictWord{4, 10, 758},
+ dictWord{139, 11, 941},
+ dictWord{135, 10, 1649},
+ dictWord{145, 11, 36},
+ dictWord{4, 0, 292},
+ dictWord{137, 0, 580},
+ dictWord{4, 0, 736},
+ dictWord{5, 0, 871},
+ dictWord{6, 0, 1689},
+ dictWord{135, 0, 1944},
+ dictWord{7, 11, 945},
+ dictWord{11, 11, 713},
+ dictWord{139, 11, 744},
+ dictWord{134, 0, 1164},
+ dictWord{135, 11, 937},
+ dictWord{6, 0, 1922},
+ dictWord{9, 0, 982},
+ dictWord{15, 0, 173},
+ dictWord{15, 0, 178},
+ dictWord{15, 0, 200},
+ dictWord{18, 0, 189},
+ dictWord{18, 0, 207},
+ dictWord{21, 0, 47},
+ dictWord{135, 11, 1652},
+ dictWord{7, 0, 1695},
+ dictWord{139, 10, 128},
+ dictWord{6, 0, 63},
+ dictWord{135, 0, 920},
+ dictWord{133, 0, 793},
+ dictWord{143, 11, 134},
+ dictWord{133, 10, 918},
+ dictWord{5, 0, 67},
+ dictWord{6, 0, 62},
+ dictWord{6, 0, 374},
+ dictWord{135, 0, 1391},
+ dictWord{9, 0, 790},
+ dictWord{12, 0, 47},
+ dictWord{4, 11, 579},
+ dictWord{5, 11, 226},
+ dictWord{5, 11, 323},
+ dictWord{135, 11, 960},
+ dictWord{10, 11, 784},
+ dictWord{141, 11, 191},
+ dictWord{4, 0, 391},
+ dictWord{135, 0, 1169},
+ dictWord{137, 0, 443},
+ dictWord{13, 11, 232},
+ dictWord{146, 11, 35},
+ dictWord{132, 10, 340},
+ dictWord{132, 0, 271},
+ dictWord{137, 11, 313},
+ dictWord{5, 11, 973},
+ dictWord{137, 11, 659},
+ dictWord{134, 0, 1140},
+ dictWord{6, 11, 135},
+ dictWord{135, 11, 1176},
+ dictWord{4, 0, 253},
+ dictWord{5, 0, 544},
+ dictWord{7, 0, 300},
+ dictWord{137, 0, 340},
+ dictWord{7, 0, 897},
+ dictWord{5, 10, 985},
+ dictWord{7, 10, 509},
+ dictWord{145, 10, 96},
+ dictWord{138, 11, 735},
+ dictWord{135, 10, 1919},
+ dictWord{138, 0, 890},
+ dictWord{5, 0, 818},
+ dictWord{134, 0, 1122},
+ dictWord{5, 0, 53},
+ dictWord{5, 0, 541},
+ dictWord{6, 0, 94},
+ dictWord{6, 0, 499},
+ dictWord{7, 0, 230},
+ dictWord{139, 0, 321},
+ dictWord{4, 0, 920},
+ dictWord{5, 0, 25},
+ dictWord{5, 0, 790},
+ dictWord{6, 0, 457},
+ dictWord{7, 0, 853},
+ dictWord{8, 0, 788},
+ dictWord{142, 11, 31},
+ dictWord{132, 10, 247},
+ dictWord{135, 11, 314},
+ dictWord{132, 0, 468},
+ dictWord{7, 0, 243},
+ dictWord{6, 10, 337},
+ dictWord{7, 10, 494},
+ dictWord{8, 10, 27},
+ dictWord{8, 10, 599},
+ dictWord{138, 10, 153},
+ dictWord{4, 10, 184},
+ dictWord{5, 10, 390},
+ dictWord{7, 10, 618},
+ dictWord{7, 10, 1456},
+ dictWord{139, 10, 710},
+ dictWord{134, 0, 870},
+ dictWord{134, 0, 1238},
+ dictWord{134, 0, 1765},
+ dictWord{10, 0, 853},
+ dictWord{10, 0, 943},
+ dictWord{14, 0, 437},
+ dictWord{14, 0, 439},
+ dictWord{14, 0, 443},
+ dictWord{14, 0, 446},
+ dictWord{14, 0, 452},
+ dictWord{14, 0, 469},
+ dictWord{14, 0, 471},
+ dictWord{14, 0, 473},
+ dictWord{16, 0, 93},
+ dictWord{16, 0, 102},
+ dictWord{16, 0, 110},
+ dictWord{148, 0, 121},
+ dictWord{4, 0, 605},
+ dictWord{7, 0, 518},
+ dictWord{7, 0, 1282},
+ dictWord{7, 0, 1918},
+ dictWord{10, 0, 180},
+ dictWord{139, 0, 218},
+ dictWord{133, 0, 822},
+ dictWord{4, 0, 634},
+ dictWord{11, 0, 916},
+ dictWord{142, 0, 419},
+ dictWord{6, 11, 281},
+ dictWord{7, 11, 6},
+ dictWord{8, 11, 282},
+ dictWord{8, 11, 480},
+ dictWord{8, 11, 499},
+ dictWord{9, 11, 198},
+ dictWord{10, 11, 143},
+ dictWord{10, 11, 169},
+ dictWord{10, 11, 211},
+ dictWord{10, 11, 417},
+ dictWord{10, 11, 574},
+ dictWord{11, 11, 147},
+ dictWord{11, 11, 395},
+ dictWord{12, 11, 75},
+ dictWord{12, 11, 407},
+ dictWord{12, 11, 608},
+ dictWord{13, 11, 500},
+ dictWord{142, 11, 251},
+ dictWord{134, 0, 898},
+ dictWord{6, 0, 36},
+ dictWord{7, 0, 658},
+ dictWord{8, 0, 454},
+ dictWord{150, 11, 48},
+ dictWord{133, 11, 674},
+ dictWord{135, 11, 1776},
+ dictWord{4, 11, 419},
+ dictWord{10, 10, 227},
+ dictWord{11, 10, 497},
+ dictWord{11, 10, 709},
+ dictWord{140, 10, 415},
+ dictWord{6, 10, 360},
+ dictWord{7, 10, 1664},
+ dictWord{136, 10, 478},
+ dictWord{137, 0, 806},
+ dictWord{12, 11, 508},
+ dictWord{14, 11, 102},
+ dictWord{14, 11, 226},
+ dictWord{144, 11, 57},
+ dictWord{135, 11, 1123},
+ dictWord{4, 11, 138},
+ dictWord{7, 11, 1012},
+ dictWord{7, 11, 1280},
+ dictWord{137, 11, 76},
+ dictWord{5, 11, 29},
+ dictWord{140, 11, 638},
+ dictWord{136, 10, 699},
+ dictWord{134, 0, 1326},
+ dictWord{132, 0, 104},
+ dictWord{135, 11, 735},
+ dictWord{132, 10, 739},
+ dictWord{134, 0, 1331},
+ dictWord{7, 0, 260},
+ dictWord{135, 11, 260},
+ dictWord{135, 11, 1063},
+ dictWord{7, 0, 45},
+ dictWord{9, 0, 542},
+ dictWord{9, 0, 566},
+ dictWord{10, 0, 728},
+ dictWord{137, 10, 869},
+ dictWord{4, 10, 67},
+ dictWord{5, 10, 422},
+ dictWord{7, 10, 1037},
+ dictWord{7, 10, 1289},
+ dictWord{7, 10, 1555},
+ dictWord{9, 10, 741},
+ dictWord{145, 10, 108},
+ dictWord{139, 0, 263},
+ dictWord{134, 0, 1516},
+ dictWord{14, 0, 146},
+ dictWord{15, 0, 42},
+ dictWord{16, 0, 23},
+ dictWord{17, 0, 86},
+ dictWord{146, 0, 17},
+ dictWord{138, 0, 468},
+ dictWord{136, 0, 1005},
+ dictWord{4, 11, 17},
+ dictWord{5, 11, 23},
+ dictWord{7, 11, 995},
+ dictWord{11, 11, 383},
+ dictWord{11, 11, 437},
+ dictWord{12, 11, 460},
+ dictWord{140, 11, 532},
+ dictWord{7, 0, 87},
+ dictWord{142, 0, 288},
+ dictWord{138, 10, 96},
+ dictWord{135, 11, 626},
+ dictWord{144, 10, 26},
+ dictWord{7, 0, 988},
+ dictWord{7, 0, 1939},
+ dictWord{9, 0, 64},
+ dictWord{9, 0, 502},
+ dictWord{12, 0, 22},
+ dictWord{12, 0, 34},
+ dictWord{13, 0, 12},
+ dictWord{13, 0, 234},
+ dictWord{147, 0, 77},
+ dictWord{13, 0, 133},
+ dictWord{8, 10, 203},
+ dictWord{11, 10, 823},
+ dictWord{11, 10, 846},
+ dictWord{12, 10, 482},
+ dictWord{13, 10, 277},
+ dictWord{13, 10, 302},
+ dictWord{13, 10, 464},
+ dictWord{14, 10, 205},
+ dictWord{142, 10, 221},
+ dictWord{4, 10, 449},
+ dictWord{133, 10, 718},
+ dictWord{135, 0, 141},
+ dictWord{6, 0, 1842},
+ dictWord{136, 0, 872},
+ dictWord{8, 11, 70},
+ dictWord{12, 11, 171},
+ dictWord{141, 11, 272},
+ dictWord{4, 10, 355},
+ dictWord{6, 10, 311},
+ dictWord{9, 10, 256},
+ dictWord{138, 10, 404},
+ dictWord{132, 0, 619},
+ dictWord{137, 0, 261},
+ dictWord{10, 11, 233},
+ dictWord{10, 10, 758},
+ dictWord{139, 11, 76},
+ dictWord{5, 0, 246},
+ dictWord{8, 0, 189},
+ dictWord{9, 0, 355},
+ dictWord{9, 0, 512},
+ dictWord{10, 0, 124},
+ dictWord{10, 0, 453},
+ dictWord{11, 0, 143},
+ dictWord{11, 0, 416},
+ dictWord{11, 0, 859},
+ dictWord{141, 0, 341},
+ dictWord{134, 11, 442},
+ dictWord{133, 10, 827},
+ dictWord{5, 10, 64},
+ dictWord{140, 10, 581},
+ dictWord{4, 10, 442},
+ dictWord{7, 10, 1047},
+ dictWord{7, 10, 1352},
+ dictWord{135, 10, 1643},
+ dictWord{134, 11, 1709},
+ dictWord{5, 0, 678},
+ dictWord{6, 0, 305},
+ dictWord{7, 0, 775},
+ dictWord{7, 0, 1065},
+ dictWord{133, 10, 977},
+ dictWord{11, 11, 69},
+ dictWord{12, 11, 105},
+ dictWord{12, 11, 117},
+ dictWord{13, 11, 213},
+ dictWord{14, 11, 13},
+ dictWord{14, 11, 62},
+ dictWord{14, 11, 177},
+ dictWord{14, 11, 421},
+ dictWord{15, 11, 19},
+ dictWord{146, 11, 141},
+ dictWord{137, 11, 309},
+ dictWord{5, 0, 35},
+ dictWord{7, 0, 862},
+ dictWord{7, 0, 1886},
+ dictWord{138, 0, 179},
+ dictWord{136, 0, 285},
+ dictWord{132, 0, 517},
+ dictWord{7, 11, 976},
+ dictWord{9, 11, 146},
+ dictWord{10, 11, 206},
+ dictWord{10, 11, 596},
+ dictWord{13, 11, 218},
+ dictWord{142, 11, 153},
+ dictWord{132, 10, 254},
+ dictWord{6, 0, 214},
+ dictWord{12, 0, 540},
+ dictWord{4, 10, 275},
+ dictWord{7, 10, 1219},
+ dictWord{140, 10, 376},
+ dictWord{8, 0, 667},
+ dictWord{11, 0, 403},
+ dictWord{146, 0, 83},
+ dictWord{12, 0, 74},
+ dictWord{10, 11, 648},
+ dictWord{11, 11, 671},
+ dictWord{143, 11, 46},
+ dictWord{135, 0, 125},
+ dictWord{134, 10, 1753},
+ dictWord{133, 0, 761},
+ dictWord{6, 0, 912},
+ dictWord{4, 11, 518},
+ dictWord{6, 10, 369},
+ dictWord{6, 10, 502},
+ dictWord{7, 10, 1036},
+ dictWord{7, 11, 1136},
+ dictWord{8, 10, 348},
+ dictWord{9, 10, 452},
+ dictWord{10, 10, 26},
+ dictWord{11, 10, 224},
+ dictWord{11, 10, 387},
+ dictWord{11, 10, 772},
+ dictWord{12, 10, 95},
+ dictWord{12, 10, 629},
+ dictWord{13, 10, 195},
+ dictWord{13, 10, 207},
+ dictWord{13, 10, 241},
+ dictWord{14, 10, 260},
+ dictWord{14, 10, 270},
+ dictWord{143, 10, 140},
+ dictWord{10, 0, 131},
+ dictWord{140, 0, 72},
+ dictWord{132, 10, 269},
+ dictWord{5, 10, 480},
+ dictWord{7, 10, 532},
+ dictWord{7, 10, 1197},
+ dictWord{7, 10, 1358},
+ dictWord{8, 10, 291},
+ dictWord{11, 10, 349},
+ dictWord{142, 10, 396},
+ dictWord{8, 11, 689},
+ dictWord{137, 11, 863},
+ dictWord{8, 0, 333},
+ dictWord{138, 0, 182},
+ dictWord{4, 11, 18},
+ dictWord{7, 11, 145},
+ dictWord{7, 11, 444},
+ dictWord{7, 11, 1278},
+ dictWord{8, 11, 49},
+ dictWord{8, 11, 400},
+ dictWord{9, 11, 71},
+ dictWord{9, 11, 250},
+ dictWord{10, 11, 459},
+ dictWord{12, 11, 160},
+ dictWord{144, 11, 24},
+ dictWord{14, 11, 35},
+ dictWord{142, 11, 191},
+ dictWord{135, 11, 1864},
+ dictWord{135, 0, 1338},
+ dictWord{148, 10, 15},
+ dictWord{14, 0, 94},
+ dictWord{15, 0, 65},
+ dictWord{16, 0, 4},
+ dictWord{16, 0, 77},
+ dictWord{16, 0, 80},
+ dictWord{145, 0, 5},
+ dictWord{12, 11, 82},
+ dictWord{143, 11, 36},
+ dictWord{133, 11, 1010},
+ dictWord{133, 0, 449},
+ dictWord{133, 0, 646},
+ dictWord{7, 0, 86},
+ dictWord{8, 0, 103},
+ dictWord{135, 10, 657},
+ dictWord{7, 0, 2028},
+ dictWord{138, 0, 641},
+ dictWord{136, 10, 533},
+ dictWord{134, 0, 1},
+ dictWord{139, 11, 970},
+ dictWord{5, 11, 87},
+ dictWord{7, 11, 313},
+ dictWord{7, 11, 1103},
+ dictWord{10, 11, 112},
+ dictWord{10, 11, 582},
+ dictWord{11, 11, 389},
+ dictWord{11, 11, 813},
+ dictWord{12, 11, 385},
+ dictWord{13, 11, 286},
+ dictWord{14, 11, 124},
+ dictWord{146, 11, 108},
+ dictWord{6, 0, 869},
+ dictWord{132, 11, 267},
+ dictWord{6, 0, 277},
+ dictWord{7, 0, 1274},
+ dictWord{7, 0, 1386},
+ dictWord{146, 0, 87},
+ dictWord{6, 0, 187},
+ dictWord{7, 0, 39},
+ dictWord{7, 0, 1203},
+ dictWord{8, 0, 380},
+ dictWord{14, 0, 117},
+ dictWord{149, 0, 28},
+ dictWord{4, 10, 211},
+ dictWord{4, 10, 332},
+ dictWord{5, 10, 335},
+ dictWord{6, 10, 238},
+ dictWord{7, 10, 269},
+ dictWord{7, 10, 811},
+ dictWord{7, 10, 1797},
+ dictWord{8, 10, 836},
+ dictWord{9, 10, 507},
+ dictWord{141, 10, 242},
+ dictWord{4, 0, 785},
+ dictWord{5, 0, 368},
+ dictWord{6, 0, 297},
+ dictWord{7, 0, 793},
+ dictWord{139, 0, 938},
+ dictWord{7, 0, 464},
+ dictWord{8, 0, 558},
+ dictWord{11, 0, 105},
+ dictWord{12, 0, 231},
+ dictWord{14, 0, 386},
+ dictWord{15, 0, 102},
+ dictWord{148, 0, 75},
+ dictWord{133, 10, 1009},
+ dictWord{8, 0, 877},
+ dictWord{140, 0, 731},
+ dictWord{139, 11, 289},
+ dictWord{10, 11, 249},
+ dictWord{139, 11, 209},
+ dictWord{132, 11, 561},
+ dictWord{134, 0, 1608},
+ dictWord{132, 11, 760},
+ dictWord{134, 0, 1429},
+ dictWord{9, 11, 154},
+ dictWord{140, 11, 485},
+ dictWord{5, 10, 228},
+ dictWord{6, 10, 203},
+ dictWord{7, 10, 156},
+ dictWord{8, 10, 347},
+ dictWord{137, 10, 265},
+ dictWord{7, 0, 1010},
+ dictWord{11, 0, 733},
+ dictWord{11, 0, 759},
+ dictWord{13, 0, 34},
+ dictWord{14, 0, 427},
+ dictWord{146, 0, 45},
+ dictWord{7, 10, 1131},
+ dictWord{135, 10, 1468},
+ dictWord{136, 11, 255},
+ dictWord{7, 0, 1656},
+ dictWord{9, 0, 369},
+ dictWord{10, 0, 338},
+ dictWord{10, 0, 490},
+ dictWord{11, 0, 154},
+ dictWord{11, 0, 545},
+ dictWord{11, 0, 775},
+ dictWord{13, 0, 77},
+ dictWord{141, 0, 274},
+ dictWord{133, 11, 621},
+ dictWord{134, 0, 1038},
+ dictWord{4, 11, 368},
+ dictWord{135, 11, 641},
+ dictWord{6, 0, 2010},
+ dictWord{8, 0, 979},
+ dictWord{8, 0, 985},
+ dictWord{10, 0, 951},
+ dictWord{138, 0, 1011},
+ dictWord{134, 0, 1005},
+ dictWord{19, 0, 121},
+ dictWord{5, 10, 291},
+ dictWord{5, 10, 318},
+ dictWord{7, 10, 765},
+ dictWord{9, 10, 389},
+ dictWord{140, 10, 548},
+ dictWord{5, 0, 20},
+ dictWord{6, 0, 298},
+ dictWord{7, 0, 659},
+ dictWord{137, 0, 219},
+ dictWord{7, 0, 1440},
+ dictWord{11, 0, 854},
+ dictWord{11, 0, 872},
+ dictWord{11, 0, 921},
+ dictWord{12, 0, 551},
+ dictWord{13, 0, 472},
+ dictWord{142, 0, 367},
+ dictWord{5, 0, 490},
+ dictWord{6, 0, 615},
+ dictWord{6, 0, 620},
+ dictWord{135, 0, 683},
+ dictWord{6, 0, 1070},
+ dictWord{134, 0, 1597},
+ dictWord{139, 0, 522},
+ dictWord{132, 0, 439},
+ dictWord{136, 0, 669},
+ dictWord{6, 0, 766},
+ dictWord{6, 0, 1143},
+ dictWord{6, 0, 1245},
+ dictWord{10, 10, 525},
+ dictWord{139, 10, 82},
+ dictWord{9, 11, 92},
+ dictWord{147, 11, 91},
+ dictWord{6, 0, 668},
+ dictWord{134, 0, 1218},
+ dictWord{6, 11, 525},
+ dictWord{9, 11, 876},
+ dictWord{140, 11, 284},
+ dictWord{132, 0, 233},
+ dictWord{136, 0, 547},
+ dictWord{132, 10, 422},
+ dictWord{5, 10, 355},
+ dictWord{145, 10, 0},
+ dictWord{6, 11, 300},
+ dictWord{135, 11, 1515},
+ dictWord{4, 0, 482},
+ dictWord{137, 10, 905},
+ dictWord{4, 0, 886},
+ dictWord{7, 0, 346},
+ dictWord{133, 11, 594},
+ dictWord{133, 10, 865},
+ dictWord{5, 10, 914},
+ dictWord{134, 10, 1625},
+ dictWord{135, 0, 334},
+ dictWord{5, 0, 795},
+ dictWord{6, 0, 1741},
+ dictWord{133, 10, 234},
+ dictWord{135, 10, 1383},
+ dictWord{6, 11, 1641},
+ dictWord{136, 11, 820},
+ dictWord{135, 0, 371},
+ dictWord{7, 11, 1313},
+ dictWord{138, 11, 660},
+ dictWord{135, 10, 1312},
+ dictWord{135, 0, 622},
+ dictWord{7, 0, 625},
+ dictWord{135, 0, 1750},
+ dictWord{135, 0, 339},
+ dictWord{4, 0, 203},
+ dictWord{135, 0, 1936},
+ dictWord{15, 0, 29},
+ dictWord{16, 0, 38},
+ dictWord{15, 11, 29},
+ dictWord{144, 11, 38},
+ dictWord{5, 0, 338},
+ dictWord{135, 0, 1256},
+ dictWord{135, 10, 1493},
+ dictWord{10, 0, 130},
+ dictWord{6, 10, 421},
+ dictWord{7, 10, 61},
+ dictWord{7, 10, 1540},
+ dictWord{138, 10, 501},
+ dictWord{6, 11, 389},
+ dictWord{7, 11, 149},
+ dictWord{9, 11, 142},
+ dictWord{138, 11, 94},
+ dictWord{137, 10, 341},
+ dictWord{11, 0, 678},
+ dictWord{12, 0, 307},
+ dictWord{142, 10, 98},
+ dictWord{6, 11, 8},
+ dictWord{7, 11, 1881},
+ dictWord{136, 11, 91},
+ dictWord{135, 0, 2044},
+ dictWord{6, 0, 770},
+ dictWord{6, 0, 802},
+ dictWord{6, 0, 812},
+ dictWord{7, 0, 311},
+ dictWord{9, 0, 308},
+ dictWord{12, 0, 255},
+ dictWord{6, 10, 102},
+ dictWord{7, 10, 72},
+ dictWord{15, 10, 142},
+ dictWord{147, 10, 67},
+ dictWord{151, 10, 30},
+ dictWord{135, 10, 823},
+ dictWord{135, 0, 1266},
+ dictWord{135, 11, 1746},
+ dictWord{135, 10, 1870},
+ dictWord{4, 0, 400},
+ dictWord{5, 0, 267},
+ dictWord{135, 0, 232},
+ dictWord{7, 11, 24},
+ dictWord{11, 11, 542},
+ dictWord{139, 11, 852},
+ dictWord{135, 11, 1739},
+ dictWord{4, 11, 503},
+ dictWord{135, 11, 1661},
+ dictWord{5, 11, 130},
+ dictWord{7, 11, 1314},
+ dictWord{9, 11, 610},
+ dictWord{10, 11, 718},
+ dictWord{11, 11, 601},
+ dictWord{11, 11, 819},
+ dictWord{11, 11, 946},
+ dictWord{140, 11, 536},
+ dictWord{10, 11, 149},
+ dictWord{11, 11, 280},
+ dictWord{142, 11, 336},
+ dictWord{7, 0, 739},
+ dictWord{11, 0, 690},
+ dictWord{7, 11, 1946},
+ dictWord{8, 10, 48},
+ dictWord{8, 10, 88},
+ dictWord{8, 10, 582},
+ dictWord{8, 10, 681},
+ dictWord{9, 10, 373},
+ dictWord{9, 10, 864},
+ dictWord{11, 10, 157},
+ dictWord{11, 10, 843},
+ dictWord{148, 10, 27},
+ dictWord{134, 0, 990},
+ dictWord{4, 10, 88},
+ dictWord{5, 10, 137},
+ dictWord{5, 10, 174},
+ dictWord{5, 10, 777},
+ dictWord{6, 10, 1664},
+ dictWord{6, 10, 1725},
+ dictWord{7, 10, 77},
+ dictWord{7, 10, 426},
+ dictWord{7, 10, 1317},
+ dictWord{7, 10, 1355},
+ dictWord{8, 10, 126},
+ dictWord{8, 10, 563},
+ dictWord{9, 10, 523},
+ dictWord{9, 10, 750},
+ dictWord{10, 10, 310},
+ dictWord{10, 10, 836},
+ dictWord{11, 10, 42},
+ dictWord{11, 10, 318},
+ dictWord{11, 10, 731},
+ dictWord{12, 10, 68},
+ dictWord{12, 10, 92},
+ dictWord{12, 10, 507},
+ dictWord{12, 10, 692},
+ dictWord{13, 10, 81},
+ dictWord{13, 10, 238},
+ dictWord{13, 10, 374},
+ dictWord{14, 10, 436},
+ dictWord{18, 10, 138},
+ dictWord{19, 10, 78},
+ dictWord{19, 10, 111},
+ dictWord{20, 10, 55},
+ dictWord{20, 10, 77},
+ dictWord{148, 10, 92},
+ dictWord{141, 10, 418},
+ dictWord{7, 0, 1831},
+ dictWord{132, 10, 938},
+ dictWord{6, 0, 776},
+ dictWord{134, 0, 915},
+ dictWord{138, 10, 351},
+ dictWord{5, 11, 348},
+ dictWord{6, 11, 522},
+ dictWord{6, 10, 1668},
+ dictWord{7, 10, 1499},
+ dictWord{8, 10, 117},
+ dictWord{9, 10, 314},
+ dictWord{138, 10, 174},
+ dictWord{135, 10, 707},
+ dictWord{132, 0, 613},
+ dictWord{133, 10, 403},
+ dictWord{132, 11, 392},
+ dictWord{5, 11, 433},
+ dictWord{9, 11, 633},
+ dictWord{139, 11, 629},
+ dictWord{133, 0, 763},
+ dictWord{132, 0, 878},
+ dictWord{132, 0, 977},
+ dictWord{132, 0, 100},
+ dictWord{6, 0, 463},
+ dictWord{4, 10, 44},
+ dictWord{5, 10, 311},
+ dictWord{7, 10, 639},
+ dictWord{7, 10, 762},
+ dictWord{7, 10, 1827},
+ dictWord{9, 10, 8},
+ dictWord{9, 10, 462},
+ dictWord{148, 10, 83},
+ dictWord{134, 11, 234},
+ dictWord{4, 10, 346},
+ dictWord{7, 10, 115},
+ dictWord{9, 10, 180},
+ dictWord{9, 10, 456},
+ dictWord{138, 10, 363},
+ dictWord{5, 0, 362},
+ dictWord{5, 0, 443},
+ dictWord{6, 0, 318},
+ dictWord{7, 0, 1019},
+ dictWord{139, 0, 623},
+ dictWord{5, 0, 463},
+ dictWord{8, 0, 296},
+ dictWord{7, 11, 140},
+ dictWord{7, 11, 1950},
+ dictWord{8, 11, 680},
+ dictWord{11, 11, 817},
+ dictWord{147, 11, 88},
+ dictWord{7, 11, 1222},
+ dictWord{138, 11, 386},
+ dictWord{142, 0, 137},
+ dictWord{132, 0, 454},
+ dictWord{7, 0, 1914},
+ dictWord{6, 11, 5},
+ dictWord{7, 10, 1051},
+ dictWord{9, 10, 545},
+ dictWord{11, 11, 249},
+ dictWord{12, 11, 313},
+ dictWord{16, 11, 66},
+ dictWord{145, 11, 26},
+ dictWord{135, 0, 1527},
+ dictWord{145, 0, 58},
+ dictWord{148, 11, 59},
+ dictWord{5, 0, 48},
+ dictWord{5, 0, 404},
+ dictWord{6, 0, 557},
+ dictWord{7, 0, 458},
+ dictWord{8, 0, 597},
+ dictWord{10, 0, 455},
+ dictWord{10, 0, 606},
+ dictWord{11, 0, 49},
+ dictWord{11, 0, 548},
+ dictWord{12, 0, 476},
+ dictWord{13, 0, 18},
+ dictWord{141, 0, 450},
+ dictWord{5, 11, 963},
+ dictWord{134, 11, 1773},
+ dictWord{133, 0, 729},
+ dictWord{138, 11, 586},
+ dictWord{5, 0, 442},
+ dictWord{135, 0, 1984},
+ dictWord{134, 0, 449},
+ dictWord{144, 0, 40},
+ dictWord{4, 0, 853},
+ dictWord{7, 11, 180},
+ dictWord{8, 11, 509},
+ dictWord{136, 11, 792},
+ dictWord{6, 10, 185},
+ dictWord{7, 10, 1899},
+ dictWord{9, 10, 875},
+ dictWord{139, 10, 673},
+ dictWord{134, 11, 524},
+ dictWord{12, 0, 227},
+ dictWord{4, 10, 327},
+ dictWord{5, 10, 478},
+ dictWord{7, 10, 1332},
+ dictWord{136, 10, 753},
+ dictWord{6, 0, 1491},
+ dictWord{5, 10, 1020},
+ dictWord{133, 10, 1022},
+ dictWord{4, 10, 103},
+ dictWord{133, 10, 401},
+ dictWord{132, 11, 931},
+ dictWord{4, 10, 499},
+ dictWord{135, 10, 1421},
+ dictWord{5, 0, 55},
+ dictWord{7, 0, 376},
+ dictWord{140, 0, 161},
+ dictWord{133, 0, 450},
+ dictWord{6, 0, 1174},
+ dictWord{134, 0, 1562},
+ dictWord{10, 0, 62},
+ dictWord{13, 0, 400},
+ dictWord{135, 11, 1837},
+ dictWord{140, 0, 207},
+ dictWord{135, 0, 869},
+ dictWord{4, 11, 773},
+ dictWord{5, 11, 618},
+ dictWord{137, 11, 756},
+ dictWord{132, 10, 96},
+ dictWord{4, 0, 213},
+ dictWord{7, 0, 223},
+ dictWord{8, 0, 80},
+ dictWord{135, 10, 968},
+ dictWord{4, 11, 90},
+ dictWord{5, 11, 337},
+ dictWord{5, 11, 545},
+ dictWord{7, 11, 754},
+ dictWord{9, 11, 186},
+ dictWord{10, 11, 72},
+ dictWord{10, 11, 782},
+ dictWord{11, 11, 513},
+ dictWord{11, 11, 577},
+ dictWord{11, 11, 610},
+ dictWord{11, 11, 889},
+ dictWord{11, 11, 961},
+ dictWord{12, 11, 354},
+ dictWord{12, 11, 362},
+ dictWord{12, 11, 461},
+ dictWord{12, 11, 595},
+ dictWord{13, 11, 79},
+ dictWord{143, 11, 121},
+ dictWord{7, 0, 381},
+ dictWord{7, 0, 806},
+ dictWord{7, 0, 820},
+ dictWord{8, 0, 354},
+ dictWord{8, 0, 437},
+ dictWord{8, 0, 787},
+ dictWord{9, 0, 657},
+ dictWord{10, 0, 58},
+ dictWord{10, 0, 339},
+ dictWord{10, 0, 749},
+ dictWord{11, 0, 914},
+ dictWord{12, 0, 162},
+ dictWord{13, 0, 75},
+ dictWord{14, 0, 106},
+ dictWord{14, 0, 198},
+ dictWord{14, 0, 320},
+ dictWord{14, 0, 413},
+ dictWord{146, 0, 43},
+ dictWord{136, 0, 747},
+ dictWord{136, 0, 954},
+ dictWord{134, 0, 1073},
+ dictWord{135, 0, 556},
+ dictWord{7, 11, 151},
+ dictWord{9, 11, 329},
+ dictWord{139, 11, 254},
+ dictWord{5, 0, 692},
+ dictWord{134, 0, 1395},
+ dictWord{6, 10, 563},
+ dictWord{137, 10, 224},
+ dictWord{134, 0, 191},
+ dictWord{132, 0, 804},
+ dictWord{9, 11, 187},
+ dictWord{10, 11, 36},
+ dictWord{17, 11, 44},
+ dictWord{146, 11, 64},
+ dictWord{7, 11, 165},
+ dictWord{7, 11, 919},
+ dictWord{136, 11, 517},
+ dictWord{4, 11, 506},
+ dictWord{5, 11, 295},
+ dictWord{7, 11, 1680},
+ dictWord{15, 11, 14},
+ dictWord{144, 11, 5},
+ dictWord{4, 0, 706},
+ dictWord{6, 0, 162},
+ dictWord{7, 0, 1960},
+ dictWord{136, 0, 831},
+ dictWord{135, 11, 1376},
+ dictWord{7, 11, 987},
+ dictWord{9, 11, 688},
+ dictWord{10, 11, 522},
+ dictWord{11, 11, 788},
+ dictWord{140, 11, 566},
+ dictWord{150, 0, 35},
+ dictWord{138, 0, 426},
+ dictWord{135, 0, 1235},
+ dictWord{135, 11, 1741},
+ dictWord{7, 11, 389},
+ dictWord{7, 11, 700},
+ dictWord{7, 11, 940},
+ dictWord{8, 11, 514},
+ dictWord{9, 11, 116},
+ dictWord{9, 11, 535},
+ dictWord{10, 11, 118},
+ dictWord{11, 11, 107},
+ dictWord{11, 11, 148},
+ dictWord{11, 11, 922},
+ dictWord{12, 11, 254},
+ dictWord{12, 11, 421},
+ dictWord{142, 11, 238},
+ dictWord{134, 0, 1234},
+ dictWord{132, 11, 743},
+ dictWord{4, 10, 910},
+ dictWord{5, 10, 832},
+ dictWord{135, 11, 1335},
+ dictWord{141, 0, 96},
+ dictWord{135, 11, 185},
+ dictWord{146, 0, 149},
+ dictWord{4, 0, 204},
+ dictWord{137, 0, 902},
+ dictWord{4, 11, 784},
+ dictWord{133, 11, 745},
+ dictWord{136, 0, 833},
+ dictWord{136, 0, 949},
+ dictWord{7, 0, 366},
+ dictWord{9, 0, 287},
+ dictWord{12, 0, 199},
+ dictWord{12, 0, 556},
+ dictWord{12, 0, 577},
+ dictWord{5, 11, 81},
+ dictWord{7, 11, 146},
+ dictWord{7, 11, 1342},
+ dictWord{7, 11, 1446},
+ dictWord{8, 11, 53},
+ dictWord{8, 11, 561},
+ dictWord{8, 11, 694},
+ dictWord{8, 11, 754},
+ dictWord{9, 11, 97},
+ dictWord{9, 11, 115},
+ dictWord{9, 11, 894},
+ dictWord{10, 11, 462},
+ dictWord{10, 11, 813},
+ dictWord{11, 11, 230},
+ dictWord{11, 11, 657},
+ dictWord{11, 11, 699},
+ dictWord{11, 11, 748},
+ dictWord{12, 11, 119},
+ dictWord{12, 11, 200},
+ dictWord{12, 11, 283},
+ dictWord{14, 11, 273},
+ dictWord{145, 11, 15},
+ dictWord{5, 11, 408},
+ dictWord{137, 11, 747},
+ dictWord{9, 11, 498},
+ dictWord{140, 11, 181},
+ dictWord{6, 0, 2020},
+ dictWord{136, 0, 992},
+ dictWord{5, 0, 356},
+ dictWord{135, 0, 224},
+ dictWord{134, 0, 784},
+ dictWord{7, 0, 630},
+ dictWord{9, 0, 567},
+ dictWord{11, 0, 150},
+ dictWord{11, 0, 444},
+ dictWord{13, 0, 119},
+ dictWord{8, 10, 528},
+ dictWord{137, 10, 348},
+ dictWord{134, 0, 539},
+ dictWord{4, 10, 20},
+ dictWord{133, 10, 616},
+ dictWord{142, 0, 27},
+ dictWord{7, 11, 30},
+ dictWord{8, 11, 86},
+ dictWord{8, 11, 315},
+ dictWord{8, 11, 700},
+ dictWord{9, 11, 576},
+ dictWord{9, 11, 858},
+ dictWord{11, 11, 310},
+ dictWord{11, 11, 888},
+ dictWord{11, 11, 904},
+ dictWord{12, 11, 361},
+ dictWord{141, 11, 248},
+ dictWord{138, 11, 839},
+ dictWord{134, 0, 755},
+ dictWord{134, 0, 1063},
+ dictWord{7, 10, 1091},
+ dictWord{135, 10, 1765},
+ dictWord{134, 11, 428},
+ dictWord{7, 11, 524},
+ dictWord{8, 11, 169},
+ dictWord{8, 11, 234},
+ dictWord{9, 11, 480},
+ dictWord{138, 11, 646},
+ dictWord{139, 0, 814},
+ dictWord{7, 11, 1462},
+ dictWord{139, 11, 659},
+ dictWord{4, 10, 26},
+ dictWord{5, 10, 429},
+ dictWord{6, 10, 245},
+ dictWord{7, 10, 704},
+ dictWord{7, 10, 1379},
+ dictWord{135, 10, 1474},
+ dictWord{7, 11, 1205},
+ dictWord{138, 11, 637},
+ dictWord{139, 11, 803},
+ dictWord{132, 10, 621},
+ dictWord{136, 0, 987},
+ dictWord{4, 11, 266},
+ dictWord{8, 11, 4},
+ dictWord{9, 11, 39},
+ dictWord{10, 11, 166},
+ dictWord{11, 11, 918},
+ dictWord{12, 11, 635},
+ dictWord{20, 11, 10},
+ dictWord{22, 11, 27},
+ dictWord{150, 11, 43},
+ dictWord{4, 0, 235},
+ dictWord{135, 0, 255},
+ dictWord{4, 0, 194},
+ dictWord{5, 0, 584},
+ dictWord{6, 0, 384},
+ dictWord{7, 0, 583},
+ dictWord{10, 0, 761},
+ dictWord{11, 0, 760},
+ dictWord{139, 0, 851},
+ dictWord{133, 10, 542},
+ dictWord{134, 0, 1086},
+ dictWord{133, 10, 868},
+ dictWord{8, 0, 1016},
+ dictWord{136, 0, 1018},
+ dictWord{7, 0, 1396},
+ dictWord{7, 11, 1396},
+ dictWord{136, 10, 433},
+ dictWord{135, 10, 1495},
+ dictWord{138, 10, 215},
+ dictWord{141, 10, 124},
+ dictWord{7, 11, 157},
+ dictWord{8, 11, 279},
+ dictWord{9, 11, 759},
+ dictWord{16, 11, 31},
+ dictWord{16, 11, 39},
+ dictWord{16, 11, 75},
+ dictWord{18, 11, 24},
+ dictWord{20, 11, 42},
+ dictWord{152, 11, 1},
+ dictWord{5, 0, 562},
+ dictWord{134, 11, 604},
+ dictWord{134, 0, 913},
+ dictWord{5, 0, 191},
+ dictWord{137, 0, 271},
+ dictWord{4, 0, 470},
+ dictWord{6, 0, 153},
+ dictWord{7, 0, 1503},
+ dictWord{7, 0, 1923},
+ dictWord{10, 0, 701},
+ dictWord{11, 0, 132},
+ dictWord{11, 0, 227},
+ dictWord{11, 0, 320},
+ dictWord{11, 0, 436},
+ dictWord{11, 0, 525},
+ dictWord{11, 0, 855},
+ dictWord{11, 0, 873},
+ dictWord{12, 0, 41},
+ dictWord{12, 0, 286},
+ dictWord{13, 0, 103},
+ dictWord{13, 0, 284},
+ dictWord{14, 0, 255},
+ dictWord{14, 0, 262},
+ dictWord{15, 0, 117},
+ dictWord{143, 0, 127},
+ dictWord{7, 0, 475},
+ dictWord{12, 0, 45},
+ dictWord{147, 10, 112},
+ dictWord{132, 11, 567},
+ dictWord{137, 11, 859},
+ dictWord{6, 0, 713},
+ dictWord{6, 0, 969},
+ dictWord{6, 0, 1290},
+ dictWord{134, 0, 1551},
+ dictWord{133, 0, 327},
+ dictWord{6, 0, 552},
+ dictWord{6, 0, 1292},
+ dictWord{7, 0, 1754},
+ dictWord{137, 0, 604},
+ dictWord{4, 0, 223},
+ dictWord{6, 0, 359},
+ dictWord{11, 0, 3},
+ dictWord{13, 0, 108},
+ dictWord{14, 0, 89},
+ dictWord{16, 0, 22},
+ dictWord{5, 11, 762},
+ dictWord{7, 11, 1880},
+ dictWord{9, 11, 680},
+ dictWord{139, 11, 798},
+ dictWord{5, 0, 80},
+ dictWord{6, 0, 405},
+ dictWord{7, 0, 403},
+ dictWord{7, 0, 1502},
+ dictWord{8, 0, 456},
+ dictWord{9, 0, 487},
+ dictWord{9, 0, 853},
+ dictWord{9, 0, 889},
+ dictWord{10, 0, 309},
+ dictWord{11, 0, 721},
+ dictWord{11, 0, 994},
+ dictWord{12, 0, 430},
+ dictWord{141, 0, 165},
+ dictWord{133, 11, 298},
+ dictWord{132, 10, 647},
+ dictWord{134, 0, 2016},
+ dictWord{18, 10, 10},
+ dictWord{146, 11, 10},
+ dictWord{4, 0, 453},
+ dictWord{5, 0, 887},
+ dictWord{6, 0, 535},
+ dictWord{8, 0, 6},
+ dictWord{8, 0, 543},
+ dictWord{136, 0, 826},
+ dictWord{136, 0, 975},
+ dictWord{10, 0, 961},
+ dictWord{138, 0, 962},
+ dictWord{138, 10, 220},
+ dictWord{6, 0, 1891},
+ dictWord{6, 0, 1893},
+ dictWord{9, 0, 916},
+ dictWord{9, 0, 965},
+ dictWord{9, 0, 972},
+ dictWord{12, 0, 801},
+ dictWord{12, 0, 859},
+ dictWord{12, 0, 883},
+ dictWord{15, 0, 226},
+ dictWord{149, 0, 51},
+ dictWord{132, 10, 109},
+ dictWord{135, 11, 267},
+ dictWord{7, 11, 92},
+ dictWord{7, 11, 182},
+ dictWord{8, 11, 453},
+ dictWord{9, 11, 204},
+ dictWord{11, 11, 950},
+ dictWord{12, 11, 94},
+ dictWord{12, 11, 644},
+ dictWord{16, 11, 20},
+ dictWord{16, 11, 70},
+ dictWord{16, 11, 90},
+ dictWord{147, 11, 55},
+ dictWord{134, 10, 1746},
+ dictWord{6, 11, 71},
+ dictWord{7, 11, 845},
+ dictWord{7, 11, 1308},
+ dictWord{8, 11, 160},
+ dictWord{137, 11, 318},
+ dictWord{5, 0, 101},
+ dictWord{6, 0, 88},
+ dictWord{7, 0, 263},
+ dictWord{7, 0, 628},
+ dictWord{7, 0, 1677},
+ dictWord{8, 0, 349},
+ dictWord{9, 0, 100},
+ dictWord{10, 0, 677},
+ dictWord{14, 0, 169},
+ dictWord{14, 0, 302},
+ dictWord{14, 0, 313},
+ dictWord{15, 0, 48},
+ dictWord{15, 0, 84},
+ dictWord{7, 11, 237},
+ dictWord{8, 11, 664},
+ dictWord{9, 11, 42},
+ dictWord{9, 11, 266},
+ dictWord{9, 11, 380},
+ dictWord{9, 11, 645},
+ dictWord{10, 11, 177},
+ dictWord{138, 11, 276},
+ dictWord{138, 11, 69},
+ dictWord{4, 0, 310},
+ dictWord{7, 0, 708},
+ dictWord{7, 0, 996},
+ dictWord{9, 0, 795},
+ dictWord{10, 0, 390},
+ dictWord{10, 0, 733},
+ dictWord{11, 0, 451},
+ dictWord{12, 0, 249},
+ dictWord{14, 0, 115},
+ dictWord{14, 0, 286},
+ dictWord{143, 0, 100},
+ dictWord{5, 0, 587},
+ dictWord{4, 10, 40},
+ dictWord{10, 10, 67},
+ dictWord{11, 10, 117},
+ dictWord{11, 10, 768},
+ dictWord{139, 10, 935},
+ dictWord{6, 0, 1942},
+ dictWord{7, 0, 512},
+ dictWord{136, 0, 983},
+ dictWord{7, 10, 992},
+ dictWord{8, 10, 301},
+ dictWord{9, 10, 722},
+ dictWord{12, 10, 63},
+ dictWord{13, 10, 29},
+ dictWord{14, 10, 161},
+ dictWord{143, 10, 18},
+ dictWord{136, 11, 76},
+ dictWord{139, 10, 923},
+ dictWord{134, 0, 645},
+ dictWord{134, 0, 851},
+ dictWord{4, 0, 498},
+ dictWord{132, 11, 293},
+ dictWord{7, 0, 217},
+ dictWord{8, 0, 140},
+ dictWord{10, 0, 610},
+ dictWord{14, 11, 352},
+ dictWord{17, 11, 53},
+ dictWord{18, 11, 146},
+ dictWord{18, 11, 152},
+ dictWord{19, 11, 11},
+ dictWord{150, 11, 54},
+ dictWord{134, 0, 1448},
+ dictWord{138, 11, 841},
+ dictWord{133, 0, 905},
+ dictWord{4, 11, 605},
+ dictWord{7, 11, 518},
+ dictWord{7, 11, 1282},
+ dictWord{7, 11, 1918},
+ dictWord{10, 11, 180},
+ dictWord{139, 11, 218},
+ dictWord{139, 11, 917},
+ dictWord{135, 10, 825},
+ dictWord{140, 10, 328},
+ dictWord{4, 0, 456},
+ dictWord{7, 0, 105},
+ dictWord{7, 0, 358},
+ dictWord{7, 0, 1637},
+ dictWord{8, 0, 643},
+ dictWord{139, 0, 483},
+ dictWord{134, 0, 792},
+ dictWord{6, 11, 96},
+ dictWord{135, 11, 1426},
+ dictWord{137, 11, 691},
+ dictWord{4, 11, 651},
+ dictWord{133, 11, 289},
+ dictWord{7, 11, 688},
+ dictWord{8, 11, 35},
+ dictWord{9, 11, 511},
+ dictWord{10, 11, 767},
+ dictWord{147, 11, 118},
+ dictWord{150, 0, 56},
+ dictWord{5, 0, 243},
+ dictWord{5, 0, 535},
+ dictWord{6, 10, 204},
+ dictWord{10, 10, 320},
+ dictWord{10, 10, 583},
+ dictWord{13, 10, 502},
+ dictWord{14, 10, 72},
+ dictWord{14, 10, 274},
+ dictWord{14, 10, 312},
+ dictWord{14, 10, 344},
+ dictWord{15, 10, 159},
+ dictWord{16, 10, 62},
+ dictWord{16, 10, 69},
+ dictWord{17, 10, 30},
+ dictWord{18, 10, 42},
+ dictWord{18, 10, 53},
+ dictWord{18, 10, 84},
+ dictWord{18, 10, 140},
+ dictWord{19, 10, 68},
+ dictWord{19, 10, 85},
+ dictWord{20, 10, 5},
+ dictWord{20, 10, 45},
+ dictWord{20, 10, 101},
+ dictWord{22, 10, 7},
+ dictWord{150, 10, 20},
+ dictWord{4, 10, 558},
+ dictWord{6, 10, 390},
+ dictWord{7, 10, 162},
+ dictWord{7, 10, 689},
+ dictWord{9, 10, 360},
+ dictWord{138, 10, 653},
+ dictWord{146, 11, 23},
+ dictWord{135, 0, 1748},
+ dictWord{5, 10, 856},
+ dictWord{6, 10, 1672},
+ dictWord{6, 10, 1757},
+ dictWord{134, 10, 1781},
+ dictWord{5, 0, 539},
+ dictWord{5, 0, 754},
+ dictWord{6, 0, 876},
+ dictWord{132, 11, 704},
+ dictWord{135, 11, 1078},
+ dictWord{5, 10, 92},
+ dictWord{10, 10, 736},
+ dictWord{140, 10, 102},
+ dictWord{17, 0, 91},
+ dictWord{5, 10, 590},
+ dictWord{137, 10, 213},
+ dictWord{134, 0, 1565},
+ dictWord{6, 0, 91},
+ dictWord{135, 0, 435},
+ dictWord{4, 0, 939},
+ dictWord{140, 0, 792},
+ dictWord{134, 0, 1399},
+ dictWord{4, 0, 16},
+ dictWord{5, 0, 316},
+ dictWord{5, 0, 842},
+ dictWord{6, 0, 370},
+ dictWord{6, 0, 1778},
+ dictWord{8, 0, 166},
+ dictWord{11, 0, 812},
+ dictWord{12, 0, 206},
+ dictWord{12, 0, 351},
+ dictWord{14, 0, 418},
+ dictWord{16, 0, 15},
+ dictWord{16, 0, 34},
+ dictWord{18, 0, 3},
+ dictWord{19, 0, 3},
+ dictWord{19, 0, 7},
+ dictWord{20, 0, 4},
+ dictWord{21, 0, 21},
+ dictWord{4, 11, 720},
+ dictWord{133, 11, 306},
+ dictWord{144, 0, 95},
+ dictWord{133, 11, 431},
+ dictWord{132, 11, 234},
+ dictWord{135, 0, 551},
+ dictWord{4, 0, 999},
+ dictWord{6, 0, 1966},
+ dictWord{134, 0, 2042},
+ dictWord{7, 0, 619},
+ dictWord{10, 0, 547},
+ dictWord{11, 0, 122},
+ dictWord{12, 0, 601},
+ dictWord{15, 0, 7},
+ dictWord{148, 0, 20},
+ dictWord{5, 11, 464},
+ dictWord{6, 11, 236},
+ dictWord{7, 11, 276},
+ dictWord{7, 11, 696},
+ dictWord{7, 11, 914},
+ dictWord{7, 11, 1108},
+ dictWord{7, 11, 1448},
+ dictWord{9, 11, 15},
+ dictWord{9, 11, 564},
+ dictWord{10, 11, 14},
+ dictWord{12, 11, 565},
+ dictWord{13, 11, 449},
+ dictWord{14, 11, 53},
+ dictWord{15, 11, 13},
+ dictWord{16, 11, 64},
+ dictWord{145, 11, 41},
+ dictWord{6, 0, 884},
+ dictWord{6, 0, 1019},
+ dictWord{134, 0, 1150},
+ dictWord{6, 11, 1767},
+ dictWord{12, 11, 194},
+ dictWord{145, 11, 107},
+ dictWord{136, 10, 503},
+ dictWord{133, 11, 840},
+ dictWord{7, 0, 671},
+ dictWord{134, 10, 466},
+ dictWord{132, 0, 888},
+ dictWord{4, 0, 149},
+ dictWord{138, 0, 368},
+ dictWord{4, 0, 154},
+ dictWord{7, 0, 1134},
+ dictWord{136, 0, 105},
+ dictWord{135, 0, 983},
+ dictWord{9, 11, 642},
+ dictWord{11, 11, 236},
+ dictWord{142, 11, 193},
+ dictWord{4, 0, 31},
+ dictWord{6, 0, 429},
+ dictWord{7, 0, 962},
+ dictWord{9, 0, 458},
+ dictWord{139, 0, 691},
+ dictWord{6, 0, 643},
+ dictWord{134, 0, 1102},
+ dictWord{132, 0, 312},
+ dictWord{4, 11, 68},
+ dictWord{5, 11, 634},
+ dictWord{6, 11, 386},
+ dictWord{7, 11, 794},
+ dictWord{8, 11, 273},
+ dictWord{9, 11, 563},
+ dictWord{10, 11, 105},
+ dictWord{10, 11, 171},
+ dictWord{11, 11, 94},
+ dictWord{139, 11, 354},
+ dictWord{133, 0, 740},
+ dictWord{135, 0, 1642},
+ dictWord{4, 11, 95},
+ dictWord{7, 11, 416},
+ dictWord{8, 11, 211},
+ dictWord{139, 11, 830},
+ dictWord{132, 0, 236},
+ dictWord{138, 10, 241},
+ dictWord{7, 11, 731},
+ dictWord{13, 11, 20},
+ dictWord{143, 11, 11},
+ dictWord{5, 0, 836},
+ dictWord{5, 0, 857},
+ dictWord{6, 0, 1680},
+ dictWord{135, 0, 59},
+ dictWord{10, 0, 68},
+ dictWord{11, 0, 494},
+ dictWord{152, 11, 6},
+ dictWord{4, 0, 81},
+ dictWord{139, 0, 867},
+ dictWord{135, 0, 795},
+ dictWord{133, 11, 689},
+ dictWord{4, 0, 1001},
+ dictWord{5, 0, 282},
+ dictWord{6, 0, 1932},
+ dictWord{6, 0, 1977},
+ dictWord{6, 0, 1987},
+ dictWord{6, 0, 1992},
+ dictWord{8, 0, 650},
+ dictWord{8, 0, 919},
+ dictWord{8, 0, 920},
+ dictWord{8, 0, 923},
+ dictWord{8, 0, 926},
+ dictWord{8, 0, 927},
+ dictWord{8, 0, 931},
+ dictWord{8, 0, 939},
+ dictWord{8, 0, 947},
+ dictWord{8, 0, 956},
+ dictWord{8, 0, 997},
+ dictWord{9, 0, 907},
+ dictWord{10, 0, 950},
+ dictWord{10, 0, 953},
+ dictWord{10, 0, 954},
+ dictWord{10, 0, 956},
+ dictWord{10, 0, 958},
+ dictWord{10, 0, 959},
+ dictWord{10, 0, 964},
+ dictWord{10, 0, 970},
+ dictWord{10, 0, 972},
+ dictWord{10, 0, 973},
+ dictWord{10, 0, 975},
+ dictWord{10, 0, 976},
+ dictWord{10, 0, 980},
+ dictWord{10, 0, 981},
+ dictWord{10, 0, 984},
+ dictWord{10, 0, 988},
+ dictWord{10, 0, 990},
+ dictWord{10, 0, 995},
+ dictWord{10, 0, 999},
+ dictWord{10, 0, 1002},
+ dictWord{10, 0, 1003},
+ dictWord{10, 0, 1005},
+ dictWord{10, 0, 1006},
+ dictWord{10, 0, 1008},
+ dictWord{10, 0, 1009},
+ dictWord{10, 0, 1012},
+ dictWord{10, 0, 1014},
+ dictWord{10, 0, 1015},
+ dictWord{10, 0, 1019},
+ dictWord{10, 0, 1020},
+ dictWord{10, 0, 1022},
+ dictWord{12, 0, 959},
+ dictWord{12, 0, 961},
+ dictWord{12, 0, 962},
+ dictWord{12, 0, 963},
+ dictWord{12, 0, 964},
+ dictWord{12, 0, 965},
+ dictWord{12, 0, 967},
+ dictWord{12, 0, 968},
+ dictWord{12, 0, 969},
+ dictWord{12, 0, 970},
+ dictWord{12, 0, 971},
+ dictWord{12, 0, 972},
+ dictWord{12, 0, 973},
+ dictWord{12, 0, 974},
+ dictWord{12, 0, 975},
+ dictWord{12, 0, 976},
+ dictWord{12, 0, 977},
+ dictWord{12, 0, 979},
+ dictWord{12, 0, 981},
+ dictWord{12, 0, 982},
+ dictWord{12, 0, 983},
+ dictWord{12, 0, 984},
+ dictWord{12, 0, 985},
+ dictWord{12, 0, 986},
+ dictWord{12, 0, 987},
+ dictWord{12, 0, 989},
+ dictWord{12, 0, 990},
+ dictWord{12, 0, 992},
+ dictWord{12, 0, 993},
+ dictWord{12, 0, 995},
+ dictWord{12, 0, 998},
+ dictWord{12, 0, 999},
+ dictWord{12, 0, 1000},
+ dictWord{12, 0, 1001},
+ dictWord{12, 0, 1002},
+ dictWord{12, 0, 1004},
+ dictWord{12, 0, 1005},
+ dictWord{12, 0, 1006},
+ dictWord{12, 0, 1007},
+ dictWord{12, 0, 1008},
+ dictWord{12, 0, 1009},
+ dictWord{12, 0, 1010},
+ dictWord{12, 0, 1011},
+ dictWord{12, 0, 1012},
+ dictWord{12, 0, 1014},
+ dictWord{12, 0, 1015},
+ dictWord{12, 0, 1016},
+ dictWord{12, 0, 1017},
+ dictWord{12, 0, 1018},
+ dictWord{12, 0, 1019},
+ dictWord{12, 0, 1022},
+ dictWord{12, 0, 1023},
+ dictWord{14, 0, 475},
+ dictWord{14, 0, 477},
+ dictWord{14, 0, 478},
+ dictWord{14, 0, 479},
+ dictWord{14, 0, 480},
+ dictWord{14, 0, 482},
+ dictWord{14, 0, 483},
+ dictWord{14, 0, 484},
+ dictWord{14, 0, 485},
+ dictWord{14, 0, 486},
+ dictWord{14, 0, 487},
+ dictWord{14, 0, 488},
+ dictWord{14, 0, 489},
+ dictWord{14, 0, 490},
+ dictWord{14, 0, 491},
+ dictWord{14, 0, 492},
+ dictWord{14, 0, 493},
+ dictWord{14, 0, 494},
+ dictWord{14, 0, 495},
+ dictWord{14, 0, 496},
+ dictWord{14, 0, 497},
+ dictWord{14, 0, 498},
+ dictWord{14, 0, 499},
+ dictWord{14, 0, 500},
+ dictWord{14, 0, 501},
+ dictWord{14, 0, 502},
+ dictWord{14, 0, 503},
+ dictWord{14, 0, 504},
+ dictWord{14, 0, 506},
+ dictWord{14, 0, 507},
+ dictWord{14, 0, 508},
+ dictWord{14, 0, 509},
+ dictWord{14, 0, 510},
+ dictWord{14, 0, 511},
+ dictWord{16, 0, 113},
+ dictWord{16, 0, 114},
+ dictWord{16, 0, 115},
+ dictWord{16, 0, 117},
+ dictWord{16, 0, 118},
+ dictWord{16, 0, 119},
+ dictWord{16, 0, 121},
+ dictWord{16, 0, 122},
+ dictWord{16, 0, 123},
+ dictWord{16, 0, 124},
+ dictWord{16, 0, 125},
+ dictWord{16, 0, 126},
+ dictWord{16, 0, 127},
+ dictWord{18, 0, 242},
+ dictWord{18, 0, 243},
+ dictWord{18, 0, 244},
+ dictWord{18, 0, 245},
+ dictWord{18, 0, 248},
+ dictWord{18, 0, 249},
+ dictWord{18, 0, 250},
+ dictWord{18, 0, 251},
+ dictWord{18, 0, 252},
+ dictWord{18, 0, 253},
+ dictWord{18, 0, 254},
+ dictWord{18, 0, 255},
+ dictWord{20, 0, 125},
+ dictWord{20, 0, 126},
+ dictWord{148, 0, 127},
+ dictWord{7, 11, 1717},
+ dictWord{7, 11, 1769},
+ dictWord{138, 11, 546},
+ dictWord{7, 11, 1127},
+ dictWord{7, 11, 1572},
+ dictWord{10, 11, 297},
+ dictWord{10, 11, 422},
+ dictWord{11, 11, 764},
+ dictWord{11, 11, 810},
+ dictWord{12, 11, 264},
+ dictWord{13, 11, 102},
+ dictWord{13, 11, 300},
+ dictWord{13, 11, 484},
+ dictWord{14, 11, 147},
+ dictWord{14, 11, 229},
+ dictWord{17, 11, 71},
+ dictWord{18, 11, 118},
+ dictWord{147, 11, 120},
+ dictWord{6, 0, 1148},
+ dictWord{134, 0, 1586},
+ dictWord{132, 0, 775},
+ dictWord{135, 10, 954},
+ dictWord{133, 11, 864},
+ dictWord{133, 11, 928},
+ dictWord{138, 11, 189},
+ dictWord{135, 10, 1958},
+ dictWord{6, 10, 549},
+ dictWord{8, 10, 34},
+ dictWord{8, 10, 283},
+ dictWord{9, 10, 165},
+ dictWord{138, 10, 475},
+ dictWord{5, 10, 652},
+ dictWord{5, 10, 701},
+ dictWord{135, 10, 449},
+ dictWord{135, 11, 695},
+ dictWord{4, 10, 655},
+ dictWord{7, 10, 850},
+ dictWord{17, 10, 75},
+ dictWord{146, 10, 137},
+ dictWord{140, 11, 682},
+ dictWord{133, 11, 523},
+ dictWord{8, 0, 970},
+ dictWord{136, 10, 670},
+ dictWord{136, 11, 555},
+ dictWord{7, 11, 76},
+ dictWord{8, 11, 44},
+ dictWord{9, 11, 884},
+ dictWord{10, 11, 580},
+ dictWord{11, 11, 399},
+ dictWord{11, 11, 894},
+ dictWord{15, 11, 122},
+ dictWord{18, 11, 144},
+ dictWord{147, 11, 61},
+ dictWord{6, 10, 159},
+ dictWord{6, 10, 364},
+ dictWord{7, 10, 516},
+ dictWord{7, 10, 1439},
+ dictWord{137, 10, 518},
+ dictWord{4, 0, 71},
+ dictWord{5, 0, 376},
+ dictWord{7, 0, 119},
+ dictWord{138, 0, 665},
+ dictWord{141, 10, 151},
+ dictWord{11, 0, 827},
+ dictWord{14, 0, 34},
+ dictWord{143, 0, 148},
+ dictWord{133, 11, 518},
+ dictWord{4, 0, 479},
+ dictWord{135, 11, 1787},
+ dictWord{135, 11, 1852},
+ dictWord{135, 10, 993},
+ dictWord{7, 0, 607},
+ dictWord{136, 0, 99},
+ dictWord{134, 0, 1960},
+ dictWord{132, 0, 793},
+ dictWord{4, 0, 41},
+ dictWord{5, 0, 74},
+ dictWord{7, 0, 1627},
+ dictWord{11, 0, 871},
+ dictWord{140, 0, 619},
+ dictWord{7, 0, 94},
+ dictWord{11, 0, 329},
+ dictWord{11, 0, 965},
+ dictWord{12, 0, 241},
+ dictWord{14, 0, 354},
+ dictWord{15, 0, 22},
+ dictWord{148, 0, 63},
+ dictWord{7, 10, 501},
+ dictWord{9, 10, 111},
+ dictWord{10, 10, 141},
+ dictWord{11, 10, 332},
+ dictWord{13, 10, 43},
+ dictWord{13, 10, 429},
+ dictWord{14, 10, 130},
+ dictWord{14, 10, 415},
+ dictWord{145, 10, 102},
+ dictWord{9, 0, 209},
+ dictWord{137, 0, 300},
+ dictWord{134, 0, 1497},
+ dictWord{138, 11, 255},
+ dictWord{4, 11, 934},
+ dictWord{5, 11, 138},
+ dictWord{136, 11, 610},
+ dictWord{133, 0, 98},
+ dictWord{6, 0, 1316},
+ dictWord{10, 11, 804},
+ dictWord{138, 11, 832},
+ dictWord{8, 11, 96},
+ dictWord{9, 11, 36},
+ dictWord{10, 11, 607},
+ dictWord{11, 11, 423},
+ dictWord{11, 11, 442},
+ dictWord{12, 11, 309},
+ dictWord{14, 11, 199},
+ dictWord{15, 11, 90},
+ dictWord{145, 11, 110},
+ dictWord{132, 0, 463},
+ dictWord{5, 10, 149},
+ dictWord{136, 10, 233},
+ dictWord{133, 10, 935},
+ dictWord{4, 11, 652},
+ dictWord{8, 11, 320},
+ dictWord{9, 11, 13},
+ dictWord{9, 11, 398},
+ dictWord{9, 11, 727},
+ dictWord{10, 11, 75},
+ dictWord{10, 11, 184},
+ dictWord{10, 11, 230},
+ dictWord{10, 11, 564},
+ dictWord{10, 11, 569},
+ dictWord{11, 11, 973},
+ dictWord{12, 11, 70},
+ dictWord{12, 11, 189},
+ dictWord{13, 11, 57},
+ dictWord{13, 11, 257},
+ dictWord{22, 11, 6},
+ dictWord{150, 11, 16},
+ dictWord{142, 0, 291},
+ dictWord{12, 10, 582},
+ dictWord{146, 10, 131},
+ dictWord{136, 10, 801},
+ dictWord{133, 0, 984},
+ dictWord{145, 11, 116},
+ dictWord{4, 11, 692},
+ dictWord{133, 11, 321},
+ dictWord{4, 0, 182},
+ dictWord{6, 0, 205},
+ dictWord{135, 0, 220},
+ dictWord{4, 0, 42},
+ dictWord{9, 0, 205},
+ dictWord{9, 0, 786},
+ dictWord{138, 0, 659},
+ dictWord{6, 0, 801},
+ dictWord{11, 11, 130},
+ dictWord{140, 11, 609},
+ dictWord{132, 0, 635},
+ dictWord{5, 11, 345},
+ dictWord{135, 11, 1016},
+ dictWord{139, 0, 533},
+ dictWord{132, 0, 371},
+ dictWord{4, 0, 272},
+ dictWord{135, 0, 836},
+ dictWord{6, 0, 1282},
+ dictWord{135, 11, 1100},
+ dictWord{5, 0, 825},
+ dictWord{134, 0, 1640},
+ dictWord{135, 11, 1325},
+ dictWord{133, 11, 673},
+ dictWord{4, 11, 287},
+ dictWord{133, 11, 1018},
+ dictWord{135, 0, 357},
+ dictWord{6, 0, 467},
+ dictWord{137, 0, 879},
+ dictWord{7, 0, 317},
+ dictWord{135, 0, 569},
+ dictWord{6, 0, 924},
+ dictWord{134, 0, 1588},
+ dictWord{5, 11, 34},
+ dictWord{5, 10, 406},
+ dictWord{10, 11, 724},
+ dictWord{12, 11, 444},
+ dictWord{13, 11, 354},
+ dictWord{18, 11, 32},
+ dictWord{23, 11, 24},
+ dictWord{23, 11, 31},
+ dictWord{152, 11, 5},
+ dictWord{6, 0, 1795},
+ dictWord{6, 0, 1835},
+ dictWord{6, 0, 1836},
+ dictWord{6, 0, 1856},
+ dictWord{8, 0, 844},
+ dictWord{8, 0, 849},
+ dictWord{8, 0, 854},
+ dictWord{8, 0, 870},
+ dictWord{8, 0, 887},
+ dictWord{10, 0, 852},
+ dictWord{138, 0, 942},
+ dictWord{6, 10, 69},
+ dictWord{135, 10, 117},
+ dictWord{137, 0, 307},
+ dictWord{4, 0, 944},
+ dictWord{6, 0, 1799},
+ dictWord{6, 0, 1825},
+ dictWord{10, 0, 848},
+ dictWord{10, 0, 875},
+ dictWord{10, 0, 895},
+ dictWord{10, 0, 899},
+ dictWord{10, 0, 902},
+ dictWord{140, 0, 773},
+ dictWord{11, 0, 43},
+ dictWord{13, 0, 72},
+ dictWord{141, 0, 142},
+ dictWord{135, 10, 1830},
+ dictWord{134, 11, 382},
+ dictWord{4, 10, 432},
+ dictWord{135, 10, 824},
+ dictWord{132, 11, 329},
+ dictWord{7, 0, 1820},
+ dictWord{139, 11, 124},
+ dictWord{133, 10, 826},
+ dictWord{133, 0, 525},
+ dictWord{132, 11, 906},
+ dictWord{7, 11, 1940},
+ dictWord{136, 11, 366},
+ dictWord{138, 11, 10},
+ dictWord{4, 11, 123},
+ dictWord{4, 11, 649},
+ dictWord{5, 11, 605},
+ dictWord{7, 11, 1509},
+ dictWord{136, 11, 36},
+ dictWord{6, 0, 110},
+ dictWord{135, 0, 1681},
+ dictWord{133, 0, 493},
+ dictWord{133, 11, 767},
+ dictWord{4, 0, 174},
+ dictWord{135, 0, 911},
+ dictWord{138, 11, 786},
+ dictWord{8, 0, 417},
+ dictWord{137, 0, 782},
+ dictWord{133, 10, 1000},
+ dictWord{7, 0, 733},
+ dictWord{137, 0, 583},
+ dictWord{4, 10, 297},
+ dictWord{6, 10, 529},
+ dictWord{7, 10, 152},
+ dictWord{7, 10, 713},
+ dictWord{7, 10, 1845},
+ dictWord{8, 10, 710},
+ dictWord{8, 10, 717},
+ dictWord{12, 10, 639},
+ dictWord{140, 10, 685},
+ dictWord{4, 0, 32},
+ dictWord{5, 0, 215},
+ dictWord{6, 0, 269},
+ dictWord{7, 0, 1782},
+ dictWord{7, 0, 1892},
+ dictWord{10, 0, 16},
+ dictWord{11, 0, 822},
+ dictWord{11, 0, 954},
+ dictWord{141, 0, 481},
+ dictWord{4, 11, 273},
+ dictWord{5, 11, 658},
+ dictWord{133, 11, 995},
+ dictWord{136, 0, 477},
+ dictWord{134, 11, 72},
+ dictWord{135, 11, 1345},
+ dictWord{5, 0, 308},
+ dictWord{7, 0, 1088},
+ dictWord{4, 10, 520},
+ dictWord{135, 10, 575},
+ dictWord{133, 11, 589},
+ dictWord{5, 0, 126},
+ dictWord{8, 0, 297},
+ dictWord{9, 0, 366},
+ dictWord{140, 0, 374},
+ dictWord{7, 0, 1551},
+ dictWord{139, 0, 361},
+ dictWord{5, 11, 117},
+ dictWord{6, 11, 514},
+ dictWord{6, 11, 541},
+ dictWord{7, 11, 1164},
+ dictWord{7, 11, 1436},
+ dictWord{8, 11, 220},
+ dictWord{8, 11, 648},
+ dictWord{10, 11, 688},
+ dictWord{139, 11, 560},
+ dictWord{133, 11, 686},
+ dictWord{4, 0, 946},
+ dictWord{6, 0, 1807},
+ dictWord{8, 0, 871},
+ dictWord{10, 0, 854},
+ dictWord{10, 0, 870},
+ dictWord{10, 0, 888},
+ dictWord{10, 0, 897},
+ dictWord{10, 0, 920},
+ dictWord{12, 0, 722},
+ dictWord{12, 0, 761},
+ dictWord{12, 0, 763},
+ dictWord{12, 0, 764},
+ dictWord{14, 0, 454},
+ dictWord{14, 0, 465},
+ dictWord{16, 0, 107},
+ dictWord{18, 0, 167},
+ dictWord{18, 0, 168},
+ dictWord{146, 0, 172},
+ dictWord{132, 0, 175},
+ dictWord{135, 0, 1307},
+ dictWord{132, 0, 685},
+ dictWord{135, 11, 1834},
+ dictWord{133, 0, 797},
+ dictWord{6, 0, 745},
+ dictWord{6, 0, 858},
+ dictWord{134, 0, 963},
+ dictWord{133, 0, 565},
+ dictWord{5, 10, 397},
+ dictWord{6, 10, 154},
+ dictWord{7, 11, 196},
+ dictWord{7, 10, 676},
+ dictWord{8, 10, 443},
+ dictWord{8, 10, 609},
+ dictWord{9, 10, 24},
+ dictWord{9, 10, 325},
+ dictWord{10, 10, 35},
+ dictWord{10, 11, 765},
+ dictWord{11, 11, 347},
+ dictWord{11, 10, 535},
+ dictWord{11, 11, 552},
+ dictWord{11, 11, 576},
+ dictWord{11, 10, 672},
+ dictWord{11, 11, 790},
+ dictWord{11, 10, 1018},
+ dictWord{12, 11, 263},
+ dictWord{12, 10, 637},
+ dictWord{13, 11, 246},
+ dictWord{13, 11, 270},
+ dictWord{13, 11, 395},
+ dictWord{14, 11, 74},
+ dictWord{14, 11, 176},
+ dictWord{14, 11, 190},
+ dictWord{14, 11, 398},
+ dictWord{14, 11, 412},
+ dictWord{15, 11, 32},
+ dictWord{15, 11, 63},
+ dictWord{16, 10, 30},
+ dictWord{16, 11, 88},
+ dictWord{147, 11, 105},
+ dictWord{13, 11, 84},
+ dictWord{141, 11, 122},
+ dictWord{4, 0, 252},
+ dictWord{7, 0, 1068},
+ dictWord{10, 0, 434},
+ dictWord{11, 0, 228},
+ dictWord{11, 0, 426},
+ dictWord{13, 0, 231},
+ dictWord{18, 0, 106},
+ dictWord{148, 0, 87},
+ dictWord{137, 0, 826},
+ dictWord{4, 11, 589},
+ dictWord{139, 11, 282},
+ dictWord{5, 11, 381},
+ dictWord{135, 11, 1792},
+ dictWord{132, 0, 791},
+ dictWord{5, 0, 231},
+ dictWord{10, 0, 509},
+ dictWord{133, 10, 981},
+ dictWord{7, 0, 601},
+ dictWord{9, 0, 277},
+ dictWord{9, 0, 674},
+ dictWord{10, 0, 178},
+ dictWord{10, 0, 418},
+ dictWord{10, 0, 571},
+ dictWord{11, 0, 531},
+ dictWord{12, 0, 113},
+ dictWord{12, 0, 475},
+ dictWord{13, 0, 99},
+ dictWord{142, 0, 428},
+ dictWord{4, 10, 56},
+ dictWord{7, 11, 616},
+ dictWord{7, 10, 1791},
+ dictWord{8, 10, 607},
+ dictWord{8, 10, 651},
+ dictWord{10, 11, 413},
+ dictWord{11, 10, 465},
+ dictWord{11, 10, 835},
+ dictWord{12, 10, 337},
+ dictWord{141, 10, 480},
+ dictWord{7, 0, 1591},
+ dictWord{144, 0, 43},
+ dictWord{9, 10, 158},
+ dictWord{138, 10, 411},
+ dictWord{135, 0, 1683},
+ dictWord{8, 0, 289},
+ dictWord{11, 0, 45},
+ dictWord{12, 0, 278},
+ dictWord{140, 0, 537},
+ dictWord{6, 11, 120},
+ dictWord{7, 11, 1188},
+ dictWord{7, 11, 1710},
+ dictWord{8, 11, 286},
+ dictWord{9, 11, 667},
+ dictWord{11, 11, 592},
+ dictWord{139, 11, 730},
+ dictWord{136, 10, 617},
+ dictWord{135, 0, 1120},
+ dictWord{135, 11, 1146},
+ dictWord{139, 10, 563},
+ dictWord{4, 11, 352},
+ dictWord{4, 10, 369},
+ dictWord{135, 11, 687},
+ dictWord{143, 11, 38},
+ dictWord{4, 0, 399},
+ dictWord{5, 0, 119},
+ dictWord{5, 0, 494},
+ dictWord{7, 0, 751},
+ dictWord{9, 0, 556},
+ dictWord{14, 11, 179},
+ dictWord{15, 11, 151},
+ dictWord{150, 11, 11},
+ dictWord{4, 11, 192},
+ dictWord{5, 11, 49},
+ dictWord{6, 11, 200},
+ dictWord{6, 11, 293},
+ dictWord{6, 11, 1696},
+ dictWord{135, 11, 488},
+ dictWord{4, 0, 398},
+ dictWord{133, 0, 660},
+ dictWord{7, 0, 1030},
+ dictWord{134, 10, 622},
+ dictWord{135, 11, 595},
+ dictWord{141, 0, 168},
+ dictWord{132, 11, 147},
+ dictWord{7, 0, 973},
+ dictWord{10, 10, 624},
+ dictWord{142, 10, 279},
+ dictWord{132, 10, 363},
+ dictWord{132, 0, 642},
+ dictWord{133, 11, 934},
+ dictWord{134, 0, 1615},
+ dictWord{7, 11, 505},
+ dictWord{135, 11, 523},
+ dictWord{7, 0, 594},
+ dictWord{7, 0, 851},
+ dictWord{7, 0, 1858},
+ dictWord{9, 0, 411},
+ dictWord{9, 0, 574},
+ dictWord{9, 0, 666},
+ dictWord{9, 0, 737},
+ dictWord{10, 0, 346},
+ dictWord{10, 0, 712},
+ dictWord{11, 0, 246},
+ dictWord{11, 0, 432},
+ dictWord{11, 0, 517},
+ dictWord{11, 0, 647},
+ dictWord{11, 0, 679},
+ dictWord{11, 0, 727},
+ dictWord{12, 0, 304},
+ dictWord{12, 0, 305},
+ dictWord{12, 0, 323},
+ dictWord{12, 0, 483},
+ dictWord{12, 0, 572},
+ dictWord{12, 0, 593},
+ dictWord{12, 0, 602},
+ dictWord{13, 0, 95},
+ dictWord{13, 0, 101},
+ dictWord{13, 0, 171},
+ dictWord{13, 0, 315},
+ dictWord{13, 0, 378},
+ dictWord{13, 0, 425},
+ dictWord{13, 0, 475},
+ dictWord{14, 0, 63},
+ dictWord{14, 0, 380},
+ dictWord{14, 0, 384},
+ dictWord{15, 0, 133},
+ dictWord{18, 0, 112},
+ dictWord{148, 0, 72},
+ dictWord{135, 0, 1093},
+ dictWord{132, 0, 679},
+ dictWord{8, 0, 913},
+ dictWord{10, 0, 903},
+ dictWord{10, 0, 915},
+ dictWord{12, 0, 648},
+ dictWord{12, 0, 649},
+ dictWord{14, 0, 455},
+ dictWord{16, 0, 112},
+ dictWord{138, 11, 438},
+ dictWord{137, 0, 203},
+ dictWord{134, 10, 292},
+ dictWord{134, 0, 1492},
+ dictWord{7, 0, 1374},
+ dictWord{8, 0, 540},
+ dictWord{5, 10, 177},
+ dictWord{6, 10, 616},
+ dictWord{7, 10, 827},
+ dictWord{9, 10, 525},
+ dictWord{138, 10, 656},
+ dictWord{135, 0, 1486},
+ dictWord{9, 0, 714},
+ dictWord{138, 10, 31},
+ dictWord{136, 0, 825},
+ dictWord{134, 0, 1511},
+ dictWord{132, 11, 637},
+ dictWord{134, 0, 952},
+ dictWord{4, 10, 161},
+ dictWord{133, 10, 631},
+ dictWord{5, 0, 143},
+ dictWord{5, 0, 769},
+ dictWord{6, 0, 1760},
+ dictWord{7, 0, 682},
+ dictWord{7, 0, 1992},
+ dictWord{136, 0, 736},
+ dictWord{132, 0, 700},
+ dictWord{134, 0, 1540},
+ dictWord{132, 11, 777},
+ dictWord{9, 11, 867},
+ dictWord{138, 11, 837},
+ dictWord{7, 0, 1557},
+ dictWord{135, 10, 1684},
+ dictWord{133, 0, 860},
+ dictWord{6, 0, 422},
+ dictWord{7, 0, 0},
+ dictWord{7, 0, 1544},
+ dictWord{9, 0, 605},
+ dictWord{11, 0, 990},
+ dictWord{12, 0, 235},
+ dictWord{12, 0, 453},
+ dictWord{13, 0, 47},
+ dictWord{13, 0, 266},
+ dictWord{9, 10, 469},
+ dictWord{9, 10, 709},
+ dictWord{12, 10, 512},
+ dictWord{14, 10, 65},
+ dictWord{145, 10, 12},
+ dictWord{11, 0, 807},
+ dictWord{10, 10, 229},
+ dictWord{11, 10, 73},
+ dictWord{139, 10, 376},
+ dictWord{6, 11, 170},
+ dictWord{7, 11, 1080},
+ dictWord{8, 11, 395},
+ dictWord{8, 11, 487},
+ dictWord{11, 11, 125},
+ dictWord{141, 11, 147},
+ dictWord{5, 0, 515},
+ dictWord{137, 0, 131},
+ dictWord{7, 0, 1605},
+ dictWord{11, 0, 962},
+ dictWord{146, 0, 139},
+ dictWord{132, 0, 646},
+ dictWord{4, 0, 396},
+ dictWord{7, 0, 728},
+ dictWord{9, 0, 117},
+ dictWord{13, 0, 202},
+ dictWord{148, 0, 51},
+ dictWord{6, 0, 121},
+ dictWord{6, 0, 124},
+ dictWord{6, 0, 357},
+ dictWord{7, 0, 1138},
+ dictWord{7, 0, 1295},
+ dictWord{8, 0, 162},
+ dictWord{8, 0, 508},
+ dictWord{11, 0, 655},
+ dictWord{4, 11, 535},
+ dictWord{6, 10, 558},
+ dictWord{7, 10, 651},
+ dictWord{8, 11, 618},
+ dictWord{9, 10, 0},
+ dictWord{10, 10, 34},
+ dictWord{139, 10, 1008},
+ dictWord{135, 11, 1245},
+ dictWord{138, 0, 357},
+ dictWord{150, 11, 23},
+ dictWord{133, 0, 237},
+ dictWord{135, 0, 1784},
+ dictWord{7, 10, 1832},
+ dictWord{138, 10, 374},
+ dictWord{132, 0, 713},
+ dictWord{132, 11, 46},
+ dictWord{6, 0, 1536},
+ dictWord{10, 0, 348},
+ dictWord{5, 11, 811},
+ dictWord{6, 11, 1679},
+ dictWord{6, 11, 1714},
+ dictWord{135, 11, 2032},
+ dictWord{11, 11, 182},
+ dictWord{142, 11, 195},
+ dictWord{6, 0, 523},
+ dictWord{7, 0, 738},
+ dictWord{7, 10, 771},
+ dictWord{7, 10, 1731},
+ dictWord{9, 10, 405},
+ dictWord{138, 10, 421},
+ dictWord{7, 11, 1458},
+ dictWord{9, 11, 407},
+ dictWord{139, 11, 15},
+ dictWord{6, 11, 34},
+ dictWord{7, 11, 69},
+ dictWord{7, 11, 640},
+ dictWord{7, 11, 1089},
+ dictWord{8, 11, 708},
+ dictWord{8, 11, 721},
+ dictWord{9, 11, 363},
+ dictWord{9, 11, 643},
+ dictWord{10, 11, 628},
+ dictWord{148, 11, 98},
+ dictWord{133, 0, 434},
+ dictWord{135, 0, 1877},
+ dictWord{7, 0, 571},
+ dictWord{138, 0, 366},
+ dictWord{5, 10, 881},
+ dictWord{133, 10, 885},
+ dictWord{9, 0, 513},
+ dictWord{10, 0, 25},
+ dictWord{10, 0, 39},
+ dictWord{12, 0, 122},
+ dictWord{140, 0, 187},
+ dictWord{132, 0, 580},
+ dictWord{5, 10, 142},
+ dictWord{134, 10, 546},
+ dictWord{132, 11, 462},
+ dictWord{137, 0, 873},
+ dictWord{5, 10, 466},
+ dictWord{11, 10, 571},
+ dictWord{12, 10, 198},
+ dictWord{13, 10, 283},
+ dictWord{14, 10, 186},
+ dictWord{15, 10, 21},
+ dictWord{143, 10, 103},
+ dictWord{7, 0, 171},
+ dictWord{4, 10, 185},
+ dictWord{5, 10, 257},
+ dictWord{5, 10, 839},
+ dictWord{5, 10, 936},
+ dictWord{9, 10, 399},
+ dictWord{10, 10, 258},
+ dictWord{10, 10, 395},
+ dictWord{10, 10, 734},
+ dictWord{11, 10, 1014},
+ dictWord{12, 10, 23},
+ dictWord{13, 10, 350},
+ dictWord{14, 10, 150},
+ dictWord{147, 10, 6},
+ dictWord{134, 0, 625},
+ dictWord{7, 0, 107},
+ dictWord{7, 0, 838},
+ dictWord{8, 0, 550},
+ dictWord{138, 0, 401},
+ dictWord{5, 11, 73},
+ dictWord{6, 11, 23},
+ dictWord{134, 11, 338},
+ dictWord{4, 0, 943},
+ dictWord{6, 0, 1850},
+ dictWord{12, 0, 713},
+ dictWord{142, 0, 434},
+ dictWord{11, 0, 588},
+ dictWord{11, 0, 864},
+ dictWord{11, 0, 936},
+ dictWord{11, 0, 968},
+ dictWord{12, 0, 73},
+ dictWord{12, 0, 343},
+ dictWord{12, 0, 394},
+ dictWord{13, 0, 275},
+ dictWord{14, 0, 257},
+ dictWord{15, 0, 160},
+ dictWord{7, 10, 404},
+ dictWord{7, 10, 1377},
+ dictWord{7, 10, 1430},
+ dictWord{7, 10, 2017},
+ dictWord{8, 10, 149},
+ dictWord{8, 10, 239},
+ dictWord{8, 10, 512},
+ dictWord{8, 10, 793},
+ dictWord{8, 10, 818},
+ dictWord{9, 10, 474},
+ dictWord{9, 10, 595},
+ dictWord{10, 10, 122},
+ dictWord{10, 10, 565},
+ dictWord{10, 10, 649},
+ dictWord{10, 10, 783},
+ dictWord{11, 10, 239},
+ dictWord{11, 10, 295},
+ dictWord{11, 10, 447},
+ dictWord{11, 10, 528},
+ dictWord{11, 10, 639},
+ dictWord{11, 10, 800},
+ dictWord{12, 10, 25},
+ dictWord{12, 10, 157},
+ dictWord{12, 10, 316},
+ dictWord{12, 10, 390},
+ dictWord{12, 10, 391},
+ dictWord{12, 10, 395},
+ dictWord{12, 10, 478},
+ dictWord{12, 10, 503},
+ dictWord{12, 10, 592},
+ dictWord{12, 10, 680},
+ dictWord{13, 10, 50},
+ dictWord{13, 10, 53},
+ dictWord{13, 10, 132},
+ dictWord{13, 10, 198},
+ dictWord{13, 10, 322},
+ dictWord{13, 10, 415},
+ dictWord{13, 10, 511},
+ dictWord{14, 10, 71},
+ dictWord{14, 10, 395},
+ dictWord{15, 10, 71},
+ dictWord{15, 10, 136},
+ dictWord{17, 10, 123},
+ dictWord{18, 10, 93},
+ dictWord{147, 10, 58},
+ dictWord{133, 0, 768},
+ dictWord{11, 0, 103},
+ dictWord{142, 0, 0},
+ dictWord{136, 10, 712},
+ dictWord{132, 0, 799},
+ dictWord{132, 0, 894},
+ dictWord{7, 11, 725},
+ dictWord{8, 11, 498},
+ dictWord{139, 11, 268},
+ dictWord{135, 11, 1798},
+ dictWord{135, 11, 773},
+ dictWord{141, 11, 360},
+ dictWord{4, 10, 377},
+ dictWord{152, 10, 13},
+ dictWord{135, 0, 1673},
+ dictWord{132, 11, 583},
+ dictWord{134, 0, 1052},
+ dictWord{133, 11, 220},
+ dictWord{140, 11, 69},
+ dictWord{132, 11, 544},
+ dictWord{4, 10, 180},
+ dictWord{135, 10, 1906},
+ dictWord{134, 0, 272},
+ dictWord{4, 0, 441},
+ dictWord{134, 0, 1421},
+ dictWord{4, 0, 9},
+ dictWord{5, 0, 128},
+ dictWord{7, 0, 368},
+ dictWord{11, 0, 480},
+ dictWord{148, 0, 3},
+ dictWord{5, 11, 176},
+ dictWord{6, 11, 437},
+ dictWord{6, 11, 564},
+ dictWord{11, 11, 181},
+ dictWord{141, 11, 183},
+ dictWord{132, 10, 491},
+ dictWord{7, 0, 1182},
+ dictWord{141, 11, 67},
+ dictWord{6, 0, 1346},
+ dictWord{4, 10, 171},
+ dictWord{138, 10, 234},
+ dictWord{4, 10, 586},
+ dictWord{7, 10, 1186},
+ dictWord{138, 10, 631},
+ dictWord{136, 0, 682},
+ dictWord{134, 0, 1004},
+ dictWord{15, 0, 24},
+ dictWord{143, 11, 24},
+ dictWord{134, 0, 968},
+ dictWord{4, 0, 2},
+ dictWord{6, 0, 742},
+ dictWord{6, 0, 793},
+ dictWord{7, 0, 545},
+ dictWord{7, 0, 894},
+ dictWord{9, 10, 931},
+ dictWord{10, 10, 334},
+ dictWord{148, 10, 71},
+ dictWord{136, 11, 600},
+ dictWord{133, 10, 765},
+ dictWord{9, 0, 769},
+ dictWord{140, 0, 185},
+ dictWord{4, 11, 790},
+ dictWord{5, 11, 273},
+ dictWord{134, 11, 394},
+ dictWord{7, 0, 474},
+ dictWord{137, 0, 578},
+ dictWord{4, 11, 135},
+ dictWord{6, 11, 127},
+ dictWord{7, 11, 1185},
+ dictWord{7, 11, 1511},
+ dictWord{8, 11, 613},
+ dictWord{11, 11, 5},
+ dictWord{12, 11, 133},
+ dictWord{12, 11, 495},
+ dictWord{12, 11, 586},
+ dictWord{14, 11, 385},
+ dictWord{15, 11, 118},
+ dictWord{17, 11, 20},
+ dictWord{146, 11, 98},
+ dictWord{133, 10, 424},
+ dictWord{5, 0, 530},
+ dictWord{142, 0, 113},
+ dictWord{6, 11, 230},
+ dictWord{7, 11, 961},
+ dictWord{7, 11, 1085},
+ dictWord{136, 11, 462},
+ dictWord{7, 11, 1954},
+ dictWord{137, 11, 636},
+ dictWord{136, 10, 714},
+ dictWord{149, 11, 6},
+ dictWord{135, 10, 685},
+ dictWord{9, 10, 420},
+ dictWord{10, 10, 269},
+ dictWord{10, 10, 285},
+ dictWord{10, 10, 576},
+ dictWord{11, 10, 397},
+ dictWord{13, 10, 175},
+ dictWord{145, 10, 90},
+ dictWord{132, 10, 429},
+ dictWord{5, 0, 556},
+ dictWord{5, 11, 162},
+ dictWord{136, 11, 68},
+ dictWord{132, 11, 654},
+ dictWord{4, 11, 156},
+ dictWord{7, 11, 998},
+ dictWord{7, 11, 1045},
+ dictWord{7, 11, 1860},
+ dictWord{9, 11, 48},
+ dictWord{9, 11, 692},
+ dictWord{11, 11, 419},
+ dictWord{139, 11, 602},
+ dictWord{6, 0, 1317},
+ dictWord{8, 0, 16},
+ dictWord{9, 0, 825},
+ dictWord{12, 0, 568},
+ dictWord{7, 11, 1276},
+ dictWord{8, 11, 474},
+ dictWord{137, 11, 652},
+ dictWord{18, 0, 97},
+ dictWord{7, 10, 18},
+ dictWord{7, 10, 699},
+ dictWord{7, 10, 1966},
+ dictWord{8, 10, 752},
+ dictWord{9, 10, 273},
+ dictWord{9, 10, 412},
+ dictWord{9, 10, 703},
+ dictWord{10, 10, 71},
+ dictWord{10, 10, 427},
+ dictWord{138, 10, 508},
+ dictWord{10, 0, 703},
+ dictWord{7, 11, 1454},
+ dictWord{138, 11, 703},
+ dictWord{4, 10, 53},
+ dictWord{5, 10, 186},
+ dictWord{135, 10, 752},
+ dictWord{134, 0, 892},
+ dictWord{134, 0, 1571},
+ dictWord{8, 10, 575},
+ dictWord{10, 10, 289},
+ dictWord{139, 10, 319},
+ dictWord{6, 0, 186},
+ dictWord{137, 0, 426},
+ dictWord{134, 0, 1101},
+ dictWord{132, 10, 675},
+ dictWord{132, 0, 585},
+ dictWord{6, 0, 1870},
+ dictWord{137, 0, 937},
+ dictWord{152, 11, 10},
+ dictWord{9, 11, 197},
+ dictWord{10, 11, 300},
+ dictWord{12, 11, 473},
+ dictWord{13, 11, 90},
+ dictWord{141, 11, 405},
+ dictWord{4, 0, 93},
+ dictWord{5, 0, 252},
+ dictWord{6, 0, 229},
+ dictWord{7, 0, 291},
+ dictWord{9, 0, 550},
+ dictWord{139, 0, 644},
+ dictWord{137, 0, 749},
+ dictWord{9, 0, 162},
+ dictWord{6, 10, 209},
+ dictWord{8, 10, 468},
+ dictWord{9, 10, 210},
+ dictWord{11, 10, 36},
+ dictWord{12, 10, 28},
+ dictWord{12, 10, 630},
+ dictWord{13, 10, 21},
+ dictWord{13, 10, 349},
+ dictWord{14, 10, 7},
+ dictWord{145, 10, 13},
+ dictWord{132, 0, 381},
+ dictWord{132, 11, 606},
+ dictWord{4, 10, 342},
+ dictWord{135, 10, 1179},
+ dictWord{7, 11, 1587},
+ dictWord{7, 11, 1707},
+ dictWord{10, 11, 528},
+ dictWord{139, 11, 504},
+ dictWord{12, 11, 39},
+ dictWord{13, 11, 265},
+ dictWord{141, 11, 439},
+ dictWord{4, 10, 928},
+ dictWord{133, 10, 910},
+ dictWord{7, 10, 1838},
+ dictWord{7, 11, 1978},
+ dictWord{136, 11, 676},
+ dictWord{6, 0, 762},
+ dictWord{6, 0, 796},
+ dictWord{134, 0, 956},
+ dictWord{4, 10, 318},
+ dictWord{4, 10, 496},
+ dictWord{7, 10, 856},
+ dictWord{139, 10, 654},
+ dictWord{137, 11, 242},
+ dictWord{4, 11, 361},
+ dictWord{133, 11, 315},
+ dictWord{132, 11, 461},
+ dictWord{132, 11, 472},
+ dictWord{132, 0, 857},
+ dictWord{5, 0, 21},
+ dictWord{6, 0, 77},
+ dictWord{6, 0, 157},
+ dictWord{7, 0, 974},
+ dictWord{7, 0, 1301},
+ dictWord{7, 0, 1339},
+ dictWord{7, 0, 1490},
+ dictWord{7, 0, 1873},
+ dictWord{9, 0, 628},
+ dictWord{7, 10, 915},
+ dictWord{8, 10, 247},
+ dictWord{147, 10, 0},
+ dictWord{4, 10, 202},
+ dictWord{5, 10, 382},
+ dictWord{6, 10, 454},
+ dictWord{7, 10, 936},
+ dictWord{7, 10, 1803},
+ dictWord{8, 10, 758},
+ dictWord{9, 10, 375},
+ dictWord{9, 10, 895},
+ dictWord{10, 10, 743},
+ dictWord{10, 10, 792},
+ dictWord{11, 10, 978},
+ dictWord{11, 10, 1012},
+ dictWord{142, 10, 109},
+ dictWord{7, 11, 617},
+ dictWord{10, 11, 498},
+ dictWord{11, 11, 501},
+ dictWord{12, 11, 16},
+ dictWord{140, 11, 150},
+ dictWord{7, 10, 1150},
+ dictWord{7, 10, 1425},
+ dictWord{7, 10, 1453},
+ dictWord{10, 11, 747},
+ dictWord{140, 10, 513},
+ dictWord{133, 11, 155},
+ dictWord{11, 0, 919},
+ dictWord{141, 0, 409},
+ dictWord{138, 10, 791},
+ dictWord{10, 0, 633},
+ dictWord{139, 11, 729},
+ dictWord{7, 11, 163},
+ dictWord{8, 11, 319},
+ dictWord{9, 11, 402},
+ dictWord{10, 11, 24},
+ dictWord{10, 11, 681},
+ dictWord{11, 11, 200},
+ dictWord{11, 11, 567},
+ dictWord{12, 11, 253},
+ dictWord{12, 11, 410},
+ dictWord{142, 11, 219},
+ dictWord{5, 11, 475},
+ dictWord{7, 11, 1780},
+ dictWord{9, 11, 230},
+ dictWord{11, 11, 297},
+ dictWord{11, 11, 558},
+ dictWord{14, 11, 322},
+ dictWord{147, 11, 76},
+ dictWord{7, 0, 332},
+ dictWord{6, 10, 445},
+ dictWord{137, 10, 909},
+ dictWord{135, 11, 1956},
+ dictWord{136, 11, 274},
+ dictWord{134, 10, 578},
+ dictWord{135, 0, 1489},
+ dictWord{135, 11, 1848},
+ dictWord{5, 11, 944},
+ dictWord{134, 11, 1769},
+ dictWord{132, 11, 144},
+ dictWord{136, 10, 766},
+ dictWord{4, 0, 832},
+ dictWord{135, 10, 541},
+ dictWord{8, 0, 398},
+ dictWord{9, 0, 681},
+ dictWord{139, 0, 632},
+ dictWord{136, 0, 645},
+ dictWord{9, 0, 791},
+ dictWord{10, 0, 93},
+ dictWord{16, 0, 13},
+ dictWord{17, 0, 23},
+ dictWord{18, 0, 135},
+ dictWord{19, 0, 12},
+ dictWord{20, 0, 1},
+ dictWord{20, 0, 12},
+ dictWord{148, 0, 14},
+ dictWord{6, 11, 247},
+ dictWord{137, 11, 555},
+ dictWord{134, 0, 20},
+ dictWord{132, 0, 800},
+ dictWord{135, 0, 1841},
+ dictWord{139, 10, 983},
+ dictWord{137, 10, 768},
+ dictWord{132, 10, 584},
+ dictWord{141, 11, 51},
+ dictWord{6, 0, 1993},
+ dictWord{4, 11, 620},
+ dictWord{138, 11, 280},
+ dictWord{136, 0, 769},
+ dictWord{11, 0, 290},
+ dictWord{11, 0, 665},
+ dictWord{7, 11, 1810},
+ dictWord{11, 11, 866},
+ dictWord{12, 11, 103},
+ dictWord{13, 11, 495},
+ dictWord{17, 11, 67},
+ dictWord{147, 11, 74},
+ dictWord{134, 0, 1426},
+ dictWord{139, 0, 60},
+ dictWord{4, 10, 326},
+ dictWord{135, 10, 1770},
+ dictWord{7, 0, 1874},
+ dictWord{9, 0, 641},
+ dictWord{132, 10, 226},
+ dictWord{6, 0, 644},
+ dictWord{5, 10, 426},
+ dictWord{8, 10, 30},
+ dictWord{9, 10, 2},
+ dictWord{11, 10, 549},
+ dictWord{147, 10, 122},
+ dictWord{5, 11, 428},
+ dictWord{138, 11, 442},
+ dictWord{135, 11, 1871},
+ dictWord{135, 0, 1757},
+ dictWord{147, 10, 117},
+ dictWord{135, 0, 937},
+ dictWord{135, 0, 1652},
+ dictWord{6, 0, 654},
+ dictWord{134, 0, 1476},
+ dictWord{133, 11, 99},
+ dictWord{135, 0, 527},
+ dictWord{132, 10, 345},
+ dictWord{4, 10, 385},
+ dictWord{4, 11, 397},
+ dictWord{7, 10, 265},
+ dictWord{135, 10, 587},
+ dictWord{4, 0, 579},
+ dictWord{5, 0, 226},
+ dictWord{5, 0, 323},
+ dictWord{135, 0, 960},
+ dictWord{134, 0, 1486},
+ dictWord{8, 11, 502},
+ dictWord{144, 11, 9},
+ dictWord{4, 10, 347},
+ dictWord{5, 10, 423},
+ dictWord{5, 10, 996},
+ dictWord{135, 10, 1329},
+ dictWord{7, 11, 727},
+ dictWord{146, 11, 73},
+ dictWord{4, 11, 485},
+ dictWord{7, 11, 353},
+ dictWord{7, 10, 1259},
+ dictWord{7, 11, 1523},
+ dictWord{9, 10, 125},
+ dictWord{139, 10, 65},
+ dictWord{6, 0, 325},
+ dictWord{5, 10, 136},
+ dictWord{6, 11, 366},
+ dictWord{7, 11, 1384},
+ dictWord{7, 11, 1601},
+ dictWord{136, 10, 644},
+ dictWord{138, 11, 160},
+ dictWord{6, 0, 1345},
+ dictWord{137, 11, 282},
+ dictWord{18, 0, 91},
+ dictWord{147, 0, 70},
+ dictWord{136, 0, 404},
+ dictWord{4, 11, 157},
+ dictWord{133, 11, 471},
+ dictWord{133, 0, 973},
+ dictWord{6, 0, 135},
+ dictWord{135, 0, 1176},
+ dictWord{8, 11, 116},
+ dictWord{11, 11, 551},
+ dictWord{142, 11, 159},
+ dictWord{4, 0, 549},
+ dictWord{4, 10, 433},
+ dictWord{133, 10, 719},
+ dictWord{136, 0, 976},
+ dictWord{5, 11, 160},
+ dictWord{7, 11, 363},
+ dictWord{7, 11, 589},
+ dictWord{10, 11, 170},
+ dictWord{141, 11, 55},
+ dictWord{144, 0, 21},
+ dictWord{144, 0, 51},
+ dictWord{135, 0, 314},
+ dictWord{135, 10, 1363},
+ dictWord{4, 11, 108},
+ dictWord{7, 11, 405},
+ dictWord{10, 11, 491},
+ dictWord{139, 11, 498},
+ dictWord{146, 0, 4},
+ dictWord{4, 10, 555},
+ dictWord{8, 10, 536},
+ dictWord{10, 10, 288},
+ dictWord{139, 10, 1005},
+ dictWord{135, 11, 1005},
+ dictWord{6, 0, 281},
+ dictWord{7, 0, 6},
+ dictWord{8, 0, 282},
+ dictWord{8, 0, 480},
+ dictWord{8, 0, 499},
+ dictWord{9, 0, 198},
+ dictWord{10, 0, 143},
+ dictWord{10, 0, 169},
+ dictWord{10, 0, 211},
+ dictWord{10, 0, 417},
+ dictWord{10, 0, 574},
+ dictWord{11, 0, 147},
+ dictWord{11, 0, 395},
+ dictWord{12, 0, 75},
+ dictWord{12, 0, 407},
+ dictWord{12, 0, 608},
+ dictWord{13, 0, 500},
+ dictWord{142, 0, 251},
+ dictWord{6, 0, 1093},
+ dictWord{6, 0, 1405},
+ dictWord{9, 10, 370},
+ dictWord{138, 10, 90},
+ dictWord{4, 11, 926},
+ dictWord{133, 11, 983},
+ dictWord{135, 0, 1776},
+ dictWord{134, 0, 1528},
+ dictWord{132, 0, 419},
+ dictWord{132, 11, 538},
+ dictWord{6, 11, 294},
+ dictWord{7, 11, 1267},
+ dictWord{136, 11, 624},
+ dictWord{135, 11, 1772},
+ dictWord{138, 11, 301},
+ dictWord{4, 10, 257},
+ dictWord{135, 10, 2031},
+ dictWord{4, 0, 138},
+ dictWord{7, 0, 1012},
+ dictWord{7, 0, 1280},
+ dictWord{9, 0, 76},
+ dictWord{135, 10, 1768},
+ dictWord{132, 11, 757},
+ dictWord{5, 0, 29},
+ dictWord{140, 0, 638},
+ dictWord{7, 11, 655},
+ dictWord{135, 11, 1844},
+ dictWord{7, 0, 1418},
+ dictWord{6, 11, 257},
+ dictWord{135, 11, 1522},
+ dictWord{8, 11, 469},
+ dictWord{138, 11, 47},
+ dictWord{142, 11, 278},
+ dictWord{6, 10, 83},
+ dictWord{6, 10, 1733},
+ dictWord{135, 10, 1389},
+ dictWord{11, 11, 204},
+ dictWord{11, 11, 243},
+ dictWord{140, 11, 293},
+ dictWord{135, 11, 1875},
+ dictWord{6, 0, 1710},
+ dictWord{135, 0, 2038},
+ dictWord{137, 11, 299},
+ dictWord{4, 0, 17},
+ dictWord{5, 0, 23},
+ dictWord{7, 0, 995},
+ dictWord{11, 0, 383},
+ dictWord{11, 0, 437},
+ dictWord{12, 0, 460},
+ dictWord{140, 0, 532},
+ dictWord{133, 0, 862},
+ dictWord{137, 10, 696},
+ dictWord{6, 0, 592},
+ dictWord{138, 0, 946},
+ dictWord{138, 11, 599},
+ dictWord{7, 10, 1718},
+ dictWord{9, 10, 95},
+ dictWord{9, 10, 274},
+ dictWord{10, 10, 279},
+ dictWord{10, 10, 317},
+ dictWord{10, 10, 420},
+ dictWord{11, 10, 303},
+ dictWord{11, 10, 808},
+ dictWord{12, 10, 134},
+ dictWord{12, 10, 367},
+ dictWord{13, 10, 149},
+ dictWord{13, 10, 347},
+ dictWord{14, 10, 349},
+ dictWord{14, 10, 406},
+ dictWord{18, 10, 22},
+ dictWord{18, 10, 89},
+ dictWord{18, 10, 122},
+ dictWord{147, 10, 47},
+ dictWord{8, 0, 70},
+ dictWord{12, 0, 171},
+ dictWord{141, 0, 272},
+ dictWord{133, 10, 26},
+ dictWord{132, 10, 550},
+ dictWord{137, 0, 812},
+ dictWord{10, 0, 233},
+ dictWord{139, 0, 76},
+ dictWord{134, 0, 988},
+ dictWord{134, 0, 442},
+ dictWord{136, 10, 822},
+ dictWord{7, 0, 896},
+ dictWord{4, 10, 902},
+ dictWord{5, 10, 809},
+ dictWord{134, 10, 122},
+ dictWord{5, 11, 150},
+ dictWord{7, 11, 106},
+ dictWord{8, 11, 603},
+ dictWord{9, 11, 593},
+ dictWord{9, 11, 634},
+ dictWord{10, 11, 44},
+ dictWord{10, 11, 173},
+ dictWord{11, 11, 462},
+ dictWord{11, 11, 515},
+ dictWord{13, 11, 216},
+ dictWord{13, 11, 288},
+ dictWord{142, 11, 400},
+ dictWord{136, 0, 483},
+ dictWord{135, 10, 262},
+ dictWord{6, 0, 1709},
+ dictWord{133, 10, 620},
+ dictWord{4, 10, 34},
+ dictWord{5, 10, 574},
+ dictWord{7, 10, 279},
+ dictWord{7, 10, 1624},
+ dictWord{136, 10, 601},
+ dictWord{137, 10, 170},
+ dictWord{147, 0, 119},
+ dictWord{12, 11, 108},
+ dictWord{141, 11, 291},
+ dictWord{11, 0, 69},
+ dictWord{12, 0, 105},
+ dictWord{12, 0, 117},
+ dictWord{13, 0, 213},
+ dictWord{14, 0, 13},
+ dictWord{14, 0, 62},
+ dictWord{14, 0, 177},
+ dictWord{14, 0, 421},
+ dictWord{15, 0, 19},
+ dictWord{146, 0, 141},
+ dictWord{137, 0, 309},
+ dictWord{11, 11, 278},
+ dictWord{142, 11, 73},
+ dictWord{7, 0, 608},
+ dictWord{7, 0, 976},
+ dictWord{9, 0, 146},
+ dictWord{10, 0, 206},
+ dictWord{10, 0, 596},
+ dictWord{13, 0, 218},
+ dictWord{142, 0, 153},
+ dictWord{133, 10, 332},
+ dictWord{6, 10, 261},
+ dictWord{8, 10, 182},
+ dictWord{139, 10, 943},
+ dictWord{4, 11, 493},
+ dictWord{144, 11, 55},
+ dictWord{134, 10, 1721},
+ dictWord{132, 0, 768},
+ dictWord{4, 10, 933},
+ dictWord{133, 10, 880},
+ dictWord{7, 11, 555},
+ dictWord{7, 11, 1316},
+ dictWord{7, 11, 1412},
+ dictWord{7, 11, 1839},
+ dictWord{9, 11, 192},
+ dictWord{9, 11, 589},
+ dictWord{11, 11, 241},
+ dictWord{11, 11, 676},
+ dictWord{11, 11, 811},
+ dictWord{11, 11, 891},
+ dictWord{12, 11, 140},
+ dictWord{12, 11, 346},
+ dictWord{12, 11, 479},
+ dictWord{13, 11, 30},
+ dictWord{13, 11, 49},
+ dictWord{13, 11, 381},
+ dictWord{14, 11, 188},
+ dictWord{15, 11, 150},
+ dictWord{16, 11, 76},
+ dictWord{18, 11, 30},
+ dictWord{148, 11, 52},
+ dictWord{4, 0, 518},
+ dictWord{135, 0, 1136},
+ dictWord{6, 11, 568},
+ dictWord{7, 11, 112},
+ dictWord{7, 11, 1804},
+ dictWord{8, 11, 362},
+ dictWord{8, 11, 410},
+ dictWord{8, 11, 830},
+ dictWord{9, 11, 514},
+ dictWord{11, 11, 649},
+ dictWord{142, 11, 157},
+ dictWord{135, 11, 673},
+ dictWord{8, 0, 689},
+ dictWord{137, 0, 863},
+ dictWord{4, 0, 18},
+ dictWord{7, 0, 145},
+ dictWord{7, 0, 444},
+ dictWord{7, 0, 1278},
+ dictWord{8, 0, 49},
+ dictWord{8, 0, 400},
+ dictWord{9, 0, 71},
+ dictWord{9, 0, 250},
+ dictWord{10, 0, 459},
+ dictWord{12, 0, 160},
+ dictWord{16, 0, 24},
+ dictWord{132, 11, 625},
+ dictWord{140, 0, 1020},
+ dictWord{4, 0, 997},
+ dictWord{6, 0, 1946},
+ dictWord{6, 0, 1984},
+ dictWord{134, 0, 1998},
+ dictWord{6, 11, 16},
+ dictWord{6, 11, 158},
+ dictWord{7, 11, 43},
+ dictWord{7, 11, 129},
+ dictWord{7, 11, 181},
+ dictWord{8, 11, 276},
+ dictWord{8, 11, 377},
+ dictWord{10, 11, 523},
+ dictWord{11, 11, 816},
+ dictWord{12, 11, 455},
+ dictWord{13, 11, 303},
+ dictWord{142, 11, 135},
+ dictWord{133, 10, 812},
+ dictWord{134, 0, 658},
+ dictWord{4, 11, 1},
+ dictWord{7, 11, 1143},
+ dictWord{7, 11, 1463},
+ dictWord{8, 11, 61},
+ dictWord{9, 11, 207},
+ dictWord{9, 11, 390},
+ dictWord{9, 11, 467},
+ dictWord{139, 11, 836},
+ dictWord{150, 11, 26},
+ dictWord{140, 0, 106},
+ dictWord{6, 0, 1827},
+ dictWord{10, 0, 931},
+ dictWord{18, 0, 166},
+ dictWord{20, 0, 114},
+ dictWord{4, 10, 137},
+ dictWord{7, 10, 1178},
+ dictWord{7, 11, 1319},
+ dictWord{135, 10, 1520},
+ dictWord{133, 0, 1010},
+ dictWord{4, 11, 723},
+ dictWord{5, 11, 895},
+ dictWord{7, 11, 1031},
+ dictWord{8, 11, 199},
+ dictWord{8, 11, 340},
+ dictWord{9, 11, 153},
+ dictWord{9, 11, 215},
+ dictWord{10, 11, 21},
+ dictWord{10, 11, 59},
+ dictWord{10, 11, 80},
+ dictWord{10, 11, 224},
+ dictWord{11, 11, 229},
+ dictWord{11, 11, 652},
+ dictWord{12, 11, 192},
+ dictWord{13, 11, 146},
+ dictWord{142, 11, 91},
+ dictWord{132, 11, 295},
+ dictWord{6, 11, 619},
+ dictWord{7, 11, 898},
+ dictWord{7, 11, 1092},
+ dictWord{8, 11, 485},
+ dictWord{18, 11, 28},
+ dictWord{147, 11, 116},
+ dictWord{137, 11, 51},
+ dictWord{6, 10, 1661},
+ dictWord{7, 10, 1975},
+ dictWord{7, 10, 2009},
+ dictWord{135, 10, 2011},
+ dictWord{5, 11, 309},
+ dictWord{140, 11, 211},
+ dictWord{5, 0, 87},
+ dictWord{7, 0, 313},
+ dictWord{7, 0, 1103},
+ dictWord{10, 0, 208},
+ dictWord{10, 0, 582},
+ dictWord{11, 0, 389},
+ dictWord{11, 0, 813},
+ dictWord{12, 0, 385},
+ dictWord{13, 0, 286},
+ dictWord{14, 0, 124},
+ dictWord{146, 0, 108},
+ dictWord{5, 11, 125},
+ dictWord{8, 11, 77},
+ dictWord{138, 11, 15},
+ dictWord{132, 0, 267},
+ dictWord{133, 0, 703},
+ dictWord{137, 11, 155},
+ dictWord{133, 11, 439},
+ dictWord{11, 11, 164},
+ dictWord{140, 11, 76},
+ dictWord{9, 0, 496},
+ dictWord{5, 10, 89},
+ dictWord{7, 10, 1915},
+ dictWord{9, 10, 185},
+ dictWord{9, 10, 235},
+ dictWord{10, 10, 64},
+ dictWord{10, 10, 270},
+ dictWord{10, 10, 403},
+ dictWord{10, 10, 469},
+ dictWord{10, 10, 529},
+ dictWord{10, 10, 590},
+ dictWord{11, 10, 140},
+ dictWord{11, 10, 860},
+ dictWord{13, 10, 1},
+ dictWord{13, 10, 422},
+ dictWord{14, 10, 341},
+ dictWord{14, 10, 364},
+ dictWord{17, 10, 93},
+ dictWord{18, 10, 113},
+ dictWord{19, 10, 97},
+ dictWord{147, 10, 113},
+ dictWord{133, 10, 695},
+ dictWord{135, 0, 1121},
+ dictWord{5, 10, 6},
+ dictWord{6, 10, 183},
+ dictWord{7, 10, 680},
+ dictWord{7, 10, 978},
+ dictWord{7, 10, 1013},
+ dictWord{7, 10, 1055},
+ dictWord{12, 10, 230},
+ dictWord{13, 10, 172},
+ dictWord{146, 10, 29},
+ dictWord{4, 11, 8},
+ dictWord{7, 11, 1152},
+ dictWord{7, 11, 1153},
+ dictWord{7, 11, 1715},
+ dictWord{9, 11, 374},
+ dictWord{10, 11, 478},
+ dictWord{139, 11, 648},
+ dictWord{135, 11, 1099},
+ dictWord{6, 10, 29},
+ dictWord{139, 10, 63},
+ dictWord{4, 0, 561},
+ dictWord{10, 0, 249},
+ dictWord{139, 0, 209},
+ dictWord{132, 0, 760},
+ dictWord{7, 11, 799},
+ dictWord{138, 11, 511},
+ dictWord{136, 11, 87},
+ dictWord{9, 0, 154},
+ dictWord{140, 0, 485},
+ dictWord{136, 0, 255},
+ dictWord{132, 0, 323},
+ dictWord{140, 0, 419},
+ dictWord{132, 10, 311},
+ dictWord{134, 10, 1740},
+ dictWord{4, 0, 368},
+ dictWord{135, 0, 641},
+ dictWord{7, 10, 170},
+ dictWord{8, 10, 90},
+ dictWord{8, 10, 177},
+ dictWord{8, 10, 415},
+ dictWord{11, 10, 714},
+ dictWord{142, 10, 281},
+ dictWord{4, 11, 69},
+ dictWord{5, 11, 122},
+ dictWord{9, 11, 656},
+ dictWord{138, 11, 464},
+ dictWord{5, 11, 849},
+ dictWord{134, 11, 1633},
+ dictWord{8, 0, 522},
+ dictWord{142, 0, 328},
+ dictWord{11, 10, 91},
+ dictWord{13, 10, 129},
+ dictWord{15, 10, 101},
+ dictWord{145, 10, 125},
+ dictWord{7, 0, 562},
+ dictWord{8, 0, 551},
+ dictWord{4, 10, 494},
+ dictWord{6, 10, 74},
+ dictWord{7, 10, 44},
+ dictWord{11, 11, 499},
+ dictWord{12, 10, 17},
+ dictWord{15, 10, 5},
+ dictWord{148, 10, 11},
+ dictWord{4, 10, 276},
+ dictWord{133, 10, 296},
+ dictWord{9, 0, 92},
+ dictWord{147, 0, 91},
+ dictWord{4, 10, 7},
+ dictWord{5, 10, 90},
+ dictWord{5, 10, 158},
+ dictWord{6, 10, 542},
+ dictWord{7, 10, 221},
+ dictWord{7, 10, 1574},
+ dictWord{9, 10, 490},
+ dictWord{10, 10, 540},
+ dictWord{11, 10, 443},
+ dictWord{139, 10, 757},
+ dictWord{6, 0, 525},
+ dictWord{6, 0, 1976},
+ dictWord{8, 0, 806},
+ dictWord{9, 0, 876},
+ dictWord{140, 0, 284},
+ dictWord{5, 11, 859},
+ dictWord{7, 10, 588},
+ dictWord{7, 11, 1160},
+ dictWord{8, 11, 107},
+ dictWord{9, 10, 175},
+ dictWord{9, 11, 291},
+ dictWord{9, 11, 439},
+ dictWord{10, 10, 530},
+ dictWord{10, 11, 663},
+ dictWord{11, 11, 609},
+ dictWord{140, 11, 197},
+ dictWord{7, 11, 168},
+ dictWord{13, 11, 196},
+ dictWord{141, 11, 237},
+ dictWord{139, 0, 958},
+ dictWord{133, 0, 594},
+ dictWord{135, 10, 580},
+ dictWord{7, 10, 88},
+ dictWord{136, 10, 627},
+ dictWord{6, 0, 479},
+ dictWord{6, 0, 562},
+ dictWord{7, 0, 1060},
+ dictWord{13, 0, 6},
+ dictWord{5, 10, 872},
+ dictWord{6, 10, 57},
+ dictWord{7, 10, 471},
+ dictWord{9, 10, 447},
+ dictWord{137, 10, 454},
+ dictWord{136, 11, 413},
+ dictWord{145, 11, 19},
+ dictWord{4, 11, 117},
+ dictWord{6, 11, 372},
+ dictWord{7, 11, 1905},
+ dictWord{142, 11, 323},
+ dictWord{4, 11, 722},
+ dictWord{139, 11, 471},
+ dictWord{17, 0, 61},
+ dictWord{5, 10, 31},
+ dictWord{134, 10, 614},
+ dictWord{8, 10, 330},
+ dictWord{140, 10, 477},
+ dictWord{7, 10, 1200},
+ dictWord{138, 10, 460},
+ dictWord{6, 10, 424},
+ dictWord{135, 10, 1866},
+ dictWord{6, 0, 1641},
+ dictWord{136, 0, 820},
+ dictWord{6, 0, 1556},
+ dictWord{134, 0, 1618},
+ dictWord{9, 11, 5},
+ dictWord{12, 11, 216},
+ dictWord{12, 11, 294},
+ dictWord{12, 11, 298},
+ dictWord{12, 11, 400},
+ dictWord{12, 11, 518},
+ dictWord{13, 11, 229},
+ dictWord{143, 11, 139},
+ dictWord{15, 11, 155},
+ dictWord{144, 11, 79},
+ dictWord{4, 0, 302},
+ dictWord{135, 0, 1766},
+ dictWord{5, 10, 13},
+ dictWord{134, 10, 142},
+ dictWord{6, 0, 148},
+ dictWord{7, 0, 1313},
+ dictWord{7, 10, 116},
+ dictWord{8, 10, 322},
+ dictWord{8, 10, 755},
+ dictWord{9, 10, 548},
+ dictWord{10, 10, 714},
+ dictWord{11, 10, 884},
+ dictWord{141, 10, 324},
+ dictWord{137, 0, 676},
+ dictWord{9, 11, 88},
+ dictWord{139, 11, 270},
+ dictWord{5, 11, 12},
+ dictWord{7, 11, 375},
+ dictWord{137, 11, 438},
+ dictWord{134, 0, 1674},
+ dictWord{7, 10, 1472},
+ dictWord{135, 10, 1554},
+ dictWord{11, 0, 178},
+ dictWord{7, 10, 1071},
+ dictWord{7, 10, 1541},
+ dictWord{7, 10, 1767},
+ dictWord{7, 10, 1806},
+ dictWord{11, 10, 162},
+ dictWord{11, 10, 242},
+ dictWord{12, 10, 605},
+ dictWord{15, 10, 26},
+ dictWord{144, 10, 44},
+ dictWord{6, 0, 389},
+ dictWord{7, 0, 149},
+ dictWord{9, 0, 142},
+ dictWord{138, 0, 94},
+ dictWord{140, 11, 71},
+ dictWord{145, 10, 115},
+ dictWord{6, 0, 8},
+ dictWord{7, 0, 1881},
+ dictWord{8, 0, 91},
+ dictWord{11, 11, 966},
+ dictWord{12, 11, 287},
+ dictWord{13, 11, 342},
+ dictWord{13, 11, 402},
+ dictWord{15, 11, 110},
+ dictWord{143, 11, 163},
+ dictWord{4, 11, 258},
+ dictWord{136, 11, 639},
+ dictWord{6, 11, 22},
+ dictWord{7, 11, 903},
+ dictWord{138, 11, 577},
+ dictWord{133, 11, 681},
+ dictWord{135, 10, 1111},
+ dictWord{135, 11, 1286},
+ dictWord{9, 0, 112},
+ dictWord{8, 10, 1},
+ dictWord{138, 10, 326},
+ dictWord{5, 10, 488},
+ dictWord{6, 10, 527},
+ dictWord{7, 10, 489},
+ dictWord{7, 10, 1636},
+ dictWord{8, 10, 121},
+ dictWord{8, 10, 144},
+ dictWord{8, 10, 359},
+ dictWord{9, 10, 193},
+ dictWord{9, 10, 241},
+ dictWord{9, 10, 336},
+ dictWord{9, 10, 882},
+ dictWord{11, 10, 266},
+ dictWord{11, 10, 372},
+ dictWord{11, 10, 944},
+ dictWord{12, 10, 401},
+ dictWord{140, 10, 641},
+ dictWord{4, 11, 664},
+ dictWord{133, 11, 804},
+ dictWord{6, 0, 747},
+ dictWord{134, 0, 1015},
+ dictWord{135, 0, 1746},
+ dictWord{9, 10, 31},
+ dictWord{10, 10, 244},
+ dictWord{10, 10, 699},
+ dictWord{12, 10, 149},
+ dictWord{141, 10, 497},
+ dictWord{133, 10, 377},
+ dictWord{135, 0, 24},
+ dictWord{6, 0, 1352},
+ dictWord{5, 11, 32},
+ dictWord{145, 10, 101},
+ dictWord{7, 0, 1530},
+ dictWord{10, 0, 158},
+ dictWord{13, 0, 13},
+ dictWord{13, 0, 137},
+ dictWord{13, 0, 258},
+ dictWord{14, 0, 111},
+ dictWord{14, 0, 225},
+ dictWord{14, 0, 253},
+ dictWord{14, 0, 304},
+ dictWord{14, 0, 339},
+ dictWord{14, 0, 417},
+ dictWord{146, 0, 33},
+ dictWord{4, 0, 503},
+ dictWord{135, 0, 1661},
+ dictWord{5, 0, 130},
+ dictWord{6, 0, 845},
+ dictWord{7, 0, 1314},
+ dictWord{9, 0, 610},
+ dictWord{10, 0, 718},
+ dictWord{11, 0, 601},
+ dictWord{11, 0, 819},
+ dictWord{11, 0, 946},
+ dictWord{140, 0, 536},
+ dictWord{10, 0, 149},
+ dictWord{11, 0, 280},
+ dictWord{142, 0, 336},
+ dictWord{134, 0, 1401},
+ dictWord{135, 0, 1946},
+ dictWord{8, 0, 663},
+ dictWord{144, 0, 8},
+ dictWord{134, 0, 1607},
+ dictWord{135, 10, 2023},
+ dictWord{4, 11, 289},
+ dictWord{7, 11, 629},
+ dictWord{7, 11, 1698},
+ dictWord{7, 11, 1711},
+ dictWord{140, 11, 215},
+ dictWord{6, 11, 450},
+ dictWord{136, 11, 109},
+ dictWord{10, 0, 882},
+ dictWord{10, 0, 883},
+ dictWord{10, 0, 914},
+ dictWord{138, 0, 928},
+ dictWord{133, 10, 843},
+ dictWord{136, 11, 705},
+ dictWord{132, 10, 554},
+ dictWord{133, 10, 536},
+ dictWord{5, 0, 417},
+ dictWord{9, 10, 79},
+ dictWord{11, 10, 625},
+ dictWord{145, 10, 7},
+ dictWord{7, 11, 1238},
+ dictWord{142, 11, 37},
+ dictWord{4, 0, 392},
+ dictWord{135, 0, 1597},
+ dictWord{5, 0, 433},
+ dictWord{9, 0, 633},
+ dictWord{11, 0, 629},
+ dictWord{132, 10, 424},
+ dictWord{7, 10, 336},
+ dictWord{136, 10, 785},
+ dictWord{134, 11, 355},
+ dictWord{6, 0, 234},
+ dictWord{7, 0, 769},
+ dictWord{9, 0, 18},
+ dictWord{138, 0, 358},
+ dictWord{4, 10, 896},
+ dictWord{134, 10, 1777},
+ dictWord{138, 11, 323},
+ dictWord{7, 0, 140},
+ dictWord{7, 0, 1950},
+ dictWord{8, 0, 680},
+ dictWord{11, 0, 817},
+ dictWord{147, 0, 88},
+ dictWord{7, 0, 1222},
+ dictWord{138, 0, 386},
+ dictWord{139, 11, 908},
+ dictWord{11, 0, 249},
+ dictWord{12, 0, 313},
+ dictWord{16, 0, 66},
+ dictWord{145, 0, 26},
+ dictWord{134, 0, 5},
+ dictWord{7, 10, 750},
+ dictWord{9, 10, 223},
+ dictWord{11, 10, 27},
+ dictWord{11, 10, 466},
+ dictWord{12, 10, 624},
+ dictWord{14, 10, 265},
+ dictWord{146, 10, 61},
+ dictWord{134, 11, 26},
+ dictWord{134, 0, 1216},
+ dictWord{5, 0, 963},
+ dictWord{134, 0, 1773},
+ dictWord{4, 11, 414},
+ dictWord{5, 11, 467},
+ dictWord{9, 11, 654},
+ dictWord{10, 11, 451},
+ dictWord{12, 11, 59},
+ dictWord{141, 11, 375},
+ dictWord{135, 11, 17},
+ dictWord{4, 10, 603},
+ dictWord{133, 10, 661},
+ dictWord{4, 10, 11},
+ dictWord{6, 10, 128},
+ dictWord{7, 10, 231},
+ dictWord{7, 10, 1533},
+ dictWord{138, 10, 725},
+ dictWord{135, 11, 955},
+ dictWord{7, 0, 180},
+ dictWord{8, 0, 509},
+ dictWord{136, 0, 792},
+ dictWord{132, 10, 476},
+ dictWord{132, 0, 1002},
+ dictWord{133, 11, 538},
+ dictWord{135, 10, 1807},
+ dictWord{132, 0, 931},
+ dictWord{7, 0, 943},
+ dictWord{11, 0, 614},
+ dictWord{140, 0, 747},
+ dictWord{135, 0, 1837},
+ dictWord{9, 10, 20},
+ dictWord{10, 10, 324},
+ dictWord{10, 10, 807},
+ dictWord{139, 10, 488},
+ dictWord{134, 0, 641},
+ dictWord{6, 11, 280},
+ dictWord{10, 11, 502},
+ dictWord{11, 11, 344},
+ dictWord{140, 11, 38},
+ dictWord{5, 11, 45},
+ dictWord{7, 11, 1161},
+ dictWord{11, 11, 448},
+ dictWord{11, 11, 880},
+ dictWord{13, 11, 139},
+ dictWord{13, 11, 407},
+ dictWord{15, 11, 16},
+ dictWord{17, 11, 95},
+ dictWord{18, 11, 66},
+ dictWord{18, 11, 88},
+ dictWord{18, 11, 123},
+ dictWord{149, 11, 7},
+ dictWord{9, 0, 280},
+ dictWord{138, 0, 134},
+ dictWord{22, 0, 22},
+ dictWord{23, 0, 5},
+ dictWord{151, 0, 29},
+ dictWord{136, 11, 777},
+ dictWord{4, 0, 90},
+ dictWord{5, 0, 545},
+ dictWord{7, 0, 754},
+ dictWord{9, 0, 186},
+ dictWord{10, 0, 72},
+ dictWord{10, 0, 782},
+ dictWord{11, 0, 577},
+ dictWord{11, 0, 610},
+ dictWord{11, 0, 960},
+ dictWord{12, 0, 354},
+ dictWord{12, 0, 362},
+ dictWord{12, 0, 595},
+ dictWord{4, 11, 410},
+ dictWord{135, 11, 521},
+ dictWord{135, 11, 1778},
+ dictWord{5, 10, 112},
+ dictWord{6, 10, 103},
+ dictWord{134, 10, 150},
+ dictWord{138, 10, 356},
+ dictWord{132, 0, 742},
+ dictWord{7, 0, 151},
+ dictWord{9, 0, 329},
+ dictWord{139, 0, 254},
+ dictWord{8, 0, 853},
+ dictWord{8, 0, 881},
+ dictWord{8, 0, 911},
+ dictWord{8, 0, 912},
+ dictWord{10, 0, 872},
+ dictWord{12, 0, 741},
+ dictWord{12, 0, 742},
+ dictWord{152, 0, 18},
+ dictWord{4, 11, 573},
+ dictWord{136, 11, 655},
+ dictWord{6, 0, 921},
+ dictWord{134, 0, 934},
+ dictWord{9, 0, 187},
+ dictWord{10, 0, 36},
+ dictWord{11, 0, 1016},
+ dictWord{17, 0, 44},
+ dictWord{146, 0, 64},
+ dictWord{7, 0, 833},
+ dictWord{136, 0, 517},
+ dictWord{4, 0, 506},
+ dictWord{5, 0, 295},
+ dictWord{135, 0, 1680},
+ dictWord{4, 10, 708},
+ dictWord{8, 10, 15},
+ dictWord{9, 10, 50},
+ dictWord{9, 10, 386},
+ dictWord{11, 10, 18},
+ dictWord{11, 10, 529},
+ dictWord{140, 10, 228},
+ dictWord{7, 0, 251},
+ dictWord{7, 0, 1701},
+ dictWord{8, 0, 436},
+ dictWord{4, 10, 563},
+ dictWord{7, 10, 592},
+ dictWord{7, 10, 637},
+ dictWord{7, 10, 770},
+ dictWord{8, 10, 463},
+ dictWord{9, 10, 60},
+ dictWord{9, 10, 335},
+ dictWord{9, 10, 904},
+ dictWord{10, 10, 73},
+ dictWord{11, 10, 434},
+ dictWord{12, 10, 585},
+ dictWord{13, 10, 331},
+ dictWord{18, 10, 110},
+ dictWord{148, 10, 60},
+ dictWord{132, 10, 502},
+ dictWord{136, 0, 584},
+ dictWord{6, 10, 347},
+ dictWord{138, 10, 161},
+ dictWord{7, 0, 987},
+ dictWord{9, 0, 688},
+ dictWord{10, 0, 522},
+ dictWord{11, 0, 788},
+ dictWord{12, 0, 137},
+ dictWord{12, 0, 566},
+ dictWord{14, 0, 9},
+ dictWord{14, 0, 24},
+ dictWord{14, 0, 64},
+ dictWord{7, 11, 899},
+ dictWord{142, 11, 325},
+ dictWord{4, 0, 214},
+ dictWord{5, 0, 500},
+ dictWord{5, 10, 102},
+ dictWord{6, 10, 284},
+ dictWord{7, 10, 1079},
+ dictWord{7, 10, 1423},
+ dictWord{7, 10, 1702},
+ dictWord{8, 10, 470},
+ dictWord{9, 10, 554},
+ dictWord{9, 10, 723},
+ dictWord{139, 10, 333},
+ dictWord{7, 10, 246},
+ dictWord{135, 10, 840},
+ dictWord{6, 10, 10},
+ dictWord{8, 10, 571},
+ dictWord{9, 10, 739},
+ dictWord{143, 10, 91},
+ dictWord{133, 10, 626},
+ dictWord{146, 0, 195},
+ dictWord{134, 0, 1775},
+ dictWord{7, 0, 389},
+ dictWord{7, 0, 700},
+ dictWord{7, 0, 940},
+ dictWord{8, 0, 514},
+ dictWord{9, 0, 116},
+ dictWord{9, 0, 535},
+ dictWord{10, 0, 118},
+ dictWord{11, 0, 107},
+ dictWord{11, 0, 148},
+ dictWord{11, 0, 922},
+ dictWord{12, 0, 254},
+ dictWord{12, 0, 421},
+ dictWord{142, 0, 238},
+ dictWord{5, 10, 18},
+ dictWord{6, 10, 526},
+ dictWord{13, 10, 24},
+ dictWord{13, 10, 110},
+ dictWord{19, 10, 5},
+ dictWord{147, 10, 44},
+ dictWord{132, 0, 743},
+ dictWord{11, 0, 292},
+ dictWord{4, 10, 309},
+ dictWord{5, 10, 462},
+ dictWord{7, 10, 970},
+ dictWord{135, 10, 1097},
+ dictWord{22, 10, 30},
+ dictWord{150, 10, 33},
+ dictWord{139, 11, 338},
+ dictWord{135, 11, 1598},
+ dictWord{7, 0, 1283},
+ dictWord{9, 0, 227},
+ dictWord{11, 0, 325},
+ dictWord{11, 0, 408},
+ dictWord{14, 0, 180},
+ dictWord{146, 0, 47},
+ dictWord{4, 0, 953},
+ dictWord{6, 0, 1805},
+ dictWord{6, 0, 1814},
+ dictWord{6, 0, 1862},
+ dictWord{140, 0, 774},
+ dictWord{6, 11, 611},
+ dictWord{135, 11, 1733},
+ dictWord{135, 11, 1464},
+ dictWord{5, 0, 81},
+ dictWord{7, 0, 146},
+ dictWord{7, 0, 1342},
+ dictWord{8, 0, 53},
+ dictWord{8, 0, 561},
+ dictWord{8, 0, 694},
+ dictWord{8, 0, 754},
+ dictWord{9, 0, 115},
+ dictWord{9, 0, 179},
+ dictWord{9, 0, 894},
+ dictWord{10, 0, 462},
+ dictWord{10, 0, 813},
+ dictWord{11, 0, 230},
+ dictWord{11, 0, 657},
+ dictWord{11, 0, 699},
+ dictWord{11, 0, 748},
+ dictWord{12, 0, 119},
+ dictWord{12, 0, 200},
+ dictWord{12, 0, 283},
+ dictWord{142, 0, 273},
+ dictWord{5, 0, 408},
+ dictWord{6, 0, 789},
+ dictWord{6, 0, 877},
+ dictWord{6, 0, 1253},
+ dictWord{6, 0, 1413},
+ dictWord{137, 0, 747},
+ dictWord{134, 10, 1704},
+ dictWord{135, 11, 663},
+ dictWord{6, 0, 1910},
+ dictWord{6, 0, 1915},
+ dictWord{6, 0, 1923},
+ dictWord{9, 0, 913},
+ dictWord{9, 0, 928},
+ dictWord{9, 0, 950},
+ dictWord{9, 0, 954},
+ dictWord{9, 0, 978},
+ dictWord{9, 0, 993},
+ dictWord{12, 0, 812},
+ dictWord{12, 0, 819},
+ dictWord{12, 0, 831},
+ dictWord{12, 0, 833},
+ dictWord{12, 0, 838},
+ dictWord{12, 0, 909},
+ dictWord{12, 0, 928},
+ dictWord{12, 0, 931},
+ dictWord{12, 0, 950},
+ dictWord{15, 0, 186},
+ dictWord{15, 0, 187},
+ dictWord{15, 0, 195},
+ dictWord{15, 0, 196},
+ dictWord{15, 0, 209},
+ dictWord{15, 0, 215},
+ dictWord{15, 0, 236},
+ dictWord{15, 0, 241},
+ dictWord{15, 0, 249},
+ dictWord{15, 0, 253},
+ dictWord{18, 0, 180},
+ dictWord{18, 0, 221},
+ dictWord{18, 0, 224},
+ dictWord{18, 0, 227},
+ dictWord{18, 0, 229},
+ dictWord{149, 0, 60},
+ dictWord{7, 0, 1826},
+ dictWord{135, 0, 1938},
+ dictWord{11, 0, 490},
+ dictWord{18, 0, 143},
+ dictWord{5, 10, 86},
+ dictWord{7, 10, 743},
+ dictWord{9, 10, 85},
+ dictWord{10, 10, 281},
+ dictWord{10, 10, 432},
+ dictWord{12, 10, 251},
+ dictWord{13, 10, 118},
+ dictWord{142, 10, 378},
+ dictWord{5, 10, 524},
+ dictWord{133, 10, 744},
+ dictWord{141, 11, 442},
+ dictWord{10, 10, 107},
+ dictWord{140, 10, 436},
+ dictWord{135, 11, 503},
+ dictWord{134, 0, 1162},
+ dictWord{132, 10, 927},
+ dictWord{7, 0, 30},
+ dictWord{8, 0, 86},
+ dictWord{8, 0, 315},
+ dictWord{8, 0, 700},
+ dictWord{9, 0, 576},
+ dictWord{9, 0, 858},
+ dictWord{10, 0, 414},
+ dictWord{11, 0, 310},
+ dictWord{11, 0, 888},
+ dictWord{11, 0, 904},
+ dictWord{12, 0, 361},
+ dictWord{13, 0, 248},
+ dictWord{13, 0, 371},
+ dictWord{14, 0, 142},
+ dictWord{12, 10, 670},
+ dictWord{146, 10, 94},
+ dictWord{134, 0, 721},
+ dictWord{4, 11, 113},
+ dictWord{5, 11, 163},
+ dictWord{5, 11, 735},
+ dictWord{7, 11, 1009},
+ dictWord{7, 10, 1149},
+ dictWord{9, 11, 9},
+ dictWord{9, 10, 156},
+ dictWord{9, 11, 771},
+ dictWord{12, 11, 90},
+ dictWord{13, 11, 138},
+ dictWord{13, 11, 410},
+ dictWord{143, 11, 128},
+ dictWord{138, 0, 839},
+ dictWord{133, 10, 778},
+ dictWord{137, 0, 617},
+ dictWord{133, 10, 502},
+ dictWord{8, 10, 196},
+ dictWord{10, 10, 283},
+ dictWord{139, 10, 406},
+ dictWord{6, 0, 428},
+ dictWord{7, 0, 524},
+ dictWord{8, 0, 169},
+ dictWord{8, 0, 234},
+ dictWord{9, 0, 480},
+ dictWord{138, 0, 646},
+ dictWord{133, 10, 855},
+ dictWord{134, 0, 1648},
+ dictWord{7, 0, 1205},
+ dictWord{138, 0, 637},
+ dictWord{7, 0, 1596},
+ dictWord{4, 11, 935},
+ dictWord{133, 11, 823},
+ dictWord{5, 11, 269},
+ dictWord{7, 11, 434},
+ dictWord{7, 11, 891},
+ dictWord{8, 11, 339},
+ dictWord{9, 11, 702},
+ dictWord{11, 11, 594},
+ dictWord{11, 11, 718},
+ dictWord{145, 11, 100},
+ dictWord{7, 11, 878},
+ dictWord{9, 11, 485},
+ dictWord{141, 11, 264},
+ dictWord{4, 0, 266},
+ dictWord{8, 0, 4},
+ dictWord{9, 0, 39},
+ dictWord{10, 0, 166},
+ dictWord{11, 0, 918},
+ dictWord{12, 0, 635},
+ dictWord{20, 0, 10},
+ dictWord{22, 0, 27},
+ dictWord{22, 0, 43},
+ dictWord{22, 0, 52},
+ dictWord{134, 11, 1713},
+ dictWord{7, 10, 1400},
+ dictWord{9, 10, 446},
+ dictWord{138, 10, 45},
+ dictWord{135, 11, 900},
+ dictWord{132, 0, 862},
+ dictWord{134, 0, 1554},
+ dictWord{135, 11, 1033},
+ dictWord{19, 0, 16},
+ dictWord{147, 11, 16},
+ dictWord{135, 11, 1208},
+ dictWord{7, 0, 157},
+ dictWord{136, 0, 279},
+ dictWord{6, 0, 604},
+ dictWord{136, 0, 391},
+ dictWord{13, 10, 455},
+ dictWord{15, 10, 99},
+ dictWord{15, 10, 129},
+ dictWord{144, 10, 68},
+ dictWord{135, 10, 172},
+ dictWord{7, 0, 945},
+ dictWord{11, 0, 713},
+ dictWord{139, 0, 744},
+ dictWord{4, 0, 973},
+ dictWord{10, 0, 877},
+ dictWord{10, 0, 937},
+ dictWord{10, 0, 938},
+ dictWord{140, 0, 711},
+ dictWord{139, 0, 1022},
+ dictWord{132, 10, 568},
+ dictWord{142, 11, 143},
+ dictWord{4, 0, 567},
+ dictWord{9, 0, 859},
+ dictWord{132, 10, 732},
+ dictWord{7, 0, 1846},
+ dictWord{136, 0, 628},
+ dictWord{136, 10, 733},
+ dictWord{133, 0, 762},
+ dictWord{4, 10, 428},
+ dictWord{135, 10, 1789},
+ dictWord{10, 0, 784},
+ dictWord{13, 0, 191},
+ dictWord{7, 10, 2015},
+ dictWord{140, 10, 665},
+ dictWord{133, 0, 298},
+ dictWord{7, 0, 633},
+ dictWord{7, 0, 905},
+ dictWord{7, 0, 909},
+ dictWord{7, 0, 1538},
+ dictWord{9, 0, 767},
+ dictWord{140, 0, 636},
+ dictWord{138, 10, 806},
+ dictWord{132, 0, 795},
+ dictWord{139, 0, 301},
+ dictWord{135, 0, 1970},
+ dictWord{5, 11, 625},
+ dictWord{135, 11, 1617},
+ dictWord{135, 11, 275},
+ dictWord{7, 11, 37},
+ dictWord{8, 11, 425},
+ dictWord{8, 11, 693},
+ dictWord{9, 11, 720},
+ dictWord{10, 11, 380},
+ dictWord{10, 11, 638},
+ dictWord{11, 11, 273},
+ dictWord{11, 11, 307},
+ dictWord{11, 11, 473},
+ dictWord{12, 11, 61},
+ dictWord{143, 11, 43},
+ dictWord{135, 11, 198},
+ dictWord{134, 0, 1236},
+ dictWord{7, 0, 369},
+ dictWord{12, 0, 644},
+ dictWord{12, 0, 645},
+ dictWord{144, 0, 90},
+ dictWord{19, 0, 15},
+ dictWord{149, 0, 27},
+ dictWord{6, 0, 71},
+ dictWord{7, 0, 845},
+ dictWord{8, 0, 160},
+ dictWord{9, 0, 318},
+ dictWord{6, 10, 1623},
+ dictWord{134, 10, 1681},
+ dictWord{134, 0, 1447},
+ dictWord{134, 0, 1255},
+ dictWord{138, 0, 735},
+ dictWord{8, 0, 76},
+ dictWord{132, 11, 168},
+ dictWord{6, 10, 1748},
+ dictWord{8, 10, 715},
+ dictWord{9, 10, 802},
+ dictWord{10, 10, 46},
+ dictWord{10, 10, 819},
+ dictWord{13, 10, 308},
+ dictWord{14, 10, 351},
+ dictWord{14, 10, 363},
+ dictWord{146, 10, 67},
+ dictWord{135, 11, 91},
+ dictWord{6, 0, 474},
+ dictWord{4, 10, 63},
+ dictWord{133, 10, 347},
+ dictWord{133, 10, 749},
+ dictWord{138, 0, 841},
+ dictWord{133, 10, 366},
+ dictWord{6, 0, 836},
+ dictWord{132, 11, 225},
+ dictWord{135, 0, 1622},
+ dictWord{135, 10, 89},
+ dictWord{140, 0, 735},
+ dictWord{134, 0, 1601},
+ dictWord{138, 11, 145},
+ dictWord{6, 0, 1390},
+ dictWord{137, 0, 804},
+ dictWord{142, 0, 394},
+ dictWord{6, 11, 15},
+ dictWord{7, 11, 70},
+ dictWord{10, 11, 240},
+ dictWord{147, 11, 93},
+ dictWord{6, 0, 96},
+ dictWord{135, 0, 1426},
+ dictWord{4, 0, 651},
+ dictWord{133, 0, 289},
+ dictWord{7, 11, 956},
+ dictWord{7, 10, 977},
+ dictWord{7, 11, 1157},
+ dictWord{7, 11, 1506},
+ dictWord{7, 11, 1606},
+ dictWord{7, 11, 1615},
+ dictWord{7, 11, 1619},
+ dictWord{7, 11, 1736},
+ dictWord{7, 11, 1775},
+ dictWord{8, 11, 590},
+ dictWord{9, 11, 324},
+ dictWord{9, 11, 736},
+ dictWord{9, 11, 774},
+ dictWord{9, 11, 776},
+ dictWord{9, 11, 784},
+ dictWord{10, 11, 567},
+ dictWord{10, 11, 708},
+ dictWord{11, 11, 518},
+ dictWord{11, 11, 613},
+ dictWord{11, 11, 695},
+ dictWord{11, 11, 716},
+ dictWord{11, 11, 739},
+ dictWord{11, 11, 770},
+ dictWord{11, 11, 771},
+ dictWord{11, 11, 848},
+ dictWord{11, 11, 857},
+ dictWord{11, 11, 931},
+ dictWord{11, 11, 947},
+ dictWord{12, 11, 326},
+ dictWord{12, 11, 387},
+ dictWord{12, 11, 484},
+ dictWord{12, 11, 528},
+ dictWord{12, 11, 552},
+ dictWord{12, 11, 613},
+ dictWord{13, 11, 189},
+ dictWord{13, 11, 256},
+ dictWord{13, 11, 340},
+ dictWord{13, 11, 432},
+ dictWord{13, 11, 436},
+ dictWord{13, 11, 440},
+ dictWord{13, 11, 454},
+ dictWord{14, 11, 174},
+ dictWord{14, 11, 220},
+ dictWord{14, 11, 284},
+ dictWord{14, 11, 390},
+ dictWord{145, 11, 121},
+ dictWord{7, 0, 688},
+ dictWord{8, 0, 35},
+ dictWord{9, 0, 511},
+ dictWord{10, 0, 767},
+ dictWord{147, 0, 118},
+ dictWord{134, 0, 667},
+ dictWord{4, 0, 513},
+ dictWord{5, 10, 824},
+ dictWord{133, 10, 941},
+ dictWord{7, 10, 440},
+ dictWord{8, 10, 230},
+ dictWord{139, 10, 106},
+ dictWord{134, 0, 2034},
+ dictWord{135, 11, 1399},
+ dictWord{143, 11, 66},
+ dictWord{135, 11, 1529},
+ dictWord{4, 11, 145},
+ dictWord{6, 11, 176},
+ dictWord{7, 11, 395},
+ dictWord{9, 11, 562},
+ dictWord{144, 11, 28},
+ dictWord{132, 11, 501},
+ dictWord{132, 0, 704},
+ dictWord{134, 0, 1524},
+ dictWord{7, 0, 1078},
+ dictWord{134, 11, 464},
+ dictWord{6, 11, 509},
+ dictWord{10, 11, 82},
+ dictWord{20, 11, 91},
+ dictWord{151, 11, 13},
+ dictWord{4, 0, 720},
+ dictWord{133, 0, 306},
+ dictWord{133, 0, 431},
+ dictWord{7, 0, 1196},
+ dictWord{4, 10, 914},
+ dictWord{5, 10, 800},
+ dictWord{133, 10, 852},
+ dictWord{135, 11, 1189},
+ dictWord{10, 0, 54},
+ dictWord{141, 10, 115},
+ dictWord{7, 10, 564},
+ dictWord{142, 10, 168},
+ dictWord{5, 0, 464},
+ dictWord{6, 0, 236},
+ dictWord{7, 0, 696},
+ dictWord{7, 0, 914},
+ dictWord{7, 0, 1108},
+ dictWord{7, 0, 1448},
+ dictWord{9, 0, 15},
+ dictWord{9, 0, 564},
+ dictWord{10, 0, 14},
+ dictWord{12, 0, 565},
+ dictWord{13, 0, 449},
+ dictWord{14, 0, 53},
+ dictWord{15, 0, 13},
+ dictWord{16, 0, 64},
+ dictWord{17, 0, 41},
+ dictWord{4, 10, 918},
+ dictWord{133, 10, 876},
+ dictWord{6, 0, 1418},
+ dictWord{134, 10, 1764},
+ dictWord{4, 10, 92},
+ dictWord{133, 10, 274},
+ dictWord{134, 0, 907},
+ dictWord{4, 11, 114},
+ dictWord{8, 10, 501},
+ dictWord{9, 11, 492},
+ dictWord{13, 11, 462},
+ dictWord{142, 11, 215},
+ dictWord{4, 11, 77},
+ dictWord{5, 11, 361},
+ dictWord{6, 11, 139},
+ dictWord{6, 11, 401},
+ dictWord{6, 11, 404},
+ dictWord{7, 11, 413},
+ dictWord{7, 11, 715},
+ dictWord{7, 11, 1716},
+ dictWord{11, 11, 279},
+ dictWord{12, 11, 179},
+ dictWord{12, 11, 258},
+ dictWord{13, 11, 244},
+ dictWord{142, 11, 358},
+ dictWord{6, 0, 1767},
+ dictWord{12, 0, 194},
+ dictWord{145, 0, 107},
+ dictWord{134, 11, 1717},
+ dictWord{5, 10, 743},
+ dictWord{142, 11, 329},
+ dictWord{4, 10, 49},
+ dictWord{7, 10, 280},
+ dictWord{135, 10, 1633},
+ dictWord{5, 0, 840},
+ dictWord{7, 11, 1061},
+ dictWord{8, 11, 82},
+ dictWord{11, 11, 250},
+ dictWord{12, 11, 420},
+ dictWord{141, 11, 184},
+ dictWord{135, 11, 724},
+ dictWord{134, 0, 900},
+ dictWord{136, 10, 47},
+ dictWord{134, 0, 1436},
+ dictWord{144, 11, 0},
+ dictWord{6, 0, 675},
+ dictWord{7, 0, 1008},
+ dictWord{7, 0, 1560},
+ dictWord{9, 0, 642},
+ dictWord{11, 0, 236},
+ dictWord{14, 0, 193},
+ dictWord{5, 10, 272},
+ dictWord{5, 10, 908},
+ dictWord{5, 10, 942},
+ dictWord{8, 10, 197},
+ dictWord{9, 10, 47},
+ dictWord{11, 10, 538},
+ dictWord{139, 10, 742},
+ dictWord{4, 0, 68},
+ dictWord{5, 0, 628},
+ dictWord{5, 0, 634},
+ dictWord{6, 0, 386},
+ dictWord{7, 0, 794},
+ dictWord{8, 0, 273},
+ dictWord{9, 0, 563},
+ dictWord{10, 0, 105},
+ dictWord{10, 0, 171},
+ dictWord{11, 0, 94},
+ dictWord{139, 0, 354},
+ dictWord{135, 10, 1911},
+ dictWord{137, 10, 891},
+ dictWord{4, 0, 95},
+ dictWord{6, 0, 1297},
+ dictWord{6, 0, 1604},
+ dictWord{7, 0, 416},
+ dictWord{139, 0, 830},
+ dictWord{6, 11, 513},
+ dictWord{135, 11, 1052},
+ dictWord{7, 0, 731},
+ dictWord{13, 0, 20},
+ dictWord{143, 0, 11},
+ dictWord{137, 11, 899},
+ dictWord{10, 0, 850},
+ dictWord{140, 0, 697},
+ dictWord{4, 0, 662},
+ dictWord{7, 11, 1417},
+ dictWord{12, 11, 382},
+ dictWord{17, 11, 48},
+ dictWord{152, 11, 12},
+ dictWord{133, 0, 736},
+ dictWord{132, 0, 861},
+ dictWord{4, 10, 407},
+ dictWord{132, 10, 560},
+ dictWord{141, 10, 490},
+ dictWord{6, 11, 545},
+ dictWord{7, 11, 565},
+ dictWord{7, 11, 1669},
+ dictWord{10, 11, 114},
+ dictWord{11, 11, 642},
+ dictWord{140, 11, 618},
+ dictWord{6, 0, 871},
+ dictWord{134, 0, 1000},
+ dictWord{5, 0, 864},
+ dictWord{10, 0, 648},
+ dictWord{11, 0, 671},
+ dictWord{15, 0, 46},
+ dictWord{133, 11, 5},
+ dictWord{133, 0, 928},
+ dictWord{11, 0, 90},
+ dictWord{13, 0, 7},
+ dictWord{4, 10, 475},
+ dictWord{11, 10, 35},
+ dictWord{13, 10, 71},
+ dictWord{13, 10, 177},
+ dictWord{142, 10, 422},
+ dictWord{136, 0, 332},
+ dictWord{135, 11, 192},
+ dictWord{134, 0, 1055},
+ dictWord{136, 11, 763},
+ dictWord{11, 0, 986},
+ dictWord{140, 0, 682},
+ dictWord{7, 0, 76},
+ dictWord{8, 0, 44},
+ dictWord{9, 0, 884},
+ dictWord{10, 0, 580},
+ dictWord{11, 0, 399},
+ dictWord{11, 0, 894},
+ dictWord{143, 0, 122},
+ dictWord{135, 11, 1237},
+ dictWord{135, 10, 636},
+ dictWord{11, 0, 300},
+ dictWord{6, 10, 222},
+ dictWord{7, 10, 1620},
+ dictWord{8, 10, 409},
+ dictWord{137, 10, 693},
+ dictWord{4, 11, 87},
+ dictWord{5, 11, 250},
+ dictWord{10, 11, 601},
+ dictWord{13, 11, 298},
+ dictWord{13, 11, 353},
+ dictWord{141, 11, 376},
+ dictWord{5, 0, 518},
+ dictWord{10, 0, 340},
+ dictWord{11, 0, 175},
+ dictWord{149, 0, 16},
+ dictWord{140, 0, 771},
+ dictWord{6, 0, 1108},
+ dictWord{137, 0, 831},
+ dictWord{132, 0, 836},
+ dictWord{135, 0, 1852},
+ dictWord{4, 0, 957},
+ dictWord{6, 0, 1804},
+ dictWord{8, 0, 842},
+ dictWord{8, 0, 843},
+ dictWord{8, 0, 851},
+ dictWord{8, 0, 855},
+ dictWord{140, 0, 767},
+ dictWord{135, 11, 814},
+ dictWord{4, 11, 57},
+ dictWord{7, 11, 1195},
+ dictWord{7, 11, 1438},
+ dictWord{7, 11, 1548},
+ dictWord{7, 11, 1835},
+ dictWord{7, 11, 1904},
+ dictWord{9, 11, 757},
+ dictWord{10, 11, 604},
+ dictWord{139, 11, 519},
+ dictWord{133, 10, 882},
+ dictWord{138, 0, 246},
+ dictWord{4, 0, 934},
+ dictWord{5, 0, 202},
+ dictWord{8, 0, 610},
+ dictWord{7, 11, 1897},
+ dictWord{12, 11, 290},
+ dictWord{13, 11, 80},
+ dictWord{13, 11, 437},
+ dictWord{145, 11, 74},
+ dictWord{8, 0, 96},
+ dictWord{9, 0, 36},
+ dictWord{10, 0, 607},
+ dictWord{10, 0, 804},
+ dictWord{10, 0, 832},
+ dictWord{11, 0, 423},
+ dictWord{11, 0, 442},
+ dictWord{12, 0, 309},
+ dictWord{14, 0, 199},
+ dictWord{15, 0, 90},
+ dictWord{145, 0, 110},
+ dictWord{132, 10, 426},
+ dictWord{7, 0, 654},
+ dictWord{8, 0, 240},
+ dictWord{6, 10, 58},
+ dictWord{7, 10, 745},
+ dictWord{7, 10, 1969},
+ dictWord{8, 10, 675},
+ dictWord{9, 10, 479},
+ dictWord{9, 10, 731},
+ dictWord{10, 10, 330},
+ dictWord{10, 10, 593},
+ dictWord{10, 10, 817},
+ dictWord{11, 10, 32},
+ dictWord{11, 10, 133},
+ dictWord{11, 10, 221},
+ dictWord{145, 10, 68},
+ dictWord{9, 0, 13},
+ dictWord{9, 0, 398},
+ dictWord{9, 0, 727},
+ dictWord{10, 0, 75},
+ dictWord{10, 0, 184},
+ dictWord{10, 0, 230},
+ dictWord{10, 0, 564},
+ dictWord{10, 0, 569},
+ dictWord{11, 0, 973},
+ dictWord{12, 0, 70},
+ dictWord{12, 0, 189},
+ dictWord{13, 0, 57},
+ dictWord{141, 0, 257},
+ dictWord{4, 11, 209},
+ dictWord{135, 11, 902},
+ dictWord{7, 0, 391},
+ dictWord{137, 10, 538},
+ dictWord{134, 0, 403},
+ dictWord{6, 11, 303},
+ dictWord{7, 11, 335},
+ dictWord{7, 11, 1437},
+ dictWord{7, 11, 1668},
+ dictWord{8, 11, 553},
+ dictWord{8, 11, 652},
+ dictWord{8, 11, 656},
+ dictWord{9, 11, 558},
+ dictWord{11, 11, 743},
+ dictWord{149, 11, 18},
+ dictWord{132, 11, 559},
+ dictWord{11, 0, 75},
+ dictWord{142, 0, 267},
+ dictWord{6, 0, 815},
+ dictWord{141, 11, 2},
+ dictWord{141, 0, 366},
+ dictWord{137, 0, 631},
+ dictWord{133, 11, 1017},
+ dictWord{5, 0, 345},
+ dictWord{135, 0, 1016},
+ dictWord{133, 11, 709},
+ dictWord{134, 11, 1745},
+ dictWord{133, 10, 566},
+ dictWord{7, 0, 952},
+ dictWord{6, 10, 48},
+ dictWord{9, 10, 139},
+ dictWord{10, 10, 399},
+ dictWord{11, 10, 469},
+ dictWord{12, 10, 634},
+ dictWord{141, 10, 223},
+ dictWord{133, 0, 673},
+ dictWord{9, 0, 850},
+ dictWord{7, 11, 8},
+ dictWord{136, 11, 206},
+ dictWord{6, 0, 662},
+ dictWord{149, 0, 35},
+ dictWord{4, 0, 287},
+ dictWord{133, 0, 1018},
+ dictWord{6, 10, 114},
+ dictWord{7, 10, 1224},
+ dictWord{7, 10, 1556},
+ dictWord{136, 10, 3},
+ dictWord{8, 10, 576},
+ dictWord{137, 10, 267},
+ dictWord{4, 0, 884},
+ dictWord{5, 0, 34},
+ dictWord{10, 0, 724},
+ dictWord{12, 0, 444},
+ dictWord{13, 0, 354},
+ dictWord{18, 0, 32},
+ dictWord{23, 0, 24},
+ dictWord{23, 0, 31},
+ dictWord{152, 0, 5},
+ dictWord{133, 10, 933},
+ dictWord{132, 11, 776},
+ dictWord{138, 0, 151},
+ dictWord{136, 0, 427},
+ dictWord{134, 0, 382},
+ dictWord{132, 0, 329},
+ dictWord{9, 0, 846},
+ dictWord{10, 0, 827},
+ dictWord{138, 11, 33},
+ dictWord{9, 0, 279},
+ dictWord{10, 0, 407},
+ dictWord{14, 0, 84},
+ dictWord{22, 0, 18},
+ dictWord{135, 11, 1297},
+ dictWord{136, 11, 406},
+ dictWord{132, 0, 906},
+ dictWord{136, 0, 366},
+ dictWord{134, 0, 843},
+ dictWord{134, 0, 1443},
+ dictWord{135, 0, 1372},
+ dictWord{138, 0, 992},
+ dictWord{4, 0, 123},
+ dictWord{5, 0, 605},
+ dictWord{7, 0, 1509},
+ dictWord{136, 0, 36},
+ dictWord{132, 0, 649},
+ dictWord{8, 11, 175},
+ dictWord{10, 11, 168},
+ dictWord{138, 11, 573},
+ dictWord{133, 0, 767},
+ dictWord{134, 0, 1018},
+ dictWord{135, 11, 1305},
+ dictWord{12, 10, 30},
+ dictWord{13, 10, 148},
+ dictWord{14, 10, 87},
+ dictWord{14, 10, 182},
+ dictWord{16, 10, 42},
+ dictWord{148, 10, 70},
+ dictWord{134, 11, 607},
+ dictWord{4, 0, 273},
+ dictWord{5, 0, 658},
+ dictWord{133, 0, 995},
+ dictWord{6, 0, 72},
+ dictWord{139, 11, 174},
+ dictWord{10, 0, 483},
+ dictWord{12, 0, 368},
+ dictWord{7, 10, 56},
+ dictWord{7, 10, 1989},
+ dictWord{8, 10, 337},
+ dictWord{8, 10, 738},
+ dictWord{9, 10, 600},
+ dictWord{13, 10, 447},
+ dictWord{142, 10, 92},
+ dictWord{5, 11, 784},
+ dictWord{138, 10, 666},
+ dictWord{135, 0, 1345},
+ dictWord{139, 11, 882},
+ dictWord{134, 0, 1293},
+ dictWord{133, 0, 589},
+ dictWord{134, 0, 1988},
+ dictWord{5, 0, 117},
+ dictWord{6, 0, 514},
+ dictWord{6, 0, 541},
+ dictWord{7, 0, 1164},
+ dictWord{7, 0, 1436},
+ dictWord{8, 0, 220},
+ dictWord{8, 0, 648},
+ dictWord{10, 0, 688},
+ dictWord{139, 0, 560},
+ dictWord{136, 0, 379},
+ dictWord{5, 0, 686},
+ dictWord{7, 10, 866},
+ dictWord{135, 10, 1163},
+ dictWord{132, 10, 328},
+ dictWord{9, 11, 14},
+ dictWord{9, 11, 441},
+ dictWord{10, 11, 306},
+ dictWord{139, 11, 9},
+ dictWord{4, 10, 101},
+ dictWord{135, 10, 1171},
+ dictWord{5, 10, 833},
+ dictWord{136, 10, 744},
+ dictWord{5, 11, 161},
+ dictWord{7, 11, 839},
+ dictWord{135, 11, 887},
+ dictWord{7, 0, 196},
+ dictWord{10, 0, 765},
+ dictWord{11, 0, 347},
+ dictWord{11, 0, 552},
+ dictWord{11, 0, 790},
+ dictWord{12, 0, 263},
+ dictWord{13, 0, 246},
+ dictWord{13, 0, 270},
+ dictWord{13, 0, 395},
+ dictWord{14, 0, 176},
+ dictWord{14, 0, 190},
+ dictWord{14, 0, 398},
+ dictWord{14, 0, 412},
+ dictWord{15, 0, 32},
+ dictWord{15, 0, 63},
+ dictWord{16, 0, 88},
+ dictWord{147, 0, 105},
+ dictWord{6, 10, 9},
+ dictWord{6, 10, 397},
+ dictWord{7, 10, 53},
+ dictWord{7, 10, 1742},
+ dictWord{10, 10, 632},
+ dictWord{11, 10, 828},
+ dictWord{140, 10, 146},
+ dictWord{5, 0, 381},
+ dictWord{135, 0, 1792},
+ dictWord{134, 0, 1452},
+ dictWord{135, 11, 429},
+ dictWord{8, 0, 367},
+ dictWord{10, 0, 760},
+ dictWord{14, 0, 79},
+ dictWord{20, 0, 17},
+ dictWord{152, 0, 0},
+ dictWord{7, 0, 616},
+ dictWord{138, 0, 413},
+ dictWord{11, 10, 417},
+ dictWord{12, 10, 223},
+ dictWord{140, 10, 265},
+ dictWord{7, 11, 1611},
+ dictWord{13, 11, 14},
+ dictWord{15, 11, 44},
+ dictWord{19, 11, 13},
+ dictWord{148, 11, 76},
+ dictWord{135, 0, 1229},
+ dictWord{6, 0, 120},
+ dictWord{7, 0, 1188},
+ dictWord{7, 0, 1710},
+ dictWord{8, 0, 286},
+ dictWord{9, 0, 667},
+ dictWord{11, 0, 592},
+ dictWord{139, 0, 730},
+ dictWord{135, 11, 1814},
+ dictWord{135, 0, 1146},
+ dictWord{4, 10, 186},
+ dictWord{5, 10, 157},
+ dictWord{8, 10, 168},
+ dictWord{138, 10, 6},
+ dictWord{4, 0, 352},
+ dictWord{135, 0, 687},
+ dictWord{4, 0, 192},
+ dictWord{5, 0, 49},
+ dictWord{6, 0, 200},
+ dictWord{6, 0, 293},
+ dictWord{6, 0, 1696},
+ dictWord{135, 0, 1151},
+ dictWord{133, 10, 875},
+ dictWord{5, 10, 773},
+ dictWord{5, 10, 991},
+ dictWord{6, 10, 1635},
+ dictWord{134, 10, 1788},
+ dictWord{7, 10, 111},
+ dictWord{136, 10, 581},
+ dictWord{6, 0, 935},
+ dictWord{134, 0, 1151},
+ dictWord{134, 0, 1050},
+ dictWord{132, 0, 650},
+ dictWord{132, 0, 147},
+ dictWord{11, 0, 194},
+ dictWord{12, 0, 62},
+ dictWord{12, 0, 88},
+ dictWord{11, 11, 194},
+ dictWord{12, 11, 62},
+ dictWord{140, 11, 88},
+ dictWord{6, 0, 339},
+ dictWord{135, 0, 923},
+ dictWord{134, 10, 1747},
+ dictWord{7, 11, 643},
+ dictWord{136, 11, 236},
+ dictWord{133, 0, 934},
+ dictWord{7, 10, 1364},
+ dictWord{7, 10, 1907},
+ dictWord{141, 10, 158},
+ dictWord{132, 10, 659},
+ dictWord{4, 10, 404},
+ dictWord{135, 10, 675},
+ dictWord{7, 11, 581},
+ dictWord{9, 11, 644},
+ dictWord{137, 11, 699},
+ dictWord{13, 0, 211},
+ dictWord{14, 0, 133},
+ dictWord{14, 0, 204},
+ dictWord{15, 0, 64},
+ dictWord{15, 0, 69},
+ dictWord{15, 0, 114},
+ dictWord{16, 0, 10},
+ dictWord{19, 0, 23},
+ dictWord{19, 0, 35},
+ dictWord{19, 0, 39},
+ dictWord{19, 0, 51},
+ dictWord{19, 0, 71},
+ dictWord{19, 0, 75},
+ dictWord{152, 0, 15},
+ dictWord{133, 10, 391},
+ dictWord{5, 11, 54},
+ dictWord{135, 11, 1513},
+ dictWord{7, 0, 222},
+ dictWord{8, 0, 341},
+ dictWord{5, 10, 540},
+ dictWord{134, 10, 1697},
+ dictWord{134, 10, 78},
+ dictWord{132, 11, 744},
+ dictWord{136, 0, 293},
+ dictWord{137, 11, 701},
+ dictWord{7, 11, 930},
+ dictWord{10, 11, 402},
+ dictWord{10, 11, 476},
+ dictWord{13, 11, 452},
+ dictWord{18, 11, 55},
+ dictWord{147, 11, 104},
+ dictWord{132, 0, 637},
+ dictWord{133, 10, 460},
+ dictWord{8, 11, 50},
+ dictWord{137, 11, 624},
+ dictWord{132, 11, 572},
+ dictWord{134, 0, 1159},
+ dictWord{4, 10, 199},
+ dictWord{139, 10, 34},
+ dictWord{134, 0, 847},
+ dictWord{134, 10, 388},
+ dictWord{6, 11, 43},
+ dictWord{7, 11, 38},
+ dictWord{8, 11, 248},
+ dictWord{9, 11, 504},
+ dictWord{138, 11, 513},
+ dictWord{9, 0, 683},
+ dictWord{4, 10, 511},
+ dictWord{6, 10, 608},
+ dictWord{9, 10, 333},
+ dictWord{10, 10, 602},
+ dictWord{11, 10, 441},
+ dictWord{11, 10, 723},
+ dictWord{11, 10, 976},
+ dictWord{140, 10, 357},
+ dictWord{9, 0, 867},
+ dictWord{138, 0, 837},
+ dictWord{6, 0, 944},
+ dictWord{135, 11, 326},
+ dictWord{135, 0, 1809},
+ dictWord{5, 10, 938},
+ dictWord{7, 11, 783},
+ dictWord{136, 10, 707},
+ dictWord{133, 11, 766},
+ dictWord{133, 11, 363},
+ dictWord{6, 0, 170},
+ dictWord{7, 0, 1080},
+ dictWord{8, 0, 395},
+ dictWord{8, 0, 487},
+ dictWord{141, 0, 147},
+ dictWord{6, 11, 258},
+ dictWord{140, 11, 409},
+ dictWord{4, 0, 535},
+ dictWord{8, 0, 618},
+ dictWord{5, 11, 249},
+ dictWord{148, 11, 82},
+ dictWord{6, 0, 1379},
+ dictWord{149, 11, 15},
+ dictWord{135, 0, 1625},
+ dictWord{150, 0, 23},
+ dictWord{5, 11, 393},
+ dictWord{6, 11, 378},
+ dictWord{7, 11, 1981},
+ dictWord{9, 11, 32},
+ dictWord{9, 11, 591},
+ dictWord{10, 11, 685},
+ dictWord{10, 11, 741},
+ dictWord{142, 11, 382},
+ dictWord{133, 11, 788},
+ dictWord{7, 11, 1968},
+ dictWord{10, 11, 19},
+ dictWord{139, 11, 911},
+ dictWord{7, 11, 1401},
+ dictWord{135, 11, 1476},
+ dictWord{4, 11, 61},
+ dictWord{5, 11, 58},
+ dictWord{5, 11, 171},
+ dictWord{5, 11, 635},
+ dictWord{5, 11, 683},
+ dictWord{5, 11, 700},
+ dictWord{6, 11, 291},
+ dictWord{6, 11, 566},
+ dictWord{7, 11, 1650},
+ dictWord{11, 11, 523},
+ dictWord{12, 11, 273},
+ dictWord{12, 11, 303},
+ dictWord{15, 11, 39},
+ dictWord{143, 11, 111},
+ dictWord{6, 10, 469},
+ dictWord{7, 10, 1709},
+ dictWord{138, 10, 515},
+ dictWord{4, 0, 778},
+ dictWord{134, 11, 589},
+ dictWord{132, 0, 46},
+ dictWord{5, 0, 811},
+ dictWord{6, 0, 1679},
+ dictWord{6, 0, 1714},
+ dictWord{135, 0, 2032},
+ dictWord{7, 0, 1458},
+ dictWord{9, 0, 407},
+ dictWord{11, 0, 15},
+ dictWord{12, 0, 651},
+ dictWord{149, 0, 37},
+ dictWord{7, 0, 938},
+ dictWord{132, 10, 500},
+ dictWord{6, 0, 34},
+ dictWord{7, 0, 69},
+ dictWord{7, 0, 1089},
+ dictWord{7, 0, 1281},
+ dictWord{8, 0, 708},
+ dictWord{8, 0, 721},
+ dictWord{9, 0, 363},
+ dictWord{148, 0, 98},
+ dictWord{10, 11, 231},
+ dictWord{147, 11, 124},
+ dictWord{7, 11, 726},
+ dictWord{152, 11, 9},
+ dictWord{5, 10, 68},
+ dictWord{134, 10, 383},
+ dictWord{136, 11, 583},
+ dictWord{4, 11, 917},
+ dictWord{133, 11, 1005},
+ dictWord{11, 10, 216},
+ dictWord{139, 10, 340},
+ dictWord{135, 11, 1675},
+ dictWord{8, 0, 441},
+ dictWord{10, 0, 314},
+ dictWord{143, 0, 3},
+ dictWord{132, 11, 919},
+ dictWord{4, 10, 337},
+ dictWord{6, 10, 353},
+ dictWord{7, 10, 1934},
+ dictWord{8, 10, 488},
+ dictWord{137, 10, 429},
+ dictWord{7, 0, 889},
+ dictWord{7, 10, 1795},
+ dictWord{8, 10, 259},
+ dictWord{9, 10, 135},
+ dictWord{9, 10, 177},
+ dictWord{9, 10, 860},
+ dictWord{10, 10, 825},
+ dictWord{11, 10, 115},
+ dictWord{11, 10, 370},
+ dictWord{11, 10, 405},
+ dictWord{11, 10, 604},
+ dictWord{12, 10, 10},
+ dictWord{12, 10, 667},
+ dictWord{12, 10, 669},
+ dictWord{13, 10, 76},
+ dictWord{14, 10, 310},
+ dictWord{15, 10, 76},
+ dictWord{15, 10, 147},
+ dictWord{148, 10, 23},
+ dictWord{4, 10, 15},
+ dictWord{4, 11, 255},
+ dictWord{5, 10, 22},
+ dictWord{5, 11, 302},
+ dictWord{6, 11, 132},
+ dictWord{6, 10, 244},
+ dictWord{7, 10, 40},
+ dictWord{7, 11, 128},
+ dictWord{7, 10, 200},
+ dictWord{7, 11, 283},
+ dictWord{7, 10, 906},
+ dictWord{7, 10, 1199},
+ dictWord{7, 11, 1299},
+ dictWord{9, 10, 616},
+ dictWord{10, 11, 52},
+ dictWord{10, 11, 514},
+ dictWord{10, 10, 716},
+ dictWord{11, 10, 635},
+ dictWord{11, 10, 801},
+ dictWord{11, 11, 925},
+ dictWord{12, 10, 458},
+ dictWord{13, 11, 92},
+ dictWord{142, 11, 309},
+ dictWord{132, 0, 462},
+ dictWord{137, 11, 173},
+ dictWord{135, 10, 1735},
+ dictWord{8, 0, 525},
+ dictWord{5, 10, 598},
+ dictWord{7, 10, 791},
+ dictWord{8, 10, 108},
+ dictWord{137, 10, 123},
+ dictWord{5, 0, 73},
+ dictWord{6, 0, 23},
+ dictWord{134, 0, 338},
+ dictWord{132, 0, 676},
+ dictWord{132, 10, 683},
+ dictWord{7, 0, 725},
+ dictWord{8, 0, 498},
+ dictWord{139, 0, 268},
+ dictWord{12, 0, 21},
+ dictWord{151, 0, 7},
+ dictWord{135, 0, 773},
+ dictWord{4, 10, 155},
+ dictWord{135, 10, 1689},
+ dictWord{4, 0, 164},
+ dictWord{5, 0, 730},
+ dictWord{5, 10, 151},
+ dictWord{5, 10, 741},
+ dictWord{6, 11, 210},
+ dictWord{7, 10, 498},
+ dictWord{7, 10, 870},
+ dictWord{7, 10, 1542},
+ dictWord{12, 10, 213},
+ dictWord{14, 10, 36},
+ dictWord{14, 10, 391},
+ dictWord{17, 10, 111},
+ dictWord{18, 10, 6},
+ dictWord{18, 10, 46},
+ dictWord{18, 10, 151},
+ dictWord{19, 10, 36},
+ dictWord{20, 10, 32},
+ dictWord{20, 10, 56},
+ dictWord{20, 10, 69},
+ dictWord{20, 10, 102},
+ dictWord{21, 10, 4},
+ dictWord{22, 10, 8},
+ dictWord{22, 10, 10},
+ dictWord{22, 10, 14},
+ dictWord{150, 10, 31},
+ dictWord{4, 10, 624},
+ dictWord{135, 10, 1752},
+ dictWord{4, 0, 583},
+ dictWord{9, 0, 936},
+ dictWord{15, 0, 214},
+ dictWord{18, 0, 199},
+ dictWord{24, 0, 26},
+ dictWord{134, 11, 588},
+ dictWord{7, 0, 1462},
+ dictWord{11, 0, 659},
+ dictWord{4, 11, 284},
+ dictWord{134, 11, 223},
+ dictWord{133, 0, 220},
+ dictWord{139, 0, 803},
+ dictWord{132, 0, 544},
+ dictWord{4, 10, 492},
+ dictWord{133, 10, 451},
+ dictWord{16, 0, 98},
+ dictWord{148, 0, 119},
+ dictWord{4, 11, 218},
+ dictWord{7, 11, 526},
+ dictWord{143, 11, 137},
+ dictWord{135, 10, 835},
+ dictWord{4, 11, 270},
+ dictWord{5, 11, 192},
+ dictWord{6, 11, 332},
+ dictWord{7, 11, 1322},
+ dictWord{13, 11, 9},
+ dictWord{13, 10, 70},
+ dictWord{14, 11, 104},
+ dictWord{142, 11, 311},
+ dictWord{132, 10, 539},
+ dictWord{140, 11, 661},
+ dictWord{5, 0, 176},
+ dictWord{6, 0, 437},
+ dictWord{6, 0, 564},
+ dictWord{11, 0, 181},
+ dictWord{141, 0, 183},
+ dictWord{135, 0, 1192},
+ dictWord{6, 10, 113},
+ dictWord{135, 10, 436},
+ dictWord{136, 10, 718},
+ dictWord{135, 10, 520},
+ dictWord{135, 0, 1878},
+ dictWord{140, 11, 196},
+ dictWord{7, 11, 379},
+ dictWord{8, 11, 481},
+ dictWord{137, 11, 377},
+ dictWord{5, 11, 1003},
+ dictWord{6, 11, 149},
+ dictWord{137, 11, 746},
+ dictWord{8, 11, 262},
+ dictWord{9, 11, 627},
+ dictWord{10, 11, 18},
+ dictWord{11, 11, 214},
+ dictWord{11, 11, 404},
+ dictWord{11, 11, 457},
+ dictWord{11, 11, 780},
+ dictWord{11, 11, 849},
+ dictWord{11, 11, 913},
+ dictWord{13, 11, 330},
+ dictWord{13, 11, 401},
+ dictWord{142, 11, 200},
+ dictWord{149, 0, 26},
+ dictWord{136, 11, 304},
+ dictWord{132, 11, 142},
+ dictWord{135, 0, 944},
+ dictWord{4, 0, 790},
+ dictWord{5, 0, 273},
+ dictWord{134, 0, 394},
+ dictWord{134, 0, 855},
+ dictWord{4, 0, 135},
+ dictWord{6, 0, 127},
+ dictWord{7, 0, 1185},
+ dictWord{7, 0, 1511},
+ dictWord{8, 0, 613},
+ dictWord{11, 0, 5},
+ dictWord{12, 0, 336},
+ dictWord{12, 0, 495},
+ dictWord{12, 0, 586},
+ dictWord{12, 0, 660},
+ dictWord{12, 0, 668},
+ dictWord{14, 0, 385},
+ dictWord{15, 0, 118},
+ dictWord{17, 0, 20},
+ dictWord{146, 0, 98},
+ dictWord{6, 0, 230},
+ dictWord{9, 0, 752},
+ dictWord{18, 0, 109},
+ dictWord{12, 10, 610},
+ dictWord{13, 10, 431},
+ dictWord{144, 10, 59},
+ dictWord{7, 0, 1954},
+ dictWord{135, 11, 925},
+ dictWord{4, 11, 471},
+ dictWord{5, 11, 51},
+ dictWord{6, 11, 602},
+ dictWord{8, 11, 484},
+ dictWord{10, 11, 195},
+ dictWord{140, 11, 159},
+ dictWord{132, 10, 307},
+ dictWord{136, 11, 688},
+ dictWord{132, 11, 697},
+ dictWord{7, 11, 812},
+ dictWord{7, 11, 1261},
+ dictWord{7, 11, 1360},
+ dictWord{9, 11, 632},
+ dictWord{140, 11, 352},
+ dictWord{5, 0, 162},
+ dictWord{8, 0, 68},
+ dictWord{133, 10, 964},
+ dictWord{4, 0, 654},
+ dictWord{136, 11, 212},
+ dictWord{4, 0, 156},
+ dictWord{7, 0, 998},
+ dictWord{7, 0, 1045},
+ dictWord{7, 0, 1860},
+ dictWord{9, 0, 48},
+ dictWord{9, 0, 692},
+ dictWord{11, 0, 419},
+ dictWord{139, 0, 602},
+ dictWord{133, 11, 221},
+ dictWord{4, 11, 373},
+ dictWord{5, 11, 283},
+ dictWord{6, 11, 480},
+ dictWord{135, 11, 609},
+ dictWord{142, 11, 216},
+ dictWord{132, 0, 240},
+ dictWord{6, 11, 192},
+ dictWord{9, 11, 793},
+ dictWord{145, 11, 55},
+ dictWord{4, 10, 75},
+ dictWord{5, 10, 180},
+ dictWord{6, 10, 500},
+ dictWord{7, 10, 58},
+ dictWord{7, 10, 710},
+ dictWord{138, 10, 645},
+ dictWord{4, 11, 132},
+ dictWord{5, 11, 69},
+ dictWord{5, 10, 649},
+ dictWord{135, 11, 1242},
+ dictWord{6, 10, 276},
+ dictWord{7, 10, 282},
+ dictWord{7, 10, 879},
+ dictWord{7, 10, 924},
+ dictWord{8, 10, 459},
+ dictWord{9, 10, 599},
+ dictWord{9, 10, 754},
+ dictWord{11, 10, 574},
+ dictWord{12, 10, 128},
+ dictWord{12, 10, 494},
+ dictWord{13, 10, 52},
+ dictWord{13, 10, 301},
+ dictWord{15, 10, 30},
+ dictWord{143, 10, 132},
+ dictWord{132, 10, 200},
+ dictWord{4, 11, 111},
+ dictWord{135, 11, 302},
+ dictWord{9, 0, 197},
+ dictWord{10, 0, 300},
+ dictWord{12, 0, 473},
+ dictWord{13, 0, 90},
+ dictWord{141, 0, 405},
+ dictWord{132, 11, 767},
+ dictWord{6, 11, 42},
+ dictWord{7, 11, 1416},
+ dictWord{7, 11, 1590},
+ dictWord{7, 11, 2005},
+ dictWord{8, 11, 131},
+ dictWord{8, 11, 466},
+ dictWord{9, 11, 672},
+ dictWord{13, 11, 252},
+ dictWord{148, 11, 103},
+ dictWord{8, 0, 958},
+ dictWord{8, 0, 999},
+ dictWord{10, 0, 963},
+ dictWord{138, 0, 1001},
+ dictWord{135, 10, 1621},
+ dictWord{135, 0, 858},
+ dictWord{4, 0, 606},
+ dictWord{137, 11, 444},
+ dictWord{6, 11, 44},
+ dictWord{136, 11, 368},
+ dictWord{139, 11, 172},
+ dictWord{4, 11, 570},
+ dictWord{133, 11, 120},
+ dictWord{139, 11, 624},
+ dictWord{7, 0, 1978},
+ dictWord{8, 0, 676},
+ dictWord{6, 10, 225},
+ dictWord{137, 10, 211},
+ dictWord{7, 0, 972},
+ dictWord{11, 0, 102},
+ dictWord{136, 10, 687},
+ dictWord{6, 11, 227},
+ dictWord{135, 11, 1589},
+ dictWord{8, 10, 58},
+ dictWord{9, 10, 724},
+ dictWord{11, 10, 809},
+ dictWord{13, 10, 113},
+ dictWord{145, 10, 72},
+ dictWord{4, 0, 361},
+ dictWord{133, 0, 315},
+ dictWord{132, 0, 461},
+ dictWord{6, 10, 345},
+ dictWord{135, 10, 1247},
+ dictWord{132, 0, 472},
+ dictWord{8, 10, 767},
+ dictWord{8, 10, 803},
+ dictWord{9, 10, 301},
+ dictWord{137, 10, 903},
+ dictWord{135, 11, 1333},
+ dictWord{135, 11, 477},
+ dictWord{7, 10, 1949},
+ dictWord{136, 10, 674},
+ dictWord{6, 0, 905},
+ dictWord{138, 0, 747},
+ dictWord{133, 0, 155},
+ dictWord{134, 10, 259},
+ dictWord{7, 0, 163},
+ dictWord{8, 0, 319},
+ dictWord{9, 0, 402},
+ dictWord{10, 0, 24},
+ dictWord{10, 0, 681},
+ dictWord{11, 0, 200},
+ dictWord{12, 0, 253},
+ dictWord{12, 0, 410},
+ dictWord{142, 0, 219},
+ dictWord{5, 0, 475},
+ dictWord{7, 0, 1780},
+ dictWord{9, 0, 230},
+ dictWord{11, 0, 297},
+ dictWord{11, 0, 558},
+ dictWord{14, 0, 322},
+ dictWord{19, 0, 76},
+ dictWord{6, 11, 1667},
+ dictWord{7, 11, 2036},
+ dictWord{138, 11, 600},
+ dictWord{136, 10, 254},
+ dictWord{6, 0, 848},
+ dictWord{135, 0, 1956},
+ dictWord{6, 11, 511},
+ dictWord{140, 11, 132},
+ dictWord{5, 11, 568},
+ dictWord{6, 11, 138},
+ dictWord{135, 11, 1293},
+ dictWord{6, 0, 631},
+ dictWord{137, 0, 838},
+ dictWord{149, 0, 36},
+ dictWord{4, 11, 565},
+ dictWord{8, 11, 23},
+ dictWord{136, 11, 827},
+ dictWord{5, 0, 944},
+ dictWord{134, 0, 1769},
+ dictWord{4, 0, 144},
+ dictWord{6, 0, 842},
+ dictWord{6, 0, 1400},
+ dictWord{4, 11, 922},
+ dictWord{133, 11, 1023},
+ dictWord{133, 10, 248},
+ dictWord{9, 10, 800},
+ dictWord{10, 10, 693},
+ dictWord{11, 10, 482},
+ dictWord{11, 10, 734},
+ dictWord{139, 10, 789},
+ dictWord{7, 11, 1002},
+ dictWord{139, 11, 145},
+ dictWord{4, 10, 116},
+ dictWord{5, 10, 95},
+ dictWord{5, 10, 445},
+ dictWord{7, 10, 1688},
+ dictWord{8, 10, 29},
+ dictWord{9, 10, 272},
+ dictWord{11, 10, 509},
+ dictWord{139, 10, 915},
+ dictWord{14, 0, 369},
+ dictWord{146, 0, 72},
+ dictWord{135, 10, 1641},
+ dictWord{132, 11, 740},
+ dictWord{133, 10, 543},
+ dictWord{140, 11, 116},
+ dictWord{6, 0, 247},
+ dictWord{9, 0, 555},
+ dictWord{5, 10, 181},
+ dictWord{136, 10, 41},
+ dictWord{133, 10, 657},
+ dictWord{136, 0, 996},
+ dictWord{138, 10, 709},
+ dictWord{7, 0, 189},
+ dictWord{8, 10, 202},
+ dictWord{138, 10, 536},
+ dictWord{136, 11, 402},
+ dictWord{4, 11, 716},
+ dictWord{141, 11, 31},
+ dictWord{10, 0, 280},
+ dictWord{138, 0, 797},
+ dictWord{9, 10, 423},
+ dictWord{140, 10, 89},
+ dictWord{8, 10, 113},
+ dictWord{9, 10, 877},
+ dictWord{10, 10, 554},
+ dictWord{11, 10, 83},
+ dictWord{12, 10, 136},
+ dictWord{147, 10, 109},
+ dictWord{133, 10, 976},
+ dictWord{7, 0, 746},
+ dictWord{132, 10, 206},
+ dictWord{136, 0, 526},
+ dictWord{139, 0, 345},
+ dictWord{136, 0, 1017},
+ dictWord{8, 11, 152},
+ dictWord{9, 11, 53},
+ dictWord{9, 11, 268},
+ dictWord{9, 11, 901},
+ dictWord{10, 11, 518},
+ dictWord{10, 11, 829},
+ dictWord{11, 11, 188},
+ dictWord{13, 11, 74},
+ dictWord{14, 11, 46},
+ dictWord{15, 11, 17},
+ dictWord{15, 11, 33},
+ dictWord{17, 11, 40},
+ dictWord{18, 11, 36},
+ dictWord{19, 11, 20},
+ dictWord{22, 11, 1},
+ dictWord{152, 11, 2},
+ dictWord{133, 11, 736},
+ dictWord{136, 11, 532},
+ dictWord{5, 0, 428},
+ dictWord{138, 0, 651},
+ dictWord{135, 11, 681},
+ dictWord{135, 0, 1162},
+ dictWord{7, 0, 327},
+ dictWord{13, 0, 230},
+ dictWord{17, 0, 113},
+ dictWord{8, 10, 226},
+ dictWord{10, 10, 537},
+ dictWord{11, 10, 570},
+ dictWord{11, 10, 605},
+ dictWord{11, 10, 799},
+ dictWord{11, 10, 804},
+ dictWord{12, 10, 85},
+ dictWord{12, 10, 516},
+ dictWord{12, 10, 623},
+ dictWord{12, 11, 677},
+ dictWord{13, 10, 361},
+ dictWord{14, 10, 77},
+ dictWord{14, 10, 78},
+ dictWord{147, 10, 110},
+ dictWord{4, 0, 792},
+ dictWord{7, 0, 1717},
+ dictWord{10, 0, 546},
+ dictWord{132, 10, 769},
+ dictWord{4, 11, 684},
+ dictWord{136, 11, 384},
+ dictWord{132, 10, 551},
+ dictWord{134, 0, 1203},
+ dictWord{9, 10, 57},
+ dictWord{9, 10, 459},
+ dictWord{10, 10, 425},
+ dictWord{11, 10, 119},
+ dictWord{12, 10, 184},
+ dictWord{12, 10, 371},
+ dictWord{13, 10, 358},
+ dictWord{145, 10, 51},
+ dictWord{5, 0, 672},
+ dictWord{5, 10, 814},
+ dictWord{8, 10, 10},
+ dictWord{9, 10, 421},
+ dictWord{9, 10, 729},
+ dictWord{10, 10, 609},
+ dictWord{139, 10, 689},
+ dictWord{138, 0, 189},
+ dictWord{134, 10, 624},
+ dictWord{7, 11, 110},
+ dictWord{7, 11, 188},
+ dictWord{8, 11, 290},
+ dictWord{8, 11, 591},
+ dictWord{9, 11, 382},
+ dictWord{9, 11, 649},
+ dictWord{11, 11, 71},
+ dictWord{11, 11, 155},
+ dictWord{11, 11, 313},
+ dictWord{12, 11, 5},
+ dictWord{13, 11, 325},
+ dictWord{142, 11, 287},
+ dictWord{133, 0, 99},
+ dictWord{6, 0, 1053},
+ dictWord{135, 0, 298},
+ dictWord{7, 11, 360},
+ dictWord{7, 11, 425},
+ dictWord{9, 11, 66},
+ dictWord{9, 11, 278},
+ dictWord{138, 11, 644},
+ dictWord{4, 0, 397},
+ dictWord{136, 0, 555},
+ dictWord{137, 10, 269},
+ dictWord{132, 10, 528},
+ dictWord{4, 11, 900},
+ dictWord{133, 11, 861},
+ dictWord{6, 0, 1157},
+ dictWord{5, 11, 254},
+ dictWord{7, 11, 985},
+ dictWord{136, 11, 73},
+ dictWord{7, 11, 1959},
+ dictWord{136, 11, 683},
+ dictWord{12, 0, 398},
+ dictWord{20, 0, 39},
+ dictWord{21, 0, 11},
+ dictWord{150, 0, 41},
+ dictWord{4, 0, 485},
+ dictWord{7, 0, 353},
+ dictWord{135, 0, 1523},
+ dictWord{6, 0, 366},
+ dictWord{7, 0, 1384},
+ dictWord{135, 0, 1601},
+ dictWord{138, 0, 787},
+ dictWord{137, 0, 282},
+ dictWord{5, 10, 104},
+ dictWord{6, 10, 173},
+ dictWord{135, 10, 1631},
+ dictWord{139, 11, 146},
+ dictWord{4, 0, 157},
+ dictWord{133, 0, 471},
+ dictWord{134, 0, 941},
+ dictWord{132, 11, 725},
+ dictWord{7, 0, 1336},
+ dictWord{8, 10, 138},
+ dictWord{8, 10, 342},
+ dictWord{9, 10, 84},
+ dictWord{10, 10, 193},
+ dictWord{11, 10, 883},
+ dictWord{140, 10, 359},
+ dictWord{134, 11, 196},
+ dictWord{136, 0, 116},
+ dictWord{133, 11, 831},
+ dictWord{134, 0, 787},
+ dictWord{134, 10, 95},
+ dictWord{6, 10, 406},
+ dictWord{10, 10, 409},
+ dictWord{10, 10, 447},
+ dictWord{11, 10, 44},
+ dictWord{140, 10, 100},
+ dictWord{5, 0, 160},
+ dictWord{7, 0, 363},
+ dictWord{7, 0, 589},
+ dictWord{10, 0, 170},
+ dictWord{141, 0, 55},
+ dictWord{134, 0, 1815},
+ dictWord{132, 0, 866},
+ dictWord{6, 0, 889},
+ dictWord{6, 0, 1067},
+ dictWord{6, 0, 1183},
+ dictWord{4, 11, 321},
+ dictWord{134, 11, 569},
+ dictWord{5, 11, 848},
+ dictWord{134, 11, 66},
+ dictWord{4, 11, 36},
+ dictWord{6, 10, 1636},
+ dictWord{7, 11, 1387},
+ dictWord{10, 11, 205},
+ dictWord{11, 11, 755},
+ dictWord{141, 11, 271},
+ dictWord{132, 0, 689},
+ dictWord{9, 0, 820},
+ dictWord{4, 10, 282},
+ dictWord{7, 10, 1034},
+ dictWord{11, 10, 398},
+ dictWord{11, 10, 634},
+ dictWord{12, 10, 1},
+ dictWord{12, 10, 79},
+ dictWord{12, 10, 544},
+ dictWord{14, 10, 237},
+ dictWord{17, 10, 10},
+ dictWord{146, 10, 20},
+ dictWord{4, 0, 108},
+ dictWord{7, 0, 804},
+ dictWord{139, 0, 498},
+ dictWord{132, 11, 887},
+ dictWord{6, 0, 1119},
+ dictWord{135, 11, 620},
+ dictWord{6, 11, 165},
+ dictWord{138, 11, 388},
+ dictWord{5, 0, 244},
+ dictWord{5, 10, 499},
+ dictWord{6, 10, 476},
+ dictWord{7, 10, 600},
+ dictWord{7, 10, 888},
+ dictWord{135, 10, 1096},
+ dictWord{140, 0, 609},
+ dictWord{135, 0, 1005},
+ dictWord{4, 0, 412},
+ dictWord{133, 0, 581},
+ dictWord{4, 11, 719},
+ dictWord{135, 11, 155},
+ dictWord{7, 10, 296},
+ dictWord{7, 10, 596},
+ dictWord{8, 10, 560},
+ dictWord{8, 10, 586},
+ dictWord{9, 10, 612},
+ dictWord{11, 10, 304},
+ dictWord{12, 10, 46},
+ dictWord{13, 10, 89},
+ dictWord{14, 10, 112},
+ dictWord{145, 10, 122},
+ dictWord{4, 0, 895},
+ dictWord{133, 0, 772},
+ dictWord{142, 11, 307},
+ dictWord{135, 0, 1898},
+ dictWord{4, 0, 926},
+ dictWord{133, 0, 983},
+ dictWord{4, 11, 353},
+ dictWord{6, 11, 146},
+ dictWord{6, 11, 1789},
+ dictWord{7, 11, 288},
+ dictWord{7, 11, 990},
+ dictWord{7, 11, 1348},
+ dictWord{9, 11, 665},
+ dictWord{9, 11, 898},
+ dictWord{11, 11, 893},
+ dictWord{142, 11, 212},
+ dictWord{132, 0, 538},
+ dictWord{133, 11, 532},
+ dictWord{6, 0, 294},
+ dictWord{7, 0, 1267},
+ dictWord{8, 0, 624},
+ dictWord{141, 0, 496},
+ dictWord{7, 0, 1325},
+ dictWord{4, 11, 45},
+ dictWord{135, 11, 1257},
+ dictWord{138, 0, 301},
+ dictWord{9, 0, 298},
+ dictWord{12, 0, 291},
+ dictWord{13, 0, 276},
+ dictWord{14, 0, 6},
+ dictWord{17, 0, 18},
+ dictWord{21, 0, 32},
+ dictWord{7, 10, 1599},
+ dictWord{7, 10, 1723},
+ dictWord{8, 10, 79},
+ dictWord{8, 10, 106},
+ dictWord{8, 10, 190},
+ dictWord{8, 10, 302},
+ dictWord{8, 10, 383},
+ dictWord{8, 10, 713},
+ dictWord{9, 10, 119},
+ dictWord{9, 10, 233},
+ dictWord{9, 10, 419},
+ dictWord{9, 10, 471},
+ dictWord{10, 10, 181},
+ dictWord{10, 10, 406},
+ dictWord{11, 10, 57},
+ dictWord{11, 10, 85},
+ dictWord{11, 10, 120},
+ dictWord{11, 10, 177},
+ dictWord{11, 10, 296},
+ dictWord{11, 10, 382},
+ dictWord{11, 10, 454},
+ dictWord{11, 10, 758},
+ dictWord{11, 10, 999},
+ dictWord{12, 10, 27},
+ dictWord{12, 10, 131},
+ dictWord{12, 10, 245},
+ dictWord{12, 10, 312},
+ dictWord{12, 10, 446},
+ dictWord{12, 10, 454},
+ dictWord{13, 10, 98},
+ dictWord{13, 10, 426},
+ dictWord{13, 10, 508},
+ dictWord{14, 10, 163},
+ dictWord{14, 10, 272},
+ dictWord{14, 10, 277},
+ dictWord{14, 10, 370},
+ dictWord{15, 10, 95},
+ dictWord{15, 10, 138},
+ dictWord{15, 10, 167},
+ dictWord{17, 10, 38},
+ dictWord{148, 10, 96},
+ dictWord{132, 0, 757},
+ dictWord{134, 0, 1263},
+ dictWord{4, 0, 820},
+ dictWord{134, 10, 1759},
+ dictWord{133, 0, 722},
+ dictWord{136, 11, 816},
+ dictWord{138, 10, 372},
+ dictWord{145, 10, 16},
+ dictWord{134, 0, 1039},
+ dictWord{4, 0, 991},
+ dictWord{134, 0, 2028},
+ dictWord{133, 10, 258},
+ dictWord{7, 0, 1875},
+ dictWord{139, 0, 124},
+ dictWord{6, 11, 559},
+ dictWord{6, 11, 1691},
+ dictWord{135, 11, 586},
+ dictWord{5, 0, 324},
+ dictWord{7, 0, 881},
+ dictWord{8, 10, 134},
+ dictWord{9, 10, 788},
+ dictWord{140, 10, 438},
+ dictWord{7, 11, 1823},
+ dictWord{139, 11, 693},
+ dictWord{6, 0, 1348},
+ dictWord{134, 0, 1545},
+ dictWord{134, 0, 911},
+ dictWord{132, 0, 954},
+ dictWord{8, 0, 329},
+ dictWord{8, 0, 414},
+ dictWord{7, 10, 1948},
+ dictWord{135, 10, 2004},
+ dictWord{5, 0, 517},
+ dictWord{6, 10, 439},
+ dictWord{7, 10, 780},
+ dictWord{135, 10, 1040},
+ dictWord{132, 0, 816},
+ dictWord{5, 10, 1},
+ dictWord{6, 10, 81},
+ dictWord{138, 10, 520},
+ dictWord{9, 0, 713},
+ dictWord{10, 0, 222},
+ dictWord{5, 10, 482},
+ dictWord{8, 10, 98},
+ dictWord{10, 10, 700},
+ dictWord{10, 10, 822},
+ dictWord{11, 10, 302},
+ dictWord{11, 10, 778},
+ dictWord{12, 10, 50},
+ dictWord{12, 10, 127},
+ dictWord{12, 10, 396},
+ dictWord{13, 10, 62},
+ dictWord{13, 10, 328},
+ dictWord{14, 10, 122},
+ dictWord{147, 10, 72},
+ dictWord{137, 0, 33},
+ dictWord{5, 10, 2},
+ dictWord{7, 10, 1494},
+ dictWord{136, 10, 589},
+ dictWord{6, 10, 512},
+ dictWord{7, 10, 797},
+ dictWord{8, 10, 253},
+ dictWord{9, 10, 77},
+ dictWord{10, 10, 1},
+ dictWord{10, 11, 108},
+ dictWord{10, 10, 129},
+ dictWord{10, 10, 225},
+ dictWord{11, 11, 116},
+ dictWord{11, 10, 118},
+ dictWord{11, 10, 226},
+ dictWord{11, 10, 251},
+ dictWord{11, 10, 430},
+ dictWord{11, 10, 701},
+ dictWord{11, 10, 974},
+ dictWord{11, 10, 982},
+ dictWord{12, 10, 64},
+ dictWord{12, 10, 260},
+ dictWord{12, 10, 488},
+ dictWord{140, 10, 690},
+ dictWord{134, 11, 456},
+ dictWord{133, 11, 925},
+ dictWord{5, 0, 150},
+ dictWord{7, 0, 106},
+ dictWord{7, 0, 774},
+ dictWord{8, 0, 603},
+ dictWord{9, 0, 593},
+ dictWord{9, 0, 634},
+ dictWord{10, 0, 44},
+ dictWord{10, 0, 173},
+ dictWord{11, 0, 462},
+ dictWord{11, 0, 515},
+ dictWord{13, 0, 216},
+ dictWord{13, 0, 288},
+ dictWord{142, 0, 400},
+ dictWord{137, 10, 347},
+ dictWord{5, 0, 748},
+ dictWord{134, 0, 553},
+ dictWord{12, 0, 108},
+ dictWord{141, 0, 291},
+ dictWord{7, 0, 420},
+ dictWord{4, 10, 12},
+ dictWord{7, 10, 522},
+ dictWord{7, 10, 809},
+ dictWord{8, 10, 797},
+ dictWord{141, 10, 88},
+ dictWord{6, 11, 193},
+ dictWord{7, 11, 240},
+ dictWord{7, 11, 1682},
+ dictWord{10, 11, 51},
+ dictWord{10, 11, 640},
+ dictWord{11, 11, 410},
+ dictWord{13, 11, 82},
+ dictWord{14, 11, 247},
+ dictWord{14, 11, 331},
+ dictWord{142, 11, 377},
+ dictWord{133, 10, 528},
+ dictWord{135, 0, 1777},
+ dictWord{4, 0, 493},
+ dictWord{144, 0, 55},
+ dictWord{136, 11, 633},
+ dictWord{139, 0, 81},
+ dictWord{6, 0, 980},
+ dictWord{136, 0, 321},
+ dictWord{148, 10, 109},
+ dictWord{5, 10, 266},
+ dictWord{9, 10, 290},
+ dictWord{9, 10, 364},
+ dictWord{10, 10, 293},
+ dictWord{11, 10, 606},
+ dictWord{142, 10, 45},
+ dictWord{6, 0, 568},
+ dictWord{7, 0, 112},
+ dictWord{7, 0, 1804},
+ dictWord{8, 0, 362},
+ dictWord{8, 0, 410},
+ dictWord{8, 0, 830},
+ dictWord{9, 0, 514},
+ dictWord{11, 0, 649},
+ dictWord{142, 0, 157},
+ dictWord{4, 0, 74},
+ dictWord{6, 0, 510},
+ dictWord{6, 10, 594},
+ dictWord{9, 10, 121},
+ dictWord{10, 10, 49},
+ dictWord{10, 10, 412},
+ dictWord{139, 10, 834},
+ dictWord{134, 0, 838},
+ dictWord{136, 10, 748},
+ dictWord{132, 10, 466},
+ dictWord{132, 0, 625},
+ dictWord{135, 11, 1443},
+ dictWord{4, 11, 237},
+ dictWord{135, 11, 514},
+ dictWord{9, 10, 378},
+ dictWord{141, 10, 162},
+ dictWord{6, 0, 16},
+ dictWord{6, 0, 158},
+ dictWord{7, 0, 43},
+ dictWord{7, 0, 129},
+ dictWord{7, 0, 181},
+ dictWord{8, 0, 276},
+ dictWord{8, 0, 377},
+ dictWord{10, 0, 523},
+ dictWord{11, 0, 816},
+ dictWord{12, 0, 455},
+ dictWord{13, 0, 303},
+ dictWord{142, 0, 135},
+ dictWord{135, 0, 281},
+ dictWord{4, 0, 1},
+ dictWord{7, 0, 1143},
+ dictWord{7, 0, 1463},
+ dictWord{8, 0, 61},
+ dictWord{9, 0, 207},
+ dictWord{9, 0, 390},
+ dictWord{9, 0, 467},
+ dictWord{139, 0, 836},
+ dictWord{6, 11, 392},
+ dictWord{7, 11, 65},
+ dictWord{135, 11, 2019},
+ dictWord{132, 10, 667},
+ dictWord{4, 0, 723},
+ dictWord{5, 0, 895},
+ dictWord{7, 0, 1031},
+ dictWord{8, 0, 199},
+ dictWord{8, 0, 340},
+ dictWord{9, 0, 153},
+ dictWord{9, 0, 215},
+ dictWord{10, 0, 21},
+ dictWord{10, 0, 59},
+ dictWord{10, 0, 80},
+ dictWord{10, 0, 224},
+ dictWord{10, 0, 838},
+ dictWord{11, 0, 229},
+ dictWord{11, 0, 652},
+ dictWord{12, 0, 192},
+ dictWord{13, 0, 146},
+ dictWord{142, 0, 91},
+ dictWord{132, 0, 295},
+ dictWord{137, 0, 51},
+ dictWord{9, 11, 222},
+ dictWord{10, 11, 43},
+ dictWord{139, 11, 900},
+ dictWord{5, 0, 309},
+ dictWord{140, 0, 211},
+ dictWord{5, 0, 125},
+ dictWord{8, 0, 77},
+ dictWord{138, 0, 15},
+ dictWord{136, 11, 604},
+ dictWord{138, 0, 789},
+ dictWord{5, 0, 173},
+ dictWord{4, 10, 39},
+ dictWord{7, 10, 1843},
+ dictWord{8, 10, 407},
+ dictWord{11, 10, 144},
+ dictWord{140, 10, 523},
+ dictWord{138, 11, 265},
+ dictWord{133, 0, 439},
+ dictWord{132, 10, 510},
+ dictWord{7, 0, 648},
+ dictWord{7, 0, 874},
+ dictWord{11, 0, 164},
+ dictWord{12, 0, 76},
+ dictWord{18, 0, 9},
+ dictWord{7, 10, 1980},
+ dictWord{10, 10, 487},
+ dictWord{138, 10, 809},
+ dictWord{12, 0, 111},
+ dictWord{14, 0, 294},
+ dictWord{19, 0, 45},
+ dictWord{13, 10, 260},
+ dictWord{146, 10, 63},
+ dictWord{133, 11, 549},
+ dictWord{134, 10, 570},
+ dictWord{4, 0, 8},
+ dictWord{7, 0, 1152},
+ dictWord{7, 0, 1153},
+ dictWord{7, 0, 1715},
+ dictWord{9, 0, 374},
+ dictWord{10, 0, 478},
+ dictWord{139, 0, 648},
+ dictWord{135, 0, 1099},
+ dictWord{5, 0, 575},
+ dictWord{6, 0, 354},
+ dictWord{135, 0, 701},
+ dictWord{7, 11, 36},
+ dictWord{8, 11, 201},
+ dictWord{136, 11, 605},
+ dictWord{4, 10, 787},
+ dictWord{136, 11, 156},
+ dictWord{6, 0, 518},
+ dictWord{149, 11, 13},
+ dictWord{140, 11, 224},
+ dictWord{134, 0, 702},
+ dictWord{132, 10, 516},
+ dictWord{5, 11, 724},
+ dictWord{10, 11, 305},
+ dictWord{11, 11, 151},
+ dictWord{12, 11, 33},
+ dictWord{12, 11, 121},
+ dictWord{12, 11, 381},
+ dictWord{17, 11, 3},
+ dictWord{17, 11, 27},
+ dictWord{17, 11, 78},
+ dictWord{18, 11, 18},
+ dictWord{19, 11, 54},
+ dictWord{149, 11, 5},
+ dictWord{8, 0, 87},
+ dictWord{4, 11, 523},
+ dictWord{5, 11, 638},
+ dictWord{11, 10, 887},
+ dictWord{14, 10, 365},
+ dictWord{142, 10, 375},
+ dictWord{138, 0, 438},
+ dictWord{136, 10, 821},
+ dictWord{135, 11, 1908},
+ dictWord{6, 11, 242},
+ dictWord{7, 11, 227},
+ dictWord{7, 11, 1581},
+ dictWord{8, 11, 104},
+ dictWord{9, 11, 113},
+ dictWord{9, 11, 220},
+ dictWord{9, 11, 427},
+ dictWord{10, 11, 74},
+ dictWord{10, 11, 239},
+ dictWord{11, 11, 579},
+ dictWord{11, 11, 1023},
+ dictWord{13, 11, 4},
+ dictWord{13, 11, 204},
+ dictWord{13, 11, 316},
+ dictWord{18, 11, 95},
+ dictWord{148, 11, 86},
+ dictWord{4, 0, 69},
+ dictWord{5, 0, 122},
+ dictWord{5, 0, 849},
+ dictWord{6, 0, 1633},
+ dictWord{9, 0, 656},
+ dictWord{138, 0, 464},
+ dictWord{7, 0, 1802},
+ dictWord{4, 10, 10},
+ dictWord{139, 10, 786},
+ dictWord{135, 11, 861},
+ dictWord{139, 0, 499},
+ dictWord{7, 0, 476},
+ dictWord{7, 0, 1592},
+ dictWord{138, 0, 87},
+ dictWord{133, 10, 684},
+ dictWord{4, 0, 840},
+ dictWord{134, 10, 27},
+ dictWord{142, 0, 283},
+ dictWord{6, 0, 1620},
+ dictWord{7, 11, 1328},
+ dictWord{136, 11, 494},
+ dictWord{5, 0, 859},
+ dictWord{7, 0, 1160},
+ dictWord{8, 0, 107},
+ dictWord{9, 0, 291},
+ dictWord{9, 0, 439},
+ dictWord{10, 0, 663},
+ dictWord{11, 0, 609},
+ dictWord{140, 0, 197},
+ dictWord{7, 11, 1306},
+ dictWord{8, 11, 505},
+ dictWord{9, 11, 482},
+ dictWord{10, 11, 126},
+ dictWord{11, 11, 225},
+ dictWord{12, 11, 347},
+ dictWord{12, 11, 449},
+ dictWord{13, 11, 19},
+ dictWord{142, 11, 218},
+ dictWord{5, 11, 268},
+ dictWord{10, 11, 764},
+ dictWord{12, 11, 120},
+ dictWord{13, 11, 39},
+ dictWord{145, 11, 127},
+ dictWord{145, 10, 56},
+ dictWord{7, 11, 1672},
+ dictWord{10, 11, 472},
+ dictWord{11, 11, 189},
+ dictWord{143, 11, 51},
+ dictWord{6, 10, 342},
+ dictWord{6, 10, 496},
+ dictWord{8, 10, 275},
+ dictWord{137, 10, 206},
+ dictWord{133, 0, 600},
+ dictWord{4, 0, 117},
+ dictWord{6, 0, 372},
+ dictWord{7, 0, 1905},
+ dictWord{142, 0, 323},
+ dictWord{4, 10, 909},
+ dictWord{5, 10, 940},
+ dictWord{135, 11, 1471},
+ dictWord{132, 10, 891},
+ dictWord{4, 0, 722},
+ dictWord{139, 0, 471},
+ dictWord{4, 11, 384},
+ dictWord{135, 11, 1022},
+ dictWord{132, 10, 687},
+ dictWord{9, 0, 5},
+ dictWord{12, 0, 216},
+ dictWord{12, 0, 294},
+ dictWord{12, 0, 298},
+ dictWord{12, 0, 400},
+ dictWord{12, 0, 518},
+ dictWord{13, 0, 229},
+ dictWord{143, 0, 139},
+ dictWord{135, 11, 1703},
+ dictWord{7, 11, 1602},
+ dictWord{10, 11, 698},
+ dictWord{12, 11, 212},
+ dictWord{141, 11, 307},
+ dictWord{6, 10, 41},
+ dictWord{141, 10, 160},
+ dictWord{135, 11, 1077},
+ dictWord{9, 11, 159},
+ dictWord{11, 11, 28},
+ dictWord{140, 11, 603},
+ dictWord{4, 0, 514},
+ dictWord{7, 0, 1304},
+ dictWord{138, 0, 477},
+ dictWord{134, 0, 1774},
+ dictWord{9, 0, 88},
+ dictWord{139, 0, 270},
+ dictWord{5, 0, 12},
+ dictWord{7, 0, 375},
+ dictWord{9, 0, 438},
+ dictWord{134, 10, 1718},
+ dictWord{132, 11, 515},
+ dictWord{136, 10, 778},
+ dictWord{8, 11, 632},
+ dictWord{8, 11, 697},
+ dictWord{137, 11, 854},
+ dictWord{6, 0, 362},
+ dictWord{6, 0, 997},
+ dictWord{146, 0, 51},
+ dictWord{7, 0, 816},
+ dictWord{7, 0, 1241},
+ dictWord{9, 0, 283},
+ dictWord{9, 0, 520},
+ dictWord{10, 0, 213},
+ dictWord{10, 0, 307},
+ dictWord{10, 0, 463},
+ dictWord{10, 0, 671},
+ dictWord{10, 0, 746},
+ dictWord{11, 0, 401},
+ dictWord{11, 0, 794},
+ dictWord{12, 0, 517},
+ dictWord{18, 0, 107},
+ dictWord{147, 0, 115},
+ dictWord{133, 10, 115},
+ dictWord{150, 11, 28},
+ dictWord{4, 11, 136},
+ dictWord{133, 11, 551},
+ dictWord{142, 10, 314},
+ dictWord{132, 0, 258},
+ dictWord{6, 0, 22},
+ dictWord{7, 0, 903},
+ dictWord{7, 0, 1963},
+ dictWord{8, 0, 639},
+ dictWord{138, 0, 577},
+ dictWord{5, 0, 681},
+ dictWord{8, 0, 782},
+ dictWord{13, 0, 130},
+ dictWord{17, 0, 84},
+ dictWord{5, 10, 193},
+ dictWord{140, 10, 178},
+ dictWord{9, 11, 17},
+ dictWord{138, 11, 291},
+ dictWord{7, 11, 1287},
+ dictWord{9, 11, 44},
+ dictWord{10, 11, 552},
+ dictWord{10, 11, 642},
+ dictWord{11, 11, 839},
+ dictWord{12, 11, 274},
+ dictWord{12, 11, 275},
+ dictWord{12, 11, 372},
+ dictWord{13, 11, 91},
+ dictWord{142, 11, 125},
+ dictWord{135, 10, 174},
+ dictWord{4, 0, 664},
+ dictWord{5, 0, 804},
+ dictWord{139, 0, 1013},
+ dictWord{134, 0, 942},
+ dictWord{6, 0, 1349},
+ dictWord{6, 0, 1353},
+ dictWord{6, 0, 1450},
+ dictWord{7, 11, 1518},
+ dictWord{139, 11, 694},
+ dictWord{11, 0, 356},
+ dictWord{4, 10, 122},
+ dictWord{5, 10, 796},
+ dictWord{5, 10, 952},
+ dictWord{6, 10, 1660},
+ dictWord{6, 10, 1671},
+ dictWord{8, 10, 567},
+ dictWord{9, 10, 687},
+ dictWord{9, 10, 742},
+ dictWord{10, 10, 686},
+ dictWord{11, 10, 682},
+ dictWord{140, 10, 281},
+ dictWord{5, 0, 32},
+ dictWord{6, 11, 147},
+ dictWord{7, 11, 886},
+ dictWord{9, 11, 753},
+ dictWord{138, 11, 268},
+ dictWord{5, 10, 179},
+ dictWord{7, 10, 1095},
+ dictWord{135, 10, 1213},
+ dictWord{4, 10, 66},
+ dictWord{7, 10, 722},
+ dictWord{135, 10, 904},
+ dictWord{135, 10, 352},
+ dictWord{9, 11, 245},
+ dictWord{138, 11, 137},
+ dictWord{4, 0, 289},
+ dictWord{7, 0, 629},
+ dictWord{7, 0, 1698},
+ dictWord{7, 0, 1711},
+ dictWord{12, 0, 215},
+ dictWord{133, 11, 414},
+ dictWord{6, 0, 1975},
+ dictWord{135, 11, 1762},
+ dictWord{6, 0, 450},
+ dictWord{136, 0, 109},
+ dictWord{141, 10, 35},
+ dictWord{134, 11, 599},
+ dictWord{136, 0, 705},
+ dictWord{133, 0, 664},
+ dictWord{134, 11, 1749},
+ dictWord{11, 11, 402},
+ dictWord{12, 11, 109},
+ dictWord{12, 11, 431},
+ dictWord{13, 11, 179},
+ dictWord{13, 11, 206},
+ dictWord{14, 11, 175},
+ dictWord{14, 11, 217},
+ dictWord{16, 11, 3},
+ dictWord{148, 11, 53},
+ dictWord{135, 0, 1238},
+ dictWord{134, 11, 1627},
+ dictWord{132, 11, 488},
+ dictWord{13, 0, 318},
+ dictWord{10, 10, 592},
+ dictWord{10, 10, 753},
+ dictWord{12, 10, 317},
+ dictWord{12, 10, 355},
+ dictWord{12, 10, 465},
+ dictWord{12, 10, 469},
+ dictWord{12, 10, 560},
+ dictWord{140, 10, 578},
+ dictWord{133, 10, 564},
+ dictWord{132, 11, 83},
+ dictWord{140, 11, 676},
+ dictWord{6, 0, 1872},
+ dictWord{6, 0, 1906},
+ dictWord{6, 0, 1907},
+ dictWord{9, 0, 934},
+ dictWord{9, 0, 956},
+ dictWord{9, 0, 960},
+ dictWord{9, 0, 996},
+ dictWord{12, 0, 794},
+ dictWord{12, 0, 876},
+ dictWord{12, 0, 880},
+ dictWord{12, 0, 918},
+ dictWord{15, 0, 230},
+ dictWord{18, 0, 234},
+ dictWord{18, 0, 238},
+ dictWord{21, 0, 38},
+ dictWord{149, 0, 62},
+ dictWord{134, 10, 556},
+ dictWord{134, 11, 278},
+ dictWord{137, 0, 103},
+ dictWord{7, 10, 544},
+ dictWord{8, 10, 719},
+ dictWord{138, 10, 61},
+ dictWord{4, 10, 5},
+ dictWord{5, 10, 498},
+ dictWord{8, 10, 637},
+ dictWord{137, 10, 521},
+ dictWord{7, 0, 777},
+ dictWord{12, 0, 229},
+ dictWord{12, 0, 239},
+ dictWord{15, 0, 12},
+ dictWord{12, 11, 229},
+ dictWord{12, 11, 239},
+ dictWord{143, 11, 12},
+ dictWord{6, 0, 26},
+ dictWord{7, 11, 388},
+ dictWord{7, 11, 644},
+ dictWord{139, 11, 781},
+ dictWord{7, 11, 229},
+ dictWord{8, 11, 59},
+ dictWord{9, 11, 190},
+ dictWord{9, 11, 257},
+ dictWord{10, 11, 378},
+ dictWord{140, 11, 191},
+ dictWord{133, 10, 927},
+ dictWord{135, 10, 1441},
+ dictWord{4, 10, 893},
+ dictWord{5, 10, 780},
+ dictWord{133, 10, 893},
+ dictWord{4, 0, 414},
+ dictWord{5, 0, 467},
+ dictWord{9, 0, 654},
+ dictWord{10, 0, 451},
+ dictWord{12, 0, 59},
+ dictWord{141, 0, 375},
+ dictWord{142, 0, 173},
+ dictWord{135, 0, 17},
+ dictWord{7, 0, 1350},
+ dictWord{133, 10, 238},
+ dictWord{135, 0, 955},
+ dictWord{4, 0, 960},
+ dictWord{10, 0, 887},
+ dictWord{12, 0, 753},
+ dictWord{18, 0, 161},
+ dictWord{18, 0, 162},
+ dictWord{152, 0, 19},
+ dictWord{136, 11, 344},
+ dictWord{6, 10, 1729},
+ dictWord{137, 11, 288},
+ dictWord{132, 11, 660},
+ dictWord{4, 0, 217},
+ dictWord{5, 0, 710},
+ dictWord{7, 0, 760},
+ dictWord{7, 0, 1926},
+ dictWord{9, 0, 428},
+ dictWord{9, 0, 708},
+ dictWord{10, 0, 254},
+ dictWord{10, 0, 296},
+ dictWord{10, 0, 720},
+ dictWord{11, 0, 109},
+ dictWord{11, 0, 255},
+ dictWord{12, 0, 165},
+ dictWord{12, 0, 315},
+ dictWord{13, 0, 107},
+ dictWord{13, 0, 203},
+ dictWord{14, 0, 54},
+ dictWord{14, 0, 99},
+ dictWord{14, 0, 114},
+ dictWord{14, 0, 388},
+ dictWord{16, 0, 85},
+ dictWord{17, 0, 9},
+ dictWord{17, 0, 33},
+ dictWord{20, 0, 25},
+ dictWord{20, 0, 28},
+ dictWord{20, 0, 29},
+ dictWord{21, 0, 9},
+ dictWord{21, 0, 10},
+ dictWord{21, 0, 34},
+ dictWord{22, 0, 17},
+ dictWord{4, 10, 60},
+ dictWord{7, 10, 1800},
+ dictWord{8, 10, 314},
+ dictWord{9, 10, 700},
+ dictWord{139, 10, 487},
+ dictWord{7, 11, 1035},
+ dictWord{138, 11, 737},
+ dictWord{7, 11, 690},
+ dictWord{9, 11, 217},
+ dictWord{9, 11, 587},
+ dictWord{140, 11, 521},
+ dictWord{6, 0, 919},
+ dictWord{7, 11, 706},
+ dictWord{7, 11, 1058},
+ dictWord{138, 11, 538},
+ dictWord{7, 10, 1853},
+ dictWord{138, 10, 437},
+ dictWord{136, 10, 419},
+ dictWord{6, 0, 280},
+ dictWord{10, 0, 502},
+ dictWord{11, 0, 344},
+ dictWord{140, 0, 38},
+ dictWord{5, 0, 45},
+ dictWord{7, 0, 1161},
+ dictWord{11, 0, 448},
+ dictWord{11, 0, 880},
+ dictWord{13, 0, 139},
+ dictWord{13, 0, 407},
+ dictWord{15, 0, 16},
+ dictWord{17, 0, 95},
+ dictWord{18, 0, 66},
+ dictWord{18, 0, 88},
+ dictWord{18, 0, 123},
+ dictWord{149, 0, 7},
+ dictWord{11, 11, 92},
+ dictWord{11, 11, 196},
+ dictWord{11, 11, 409},
+ dictWord{11, 11, 450},
+ dictWord{11, 11, 666},
+ dictWord{11, 11, 777},
+ dictWord{12, 11, 262},
+ dictWord{13, 11, 385},
+ dictWord{13, 11, 393},
+ dictWord{15, 11, 115},
+ dictWord{16, 11, 45},
+ dictWord{145, 11, 82},
+ dictWord{136, 0, 777},
+ dictWord{134, 11, 1744},
+ dictWord{4, 0, 410},
+ dictWord{7, 0, 521},
+ dictWord{133, 10, 828},
+ dictWord{134, 0, 673},
+ dictWord{7, 0, 1110},
+ dictWord{7, 0, 1778},
+ dictWord{7, 10, 176},
+ dictWord{135, 10, 178},
+ dictWord{5, 10, 806},
+ dictWord{7, 11, 268},
+ dictWord{7, 10, 1976},
+ dictWord{136, 11, 569},
+ dictWord{4, 11, 733},
+ dictWord{9, 11, 194},
+ dictWord{10, 11, 92},
+ dictWord{11, 11, 198},
+ dictWord{12, 11, 84},
+ dictWord{12, 11, 87},
+ dictWord{13, 11, 128},
+ dictWord{144, 11, 74},
+ dictWord{5, 0, 341},
+ dictWord{7, 0, 1129},
+ dictWord{11, 0, 414},
+ dictWord{4, 10, 51},
+ dictWord{6, 10, 4},
+ dictWord{7, 10, 591},
+ dictWord{7, 10, 849},
+ dictWord{7, 10, 951},
+ dictWord{7, 10, 1613},
+ dictWord{7, 10, 1760},
+ dictWord{7, 10, 1988},
+ dictWord{9, 10, 434},
+ dictWord{10, 10, 754},
+ dictWord{11, 10, 25},
+ dictWord{139, 10, 37},
+ dictWord{133, 10, 902},
+ dictWord{135, 10, 928},
+ dictWord{135, 0, 787},
+ dictWord{132, 0, 436},
+ dictWord{134, 10, 270},
+ dictWord{7, 0, 1587},
+ dictWord{135, 0, 1707},
+ dictWord{6, 0, 377},
+ dictWord{7, 0, 1025},
+ dictWord{9, 0, 613},
+ dictWord{145, 0, 104},
+ dictWord{7, 11, 982},
+ dictWord{7, 11, 1361},
+ dictWord{10, 11, 32},
+ dictWord{143, 11, 56},
+ dictWord{139, 0, 96},
+ dictWord{132, 0, 451},
+ dictWord{132, 10, 416},
+ dictWord{142, 10, 372},
+ dictWord{5, 10, 152},
+ dictWord{5, 10, 197},
+ dictWord{7, 11, 306},
+ dictWord{7, 10, 340},
+ dictWord{7, 10, 867},
+ dictWord{10, 10, 548},
+ dictWord{10, 10, 581},
+ dictWord{11, 10, 6},
+ dictWord{12, 10, 3},
+ dictWord{12, 10, 19},
+ dictWord{14, 10, 110},
+ dictWord{142, 10, 289},
+ dictWord{134, 0, 680},
+ dictWord{134, 11, 609},
+ dictWord{7, 0, 483},
+ dictWord{7, 10, 190},
+ dictWord{8, 10, 28},
+ dictWord{8, 10, 141},
+ dictWord{8, 10, 444},
+ dictWord{8, 10, 811},
+ dictWord{9, 10, 468},
+ dictWord{11, 10, 334},
+ dictWord{12, 10, 24},
+ dictWord{12, 10, 386},
+ dictWord{140, 10, 576},
+ dictWord{10, 0, 916},
+ dictWord{133, 10, 757},
+ dictWord{5, 10, 721},
+ dictWord{135, 10, 1553},
+ dictWord{133, 11, 178},
+ dictWord{134, 0, 937},
+ dictWord{132, 10, 898},
+ dictWord{133, 0, 739},
+ dictWord{147, 0, 82},
+ dictWord{135, 0, 663},
+ dictWord{146, 0, 128},
+ dictWord{5, 10, 277},
+ dictWord{141, 10, 247},
+ dictWord{134, 0, 1087},
+ dictWord{132, 10, 435},
+ dictWord{6, 11, 381},
+ dictWord{7, 11, 645},
+ dictWord{7, 11, 694},
+ dictWord{136, 11, 546},
+ dictWord{7, 0, 503},
+ dictWord{135, 0, 1885},
+ dictWord{6, 0, 1965},
+ dictWord{8, 0, 925},
+ dictWord{138, 0, 955},
+ dictWord{4, 0, 113},
+ dictWord{5, 0, 163},
+ dictWord{5, 0, 735},
+ dictWord{7, 0, 1009},
+ dictWord{9, 0, 9},
+ dictWord{9, 0, 771},
+ dictWord{12, 0, 90},
+ dictWord{13, 0, 138},
+ dictWord{13, 0, 410},
+ dictWord{143, 0, 128},
+ dictWord{4, 0, 324},
+ dictWord{138, 0, 104},
+ dictWord{7, 0, 460},
+ dictWord{5, 10, 265},
+ dictWord{134, 10, 212},
+ dictWord{133, 11, 105},
+ dictWord{7, 11, 261},
+ dictWord{7, 11, 1107},
+ dictWord{7, 11, 1115},
+ dictWord{7, 11, 1354},
+ dictWord{7, 11, 1588},
+ dictWord{7, 11, 1705},
+ dictWord{7, 11, 1902},
+ dictWord{9, 11, 465},
+ dictWord{10, 11, 248},
+ dictWord{10, 11, 349},
+ dictWord{10, 11, 647},
+ dictWord{11, 11, 527},
+ dictWord{11, 11, 660},
+ dictWord{11, 11, 669},
+ dictWord{12, 11, 529},
+ dictWord{141, 11, 305},
+ dictWord{5, 11, 438},
+ dictWord{9, 11, 694},
+ dictWord{12, 11, 627},
+ dictWord{141, 11, 210},
+ dictWord{152, 11, 11},
+ dictWord{4, 0, 935},
+ dictWord{133, 0, 823},
+ dictWord{132, 10, 702},
+ dictWord{5, 0, 269},
+ dictWord{7, 0, 434},
+ dictWord{7, 0, 891},
+ dictWord{8, 0, 339},
+ dictWord{9, 0, 702},
+ dictWord{11, 0, 594},
+ dictWord{11, 0, 718},
+ dictWord{17, 0, 100},
+ dictWord{5, 10, 808},
+ dictWord{135, 10, 2045},
+ dictWord{7, 0, 1014},
+ dictWord{9, 0, 485},
+ dictWord{141, 0, 264},
+ dictWord{134, 0, 1713},
+ dictWord{7, 0, 1810},
+ dictWord{11, 0, 866},
+ dictWord{12, 0, 103},
+ dictWord{13, 0, 495},
+ dictWord{140, 11, 233},
+ dictWord{4, 0, 423},
+ dictWord{10, 0, 949},
+ dictWord{138, 0, 1013},
+ dictWord{135, 0, 900},
+ dictWord{8, 11, 25},
+ dictWord{138, 11, 826},
+ dictWord{5, 10, 166},
+ dictWord{8, 10, 739},
+ dictWord{140, 10, 511},
+ dictWord{134, 0, 2018},
+ dictWord{7, 11, 1270},
+ dictWord{139, 11, 612},
+ dictWord{4, 10, 119},
+ dictWord{5, 10, 170},
+ dictWord{5, 10, 447},
+ dictWord{7, 10, 1708},
+ dictWord{7, 10, 1889},
+ dictWord{9, 10, 357},
+ dictWord{9, 10, 719},
+ dictWord{12, 10, 486},
+ dictWord{140, 10, 596},
+ dictWord{12, 0, 574},
+ dictWord{140, 11, 574},
+ dictWord{132, 11, 308},
+ dictWord{6, 0, 964},
+ dictWord{6, 0, 1206},
+ dictWord{134, 0, 1302},
+ dictWord{4, 10, 450},
+ dictWord{135, 10, 1158},
+ dictWord{135, 11, 150},
+ dictWord{136, 11, 649},
+ dictWord{14, 0, 213},
+ dictWord{148, 0, 38},
+ dictWord{9, 11, 45},
+ dictWord{9, 11, 311},
+ dictWord{141, 11, 42},
+ dictWord{134, 11, 521},
+ dictWord{7, 10, 1375},
+ dictWord{7, 10, 1466},
+ dictWord{138, 10, 331},
+ dictWord{132, 10, 754},
+ dictWord{5, 11, 339},
+ dictWord{7, 11, 1442},
+ dictWord{14, 11, 3},
+ dictWord{15, 11, 41},
+ dictWord{147, 11, 66},
+ dictWord{136, 11, 378},
+ dictWord{134, 0, 1022},
+ dictWord{5, 10, 850},
+ dictWord{136, 10, 799},
+ dictWord{142, 0, 143},
+ dictWord{135, 0, 2029},
+ dictWord{134, 11, 1628},
+ dictWord{8, 0, 523},
+ dictWord{150, 0, 34},
+ dictWord{135, 0, 1617},
+ },
+ dictWord{7, 0, 275},
+ dictWord{7, 10, 238},
+ dictWord{7, 10, 2033},
+ dictWord{8, 10, 120},
+ dictWord{8, 10, 188},
+ dictWord{8, 10, 659},
+ dictWord{9, 10, 598},
+ dictWord{10, 10, 466},
+ dictWord{12, 10, 342},
+ dictWord{12, 10, 588},
+ dictWord{13, 10, 503},
+ dictWord{14, 10, 246},
+ dictWord{143, 10, 92},
+ dictWord{7, 0, 37},
+ dictWord{8, 0, 425},
+ dictWord{8, 0, 693},
+ dictWord{9, 0, 720},
+ dictWord{10, 0, 380},
+ dictWord{10, 0, 638},
+ dictWord{11, 0, 273},
+ dictWord{11, 0, 473},
+ dictWord{12, 0, 61},
+ dictWord{143, 0, 43},
+ dictWord{135, 11, 829},
+ dictWord{135, 0, 1943},
+ dictWord{132, 0, 765},
+ dictWord{135, 11, 1349},
+ },
+ dictWord{7, 11, 1635},
+ dictWord{8, 11, 17},
+ dictWord{10, 11, 217},
+ dictWord{138, 11, 295},
+ dictWord{4, 10, 201},
+ dictWord{7, 10, 1744},
+ dictWord{8, 10, 602},
+ dictWord{11, 10, 247},
+ dictWord{11, 10, 826},
+ dictWord{145, 10, 65},
+ dictWord{138, 11, 558},
+ dictWord{11, 0, 551},
+ dictWord{142, 0, 159},
+ dictWord{8, 10, 164},
+ dictWord{146, 10, 62},
+ dictWord{139, 11, 176},
+ dictWord{132, 0, 168},
+ dictWord{136, 0, 1010},
+ dictWord{134, 0, 1994},
+ dictWord{135, 0, 91},
+ dictWord{138, 0, 532},
+ dictWord{135, 10, 1243},
+ dictWord{135, 0, 1884},
+ dictWord{132, 10, 907},
+ dictWord{5, 10, 100},
+ dictWord{10, 10, 329},
+ dictWord{12, 10, 416},
+ dictWord{149, 10, 29},
+ dictWord{134, 11, 447},
+ dictWord{132, 10, 176},
+ dictWord{5, 10, 636},
+ dictWord{5, 10, 998},
+ dictWord{7, 10, 9},
+ dictWord{7, 10, 1508},
+ dictWord{8, 10, 26},
+ dictWord{9, 10, 317},
+ dictWord{9, 10, 358},
+ dictWord{10, 10, 210},
+ dictWord{10, 10, 292},
+ dictWord{10, 10, 533},
+ dictWord{11, 10, 555},
+ dictWord{12, 10, 526},
+ dictWord{12, 10, 607},
+ dictWord{13, 10, 263},
+ dictWord{13, 10, 459},
+ dictWord{142, 10, 271},
+ dictWord{4, 11, 609},
+ dictWord{135, 11, 756},
+ dictWord{6, 0, 15},
+ dictWord{7, 0, 70},
+ dictWord{10, 0, 240},
+ dictWord{147, 0, 93},
+ dictWord{4, 11, 930},
+ dictWord{133, 11, 947},
+ dictWord{134, 0, 1227},
+ dictWord{134, 0, 1534},
+ dictWord{133, 11, 939},
+ dictWord{133, 11, 962},
+ dictWord{5, 11, 651},
+ dictWord{8, 11, 170},
+ dictWord{9, 11, 61},
+ dictWord{9, 11, 63},
+ dictWord{10, 11, 23},
+ dictWord{10, 11, 37},
+ dictWord{10, 11, 834},
+ dictWord{11, 11, 4},
+ dictWord{11, 11, 187},
+ dictWord{11, 11, 281},
+ dictWord{11, 11, 503},
+ dictWord{11, 11, 677},
+ dictWord{12, 11, 96},
+ dictWord{12, 11, 130},
+ dictWord{12, 11, 244},
+ dictWord{14, 11, 5},
+ dictWord{14, 11, 40},
+ dictWord{14, 11, 162},
+ dictWord{14, 11, 202},
+ dictWord{146, 11, 133},
+ dictWord{4, 11, 406},
+ dictWord{5, 11, 579},
+ dictWord{12, 11, 492},
+ dictWord{150, 11, 15},
+ dictWord{139, 0, 392},
+ dictWord{6, 10, 610},
+ dictWord{10, 10, 127},
+ dictWord{141, 10, 27},
+ dictWord{7, 0, 655},
+ dictWord{7, 0, 1844},
+ dictWord{136, 10, 119},
+ dictWord{4, 0, 145},
+ dictWord{6, 0, 176},
+ dictWord{7, 0, 395},
+ dictWord{137, 0, 562},
+ dictWord{132, 0, 501},
+ dictWord{140, 11, 145},
+ dictWord{136, 0, 1019},
+ dictWord{134, 0, 509},
+ dictWord{139, 0, 267},
+ dictWord{6, 11, 17},
+ dictWord{7, 11, 16},
+ dictWord{7, 11, 1001},
+ dictWord{7, 11, 1982},
+ dictWord{9, 11, 886},
+ dictWord{10, 11, 489},
+ dictWord{10, 11, 800},
+ dictWord{11, 11, 782},
+ dictWord{12, 11, 320},
+ dictWord{13, 11, 467},
+ dictWord{14, 11, 145},
+ dictWord{14, 11, 387},
+ dictWord{143, 11, 119},
+ dictWord{145, 11, 17},
+ dictWord{6, 0, 1099},
+ dictWord{133, 11, 458},
+ dictWord{7, 11, 1983},
+ dictWord{8, 11, 0},
+ dictWord{8, 11, 171},
+ dictWord{9, 11, 120},
+ dictWord{9, 11, 732},
+ dictWord{10, 11, 473},
+ dictWord{11, 11, 656},
+ dictWord{11, 11, 998},
+ dictWord{18, 11, 0},
+ dictWord{18, 11, 2},
+ dictWord{147, 11, 21},
+ dictWord{12, 11, 427},
+ dictWord{146, 11, 38},
+ dictWord{10, 0, 948},
+ dictWord{138, 0, 968},
+ dictWord{7, 10, 126},
+ dictWord{136, 10, 84},
+ dictWord{136, 10, 790},
+ dictWord{4, 0, 114},
+ dictWord{9, 0, 492},
+ dictWord{13, 0, 462},
+ dictWord{142, 0, 215},
+ dictWord{6, 10, 64},
+ dictWord{12, 10, 377},
+ dictWord{141, 10, 309},
+ dictWord{4, 0, 77},
+ dictWord{5, 0, 361},
+ dictWord{6, 0, 139},
+ dictWord{6, 0, 401},
+ dictWord{6, 0, 404},
+ dictWord{7, 0, 413},
+ dictWord{7, 0, 715},
+ dictWord{7, 0, 1716},
+ dictWord{11, 0, 279},
+ dictWord{12, 0, 179},
+ dictWord{12, 0, 258},
+ dictWord{13, 0, 244},
+ dictWord{142, 0, 358},
+ dictWord{134, 0, 1717},
+ dictWord{7, 0, 772},
+ dictWord{7, 0, 1061},
+ dictWord{7, 0, 1647},
+ dictWord{8, 0, 82},
+ dictWord{11, 0, 250},
+ dictWord{11, 0, 607},
+ dictWord{12, 0, 311},
+ dictWord{12, 0, 420},
+ dictWord{13, 0, 184},
+ dictWord{13, 0, 367},
+ dictWord{7, 10, 1104},
+ dictWord{11, 10, 269},
+ dictWord{11, 10, 539},
+ dictWord{11, 10, 627},
+ dictWord{11, 10, 706},
+ dictWord{11, 10, 975},
+ dictWord{12, 10, 248},
+ dictWord{12, 10, 434},
+ dictWord{12, 10, 600},
+ dictWord{12, 10, 622},
+ dictWord{13, 10, 297},
+ dictWord{13, 10, 485},
+ dictWord{14, 10, 69},
+ dictWord{14, 10, 409},
+ dictWord{143, 10, 108},
+ dictWord{135, 0, 724},
+ dictWord{4, 11, 512},
+ dictWord{4, 11, 519},
+ dictWord{133, 11, 342},
+ dictWord{134, 0, 1133},
+ dictWord{145, 11, 29},
+ dictWord{11, 10, 977},
+ dictWord{141, 10, 507},
+ dictWord{6, 0, 841},
+ dictWord{6, 0, 1042},
+ dictWord{6, 0, 1194},
+ dictWord{10, 0, 993},
+ dictWord{140, 0, 1021},
+ dictWord{6, 11, 31},
+ dictWord{7, 11, 491},
+ dictWord{7, 11, 530},
+ dictWord{8, 11, 592},
+ dictWord{9, 10, 34},
+ dictWord{11, 11, 53},
+ dictWord{11, 10, 484},
+ dictWord{11, 11, 779},
+ dictWord{12, 11, 167},
+ dictWord{12, 11, 411},
+ dictWord{14, 11, 14},
+ dictWord{14, 11, 136},
+ dictWord{15, 11, 72},
+ dictWord{16, 11, 17},
+ dictWord{144, 11, 72},
+ dictWord{4, 0, 1021},
+ dictWord{6, 0, 2037},
+ dictWord{133, 11, 907},
+ dictWord{7, 0, 373},
+ dictWord{8, 0, 335},
+ dictWord{8, 0, 596},
+ dictWord{9, 0, 488},
+ dictWord{7, 10, 293},
+ },
+ dictWord{7, 10, 382},
+ dictWord{7, 10, 1026},
+ dictWord{7, 10, 1087},
+ dictWord{7, 10, 2027},
+ dictWord{8, 10, 252},
+ dictWord{8, 10, 727},
+ dictWord{8, 10, 729},
+ dictWord{9, 10, 30},
+ dictWord{9, 10, 199},
+ dictWord{9, 10, 231},
+ dictWord{9, 10, 251},
+ dictWord{9, 10, 334},
+ dictWord{9, 10, 361},
+ dictWord{9, 10, 712},
+ dictWord{10, 10, 55},
+ dictWord{10, 10, 60},
+ dictWord{10, 10, 232},
+ dictWord{10, 10, 332},
+ dictWord{10, 10, 384},
+ dictWord{10, 10, 396},
+ dictWord{10, 10, 504},
+ dictWord{10, 10, 542},
+ dictWord{10, 10, 652},
+ dictWord{11, 10, 20},
+ dictWord{11, 10, 48},
+ dictWord{11, 10, 207},
+ dictWord{11, 10, 291},
+ dictWord{11, 10, 298},
+ dictWord{11, 10, 342},
+ dictWord{11, 10, 365},
+ dictWord{11, 10, 394},
+ dictWord{11, 10, 620},
+ dictWord{11, 10, 705},
+ dictWord{11, 10, 1017},
+ dictWord{12, 10, 123},
+ dictWord{12, 10, 340},
+ dictWord{12, 10, 406},
+ dictWord{12, 10, 643},
+ dictWord{13, 10, 61},
+ dictWord{13, 10, 269},
+ dictWord{13, 10, 311},
+ dictWord{13, 10, 319},
+ dictWord{13, 10, 486},
+ dictWord{14, 10, 234},
+ dictWord{15, 10, 62},
+ dictWord{15, 10, 85},
+ dictWord{16, 10, 71},
+ dictWord{18, 10, 119},
+ dictWord{148, 10, 105},
+ dictWord{150, 0, 37},
+ dictWord{4, 11, 208},
+ dictWord{5, 11, 106},
+ dictWord{6, 11, 531},
+ dictWord{8, 11, 408},
+ dictWord{9, 11, 188},
+ dictWord{138, 11, 572},
+ dictWord{132, 0, 564},
+ dictWord{6, 0, 513},
+ dictWord{135, 0, 1052},
+ dictWord{132, 0, 825},
+ dictWord{9, 0, 899},
+ dictWord{140, 11, 441},
+ dictWord{134, 0, 778},
+ dictWord{133, 11, 379},
+ dictWord{7, 0, 1417},
+ dictWord{12, 0, 382},
+ dictWord{17, 0, 48},
+ dictWord{152, 0, 12},
+ dictWord{132, 11, 241},
+ dictWord{7, 0, 1116},
+ dictWord{6, 10, 379},
+ dictWord{7, 10, 270},
+ dictWord{8, 10, 176},
+ dictWord{8, 10, 183},
+ dictWord{9, 10, 432},
+ dictWord{9, 10, 661},
+ dictWord{12, 10, 247},
+ dictWord{12, 10, 617},
+ dictWord{146, 10, 125},
+ dictWord{5, 10, 792},
+ dictWord{133, 10, 900},
+ dictWord{6, 0, 545},
+ dictWord{7, 0, 565},
+ dictWord{7, 0, 1669},
+ dictWord{10, 0, 114},
+ dictWord{11, 0, 642},
+ dictWord{140, 0, 618},
+ dictWord{133, 0, 5},
+ dictWord{138, 11, 7},
+ dictWord{132, 11, 259},
+ dictWord{135, 0, 192},
+ dictWord{134, 0, 701},
+ dictWord{136, 0, 763},
+ dictWord{135, 10, 1979},
+ dictWord{4, 10, 901},
+ dictWord{133, 10, 776},
+ dictWord{10, 0, 755},
+ dictWord{147, 0, 29},
+ dictWord{133, 0, 759},
+ dictWord{4, 11, 173},
+ dictWord{5, 11, 312},
+ dictWord{5, 11, 512},
+ dictWord{135, 11, 1285},
+ dictWord{7, 11, 1603},
+ dictWord{7, 11, 1691},
+ dictWord{9, 11, 464},
+ dictWord{11, 11, 195},
+ dictWord{12, 11, 279},
+ dictWord{12, 11, 448},
+ dictWord{14, 11, 11},
+ dictWord{147, 11, 102},
+ dictWord{7, 0, 370},
+ dictWord{7, 0, 1007},
+ dictWord{7, 0, 1177},
+ dictWord{135, 0, 1565},
+ dictWord{135, 0, 1237},
+ dictWord{4, 0, 87},
+ dictWord{5, 0, 250},
+ dictWord{141, 0, 298},
+ dictWord{4, 11, 452},
+ dictWord{5, 11, 583},
+ dictWord{5, 11, 817},
+ dictWord{6, 11, 433},
+ dictWord{7, 11, 593},
+ dictWord{7, 11, 720},
+ dictWord{7, 11, 1378},
+ dictWord{8, 11, 161},
+ dictWord{9, 11, 284},
+ dictWord{10, 11, 313},
+ dictWord{139, 11, 886},
+ dictWord{4, 11, 547},
+ dictWord{135, 11, 1409},
+ dictWord{136, 11, 722},
+ dictWord{4, 10, 37},
+ dictWord{5, 10, 334},
+ dictWord{135, 10, 1253},
+ dictWord{132, 10, 508},
+ dictWord{12, 0, 107},
+ dictWord{146, 0, 31},
+ dictWord{8, 11, 420},
+ dictWord{139, 11, 193},
+ dictWord{135, 0, 814},
+ dictWord{135, 11, 409},
+ dictWord{140, 0, 991},
+ dictWord{4, 0, 57},
+ dictWord{7, 0, 1195},
+ dictWord{7, 0, 1438},
+ dictWord{7, 0, 1548},
+ dictWord{7, 0, 1835},
+ dictWord{7, 0, 1904},
+ dictWord{9, 0, 757},
+ dictWord{10, 0, 604},
+ dictWord{139, 0, 519},
+ dictWord{132, 0, 540},
+ dictWord{138, 11, 308},
+ dictWord{132, 10, 533},
+ dictWord{136, 0, 608},
+ dictWord{144, 11, 65},
+ dictWord{4, 0, 1014},
+ dictWord{134, 0, 2029},
+ dictWord{4, 0, 209},
+ dictWord{7, 0, 902},
+ dictWord{5, 11, 1002},
+ dictWord{136, 11, 745},
+ dictWord{134, 0, 2030},
+ dictWord{6, 0, 303},
+ dictWord{7, 0, 335},
+ dictWord{7, 0, 1437},
+ dictWord{7, 0, 1668},
+ dictWord{8, 0, 553},
+ dictWord{8, 0, 652},
+ dictWord{8, 0, 656},
+ dictWord{9, 0, 558},
+ dictWord{11, 0, 743},
+ dictWord{149, 0, 18},
+ dictWord{5, 11, 575},
+ dictWord{6, 11, 354},
+ dictWord{135, 11, 701},
+ dictWord{4, 11, 239},
+ dictWord{6, 11, 477},
+ dictWord{7, 11, 1607},
+ dictWord{11, 11, 68},
+ dictWord{139, 11, 617},
+ dictWord{132, 0, 559},
+ dictWord{8, 0, 527},
+ dictWord{18, 0, 60},
+ dictWord{147, 0, 24},
+ dictWord{133, 10, 920},
+ dictWord{138, 0, 511},
+ dictWord{133, 0, 1017},
+ dictWord{133, 0, 675},
+ dictWord{138, 10, 391},
+ dictWord{11, 0, 156},
+ dictWord{135, 10, 1952},
+ dictWord{138, 11, 369},
+ dictWord{132, 11, 367},
+ dictWord{133, 0, 709},
+ dictWord{6, 0, 698},
+ dictWord{134, 0, 887},
+ dictWord{142, 10, 126},
+ dictWord{134, 0, 1745},
+ dictWord{132, 10, 483},
+ dictWord{13, 11, 299},
+ dictWord{142, 11, 75},
+ dictWord{133, 0, 714},
+ dictWord{7, 0, 8},
+ dictWord{136, 0, 206},
+ dictWord{138, 10, 480},
+ dictWord{4, 11, 694},
+ dictWord{9, 10, 495},
+ dictWord{146, 10, 104},
+ dictWord{7, 11, 1248},
+ dictWord{11, 11, 621},
+ dictWord{139, 11, 702},
+ dictWord{140, 11, 687},
+ dictWord{132, 0, 776},
+ dictWord{139, 10, 1009},
+ dictWord{135, 0, 1272},
+ dictWord{134, 0, 1059},
+ dictWord{8, 10, 653},
+ dictWord{13, 10, 93},
+ dictWord{147, 10, 14},
+ dictWord{135, 11, 213},
+ dictWord{136, 0, 406},
+ dictWord{133, 10, 172},
+ dictWord{132, 0, 947},
+ dictWord{8, 0, 175},
+ dictWord{10, 0, 168},
+ dictWord{138, 0, 573},
+ dictWord{132, 0, 870},
+ dictWord{6, 0, 1567},
+ dictWord{151, 11, 28},
+ dictWord{134, 11, 472},
+ dictWord{5, 10, 260},
+ dictWord{136, 11, 132},
+ dictWord{4, 11, 751},
+ dictWord{11, 11, 390},
+ dictWord{140, 11, 32},
+ dictWord{4, 11, 409},
+ dictWord{133, 11, 78},
+ dictWord{12, 0, 554},
+ dictWord{6, 11, 473},
+ dictWord{145, 11, 105},
+ dictWord{133, 0, 784},
+ dictWord{8, 0, 908},
+ dictWord{136, 11, 306},
+ dictWord{139, 0, 882},
+ dictWord{6, 0, 358},
+ dictWord{7, 0, 1393},
+ dictWord{8, 0, 396},
+ dictWord{10, 0, 263},
+ dictWord{14, 0, 154},
+ dictWord{16, 0, 48},
+ dictWord{17, 0, 8},
+ dictWord{7, 11, 1759},
+ dictWord{8, 11, 396},
+ dictWord{10, 11, 263},
+ dictWord{14, 11, 154},
+ dictWord{16, 11, 48},
+ dictWord{145, 11, 8},
+ dictWord{13, 11, 163},
+ dictWord{13, 11, 180},
+ dictWord{18, 11, 78},
+ dictWord{148, 11, 35},
+ dictWord{14, 0, 32},
+ dictWord{18, 0, 85},
+ dictWord{20, 0, 2},
+ dictWord{152, 0, 16},
+ dictWord{7, 0, 228},
+ dictWord{10, 0, 770},
+ dictWord{8, 10, 167},
+ dictWord{8, 10, 375},
+ dictWord{9, 10, 82},
+ dictWord{9, 10, 561},
+ dictWord{138, 10, 620},
+ dictWord{132, 0, 845},
+ dictWord{9, 0, 14},
+ dictWord{9, 0, 441},
+ dictWord{10, 0, 306},
+ dictWord{139, 0, 9},
+ dictWord{11, 0, 966},
+ dictWord{12, 0, 287},
+ dictWord{13, 0, 342},
+ dictWord{13, 0, 402},
+ dictWord{15, 0, 110},
+ dictWord{15, 0, 163},
+ dictWord{8, 10, 194},
+ dictWord{136, 10, 756},
+ dictWord{134, 0, 1578},
+ dictWord{4, 0, 967},
+ dictWord{6, 0, 1820},
+ dictWord{6, 0, 1847},
+ dictWord{140, 0, 716},
+ dictWord{136, 0, 594},
+ dictWord{7, 0, 1428},
+ dictWord{7, 0, 1640},
+ dictWord{7, 0, 1867},
+ dictWord{9, 0, 169},
+ dictWord{9, 0, 182},
+ dictWord{9, 0, 367},
+ dictWord{9, 0, 478},
+ dictWord{9, 0, 506},
+ dictWord{9, 0, 551},
+ dictWord{9, 0, 557},
+ dictWord{9, 0, 648},
+ dictWord{9, 0, 697},
+ dictWord{9, 0, 705},
+ dictWord{9, 0, 725},
+ dictWord{9, 0, 787},
+ dictWord{9, 0, 794},
+ dictWord{10, 0, 198},
+ dictWord{10, 0, 214},
+ dictWord{10, 0, 267},
+ dictWord{10, 0, 275},
+ dictWord{10, 0, 456},
+ dictWord{10, 0, 551},
+ dictWord{10, 0, 561},
+ dictWord{10, 0, 613},
+ dictWord{10, 0, 627},
+ dictWord{10, 0, 668},
+ dictWord{10, 0, 675},
+ dictWord{10, 0, 691},
+ dictWord{10, 0, 695},
+ dictWord{10, 0, 707},
+ dictWord{10, 0, 715},
+ dictWord{11, 0, 183},
+ dictWord{11, 0, 201},
+ dictWord{11, 0, 244},
+ dictWord{11, 0, 262},
+ dictWord{11, 0, 352},
+ dictWord{11, 0, 439},
+ dictWord{11, 0, 493},
+ dictWord{11, 0, 572},
+ dictWord{11, 0, 591},
+ dictWord{11, 0, 608},
+ dictWord{11, 0, 611},
+ dictWord{11, 0, 646},
+ dictWord{11, 0, 674},
+ dictWord{11, 0, 711},
+ dictWord{11, 0, 751},
+ dictWord{11, 0, 761},
+ dictWord{11, 0, 776},
+ dictWord{11, 0, 785},
+ dictWord{11, 0, 850},
+ dictWord{11, 0, 853},
+ dictWord{11, 0, 862},
+ dictWord{11, 0, 865},
+ dictWord{11, 0, 868},
+ dictWord{11, 0, 875},
+ dictWord{11, 0, 898},
+ dictWord{11, 0, 902},
+ dictWord{11, 0, 903},
+ dictWord{11, 0, 910},
+ dictWord{11, 0, 932},
+ dictWord{11, 0, 942},
+ dictWord{11, 0, 957},
+ dictWord{11, 0, 967},
+ dictWord{11, 0, 972},
+ dictWord{12, 0, 148},
+ dictWord{12, 0, 195},
+ dictWord{12, 0, 220},
+ dictWord{12, 0, 237},
+ dictWord{12, 0, 318},
+ dictWord{12, 0, 339},
+ dictWord{12, 0, 393},
+ dictWord{12, 0, 445},
+ dictWord{12, 0, 450},
+ dictWord{12, 0, 474},
+ dictWord{12, 0, 505},
+ dictWord{12, 0, 509},
+ dictWord{12, 0, 533},
+ dictWord{12, 0, 591},
+ dictWord{12, 0, 594},
+ dictWord{12, 0, 597},
+ dictWord{12, 0, 621},
+ dictWord{12, 0, 633},
+ dictWord{12, 0, 642},
+ dictWord{13, 0, 59},
+ dictWord{13, 0, 60},
+ dictWord{13, 0, 145},
+ dictWord{13, 0, 239},
+ dictWord{13, 0, 250},
+ dictWord{13, 0, 329},
+ dictWord{13, 0, 344},
+ dictWord{13, 0, 365},
+ dictWord{13, 0, 372},
+ dictWord{13, 0, 387},
+ dictWord{13, 0, 403},
+ dictWord{13, 0, 414},
+ dictWord{13, 0, 456},
+ dictWord{13, 0, 470},
+ dictWord{13, 0, 478},
+ dictWord{13, 0, 483},
+ dictWord{13, 0, 489},
+ dictWord{14, 0, 55},
+ dictWord{14, 0, 57},
+ dictWord{14, 0, 81},
+ dictWord{14, 0, 90},
+ dictWord{14, 0, 148},
+ dictWord{14, 0, 239},
+ dictWord{14, 0, 266},
+ dictWord{14, 0, 321},
+ dictWord{14, 0, 326},
+ dictWord{14, 0, 327},
+ dictWord{14, 0, 330},
+ dictWord{14, 0, 347},
+ dictWord{14, 0, 355},
+ dictWord{14, 0, 401},
+ dictWord{14, 0, 404},
+ dictWord{14, 0, 411},
+ dictWord{14, 0, 414},
+ dictWord{14, 0, 416},
+ dictWord{14, 0, 420},
+ dictWord{15, 0, 61},
+ dictWord{15, 0, 74},
+ dictWord{15, 0, 87},
+ dictWord{15, 0, 88},
+ dictWord{15, 0, 94},
+ dictWord{15, 0, 96},
+ dictWord{15, 0, 116},
+ dictWord{15, 0, 149},
+ dictWord{15, 0, 154},
+ dictWord{16, 0, 50},
+ dictWord{16, 0, 63},
+ dictWord{16, 0, 73},
+ dictWord{17, 0, 2},
+ dictWord{17, 0, 66},
+ dictWord{17, 0, 92},
+ dictWord{17, 0, 103},
+ dictWord{17, 0, 112},
+ dictWord{17, 0, 120},
+ dictWord{18, 0, 50},
+ dictWord{18, 0, 54},
+ dictWord{18, 0, 82},
+ dictWord{18, 0, 86},
+ dictWord{18, 0, 90},
+ dictWord{18, 0, 111},
+ dictWord{18, 0, 115},
+ dictWord{18, 0, 156},
+ dictWord{19, 0, 40},
+ dictWord{19, 0, 79},
+ dictWord{20, 0, 78},
+ dictWord{21, 0, 22},
+ dictWord{135, 11, 883},
+ dictWord{5, 0, 161},
+ dictWord{135, 0, 839},
+ dictWord{4, 0, 782},
+ dictWord{13, 11, 293},
+ dictWord{142, 11, 56},
+ dictWord{133, 11, 617},
+ dictWord{139, 11, 50},
+ dictWord{135, 10, 22},
+ dictWord{145, 0, 64},
+ dictWord{5, 10, 639},
+ dictWord{7, 10, 1249},
+ dictWord{139, 10, 896},
+ dictWord{138, 0, 998},
+ dictWord{135, 11, 2042},
+ dictWord{4, 11, 546},
+ dictWord{142, 11, 233},
+ dictWord{6, 0, 1043},
+ dictWord{134, 0, 1574},
+ dictWord{134, 0, 1496},
+ dictWord{4, 10, 102},
+ dictWord{7, 10, 815},
+ dictWord{7, 10, 1699},
+ dictWord{139, 10, 964},
+ dictWord{12, 0, 781},
+ dictWord{142, 0, 461},
+ dictWord{4, 11, 313},
+ dictWord{133, 11, 577},
+ dictWord{6, 0, 639},
+ dictWord{6, 0, 1114},
+ dictWord{137, 0, 817},
+ dictWord{8, 11, 184},
+ dictWord{141, 11, 433},
+ dictWord{7, 0, 1814},
+ dictWord{135, 11, 935},
+ dictWord{10, 0, 997},
+ dictWord{140, 0, 958},
+ dictWord{4, 0, 812},
+ dictWord{137, 11, 625},
+ dictWord{132, 10, 899},
+ dictWord{136, 10, 795},
+ dictWord{5, 11, 886},
+ dictWord{6, 11, 46},
+ dictWord{6, 11, 1790},
+ dictWord{7, 11, 14},
+ dictWord{7, 11, 732},
+ dictWord{7, 11, 1654},
+ dictWord{8, 11, 95},
+ dictWord{8, 11, 327},
+ dictWord{8, 11, 616},
+ dictWord{10, 11, 598},
+ dictWord{10, 11, 769},
+ dictWord{11, 11, 134},
+ dictWord{11, 11, 747},
+ dictWord{12, 11, 378},
+ dictWord{142, 11, 97},
+ dictWord{136, 0, 139},
+ dictWord{6, 10, 52},
+ dictWord{9, 10, 104},
+ dictWord{9, 10, 559},
+ dictWord{12, 10, 308},
+ dictWord{147, 10, 87},
+ dictWord{133, 11, 1021},
+ dictWord{132, 10, 604},
+ dictWord{132, 10, 301},
+ dictWord{136, 10, 779},
+ dictWord{7, 0, 643},
+ dictWord{136, 0, 236},
+ dictWord{132, 11, 153},
+ dictWord{134, 0, 1172},
+ dictWord{147, 10, 32},
+ dictWord{133, 11, 798},
+ dictWord{6, 0, 1338},
+ dictWord{132, 11, 587},
+ dictWord{6, 11, 598},
+ dictWord{7, 11, 42},
+ dictWord{8, 11, 695},
+ dictWord{10, 11, 212},
+ dictWord{11, 11, 158},
+ dictWord{14, 11, 196},
+ dictWord{145, 11, 85},
+ dictWord{135, 10, 508},
+ dictWord{5, 11, 957},
+ dictWord{5, 11, 1008},
+ dictWord{135, 11, 249},
+ dictWord{4, 11, 129},
+ dictWord{135, 11, 465},
+ dictWord{5, 0, 54},
+ dictWord{7, 11, 470},
+ dictWord{7, 11, 1057},
+ dictWord{7, 11, 1201},
+ dictWord{9, 11, 755},
+ dictWord{11, 11, 906},
+ dictWord{140, 11, 527},
+ dictWord{7, 11, 908},
+ dictWord{146, 11, 7},
+ dictWord{5, 11, 148},
+ dictWord{136, 11, 450},
+ dictWord{144, 11, 1},
+ dictWord{4, 0, 256},
+ dictWord{135, 0, 1488},
+ dictWord{9, 0, 351},
+ dictWord{6, 10, 310},
+ dictWord{7, 10, 1849},
+ dictWord{8, 10, 72},
+ dictWord{8, 10, 272},
+ dictWord{8, 10, 431},
+ dictWord{9, 10, 12},
+ dictWord{10, 10, 563},
+ dictWord{10, 10, 630},
+ dictWord{10, 10, 796},
+ dictWord{10, 10, 810},
+ dictWord{11, 10, 367},
+ dictWord{11, 10, 599},
+ dictWord{11, 10, 686},
+ dictWord{140, 10, 672},
+ dictWord{6, 0, 1885},
+ dictWord{6, 0, 1898},
+ dictWord{6, 0, 1899},
+ dictWord{140, 0, 955},
+ dictWord{4, 0, 714},
+ dictWord{133, 0, 469},
+ dictWord{6, 0, 1270},
+ dictWord{134, 0, 1456},
+ dictWord{132, 0, 744},
+ dictWord{6, 0, 313},
+ dictWord{7, 10, 537},
+ dictWord{8, 10, 64},
+ dictWord{9, 10, 127},
+ dictWord{10, 10, 496},
+ dictWord{12, 10, 510},
+ dictWord{141, 10, 384},
+ dictWord{4, 11, 217},
+ dictWord{4, 10, 244},
+ dictWord{5, 11, 710},
+ dictWord{7, 10, 233},
+ dictWord{7, 11, 1926},
+ dictWord{9, 11, 428},
+ dictWord{9, 11, 708},
+ dictWord{10, 11, 254},
+ dictWord{10, 11, 296},
+ dictWord{10, 11, 720},
+ dictWord{11, 11, 109},
+ dictWord{11, 11, 255},
+ dictWord{12, 11, 165},
+ dictWord{12, 11, 315},
+ dictWord{13, 11, 107},
+ dictWord{13, 11, 203},
+ dictWord{14, 11, 54},
+ dictWord{14, 11, 99},
+ dictWord{14, 11, 114},
+ dictWord{14, 11, 388},
+ dictWord{16, 11, 85},
+ dictWord{17, 11, 9},
+ dictWord{17, 11, 33},
+ dictWord{20, 11, 25},
+ dictWord{20, 11, 28},
+ dictWord{20, 11, 29},
+ dictWord{21, 11, 9},
+ dictWord{21, 11, 10},
+ dictWord{21, 11, 34},
+ dictWord{150, 11, 17},
+ dictWord{138, 0, 402},
+ dictWord{7, 0, 969},
+ dictWord{146, 0, 55},
+ dictWord{8, 0, 50},
+ dictWord{137, 0, 624},
+ dictWord{134, 0, 1355},
+ dictWord{132, 0, 572},
+ dictWord{134, 10, 1650},
+ dictWord{10, 10, 702},
+ dictWord{139, 10, 245},
+ dictWord{10, 0, 847},
+ dictWord{142, 0, 445},
+ dictWord{6, 0, 43},
+ dictWord{7, 0, 38},
+ dictWord{8, 0, 248},
+ dictWord{138, 0, 513},
+ dictWord{133, 0, 369},
+ dictWord{137, 10, 338},
+ dictWord{133, 0, 766},
+ dictWord{133, 0, 363},
+ dictWord{133, 10, 896},
+ dictWord{8, 11, 392},
+ dictWord{11, 11, 54},
+ dictWord{13, 11, 173},
+ dictWord{13, 11, 294},
+ dictWord{148, 11, 7},
+ dictWord{134, 0, 678},
+ dictWord{7, 11, 1230},
+ dictWord{136, 11, 531},
+ dictWord{6, 0, 258},
+ dictWord{140, 0, 409},
+ dictWord{5, 0, 249},
+ dictWord{148, 0, 82},
+ dictWord{7, 10, 1117},
+ dictWord{136, 10, 539},
+ dictWord{5, 0, 393},
+ dictWord{6, 0, 378},
+ dictWord{7, 0, 1981},
+ dictWord{9, 0, 32},
+ dictWord{9, 0, 591},
+ dictWord{10, 0, 685},
+ dictWord{10, 0, 741},
+ dictWord{142, 0, 382},
+ dictWord{133, 0, 788},
+ dictWord{134, 0, 1281},
+ dictWord{134, 0, 1295},
+ dictWord{7, 0, 1968},
+ dictWord{141, 0, 509},
+ dictWord{4, 0, 61},
+ dictWord{5, 0, 58},
+ dictWord{5, 0, 171},
+ dictWord{5, 0, 683},
+ dictWord{6, 0, 291},
+ dictWord{6, 0, 566},
+ dictWord{7, 0, 1650},
+ dictWord{11, 0, 523},
+ dictWord{12, 0, 273},
+ dictWord{12, 0, 303},
+ dictWord{15, 0, 39},
+ dictWord{143, 0, 111},
+ dictWord{6, 0, 706},
+ dictWord{134, 0, 1283},
+ dictWord{134, 0, 589},
+ dictWord{135, 11, 1433},
+ dictWord{133, 11, 435},
+ dictWord{7, 0, 1059},
+ dictWord{13, 0, 54},
+ dictWord{5, 10, 4},
+ dictWord{5, 10, 810},
+ dictWord{6, 10, 13},
+ dictWord{6, 10, 538},
+ dictWord{6, 10, 1690},
+ dictWord{6, 10, 1726},
+ dictWord{7, 10, 1819},
+ dictWord{8, 10, 148},
+ dictWord{8, 10, 696},
+ dictWord{8, 10, 791},
+ dictWord{12, 10, 125},
+ dictWord{143, 10, 9},
+ dictWord{135, 10, 1268},
+ dictWord{5, 11, 85},
+ dictWord{6, 11, 419},
+ dictWord{7, 11, 134},
+ dictWord{7, 11, 305},
+ dictWord{7, 11, 361},
+ dictWord{7, 11, 1337},
+ dictWord{8, 11, 71},
+ dictWord{140, 11, 519},
+ dictWord{137, 0, 824},
+ dictWord{140, 11, 688},
+ dictWord{5, 11, 691},
+ dictWord{7, 11, 345},
+ dictWord{7, 10, 1385},
+ dictWord{9, 11, 94},
+ dictWord{11, 10, 582},
+ dictWord{11, 10, 650},
+ dictWord{11, 10, 901},
+ dictWord{11, 10, 949},
+ dictWord{12, 11, 169},
+ dictWord{12, 10, 232},
+ dictWord{12, 10, 236},
+ dictWord{13, 10, 413},
+ dictWord{13, 10, 501},
+ dictWord{146, 10, 116},
+ dictWord{4, 0, 917},
+ dictWord{133, 0, 1005},
+ dictWord{7, 0, 1598},
+ dictWord{5, 11, 183},
+ dictWord{6, 11, 582},
+ dictWord{9, 11, 344},
+ dictWord{10, 11, 679},
+ dictWord{140, 11, 435},
+ dictWord{4, 10, 925},
+ dictWord{5, 10, 803},
+ dictWord{8, 10, 698},
+ dictWord{138, 10, 828},
+ dictWord{132, 0, 919},
+ dictWord{135, 11, 511},
+ dictWord{139, 10, 992},
+ dictWord{4, 0, 255},
+ dictWord{5, 0, 302},
+ dictWord{6, 0, 132},
+ dictWord{7, 0, 128},
+ dictWord{7, 0, 283},
+ dictWord{7, 0, 1299},
+ dictWord{10, 0, 52},
+ dictWord{10, 0, 514},
+ dictWord{11, 0, 925},
+ dictWord{13, 0, 92},
+ dictWord{142, 0, 309},
+ dictWord{134, 0, 1369},
+ dictWord{135, 10, 1847},
+ dictWord{134, 0, 328},
+ dictWord{7, 11, 1993},
+ dictWord{136, 11, 684},
+ dictWord{133, 10, 383},
+ dictWord{137, 0, 173},
+ dictWord{134, 11, 583},
+ dictWord{134, 0, 1411},
+ dictWord{19, 0, 65},
+ dictWord{5, 11, 704},
+ dictWord{8, 11, 357},
+ dictWord{10, 11, 745},
+ dictWord{14, 11, 426},
+ dictWord{17, 11, 94},
+ dictWord{147, 11, 57},
+ dictWord{9, 10, 660},
+ dictWord{138, 10, 347},
+ dictWord{4, 11, 179},
+ dictWord{5, 11, 198},
+ dictWord{133, 11, 697},
+ dictWord{7, 11, 347},
+ dictWord{7, 11, 971},
+ dictWord{8, 11, 181},
+ dictWord{138, 11, 711},
+ dictWord{141, 0, 442},
+ dictWord{11, 0, 842},
+ dictWord{11, 0, 924},
+ dictWord{13, 0, 317},
+ dictWord{13, 0, 370},
+ dictWord{13, 0, 469},
+ dictWord{13, 0, 471},
+ dictWord{14, 0, 397},
+ dictWord{18, 0, 69},
+ dictWord{18, 0, 145},
+ dictWord{7, 10, 572},
+ dictWord{9, 10, 592},
+ dictWord{11, 10, 680},
+ dictWord{12, 10, 356},
+ dictWord{140, 10, 550},
+ dictWord{14, 11, 19},
+ dictWord{14, 11, 28},
+ dictWord{144, 11, 29},
+ dictWord{136, 0, 534},
+ dictWord{4, 11, 243},
+ dictWord{5, 11, 203},
+ dictWord{7, 11, 19},
+ dictWord{7, 11, 71},
+ dictWord{7, 11, 113},
+ dictWord{10, 11, 405},
+ dictWord{11, 11, 357},
+ dictWord{142, 11, 240},
+ dictWord{6, 0, 210},
+ dictWord{10, 0, 845},
+ dictWord{138, 0, 862},
+ dictWord{7, 11, 1351},
+ dictWord{9, 11, 581},
+ dictWord{10, 11, 639},
+ dictWord{11, 11, 453},
+ dictWord{140, 11, 584},
+ dictWord{7, 11, 1450},
+ dictWord{139, 11, 99},
+ dictWord{10, 0, 892},
+ dictWord{12, 0, 719},
+ dictWord{144, 0, 105},
+ dictWord{4, 0, 284},
+ dictWord{6, 0, 223},
+ dictWord{134, 11, 492},
+ dictWord{5, 11, 134},
+ dictWord{6, 11, 408},
+ dictWord{6, 11, 495},
+ dictWord{135, 11, 1593},
+ dictWord{136, 0, 529},
+ dictWord{137, 0, 807},
+ dictWord{4, 0, 218},
+ dictWord{7, 0, 526},
+ dictWord{143, 0, 137},
+ dictWord{6, 0, 1444},
+ dictWord{142, 11, 4},
+ dictWord{132, 11, 665},
+ dictWord{4, 0, 270},
+ dictWord{5, 0, 192},
+ dictWord{6, 0, 332},
+ dictWord{7, 0, 1322},
+ dictWord{4, 11, 248},
+ dictWord{7, 11, 137},
+ dictWord{137, 11, 349},
+ dictWord{140, 0, 661},
+ dictWord{7, 0, 1517},
+ dictWord{11, 0, 597},
+ dictWord{14, 0, 76},
+ dictWord{14, 0, 335},
+ dictWord{20, 0, 33},
+ dictWord{7, 10, 748},
+ dictWord{139, 10, 700},
+ dictWord{5, 11, 371},
+ dictWord{135, 11, 563},
+ dictWord{146, 11, 57},
+ dictWord{133, 10, 127},
+ dictWord{133, 0, 418},
+ dictWord{4, 11, 374},
+ dictWord{7, 11, 547},
+ dictWord{7, 11, 1700},
+ dictWord{7, 11, 1833},
+ dictWord{139, 11, 858},
+ dictWord{6, 10, 198},
+ dictWord{140, 10, 83},
+ dictWord{7, 11, 1812},
+ dictWord{13, 11, 259},
+ dictWord{13, 11, 356},
+ dictWord{14, 11, 242},
+ dictWord{147, 11, 114},
+ dictWord{7, 0, 379},
+ dictWord{8, 0, 481},
+ dictWord{9, 0, 377},
+ dictWord{5, 10, 276},
+ dictWord{6, 10, 55},
+ dictWord{135, 10, 1369},
+ dictWord{138, 11, 286},
+ dictWord{5, 0, 1003},
+ dictWord{6, 0, 149},
+ dictWord{6, 10, 1752},
+ dictWord{136, 10, 726},
+ dictWord{8, 0, 262},
+ dictWord{9, 0, 627},
+ dictWord{10, 0, 18},
+ dictWord{11, 0, 214},
+ dictWord{11, 0, 404},
+ dictWord{11, 0, 457},
+ dictWord{11, 0, 780},
+ dictWord{11, 0, 913},
+ dictWord{13, 0, 401},
+ dictWord{14, 0, 200},
+ dictWord{6, 11, 1647},
+ dictWord{7, 11, 1552},
+ dictWord{7, 11, 2010},
+ dictWord{9, 11, 494},
+ dictWord{137, 11, 509},
+ dictWord{135, 0, 742},
+ dictWord{136, 0, 304},
+ dictWord{132, 0, 142},
+ dictWord{133, 10, 764},
+ dictWord{6, 10, 309},
+ dictWord{7, 10, 331},
+ dictWord{138, 10, 550},
+ dictWord{135, 10, 1062},
+ dictWord{6, 11, 123},
+ dictWord{7, 11, 214},
+ dictWord{7, 10, 986},
+ dictWord{9, 11, 728},
+ dictWord{10, 11, 157},
+ dictWord{11, 11, 346},
+ dictWord{11, 11, 662},
+ dictWord{143, 11, 106},
+ dictWord{135, 10, 1573},
+ dictWord{7, 0, 925},
+ dictWord{137, 0, 799},
+ dictWord{4, 0, 471},
+ dictWord{5, 0, 51},
+ dictWord{6, 0, 602},
+ dictWord{8, 0, 484},
+ dictWord{138, 0, 195},
+ dictWord{136, 0, 688},
+ dictWord{132, 0, 697},
+ dictWord{6, 0, 1169},
+ dictWord{6, 0, 1241},
+ dictWord{6, 10, 194},
+ dictWord{7, 10, 133},
+ dictWord{10, 10, 493},
+ dictWord{10, 10, 570},
+ dictWord{139, 10, 664},
+ dictWord{140, 0, 751},
+ dictWord{7, 0, 929},
+ dictWord{10, 0, 452},
+ dictWord{11, 0, 878},
+ dictWord{16, 0, 33},
+ dictWord{5, 10, 24},
+ dictWord{5, 10, 569},
+ dictWord{6, 10, 3},
+ dictWord{6, 10, 119},
+ dictWord{6, 10, 143},
+ dictWord{6, 10, 440},
+ dictWord{7, 10, 599},
+ dictWord{7, 10, 1686},
+ dictWord{7, 10, 1854},
+ dictWord{8, 10, 424},
+ dictWord{9, 10, 43},
+ dictWord{9, 10, 584},
+ dictWord{9, 10, 760},
+ dictWord{10, 10, 328},
+ dictWord{11, 10, 159},
+ dictWord{11, 10, 253},
+ dictWord{12, 10, 487},
+ dictWord{140, 10, 531},
+ dictWord{4, 11, 707},
+ dictWord{13, 11, 106},
+ dictWord{18, 11, 49},
+ dictWord{147, 11, 41},
+ dictWord{5, 0, 221},
+ dictWord{5, 11, 588},
+ dictWord{134, 11, 393},
+ dictWord{134, 0, 1437},
+ dictWord{6, 11, 211},
+ dictWord{7, 11, 1690},
+ dictWord{11, 11, 486},
+ dictWord{140, 11, 369},
+ dictWord{5, 10, 14},
+ dictWord{5, 10, 892},
+ dictWord{6, 10, 283},
+ dictWord{7, 10, 234},
+ dictWord{136, 10, 537},
+ dictWord{4, 0, 988},
+ dictWord{136, 0, 955},
+ dictWord{135, 0, 1251},
+ dictWord{4, 10, 126},
+ dictWord{8, 10, 635},
+ dictWord{147, 10, 34},
+ dictWord{4, 10, 316},
+ dictWord{135, 10, 1561},
+ dictWord{137, 10, 861},
+ dictWord{4, 10, 64},
+ dictWord{5, 10, 352},
+ dictWord{5, 10, 720},
+ dictWord{6, 10, 368},
+ dictWord{139, 10, 359},
+ dictWord{134, 0, 192},
+ dictWord{4, 0, 132},
+ dictWord{5, 0, 69},
+ dictWord{135, 0, 1242},
+ dictWord{7, 10, 1577},
+ dictWord{10, 10, 304},
+ dictWord{10, 10, 549},
+ dictWord{12, 10, 365},
+ dictWord{13, 10, 220},
+ dictWord{13, 10, 240},
+ dictWord{142, 10, 33},
+ dictWord{4, 0, 111},
+ dictWord{7, 0, 865},
+ dictWord{134, 11, 219},
+ dictWord{5, 11, 582},
+ dictWord{6, 11, 1646},
+ dictWord{7, 11, 99},
+ dictWord{7, 11, 1962},
+ dictWord{7, 11, 1986},
+ dictWord{8, 11, 515},
+ dictWord{8, 11, 773},
+ dictWord{9, 11, 23},
+ dictWord{9, 11, 491},
+ dictWord{12, 11, 620},
+ dictWord{14, 11, 52},
+ dictWord{145, 11, 50},
+ dictWord{132, 0, 767},
+ dictWord{7, 11, 568},
+ dictWord{148, 11, 21},
+ dictWord{6, 0, 42},
+ dictWord{7, 0, 1416},
+ dictWord{7, 0, 2005},
+ dictWord{8, 0, 131},
+ dictWord{8, 0, 466},
+ dictWord{9, 0, 672},
+ dictWord{13, 0, 252},
+ dictWord{20, 0, 103},
+ dictWord{133, 11, 851},
+ dictWord{135, 0, 1050},
+ dictWord{6, 10, 175},
+ dictWord{137, 10, 289},
+ dictWord{5, 10, 432},
+ dictWord{133, 10, 913},
+ dictWord{6, 0, 44},
+ dictWord{136, 0, 368},
+ dictWord{135, 11, 784},
+ dictWord{132, 0, 570},
+ dictWord{133, 0, 120},
+ dictWord{139, 10, 595},
+ dictWord{140, 0, 29},
+ dictWord{6, 0, 227},
+ dictWord{135, 0, 1589},
+ dictWord{4, 11, 98},
+ dictWord{7, 11, 1365},
+ dictWord{9, 11, 422},
+ dictWord{9, 11, 670},
+ dictWord{10, 11, 775},
+ dictWord{11, 11, 210},
+ dictWord{13, 11, 26},
+ dictWord{13, 11, 457},
+ dictWord{141, 11, 476},
+ dictWord{140, 10, 80},
+ dictWord{5, 10, 931},
+ dictWord{134, 10, 1698},
+ dictWord{133, 0, 522},
+ dictWord{134, 0, 1120},
+ dictWord{135, 0, 1529},
+ dictWord{12, 0, 739},
+ dictWord{14, 0, 448},
+ dictWord{142, 0, 467},
+ dictWord{11, 10, 526},
+ dictWord{11, 10, 939},
+ dictWord{141, 10, 290},
+ dictWord{5, 10, 774},
+ dictWord{6, 10, 1637},
+ dictWord{6, 10, 1686},
+ dictWord{134, 10, 1751},
+ dictWord{6, 0, 1667},
+ dictWord{135, 0, 2036},
+ dictWord{7, 10, 1167},
+ dictWord{11, 10, 934},
+ dictWord{13, 10, 391},
+ dictWord{145, 10, 76},
+ dictWord{137, 11, 147},
+ dictWord{6, 10, 260},
+ dictWord{7, 10, 1484},
+ dictWord{11, 11, 821},
+ dictWord{12, 11, 110},
+ dictWord{12, 11, 153},
+ dictWord{18, 11, 41},
+ dictWord{150, 11, 19},
+ dictWord{6, 0, 511},
+ dictWord{12, 0, 132},
+ dictWord{134, 10, 573},
+ dictWord{5, 0, 568},
+ dictWord{6, 0, 138},
+ dictWord{135, 0, 1293},
+ dictWord{132, 0, 1020},
+ dictWord{8, 0, 258},
+ dictWord{9, 0, 208},
+ dictWord{137, 0, 359},
+ dictWord{4, 0, 565},
+ dictWord{8, 0, 23},
+ dictWord{136, 0, 827},
+ dictWord{134, 0, 344},
+ dictWord{4, 0, 922},
+ dictWord{5, 0, 1023},
+ dictWord{13, 11, 477},
+ dictWord{14, 11, 120},
+ dictWord{148, 11, 61},
+ dictWord{134, 0, 240},
+ dictWord{5, 11, 209},
+ dictWord{6, 11, 30},
+ dictWord{11, 11, 56},
+ dictWord{139, 11, 305},
+ dictWord{6, 0, 171},
+ dictWord{7, 0, 1002},
+ dictWord{7, 0, 1324},
+ dictWord{9, 0, 415},
+ dictWord{14, 0, 230},
+ dictWord{18, 0, 68},
+ dictWord{4, 10, 292},
+ dictWord{4, 10, 736},
+ dictWord{5, 10, 871},
+ dictWord{6, 10, 1689},
+ dictWord{7, 10, 1944},
+ dictWord{137, 10, 580},
+ dictWord{9, 11, 635},
+ dictWord{139, 11, 559},
+ dictWord{4, 11, 150},
+ dictWord{5, 11, 303},
+ dictWord{134, 11, 327},
+ dictWord{6, 10, 63},
+ dictWord{135, 10, 920},
+ dictWord{133, 10, 793},
+ dictWord{8, 11, 192},
+ dictWord{10, 11, 78},
+ dictWord{10, 11, 555},
+ dictWord{11, 11, 308},
+ dictWord{13, 11, 359},
+ dictWord{147, 11, 95},
+ dictWord{135, 11, 786},
+ dictWord{135, 11, 1712},
+ dictWord{136, 0, 402},
+ dictWord{6, 0, 754},
+ dictWord{6, 11, 1638},
+ dictWord{7, 11, 79},
+ dictWord{7, 11, 496},
+ dictWord{9, 11, 138},
+ dictWord{10, 11, 336},
+ dictWord{11, 11, 12},
+ dictWord{12, 11, 412},
+ dictWord{12, 11, 440},
+ dictWord{142, 11, 305},
+ dictWord{4, 0, 716},
+ dictWord{141, 0, 31},
+ dictWord{133, 0, 982},
+ dictWord{8, 0, 691},
+ dictWord{8, 0, 731},
+ dictWord{5, 10, 67},
+ dictWord{6, 10, 62},
+ dictWord{6, 10, 374},
+ dictWord{135, 10, 1391},
+ dictWord{9, 10, 790},
+ dictWord{140, 10, 47},
+ dictWord{139, 11, 556},
+ dictWord{151, 11, 1},
+ dictWord{7, 11, 204},
+ dictWord{7, 11, 415},
+ dictWord{8, 11, 42},
+ dictWord{10, 11, 85},
+ dictWord{11, 11, 33},
+ dictWord{11, 11, 564},
+ dictWord{12, 11, 571},
+ dictWord{149, 11, 1},
+ dictWord{8, 0, 888},
+ dictWord{7, 11, 610},
+ dictWord{135, 11, 1501},
+ dictWord{4, 10, 391},
+ dictWord{135, 10, 1169},
+ dictWord{5, 0, 847},
+ dictWord{9, 0, 840},
+ dictWord{138, 0, 803},
+ dictWord{137, 0, 823},
+ dictWord{134, 0, 785},
+ dictWord{8, 0, 152},
+ dictWord{9, 0, 53},
+ dictWord{9, 0, 268},
+ dictWord{9, 0, 901},
+ dictWord{10, 0, 518},
+ dictWord{10, 0, 829},
+ dictWord{11, 0, 188},
+ dictWord{13, 0, 74},
+ dictWord{14, 0, 46},
+ dictWord{15, 0, 17},
+ dictWord{15, 0, 33},
+ dictWord{17, 0, 40},
+ dictWord{18, 0, 36},
+ dictWord{19, 0, 20},
+ dictWord{22, 0, 1},
+ dictWord{152, 0, 2},
+ dictWord{4, 11, 3},
+ dictWord{5, 11, 247},
+ dictWord{5, 11, 644},
+ dictWord{7, 11, 744},
+ dictWord{7, 11, 1207},
+ dictWord{7, 11, 1225},
+ dictWord{7, 11, 1909},
+ dictWord{146, 11, 147},
+ dictWord{136, 0, 532},
+ dictWord{135, 0, 681},
+ dictWord{132, 10, 271},
+ dictWord{140, 0, 314},
+ dictWord{140, 0, 677},
+ dictWord{4, 0, 684},
+ dictWord{136, 0, 384},
+ dictWord{5, 11, 285},
+ dictWord{9, 11, 67},
+ dictWord{13, 11, 473},
+ dictWord{143, 11, 82},
+ dictWord{4, 10, 253},
+ dictWord{5, 10, 544},
+ dictWord{7, 10, 300},
+ dictWord{137, 10, 340},
+ dictWord{7, 0, 110},
+ dictWord{7, 0, 447},
+ dictWord{8, 0, 290},
+ dictWord{8, 0, 591},
+ dictWord{9, 0, 382},
+ dictWord{9, 0, 649},
+ dictWord{11, 0, 71},
+ dictWord{11, 0, 155},
+ dictWord{11, 0, 313},
+ dictWord{12, 0, 5},
+ dictWord{13, 0, 325},
+ dictWord{142, 0, 287},
+ dictWord{134, 0, 1818},
+ dictWord{136, 0, 1007},
+ dictWord{138, 0, 321},
+ dictWord{7, 0, 360},
+ dictWord{7, 0, 425},
+ dictWord{9, 0, 66},
+ dictWord{9, 0, 278},
+ dictWord{138, 0, 644},
+ dictWord{133, 10, 818},
+ dictWord{5, 0, 385},
+ dictWord{5, 10, 541},
+ dictWord{6, 10, 94},
+ dictWord{6, 10, 499},
+ dictWord{7, 10, 230},
+ dictWord{139, 10, 321},
+ dictWord{4, 10, 920},
+ dictWord{5, 10, 25},
+ dictWord{5, 10, 790},
+ dictWord{6, 10, 457},
+ dictWord{7, 10, 853},
+ dictWord{136, 10, 788},
+ dictWord{4, 0, 900},
+ dictWord{133, 0, 861},
+ dictWord{5, 0, 254},
+ dictWord{7, 0, 985},
+ dictWord{136, 0, 73},
+ dictWord{7, 0, 1959},
+ dictWord{136, 0, 683},
+ dictWord{134, 10, 1765},
+ dictWord{133, 10, 822},
+ dictWord{132, 10, 634},
+ dictWord{4, 11, 29},
+ dictWord{6, 11, 532},
+ dictWord{7, 11, 1628},
+ dictWord{7, 11, 1648},
+ dictWord{9, 11, 303},
+ dictWord{9, 11, 350},
+ dictWord{10, 11, 433},
+ dictWord{11, 11, 97},
+ dictWord{11, 11, 557},
+ dictWord{11, 11, 745},
+ dictWord{12, 11, 289},
+ dictWord{12, 11, 335},
+ dictWord{12, 11, 348},
+ dictWord{12, 11, 606},
+ dictWord{13, 11, 116},
+ dictWord{13, 11, 233},
+ dictWord{13, 11, 466},
+ dictWord{14, 11, 181},
+ dictWord{14, 11, 209},
+ dictWord{14, 11, 232},
+ dictWord{14, 11, 236},
+ dictWord{14, 11, 300},
+ dictWord{16, 11, 41},
+ dictWord{148, 11, 97},
+ dictWord{19, 0, 86},
+ dictWord{6, 10, 36},
+ dictWord{7, 10, 658},
+ dictWord{136, 10, 454},
+ dictWord{135, 11, 1692},
+ dictWord{132, 0, 725},
+ dictWord{5, 11, 501},
+ dictWord{7, 11, 1704},
+ dictWord{9, 11, 553},
+ dictWord{11, 11, 520},
+ dictWord{12, 11, 557},
+ dictWord{141, 11, 249},
+ dictWord{134, 0, 196},
+ dictWord{133, 0, 831},
+ dictWord{136, 0, 723},
+ dictWord{7, 0, 1897},
+ dictWord{13, 0, 80},
+ dictWord{13, 0, 437},
+ dictWord{145, 0, 74},
+ dictWord{4, 0, 992},
+ dictWord{6, 0, 627},
+ dictWord{136, 0, 994},
+ dictWord{135, 11, 1294},
+ dictWord{132, 10, 104},
+ dictWord{5, 0, 848},
+ dictWord{6, 0, 66},
+ dictWord{136, 0, 764},
+ dictWord{4, 0, 36},
+ dictWord{7, 0, 1387},
+ dictWord{10, 0, 205},
+ dictWord{139, 0, 755},
+ dictWord{6, 0, 1046},
+ dictWord{134, 0, 1485},
+ dictWord{134, 0, 950},
+ dictWord{132, 0, 887},
+ dictWord{14, 0, 450},
+ dictWord{148, 0, 111},
+ dictWord{7, 0, 620},
+ dictWord{7, 0, 831},
+ dictWord{9, 10, 542},
+ dictWord{9, 10, 566},
+ dictWord{138, 10, 728},
+ dictWord{6, 0, 165},
+ dictWord{138, 0, 388},
+ dictWord{139, 10, 263},
+ dictWord{4, 0, 719},
+ dictWord{135, 0, 155},
+ dictWord{138, 10, 468},
+ dictWord{6, 11, 453},
+ dictWord{144, 11, 36},
+ dictWord{134, 11, 129},
+ dictWord{5, 0, 533},
+ dictWord{7, 0, 755},
+ dictWord{138, 0, 780},
+ dictWord{134, 0, 1465},
+ dictWord{4, 0, 353},
+ dictWord{6, 0, 146},
+ dictWord{6, 0, 1789},
+ dictWord{7, 0, 427},
+ dictWord{7, 0, 990},
+ dictWord{7, 0, 1348},
+ dictWord{9, 0, 665},
+ dictWord{9, 0, 898},
+ dictWord{11, 0, 893},
+ dictWord{142, 0, 212},
+ dictWord{7, 10, 87},
+ dictWord{142, 10, 288},
+ dictWord{4, 0, 45},
+ dictWord{135, 0, 1257},
+ dictWord{12, 0, 7},
+ dictWord{7, 10, 988},
+ dictWord{7, 10, 1939},
+ dictWord{9, 10, 64},
+ dictWord{9, 10, 502},
+ dictWord{12, 10, 34},
+ dictWord{13, 10, 12},
+ dictWord{13, 10, 234},
+ dictWord{147, 10, 77},
+ dictWord{4, 0, 607},
+ dictWord{5, 11, 60},
+ dictWord{6, 11, 504},
+ dictWord{7, 11, 614},
+ dictWord{7, 11, 1155},
+ dictWord{140, 11, 0},
+ dictWord{135, 10, 141},
+ dictWord{8, 11, 198},
+ dictWord{11, 11, 29},
+ dictWord{140, 11, 534},
+ dictWord{140, 0, 65},
+ dictWord{136, 0, 816},
+ dictWord{132, 10, 619},
+ dictWord{139, 0, 88},
+ dictWord{5, 10, 246},
+ dictWord{8, 10, 189},
+ dictWord{9, 10, 355},
+ dictWord{9, 10, 512},
+ dictWord{10, 10, 124},
+ dictWord{10, 10, 453},
+ dictWord{11, 10, 143},
+ dictWord{11, 10, 416},
+ dictWord{11, 10, 859},
+ dictWord{141, 10, 341},
+ dictWord{4, 11, 379},
+ dictWord{135, 11, 1397},
+ dictWord{4, 0, 600},
+ dictWord{137, 0, 621},
+ dictWord{133, 0, 367},
+ dictWord{134, 0, 561},
+ dictWord{6, 0, 559},
+ dictWord{134, 0, 1691},
+ dictWord{6, 0, 585},
+ dictWord{134, 11, 585},
+ dictWord{135, 11, 1228},
+ dictWord{4, 11, 118},
+ dictWord{5, 10, 678},
+ dictWord{6, 11, 274},
+ dictWord{6, 11, 361},
+ dictWord{7, 11, 75},
+ dictWord{141, 11, 441},
+ dictWord{135, 11, 1818},
+ dictWord{137, 11, 841},
+ dictWord{5, 0, 573},
+ dictWord{6, 0, 287},
+ dictWord{7, 10, 862},
+ dictWord{7, 10, 1886},
+ dictWord{138, 10, 179},
+ dictWord{132, 10, 517},
+ dictWord{140, 11, 693},
+ dictWord{5, 11, 314},
+ dictWord{6, 11, 221},
+ dictWord{7, 11, 419},
+ dictWord{10, 11, 650},
+ dictWord{11, 11, 396},
+ dictWord{12, 11, 156},
+ dictWord{13, 11, 369},
+ dictWord{14, 11, 333},
+ dictWord{145, 11, 47},
+ dictWord{140, 10, 540},
+ dictWord{136, 10, 667},
+ dictWord{11, 10, 403},
+ dictWord{146, 10, 83},
+ dictWord{6, 0, 672},
+ dictWord{133, 10, 761},
+ dictWord{9, 0, 157},
+ dictWord{10, 10, 131},
+ dictWord{140, 10, 72},
+ dictWord{7, 0, 714},
+ dictWord{134, 11, 460},
+ dictWord{134, 0, 456},
+ dictWord{133, 0, 925},
+ dictWord{5, 11, 682},
+ dictWord{135, 11, 1887},
+ dictWord{136, 11, 510},
+ dictWord{136, 11, 475},
+ dictWord{133, 11, 1016},
+ dictWord{9, 0, 19},
+ dictWord{7, 11, 602},
+ dictWord{8, 11, 179},
+ dictWord{10, 11, 781},
+ dictWord{140, 11, 126},
+ dictWord{6, 11, 329},
+ dictWord{138, 11, 111},
+ dictWord{6, 0, 822},
+ dictWord{134, 0, 1473},
+ dictWord{144, 11, 86},
+ dictWord{11, 0, 113},
+ dictWord{139, 11, 113},
+ dictWord{5, 11, 821},
+ dictWord{134, 11, 1687},
+ dictWord{133, 10, 449},
+ dictWord{7, 0, 463},
+ dictWord{17, 0, 69},
+ dictWord{136, 10, 103},
+ dictWord{7, 10, 2028},
+ dictWord{138, 10, 641},
+ dictWord{6, 0, 193},
+ dictWord{7, 0, 240},
+ dictWord{7, 0, 1682},
+ dictWord{10, 0, 51},
+ dictWord{10, 0, 640},
+ dictWord{11, 0, 410},
+ dictWord{13, 0, 82},
+ dictWord{14, 0, 247},
+ dictWord{14, 0, 331},
+ dictWord{142, 0, 377},
+ dictWord{6, 0, 471},
+ dictWord{11, 0, 411},
+ dictWord{142, 0, 2},
+ dictWord{5, 11, 71},
+ dictWord{7, 11, 1407},
+ dictWord{9, 11, 388},
+ dictWord{9, 11, 704},
+ dictWord{10, 11, 261},
+ dictWord{10, 11, 619},
+ dictWord{11, 11, 547},
+ dictWord{11, 11, 619},
+ dictWord{143, 11, 157},
+ dictWord{136, 0, 633},
+ dictWord{135, 0, 1148},
+ dictWord{6, 0, 554},
+ dictWord{7, 0, 1392},
+ dictWord{12, 0, 129},
+ dictWord{7, 10, 1274},
+ dictWord{7, 10, 1386},
+ dictWord{7, 11, 2008},
+ dictWord{9, 11, 337},
+ dictWord{10, 11, 517},
+ dictWord{146, 10, 87},
+ dictWord{7, 0, 803},
+ dictWord{8, 0, 542},
+ dictWord{6, 10, 187},
+ dictWord{7, 10, 1203},
+ dictWord{8, 10, 380},
+ dictWord{14, 10, 117},
+ dictWord{149, 10, 28},
+ dictWord{6, 10, 297},
+ dictWord{7, 10, 793},
+ dictWord{139, 10, 938},
+ dictWord{8, 0, 438},
+ dictWord{11, 0, 363},
+ dictWord{7, 10, 464},
+ dictWord{11, 10, 105},
+ dictWord{12, 10, 231},
+ dictWord{14, 10, 386},
+ dictWord{15, 10, 102},
+ dictWord{148, 10, 75},
+ dictWord{5, 11, 16},
+ dictWord{6, 11, 86},
+ dictWord{6, 11, 603},
+ dictWord{7, 11, 292},
+ dictWord{7, 11, 561},
+ dictWord{8, 11, 257},
+ dictWord{8, 11, 382},
+ dictWord{9, 11, 721},
+ dictWord{9, 11, 778},
+ dictWord{11, 11, 581},
+ dictWord{140, 11, 466},
+ dictWord{6, 0, 717},
+ dictWord{4, 11, 486},
+ dictWord{133, 11, 491},
+ dictWord{132, 0, 875},
+ dictWord{132, 11, 72},
+ dictWord{6, 11, 265},
+ dictWord{135, 11, 847},
+ dictWord{4, 0, 237},
+ dictWord{135, 0, 514},
+ dictWord{6, 0, 392},
+ dictWord{7, 0, 65},
+ dictWord{135, 0, 2019},
+ dictWord{140, 11, 261},
+ dictWord{135, 11, 922},
+ dictWord{137, 11, 404},
+ dictWord{12, 0, 563},
+ dictWord{14, 0, 101},
+ dictWord{18, 0, 129},
+ dictWord{7, 10, 1010},
+ dictWord{11, 10, 733},
+ dictWord{11, 10, 759},
+ dictWord{13, 10, 34},
+ dictWord{146, 10, 45},
+ dictWord{7, 10, 1656},
+ dictWord{9, 10, 369},
+ dictWord{10, 10, 338},
+ dictWord{10, 10, 490},
+ dictWord{11, 10, 154},
+ dictWord{11, 10, 545},
+ dictWord{11, 10, 775},
+ dictWord{13, 10, 77},
+ dictWord{141, 10, 274},
+ dictWord{4, 0, 444},
+ dictWord{10, 0, 146},
+ dictWord{140, 0, 9},
+ dictWord{139, 11, 163},
+ dictWord{7, 0, 1260},
+ dictWord{135, 0, 1790},
+ dictWord{9, 0, 222},
+ dictWord{10, 0, 43},
+ dictWord{139, 0, 900},
+ dictWord{137, 11, 234},
+ dictWord{138, 0, 971},
+ dictWord{137, 0, 761},
+ dictWord{134, 0, 699},
+ dictWord{136, 11, 434},
+ dictWord{6, 0, 1116},
+ dictWord{7, 0, 1366},
+ dictWord{5, 10, 20},
+ dictWord{6, 11, 197},
+ dictWord{6, 10, 298},
+ dictWord{7, 10, 659},
+ dictWord{8, 11, 205},
+ dictWord{137, 10, 219},
+ dictWord{132, 11, 490},
+ dictWord{11, 11, 820},
+ dictWord{150, 11, 51},
+ dictWord{7, 10, 1440},
+ dictWord{11, 10, 854},
+ dictWord{11, 10, 872},
+ dictWord{11, 10, 921},
+ dictWord{12, 10, 551},
+ dictWord{13, 10, 472},
+ dictWord{142, 10, 367},
+ dictWord{140, 11, 13},
+ dictWord{132, 0, 829},
+ dictWord{12, 0, 242},
+ dictWord{132, 10, 439},
+ dictWord{136, 10, 669},
+ dictWord{6, 0, 593},
+ dictWord{6, 11, 452},
+ dictWord{7, 11, 312},
+ dictWord{138, 11, 219},
+ dictWord{4, 11, 333},
+ dictWord{9, 11, 176},
+ dictWord{12, 11, 353},
+ dictWord{141, 11, 187},
+ dictWord{7, 0, 36},
+ dictWord{8, 0, 201},
+ dictWord{136, 0, 605},
+ dictWord{140, 0, 224},
+ dictWord{132, 10, 233},
+ dictWord{134, 0, 1430},
+ dictWord{134, 0, 1806},
+ dictWord{4, 0, 523},
+ dictWord{133, 0, 638},
+ dictWord{6, 0, 1889},
+ dictWord{9, 0, 958},
+ dictWord{9, 0, 971},
+ dictWord{9, 0, 976},
+ dictWord{12, 0, 796},
+ dictWord{12, 0, 799},
+ dictWord{12, 0, 808},
+ dictWord{12, 0, 835},
+ dictWord{12, 0, 836},
+ dictWord{12, 0, 914},
+ dictWord{12, 0, 946},
+ dictWord{15, 0, 216},
+ dictWord{15, 0, 232},
+ dictWord{18, 0, 183},
+ dictWord{18, 0, 187},
+ dictWord{18, 0, 194},
+ dictWord{18, 0, 212},
+ dictWord{18, 0, 232},
+ dictWord{149, 0, 49},
+ dictWord{132, 10, 482},
+ dictWord{6, 0, 827},
+ dictWord{134, 0, 1434},
+ dictWord{135, 10, 346},
+ dictWord{134, 0, 2043},
+ dictWord{6, 0, 242},
+ dictWord{7, 0, 227},
+ dictWord{7, 0, 1581},
+ dictWord{8, 0, 104},
+ dictWord{9, 0, 113},
+ dictWord{9, 0, 220},
+ dictWord{9, 0, 427},
+ dictWord{10, 0, 136},
+ dictWord{10, 0, 239},
+ dictWord{11, 0, 579},
+ dictWord{11, 0, 1023},
+ dictWord{13, 0, 4},
+ dictWord{13, 0, 204},
+ dictWord{13, 0, 316},
+ dictWord{148, 0, 86},
+ dictWord{134, 11, 1685},
+ dictWord{7, 0, 148},
+ dictWord{8, 0, 284},
+ dictWord{141, 0, 63},
+ dictWord{142, 0, 10},
+ dictWord{135, 11, 584},
+ dictWord{134, 0, 1249},
+ dictWord{7, 0, 861},
+ dictWord{135, 10, 334},
+ dictWord{5, 10, 795},
+ dictWord{6, 10, 1741},
+ dictWord{137, 11, 70},
+ dictWord{132, 0, 807},
+ dictWord{7, 11, 135},
+ dictWord{8, 11, 7},
+ dictWord{8, 11, 62},
+ dictWord{9, 11, 243},
+ dictWord{10, 11, 658},
+ dictWord{10, 11, 697},
+ dictWord{11, 11, 456},
+ dictWord{139, 11, 756},
+ dictWord{9, 11, 395},
+ dictWord{138, 11, 79},
+ dictWord{137, 11, 108},
+ dictWord{147, 0, 94},
+ dictWord{136, 0, 494},
+ dictWord{135, 11, 631},
+ dictWord{135, 10, 622},
+ dictWord{7, 0, 1510},
+ dictWord{135, 10, 1750},
+ dictWord{4, 10, 203},
+ dictWord{135, 10, 1936},
+ dictWord{7, 11, 406},
+ dictWord{7, 11, 459},
+ dictWord{8, 11, 606},
+ dictWord{139, 11, 726},
+ dictWord{7, 0, 1306},
+ dictWord{8, 0, 505},
+ dictWord{9, 0, 482},
+ dictWord{10, 0, 126},
+ dictWord{11, 0, 225},
+ dictWord{12, 0, 347},
+ dictWord{12, 0, 449},
+ dictWord{13, 0, 19},
+ dictWord{14, 0, 218},
+ dictWord{142, 0, 435},
+ dictWord{5, 0, 268},
+ dictWord{10, 0, 764},
+ dictWord{12, 0, 120},
+ dictWord{13, 0, 39},
+ dictWord{145, 0, 127},
+ dictWord{142, 11, 68},
+ dictWord{11, 10, 678},
+ dictWord{140, 10, 307},
+ dictWord{12, 11, 268},
+ dictWord{12, 11, 640},
+ dictWord{142, 11, 119},
+ dictWord{135, 10, 2044},
+ dictWord{133, 11, 612},
+ dictWord{4, 11, 372},
+ dictWord{7, 11, 482},
+ dictWord{8, 11, 158},
+ dictWord{9, 11, 602},
+ dictWord{9, 11, 615},
+ dictWord{10, 11, 245},
+ dictWord{10, 11, 678},
+ dictWord{10, 11, 744},
+ dictWord{11, 11, 248},
+ dictWord{139, 11, 806},
+ dictWord{7, 10, 311},
+ dictWord{9, 10, 308},
+ dictWord{140, 10, 255},
+ dictWord{4, 0, 384},
+ dictWord{135, 0, 1022},
+ dictWord{5, 11, 854},
+ dictWord{135, 11, 1991},
+ dictWord{135, 10, 1266},
+ dictWord{4, 10, 400},
+ dictWord{5, 10, 267},
+ dictWord{135, 10, 232},
+ dictWord{135, 0, 1703},
+ dictWord{9, 0, 159},
+ dictWord{11, 0, 661},
+ dictWord{140, 0, 603},
+ dictWord{4, 0, 964},
+ dictWord{14, 0, 438},
+ dictWord{14, 0, 444},
+ dictWord{14, 0, 456},
+ dictWord{22, 0, 60},
+ dictWord{22, 0, 63},
+ dictWord{9, 11, 106},
+ dictWord{9, 11, 163},
+ dictWord{9, 11, 296},
+ dictWord{10, 11, 167},
+ dictWord{10, 11, 172},
+ dictWord{10, 11, 777},
+ dictWord{139, 11, 16},
+ dictWord{136, 0, 583},
+ dictWord{132, 0, 515},
+ dictWord{8, 0, 632},
+ dictWord{8, 0, 697},
+ dictWord{137, 0, 854},
+ dictWord{5, 11, 195},
+ dictWord{135, 11, 1685},
+ dictWord{6, 0, 1123},
+ dictWord{134, 0, 1365},
+ dictWord{134, 11, 328},
+ dictWord{7, 11, 1997},
+ dictWord{8, 11, 730},
+ dictWord{139, 11, 1006},
+ dictWord{4, 0, 136},
+ dictWord{133, 0, 551},
+ dictWord{134, 0, 1782},
+ dictWord{7, 0, 1287},
+ dictWord{9, 0, 44},
+ dictWord{10, 0, 552},
+ dictWord{10, 0, 642},
+ dictWord{11, 0, 839},
+ dictWord{12, 0, 274},
+ dictWord{12, 0, 275},
+ dictWord{12, 0, 372},
+ dictWord{13, 0, 91},
+ dictWord{142, 0, 125},
+ dictWord{5, 11, 751},
+ dictWord{11, 11, 797},
+ dictWord{140, 11, 203},
+ dictWord{133, 0, 732},
+ dictWord{7, 0, 679},
+ dictWord{8, 0, 313},
+ dictWord{4, 10, 100},
+ dictWord{135, 11, 821},
+ dictWord{10, 0, 361},
+ dictWord{142, 0, 316},
+ dictWord{134, 0, 595},
+ dictWord{6, 0, 147},
+ dictWord{7, 0, 886},
+ dictWord{9, 0, 753},
+ dictWord{138, 0, 268},
+ dictWord{5, 10, 362},
+ dictWord{5, 10, 443},
+ dictWord{6, 10, 318},
+ dictWord{7, 10, 1019},
+ dictWord{139, 10, 623},
+ dictWord{5, 10, 463},
+ dictWord{136, 10, 296},
+ dictWord{4, 10, 454},
+ dictWord{5, 11, 950},
+ dictWord{5, 11, 994},
+ dictWord{134, 11, 351},
+ dictWord{138, 0, 137},
+ dictWord{5, 10, 48},
+ dictWord{5, 10, 404},
+ dictWord{6, 10, 557},
+ dictWord{7, 10, 458},
+ dictWord{8, 10, 597},
+ dictWord{10, 10, 455},
+ dictWord{10, 10, 606},
+ dictWord{11, 10, 49},
+ dictWord{11, 10, 548},
+ dictWord{12, 10, 476},
+ dictWord{13, 10, 18},
+ dictWord{141, 10, 450},
+ dictWord{133, 0, 414},
+ dictWord{135, 0, 1762},
+ dictWord{5, 11, 421},
+ dictWord{135, 11, 47},
+ dictWord{5, 10, 442},
+ dictWord{135, 10, 1984},
+ dictWord{134, 0, 599},
+ dictWord{134, 0, 1749},
+ dictWord{134, 0, 1627},
+ dictWord{4, 0, 488},
+ dictWord{132, 11, 350},
+ dictWord{137, 11, 751},
+ dictWord{132, 0, 83},
+ dictWord{140, 0, 676},
+ dictWord{133, 11, 967},
+ dictWord{7, 0, 1639},
+ dictWord{5, 10, 55},
+ dictWord{140, 10, 161},
+ dictWord{4, 11, 473},
+ dictWord{7, 11, 623},
+ dictWord{8, 11, 808},
+ dictWord{9, 11, 871},
+ dictWord{9, 11, 893},
+ dictWord{11, 11, 38},
+ dictWord{11, 11, 431},
+ dictWord{12, 11, 112},
+ dictWord{12, 11, 217},
+ dictWord{12, 11, 243},
+ dictWord{12, 11, 562},
+ dictWord{12, 11, 683},
+ dictWord{13, 11, 141},
+ dictWord{13, 11, 197},
+ dictWord{13, 11, 227},
+ dictWord{13, 11, 406},
+ dictWord{13, 11, 487},
+ dictWord{14, 11, 156},
+ dictWord{14, 11, 203},
+ dictWord{14, 11, 224},
+ dictWord{14, 11, 256},
+ dictWord{18, 11, 58},
+ dictWord{150, 11, 0},
+ dictWord{133, 10, 450},
+ dictWord{7, 11, 736},
+ dictWord{139, 11, 264},
+ dictWord{134, 0, 278},
+ dictWord{4, 11, 222},
+ dictWord{7, 11, 286},
+ dictWord{136, 11, 629},
+ dictWord{135, 10, 869},
+ dictWord{140, 0, 97},
+ dictWord{144, 0, 14},
+ dictWord{134, 0, 1085},
+ dictWord{4, 10, 213},
+ dictWord{7, 10, 223},
+ dictWord{136, 10, 80},
+ dictWord{7, 0, 388},
+ dictWord{7, 0, 644},
+ dictWord{139, 0, 781},
+ dictWord{132, 0, 849},
+ dictWord{7, 0, 229},
+ dictWord{8, 0, 59},
+ dictWord{9, 0, 190},
+ dictWord{10, 0, 378},
+ dictWord{140, 0, 191},
+ dictWord{7, 10, 381},
+ dictWord{7, 10, 806},
+ dictWord{7, 10, 820},
+ dictWord{8, 10, 354},
+ dictWord{8, 10, 437},
+ dictWord{8, 10, 787},
+ dictWord{9, 10, 657},
+ dictWord{10, 10, 58},
+ dictWord{10, 10, 339},
+ dictWord{10, 10, 749},
+ dictWord{11, 10, 914},
+ dictWord{12, 10, 162},
+ dictWord{13, 10, 75},
+ dictWord{14, 10, 106},
+ dictWord{14, 10, 198},
+ dictWord{14, 10, 320},
+ dictWord{14, 10, 413},
+ dictWord{146, 10, 43},
+ dictWord{141, 11, 306},
+ dictWord{136, 10, 747},
+ dictWord{134, 0, 1115},
+ dictWord{16, 0, 94},
+ dictWord{16, 0, 108},
+ dictWord{136, 11, 146},
+ dictWord{6, 0, 700},
+ dictWord{6, 0, 817},
+ dictWord{134, 0, 1002},
+ dictWord{133, 10, 692},
+ dictWord{4, 11, 465},
+ dictWord{135, 11, 1663},
+ dictWord{134, 10, 191},
+ dictWord{6, 0, 1414},
+ dictWord{135, 11, 913},
+ dictWord{132, 0, 660},
+ dictWord{7, 0, 1035},
+ dictWord{138, 0, 737},
+ dictWord{6, 10, 162},
+ dictWord{7, 10, 1960},
+ dictWord{136, 10, 831},
+ dictWord{132, 10, 706},
+ dictWord{7, 0, 690},
+ dictWord{9, 0, 217},
+ dictWord{9, 0, 587},
+ dictWord{140, 0, 521},
+ dictWord{138, 10, 426},
+ dictWord{135, 10, 1235},
+ dictWord{6, 11, 82},
+ dictWord{7, 11, 138},
+ dictWord{7, 11, 517},
+ dictWord{9, 11, 673},
+ dictWord{139, 11, 238},
+ dictWord{138, 0, 272},
+ dictWord{5, 11, 495},
+ dictWord{7, 11, 834},
+ dictWord{9, 11, 733},
+ dictWord{139, 11, 378},
+ dictWord{134, 0, 1744},
+ dictWord{132, 0, 1011},
+ dictWord{7, 11, 828},
+ dictWord{142, 11, 116},
+ dictWord{4, 0, 733},
+ dictWord{9, 0, 194},
+ dictWord{10, 0, 92},
+ dictWord{11, 0, 198},
+ dictWord{12, 0, 84},
+ dictWord{13, 0, 128},
+ dictWord{133, 11, 559},
+ dictWord{10, 0, 57},
+ dictWord{10, 0, 277},
+ dictWord{6, 11, 21},
+ dictWord{6, 11, 1737},
+ dictWord{7, 11, 1444},
+ dictWord{136, 11, 224},
+ dictWord{4, 10, 204},
+ dictWord{137, 10, 902},
+ dictWord{136, 10, 833},
+ dictWord{11, 0, 348},
+ dictWord{12, 0, 99},
+ dictWord{18, 0, 1},
+ dictWord{18, 0, 11},
+ dictWord{19, 0, 4},
+ dictWord{7, 10, 366},
+ dictWord{9, 10, 287},
+ dictWord{12, 10, 199},
+ dictWord{12, 10, 556},
+ dictWord{140, 10, 577},
+ dictWord{6, 0, 1981},
+ dictWord{136, 0, 936},
+ dictWord{21, 0, 33},
+ dictWord{150, 0, 40},
+ dictWord{5, 11, 519},
+ dictWord{138, 11, 204},
+ dictWord{5, 10, 356},
+ dictWord{135, 10, 224},
+ dictWord{134, 0, 775},
+ dictWord{135, 0, 306},
+ dictWord{7, 10, 630},
+ dictWord{9, 10, 567},
+ dictWord{11, 10, 150},
+ dictWord{11, 10, 444},
+ dictWord{141, 10, 119},
+ dictWord{5, 0, 979},
+ dictWord{134, 10, 539},
+ dictWord{133, 0, 611},
+ dictWord{4, 11, 402},
+ dictWord{135, 11, 1679},
+ dictWord{5, 0, 178},
+ dictWord{7, 11, 2},
+ dictWord{8, 11, 323},
+ dictWord{136, 11, 479},
+ dictWord{5, 11, 59},
+ dictWord{135, 11, 672},
+ dictWord{4, 0, 1010},
+ dictWord{6, 0, 1969},
+ dictWord{138, 11, 237},
+ dictWord{133, 11, 412},
+ dictWord{146, 11, 34},
+ dictWord{7, 11, 1740},
+ dictWord{146, 11, 48},
+ dictWord{134, 0, 664},
+ dictWord{139, 10, 814},
+ dictWord{4, 11, 85},
+ dictWord{135, 11, 549},
+ dictWord{133, 11, 94},
+ dictWord{133, 11, 457},
+ dictWord{132, 0, 390},
+ dictWord{134, 0, 1510},
+ dictWord{4, 10, 235},
+ dictWord{135, 10, 255},
+ dictWord{4, 10, 194},
+ dictWord{5, 10, 584},
+ dictWord{6, 11, 11},
+ dictWord{6, 10, 384},
+ dictWord{7, 11, 187},
+ dictWord{7, 10, 583},
+ dictWord{10, 10, 761},
+ dictWord{11, 10, 760},
+ dictWord{139, 10, 851},
+ dictWord{4, 11, 522},
+ dictWord{139, 11, 802},
+ dictWord{135, 0, 493},
+ dictWord{10, 11, 776},
+ dictWord{13, 11, 345},
+ dictWord{142, 11, 425},
+ dictWord{146, 0, 37},
+ dictWord{4, 11, 52},
+ dictWord{135, 11, 661},
+ dictWord{134, 0, 724},
+ dictWord{134, 0, 829},
+ dictWord{133, 11, 520},
+ dictWord{133, 10, 562},
+ dictWord{4, 11, 281},
+ dictWord{5, 11, 38},
+ dictWord{7, 11, 194},
+ dictWord{7, 11, 668},
+ dictWord{7, 11, 1893},
+ dictWord{137, 11, 397},
+ dictWord{5, 10, 191},
+ dictWord{137, 10, 271},
+ dictWord{7, 0, 1537},
+ dictWord{14, 0, 96},
+ dictWord{143, 0, 73},
+ dictWord{5, 0, 473},
+ dictWord{11, 0, 168},
+ dictWord{4, 10, 470},
+ dictWord{6, 10, 153},
+ dictWord{7, 10, 1503},
+ dictWord{7, 10, 1923},
+ dictWord{10, 10, 701},
+ dictWord{11, 10, 132},
+ dictWord{11, 10, 227},
+ dictWord{11, 10, 320},
+ dictWord{11, 10, 436},
+ dictWord{11, 10, 525},
+ dictWord{11, 10, 855},
+ dictWord{12, 10, 41},
+ dictWord{12, 10, 286},
+ dictWord{13, 10, 103},
+ dictWord{13, 10, 284},
+ dictWord{14, 10, 255},
+ dictWord{14, 10, 262},
+ dictWord{15, 10, 117},
+ dictWord{143, 10, 127},
+ dictWord{133, 0, 105},
+ dictWord{5, 0, 438},
+ dictWord{9, 0, 694},
+ dictWord{12, 0, 627},
+ dictWord{141, 0, 210},
+ dictWord{133, 10, 327},
+ dictWord{6, 10, 552},
+ dictWord{7, 10, 1754},
+ dictWord{137, 10, 604},
+ dictWord{134, 0, 1256},
+ dictWord{152, 0, 11},
+ dictWord{5, 11, 448},
+ dictWord{11, 11, 98},
+ dictWord{139, 11, 524},
+ dictWord{7, 0, 1626},
+ dictWord{5, 10, 80},
+ dictWord{6, 10, 405},
+ dictWord{7, 10, 403},
+ dictWord{7, 10, 1502},
+ dictWord{8, 10, 456},
+ dictWord{9, 10, 487},
+ dictWord{9, 10, 853},
+ dictWord{9, 10, 889},
+ dictWord{10, 10, 309},
+ dictWord{11, 10, 721},
+ dictWord{11, 10, 994},
+ dictWord{12, 10, 430},
+ dictWord{13, 10, 165},
+ dictWord{14, 11, 16},
+ dictWord{146, 11, 44},
+ dictWord{132, 0, 779},
+ dictWord{8, 0, 25},
+ dictWord{138, 0, 826},
+ dictWord{4, 10, 453},
+ dictWord{5, 10, 887},
+ dictWord{6, 10, 535},
+ dictWord{8, 10, 6},
+ dictWord{8, 10, 543},
+ dictWord{136, 10, 826},
+ dictWord{137, 11, 461},
+ dictWord{140, 11, 632},
+ dictWord{132, 0, 308},
+ dictWord{135, 0, 741},
+ dictWord{132, 0, 671},
+ dictWord{7, 0, 150},
+ dictWord{8, 0, 649},
+ dictWord{136, 0, 1020},
+ dictWord{9, 0, 99},
+ dictWord{6, 11, 336},
+ dictWord{8, 11, 552},
+ dictWord{9, 11, 285},
+ dictWord{10, 11, 99},
+ dictWord{139, 11, 568},
+ dictWord{134, 0, 521},
+ dictWord{5, 0, 339},
+ dictWord{14, 0, 3},
+ dictWord{15, 0, 41},
+ dictWord{15, 0, 166},
+ dictWord{147, 0, 66},
+ dictWord{6, 11, 423},
+ dictWord{7, 11, 665},
+ dictWord{7, 11, 1210},
+ dictWord{9, 11, 218},
+ dictWord{141, 11, 222},
+ dictWord{6, 0, 543},
+ dictWord{5, 10, 101},
+ dictWord{5, 11, 256},
+ dictWord{6, 10, 88},
+ dictWord{7, 10, 1677},
+ dictWord{9, 10, 100},
+ dictWord{10, 10, 677},
+ dictWord{14, 10, 169},
+ dictWord{14, 10, 302},
+ dictWord{14, 10, 313},
+ dictWord{15, 10, 48},
+ dictWord{143, 10, 84},
+ dictWord{4, 10, 310},
+ dictWord{7, 10, 708},
+ dictWord{7, 10, 996},
+ dictWord{9, 10, 795},
+ dictWord{10, 10, 390},
+ dictWord{10, 10, 733},
+ dictWord{11, 10, 451},
+ dictWord{12, 10, 249},
+ dictWord{14, 10, 115},
+ dictWord{14, 10, 286},
+ dictWord{143, 10, 100},
+ dictWord{133, 10, 587},
+ dictWord{13, 11, 417},
+ dictWord{14, 11, 129},
+ dictWord{143, 11, 15},
+ dictWord{134, 0, 1358},
+ dictWord{136, 11, 554},
+ dictWord{132, 10, 498},
+ dictWord{7, 10, 217},
+ dictWord{8, 10, 140},
+ dictWord{138, 10, 610},
+ dictWord{135, 11, 989},
+ dictWord{135, 11, 634},
+ dictWord{6, 0, 155},
+ dictWord{140, 0, 234},
+ dictWord{135, 11, 462},
+ dictWord{132, 11, 618},
+ dictWord{134, 0, 1628},
+ dictWord{132, 0, 766},
+ dictWord{4, 11, 339},
+ dictWord{5, 10, 905},
+ dictWord{135, 11, 259},
+ dictWord{135, 0, 829},
+ dictWord{4, 11, 759},
+ dictWord{141, 11, 169},
+ dictWord{7, 0, 1445},
+ dictWord{4, 10, 456},
+ dictWord{7, 10, 358},
+ dictWord{7, 10, 1637},
+ dictWord{8, 10, 643},
+ dictWord{139, 10, 483},
+ dictWord{5, 0, 486},
+ dictWord{135, 0, 1349},
+ dictWord{5, 11, 688},
+ dictWord{135, 11, 712},
+ dictWord{7, 0, 1635},
+ dictWord{8, 0, 17},
+ dictWord{10, 0, 217},
+ dictWord{10, 0, 295},
+ dictWord{12, 0, 2},
+ dictWord{140, 11, 2},
+ dictWord{138, 0, 558},
+ dictWord{150, 10, 56},
+ dictWord{4, 11, 278},
+ dictWord{5, 11, 465},
+ dictWord{135, 11, 1367},
+ dictWord{136, 11, 482},
+ dictWord{133, 10, 535},
+ dictWord{6, 0, 1362},
+ dictWord{6, 0, 1461},
+ dictWord{10, 11, 274},
+ dictWord{10, 11, 625},
+ dictWord{139, 11, 530},
+ dictWord{5, 0, 599},
+ dictWord{5, 11, 336},
+ dictWord{6, 11, 341},
+ dictWord{6, 11, 478},
+ dictWord{6, 11, 1763},
+ dictWord{136, 11, 386},
+ dictWord{7, 10, 1748},
+ dictWord{137, 11, 151},
+ dictWord{134, 0, 1376},
+ dictWord{133, 10, 539},
+ dictWord{135, 11, 73},
+ dictWord{135, 11, 1971},
+ dictWord{139, 11, 283},
+ dictWord{9, 0, 93},
+ dictWord{139, 0, 474},
+ dictWord{6, 10, 91},
+ dictWord{135, 10, 435},
+ dictWord{6, 0, 447},
+ dictWord{5, 11, 396},
+ dictWord{134, 11, 501},
+ dictWord{4, 10, 16},
+ dictWord{5, 10, 316},
+ dictWord{5, 10, 842},
+ dictWord{6, 10, 370},
+ dictWord{6, 10, 1778},
+ dictWord{8, 10, 166},
+ dictWord{11, 10, 812},
+ dictWord{12, 10, 206},
+ dictWord{12, 10, 351},
+ dictWord{14, 10, 418},
+ dictWord{16, 10, 15},
+ dictWord{16, 10, 34},
+ dictWord{18, 10, 3},
+ dictWord{19, 10, 3},
+ dictWord{19, 10, 7},
+ dictWord{20, 10, 4},
+ dictWord{149, 10, 21},
+ dictWord{7, 0, 577},
+ dictWord{7, 0, 1432},
+ dictWord{9, 0, 475},
+ dictWord{9, 0, 505},
+ dictWord{9, 0, 526},
+ dictWord{9, 0, 609},
+ dictWord{9, 0, 689},
+ dictWord{9, 0, 726},
+ dictWord{9, 0, 735},
+ dictWord{9, 0, 738},
+ dictWord{10, 0, 556},
+ dictWord{10, 0, 674},
+ dictWord{10, 0, 684},
+ dictWord{11, 0, 89},
+ dictWord{11, 0, 202},
+ dictWord{11, 0, 272},
+ dictWord{11, 0, 380},
+ dictWord{11, 0, 415},
+ dictWord{11, 0, 505},
+ dictWord{11, 0, 537},
+ dictWord{11, 0, 550},
+ dictWord{11, 0, 562},
+ dictWord{11, 0, 640},
+ dictWord{11, 0, 667},
+ dictWord{11, 0, 688},
+ dictWord{11, 0, 847},
+ dictWord{11, 0, 927},
+ dictWord{11, 0, 930},
+ dictWord{11, 0, 940},
+ dictWord{12, 0, 144},
+ dictWord{12, 0, 325},
+ dictWord{12, 0, 329},
+ dictWord{12, 0, 389},
+ dictWord{12, 0, 403},
+ dictWord{12, 0, 451},
+ dictWord{12, 0, 515},
+ dictWord{12, 0, 604},
+ dictWord{12, 0, 616},
+ dictWord{12, 0, 626},
+ dictWord{13, 0, 66},
+ dictWord{13, 0, 131},
+ dictWord{13, 0, 167},
+ dictWord{13, 0, 236},
+ dictWord{13, 0, 368},
+ dictWord{13, 0, 411},
+ dictWord{13, 0, 434},
+ dictWord{13, 0, 453},
+ dictWord{13, 0, 461},
+ dictWord{13, 0, 474},
+ dictWord{14, 0, 59},
+ dictWord{14, 0, 60},
+ dictWord{14, 0, 139},
+ dictWord{14, 0, 152},
+ dictWord{14, 0, 276},
+ dictWord{14, 0, 353},
+ dictWord{14, 0, 402},
+ dictWord{15, 0, 28},
+ dictWord{15, 0, 81},
+ dictWord{15, 0, 123},
+ dictWord{15, 0, 152},
+ dictWord{18, 0, 136},
+ dictWord{148, 0, 88},
+ dictWord{4, 11, 929},
+ dictWord{133, 11, 799},
+ dictWord{136, 11, 46},
+ dictWord{142, 0, 307},
+ dictWord{4, 0, 609},
+ dictWord{7, 0, 756},
+ dictWord{9, 0, 544},
+ dictWord{11, 0, 413},
+ dictWord{144, 0, 25},
+ dictWord{10, 0, 687},
+ dictWord{7, 10, 619},
+ dictWord{10, 10, 547},
+ dictWord{11, 10, 122},
+ dictWord{140, 10, 601},
+ dictWord{4, 0, 930},
+ dictWord{133, 0, 947},
+ dictWord{133, 0, 939},
+ dictWord{142, 0, 21},
+ dictWord{4, 11, 892},
+ dictWord{133, 11, 770},
+ dictWord{133, 0, 962},
+ dictWord{5, 0, 651},
+ dictWord{8, 0, 170},
+ dictWord{9, 0, 61},
+ dictWord{9, 0, 63},
+ dictWord{10, 0, 23},
+ dictWord{10, 0, 37},
+ dictWord{10, 0, 834},
+ dictWord{11, 0, 4},
+ dictWord{11, 0, 187},
+ dictWord{11, 0, 281},
+ dictWord{11, 0, 503},
+ dictWord{11, 0, 677},
+ dictWord{12, 0, 96},
+ dictWord{12, 0, 130},
+ dictWord{12, 0, 244},
+ dictWord{14, 0, 5},
+ dictWord{14, 0, 40},
+ dictWord{14, 0, 162},
+ dictWord{14, 0, 202},
+ dictWord{146, 0, 133},
+ dictWord{4, 0, 406},
+ dictWord{5, 0, 579},
+ dictWord{12, 0, 492},
+ dictWord{150, 0, 15},
+ dictWord{135, 11, 158},
+ dictWord{135, 0, 597},
+ dictWord{132, 0, 981},
+ dictWord{132, 10, 888},
+ dictWord{4, 10, 149},
+ dictWord{138, 10, 368},
+ dictWord{132, 0, 545},
+ dictWord{4, 10, 154},
+ dictWord{7, 10, 1134},
+ dictWord{136, 10, 105},
+ dictWord{135, 11, 2001},
+ dictWord{134, 0, 1558},
+ dictWord{4, 10, 31},
+ dictWord{6, 10, 429},
+ dictWord{7, 10, 962},
+ dictWord{9, 10, 458},
+ dictWord{139, 10, 691},
+ dictWord{132, 10, 312},
+ dictWord{135, 10, 1642},
+ dictWord{6, 0, 17},
+ dictWord{6, 0, 1304},
+ dictWord{7, 0, 16},
+ dictWord{7, 0, 1001},
+ dictWord{9, 0, 886},
+ dictWord{10, 0, 489},
+ dictWord{10, 0, 800},
+ dictWord{11, 0, 782},
+ dictWord{12, 0, 320},
+ dictWord{13, 0, 467},
+ dictWord{14, 0, 145},
+ dictWord{14, 0, 387},
+ dictWord{143, 0, 119},
+ dictWord{135, 0, 1982},
+ dictWord{17, 0, 17},
+ dictWord{7, 11, 1461},
+ dictWord{140, 11, 91},
+ dictWord{4, 10, 236},
+ dictWord{132, 11, 602},
+ dictWord{138, 0, 907},
+ dictWord{136, 0, 110},
+ dictWord{7, 0, 272},
+ dictWord{19, 0, 53},
+ dictWord{5, 10, 836},
+ dictWord{5, 10, 857},
+ dictWord{134, 10, 1680},
+ dictWord{5, 0, 458},
+ dictWord{7, 11, 1218},
+ dictWord{136, 11, 303},
+ dictWord{7, 0, 1983},
+ dictWord{8, 0, 0},
+ dictWord{8, 0, 171},
+ dictWord{9, 0, 120},
+ dictWord{9, 0, 732},
+ dictWord{10, 0, 473},
+ dictWord{11, 0, 656},
+ dictWord{11, 0, 998},
+ dictWord{18, 0, 0},
+ dictWord{18, 0, 2},
+ dictWord{19, 0, 21},
+ dictWord{10, 10, 68},
+ dictWord{139, 10, 494},
+ dictWord{137, 11, 662},
+ dictWord{4, 11, 13},
+ dictWord{5, 11, 567},
+ dictWord{7, 11, 1498},
+ dictWord{9, 11, 124},
+ dictWord{11, 11, 521},
+ dictWord{140, 11, 405},
+ dictWord{4, 10, 81},
+ dictWord{139, 10, 867},
+ dictWord{135, 11, 1006},
+ dictWord{7, 11, 800},
+ dictWord{7, 11, 1783},
+ dictWord{138, 11, 12},
+ dictWord{9, 0, 295},
+ dictWord{10, 0, 443},
+ dictWord{5, 10, 282},
+ dictWord{8, 10, 650},
+ dictWord{137, 10, 907},
+ dictWord{132, 11, 735},
+ dictWord{4, 11, 170},
+ dictWord{4, 10, 775},
+ dictWord{135, 11, 323},
+ dictWord{6, 0, 1844},
+ dictWord{10, 0, 924},
+ dictWord{11, 11, 844},
+ dictWord{12, 11, 104},
+ dictWord{140, 11, 625},
+ dictWord{5, 11, 304},
+ dictWord{7, 11, 1403},
+ dictWord{140, 11, 498},
+ dictWord{134, 0, 1232},
+ dictWord{4, 0, 519},
+ dictWord{10, 0, 70},
+ dictWord{12, 0, 26},
+ dictWord{14, 0, 17},
+ dictWord{14, 0, 178},
+ dictWord{15, 0, 34},
+ dictWord{149, 0, 12},
+ dictWord{132, 0, 993},
+ dictWord{4, 11, 148},
+ dictWord{133, 11, 742},
+ dictWord{6, 0, 31},
+ dictWord{7, 0, 491},
+ dictWord{7, 0, 530},
+ dictWord{8, 0, 592},
+ dictWord{11, 0, 53},
+ dictWord{11, 0, 779},
+ dictWord{12, 0, 167},
+ dictWord{12, 0, 411},
+ dictWord{14, 0, 14},
+ dictWord{14, 0, 136},
+ dictWord{15, 0, 72},
+ dictWord{16, 0, 17},
+ dictWord{144, 0, 72},
+ dictWord{133, 0, 907},
+ dictWord{134, 0, 733},
+ dictWord{133, 11, 111},
+ dictWord{4, 10, 71},
+ dictWord{5, 10, 376},
+ dictWord{7, 10, 119},
+ dictWord{138, 10, 665},
+ dictWord{136, 0, 55},
+ dictWord{8, 0, 430},
+ dictWord{136, 11, 430},
+ dictWord{4, 0, 208},
+ dictWord{5, 0, 106},
+ dictWord{6, 0, 531},
+ dictWord{8, 0, 408},
+ dictWord{9, 0, 188},
+ dictWord{138, 0, 572},
+ dictWord{12, 0, 56},
+ dictWord{11, 10, 827},
+ dictWord{14, 10, 34},
+ dictWord{143, 10, 148},
+ dictWord{134, 0, 1693},
+ dictWord{133, 11, 444},
+ dictWord{132, 10, 479},
+ dictWord{140, 0, 441},
+ dictWord{9, 0, 449},
+ dictWord{10, 0, 192},
+ dictWord{138, 0, 740},
+ dictWord{134, 0, 928},
+ dictWord{4, 0, 241},
+ dictWord{7, 10, 607},
+ dictWord{136, 10, 99},
+ dictWord{8, 11, 123},
+ dictWord{15, 11, 6},
+ dictWord{144, 11, 7},
+ dictWord{6, 11, 285},
+ dictWord{8, 11, 654},
+ dictWord{11, 11, 749},
+ dictWord{12, 11, 190},
+ dictWord{12, 11, 327},
+ dictWord{13, 11, 120},
+ dictWord{13, 11, 121},
+ dictWord{13, 11, 327},
+ dictWord{15, 11, 47},
+ dictWord{146, 11, 40},
+ dictWord{4, 10, 41},
+ dictWord{5, 10, 74},
+ dictWord{7, 10, 1627},
+ dictWord{11, 10, 871},
+ dictWord{140, 10, 619},
+ dictWord{7, 0, 1525},
+ dictWord{11, 10, 329},
+ dictWord{11, 10, 965},
+ dictWord{12, 10, 241},
+ dictWord{14, 10, 354},
+ dictWord{15, 10, 22},
+ dictWord{148, 10, 63},
+ dictWord{132, 0, 259},
+ dictWord{135, 11, 183},
+ dictWord{9, 10, 209},
+ dictWord{137, 10, 300},
+ dictWord{5, 11, 937},
+ dictWord{135, 11, 100},
+ dictWord{133, 10, 98},
+ dictWord{4, 0, 173},
+ dictWord{5, 0, 312},
+ dictWord{5, 0, 512},
+ dictWord{135, 0, 1285},
+ dictWord{141, 0, 185},
+ dictWord{7, 0, 1603},
+ dictWord{7, 0, 1691},
+ dictWord{9, 0, 464},
+ dictWord{11, 0, 195},
+ dictWord{12, 0, 279},
+ dictWord{12, 0, 448},
+ dictWord{14, 0, 11},
+ dictWord{147, 0, 102},
+ dictWord{135, 0, 1113},
+ dictWord{133, 10, 984},
+ dictWord{4, 0, 452},
+ dictWord{5, 0, 583},
+ dictWord{135, 0, 720},
+ dictWord{4, 0, 547},
+ dictWord{5, 0, 817},
+ dictWord{6, 0, 433},
+ dictWord{7, 0, 593},
+ dictWord{7, 0, 1378},
+ dictWord{8, 0, 161},
+ dictWord{9, 0, 284},
+ dictWord{10, 0, 313},
+ dictWord{139, 0, 886},
+ dictWord{8, 0, 722},
+ dictWord{4, 10, 182},
+ dictWord{6, 10, 205},
+ dictWord{135, 10, 220},
+ dictWord{150, 0, 13},
+ dictWord{4, 10, 42},
+ dictWord{9, 10, 205},
+ dictWord{9, 10, 786},
+ dictWord{138, 10, 659},
+ dictWord{6, 0, 289},
+ dictWord{7, 0, 1670},
+ dictWord{12, 0, 57},
+ dictWord{151, 0, 4},
+ dictWord{132, 10, 635},
+ dictWord{14, 0, 43},
+ dictWord{146, 0, 21},
+ dictWord{139, 10, 533},
+ dictWord{135, 0, 1694},
+ dictWord{8, 0, 420},
+ dictWord{139, 0, 193},
+ dictWord{135, 0, 409},
+ dictWord{132, 10, 371},
+ dictWord{4, 10, 272},
+ dictWord{135, 10, 836},
+ dictWord{5, 10, 825},
+ dictWord{134, 10, 1640},
+ dictWord{5, 11, 251},
+ dictWord{5, 11, 956},
+ dictWord{8, 11, 268},
+ dictWord{9, 11, 214},
+ dictWord{146, 11, 142},
+ dictWord{138, 0, 308},
+ dictWord{6, 0, 1863},
+ dictWord{141, 11, 37},
+ dictWord{137, 10, 879},
+ dictWord{7, 10, 317},
+ dictWord{135, 10, 569},
+ dictWord{132, 11, 294},
+ dictWord{134, 0, 790},
+ dictWord{5, 0, 1002},
+ dictWord{136, 0, 745},
+ dictWord{5, 11, 346},
+ dictWord{5, 11, 711},
+ dictWord{136, 11, 390},
+ dictWord{135, 0, 289},
+ dictWord{5, 0, 504},
+ dictWord{11, 0, 68},
+ dictWord{137, 10, 307},
+ dictWord{4, 0, 239},
+ dictWord{6, 0, 477},
+ dictWord{7, 0, 1607},
+ dictWord{139, 0, 617},
+ dictWord{149, 0, 13},
+ dictWord{133, 0, 609},
+ dictWord{133, 11, 624},
+ dictWord{5, 11, 783},
+ dictWord{7, 11, 1998},
+ dictWord{135, 11, 2047},
+ dictWord{133, 10, 525},
+ dictWord{132, 0, 367},
+ dictWord{132, 11, 594},
+ dictWord{6, 0, 528},
+ dictWord{133, 10, 493},
+ dictWord{4, 10, 174},
+ dictWord{135, 10, 911},
+ dictWord{8, 10, 417},
+ dictWord{137, 10, 782},
+ dictWord{132, 0, 694},
+ dictWord{7, 0, 548},
+ dictWord{137, 0, 58},
+ dictWord{4, 10, 32},
+ dictWord{5, 10, 215},
+ dictWord{6, 10, 269},
+ dictWord{7, 10, 1782},
+ dictWord{7, 10, 1892},
+ dictWord{10, 10, 16},
+ dictWord{11, 10, 822},
+ dictWord{11, 10, 954},
+ dictWord{141, 10, 481},
+ dictWord{140, 0, 687},
+ dictWord{7, 0, 1749},
+ dictWord{136, 10, 477},
+ dictWord{132, 11, 569},
+ dictWord{133, 10, 308},
+ dictWord{135, 10, 1088},
+ dictWord{4, 0, 661},
+ dictWord{138, 0, 1004},
+ dictWord{5, 11, 37},
+ dictWord{6, 11, 39},
+ dictWord{6, 11, 451},
+ dictWord{7, 11, 218},
+ dictWord{7, 11, 667},
+ dictWord{7, 11, 1166},
+ dictWord{7, 11, 1687},
+ dictWord{8, 11, 662},
+ dictWord{144, 11, 2},
+ dictWord{9, 0, 445},
+ dictWord{12, 0, 53},
+ dictWord{13, 0, 492},
+ dictWord{5, 10, 126},
+ dictWord{8, 10, 297},
+ dictWord{9, 10, 366},
+ dictWord{140, 10, 374},
+ dictWord{7, 10, 1551},
+ dictWord{139, 10, 361},
+ dictWord{148, 0, 74},
+ dictWord{134, 11, 508},
+ dictWord{135, 0, 213},
+ dictWord{132, 10, 175},
+ dictWord{132, 10, 685},
+ dictWord{6, 0, 760},
+ dictWord{6, 0, 834},
+ dictWord{134, 0, 1248},
+ dictWord{7, 11, 453},
+ dictWord{7, 11, 635},
+ dictWord{7, 11, 796},
+ dictWord{8, 11, 331},
+ dictWord{9, 11, 328},
+ dictWord{9, 11, 330},
+ dictWord{9, 11, 865},
+ dictWord{10, 11, 119},
+ dictWord{10, 11, 235},
+ dictWord{11, 11, 111},
+ dictWord{11, 11, 129},
+ dictWord{11, 11, 240},
+ dictWord{12, 11, 31},
+ dictWord{12, 11, 66},
+ dictWord{12, 11, 222},
+ dictWord{12, 11, 269},
+ dictWord{12, 11, 599},
+ dictWord{12, 11, 689},
+ dictWord{13, 11, 186},
+ dictWord{13, 11, 364},
+ dictWord{142, 11, 345},
+ dictWord{7, 0, 1672},
+ dictWord{139, 0, 189},
+ dictWord{133, 10, 797},
+ dictWord{133, 10, 565},
+ dictWord{6, 0, 1548},
+ dictWord{6, 11, 98},
+ dictWord{7, 11, 585},
+ dictWord{135, 11, 702},
+ dictWord{9, 0, 968},
+ dictWord{15, 0, 192},
+ dictWord{149, 0, 56},
+ dictWord{4, 10, 252},
+ dictWord{6, 11, 37},
+ dictWord{7, 11, 299},
+ dictWord{7, 10, 1068},
+ dictWord{7, 11, 1666},
+ dictWord{8, 11, 195},
+ dictWord{8, 11, 316},
+ dictWord{9, 11, 178},
+ dictWord{9, 11, 276},
+ dictWord{9, 11, 339},
+ dictWord{9, 11, 536},
+ dictWord{10, 11, 102},
+ dictWord{10, 11, 362},
+ dictWord{10, 10, 434},
+ dictWord{10, 11, 785},
+ dictWord{11, 11, 55},
+ dictWord{11, 11, 149},
+ dictWord{11, 10, 228},
+ dictWord{11, 10, 426},
+ dictWord{11, 11, 773},
+ dictWord{13, 10, 231},
+ dictWord{13, 11, 416},
+ dictWord{13, 11, 419},
+ dictWord{14, 11, 38},
+ dictWord{14, 11, 41},
+ dictWord{14, 11, 210},
+ dictWord{18, 10, 106},
+ dictWord{148, 10, 87},
+ dictWord{4, 0, 751},
+ dictWord{11, 0, 390},
+ dictWord{140, 0, 32},
+ dictWord{4, 0, 409},
+ dictWord{133, 0, 78},
+ dictWord{11, 11, 458},
+ dictWord{12, 11, 15},
+ dictWord{140, 11, 432},
+ dictWord{7, 0, 1602},
+ dictWord{10, 0, 257},
+ dictWord{10, 0, 698},
+ dictWord{11, 0, 544},
+ dictWord{11, 0, 585},
+ dictWord{12, 0, 212},
+ dictWord{13, 0, 307},
+ dictWord{5, 10, 231},
+ dictWord{7, 10, 601},
+ dictWord{9, 10, 277},
+ dictWord{9, 10, 674},
+ dictWord{10, 10, 178},
+ dictWord{10, 10, 418},
+ dictWord{10, 10, 509},
+ dictWord{11, 10, 531},
+ dictWord{12, 10, 113},
+ dictWord{12, 10, 475},
+ dictWord{13, 10, 99},
+ dictWord{142, 10, 428},
+ dictWord{6, 0, 473},
+ dictWord{145, 0, 105},
+ dictWord{6, 0, 1949},
+ dictWord{15, 0, 156},
+ dictWord{133, 11, 645},
+ dictWord{7, 10, 1591},
+ dictWord{144, 10, 43},
+ dictWord{135, 0, 1779},
+ dictWord{135, 10, 1683},
+ dictWord{4, 11, 290},
+ dictWord{135, 11, 1356},
+ dictWord{134, 0, 763},
+ dictWord{6, 11, 70},
+ dictWord{7, 11, 1292},
+ dictWord{10, 11, 762},
+ dictWord{139, 11, 288},
+ dictWord{142, 0, 29},
+ dictWord{140, 11, 428},
+ dictWord{7, 0, 883},
+ dictWord{7, 11, 131},
+ dictWord{7, 11, 422},
+ dictWord{8, 11, 210},
+ dictWord{140, 11, 573},
+ dictWord{134, 0, 488},
+ dictWord{4, 10, 399},
+ dictWord{5, 10, 119},
+ dictWord{5, 10, 494},
+ dictWord{7, 10, 751},
+ dictWord{137, 10, 556},
+ dictWord{133, 0, 617},
+ dictWord{132, 11, 936},
+ dictWord{139, 0, 50},
+ dictWord{7, 0, 1518},
+ dictWord{139, 0, 694},
+ dictWord{137, 0, 785},
+ dictWord{4, 0, 546},
+ dictWord{135, 0, 2042},
+ dictWord{7, 11, 716},
+ dictWord{13, 11, 97},
+ dictWord{141, 11, 251},
+ dictWord{132, 11, 653},
+ dictWord{145, 0, 22},
+ dictWord{134, 0, 1016},
+ dictWord{4, 0, 313},
+ dictWord{133, 0, 577},
+ dictWord{136, 11, 657},
+ dictWord{8, 0, 184},
+ dictWord{141, 0, 433},
+ dictWord{135, 0, 935},
+ dictWord{6, 0, 720},
+ dictWord{9, 0, 114},
+ dictWord{146, 11, 80},
+ dictWord{12, 0, 186},
+ dictWord{12, 0, 292},
+ dictWord{14, 0, 100},
+ dictWord{18, 0, 70},
+ dictWord{7, 10, 594},
+ dictWord{7, 10, 851},
+ dictWord{7, 10, 1858},
+ dictWord{9, 10, 411},
+ dictWord{9, 10, 574},
+ dictWord{9, 10, 666},
+ dictWord{9, 10, 737},
+ dictWord{10, 10, 346},
+ dictWord{10, 10, 712},
+ dictWord{11, 10, 246},
+ dictWord{11, 10, 432},
+ dictWord{11, 10, 517},
+ dictWord{11, 10, 647},
+ dictWord{11, 10, 679},
+ dictWord{11, 10, 727},
+ dictWord{12, 10, 304},
+ dictWord{12, 10, 305},
+ dictWord{12, 10, 323},
+ dictWord{12, 10, 483},
+ dictWord{12, 10, 572},
+ dictWord{12, 10, 593},
+ dictWord{12, 10, 602},
+ dictWord{13, 10, 95},
+ dictWord{13, 10, 101},
+ dictWord{13, 10, 171},
+ dictWord{13, 10, 315},
+ dictWord{13, 10, 378},
+ dictWord{13, 10, 425},
+ dictWord{13, 10, 475},
+ dictWord{14, 10, 63},
+ dictWord{14, 10, 380},
+ dictWord{14, 10, 384},
+ dictWord{15, 10, 133},
+ dictWord{18, 10, 112},
+ dictWord{148, 10, 72},
+ dictWord{135, 10, 1093},
+ dictWord{135, 11, 1836},
+ dictWord{132, 10, 679},
+ dictWord{137, 10, 203},
+ dictWord{11, 0, 402},
+ dictWord{12, 0, 109},
+ dictWord{12, 0, 431},
+ dictWord{13, 0, 179},
+ dictWord{13, 0, 206},
+ dictWord{14, 0, 217},
+ dictWord{16, 0, 3},
+ dictWord{148, 0, 53},
+ dictWord{7, 11, 1368},
+ dictWord{8, 11, 232},
+ dictWord{8, 11, 361},
+ dictWord{10, 11, 682},
+ dictWord{138, 11, 742},
+ dictWord{137, 10, 714},
+ dictWord{5, 0, 886},
+ dictWord{6, 0, 46},
+ dictWord{6, 0, 1790},
+ dictWord{7, 0, 14},
+ dictWord{7, 0, 732},
+ dictWord{7, 0, 1654},
+ dictWord{8, 0, 95},
+ dictWord{8, 0, 327},
+ dictWord{8, 0, 616},
+ dictWord{9, 0, 892},
+ dictWord{10, 0, 598},
+ dictWord{10, 0, 769},
+ dictWord{11, 0, 134},
+ dictWord{11, 0, 747},
+ dictWord{12, 0, 378},
+ dictWord{14, 0, 97},
+ dictWord{137, 11, 534},
+ dictWord{4, 0, 969},
+ dictWord{136, 10, 825},
+ dictWord{137, 11, 27},
+ dictWord{6, 0, 727},
+ dictWord{142, 11, 12},
+ dictWord{133, 0, 1021},
+ dictWord{134, 0, 1190},
+ dictWord{134, 11, 1657},
+ dictWord{5, 10, 143},
+ dictWord{5, 10, 769},
+ dictWord{6, 10, 1760},
+ dictWord{7, 10, 682},
+ dictWord{7, 10, 1992},
+ dictWord{136, 10, 736},
+ dictWord{132, 0, 153},
+ dictWord{135, 11, 127},
+ dictWord{133, 0, 798},
+ dictWord{132, 0, 587},
+ dictWord{6, 0, 598},
+ dictWord{7, 0, 42},
+ dictWord{8, 0, 695},
+ dictWord{10, 0, 212},
+ dictWord{11, 0, 158},
+ dictWord{14, 0, 196},
+ dictWord{145, 0, 85},
+ dictWord{133, 10, 860},
+ dictWord{6, 0, 1929},
+ dictWord{134, 0, 1933},
+ dictWord{5, 0, 957},
+ dictWord{5, 0, 1008},
+ dictWord{9, 0, 577},
+ dictWord{12, 0, 141},
+ dictWord{6, 10, 422},
+ dictWord{7, 10, 0},
+ dictWord{7, 10, 1544},
+ dictWord{8, 11, 364},
+ dictWord{11, 10, 990},
+ dictWord{12, 10, 453},
+ dictWord{13, 10, 47},
+ dictWord{141, 10, 266},
+ dictWord{134, 0, 1319},
+ dictWord{4, 0, 129},
+ dictWord{135, 0, 465},
+ dictWord{7, 0, 470},
+ dictWord{7, 0, 1057},
+ dictWord{7, 0, 1201},
+ dictWord{9, 0, 755},
+ dictWord{11, 0, 906},
+ dictWord{140, 0, 527},
+ dictWord{7, 0, 908},
+ dictWord{146, 0, 7},
+ dictWord{5, 0, 148},
+ dictWord{136, 0, 450},
+ dictWord{5, 10, 515},
+ dictWord{137, 10, 131},
+ dictWord{7, 10, 1605},
+ dictWord{11, 10, 962},
+ dictWord{146, 10, 139},
+ dictWord{132, 10, 646},
+ dictWord{134, 0, 1166},
+ dictWord{4, 10, 396},
+ dictWord{7, 10, 728},
+ dictWord{9, 10, 117},
+ dictWord{13, 10, 202},
+ dictWord{148, 10, 51},
+ dictWord{6, 10, 121},
+ dictWord{6, 10, 124},
+ dictWord{6, 10, 357},
+ dictWord{7, 10, 1138},
+ dictWord{7, 10, 1295},
+ dictWord{8, 10, 162},
+ dictWord{139, 10, 655},
+ dictWord{14, 0, 374},
+ dictWord{142, 11, 374},
+ dictWord{138, 0, 253},
+ dictWord{139, 0, 1003},
+ dictWord{5, 11, 909},
+ dictWord{9, 11, 849},
+ dictWord{138, 11, 805},
+ dictWord{133, 10, 237},
+ dictWord{7, 11, 525},
+ dictWord{7, 11, 1579},
+ dictWord{8, 11, 497},
+ dictWord{136, 11, 573},
+ dictWord{137, 0, 46},
+ dictWord{132, 0, 879},
+ dictWord{134, 0, 806},
+ dictWord{135, 0, 1868},
+ dictWord{6, 0, 1837},
+ dictWord{134, 0, 1846},
+ dictWord{6, 0, 730},
+ dictWord{134, 0, 881},
+ dictWord{7, 0, 965},
+ dictWord{7, 0, 1460},
+ dictWord{7, 0, 1604},
+ dictWord{7, 11, 193},
+ dictWord{7, 11, 397},
+ dictWord{7, 11, 1105},
+ dictWord{8, 11, 124},
+ dictWord{8, 11, 619},
+ dictWord{9, 11, 305},
+ dictWord{10, 11, 264},
+ dictWord{11, 11, 40},
+ dictWord{12, 11, 349},
+ dictWord{13, 11, 134},
+ dictWord{13, 11, 295},
+ dictWord{14, 11, 155},
+ dictWord{15, 11, 120},
+ dictWord{146, 11, 105},
+ dictWord{136, 0, 506},
+ dictWord{143, 0, 10},
+ dictWord{4, 11, 262},
+ dictWord{7, 11, 342},
+ dictWord{7, 10, 571},
+ dictWord{7, 10, 1877},
+ dictWord{10, 10, 366},
+ dictWord{141, 11, 23},
+ dictWord{133, 11, 641},
+ dictWord{10, 0, 22},
+ dictWord{9, 10, 513},
+ dictWord{10, 10, 39},
+ dictWord{12, 10, 122},
+ dictWord{140, 10, 187},
+ dictWord{135, 11, 1431},
+ dictWord{150, 11, 49},
+ dictWord{4, 11, 99},
+ dictWord{6, 11, 250},
+ dictWord{6, 11, 346},
+ dictWord{8, 11, 127},
+ dictWord{138, 11, 81},
+ dictWord{6, 0, 2014},
+ dictWord{8, 0, 928},
+ dictWord{10, 0, 960},
+ dictWord{10, 0, 979},
+ dictWord{140, 0, 996},
+ dictWord{134, 0, 296},
+ dictWord{132, 11, 915},
+ dictWord{5, 11, 75},
+ dictWord{9, 11, 517},
+ dictWord{10, 11, 470},
+ dictWord{12, 11, 155},
+ dictWord{141, 11, 224},
+ dictWord{137, 10, 873},
+ dictWord{4, 0, 854},
+ dictWord{140, 11, 18},
+ dictWord{134, 0, 587},
+ dictWord{7, 10, 107},
+ dictWord{7, 10, 838},
+ dictWord{8, 10, 550},
+ dictWord{138, 10, 401},
+ dictWord{11, 0, 636},
+ dictWord{15, 0, 145},
+ dictWord{17, 0, 34},
+ dictWord{19, 0, 50},
+ dictWord{23, 0, 20},
+ dictWord{11, 10, 588},
+ dictWord{11, 10, 864},
+ dictWord{11, 10, 968},
+ dictWord{143, 10, 160},
+ dictWord{135, 11, 216},
+ dictWord{7, 0, 982},
+ dictWord{10, 0, 32},
+ dictWord{143, 0, 56},
+ dictWord{133, 10, 768},
+ dictWord{133, 11, 954},
+ dictWord{6, 11, 304},
+ dictWord{7, 11, 1114},
+ dictWord{8, 11, 418},
+ dictWord{10, 11, 345},
+ dictWord{11, 11, 341},
+ dictWord{11, 11, 675},
+ dictWord{141, 11, 40},
+ dictWord{9, 11, 410},
+ dictWord{139, 11, 425},
+ dictWord{136, 0, 941},
+ dictWord{5, 0, 435},
+ dictWord{132, 10, 894},
+ dictWord{5, 0, 85},
+ dictWord{6, 0, 419},
+ dictWord{7, 0, 134},
+ dictWord{7, 0, 305},
+ dictWord{7, 0, 361},
+ dictWord{7, 0, 1337},
+ dictWord{8, 0, 71},
+ dictWord{140, 0, 519},
+ dictWord{140, 0, 688},
+ dictWord{135, 0, 740},
+ dictWord{5, 0, 691},
+ dictWord{7, 0, 345},
+ dictWord{9, 0, 94},
+ dictWord{140, 0, 169},
+ dictWord{5, 0, 183},
+ dictWord{6, 0, 582},
+ dictWord{10, 0, 679},
+ dictWord{140, 0, 435},
+ dictWord{134, 11, 14},
+ dictWord{6, 0, 945},
+ dictWord{135, 0, 511},
+ dictWord{134, 11, 1708},
+ dictWord{5, 11, 113},
+ dictWord{6, 11, 243},
+ dictWord{7, 11, 1865},
+ dictWord{11, 11, 161},
+ dictWord{16, 11, 37},
+ dictWord{145, 11, 99},
+ dictWord{132, 11, 274},
+ dictWord{137, 0, 539},
+ dictWord{7, 0, 1993},
+ dictWord{8, 0, 684},
+ dictWord{134, 10, 272},
+ dictWord{6, 0, 659},
+ dictWord{134, 0, 982},
+ dictWord{4, 10, 9},
+ dictWord{5, 10, 128},
+ dictWord{7, 10, 368},
+ dictWord{11, 10, 480},
+ dictWord{148, 10, 3},
+ dictWord{134, 0, 583},
+ dictWord{132, 0, 803},
+ dictWord{133, 0, 704},
+ dictWord{4, 0, 179},
+ dictWord{5, 0, 198},
+ dictWord{133, 0, 697},
+ dictWord{7, 0, 347},
+ dictWord{7, 0, 971},
+ dictWord{8, 0, 181},
+ dictWord{10, 0, 711},
+ dictWord{135, 11, 166},
+ dictWord{136, 10, 682},
+ dictWord{4, 10, 2},
+ dictWord{7, 10, 545},
+ dictWord{7, 10, 894},
+ dictWord{136, 11, 521},
+ dictWord{135, 0, 481},
+ dictWord{132, 0, 243},
+ dictWord{5, 0, 203},
+ dictWord{7, 0, 19},
+ dictWord{7, 0, 71},
+ dictWord{7, 0, 113},
+ dictWord{10, 0, 405},
+ dictWord{11, 0, 357},
+ dictWord{142, 0, 240},
+ dictWord{5, 11, 725},
+ dictWord{5, 11, 727},
+ dictWord{135, 11, 1811},
+ dictWord{6, 0, 826},
+ dictWord{137, 11, 304},
+ dictWord{7, 0, 1450},
+ dictWord{139, 0, 99},
+ dictWord{133, 11, 654},
+ dictWord{134, 0, 492},
+ dictWord{5, 0, 134},
+ dictWord{6, 0, 408},
+ dictWord{6, 0, 495},
+ dictWord{7, 0, 1593},
+ dictWord{6, 11, 273},
+ dictWord{10, 11, 188},
+ dictWord{13, 11, 377},
+ dictWord{146, 11, 77},
+ dictWord{9, 10, 769},
+ dictWord{140, 10, 185},
+ dictWord{135, 11, 410},
+ dictWord{142, 0, 4},
+ dictWord{4, 0, 665},
+ dictWord{134, 11, 1785},
+ dictWord{4, 0, 248},
+ dictWord{7, 0, 137},
+ dictWord{137, 0, 349},
+ dictWord{5, 10, 530},
+ dictWord{142, 10, 113},
+ dictWord{7, 0, 1270},
+ dictWord{139, 0, 612},
+ dictWord{132, 11, 780},
+ dictWord{5, 0, 371},
+ dictWord{135, 0, 563},
+ dictWord{135, 0, 826},
+ dictWord{6, 0, 1535},
+ dictWord{23, 0, 21},
+ dictWord{151, 0, 23},
+ dictWord{4, 0, 374},
+ dictWord{7, 0, 547},
+ dictWord{7, 0, 1700},
+ dictWord{7, 0, 1833},
+ dictWord{139, 0, 858},
+ dictWord{133, 10, 556},
+ dictWord{7, 11, 612},
+ dictWord{8, 11, 545},
+ dictWord{8, 11, 568},
+ dictWord{8, 11, 642},
+ dictWord{9, 11, 717},
+ dictWord{10, 11, 541},
+ dictWord{10, 11, 763},
+ dictWord{11, 11, 449},
+ dictWord{12, 11, 489},
+ dictWord{13, 11, 153},
+ dictWord{13, 11, 296},
+ dictWord{14, 11, 138},
+ dictWord{14, 11, 392},
+ dictWord{15, 11, 50},
+ dictWord{16, 11, 6},
+ dictWord{16, 11, 12},
+ dictWord{148, 11, 9},
+ dictWord{9, 0, 311},
+ dictWord{141, 0, 42},
+ dictWord{8, 10, 16},
+ dictWord{140, 10, 568},
+ dictWord{6, 0, 1968},
+ dictWord{6, 0, 2027},
+ dictWord{138, 0, 991},
+ dictWord{6, 0, 1647},
+ dictWord{7, 0, 1552},
+ dictWord{7, 0, 2010},
+ dictWord{9, 0, 494},
+ dictWord{137, 0, 509},
+ dictWord{133, 11, 948},
+ dictWord{6, 10, 186},
+ dictWord{137, 10, 426},
+ dictWord{134, 0, 769},
+ dictWord{134, 0, 642},
+ dictWord{132, 10, 585},
+ dictWord{6, 0, 123},
+ dictWord{7, 0, 214},
+ dictWord{9, 0, 728},
+ dictWord{10, 0, 157},
+ dictWord{11, 0, 346},
+ dictWord{11, 0, 662},
+ dictWord{143, 0, 106},
+ dictWord{142, 11, 381},
+ dictWord{135, 0, 1435},
+ dictWord{4, 11, 532},
+ dictWord{5, 11, 706},
+ dictWord{135, 11, 662},
+ dictWord{5, 11, 837},
+ dictWord{134, 11, 1651},
+ dictWord{4, 10, 93},
+ dictWord{5, 10, 252},
+ dictWord{6, 10, 229},
+ dictWord{7, 10, 291},
+ dictWord{9, 10, 550},
+ dictWord{139, 10, 644},
+ dictWord{148, 0, 79},
+ dictWord{137, 10, 749},
+ dictWord{134, 0, 1425},
+ dictWord{137, 10, 162},
+ dictWord{4, 11, 362},
+ dictWord{7, 11, 52},
+ dictWord{7, 11, 303},
+ dictWord{140, 11, 166},
+ dictWord{132, 10, 381},
+ dictWord{4, 11, 330},
+ dictWord{7, 11, 933},
+ dictWord{7, 11, 2012},
+ dictWord{136, 11, 292},
+ dictWord{135, 11, 767},
+ dictWord{4, 0, 707},
+ dictWord{5, 0, 588},
+ dictWord{6, 0, 393},
+ dictWord{13, 0, 106},
+ dictWord{18, 0, 49},
+ dictWord{147, 0, 41},
+ dictWord{6, 0, 211},
+ dictWord{7, 0, 1690},
+ dictWord{11, 0, 486},
+ dictWord{140, 0, 369},
+ dictWord{137, 11, 883},
+ dictWord{4, 11, 703},
+ dictWord{135, 11, 207},
+ dictWord{4, 0, 187},
+ dictWord{5, 0, 184},
+ dictWord{5, 0, 690},
+ dictWord{7, 0, 1869},
+ dictWord{10, 0, 756},
+ dictWord{139, 0, 783},
+ dictWord{132, 11, 571},
+ dictWord{134, 0, 1382},
+ dictWord{5, 0, 175},
+ dictWord{6, 10, 77},
+ dictWord{6, 10, 157},
+ dictWord{7, 10, 974},
+ dictWord{7, 10, 1301},
+ dictWord{7, 10, 1339},
+ dictWord{7, 10, 1490},
+ dictWord{7, 10, 1873},
+ dictWord{137, 10, 628},
+ dictWord{134, 0, 1493},
+ dictWord{5, 11, 873},
+ dictWord{133, 11, 960},
+ dictWord{134, 0, 1007},
+ dictWord{12, 11, 93},
+ dictWord{12, 11, 501},
+ dictWord{13, 11, 362},
+ dictWord{14, 11, 151},
+ dictWord{15, 11, 40},
+ dictWord{15, 11, 59},
+ dictWord{16, 11, 46},
+ dictWord{17, 11, 25},
+ dictWord{18, 11, 14},
+ dictWord{18, 11, 134},
+ dictWord{19, 11, 25},
+ dictWord{19, 11, 69},
+ dictWord{20, 11, 16},
+ dictWord{20, 11, 19},
+ dictWord{20, 11, 66},
+ dictWord{21, 11, 23},
+ dictWord{21, 11, 25},
+ dictWord{150, 11, 42},
+ dictWord{11, 10, 919},
+ dictWord{141, 10, 409},
+ dictWord{134, 0, 219},
+ dictWord{5, 0, 582},
+ dictWord{6, 0, 1646},
+ dictWord{7, 0, 99},
+ dictWord{7, 0, 1962},
+ dictWord{7, 0, 1986},
+ dictWord{8, 0, 515},
+ dictWord{8, 0, 773},
+ dictWord{9, 0, 23},
+ dictWord{9, 0, 491},
+ dictWord{12, 0, 620},
+ dictWord{142, 0, 93},
+ dictWord{133, 0, 851},
+ dictWord{5, 11, 33},
+ dictWord{134, 11, 470},
+ dictWord{135, 11, 1291},
+ dictWord{134, 0, 1278},
+ dictWord{135, 11, 1882},
+ dictWord{135, 10, 1489},
+ dictWord{132, 0, 1000},
+ dictWord{138, 0, 982},
+ dictWord{8, 0, 762},
+ dictWord{8, 0, 812},
+ dictWord{137, 0, 910},
+ dictWord{6, 11, 47},
+ dictWord{7, 11, 90},
+ dictWord{7, 11, 664},
+ dictWord{7, 11, 830},
+ dictWord{7, 11, 1380},
+ dictWord{7, 11, 2025},
+ dictWord{8, 11, 448},
+ dictWord{136, 11, 828},
+ dictWord{4, 0, 98},
+ dictWord{4, 0, 940},
+ dictWord{6, 0, 1819},
+ dictWord{6, 0, 1834},
+ dictWord{6, 0, 1841},
+ dictWord{7, 0, 1365},
+ dictWord{8, 0, 859},
+ dictWord{8, 0, 897},
+ dictWord{8, 0, 918},
+ dictWord{9, 0, 422},
+ dictWord{9, 0, 670},
+ dictWord{10, 0, 775},
+ dictWord{10, 0, 894},
+ dictWord{10, 0, 909},
+ dictWord{10, 0, 910},
+ dictWord{10, 0, 935},
+ dictWord{11, 0, 210},
+ dictWord{12, 0, 750},
+ dictWord{12, 0, 755},
+ dictWord{13, 0, 26},
+ dictWord{13, 0, 457},
+ dictWord{13, 0, 476},
+ dictWord{16, 0, 100},
+ dictWord{16, 0, 109},
+ dictWord{18, 0, 173},
+ dictWord{18, 0, 175},
+ dictWord{8, 10, 398},
+ dictWord{9, 10, 681},
+ dictWord{139, 10, 632},
+ dictWord{9, 11, 417},
+ dictWord{137, 11, 493},
+ dictWord{136, 10, 645},
+ dictWord{138, 0, 906},
+ dictWord{134, 0, 1730},
+ dictWord{134, 10, 20},
+ dictWord{133, 11, 1019},
+ dictWord{134, 0, 1185},
+ dictWord{10, 0, 40},
+ dictWord{136, 10, 769},
+ dictWord{9, 0, 147},
+ dictWord{134, 11, 208},
+ dictWord{140, 0, 650},
+ dictWord{5, 0, 209},
+ dictWord{6, 0, 30},
+ dictWord{11, 0, 56},
+ dictWord{139, 0, 305},
+ dictWord{132, 0, 553},
+ dictWord{138, 11, 344},
+ dictWord{6, 11, 68},
+ dictWord{7, 11, 398},
+ dictWord{7, 11, 448},
+ dictWord{7, 11, 1629},
+ dictWord{7, 11, 1813},
+ dictWord{8, 11, 387},
+ dictWord{8, 11, 442},
+ dictWord{9, 11, 710},
+ dictWord{10, 11, 282},
+ dictWord{138, 11, 722},
+ dictWord{5, 0, 597},
+ dictWord{14, 0, 20},
+ dictWord{142, 11, 20},
+ dictWord{135, 0, 1614},
+ dictWord{135, 10, 1757},
+ dictWord{4, 0, 150},
+ dictWord{5, 0, 303},
+ dictWord{6, 0, 327},
+ dictWord{135, 10, 937},
+ dictWord{16, 0, 49},
+ dictWord{7, 10, 1652},
+ dictWord{144, 11, 49},
+ dictWord{8, 0, 192},
+ dictWord{10, 0, 78},
+ dictWord{141, 0, 359},
+ dictWord{135, 0, 786},
+ dictWord{143, 0, 134},
+ dictWord{6, 0, 1638},
+ dictWord{7, 0, 79},
+ dictWord{7, 0, 496},
+ dictWord{9, 0, 138},
+ dictWord{10, 0, 336},
+ dictWord{11, 0, 12},
+ dictWord{12, 0, 412},
+ dictWord{12, 0, 440},
+ dictWord{142, 0, 305},
+ dictWord{136, 11, 491},
+ dictWord{4, 10, 579},
+ dictWord{5, 10, 226},
+ dictWord{5, 10, 323},
+ dictWord{135, 10, 960},
+ dictWord{7, 0, 204},
+ dictWord{7, 0, 415},
+ dictWord{8, 0, 42},
+ dictWord{10, 0, 85},
+ dictWord{139, 0, 564},
+ dictWord{132, 0, 614},
+ dictWord{4, 11, 403},
+ dictWord{5, 11, 441},
+ dictWord{7, 11, 450},
+ dictWord{11, 11, 101},
+ dictWord{12, 11, 193},
+ dictWord{141, 11, 430},
+ dictWord{135, 11, 1927},
+ dictWord{135, 11, 1330},
+ dictWord{4, 0, 3},
+ dictWord{5, 0, 247},
+ dictWord{5, 0, 644},
+ dictWord{7, 0, 744},
+ dictWord{7, 0, 1207},
+ dictWord{7, 0, 1225},
+ dictWord{7, 0, 1909},
+ dictWord{146, 0, 147},
+ dictWord{136, 0, 942},
+ dictWord{4, 0, 1019},
+ dictWord{134, 0, 2023},
+ dictWord{5, 11, 679},
+ dictWord{133, 10, 973},
+ dictWord{5, 0, 285},
+ dictWord{9, 0, 67},
+ dictWord{13, 0, 473},
+ dictWord{143, 0, 82},
+ dictWord{7, 11, 328},
+ dictWord{137, 11, 326},
+ dictWord{151, 0, 8},
+ dictWord{6, 10, 135},
+ dictWord{135, 10, 1176},
+ dictWord{135, 11, 1128},
+ dictWord{134, 0, 1309},
+ dictWord{135, 11, 1796},
+ dictWord{135, 10, 314},
+ dictWord{4, 11, 574},
+ dictWord{7, 11, 350},
+ dictWord{7, 11, 1024},
+ dictWord{8, 11, 338},
+ dictWord{9, 11, 677},
+ dictWord{10, 11, 808},
+ dictWord{139, 11, 508},
+ dictWord{7, 11, 818},
+ dictWord{17, 11, 14},
+ dictWord{17, 11, 45},
+ dictWord{18, 11, 75},
+ dictWord{148, 11, 18},
+ dictWord{146, 10, 4},
+ dictWord{135, 11, 1081},
+ dictWord{4, 0, 29},
+ dictWord{6, 0, 532},
+ dictWord{7, 0, 1628},
+ dictWord{7, 0, 1648},
+ dictWord{9, 0, 350},
+ dictWord{10, 0, 433},
+ dictWord{11, 0, 97},
+ dictWord{11, 0, 557},
+ dictWord{11, 0, 745},
+ dictWord{12, 0, 289},
+ dictWord{12, 0, 335},
+ dictWord{12, 0, 348},
+ dictWord{12, 0, 606},
+ dictWord{13, 0, 116},
+ dictWord{13, 0, 233},
+ dictWord{13, 0, 466},
+ dictWord{14, 0, 181},
+ dictWord{14, 0, 209},
+ dictWord{14, 0, 232},
+ dictWord{14, 0, 236},
+ dictWord{14, 0, 300},
+ dictWord{16, 0, 41},
+ dictWord{148, 0, 97},
+ dictWord{7, 0, 318},
+ dictWord{6, 10, 281},
+ dictWord{8, 10, 282},
+ dictWord{8, 10, 480},
+ dictWord{8, 10, 499},
+ dictWord{9, 10, 198},
+ dictWord{10, 10, 143},
+ dictWord{10, 10, 169},
+ dictWord{10, 10, 211},
+ dictWord{10, 10, 417},
+ dictWord{10, 10, 574},
+ dictWord{11, 10, 147},
+ dictWord{11, 10, 395},
+ dictWord{12, 10, 75},
+ dictWord{12, 10, 407},
+ dictWord{12, 10, 608},
+ dictWord{13, 10, 500},
+ dictWord{142, 10, 251},
+ dictWord{135, 11, 1676},
+ dictWord{135, 11, 2037},
+ dictWord{135, 0, 1692},
+ dictWord{5, 0, 501},
+ dictWord{7, 0, 1704},
+ dictWord{9, 0, 553},
+ dictWord{11, 0, 520},
+ dictWord{12, 0, 557},
+ dictWord{141, 0, 249},
+ dictWord{6, 0, 1527},
+ dictWord{14, 0, 324},
+ dictWord{15, 0, 55},
+ dictWord{15, 0, 80},
+ dictWord{14, 11, 324},
+ dictWord{15, 11, 55},
+ dictWord{143, 11, 80},
+ dictWord{135, 10, 1776},
+ dictWord{8, 0, 988},
+ dictWord{137, 11, 297},
+ dictWord{132, 10, 419},
+ dictWord{142, 0, 223},
+ dictWord{139, 11, 234},
+ dictWord{7, 0, 1123},
+ dictWord{12, 0, 508},
+ dictWord{14, 0, 102},
+ dictWord{14, 0, 226},
+ dictWord{144, 0, 57},
+ dictWord{4, 10, 138},
+ dictWord{7, 10, 1012},
+ dictWord{7, 10, 1280},
+ dictWord{137, 10, 76},
+ dictWord{7, 0, 1764},
+ dictWord{5, 10, 29},
+ dictWord{140, 10, 638},
+ dictWord{134, 0, 2015},
+ dictWord{134, 0, 1599},
+ dictWord{138, 11, 56},
+ dictWord{6, 11, 306},
+ dictWord{7, 11, 1140},
+ dictWord{7, 11, 1340},
+ dictWord{8, 11, 133},
+ dictWord{138, 11, 449},
+ dictWord{139, 11, 1011},
+ dictWord{6, 10, 1710},
+ dictWord{135, 10, 2038},
+ dictWord{7, 11, 1763},
+ dictWord{140, 11, 310},
+ dictWord{6, 0, 129},
+ dictWord{4, 10, 17},
+ dictWord{5, 10, 23},
+ dictWord{7, 10, 995},
+ dictWord{11, 10, 383},
+ dictWord{11, 10, 437},
+ dictWord{12, 10, 460},
+ dictWord{140, 10, 532},
+ dictWord{5, 11, 329},
+ dictWord{136, 11, 260},
+ dictWord{133, 10, 862},
+ dictWord{132, 0, 534},
+ dictWord{6, 0, 811},
+ dictWord{135, 0, 626},
+ dictWord{132, 11, 657},
+ dictWord{4, 0, 25},
+ dictWord{5, 0, 60},
+ dictWord{6, 0, 504},
+ dictWord{7, 0, 614},
+ dictWord{7, 0, 1155},
+ dictWord{12, 0, 0},
+ dictWord{152, 11, 7},
+ dictWord{7, 0, 1248},
+ dictWord{11, 0, 621},
+ dictWord{139, 0, 702},
+ dictWord{137, 0, 321},
+ dictWord{8, 10, 70},
+ dictWord{12, 10, 171},
+ dictWord{141, 10, 272},
+ dictWord{10, 10, 233},
+ dictWord{139, 10, 76},
+ dictWord{4, 0, 379},
+ dictWord{7, 0, 1397},
+ dictWord{134, 10, 442},
+ dictWord{5, 11, 66},
+ dictWord{7, 11, 1896},
+ dictWord{136, 11, 288},
+ dictWord{134, 11, 1643},
+ dictWord{134, 10, 1709},
+ dictWord{4, 11, 21},
+ dictWord{5, 11, 91},
+ dictWord{5, 11, 570},
+ dictWord{5, 11, 648},
+ dictWord{5, 11, 750},
+ dictWord{5, 11, 781},
+ dictWord{6, 11, 54},
+ dictWord{6, 11, 112},
+ dictWord{6, 11, 402},
+ dictWord{6, 11, 1732},
+ dictWord{7, 11, 315},
+ dictWord{7, 11, 749},
+ dictWord{7, 11, 1347},
+ dictWord{7, 11, 1900},
+ dictWord{9, 11, 78},
+ dictWord{9, 11, 508},
+ dictWord{10, 11, 611},
+ dictWord{11, 11, 510},
+ dictWord{11, 11, 728},
+ dictWord{13, 11, 36},
+ dictWord{14, 11, 39},
+ dictWord{16, 11, 83},
+ dictWord{17, 11, 124},
+ dictWord{148, 11, 30},
+ dictWord{4, 0, 118},
+ dictWord{6, 0, 274},
+ dictWord{6, 0, 361},
+ dictWord{7, 0, 75},
+ dictWord{141, 0, 441},
+ dictWord{10, 11, 322},
+ dictWord{10, 11, 719},
+ dictWord{139, 11, 407},
+ dictWord{147, 10, 119},
+ dictWord{12, 11, 549},
+ dictWord{14, 11, 67},
+ dictWord{147, 11, 60},
+ dictWord{11, 10, 69},
+ dictWord{12, 10, 105},
+ dictWord{12, 10, 117},
+ dictWord{13, 10, 213},
+ dictWord{14, 10, 13},
+ dictWord{14, 10, 62},
+ dictWord{14, 10, 177},
+ dictWord{14, 10, 421},
+ dictWord{15, 10, 19},
+ dictWord{146, 10, 141},
+ dictWord{9, 0, 841},
+ dictWord{137, 10, 309},
+ dictWord{7, 10, 608},
+ dictWord{7, 10, 976},
+ dictWord{8, 11, 125},
+ dictWord{8, 11, 369},
+ dictWord{8, 11, 524},
+ dictWord{9, 10, 146},
+ dictWord{10, 10, 206},
+ dictWord{10, 11, 486},
+ dictWord{10, 10, 596},
+ dictWord{11, 11, 13},
+ dictWord{11, 11, 381},
+ dictWord{11, 11, 736},
+ dictWord{11, 11, 766},
+ dictWord{11, 11, 845},
+ dictWord{13, 11, 114},
+ dictWord{13, 10, 218},
+ dictWord{13, 11, 292},
+ dictWord{14, 11, 47},
+ dictWord{142, 10, 153},
+ dictWord{12, 0, 693},
+ dictWord{135, 11, 759},
+ dictWord{5, 0, 314},
+ dictWord{6, 0, 221},
+ dictWord{7, 0, 419},
+ dictWord{10, 0, 650},
+ dictWord{11, 0, 396},
+ dictWord{12, 0, 156},
+ dictWord{13, 0, 369},
+ dictWord{14, 0, 333},
+ dictWord{145, 0, 47},
+ dictWord{6, 11, 1684},
+ dictWord{6, 11, 1731},
+ dictWord{7, 11, 356},
+ dictWord{7, 11, 1932},
+ dictWord{8, 11, 54},
+ dictWord{8, 11, 221},
+ dictWord{9, 11, 225},
+ dictWord{9, 11, 356},
+ dictWord{10, 11, 77},
+ dictWord{10, 11, 446},
+ dictWord{10, 11, 731},
+ dictWord{12, 11, 404},
+ dictWord{141, 11, 491},
+ dictWord{132, 11, 375},
+ dictWord{4, 10, 518},
+ dictWord{135, 10, 1136},
+ dictWord{4, 0, 913},
+ dictWord{4, 11, 411},
+ dictWord{11, 11, 643},
+ dictWord{140, 11, 115},
+ dictWord{4, 11, 80},
+ dictWord{133, 11, 44},
+ dictWord{8, 10, 689},
+ dictWord{137, 10, 863},
+ dictWord{138, 0, 880},
+ dictWord{4, 10, 18},
+ dictWord{7, 10, 145},
+ dictWord{7, 10, 444},
+ dictWord{7, 10, 1278},
+ dictWord{8, 10, 49},
+ dictWord{8, 10, 400},
+ dictWord{9, 10, 71},
+ dictWord{9, 10, 250},
+ dictWord{10, 10, 459},
+ dictWord{12, 10, 160},
+ dictWord{144, 10, 24},
+ dictWord{136, 0, 475},
+ dictWord{5, 0, 1016},
+ dictWord{5, 11, 299},
+ dictWord{135, 11, 1083},
+ dictWord{7, 0, 602},
+ dictWord{8, 0, 179},
+ dictWord{10, 0, 781},
+ dictWord{140, 0, 126},
+ dictWord{6, 0, 329},
+ dictWord{138, 0, 111},
+ dictWord{135, 0, 1864},
+ dictWord{4, 11, 219},
+ dictWord{7, 11, 1761},
+ dictWord{137, 11, 86},
+ dictWord{6, 0, 1888},
+ dictWord{6, 0, 1892},
+ dictWord{6, 0, 1901},
+ dictWord{6, 0, 1904},
+ dictWord{9, 0, 953},
+ dictWord{9, 0, 985},
+ dictWord{9, 0, 991},
+ dictWord{9, 0, 1001},
+ dictWord{12, 0, 818},
+ dictWord{12, 0, 846},
+ dictWord{12, 0, 847},
+ dictWord{12, 0, 861},
+ dictWord{12, 0, 862},
+ dictWord{12, 0, 873},
+ dictWord{12, 0, 875},
+ dictWord{12, 0, 877},
+ dictWord{12, 0, 879},
+ dictWord{12, 0, 881},
+ dictWord{12, 0, 884},
+ dictWord{12, 0, 903},
+ dictWord{12, 0, 915},
+ dictWord{12, 0, 926},
+ dictWord{12, 0, 939},
+ dictWord{15, 0, 182},
+ dictWord{15, 0, 219},
+ dictWord{15, 0, 255},
+ dictWord{18, 0, 191},
+ dictWord{18, 0, 209},
+ dictWord{18, 0, 211},
+ dictWord{149, 0, 41},
+ dictWord{5, 11, 328},
+ dictWord{135, 11, 918},
+ dictWord{137, 0, 780},
+ dictWord{12, 0, 82},
+ dictWord{143, 0, 36},
+ dictWord{133, 10, 1010},
+ dictWord{5, 0, 821},
+ dictWord{134, 0, 1687},
+ dictWord{133, 11, 514},
+ dictWord{132, 0, 956},
+ dictWord{134, 0, 1180},
+ dictWord{10, 0, 112},
+ dictWord{5, 10, 87},
+ dictWord{7, 10, 313},
+ dictWord{7, 10, 1103},
+ dictWord{10, 10, 582},
+ dictWord{11, 10, 389},
+ dictWord{11, 10, 813},
+ dictWord{12, 10, 385},
+ dictWord{13, 10, 286},
+ dictWord{14, 10, 124},
+ dictWord{146, 10, 108},
+ dictWord{5, 0, 71},
+ dictWord{7, 0, 1407},
+ dictWord{9, 0, 704},
+ dictWord{10, 0, 261},
+ dictWord{10, 0, 619},
+ dictWord{11, 0, 547},
+ dictWord{11, 0, 619},
+ dictWord{143, 0, 157},
+ dictWord{4, 0, 531},
+ dictWord{5, 0, 455},
+ dictWord{5, 11, 301},
+ dictWord{6, 11, 571},
+ dictWord{14, 11, 49},
+ dictWord{146, 11, 102},
+ dictWord{132, 10, 267},
+ dictWord{6, 0, 385},
+ dictWord{7, 0, 2008},
+ dictWord{9, 0, 337},
+ dictWord{138, 0, 517},
+ dictWord{133, 11, 726},
+ dictWord{133, 11, 364},
+ dictWord{4, 11, 76},
+ dictWord{7, 11, 1550},
+ dictWord{9, 11, 306},
+ dictWord{9, 11, 430},
+ dictWord{9, 11, 663},
+ dictWord{10, 11, 683},
+ dictWord{11, 11, 427},
+ dictWord{11, 11, 753},
+ dictWord{12, 11, 334},
+ dictWord{12, 11, 442},
+ dictWord{14, 11, 258},
+ dictWord{14, 11, 366},
+ dictWord{143, 11, 131},
+ dictWord{6, 0, 1865},
+ dictWord{6, 0, 1879},
+ dictWord{6, 0, 1881},
+ dictWord{6, 0, 1894},
+ dictWord{6, 0, 1908},
+ dictWord{9, 0, 915},
+ dictWord{9, 0, 926},
+ dictWord{9, 0, 940},
+ dictWord{9, 0, 943},
+ dictWord{9, 0, 966},
+ dictWord{9, 0, 980},
+ dictWord{9, 0, 989},
+ dictWord{9, 0, 1005},
+ dictWord{9, 0, 1010},
+ dictWord{12, 0, 813},
+ dictWord{12, 0, 817},
+ dictWord{12, 0, 840},
+ dictWord{12, 0, 843},
+ dictWord{12, 0, 855},
+ dictWord{12, 0, 864},
+ dictWord{12, 0, 871},
+ dictWord{12, 0, 872},
+ dictWord{12, 0, 899},
+ dictWord{12, 0, 905},
+ dictWord{12, 0, 924},
+ dictWord{15, 0, 171},
+ dictWord{15, 0, 181},
+ dictWord{15, 0, 224},
+ dictWord{15, 0, 235},
+ dictWord{15, 0, 251},
+ dictWord{146, 0, 184},
+ dictWord{137, 11, 52},
+ dictWord{5, 0, 16},
+ dictWord{6, 0, 86},
+ dictWord{6, 0, 603},
+ dictWord{7, 0, 292},
+ dictWord{7, 0, 561},
+ dictWord{8, 0, 257},
+ dictWord{8, 0, 382},
+ dictWord{9, 0, 721},
+ dictWord{9, 0, 778},
+ dictWord{11, 0, 581},
+ dictWord{140, 0, 466},
+ dictWord{4, 0, 486},
+ dictWord{5, 0, 491},
+ dictWord{135, 10, 1121},
+ dictWord{4, 0, 72},
+ dictWord{6, 0, 265},
+ dictWord{135, 0, 1300},
+ dictWord{135, 11, 1183},
+ dictWord{10, 10, 249},
+ dictWord{139, 10, 209},
+ dictWord{132, 10, 561},
+ dictWord{137, 11, 519},
+ dictWord{4, 11, 656},
+ dictWord{4, 10, 760},
+ dictWord{135, 11, 779},
+ dictWord{9, 10, 154},
+ dictWord{140, 10, 485},
+ dictWord{135, 11, 1793},
+ dictWord{135, 11, 144},
+ dictWord{136, 10, 255},
+ dictWord{133, 0, 621},
+ dictWord{4, 10, 368},
+ dictWord{135, 10, 641},
+ dictWord{135, 11, 1373},
+ dictWord{7, 11, 554},
+ dictWord{7, 11, 605},
+ dictWord{141, 11, 10},
+ dictWord{137, 0, 234},
+ dictWord{5, 0, 815},
+ dictWord{6, 0, 1688},
+ dictWord{134, 0, 1755},
+ dictWord{5, 11, 838},
+ dictWord{5, 11, 841},
+ dictWord{134, 11, 1649},
+ dictWord{7, 0, 1987},
+ dictWord{7, 0, 2040},
+ dictWord{136, 0, 743},
+ dictWord{133, 11, 1012},
+ dictWord{6, 0, 197},
+ dictWord{136, 0, 205},
+ dictWord{6, 0, 314},
+ dictWord{134, 11, 314},
+ dictWord{144, 11, 53},
+ dictWord{6, 11, 251},
+ dictWord{7, 11, 365},
+ dictWord{7, 11, 1357},
+ dictWord{7, 11, 1497},
+ dictWord{8, 11, 154},
+ dictWord{141, 11, 281},
+ dictWord{133, 11, 340},
+ dictWord{6, 0, 452},
+ dictWord{7, 0, 312},
+ dictWord{138, 0, 219},
+ dictWord{138, 0, 589},
+ dictWord{4, 0, 333},
+ dictWord{9, 0, 176},
+ dictWord{12, 0, 353},
+ dictWord{141, 0, 187},
+ dictWord{9, 10, 92},
+ dictWord{147, 10, 91},
+ dictWord{134, 0, 1110},
+ dictWord{11, 0, 47},
+ dictWord{139, 11, 495},
+ dictWord{6, 10, 525},
+ dictWord{8, 10, 806},
+ dictWord{9, 10, 876},
+ dictWord{140, 10, 284},
+ dictWord{8, 11, 261},
+ dictWord{9, 11, 144},
+ dictWord{9, 11, 466},
+ dictWord{10, 11, 370},
+ dictWord{12, 11, 470},
+ dictWord{13, 11, 144},
+ dictWord{142, 11, 348},
+ dictWord{137, 11, 897},
+ dictWord{8, 0, 863},
+ dictWord{8, 0, 864},
+ dictWord{8, 0, 868},
+ dictWord{8, 0, 884},
+ dictWord{10, 0, 866},
+ dictWord{10, 0, 868},
+ dictWord{10, 0, 873},
+ dictWord{10, 0, 911},
+ dictWord{10, 0, 912},
+ dictWord{10, 0, 944},
+ dictWord{12, 0, 727},
+ dictWord{6, 11, 248},
+ dictWord{9, 11, 546},
+ dictWord{10, 11, 535},
+ dictWord{11, 11, 681},
+ dictWord{141, 11, 135},
+ dictWord{6, 0, 300},
+ dictWord{135, 0, 1515},
+ dictWord{134, 0, 1237},
+ dictWord{139, 10, 958},
+ dictWord{133, 10, 594},
+ dictWord{140, 11, 250},
+ dictWord{134, 0, 1685},
+ dictWord{134, 11, 567},
+ dictWord{7, 0, 135},
+ dictWord{8, 0, 7},
+ dictWord{8, 0, 62},
+ dictWord{9, 0, 243},
+ dictWord{10, 0, 658},
+ dictWord{10, 0, 697},
+ dictWord{11, 0, 456},
+ dictWord{139, 0, 756},
+ dictWord{9, 0, 395},
+ dictWord{138, 0, 79},
+ dictWord{6, 10, 1641},
+ dictWord{136, 10, 820},
+ dictWord{4, 10, 302},
+ dictWord{135, 10, 1766},
+ dictWord{134, 11, 174},
+ dictWord{135, 10, 1313},
+ dictWord{135, 0, 631},
+ dictWord{134, 10, 1674},
+ dictWord{134, 11, 395},
+ dictWord{138, 0, 835},
+ dictWord{7, 0, 406},
+ dictWord{7, 0, 459},
+ dictWord{8, 0, 606},
+ dictWord{139, 0, 726},
+ dictWord{134, 11, 617},
+ dictWord{134, 0, 979},
+ dictWord{6, 10, 389},
+ dictWord{7, 10, 149},
+ dictWord{9, 10, 142},
+ dictWord{138, 10, 94},
+ dictWord{5, 11, 878},
+ dictWord{133, 11, 972},
+ dictWord{6, 10, 8},
+ dictWord{7, 10, 1881},
+ dictWord{8, 10, 91},
+ dictWord{136, 11, 511},
+ dictWord{133, 0, 612},
+ dictWord{132, 11, 351},
+ dictWord{4, 0, 372},
+ dictWord{7, 0, 482},
+ dictWord{8, 0, 158},
+ dictWord{9, 0, 602},
+ dictWord{9, 0, 615},
+ dictWord{10, 0, 245},
+ dictWord{10, 0, 678},
+ dictWord{10, 0, 744},
+ dictWord{11, 0, 248},
+ dictWord{139, 0, 806},
+ dictWord{5, 0, 854},
+ dictWord{135, 0, 1991},
+ dictWord{132, 11, 286},
+ dictWord{135, 11, 344},
+ dictWord{7, 11, 438},
+ dictWord{7, 11, 627},
+ dictWord{7, 11, 1516},
+ dictWord{8, 11, 40},
+ dictWord{9, 11, 56},
+ dictWord{9, 11, 294},
+ dictWord{10, 11, 30},
+ dictWord{10, 11, 259},
+ dictWord{11, 11, 969},
+ dictWord{146, 11, 148},
+ dictWord{135, 0, 1492},
+ dictWord{5, 11, 259},
+ dictWord{7, 11, 414},
+ dictWord{7, 11, 854},
+ dictWord{142, 11, 107},
+ dictWord{135, 10, 1746},
+ dictWord{6, 0, 833},
+ dictWord{134, 0, 998},
+ dictWord{135, 10, 24},
+ dictWord{6, 0, 750},
+ dictWord{135, 0, 1739},
+ dictWord{4, 10, 503},
+ dictWord{135, 10, 1661},
+ dictWord{5, 10, 130},
+ dictWord{7, 10, 1314},
+ dictWord{9, 10, 610},
+ dictWord{10, 10, 718},
+ dictWord{11, 10, 601},
+ dictWord{11, 10, 819},
+ dictWord{11, 10, 946},
+ dictWord{140, 10, 536},
+ dictWord{10, 10, 149},
+ dictWord{11, 10, 280},
+ dictWord{142, 10, 336},
+ dictWord{132, 11, 738},
+ dictWord{135, 10, 1946},
+ dictWord{5, 0, 195},
+ dictWord{135, 0, 1685},
+ dictWord{7, 0, 1997},
+ dictWord{8, 0, 730},
+ dictWord{139, 0, 1006},
+ dictWord{151, 11, 17},
+ dictWord{133, 11, 866},
+ dictWord{14, 0, 463},
+ dictWord{14, 0, 470},
+ dictWord{150, 0, 61},
+ dictWord{5, 0, 751},
+ dictWord{8, 0, 266},
+ dictWord{11, 0, 578},
+ dictWord{4, 10, 392},
+ dictWord{135, 10, 1597},
+ dictWord{5, 10, 433},
+ dictWord{9, 10, 633},
+ dictWord{139, 10, 629},
+ dictWord{135, 0, 821},
+ dictWord{6, 0, 715},
+ dictWord{134, 0, 1325},
+ dictWord{133, 11, 116},
+ dictWord{6, 0, 868},
+ dictWord{132, 11, 457},
+ dictWord{134, 0, 959},
+ dictWord{6, 10, 234},
+ dictWord{138, 11, 199},
+ dictWord{7, 0, 1053},
+ dictWord{7, 10, 1950},
+ dictWord{8, 10, 680},
+ dictWord{11, 10, 817},
+ dictWord{147, 10, 88},
+ dictWord{7, 10, 1222},
+ dictWord{138, 10, 386},
+ dictWord{5, 0, 950},
+ dictWord{5, 0, 994},
+ dictWord{6, 0, 351},
+ dictWord{134, 0, 1124},
+ dictWord{134, 0, 1081},
+ dictWord{7, 0, 1595},
+ dictWord{6, 10, 5},
+ dictWord{11, 10, 249},
+ dictWord{12, 10, 313},
+ dictWord{16, 10, 66},
+ dictWord{145, 10, 26},
+ dictWord{148, 0, 59},
+ dictWord{5, 11, 527},
+ dictWord{6, 11, 189},
+ dictWord{135, 11, 859},
+ dictWord{5, 10, 963},
+ dictWord{6, 10, 1773},
+ dictWord{11, 11, 104},
+ dictWord{11, 11, 554},
+ dictWord{15, 11, 60},
+ dictWord{143, 11, 125},
+ dictWord{135, 0, 47},
+ dictWord{137, 0, 684},
+ dictWord{134, 11, 116},
+ dictWord{134, 0, 1606},
+ dictWord{134, 0, 777},
+ dictWord{7, 0, 1020},
+ dictWord{8, 10, 509},
+ dictWord{136, 10, 792},
+ dictWord{135, 0, 1094},
+ dictWord{132, 0, 350},
+ dictWord{133, 11, 487},
+ dictWord{4, 11, 86},
+ dictWord{5, 11, 667},
+ dictWord{5, 11, 753},
+ dictWord{6, 11, 316},
+ dictWord{6, 11, 455},
+ dictWord{135, 11, 946},
+ dictWord{7, 0, 1812},
+ dictWord{13, 0, 259},
+ dictWord{13, 0, 356},
+ dictWord{14, 0, 242},
+ dictWord{147, 0, 114},
+ dictWord{132, 10, 931},
+ dictWord{133, 0, 967},
+ dictWord{4, 0, 473},
+ dictWord{7, 0, 623},
+ dictWord{8, 0, 808},
+ dictWord{9, 0, 871},
+ dictWord{9, 0, 893},
+ dictWord{11, 0, 38},
+ dictWord{11, 0, 431},
+ dictWord{12, 0, 112},
+ dictWord{12, 0, 217},
+ dictWord{12, 0, 243},
+ dictWord{12, 0, 562},
+ dictWord{12, 0, 663},
+ dictWord{12, 0, 683},
+ dictWord{13, 0, 141},
+ dictWord{13, 0, 197},
+ dictWord{13, 0, 227},
+ dictWord{13, 0, 406},
+ dictWord{13, 0, 487},
+ dictWord{14, 0, 156},
+ dictWord{14, 0, 203},
+ dictWord{14, 0, 224},
+ dictWord{14, 0, 256},
+ dictWord{18, 0, 58},
+ dictWord{150, 0, 0},
+ dictWord{138, 0, 286},
+ dictWord{7, 10, 943},
+ dictWord{139, 10, 614},
+ dictWord{135, 10, 1837},
+ dictWord{150, 11, 45},
+ dictWord{132, 0, 798},
+ dictWord{4, 0, 222},
+ dictWord{7, 0, 286},
+ dictWord{136, 0, 629},
+ dictWord{4, 11, 79},
+ dictWord{7, 11, 1773},
+ dictWord{10, 11, 450},
+ dictWord{11, 11, 589},
+ dictWord{13, 11, 332},
+ dictWord{13, 11, 493},
+ dictWord{14, 11, 183},
+ dictWord{14, 11, 334},
+ dictWord{14, 11, 362},
+ dictWord{14, 11, 368},
+ dictWord{14, 11, 376},
+ dictWord{14, 11, 379},
+ dictWord{19, 11, 90},
+ dictWord{19, 11, 103},
+ dictWord{19, 11, 127},
+ dictWord{148, 11, 90},
+ dictWord{5, 0, 337},
+ dictWord{11, 0, 513},
+ dictWord{11, 0, 889},
+ dictWord{11, 0, 961},
+ dictWord{12, 0, 461},
+ dictWord{13, 0, 79},
+ dictWord{15, 0, 121},
+ dictWord{4, 10, 90},
+ dictWord{5, 10, 545},
+ dictWord{7, 10, 754},
+ dictWord{9, 10, 186},
+ dictWord{10, 10, 72},
+ dictWord{10, 10, 782},
+ dictWord{11, 10, 577},
+ dictWord{11, 10, 610},
+ dictWord{12, 10, 354},
+ dictWord{12, 10, 362},
+ dictWord{140, 10, 595},
+ dictWord{141, 0, 306},
+ dictWord{136, 0, 146},
+ dictWord{7, 0, 1646},
+ dictWord{9, 10, 329},
+ dictWord{11, 10, 254},
+ dictWord{141, 11, 124},
+ dictWord{4, 0, 465},
+ dictWord{135, 0, 1663},
+ dictWord{132, 0, 525},
+ dictWord{133, 11, 663},
+ dictWord{10, 0, 299},
+ dictWord{18, 0, 74},
+ dictWord{9, 10, 187},
+ dictWord{11, 10, 1016},
+ dictWord{145, 10, 44},
+ dictWord{7, 0, 165},
+ dictWord{7, 0, 919},
+ dictWord{4, 10, 506},
+ dictWord{136, 10, 517},
+ dictWord{5, 10, 295},
+ dictWord{135, 10, 1680},
+ dictWord{133, 11, 846},
+ dictWord{134, 0, 1064},
+ dictWord{5, 11, 378},
+ dictWord{7, 11, 1402},
+ dictWord{7, 11, 1414},
+ dictWord{8, 11, 465},
+ dictWord{9, 11, 286},
+ dictWord{10, 11, 185},
+ dictWord{10, 11, 562},
+ dictWord{10, 11, 635},
+ dictWord{11, 11, 31},
+ dictWord{11, 11, 393},
+ dictWord{12, 11, 456},
+ dictWord{13, 11, 312},
+ dictWord{18, 11, 65},
+ dictWord{18, 11, 96},
+ dictWord{147, 11, 89},
+ dictWord{132, 0, 596},
+ dictWord{7, 10, 987},
+ dictWord{9, 10, 688},
+ dictWord{10, 10, 522},
+ dictWord{11, 10, 788},
+ dictWord{140, 10, 566},
+ dictWord{6, 0, 82},
+ dictWord{7, 0, 138},
+ dictWord{7, 0, 517},
+ dictWord{7, 0, 1741},
+ dictWord{11, 0, 238},
+ dictWord{4, 11, 648},
+ dictWord{134, 10, 1775},
+ dictWord{7, 0, 1233},
+ dictWord{7, 10, 700},
+ dictWord{7, 10, 940},
+ dictWord{8, 10, 514},
+ dictWord{9, 10, 116},
+ dictWord{9, 10, 535},
+ dictWord{10, 10, 118},
+ dictWord{11, 10, 107},
+ dictWord{11, 10, 148},
+ dictWord{11, 10, 922},
+ dictWord{12, 10, 254},
+ dictWord{12, 10, 421},
+ dictWord{142, 10, 238},
+ dictWord{4, 0, 962},
+ dictWord{6, 0, 1824},
+ dictWord{8, 0, 894},
+ dictWord{12, 0, 708},
+ dictWord{12, 0, 725},
+ dictWord{14, 0, 451},
+ dictWord{20, 0, 94},
+ dictWord{22, 0, 59},
+ dictWord{150, 0, 62},
+ dictWord{5, 11, 945},
+ dictWord{6, 11, 1656},
+ dictWord{6, 11, 1787},
+ dictWord{7, 11, 167},
+ dictWord{8, 11, 824},
+ dictWord{9, 11, 391},
+ dictWord{10, 11, 375},
+ dictWord{139, 11, 185},
+ dictWord{5, 0, 495},
+ dictWord{7, 0, 834},
+ dictWord{9, 0, 733},
+ dictWord{139, 0, 378},
+ dictWord{4, 10, 743},
+ dictWord{135, 11, 1273},
+ dictWord{6, 0, 1204},
+ dictWord{7, 11, 1645},
+ dictWord{8, 11, 352},
+ dictWord{137, 11, 249},
+ dictWord{139, 10, 292},
+ dictWord{133, 0, 559},
+ dictWord{132, 11, 152},
+ dictWord{9, 0, 499},
+ dictWord{10, 0, 341},
+ dictWord{15, 0, 144},
+ dictWord{19, 0, 49},
+ dictWord{7, 10, 1283},
+ dictWord{9, 10, 227},
+ dictWord{11, 10, 325},
+ dictWord{11, 10, 408},
+ dictWord{14, 10, 180},
+ dictWord{146, 10, 47},
+ dictWord{6, 0, 21},
+ dictWord{6, 0, 1737},
+ dictWord{7, 0, 1444},
+ dictWord{136, 0, 224},
+ dictWord{133, 11, 1006},
+ dictWord{7, 0, 1446},
+ dictWord{9, 0, 97},
+ dictWord{17, 0, 15},
+ dictWord{5, 10, 81},
+ dictWord{7, 10, 146},
+ dictWord{7, 10, 1342},
+ dictWord{8, 10, 53},
+ dictWord{8, 10, 561},
+ dictWord{8, 10, 694},
+ dictWord{8, 10, 754},
+ dictWord{9, 10, 115},
+ dictWord{9, 10, 894},
+ dictWord{10, 10, 462},
+ dictWord{10, 10, 813},
+ dictWord{11, 10, 230},
+ dictWord{11, 10, 657},
+ dictWord{11, 10, 699},
+ dictWord{11, 10, 748},
+ dictWord{12, 10, 119},
+ dictWord{12, 10, 200},
+ dictWord{12, 10, 283},
+ dictWord{142, 10, 273},
+ dictWord{5, 10, 408},
+ dictWord{137, 10, 747},
+ dictWord{135, 11, 431},
+ dictWord{135, 11, 832},
+ dictWord{6, 0, 729},
+ dictWord{134, 0, 953},
+ dictWord{4, 0, 727},
+ dictWord{8, 0, 565},
+ dictWord{5, 11, 351},
+ dictWord{7, 11, 264},
+ dictWord{136, 11, 565},
+ dictWord{134, 0, 1948},
+ dictWord{5, 0, 519},
+ dictWord{5, 11, 40},
+ dictWord{7, 11, 598},
+ dictWord{7, 11, 1638},
+ dictWord{8, 11, 78},
+ dictWord{9, 11, 166},
+ dictWord{9, 11, 640},
+ dictWord{9, 11, 685},
+ dictWord{9, 11, 773},
+ dictWord{11, 11, 215},
+ dictWord{13, 11, 65},
+ dictWord{14, 11, 172},
+ dictWord{14, 11, 317},
+ dictWord{145, 11, 6},
+ dictWord{8, 11, 60},
+ dictWord{9, 11, 343},
+ dictWord{139, 11, 769},
+ dictWord{137, 11, 455},
+ dictWord{134, 0, 1193},
+ dictWord{140, 0, 790},
+ dictWord{7, 11, 1951},
+ dictWord{8, 11, 765},
+ dictWord{8, 11, 772},
+ dictWord{140, 11, 671},
+ dictWord{7, 11, 108},
+ dictWord{8, 11, 219},
+ dictWord{8, 11, 388},
+ dictWord{9, 11, 639},
+ dictWord{9, 11, 775},
+ dictWord{11, 11, 275},
+ dictWord{140, 11, 464},
+ dictWord{132, 11, 468},
+ dictWord{7, 10, 30},
+ dictWord{8, 10, 86},
+ dictWord{8, 10, 315},
+ dictWord{8, 10, 700},
+ dictWord{9, 10, 576},
+ dictWord{9, 10, 858},
+ dictWord{11, 10, 310},
+ dictWord{11, 10, 888},
+ dictWord{11, 10, 904},
+ dictWord{12, 10, 361},
+ dictWord{141, 10, 248},
+ dictWord{5, 11, 15},
+ dictWord{6, 11, 56},
+ dictWord{7, 11, 1758},
+ dictWord{8, 11, 500},
+ dictWord{9, 11, 730},
+ dictWord{11, 11, 331},
+ dictWord{13, 11, 150},
+ dictWord{142, 11, 282},
+ dictWord{4, 0, 402},
+ dictWord{7, 0, 2},
+ dictWord{8, 0, 323},
+ dictWord{136, 0, 479},
+ dictWord{138, 10, 839},
+ dictWord{11, 0, 580},
+ dictWord{142, 0, 201},
+ dictWord{5, 0, 59},
+ dictWord{135, 0, 672},
+ dictWord{137, 10, 617},
+ dictWord{146, 0, 34},
+ dictWord{134, 11, 1886},
+ dictWord{4, 0, 961},
+ dictWord{136, 0, 896},
+ dictWord{6, 0, 1285},
+ dictWord{5, 11, 205},
+ dictWord{6, 11, 438},
+ dictWord{137, 11, 711},
+ dictWord{134, 10, 428},
+ dictWord{7, 10, 524},
+ dictWord{8, 10, 169},
+ dictWord{8, 10, 234},
+ dictWord{9, 10, 480},
+ dictWord{138, 10, 646},
+ dictWord{148, 0, 46},
+ dictWord{141, 0, 479},
+ dictWord{133, 11, 534},
+ dictWord{6, 0, 2019},
+ dictWord{134, 10, 1648},
+ dictWord{4, 0, 85},
+ dictWord{7, 0, 549},
+ dictWord{7, 10, 1205},
+ dictWord{138, 10, 637},
+ dictWord{4, 0, 663},
+ dictWord{5, 0, 94},
+ dictWord{7, 11, 235},
+ dictWord{7, 11, 1475},
+ dictWord{15, 11, 68},
+ dictWord{146, 11, 120},
+ dictWord{6, 11, 443},
+ dictWord{9, 11, 237},
+ dictWord{9, 11, 571},
+ dictWord{9, 11, 695},
+ dictWord{10, 11, 139},
+ dictWord{11, 11, 715},
+ dictWord{12, 11, 417},
+ dictWord{141, 11, 421},
+ dictWord{132, 0, 783},
+ dictWord{4, 0, 682},
+ dictWord{8, 0, 65},
+ dictWord{9, 10, 39},
+ dictWord{10, 10, 166},
+ dictWord{11, 10, 918},
+ dictWord{12, 10, 635},
+ dictWord{20, 10, 10},
+ dictWord{22, 10, 27},
+ dictWord{22, 10, 43},
+ dictWord{150, 10, 52},
+ dictWord{6, 0, 11},
+ dictWord{135, 0, 187},
+ dictWord{132, 0, 522},
+ dictWord{4, 0, 52},
+ dictWord{135, 0, 661},
+ dictWord{4, 0, 383},
+ dictWord{133, 0, 520},
+ dictWord{135, 11, 546},
+ dictWord{11, 0, 343},
+ dictWord{142, 0, 127},
+ dictWord{4, 11, 578},
+ dictWord{7, 10, 157},
+ dictWord{7, 11, 624},
+ dictWord{7, 11, 916},
+ dictWord{8, 10, 279},
+ dictWord{10, 11, 256},
+ dictWord{11, 11, 87},
+ dictWord{139, 11, 703},
+ dictWord{134, 10, 604},
+ dictWord{4, 0, 281},
+ dictWord{5, 0, 38},
+ dictWord{7, 0, 194},
+ dictWord{7, 0, 668},
+ dictWord{7, 0, 1893},
+ dictWord{137, 0, 397},
+ dictWord{7, 10, 945},
+ dictWord{11, 10, 713},
+ dictWord{139, 10, 744},
+ dictWord{139, 10, 1022},
+ dictWord{9, 0, 635},
+ dictWord{139, 0, 559},
+ dictWord{5, 11, 923},
+ dictWord{7, 11, 490},
+ dictWord{12, 11, 553},
+ dictWord{13, 11, 100},
+ dictWord{14, 11, 118},
+ dictWord{143, 11, 75},
+ dictWord{132, 0, 975},
+ dictWord{132, 10, 567},
+ dictWord{137, 10, 859},
+ dictWord{7, 10, 1846},
+ dictWord{7, 11, 1846},
+ dictWord{8, 10, 628},
+ dictWord{136, 11, 628},
+ dictWord{148, 0, 116},
+ dictWord{138, 11, 750},
+ dictWord{14, 0, 51},
+ dictWord{14, 11, 51},
+ dictWord{15, 11, 7},
+ dictWord{148, 11, 20},
+ dictWord{132, 0, 858},
+ dictWord{134, 0, 1075},
+ dictWord{4, 11, 924},
+ dictWord{133, 10, 762},
+ dictWord{136, 0, 535},
+ dictWord{133, 0, 448},
+ dictWord{10, 10, 784},
+ dictWord{141, 10, 191},
+ dictWord{133, 10, 298},
+ dictWord{7, 0, 610},
+ dictWord{135, 0, 1501},
+ dictWord{7, 10, 633},
+ dictWord{7, 10, 905},
+ dictWord{7, 10, 909},
+ dictWord{7, 10, 1538},
+ dictWord{9, 10, 767},
+ dictWord{140, 10, 636},
+ dictWord{4, 11, 265},
+ dictWord{7, 11, 807},
+ dictWord{135, 11, 950},
+ dictWord{5, 11, 93},
+ dictWord{12, 11, 267},
+ dictWord{144, 11, 26},
+ dictWord{136, 0, 191},
+ dictWord{139, 10, 301},
+ dictWord{135, 10, 1970},
+ dictWord{135, 0, 267},
+ dictWord{4, 0, 319},
+ dictWord{5, 0, 699},
+ dictWord{138, 0, 673},
+ dictWord{6, 0, 336},
+ dictWord{7, 0, 92},
+ dictWord{7, 0, 182},
+ dictWord{8, 0, 453},
+ dictWord{8, 0, 552},
+ dictWord{9, 0, 204},
+ dictWord{9, 0, 285},
+ dictWord{10, 0, 99},
+ dictWord{11, 0, 568},
+ dictWord{11, 0, 950},
+ dictWord{12, 0, 94},
+ dictWord{16, 0, 20},
+ dictWord{16, 0, 70},
+ dictWord{19, 0, 55},
+ dictWord{12, 10, 644},
+ dictWord{144, 10, 90},
+ dictWord{6, 0, 551},
+ dictWord{7, 0, 1308},
+ dictWord{7, 10, 845},
+ dictWord{7, 11, 994},
+ dictWord{8, 10, 160},
+ dictWord{137, 10, 318},
+ dictWord{19, 11, 1},
+ dictWord{19, 11, 26},
+ dictWord{150, 11, 9},
+ dictWord{7, 0, 1406},
+ dictWord{9, 0, 218},
+ dictWord{141, 0, 222},
+ dictWord{5, 0, 256},
+ dictWord{138, 0, 69},
+ dictWord{5, 11, 233},
+ dictWord{5, 11, 320},
+ dictWord{6, 11, 140},
+ dictWord{7, 11, 330},
+ dictWord{136, 11, 295},
+ dictWord{6, 0, 1980},
+ dictWord{136, 0, 952},
+ dictWord{4, 0, 833},
+ dictWord{137, 11, 678},
+ dictWord{133, 11, 978},
+ dictWord{4, 11, 905},
+ dictWord{6, 11, 1701},
+ dictWord{137, 11, 843},
+ dictWord{138, 10, 735},
+ dictWord{136, 10, 76},
+ dictWord{17, 0, 39},
+ dictWord{148, 0, 36},
+ dictWord{18, 0, 81},
+ dictWord{146, 11, 81},
+ dictWord{14, 0, 352},
+ dictWord{17, 0, 53},
+ dictWord{18, 0, 146},
+ dictWord{18, 0, 152},
+ dictWord{19, 0, 11},
+ dictWord{150, 0, 54},
+ dictWord{135, 0, 634},
+ dictWord{138, 10, 841},
+ dictWord{132, 0, 618},
+ dictWord{4, 0, 339},
+ dictWord{7, 0, 259},
+ dictWord{17, 0, 73},
+ dictWord{4, 11, 275},
+ dictWord{140, 11, 376},
+ dictWord{132, 11, 509},
+ dictWord{7, 11, 273},
+ dictWord{139, 11, 377},
+ dictWord{4, 0, 759},
+ dictWord{13, 0, 169},
+ dictWord{137, 10, 804},
+ dictWord{6, 10, 96},
+ dictWord{135, 10, 1426},
+ dictWord{4, 10, 651},
+ dictWord{133, 10, 289},
+ dictWord{7, 0, 1075},
+ dictWord{8, 10, 35},
+ dictWord{9, 10, 511},
+ dictWord{10, 10, 767},
+ dictWord{147, 10, 118},
+ dictWord{6, 0, 649},
+ dictWord{6, 0, 670},
+ dictWord{136, 0, 482},
+ dictWord{5, 0, 336},
+ dictWord{6, 0, 341},
+ dictWord{6, 0, 478},
+ dictWord{6, 0, 1763},
+ dictWord{136, 0, 386},
+ dictWord{5, 11, 802},
+ dictWord{7, 11, 2021},
+ dictWord{8, 11, 805},
+ dictWord{14, 11, 94},
+ dictWord{15, 11, 65},
+ dictWord{16, 11, 4},
+ dictWord{16, 11, 77},
+ dictWord{16, 11, 80},
+ dictWord{145, 11, 5},
+ dictWord{6, 0, 1035},
+ dictWord{5, 11, 167},
+ dictWord{5, 11, 899},
+ dictWord{6, 11, 410},
+ dictWord{137, 11, 777},
+ dictWord{134, 11, 1705},
+ dictWord{5, 0, 924},
+ dictWord{133, 0, 969},
+ dictWord{132, 10, 704},
+ dictWord{135, 0, 73},
+ dictWord{135, 11, 10},
+ dictWord{135, 10, 1078},
+ dictWord{5, 11, 11},
+ dictWord{6, 11, 117},
+ dictWord{6, 11, 485},
+ dictWord{7, 11, 1133},
+ dictWord{9, 11, 582},
+ dictWord{9, 11, 594},
+ dictWord{11, 11, 21},
+ dictWord{11, 11, 818},
+ dictWord{12, 11, 535},
+ dictWord{141, 11, 86},
+ dictWord{135, 0, 1971},
+ dictWord{4, 11, 264},
+ dictWord{7, 11, 1067},
+ dictWord{8, 11, 204},
+ dictWord{8, 11, 385},
+ dictWord{139, 11, 953},
+ dictWord{6, 0, 1458},
+ dictWord{135, 0, 1344},
+ dictWord{5, 0, 396},
+ dictWord{134, 0, 501},
+ dictWord{4, 10, 720},
+ dictWord{133, 10, 306},
+ dictWord{4, 0, 929},
+ dictWord{5, 0, 799},
+ dictWord{8, 0, 46},
+ dictWord{8, 0, 740},
+ dictWord{133, 10, 431},
+ dictWord{7, 11, 646},
+ dictWord{7, 11, 1730},
+ dictWord{11, 11, 446},
+ dictWord{141, 11, 178},
+ dictWord{7, 0, 276},
+ dictWord{5, 10, 464},
+ dictWord{6, 10, 236},
+ dictWord{7, 10, 696},
+ dictWord{7, 10, 914},
+ dictWord{7, 10, 1108},
+ dictWord{7, 10, 1448},
+ dictWord{9, 10, 15},
+ dictWord{9, 10, 564},
+ dictWord{10, 10, 14},
+ dictWord{12, 10, 565},
+ dictWord{13, 10, 449},
+ dictWord{14, 10, 53},
+ dictWord{15, 10, 13},
+ dictWord{16, 10, 64},
+ dictWord{145, 10, 41},
+ dictWord{4, 0, 892},
+ dictWord{133, 0, 770},
+ dictWord{6, 10, 1767},
+ dictWord{12, 10, 194},
+ dictWord{145, 10, 107},
+ dictWord{135, 0, 158},
+ dictWord{5, 10, 840},
+ dictWord{138, 11, 608},
+ dictWord{134, 0, 1432},
+ dictWord{138, 11, 250},
+ dictWord{8, 11, 794},
+ dictWord{9, 11, 400},
+ dictWord{10, 11, 298},
+ dictWord{142, 11, 228},
+ dictWord{151, 0, 25},
+ dictWord{7, 11, 1131},
+ dictWord{135, 11, 1468},
+ dictWord{135, 0, 2001},
+ dictWord{9, 10, 642},
+ dictWord{11, 10, 236},
+ dictWord{142, 10, 193},
+ dictWord{4, 10, 68},
+ dictWord{5, 10, 634},
+ dictWord{6, 10, 386},
+ dictWord{7, 10, 794},
+ dictWord{8, 10, 273},
+ dictWord{9, 10, 563},
+ dictWord{10, 10, 105},
+ dictWord{10, 10, 171},
+ dictWord{11, 10, 94},
+ dictWord{139, 10, 354},
+ dictWord{136, 11, 724},
+ dictWord{132, 0, 478},
+ dictWord{11, 11, 512},
+ dictWord{13, 11, 205},
+ dictWord{19, 11, 30},
+ dictWord{22, 11, 36},
+ dictWord{151, 11, 19},
+ dictWord{7, 0, 1461},
+ dictWord{140, 0, 91},
+ dictWord{6, 11, 190},
+ dictWord{7, 11, 768},
+ dictWord{135, 11, 1170},
+ dictWord{4, 0, 602},
+ dictWord{8, 0, 211},
+ dictWord{4, 10, 95},
+ dictWord{7, 10, 416},
+ dictWord{139, 10, 830},
+ dictWord{7, 10, 731},
+ dictWord{13, 10, 20},
+ dictWord{143, 10, 11},
+ dictWord{6, 0, 1068},
+ dictWord{135, 0, 1872},
+ dictWord{4, 0, 13},
+ dictWord{5, 0, 567},
+ dictWord{7, 0, 1498},
+ dictWord{9, 0, 124},
+ dictWord{11, 0, 521},
+ dictWord{12, 0, 405},
+ dictWord{135, 11, 1023},
+ dictWord{135, 0, 1006},
+ dictWord{132, 0, 735},
+ dictWord{138, 0, 812},
+ dictWord{4, 0, 170},
+ dictWord{135, 0, 323},
+ dictWord{6, 11, 137},
+ dictWord{9, 11, 75},
+ dictWord{9, 11, 253},
+ dictWord{10, 11, 194},
+ dictWord{138, 11, 444},
+ dictWord{5, 0, 304},
+ dictWord{7, 0, 1403},
+ dictWord{5, 10, 864},
+ dictWord{10, 10, 648},
+ dictWord{11, 10, 671},
+ dictWord{143, 10, 46},
+ dictWord{135, 11, 1180},
+ dictWord{133, 10, 928},
+ dictWord{4, 0, 148},
+ dictWord{133, 0, 742},
+ dictWord{11, 10, 986},
+ dictWord{140, 10, 682},
+ dictWord{133, 0, 523},
+ dictWord{135, 11, 1743},
+ dictWord{7, 0, 730},
+ dictWord{18, 0, 144},
+ dictWord{19, 0, 61},
+ dictWord{8, 10, 44},
+ dictWord{9, 10, 884},
+ dictWord{10, 10, 580},
+ dictWord{11, 10, 399},
+ dictWord{11, 10, 894},
+ dictWord{143, 10, 122},
+ dictWord{5, 11, 760},
+ dictWord{7, 11, 542},
+ dictWord{8, 11, 135},
+ dictWord{136, 11, 496},
+ dictWord{136, 0, 981},
+ dictWord{133, 0, 111},
+ dictWord{10, 0, 132},
+ dictWord{11, 0, 191},
+ dictWord{11, 0, 358},
+ dictWord{139, 0, 460},
+ dictWord{7, 11, 319},
+ dictWord{7, 11, 355},
+ dictWord{7, 11, 763},
+ dictWord{10, 11, 389},
+ dictWord{145, 11, 43},
+ dictWord{134, 0, 890},
+ dictWord{134, 0, 1420},
+ dictWord{136, 11, 557},
+ dictWord{133, 10, 518},
+ dictWord{133, 0, 444},
+ dictWord{135, 0, 1787},
+ dictWord{135, 10, 1852},
+ dictWord{8, 0, 123},
+ dictWord{15, 0, 6},
+ dictWord{144, 0, 7},
+ dictWord{6, 0, 2041},
+ dictWord{10, 11, 38},
+ dictWord{139, 11, 784},
+ dictWord{136, 0, 932},
+ dictWord{5, 0, 937},
+ dictWord{135, 0, 100},
+ dictWord{6, 0, 995},
+ dictWord{4, 11, 58},
+ dictWord{5, 11, 286},
+ dictWord{6, 11, 319},
+ dictWord{7, 11, 402},
+ dictWord{7, 11, 1254},
+ dictWord{7, 11, 1903},
+ dictWord{8, 11, 356},
+ dictWord{140, 11, 408},
+ dictWord{4, 11, 389},
+ dictWord{9, 11, 181},
+ dictWord{9, 11, 255},
+ dictWord{10, 11, 8},
+ dictWord{10, 11, 29},
+ dictWord{10, 11, 816},
+ dictWord{11, 11, 311},
+ dictWord{11, 11, 561},
+ dictWord{12, 11, 67},
+ dictWord{141, 11, 181},
+ dictWord{138, 0, 255},
+ dictWord{5, 0, 138},
+ dictWord{4, 10, 934},
+ dictWord{136, 10, 610},
+ dictWord{4, 0, 965},
+ dictWord{10, 0, 863},
+ dictWord{138, 0, 898},
+ dictWord{10, 10, 804},
+ dictWord{138, 10, 832},
+ dictWord{12, 0, 631},
+ dictWord{8, 10, 96},
+ dictWord{9, 10, 36},
+ dictWord{10, 10, 607},
+ dictWord{11, 10, 423},
+ dictWord{11, 10, 442},
+ dictWord{12, 10, 309},
+ dictWord{14, 10, 199},
+ dictWord{15, 10, 90},
+ dictWord{145, 10, 110},
+ dictWord{134, 0, 1394},
+ dictWord{4, 0, 652},
+ dictWord{8, 0, 320},
+ dictWord{22, 0, 6},
+ dictWord{22, 0, 16},
+ dictWord{9, 10, 13},
+ dictWord{9, 10, 398},
+ dictWord{9, 10, 727},
+ dictWord{10, 10, 75},
+ dictWord{10, 10, 184},
+ dictWord{10, 10, 230},
+ dictWord{10, 10, 564},
+ dictWord{10, 10, 569},
+ dictWord{11, 10, 973},
+ dictWord{12, 10, 70},
+ dictWord{12, 10, 189},
+ dictWord{13, 10, 57},
+ dictWord{141, 10, 257},
+ dictWord{6, 0, 897},
+ dictWord{134, 0, 1333},
+ dictWord{4, 0, 692},
+ dictWord{133, 0, 321},
+ dictWord{133, 11, 373},
+ dictWord{135, 0, 922},
+ dictWord{5, 0, 619},
+ dictWord{133, 0, 698},
+ dictWord{137, 10, 631},
+ dictWord{5, 10, 345},
+ dictWord{135, 10, 1016},
+ dictWord{9, 0, 957},
+ dictWord{9, 0, 1018},
+ dictWord{12, 0, 828},
+ dictWord{12, 0, 844},
+ dictWord{12, 0, 897},
+ dictWord{12, 0, 901},
+ dictWord{12, 0, 943},
+ dictWord{15, 0, 180},
+ dictWord{18, 0, 197},
+ dictWord{18, 0, 200},
+ dictWord{18, 0, 213},
+ dictWord{18, 0, 214},
+ dictWord{146, 0, 226},
+ dictWord{5, 0, 917},
+ dictWord{134, 0, 1659},
+ dictWord{135, 0, 1100},
+ dictWord{134, 0, 1173},
+ dictWord{134, 0, 1930},
+ dictWord{5, 0, 251},
+ dictWord{5, 0, 956},
+ dictWord{8, 0, 268},
+ dictWord{9, 0, 214},
+ dictWord{146, 0, 142},
+ dictWord{133, 10, 673},
+ dictWord{137, 10, 850},
+ dictWord{4, 10, 287},
+ dictWord{133, 10, 1018},
+ dictWord{132, 11, 672},
+ dictWord{5, 0, 346},
+ dictWord{5, 0, 711},
+ dictWord{8, 0, 390},
+ dictWord{11, 11, 752},
+ dictWord{139, 11, 885},
+ dictWord{5, 10, 34},
+ dictWord{10, 10, 724},
+ dictWord{12, 10, 444},
+ dictWord{13, 10, 354},
+ dictWord{18, 10, 32},
+ dictWord{23, 10, 24},
+ dictWord{23, 10, 31},
+ dictWord{152, 10, 5},
+ dictWord{4, 11, 710},
+ dictWord{134, 11, 606},
+ dictWord{134, 0, 744},
+ dictWord{134, 10, 382},
+ dictWord{133, 11, 145},
+ dictWord{4, 10, 329},
+ dictWord{7, 11, 884},
+ dictWord{140, 11, 124},
+ dictWord{4, 11, 467},
+ dictWord{5, 11, 405},
+ dictWord{134, 11, 544},
+ dictWord{9, 10, 846},
+ dictWord{138, 10, 827},
+ dictWord{133, 0, 624},
+ dictWord{9, 11, 372},
+ dictWord{15, 11, 2},
+ dictWord{19, 11, 10},
+ dictWord{147, 11, 18},
+ dictWord{4, 11, 387},
+ dictWord{135, 11, 1288},
+ dictWord{5, 0, 783},
+ dictWord{7, 0, 1998},
+ dictWord{135, 0, 2047},
+ dictWord{132, 10, 906},
+ dictWord{136, 10, 366},
+ dictWord{135, 11, 550},
+ dictWord{4, 10, 123},
+ dictWord{4, 10, 649},
+ dictWord{5, 10, 605},
+ dictWord{7, 10, 1509},
+ dictWord{136, 10, 36},
+ dictWord{134, 0, 1125},
+ dictWord{132, 0, 594},
+ dictWord{133, 10, 767},
+ dictWord{135, 11, 1227},
+ dictWord{136, 11, 467},
+ dictWord{4, 11, 576},
+ dictWord{135, 11, 1263},
+ dictWord{4, 0, 268},
+ dictWord{7, 0, 1534},
+ dictWord{135, 11, 1534},
+ dictWord{4, 10, 273},
+ dictWord{5, 10, 658},
+ dictWord{5, 11, 919},
+ dictWord{5, 10, 995},
+ dictWord{134, 11, 1673},
+ dictWord{133, 0, 563},
+ dictWord{134, 10, 72},
+ dictWord{135, 10, 1345},
+ dictWord{4, 11, 82},
+ dictWord{5, 11, 333},
+ dictWord{5, 11, 904},
+ dictWord{6, 11, 207},
+ dictWord{7, 11, 325},
+ dictWord{7, 11, 1726},
+ dictWord{8, 11, 101},
+ dictWord{10, 11, 778},
+ dictWord{139, 11, 220},
+ dictWord{5, 0, 37},
+ dictWord{6, 0, 39},
+ dictWord{6, 0, 451},
+ dictWord{7, 0, 218},
+ dictWord{7, 0, 667},
+ dictWord{7, 0, 1166},
+ dictWord{7, 0, 1687},
+ dictWord{8, 0, 662},
+ dictWord{16, 0, 2},
+ dictWord{133, 10, 589},
+ dictWord{134, 0, 1332},
+ dictWord{133, 11, 903},
+ dictWord{134, 0, 508},
+ dictWord{5, 10, 117},
+ dictWord{6, 10, 514},
+ dictWord{6, 10, 541},
+ dictWord{7, 10, 1164},
+ dictWord{7, 10, 1436},
+ dictWord{8, 10, 220},
+ dictWord{8, 10, 648},
+ dictWord{10, 10, 688},
+ dictWord{11, 10, 560},
+ dictWord{140, 11, 147},
+ dictWord{6, 11, 555},
+ dictWord{135, 11, 485},
+ dictWord{133, 10, 686},
+ dictWord{7, 0, 453},
+ dictWord{7, 0, 635},
+ dictWord{7, 0, 796},
+ dictWord{8, 0, 331},
+ dictWord{9, 0, 330},
+ dictWord{9, 0, 865},
+ dictWord{10, 0, 119},
+ dictWord{10, 0, 235},
+ dictWord{11, 0, 111},
+ dictWord{11, 0, 129},
+ dictWord{11, 0, 240},
+ dictWord{12, 0, 31},
+ dictWord{12, 0, 66},
+ dictWord{12, 0, 222},
+ dictWord{12, 0, 269},
+ dictWord{12, 0, 599},
+ dictWord{12, 0, 684},
+ dictWord{12, 0, 689},
+ dictWord{12, 0, 691},
+ dictWord{142, 0, 345},
+ dictWord{135, 0, 1834},
+ dictWord{4, 11, 705},
+ dictWord{7, 11, 615},
+ dictWord{138, 11, 251},
+ dictWord{136, 11, 345},
+ dictWord{137, 0, 527},
+ dictWord{6, 0, 98},
+ dictWord{7, 0, 702},
+ dictWord{135, 0, 991},
+ dictWord{11, 0, 576},
+ dictWord{14, 0, 74},
+ dictWord{7, 10, 196},
+ dictWord{10, 10, 765},
+ dictWord{11, 10, 347},
+ dictWord{11, 10, 552},
+ dictWord{11, 10, 790},
+ dictWord{12, 10, 263},
+ dictWord{13, 10, 246},
+ dictWord{13, 10, 270},
+ dictWord{13, 10, 395},
+ dictWord{14, 10, 176},
+ dictWord{14, 10, 190},
+ dictWord{14, 10, 398},
+ dictWord{14, 10, 412},
+ dictWord{15, 10, 32},
+ dictWord{15, 10, 63},
+ dictWord{16, 10, 88},
+ dictWord{147, 10, 105},
+ dictWord{134, 11, 90},
+ dictWord{13, 0, 84},
+ dictWord{141, 0, 122},
+ dictWord{6, 0, 37},
+ dictWord{7, 0, 299},
+ dictWord{7, 0, 1666},
+ dictWord{8, 0, 195},
+ dictWord{8, 0, 316},
+ dictWord{9, 0, 178},
+ dictWord{9, 0, 276},
+ dictWord{9, 0, 339},
+ dictWord{9, 0, 536},
+ dictWord{10, 0, 102},
+ dictWord{10, 0, 362},
+ dictWord{10, 0, 785},
+ dictWord{11, 0, 55},
+ dictWord{11, 0, 149},
+ dictWord{11, 0, 773},
+ dictWord{13, 0, 416},
+ dictWord{13, 0, 419},
+ dictWord{14, 0, 38},
+ dictWord{14, 0, 41},
+ dictWord{142, 0, 210},
+ dictWord{5, 10, 381},
+ dictWord{135, 10, 1792},
+ dictWord{7, 11, 813},
+ dictWord{12, 11, 497},
+ dictWord{141, 11, 56},
+ dictWord{7, 10, 616},
+ dictWord{138, 10, 413},
+ dictWord{133, 0, 645},
+ dictWord{6, 11, 125},
+ dictWord{135, 11, 1277},
+ dictWord{132, 0, 290},
+ dictWord{6, 0, 70},
+ dictWord{7, 0, 1292},
+ dictWord{10, 0, 762},
+ dictWord{139, 0, 288},
+ dictWord{6, 10, 120},
+ dictWord{7, 10, 1188},
+ dictWord{7, 10, 1710},
+ dictWord{8, 10, 286},
+ dictWord{9, 10, 667},
+ dictWord{11, 10, 592},
+ dictWord{139, 10, 730},
+ dictWord{135, 11, 1784},
+ dictWord{7, 0, 1315},
+ dictWord{135, 11, 1315},
+ dictWord{134, 0, 1955},
+ dictWord{135, 10, 1146},
+ dictWord{7, 0, 131},
+ dictWord{7, 0, 422},
+ dictWord{8, 0, 210},
+ dictWord{140, 0, 573},
+ dictWord{4, 10, 352},
+ dictWord{135, 10, 687},
+ dictWord{139, 0, 797},
+ dictWord{143, 0, 38},
+ dictWord{14, 0, 179},
+ dictWord{15, 0, 151},
+ dictWord{150, 0, 11},
+ dictWord{7, 0, 488},
+ dictWord{4, 10, 192},
+ dictWord{5, 10, 49},
+ dictWord{6, 10, 200},
+ dictWord{6, 10, 293},
+ dictWord{134, 10, 1696},
+ dictWord{132, 0, 936},
+ dictWord{135, 11, 703},
+ dictWord{6, 11, 160},
+ dictWord{7, 11, 1106},
+ dictWord{9, 11, 770},
+ dictWord{10, 11, 618},
+ dictWord{11, 11, 112},
+ dictWord{140, 11, 413},
+ dictWord{5, 0, 453},
+ dictWord{134, 0, 441},
+ dictWord{135, 0, 595},
+ dictWord{132, 10, 650},
+ dictWord{132, 10, 147},
+ dictWord{6, 0, 991},
+ dictWord{6, 0, 1182},
+ dictWord{12, 11, 271},
+ dictWord{145, 11, 109},
+ dictWord{133, 10, 934},
+ dictWord{140, 11, 221},
+ dictWord{132, 0, 653},
+ dictWord{7, 0, 505},
+ dictWord{135, 0, 523},
+ dictWord{134, 0, 903},
+ dictWord{135, 11, 479},
+ dictWord{7, 11, 304},
+ dictWord{9, 11, 646},
+ dictWord{9, 11, 862},
+ dictWord{10, 11, 262},
+ dictWord{11, 11, 696},
+ dictWord{12, 11, 208},
+ dictWord{15, 11, 79},
+ dictWord{147, 11, 108},
+ dictWord{146, 0, 80},
+ dictWord{135, 11, 981},
+ dictWord{142, 0, 432},
+ dictWord{132, 0, 314},
+ dictWord{137, 11, 152},
+ dictWord{7, 0, 1368},
+ dictWord{8, 0, 232},
+ dictWord{8, 0, 361},
+ dictWord{10, 0, 682},
+ dictWord{138, 0, 742},
+ dictWord{135, 11, 1586},
+ dictWord{9, 0, 534},
+ dictWord{4, 11, 434},
+ dictWord{11, 11, 663},
+ dictWord{12, 11, 210},
+ dictWord{13, 11, 166},
+ dictWord{13, 11, 310},
+ dictWord{14, 11, 373},
+ dictWord{147, 11, 43},
+ dictWord{7, 11, 1091},
+ dictWord{135, 11, 1765},
+ dictWord{6, 11, 550},
+ dictWord{135, 11, 652},
+ dictWord{137, 0, 27},
+ dictWord{142, 0, 12},
+ dictWord{4, 10, 637},
+ dictWord{5, 11, 553},
+ dictWord{7, 11, 766},
+ dictWord{138, 11, 824},
+ dictWord{7, 11, 737},
+ dictWord{8, 11, 298},
+ dictWord{136, 11, 452},
+ dictWord{7, 0, 736},
+ dictWord{139, 0, 264},
+ dictWord{134, 0, 1657},
+ dictWord{133, 11, 292},
+ dictWord{138, 11, 135},
+ dictWord{6, 0, 844},
+ dictWord{134, 0, 1117},
+ dictWord{135, 0, 127},
+ dictWord{9, 10, 867},
+ dictWord{138, 10, 837},
+ dictWord{6, 0, 1184},
+ dictWord{134, 0, 1208},
+ dictWord{134, 0, 1294},
+ dictWord{136, 0, 364},
+ dictWord{6, 0, 1415},
+ dictWord{7, 0, 1334},
+ dictWord{11, 0, 125},
+ dictWord{6, 10, 170},
+ dictWord{7, 11, 393},
+ dictWord{8, 10, 395},
+ dictWord{8, 10, 487},
+ dictWord{10, 11, 603},
+ dictWord{11, 11, 206},
+ dictWord{141, 10, 147},
+ dictWord{137, 11, 748},
+ dictWord{4, 11, 912},
+ dictWord{137, 11, 232},
+ dictWord{4, 10, 535},
+ dictWord{136, 10, 618},
+ dictWord{137, 0, 792},
+ dictWord{7, 11, 1973},
+ dictWord{136, 11, 716},
+ dictWord{135, 11, 98},
+ dictWord{5, 0, 909},
+ dictWord{9, 0, 849},
+ dictWord{138, 0, 805},
+ dictWord{4, 0, 630},
+ dictWord{132, 0, 699},
+ dictWord{5, 11, 733},
+ dictWord{14, 11, 103},
+ dictWord{150, 10, 23},
+ dictWord{12, 11, 158},
+ dictWord{18, 11, 8},
+ dictWord{19, 11, 62},
+ dictWord{20, 11, 6},
+ dictWord{22, 11, 4},
+ dictWord{23, 11, 2},
+ dictWord{151, 11, 9},
+ dictWord{132, 0, 968},
+ dictWord{132, 10, 778},
+ dictWord{132, 10, 46},
+ dictWord{5, 10, 811},
+ dictWord{6, 10, 1679},
+ dictWord{6, 10, 1714},
+ dictWord{135, 10, 2032},
+ dictWord{6, 0, 1446},
+ dictWord{7, 10, 1458},
+ dictWord{9, 10, 407},
+ dictWord{139, 10, 15},
+ dictWord{7, 0, 206},
+ dictWord{7, 0, 397},
+ dictWord{7, 0, 621},
+ dictWord{7, 0, 640},
+ dictWord{8, 0, 124},
+ dictWord{8, 0, 619},
+ dictWord{9, 0, 305},
+ dictWord{9, 0, 643},
+ dictWord{10, 0, 264},
+ dictWord{10, 0, 628},
+ dictWord{11, 0, 40},
+ dictWord{12, 0, 349},
+ dictWord{13, 0, 134},
+ dictWord{13, 0, 295},
+ dictWord{14, 0, 155},
+ dictWord{15, 0, 120},
+ dictWord{18, 0, 105},
+ dictWord{6, 10, 34},
+ dictWord{7, 10, 1089},
+ dictWord{8, 10, 708},
+ dictWord{8, 10, 721},
+ dictWord{9, 10, 363},
+ dictWord{148, 10, 98},
+ dictWord{4, 0, 262},
+ dictWord{5, 0, 641},
+ dictWord{135, 0, 342},
+ dictWord{137, 11, 72},
+ dictWord{4, 0, 99},
+ dictWord{6, 0, 250},
+ dictWord{6, 0, 346},
+ dictWord{8, 0, 127},
+ dictWord{138, 0, 81},
+ dictWord{132, 0, 915},
+ dictWord{5, 0, 75},
+ dictWord{9, 0, 517},
+ dictWord{10, 0, 470},
+ dictWord{12, 0, 155},
+ dictWord{141, 0, 224},
+ dictWord{132, 10, 462},
+ dictWord{11, 11, 600},
+ dictWord{11, 11, 670},
+ dictWord{141, 11, 245},
+ dictWord{142, 0, 83},
+ dictWord{5, 10, 73},
+ dictWord{6, 10, 23},
+ dictWord{134, 10, 338},
+ dictWord{6, 0, 1031},
+ dictWord{139, 11, 923},
+ dictWord{7, 11, 164},
+ dictWord{7, 11, 1571},
+ dictWord{9, 11, 107},
+ dictWord{140, 11, 225},
+ dictWord{134, 0, 1470},
+ dictWord{133, 0, 954},
+ dictWord{6, 0, 304},
+ dictWord{8, 0, 418},
+ dictWord{10, 0, 345},
+ dictWord{11, 0, 341},
+ dictWord{139, 0, 675},
+ dictWord{9, 0, 410},
+ dictWord{139, 0, 425},
+ dictWord{4, 11, 27},
+ dictWord{5, 11, 484},
+ dictWord{5, 11, 510},
+ dictWord{6, 11, 434},
+ dictWord{7, 11, 1000},
+ dictWord{7, 11, 1098},
+ dictWord{8, 11, 2},
+ dictWord{136, 11, 200},
+ dictWord{134, 0, 734},
+ dictWord{140, 11, 257},
+ dictWord{7, 10, 725},
+ dictWord{8, 10, 498},
+ dictWord{139, 10, 268},
+ dictWord{134, 0, 1822},
+ dictWord{135, 0, 1798},
+ dictWord{135, 10, 773},
+ dictWord{132, 11, 460},
+ dictWord{4, 11, 932},
+ dictWord{133, 11, 891},
+ dictWord{134, 0, 14},
+ dictWord{132, 10, 583},
+ dictWord{7, 10, 1462},
+ dictWord{8, 11, 625},
+ dictWord{139, 10, 659},
+ dictWord{5, 0, 113},
+ dictWord{6, 0, 243},
+ dictWord{6, 0, 1708},
+ dictWord{7, 0, 1865},
+ dictWord{11, 0, 161},
+ dictWord{16, 0, 37},
+ dictWord{17, 0, 99},
+ dictWord{133, 10, 220},
+ dictWord{134, 11, 76},
+ dictWord{5, 11, 461},
+ dictWord{135, 11, 1925},
+ dictWord{140, 0, 69},
+ dictWord{8, 11, 92},
+ dictWord{137, 11, 221},
+ dictWord{139, 10, 803},
+ dictWord{132, 10, 544},
+ dictWord{4, 0, 274},
+ dictWord{134, 0, 922},
+ dictWord{132, 0, 541},
+ dictWord{5, 0, 627},
+ dictWord{6, 10, 437},
+ dictWord{6, 10, 564},
+ dictWord{11, 10, 181},
+ dictWord{141, 10, 183},
+ dictWord{135, 10, 1192},
+ dictWord{7, 0, 166},
+ dictWord{132, 11, 763},
+ dictWord{133, 11, 253},
+ dictWord{134, 0, 849},
+ dictWord{9, 11, 73},
+ dictWord{10, 11, 110},
+ dictWord{14, 11, 185},
+ dictWord{145, 11, 119},
+ dictWord{5, 11, 212},
+ dictWord{12, 11, 35},
+ dictWord{141, 11, 382},
+ dictWord{133, 0, 717},
+ dictWord{137, 0, 304},
+ dictWord{136, 0, 600},
+ dictWord{133, 0, 654},
+ dictWord{6, 0, 273},
+ dictWord{10, 0, 188},
+ dictWord{13, 0, 377},
+ dictWord{146, 0, 77},
+ dictWord{4, 10, 790},
+ dictWord{5, 10, 273},
+ dictWord{134, 10, 394},
+ dictWord{132, 0, 543},
+ dictWord{135, 0, 410},
+ dictWord{11, 0, 98},
+ dictWord{11, 0, 524},
+ dictWord{141, 0, 87},
+ dictWord{132, 0, 941},
+ dictWord{135, 11, 1175},
+ dictWord{4, 0, 250},
+ dictWord{7, 0, 1612},
+ dictWord{11, 0, 186},
+ dictWord{12, 0, 133},
+ dictWord{6, 10, 127},
+ dictWord{7, 10, 1511},
+ dictWord{8, 10, 613},
+ dictWord{12, 10, 495},
+ dictWord{12, 10, 586},
+ dictWord{12, 10, 660},
+ dictWord{12, 10, 668},
+ dictWord{14, 10, 385},
+ dictWord{15, 10, 118},
+ dictWord{17, 10, 20},
+ dictWord{146, 10, 98},
+ dictWord{6, 0, 1785},
+ dictWord{133, 11, 816},
+ dictWord{134, 0, 1339},
+ dictWord{7, 0, 961},
+ dictWord{7, 0, 1085},
+ dictWord{7, 0, 1727},
+ dictWord{8, 0, 462},
+ dictWord{6, 10, 230},
+ dictWord{135, 11, 1727},
+ dictWord{9, 0, 636},
+ dictWord{135, 10, 1954},
+ dictWord{132, 0, 780},
+ dictWord{5, 11, 869},
+ dictWord{5, 11, 968},
+ dictWord{6, 11, 1626},
+ dictWord{8, 11, 734},
+ dictWord{136, 11, 784},
+ dictWord{4, 11, 542},
+ dictWord{6, 11, 1716},
+ dictWord{6, 11, 1727},
+ dictWord{7, 11, 1082},
+ dictWord{7, 11, 1545},
+ dictWord{8, 11, 56},
+ dictWord{8, 11, 118},
+ dictWord{8, 11, 412},
+ dictWord{8, 11, 564},
+ dictWord{9, 11, 888},
+ dictWord{9, 11, 908},
+ dictWord{10, 11, 50},
+ dictWord{10, 11, 423},
+ dictWord{11, 11, 685},
+ dictWord{11, 11, 697},
+ dictWord{11, 11, 933},
+ dictWord{12, 11, 299},
+ dictWord{13, 11, 126},
+ dictWord{13, 11, 136},
+ dictWord{13, 11, 170},
+ dictWord{141, 11, 190},
+ dictWord{134, 11, 226},
+ dictWord{4, 11, 232},
+ dictWord{9, 11, 202},
+ dictWord{10, 11, 474},
+ dictWord{140, 11, 433},
+ dictWord{137, 11, 500},
+ dictWord{5, 0, 529},
+ dictWord{136, 10, 68},
+ dictWord{132, 10, 654},
+ dictWord{4, 10, 156},
+ dictWord{7, 10, 998},
+ dictWord{7, 10, 1045},
+ dictWord{7, 10, 1860},
+ dictWord{9, 10, 48},
+ dictWord{9, 10, 692},
+ dictWord{11, 10, 419},
+ dictWord{139, 10, 602},
+ dictWord{7, 0, 1276},
+ dictWord{8, 0, 474},
+ dictWord{9, 0, 652},
+ dictWord{6, 11, 108},
+ dictWord{7, 11, 1003},
+ dictWord{7, 11, 1181},
+ dictWord{136, 11, 343},
+ dictWord{7, 11, 1264},
+ dictWord{7, 11, 1678},
+ dictWord{11, 11, 945},
+ dictWord{12, 11, 341},
+ dictWord{12, 11, 471},
+ dictWord{140, 11, 569},
+ dictWord{134, 11, 1712},
+ dictWord{5, 0, 948},
+ dictWord{12, 0, 468},
+ dictWord{19, 0, 96},
+ dictWord{148, 0, 24},
+ dictWord{4, 11, 133},
+ dictWord{7, 11, 711},
+ dictWord{7, 11, 1298},
+ dictWord{7, 11, 1585},
+ dictWord{135, 11, 1929},
+ dictWord{6, 0, 753},
+ dictWord{140, 0, 657},
+ dictWord{139, 0, 941},
+ dictWord{6, 11, 99},
+ dictWord{7, 11, 1808},
+ dictWord{145, 11, 57},
+ dictWord{6, 11, 574},
+ dictWord{7, 11, 428},
+ dictWord{7, 11, 1250},
+ dictWord{10, 11, 669},
+ dictWord{11, 11, 485},
+ dictWord{11, 11, 840},
+ dictWord{12, 11, 300},
+ dictWord{142, 11, 250},
+ dictWord{4, 0, 532},
+ dictWord{5, 0, 706},
+ dictWord{135, 0, 662},
+ dictWord{5, 0, 837},
+ dictWord{6, 0, 1651},
+ dictWord{139, 0, 985},
+ dictWord{7, 0, 1861},
+ dictWord{9, 10, 197},
+ dictWord{10, 10, 300},
+ dictWord{12, 10, 473},
+ dictWord{13, 10, 90},
+ dictWord{141, 10, 405},
+ dictWord{137, 11, 252},
+ dictWord{6, 11, 323},
+ dictWord{135, 11, 1564},
+ dictWord{4, 0, 330},
+ dictWord{4, 0, 863},
+ dictWord{7, 0, 933},
+ dictWord{7, 0, 2012},
+ dictWord{8, 0, 292},
+ dictWord{7, 11, 461},
+ dictWord{8, 11, 775},
+ dictWord{138, 11, 435},
+ dictWord{132, 10, 606},
+ dictWord{4, 11, 655},
+ dictWord{7, 11, 850},
+ dictWord{17, 11, 75},
+ dictWord{146, 11, 137},
+ dictWord{135, 0, 767},
+ dictWord{7, 10, 1978},
+ dictWord{136, 10, 676},
+ dictWord{132, 0, 641},
+ dictWord{135, 11, 1559},
+ dictWord{134, 0, 1233},
+ dictWord{137, 0, 242},
+ dictWord{17, 0, 114},
+ dictWord{4, 10, 361},
+ dictWord{133, 10, 315},
+ dictWord{137, 0, 883},
+ dictWord{132, 10, 461},
+ dictWord{138, 0, 274},
+ dictWord{134, 0, 2008},
+ dictWord{134, 0, 1794},
+ dictWord{4, 0, 703},
+ dictWord{135, 0, 207},
+ dictWord{12, 0, 285},
+ dictWord{132, 10, 472},
+ dictWord{132, 0, 571},
+ dictWord{5, 0, 873},
+ dictWord{5, 0, 960},
+ dictWord{8, 0, 823},
+ dictWord{9, 0, 881},
+ dictWord{136, 11, 577},
+ dictWord{7, 0, 617},
+ dictWord{10, 0, 498},
+ dictWord{11, 0, 501},
+ dictWord{12, 0, 16},
+ dictWord{140, 0, 150},
+ dictWord{138, 10, 747},
+ dictWord{132, 0, 431},
+ dictWord{133, 10, 155},
+ dictWord{11, 0, 283},
+ dictWord{11, 0, 567},
+ dictWord{7, 10, 163},
+ dictWord{8, 10, 319},
+ dictWord{9, 10, 402},
+ dictWord{10, 10, 24},
+ dictWord{10, 10, 681},
+ dictWord{11, 10, 200},
+ dictWord{12, 10, 253},
+ dictWord{12, 10, 410},
+ dictWord{142, 10, 219},
+ dictWord{4, 11, 413},
+ dictWord{5, 11, 677},
+ dictWord{8, 11, 432},
+ dictWord{140, 11, 280},
+ dictWord{9, 0, 401},
+ dictWord{5, 10, 475},
+ dictWord{7, 10, 1780},
+ dictWord{11, 10, 297},
+ dictWord{11, 10, 558},
+ dictWord{14, 10, 322},
+ dictWord{147, 10, 76},
+ dictWord{6, 0, 781},
+ dictWord{9, 0, 134},
+ dictWord{10, 0, 2},
+ dictWord{10, 0, 27},
+ dictWord{10, 0, 333},
+ dictWord{11, 0, 722},
+ dictWord{143, 0, 1},
+ dictWord{5, 0, 33},
+ dictWord{6, 0, 470},
+ dictWord{139, 0, 424},
+ dictWord{135, 0, 2006},
+ dictWord{12, 0, 783},
+ dictWord{135, 10, 1956},
+ dictWord{136, 0, 274},
+ dictWord{135, 0, 1882},
+ dictWord{132, 0, 794},
+ dictWord{135, 0, 1848},
+ dictWord{5, 10, 944},
+ dictWord{134, 10, 1769},
+ dictWord{6, 0, 47},
+ dictWord{7, 0, 90},
+ dictWord{7, 0, 664},
+ dictWord{7, 0, 830},
+ dictWord{7, 0, 1380},
+ dictWord{7, 0, 2025},
+ dictWord{8, 0, 448},
+ dictWord{136, 0, 828},
+ dictWord{132, 10, 144},
+ dictWord{134, 0, 1199},
+ dictWord{4, 11, 395},
+ dictWord{139, 11, 762},
+ dictWord{135, 11, 1504},
+ dictWord{9, 0, 417},
+ dictWord{137, 0, 493},
+ dictWord{9, 11, 174},
+ dictWord{10, 11, 164},
+ dictWord{11, 11, 440},
+ dictWord{11, 11, 841},
+ dictWord{143, 11, 98},
+ dictWord{134, 11, 426},
+ dictWord{139, 11, 1002},
+ dictWord{134, 0, 295},
+ dictWord{134, 0, 816},
+ dictWord{6, 10, 247},
+ dictWord{137, 10, 555},
+ dictWord{133, 0, 1019},
+ dictWord{4, 0, 620},
+ dictWord{5, 11, 476},
+ dictWord{10, 10, 280},
+ dictWord{138, 10, 797},
+ dictWord{139, 0, 464},
+ dictWord{5, 11, 76},
+ dictWord{6, 11, 458},
+ dictWord{6, 11, 497},
+ dictWord{7, 11, 764},
+ dictWord{7, 11, 868},
+ dictWord{9, 11, 658},
+ dictWord{10, 11, 594},
+ dictWord{11, 11, 173},
+ dictWord{11, 11, 566},
+ dictWord{12, 11, 20},
+ dictWord{12, 11, 338},
+ dictWord{141, 11, 200},
+ dictWord{134, 0, 208},
+ dictWord{4, 11, 526},
+ dictWord{7, 11, 1029},
+ dictWord{135, 11, 1054},
+ dictWord{132, 11, 636},
+ dictWord{6, 11, 233},
+ dictWord{7, 11, 660},
+ dictWord{7, 11, 1124},
+ dictWord{17, 11, 31},
+ dictWord{19, 11, 22},
+ dictWord{151, 11, 14},
+ dictWord{10, 0, 442},
+ dictWord{133, 10, 428},
+ dictWord{10, 0, 930},
+ dictWord{140, 0, 778},
+ dictWord{6, 0, 68},
+ dictWord{7, 0, 448},
+ dictWord{7, 0, 1629},
+ dictWord{7, 0, 1769},
+ dictWord{7, 0, 1813},
+ dictWord{8, 0, 442},
+ dictWord{8, 0, 516},
+ dictWord{9, 0, 710},
+ dictWord{10, 0, 282},
+ dictWord{10, 0, 722},
+ dictWord{7, 10, 1717},
+ dictWord{138, 10, 546},
+ dictWord{134, 0, 1128},
+ dictWord{11, 0, 844},
+ dictWord{12, 0, 104},
+ dictWord{140, 0, 625},
+ dictWord{4, 11, 432},
+ dictWord{135, 11, 824},
+ dictWord{138, 10, 189},
+ dictWord{133, 0, 787},
+ dictWord{133, 10, 99},
+ dictWord{4, 11, 279},
+ dictWord{7, 11, 301},
+ dictWord{137, 11, 362},
+ dictWord{8, 0, 491},
+ dictWord{4, 10, 397},
+ dictWord{136, 10, 555},
+ dictWord{4, 11, 178},
+ dictWord{133, 11, 399},
+ dictWord{134, 0, 711},
+ dictWord{144, 0, 9},
+ dictWord{4, 0, 403},
+ dictWord{5, 0, 441},
+ dictWord{7, 0, 450},
+ dictWord{10, 0, 840},
+ dictWord{11, 0, 101},
+ dictWord{12, 0, 193},
+ dictWord{141, 0, 430},
+ dictWord{135, 11, 1246},
+ dictWord{12, 10, 398},
+ dictWord{20, 10, 39},
+ dictWord{21, 10, 11},
+ dictWord{150, 10, 41},
+ dictWord{4, 10, 485},
+ dictWord{7, 10, 353},
+ dictWord{135, 10, 1523},
+ dictWord{6, 10, 366},
+ dictWord{7, 10, 1384},
+ dictWord{7, 10, 1601},
+ dictWord{135, 11, 1912},
+ dictWord{7, 0, 396},
+ dictWord{10, 0, 160},
+ dictWord{135, 11, 396},
+ dictWord{137, 10, 282},
+ dictWord{134, 11, 1692},
+ dictWord{4, 10, 157},
+ dictWord{5, 10, 471},
+ dictWord{6, 11, 202},
+ dictWord{10, 11, 448},
+ dictWord{11, 11, 208},
+ dictWord{12, 11, 360},
+ dictWord{17, 11, 117},
+ dictWord{17, 11, 118},
+ dictWord{18, 11, 27},
+ dictWord{148, 11, 67},
+ dictWord{133, 0, 679},
+ dictWord{137, 0, 326},
+ dictWord{136, 10, 116},
+ dictWord{7, 11, 872},
+ dictWord{10, 11, 516},
+ dictWord{139, 11, 167},
+ dictWord{132, 11, 224},
+ dictWord{5, 11, 546},
+ dictWord{7, 11, 35},
+ dictWord{8, 11, 11},
+ dictWord{8, 11, 12},
+ dictWord{9, 11, 315},
+ dictWord{9, 11, 533},
+ dictWord{10, 11, 802},
+ dictWord{11, 11, 166},
+ dictWord{12, 11, 525},
+ dictWord{142, 11, 243},
+ dictWord{7, 0, 1128},
+ dictWord{135, 11, 1920},
+ dictWord{5, 11, 241},
+ dictWord{8, 11, 242},
+ dictWord{9, 11, 451},
+ dictWord{10, 11, 667},
+ dictWord{11, 11, 598},
+ dictWord{140, 11, 429},
+ dictWord{6, 0, 737},
+ dictWord{5, 10, 160},
+ dictWord{7, 10, 363},
+ dictWord{7, 10, 589},
+ dictWord{10, 10, 170},
+ dictWord{141, 10, 55},
+ dictWord{135, 0, 1796},
+ dictWord{142, 11, 254},
+ dictWord{4, 0, 574},
+ dictWord{7, 0, 350},
+ dictWord{7, 0, 1024},
+ dictWord{8, 0, 338},
+ dictWord{9, 0, 677},
+ dictWord{138, 0, 808},
+ dictWord{134, 0, 1096},
+ dictWord{137, 11, 516},
+ dictWord{7, 0, 405},
+ dictWord{10, 0, 491},
+ dictWord{4, 10, 108},
+ dictWord{4, 11, 366},
+ dictWord{139, 10, 498},
+ dictWord{11, 11, 337},
+ dictWord{142, 11, 303},
+ dictWord{134, 11, 1736},
+ dictWord{7, 0, 1081},
+ dictWord{140, 11, 364},
+ dictWord{7, 10, 1005},
+ dictWord{140, 10, 609},
+ dictWord{7, 0, 1676},
+ dictWord{4, 10, 895},
+ dictWord{133, 10, 772},
+ dictWord{135, 0, 2037},
+ dictWord{6, 0, 1207},
+ dictWord{11, 11, 916},
+ dictWord{142, 11, 419},
+ dictWord{14, 11, 140},
+ dictWord{148, 11, 41},
+ dictWord{6, 11, 331},
+ dictWord{136, 11, 623},
+ dictWord{9, 0, 944},
+ dictWord{9, 0, 969},
+ dictWord{9, 0, 1022},
+ dictWord{12, 0, 913},
+ dictWord{12, 0, 936},
+ dictWord{15, 0, 177},
+ dictWord{15, 0, 193},
+ dictWord{4, 10, 926},
+ dictWord{133, 10, 983},
+ dictWord{5, 0, 354},
+ dictWord{135, 11, 506},
+ dictWord{8, 0, 598},
+ dictWord{9, 0, 664},
+ dictWord{138, 0, 441},
+ dictWord{4, 11, 640},
+ dictWord{133, 11, 513},
+ dictWord{137, 0, 297},
+ dictWord{132, 10, 538},
+ dictWord{6, 10, 294},
+ dictWord{7, 10, 1267},
+ dictWord{136, 10, 624},
+ dictWord{7, 0, 1772},
+ dictWord{7, 11, 1888},
+ dictWord{8, 11, 289},
+ dictWord{11, 11, 45},
+ dictWord{12, 11, 278},
+ dictWord{140, 11, 537},
+ dictWord{135, 10, 1325},
+ dictWord{138, 0, 751},
+ dictWord{141, 0, 37},
+ dictWord{134, 0, 1828},
+ dictWord{132, 10, 757},
+ dictWord{132, 11, 394},
+ dictWord{6, 0, 257},
+ dictWord{135, 0, 1522},
+ dictWord{4, 0, 582},
+ dictWord{9, 0, 191},
+ dictWord{135, 11, 1931},
+ dictWord{7, 11, 574},
+ dictWord{7, 11, 1719},
+ dictWord{137, 11, 145},
+ dictWord{132, 11, 658},
+ dictWord{10, 0, 790},
+ dictWord{132, 11, 369},
+ dictWord{9, 11, 781},
+ dictWord{10, 11, 144},
+ dictWord{11, 11, 385},
+ dictWord{13, 11, 161},
+ dictWord{13, 11, 228},
+ dictWord{13, 11, 268},
+ dictWord{148, 11, 107},
+ dictWord{8, 0, 469},
+ dictWord{10, 0, 47},
+ dictWord{136, 11, 374},
+ dictWord{6, 0, 306},
+ dictWord{7, 0, 1140},
+ dictWord{7, 0, 1340},
+ dictWord{8, 0, 133},
+ dictWord{138, 0, 449},
+ dictWord{139, 0, 1011},
+ dictWord{7, 10, 1875},
+ dictWord{139, 10, 124},
+ dictWord{4, 11, 344},
+ dictWord{6, 11, 498},
+ dictWord{139, 11, 323},
+ dictWord{137, 0, 299},
+ dictWord{132, 0, 837},
+ dictWord{133, 11, 906},
+ dictWord{5, 0, 329},
+ dictWord{8, 0, 260},
+ dictWord{138, 0, 10},
+ dictWord{134, 0, 1320},
+ dictWord{4, 0, 657},
+ dictWord{146, 0, 158},
+ dictWord{135, 0, 1191},
+ dictWord{152, 0, 7},
+ dictWord{6, 0, 1939},
+ dictWord{8, 0, 974},
+ dictWord{138, 0, 996},
+ dictWord{135, 0, 1665},
+ dictWord{11, 11, 126},
+ dictWord{139, 11, 287},
+ dictWord{143, 0, 8},
+ dictWord{14, 11, 149},
+ dictWord{14, 11, 399},
+ dictWord{143, 11, 57},
+ dictWord{5, 0, 66},
+ dictWord{7, 0, 1896},
+ dictWord{136, 0, 288},
+ dictWord{7, 0, 175},
+ dictWord{10, 0, 494},
+ dictWord{5, 10, 150},
+ dictWord{8, 10, 603},
+ dictWord{9, 10, 593},
+ dictWord{9, 10, 634},
+ dictWord{10, 10, 173},
+ dictWord{11, 10, 462},
+ dictWord{11, 10, 515},
+ dictWord{13, 10, 216},
+ dictWord{13, 10, 288},
+ dictWord{142, 10, 400},
+ dictWord{134, 0, 1643},
+ dictWord{136, 11, 21},
+ dictWord{4, 0, 21},
+ dictWord{5, 0, 91},
+ dictWord{5, 0, 648},
+ dictWord{5, 0, 750},
+ dictWord{5, 0, 781},
+ dictWord{6, 0, 54},
+ dictWord{6, 0, 112},
+ dictWord{6, 0, 402},
+ dictWord{6, 0, 1732},
+ dictWord{7, 0, 315},
+ dictWord{7, 0, 749},
+ dictWord{7, 0, 1427},
+ dictWord{7, 0, 1900},
+ dictWord{9, 0, 78},
+ dictWord{9, 0, 508},
+ dictWord{10, 0, 611},
+ dictWord{10, 0, 811},
+ dictWord{11, 0, 510},
+ dictWord{11, 0, 728},
+ dictWord{13, 0, 36},
+ dictWord{14, 0, 39},
+ dictWord{16, 0, 83},
+ dictWord{17, 0, 124},
+ dictWord{148, 0, 30},
+ dictWord{4, 0, 668},
+ dictWord{136, 0, 570},
+ dictWord{10, 0, 322},
+ dictWord{10, 0, 719},
+ dictWord{139, 0, 407},
+ dictWord{135, 11, 1381},
+ dictWord{136, 11, 193},
+ dictWord{12, 10, 108},
+ dictWord{141, 10, 291},
+ dictWord{132, 11, 616},
+ dictWord{136, 11, 692},
+ dictWord{8, 0, 125},
+ dictWord{8, 0, 369},
+ dictWord{8, 0, 524},
+ dictWord{10, 0, 486},
+ dictWord{11, 0, 13},
+ dictWord{11, 0, 381},
+ dictWord{11, 0, 736},
+ dictWord{11, 0, 766},
+ dictWord{11, 0, 845},
+ dictWord{13, 0, 114},
+ dictWord{13, 0, 292},
+ dictWord{142, 0, 47},
+ dictWord{134, 0, 1247},
+ dictWord{6, 0, 1684},
+ dictWord{6, 0, 1731},
+ dictWord{7, 0, 356},
+ dictWord{8, 0, 54},
+ dictWord{8, 0, 221},
+ dictWord{9, 0, 225},
+ dictWord{9, 0, 356},
+ dictWord{10, 0, 77},
+ dictWord{10, 0, 446},
+ dictWord{10, 0, 731},
+ dictWord{12, 0, 404},
+ dictWord{141, 0, 491},
+ dictWord{135, 10, 1777},
+ dictWord{4, 11, 305},
+ dictWord{4, 10, 493},
+ dictWord{144, 10, 55},
+ dictWord{4, 0, 951},
+ dictWord{6, 0, 1809},
+ dictWord{6, 0, 1849},
+ dictWord{8, 0, 846},
+ dictWord{8, 0, 866},
+ dictWord{8, 0, 899},
+ dictWord{10, 0, 896},
+ dictWord{12, 0, 694},
+ dictWord{142, 0, 468},
+ dictWord{5, 11, 214},
+ dictWord{7, 11, 603},
+ dictWord{8, 11, 611},
+ dictWord{9, 11, 686},
+ dictWord{10, 11, 88},
+ dictWord{11, 11, 459},
+ dictWord{11, 11, 496},
+ dictWord{12, 11, 463},
+ dictWord{12, 11, 590},
+ dictWord{13, 11, 0},
+ dictWord{142, 11, 214},
+ dictWord{132, 0, 411},
+ dictWord{4, 0, 80},
+ dictWord{133, 0, 44},
+ dictWord{140, 11, 74},
+ dictWord{143, 0, 31},
+ dictWord{7, 0, 669},
+ dictWord{6, 10, 568},
+ dictWord{7, 10, 1804},
+ dictWord{8, 10, 362},
+ dictWord{8, 10, 410},
+ dictWord{8, 10, 830},
+ dictWord{9, 10, 514},
+ dictWord{11, 10, 649},
+ dictWord{142, 10, 157},
+ dictWord{7, 0, 673},
+ dictWord{134, 11, 1703},
+ dictWord{132, 10, 625},
+ dictWord{134, 0, 1303},
+ dictWord{5, 0, 299},
+ dictWord{135, 0, 1083},
+ dictWord{138, 0, 704},
+ dictWord{6, 0, 275},
+ dictWord{7, 0, 408},
+ dictWord{6, 10, 158},
+ dictWord{7, 10, 129},
+ dictWord{7, 10, 181},
+ dictWord{8, 10, 276},
+ dictWord{8, 10, 377},
+ dictWord{10, 10, 523},
+ dictWord{11, 10, 816},
+ dictWord{12, 10, 455},
+ dictWord{13, 10, 303},
+ dictWord{142, 10, 135},
+ dictWord{4, 0, 219},
+ dictWord{7, 0, 367},
+ dictWord{7, 0, 1713},
+ dictWord{7, 0, 1761},
+ dictWord{9, 0, 86},
+ dictWord{9, 0, 537},
+ dictWord{10, 0, 165},
+ dictWord{12, 0, 219},
+ dictWord{140, 0, 561},
+ dictWord{8, 0, 216},
+ dictWord{4, 10, 1},
+ dictWord{4, 11, 737},
+ dictWord{6, 11, 317},
+ dictWord{7, 10, 1143},
+ dictWord{7, 10, 1463},
+ dictWord{9, 10, 207},
+ dictWord{9, 10, 390},
+ dictWord{9, 10, 467},
+ dictWord{10, 11, 98},
+ dictWord{11, 11, 294},
+ dictWord{11, 10, 836},
+ dictWord{12, 11, 60},
+ dictWord{12, 11, 437},
+ dictWord{13, 11, 64},
+ dictWord{13, 11, 380},
+ dictWord{142, 11, 430},
+ dictWord{6, 11, 1758},
+ dictWord{8, 11, 520},
+ dictWord{9, 11, 345},
+ dictWord{9, 11, 403},
+ dictWord{142, 11, 350},
+ dictWord{5, 11, 47},
+ dictWord{10, 11, 242},
+ dictWord{138, 11, 579},
+ dictWord{5, 11, 139},
+ dictWord{7, 11, 1168},
+ dictWord{138, 11, 539},
+ dictWord{135, 0, 1319},
+ dictWord{4, 10, 295},
+ dictWord{4, 10, 723},
+ dictWord{5, 10, 895},
+ dictWord{7, 10, 1031},
+ dictWord{8, 10, 199},
+ dictWord{8, 10, 340},
+ dictWord{9, 10, 153},
+ dictWord{9, 10, 215},
+ dictWord{10, 10, 21},
+ dictWord{10, 10, 59},
+ dictWord{10, 10, 80},
+ dictWord{10, 10, 224},
+ dictWord{10, 10, 838},
+ dictWord{11, 10, 229},
+ dictWord{11, 10, 652},
+ dictWord{12, 10, 192},
+ dictWord{13, 10, 146},
+ dictWord{142, 10, 91},
+ dictWord{140, 0, 428},
+ dictWord{137, 10, 51},
+ dictWord{133, 0, 514},
+ dictWord{5, 10, 309},
+ dictWord{140, 10, 211},
+ dictWord{6, 0, 1010},
+ dictWord{5, 10, 125},
+ dictWord{8, 10, 77},
+ dictWord{138, 10, 15},
+ dictWord{4, 0, 55},
+ dictWord{5, 0, 301},
+ dictWord{6, 0, 571},
+ dictWord{142, 0, 49},
+ dictWord{146, 0, 102},
+ dictWord{136, 11, 370},
+ dictWord{4, 11, 107},
+ dictWord{7, 11, 613},
+ dictWord{8, 11, 358},
+ dictWord{8, 11, 439},
+ dictWord{8, 11, 504},
+ dictWord{9, 11, 501},
+ dictWord{10, 11, 383},
+ dictWord{139, 11, 477},
+ dictWord{132, 11, 229},
+ dictWord{133, 0, 364},
+ dictWord{133, 10, 439},
+ dictWord{4, 11, 903},
+ dictWord{135, 11, 1816},
+ dictWord{11, 0, 379},
+ dictWord{140, 10, 76},
+ dictWord{4, 0, 76},
+ dictWord{4, 0, 971},
+ dictWord{7, 0, 1550},
+ dictWord{9, 0, 306},
+ dictWord{9, 0, 430},
+ dictWord{9, 0, 663},
+ dictWord{10, 0, 683},
+ dictWord{10, 0, 921},
+ dictWord{11, 0, 427},
+ dictWord{11, 0, 753},
+ dictWord{12, 0, 334},
+ dictWord{12, 0, 442},
+ dictWord{14, 0, 258},
+ dictWord{14, 0, 366},
+ dictWord{143, 0, 131},
+ dictWord{137, 0, 52},
+ dictWord{4, 11, 47},
+ dictWord{6, 11, 373},
+ dictWord{7, 11, 452},
+ dictWord{7, 11, 543},
+ dictWord{7, 11, 1714},
+ dictWord{7, 11, 1856},
+ dictWord{9, 11, 6},
+ dictWord{11, 11, 257},
+ dictWord{139, 11, 391},
+ dictWord{4, 10, 8},
+ dictWord{7, 10, 1152},
+ dictWord{7, 10, 1153},
+ dictWord{7, 10, 1715},
+ dictWord{9, 10, 374},
+ dictWord{10, 10, 478},
+ dictWord{139, 10, 648},
+ dictWord{4, 11, 785},
+ dictWord{133, 11, 368},
+ dictWord{135, 10, 1099},
+ dictWord{135, 11, 860},
+ dictWord{5, 11, 980},
+ dictWord{134, 11, 1754},
+ dictWord{134, 0, 1258},
+ dictWord{6, 0, 1058},
+ dictWord{6, 0, 1359},
+ dictWord{7, 11, 536},
+ dictWord{7, 11, 1331},
+ dictWord{136, 11, 143},
+ dictWord{4, 0, 656},
+ dictWord{135, 0, 779},
+ dictWord{136, 10, 87},
+ dictWord{5, 11, 19},
+ dictWord{6, 11, 533},
+ dictWord{146, 11, 126},
+ dictWord{7, 0, 144},
+ dictWord{138, 10, 438},
+ dictWord{5, 11, 395},
+ dictWord{5, 11, 951},
+ dictWord{134, 11, 1776},
+ dictWord{135, 0, 1373},
+ dictWord{7, 0, 554},
+ dictWord{7, 0, 605},
+ dictWord{141, 0, 10},
+ dictWord{4, 10, 69},
+ dictWord{5, 10, 122},
+ dictWord{9, 10, 656},
+ dictWord{138, 10, 464},
+ dictWord{5, 10, 849},
+ dictWord{134, 10, 1633},
+ dictWord{5, 0, 838},
+ dictWord{5, 0, 841},
+ dictWord{134, 0, 1649},
+ dictWord{133, 0, 1012},
+ dictWord{139, 10, 499},
+ dictWord{7, 10, 476},
+ dictWord{7, 10, 1592},
+ dictWord{138, 10, 87},
+ dictWord{6, 0, 251},
+ dictWord{7, 0, 365},
+ dictWord{7, 0, 1357},
+ dictWord{7, 0, 1497},
+ dictWord{8, 0, 154},
+ dictWord{141, 0, 281},
+ dictWord{132, 11, 441},
+ dictWord{132, 11, 695},
+ dictWord{7, 11, 497},
+ dictWord{9, 11, 387},
+ dictWord{147, 11, 81},
+ dictWord{133, 0, 340},
+ dictWord{14, 10, 283},
+ dictWord{142, 11, 283},
+ dictWord{134, 0, 810},
+ dictWord{135, 11, 1894},
+ dictWord{139, 0, 495},
+ dictWord{5, 11, 284},
+ dictWord{6, 11, 49},
+ dictWord{6, 11, 350},
+ dictWord{7, 11, 1},
+ dictWord{7, 11, 377},
+ dictWord{7, 11, 1693},
+ dictWord{8, 11, 18},
+ dictWord{8, 11, 678},
+ dictWord{9, 11, 161},
+ dictWord{9, 11, 585},
+ dictWord{9, 11, 671},
+ dictWord{9, 11, 839},
+ dictWord{11, 11, 912},
+ dictWord{141, 11, 427},
+ dictWord{5, 10, 859},
+ dictWord{7, 10, 1160},
+ dictWord{8, 10, 107},
+ dictWord{9, 10, 291},
+ dictWord{9, 10, 439},
+ dictWord{10, 10, 663},
+ dictWord{11, 10, 609},
+ dictWord{140, 10, 197},
+ dictWord{8, 0, 261},
+ dictWord{9, 0, 144},
+ dictWord{9, 0, 466},
+ dictWord{10, 0, 370},
+ dictWord{12, 0, 470},
+ dictWord{13, 0, 144},
+ dictWord{142, 0, 348},
+ dictWord{137, 0, 897},
+ dictWord{6, 0, 248},
+ dictWord{9, 0, 546},
+ dictWord{10, 0, 535},
+ dictWord{11, 0, 681},
+ dictWord{141, 0, 135},
+ dictWord{4, 0, 358},
+ dictWord{135, 0, 1496},
+ dictWord{134, 0, 567},
+ dictWord{136, 0, 445},
+ dictWord{4, 10, 117},
+ dictWord{6, 10, 372},
+ dictWord{7, 10, 1905},
+ dictWord{142, 10, 323},
+ dictWord{4, 10, 722},
+ dictWord{139, 10, 471},
+ dictWord{6, 0, 697},
+ dictWord{134, 0, 996},
+ dictWord{7, 11, 2007},
+ dictWord{9, 11, 101},
+ dictWord{9, 11, 450},
+ dictWord{10, 11, 66},
+ dictWord{10, 11, 842},
+ dictWord{11, 11, 536},
+ dictWord{140, 11, 587},
+ dictWord{132, 0, 577},
+ dictWord{134, 0, 1336},
+ dictWord{9, 10, 5},
+ dictWord{12, 10, 216},
+ dictWord{12, 10, 294},
+ dictWord{12, 10, 298},
+ dictWord{12, 10, 400},
+ dictWord{12, 10, 518},
+ dictWord{13, 10, 229},
+ dictWord{143, 10, 139},
+ dictWord{6, 0, 174},
+ dictWord{138, 0, 917},
+ dictWord{134, 10, 1774},
+ dictWord{5, 10, 12},
+ dictWord{7, 10, 375},
+ dictWord{9, 10, 88},
+ dictWord{9, 10, 438},
+ dictWord{11, 11, 62},
+ dictWord{139, 10, 270},
+ dictWord{134, 11, 1766},
+ dictWord{6, 11, 0},
+ dictWord{7, 11, 84},
+ dictWord{7, 10, 816},
+ dictWord{7, 10, 1241},
+ dictWord{9, 10, 283},
+ dictWord{9, 10, 520},
+ dictWord{10, 10, 213},
+ dictWord{10, 10, 307},
+ dictWord{10, 10, 463},
+ dictWord{10, 10, 671},
+ dictWord{10, 10, 746},
+ dictWord{11, 10, 401},
+ dictWord{11, 10, 794},
+ dictWord{11, 11, 895},
+ dictWord{12, 10, 517},
+ dictWord{17, 11, 11},
+ dictWord{18, 10, 107},
+ dictWord{147, 10, 115},
+ dictWord{5, 0, 878},
+ dictWord{133, 0, 972},
+ dictWord{6, 11, 1665},
+ dictWord{7, 11, 256},
+ dictWord{7, 11, 1388},
+ dictWord{138, 11, 499},
+ dictWord{4, 10, 258},
+ dictWord{136, 10, 639},
+ dictWord{4, 11, 22},
+ dictWord{5, 11, 10},
+ dictWord{6, 10, 22},
+ dictWord{7, 11, 848},
+ dictWord{7, 10, 903},
+ dictWord{7, 10, 1963},
+ dictWord{8, 11, 97},
+ dictWord{138, 10, 577},
+ dictWord{5, 10, 681},
+ dictWord{136, 10, 782},
+ dictWord{133, 11, 481},
+ dictWord{132, 0, 351},
+ dictWord{4, 10, 664},
+ dictWord{5, 10, 804},
+ dictWord{139, 10, 1013},
+ dictWord{6, 11, 134},
+ dictWord{7, 11, 437},
+ dictWord{7, 11, 959},
+ dictWord{9, 11, 37},
+ dictWord{14, 11, 285},
+ dictWord{14, 11, 371},
+ dictWord{144, 11, 60},
+ dictWord{7, 11, 486},
+ dictWord{8, 11, 155},
+ dictWord{11, 11, 93},
+ dictWord{140, 11, 164},
+ dictWord{132, 0, 286},
+ dictWord{7, 0, 438},
+ dictWord{7, 0, 627},
+ dictWord{7, 0, 1516},
+ dictWord{8, 0, 40},
+ dictWord{9, 0, 56},
+ dictWord{9, 0, 294},
+ dictWord{10, 0, 30},
+ dictWord{11, 0, 969},
+ dictWord{11, 0, 995},
+ dictWord{146, 0, 148},
+ dictWord{5, 11, 591},
+ dictWord{135, 11, 337},
+ dictWord{134, 0, 1950},
+ dictWord{133, 10, 32},
+ dictWord{138, 11, 500},
+ dictWord{5, 11, 380},
+ dictWord{5, 11, 650},
+ dictWord{136, 11, 310},
+ dictWord{4, 11, 364},
+ dictWord{7, 11, 1156},
+ dictWord{7, 11, 1187},
+ dictWord{137, 11, 409},
+ dictWord{4, 0, 738},
+ dictWord{134, 11, 482},
+ dictWord{4, 11, 781},
+ dictWord{6, 11, 487},
+ dictWord{7, 11, 926},
+ dictWord{8, 11, 263},
+ dictWord{139, 11, 500},
+ dictWord{135, 11, 418},
+ dictWord{6, 0, 2047},
+ dictWord{10, 0, 969},
+ dictWord{4, 10, 289},
+ dictWord{7, 10, 629},
+ dictWord{7, 10, 1698},
+ dictWord{7, 10, 1711},
+ dictWord{140, 10, 215},
+ dictWord{6, 10, 450},
+ dictWord{136, 10, 109},
+ dictWord{134, 0, 818},
+ dictWord{136, 10, 705},
+ dictWord{133, 0, 866},
+ dictWord{4, 11, 94},
+ dictWord{135, 11, 1265},
+ dictWord{132, 11, 417},
+ dictWord{134, 0, 1467},
+ dictWord{135, 10, 1238},
+ dictWord{4, 0, 972},
+ dictWord{6, 0, 1851},
+ dictWord{134, 0, 1857},
+ dictWord{134, 0, 355},
+ dictWord{133, 0, 116},
+ dictWord{132, 0, 457},
+ dictWord{135, 11, 1411},
+ dictWord{4, 11, 408},
+ dictWord{4, 11, 741},
+ dictWord{135, 11, 500},
+ dictWord{134, 10, 26},
+ dictWord{142, 11, 137},
+ dictWord{5, 0, 527},
+ dictWord{6, 0, 189},
+ dictWord{7, 0, 859},
+ dictWord{136, 0, 267},
+ dictWord{11, 0, 104},
+ dictWord{11, 0, 554},
+ dictWord{15, 0, 60},
+ dictWord{143, 0, 125},
+ dictWord{134, 0, 1613},
+ dictWord{4, 10, 414},
+ dictWord{5, 10, 467},
+ dictWord{9, 10, 654},
+ dictWord{10, 10, 451},
+ dictWord{12, 10, 59},
+ dictWord{141, 10, 375},
+ dictWord{135, 10, 17},
+ dictWord{134, 0, 116},
+ dictWord{135, 11, 541},
+ dictWord{135, 10, 955},
+ dictWord{6, 11, 73},
+ dictWord{135, 11, 177},
+ dictWord{133, 11, 576},
+ dictWord{134, 0, 886},
+ dictWord{133, 0, 487},
+ dictWord{4, 0, 86},
+ dictWord{5, 0, 667},
+ dictWord{5, 0, 753},
+ dictWord{6, 0, 316},
+ dictWord{6, 0, 455},
+ dictWord{135, 0, 946},
+ dictWord{142, 11, 231},
+ dictWord{150, 0, 45},
+ dictWord{134, 0, 863},
+ dictWord{134, 0, 1953},
+ dictWord{6, 10, 280},
+ dictWord{10, 10, 502},
+ dictWord{11, 10, 344},
+ dictWord{140, 10, 38},
+ dictWord{4, 0, 79},
+ dictWord{7, 0, 1773},
+ dictWord{10, 0, 450},
+ dictWord{11, 0, 589},
+ dictWord{13, 0, 332},
+ dictWord{13, 0, 493},
+ dictWord{14, 0, 183},
+ dictWord{14, 0, 334},
+ dictWord{14, 0, 362},
+ dictWord{14, 0, 368},
+ dictWord{14, 0, 376},
+ dictWord{14, 0, 379},
+ dictWord{19, 0, 90},
+ dictWord{19, 0, 103},
+ dictWord{19, 0, 127},
+ dictWord{148, 0, 90},
+ dictWord{5, 10, 45},
+ dictWord{7, 10, 1161},
+ dictWord{11, 10, 448},
+ dictWord{11, 10, 880},
+ dictWord{13, 10, 139},
+ dictWord{13, 10, 407},
+ dictWord{15, 10, 16},
+ dictWord{17, 10, 95},
+ dictWord{18, 10, 66},
+ dictWord{18, 10, 88},
+ dictWord{18, 10, 123},
+ dictWord{149, 10, 7},
+ dictWord{136, 10, 777},
+ dictWord{4, 10, 410},
+ dictWord{135, 10, 521},
+ dictWord{135, 10, 1778},
+ dictWord{135, 11, 538},
+ dictWord{142, 0, 381},
+ dictWord{133, 11, 413},
+ dictWord{134, 0, 1142},
+ dictWord{6, 0, 1189},
+ dictWord{136, 11, 495},
+ dictWord{5, 0, 663},
+ dictWord{6, 0, 1962},
+ dictWord{134, 0, 2003},
+ dictWord{7, 11, 54},
+ dictWord{8, 11, 312},
+ dictWord{10, 11, 191},
+ dictWord{10, 11, 614},
+ dictWord{140, 11, 567},
+ dictWord{132, 10, 436},
+ dictWord{133, 0, 846},
+ dictWord{10, 0, 528},
+ dictWord{11, 0, 504},
+ dictWord{7, 10, 1587},
+ dictWord{135, 10, 1707},
+ dictWord{5, 0, 378},
+ dictWord{8, 0, 465},
+ dictWord{9, 0, 286},
+ dictWord{10, 0, 185},
+ dictWord{10, 0, 562},
+ dictWord{10, 0, 635},
+ dictWord{11, 0, 31},
+ dictWord{11, 0, 393},
+ dictWord{13, 0, 312},
+ dictWord{18, 0, 65},
+ dictWord{18, 0, 96},
+ dictWord{147, 0, 89},
+ dictWord{7, 0, 899},
+ dictWord{14, 0, 325},
+ dictWord{6, 11, 468},
+ dictWord{7, 11, 567},
+ dictWord{7, 11, 1478},
+ dictWord{8, 11, 530},
+ dictWord{142, 11, 290},
+ dictWord{7, 0, 1880},
+ dictWord{9, 0, 680},
+ dictWord{139, 0, 798},
+ dictWord{134, 0, 1770},
+ dictWord{132, 0, 648},
+ dictWord{150, 11, 35},
+ dictWord{5, 0, 945},
+ dictWord{6, 0, 1656},
+ dictWord{6, 0, 1787},
+ dictWord{7, 0, 167},
+ dictWord{8, 0, 824},
+ dictWord{9, 0, 391},
+ dictWord{10, 0, 375},
+ dictWord{139, 0, 185},
+ dictWord{6, 11, 484},
+ dictWord{135, 11, 822},
+ dictWord{134, 0, 2046},
+ dictWord{7, 0, 1645},
+ dictWord{8, 0, 352},
+ dictWord{137, 0, 249},
+ dictWord{132, 0, 152},
+ dictWord{6, 0, 611},
+ dictWord{135, 0, 1733},
+ dictWord{6, 11, 1724},
+ dictWord{135, 11, 2022},
+ dictWord{133, 0, 1006},
+ dictWord{141, 11, 96},
+ dictWord{5, 0, 420},
+ dictWord{135, 0, 1449},
+ dictWord{146, 11, 149},
+ dictWord{135, 0, 832},
+ dictWord{135, 10, 663},
+ dictWord{133, 0, 351},
+ dictWord{5, 0, 40},
+ dictWord{7, 0, 598},
+ dictWord{7, 0, 1638},
+ dictWord{8, 0, 78},
+ dictWord{9, 0, 166},
+ dictWord{9, 0, 640},
+ dictWord{9, 0, 685},
+ dictWord{9, 0, 773},
+ dictWord{11, 0, 215},
+ dictWord{13, 0, 65},
+ dictWord{14, 0, 172},
+ dictWord{14, 0, 317},
+ dictWord{145, 0, 6},
+ dictWord{8, 0, 60},
+ dictWord{9, 0, 343},
+ dictWord{139, 0, 769},
+ dictWord{134, 0, 1354},
+ dictWord{132, 0, 724},
+ dictWord{137, 0, 745},
+ dictWord{132, 11, 474},
+ dictWord{7, 0, 1951},
+ dictWord{8, 0, 765},
+ dictWord{8, 0, 772},
+ dictWord{140, 0, 671},
+ dictWord{7, 0, 108},
+ dictWord{8, 0, 219},
+ dictWord{8, 0, 388},
+ dictWord{9, 0, 775},
+ dictWord{11, 0, 275},
+ dictWord{140, 0, 464},
+ dictWord{137, 0, 639},
+ dictWord{135, 10, 503},
+ dictWord{133, 11, 366},
+ dictWord{5, 0, 15},
+ dictWord{6, 0, 56},
+ dictWord{7, 0, 1758},
+ dictWord{8, 0, 500},
+ dictWord{9, 0, 730},
+ dictWord{11, 0, 331},
+ dictWord{13, 0, 150},
+ dictWord{14, 0, 282},
+ dictWord{5, 11, 305},
+ dictWord{9, 11, 560},
+ dictWord{141, 11, 208},
+ dictWord{4, 10, 113},
+ dictWord{5, 10, 163},
+ dictWord{5, 10, 735},
+ dictWord{7, 10, 1009},
+ dictWord{9, 10, 9},
+ dictWord{9, 10, 771},
+ dictWord{12, 10, 90},
+ dictWord{13, 10, 138},
+ dictWord{13, 10, 410},
+ dictWord{143, 10, 128},
+ dictWord{4, 10, 324},
+ dictWord{138, 10, 104},
+ dictWord{135, 11, 466},
+ dictWord{142, 11, 27},
+ dictWord{134, 0, 1886},
+ dictWord{5, 0, 205},
+ dictWord{6, 0, 438},
+ dictWord{9, 0, 711},
+ dictWord{4, 11, 480},
+ dictWord{6, 11, 167},
+ dictWord{6, 11, 302},
+ dictWord{6, 11, 1642},
+ dictWord{7, 11, 130},
+ dictWord{7, 11, 656},
+ dictWord{7, 11, 837},
+ dictWord{7, 11, 1547},
+ dictWord{7, 11, 1657},
+ dictWord{8, 11, 429},
+ dictWord{9, 11, 228},
+ dictWord{10, 11, 643},
+ dictWord{13, 11, 289},
+ dictWord{13, 11, 343},
+ dictWord{147, 11, 101},
+ dictWord{134, 0, 865},
+ dictWord{6, 0, 2025},
+ dictWord{136, 0, 965},
+ dictWord{7, 11, 278},
+ dictWord{10, 11, 739},
+ dictWord{11, 11, 708},
+ dictWord{141, 11, 348},
+ dictWord{133, 0, 534},
+ dictWord{135, 11, 1922},
+ dictWord{137, 0, 691},
+ dictWord{4, 10, 935},
+ dictWord{133, 10, 823},
+ dictWord{6, 0, 443},
+ dictWord{9, 0, 237},
+ dictWord{9, 0, 571},
+ dictWord{9, 0, 695},
+ dictWord{10, 0, 139},
+ dictWord{11, 0, 715},
+ dictWord{12, 0, 417},
+ dictWord{141, 0, 421},
+ dictWord{5, 10, 269},
+ dictWord{7, 10, 434},
+ dictWord{7, 10, 891},
+ dictWord{8, 10, 339},
+ dictWord{9, 10, 702},
+ dictWord{11, 10, 594},
+ dictWord{11, 10, 718},
+ dictWord{145, 10, 100},
+ dictWord{6, 0, 1555},
+ dictWord{7, 0, 878},
+ dictWord{9, 10, 485},
+ dictWord{141, 10, 264},
+ dictWord{134, 10, 1713},
+ dictWord{7, 10, 1810},
+ dictWord{11, 10, 866},
+ dictWord{12, 10, 103},
+ dictWord{141, 10, 495},
+ dictWord{135, 10, 900},
+ dictWord{6, 0, 1410},
+ dictWord{9, 11, 316},
+ dictWord{139, 11, 256},
+ dictWord{4, 0, 995},
+ dictWord{135, 0, 1033},
+ dictWord{132, 0, 578},
+ dictWord{10, 0, 881},
+ dictWord{12, 0, 740},
+ dictWord{12, 0, 743},
+ dictWord{140, 0, 759},
+ dictWord{132, 0, 822},
+ dictWord{133, 0, 923},
+ dictWord{142, 10, 143},
+ dictWord{135, 11, 1696},
+ dictWord{6, 11, 363},
+ dictWord{7, 11, 1955},
+ dictWord{136, 11, 725},
+ dictWord{132, 0, 924},
+ dictWord{133, 0, 665},
+ dictWord{135, 10, 2029},
+ dictWord{135, 0, 1901},
+ dictWord{4, 0, 265},
+ dictWord{6, 0, 1092},
+ dictWord{6, 0, 1417},
+ dictWord{7, 0, 807},
+ dictWord{135, 0, 950},
+ dictWord{5, 0, 93},
+ dictWord{12, 0, 267},
+ dictWord{141, 0, 498},
+ dictWord{135, 0, 1451},
+ dictWord{5, 11, 813},
+ dictWord{135, 11, 2046},
+ dictWord{5, 10, 625},
+ dictWord{135, 10, 1617},
+ dictWord{135, 0, 747},
+ dictWord{6, 0, 788},
+ dictWord{137, 0, 828},
+ dictWord{7, 0, 184},
+ dictWord{11, 0, 307},
+ dictWord{11, 0, 400},
+ dictWord{15, 0, 130},
+ dictWord{5, 11, 712},
+ dictWord{7, 11, 1855},
+ dictWord{8, 10, 425},
+ dictWord{8, 10, 693},
+ dictWord{9, 10, 720},
+ dictWord{10, 10, 380},
+ dictWord{10, 10, 638},
+ dictWord{11, 11, 17},
+ dictWord{11, 10, 473},
+ dictWord{12, 10, 61},
+ dictWord{13, 11, 321},
+ dictWord{144, 11, 67},
+ dictWord{135, 0, 198},
+ dictWord{6, 11, 320},
+ dictWord{7, 11, 781},
+ dictWord{7, 11, 1921},
+ dictWord{9, 11, 55},
+ dictWord{10, 11, 186},
+ dictWord{10, 11, 273},
+ dictWord{10, 11, 664},
+ dictWord{10, 11, 801},
+ dictWord{11, 11, 996},
+ dictWord{11, 11, 997},
+ dictWord{13, 11, 157},
+ dictWord{142, 11, 170},
+ dictWord{136, 11, 271},
+ dictWord{135, 0, 994},
+ dictWord{7, 11, 103},
+ dictWord{7, 11, 863},
+ dictWord{11, 11, 184},
+ dictWord{14, 11, 299},
+ dictWord{145, 11, 62},
+ dictWord{11, 10, 551},
+ dictWord{142, 10, 159},
+ dictWord{5, 0, 233},
+ dictWord{5, 0, 320},
+ dictWord{6, 0, 140},
+ dictWord{8, 0, 295},
+ dictWord{8, 0, 615},
+ dictWord{136, 11, 615},
+ dictWord{133, 0, 978},
+ dictWord{4, 0, 905},
+ dictWord{6, 0, 1701},
+ dictWord{137, 0, 843},
+ dictWord{132, 10, 168},
+ dictWord{4, 0, 974},
+ dictWord{8, 0, 850},
+ dictWord{12, 0, 709},
+ dictWord{12, 0, 768},
+ dictWord{140, 0, 786},
+ dictWord{135, 10, 91},
+ dictWord{152, 0, 6},
+ dictWord{138, 10, 532},
+ dictWord{135, 10, 1884},
+ dictWord{132, 0, 509},
+ dictWord{6, 0, 1307},
+ dictWord{135, 0, 273},
+ dictWord{5, 11, 77},
+ dictWord{7, 11, 1455},
+ dictWord{10, 11, 843},
+ dictWord{19, 11, 73},
+ dictWord{150, 11, 5},
+ dictWord{132, 11, 458},
+ dictWord{135, 11, 1420},
+ dictWord{6, 11, 109},
+ dictWord{138, 11, 382},
+ dictWord{6, 0, 201},
+ dictWord{6, 11, 330},
+ dictWord{7, 10, 70},
+ dictWord{7, 11, 1084},
+ dictWord{10, 10, 240},
+ dictWord{11, 11, 142},
+ dictWord{147, 10, 93},
+ dictWord{7, 0, 1041},
+ dictWord{140, 11, 328},
+ dictWord{133, 11, 354},
+ dictWord{134, 0, 1040},
+ dictWord{133, 0, 693},
+ dictWord{134, 0, 774},
+ dictWord{139, 0, 234},
+ dictWord{132, 0, 336},
+ dictWord{7, 0, 1399},
+ dictWord{139, 10, 392},
+ dictWord{20, 0, 22},
+ dictWord{148, 11, 22},
+ dictWord{5, 0, 802},
+ dictWord{7, 0, 2021},
+ dictWord{136, 0, 805},
+ dictWord{5, 0, 167},
+ dictWord{5, 0, 899},
+ dictWord{6, 0, 410},
+ dictWord{137, 0, 777},
+ dictWord{137, 0, 789},
+ dictWord{134, 0, 1705},
+ dictWord{7, 10, 655},
+ dictWord{135, 10, 1844},
+ dictWord{4, 10, 145},
+ dictWord{6, 10, 176},
+ dictWord{7, 10, 395},
+ dictWord{137, 10, 562},
+ dictWord{132, 10, 501},
+ dictWord{135, 0, 10},
+ dictWord{5, 0, 11},
+ dictWord{6, 0, 117},
+ dictWord{6, 0, 485},
+ dictWord{7, 0, 1133},
+ dictWord{9, 0, 582},
+ dictWord{9, 0, 594},
+ dictWord{10, 0, 82},
+ dictWord{11, 0, 21},
+ dictWord{11, 0, 818},
+ dictWord{12, 0, 535},
+ dictWord{13, 0, 86},
+ dictWord{20, 0, 91},
+ dictWord{23, 0, 13},
+ dictWord{134, 10, 509},
+ dictWord{4, 0, 264},
+ dictWord{7, 0, 1067},
+ dictWord{8, 0, 204},
+ dictWord{8, 0, 385},
+ dictWord{139, 0, 953},
+ dictWord{139, 11, 737},
+ dictWord{138, 0, 56},
+ dictWord{134, 0, 1917},
+ dictWord{133, 0, 470},
+ dictWord{10, 11, 657},
+ dictWord{14, 11, 297},
+ dictWord{142, 11, 361},
+ dictWord{135, 11, 412},
+ dictWord{7, 0, 1198},
+ dictWord{7, 11, 1198},
+ dictWord{8, 11, 556},
+ dictWord{14, 11, 123},
+ dictWord{14, 11, 192},
+ dictWord{143, 11, 27},
+ dictWord{7, 11, 1985},
+ dictWord{14, 11, 146},
+ dictWord{15, 11, 42},
+ dictWord{16, 11, 23},
+ dictWord{17, 11, 86},
+ dictWord{146, 11, 17},
+ dictWord{11, 0, 1015},
+ dictWord{136, 11, 122},
+ dictWord{4, 10, 114},
+ dictWord{9, 10, 492},
+ dictWord{13, 10, 462},
+ dictWord{142, 10, 215},
+ dictWord{4, 10, 77},
+ dictWord{5, 10, 361},
+ dictWord{6, 10, 139},
+ dictWord{6, 10, 401},
+ dictWord{6, 10, 404},
+ dictWord{7, 10, 413},
+ dictWord{7, 10, 715},
+ dictWord{7, 10, 1716},
+ dictWord{11, 10, 279},
+ dictWord{12, 10, 179},
+ dictWord{12, 10, 258},
+ dictWord{13, 10, 244},
+ dictWord{142, 10, 358},
+ dictWord{134, 10, 1717},
+ dictWord{7, 10, 1061},
+ dictWord{8, 10, 82},
+ dictWord{11, 10, 250},
+ dictWord{12, 10, 420},
+ dictWord{141, 10, 184},
+ dictWord{133, 0, 715},
+ dictWord{135, 10, 724},
+ dictWord{9, 0, 919},
+ dictWord{9, 0, 922},
+ dictWord{9, 0, 927},
+ dictWord{9, 0, 933},
+ dictWord{9, 0, 962},
+ dictWord{9, 0, 1000},
+ dictWord{9, 0, 1002},
+ dictWord{9, 0, 1021},
+ dictWord{12, 0, 890},
+ dictWord{12, 0, 907},
+ dictWord{12, 0, 930},
+ dictWord{15, 0, 207},
+ dictWord{15, 0, 228},
+ dictWord{15, 0, 238},
+ dictWord{149, 0, 61},
+ dictWord{8, 0, 794},
+ dictWord{9, 0, 400},
+ dictWord{10, 0, 298},
+ dictWord{142, 0, 228},
+ dictWord{5, 11, 430},
+ dictWord{5, 11, 932},
+ dictWord{6, 11, 131},
+ dictWord{7, 11, 417},
+ dictWord{9, 11, 522},
+ dictWord{11, 11, 314},
+ dictWord{141, 11, 390},
+ dictWord{132, 0, 867},
+ dictWord{8, 0, 724},
+ dictWord{132, 11, 507},
+ dictWord{137, 11, 261},
+ dictWord{4, 11, 343},
+ dictWord{133, 11, 511},
+ dictWord{6, 0, 190},
+ dictWord{7, 0, 768},
+ dictWord{135, 0, 1170},
+ dictWord{6, 10, 513},
+ dictWord{135, 10, 1052},
+ dictWord{7, 11, 455},
+ dictWord{138, 11, 591},
+ dictWord{134, 0, 1066},
+ dictWord{137, 10, 899},
+ dictWord{14, 0, 67},
+ dictWord{147, 0, 60},
+ dictWord{4, 0, 948},
+ dictWord{18, 0, 174},
+ dictWord{146, 0, 176},
+ dictWord{135, 0, 1023},
+ dictWord{7, 10, 1417},
+ dictWord{12, 10, 382},
+ dictWord{17, 10, 48},
+ dictWord{152, 10, 12},
+ dictWord{134, 11, 575},
+ dictWord{132, 0, 764},
+ dictWord{6, 10, 545},
+ dictWord{7, 10, 565},
+ dictWord{7, 10, 1669},
+ dictWord{10, 10, 114},
+ dictWord{11, 10, 642},
+ dictWord{140, 10, 618},
+ dictWord{6, 0, 137},
+ dictWord{9, 0, 75},
+ dictWord{9, 0, 253},
+ dictWord{10, 0, 194},
+ dictWord{138, 0, 444},
+ dictWord{4, 0, 756},
+ dictWord{133, 10, 5},
+ dictWord{8, 0, 1008},
+ dictWord{135, 10, 192},
+ dictWord{132, 0, 842},
+ dictWord{11, 0, 643},
+ dictWord{12, 0, 115},
+ dictWord{136, 10, 763},
+ dictWord{139, 0, 67},
+ dictWord{133, 10, 759},
+ dictWord{4, 0, 821},
+ dictWord{5, 0, 760},
+ dictWord{7, 0, 542},
+ dictWord{8, 0, 135},
+ dictWord{8, 0, 496},
+ dictWord{135, 11, 580},
+ dictWord{7, 10, 370},
+ dictWord{7, 10, 1007},
+ dictWord{7, 10, 1177},
+ dictWord{135, 10, 1565},
+ dictWord{135, 10, 1237},
+ dictWord{140, 0, 736},
+ dictWord{7, 0, 319},
+ dictWord{7, 0, 355},
+ dictWord{7, 0, 763},
+ dictWord{10, 0, 389},
+ dictWord{145, 0, 43},
+ dictWord{8, 11, 333},
+ dictWord{138, 11, 182},
+ dictWord{4, 10, 87},
+ dictWord{5, 10, 250},
+ dictWord{141, 10, 298},
+ dictWord{138, 0, 786},
+ dictWord{134, 0, 2044},
+ dictWord{8, 11, 330},
+ dictWord{140, 11, 477},
+ dictWord{135, 11, 1338},
+ dictWord{132, 11, 125},
+ dictWord{134, 0, 1030},
+ dictWord{134, 0, 1083},
+ dictWord{132, 11, 721},
+ dictWord{135, 10, 814},
+ dictWord{7, 11, 776},
+ dictWord{8, 11, 145},
+ dictWord{147, 11, 56},
+ dictWord{134, 0, 1226},
+ dictWord{4, 10, 57},
+ dictWord{7, 10, 1195},
+ dictWord{7, 10, 1438},
+ dictWord{7, 10, 1548},
+ dictWord{7, 10, 1835},
+ dictWord{7, 10, 1904},
+ dictWord{9, 10, 757},
+ dictWord{10, 10, 604},
+ dictWord{139, 10, 519},
+ dictWord{7, 11, 792},
+ dictWord{8, 11, 147},
+ dictWord{10, 11, 821},
+ dictWord{139, 11, 1021},
+ dictWord{137, 11, 797},
+ dictWord{4, 0, 58},
+ dictWord{5, 0, 286},
+ dictWord{6, 0, 319},
+ dictWord{7, 0, 402},
+ dictWord{7, 0, 1254},
+ dictWord{7, 0, 1903},
+ dictWord{8, 0, 356},
+ dictWord{140, 0, 408},
+ dictWord{4, 0, 389},
+ dictWord{4, 0, 815},
+ dictWord{9, 0, 181},
+ dictWord{9, 0, 255},
+ dictWord{10, 0, 8},
+ dictWord{10, 0, 29},
+ dictWord{10, 0, 816},
+ dictWord{11, 0, 311},
+ dictWord{11, 0, 561},
+ dictWord{12, 0, 67},
+ dictWord{141, 0, 181},
+ dictWord{7, 11, 1472},
+ dictWord{135, 11, 1554},
+ dictWord{7, 11, 1071},
+ dictWord{7, 11, 1541},
+ dictWord{7, 11, 1767},
+ dictWord{7, 11, 1806},
+ dictWord{7, 11, 1999},
+ dictWord{9, 11, 248},
+ dictWord{10, 11, 400},
+ dictWord{11, 11, 162},
+ dictWord{11, 11, 178},
+ dictWord{11, 11, 242},
+ dictWord{12, 11, 605},
+ dictWord{15, 11, 26},
+ dictWord{144, 11, 44},
+ dictWord{5, 11, 168},
+ dictWord{5, 11, 930},
+ dictWord{8, 11, 74},
+ dictWord{9, 11, 623},
+ dictWord{12, 11, 500},
+ dictWord{12, 11, 579},
+ dictWord{13, 11, 41},
+ dictWord{143, 11, 93},
+ dictWord{6, 11, 220},
+ dictWord{7, 11, 1101},
+ dictWord{141, 11, 105},
+ dictWord{5, 0, 474},
+ dictWord{7, 0, 507},
+ dictWord{4, 10, 209},
+ dictWord{7, 11, 507},
+ dictWord{135, 10, 902},
+ dictWord{132, 0, 427},
+ dictWord{6, 0, 413},
+ dictWord{7, 10, 335},
+ dictWord{7, 10, 1437},
+ dictWord{7, 10, 1668},
+ dictWord{8, 10, 553},
+ dictWord{8, 10, 652},
+ dictWord{8, 10, 656},
+ dictWord{9, 10, 558},
+ dictWord{11, 10, 743},
+ dictWord{149, 10, 18},
+ dictWord{132, 0, 730},
+ dictWord{6, 11, 19},
+ dictWord{7, 11, 1413},
+ dictWord{139, 11, 428},
+ dictWord{133, 0, 373},
+ dictWord{132, 10, 559},
+ dictWord{7, 11, 96},
+ dictWord{8, 11, 401},
+ dictWord{137, 11, 896},
+ dictWord{7, 0, 799},
+ dictWord{7, 0, 1972},
+ dictWord{5, 10, 1017},
+ dictWord{138, 10, 511},
+ dictWord{135, 0, 1793},
+ dictWord{7, 11, 1961},
+ dictWord{7, 11, 1965},
+ dictWord{8, 11, 702},
+ dictWord{136, 11, 750},
+ dictWord{8, 11, 150},
+ dictWord{8, 11, 737},
+ dictWord{140, 11, 366},
+ dictWord{132, 0, 322},
+ dictWord{133, 10, 709},
+ dictWord{8, 11, 800},
+ dictWord{9, 11, 148},
+ dictWord{9, 11, 872},
+ dictWord{9, 11, 890},
+ dictWord{11, 11, 309},
+ dictWord{11, 11, 1001},
+ dictWord{13, 11, 267},
+ dictWord{141, 11, 323},
+ dictWord{134, 10, 1745},
+ dictWord{7, 0, 290},
+ dictWord{136, 10, 206},
+ dictWord{7, 0, 1651},
+ dictWord{145, 0, 89},
+ dictWord{139, 0, 2},
+ dictWord{132, 0, 672},
+ dictWord{6, 0, 1860},
+ dictWord{8, 0, 905},
+ dictWord{10, 0, 844},
+ dictWord{10, 0, 846},
+ dictWord{10, 0, 858},
+ dictWord{12, 0, 699},
+ dictWord{12, 0, 746},
+ dictWord{140, 0, 772},
+ dictWord{135, 11, 424},
+ dictWord{133, 11, 547},
+ dictWord{133, 0, 737},
+ dictWord{5, 11, 490},
+ dictWord{6, 11, 615},
+ dictWord{6, 11, 620},
+ dictWord{135, 11, 683},
+ dictWord{6, 0, 746},
+ dictWord{134, 0, 1612},
+ dictWord{132, 10, 776},
+ dictWord{9, 11, 385},
+ dictWord{149, 11, 17},
+ dictWord{133, 0, 145},
+ dictWord{135, 10, 1272},
+ dictWord{7, 0, 884},
+ dictWord{140, 0, 124},
+ dictWord{4, 0, 387},
+ dictWord{135, 0, 1288},
+ dictWord{5, 11, 133},
+ dictWord{136, 10, 406},
+ dictWord{136, 11, 187},
+ dictWord{6, 0, 679},
+ dictWord{8, 11, 8},
+ dictWord{138, 11, 0},
+ dictWord{135, 0, 550},
+ dictWord{135, 11, 798},
+ dictWord{136, 11, 685},
+ dictWord{7, 11, 1086},
+ dictWord{145, 11, 46},
+ dictWord{8, 10, 175},
+ dictWord{10, 10, 168},
+ dictWord{138, 10, 573},
+ dictWord{135, 0, 1305},
+ dictWord{4, 0, 576},
+ dictWord{135, 0, 1263},
+ dictWord{6, 0, 686},
+ dictWord{134, 0, 1563},
+ dictWord{134, 0, 607},
+ dictWord{5, 0, 919},
+ dictWord{134, 0, 1673},
+ dictWord{148, 0, 37},
+ dictWord{8, 11, 774},
+ dictWord{10, 11, 670},
+ dictWord{140, 11, 51},
+ dictWord{133, 10, 784},
+ dictWord{139, 10, 882},
+ dictWord{4, 0, 82},
+ dictWord{5, 0, 333},
+ dictWord{5, 0, 904},
+ dictWord{6, 0, 207},
+ dictWord{7, 0, 325},
+ dictWord{7, 0, 1726},
+ dictWord{8, 0, 101},
+ dictWord{10, 0, 778},
+ dictWord{139, 0, 220},
+ dictWord{135, 11, 371},
+ dictWord{132, 0, 958},
+ dictWord{133, 0, 903},
+ dictWord{4, 11, 127},
+ dictWord{5, 11, 350},
+ dictWord{6, 11, 356},
+ dictWord{8, 11, 426},
+ dictWord{9, 11, 572},
+ dictWord{10, 11, 247},
+ dictWord{139, 11, 312},
+ dictWord{140, 0, 147},
+ dictWord{6, 11, 59},
+ dictWord{7, 11, 885},
+ dictWord{9, 11, 603},
+ dictWord{141, 11, 397},
+ dictWord{10, 0, 367},
+ dictWord{9, 10, 14},
+ dictWord{9, 10, 441},
+ dictWord{139, 10, 9},
+ dictWord{11, 10, 966},
+ dictWord{12, 10, 287},
+ dictWord{13, 10, 342},
+ dictWord{13, 10, 402},
+ dictWord{15, 10, 110},
+ dictWord{143, 10, 163},
+ dictWord{134, 0, 690},
+ dictWord{132, 0, 705},
+ dictWord{9, 0, 651},
+ dictWord{11, 0, 971},
+ dictWord{13, 0, 273},
+ dictWord{7, 10, 1428},
+ dictWord{7, 10, 1640},
+ dictWord{7, 10, 1867},
+ dictWord{9, 10, 169},
+ dictWord{9, 10, 182},
+ dictWord{9, 10, 367},
+ dictWord{9, 10, 478},
+ dictWord{9, 10, 506},
+ dictWord{9, 10, 551},
+ dictWord{9, 10, 557},
+ dictWord{9, 10, 648},
+ dictWord{9, 10, 697},
+ dictWord{9, 10, 705},
+ dictWord{9, 10, 725},
+ dictWord{9, 10, 787},
+ dictWord{9, 10, 794},
+ dictWord{10, 10, 198},
+ dictWord{10, 10, 214},
+ dictWord{10, 10, 267},
+ dictWord{10, 10, 275},
+ dictWord{10, 10, 456},
+ dictWord{10, 10, 551},
+ dictWord{10, 10, 561},
+ dictWord{10, 10, 613},
+ dictWord{10, 10, 627},
+ dictWord{10, 10, 668},
+ dictWord{10, 10, 675},
+ dictWord{10, 10, 691},
+ dictWord{10, 10, 695},
+ dictWord{10, 10, 707},
+ dictWord{10, 10, 715},
+ dictWord{11, 10, 183},
+ dictWord{11, 10, 201},
+ dictWord{11, 10, 262},
+ dictWord{11, 10, 352},
+ dictWord{11, 10, 439},
+ dictWord{11, 10, 493},
+ dictWord{11, 10, 572},
+ dictWord{11, 10, 591},
+ dictWord{11, 10, 608},
+ dictWord{11, 10, 611},
+ dictWord{11, 10, 646},
+ dictWord{11, 10, 674},
+ dictWord{11, 10, 711},
+ dictWord{11, 10, 751},
+ dictWord{11, 10, 761},
+ dictWord{11, 10, 776},
+ dictWord{11, 10, 785},
+ dictWord{11, 10, 850},
+ dictWord{11, 10, 853},
+ dictWord{11, 10, 862},
+ dictWord{11, 10, 865},
+ dictWord{11, 10, 868},
+ dictWord{11, 10, 875},
+ dictWord{11, 10, 898},
+ dictWord{11, 10, 902},
+ dictWord{11, 10, 903},
+ dictWord{11, 10, 910},
+ dictWord{11, 10, 932},
+ dictWord{11, 10, 942},
+ dictWord{11, 10, 957},
+ dictWord{11, 10, 967},
+ dictWord{11, 10, 972},
+ dictWord{12, 10, 148},
+ dictWord{12, 10, 195},
+ dictWord{12, 10, 220},
+ dictWord{12, 10, 237},
+ dictWord{12, 10, 318},
+ dictWord{12, 10, 339},
+ dictWord{12, 10, 393},
+ dictWord{12, 10, 445},
+ dictWord{12, 10, 450},
+ dictWord{12, 10, 474},
+ dictWord{12, 10, 505},
+ dictWord{12, 10, 509},
+ dictWord{12, 10, 533},
+ dictWord{12, 10, 591},
+ dictWord{12, 10, 594},
+ dictWord{12, 10, 597},
+ dictWord{12, 10, 621},
+ dictWord{12, 10, 633},
+ dictWord{12, 10, 642},
+ dictWord{13, 10, 59},
+ dictWord{13, 10, 60},
+ dictWord{13, 10, 145},
+ dictWord{13, 10, 239},
+ dictWord{13, 10, 250},
+ dictWord{13, 10, 329},
+ dictWord{13, 10, 344},
+ dictWord{13, 10, 365},
+ dictWord{13, 10, 372},
+ dictWord{13, 10, 387},
+ dictWord{13, 10, 403},
+ dictWord{13, 10, 414},
+ dictWord{13, 10, 456},
+ dictWord{13, 10, 470},
+ dictWord{13, 10, 478},
+ dictWord{13, 10, 483},
+ dictWord{13, 10, 489},
+ dictWord{14, 10, 55},
+ dictWord{14, 10, 57},
+ dictWord{14, 10, 81},
+ dictWord{14, 10, 90},
+ dictWord{14, 10, 148},
+ dictWord{14, 10, 239},
+ dictWord{14, 10, 266},
+ dictWord{14, 10, 321},
+ dictWord{14, 10, 326},
+ dictWord{14, 10, 327},
+ dictWord{14, 10, 330},
+ dictWord{14, 10, 347},
+ dictWord{14, 10, 355},
+ dictWord{14, 10, 401},
+ dictWord{14, 10, 404},
+ dictWord{14, 10, 411},
+ dictWord{14, 10, 414},
+ dictWord{14, 10, 416},
+ dictWord{14, 10, 420},
+ dictWord{15, 10, 61},
+ dictWord{15, 10, 74},
+ dictWord{15, 10, 87},
+ dictWord{15, 10, 88},
+ dictWord{15, 10, 94},
+ dictWord{15, 10, 96},
+ dictWord{15, 10, 116},
+ dictWord{15, 10, 149},
+ dictWord{15, 10, 154},
+ dictWord{16, 10, 50},
+ dictWord{16, 10, 63},
+ dictWord{16, 10, 73},
+ dictWord{17, 10, 2},
+ dictWord{17, 10, 66},
+ dictWord{17, 10, 92},
+ dictWord{17, 10, 103},
+ dictWord{17, 10, 112},
+ dictWord{17, 10, 120},
+ dictWord{18, 10, 50},
+ dictWord{18, 10, 54},
+ dictWord{18, 10, 82},
+ dictWord{18, 10, 86},
+ dictWord{18, 10, 90},
+ dictWord{18, 10, 111},
+ dictWord{18, 10, 115},
+ dictWord{18, 10, 156},
+ dictWord{19, 10, 40},
+ dictWord{19, 10, 79},
+ dictWord{20, 10, 78},
+ dictWord{149, 10, 22},
+ dictWord{7, 0, 887},
+ dictWord{5, 10, 161},
+ dictWord{135, 10, 839},
+ dictWord{142, 11, 98},
+ dictWord{134, 0, 90},
+ dictWord{138, 11, 356},
+ dictWord{135, 11, 441},
+ dictWord{6, 11, 111},
+ dictWord{7, 11, 4},
+ dictWord{8, 11, 163},
+ dictWord{8, 11, 776},
+ dictWord{138, 11, 566},
+ dictWord{134, 0, 908},
+ dictWord{134, 0, 1261},
+ dictWord{7, 0, 813},
+ dictWord{12, 0, 497},
+ dictWord{141, 0, 56},
+ dictWord{134, 0, 1235},
+ dictWord{135, 0, 429},
+ dictWord{135, 11, 1994},
+ dictWord{138, 0, 904},
+ dictWord{6, 0, 125},
+ dictWord{7, 0, 1277},
+ dictWord{137, 0, 772},
+ dictWord{151, 0, 12},
+ dictWord{4, 0, 841},
+ dictWord{5, 0, 386},
+ dictWord{133, 11, 386},
+ dictWord{5, 11, 297},
+ dictWord{135, 11, 1038},
+ dictWord{6, 0, 860},
+ dictWord{6, 0, 1069},
+ dictWord{135, 11, 309},
+ dictWord{136, 0, 946},
+ dictWord{135, 10, 1814},
+ dictWord{141, 11, 418},
+ dictWord{136, 11, 363},
+ dictWord{10, 0, 768},
+ dictWord{139, 0, 787},
+ dictWord{22, 11, 30},
+ dictWord{150, 11, 33},
+ dictWord{6, 0, 160},
+ dictWord{7, 0, 1106},
+ dictWord{9, 0, 770},
+ dictWord{11, 0, 112},
+ dictWord{140, 0, 413},
+ dictWord{11, 11, 216},
+ dictWord{139, 11, 340},
+ dictWord{136, 10, 139},
+ dictWord{135, 11, 1390},
+ dictWord{135, 11, 808},
+ dictWord{132, 11, 280},
+ dictWord{12, 0, 271},
+ dictWord{17, 0, 109},
+ dictWord{7, 10, 643},
+ dictWord{136, 10, 236},
+ dictWord{140, 11, 54},
+ dictWord{4, 11, 421},
+ dictWord{133, 11, 548},
+ dictWord{11, 0, 719},
+ dictWord{12, 0, 36},
+ dictWord{141, 0, 337},
+ dictWord{7, 0, 581},
+ dictWord{9, 0, 644},
+ dictWord{137, 0, 699},
+ dictWord{11, 11, 511},
+ dictWord{13, 11, 394},
+ dictWord{14, 11, 298},
+ dictWord{14, 11, 318},
+ dictWord{146, 11, 103},
+ dictWord{7, 0, 304},
+ dictWord{9, 0, 646},
+ dictWord{9, 0, 862},
+ dictWord{11, 0, 696},
+ dictWord{12, 0, 208},
+ dictWord{15, 0, 79},
+ dictWord{147, 0, 108},
+ dictWord{4, 0, 631},
+ dictWord{7, 0, 1126},
+ dictWord{135, 0, 1536},
+ dictWord{135, 11, 1527},
+ dictWord{8, 0, 880},
+ dictWord{10, 0, 869},
+ dictWord{138, 0, 913},
+ dictWord{7, 0, 1513},
+ dictWord{5, 10, 54},
+ dictWord{6, 11, 254},
+ dictWord{9, 11, 109},
+ dictWord{138, 11, 103},
+ dictWord{135, 0, 981},
+ dictWord{133, 11, 729},
+ dictWord{132, 10, 744},
+ dictWord{132, 0, 434},
+ dictWord{134, 0, 550},
+ dictWord{7, 0, 930},
+ dictWord{10, 0, 476},
+ dictWord{13, 0, 452},
+ dictWord{19, 0, 104},
+ dictWord{6, 11, 1630},
+ dictWord{10, 10, 402},
+ dictWord{146, 10, 55},
+ dictWord{5, 0, 553},
+ dictWord{138, 0, 824},
+ dictWord{136, 0, 452},
+ dictWord{8, 0, 151},
+ dictWord{137, 10, 624},
+ dictWord{132, 10, 572},
+ dictWord{132, 0, 772},
+ dictWord{133, 11, 671},
+ dictWord{133, 0, 292},
+ dictWord{138, 0, 135},
+ dictWord{132, 11, 889},
+ dictWord{140, 11, 207},
+ dictWord{9, 0, 504},
+ dictWord{6, 10, 43},
+ dictWord{7, 10, 38},
+ dictWord{8, 10, 248},
+ dictWord{138, 10, 513},
+ dictWord{6, 0, 1089},
+ dictWord{135, 11, 1910},
+ dictWord{4, 11, 627},
+ dictWord{133, 11, 775},
+ dictWord{135, 0, 783},
+ dictWord{133, 10, 766},
+ dictWord{133, 10, 363},
+ dictWord{7, 0, 387},
+ dictWord{135, 11, 387},
+ dictWord{7, 0, 393},
+ dictWord{10, 0, 603},
+ dictWord{11, 0, 206},
+ dictWord{7, 11, 202},
+ dictWord{11, 11, 362},
+ dictWord{11, 11, 948},
+ dictWord{140, 11, 388},
+ dictWord{6, 11, 507},
+ dictWord{7, 11, 451},
+ dictWord{8, 11, 389},
+ dictWord{12, 11, 490},
+ dictWord{13, 11, 16},
+ dictWord{13, 11, 215},
+ dictWord{13, 11, 351},
+ dictWord{18, 11, 132},
+ dictWord{147, 11, 125},
+ dictWord{4, 0, 912},
+ dictWord{9, 0, 232},
+ dictWord{135, 11, 841},
+ dictWord{6, 10, 258},
+ dictWord{140, 10, 409},
+ dictWord{5, 10, 249},
+ dictWord{148, 10, 82},
+ dictWord{136, 11, 566},
+ dictWord{6, 0, 977},
+ dictWord{135, 11, 1214},
+ dictWord{7, 0, 1973},
+ dictWord{136, 0, 716},
+ dictWord{135, 0, 98},
+ dictWord{133, 0, 733},
+ dictWord{5, 11, 912},
+ dictWord{134, 11, 1695},
+ dictWord{5, 10, 393},
+ dictWord{6, 10, 378},
+ dictWord{7, 10, 1981},
+ dictWord{9, 10, 32},
+ dictWord{9, 10, 591},
+ dictWord{10, 10, 685},
+ dictWord{10, 10, 741},
+ dictWord{142, 10, 382},
+ dictWord{133, 10, 788},
+ dictWord{10, 0, 19},
+ dictWord{11, 0, 911},
+ dictWord{7, 10, 1968},
+ dictWord{141, 10, 509},
+ dictWord{5, 0, 668},
+ dictWord{5, 11, 236},
+ dictWord{6, 11, 572},
+ dictWord{8, 11, 492},
+ dictWord{11, 11, 618},
+ dictWord{144, 11, 56},
+ dictWord{135, 11, 1789},
+ dictWord{4, 0, 360},
+ dictWord{5, 0, 635},
+ dictWord{5, 0, 700},
+ dictWord{5, 10, 58},
+ dictWord{5, 10, 171},
+ dictWord{5, 10, 683},
+ dictWord{6, 10, 291},
+ dictWord{6, 10, 566},
+ dictWord{7, 10, 1650},
+ dictWord{11, 10, 523},
+ dictWord{12, 10, 273},
+ dictWord{12, 10, 303},
+ dictWord{15, 10, 39},
+ dictWord{143, 10, 111},
+ dictWord{133, 0, 901},
+ dictWord{134, 10, 589},
+ dictWord{5, 11, 190},
+ dictWord{136, 11, 318},
+ dictWord{140, 0, 656},
+ dictWord{7, 0, 726},
+ dictWord{152, 0, 9},
+ dictWord{4, 10, 917},
+ dictWord{133, 10, 1005},
+ dictWord{135, 10, 1598},
+ dictWord{134, 11, 491},
+ dictWord{4, 10, 919},
+ dictWord{133, 11, 434},
+ dictWord{137, 0, 72},
+ dictWord{6, 0, 1269},
+ dictWord{6, 0, 1566},
+ dictWord{134, 0, 1621},
+ dictWord{9, 0, 463},
+ dictWord{10, 0, 595},
+ dictWord{4, 10, 255},
+ dictWord{5, 10, 302},
+ dictWord{6, 10, 132},
+ dictWord{7, 10, 128},
+ dictWord{7, 10, 283},
+ dictWord{7, 10, 1299},
+ dictWord{10, 10, 52},
+ dictWord{10, 10, 514},
+ dictWord{11, 10, 925},
+ dictWord{13, 10, 92},
+ dictWord{142, 10, 309},
+ dictWord{135, 0, 1454},
+ dictWord{134, 0, 1287},
+ dictWord{11, 0, 600},
+ dictWord{13, 0, 245},
+ dictWord{137, 10, 173},
+ dictWord{136, 0, 989},
+ dictWord{7, 0, 164},
+ dictWord{7, 0, 1571},
+ dictWord{9, 0, 107},
+ dictWord{140, 0, 225},
+ dictWord{6, 0, 1061},
+ dictWord{141, 10, 442},
+ dictWord{4, 0, 27},
+ dictWord{5, 0, 484},
+ dictWord{5, 0, 510},
+ dictWord{6, 0, 434},
+ dictWord{7, 0, 1000},
+ dictWord{7, 0, 1098},
+ dictWord{136, 0, 2},
+ dictWord{7, 11, 85},
+ dictWord{7, 11, 247},
+ dictWord{8, 11, 585},
+ dictWord{10, 11, 163},
+ dictWord{138, 11, 316},
+ dictWord{11, 11, 103},
+ dictWord{142, 11, 0},
+ dictWord{134, 0, 1127},
+ dictWord{4, 0, 460},
+ dictWord{134, 0, 852},
+ dictWord{134, 10, 210},
+ dictWord{4, 0, 932},
+ dictWord{133, 0, 891},
+ dictWord{6, 0, 588},
+ dictWord{147, 11, 83},
+ dictWord{8, 0, 625},
+ dictWord{4, 10, 284},
+ dictWord{134, 10, 223},
+ dictWord{134, 0, 76},
+ dictWord{8, 0, 92},
+ dictWord{137, 0, 221},
+ dictWord{4, 11, 124},
+ dictWord{10, 11, 457},
+ dictWord{11, 11, 121},
+ dictWord{11, 11, 169},
+ dictWord{11, 11, 422},
+ dictWord{11, 11, 870},
+ dictWord{12, 11, 214},
+ dictWord{13, 11, 389},
+ dictWord{14, 11, 187},
+ dictWord{143, 11, 77},
+ dictWord{9, 11, 618},
+ dictWord{138, 11, 482},
+ dictWord{4, 10, 218},
+ dictWord{7, 10, 526},
+ dictWord{143, 10, 137},
+ dictWord{13, 0, 9},
+ dictWord{14, 0, 104},
+ dictWord{14, 0, 311},
+ dictWord{4, 10, 270},
+ dictWord{5, 10, 192},
+ dictWord{6, 10, 332},
+ dictWord{135, 10, 1322},
+ dictWord{140, 10, 661},
+ dictWord{135, 11, 1193},
+ dictWord{6, 11, 107},
+ dictWord{7, 11, 638},
+ dictWord{7, 11, 1632},
+ dictWord{137, 11, 396},
+ dictWord{132, 0, 763},
+ dictWord{4, 0, 622},
+ dictWord{5, 11, 370},
+ dictWord{134, 11, 1756},
+ dictWord{133, 0, 253},
+ dictWord{135, 0, 546},
+ dictWord{9, 0, 73},
+ dictWord{10, 0, 110},
+ dictWord{14, 0, 185},
+ dictWord{17, 0, 119},
+ dictWord{133, 11, 204},
+ dictWord{7, 0, 624},
+ dictWord{7, 0, 916},
+ dictWord{10, 0, 256},
+ dictWord{139, 0, 87},
+ dictWord{7, 10, 379},
+ dictWord{8, 10, 481},
+ dictWord{137, 10, 377},
+ dictWord{5, 0, 212},
+ dictWord{12, 0, 35},
+ dictWord{13, 0, 382},
+ dictWord{5, 11, 970},
+ dictWord{134, 11, 1706},
+ dictWord{9, 0, 746},
+ dictWord{5, 10, 1003},
+ dictWord{134, 10, 149},
+ dictWord{10, 0, 150},
+ dictWord{11, 0, 849},
+ dictWord{13, 0, 330},
+ dictWord{8, 10, 262},
+ dictWord{9, 10, 627},
+ dictWord{11, 10, 214},
+ dictWord{11, 10, 404},
+ dictWord{11, 10, 457},
+ dictWord{11, 10, 780},
+ dictWord{11, 10, 913},
+ dictWord{13, 10, 401},
+ dictWord{142, 10, 200},
+ dictWord{134, 0, 1466},
+ dictWord{135, 11, 3},
+ dictWord{6, 0, 1299},
+ dictWord{4, 11, 35},
+ dictWord{5, 11, 121},
+ dictWord{5, 11, 483},
+ dictWord{5, 11, 685},
+ dictWord{6, 11, 489},
+ dictWord{7, 11, 1204},
+ dictWord{136, 11, 394},
+ dictWord{135, 10, 742},
+ dictWord{4, 10, 142},
+ dictWord{136, 10, 304},
+ dictWord{4, 11, 921},
+ dictWord{133, 11, 1007},
+ dictWord{134, 0, 1518},
+ dictWord{6, 0, 1229},
+ dictWord{135, 0, 1175},
+ dictWord{133, 0, 816},
+ dictWord{12, 0, 159},
+ dictWord{4, 10, 471},
+ dictWord{4, 11, 712},
+ dictWord{5, 10, 51},
+ dictWord{6, 10, 602},
+ dictWord{7, 10, 925},
+ dictWord{8, 10, 484},
+ dictWord{138, 10, 195},
+ dictWord{134, 11, 1629},
+ dictWord{5, 0, 869},
+ dictWord{5, 0, 968},
+ dictWord{6, 0, 1626},
+ dictWord{8, 0, 734},
+ dictWord{136, 0, 784},
+ dictWord{4, 0, 542},
+ dictWord{6, 0, 1716},
+ dictWord{6, 0, 1727},
+ dictWord{7, 0, 1082},
+ dictWord{7, 0, 1545},
+ dictWord{8, 0, 56},
+ dictWord{8, 0, 118},
+ dictWord{8, 0, 412},
+ dictWord{8, 0, 564},
+ dictWord{9, 0, 888},
+ dictWord{9, 0, 908},
+ dictWord{10, 0, 50},
+ dictWord{10, 0, 423},
+ dictWord{11, 0, 685},
+ dictWord{11, 0, 697},
+ dictWord{11, 0, 933},
+ dictWord{12, 0, 299},
+ dictWord{13, 0, 126},
+ dictWord{13, 0, 136},
+ dictWord{13, 0, 170},
+ dictWord{13, 0, 190},
+ dictWord{136, 10, 688},
+ dictWord{132, 10, 697},
+ dictWord{4, 0, 232},
+ dictWord{9, 0, 202},
+ dictWord{10, 0, 474},
+ dictWord{140, 0, 433},
+ dictWord{136, 0, 212},
+ dictWord{6, 0, 108},
+ dictWord{7, 0, 1003},
+ dictWord{7, 0, 1181},
+ dictWord{8, 0, 111},
+ dictWord{136, 0, 343},
+ dictWord{5, 10, 221},
+ dictWord{135, 11, 1255},
+ dictWord{133, 11, 485},
+ dictWord{134, 0, 1712},
+ dictWord{142, 0, 216},
+ dictWord{5, 0, 643},
+ dictWord{6, 0, 516},
+ dictWord{4, 11, 285},
+ dictWord{5, 11, 317},
+ dictWord{6, 11, 301},
+ dictWord{7, 11, 7},
+ dictWord{8, 11, 153},
+ dictWord{10, 11, 766},
+ dictWord{11, 11, 468},
+ dictWord{12, 11, 467},
+ dictWord{141, 11, 143},
+ dictWord{4, 0, 133},
+ dictWord{7, 0, 711},
+ dictWord{7, 0, 1298},
+ dictWord{135, 0, 1585},
+ dictWord{134, 0, 650},
+ dictWord{135, 11, 512},
+ dictWord{6, 0, 99},
+ dictWord{7, 0, 1808},
+ dictWord{145, 0, 57},
+ dictWord{6, 0, 246},
+ dictWord{6, 0, 574},
+ dictWord{7, 0, 428},
+ dictWord{9, 0, 793},
+ dictWord{10, 0, 669},
+ dictWord{11, 0, 485},
+ dictWord{11, 0, 840},
+ dictWord{12, 0, 300},
+ dictWord{14, 0, 250},
+ dictWord{145, 0, 55},
+ dictWord{4, 10, 132},
+ dictWord{5, 10, 69},
+ dictWord{135, 10, 1242},
+ dictWord{136, 0, 1023},
+ dictWord{7, 0, 302},
+ dictWord{132, 10, 111},
+ dictWord{135, 0, 1871},
+ dictWord{132, 0, 728},
+ dictWord{9, 0, 252},
+ dictWord{132, 10, 767},
+ dictWord{6, 0, 461},
+ dictWord{7, 0, 1590},
+ dictWord{7, 10, 1416},
+ dictWord{7, 10, 2005},
+ dictWord{8, 10, 131},
+ dictWord{8, 10, 466},
+ dictWord{9, 10, 672},
+ dictWord{13, 10, 252},
+ dictWord{148, 10, 103},
+ dictWord{6, 0, 323},
+ dictWord{135, 0, 1564},
+ dictWord{7, 0, 461},
+ dictWord{136, 0, 775},
+ dictWord{6, 10, 44},
+ dictWord{136, 10, 368},
+ dictWord{139, 0, 172},
+ dictWord{132, 0, 464},
+ dictWord{4, 10, 570},
+ dictWord{133, 10, 120},
+ dictWord{137, 11, 269},
+ dictWord{6, 10, 227},
+ dictWord{135, 10, 1589},
+ dictWord{6, 11, 1719},
+ dictWord{6, 11, 1735},
+ dictWord{7, 11, 2016},
+ dictWord{7, 11, 2020},
+ dictWord{8, 11, 837},
+ dictWord{137, 11, 852},
+ dictWord{7, 0, 727},
+ dictWord{146, 0, 73},
+ dictWord{132, 0, 1023},
+ dictWord{135, 11, 852},
+ dictWord{135, 10, 1529},
+ dictWord{136, 0, 577},
+ dictWord{138, 11, 568},
+ dictWord{134, 0, 1037},
+ dictWord{8, 11, 67},
+ dictWord{138, 11, 419},
+ dictWord{4, 0, 413},
+ dictWord{5, 0, 677},
+ dictWord{8, 0, 432},
+ dictWord{140, 0, 280},
+ dictWord{10, 0, 600},
+ dictWord{6, 10, 1667},
+ dictWord{7, 11, 967},
+ dictWord{7, 10, 2036},
+ dictWord{141, 11, 11},
+ dictWord{6, 10, 511},
+ dictWord{140, 10, 132},
+ dictWord{6, 0, 799},
+ dictWord{5, 10, 568},
+ dictWord{6, 10, 138},
+ dictWord{135, 10, 1293},
+ dictWord{8, 0, 159},
+ dictWord{4, 10, 565},
+ dictWord{136, 10, 827},
+ dictWord{7, 0, 646},
+ dictWord{7, 0, 1730},
+ dictWord{11, 0, 446},
+ dictWord{141, 0, 178},
+ dictWord{4, 10, 922},
+ dictWord{133, 10, 1023},
+ dictWord{135, 11, 11},
+ dictWord{132, 0, 395},
+ dictWord{11, 0, 145},
+ dictWord{135, 10, 1002},
+ dictWord{9, 0, 174},
+ dictWord{10, 0, 164},
+ dictWord{11, 0, 440},
+ dictWord{11, 0, 514},
+ dictWord{11, 0, 841},
+ dictWord{15, 0, 98},
+ dictWord{149, 0, 20},
+ dictWord{134, 0, 426},
+ dictWord{10, 0, 608},
+ dictWord{139, 0, 1002},
+ dictWord{7, 11, 320},
+ dictWord{8, 11, 51},
+ dictWord{12, 11, 481},
+ dictWord{12, 11, 570},
+ dictWord{148, 11, 106},
+ dictWord{9, 0, 977},
+ dictWord{9, 0, 983},
+ dictWord{132, 11, 445},
+ dictWord{138, 0, 250},
+ dictWord{139, 0, 100},
+ dictWord{6, 0, 1982},
+ dictWord{136, 10, 402},
+ dictWord{133, 11, 239},
+ dictWord{4, 10, 716},
+ dictWord{141, 10, 31},
+ dictWord{5, 0, 476},
+ dictWord{7, 11, 83},
+ dictWord{7, 11, 1990},
+ dictWord{8, 11, 130},
+ dictWord{139, 11, 720},
+ dictWord{8, 10, 691},
+ dictWord{136, 10, 731},
+ dictWord{5, 11, 123},
+ dictWord{6, 11, 530},
+ dictWord{7, 11, 348},
+ dictWord{135, 11, 1419},
+ dictWord{5, 0, 76},
+ dictWord{6, 0, 458},
+ dictWord{6, 0, 497},
+ dictWord{7, 0, 868},
+ dictWord{9, 0, 658},
+ dictWord{10, 0, 594},
+ dictWord{11, 0, 173},
+ dictWord{11, 0, 566},
+ dictWord{12, 0, 20},
+ dictWord{12, 0, 338},
+ dictWord{141, 0, 200},
+ dictWord{9, 11, 139},
+ dictWord{10, 11, 399},
+ dictWord{11, 11, 469},
+ dictWord{12, 11, 634},
+ dictWord{141, 11, 223},
+ dictWord{9, 10, 840},
+ dictWord{138, 10, 803},
+ dictWord{133, 10, 847},
+ dictWord{11, 11, 223},
+ dictWord{140, 11, 168},
+ dictWord{132, 11, 210},
+ dictWord{8, 0, 447},
+ dictWord{9, 10, 53},
+ dictWord{9, 10, 268},
+ dictWord{9, 10, 901},
+ dictWord{10, 10, 518},
+ dictWord{10, 10, 829},
+ dictWord{11, 10, 188},
+ dictWord{13, 10, 74},
+ dictWord{14, 10, 46},
+ dictWord{15, 10, 17},
+ dictWord{15, 10, 33},
+ dictWord{17, 10, 40},
+ dictWord{18, 10, 36},
+ dictWord{19, 10, 20},
+ dictWord{22, 10, 1},
+ dictWord{152, 10, 2},
+ dictWord{4, 0, 526},
+ dictWord{7, 0, 1029},
+ dictWord{135, 0, 1054},
+ dictWord{19, 11, 59},
+ dictWord{150, 11, 2},
+ dictWord{4, 0, 636},
+ dictWord{6, 0, 1875},
+ dictWord{6, 0, 1920},
+ dictWord{9, 0, 999},
+ dictWord{12, 0, 807},
+ dictWord{12, 0, 825},
+ dictWord{15, 0, 179},
+ dictWord{15, 0, 190},
+ dictWord{18, 0, 182},
+ dictWord{136, 10, 532},
+ dictWord{6, 0, 1699},
+ dictWord{7, 0, 660},
+ dictWord{7, 0, 1124},
+ dictWord{17, 0, 31},
+ dictWord{19, 0, 22},
+ dictWord{151, 0, 14},
+ dictWord{135, 10, 681},
+ dictWord{132, 11, 430},
+ dictWord{140, 10, 677},
+ dictWord{4, 10, 684},
+ dictWord{136, 10, 384},
+ dictWord{132, 11, 756},
+ dictWord{133, 11, 213},
+ dictWord{7, 0, 188},
+ dictWord{7, 10, 110},
+ dictWord{8, 10, 290},
+ dictWord{8, 10, 591},
+ dictWord{9, 10, 382},
+ dictWord{9, 10, 649},
+ dictWord{11, 10, 71},
+ dictWord{11, 10, 155},
+ dictWord{11, 10, 313},
+ dictWord{12, 10, 5},
+ dictWord{13, 10, 325},
+ dictWord{142, 10, 287},
+ dictWord{7, 10, 360},
+ dictWord{7, 10, 425},
+ dictWord{9, 10, 66},
+ dictWord{9, 10, 278},
+ dictWord{138, 10, 644},
+ dictWord{142, 11, 164},
+ dictWord{4, 0, 279},
+ dictWord{7, 0, 301},
+ dictWord{137, 0, 362},
+ dictWord{134, 11, 586},
+ dictWord{135, 0, 1743},
+ dictWord{4, 0, 178},
+ dictWord{133, 0, 399},
+ dictWord{4, 10, 900},
+ dictWord{133, 10, 861},
+ dictWord{5, 10, 254},
+ dictWord{7, 10, 985},
+ dictWord{136, 10, 73},
+ dictWord{133, 11, 108},
+ dictWord{7, 10, 1959},
+ dictWord{136, 10, 683},
+ dictWord{133, 11, 219},
+ dictWord{4, 11, 193},
+ dictWord{5, 11, 916},
+ dictWord{7, 11, 364},
+ dictWord{10, 11, 398},
+ dictWord{10, 11, 726},
+ dictWord{11, 11, 317},
+ dictWord{11, 11, 626},
+ dictWord{12, 11, 142},
+ dictWord{12, 11, 288},
+ dictWord{12, 11, 678},
+ dictWord{13, 11, 313},
+ dictWord{15, 11, 113},
+ dictWord{18, 11, 114},
+ dictWord{21, 11, 30},
+ dictWord{150, 11, 53},
+ dictWord{6, 11, 241},
+ dictWord{7, 11, 907},
+ dictWord{8, 11, 832},
+ dictWord{9, 11, 342},
+ dictWord{10, 11, 729},
+ dictWord{11, 11, 284},
+ dictWord{11, 11, 445},
+ dictWord{11, 11, 651},
+ dictWord{11, 11, 863},
+ dictWord{13, 11, 398},
+ dictWord{146, 11, 99},
+ dictWord{132, 0, 872},
+ dictWord{134, 0, 831},
+ dictWord{134, 0, 1692},
+ dictWord{6, 0, 202},
+ dictWord{6, 0, 1006},
+ dictWord{9, 0, 832},
+ dictWord{10, 0, 636},
+ dictWord{11, 0, 208},
+ dictWord{12, 0, 360},
+ dictWord{17, 0, 118},
+ dictWord{18, 0, 27},
+ dictWord{20, 0, 67},
+ dictWord{137, 11, 734},
+ dictWord{132, 10, 725},
+ dictWord{7, 11, 993},
+ dictWord{138, 11, 666},
+ dictWord{134, 0, 1954},
+ dictWord{134, 10, 196},
+ dictWord{7, 0, 872},
+ dictWord{10, 0, 516},
+ dictWord{139, 0, 167},
+ dictWord{133, 10, 831},
+ dictWord{4, 11, 562},
+ dictWord{9, 11, 254},
+ dictWord{139, 11, 879},
+ dictWord{137, 0, 313},
+ dictWord{4, 0, 224},
+ dictWord{132, 11, 786},
+ dictWord{11, 0, 24},
+ dictWord{12, 0, 170},
+ dictWord{136, 10, 723},
+ dictWord{5, 0, 546},
+ dictWord{7, 0, 35},
+ dictWord{8, 0, 11},
+ dictWord{8, 0, 12},
+ dictWord{9, 0, 315},
+ dictWord{9, 0, 533},
+ dictWord{10, 0, 802},
+ dictWord{11, 0, 166},
+ dictWord{12, 0, 525},
+ dictWord{142, 0, 243},
+ dictWord{7, 0, 1937},
+ dictWord{13, 10, 80},
+ dictWord{13, 10, 437},
+ dictWord{145, 10, 74},
+ dictWord{5, 0, 241},
+ dictWord{8, 0, 242},
+ dictWord{9, 0, 451},
+ dictWord{10, 0, 667},
+ dictWord{11, 0, 598},
+ dictWord{140, 0, 429},
+ dictWord{150, 0, 46},
+ dictWord{6, 0, 1273},
+ dictWord{137, 0, 830},
+ dictWord{5, 10, 848},
+ dictWord{6, 10, 66},
+ dictWord{136, 10, 764},
+ dictWord{6, 0, 825},
+ dictWord{134, 0, 993},
+ dictWord{4, 0, 1006},
+ dictWord{10, 0, 327},
+ dictWord{13, 0, 271},
+ dictWord{4, 10, 36},
+ dictWord{7, 10, 1387},
+ dictWord{139, 10, 755},
+ dictWord{134, 0, 1023},
+ dictWord{135, 0, 1580},
+ dictWord{4, 0, 366},
+ dictWord{137, 0, 516},
+ dictWord{132, 10, 887},
+ dictWord{6, 0, 1736},
+ dictWord{135, 0, 1891},
+ dictWord{6, 11, 216},
+ dictWord{7, 11, 901},
+ dictWord{7, 11, 1343},
+ dictWord{136, 11, 493},
+ dictWord{6, 10, 165},
+ dictWord{138, 10, 388},
+ dictWord{7, 11, 341},
+ dictWord{139, 11, 219},
+ dictWord{4, 10, 719},
+ dictWord{135, 10, 155},
+ dictWord{134, 0, 1935},
+ dictWord{132, 0, 826},
+ dictWord{6, 0, 331},
+ dictWord{6, 0, 1605},
+ dictWord{8, 0, 623},
+ dictWord{11, 0, 139},
+ dictWord{139, 0, 171},
+ dictWord{135, 11, 1734},
+ dictWord{10, 11, 115},
+ dictWord{11, 11, 420},
+ dictWord{12, 11, 154},
+ dictWord{13, 11, 404},
+ dictWord{14, 11, 346},
+ dictWord{15, 11, 54},
+ dictWord{143, 11, 112},
+ dictWord{7, 0, 288},
+ dictWord{4, 10, 353},
+ dictWord{6, 10, 146},
+ dictWord{6, 10, 1789},
+ dictWord{7, 10, 990},
+ dictWord{7, 10, 1348},
+ dictWord{9, 10, 665},
+ dictWord{9, 10, 898},
+ dictWord{11, 10, 893},
+ dictWord{142, 10, 212},
+ dictWord{6, 0, 916},
+ dictWord{134, 0, 1592},
+ dictWord{7, 0, 1888},
+ dictWord{4, 10, 45},
+ dictWord{135, 10, 1257},
+ dictWord{5, 11, 1011},
+ dictWord{136, 11, 701},
+ dictWord{139, 11, 596},
+ dictWord{4, 11, 54},
+ dictWord{5, 11, 666},
+ dictWord{7, 11, 1039},
+ dictWord{7, 11, 1130},
+ dictWord{9, 11, 195},
+ dictWord{138, 11, 302},
+ dictWord{134, 0, 1471},
+ dictWord{134, 0, 1570},
+ dictWord{132, 0, 394},
+ dictWord{140, 10, 65},
+ dictWord{136, 10, 816},
+ dictWord{135, 0, 1931},
+ dictWord{7, 0, 574},
+ dictWord{135, 0, 1719},
+ dictWord{134, 11, 467},
+ dictWord{132, 0, 658},
+ dictWord{9, 0, 781},
+ dictWord{10, 0, 144},
+ dictWord{11, 0, 385},
+ dictWord{13, 0, 161},
+ dictWord{13, 0, 228},
+ dictWord{13, 0, 268},
+ dictWord{20, 0, 107},
+ dictWord{134, 11, 1669},
+ dictWord{136, 0, 374},
+ dictWord{135, 0, 735},
+ dictWord{4, 0, 344},
+ dictWord{6, 0, 498},
+ dictWord{139, 0, 323},
+ dictWord{7, 0, 586},
+ dictWord{7, 0, 1063},
+ dictWord{6, 10, 559},
+ dictWord{134, 10, 1691},
+ dictWord{137, 0, 155},
+ dictWord{133, 0, 906},
+ dictWord{7, 11, 122},
+ dictWord{9, 11, 259},
+ dictWord{10, 11, 84},
+ dictWord{11, 11, 470},
+ dictWord{12, 11, 541},
+ dictWord{141, 11, 379},
+ dictWord{134, 0, 1139},
+ dictWord{10, 0, 108},
+ dictWord{139, 0, 116},
+ dictWord{134, 10, 456},
+ dictWord{133, 10, 925},
+ dictWord{5, 11, 82},
+ dictWord{5, 11, 131},
+ dictWord{7, 11, 1755},
+ dictWord{8, 11, 31},
+ dictWord{9, 11, 168},
+ dictWord{9, 11, 764},
+ dictWord{139, 11, 869},
+ dictWord{134, 11, 605},
+ dictWord{5, 11, 278},
+ dictWord{137, 11, 68},
+ dictWord{4, 11, 163},
+ dictWord{5, 11, 201},
+ dictWord{5, 11, 307},
+ dictWord{5, 11, 310},
+ dictWord{6, 11, 335},
+ dictWord{7, 11, 284},
+ dictWord{136, 11, 165},
+ dictWord{135, 11, 1660},
+ dictWord{6, 11, 33},
+ dictWord{135, 11, 1244},
+ dictWord{4, 0, 616},
+ dictWord{136, 11, 483},
+ dictWord{8, 0, 857},
+ dictWord{8, 0, 902},
+ dictWord{8, 0, 910},
+ dictWord{10, 0, 879},
+ dictWord{12, 0, 726},
+ dictWord{4, 11, 199},
+ dictWord{139, 11, 34},
+ dictWord{136, 0, 692},
+ dictWord{6, 10, 193},
+ dictWord{7, 10, 240},
+ dictWord{7, 10, 1682},
+ dictWord{10, 10, 51},
+ dictWord{10, 10, 640},
+ dictWord{11, 10, 410},
+ dictWord{13, 10, 82},
+ dictWord{14, 10, 247},
+ dictWord{14, 10, 331},
+ dictWord{142, 10, 377},
+ dictWord{6, 0, 823},
+ dictWord{134, 0, 983},
+ dictWord{139, 10, 411},
+ dictWord{132, 0, 305},
+ dictWord{136, 10, 633},
+ dictWord{138, 11, 203},
+ dictWord{134, 0, 681},
+ dictWord{6, 11, 326},
+ dictWord{7, 11, 677},
+ dictWord{137, 11, 425},
+ dictWord{5, 0, 214},
+ dictWord{7, 0, 603},
+ dictWord{8, 0, 611},
+ dictWord{9, 0, 686},
+ dictWord{10, 0, 88},
+ dictWord{11, 0, 459},
+ dictWord{11, 0, 496},
+ dictWord{12, 0, 463},
+ dictWord{12, 0, 590},
+ dictWord{141, 0, 0},
+ dictWord{136, 0, 1004},
+ dictWord{142, 0, 23},
+ dictWord{134, 0, 1703},
+ dictWord{147, 11, 8},
+ dictWord{145, 11, 56},
+ dictWord{135, 0, 1443},
+ dictWord{4, 10, 237},
+ dictWord{135, 10, 514},
+ dictWord{6, 0, 714},
+ dictWord{145, 0, 19},
+ dictWord{5, 11, 358},
+ dictWord{7, 11, 473},
+ dictWord{7, 11, 1184},
+ dictWord{10, 11, 662},
+ dictWord{13, 11, 212},
+ dictWord{13, 11, 304},
+ dictWord{13, 11, 333},
+ dictWord{145, 11, 98},
+ dictWord{4, 0, 737},
+ dictWord{10, 0, 98},
+ dictWord{11, 0, 294},
+ dictWord{12, 0, 60},
+ dictWord{12, 0, 437},
+ dictWord{13, 0, 64},
+ dictWord{13, 0, 380},
+ dictWord{142, 0, 430},
+ dictWord{6, 10, 392},
+ dictWord{7, 10, 65},
+ dictWord{135, 10, 2019},
+ dictWord{6, 0, 1758},
+ dictWord{8, 0, 520},
+ dictWord{9, 0, 345},
+ dictWord{9, 0, 403},
+ dictWord{142, 0, 350},
+ dictWord{5, 0, 47},
+ dictWord{10, 0, 242},
+ dictWord{138, 0, 579},
+ dictWord{5, 0, 139},
+ dictWord{7, 0, 1168},
+ dictWord{138, 0, 539},
+ dictWord{134, 0, 1459},
+ dictWord{13, 0, 388},
+ dictWord{141, 11, 388},
+ dictWord{134, 0, 253},
+ dictWord{7, 10, 1260},
+ dictWord{135, 10, 1790},
+ dictWord{10, 0, 252},
+ dictWord{9, 10, 222},
+ dictWord{139, 10, 900},
+ dictWord{140, 0, 745},
+ dictWord{133, 11, 946},
+ dictWord{4, 0, 107},
+ dictWord{7, 0, 613},
+ dictWord{8, 0, 439},
+ dictWord{8, 0, 504},
+ dictWord{9, 0, 501},
+ dictWord{10, 0, 383},
+ dictWord{139, 0, 477},
+ dictWord{135, 11, 1485},
+ dictWord{132, 0, 871},
+ dictWord{7, 11, 411},
+ dictWord{7, 11, 590},
+ dictWord{8, 11, 631},
+ dictWord{9, 11, 323},
+ dictWord{10, 11, 355},
+ dictWord{11, 11, 491},
+ dictWord{12, 11, 143},
+ dictWord{12, 11, 402},
+ dictWord{13, 11, 73},
+ dictWord{14, 11, 408},
+ dictWord{15, 11, 107},
+ dictWord{146, 11, 71},
+ dictWord{132, 0, 229},
+ dictWord{132, 0, 903},
+ dictWord{140, 0, 71},
+ dictWord{133, 0, 549},
+ dictWord{4, 0, 47},
+ dictWord{6, 0, 373},
+ dictWord{7, 0, 452},
+ dictWord{7, 0, 543},
+ dictWord{7, 0, 1828},
+ dictWord{7, 0, 1856},
+ dictWord{9, 0, 6},
+ dictWord{11, 0, 257},
+ dictWord{139, 0, 391},
+ dictWord{7, 11, 1467},
+ dictWord{8, 11, 328},
+ dictWord{10, 11, 544},
+ dictWord{11, 11, 955},
+ dictWord{13, 11, 320},
+ dictWord{145, 11, 83},
+ dictWord{5, 0, 980},
+ dictWord{134, 0, 1754},
+ dictWord{136, 0, 865},
+ dictWord{5, 0, 705},
+ dictWord{137, 0, 606},
+ dictWord{7, 0, 161},
+ dictWord{8, 10, 201},
+ dictWord{136, 10, 605},
+ dictWord{143, 11, 35},
+ dictWord{5, 11, 835},
+ dictWord{6, 11, 483},
+ dictWord{140, 10, 224},
+ dictWord{7, 0, 536},
+ dictWord{7, 0, 1331},
+ dictWord{136, 0, 143},
+ dictWord{134, 0, 1388},
+ dictWord{5, 0, 724},
+ dictWord{10, 0, 305},
+ dictWord{11, 0, 151},
+ dictWord{12, 0, 33},
+ dictWord{12, 0, 121},
+ dictWord{12, 0, 381},
+ dictWord{17, 0, 3},
+ dictWord{17, 0, 27},
+ dictWord{17, 0, 78},
+ dictWord{18, 0, 18},
+ dictWord{19, 0, 54},
+ dictWord{149, 0, 5},
+ dictWord{4, 10, 523},
+ dictWord{133, 10, 638},
+ dictWord{5, 0, 19},
+ dictWord{134, 0, 533},
+ dictWord{5, 0, 395},
+ dictWord{5, 0, 951},
+ dictWord{134, 0, 1776},
+ dictWord{135, 0, 1908},
+ dictWord{132, 0, 846},
+ dictWord{10, 0, 74},
+ dictWord{11, 0, 663},
+ dictWord{12, 0, 210},
+ dictWord{13, 0, 166},
+ dictWord{13, 0, 310},
+ dictWord{14, 0, 373},
+ dictWord{18, 0, 95},
+ dictWord{19, 0, 43},
+ dictWord{6, 10, 242},
+ dictWord{7, 10, 227},
+ dictWord{7, 10, 1581},
+ dictWord{8, 10, 104},
+ dictWord{9, 10, 113},
+ dictWord{9, 10, 220},
+ dictWord{9, 10, 427},
+ dictWord{10, 10, 239},
+ dictWord{11, 10, 579},
+ dictWord{11, 10, 1023},
+ dictWord{13, 10, 4},
+ dictWord{13, 10, 204},
+ dictWord{13, 10, 316},
+ dictWord{148, 10, 86},
+ dictWord{9, 11, 716},
+ dictWord{11, 11, 108},
+ dictWord{13, 11, 123},
+ dictWord{14, 11, 252},
+ dictWord{19, 11, 38},
+ dictWord{21, 11, 3},
+ dictWord{151, 11, 11},
+ dictWord{8, 0, 372},
+ dictWord{9, 0, 122},
+ dictWord{138, 0, 175},
+ dictWord{132, 11, 677},
+ dictWord{7, 11, 1374},
+ dictWord{136, 11, 540},
+ dictWord{135, 10, 861},
+ dictWord{132, 0, 695},
+ dictWord{7, 0, 497},
+ dictWord{9, 0, 387},
+ dictWord{147, 0, 81},
+ dictWord{136, 0, 937},
+ dictWord{134, 0, 718},
+ dictWord{7, 0, 1328},
+ dictWord{136, 10, 494},
+ dictWord{132, 11, 331},
+ dictWord{6, 0, 1581},
+ dictWord{133, 11, 747},
+ dictWord{5, 0, 284},
+ dictWord{6, 0, 49},
+ dictWord{6, 0, 350},
+ dictWord{7, 0, 1},
+ dictWord{7, 0, 377},
+ dictWord{7, 0, 1693},
+ dictWord{8, 0, 18},
+ dictWord{8, 0, 678},
+ dictWord{9, 0, 161},
+ dictWord{9, 0, 585},
+ dictWord{9, 0, 671},
+ dictWord{9, 0, 839},
+ dictWord{11, 0, 912},
+ dictWord{141, 0, 427},
+ dictWord{7, 10, 1306},
+ dictWord{8, 10, 505},
+ dictWord{9, 10, 482},
+ dictWord{10, 10, 126},
+ dictWord{11, 10, 225},
+ dictWord{12, 10, 347},
+ dictWord{12, 10, 449},
+ dictWord{13, 10, 19},
+ dictWord{14, 10, 218},
+ dictWord{142, 10, 435},
+ dictWord{10, 10, 764},
+ dictWord{12, 10, 120},
+ dictWord{13, 10, 39},
+ dictWord{145, 10, 127},
+ dictWord{4, 0, 597},
+ dictWord{133, 10, 268},
+ dictWord{134, 0, 1094},
+ dictWord{4, 0, 1008},
+ dictWord{134, 0, 1973},
+ dictWord{132, 0, 811},
+ dictWord{139, 0, 908},
+ dictWord{135, 0, 1471},
+ dictWord{133, 11, 326},
+ dictWord{4, 10, 384},
+ dictWord{135, 10, 1022},
+ dictWord{7, 0, 1935},
+ dictWord{8, 0, 324},
+ dictWord{12, 0, 42},
+ dictWord{4, 11, 691},
+ dictWord{7, 11, 1935},
+ dictWord{8, 11, 324},
+ dictWord{9, 11, 35},
+ dictWord{10, 11, 680},
+ dictWord{11, 11, 364},
+ dictWord{12, 11, 42},
+ dictWord{13, 11, 357},
+ dictWord{146, 11, 16},
+ dictWord{135, 0, 2014},
+ dictWord{7, 0, 2007},
+ dictWord{9, 0, 101},
+ dictWord{9, 0, 450},
+ dictWord{10, 0, 66},
+ dictWord{10, 0, 842},
+ dictWord{11, 0, 536},
+ dictWord{12, 0, 587},
+ dictWord{6, 11, 32},
+ dictWord{7, 11, 385},
+ dictWord{7, 11, 757},
+ dictWord{7, 11, 1916},
+ dictWord{8, 11, 37},
+ dictWord{8, 11, 94},
+ dictWord{8, 11, 711},
+ dictWord{9, 11, 541},
+ dictWord{10, 11, 162},
+ dictWord{10, 11, 795},
+ dictWord{11, 11, 989},
+ dictWord{11, 11, 1010},
+ dictWord{12, 11, 14},
+ dictWord{142, 11, 308},
+ dictWord{139, 0, 586},
+ dictWord{135, 10, 1703},
+ dictWord{7, 0, 1077},
+ dictWord{11, 0, 28},
+ dictWord{9, 10, 159},
+ dictWord{140, 10, 603},
+ dictWord{6, 0, 1221},
+ dictWord{136, 10, 583},
+ dictWord{6, 11, 152},
+ dictWord{6, 11, 349},
+ dictWord{6, 11, 1682},
+ dictWord{7, 11, 1252},
+ dictWord{8, 11, 112},
+ dictWord{9, 11, 435},
+ dictWord{9, 11, 668},
+ dictWord{10, 11, 290},
+ dictWord{10, 11, 319},
+ dictWord{10, 11, 815},
+ dictWord{11, 11, 180},
+ dictWord{11, 11, 837},
+ dictWord{12, 11, 240},
+ dictWord{13, 11, 152},
+ dictWord{13, 11, 219},
+ dictWord{142, 11, 158},
+ dictWord{139, 0, 62},
+ dictWord{132, 10, 515},
+ dictWord{8, 10, 632},
+ dictWord{8, 10, 697},
+ dictWord{137, 10, 854},
+ dictWord{134, 0, 1766},
+ dictWord{132, 11, 581},
+ dictWord{6, 11, 126},
+ dictWord{7, 11, 573},
+ dictWord{8, 11, 397},
+ dictWord{142, 11, 44},
+ dictWord{150, 0, 28},
+ dictWord{11, 0, 670},
+ dictWord{22, 0, 25},
+ dictWord{4, 10, 136},
+ dictWord{133, 10, 551},
+ dictWord{6, 0, 1665},
+ dictWord{7, 0, 256},
+ dictWord{7, 0, 1388},
+ dictWord{138, 0, 499},
+ dictWord{4, 0, 22},
+ dictWord{5, 0, 10},
+ dictWord{7, 0, 1576},
+ dictWord{136, 0, 97},
+ dictWord{134, 10, 1782},
+ dictWord{5, 0, 481},
+ dictWord{7, 10, 1287},
+ dictWord{9, 10, 44},
+ dictWord{10, 10, 552},
+ dictWord{10, 10, 642},
+ dictWord{11, 10, 839},
+ dictWord{12, 10, 274},
+ dictWord{12, 10, 275},
+ dictWord{12, 10, 372},
+ dictWord{13, 10, 91},
+ dictWord{142, 10, 125},
+ dictWord{133, 11, 926},
+ dictWord{7, 11, 1232},
+ dictWord{137, 11, 531},
+ dictWord{6, 0, 134},
+ dictWord{7, 0, 437},
+ dictWord{7, 0, 1824},
+ dictWord{9, 0, 37},
+ dictWord{14, 0, 285},
+ dictWord{142, 0, 371},
+ dictWord{7, 0, 486},
+ dictWord{8, 0, 155},
+ dictWord{11, 0, 93},
+ dictWord{140, 0, 164},
+ dictWord{6, 0, 1391},
+ dictWord{134, 0, 1442},
+ dictWord{133, 11, 670},
+ dictWord{133, 0, 591},
+ dictWord{6, 10, 147},
+ dictWord{7, 10, 886},
+ dictWord{7, 11, 1957},
+ dictWord{9, 10, 753},
+ dictWord{138, 10, 268},
+ dictWord{5, 0, 380},
+ dictWord{5, 0, 650},
+ dictWord{7, 0, 1173},
+ dictWord{136, 0, 310},
+ dictWord{4, 0, 364},
+ dictWord{7, 0, 1156},
+ dictWord{7, 0, 1187},
+ dictWord{137, 0, 409},
+ dictWord{135, 11, 1621},
+ dictWord{134, 0, 482},
+ dictWord{133, 11, 506},
+ dictWord{4, 0, 781},
+ dictWord{6, 0, 487},
+ dictWord{7, 0, 926},
+ dictWord{8, 0, 263},
+ dictWord{139, 0, 500},
+ dictWord{138, 10, 137},
+ dictWord{135, 11, 242},
+ dictWord{139, 11, 96},
+ dictWord{133, 10, 414},
+ dictWord{135, 10, 1762},
+ dictWord{134, 0, 804},
+ dictWord{5, 11, 834},
+ dictWord{7, 11, 1202},
+ dictWord{8, 11, 14},
+ dictWord{9, 11, 481},
+ dictWord{137, 11, 880},
+ dictWord{134, 10, 599},
+ dictWord{4, 0, 94},
+ dictWord{135, 0, 1265},
+ dictWord{4, 0, 415},
+ dictWord{132, 0, 417},
+ dictWord{5, 0, 348},
+ dictWord{6, 0, 522},
+ dictWord{6, 10, 1749},
+ dictWord{7, 11, 1526},
+ dictWord{138, 11, 465},
+ dictWord{134, 10, 1627},
+ dictWord{132, 0, 1012},
+ dictWord{132, 10, 488},
+ dictWord{4, 11, 357},
+ dictWord{6, 11, 172},
+ dictWord{7, 11, 143},
+ dictWord{137, 11, 413},
+ dictWord{4, 10, 83},
+ dictWord{4, 11, 590},
+ dictWord{146, 11, 76},
+ dictWord{140, 10, 676},
+ dictWord{7, 11, 287},
+ dictWord{8, 11, 355},
+ dictWord{9, 11, 293},
+ dictWord{137, 11, 743},
+ dictWord{134, 10, 278},
+ dictWord{6, 0, 1803},
+ dictWord{18, 0, 165},
+ dictWord{24, 0, 21},
+ dictWord{5, 11, 169},
+ dictWord{7, 11, 333},
+ dictWord{136, 11, 45},
+ dictWord{12, 10, 97},
+ dictWord{140, 11, 97},
+ dictWord{4, 0, 408},
+ dictWord{4, 0, 741},
+ dictWord{135, 0, 500},
+ dictWord{132, 11, 198},
+ dictWord{7, 10, 388},
+ dictWord{7, 10, 644},
+ dictWord{139, 10, 781},
+ dictWord{4, 11, 24},
+ dictWord{5, 11, 140},
+ dictWord{5, 11, 185},
+ dictWord{7, 11, 1500},
+ dictWord{11, 11, 565},
+ dictWord{139, 11, 838},
+ dictWord{6, 0, 1321},
+ dictWord{9, 0, 257},
+ dictWord{7, 10, 229},
+ dictWord{8, 10, 59},
+ dictWord{9, 10, 190},
+ dictWord{10, 10, 378},
+ dictWord{140, 10, 191},
+ dictWord{4, 11, 334},
+ dictWord{133, 11, 593},
+ dictWord{135, 11, 1885},
+ dictWord{134, 0, 1138},
+ dictWord{4, 0, 249},
+ dictWord{6, 0, 73},
+ dictWord{135, 0, 177},
+ dictWord{133, 0, 576},
+ dictWord{142, 0, 231},
+ dictWord{137, 0, 288},
+ dictWord{132, 10, 660},
+ dictWord{7, 10, 1035},
+ dictWord{138, 10, 737},
+ dictWord{135, 0, 1487},
+ dictWord{6, 0, 989},
+ dictWord{9, 0, 433},
+ dictWord{7, 10, 690},
+ dictWord{9, 10, 587},
+ dictWord{140, 10, 521},
+ dictWord{7, 0, 1264},
+ dictWord{7, 0, 1678},
+ dictWord{11, 0, 945},
+ dictWord{12, 0, 341},
+ dictWord{12, 0, 471},
+ dictWord{140, 0, 569},
+ dictWord{132, 11, 709},
+ dictWord{133, 11, 897},
+ dictWord{5, 11, 224},
+ dictWord{13, 11, 174},
+ dictWord{146, 11, 52},
+ dictWord{135, 11, 1840},
+ dictWord{134, 10, 1744},
+ dictWord{12, 0, 87},
+ dictWord{16, 0, 74},
+ dictWord{4, 10, 733},
+ dictWord{9, 10, 194},
+ dictWord{10, 10, 92},
+ dictWord{11, 10, 198},
+ dictWord{12, 10, 84},
+ dictWord{141, 10, 128},
+ dictWord{140, 0, 779},
+ dictWord{135, 0, 538},
+ dictWord{4, 11, 608},
+ dictWord{133, 11, 497},
+ dictWord{133, 0, 413},
+ dictWord{7, 11, 1375},
+ dictWord{7, 11, 1466},
+ dictWord{138, 11, 331},
+ dictWord{136, 0, 495},
+ dictWord{6, 11, 540},
+ dictWord{136, 11, 136},
+ dictWord{7, 0, 54},
+ dictWord{8, 0, 312},
+ dictWord{10, 0, 191},
+ dictWord{10, 0, 614},
+ dictWord{140, 0, 567},
+ dictWord{6, 0, 468},
+ dictWord{7, 0, 567},
+ dictWord{7, 0, 1478},
+ dictWord{8, 0, 530},
+ dictWord{14, 0, 290},
+ dictWord{133, 11, 999},
+ dictWord{4, 11, 299},
+ dictWord{7, 10, 306},
+ dictWord{135, 11, 1004},
+ dictWord{142, 11, 296},
+ dictWord{134, 0, 1484},
+ dictWord{133, 10, 979},
+ dictWord{6, 0, 609},
+ dictWord{9, 0, 815},
+ dictWord{12, 11, 137},
+ dictWord{14, 11, 9},
+ dictWord{14, 11, 24},
+ dictWord{142, 11, 64},
+ dictWord{133, 11, 456},
+ dictWord{6, 0, 484},
+ dictWord{135, 0, 822},
+ dictWord{133, 10, 178},
+ dictWord{136, 11, 180},
+ dictWord{132, 11, 755},
+ dictWord{137, 0, 900},
+ dictWord{135, 0, 1335},
+ dictWord{6, 0, 1724},
+ dictWord{135, 0, 2022},
+ dictWord{135, 11, 1139},
+ dictWord{5, 0, 640},
+ dictWord{132, 10, 390},
+ dictWord{6, 0, 1831},
+ dictWord{138, 11, 633},
+ dictWord{135, 11, 566},
+ dictWord{4, 11, 890},
+ dictWord{5, 11, 805},
+ dictWord{5, 11, 819},
+ dictWord{5, 11, 961},
+ dictWord{6, 11, 396},
+ dictWord{6, 11, 1631},
+ dictWord{6, 11, 1678},
+ dictWord{7, 11, 1967},
+ dictWord{7, 11, 2041},
+ dictWord{9, 11, 630},
+ dictWord{11, 11, 8},
+ dictWord{11, 11, 1019},
+ dictWord{12, 11, 176},
+ dictWord{13, 11, 225},
+ dictWord{14, 11, 292},
+ dictWord{149, 11, 24},
+ dictWord{132, 0, 474},
+ dictWord{134, 0, 1103},
+ dictWord{135, 0, 1504},
+ dictWord{134, 0, 1576},
+ dictWord{6, 0, 961},
+ dictWord{6, 0, 1034},
+ dictWord{140, 0, 655},
+ dictWord{11, 11, 514},
+ dictWord{149, 11, 20},
+ dictWord{5, 0, 305},
+ dictWord{135, 11, 1815},
+ dictWord{7, 11, 1505},
+ dictWord{10, 11, 190},
+ dictWord{10, 11, 634},
+ dictWord{11, 11, 792},
+ dictWord{12, 11, 358},
+ dictWord{140, 11, 447},
+ dictWord{5, 11, 0},
+ dictWord{6, 11, 536},
+ dictWord{7, 11, 604},
+ dictWord{13, 11, 445},
+ dictWord{145, 11, 126},
+ dictWord{7, 0, 1236},
+ dictWord{133, 10, 105},
+ dictWord{4, 0, 480},
+ dictWord{6, 0, 217},
+ dictWord{6, 0, 302},
+ dictWord{6, 0, 1642},
+ dictWord{7, 0, 130},
+ dictWord{7, 0, 837},
+ dictWord{7, 0, 1321},
+ dictWord{7, 0, 1547},
+ dictWord{7, 0, 1657},
+ dictWord{8, 0, 429},
+ dictWord{9, 0, 228},
+ dictWord{13, 0, 289},
+ dictWord{13, 0, 343},
+ dictWord{19, 0, 101},
+ dictWord{6, 11, 232},
+ dictWord{6, 11, 412},
+ dictWord{7, 11, 1074},
+ dictWord{8, 11, 9},
+ dictWord{8, 11, 157},
+ dictWord{8, 11, 786},
+ dictWord{9, 11, 196},
+ dictWord{9, 11, 352},
+ dictWord{9, 11, 457},
+ dictWord{10, 11, 337},
+ dictWord{11, 11, 232},
+ dictWord{11, 11, 877},
+ dictWord{12, 11, 480},
+ dictWord{140, 11, 546},
+ dictWord{5, 10, 438},
+ dictWord{7, 11, 958},
+ dictWord{9, 10, 694},
+ dictWord{12, 10, 627},
+ dictWord{13, 11, 38},
+ dictWord{141, 10, 210},
+ dictWord{4, 11, 382},
+ dictWord{136, 11, 579},
+ dictWord{7, 0, 278},
+ dictWord{10, 0, 739},
+ dictWord{11, 0, 708},
+ dictWord{141, 0, 348},
+ dictWord{4, 11, 212},
+ dictWord{135, 11, 1206},
+ dictWord{135, 11, 1898},
+ dictWord{6, 0, 708},
+ dictWord{6, 0, 1344},
+ dictWord{152, 10, 11},
+ dictWord{137, 11, 768},
+ dictWord{134, 0, 1840},
+ dictWord{140, 0, 233},
+ dictWord{8, 10, 25},
+ dictWord{138, 10, 826},
+ dictWord{6, 0, 2017},
+ dictWord{133, 11, 655},
+ dictWord{6, 0, 1488},
+ dictWord{139, 11, 290},
+ dictWord{132, 10, 308},
+ dictWord{134, 0, 1590},
+ dictWord{134, 0, 1800},
+ dictWord{134, 0, 1259},
+ dictWord{16, 0, 28},
+ dictWord{6, 11, 231},
+ dictWord{7, 11, 95},
+ dictWord{136, 11, 423},
+ dictWord{133, 11, 300},
+ dictWord{135, 10, 150},
+ dictWord{136, 10, 649},
+ dictWord{7, 11, 1874},
+ dictWord{137, 11, 641},
+ dictWord{6, 11, 237},
+ dictWord{7, 11, 611},
+ dictWord{8, 11, 100},
+ dictWord{9, 11, 416},
+ dictWord{11, 11, 335},
+ dictWord{12, 11, 173},
+ dictWord{146, 11, 101},
+ dictWord{137, 0, 45},
+ dictWord{134, 10, 521},
+ dictWord{17, 0, 36},
+ dictWord{14, 11, 26},
+ dictWord{146, 11, 150},
+ dictWord{7, 0, 1442},
+ dictWord{14, 0, 22},
+ dictWord{5, 10, 339},
+ dictWord{15, 10, 41},
+ dictWord{15, 10, 166},
+ dictWord{147, 10, 66},
+ dictWord{8, 0, 378},
+ dictWord{6, 11, 581},
+ dictWord{135, 11, 1119},
+ dictWord{134, 0, 1507},
+ dictWord{147, 11, 117},
+ dictWord{139, 0, 39},
+ dictWord{134, 0, 1054},
+ dictWord{6, 0, 363},
+ dictWord{7, 0, 1955},
+ dictWord{136, 0, 725},
+ dictWord{134, 0, 2036},
+ dictWord{133, 11, 199},
+ dictWord{6, 0, 1871},
+ dictWord{9, 0, 935},
+ dictWord{9, 0, 961},
+ dictWord{9, 0, 1004},
+ dictWord{9, 0, 1016},
+ dictWord{12, 0, 805},
+ dictWord{12, 0, 852},
+ dictWord{12, 0, 853},
+ dictWord{12, 0, 869},
+ dictWord{12, 0, 882},
+ dictWord{12, 0, 896},
+ dictWord{12, 0, 906},
+ dictWord{12, 0, 917},
+ dictWord{12, 0, 940},
+ dictWord{15, 0, 170},
+ dictWord{15, 0, 176},
+ dictWord{15, 0, 188},
+ dictWord{15, 0, 201},
+ dictWord{15, 0, 205},
+ dictWord{15, 0, 212},
+ dictWord{15, 0, 234},
+ dictWord{15, 0, 244},
+ dictWord{18, 0, 181},
+ dictWord{18, 0, 193},
+ dictWord{18, 0, 196},
+ dictWord{18, 0, 201},
+ dictWord{18, 0, 202},
+ dictWord{18, 0, 210},
+ dictWord{18, 0, 217},
+ dictWord{18, 0, 235},
+ dictWord{18, 0, 236},
+ dictWord{18, 0, 237},
+ dictWord{21, 0, 54},
+ dictWord{21, 0, 55},
+ dictWord{21, 0, 58},
+ dictWord{21, 0, 59},
+ dictWord{152, 0, 22},
+ dictWord{134, 10, 1628},
+ dictWord{137, 0, 805},
+ dictWord{5, 0, 813},
+ dictWord{135, 0, 2046},
+ dictWord{142, 11, 42},
+ dictWord{5, 0, 712},
+ dictWord{6, 0, 1240},
+ dictWord{11, 0, 17},
+ dictWord{13, 0, 321},
+ dictWord{144, 0, 67},
+ dictWord{132, 0, 617},
+ dictWord{135, 10, 829},
+ dictWord{6, 0, 320},
+ dictWord{7, 0, 781},
+ dictWord{7, 0, 1921},
+ dictWord{9, 0, 55},
+ dictWord{10, 0, 186},
+ dictWord{10, 0, 273},
+ dictWord{10, 0, 664},
+ dictWord{10, 0, 801},
+ dictWord{11, 0, 996},
+ dictWord{11, 0, 997},
+ dictWord{13, 0, 157},
+ dictWord{142, 0, 170},
+ dictWord{136, 0, 271},
+ dictWord{5, 10, 486},
+ dictWord{135, 10, 1349},
+ dictWord{18, 11, 91},
+ dictWord{147, 11, 70},
+ dictWord{10, 0, 445},
+ dictWord{7, 10, 1635},
+ dictWord{8, 10, 17},
+ dictWord{138, 10, 295},
+ dictWord{136, 11, 404},
+ dictWord{7, 0, 103},
+ dictWord{7, 0, 863},
+ dictWord{11, 0, 184},
+ dictWord{145, 0, 62},
+ dictWord{138, 10, 558},
+ dictWord{137, 0, 659},
+ dictWord{6, 11, 312},
+ dictWord{6, 11, 1715},
+ dictWord{10, 11, 584},
+ dictWord{11, 11, 546},
+ dictWord{11, 11, 692},
+ dictWord{12, 11, 259},
+ dictWord{12, 11, 295},
+ dictWord{13, 11, 46},
+ dictWord{141, 11, 154},
+ dictWord{134, 0, 676},
+ dictWord{132, 11, 588},
+ dictWord{4, 11, 231},
+ dictWord{5, 11, 61},
+ dictWord{6, 11, 104},
+ dictWord{7, 11, 729},
+ dictWord{7, 11, 964},
+ dictWord{7, 11, 1658},
+ dictWord{140, 11, 414},
+ dictWord{6, 11, 263},
+ dictWord{138, 11, 757},
+ dictWord{11, 0, 337},
+ dictWord{142, 0, 303},
+ dictWord{135, 11, 1363},
+ dictWord{132, 11, 320},
+ dictWord{140, 0, 506},
+ dictWord{134, 10, 447},
+ dictWord{5, 0, 77},
+ dictWord{7, 0, 1455},
+ dictWord{10, 0, 843},
+ dictWord{147, 0, 73},
+ dictWord{7, 10, 577},
+ dictWord{7, 10, 1432},
+ dictWord{9, 10, 475},
+ dictWord{9, 10, 505},
+ dictWord{9, 10, 526},
+ dictWord{9, 10, 609},
+ dictWord{9, 10, 689},
+ dictWord{9, 10, 726},
+ dictWord{9, 10, 735},
+ dictWord{9, 10, 738},
+ dictWord{10, 10, 556},
+ dictWord{10, 10, 674},
+ dictWord{10, 10, 684},
+ dictWord{11, 10, 89},
+ dictWord{11, 10, 202},
+ dictWord{11, 10, 272},
+ dictWord{11, 10, 380},
+ dictWord{11, 10, 415},
+ dictWord{11, 10, 505},
+ dictWord{11, 10, 537},
+ dictWord{11, 10, 550},
+ dictWord{11, 10, 562},
+ dictWord{11, 10, 640},
+ dictWord{11, 10, 667},
+ dictWord{11, 10, 688},
+ dictWord{11, 10, 847},
+ dictWord{11, 10, 927},
+ dictWord{11, 10, 930},
+ dictWord{11, 10, 940},
+ dictWord{12, 10, 144},
+ dictWord{12, 10, 325},
+ dictWord{12, 10, 329},
+ dictWord{12, 10, 389},
+ dictWord{12, 10, 403},
+ dictWord{12, 10, 451},
+ dictWord{12, 10, 515},
+ dictWord{12, 10, 604},
+ dictWord{12, 10, 616},
+ dictWord{12, 10, 626},
+ dictWord{13, 10, 66},
+ dictWord{13, 10, 131},
+ dictWord{13, 10, 167},
+ dictWord{13, 10, 236},
+ dictWord{13, 10, 368},
+ dictWord{13, 10, 411},
+ dictWord{13, 10, 434},
+ dictWord{13, 10, 453},
+ dictWord{13, 10, 461},
+ dictWord{13, 10, 474},
+ dictWord{14, 10, 59},
+ dictWord{14, 10, 60},
+ dictWord{14, 10, 139},
+ dictWord{14, 10, 152},
+ dictWord{14, 10, 276},
+ dictWord{14, 10, 353},
+ dictWord{14, 10, 402},
+ dictWord{15, 10, 28},
+ dictWord{15, 10, 81},
+ dictWord{15, 10, 123},
+ dictWord{15, 10, 152},
+ dictWord{18, 10, 136},
+ dictWord{148, 10, 88},
+ dictWord{132, 0, 458},
+ dictWord{135, 0, 1420},
+ dictWord{6, 0, 109},
+ dictWord{10, 0, 382},
+ dictWord{4, 11, 405},
+ dictWord{4, 10, 609},
+ dictWord{7, 10, 756},
+ dictWord{7, 11, 817},
+ dictWord{9, 10, 544},
+ dictWord{11, 10, 413},
+ dictWord{14, 11, 58},
+ dictWord{14, 10, 307},
+ dictWord{16, 10, 25},
+ dictWord{17, 11, 37},
+ dictWord{146, 11, 124},
+ dictWord{6, 0, 330},
+ dictWord{7, 0, 1084},
+ dictWord{11, 0, 142},
+ dictWord{133, 11, 974},
+ dictWord{4, 10, 930},
+ dictWord{133, 10, 947},
+ dictWord{5, 10, 939},
+ dictWord{142, 11, 394},
+ dictWord{16, 0, 91},
+ dictWord{145, 0, 87},
+ dictWord{5, 11, 235},
+ dictWord{5, 10, 962},
+ dictWord{7, 11, 1239},
+ dictWord{11, 11, 131},
+ dictWord{140, 11, 370},
+ dictWord{11, 0, 492},
+ dictWord{5, 10, 651},
+ dictWord{8, 10, 170},
+ dictWord{9, 10, 61},
+ dictWord{9, 10, 63},
+ dictWord{10, 10, 23},
+ dictWord{10, 10, 37},
+ dictWord{10, 10, 834},
+ dictWord{11, 10, 4},
+ dictWord{11, 10, 281},
+ dictWord{11, 10, 503},
+ dictWord{11, 10, 677},
+ dictWord{12, 10, 96},
+ dictWord{12, 10, 130},
+ dictWord{12, 10, 244},
+ dictWord{14, 10, 5},
+ dictWord{14, 10, 40},
+ dictWord{14, 10, 162},
+ dictWord{14, 10, 202},
+ dictWord{146, 10, 133},
+ dictWord{4, 10, 406},
+ dictWord{5, 10, 579},
+ dictWord{12, 10, 492},
+ dictWord{150, 10, 15},
+ dictWord{9, 11, 137},
+ dictWord{138, 11, 221},
+ dictWord{134, 0, 1239},
+ dictWord{11, 0, 211},
+ dictWord{140, 0, 145},
+ dictWord{7, 11, 390},
+ dictWord{138, 11, 140},
+ dictWord{135, 11, 1418},
+ dictWord{135, 11, 1144},
+ dictWord{134, 0, 1049},
+ dictWord{7, 0, 321},
+ dictWord{6, 10, 17},
+ dictWord{7, 10, 1001},
+ dictWord{7, 10, 1982},
+ dictWord{9, 10, 886},
+ dictWord{10, 10, 489},
+ dictWord{10, 10, 800},
+ dictWord{11, 10, 782},
+ dictWord{12, 10, 320},
+ dictWord{13, 10, 467},
+ dictWord{14, 10, 145},
+ dictWord{14, 10, 387},
+ dictWord{143, 10, 119},
+ dictWord{145, 10, 17},
+ dictWord{5, 11, 407},
+ dictWord{11, 11, 489},
+ dictWord{19, 11, 37},
+ dictWord{20, 11, 73},
+ dictWord{150, 11, 38},
+ dictWord{133, 10, 458},
+ dictWord{135, 0, 1985},
+ dictWord{7, 10, 1983},
+ dictWord{8, 10, 0},
+ dictWord{8, 10, 171},
+ dictWord{9, 10, 120},
+ dictWord{9, 10, 732},
+ dictWord{10, 10, 473},
+ dictWord{11, 10, 656},
+ dictWord{11, 10, 998},
+ dictWord{18, 10, 0},
+ dictWord{18, 10, 2},
+ dictWord{147, 10, 21},
+ dictWord{5, 11, 325},
+ dictWord{7, 11, 1483},
+ dictWord{8, 11, 5},
+ dictWord{8, 11, 227},
+ dictWord{9, 11, 105},
+ dictWord{10, 11, 585},
+ dictWord{140, 11, 614},
+ dictWord{136, 0, 122},
+ dictWord{132, 0, 234},
+ dictWord{135, 11, 1196},
+ dictWord{6, 0, 976},
+ dictWord{6, 0, 1098},
+ dictWord{134, 0, 1441},
+ dictWord{7, 0, 253},
+ dictWord{136, 0, 549},
+ dictWord{6, 11, 621},
+ dictWord{13, 11, 504},
+ dictWord{144, 11, 19},
+ dictWord{132, 10, 519},
+ dictWord{5, 0, 430},
+ dictWord{5, 0, 932},
+ dictWord{6, 0, 131},
+ dictWord{7, 0, 417},
+ dictWord{9, 0, 522},
+ dictWord{11, 0, 314},
+ dictWord{141, 0, 390},
+ dictWord{14, 0, 149},
+ dictWord{14, 0, 399},
+ dictWord{143, 0, 57},
+ dictWord{5, 10, 907},
+ dictWord{6, 10, 31},
+ dictWord{6, 11, 218},
+ dictWord{7, 10, 491},
+ dictWord{7, 10, 530},
+ dictWord{8, 10, 592},
+ dictWord{11, 10, 53},
+ dictWord{11, 10, 779},
+ dictWord{12, 10, 167},
+ dictWord{12, 10, 411},
+ dictWord{14, 10, 14},
+ dictWord{14, 10, 136},
+ dictWord{15, 10, 72},
+ dictWord{16, 10, 17},
+ dictWord{144, 10, 72},
+ dictWord{140, 11, 330},
+ dictWord{7, 11, 454},
+ dictWord{7, 11, 782},
+ dictWord{136, 11, 768},
+ dictWord{132, 0, 507},
+ dictWord{10, 11, 676},
+ dictWord{140, 11, 462},
+ dictWord{6, 0, 630},
+ dictWord{9, 0, 811},
+ dictWord{4, 10, 208},
+ dictWord{5, 10, 106},
+ dictWord{6, 10, 531},
+ dictWord{8, 10, 408},
+ dictWord{9, 10, 188},
+ dictWord{138, 10, 572},
+ dictWord{4, 0, 343},
+ dictWord{5, 0, 511},
+ dictWord{134, 10, 1693},
+ dictWord{134, 11, 164},
+ dictWord{132, 0, 448},
+ dictWord{7, 0, 455},
+ dictWord{138, 0, 591},
+ dictWord{135, 0, 1381},
+ dictWord{12, 10, 441},
+ dictWord{150, 11, 50},
+ dictWord{9, 10, 449},
+ dictWord{10, 10, 192},
+ dictWord{138, 10, 740},
+ dictWord{6, 0, 575},
+ dictWord{132, 10, 241},
+ dictWord{134, 0, 1175},
+ dictWord{134, 0, 653},
+ dictWord{134, 0, 1761},
+ dictWord{134, 0, 1198},
+ dictWord{132, 10, 259},
+ dictWord{6, 11, 343},
+ dictWord{7, 11, 195},
+ dictWord{9, 11, 226},
+ dictWord{10, 11, 197},
+ dictWord{10, 11, 575},
+ dictWord{11, 11, 502},
+ dictWord{139, 11, 899},
+ dictWord{7, 0, 1127},
+ dictWord{7, 0, 1572},
+ dictWord{10, 0, 297},
+ dictWord{10, 0, 422},
+ dictWord{11, 0, 764},
+ dictWord{11, 0, 810},
+ dictWord{12, 0, 264},
+ dictWord{13, 0, 102},
+ dictWord{13, 0, 300},
+ dictWord{13, 0, 484},
+ dictWord{14, 0, 147},
+ dictWord{14, 0, 229},
+ dictWord{17, 0, 71},
+ dictWord{18, 0, 118},
+ dictWord{147, 0, 120},
+ dictWord{135, 11, 666},
+ dictWord{132, 0, 678},
+ dictWord{4, 10, 173},
+ dictWord{5, 10, 312},
+ dictWord{5, 10, 512},
+ dictWord{135, 10, 1285},
+ dictWord{7, 10, 1603},
+ dictWord{7, 10, 1691},
+ dictWord{9, 10, 464},
+ dictWord{11, 10, 195},
+ dictWord{12, 10, 279},
+ dictWord{12, 10, 448},
+ dictWord{14, 10, 11},
+ dictWord{147, 10, 102},
+ dictWord{16, 0, 99},
+ dictWord{146, 0, 164},
+ dictWord{7, 11, 1125},
+ dictWord{9, 11, 143},
+ dictWord{11, 11, 61},
+ dictWord{14, 11, 405},
+ dictWord{150, 11, 21},
+ dictWord{137, 11, 260},
+ dictWord{4, 10, 452},
+ dictWord{5, 10, 583},
+ dictWord{5, 10, 817},
+ dictWord{6, 10, 433},
+ dictWord{7, 10, 593},
+ dictWord{7, 10, 720},
+ dictWord{7, 10, 1378},
+ dictWord{8, 10, 161},
+ dictWord{9, 10, 284},
+ dictWord{10, 10, 313},
+ dictWord{139, 10, 886},
+ dictWord{132, 10, 547},
+ dictWord{136, 10, 722},
+ dictWord{14, 0, 35},
+ dictWord{142, 0, 191},
+ dictWord{141, 0, 45},
+ dictWord{138, 0, 121},
+ dictWord{132, 0, 125},
+ dictWord{134, 0, 1622},
+ dictWord{133, 11, 959},
+ dictWord{8, 10, 420},
+ dictWord{139, 10, 193},
+ dictWord{132, 0, 721},
+ dictWord{135, 10, 409},
+ dictWord{136, 0, 145},
+ dictWord{7, 0, 792},
+ dictWord{8, 0, 147},
+ dictWord{10, 0, 821},
+ dictWord{11, 0, 970},
+ dictWord{11, 0, 1021},
+ dictWord{136, 11, 173},
+ dictWord{134, 11, 266},
+ dictWord{132, 0, 715},
+ dictWord{7, 0, 1999},
+ dictWord{138, 10, 308},
+ dictWord{133, 0, 531},
+ dictWord{5, 0, 168},
+ dictWord{5, 0, 930},
+ dictWord{8, 0, 74},
+ dictWord{9, 0, 623},
+ dictWord{12, 0, 500},
+ dictWord{140, 0, 579},
+ dictWord{144, 0, 65},
+ dictWord{138, 11, 246},
+ dictWord{6, 0, 220},
+ dictWord{7, 0, 1101},
+ dictWord{13, 0, 105},
+ dictWord{142, 11, 314},
+ dictWord{5, 10, 1002},
+ dictWord{136, 10, 745},
+ dictWord{134, 0, 960},
+ dictWord{20, 0, 0},
+ dictWord{148, 11, 0},
+ dictWord{4, 0, 1005},
+ dictWord{4, 10, 239},
+ dictWord{6, 10, 477},
+ dictWord{7, 10, 1607},
+ dictWord{11, 10, 68},
+ dictWord{139, 10, 617},
+ dictWord{6, 0, 19},
+ dictWord{7, 0, 1413},
+ dictWord{139, 0, 428},
+ dictWord{149, 10, 13},
+ dictWord{7, 0, 96},
+ dictWord{8, 0, 401},
+ dictWord{8, 0, 703},
+ dictWord{9, 0, 896},
+ dictWord{136, 11, 300},
+ dictWord{134, 0, 1595},
+ dictWord{145, 0, 116},
+ dictWord{136, 0, 1021},
+ dictWord{7, 0, 1961},
+ dictWord{7, 0, 1965},
+ dictWord{7, 0, 2030},
+ dictWord{8, 0, 150},
+ dictWord{8, 0, 702},
+ dictWord{8, 0, 737},
+ dictWord{8, 0, 750},
+ dictWord{140, 0, 366},
+ dictWord{11, 11, 75},
+ dictWord{142, 11, 267},
+ dictWord{132, 10, 367},
+ dictWord{8, 0, 800},
+ dictWord{9, 0, 148},
+ dictWord{9, 0, 872},
+ dictWord{9, 0, 890},
+ dictWord{11, 0, 309},
+ dictWord{11, 0, 1001},
+ dictWord{13, 0, 267},
+ dictWord{13, 0, 323},
+ dictWord{5, 11, 427},
+ dictWord{5, 11, 734},
+ dictWord{7, 11, 478},
+ dictWord{136, 11, 52},
+ dictWord{7, 11, 239},
+ dictWord{11, 11, 217},
+ dictWord{142, 11, 165},
+ dictWord{132, 11, 323},
+ dictWord{140, 11, 419},
+ dictWord{13, 0, 299},
+ dictWord{142, 0, 75},
+ dictWord{6, 11, 87},
+ dictWord{6, 11, 1734},
+ dictWord{7, 11, 20},
+ dictWord{7, 11, 1056},
+ dictWord{8, 11, 732},
+ dictWord{9, 11, 406},
+ dictWord{9, 11, 911},
+ dictWord{138, 11, 694},
+ dictWord{134, 0, 1383},
+ dictWord{132, 10, 694},
+ dictWord{133, 11, 613},
+ dictWord{137, 0, 779},
+ dictWord{4, 0, 598},
+ dictWord{140, 10, 687},
+ dictWord{6, 0, 970},
+ dictWord{135, 0, 424},
+ dictWord{133, 0, 547},
+ dictWord{7, 11, 32},
+ dictWord{7, 11, 984},
+ dictWord{8, 11, 85},
+ dictWord{8, 11, 709},
+ dictWord{9, 11, 579},
+ dictWord{9, 11, 847},
+ dictWord{9, 11, 856},
+ dictWord{10, 11, 799},
+ dictWord{11, 11, 258},
+ dictWord{11, 11, 1007},
+ dictWord{12, 11, 331},
+ dictWord{12, 11, 615},
+ dictWord{13, 11, 188},
+ dictWord{13, 11, 435},
+ dictWord{14, 11, 8},
+ dictWord{15, 11, 165},
+ dictWord{16, 11, 27},
+ dictWord{148, 11, 40},
+ dictWord{6, 0, 1222},
+ dictWord{134, 0, 1385},
+ dictWord{132, 0, 876},
+ dictWord{138, 11, 151},
+ dictWord{135, 10, 213},
+ dictWord{4, 11, 167},
+ dictWord{135, 11, 82},
+ dictWord{133, 0, 133},
+ dictWord{6, 11, 24},
+ dictWord{7, 11, 74},
+ dictWord{7, 11, 678},
+ dictWord{137, 11, 258},
+ dictWord{5, 11, 62},
+ dictWord{6, 11, 534},
+ dictWord{7, 11, 684},
+ dictWord{7, 11, 1043},
+ dictWord{7, 11, 1072},
+ dictWord{8, 11, 280},
+ dictWord{8, 11, 541},
+ dictWord{8, 11, 686},
+ dictWord{10, 11, 519},
+ dictWord{11, 11, 252},
+ dictWord{140, 11, 282},
+ dictWord{136, 0, 187},
+ dictWord{8, 0, 8},
+ dictWord{10, 0, 0},
+ dictWord{10, 0, 818},
+ dictWord{139, 0, 988},
+ dictWord{132, 11, 359},
+ dictWord{11, 0, 429},
+ dictWord{15, 0, 51},
+ dictWord{135, 10, 1672},
+ dictWord{136, 0, 685},
+ dictWord{5, 11, 211},
+ dictWord{7, 11, 88},
+ dictWord{136, 11, 627},
+ dictWord{134, 0, 472},
+ dictWord{136, 0, 132},
+ dictWord{6, 11, 145},
+ dictWord{141, 11, 336},
+ dictWord{4, 10, 751},
+ dictWord{11, 10, 390},
+ dictWord{140, 10, 32},
+ dictWord{6, 0, 938},
+ dictWord{6, 0, 1060},
+ dictWord{4, 11, 263},
+ dictWord{4, 10, 409},
+ dictWord{133, 10, 78},
+ dictWord{137, 0, 874},
+ dictWord{8, 0, 774},
+ dictWord{10, 0, 670},
+ dictWord{12, 0, 51},
+ dictWord{4, 11, 916},
+ dictWord{6, 10, 473},
+ dictWord{7, 10, 1602},
+ dictWord{10, 10, 698},
+ dictWord{12, 10, 212},
+ dictWord{13, 10, 307},
+ dictWord{145, 10, 105},
+ dictWord{146, 0, 92},
+ dictWord{143, 10, 156},
+ dictWord{132, 0, 830},
+ dictWord{137, 0, 701},
+ dictWord{4, 11, 599},
+ dictWord{6, 11, 1634},
+ dictWord{7, 11, 5},
+ dictWord{7, 11, 55},
+ dictWord{7, 11, 67},
+ dictWord{7, 11, 97},
+ dictWord{7, 11, 691},
+ dictWord{7, 11, 979},
+ dictWord{7, 11, 1697},
+ dictWord{8, 11, 207},
+ dictWord{8, 11, 214},
+ dictWord{8, 11, 231},
+ dictWord{8, 11, 294},
+ dictWord{8, 11, 336},
+ dictWord{8, 11, 428},
+ dictWord{8, 11, 451},
+ dictWord{8, 11, 460},
+ dictWord{8, 11, 471},
+ dictWord{8, 11, 622},
+ dictWord{8, 11, 626},
+ dictWord{8, 11, 679},
+ dictWord{8, 11, 759},
+ dictWord{8, 11, 829},
+ dictWord{9, 11, 11},
+ dictWord{9, 11, 246},
+ dictWord{9, 11, 484},
+ dictWord{9, 11, 573},
+ dictWord{9, 11, 706},
+ dictWord{9, 11, 762},
+ dictWord{9, 11, 798},
+ dictWord{9, 11, 855},
+ dictWord{9, 11, 870},
+ dictWord{9, 11, 912},
+ dictWord{10, 11, 303},
+ dictWord{10, 11, 335},
+ dictWord{10, 11, 424},
+ dictWord{10, 11, 461},
+ dictWord{10, 11, 543},
+ dictWord{10, 11, 759},
+ dictWord{10, 11, 814},
+ dictWord{11, 11, 59},
+ dictWord{11, 11, 199},
+ dictWord{11, 11, 235},
+ dictWord{11, 11, 475},
+ dictWord{11, 11, 590},
+ dictWord{11, 11, 929},
+ dictWord{11, 11, 963},
+ dictWord{12, 11, 114},
+ dictWord{12, 11, 182},
+ dictWord{12, 11, 226},
+ dictWord{12, 11, 332},
+ dictWord{12, 11, 439},
+ dictWord{12, 11, 575},
+ dictWord{12, 11, 598},
+ dictWord{13, 11, 8},
+ dictWord{13, 11, 125},
+ dictWord{13, 11, 194},
+ dictWord{13, 11, 287},
+ dictWord{14, 11, 197},
+ dictWord{14, 11, 383},
+ dictWord{15, 11, 53},
+ dictWord{17, 11, 63},
+ dictWord{19, 11, 46},
+ dictWord{19, 11, 98},
+ dictWord{19, 11, 106},
+ dictWord{148, 11, 85},
+ dictWord{4, 0, 127},
+ dictWord{5, 0, 350},
+ dictWord{6, 0, 356},
+ dictWord{8, 0, 426},
+ dictWord{9, 0, 572},
+ dictWord{10, 0, 247},
+ dictWord{139, 0, 312},
+ dictWord{134, 0, 1215},
+ dictWord{6, 0, 59},
+ dictWord{9, 0, 603},
+ dictWord{13, 0, 397},
+ dictWord{7, 11, 1853},
+ dictWord{138, 11, 437},
+ dictWord{134, 0, 1762},
+ dictWord{147, 11, 126},
+ dictWord{135, 10, 883},
+ dictWord{13, 0, 293},
+ dictWord{142, 0, 56},
+ dictWord{133, 10, 617},
+ dictWord{139, 10, 50},
+ dictWord{5, 11, 187},
+ dictWord{7, 10, 1518},
+ dictWord{139, 10, 694},
+ dictWord{135, 0, 441},
+ dictWord{6, 0, 111},
+ dictWord{7, 0, 4},
+ dictWord{8, 0, 163},
+ dictWord{8, 0, 776},
+ dictWord{138, 0, 566},
+ dictWord{132, 0, 806},
+ dictWord{4, 11, 215},
+ dictWord{9, 11, 38},
+ dictWord{10, 11, 3},
+ dictWord{11, 11, 23},
+ dictWord{11, 11, 127},
+ dictWord{139, 11, 796},
+ dictWord{14, 0, 233},
+ dictWord{4, 10, 546},
+ dictWord{135, 10, 2042},
+ dictWord{135, 0, 1994},
+ dictWord{134, 0, 1739},
+ dictWord{135, 11, 1530},
+ dictWord{136, 0, 393},
+ dictWord{5, 0, 297},
+ dictWord{7, 0, 1038},
+ dictWord{14, 0, 359},
+ dictWord{19, 0, 52},
+ dictWord{148, 0, 47},
+ dictWord{135, 0, 309},
+ dictWord{4, 10, 313},
+ dictWord{133, 10, 577},
+ dictWord{8, 10, 184},
+ dictWord{141, 10, 433},
+ dictWord{135, 10, 935},
+ dictWord{12, 10, 186},
+ dictWord{12, 10, 292},
+ dictWord{14, 10, 100},
+ dictWord{146, 10, 70},
+ dictWord{136, 0, 363},
+ dictWord{14, 0, 175},
+ dictWord{11, 10, 402},
+ dictWord{12, 10, 109},
+ dictWord{12, 10, 431},
+ dictWord{13, 10, 179},
+ dictWord{13, 10, 206},
+ dictWord{14, 10, 217},
+ dictWord{16, 10, 3},
+ dictWord{148, 10, 53},
+ dictWord{5, 10, 886},
+ dictWord{6, 10, 46},
+ dictWord{6, 10, 1790},
+ dictWord{7, 10, 14},
+ dictWord{7, 10, 732},
+ dictWord{7, 10, 1654},
+ dictWord{8, 10, 95},
+ dictWord{8, 10, 327},
+ dictWord{8, 10, 616},
+ dictWord{9, 10, 892},
+ dictWord{10, 10, 598},
+ dictWord{10, 10, 769},
+ dictWord{11, 10, 134},
+ dictWord{11, 10, 747},
+ dictWord{12, 10, 378},
+ dictWord{142, 10, 97},
+ dictWord{136, 0, 666},
+ dictWord{135, 0, 1675},
+ dictWord{6, 0, 655},
+ dictWord{134, 0, 1600},
+ dictWord{135, 0, 808},
+ dictWord{133, 10, 1021},
+ dictWord{4, 11, 28},
+ dictWord{5, 11, 440},
+ dictWord{7, 11, 248},
+ dictWord{11, 11, 833},
+ dictWord{140, 11, 344},
+ dictWord{134, 11, 1654},
+ dictWord{132, 0, 280},
+ dictWord{140, 0, 54},
+ dictWord{4, 0, 421},
+ dictWord{133, 0, 548},
+ dictWord{132, 10, 153},
+ dictWord{6, 11, 339},
+ dictWord{135, 11, 923},
+ dictWord{133, 11, 853},
+ dictWord{133, 10, 798},
+ dictWord{132, 10, 587},
+ dictWord{6, 11, 249},
+ dictWord{7, 11, 1234},
+ dictWord{139, 11, 573},
+ dictWord{6, 10, 598},
+ dictWord{7, 10, 42},
+ dictWord{8, 10, 695},
+ dictWord{10, 10, 212},
+ dictWord{11, 10, 158},
+ dictWord{14, 10, 196},
+ dictWord{145, 10, 85},
+ dictWord{7, 0, 249},
+ dictWord{5, 10, 957},
+ dictWord{133, 10, 1008},
+ dictWord{4, 10, 129},
+ dictWord{135, 10, 465},
+ dictWord{6, 0, 254},
+ dictWord{7, 0, 842},
+ dictWord{7, 0, 1659},
+ dictWord{9, 0, 109},
+ dictWord{10, 0, 103},
+ dictWord{7, 10, 908},
+ dictWord{7, 10, 1201},
+ dictWord{9, 10, 755},
+ dictWord{11, 10, 906},
+ dictWord{12, 10, 527},
+ dictWord{146, 10, 7},
+ dictWord{5, 0, 262},
+ dictWord{136, 10, 450},
+ dictWord{144, 0, 1},
+ dictWord{10, 11, 201},
+ dictWord{142, 11, 319},
+ dictWord{7, 11, 49},
+ dictWord{7, 11, 392},
+ dictWord{8, 11, 20},
+ dictWord{8, 11, 172},
+ dictWord{8, 11, 690},
+ dictWord{9, 11, 383},
+ dictWord{9, 11, 845},
+ dictWord{10, 11, 48},
+ dictWord{11, 11, 293},
+ dictWord{11, 11, 832},
+ dictWord{11, 11, 920},
+ dictWord{141, 11, 221},
+ dictWord{5, 11, 858},
+ dictWord{133, 11, 992},
+ dictWord{134, 0, 805},
+ dictWord{139, 10, 1003},
+ dictWord{6, 0, 1630},
+ dictWord{134, 11, 307},
+ dictWord{7, 11, 1512},
+ dictWord{135, 11, 1794},
+ dictWord{6, 11, 268},
+ dictWord{137, 11, 62},
+ dictWord{135, 10, 1868},
+ dictWord{133, 0, 671},
+ dictWord{4, 0, 989},
+ dictWord{8, 0, 972},
+ dictWord{136, 0, 998},
+ dictWord{132, 11, 423},
+ dictWord{132, 0, 889},
+ dictWord{135, 0, 1382},
+ dictWord{135, 0, 1910},
+ dictWord{7, 10, 965},
+ dictWord{7, 10, 1460},
+ dictWord{135, 10, 1604},
+ dictWord{4, 0, 627},
+ dictWord{5, 0, 775},
+ dictWord{138, 11, 106},
+ dictWord{134, 11, 348},
+ dictWord{7, 0, 202},
+ dictWord{11, 0, 362},
+ dictWord{11, 0, 948},
+ dictWord{140, 0, 388},
+ dictWord{138, 11, 771},
+ dictWord{6, 11, 613},
+ dictWord{136, 11, 223},
+ dictWord{6, 0, 560},
+ dictWord{7, 0, 451},
+ dictWord{8, 0, 389},
+ dictWord{12, 0, 490},
+ dictWord{13, 0, 16},
+ dictWord{13, 0, 215},
+ dictWord{13, 0, 351},
+ dictWord{18, 0, 132},
+ dictWord{147, 0, 125},
+ dictWord{135, 0, 841},
+ dictWord{136, 0, 566},
+ dictWord{136, 0, 938},
+ dictWord{132, 11, 670},
+ dictWord{5, 0, 912},
+ dictWord{6, 0, 1695},
+ dictWord{140, 11, 55},
+ dictWord{9, 11, 40},
+ dictWord{139, 11, 136},
+ dictWord{7, 0, 1361},
+ dictWord{7, 10, 982},
+ dictWord{10, 10, 32},
+ dictWord{143, 10, 56},
+ dictWord{11, 11, 259},
+ dictWord{140, 11, 270},
+ dictWord{5, 0, 236},
+ dictWord{6, 0, 572},
+ dictWord{8, 0, 492},
+ dictWord{11, 0, 618},
+ dictWord{144, 0, 56},
+ dictWord{8, 11, 572},
+ dictWord{9, 11, 310},
+ dictWord{9, 11, 682},
+ dictWord{137, 11, 698},
+ dictWord{134, 0, 1854},
+ dictWord{5, 0, 190},
+ dictWord{136, 0, 318},
+ dictWord{133, 10, 435},
+ dictWord{135, 0, 1376},
+ dictWord{4, 11, 296},
+ dictWord{6, 11, 352},
+ dictWord{7, 11, 401},
+ dictWord{7, 11, 1410},
+ dictWord{7, 11, 1594},
+ dictWord{7, 11, 1674},
+ dictWord{8, 11, 63},
+ dictWord{8, 11, 660},
+ dictWord{137, 11, 74},
+ dictWord{7, 0, 349},
+ dictWord{5, 10, 85},
+ dictWord{6, 10, 419},
+ dictWord{7, 10, 305},
+ dictWord{7, 10, 361},
+ dictWord{7, 10, 1337},
+ dictWord{8, 10, 71},
+ dictWord{140, 10, 519},
+ dictWord{4, 11, 139},
+ dictWord{4, 11, 388},
+ dictWord{140, 11, 188},
+ dictWord{6, 0, 1972},
+ dictWord{6, 0, 2013},
+ dictWord{8, 0, 951},
+ dictWord{10, 0, 947},
+ dictWord{10, 0, 974},
+ dictWord{10, 0, 1018},
+ dictWord{142, 0, 476},
+ dictWord{140, 10, 688},
+ dictWord{135, 10, 740},
+ dictWord{5, 10, 691},
+ dictWord{7, 10, 345},
+ dictWord{9, 10, 94},
+ dictWord{140, 10, 169},
+ dictWord{9, 0, 344},
+ dictWord{5, 10, 183},
+ dictWord{6, 10, 582},
+ dictWord{10, 10, 679},
+ dictWord{140, 10, 435},
+ dictWord{135, 10, 511},
+ dictWord{132, 0, 850},
+ dictWord{8, 11, 441},
+ dictWord{10, 11, 314},
+ dictWord{143, 11, 3},
+ dictWord{7, 10, 1993},
+ dictWord{136, 10, 684},
+ dictWord{4, 11, 747},
+ dictWord{6, 11, 290},
+ dictWord{6, 10, 583},
+ dictWord{7, 11, 649},
+ dictWord{7, 11, 1479},
+ dictWord{135, 11, 1583},
+ dictWord{133, 11, 232},
+ dictWord{133, 10, 704},
+ dictWord{134, 0, 910},
+ dictWord{4, 10, 179},
+ dictWord{5, 10, 198},
+ dictWord{133, 10, 697},
+ dictWord{7, 10, 347},
+ dictWord{7, 10, 971},
+ dictWord{8, 10, 181},
+ dictWord{138, 10, 711},
+ dictWord{136, 11, 525},
+ dictWord{14, 0, 19},
+ dictWord{14, 0, 28},
+ dictWord{144, 0, 29},
+ dictWord{7, 0, 85},
+ dictWord{7, 0, 247},
+ dictWord{8, 0, 585},
+ dictWord{138, 0, 163},
+ dictWord{4, 0, 487},
+ dictWord{7, 11, 472},
+ dictWord{7, 11, 1801},
+ dictWord{10, 11, 748},
+ dictWord{141, 11, 458},
+ dictWord{4, 10, 243},
+ dictWord{5, 10, 203},
+ dictWord{7, 10, 19},
+ dictWord{7, 10, 71},
+ dictWord{7, 10, 113},
+ dictWord{10, 10, 405},
+ dictWord{11, 10, 357},
+ dictWord{142, 10, 240},
+ dictWord{7, 10, 1450},
+ dictWord{139, 10, 99},
+ dictWord{132, 11, 425},
+ dictWord{138, 0, 145},
+ dictWord{147, 0, 83},
+ dictWord{6, 10, 492},
+ dictWord{137, 11, 247},
+ dictWord{4, 0, 1013},
+ dictWord{134, 0, 2033},
+ dictWord{5, 10, 134},
+ dictWord{6, 10, 408},
+ dictWord{6, 10, 495},
+ dictWord{135, 10, 1593},
+ dictWord{135, 0, 1922},
+ dictWord{134, 11, 1768},
+ dictWord{4, 0, 124},
+ dictWord{10, 0, 457},
+ dictWord{11, 0, 121},
+ dictWord{11, 0, 169},
+ dictWord{11, 0, 870},
+ dictWord{11, 0, 874},
+ dictWord{12, 0, 214},
+ dictWord{14, 0, 187},
+ dictWord{143, 0, 77},
+ dictWord{5, 0, 557},
+ dictWord{135, 0, 1457},
+ dictWord{139, 0, 66},
+ dictWord{5, 11, 943},
+ dictWord{6, 11, 1779},
+ dictWord{142, 10, 4},
+ dictWord{4, 10, 248},
+ dictWord{4, 10, 665},
+ dictWord{7, 10, 137},
+ dictWord{137, 10, 349},
+ dictWord{7, 0, 1193},
+ dictWord{5, 11, 245},
+ dictWord{6, 11, 576},
+ dictWord{7, 11, 582},
+ dictWord{136, 11, 225},
+ dictWord{144, 0, 82},
+ dictWord{7, 10, 1270},
+ dictWord{139, 10, 612},
+ dictWord{5, 0, 454},
+ dictWord{10, 0, 352},
+ dictWord{138, 11, 352},
+ dictWord{18, 0, 57},
+ dictWord{5, 10, 371},
+ dictWord{135, 10, 563},
+ dictWord{135, 0, 1333},
+ dictWord{6, 0, 107},
+ dictWord{7, 0, 638},
+ dictWord{7, 0, 1632},
+ dictWord{9, 0, 396},
+ dictWord{134, 11, 610},
+ dictWord{5, 0, 370},
+ dictWord{134, 0, 1756},
+ dictWord{4, 10, 374},
+ dictWord{7, 10, 547},
+ dictWord{7, 10, 1700},
+ dictWord{7, 10, 1833},
+ dictWord{139, 10, 858},
+ dictWord{133, 0, 204},
+ dictWord{6, 0, 1305},
+ dictWord{9, 10, 311},
+ dictWord{141, 10, 42},
+ dictWord{5, 0, 970},
+ dictWord{134, 0, 1706},
+ dictWord{6, 10, 1647},
+ dictWord{7, 10, 1552},
+ dictWord{7, 10, 2010},
+ dictWord{9, 10, 494},
+ dictWord{137, 10, 509},
+ dictWord{13, 11, 455},
+ dictWord{15, 11, 99},
+ dictWord{15, 11, 129},
+ dictWord{144, 11, 68},
+ dictWord{135, 0, 3},
+ dictWord{4, 0, 35},
+ dictWord{5, 0, 121},
+ dictWord{5, 0, 483},
+ dictWord{5, 0, 685},
+ dictWord{6, 0, 489},
+ dictWord{6, 0, 782},
+ dictWord{6, 0, 1032},
+ dictWord{7, 0, 1204},
+ dictWord{136, 0, 394},
+ dictWord{4, 0, 921},
+ dictWord{133, 0, 1007},
+ dictWord{8, 11, 360},
+ dictWord{138, 11, 63},
+ dictWord{135, 0, 1696},
+ dictWord{134, 0, 1519},
+ dictWord{132, 11, 443},
+ dictWord{135, 11, 944},
+ dictWord{6, 10, 123},
+ dictWord{7, 10, 214},
+ dictWord{9, 10, 728},
+ dictWord{10, 10, 157},
+ dictWord{11, 10, 346},
+ dictWord{11, 10, 662},
+ dictWord{143, 10, 106},
+ dictWord{137, 0, 981},
+ dictWord{135, 10, 1435},
+ dictWord{134, 0, 1072},
+ dictWord{132, 0, 712},
+ dictWord{134, 0, 1629},
+ dictWord{134, 0, 728},
+ dictWord{4, 11, 298},
+ dictWord{137, 11, 483},
+ dictWord{6, 0, 1177},
+ dictWord{6, 0, 1271},
+ dictWord{5, 11, 164},
+ dictWord{7, 11, 121},
+ dictWord{142, 11, 189},
+ dictWord{7, 0, 1608},
+ dictWord{4, 10, 707},
+ dictWord{5, 10, 588},
+ dictWord{6, 10, 393},
+ dictWord{13, 10, 106},
+ dictWord{18, 10, 49},
+ dictWord{147, 10, 41},
+ dictWord{23, 0, 16},
+ dictWord{151, 11, 16},
+ dictWord{6, 10, 211},
+ dictWord{7, 10, 1690},
+ dictWord{11, 10, 486},
+ dictWord{140, 10, 369},
+ dictWord{133, 0, 485},
+ dictWord{19, 11, 15},
+ dictWord{149, 11, 27},
+ dictWord{4, 11, 172},
+ dictWord{9, 11, 611},
+ dictWord{10, 11, 436},
+ dictWord{12, 11, 673},
+ dictWord{141, 11, 255},
+ dictWord{5, 11, 844},
+ dictWord{10, 11, 484},
+ dictWord{11, 11, 754},
+ dictWord{12, 11, 457},
+ dictWord{14, 11, 171},
+ dictWord{14, 11, 389},
+ dictWord{146, 11, 153},
+ dictWord{4, 0, 285},
+ dictWord{5, 0, 27},
+ dictWord{5, 0, 317},
+ dictWord{6, 0, 301},
+ dictWord{7, 0, 7},
+ dictWord{8, 0, 153},
+ dictWord{10, 0, 766},
+ dictWord{11, 0, 468},
+ dictWord{12, 0, 467},
+ dictWord{141, 0, 143},
+ dictWord{134, 0, 1462},
+ dictWord{9, 11, 263},
+ dictWord{10, 11, 147},
+ dictWord{138, 11, 492},
+ dictWord{133, 11, 537},
+ dictWord{6, 0, 1945},
+ dictWord{6, 0, 1986},
+ dictWord{6, 0, 1991},
+ dictWord{134, 0, 2038},
+ dictWord{134, 10, 219},
+ dictWord{137, 11, 842},
+ dictWord{14, 0, 52},
+ dictWord{17, 0, 50},
+ dictWord{5, 10, 582},
+ dictWord{6, 10, 1646},
+ dictWord{7, 10, 99},
+ dictWord{7, 10, 1962},
+ dictWord{7, 10, 1986},
+ dictWord{8, 10, 515},
+ dictWord{8, 10, 773},
+ dictWord{9, 10, 23},
+ dictWord{9, 10, 491},
+ dictWord{12, 10, 620},
+ dictWord{142, 10, 93},
+ dictWord{138, 11, 97},
+ dictWord{20, 0, 21},
+ dictWord{20, 0, 44},
+ dictWord{133, 10, 851},
+ dictWord{136, 0, 819},
+ dictWord{139, 0, 917},
+ dictWord{5, 11, 230},
+ dictWord{5, 11, 392},
+ dictWord{6, 11, 420},
+ dictWord{8, 10, 762},
+ dictWord{8, 10, 812},
+ dictWord{9, 11, 568},
+ dictWord{9, 10, 910},
+ dictWord{140, 11, 612},
+ dictWord{135, 0, 784},
+ dictWord{15, 0, 135},
+ dictWord{143, 11, 135},
+ dictWord{10, 0, 454},
+ dictWord{140, 0, 324},
+ dictWord{4, 11, 0},
+ dictWord{5, 11, 41},
+ dictWord{7, 11, 1459},
+ dictWord{7, 11, 1469},
+ dictWord{7, 11, 1618},
+ dictWord{7, 11, 1859},
+ dictWord{9, 11, 549},
+ dictWord{139, 11, 905},
+ dictWord{4, 10, 98},
+ dictWord{7, 10, 1365},
+ dictWord{9, 10, 422},
+ dictWord{9, 10, 670},
+ dictWord{10, 10, 775},
+ dictWord{11, 10, 210},
+ dictWord{13, 10, 26},
+ dictWord{13, 10, 457},
+ dictWord{141, 10, 476},
+ dictWord{6, 0, 1719},
+ dictWord{6, 0, 1735},
+ dictWord{7, 0, 2016},
+ dictWord{7, 0, 2020},
+ dictWord{8, 0, 837},
+ dictWord{137, 0, 852},
+ dictWord{133, 11, 696},
+ dictWord{135, 0, 852},
+ dictWord{132, 0, 952},
+ dictWord{134, 10, 1730},
+ dictWord{132, 11, 771},
+ dictWord{138, 0, 568},
+ dictWord{137, 0, 448},
+ dictWord{139, 0, 146},
+ dictWord{8, 0, 67},
+ dictWord{138, 0, 419},
+ dictWord{133, 11, 921},
+ dictWord{137, 10, 147},
+ dictWord{134, 0, 1826},
+ dictWord{10, 0, 657},
+ dictWord{14, 0, 297},
+ dictWord{142, 0, 361},
+ dictWord{6, 0, 666},
+ dictWord{6, 0, 767},
+ dictWord{134, 0, 1542},
+ dictWord{139, 0, 729},
+ dictWord{6, 11, 180},
+ dictWord{7, 11, 1137},
+ dictWord{8, 11, 751},
+ dictWord{139, 11, 805},
+ dictWord{4, 11, 183},
+ dictWord{7, 11, 271},
+ dictWord{11, 11, 824},
+ dictWord{11, 11, 952},
+ dictWord{13, 11, 278},
+ dictWord{13, 11, 339},
+ dictWord{13, 11, 482},
+ dictWord{14, 11, 424},
+ dictWord{148, 11, 99},
+ dictWord{4, 0, 669},
+ dictWord{5, 11, 477},
+ dictWord{5, 11, 596},
+ dictWord{6, 11, 505},
+ dictWord{7, 11, 1221},
+ dictWord{11, 11, 907},
+ dictWord{12, 11, 209},
+ dictWord{141, 11, 214},
+ dictWord{135, 11, 1215},
+ dictWord{5, 0, 402},
+ dictWord{6, 10, 30},
+ dictWord{11, 10, 56},
+ dictWord{139, 10, 305},
+ dictWord{7, 11, 564},
+ dictWord{142, 11, 168},
+ dictWord{139, 0, 152},
+ dictWord{7, 0, 912},
+ dictWord{135, 10, 1614},
+ dictWord{4, 10, 150},
+ dictWord{5, 10, 303},
+ dictWord{134, 10, 327},
+ dictWord{7, 0, 320},
+ dictWord{8, 0, 51},
+ dictWord{9, 0, 868},
+ dictWord{10, 0, 833},
+ dictWord{12, 0, 481},
+ dictWord{12, 0, 570},
+ dictWord{148, 0, 106},
+ dictWord{132, 0, 445},
+ dictWord{7, 11, 274},
+ dictWord{11, 11, 263},
+ dictWord{11, 11, 479},
+ dictWord{11, 11, 507},
+ dictWord{140, 11, 277},
+ dictWord{10, 0, 555},
+ dictWord{11, 0, 308},
+ dictWord{19, 0, 95},
+ dictWord{6, 11, 1645},
+ dictWord{8, 10, 192},
+ dictWord{10, 10, 78},
+ dictWord{141, 10, 359},
+ dictWord{135, 10, 786},
+ dictWord{6, 11, 92},
+ dictWord{6, 11, 188},
+ dictWord{7, 11, 1269},
+ dictWord{7, 11, 1524},
+ dictWord{7, 11, 1876},
+ dictWord{10, 11, 228},
+ dictWord{139, 11, 1020},
+ dictWord{4, 11, 459},
+ dictWord{133, 11, 966},
+ dictWord{11, 0, 386},
+ dictWord{6, 10, 1638},
+ dictWord{7, 10, 79},
+ dictWord{7, 10, 496},
+ dictWord{9, 10, 138},
+ dictWord{10, 10, 336},
+ dictWord{12, 10, 412},
+ dictWord{12, 10, 440},
+ dictWord{142, 10, 305},
+ dictWord{133, 0, 239},
+ dictWord{7, 0, 83},
+ dictWord{7, 0, 1990},
+ dictWord{8, 0, 130},
+ dictWord{139, 0, 720},
+ dictWord{138, 11, 709},
+ dictWord{4, 0, 143},
+ dictWord{5, 0, 550},
+ dictWord{133, 0, 752},
+ dictWord{5, 0, 123},
+ dictWord{6, 0, 530},
+ dictWord{7, 0, 348},
+ dictWord{135, 0, 1419},
+ dictWord{135, 0, 2024},
+ dictWord{6, 11, 18},
+ dictWord{7, 11, 179},
+ dictWord{7, 11, 721},
+ dictWord{7, 11, 932},
+ dictWord{8, 11, 548},
+ dictWord{8, 11, 757},
+ dictWord{9, 11, 54},
+ dictWord{9, 11, 65},
+ dictWord{9, 11, 532},
+ dictWord{9, 11, 844},
+ dictWord{10, 11, 113},
+ dictWord{10, 11, 117},
+ dictWord{10, 11, 236},
+ dictWord{10, 11, 315},
+ dictWord{10, 11, 430},
+ dictWord{10, 11, 798},
+ dictWord{11, 11, 153},
+ dictWord{11, 11, 351},
+ dictWord{11, 11, 375},
+ dictWord{12, 11, 78},
+ dictWord{12, 11, 151},
+ dictWord{12, 11, 392},
+ dictWord{14, 11, 248},
+ dictWord{143, 11, 23},
+ dictWord{7, 10, 204},
+ dictWord{7, 10, 415},
+ dictWord{8, 10, 42},
+ dictWord{10, 10, 85},
+ dictWord{139, 10, 564},
+ dictWord{134, 0, 958},
+ dictWord{133, 11, 965},
+ dictWord{132, 0, 210},
+ dictWord{135, 11, 1429},
+ dictWord{138, 11, 480},
+ dictWord{134, 11, 182},
+ dictWord{139, 11, 345},
+ dictWord{10, 11, 65},
+ dictWord{10, 11, 488},
+ dictWord{138, 11, 497},
+ dictWord{4, 10, 3},
+ dictWord{5, 10, 247},
+ dictWord{5, 10, 644},
+ dictWord{7, 10, 744},
+ dictWord{7, 10, 1207},
+ dictWord{7, 10, 1225},
+ dictWord{7, 10, 1909},
+ dictWord{146, 10, 147},
+ dictWord{132, 0, 430},
+ dictWord{5, 10, 285},
+ dictWord{9, 10, 67},
+ dictWord{13, 10, 473},
+ dictWord{143, 10, 82},
+ dictWord{144, 11, 16},
+ dictWord{7, 11, 1162},
+ dictWord{9, 11, 588},
+ dictWord{10, 11, 260},
+ dictWord{151, 10, 8},
+ dictWord{133, 0, 213},
+ dictWord{138, 0, 7},
+ dictWord{135, 0, 801},
+ dictWord{134, 11, 1786},
+ dictWord{135, 11, 308},
+ dictWord{6, 0, 936},
+ dictWord{134, 0, 1289},
+ dictWord{133, 0, 108},
+ dictWord{132, 0, 885},
+ dictWord{133, 0, 219},
+ dictWord{139, 0, 587},
+ dictWord{4, 0, 193},
+ dictWord{5, 0, 916},
+ dictWord{6, 0, 1041},
+ dictWord{7, 0, 364},
+ dictWord{10, 0, 398},
+ dictWord{10, 0, 726},
+ dictWord{11, 0, 317},
+ dictWord{11, 0, 626},
+ dictWord{12, 0, 142},
+ dictWord{12, 0, 288},
+ dictWord{12, 0, 678},
+ dictWord{13, 0, 313},
+ dictWord{15, 0, 113},
+ dictWord{146, 0, 114},
+ dictWord{135, 0, 1165},
+ dictWord{6, 0, 241},
+ dictWord{9, 0, 342},
+ dictWord{10, 0, 729},
+ dictWord{11, 0, 284},
+ dictWord{11, 0, 445},
+ dictWord{11, 0, 651},
+ dictWord{11, 0, 863},
+ dictWord{13, 0, 398},
+ dictWord{146, 0, 99},
+ dictWord{7, 0, 907},
+ dictWord{136, 0, 832},
+ dictWord{9, 0, 303},
+ dictWord{4, 10, 29},
+ dictWord{6, 10, 532},
+ dictWord{7, 10, 1628},
+ dictWord{7, 10, 1648},
+ dictWord{9, 10, 350},
+ dictWord{10, 10, 433},
+ dictWord{11, 10, 97},
+ dictWord{11, 10, 557},
+ dictWord{11, 10, 745},
+ dictWord{12, 10, 289},
+ dictWord{12, 10, 335},
+ dictWord{12, 10, 348},
+ dictWord{12, 10, 606},
+ dictWord{13, 10, 116},
+ dictWord{13, 10, 233},
+ dictWord{13, 10, 466},
+ dictWord{14, 10, 181},
+ dictWord{14, 10, 209},
+ dictWord{14, 10, 232},
+ dictWord{14, 10, 236},
+ dictWord{14, 10, 300},
+ dictWord{16, 10, 41},
+ dictWord{148, 10, 97},
+ dictWord{7, 11, 423},
+ dictWord{7, 10, 1692},
+ dictWord{136, 11, 588},
+ dictWord{6, 0, 931},
+ dictWord{134, 0, 1454},
+ dictWord{5, 10, 501},
+ dictWord{7, 10, 1704},
+ dictWord{9, 10, 553},
+ dictWord{11, 10, 520},
+ dictWord{12, 10, 557},
+ dictWord{141, 10, 249},
+ dictWord{136, 11, 287},
+ dictWord{4, 0, 562},
+ dictWord{9, 0, 254},
+ dictWord{139, 0, 879},
+ dictWord{132, 0, 786},
+ dictWord{14, 11, 32},
+ dictWord{18, 11, 85},
+ dictWord{20, 11, 2},
+ dictWord{152, 11, 16},
+ dictWord{135, 0, 1294},
+ dictWord{7, 11, 723},
+ dictWord{135, 11, 1135},
+ dictWord{6, 0, 216},
+ dictWord{7, 0, 901},
+ dictWord{7, 0, 1343},
+ dictWord{8, 0, 493},
+ dictWord{134, 11, 403},
+ dictWord{7, 11, 719},
+ dictWord{8, 11, 809},
+ dictWord{136, 11, 834},
+ dictWord{5, 11, 210},
+ dictWord{6, 11, 213},
+ dictWord{7, 11, 60},
+ dictWord{10, 11, 364},
+ dictWord{139, 11, 135},
+ dictWord{7, 0, 341},
+ dictWord{11, 0, 219},
+ dictWord{5, 11, 607},
+ dictWord{8, 11, 326},
+ dictWord{136, 11, 490},
+ dictWord{4, 11, 701},
+ dictWord{5, 11, 472},
+ dictWord{5, 11, 639},
+ dictWord{7, 11, 1249},
+ dictWord{9, 11, 758},
+ dictWord{139, 11, 896},
+ dictWord{135, 11, 380},
+ dictWord{135, 11, 1947},
+ dictWord{139, 0, 130},
+ dictWord{135, 0, 1734},
+ dictWord{10, 0, 115},
+ dictWord{11, 0, 420},
+ dictWord{12, 0, 154},
+ dictWord{13, 0, 404},
+ dictWord{14, 0, 346},
+ dictWord{143, 0, 54},
+ dictWord{134, 10, 129},
+ dictWord{4, 11, 386},
+ dictWord{7, 11, 41},
+ dictWord{8, 11, 405},
+ dictWord{9, 11, 497},
+ dictWord{11, 11, 110},
+ dictWord{11, 11, 360},
+ dictWord{15, 11, 37},
+ dictWord{144, 11, 84},
+ dictWord{141, 11, 282},
+ dictWord{5, 11, 46},
+ dictWord{7, 11, 1452},
+ dictWord{7, 11, 1480},
+ dictWord{8, 11, 634},
+ dictWord{140, 11, 472},
+ dictWord{4, 11, 524},
+ dictWord{136, 11, 810},
+ dictWord{10, 11, 238},
+ dictWord{141, 11, 33},
+ dictWord{133, 0, 604},
+ dictWord{5, 0, 1011},
+ dictWord{136, 0, 701},
+ dictWord{8, 0, 856},
+ dictWord{8, 0, 858},
+ dictWord{8, 0, 879},
+ dictWord{12, 0, 702},
+ dictWord{142, 0, 447},
+ dictWord{4, 0, 54},
+ dictWord{5, 0, 666},
+ dictWord{7, 0, 1039},
+ dictWord{7, 0, 1130},
+ dictWord{9, 0, 195},
+ dictWord{138, 0, 302},
+ dictWord{4, 10, 25},
+ dictWord{5, 10, 60},
+ dictWord{6, 10, 504},
+ dictWord{7, 10, 614},
+ dictWord{7, 10, 1155},
+ dictWord{140, 10, 0},
+ dictWord{7, 10, 1248},
+ dictWord{11, 10, 621},
+ dictWord{139, 10, 702},
+ dictWord{133, 11, 997},
+ dictWord{137, 10, 321},
+ dictWord{134, 0, 1669},
+ dictWord{134, 0, 1791},
+ dictWord{4, 10, 379},
+ dictWord{135, 10, 1397},
+ dictWord{138, 11, 372},
+ dictWord{5, 11, 782},
+ dictWord{5, 11, 829},
+ dictWord{134, 11, 1738},
+ dictWord{135, 0, 1228},
+ dictWord{4, 10, 118},
+ dictWord{6, 10, 274},
+ dictWord{6, 10, 361},
+ dictWord{7, 10, 75},
+ dictWord{141, 10, 441},
+ dictWord{132, 0, 623},
+ dictWord{9, 11, 279},
+ dictWord{10, 11, 407},
+ dictWord{14, 11, 84},
+ dictWord{150, 11, 18},
+ dictWord{137, 10, 841},
+ dictWord{135, 0, 798},
+ dictWord{140, 10, 693},
+ dictWord{5, 10, 314},
+ dictWord{6, 10, 221},
+ dictWord{7, 10, 419},
+ dictWord{10, 10, 650},
+ dictWord{11, 10, 396},
+ dictWord{12, 10, 156},
+ dictWord{13, 10, 369},
+ dictWord{14, 10, 333},
+ dictWord{145, 10, 47},
+ dictWord{135, 11, 1372},
+ dictWord{7, 0, 122},
+ dictWord{9, 0, 259},
+ dictWord{10, 0, 84},
+ dictWord{11, 0, 470},
+ dictWord{12, 0, 541},
+ dictWord{141, 0, 379},
+ dictWord{134, 0, 837},
+ dictWord{8, 0, 1013},
+ dictWord{4, 11, 78},
+ dictWord{5, 11, 96},
+ dictWord{5, 11, 182},
+ dictWord{7, 11, 1724},
+ dictWord{7, 11, 1825},
+ dictWord{10, 11, 394},
+ dictWord{10, 11, 471},
+ dictWord{11, 11, 532},
+ dictWord{14, 11, 340},
+ dictWord{145, 11, 88},
+ dictWord{134, 0, 577},
+ dictWord{135, 11, 1964},
+ dictWord{132, 10, 913},
+ dictWord{134, 0, 460},
+ dictWord{8, 0, 891},
+ dictWord{10, 0, 901},
+ dictWord{10, 0, 919},
+ dictWord{10, 0, 932},
+ dictWord{12, 0, 715},
+ dictWord{12, 0, 728},
+ dictWord{12, 0, 777},
+ dictWord{14, 0, 457},
+ dictWord{144, 0, 103},
+ dictWord{5, 0, 82},
+ dictWord{5, 0, 131},
+ dictWord{7, 0, 1755},
+ dictWord{8, 0, 31},
+ dictWord{9, 0, 168},
+ dictWord{9, 0, 764},
+ dictWord{139, 0, 869},
+ dictWord{136, 10, 475},
+ dictWord{6, 0, 605},
+ dictWord{5, 10, 1016},
+ dictWord{9, 11, 601},
+ dictWord{9, 11, 619},
+ dictWord{10, 11, 505},
+ dictWord{10, 11, 732},
+ dictWord{11, 11, 355},
+ dictWord{140, 11, 139},
+ dictWord{7, 10, 602},
+ dictWord{8, 10, 179},
+ dictWord{10, 10, 781},
+ dictWord{140, 10, 126},
+ dictWord{134, 0, 1246},
+ dictWord{6, 10, 329},
+ dictWord{138, 10, 111},
+ dictWord{6, 11, 215},
+ dictWord{7, 11, 1028},
+ dictWord{7, 11, 1473},
+ dictWord{7, 11, 1721},
+ dictWord{9, 11, 424},
+ dictWord{138, 11, 779},
+ dictWord{5, 0, 278},
+ dictWord{137, 0, 68},
+ dictWord{6, 0, 932},
+ dictWord{6, 0, 1084},
+ dictWord{144, 0, 86},
+ dictWord{4, 0, 163},
+ dictWord{5, 0, 201},
+ dictWord{5, 0, 307},
+ dictWord{5, 0, 310},
+ dictWord{6, 0, 335},
+ dictWord{7, 0, 284},
+ dictWord{7, 0, 1660},
+ dictWord{136, 0, 165},
+ dictWord{136, 0, 781},
+ dictWord{134, 0, 707},
+ dictWord{6, 0, 33},
+ dictWord{135, 0, 1244},
+ dictWord{5, 10, 821},
+ dictWord{6, 11, 67},
+ dictWord{6, 10, 1687},
+ dictWord{7, 11, 258},
+ dictWord{7, 11, 1630},
+ dictWord{9, 11, 354},
+ dictWord{9, 11, 675},
+ dictWord{10, 11, 830},
+ dictWord{14, 11, 80},
+ dictWord{145, 11, 80},
+ dictWord{6, 11, 141},
+ dictWord{7, 11, 225},
+ dictWord{9, 11, 59},
+ dictWord{9, 11, 607},
+ dictWord{10, 11, 312},
+ dictWord{11, 11, 687},
+ dictWord{12, 11, 555},
+ dictWord{13, 11, 373},
+ dictWord{13, 11, 494},
+ dictWord{148, 11, 58},
+ dictWord{134, 0, 1113},
+ dictWord{9, 0, 388},
+ dictWord{5, 10, 71},
+ dictWord{7, 10, 1407},
+ dictWord{9, 10, 704},
+ dictWord{10, 10, 261},
+ dictWord{10, 10, 619},
+ dictWord{11, 10, 547},
+ dictWord{11, 10, 619},
+ dictWord{143, 10, 157},
+ dictWord{7, 0, 1953},
+ dictWord{136, 0, 720},
+ dictWord{138, 0, 203},
+ dictWord{7, 10, 2008},
+ dictWord{9, 10, 337},
+ dictWord{138, 10, 517},
+ dictWord{6, 0, 326},
+ dictWord{7, 0, 677},
+ dictWord{137, 0, 425},
+ dictWord{139, 11, 81},
+ dictWord{7, 0, 1316},
+ dictWord{7, 0, 1412},
+ dictWord{7, 0, 1839},
+ dictWord{9, 0, 589},
+ dictWord{11, 0, 241},
+ dictWord{11, 0, 676},
+ dictWord{11, 0, 811},
+ dictWord{11, 0, 891},
+ dictWord{12, 0, 140},
+ dictWord{12, 0, 346},
+ dictWord{12, 0, 479},
+ dictWord{13, 0, 140},
+ dictWord{13, 0, 381},
+ dictWord{14, 0, 188},
+ dictWord{18, 0, 30},
+ dictWord{148, 0, 108},
+ dictWord{5, 0, 416},
+ dictWord{6, 10, 86},
+ dictWord{6, 10, 603},
+ dictWord{7, 10, 292},
+ dictWord{7, 10, 561},
+ dictWord{8, 10, 257},
+ dictWord{8, 10, 382},
+ dictWord{9, 10, 721},
+ dictWord{9, 10, 778},
+ dictWord{11, 10, 581},
+ dictWord{140, 10, 466},
+ dictWord{4, 10, 486},
+ dictWord{133, 10, 491},
+ dictWord{134, 0, 1300},
+ dictWord{132, 10, 72},
+ dictWord{7, 0, 847},
+ dictWord{6, 10, 265},
+ dictWord{7, 11, 430},
+ dictWord{139, 11, 46},
+ dictWord{5, 11, 602},
+ dictWord{6, 11, 106},
+ dictWord{7, 11, 1786},
+ dictWord{7, 11, 1821},
+ dictWord{7, 11, 2018},
+ dictWord{9, 11, 418},
+ dictWord{137, 11, 763},
+ dictWord{5, 0, 358},
+ dictWord{7, 0, 535},
+ dictWord{7, 0, 1184},
+ dictWord{10, 0, 662},
+ dictWord{13, 0, 212},
+ dictWord{13, 0, 304},
+ dictWord{13, 0, 333},
+ dictWord{145, 0, 98},
+ dictWord{5, 11, 65},
+ dictWord{6, 11, 416},
+ dictWord{7, 11, 1720},
+ dictWord{7, 11, 1924},
+ dictWord{8, 11, 677},
+ dictWord{10, 11, 109},
+ dictWord{11, 11, 14},
+ dictWord{11, 11, 70},
+ dictWord{11, 11, 569},
+ dictWord{11, 11, 735},
+ dictWord{15, 11, 153},
+ dictWord{148, 11, 80},
+ dictWord{6, 0, 1823},
+ dictWord{8, 0, 839},
+ dictWord{8, 0, 852},
+ dictWord{8, 0, 903},
+ dictWord{10, 0, 940},
+ dictWord{12, 0, 707},
+ dictWord{140, 0, 775},
+ dictWord{135, 11, 1229},
+ dictWord{6, 0, 1522},
+ dictWord{140, 0, 654},
+ dictWord{136, 11, 595},
+ dictWord{139, 0, 163},
+ dictWord{141, 0, 314},
+ dictWord{132, 0, 978},
+ dictWord{4, 0, 601},
+ dictWord{6, 0, 2035},
+ dictWord{137, 10, 234},
+ dictWord{5, 10, 815},
+ dictWord{6, 10, 1688},
+ dictWord{134, 10, 1755},
+ dictWord{133, 0, 946},
+ dictWord{136, 0, 434},
+ dictWord{6, 10, 197},
+ dictWord{136, 10, 205},
+ dictWord{7, 0, 411},
+ dictWord{7, 0, 590},
+ dictWord{8, 0, 631},
+ dictWord{9, 0, 323},
+ dictWord{10, 0, 355},
+ dictWord{11, 0, 491},
+ dictWord{12, 0, 143},
+ dictWord{12, 0, 402},
+ dictWord{13, 0, 73},
+ dictWord{14, 0, 408},
+ dictWord{15, 0, 107},
+ dictWord{146, 0, 71},
+ dictWord{7, 0, 1467},
+ dictWord{8, 0, 328},
+ dictWord{10, 0, 544},
+ dictWord{11, 0, 955},
+ dictWord{12, 0, 13},
+ dictWord{13, 0, 320},
+ dictWord{145, 0, 83},
+ dictWord{142, 0, 410},
+ dictWord{11, 0, 511},
+ dictWord{13, 0, 394},
+ dictWord{14, 0, 298},
+ dictWord{14, 0, 318},
+ dictWord{146, 0, 103},
+ dictWord{6, 10, 452},
+ dictWord{7, 10, 312},
+ dictWord{138, 10, 219},
+ dictWord{138, 10, 589},
+ dictWord{4, 10, 333},
+ dictWord{9, 10, 176},
+ dictWord{12, 10, 353},
+ dictWord{141, 10, 187},
+ dictWord{135, 11, 329},
+ dictWord{132, 11, 469},
+ dictWord{5, 0, 835},
+ dictWord{134, 0, 483},
+ dictWord{134, 11, 1743},
+ dictWord{5, 11, 929},
+ dictWord{6, 11, 340},
+ dictWord{8, 11, 376},
+ dictWord{136, 11, 807},
+ dictWord{134, 10, 1685},
+ dictWord{132, 0, 677},
+ dictWord{5, 11, 218},
+ dictWord{7, 11, 1610},
+ dictWord{138, 11, 83},
+ dictWord{5, 11, 571},
+ dictWord{135, 11, 1842},
+ dictWord{132, 11, 455},
+ dictWord{137, 0, 70},
+ dictWord{135, 0, 1405},
+ dictWord{7, 10, 135},
+ dictWord{8, 10, 7},
+ dictWord{8, 10, 62},
+ dictWord{9, 10, 243},
+ dictWord{10, 10, 658},
+ dictWord{10, 10, 697},
+ dictWord{11, 10, 456},
+ dictWord{139, 10, 756},
+ dictWord{9, 10, 395},
+ dictWord{138, 10, 79},
+ dictWord{137, 0, 108},
+ dictWord{6, 11, 161},
+ dictWord{7, 11, 372},
+ dictWord{137, 11, 597},
+ dictWord{132, 11, 349},
+ dictWord{132, 0, 777},
+ dictWord{132, 0, 331},
+ dictWord{135, 10, 631},
+ dictWord{133, 0, 747},
+ dictWord{6, 11, 432},
+ dictWord{6, 11, 608},
+ dictWord{139, 11, 322},
+ dictWord{138, 10, 835},
+ dictWord{5, 11, 468},
+ dictWord{7, 11, 1809},
+ dictWord{10, 11, 325},
+ dictWord{11, 11, 856},
+ dictWord{12, 11, 345},
+ dictWord{143, 11, 104},
+ dictWord{133, 11, 223},
+ dictWord{7, 10, 406},
+ dictWord{7, 10, 459},
+ dictWord{8, 10, 606},
+ dictWord{139, 10, 726},
+ dictWord{132, 11, 566},
+ dictWord{142, 0, 68},
+ dictWord{4, 11, 59},
+ dictWord{135, 11, 1394},
+ dictWord{6, 11, 436},
+ dictWord{139, 11, 481},
+ dictWord{4, 11, 48},
+ dictWord{5, 11, 271},
+ dictWord{135, 11, 953},
+ dictWord{139, 11, 170},
+ dictWord{5, 11, 610},
+ dictWord{136, 11, 457},
+ dictWord{133, 11, 755},
+ dictWord{135, 11, 1217},
+ dictWord{133, 10, 612},
+ dictWord{132, 11, 197},
+ dictWord{132, 0, 505},
+ dictWord{4, 10, 372},
+ dictWord{7, 10, 482},
+ dictWord{8, 10, 158},
+ dictWord{9, 10, 602},
+ dictWord{9, 10, 615},
+ dictWord{10, 10, 245},
+ dictWord{10, 10, 678},
+ dictWord{10, 10, 744},
+ dictWord{11, 10, 248},
+ dictWord{139, 10, 806},
+ dictWord{133, 0, 326},
+ dictWord{5, 10, 854},
+ dictWord{135, 10, 1991},
+ dictWord{4, 0, 691},
+ dictWord{146, 0, 16},
+ dictWord{6, 0, 628},
+ dictWord{9, 0, 35},
+ dictWord{10, 0, 680},
+ dictWord{10, 0, 793},
+ dictWord{11, 0, 364},
+ dictWord{13, 0, 357},
+ dictWord{143, 0, 164},
+ dictWord{138, 0, 654},
+ dictWord{6, 0, 32},
+ dictWord{7, 0, 385},
+ dictWord{7, 0, 757},
+ dictWord{7, 0, 1916},
+ dictWord{8, 0, 37},
+ dictWord{8, 0, 94},
+ dictWord{8, 0, 711},
+ dictWord{9, 0, 541},
+ dictWord{10, 0, 162},
+ dictWord{10, 0, 795},
+ dictWord{11, 0, 989},
+ dictWord{11, 0, 1010},
+ dictWord{12, 0, 14},
+ dictWord{142, 0, 308},
+ dictWord{133, 11, 217},
+ dictWord{6, 0, 152},
+ dictWord{6, 0, 349},
+ dictWord{6, 0, 1682},
+ dictWord{7, 0, 1252},
+ dictWord{8, 0, 112},
+ dictWord{9, 0, 435},
+ dictWord{9, 0, 668},
+ dictWord{10, 0, 290},
+ dictWord{10, 0, 319},
+ dictWord{10, 0, 815},
+ dictWord{11, 0, 180},
+ dictWord{11, 0, 837},
+ dictWord{12, 0, 240},
+ dictWord{13, 0, 152},
+ dictWord{13, 0, 219},
+ dictWord{142, 0, 158},
+ dictWord{4, 0, 581},
+ dictWord{134, 0, 726},
+ dictWord{5, 10, 195},
+ dictWord{135, 10, 1685},
+ dictWord{6, 0, 126},
+ dictWord{7, 0, 573},
+ dictWord{8, 0, 397},
+ dictWord{142, 0, 44},
+ dictWord{138, 0, 89},
+ dictWord{7, 10, 1997},
+ dictWord{8, 10, 730},
+ dictWord{139, 10, 1006},
+ dictWord{134, 0, 1531},
+ dictWord{134, 0, 1167},
+ dictWord{5, 0, 926},
+ dictWord{12, 0, 203},
+ dictWord{133, 10, 751},
+ dictWord{4, 11, 165},
+ dictWord{7, 11, 1398},
+ dictWord{135, 11, 1829},
+ dictWord{7, 0, 1232},
+ dictWord{137, 0, 531},
+ dictWord{135, 10, 821},
+ dictWord{134, 0, 943},
+ dictWord{133, 0, 670},
+ dictWord{4, 0, 880},
+ dictWord{139, 0, 231},
+ dictWord{134, 0, 1617},
+ dictWord{135, 0, 1957},
+ dictWord{5, 11, 9},
+ dictWord{7, 11, 297},
+ dictWord{7, 11, 966},
+ dictWord{140, 11, 306},
+ dictWord{6, 0, 975},
+ dictWord{134, 0, 985},
+ dictWord{5, 10, 950},
+ dictWord{5, 10, 994},
+ dictWord{134, 10, 351},
+ dictWord{12, 11, 21},
+ dictWord{151, 11, 7},
+ dictWord{5, 11, 146},
+ dictWord{6, 11, 411},
+ dictWord{138, 11, 721},
+ dictWord{7, 0, 242},
+ dictWord{135, 0, 1942},
+ dictWord{6, 11, 177},
+ dictWord{135, 11, 467},
+ dictWord{5, 0, 421},
+ dictWord{7, 10, 47},
+ dictWord{137, 10, 684},
+ dictWord{5, 0, 834},
+ dictWord{7, 0, 1202},
+ dictWord{8, 0, 14},
+ dictWord{9, 0, 481},
+ dictWord{137, 0, 880},
+ dictWord{138, 0, 465},
+ dictWord{6, 0, 688},
+ dictWord{9, 0, 834},
+ dictWord{132, 10, 350},
+ dictWord{132, 0, 855},
+ dictWord{4, 0, 357},
+ dictWord{6, 0, 172},
+ dictWord{7, 0, 143},
+ dictWord{137, 0, 413},
+ dictWord{133, 11, 200},
+ dictWord{132, 0, 590},
+ dictWord{7, 10, 1812},
+ dictWord{13, 10, 259},
+ dictWord{13, 10, 356},
+ dictWord{14, 10, 242},
+ dictWord{147, 10, 114},
+ dictWord{133, 10, 967},
+ dictWord{11, 0, 114},
+ dictWord{4, 10, 473},
+ dictWord{7, 10, 623},
+ dictWord{8, 10, 808},
+ dictWord{9, 10, 871},
+ dictWord{9, 10, 893},
+ dictWord{11, 10, 431},
+ dictWord{12, 10, 112},
+ dictWord{12, 10, 217},
+ dictWord{12, 10, 243},
+ dictWord{12, 10, 562},
+ dictWord{12, 10, 663},
+ dictWord{12, 10, 683},
+ dictWord{13, 10, 141},
+ dictWord{13, 10, 197},
+ dictWord{13, 10, 227},
+ dictWord{13, 10, 406},
+ dictWord{13, 10, 487},
+ dictWord{14, 10, 156},
+ dictWord{14, 10, 203},
+ dictWord{14, 10, 224},
+ dictWord{14, 10, 256},
+ dictWord{18, 10, 58},
+ dictWord{150, 10, 0},
+ dictWord{138, 10, 286},
+ dictWord{4, 10, 222},
+ dictWord{7, 10, 286},
+ dictWord{136, 10, 629},
+ dictWord{5, 0, 169},
+ dictWord{7, 0, 333},
+ dictWord{136, 0, 45},
+ dictWord{134, 11, 481},
+ dictWord{132, 0, 198},
+ dictWord{4, 0, 24},
+ dictWord{5, 0, 140},
+ dictWord{5, 0, 185},
+ dictWord{7, 0, 1500},
+ dictWord{11, 0, 565},
+ dictWord{11, 0, 838},
+ dictWord{4, 11, 84},
+ dictWord{7, 11, 1482},
+ dictWord{10, 11, 76},
+ dictWord{138, 11, 142},
+ dictWord{133, 0, 585},
+ dictWord{141, 10, 306},
+ dictWord{133, 11, 1015},
+ dictWord{4, 11, 315},
+ dictWord{5, 11, 507},
+ dictWord{135, 11, 1370},
+ dictWord{136, 10, 146},
+ dictWord{6, 0, 691},
+ dictWord{134, 0, 1503},
+ dictWord{4, 0, 334},
+ dictWord{133, 0, 593},
+ dictWord{4, 10, 465},
+ dictWord{135, 10, 1663},
+ dictWord{142, 11, 173},
+ dictWord{135, 0, 913},
+ dictWord{12, 0, 116},
+ dictWord{134, 11, 1722},
+ dictWord{134, 0, 1360},
+ dictWord{132, 0, 802},
+ dictWord{8, 11, 222},
+ dictWord{8, 11, 476},
+ dictWord{9, 11, 238},
+ dictWord{11, 11, 516},
+ dictWord{11, 11, 575},
+ dictWord{15, 11, 109},
+ dictWord{146, 11, 100},
+ dictWord{6, 0, 308},
+ dictWord{9, 0, 673},
+ dictWord{7, 10, 138},
+ dictWord{7, 10, 517},
+ dictWord{139, 10, 238},
+ dictWord{132, 0, 709},
+ dictWord{6, 0, 1876},
+ dictWord{6, 0, 1895},
+ dictWord{9, 0, 994},
+ dictWord{9, 0, 1006},
+ dictWord{12, 0, 829},
+ dictWord{12, 0, 888},
+ dictWord{12, 0, 891},
+ dictWord{146, 0, 185},
+ dictWord{148, 10, 94},
+ dictWord{4, 0, 228},
+ dictWord{133, 0, 897},
+ dictWord{7, 0, 1840},
+ dictWord{5, 10, 495},
+ dictWord{7, 10, 834},
+ dictWord{9, 10, 733},
+ dictWord{139, 10, 378},
+ dictWord{133, 10, 559},
+ dictWord{6, 10, 21},
+ dictWord{6, 10, 1737},
+ dictWord{7, 10, 1444},
+ dictWord{136, 10, 224},
+ dictWord{4, 0, 608},
+ dictWord{133, 0, 497},
+ dictWord{6, 11, 40},
+ dictWord{135, 11, 1781},
+ dictWord{134, 0, 1573},
+ dictWord{135, 0, 2039},
+ dictWord{6, 0, 540},
+ dictWord{136, 0, 136},
+ dictWord{4, 0, 897},
+ dictWord{5, 0, 786},
+ dictWord{133, 10, 519},
+ dictWord{6, 0, 1878},
+ dictWord{6, 0, 1884},
+ dictWord{9, 0, 938},
+ dictWord{9, 0, 948},
+ dictWord{9, 0, 955},
+ dictWord{9, 0, 973},
+ dictWord{9, 0, 1012},
+ dictWord{12, 0, 895},
+ dictWord{12, 0, 927},
+ dictWord{143, 0, 254},
+ dictWord{134, 0, 1469},
+ dictWord{133, 0, 999},
+ dictWord{4, 0, 299},
+ dictWord{135, 0, 1004},
+ dictWord{4, 0, 745},
+ dictWord{133, 0, 578},
+ dictWord{136, 11, 574},
+ dictWord{133, 0, 456},
+ dictWord{134, 0, 1457},
+ dictWord{7, 0, 1679},
+ dictWord{132, 10, 402},
+ dictWord{7, 0, 693},
+ dictWord{8, 0, 180},
+ dictWord{12, 0, 163},
+ dictWord{8, 10, 323},
+ dictWord{136, 10, 479},
+ dictWord{11, 10, 580},
+ dictWord{142, 10, 201},
+ dictWord{5, 10, 59},
+ dictWord{135, 10, 672},
+ dictWord{132, 11, 354},
+ dictWord{146, 10, 34},
+ dictWord{4, 0, 755},
+ dictWord{135, 11, 1558},
+ dictWord{7, 0, 1740},
+ dictWord{146, 0, 48},
+ dictWord{4, 10, 85},
+ dictWord{135, 10, 549},
+ dictWord{139, 0, 338},
+ dictWord{133, 10, 94},
+ dictWord{134, 0, 1091},
+ dictWord{135, 11, 469},
+ dictWord{12, 0, 695},
+ dictWord{12, 0, 704},
+ dictWord{20, 0, 113},
+ dictWord{5, 11, 830},
+ dictWord{14, 11, 338},
+ dictWord{148, 11, 81},
+ dictWord{135, 0, 1464},
+ dictWord{6, 10, 11},
+ dictWord{135, 10, 187},
+ dictWord{135, 0, 975},
+ dictWord{13, 0, 335},
+ dictWord{132, 10, 522},
+ dictWord{134, 0, 1979},
+ dictWord{5, 11, 496},
+ dictWord{135, 11, 203},
+ dictWord{4, 10, 52},
+ dictWord{135, 10, 661},
+ dictWord{7, 0, 1566},
+ dictWord{8, 0, 269},
+ dictWord{9, 0, 212},
+ dictWord{9, 0, 718},
+ dictWord{14, 0, 15},
+ dictWord{14, 0, 132},
+ dictWord{142, 0, 227},
+ dictWord{4, 0, 890},
+ dictWord{5, 0, 805},
+ dictWord{5, 0, 819},
+ dictWord{5, 0, 961},
+ dictWord{6, 0, 396},
+ dictWord{6, 0, 1631},
+ dictWord{6, 0, 1678},
+ dictWord{7, 0, 1967},
+ dictWord{7, 0, 2041},
+ dictWord{9, 0, 630},
+ dictWord{11, 0, 8},
+ dictWord{11, 0, 1019},
+ dictWord{12, 0, 176},
+ dictWord{13, 0, 225},
+ dictWord{14, 0, 292},
+ dictWord{21, 0, 24},
+ dictWord{4, 10, 383},
+ dictWord{133, 10, 520},
+ dictWord{134, 11, 547},
+ dictWord{135, 11, 1748},
+ dictWord{5, 11, 88},
+ dictWord{137, 11, 239},
+ dictWord{146, 11, 128},
+ dictWord{7, 11, 650},
+ dictWord{135, 11, 1310},
+ dictWord{4, 10, 281},
+ dictWord{5, 10, 38},
+ dictWord{7, 10, 194},
+ dictWord{7, 10, 668},
+ dictWord{7, 10, 1893},
+ dictWord{137, 10, 397},
+ dictWord{135, 0, 1815},
+ dictWord{9, 10, 635},
+ dictWord{139, 10, 559},
+ dictWord{7, 0, 1505},
+ dictWord{10, 0, 190},
+ dictWord{10, 0, 634},
+ dictWord{11, 0, 792},
+ dictWord{12, 0, 358},
+ dictWord{140, 0, 447},
+ dictWord{5, 0, 0},
+ dictWord{6, 0, 536},
+ dictWord{7, 0, 604},
+ dictWord{13, 0, 445},
+ dictWord{145, 0, 126},
+ dictWord{7, 11, 1076},
+ dictWord{9, 11, 80},
+ dictWord{11, 11, 78},
+ dictWord{11, 11, 421},
+ dictWord{11, 11, 534},
+ dictWord{140, 11, 545},
+ dictWord{8, 0, 966},
+ dictWord{10, 0, 1023},
+ dictWord{14, 11, 369},
+ dictWord{146, 11, 72},
+ dictWord{135, 11, 1641},
+ dictWord{6, 0, 232},
+ dictWord{6, 0, 412},
+ dictWord{7, 0, 1074},
+ dictWord{8, 0, 9},
+ dictWord{8, 0, 157},
+ dictWord{8, 0, 786},
+ dictWord{9, 0, 196},
+ dictWord{9, 0, 352},
+ dictWord{9, 0, 457},
+ dictWord{10, 0, 337},
+ dictWord{11, 0, 232},
+ dictWord{11, 0, 877},
+ dictWord{12, 0, 480},
+ dictWord{140, 0, 546},
+ dictWord{135, 0, 958},
+ dictWord{4, 0, 382},
+ dictWord{136, 0, 579},
+ dictWord{4, 0, 212},
+ dictWord{135, 0, 1206},
+ dictWord{4, 11, 497},
+ dictWord{5, 11, 657},
+ dictWord{135, 11, 1584},
+ dictWord{132, 0, 681},
+ dictWord{8, 0, 971},
+ dictWord{138, 0, 965},
+ dictWord{5, 10, 448},
+ dictWord{136, 10, 535},
+ dictWord{14, 0, 16},
+ dictWord{146, 0, 44},
+ dictWord{11, 0, 584},
+ dictWord{11, 0, 616},
+ dictWord{14, 0, 275},
+ dictWord{11, 11, 584},
+ dictWord{11, 11, 616},
+ dictWord{142, 11, 275},
+ dictWord{136, 11, 13},
+ dictWord{7, 10, 610},
+ dictWord{135, 10, 1501},
+ dictWord{7, 11, 642},
+ dictWord{8, 11, 250},
+ dictWord{11, 11, 123},
+ dictWord{11, 11, 137},
+ dictWord{13, 11, 48},
+ dictWord{142, 11, 95},
+ dictWord{133, 0, 655},
+ dictWord{17, 0, 67},
+ dictWord{147, 0, 74},
+ dictWord{134, 0, 751},
+ dictWord{134, 0, 1967},
+ dictWord{6, 0, 231},
+ dictWord{136, 0, 423},
+ dictWord{5, 0, 300},
+ dictWord{138, 0, 1016},
+ dictWord{4, 10, 319},
+ dictWord{5, 10, 699},
+ dictWord{138, 10, 673},
+ dictWord{6, 0, 237},
+ dictWord{7, 0, 611},
+ dictWord{8, 0, 100},
+ dictWord{9, 0, 416},
+ dictWord{11, 0, 335},
+ dictWord{12, 0, 173},
+ dictWord{18, 0, 101},
+ dictWord{6, 10, 336},
+ dictWord{8, 10, 552},
+ dictWord{9, 10, 285},
+ dictWord{10, 10, 99},
+ dictWord{139, 10, 568},
+ dictWord{134, 0, 1370},
+ dictWord{7, 10, 1406},
+ dictWord{9, 10, 218},
+ dictWord{141, 10, 222},
+ dictWord{133, 10, 256},
+ dictWord{135, 0, 1208},
+ dictWord{14, 11, 213},
+ dictWord{148, 11, 38},
+ dictWord{6, 0, 1219},
+ dictWord{135, 11, 1642},
+ dictWord{13, 0, 417},
+ dictWord{14, 0, 129},
+ dictWord{143, 0, 15},
+ dictWord{10, 11, 545},
+ dictWord{140, 11, 301},
+ dictWord{17, 10, 39},
+ dictWord{148, 10, 36},
+ dictWord{133, 0, 199},
+ dictWord{4, 11, 904},
+ dictWord{133, 11, 794},
+ dictWord{12, 0, 427},
+ dictWord{146, 0, 38},
+ dictWord{134, 0, 949},
+ dictWord{8, 0, 665},
+ dictWord{135, 10, 634},
+ dictWord{132, 10, 618},
+ dictWord{135, 10, 259},
+ dictWord{132, 10, 339},
+ dictWord{133, 11, 761},
+ dictWord{141, 10, 169},
+ dictWord{132, 10, 759},
+ dictWord{5, 0, 688},
+ dictWord{7, 0, 539},
+ dictWord{135, 0, 712},
+ dictWord{7, 11, 386},
+ dictWord{138, 11, 713},
+ dictWord{134, 0, 1186},
+ dictWord{6, 11, 7},
+ dictWord{6, 11, 35},
+ dictWord{7, 11, 147},
+ dictWord{7, 11, 1069},
+ dictWord{7, 11, 1568},
+ dictWord{7, 11, 1575},
+ dictWord{7, 11, 1917},
+ dictWord{8, 11, 43},
+ dictWord{8, 11, 208},
+ dictWord{9, 11, 128},
+ dictWord{9, 11, 866},
+ dictWord{10, 11, 20},
+ dictWord{11, 11, 981},
+ dictWord{147, 11, 33},
+ dictWord{7, 11, 893},
+ dictWord{8, 10, 482},
+ dictWord{141, 11, 424},
+ dictWord{6, 0, 312},
+ dictWord{6, 0, 1715},
+ dictWord{10, 0, 584},
+ dictWord{11, 0, 546},
+ dictWord{11, 0, 692},
+ dictWord{12, 0, 259},
+ dictWord{12, 0, 295},
+ dictWord{13, 0, 46},
+ dictWord{141, 0, 154},
+ dictWord{5, 10, 336},
+ dictWord{6, 10, 341},
+ dictWord{6, 10, 478},
+ dictWord{6, 10, 1763},
+ dictWord{136, 10, 386},
+ dictWord{137, 0, 151},
+ dictWord{132, 0, 588},
+ dictWord{152, 0, 4},
+ dictWord{6, 11, 322},
+ dictWord{9, 11, 552},
+ dictWord{11, 11, 274},
+ dictWord{13, 11, 209},
+ dictWord{13, 11, 499},
+ dictWord{14, 11, 85},
+ dictWord{15, 11, 126},
+ dictWord{145, 11, 70},
+ dictWord{135, 10, 73},
+ dictWord{4, 0, 231},
+ dictWord{5, 0, 61},
+ dictWord{6, 0, 104},
+ dictWord{7, 0, 729},
+ dictWord{7, 0, 964},
+ dictWord{7, 0, 1658},
+ dictWord{140, 0, 414},
+ dictWord{6, 0, 263},
+ dictWord{138, 0, 757},
+ dictWord{135, 10, 1971},
+ dictWord{4, 0, 612},
+ dictWord{133, 0, 561},
+ dictWord{132, 0, 320},
+ dictWord{135, 10, 1344},
+ dictWord{8, 11, 83},
+ dictWord{8, 11, 817},
+ dictWord{9, 11, 28},
+ dictWord{9, 11, 29},
+ dictWord{9, 11, 885},
+ dictWord{10, 11, 387},
+ dictWord{11, 11, 633},
+ dictWord{11, 11, 740},
+ dictWord{13, 11, 235},
+ dictWord{13, 11, 254},
+ dictWord{15, 11, 143},
+ dictWord{143, 11, 146},
+ dictWord{5, 10, 396},
+ dictWord{134, 10, 501},
+ dictWord{140, 11, 49},
+ dictWord{132, 0, 225},
+ dictWord{4, 10, 929},
+ dictWord{5, 10, 799},
+ dictWord{8, 10, 46},
+ dictWord{136, 10, 740},
+ dictWord{4, 0, 405},
+ dictWord{7, 0, 817},
+ dictWord{14, 0, 58},
+ dictWord{17, 0, 37},
+ dictWord{146, 0, 124},
+ dictWord{133, 0, 974},
+ dictWord{4, 11, 412},
+ dictWord{133, 11, 581},
+ dictWord{4, 10, 892},
+ dictWord{133, 10, 770},
+ dictWord{4, 0, 996},
+ dictWord{134, 0, 2026},
+ dictWord{4, 0, 527},
+ dictWord{5, 0, 235},
+ dictWord{7, 0, 1239},
+ dictWord{11, 0, 131},
+ dictWord{140, 0, 370},
+ dictWord{9, 0, 16},
+ dictWord{13, 0, 386},
+ dictWord{135, 11, 421},
+ dictWord{7, 0, 956},
+ dictWord{7, 0, 1157},
+ dictWord{7, 0, 1506},
+ dictWord{7, 0, 1606},
+ dictWord{7, 0, 1615},
+ dictWord{7, 0, 1619},
+ dictWord{7, 0, 1736},
+ dictWord{7, 0, 1775},
+ dictWord{8, 0, 590},
+ dictWord{9, 0, 324},
+ dictWord{9, 0, 736},
+ dictWord{9, 0, 774},
+ dictWord{9, 0, 776},
+ dictWord{9, 0, 784},
+ dictWord{10, 0, 567},
+ dictWord{10, 0, 708},
+ dictWord{11, 0, 518},
+ dictWord{11, 0, 613},
+ dictWord{11, 0, 695},
+ dictWord{11, 0, 716},
+ dictWord{11, 0, 739},
+ dictWord{11, 0, 770},
+ dictWord{11, 0, 771},
+ dictWord{11, 0, 848},
+ dictWord{11, 0, 857},
+ dictWord{11, 0, 931},
+ dictWord{11, 0, 947},
+ dictWord{12, 0, 326},
+ dictWord{12, 0, 387},
+ dictWord{12, 0, 484},
+ dictWord{12, 0, 528},
+ dictWord{12, 0, 552},
+ dictWord{12, 0, 613},
+ dictWord{13, 0, 189},
+ dictWord{13, 0, 256},
+ dictWord{13, 0, 340},
+ dictWord{13, 0, 432},
+ dictWord{13, 0, 436},
+ dictWord{13, 0, 440},
+ dictWord{13, 0, 454},
+ dictWord{14, 0, 174},
+ dictWord{14, 0, 220},
+ dictWord{14, 0, 284},
+ dictWord{14, 0, 390},
+ dictWord{145, 0, 121},
+ dictWord{135, 10, 158},
+ dictWord{9, 0, 137},
+ dictWord{138, 0, 221},
+ dictWord{4, 11, 110},
+ dictWord{10, 11, 415},
+ dictWord{10, 11, 597},
+ dictWord{142, 11, 206},
+ dictWord{141, 11, 496},
+ dictWord{135, 11, 205},
+ dictWord{151, 10, 25},
+ dictWord{135, 11, 778},
+ dictWord{7, 11, 1656},
+ dictWord{7, 10, 2001},
+ dictWord{9, 11, 369},
+ dictWord{10, 11, 338},
+ dictWord{10, 11, 490},
+ dictWord{11, 11, 154},
+ dictWord{11, 11, 545},
+ dictWord{11, 11, 775},
+ dictWord{13, 11, 77},
+ dictWord{141, 11, 274},
+ dictWord{4, 11, 444},
+ dictWord{10, 11, 146},
+ dictWord{140, 11, 9},
+ dictWord{7, 0, 390},
+ dictWord{138, 0, 140},
+ dictWord{135, 0, 1144},
+ dictWord{134, 0, 464},
+ dictWord{7, 10, 1461},
+ dictWord{140, 10, 91},
+ dictWord{132, 10, 602},
+ dictWord{4, 11, 283},
+ dictWord{135, 11, 1194},
+ dictWord{5, 0, 407},
+ dictWord{11, 0, 204},
+ dictWord{11, 0, 243},
+ dictWord{11, 0, 489},
+ dictWord{12, 0, 293},
+ dictWord{19, 0, 37},
+ dictWord{20, 0, 73},
+ dictWord{150, 0, 38},
+ dictWord{7, 0, 1218},
+ dictWord{136, 0, 303},
+ dictWord{5, 0, 325},
+ dictWord{8, 0, 5},
+ dictWord{8, 0, 227},
+ dictWord{9, 0, 105},
+ dictWord{10, 0, 585},
+ dictWord{12, 0, 614},
+ dictWord{4, 10, 13},
+ dictWord{5, 10, 567},
+ dictWord{7, 10, 1498},
+ dictWord{9, 10, 124},
+ dictWord{11, 10, 521},
+ dictWord{140, 10, 405},
+ dictWord{135, 10, 1006},
+ dictWord{7, 0, 800},
+ dictWord{10, 0, 12},
+ dictWord{134, 11, 1720},
+ dictWord{135, 0, 1783},
+ dictWord{132, 10, 735},
+ dictWord{138, 10, 812},
+ dictWord{4, 10, 170},
+ dictWord{135, 10, 323},
+ dictWord{6, 0, 621},
+ dictWord{13, 0, 504},
+ dictWord{144, 0, 89},
+ dictWord{5, 10, 304},
+ dictWord{135, 10, 1403},
+ dictWord{137, 11, 216},
+ dictWord{6, 0, 920},
+ dictWord{6, 0, 1104},
+ dictWord{9, 11, 183},
+ dictWord{139, 11, 286},
+ dictWord{4, 0, 376},
+ dictWord{133, 10, 742},
+ dictWord{134, 0, 218},
+ dictWord{8, 0, 641},
+ dictWord{11, 0, 388},
+ dictWord{140, 0, 580},
+ dictWord{7, 0, 454},
+ dictWord{7, 0, 782},
+ dictWord{8, 0, 768},
+ dictWord{140, 0, 686},
+ dictWord{137, 11, 33},
+ dictWord{133, 10, 111},
+ dictWord{144, 0, 0},
+ dictWord{10, 0, 676},
+ dictWord{140, 0, 462},
+ dictWord{6, 0, 164},
+ dictWord{136, 11, 735},
+ dictWord{133, 10, 444},
+ dictWord{150, 0, 50},
+ dictWord{7, 11, 1862},
+ dictWord{12, 11, 491},
+ dictWord{12, 11, 520},
+ dictWord{13, 11, 383},
+ dictWord{14, 11, 244},
+ dictWord{146, 11, 12},
+ dictWord{5, 11, 132},
+ dictWord{9, 11, 486},
+ dictWord{9, 11, 715},
+ dictWord{10, 11, 458},
+ dictWord{11, 11, 373},
+ dictWord{11, 11, 668},
+ dictWord{11, 11, 795},
+ dictWord{11, 11, 897},
+ dictWord{12, 11, 272},
+ dictWord{12, 11, 424},
+ dictWord{12, 11, 539},
+ dictWord{12, 11, 558},
+ dictWord{14, 11, 245},
+ dictWord{14, 11, 263},
+ dictWord{14, 11, 264},
+ dictWord{14, 11, 393},
+ dictWord{142, 11, 403},
+ dictWord{8, 10, 123},
+ dictWord{15, 10, 6},
+ dictWord{144, 10, 7},
+ dictWord{6, 0, 285},
+ dictWord{8, 0, 654},
+ dictWord{11, 0, 749},
+ dictWord{12, 0, 190},
+ dictWord{12, 0, 327},
+ dictWord{13, 0, 120},
+ dictWord{13, 0, 121},
+ dictWord{13, 0, 327},
+ dictWord{15, 0, 47},
+ dictWord{146, 0, 40},
+ dictWord{5, 11, 8},
+ dictWord{6, 11, 89},
+ dictWord{6, 11, 400},
+ dictWord{7, 11, 1569},
+ dictWord{7, 11, 1623},
+ dictWord{7, 11, 1850},
+ dictWord{8, 11, 218},
+ dictWord{8, 11, 422},
+ dictWord{9, 11, 570},
+ dictWord{138, 11, 626},
+ dictWord{6, 11, 387},
+ dictWord{7, 11, 882},
+ dictWord{141, 11, 111},
+ dictWord{6, 0, 343},
+ dictWord{7, 0, 195},
+ dictWord{9, 0, 226},
+ dictWord{10, 0, 197},
+ dictWord{10, 0, 575},
+ dictWord{11, 0, 502},
+ dictWord{11, 0, 899},
+ dictWord{6, 11, 224},
+ dictWord{7, 11, 877},
+ dictWord{137, 11, 647},
+ dictWord{5, 10, 937},
+ dictWord{135, 10, 100},
+ dictWord{135, 11, 790},
+ dictWord{150, 0, 29},
+ dictWord{147, 0, 8},
+ dictWord{134, 0, 1812},
+ dictWord{149, 0, 8},
+ dictWord{135, 11, 394},
+ dictWord{7, 0, 1125},
+ dictWord{9, 0, 143},
+ dictWord{11, 0, 61},
+ dictWord{14, 0, 405},
+ dictWord{150, 0, 21},
+ dictWord{10, 11, 755},
+ dictWord{147, 11, 29},
+ dictWord{9, 11, 378},
+ dictWord{141, 11, 162},
+ dictWord{135, 10, 922},
+ dictWord{5, 10, 619},
+ dictWord{133, 10, 698},
+ dictWord{134, 0, 1327},
+ dictWord{6, 0, 1598},
+ dictWord{137, 0, 575},
+ dictWord{9, 11, 569},
+ dictWord{12, 11, 12},
+ dictWord{12, 11, 81},
+ dictWord{12, 11, 319},
+ dictWord{13, 11, 69},
+ dictWord{14, 11, 259},
+ dictWord{16, 11, 87},
+ dictWord{17, 11, 1},
+ dictWord{17, 11, 21},
+ dictWord{17, 11, 24},
+ dictWord{18, 11, 15},
+ dictWord{18, 11, 56},
+ dictWord{18, 11, 59},
+ dictWord{18, 11, 127},
+ dictWord{18, 11, 154},
+ dictWord{19, 11, 19},
+ dictWord{148, 11, 31},
+ dictWord{6, 0, 895},
+ dictWord{135, 11, 1231},
+ dictWord{5, 0, 959},
+ dictWord{7, 11, 124},
+ dictWord{136, 11, 38},
+ dictWord{5, 11, 261},
+ dictWord{7, 11, 78},
+ dictWord{7, 11, 199},
+ dictWord{8, 11, 815},
+ dictWord{9, 11, 126},
+ dictWord{138, 11, 342},
+ dictWord{5, 10, 917},
+ dictWord{134, 10, 1659},
+ dictWord{7, 0, 1759},
+ dictWord{5, 11, 595},
+ dictWord{135, 11, 1863},
+ dictWord{136, 0, 173},
+ dictWord{134, 0, 266},
+ dictWord{142, 0, 261},
+ dictWord{132, 11, 628},
+ dictWord{5, 10, 251},
+ dictWord{5, 10, 956},
+ dictWord{8, 10, 268},
+ dictWord{9, 10, 214},
+ dictWord{146, 10, 142},
+ dictWord{7, 11, 266},
+ dictWord{136, 11, 804},
+ dictWord{135, 11, 208},
+ dictWord{6, 11, 79},
+ dictWord{7, 11, 1021},
+ dictWord{135, 11, 1519},
+ dictWord{11, 11, 704},
+ dictWord{141, 11, 396},
+ dictWord{5, 10, 346},
+ dictWord{5, 10, 711},
+ dictWord{136, 10, 390},
+ dictWord{136, 11, 741},
+ dictWord{134, 11, 376},
+ dictWord{134, 0, 1427},
+ dictWord{6, 0, 1033},
+ dictWord{6, 0, 1217},
+ dictWord{136, 0, 300},
+ dictWord{133, 10, 624},
+ dictWord{6, 11, 100},
+ dictWord{7, 11, 244},
+ dictWord{7, 11, 632},
+ dictWord{7, 11, 1609},
+ dictWord{8, 11, 178},
+ dictWord{8, 11, 638},
+ dictWord{141, 11, 58},
+ dictWord{6, 0, 584},
+ dictWord{5, 10, 783},
+ dictWord{7, 10, 1998},
+ dictWord{135, 10, 2047},
+ dictWord{5, 0, 427},
+ dictWord{5, 0, 734},
+ dictWord{7, 0, 478},
+ dictWord{136, 0, 52},
+ dictWord{7, 0, 239},
+ dictWord{11, 0, 217},
+ dictWord{142, 0, 165},
+ dictWord{134, 0, 1129},
+ dictWord{6, 0, 168},
+ dictWord{6, 0, 1734},
+ dictWord{7, 0, 20},
+ dictWord{7, 0, 1056},
+ dictWord{8, 0, 732},
+ dictWord{9, 0, 406},
+ dictWord{9, 0, 911},
+ dictWord{138, 0, 694},
+ dictWord{132, 10, 594},
+ dictWord{133, 11, 791},
+ dictWord{7, 11, 686},
+ dictWord{8, 11, 33},
+ dictWord{8, 11, 238},
+ dictWord{10, 11, 616},
+ dictWord{11, 11, 467},
+ dictWord{11, 11, 881},
+ dictWord{13, 11, 217},
+ dictWord{13, 11, 253},
+ dictWord{142, 11, 268},
+ dictWord{137, 11, 476},
+ dictWord{134, 0, 418},
+ dictWord{133, 0, 613},
+ dictWord{132, 0, 632},
+ dictWord{132, 11, 447},
+ dictWord{7, 0, 32},
+ dictWord{7, 0, 984},
+ dictWord{8, 0, 85},
+ dictWord{8, 0, 709},
+ dictWord{9, 0, 579},
+ dictWord{9, 0, 847},
+ dictWord{9, 0, 856},
+ dictWord{10, 0, 799},
+ dictWord{11, 0, 258},
+ dictWord{11, 0, 1007},
+ dictWord{12, 0, 331},
+ dictWord{12, 0, 615},
+ dictWord{13, 0, 188},
+ dictWord{13, 0, 435},
+ dictWord{14, 0, 8},
+ dictWord{15, 0, 165},
+ dictWord{16, 0, 27},
+ dictWord{20, 0, 40},
+ dictWord{144, 11, 35},
+ dictWord{4, 11, 128},
+ dictWord{5, 11, 415},
+ dictWord{6, 11, 462},
+ dictWord{7, 11, 294},
+ dictWord{7, 11, 578},
+ dictWord{10, 11, 710},
+ dictWord{139, 11, 86},
+ dictWord{5, 0, 694},
+ dictWord{136, 0, 909},
+ dictWord{7, 0, 1109},
+ dictWord{11, 0, 7},
+ dictWord{5, 10, 37},
+ dictWord{6, 10, 39},
+ dictWord{6, 10, 451},
+ dictWord{7, 10, 218},
+ dictWord{7, 10, 1166},
+ dictWord{7, 10, 1687},
+ dictWord{8, 10, 662},
+ dictWord{144, 10, 2},
+ dictWord{136, 11, 587},
+ dictWord{6, 11, 427},
+ dictWord{7, 11, 1018},
+ dictWord{138, 11, 692},
+ dictWord{4, 11, 195},
+ dictWord{6, 10, 508},
+ dictWord{135, 11, 802},
+ dictWord{4, 0, 167},
+ dictWord{135, 0, 82},
+ dictWord{5, 0, 62},
+ dictWord{6, 0, 24},
+ dictWord{6, 0, 534},
+ dictWord{7, 0, 74},
+ dictWord{7, 0, 678},
+ dictWord{7, 0, 684},
+ dictWord{7, 0, 1043},
+ dictWord{7, 0, 1072},
+ dictWord{8, 0, 280},
+ dictWord{8, 0, 541},
+ dictWord{8, 0, 686},
+ dictWord{9, 0, 258},
+ dictWord{10, 0, 519},
+ dictWord{11, 0, 252},
+ dictWord{140, 0, 282},
+ dictWord{138, 0, 33},
+ dictWord{4, 0, 359},
+ dictWord{133, 11, 738},
+ dictWord{7, 0, 980},
+ dictWord{9, 0, 328},
+ dictWord{13, 0, 186},
+ dictWord{13, 0, 364},
+ dictWord{7, 10, 635},
+ dictWord{7, 10, 796},
+ dictWord{8, 10, 331},
+ dictWord{9, 10, 330},
+ dictWord{9, 10, 865},
+ dictWord{10, 10, 119},
+ dictWord{10, 10, 235},
+ dictWord{11, 10, 111},
+ dictWord{11, 10, 129},
+ dictWord{11, 10, 240},
+ dictWord{12, 10, 31},
+ dictWord{12, 10, 66},
+ dictWord{12, 10, 222},
+ dictWord{12, 10, 269},
+ dictWord{12, 10, 599},
+ dictWord{12, 10, 684},
+ dictWord{12, 10, 689},
+ dictWord{12, 10, 691},
+ dictWord{142, 10, 345},
+ dictWord{137, 10, 527},
+ dictWord{6, 0, 596},
+ dictWord{7, 0, 585},
+ dictWord{135, 10, 702},
+ dictWord{134, 11, 1683},
+ dictWord{133, 0, 211},
+ dictWord{6, 0, 145},
+ dictWord{141, 0, 336},
+ dictWord{134, 0, 1130},
+ dictWord{7, 0, 873},
+ dictWord{6, 10, 37},
+ dictWord{7, 10, 1666},
+ dictWord{8, 10, 195},
+ dictWord{8, 10, 316},
+ dictWord{9, 10, 178},
+ dictWord{9, 10, 276},
+ dictWord{9, 10, 339},
+ dictWord{9, 10, 536},
+ dictWord{10, 10, 102},
+ dictWord{10, 10, 362},
+ dictWord{10, 10, 785},
+ dictWord{11, 10, 55},
+ dictWord{11, 10, 149},
+ dictWord{11, 10, 773},
+ dictWord{13, 10, 416},
+ dictWord{13, 10, 419},
+ dictWord{14, 10, 38},
+ dictWord{14, 10, 41},
+ dictWord{142, 10, 210},
+ dictWord{8, 0, 840},
+ dictWord{136, 0, 841},
+ dictWord{132, 0, 263},
+ dictWord{5, 11, 3},
+ dictWord{8, 11, 578},
+ dictWord{9, 11, 118},
+ dictWord{10, 11, 705},
+ dictWord{12, 11, 383},
+ dictWord{141, 11, 279},
+ dictWord{132, 0, 916},
+ dictWord{133, 11, 229},
+ dictWord{133, 10, 645},
+ dictWord{15, 0, 155},
+ dictWord{16, 0, 79},
+ dictWord{8, 11, 102},
+ dictWord{10, 11, 578},
+ dictWord{10, 11, 672},
+ dictWord{12, 11, 496},
+ dictWord{13, 11, 408},
+ dictWord{14, 11, 121},
+ dictWord{145, 11, 106},
+ dictWord{4, 0, 599},
+ dictWord{5, 0, 592},
+ dictWord{6, 0, 1634},
+ dictWord{7, 0, 5},
+ dictWord{7, 0, 55},
+ dictWord{7, 0, 67},
+ dictWord{7, 0, 97},
+ dictWord{7, 0, 691},
+ dictWord{7, 0, 979},
+ dictWord{7, 0, 1600},
+ dictWord{7, 0, 1697},
+ dictWord{8, 0, 207},
+ dictWord{8, 0, 214},
+ dictWord{8, 0, 231},
+ dictWord{8, 0, 294},
+ dictWord{8, 0, 336},
+ dictWord{8, 0, 428},
+ dictWord{8, 0, 471},
+ dictWord{8, 0, 622},
+ dictWord{8, 0, 626},
+ dictWord{8, 0, 679},
+ dictWord{8, 0, 759},
+ dictWord{8, 0, 829},
+ dictWord{9, 0, 11},
+ dictWord{9, 0, 246},
+ dictWord{9, 0, 484},
+ dictWord{9, 0, 573},
+ dictWord{9, 0, 706},
+ dictWord{9, 0, 762},
+ dictWord{9, 0, 798},
+ dictWord{9, 0, 855},
+ dictWord{9, 0, 870},
+ dictWord{9, 0, 912},
+ dictWord{10, 0, 303},
+ dictWord{10, 0, 335},
+ dictWord{10, 0, 424},
+ dictWord{10, 0, 461},
+ dictWord{10, 0, 543},
+ dictWord{10, 0, 759},
+ dictWord{10, 0, 814},
+ dictWord{11, 0, 59},
+ dictWord{11, 0, 199},
+ dictWord{11, 0, 235},
+ dictWord{11, 0, 590},
+ dictWord{11, 0, 631},
+ dictWord{11, 0, 929},
+ dictWord{11, 0, 963},
+ dictWord{11, 0, 987},
+ dictWord{12, 0, 114},
+ dictWord{12, 0, 182},
+ dictWord{12, 0, 226},
+ dictWord{12, 0, 332},
+ dictWord{12, 0, 439},
+ dictWord{12, 0, 575},
+ dictWord{12, 0, 598},
+ dictWord{12, 0, 675},
+ dictWord{13, 0, 8},
+ dictWord{13, 0, 125},
+ dictWord{13, 0, 194},
+ dictWord{13, 0, 287},
+ dictWord{14, 0, 197},
+ dictWord{14, 0, 383},
+ dictWord{15, 0, 53},
+ dictWord{17, 0, 63},
+ dictWord{19, 0, 46},
+ dictWord{19, 0, 98},
+ dictWord{19, 0, 106},
+ dictWord{148, 0, 85},
+ dictWord{7, 0, 1356},
+ dictWord{132, 10, 290},
+ dictWord{6, 10, 70},
+ dictWord{7, 10, 1292},
+ dictWord{10, 10, 762},
+ dictWord{139, 10, 288},
+ dictWord{150, 11, 55},
+ dictWord{4, 0, 593},
+ dictWord{8, 11, 115},
+ dictWord{8, 11, 350},
+ dictWord{9, 11, 489},
+ dictWord{10, 11, 128},
+ dictWord{11, 11, 306},
+ dictWord{12, 11, 373},
+ dictWord{14, 11, 30},
+ dictWord{17, 11, 79},
+ dictWord{147, 11, 80},
+ dictWord{135, 11, 1235},
+ dictWord{134, 0, 1392},
+ dictWord{4, 11, 230},
+ dictWord{133, 11, 702},
+ dictWord{147, 0, 126},
+ dictWord{7, 10, 131},
+ dictWord{7, 10, 422},
+ dictWord{8, 10, 210},
+ dictWord{140, 10, 573},
+ dictWord{134, 0, 1179},
+ dictWord{139, 11, 435},
+ dictWord{139, 10, 797},
+ dictWord{134, 11, 1728},
+ dictWord{4, 0, 162},
+ dictWord{18, 11, 26},
+ dictWord{19, 11, 42},
+ dictWord{20, 11, 43},
+ dictWord{21, 11, 0},
+ dictWord{23, 11, 27},
+ dictWord{152, 11, 14},
+ dictWord{132, 10, 936},
+ dictWord{6, 0, 765},
+ dictWord{5, 10, 453},
+ dictWord{134, 10, 441},
+ dictWord{133, 0, 187},
+ dictWord{135, 0, 1286},
+ dictWord{6, 0, 635},
+ dictWord{6, 0, 904},
+ dictWord{6, 0, 1210},
+ dictWord{134, 0, 1489},
+ dictWord{4, 0, 215},
+ dictWord{8, 0, 890},
+ dictWord{9, 0, 38},
+ dictWord{10, 0, 923},
+ dictWord{11, 0, 23},
+ dictWord{11, 0, 127},
+ dictWord{139, 0, 796},
+ dictWord{6, 0, 1165},
+ dictWord{134, 0, 1306},
+ dictWord{7, 0, 716},
+ dictWord{13, 0, 97},
+ dictWord{141, 0, 251},
+ dictWord{132, 10, 653},
+ dictWord{136, 0, 657},
+ dictWord{146, 10, 80},
+ dictWord{5, 11, 622},
+ dictWord{7, 11, 1032},
+ dictWord{11, 11, 26},
+ dictWord{11, 11, 213},
+ dictWord{11, 11, 707},
+ dictWord{12, 11, 380},
+ dictWord{13, 11, 226},
+ dictWord{141, 11, 355},
+ dictWord{6, 0, 299},
+ dictWord{5, 11, 70},
+ dictWord{6, 11, 334},
+ dictWord{9, 11, 171},
+ dictWord{11, 11, 637},
+ dictWord{12, 11, 202},
+ dictWord{14, 11, 222},
+ dictWord{145, 11, 42},
+ dictWord{142, 0, 134},
+ dictWord{4, 11, 23},
+ dictWord{5, 11, 313},
+ dictWord{5, 11, 1014},
+ dictWord{6, 11, 50},
+ dictWord{6, 11, 51},
+ dictWord{7, 11, 142},
+ dictWord{7, 11, 384},
+ dictWord{9, 11, 783},
+ dictWord{139, 11, 741},
+ dictWord{4, 11, 141},
+ dictWord{7, 11, 559},
+ dictWord{8, 11, 640},
+ dictWord{9, 11, 460},
+ dictWord{12, 11, 183},
+ dictWord{141, 11, 488},
+ dictWord{136, 11, 614},
+ dictWord{7, 10, 1368},
+ dictWord{8, 10, 232},
+ dictWord{8, 10, 361},
+ dictWord{10, 10, 682},
+ dictWord{138, 10, 742},
+ dictWord{137, 10, 534},
+ dictWord{6, 0, 1082},
+ dictWord{140, 0, 658},
+ dictWord{137, 10, 27},
+ dictWord{135, 0, 2002},
+ dictWord{142, 10, 12},
+ dictWord{4, 0, 28},
+ dictWord{5, 0, 440},
+ dictWord{7, 0, 248},
+ dictWord{11, 0, 833},
+ dictWord{140, 0, 344},
+ dictWord{7, 10, 736},
+ dictWord{139, 10, 264},
+ dictWord{134, 10, 1657},
+ dictWord{134, 0, 1654},
+ dictWord{138, 0, 531},
+ dictWord{5, 11, 222},
+ dictWord{9, 11, 140},
+ dictWord{138, 11, 534},
+ dictWord{6, 0, 634},
+ dictWord{6, 0, 798},
+ dictWord{134, 0, 840},
+ dictWord{138, 11, 503},
+ dictWord{135, 10, 127},
+ dictWord{133, 0, 853},
+ dictWord{5, 11, 154},
+ dictWord{7, 11, 1491},
+ dictWord{10, 11, 379},
+ dictWord{138, 11, 485},
+ dictWord{6, 0, 249},
+ dictWord{7, 0, 1234},
+ dictWord{139, 0, 573},
+ dictWord{133, 11, 716},
+ dictWord{7, 11, 1570},
+ dictWord{140, 11, 542},
+ dictWord{136, 10, 364},
+ dictWord{138, 0, 527},
+ dictWord{4, 11, 91},
+ dictWord{5, 11, 388},
+ dictWord{5, 11, 845},
+ dictWord{6, 11, 206},
+ dictWord{6, 11, 252},
+ dictWord{6, 11, 365},
+ dictWord{7, 11, 136},
+ dictWord{7, 11, 531},
+ dictWord{8, 11, 264},
+ dictWord{136, 11, 621},
+ dictWord{134, 0, 1419},
+ dictWord{135, 11, 1441},
+ dictWord{7, 0, 49},
+ dictWord{7, 0, 392},
+ dictWord{8, 0, 20},
+ dictWord{8, 0, 172},
+ dictWord{8, 0, 690},
+ dictWord{9, 0, 383},
+ dictWord{9, 0, 845},
+ dictWord{10, 0, 48},
+ dictWord{11, 0, 293},
+ dictWord{11, 0, 832},
+ dictWord{11, 0, 920},
+ dictWord{11, 0, 984},
+ dictWord{141, 0, 221},
+ dictWord{5, 0, 858},
+ dictWord{133, 0, 992},
+ dictWord{5, 0, 728},
+ dictWord{137, 10, 792},
+ dictWord{5, 10, 909},
+ dictWord{9, 10, 849},
+ dictWord{138, 10, 805},
+ dictWord{7, 0, 525},
+ dictWord{7, 0, 1579},
+ dictWord{8, 0, 497},
+ dictWord{136, 0, 573},
+ dictWord{6, 0, 268},
+ dictWord{137, 0, 62},
+ dictWord{135, 11, 576},
+ dictWord{134, 0, 1201},
+ dictWord{5, 11, 771},
+ dictWord{5, 11, 863},
+ dictWord{5, 11, 898},
+ dictWord{6, 11, 1632},
+ dictWord{6, 11, 1644},
+ dictWord{134, 11, 1780},
+ dictWord{133, 11, 331},
+ dictWord{7, 0, 193},
+ dictWord{7, 0, 1105},
+ dictWord{10, 0, 495},
+ dictWord{7, 10, 397},
+ dictWord{8, 10, 124},
+ dictWord{8, 10, 619},
+ dictWord{9, 10, 305},
+ dictWord{11, 10, 40},
+ dictWord{12, 10, 349},
+ dictWord{13, 10, 134},
+ dictWord{13, 10, 295},
+ dictWord{14, 10, 155},
+ dictWord{15, 10, 120},
+ dictWord{146, 10, 105},
+ dictWord{138, 0, 106},
+ dictWord{6, 0, 859},
+ dictWord{5, 11, 107},
+ dictWord{7, 11, 201},
+ dictWord{136, 11, 518},
+ dictWord{6, 11, 446},
+ dictWord{135, 11, 1817},
+ dictWord{13, 0, 23},
+ dictWord{4, 10, 262},
+ dictWord{135, 10, 342},
+ dictWord{133, 10, 641},
+ dictWord{137, 11, 851},
+ dictWord{6, 0, 925},
+ dictWord{137, 0, 813},
+ dictWord{132, 11, 504},
+ dictWord{6, 0, 613},
+ dictWord{136, 0, 223},
+ dictWord{4, 10, 99},
+ dictWord{6, 10, 250},
+ dictWord{6, 10, 346},
+ dictWord{8, 10, 127},
+ dictWord{138, 10, 81},
+ dictWord{136, 0, 953},
+ dictWord{132, 10, 915},
+ dictWord{139, 11, 892},
+ dictWord{5, 10, 75},
+ dictWord{9, 10, 517},
+ dictWord{10, 10, 470},
+ dictWord{12, 10, 155},
+ dictWord{141, 10, 224},
+ dictWord{4, 0, 666},
+ dictWord{7, 0, 1017},
+ dictWord{7, 11, 996},
+ dictWord{138, 11, 390},
+ dictWord{5, 11, 883},
+ dictWord{133, 11, 975},
+ dictWord{14, 10, 83},
+ dictWord{142, 11, 83},
+ dictWord{4, 0, 670},
+ dictWord{5, 11, 922},
+ dictWord{134, 11, 1707},
+ dictWord{135, 0, 216},
+ dictWord{9, 0, 40},
+ dictWord{11, 0, 136},
+ dictWord{135, 11, 787},
+ dictWord{5, 10, 954},
+ dictWord{5, 11, 993},
+ dictWord{7, 11, 515},
+ dictWord{137, 11, 91},
+ dictWord{139, 0, 259},
+ dictWord{7, 0, 1114},
+ dictWord{9, 0, 310},
+ dictWord{9, 0, 682},
+ dictWord{10, 0, 440},
+ dictWord{13, 0, 40},
+ dictWord{6, 10, 304},
+ dictWord{8, 10, 418},
+ dictWord{11, 10, 341},
+ dictWord{139, 10, 675},
+ dictWord{14, 0, 296},
+ dictWord{9, 10, 410},
+ dictWord{139, 10, 425},
+ dictWord{10, 11, 377},
+ dictWord{12, 11, 363},
+ dictWord{13, 11, 68},
+ dictWord{13, 11, 94},
+ dictWord{14, 11, 108},
+ dictWord{142, 11, 306},
+ dictWord{7, 0, 1401},
+ dictWord{135, 0, 1476},
+ dictWord{4, 0, 296},
+ dictWord{6, 0, 475},
+ dictWord{7, 0, 401},
+ dictWord{7, 0, 1410},
+ dictWord{7, 0, 1594},
+ dictWord{7, 0, 1674},
+ dictWord{8, 0, 63},
+ dictWord{8, 0, 660},
+ dictWord{137, 0, 74},
+ dictWord{4, 0, 139},
+ dictWord{4, 0, 388},
+ dictWord{140, 0, 188},
+ dictWord{132, 0, 797},
+ dictWord{132, 11, 766},
+ dictWord{5, 11, 103},
+ dictWord{7, 11, 921},
+ dictWord{8, 11, 580},
+ dictWord{8, 11, 593},
+ dictWord{8, 11, 630},
+ dictWord{138, 11, 28},
+ dictWord{4, 11, 911},
+ dictWord{5, 11, 867},
+ dictWord{133, 11, 1013},
+ dictWord{134, 10, 14},
+ dictWord{134, 0, 1572},
+ dictWord{134, 10, 1708},
+ dictWord{21, 0, 39},
+ dictWord{5, 10, 113},
+ dictWord{6, 10, 243},
+ dictWord{7, 10, 1865},
+ dictWord{11, 10, 161},
+ dictWord{16, 10, 37},
+ dictWord{145, 10, 99},
+ dictWord{7, 11, 1563},
+ dictWord{141, 11, 182},
+ dictWord{5, 11, 135},
+ dictWord{6, 11, 519},
+ dictWord{7, 11, 1722},
+ dictWord{10, 11, 271},
+ dictWord{11, 11, 261},
+ dictWord{145, 11, 54},
+ dictWord{132, 10, 274},
+ dictWord{134, 0, 1594},
+ dictWord{4, 11, 300},
+ dictWord{5, 11, 436},
+ dictWord{135, 11, 484},
+ dictWord{4, 0, 747},
+ dictWord{6, 0, 290},
+ dictWord{7, 0, 649},
+ dictWord{7, 0, 1479},
+ dictWord{135, 0, 1583},
+ dictWord{133, 11, 535},
+ dictWord{147, 11, 82},
+ dictWord{133, 0, 232},
+ dictWord{137, 0, 887},
+ dictWord{135, 10, 166},
+ dictWord{136, 0, 521},
+ dictWord{4, 0, 14},
+ dictWord{7, 0, 472},
+ dictWord{7, 0, 1801},
+ dictWord{10, 0, 748},
+ dictWord{141, 0, 458},
+ dictWord{134, 0, 741},
+ dictWord{134, 0, 992},
+ dictWord{16, 0, 111},
+ dictWord{137, 10, 304},
+ dictWord{4, 0, 425},
+ dictWord{5, 11, 387},
+ dictWord{7, 11, 557},
+ dictWord{12, 11, 547},
+ dictWord{142, 11, 86},
+ dictWord{135, 11, 1747},
+ dictWord{5, 10, 654},
+ dictWord{135, 11, 1489},
+ dictWord{7, 0, 789},
+ dictWord{4, 11, 6},
+ dictWord{5, 11, 708},
+ dictWord{136, 11, 75},
+ dictWord{6, 10, 273},
+ dictWord{10, 10, 188},
+ dictWord{13, 10, 377},
+ dictWord{146, 10, 77},
+ dictWord{6, 0, 1593},
+ dictWord{4, 11, 303},
+ dictWord{7, 11, 619},
+ dictWord{10, 11, 547},
+ dictWord{10, 11, 687},
+ dictWord{11, 11, 122},
+ dictWord{140, 11, 601},
+ dictWord{134, 0, 1768},
+ dictWord{135, 10, 410},
+ dictWord{138, 11, 772},
+ dictWord{11, 0, 233},
+ dictWord{139, 10, 524},
+ dictWord{5, 0, 943},
+ dictWord{134, 0, 1779},
+ dictWord{134, 10, 1785},
+ dictWord{136, 11, 529},
+ dictWord{132, 0, 955},
+ dictWord{5, 0, 245},
+ dictWord{6, 0, 576},
+ dictWord{7, 0, 582},
+ dictWord{136, 0, 225},
+ dictWord{132, 10, 780},
+ dictWord{142, 0, 241},
+ dictWord{134, 0, 1943},
+ dictWord{4, 11, 106},
+ dictWord{7, 11, 310},
+ dictWord{7, 11, 1785},
+ dictWord{10, 11, 690},
+ dictWord{139, 11, 717},
+ dictWord{134, 0, 1284},
+ dictWord{5, 11, 890},
+ dictWord{133, 11, 988},
+ dictWord{6, 11, 626},
+ dictWord{142, 11, 431},
+ dictWord{10, 11, 706},
+ dictWord{145, 11, 32},
+ dictWord{137, 11, 332},
+ dictWord{132, 11, 698},
+ dictWord{135, 0, 709},
+ dictWord{5, 10, 948},
+ dictWord{138, 11, 17},
+ dictWord{136, 0, 554},
+ dictWord{134, 0, 1564},
+ dictWord{139, 10, 941},
+ dictWord{132, 0, 443},
+ dictWord{134, 0, 909},
+ dictWord{134, 11, 84},
+ dictWord{142, 0, 280},
+ dictWord{4, 10, 532},
+ dictWord{5, 10, 706},
+ dictWord{135, 10, 662},
+ dictWord{132, 0, 729},
+ dictWord{5, 10, 837},
+ dictWord{6, 10, 1651},
+ dictWord{139, 10, 985},
+ dictWord{135, 10, 1861},
+ dictWord{4, 0, 348},
+ dictWord{152, 11, 3},
+ dictWord{5, 11, 986},
+ dictWord{6, 11, 130},
+ dictWord{7, 11, 1582},
+ dictWord{8, 11, 458},
+ dictWord{10, 11, 101},
+ dictWord{10, 11, 318},
+ dictWord{138, 11, 823},
+ dictWord{134, 0, 758},
+ dictWord{4, 0, 298},
+ dictWord{137, 0, 848},
+ dictWord{4, 10, 330},
+ dictWord{7, 10, 933},
+ dictWord{7, 10, 2012},
+ dictWord{136, 10, 292},
+ dictWord{7, 11, 1644},
+ dictWord{137, 11, 129},
+ dictWord{6, 0, 1422},
+ dictWord{9, 0, 829},
+ dictWord{135, 10, 767},
+ dictWord{5, 0, 164},
+ dictWord{7, 0, 121},
+ dictWord{142, 0, 189},
+ dictWord{7, 0, 812},
+ dictWord{7, 0, 1261},
+ dictWord{7, 0, 1360},
+ dictWord{9, 0, 632},
+ dictWord{140, 0, 352},
+ dictWord{135, 11, 1788},
+ dictWord{139, 0, 556},
+ dictWord{135, 11, 997},
+ dictWord{145, 10, 114},
+ dictWord{4, 0, 172},
+ dictWord{9, 0, 611},
+ dictWord{10, 0, 436},
+ dictWord{12, 0, 673},
+ dictWord{13, 0, 255},
+ dictWord{137, 10, 883},
+ dictWord{11, 0, 530},
+ dictWord{138, 10, 274},
+ dictWord{133, 0, 844},
+ dictWord{134, 0, 984},
+ dictWord{13, 0, 232},
+ dictWord{18, 0, 35},
+ dictWord{4, 10, 703},
+ dictWord{135, 10, 207},
+ dictWord{132, 10, 571},
+ dictWord{9, 0, 263},
+ dictWord{10, 0, 147},
+ dictWord{138, 0, 492},
+ dictWord{7, 11, 1756},
+ dictWord{137, 11, 98},
+ dictWord{5, 10, 873},
+ dictWord{5, 10, 960},
+ dictWord{8, 10, 823},
+ dictWord{137, 10, 881},
+ dictWord{133, 0, 537},
+ dictWord{132, 0, 859},
+ dictWord{7, 11, 1046},
+ dictWord{139, 11, 160},
+ dictWord{137, 0, 842},
+ dictWord{139, 10, 283},
+ dictWord{5, 10, 33},
+ dictWord{6, 10, 470},
+ dictWord{139, 10, 424},
+ dictWord{6, 11, 45},
+ dictWord{7, 11, 433},
+ dictWord{8, 11, 129},
+ dictWord{9, 11, 21},
+ dictWord{10, 11, 392},
+ dictWord{11, 11, 79},
+ dictWord{12, 11, 499},
+ dictWord{13, 11, 199},
+ dictWord{141, 11, 451},
+ dictWord{135, 0, 1291},
+ dictWord{135, 10, 1882},
+ dictWord{7, 11, 558},
+ dictWord{136, 11, 353},
+ dictWord{134, 0, 1482},
+ dictWord{5, 0, 230},
+ dictWord{5, 0, 392},
+ dictWord{6, 0, 420},
+ dictWord{9, 0, 568},
+ dictWord{140, 0, 612},
+ dictWord{6, 0, 262},
+ dictWord{7, 10, 90},
+ dictWord{7, 10, 664},
+ dictWord{7, 10, 830},
+ dictWord{7, 10, 1380},
+ dictWord{7, 10, 2025},
+ dictWord{8, 11, 81},
+ dictWord{8, 10, 448},
+ dictWord{8, 10, 828},
+ dictWord{9, 11, 189},
+ dictWord{9, 11, 201},
+ dictWord{11, 11, 478},
+ dictWord{11, 11, 712},
+ dictWord{141, 11, 338},
+ dictWord{142, 0, 31},
+ dictWord{5, 11, 353},
+ dictWord{151, 11, 26},
+ dictWord{132, 0, 753},
+ dictWord{4, 0, 0},
+ dictWord{5, 0, 41},
+ dictWord{7, 0, 1459},
+ dictWord{7, 0, 1469},
+ dictWord{7, 0, 1859},
+ dictWord{9, 0, 549},
+ dictWord{139, 0, 905},
+ dictWord{9, 10, 417},
+ dictWord{137, 10, 493},
+ dictWord{135, 11, 1113},
+ dictWord{133, 0, 696},
+ dictWord{141, 11, 448},
+ dictWord{134, 10, 295},
+ dictWord{132, 0, 834},
+ dictWord{4, 0, 771},
+ dictWord{5, 10, 1019},
+ dictWord{6, 11, 25},
+ dictWord{7, 11, 855},
+ dictWord{7, 11, 1258},
+ dictWord{144, 11, 32},
+ dictWord{134, 0, 1076},
+ dictWord{133, 0, 921},
+ dictWord{133, 0, 674},
+ dictWord{4, 11, 4},
+ dictWord{7, 11, 1118},
+ dictWord{7, 11, 1320},
+ dictWord{7, 11, 1706},
+ dictWord{8, 11, 277},
+ dictWord{9, 11, 622},
+ dictWord{10, 11, 9},
+ dictWord{11, 11, 724},
+ dictWord{12, 11, 350},
+ dictWord{12, 11, 397},
+ dictWord{13, 11, 28},
+ dictWord{13, 11, 159},
+ dictWord{15, 11, 89},
+ dictWord{18, 11, 5},
+ dictWord{19, 11, 9},
+ dictWord{20, 11, 34},
+ dictWord{150, 11, 47},
+ dictWord{134, 10, 208},
+ dictWord{6, 0, 444},
+ dictWord{136, 0, 308},
+ dictWord{6, 0, 180},
+ dictWord{7, 0, 1137},
+ dictWord{8, 0, 751},
+ dictWord{139, 0, 805},
+ dictWord{4, 0, 183},
+ dictWord{7, 0, 271},
+ dictWord{11, 0, 824},
+ dictWord{11, 0, 952},
+ dictWord{13, 0, 278},
+ dictWord{13, 0, 339},
+ dictWord{13, 0, 482},
+ dictWord{14, 0, 424},
+ dictWord{148, 0, 99},
+ dictWord{7, 11, 317},
+ dictWord{135, 11, 569},
+ dictWord{4, 0, 19},
+ dictWord{5, 0, 477},
+ dictWord{5, 0, 596},
+ dictWord{6, 0, 505},
+ dictWord{7, 0, 1221},
+ dictWord{11, 0, 907},
+ dictWord{12, 0, 209},
+ dictWord{141, 0, 214},
+ dictWord{135, 0, 1215},
+ dictWord{6, 0, 271},
+ dictWord{7, 0, 398},
+ dictWord{8, 0, 387},
+ dictWord{10, 0, 344},
+ dictWord{7, 10, 448},
+ dictWord{7, 10, 1629},
+ dictWord{7, 10, 1813},
+ dictWord{8, 10, 442},
+ dictWord{9, 10, 710},
+ dictWord{10, 10, 282},
+ dictWord{138, 10, 722},
+ dictWord{11, 10, 844},
+ dictWord{12, 10, 104},
+ dictWord{140, 10, 625},
+ dictWord{134, 11, 255},
+ dictWord{133, 10, 787},
+ dictWord{134, 0, 1645},
+ dictWord{11, 11, 956},
+ dictWord{151, 11, 3},
+ dictWord{6, 0, 92},
+ dictWord{6, 0, 188},
+ dictWord{7, 0, 209},
+ dictWord{7, 0, 1269},
+ dictWord{7, 0, 1524},
+ dictWord{7, 0, 1876},
+ dictWord{8, 0, 661},
+ dictWord{10, 0, 42},
+ dictWord{10, 0, 228},
+ dictWord{11, 0, 58},
+ dictWord{11, 0, 1020},
+ dictWord{12, 0, 58},
+ dictWord{12, 0, 118},
+ dictWord{141, 0, 32},
+ dictWord{4, 0, 459},
+ dictWord{133, 0, 966},
+ dictWord{4, 11, 536},
+ dictWord{7, 11, 1141},
+ dictWord{10, 11, 723},
+ dictWord{139, 11, 371},
+ dictWord{140, 0, 330},
+ dictWord{134, 0, 1557},
+ dictWord{7, 11, 285},
+ dictWord{135, 11, 876},
+ dictWord{136, 10, 491},
+ dictWord{135, 11, 560},
+ dictWord{6, 0, 18},
+ dictWord{7, 0, 179},
+ dictWord{7, 0, 932},
+ dictWord{8, 0, 548},
+ dictWord{8, 0, 757},
+ dictWord{9, 0, 54},
+ dictWord{9, 0, 65},
+ dictWord{9, 0, 532},
+ dictWord{9, 0, 844},
+ dictWord{10, 0, 113},
+ dictWord{10, 0, 117},
+ dictWord{10, 0, 315},
+ dictWord{10, 0, 560},
+ dictWord{10, 0, 622},
+ dictWord{10, 0, 798},
+ dictWord{11, 0, 153},
+ dictWord{11, 0, 351},
+ dictWord{11, 0, 375},
+ dictWord{12, 0, 78},
+ dictWord{12, 0, 151},
+ dictWord{12, 0, 392},
+ dictWord{12, 0, 666},
+ dictWord{14, 0, 248},
+ dictWord{143, 0, 23},
+ dictWord{6, 0, 1742},
+ dictWord{132, 11, 690},
+ dictWord{4, 10, 403},
+ dictWord{5, 10, 441},
+ dictWord{7, 10, 450},
+ dictWord{10, 10, 840},
+ dictWord{11, 10, 101},
+ dictWord{12, 10, 193},
+ dictWord{141, 10, 430},
+ dictWord{133, 0, 965},
+ dictWord{134, 0, 182},
+ dictWord{10, 0, 65},
+ dictWord{10, 0, 488},
+ dictWord{138, 0, 497},
+ dictWord{135, 11, 1346},
+ dictWord{6, 0, 973},
+ dictWord{6, 0, 1158},
+ dictWord{10, 11, 200},
+ dictWord{19, 11, 2},
+ dictWord{151, 11, 22},
+ dictWord{4, 11, 190},
+ dictWord{133, 11, 554},
+ dictWord{133, 10, 679},
+ dictWord{7, 0, 328},
+ dictWord{137, 10, 326},
+ dictWord{133, 11, 1001},
+ dictWord{9, 0, 588},
+ dictWord{138, 0, 260},
+ dictWord{133, 11, 446},
+ dictWord{135, 10, 1128},
+ dictWord{135, 10, 1796},
+ dictWord{147, 11, 119},
+ dictWord{134, 0, 1786},
+ dictWord{6, 0, 1328},
+ dictWord{6, 0, 1985},
+ dictWord{8, 0, 962},
+ dictWord{138, 0, 1017},
+ dictWord{135, 0, 308},
+ dictWord{11, 0, 508},
+ dictWord{4, 10, 574},
+ dictWord{7, 10, 350},
+ dictWord{7, 10, 1024},
+ dictWord{8, 10, 338},
+ dictWord{9, 10, 677},
+ dictWord{138, 10, 808},
+ dictWord{138, 11, 752},
+ dictWord{135, 10, 1081},
+ dictWord{137, 11, 96},
+ dictWord{7, 10, 1676},
+ dictWord{135, 10, 2037},
+ dictWord{136, 0, 588},
+ dictWord{132, 11, 304},
+ dictWord{133, 0, 614},
+ dictWord{140, 0, 793},
+ dictWord{136, 0, 287},
+ dictWord{137, 10, 297},
+ dictWord{141, 10, 37},
+ dictWord{6, 11, 53},
+ dictWord{6, 11, 199},
+ dictWord{7, 11, 1408},
+ dictWord{8, 11, 32},
+ dictWord{8, 11, 93},
+ dictWord{9, 11, 437},
+ dictWord{10, 11, 397},
+ dictWord{10, 11, 629},
+ dictWord{11, 11, 593},
+ dictWord{11, 11, 763},
+ dictWord{13, 11, 326},
+ dictWord{145, 11, 35},
+ dictWord{134, 11, 105},
+ dictWord{9, 11, 320},
+ dictWord{10, 11, 506},
+ dictWord{138, 11, 794},
+ dictWord{5, 11, 114},
+ dictWord{5, 11, 255},
+ dictWord{141, 11, 285},
+ dictWord{140, 0, 290},
+ dictWord{7, 11, 2035},
+ dictWord{8, 11, 19},
+ dictWord{9, 11, 89},
+ dictWord{138, 11, 831},
+ dictWord{134, 0, 1136},
+ dictWord{7, 0, 719},
+ dictWord{8, 0, 796},
+ dictWord{8, 0, 809},
+ dictWord{8, 0, 834},
+ dictWord{6, 10, 306},
+ dictWord{7, 10, 1140},
+ dictWord{7, 10, 1340},
+ dictWord{8, 10, 133},
+ dictWord{138, 10, 449},
+ dictWord{139, 10, 1011},
+ dictWord{5, 0, 210},
+ dictWord{6, 0, 213},
+ dictWord{7, 0, 60},
+ dictWord{10, 0, 364},
+ dictWord{139, 0, 135},
+ dictWord{5, 0, 607},
+ dictWord{8, 0, 326},
+ dictWord{136, 0, 490},
+ dictWord{138, 11, 176},
+ dictWord{132, 0, 701},
+ dictWord{5, 0, 472},
+ dictWord{7, 0, 380},
+ dictWord{137, 0, 758},
+ dictWord{135, 0, 1947},
+ dictWord{6, 0, 1079},
+ dictWord{138, 0, 278},
+ dictWord{138, 11, 391},
+ dictWord{5, 10, 329},
+ dictWord{8, 10, 260},
+ dictWord{139, 11, 156},
+ dictWord{4, 0, 386},
+ dictWord{7, 0, 41},
+ dictWord{8, 0, 405},
+ dictWord{8, 0, 728},
+ dictWord{9, 0, 497},
+ dictWord{11, 0, 110},
+ dictWord{11, 0, 360},
+ dictWord{15, 0, 37},
+ dictWord{144, 0, 84},
+ dictWord{5, 0, 46},
+ dictWord{7, 0, 1452},
+ dictWord{7, 0, 1480},
+ dictWord{8, 0, 634},
+ dictWord{140, 0, 472},
+ dictWord{136, 0, 961},
+ dictWord{4, 0, 524},
+ dictWord{136, 0, 810},
+ dictWord{10, 0, 238},
+ dictWord{141, 0, 33},
+ dictWord{132, 10, 657},
+ dictWord{152, 10, 7},
+ dictWord{133, 0, 532},
+ dictWord{5, 0, 997},
+ dictWord{135, 10, 1665},
+ dictWord{7, 11, 594},
+ dictWord{7, 11, 851},
+ dictWord{7, 11, 1858},
+ dictWord{9, 11, 411},
+ dictWord{9, 11, 574},
+ dictWord{9, 11, 666},
+ dictWord{9, 11, 737},
+ dictWord{10, 11, 346},
+ dictWord{10, 11, 712},
+ dictWord{11, 11, 246},
+ dictWord{11, 11, 432},
+ dictWord{11, 11, 517},
+ dictWord{11, 11, 647},
+ dictWord{11, 11, 679},
+ dictWord{11, 11, 727},
+ dictWord{12, 11, 304},
+ dictWord{12, 11, 305},
+ dictWord{12, 11, 323},
+ dictWord{12, 11, 483},
+ dictWord{12, 11, 572},
+ dictWord{12, 11, 593},
+ dictWord{12, 11, 602},
+ dictWord{13, 11, 95},
+ dictWord{13, 11, 101},
+ dictWord{13, 11, 171},
+ dictWord{13, 11, 315},
+ dictWord{13, 11, 378},
+ dictWord{13, 11, 425},
+ dictWord{13, 11, 475},
+ dictWord{14, 11, 63},
+ dictWord{14, 11, 380},
+ dictWord{14, 11, 384},
+ dictWord{15, 11, 133},
+ dictWord{18, 11, 112},
+ dictWord{148, 11, 72},
+ dictWord{5, 11, 955},
+ dictWord{136, 11, 814},
+ dictWord{134, 0, 1301},
+ dictWord{5, 10, 66},
+ dictWord{7, 10, 1896},
+ dictWord{136, 10, 288},
+ dictWord{133, 11, 56},
+ dictWord{134, 10, 1643},
+ dictWord{6, 0, 1298},
+ dictWord{148, 11, 100},
+ dictWord{5, 0, 782},
+ dictWord{5, 0, 829},
+ dictWord{6, 0, 671},
+ dictWord{6, 0, 1156},
+ dictWord{6, 0, 1738},
+ dictWord{137, 11, 621},
+ dictWord{4, 0, 306},
+ dictWord{5, 0, 570},
+ dictWord{7, 0, 1347},
+ dictWord{5, 10, 91},
+ dictWord{5, 10, 648},
+ dictWord{5, 10, 750},
+ dictWord{5, 10, 781},
+ dictWord{6, 10, 54},
+ dictWord{6, 10, 112},
+ dictWord{6, 10, 402},
+ dictWord{6, 10, 1732},
+ dictWord{7, 10, 315},
+ dictWord{7, 10, 749},
+ dictWord{7, 10, 1900},
+ dictWord{9, 10, 78},
+ dictWord{9, 10, 508},
+ dictWord{10, 10, 611},
+ dictWord{10, 10, 811},
+ dictWord{11, 10, 510},
+ dictWord{11, 10, 728},
+ dictWord{13, 10, 36},
+ dictWord{14, 10, 39},
+ dictWord{16, 10, 83},
+ dictWord{17, 10, 124},
+ dictWord{148, 10, 30},
+ dictWord{8, 10, 570},
+ dictWord{9, 11, 477},
+ dictWord{141, 11, 78},
+ dictWord{4, 11, 639},
+ dictWord{10, 11, 4},
+ dictWord{10, 10, 322},
+ dictWord{10, 10, 719},
+ dictWord{11, 10, 407},
+ dictWord{11, 11, 638},
+ dictWord{12, 11, 177},
+ dictWord{148, 11, 57},
+ dictWord{7, 0, 1823},
+ dictWord{139, 0, 693},
+ dictWord{7, 0, 759},
+ dictWord{5, 11, 758},
+ dictWord{8, 10, 125},
+ dictWord{8, 10, 369},
+ dictWord{8, 10, 524},
+ dictWord{10, 10, 486},
+ dictWord{11, 10, 13},
+ dictWord{11, 10, 381},
+ dictWord{11, 10, 736},
+ dictWord{11, 10, 766},
+ dictWord{11, 10, 845},
+ dictWord{13, 10, 114},
+ dictWord{13, 10, 292},
+ dictWord{142, 10, 47},
+ dictWord{7, 0, 1932},
+ dictWord{6, 10, 1684},
+ dictWord{6, 10, 1731},
+ dictWord{7, 10, 356},
+ dictWord{8, 10, 54},
+ dictWord{8, 10, 221},
+ dictWord{9, 10, 225},
+ dictWord{9, 10, 356},
+ dictWord{10, 10, 77},
+ dictWord{10, 10, 446},
+ dictWord{10, 10, 731},
+ dictWord{12, 10, 404},
+ dictWord{141, 10, 491},
+ dictWord{135, 11, 552},
+ dictWord{135, 11, 1112},
+ dictWord{4, 0, 78},
+ dictWord{5, 0, 96},
+ dictWord{5, 0, 182},
+ dictWord{6, 0, 1257},
+ dictWord{7, 0, 1724},
+ dictWord{7, 0, 1825},
+ dictWord{10, 0, 394},
+ dictWord{10, 0, 471},
+ dictWord{11, 0, 532},
+ dictWord{14, 0, 340},
+ dictWord{145, 0, 88},
+ dictWord{139, 11, 328},
+ dictWord{135, 0, 1964},
+ dictWord{132, 10, 411},
+ dictWord{4, 10, 80},
+ dictWord{5, 10, 44},
+ dictWord{137, 11, 133},
+ dictWord{5, 11, 110},
+ dictWord{6, 11, 169},
+ dictWord{6, 11, 1702},
+ dictWord{7, 11, 400},
+ dictWord{8, 11, 538},
+ dictWord{9, 11, 184},
+ dictWord{9, 11, 524},
+ dictWord{140, 11, 218},
+ dictWord{4, 0, 521},
+ dictWord{5, 10, 299},
+ dictWord{7, 10, 1083},
+ dictWord{140, 11, 554},
+ dictWord{6, 11, 133},
+ dictWord{9, 11, 353},
+ dictWord{12, 11, 628},
+ dictWord{146, 11, 79},
+ dictWord{6, 0, 215},
+ dictWord{7, 0, 584},
+ dictWord{7, 0, 1028},
+ dictWord{7, 0, 1473},
+ dictWord{7, 0, 1721},
+ dictWord{9, 0, 424},
+ dictWord{138, 0, 779},
+ dictWord{7, 0, 857},
+ dictWord{7, 0, 1209},
+ dictWord{7, 10, 1713},
+ dictWord{9, 10, 537},
+ dictWord{10, 10, 165},
+ dictWord{12, 10, 219},
+ dictWord{140, 10, 561},
+ dictWord{4, 10, 219},
+ dictWord{6, 11, 93},
+ dictWord{7, 11, 1422},
+ dictWord{7, 10, 1761},
+ dictWord{7, 11, 1851},
+ dictWord{8, 11, 673},
+ dictWord{9, 10, 86},
+ dictWord{9, 11, 529},
+ dictWord{140, 11, 43},
+ dictWord{137, 11, 371},
+ dictWord{136, 0, 671},
+ dictWord{5, 0, 328},
+ dictWord{135, 0, 918},
+ dictWord{132, 0, 529},
+ dictWord{9, 11, 25},
+ dictWord{10, 11, 467},
+ dictWord{138, 11, 559},
+ dictWord{4, 11, 335},
+ dictWord{135, 11, 942},
+ dictWord{134, 0, 716},
+ dictWord{134, 0, 1509},
+ dictWord{6, 0, 67},
+ dictWord{7, 0, 258},
+ dictWord{7, 0, 1630},
+ dictWord{9, 0, 354},
+ dictWord{9, 0, 675},
+ dictWord{10, 0, 830},
+ dictWord{14, 0, 80},
+ dictWord{17, 0, 80},
+ dictWord{140, 10, 428},
+ dictWord{134, 0, 1112},
+ dictWord{6, 0, 141},
+ dictWord{7, 0, 225},
+ dictWord{9, 0, 59},
+ dictWord{9, 0, 607},
+ dictWord{10, 0, 312},
+ dictWord{11, 0, 687},
+ dictWord{12, 0, 555},
+ dictWord{13, 0, 373},
+ dictWord{13, 0, 494},
+ dictWord{148, 0, 58},
+ dictWord{133, 10, 514},
+ dictWord{8, 11, 39},
+ dictWord{10, 11, 773},
+ dictWord{11, 11, 84},
+ dictWord{12, 11, 205},
+ dictWord{142, 11, 1},
+ dictWord{8, 0, 783},
+ dictWord{5, 11, 601},
+ dictWord{133, 11, 870},
+ dictWord{136, 11, 594},
+ dictWord{4, 10, 55},
+ dictWord{5, 10, 301},
+ dictWord{6, 10, 571},
+ dictWord{14, 10, 49},
+ dictWord{146, 10, 102},
+ dictWord{132, 11, 181},
+ dictWord{134, 11, 1652},
+ dictWord{133, 10, 364},
+ dictWord{4, 11, 97},
+ dictWord{5, 11, 147},
+ dictWord{6, 11, 286},
+ dictWord{7, 11, 1362},
+ dictWord{141, 11, 176},
+ dictWord{4, 10, 76},
+ dictWord{7, 10, 1550},
+ dictWord{9, 10, 306},
+ dictWord{9, 10, 430},
+ dictWord{9, 10, 663},
+ dictWord{10, 10, 683},
+ dictWord{11, 10, 427},
+ dictWord{11, 10, 753},
+ dictWord{12, 10, 334},
+ dictWord{12, 10, 442},
+ dictWord{14, 10, 258},
+ dictWord{14, 10, 366},
+ dictWord{143, 10, 131},
+ dictWord{137, 10, 52},
+ dictWord{6, 0, 955},
+ dictWord{134, 0, 1498},
+ dictWord{6, 11, 375},
+ dictWord{7, 11, 169},
+ dictWord{7, 11, 254},
+ dictWord{136, 11, 780},
+ dictWord{7, 0, 430},
+ dictWord{11, 0, 46},
+ dictWord{14, 0, 343},
+ dictWord{142, 11, 343},
+ dictWord{135, 0, 1183},
+ dictWord{5, 0, 602},
+ dictWord{7, 0, 2018},
+ dictWord{9, 0, 418},
+ dictWord{9, 0, 803},
+ dictWord{135, 11, 1447},
+ dictWord{8, 0, 677},
+ dictWord{135, 11, 1044},
+ dictWord{139, 11, 285},
+ dictWord{4, 10, 656},
+ dictWord{135, 10, 779},
+ dictWord{135, 10, 144},
+ dictWord{5, 11, 629},
+ dictWord{135, 11, 1549},
+ dictWord{135, 10, 1373},
+ dictWord{138, 11, 209},
+ dictWord{7, 10, 554},
+ dictWord{7, 10, 605},
+ dictWord{141, 10, 10},
+ dictWord{5, 10, 838},
+ dictWord{5, 10, 841},
+ dictWord{134, 10, 1649},
+ dictWord{133, 10, 1012},
+ dictWord{6, 0, 1357},
+ dictWord{134, 0, 1380},
+ dictWord{144, 0, 53},
+ dictWord{6, 0, 590},
+ dictWord{7, 10, 365},
+ dictWord{7, 10, 1357},
+ dictWord{7, 10, 1497},
+ dictWord{8, 10, 154},
+ dictWord{141, 10, 281},
+ dictWord{133, 10, 340},
+ dictWord{132, 11, 420},
+ dictWord{135, 0, 329},
+ dictWord{147, 11, 32},
+ dictWord{4, 0, 469},
+ dictWord{10, 11, 429},
+ dictWord{139, 10, 495},
+ dictWord{8, 10, 261},
+ dictWord{9, 10, 144},
+ dictWord{9, 10, 466},
+ dictWord{10, 10, 370},
+ dictWord{12, 10, 470},
+ dictWord{13, 10, 144},
+ dictWord{142, 10, 348},
+ dictWord{142, 0, 460},
+ dictWord{4, 11, 325},
+ dictWord{9, 10, 897},
+ dictWord{138, 11, 125},
+ dictWord{6, 0, 1743},
+ dictWord{6, 10, 248},
+ dictWord{9, 10, 546},
+ dictWord{10, 10, 535},
+ dictWord{11, 10, 681},
+ dictWord{141, 10, 135},
+ dictWord{4, 0, 990},
+ dictWord{5, 0, 929},
+ dictWord{6, 0, 340},
+ dictWord{8, 0, 376},
+ dictWord{8, 0, 807},
+ dictWord{8, 0, 963},
+ dictWord{8, 0, 980},
+ dictWord{138, 0, 1007},
+ dictWord{134, 0, 1603},
+ dictWord{140, 0, 250},
+ dictWord{4, 11, 714},
+ dictWord{133, 11, 469},
+ dictWord{134, 10, 567},
+ dictWord{136, 10, 445},
+ dictWord{5, 0, 218},
+ dictWord{7, 0, 1610},
+ dictWord{8, 0, 646},
+ dictWord{10, 0, 83},
+ dictWord{11, 11, 138},
+ dictWord{140, 11, 40},
+ dictWord{7, 0, 1512},
+ dictWord{135, 0, 1794},
+ dictWord{135, 11, 1216},
+ dictWord{11, 0, 0},
+ dictWord{16, 0, 78},
+ dictWord{132, 11, 718},
+ dictWord{133, 0, 571},
+ dictWord{132, 0, 455},
+ dictWord{134, 0, 1012},
+ dictWord{5, 11, 124},
+ dictWord{5, 11, 144},
+ dictWord{6, 11, 548},
+ dictWord{7, 11, 15},
+ dictWord{7, 11, 153},
+ dictWord{137, 11, 629},
+ dictWord{142, 11, 10},
+ dictWord{6, 11, 75},
+ dictWord{7, 11, 1531},
+ dictWord{8, 11, 416},
+ dictWord{9, 11, 240},
+ dictWord{9, 11, 275},
+ dictWord{10, 11, 100},
+ dictWord{11, 11, 658},
+ dictWord{11, 11, 979},
+ dictWord{12, 11, 86},
+ dictWord{13, 11, 468},
+ dictWord{14, 11, 66},
+ dictWord{14, 11, 207},
+ dictWord{15, 11, 20},
+ dictWord{15, 11, 25},
+ dictWord{144, 11, 58},
+ dictWord{132, 10, 577},
+ dictWord{5, 11, 141},
+ dictWord{5, 11, 915},
+ dictWord{6, 11, 1783},
+ dictWord{7, 11, 211},
+ dictWord{7, 11, 698},
+ dictWord{7, 11, 1353},
+ dictWord{9, 11, 83},
+ dictWord{9, 11, 281},
+ dictWord{10, 11, 376},
+ dictWord{10, 11, 431},
+ dictWord{11, 11, 543},
+ dictWord{12, 11, 664},
+ dictWord{13, 11, 280},
+ dictWord{13, 11, 428},
+ dictWord{14, 11, 61},
+ dictWord{14, 11, 128},
+ dictWord{17, 11, 52},
+ dictWord{145, 11, 81},
+ dictWord{6, 0, 161},
+ dictWord{7, 0, 372},
+ dictWord{137, 0, 597},
+ dictWord{132, 0, 349},
+ dictWord{10, 11, 702},
+ dictWord{139, 11, 245},
+ dictWord{134, 0, 524},
+ dictWord{134, 10, 174},
+ dictWord{6, 0, 432},
+ dictWord{9, 0, 751},
+ dictWord{139, 0, 322},
+ dictWord{147, 11, 94},
+ dictWord{4, 11, 338},
+ dictWord{133, 11, 400},
+ dictWord{5, 0, 468},
+ dictWord{10, 0, 325},
+ dictWord{11, 0, 856},
+ dictWord{12, 0, 345},
+ dictWord{143, 0, 104},
+ dictWord{133, 0, 223},
+ dictWord{132, 0, 566},
+ dictWord{4, 11, 221},
+ dictWord{5, 11, 659},
+ dictWord{5, 11, 989},
+ dictWord{7, 11, 697},
+ dictWord{7, 11, 1211},
+ dictWord{138, 11, 284},
+ dictWord{135, 11, 1070},
+ dictWord{4, 0, 59},
+ dictWord{135, 0, 1394},
+ dictWord{6, 0, 436},
+ dictWord{11, 0, 481},
+ dictWord{5, 10, 878},
+ dictWord{133, 10, 972},
+ dictWord{4, 0, 48},
+ dictWord{5, 0, 271},
+ dictWord{135, 0, 953},
+ dictWord{5, 0, 610},
+ dictWord{136, 0, 457},
+ dictWord{4, 0, 773},
+ dictWord{5, 0, 618},
+ dictWord{137, 0, 756},
+ dictWord{133, 0, 755},
+ dictWord{135, 0, 1217},
+ dictWord{138, 11, 507},
+ dictWord{132, 10, 351},
+ dictWord{132, 0, 197},
+ dictWord{143, 11, 78},
+ dictWord{4, 11, 188},
+ dictWord{7, 11, 805},
+ dictWord{11, 11, 276},
+ dictWord{142, 11, 293},
+ dictWord{5, 11, 884},
+ dictWord{139, 11, 991},
+ dictWord{132, 10, 286},
+ dictWord{10, 0, 259},
+ dictWord{10, 0, 428},
+ dictWord{7, 10, 438},
+ dictWord{7, 10, 627},
+ dictWord{7, 10, 1516},
+ dictWord{8, 10, 40},
+ dictWord{9, 10, 56},
+ dictWord{9, 10, 294},
+ dictWord{11, 10, 969},
+ dictWord{11, 10, 995},
+ dictWord{146, 10, 148},
+ dictWord{4, 0, 356},
+ dictWord{5, 0, 217},
+ dictWord{5, 0, 492},
+ dictWord{5, 0, 656},
+ dictWord{8, 0, 544},
+ dictWord{136, 11, 544},
+ dictWord{5, 0, 259},
+ dictWord{6, 0, 1230},
+ dictWord{7, 0, 414},
+ dictWord{7, 0, 854},
+ dictWord{142, 0, 107},
+ dictWord{132, 0, 1007},
+ dictWord{15, 0, 14},
+ dictWord{144, 0, 5},
+ dictWord{6, 0, 1580},
+ dictWord{132, 10, 738},
+ dictWord{132, 11, 596},
+ dictWord{132, 0, 673},
+ dictWord{133, 10, 866},
+ dictWord{6, 0, 1843},
+ dictWord{135, 11, 1847},
+ dictWord{4, 0, 165},
+ dictWord{7, 0, 1398},
+ dictWord{135, 0, 1829},
+ dictWord{135, 11, 1634},
+ dictWord{147, 11, 65},
+ dictWord{6, 0, 885},
+ dictWord{6, 0, 1009},
+ dictWord{137, 0, 809},
+ dictWord{133, 10, 116},
+ dictWord{132, 10, 457},
+ dictWord{136, 11, 770},
+ dictWord{9, 0, 498},
+ dictWord{12, 0, 181},
+ dictWord{10, 11, 361},
+ dictWord{142, 11, 316},
+ dictWord{134, 11, 595},
+ dictWord{5, 0, 9},
+ dictWord{7, 0, 297},
+ dictWord{7, 0, 966},
+ dictWord{140, 0, 306},
+ dictWord{4, 11, 89},
+ dictWord{5, 11, 489},
+ dictWord{6, 11, 315},
+ dictWord{7, 11, 553},
+ dictWord{7, 11, 1745},
+ dictWord{138, 11, 243},
+ dictWord{134, 0, 1487},
+ dictWord{132, 0, 437},
+ dictWord{5, 0, 146},
+ dictWord{6, 0, 411},
+ dictWord{138, 0, 721},
+ dictWord{5, 10, 527},
+ dictWord{6, 10, 189},
+ dictWord{135, 10, 859},
+ dictWord{11, 10, 104},
+ dictWord{11, 10, 554},
+ dictWord{15, 10, 60},
+ dictWord{143, 10, 125},
+ dictWord{6, 11, 1658},
+ dictWord{9, 11, 3},
+ dictWord{10, 11, 154},
+ dictWord{11, 11, 641},
+ dictWord{13, 11, 85},
+ dictWord{13, 11, 201},
+ dictWord{141, 11, 346},
+ dictWord{6, 0, 177},
+ dictWord{135, 0, 467},
+ dictWord{134, 0, 1377},
+ dictWord{134, 10, 116},
+ dictWord{136, 11, 645},
+ dictWord{4, 11, 166},
+ dictWord{5, 11, 505},
+ dictWord{6, 11, 1670},
+ dictWord{137, 11, 110},
+ dictWord{133, 10, 487},
+ dictWord{4, 10, 86},
+ dictWord{5, 10, 667},
+ dictWord{5, 10, 753},
+ dictWord{6, 10, 316},
+ dictWord{6, 10, 455},
+ dictWord{135, 10, 946},
+ dictWord{133, 0, 200},
+ dictWord{132, 0, 959},
+ dictWord{6, 0, 1928},
+ dictWord{134, 0, 1957},
+ dictWord{139, 11, 203},
+ dictWord{150, 10, 45},
+ dictWord{4, 10, 79},
+ dictWord{7, 10, 1773},
+ dictWord{10, 10, 450},
+ dictWord{11, 10, 589},
+ dictWord{13, 10, 332},
+ dictWord{13, 10, 493},
+ dictWord{14, 10, 183},
+ dictWord{14, 10, 334},
+ dictWord{14, 10, 362},
+ dictWord{14, 10, 368},
+ dictWord{14, 10, 376},
+ dictWord{14, 10, 379},
+ dictWord{19, 10, 90},
+ dictWord{19, 10, 103},
+ dictWord{19, 10, 127},
+ dictWord{148, 10, 90},
+ dictWord{6, 0, 1435},
+ dictWord{135, 11, 1275},
+ dictWord{134, 0, 481},
+ dictWord{7, 11, 445},
+ dictWord{8, 11, 307},
+ dictWord{8, 11, 704},
+ dictWord{10, 11, 41},
+ dictWord{10, 11, 439},
+ dictWord{11, 11, 237},
+ dictWord{11, 11, 622},
+ dictWord{140, 11, 201},
+ dictWord{135, 11, 869},
+ dictWord{4, 0, 84},
+ dictWord{7, 0, 1482},
+ dictWord{10, 0, 76},
+ dictWord{138, 0, 142},
+ dictWord{11, 11, 277},
+ dictWord{144, 11, 14},
+ dictWord{135, 11, 1977},
+ dictWord{4, 11, 189},
+ dictWord{5, 11, 713},
+ dictWord{136, 11, 57},
+ dictWord{133, 0, 1015},
+ dictWord{138, 11, 371},
+ dictWord{4, 0, 315},
+ dictWord{5, 0, 507},
+ dictWord{135, 0, 1370},
+ dictWord{4, 11, 552},
+ dictWord{142, 10, 381},
+ dictWord{9, 0, 759},
+ dictWord{16, 0, 31},
+ dictWord{16, 0, 39},
+ dictWord{16, 0, 75},
+ dictWord{18, 0, 24},
+ dictWord{20, 0, 42},
+ dictWord{152, 0, 1},
+ dictWord{134, 0, 712},
+ dictWord{134, 0, 1722},
+ dictWord{133, 10, 663},
+ dictWord{133, 10, 846},
+ dictWord{8, 0, 222},
+ dictWord{8, 0, 476},
+ dictWord{9, 0, 238},
+ dictWord{11, 0, 516},
+ dictWord{11, 0, 575},
+ dictWord{15, 0, 109},
+ dictWord{146, 0, 100},
+ dictWord{7, 0, 1402},
+ dictWord{7, 0, 1414},
+ dictWord{12, 0, 456},
+ dictWord{5, 10, 378},
+ dictWord{8, 10, 465},
+ dictWord{9, 10, 286},
+ dictWord{10, 10, 185},
+ dictWord{10, 10, 562},
+ dictWord{10, 10, 635},
+ dictWord{11, 10, 31},
+ dictWord{11, 10, 393},
+ dictWord{13, 10, 312},
+ dictWord{18, 10, 65},
+ dictWord{18, 10, 96},
+ dictWord{147, 10, 89},
+ dictWord{4, 0, 986},
+ dictWord{6, 0, 1958},
+ dictWord{6, 0, 2032},
+ dictWord{8, 0, 934},
+ dictWord{138, 0, 985},
+ dictWord{7, 10, 1880},
+ dictWord{9, 10, 680},
+ dictWord{139, 10, 798},
+ dictWord{134, 10, 1770},
+ dictWord{145, 11, 49},
+ dictWord{132, 11, 614},
+ dictWord{132, 10, 648},
+ dictWord{5, 10, 945},
+ dictWord{6, 10, 1656},
+ dictWord{6, 10, 1787},
+ dictWord{7, 10, 167},
+ dictWord{8, 10, 824},
+ dictWord{9, 10, 391},
+ dictWord{10, 10, 375},
+ dictWord{139, 10, 185},
+ dictWord{138, 11, 661},
+ dictWord{7, 0, 1273},
+ dictWord{135, 11, 1945},
+ dictWord{7, 0, 706},
+ dictWord{7, 0, 1058},
+ dictWord{138, 0, 538},
+ dictWord{7, 10, 1645},
+ dictWord{8, 10, 352},
+ dictWord{137, 10, 249},
+ dictWord{132, 10, 152},
+ dictWord{11, 0, 92},
+ dictWord{11, 0, 196},
+ dictWord{11, 0, 409},
+ dictWord{11, 0, 450},
+ dictWord{11, 0, 666},
+ dictWord{11, 0, 777},
+ dictWord{12, 0, 262},
+ dictWord{13, 0, 385},
+ dictWord{13, 0, 393},
+ dictWord{15, 0, 115},
+ dictWord{16, 0, 45},
+ dictWord{145, 0, 82},
+ dictWord{133, 10, 1006},
+ dictWord{6, 0, 40},
+ dictWord{135, 0, 1781},
+ dictWord{9, 11, 614},
+ dictWord{139, 11, 327},
+ dictWord{5, 10, 420},
+ dictWord{135, 10, 1449},
+ dictWord{135, 0, 431},
+ dictWord{10, 0, 97},
+ dictWord{135, 10, 832},
+ dictWord{6, 0, 423},
+ dictWord{7, 0, 665},
+ dictWord{135, 0, 1210},
+ dictWord{7, 0, 237},
+ dictWord{8, 0, 664},
+ dictWord{9, 0, 42},
+ dictWord{9, 0, 266},
+ dictWord{9, 0, 380},
+ dictWord{9, 0, 645},
+ dictWord{10, 0, 177},
+ dictWord{138, 0, 276},
+ dictWord{7, 0, 264},
+ dictWord{133, 10, 351},
+ dictWord{8, 0, 213},
+ dictWord{5, 10, 40},
+ dictWord{7, 10, 598},
+ dictWord{7, 10, 1638},
+ dictWord{9, 10, 166},
+ dictWord{9, 10, 640},
+ dictWord{9, 10, 685},
+ dictWord{9, 10, 773},
+ dictWord{11, 10, 215},
+ dictWord{13, 10, 65},
+ dictWord{14, 10, 172},
+ dictWord{14, 10, 317},
+ dictWord{145, 10, 6},
+ dictWord{5, 11, 84},
+ dictWord{134, 11, 163},
+ dictWord{8, 10, 60},
+ dictWord{9, 10, 343},
+ dictWord{139, 10, 769},
+ dictWord{137, 0, 455},
+ dictWord{133, 11, 410},
+ dictWord{8, 0, 906},
+ dictWord{12, 0, 700},
+ dictWord{12, 0, 706},
+ dictWord{140, 0, 729},
+ dictWord{21, 11, 33},
+ dictWord{150, 11, 40},
+ dictWord{7, 10, 1951},
+ dictWord{8, 10, 765},
+ dictWord{8, 10, 772},
+ dictWord{140, 10, 671},
+ dictWord{7, 10, 108},
+ dictWord{8, 10, 219},
+ dictWord{8, 10, 388},
+ dictWord{9, 10, 639},
+ dictWord{9, 10, 775},
+ dictWord{11, 10, 275},
+ dictWord{140, 10, 464},
+ dictWord{5, 11, 322},
+ dictWord{7, 11, 1941},
+ dictWord{8, 11, 186},
+ dictWord{9, 11, 262},
+ dictWord{10, 11, 187},
+ dictWord{14, 11, 208},
+ dictWord{146, 11, 130},
+ dictWord{139, 0, 624},
+ dictWord{8, 0, 574},
+ dictWord{5, 11, 227},
+ dictWord{140, 11, 29},
+ dictWord{7, 11, 1546},
+ dictWord{11, 11, 299},
+ dictWord{142, 11, 407},
+ dictWord{5, 10, 15},
+ dictWord{6, 10, 56},
+ dictWord{7, 10, 1758},
+ dictWord{8, 10, 500},
+ dictWord{9, 10, 730},
+ dictWord{11, 10, 331},
+ dictWord{13, 10, 150},
+ dictWord{142, 10, 282},
+ dictWord{7, 11, 1395},
+ dictWord{8, 11, 486},
+ dictWord{9, 11, 236},
+ dictWord{9, 11, 878},
+ dictWord{10, 11, 218},
+ dictWord{11, 11, 95},
+ dictWord{19, 11, 17},
+ dictWord{147, 11, 31},
+ dictWord{135, 11, 2043},
+ dictWord{4, 0, 354},
+ dictWord{146, 11, 4},
+ dictWord{140, 11, 80},
+ dictWord{135, 0, 1558},
+ dictWord{134, 10, 1886},
+ dictWord{5, 10, 205},
+ dictWord{6, 10, 438},
+ dictWord{137, 10, 711},
+ dictWord{133, 11, 522},
+ dictWord{133, 10, 534},
+ dictWord{7, 0, 235},
+ dictWord{7, 0, 1475},
+ dictWord{15, 0, 68},
+ dictWord{146, 0, 120},
+ dictWord{137, 10, 691},
+ dictWord{4, 0, 942},
+ dictWord{6, 0, 1813},
+ dictWord{8, 0, 917},
+ dictWord{10, 0, 884},
+ dictWord{12, 0, 696},
+ dictWord{12, 0, 717},
+ dictWord{12, 0, 723},
+ dictWord{12, 0, 738},
+ dictWord{12, 0, 749},
+ dictWord{12, 0, 780},
+ dictWord{16, 0, 97},
+ dictWord{146, 0, 169},
+ dictWord{6, 10, 443},
+ dictWord{8, 11, 562},
+ dictWord{9, 10, 237},
+ dictWord{9, 10, 571},
+ dictWord{9, 10, 695},
+ dictWord{10, 10, 139},
+ dictWord{11, 10, 715},
+ dictWord{12, 10, 417},
+ dictWord{141, 10, 421},
+ dictWord{135, 0, 957},
+ dictWord{133, 0, 830},
+ dictWord{134, 11, 1771},
+ dictWord{146, 0, 23},
+ dictWord{5, 0, 496},
+ dictWord{6, 0, 694},
+ dictWord{7, 0, 203},
+ dictWord{7, 11, 1190},
+ dictWord{137, 11, 620},
+ dictWord{137, 11, 132},
+ dictWord{6, 0, 547},
+ dictWord{134, 0, 1549},
+ dictWord{8, 11, 258},
+ dictWord{9, 11, 208},
+ dictWord{137, 11, 359},
+ dictWord{4, 0, 864},
+ dictWord{5, 0, 88},
+ dictWord{137, 0, 239},
+ dictWord{135, 11, 493},
+ dictWord{4, 11, 317},
+ dictWord{135, 11, 1279},
+ dictWord{132, 11, 477},
+ dictWord{4, 10, 578},
+ dictWord{5, 11, 63},
+ dictWord{133, 11, 509},
+ dictWord{7, 0, 650},
+ dictWord{135, 0, 1310},
+ dictWord{7, 0, 1076},
+ dictWord{9, 0, 80},
+ dictWord{11, 0, 78},
+ dictWord{11, 0, 421},
+ dictWord{11, 0, 534},
+ dictWord{140, 0, 545},
+ dictWord{132, 11, 288},
+ dictWord{12, 0, 553},
+ dictWord{14, 0, 118},
+ dictWord{133, 10, 923},
+ dictWord{7, 0, 274},
+ dictWord{11, 0, 479},
+ dictWord{139, 0, 507},
+ dictWord{8, 11, 89},
+ dictWord{8, 11, 620},
+ dictWord{9, 11, 49},
+ dictWord{10, 11, 774},
+ dictWord{11, 11, 628},
+ dictWord{12, 11, 322},
+ dictWord{143, 11, 124},
+ dictWord{4, 0, 497},
+ dictWord{135, 0, 1584},
+ dictWord{7, 0, 261},
+ dictWord{7, 0, 1115},
+ dictWord{7, 0, 1354},
+ dictWord{7, 0, 1404},
+ dictWord{7, 0, 1588},
+ dictWord{7, 0, 1705},
+ dictWord{7, 0, 1902},
+ dictWord{9, 0, 465},
+ dictWord{10, 0, 248},
+ dictWord{10, 0, 349},
+ dictWord{10, 0, 647},
+ dictWord{11, 0, 527},
+ dictWord{11, 0, 660},
+ dictWord{11, 0, 669},
+ dictWord{12, 0, 529},
+ dictWord{13, 0, 305},
+ dictWord{132, 10, 924},
+ dictWord{133, 10, 665},
+ dictWord{136, 0, 13},
+ dictWord{6, 0, 791},
+ dictWord{138, 11, 120},
+ dictWord{7, 0, 642},
+ dictWord{8, 0, 250},
+ dictWord{11, 0, 123},
+ dictWord{11, 0, 137},
+ dictWord{13, 0, 48},
+ dictWord{142, 0, 95},
+ dictWord{4, 10, 265},
+ dictWord{7, 10, 807},
+ dictWord{135, 10, 950},
+ dictWord{5, 10, 93},
+ dictWord{140, 10, 267},
+ dictWord{135, 0, 1429},
+ dictWord{4, 0, 949},
+ dictWord{10, 0, 885},
+ dictWord{10, 0, 891},
+ dictWord{10, 0, 900},
+ dictWord{10, 0, 939},
+ dictWord{12, 0, 760},
+ dictWord{142, 0, 449},
+ dictWord{139, 11, 366},
+ dictWord{132, 0, 818},
+ dictWord{134, 11, 85},
+ dictWord{135, 10, 994},
+ dictWord{7, 0, 330},
+ dictWord{5, 10, 233},
+ dictWord{5, 10, 320},
+ dictWord{6, 10, 140},
+ dictWord{136, 10, 295},
+ dictWord{4, 0, 1004},
+ dictWord{8, 0, 982},
+ dictWord{136, 0, 993},
+ dictWord{133, 10, 978},
+ dictWord{4, 10, 905},
+ dictWord{6, 10, 1701},
+ dictWord{137, 10, 843},
+ dictWord{10, 0, 545},
+ dictWord{140, 0, 301},
+ dictWord{6, 0, 947},
+ dictWord{134, 0, 1062},
+ dictWord{134, 0, 1188},
+ dictWord{4, 0, 904},
+ dictWord{5, 0, 794},
+ dictWord{152, 10, 6},
+ dictWord{134, 0, 1372},
+ dictWord{135, 11, 608},
+ dictWord{5, 11, 279},
+ dictWord{6, 11, 235},
+ dictWord{7, 11, 468},
+ dictWord{8, 11, 446},
+ dictWord{9, 11, 637},
+ dictWord{10, 11, 717},
+ dictWord{11, 11, 738},
+ dictWord{140, 11, 514},
+ dictWord{132, 10, 509},
+ dictWord{5, 11, 17},
+ dictWord{6, 11, 371},
+ dictWord{137, 11, 528},
+ dictWord{132, 0, 693},
+ dictWord{4, 11, 115},
+ dictWord{5, 11, 669},
+ dictWord{6, 11, 407},
+ dictWord{8, 11, 311},
+ dictWord{11, 11, 10},
+ dictWord{141, 11, 5},
+ dictWord{11, 0, 377},
+ dictWord{7, 10, 273},
+ dictWord{137, 11, 381},
+ dictWord{135, 0, 695},
+ dictWord{7, 0, 386},
+ dictWord{138, 0, 713},
+ dictWord{135, 10, 1041},
+ dictWord{134, 0, 1291},
+ dictWord{6, 0, 7},
+ dictWord{6, 0, 35},
+ dictWord{7, 0, 147},
+ dictWord{7, 0, 1069},
+ dictWord{7, 0, 1568},
+ dictWord{7, 0, 1575},
+ dictWord{7, 0, 1917},
+ dictWord{8, 0, 43},
+ dictWord{8, 0, 208},
+ dictWord{9, 0, 128},
+ dictWord{9, 0, 866},
+ dictWord{10, 0, 20},
+ dictWord{11, 0, 981},
+ dictWord{147, 0, 33},
+ dictWord{7, 0, 893},
+ dictWord{141, 0, 424},
+ dictWord{139, 10, 234},
+ dictWord{150, 11, 56},
+ dictWord{5, 11, 779},
+ dictWord{5, 11, 807},
+ dictWord{6, 11, 1655},
+ dictWord{134, 11, 1676},
+ dictWord{5, 10, 802},
+ dictWord{7, 10, 2021},
+ dictWord{136, 10, 805},
+ dictWord{4, 11, 196},
+ dictWord{5, 10, 167},
+ dictWord{5, 11, 558},
+ dictWord{5, 10, 899},
+ dictWord{5, 11, 949},
+ dictWord{6, 10, 410},
+ dictWord{137, 10, 777},
+ dictWord{137, 10, 789},
+ dictWord{134, 10, 1705},
+ dictWord{8, 0, 904},
+ dictWord{140, 0, 787},
+ dictWord{6, 0, 322},
+ dictWord{9, 0, 552},
+ dictWord{11, 0, 274},
+ dictWord{13, 0, 209},
+ dictWord{13, 0, 499},
+ dictWord{14, 0, 85},
+ dictWord{15, 0, 126},
+ dictWord{145, 0, 70},
+ dictWord{135, 10, 10},
+ dictWord{5, 10, 11},
+ dictWord{6, 10, 117},
+ dictWord{6, 10, 485},
+ dictWord{7, 10, 1133},
+ dictWord{9, 10, 582},
+ dictWord{9, 10, 594},
+ dictWord{11, 10, 21},
+ dictWord{11, 10, 818},
+ dictWord{12, 10, 535},
+ dictWord{141, 10, 86},
+ dictWord{4, 10, 264},
+ dictWord{7, 10, 1067},
+ dictWord{8, 10, 204},
+ dictWord{8, 10, 385},
+ dictWord{139, 10, 953},
+ dictWord{132, 11, 752},
+ dictWord{138, 10, 56},
+ dictWord{133, 10, 470},
+ dictWord{6, 0, 1808},
+ dictWord{8, 0, 83},
+ dictWord{8, 0, 742},
+ dictWord{8, 0, 817},
+ dictWord{9, 0, 28},
+ dictWord{9, 0, 29},
+ dictWord{9, 0, 885},
+ dictWord{10, 0, 387},
+ dictWord{11, 0, 633},
+ dictWord{11, 0, 740},
+ dictWord{13, 0, 235},
+ dictWord{13, 0, 254},
+ dictWord{15, 0, 143},
+ dictWord{143, 0, 146},
+ dictWord{140, 0, 49},
+ dictWord{134, 0, 1832},
+ dictWord{4, 11, 227},
+ dictWord{5, 11, 159},
+ dictWord{5, 11, 409},
+ dictWord{7, 11, 80},
+ dictWord{10, 11, 294},
+ dictWord{10, 11, 479},
+ dictWord{12, 11, 418},
+ dictWord{14, 11, 50},
+ dictWord{14, 11, 249},
+ dictWord{142, 11, 295},
+ dictWord{7, 11, 1470},
+ dictWord{8, 11, 66},
+ dictWord{8, 11, 137},
+ dictWord{8, 11, 761},
+ dictWord{9, 11, 638},
+ dictWord{11, 11, 80},
+ dictWord{11, 11, 212},
+ dictWord{11, 11, 368},
+ dictWord{11, 11, 418},
+ dictWord{12, 11, 8},
+ dictWord{13, 11, 15},
+ dictWord{16, 11, 61},
+ dictWord{17, 11, 59},
+ dictWord{19, 11, 28},
+ dictWord{148, 11, 84},
+ dictWord{139, 10, 1015},
+ dictWord{138, 11, 468},
+ dictWord{135, 0, 421},
+ dictWord{6, 0, 415},
+ dictWord{7, 0, 1049},
+ dictWord{137, 0, 442},
+ dictWord{6, 11, 38},
+ dictWord{7, 11, 1220},
+ dictWord{8, 11, 185},
+ dictWord{8, 11, 256},
+ dictWord{9, 11, 22},
+ dictWord{9, 11, 331},
+ dictWord{10, 11, 738},
+ dictWord{11, 11, 205},
+ dictWord{11, 11, 540},
+ dictWord{11, 11, 746},
+ dictWord{13, 11, 399},
+ dictWord{13, 11, 465},
+ dictWord{14, 11, 88},
+ dictWord{142, 11, 194},
+ dictWord{139, 0, 289},
+ dictWord{133, 10, 715},
+ dictWord{4, 0, 110},
+ dictWord{10, 0, 415},
+ dictWord{10, 0, 597},
+ dictWord{142, 0, 206},
+ dictWord{4, 11, 159},
+ dictWord{6, 11, 115},
+ dictWord{7, 11, 252},
+ dictWord{7, 11, 257},
+ dictWord{7, 11, 1928},
+ dictWord{8, 11, 69},
+ dictWord{9, 11, 384},
+ dictWord{10, 11, 91},
+ dictWord{10, 11, 615},
+ dictWord{12, 11, 375},
+ dictWord{14, 11, 235},
+ dictWord{18, 11, 117},
+ dictWord{147, 11, 123},
+ dictWord{5, 11, 911},
+ dictWord{136, 11, 278},
+ dictWord{7, 0, 205},
+ dictWord{7, 0, 2000},
+ dictWord{8, 10, 794},
+ dictWord{9, 10, 400},
+ dictWord{10, 10, 298},
+ dictWord{142, 10, 228},
+ dictWord{135, 11, 1774},
+ dictWord{4, 11, 151},
+ dictWord{7, 11, 1567},
+ dictWord{8, 11, 351},
+ dictWord{137, 11, 322},
+ dictWord{136, 10, 724},
+ dictWord{133, 11, 990},
+ dictWord{7, 0, 1539},
+ dictWord{11, 0, 512},
+ dictWord{13, 0, 205},
+ dictWord{19, 0, 30},
+ dictWord{22, 0, 36},
+ dictWord{23, 0, 19},
+ dictWord{135, 11, 1539},
+ dictWord{5, 11, 194},
+ dictWord{7, 11, 1662},
+ dictWord{9, 11, 90},
+ dictWord{140, 11, 180},
+ dictWord{6, 10, 190},
+ dictWord{7, 10, 768},
+ dictWord{135, 10, 1170},
+ dictWord{134, 0, 1340},
+ dictWord{4, 0, 283},
+ dictWord{135, 0, 1194},
+ dictWord{133, 11, 425},
+ dictWord{133, 11, 971},
+ dictWord{12, 0, 549},
+ dictWord{14, 10, 67},
+ dictWord{147, 10, 60},
+ dictWord{135, 10, 1023},
+ dictWord{134, 0, 1720},
+ dictWord{138, 11, 587},
+ dictWord{5, 11, 72},
+ dictWord{6, 11, 264},
+ dictWord{7, 11, 21},
+ dictWord{7, 11, 46},
+ dictWord{7, 11, 2013},
+ dictWord{8, 11, 215},
+ dictWord{8, 11, 513},
+ dictWord{10, 11, 266},
+ dictWord{139, 11, 22},
+ dictWord{5, 0, 319},
+ dictWord{135, 0, 534},
+ dictWord{6, 10, 137},
+ dictWord{9, 10, 75},
+ dictWord{9, 10, 253},
+ dictWord{10, 10, 194},
+ dictWord{138, 10, 444},
+ dictWord{7, 0, 1180},
+ dictWord{20, 0, 112},
+ dictWord{6, 11, 239},
+ dictWord{7, 11, 118},
+ dictWord{10, 11, 95},
+ dictWord{11, 11, 603},
+ dictWord{13, 11, 443},
+ dictWord{14, 11, 160},
+ dictWord{143, 11, 4},
+ dictWord{134, 11, 431},
+ dictWord{5, 11, 874},
+ dictWord{6, 11, 1677},
+ dictWord{11, 10, 643},
+ dictWord{12, 10, 115},
+ dictWord{143, 11, 0},
+ dictWord{134, 0, 967},
+ dictWord{6, 11, 65},
+ dictWord{7, 11, 939},
+ dictWord{7, 11, 1172},
+ dictWord{7, 11, 1671},
+ dictWord{9, 11, 540},
+ dictWord{10, 11, 696},
+ dictWord{11, 11, 265},
+ dictWord{11, 11, 732},
+ dictWord{11, 11, 928},
+ dictWord{11, 11, 937},
+ dictWord{12, 11, 399},
+ dictWord{13, 11, 438},
+ dictWord{149, 11, 19},
+ dictWord{137, 11, 200},
+ dictWord{135, 0, 1940},
+ dictWord{5, 10, 760},
+ dictWord{7, 10, 542},
+ dictWord{8, 10, 135},
+ dictWord{136, 10, 496},
+ dictWord{140, 11, 44},
+ dictWord{7, 11, 1655},
+ dictWord{136, 11, 305},
+ dictWord{7, 10, 319},
+ dictWord{7, 10, 355},
+ dictWord{7, 10, 763},
+ dictWord{10, 10, 389},
+ dictWord{145, 10, 43},
+ dictWord{136, 0, 735},
+ dictWord{138, 10, 786},
+ dictWord{137, 11, 19},
+ dictWord{132, 11, 696},
+ dictWord{5, 0, 132},
+ dictWord{9, 0, 486},
+ dictWord{9, 0, 715},
+ dictWord{10, 0, 458},
+ dictWord{11, 0, 373},
+ dictWord{11, 0, 668},
+ dictWord{11, 0, 795},
+ dictWord{11, 0, 897},
+ dictWord{12, 0, 272},
+ dictWord{12, 0, 424},
+ dictWord{12, 0, 539},
+ dictWord{12, 0, 558},
+ dictWord{14, 0, 245},
+ dictWord{14, 0, 263},
+ dictWord{14, 0, 264},
+ dictWord{14, 0, 393},
+ dictWord{142, 0, 403},
+ dictWord{10, 0, 38},
+ dictWord{139, 0, 784},
+ dictWord{132, 0, 838},
+ dictWord{4, 11, 302},
+ dictWord{135, 11, 1766},
+ dictWord{133, 0, 379},
+ dictWord{5, 0, 8},
+ dictWord{6, 0, 89},
+ dictWord{6, 0, 400},
+ dictWord{7, 0, 1569},
+ dictWord{7, 0, 1623},
+ dictWord{7, 0, 1850},
+ dictWord{8, 0, 218},
+ dictWord{8, 0, 422},
+ dictWord{9, 0, 570},
+ dictWord{10, 0, 626},
+ dictWord{4, 11, 726},
+ dictWord{133, 11, 630},
+ dictWord{4, 0, 1017},
+ dictWord{138, 0, 660},
+ dictWord{6, 0, 387},
+ dictWord{7, 0, 882},
+ dictWord{141, 0, 111},
+ dictWord{6, 0, 224},
+ dictWord{7, 0, 877},
+ dictWord{137, 0, 647},
+ dictWord{4, 10, 58},
+ dictWord{5, 10, 286},
+ dictWord{6, 10, 319},
+ dictWord{7, 10, 402},
+ dictWord{7, 10, 1254},
+ dictWord{7, 10, 1903},
+ dictWord{8, 10, 356},
+ dictWord{140, 10, 408},
+ dictWord{135, 0, 790},
+ dictWord{9, 0, 510},
+ dictWord{10, 0, 53},
+ dictWord{4, 10, 389},
+ dictWord{9, 10, 181},
+ dictWord{10, 10, 29},
+ dictWord{10, 10, 816},
+ dictWord{11, 10, 311},
+ dictWord{11, 10, 561},
+ dictWord{12, 10, 67},
+ dictWord{141, 10, 181},
+ dictWord{142, 0, 458},
+ dictWord{6, 11, 118},
+ dictWord{7, 11, 215},
+ dictWord{7, 11, 1521},
+ dictWord{140, 11, 11},
+ dictWord{134, 0, 954},
+ dictWord{135, 0, 394},
+ dictWord{134, 0, 1367},
+ dictWord{5, 11, 225},
+ dictWord{133, 10, 373},
+ dictWord{132, 0, 882},
+ dictWord{7, 0, 1409},
+ dictWord{135, 10, 1972},
+ dictWord{135, 10, 1793},
+ dictWord{4, 11, 370},
+ dictWord{5, 11, 756},
+ dictWord{135, 11, 1326},
+ dictWord{150, 11, 13},
+ dictWord{7, 11, 354},
+ dictWord{10, 11, 410},
+ dictWord{139, 11, 815},
+ dictWord{6, 11, 1662},
+ dictWord{7, 11, 48},
+ dictWord{8, 11, 771},
+ dictWord{10, 11, 116},
+ dictWord{13, 11, 104},
+ dictWord{14, 11, 105},
+ dictWord{14, 11, 184},
+ dictWord{15, 11, 168},
+ dictWord{19, 11, 92},
+ dictWord{148, 11, 68},
+ dictWord{7, 0, 124},
+ dictWord{136, 0, 38},
+ dictWord{5, 0, 261},
+ dictWord{7, 0, 78},
+ dictWord{7, 0, 199},
+ dictWord{8, 0, 815},
+ dictWord{9, 0, 126},
+ dictWord{10, 0, 342},
+ dictWord{140, 0, 647},
+ dictWord{4, 0, 628},
+ dictWord{140, 0, 724},
+ dictWord{7, 0, 266},
+ dictWord{8, 0, 804},
+ dictWord{7, 10, 1651},
+ dictWord{145, 10, 89},
+ dictWord{135, 0, 208},
+ dictWord{134, 0, 1178},
+ dictWord{6, 0, 79},
+ dictWord{135, 0, 1519},
+ dictWord{132, 10, 672},
+ dictWord{133, 10, 737},
+ dictWord{136, 0, 741},
+ dictWord{132, 11, 120},
+ dictWord{4, 0, 710},
+ dictWord{6, 0, 376},
+ dictWord{134, 0, 606},
+ dictWord{134, 0, 1347},
+ dictWord{134, 0, 1494},
+ dictWord{6, 0, 850},
+ dictWord{6, 0, 1553},
+ dictWord{137, 0, 821},
+ dictWord{5, 10, 145},
+ dictWord{134, 11, 593},
+ dictWord{7, 0, 1311},
+ dictWord{140, 0, 135},
+ dictWord{4, 0, 467},
+ dictWord{5, 0, 405},
+ dictWord{134, 0, 544},
+ dictWord{5, 11, 820},
+ dictWord{135, 11, 931},
+ dictWord{6, 0, 100},
+ dictWord{7, 0, 244},
+ dictWord{7, 0, 632},
+ dictWord{7, 0, 1609},
+ dictWord{8, 0, 178},
+ dictWord{8, 0, 638},
+ dictWord{141, 0, 58},
+ dictWord{4, 10, 387},
+ dictWord{135, 10, 1288},
+ dictWord{6, 11, 151},
+ dictWord{6, 11, 1675},
+ dictWord{7, 11, 383},
+ dictWord{151, 11, 10},
+ dictWord{132, 0, 481},
+ dictWord{135, 10, 550},
+ dictWord{134, 0, 1378},
+ dictWord{6, 11, 1624},
+ dictWord{11, 11, 11},
+ dictWord{12, 11, 422},
+ dictWord{13, 11, 262},
+ dictWord{142, 11, 360},
+ dictWord{133, 0, 791},
+ dictWord{4, 11, 43},
+ dictWord{5, 11, 344},
+ dictWord{133, 11, 357},
+ dictWord{7, 0, 1227},
+ dictWord{140, 0, 978},
+ dictWord{7, 0, 686},
+ dictWord{8, 0, 33},
+ dictWord{8, 0, 238},
+ dictWord{10, 0, 616},
+ dictWord{11, 0, 467},
+ dictWord{11, 0, 881},
+ dictWord{13, 0, 217},
+ dictWord{13, 0, 253},
+ dictWord{142, 0, 268},
+ dictWord{137, 0, 857},
+ dictWord{8, 0, 467},
+ dictWord{8, 0, 1006},
+ dictWord{7, 11, 148},
+ dictWord{8, 11, 284},
+ dictWord{141, 11, 63},
+ dictWord{4, 10, 576},
+ dictWord{135, 10, 1263},
+ dictWord{133, 11, 888},
+ dictWord{5, 10, 919},
+ dictWord{134, 10, 1673},
+ dictWord{20, 10, 37},
+ dictWord{148, 11, 37},
+ dictWord{132, 0, 447},
+ dictWord{132, 11, 711},
+ dictWord{4, 0, 128},
+ dictWord{5, 0, 415},
+ dictWord{6, 0, 462},
+ dictWord{7, 0, 294},
+ dictWord{7, 0, 578},
+ dictWord{10, 0, 710},
+ dictWord{139, 0, 86},
+ dictWord{4, 10, 82},
+ dictWord{5, 10, 333},
+ dictWord{5, 10, 904},
+ dictWord{6, 10, 207},
+ dictWord{7, 10, 325},
+ dictWord{7, 10, 1726},
+ dictWord{8, 10, 101},
+ dictWord{10, 10, 778},
+ dictWord{139, 10, 220},
+ dictWord{136, 0, 587},
+ dictWord{137, 11, 440},
+ dictWord{133, 10, 903},
+ dictWord{6, 0, 427},
+ dictWord{7, 0, 1018},
+ dictWord{138, 0, 692},
+ dictWord{4, 0, 195},
+ dictWord{135, 0, 802},
+ dictWord{140, 10, 147},
+ dictWord{134, 0, 1546},
+ dictWord{134, 0, 684},
+ dictWord{132, 10, 705},
+ dictWord{136, 0, 345},
+ dictWord{11, 11, 678},
+ dictWord{140, 11, 307},
+ dictWord{133, 0, 365},
+ dictWord{134, 0, 1683},
+ dictWord{4, 11, 65},
+ dictWord{5, 11, 479},
+ dictWord{5, 11, 1004},
+ dictWord{7, 11, 1913},
+ dictWord{8, 11, 317},
+ dictWord{9, 11, 302},
+ dictWord{10, 11, 612},
+ dictWord{141, 11, 22},
+ dictWord{138, 0, 472},
+ dictWord{4, 11, 261},
+ dictWord{135, 11, 510},
+ dictWord{134, 10, 90},
+ dictWord{142, 0, 433},
+ dictWord{151, 0, 28},
+ dictWord{4, 11, 291},
+ dictWord{7, 11, 101},
+ dictWord{9, 11, 515},
+ dictWord{12, 11, 152},
+ dictWord{12, 11, 443},
+ dictWord{13, 11, 392},
+ dictWord{142, 11, 357},
+ dictWord{140, 0, 997},
+ dictWord{5, 0, 3},
+ dictWord{8, 0, 578},
+ dictWord{9, 0, 118},
+ dictWord{10, 0, 705},
+ dictWord{141, 0, 279},
+ dictWord{135, 11, 1266},
+ dictWord{7, 10, 813},
+ dictWord{12, 10, 497},
+ dictWord{141, 10, 56},
+ dictWord{133, 0, 229},
+ dictWord{6, 10, 125},
+ dictWord{135, 10, 1277},
+ dictWord{8, 0, 102},
+ dictWord{10, 0, 578},
+ dictWord{10, 0, 672},
+ dictWord{12, 0, 496},
+ dictWord{13, 0, 408},
+ dictWord{14, 0, 121},
+ dictWord{17, 0, 106},
+ dictWord{151, 10, 12},
+ dictWord{6, 0, 866},
+ dictWord{134, 0, 1080},
+ dictWord{136, 0, 1022},
+ dictWord{4, 11, 130},
+ dictWord{135, 11, 843},
+ dictWord{5, 11, 42},
+ dictWord{5, 11, 879},
+ dictWord{7, 11, 245},
+ dictWord{7, 11, 324},
+ dictWord{7, 11, 1532},
+ dictWord{11, 11, 463},
+ dictWord{11, 11, 472},
+ dictWord{13, 11, 363},
+ dictWord{144, 11, 52},
+ dictWord{150, 0, 55},
+ dictWord{8, 0, 115},
+ dictWord{8, 0, 350},
+ dictWord{9, 0, 489},
+ dictWord{10, 0, 128},
+ dictWord{11, 0, 306},
+ dictWord{12, 0, 373},
+ dictWord{14, 0, 30},
+ dictWord{17, 0, 79},
+ dictWord{19, 0, 80},
+ dictWord{4, 11, 134},
+ dictWord{133, 11, 372},
+ dictWord{134, 0, 657},
+ dictWord{134, 0, 933},
+ dictWord{135, 11, 1147},
+ dictWord{4, 0, 230},
+ dictWord{133, 0, 702},
+ dictWord{134, 0, 1728},
+ dictWord{4, 0, 484},
+ dictWord{18, 0, 26},
+ dictWord{19, 0, 42},
+ dictWord{20, 0, 43},
+ dictWord{21, 0, 0},
+ dictWord{23, 0, 27},
+ dictWord{152, 0, 14},
+ dictWord{7, 0, 185},
+ dictWord{135, 0, 703},
+ dictWord{6, 0, 417},
+ dictWord{10, 0, 618},
+ dictWord{7, 10, 1106},
+ dictWord{9, 10, 770},
+ dictWord{11, 10, 112},
+ dictWord{140, 10, 413},
+ dictWord{134, 0, 803},
+ dictWord{132, 11, 644},
+ dictWord{134, 0, 1262},
+ dictWord{7, 11, 540},
+ dictWord{12, 10, 271},
+ dictWord{145, 10, 109},
+ dictWord{135, 11, 123},
+ dictWord{132, 0, 633},
+ dictWord{134, 11, 623},
+ dictWord{4, 11, 908},
+ dictWord{5, 11, 359},
+ dictWord{5, 11, 508},
+ dictWord{6, 11, 1723},
+ dictWord{7, 11, 343},
+ dictWord{7, 11, 1996},
+ dictWord{135, 11, 2026},
+ dictWord{135, 0, 479},
+ dictWord{10, 0, 262},
+ dictWord{7, 10, 304},
+ dictWord{9, 10, 646},
+ dictWord{9, 10, 862},
+ dictWord{11, 10, 696},
+ dictWord{12, 10, 208},
+ dictWord{15, 10, 79},
+ dictWord{147, 10, 108},
+ dictWord{4, 11, 341},
+ dictWord{135, 11, 480},
+ dictWord{134, 0, 830},
+ dictWord{5, 0, 70},
+ dictWord{5, 0, 622},
+ dictWord{6, 0, 334},
+ dictWord{7, 0, 1032},
+ dictWord{9, 0, 171},
+ dictWord{11, 0, 26},
+ dictWord{11, 0, 213},
+ dictWord{11, 0, 637},
+ dictWord{11, 0, 707},
+ dictWord{12, 0, 202},
+ dictWord{12, 0, 380},
+ dictWord{13, 0, 226},
+ dictWord{13, 0, 355},
+ dictWord{14, 0, 222},
+ dictWord{145, 0, 42},
+ dictWord{135, 10, 981},
+ dictWord{143, 0, 217},
+ dictWord{137, 11, 114},
+ dictWord{4, 0, 23},
+ dictWord{4, 0, 141},
+ dictWord{5, 0, 313},
+ dictWord{5, 0, 1014},
+ dictWord{6, 0, 50},
+ dictWord{6, 0, 51},
+ dictWord{7, 0, 142},
+ dictWord{7, 0, 384},
+ dictWord{7, 0, 559},
+ dictWord{8, 0, 640},
+ dictWord{9, 0, 460},
+ dictWord{9, 0, 783},
+ dictWord{11, 0, 741},
+ dictWord{12, 0, 183},
+ dictWord{141, 0, 488},
+ dictWord{141, 0, 360},
+ dictWord{7, 0, 1586},
+ dictWord{7, 11, 1995},
+ dictWord{8, 11, 299},
+ dictWord{11, 11, 890},
+ dictWord{140, 11, 674},
+ dictWord{132, 10, 434},
+ dictWord{7, 0, 652},
+ dictWord{134, 10, 550},
+ dictWord{7, 0, 766},
+ dictWord{5, 10, 553},
+ dictWord{138, 10, 824},
+ dictWord{7, 0, 737},
+ dictWord{8, 0, 298},
+ dictWord{136, 10, 452},
+ dictWord{4, 11, 238},
+ dictWord{5, 11, 503},
+ dictWord{6, 11, 179},
+ dictWord{7, 11, 2003},
+ dictWord{8, 11, 381},
+ dictWord{8, 11, 473},
+ dictWord{9, 11, 149},
+ dictWord{10, 11, 183},
+ dictWord{15, 11, 45},
+ dictWord{143, 11, 86},
+ dictWord{133, 10, 292},
+ dictWord{5, 0, 222},
+ dictWord{9, 0, 655},
+ dictWord{138, 0, 534},
+ dictWord{138, 10, 135},
+ dictWord{4, 11, 121},
+ dictWord{5, 11, 156},
+ dictWord{5, 11, 349},
+ dictWord{9, 11, 136},
+ dictWord{10, 11, 605},
+ dictWord{14, 11, 342},
+ dictWord{147, 11, 107},
+ dictWord{137, 0, 906},
+ dictWord{6, 0, 1013},
+ dictWord{134, 0, 1250},
+ dictWord{6, 0, 1956},
+ dictWord{6, 0, 2009},
+ dictWord{8, 0, 991},
+ dictWord{144, 0, 120},
+ dictWord{135, 11, 1192},
+ dictWord{138, 0, 503},
+ dictWord{5, 0, 154},
+ dictWord{7, 0, 1491},
+ dictWord{10, 0, 379},
+ dictWord{138, 0, 485},
+ dictWord{6, 0, 1867},
+ dictWord{6, 0, 1914},
+ dictWord{6, 0, 1925},
+ dictWord{9, 0, 917},
+ dictWord{9, 0, 925},
+ dictWord{9, 0, 932},
+ dictWord{9, 0, 951},
+ dictWord{9, 0, 1007},
+ dictWord{9, 0, 1013},
+ dictWord{12, 0, 806},
+ dictWord{12, 0, 810},
+ dictWord{12, 0, 814},
+ dictWord{12, 0, 816},
+ dictWord{12, 0, 824},
+ dictWord{12, 0, 832},
+ dictWord{12, 0, 837},
+ dictWord{12, 0, 863},
+ dictWord{12, 0, 868},
+ dictWord{12, 0, 870},
+ dictWord{12, 0, 889},
+ dictWord{12, 0, 892},
+ dictWord{12, 0, 900},
+ dictWord{12, 0, 902},
+ dictWord{12, 0, 908},
+ dictWord{12, 0, 933},
+ dictWord{12, 0, 942},
+ dictWord{12, 0, 949},
+ dictWord{12, 0, 954},
+ dictWord{15, 0, 175},
+ dictWord{15, 0, 203},
+ dictWord{15, 0, 213},
+ dictWord{15, 0, 218},
+ dictWord{15, 0, 225},
+ dictWord{15, 0, 231},
+ dictWord{15, 0, 239},
+ dictWord{15, 0, 248},
+ dictWord{15, 0, 252},
+ dictWord{18, 0, 190},
+ dictWord{18, 0, 204},
+ dictWord{18, 0, 215},
+ dictWord{18, 0, 216},
+ dictWord{18, 0, 222},
+ dictWord{18, 0, 225},
+ dictWord{18, 0, 230},
+ dictWord{18, 0, 239},
+ dictWord{18, 0, 241},
+ dictWord{21, 0, 42},
+ dictWord{21, 0, 43},
+ dictWord{21, 0, 44},
+ dictWord{21, 0, 45},
+ dictWord{21, 0, 46},
+ dictWord{21, 0, 53},
+ dictWord{24, 0, 27},
+ dictWord{152, 0, 31},
+ dictWord{133, 0, 716},
+ dictWord{135, 0, 844},
+ dictWord{4, 0, 91},
+ dictWord{5, 0, 388},
+ dictWord{5, 0, 845},
+ dictWord{6, 0, 206},
+ dictWord{6, 0, 252},
+ dictWord{6, 0, 365},
+ dictWord{7, 0, 136},
+ dictWord{7, 0, 531},
+ dictWord{136, 0, 621},
+ dictWord{7, 10, 393},
+ dictWord{10, 10, 603},
+ dictWord{139, 10, 206},
+ dictWord{6, 11, 80},
+ dictWord{6, 11, 1694},
+ dictWord{7, 11, 173},
+ dictWord{7, 11, 1974},
+ dictWord{9, 11, 547},
+ dictWord{10, 11, 730},
+ dictWord{14, 11, 18},
+ dictWord{150, 11, 39},
+ dictWord{137, 0, 748},
+ dictWord{4, 11, 923},
+ dictWord{134, 11, 1711},
+ dictWord{4, 10, 912},
+ dictWord{137, 10, 232},
+ dictWord{7, 10, 98},
+ dictWord{7, 10, 1973},
+ dictWord{136, 10, 716},
+ dictWord{14, 0, 103},
+ dictWord{133, 10, 733},
+ dictWord{132, 11, 595},
+ dictWord{12, 0, 158},
+ dictWord{18, 0, 8},
+ dictWord{19, 0, 62},
+ dictWord{20, 0, 6},
+ dictWord{22, 0, 4},
+ dictWord{23, 0, 2},
+ dictWord{23, 0, 9},
+ dictWord{5, 11, 240},
+ dictWord{6, 11, 459},
+ dictWord{7, 11, 12},
+ dictWord{7, 11, 114},
+ dictWord{7, 11, 502},
+ dictWord{7, 11, 1751},
+ dictWord{7, 11, 1753},
+ dictWord{7, 11, 1805},
+ dictWord{8, 11, 658},
+ dictWord{9, 11, 1},
+ dictWord{11, 11, 959},
+ dictWord{13, 11, 446},
+ dictWord{142, 11, 211},
+ dictWord{135, 0, 576},
+ dictWord{5, 0, 771},
+ dictWord{5, 0, 863},
+ dictWord{5, 0, 898},
+ dictWord{6, 0, 648},
+ dictWord{6, 0, 1632},
+ dictWord{6, 0, 1644},
+ dictWord{134, 0, 1780},
+ dictWord{133, 0, 331},
+ dictWord{7, 11, 633},
+ dictWord{7, 11, 905},
+ dictWord{7, 11, 909},
+ dictWord{7, 11, 1538},
+ dictWord{9, 11, 767},
+ dictWord{140, 11, 636},
+ dictWord{140, 0, 632},
+ dictWord{5, 0, 107},
+ dictWord{7, 0, 201},
+ dictWord{136, 0, 518},
+ dictWord{6, 0, 446},
+ dictWord{7, 0, 1817},
+ dictWord{134, 11, 490},
+ dictWord{9, 0, 851},
+ dictWord{141, 0, 510},
+ dictWord{7, 11, 250},
+ dictWord{8, 11, 506},
+ dictWord{136, 11, 507},
+ dictWord{4, 0, 504},
+ dictWord{137, 10, 72},
+ dictWord{132, 11, 158},
+ dictWord{4, 11, 140},
+ dictWord{7, 11, 362},
+ dictWord{8, 11, 209},
+ dictWord{9, 11, 10},
+ dictWord{9, 11, 160},
+ dictWord{9, 11, 503},
+ dictWord{10, 11, 689},
+ dictWord{11, 11, 350},
+ dictWord{11, 11, 553},
+ dictWord{11, 11, 725},
+ dictWord{12, 11, 252},
+ dictWord{12, 11, 583},
+ dictWord{13, 11, 192},
+ dictWord{13, 11, 352},
+ dictWord{14, 11, 269},
+ dictWord{14, 11, 356},
+ dictWord{148, 11, 50},
+ dictWord{6, 11, 597},
+ dictWord{135, 11, 1318},
+ dictWord{135, 10, 1454},
+ dictWord{5, 0, 883},
+ dictWord{5, 0, 975},
+ dictWord{8, 0, 392},
+ dictWord{148, 0, 7},
+ dictWord{6, 11, 228},
+ dictWord{7, 11, 1341},
+ dictWord{9, 11, 408},
+ dictWord{138, 11, 343},
+ dictWord{11, 11, 348},
+ dictWord{11, 10, 600},
+ dictWord{12, 11, 99},
+ dictWord{13, 10, 245},
+ dictWord{18, 11, 1},
+ dictWord{18, 11, 11},
+ dictWord{147, 11, 4},
+ dictWord{134, 11, 296},
+ dictWord{5, 0, 922},
+ dictWord{134, 0, 1707},
+ dictWord{132, 11, 557},
+ dictWord{4, 11, 548},
+ dictWord{7, 10, 164},
+ dictWord{7, 10, 1571},
+ dictWord{9, 10, 107},
+ dictWord{140, 10, 225},
+ dictWord{7, 11, 197},
+ dictWord{8, 11, 142},
+ dictWord{8, 11, 325},
+ dictWord{9, 11, 150},
+ dictWord{9, 11, 596},
+ dictWord{10, 11, 350},
+ dictWord{10, 11, 353},
+ dictWord{11, 11, 74},
+ dictWord{11, 11, 315},
+ dictWord{14, 11, 423},
+ dictWord{143, 11, 141},
+ dictWord{5, 0, 993},
+ dictWord{7, 0, 515},
+ dictWord{137, 0, 91},
+ dictWord{4, 0, 131},
+ dictWord{8, 0, 200},
+ dictWord{5, 10, 484},
+ dictWord{5, 10, 510},
+ dictWord{6, 10, 434},
+ dictWord{7, 10, 1000},
+ dictWord{7, 10, 1098},
+ dictWord{136, 10, 2},
+ dictWord{152, 0, 10},
+ dictWord{4, 11, 62},
+ dictWord{5, 11, 83},
+ dictWord{6, 11, 399},
+ dictWord{6, 11, 579},
+ dictWord{7, 11, 692},
+ dictWord{7, 11, 846},
+ dictWord{7, 11, 1015},
+ dictWord{7, 11, 1799},
+ dictWord{8, 11, 403},
+ dictWord{9, 11, 394},
+ dictWord{10, 11, 133},
+ dictWord{12, 11, 4},
+ dictWord{12, 11, 297},
+ dictWord{12, 11, 452},
+ dictWord{16, 11, 81},
+ dictWord{18, 11, 19},
+ dictWord{18, 11, 25},
+ dictWord{21, 11, 14},
+ dictWord{22, 11, 12},
+ dictWord{151, 11, 18},
+ dictWord{140, 11, 459},
+ dictWord{132, 11, 177},
+ dictWord{7, 0, 1433},
+ dictWord{9, 0, 365},
+ dictWord{137, 11, 365},
+ dictWord{132, 10, 460},
+ dictWord{5, 0, 103},
+ dictWord{6, 0, 2004},
+ dictWord{7, 0, 921},
+ dictWord{8, 0, 580},
+ dictWord{8, 0, 593},
+ dictWord{8, 0, 630},
+ dictWord{10, 0, 28},
+ dictWord{5, 11, 411},
+ dictWord{135, 11, 653},
+ dictWord{4, 10, 932},
+ dictWord{133, 10, 891},
+ dictWord{4, 0, 911},
+ dictWord{5, 0, 867},
+ dictWord{5, 0, 1013},
+ dictWord{7, 0, 2034},
+ dictWord{8, 0, 798},
+ dictWord{136, 0, 813},
+ dictWord{7, 11, 439},
+ dictWord{10, 11, 727},
+ dictWord{11, 11, 260},
+ dictWord{139, 11, 684},
+ dictWord{136, 10, 625},
+ dictWord{5, 11, 208},
+ dictWord{7, 11, 753},
+ dictWord{135, 11, 1528},
+ dictWord{5, 0, 461},
+ dictWord{7, 0, 1925},
+ dictWord{12, 0, 39},
+ dictWord{13, 0, 265},
+ dictWord{13, 0, 439},
+ dictWord{134, 10, 76},
+ dictWord{6, 0, 853},
+ dictWord{8, 10, 92},
+ dictWord{137, 10, 221},
+ dictWord{5, 0, 135},
+ dictWord{6, 0, 519},
+ dictWord{7, 0, 1722},
+ dictWord{10, 0, 271},
+ dictWord{11, 0, 261},
+ dictWord{145, 0, 54},
+ dictWord{139, 11, 814},
+ dictWord{14, 0, 338},
+ dictWord{148, 0, 81},
+ dictWord{4, 0, 300},
+ dictWord{133, 0, 436},
+ dictWord{5, 0, 419},
+ dictWord{5, 0, 687},
+ dictWord{7, 0, 864},
+ dictWord{9, 0, 470},
+ dictWord{135, 11, 864},
+ dictWord{9, 0, 836},
+ dictWord{133, 11, 242},
+ dictWord{134, 0, 1937},
+ dictWord{4, 10, 763},
+ dictWord{133, 11, 953},
+ dictWord{132, 10, 622},
+ dictWord{132, 0, 393},
+ dictWord{133, 10, 253},
+ dictWord{8, 0, 357},
+ dictWord{10, 0, 745},
+ dictWord{14, 0, 426},
+ dictWord{17, 0, 94},
+ dictWord{19, 0, 57},
+ dictWord{135, 10, 546},
+ dictWord{5, 11, 615},
+ dictWord{146, 11, 37},
+ dictWord{9, 10, 73},
+ dictWord{10, 10, 110},
+ dictWord{14, 10, 185},
+ dictWord{145, 10, 119},
+ dictWord{11, 0, 703},
+ dictWord{7, 10, 624},
+ dictWord{7, 10, 916},
+ dictWord{10, 10, 256},
+ dictWord{139, 10, 87},
+ dictWord{133, 11, 290},
+ dictWord{5, 10, 212},
+ dictWord{12, 10, 35},
+ dictWord{141, 10, 382},
+ dictWord{132, 11, 380},
+ dictWord{5, 11, 52},
+ dictWord{7, 11, 277},
+ dictWord{9, 11, 368},
+ dictWord{139, 11, 791},
+ dictWord{133, 0, 387},
+ dictWord{10, 11, 138},
+ dictWord{139, 11, 476},
+ dictWord{4, 0, 6},
+ dictWord{5, 0, 708},
+ dictWord{136, 0, 75},
+ dictWord{7, 0, 1351},
+ dictWord{9, 0, 581},
+ dictWord{10, 0, 639},
+ dictWord{11, 0, 453},
+ dictWord{140, 0, 584},
+ dictWord{132, 0, 303},
+ dictWord{138, 0, 772},
+ dictWord{135, 10, 1175},
+ dictWord{4, 0, 749},
+ dictWord{5, 10, 816},
+ dictWord{6, 11, 256},
+ dictWord{7, 11, 307},
+ dictWord{7, 11, 999},
+ dictWord{7, 11, 1481},
+ dictWord{7, 11, 1732},
+ dictWord{7, 11, 1738},
+ dictWord{8, 11, 265},
+ dictWord{9, 11, 414},
+ dictWord{11, 11, 316},
+ dictWord{12, 11, 52},
+ dictWord{13, 11, 420},
+ dictWord{147, 11, 100},
+ dictWord{135, 11, 1296},
+ dictWord{6, 0, 1065},
+ dictWord{5, 10, 869},
+ dictWord{5, 10, 968},
+ dictWord{6, 10, 1626},
+ dictWord{8, 10, 734},
+ dictWord{136, 10, 784},
+ dictWord{4, 10, 542},
+ dictWord{6, 10, 1716},
+ dictWord{6, 10, 1727},
+ dictWord{7, 10, 1082},
+ dictWord{7, 10, 1545},
+ dictWord{8, 10, 56},
+ dictWord{8, 10, 118},
+ dictWord{8, 10, 412},
+ dictWord{8, 10, 564},
+ dictWord{9, 10, 888},
+ dictWord{9, 10, 908},
+ dictWord{10, 10, 50},
+ dictWord{10, 10, 423},
+ dictWord{11, 10, 685},
+ dictWord{11, 10, 697},
+ dictWord{11, 10, 933},
+ dictWord{12, 10, 299},
+ dictWord{13, 10, 126},
+ dictWord{13, 10, 136},
+ dictWord{13, 10, 170},
+ dictWord{141, 10, 190},
+ dictWord{134, 0, 226},
+ dictWord{4, 0, 106},
+ dictWord{7, 0, 310},
+ dictWord{11, 0, 717},
+ dictWord{133, 11, 723},
+ dictWord{5, 0, 890},
+ dictWord{5, 0, 988},
+ dictWord{4, 10, 232},
+ dictWord{9, 10, 202},
+ dictWord{10, 10, 474},
+ dictWord{140, 10, 433},
+ dictWord{6, 0, 626},
+ dictWord{142, 0, 431},
+ dictWord{10, 0, 706},
+ dictWord{150, 0, 44},
+ dictWord{13, 0, 51},
+ dictWord{6, 10, 108},
+ dictWord{7, 10, 1003},
+ dictWord{7, 10, 1181},
+ dictWord{8, 10, 111},
+ dictWord{136, 10, 343},
+ dictWord{132, 0, 698},
+ dictWord{5, 11, 109},
+ dictWord{6, 11, 1784},
+ dictWord{7, 11, 1895},
+ dictWord{12, 11, 296},
+ dictWord{140, 11, 302},
+ dictWord{134, 0, 828},
+ dictWord{134, 10, 1712},
+ dictWord{138, 0, 17},
+ dictWord{7, 0, 1929},
+ dictWord{4, 10, 133},
+ dictWord{5, 11, 216},
+ dictWord{7, 10, 711},
+ dictWord{7, 10, 1298},
+ dictWord{7, 10, 1585},
+ dictWord{7, 11, 1879},
+ dictWord{9, 11, 141},
+ dictWord{9, 11, 270},
+ dictWord{9, 11, 679},
+ dictWord{10, 11, 159},
+ dictWord{10, 11, 553},
+ dictWord{11, 11, 197},
+ dictWord{11, 11, 438},
+ dictWord{12, 11, 538},
+ dictWord{12, 11, 559},
+ dictWord{13, 11, 193},
+ dictWord{13, 11, 423},
+ dictWord{14, 11, 144},
+ dictWord{14, 11, 166},
+ dictWord{14, 11, 167},
+ dictWord{15, 11, 67},
+ dictWord{147, 11, 84},
+ dictWord{141, 11, 127},
+ dictWord{7, 11, 1872},
+ dictWord{137, 11, 81},
+ dictWord{6, 10, 99},
+ dictWord{7, 10, 1808},
+ dictWord{145, 10, 57},
+ dictWord{134, 11, 391},
+ dictWord{5, 0, 689},
+ dictWord{6, 0, 84},
+ dictWord{7, 0, 1250},
+ dictWord{6, 10, 574},
+ dictWord{7, 10, 428},
+ dictWord{10, 10, 669},
+ dictWord{11, 10, 485},
+ dictWord{11, 10, 840},
+ dictWord{12, 10, 300},
+ dictWord{142, 10, 250},
+ dictWord{7, 11, 322},
+ dictWord{136, 11, 249},
+ dictWord{7, 11, 432},
+ dictWord{135, 11, 1649},
+ dictWord{135, 10, 1871},
+ dictWord{137, 10, 252},
+ dictWord{6, 11, 155},
+ dictWord{140, 11, 234},
+ dictWord{7, 0, 871},
+ dictWord{19, 0, 27},
+ dictWord{147, 11, 27},
+ dictWord{140, 0, 498},
+ dictWord{5, 0, 986},
+ dictWord{6, 0, 130},
+ dictWord{138, 0, 823},
+ dictWord{6, 0, 1793},
+ dictWord{7, 0, 1582},
+ dictWord{8, 0, 458},
+ dictWord{10, 0, 101},
+ dictWord{10, 0, 318},
+ dictWord{10, 0, 945},
+ dictWord{12, 0, 734},
+ dictWord{16, 0, 104},
+ dictWord{18, 0, 177},
+ dictWord{6, 10, 323},
+ dictWord{135, 10, 1564},
+ dictWord{5, 11, 632},
+ dictWord{138, 11, 526},
+ dictWord{10, 0, 435},
+ dictWord{7, 10, 461},
+ dictWord{136, 10, 775},
+ dictWord{6, 11, 144},
+ dictWord{7, 11, 948},
+ dictWord{7, 11, 1042},
+ dictWord{7, 11, 1857},
+ dictWord{8, 11, 235},
+ dictWord{8, 11, 461},
+ dictWord{9, 11, 453},
+ dictWord{9, 11, 530},
+ dictWord{10, 11, 354},
+ dictWord{17, 11, 77},
+ dictWord{19, 11, 99},
+ dictWord{148, 11, 79},
+ dictWord{138, 0, 966},
+ dictWord{7, 0, 1644},
+ dictWord{137, 0, 129},
+ dictWord{135, 0, 997},
+ dictWord{136, 0, 502},
+ dictWord{5, 11, 196},
+ dictWord{6, 11, 486},
+ dictWord{7, 11, 212},
+ dictWord{8, 11, 309},
+ dictWord{136, 11, 346},
+ dictWord{7, 10, 727},
+ dictWord{146, 10, 73},
+ dictWord{132, 0, 823},
+ dictWord{132, 11, 686},
+ dictWord{135, 0, 1927},
+ dictWord{4, 0, 762},
+ dictWord{7, 0, 1756},
+ dictWord{137, 0, 98},
+ dictWord{136, 10, 577},
+ dictWord{24, 0, 8},
+ dictWord{4, 11, 30},
+ dictWord{5, 11, 43},
+ dictWord{152, 11, 8},
+ dictWord{7, 0, 1046},
+ dictWord{139, 0, 160},
+ dictWord{7, 0, 492},
+ dictWord{4, 10, 413},
+ dictWord{5, 10, 677},
+ dictWord{7, 11, 492},
+ dictWord{8, 10, 432},
+ dictWord{140, 10, 280},
+ dictWord{6, 0, 45},
+ dictWord{7, 0, 433},
+ dictWord{8, 0, 129},
+ dictWord{9, 0, 21},
+ dictWord{10, 0, 392},
+ dictWord{11, 0, 79},
+ dictWord{12, 0, 499},
+ dictWord{13, 0, 199},
+ dictWord{141, 0, 451},
+ dictWord{7, 0, 558},
+ dictWord{136, 0, 353},
+ dictWord{4, 11, 220},
+ dictWord{7, 11, 1535},
+ dictWord{9, 11, 93},
+ dictWord{139, 11, 474},
+ dictWord{7, 10, 646},
+ dictWord{7, 10, 1730},
+ dictWord{11, 10, 446},
+ dictWord{141, 10, 178},
+ dictWord{133, 0, 785},
+ dictWord{134, 0, 1145},
+ dictWord{8, 0, 81},
+ dictWord{9, 0, 189},
+ dictWord{9, 0, 201},
+ dictWord{11, 0, 478},
+ dictWord{11, 0, 712},
+ dictWord{141, 0, 338},
+ dictWord{5, 0, 353},
+ dictWord{151, 0, 26},
+ dictWord{11, 0, 762},
+ dictWord{132, 10, 395},
+ dictWord{134, 0, 2024},
+ dictWord{4, 0, 611},
+ dictWord{133, 0, 606},
+ dictWord{9, 10, 174},
+ dictWord{10, 10, 164},
+ dictWord{11, 10, 440},
+ dictWord{11, 10, 841},
+ dictWord{143, 10, 98},
+ dictWord{134, 10, 426},
+ dictWord{10, 10, 608},
+ dictWord{139, 10, 1002},
+ dictWord{138, 10, 250},
+ dictWord{6, 0, 25},
+ dictWord{7, 0, 855},
+ dictWord{7, 0, 1258},
+ dictWord{144, 0, 32},
+ dictWord{7, 11, 1725},
+ dictWord{138, 11, 393},
+ dictWord{5, 11, 263},
+ dictWord{134, 11, 414},
+ dictWord{6, 0, 2011},
+ dictWord{133, 10, 476},
+ dictWord{4, 0, 4},
+ dictWord{7, 0, 1118},
+ dictWord{7, 0, 1320},
+ dictWord{7, 0, 1706},
+ dictWord{8, 0, 277},
+ dictWord{9, 0, 622},
+ dictWord{10, 0, 9},
+ dictWord{11, 0, 724},
+ dictWord{12, 0, 350},
+ dictWord{12, 0, 397},
+ dictWord{13, 0, 28},
+ dictWord{13, 0, 159},
+ dictWord{15, 0, 89},
+ dictWord{18, 0, 5},
+ dictWord{19, 0, 9},
+ dictWord{20, 0, 34},
+ dictWord{22, 0, 47},
+ dictWord{6, 11, 178},
+ dictWord{6, 11, 1750},
+ dictWord{8, 11, 251},
+ dictWord{9, 11, 690},
+ dictWord{10, 11, 155},
+ dictWord{10, 11, 196},
+ dictWord{10, 11, 373},
+ dictWord{11, 11, 698},
+ dictWord{13, 11, 155},
+ dictWord{148, 11, 93},
+ dictWord{5, 11, 97},
+ dictWord{137, 11, 393},
+ dictWord{7, 0, 764},
+ dictWord{11, 0, 461},
+ dictWord{12, 0, 172},
+ dictWord{5, 10, 76},
+ dictWord{6, 10, 458},
+ dictWord{6, 10, 497},
+ dictWord{7, 10, 868},
+ dictWord{9, 10, 658},
+ dictWord{10, 10, 594},
+ dictWord{11, 10, 566},
+ dictWord{12, 10, 338},
+ dictWord{141, 10, 200},
+ dictWord{134, 0, 1449},
+ dictWord{138, 11, 40},
+ dictWord{134, 11, 1639},
+ dictWord{134, 0, 1445},
+ dictWord{6, 0, 1168},
+ dictWord{4, 10, 526},
+ dictWord{7, 10, 1029},
+ dictWord{135, 10, 1054},
+ dictWord{4, 11, 191},
+ dictWord{7, 11, 934},
+ dictWord{8, 11, 647},
+ dictWord{145, 11, 97},
+ dictWord{132, 10, 636},
+ dictWord{6, 0, 233},
+ dictWord{7, 10, 660},
+ dictWord{7, 10, 1124},
+ dictWord{17, 10, 31},
+ dictWord{19, 10, 22},
+ dictWord{151, 10, 14},
+ dictWord{6, 10, 1699},
+ dictWord{136, 11, 110},
+ dictWord{12, 11, 246},
+ dictWord{15, 11, 162},
+ dictWord{19, 11, 64},
+ dictWord{20, 11, 8},
+ dictWord{20, 11, 95},
+ dictWord{22, 11, 24},
+ dictWord{152, 11, 17},
+ dictWord{5, 11, 165},
+ dictWord{9, 11, 346},
+ dictWord{138, 11, 655},
+ dictWord{5, 11, 319},
+ dictWord{135, 11, 534},
+ dictWord{134, 0, 255},
+ dictWord{9, 0, 216},
+ dictWord{8, 11, 128},
+ dictWord{139, 11, 179},
+ dictWord{9, 0, 183},
+ dictWord{139, 0, 286},
+ dictWord{11, 0, 956},
+ dictWord{151, 0, 3},
+ dictWord{4, 0, 536},
+ dictWord{7, 0, 1141},
+ dictWord{10, 0, 723},
+ dictWord{139, 0, 371},
+ dictWord{4, 10, 279},
+ dictWord{7, 10, 301},
+ dictWord{137, 10, 362},
+ dictWord{7, 0, 285},
+ dictWord{5, 11, 57},
+ dictWord{6, 11, 101},
+ dictWord{6, 11, 1663},
+ dictWord{7, 11, 132},
+ dictWord{7, 11, 1048},
+ dictWord{7, 11, 1154},
+ dictWord{7, 11, 1415},
+ dictWord{7, 11, 1507},
+ dictWord{12, 11, 493},
+ dictWord{15, 11, 105},
+ dictWord{151, 11, 15},
+ dictWord{5, 11, 459},
+ dictWord{7, 11, 1073},
+ dictWord{7, 10, 1743},
+ dictWord{8, 11, 241},
+ dictWord{136, 11, 334},
+ dictWord{4, 10, 178},
+ dictWord{133, 10, 399},
+ dictWord{135, 0, 560},
+ dictWord{132, 0, 690},
+ dictWord{135, 0, 1246},
+ dictWord{18, 0, 157},
+ dictWord{147, 0, 63},
+ dictWord{10, 0, 599},
+ dictWord{11, 0, 33},
+ dictWord{12, 0, 571},
+ dictWord{149, 0, 1},
+ dictWord{6, 11, 324},
+ dictWord{6, 11, 520},
+ dictWord{7, 11, 338},
+ dictWord{7, 11, 1616},
+ dictWord{7, 11, 1729},
+ dictWord{8, 11, 228},
+ dictWord{9, 11, 69},
+ dictWord{139, 11, 750},
+ dictWord{7, 0, 1862},
+ dictWord{12, 0, 491},
+ dictWord{12, 0, 520},
+ dictWord{13, 0, 383},
+ dictWord{142, 0, 244},
+ dictWord{135, 11, 734},
+ dictWord{134, 10, 1692},
+ dictWord{10, 0, 448},
+ dictWord{11, 0, 630},
+ dictWord{17, 0, 117},
+ dictWord{6, 10, 202},
+ dictWord{7, 11, 705},
+ dictWord{12, 10, 360},
+ dictWord{17, 10, 118},
+ dictWord{18, 10, 27},
+ dictWord{148, 10, 67},
+ dictWord{4, 11, 73},
+ dictWord{6, 11, 612},
+ dictWord{7, 11, 927},
+ dictWord{7, 11, 1822},
+ dictWord{8, 11, 217},
+ dictWord{9, 11, 472},
+ dictWord{9, 11, 765},
+ dictWord{9, 11, 766},
+ dictWord{10, 11, 408},
+ dictWord{11, 11, 51},
+ dictWord{11, 11, 793},
+ dictWord{12, 11, 266},
+ dictWord{15, 11, 158},
+ dictWord{20, 11, 89},
+ dictWord{150, 11, 32},
+ dictWord{4, 0, 190},
+ dictWord{133, 0, 554},
+ dictWord{133, 0, 1001},
+ dictWord{5, 11, 389},
+ dictWord{8, 11, 636},
+ dictWord{137, 11, 229},
+ dictWord{5, 0, 446},
+ dictWord{7, 10, 872},
+ dictWord{10, 10, 516},
+ dictWord{139, 10, 167},
+ dictWord{137, 10, 313},
+ dictWord{132, 10, 224},
+ dictWord{134, 0, 1313},
+ dictWord{5, 10, 546},
+ dictWord{7, 10, 35},
+ dictWord{8, 10, 11},
+ dictWord{8, 10, 12},
+ dictWord{9, 10, 315},
+ dictWord{9, 10, 533},
+ dictWord{10, 10, 802},
+ dictWord{11, 10, 166},
+ dictWord{12, 10, 525},
+ dictWord{142, 10, 243},
+ dictWord{6, 0, 636},
+ dictWord{137, 0, 837},
+ dictWord{5, 10, 241},
+ dictWord{8, 10, 242},
+ dictWord{9, 10, 451},
+ dictWord{10, 10, 667},
+ dictWord{11, 10, 598},
+ dictWord{140, 10, 429},
+ dictWord{22, 10, 46},
+ dictWord{150, 11, 46},
+ dictWord{136, 11, 472},
+ dictWord{11, 0, 278},
+ dictWord{142, 0, 73},
+ dictWord{141, 11, 185},
+ dictWord{132, 0, 868},
+ dictWord{134, 0, 972},
+ dictWord{4, 10, 366},
+ dictWord{137, 10, 516},
+ dictWord{138, 0, 1010},
+ dictWord{5, 11, 189},
+ dictWord{6, 10, 1736},
+ dictWord{7, 11, 442},
+ dictWord{7, 11, 443},
+ dictWord{8, 11, 281},
+ dictWord{12, 11, 174},
+ dictWord{13, 11, 83},
+ dictWord{141, 11, 261},
+ dictWord{139, 11, 384},
+ dictWord{6, 11, 2},
+ dictWord{7, 11, 191},
+ dictWord{7, 11, 446},
+ dictWord{7, 11, 758},
+ dictWord{7, 11, 1262},
+ dictWord{7, 11, 1737},
+ dictWord{8, 11, 22},
+ dictWord{8, 11, 270},
+ dictWord{8, 11, 612},
+ dictWord{9, 11, 4},
+ dictWord{9, 11, 167},
+ dictWord{9, 11, 312},
+ dictWord{9, 11, 436},
+ dictWord{10, 11, 156},
+ dictWord{10, 11, 216},
+ dictWord{10, 11, 311},
+ dictWord{10, 11, 623},
+ dictWord{11, 11, 72},
+ dictWord{11, 11, 330},
+ dictWord{11, 11, 455},
+ dictWord{12, 11, 101},
+ dictWord{12, 11, 321},
+ dictWord{12, 11, 504},
+ dictWord{12, 11, 530},
+ dictWord{12, 11, 543},
+ dictWord{13, 11, 17},
+ dictWord{13, 11, 156},
+ dictWord{13, 11, 334},
+ dictWord{14, 11, 48},
+ dictWord{15, 11, 70},
+ dictWord{17, 11, 60},
+ dictWord{148, 11, 64},
+ dictWord{6, 10, 331},
+ dictWord{136, 10, 623},
+ dictWord{135, 0, 1231},
+ dictWord{132, 0, 304},
+ dictWord{6, 11, 60},
+ dictWord{7, 11, 670},
+ dictWord{7, 11, 1327},
+ dictWord{8, 11, 411},
+ dictWord{8, 11, 435},
+ dictWord{9, 11, 653},
+ dictWord{9, 11, 740},
+ dictWord{10, 11, 385},
+ dictWord{11, 11, 222},
+ dictWord{11, 11, 324},
+ dictWord{11, 11, 829},
+ dictWord{140, 11, 611},
+ dictWord{7, 0, 506},
+ dictWord{6, 11, 166},
+ dictWord{7, 11, 374},
+ dictWord{135, 11, 1174},
+ dictWord{14, 11, 43},
+ dictWord{146, 11, 21},
+ dictWord{135, 11, 1694},
+ dictWord{135, 10, 1888},
+ dictWord{5, 11, 206},
+ dictWord{134, 11, 398},
+ dictWord{135, 11, 50},
+ dictWord{150, 0, 26},
+ dictWord{6, 0, 53},
+ dictWord{6, 0, 199},
+ dictWord{7, 0, 1408},
+ dictWord{8, 0, 32},
+ dictWord{8, 0, 93},
+ dictWord{10, 0, 397},
+ dictWord{10, 0, 629},
+ dictWord{11, 0, 593},
+ dictWord{11, 0, 763},
+ dictWord{13, 0, 326},
+ dictWord{145, 0, 35},
+ dictWord{134, 0, 105},
+ dictWord{132, 10, 394},
+ dictWord{4, 0, 843},
+ dictWord{138, 0, 794},
+ dictWord{11, 0, 704},
+ dictWord{141, 0, 396},
+ dictWord{5, 0, 114},
+ dictWord{5, 0, 255},
+ dictWord{141, 0, 285},
+ dictWord{6, 0, 619},
+ dictWord{7, 0, 898},
+ dictWord{7, 0, 1092},
+ dictWord{8, 0, 485},
+ dictWord{18, 0, 28},
+ dictWord{19, 0, 116},
+ dictWord{135, 10, 1931},
+ dictWord{9, 0, 145},
+ dictWord{7, 10, 574},
+ dictWord{135, 10, 1719},
+ dictWord{7, 0, 2035},
+ dictWord{8, 0, 19},
+ dictWord{9, 0, 89},
+ dictWord{138, 0, 831},
+ dictWord{132, 10, 658},
+ dictWord{6, 11, 517},
+ dictWord{7, 11, 1159},
+ dictWord{10, 11, 621},
+ dictWord{139, 11, 192},
+ dictWord{7, 0, 1933},
+ dictWord{7, 11, 1933},
+ dictWord{9, 10, 781},
+ dictWord{10, 10, 144},
+ dictWord{11, 10, 385},
+ dictWord{13, 10, 161},
+ dictWord{13, 10, 228},
+ dictWord{13, 10, 268},
+ dictWord{148, 10, 107},
+ dictWord{136, 10, 374},
+ dictWord{10, 11, 223},
+ dictWord{139, 11, 645},
+ dictWord{135, 0, 1728},
+ dictWord{7, 11, 64},
+ dictWord{7, 11, 289},
+ dictWord{136, 11, 245},
+ dictWord{4, 10, 344},
+ dictWord{6, 10, 498},
+ dictWord{139, 10, 323},
+ dictWord{136, 0, 746},
+ dictWord{135, 10, 1063},
+ dictWord{137, 10, 155},
+ dictWord{4, 0, 987},
+ dictWord{6, 0, 1964},
+ dictWord{6, 0, 1974},
+ dictWord{6, 0, 1990},
+ dictWord{136, 0, 995},
+ dictWord{133, 11, 609},
+ dictWord{133, 10, 906},
+ dictWord{134, 0, 1550},
+ dictWord{134, 0, 874},
+ dictWord{5, 11, 129},
+ dictWord{6, 11, 61},
+ dictWord{135, 11, 947},
+ dictWord{4, 0, 1018},
+ dictWord{6, 0, 1938},
+ dictWord{6, 0, 2021},
+ dictWord{134, 0, 2039},
+ dictWord{132, 0, 814},
+ dictWord{11, 0, 126},
+ dictWord{139, 0, 287},
+ dictWord{134, 0, 1264},
+ dictWord{5, 0, 955},
+ dictWord{136, 0, 814},
+ dictWord{141, 11, 506},
+ dictWord{132, 11, 314},
+ dictWord{6, 0, 981},
+ dictWord{139, 11, 1000},
+ dictWord{5, 0, 56},
+ dictWord{8, 0, 892},
+ dictWord{8, 0, 915},
+ dictWord{140, 0, 776},
+ dictWord{148, 0, 100},
+ dictWord{10, 0, 4},
+ dictWord{10, 0, 13},
+ dictWord{11, 0, 638},
+ dictWord{148, 0, 57},
+ dictWord{148, 11, 74},
+ dictWord{5, 0, 738},
+ dictWord{132, 10, 616},
+ dictWord{133, 11, 637},
+ dictWord{136, 10, 692},
+ dictWord{133, 0, 758},
+ dictWord{132, 10, 305},
+ dictWord{137, 11, 590},
+ dictWord{5, 11, 280},
+ dictWord{135, 11, 1226},
+ dictWord{134, 11, 494},
+ dictWord{135, 0, 1112},
+ dictWord{133, 11, 281},
+ dictWord{13, 0, 44},
+ dictWord{14, 0, 214},
+ dictWord{5, 10, 214},
+ dictWord{7, 10, 603},
+ dictWord{8, 10, 611},
+ dictWord{9, 10, 686},
+ dictWord{10, 10, 88},
+ dictWord{11, 10, 459},
+ dictWord{11, 10, 496},
+ dictWord{12, 10, 463},
+ dictWord{140, 10, 590},
+ dictWord{139, 0, 328},
+ dictWord{135, 11, 1064},
+ dictWord{137, 0, 133},
+ dictWord{7, 0, 168},
+ dictWord{13, 0, 196},
+ dictWord{141, 0, 237},
+ dictWord{134, 10, 1703},
+ dictWord{134, 0, 1152},
+ dictWord{135, 0, 1245},
+ dictWord{5, 0, 110},
+ dictWord{6, 0, 169},
+ dictWord{6, 0, 1702},
+ dictWord{7, 0, 400},
+ dictWord{8, 0, 538},
+ dictWord{9, 0, 184},
+ dictWord{9, 0, 524},
+ dictWord{140, 0, 218},
+ dictWord{6, 0, 1816},
+ dictWord{10, 0, 871},
+ dictWord{12, 0, 769},
+ dictWord{140, 0, 785},
+ dictWord{132, 11, 630},
+ dictWord{7, 11, 33},
+ dictWord{7, 11, 120},
+ dictWord{8, 11, 489},
+ dictWord{9, 11, 319},
+ dictWord{10, 11, 820},
+ dictWord{11, 11, 1004},
+ dictWord{12, 11, 379},
+ dictWord{13, 11, 117},
+ dictWord{13, 11, 412},
+ dictWord{14, 11, 25},
+ dictWord{15, 11, 52},
+ dictWord{15, 11, 161},
+ dictWord{16, 11, 47},
+ dictWord{149, 11, 2},
+ dictWord{6, 0, 133},
+ dictWord{8, 0, 413},
+ dictWord{9, 0, 353},
+ dictWord{139, 0, 993},
+ dictWord{145, 10, 19},
+ dictWord{4, 11, 937},
+ dictWord{133, 11, 801},
+ dictWord{134, 0, 978},
+ dictWord{6, 0, 93},
+ dictWord{6, 0, 1508},
+ dictWord{7, 0, 1422},
+ dictWord{7, 0, 1851},
+ dictWord{8, 0, 673},
+ dictWord{9, 0, 529},
+ dictWord{140, 0, 43},
+ dictWord{6, 0, 317},
+ dictWord{10, 0, 512},
+ dictWord{4, 10, 737},
+ dictWord{11, 10, 294},
+ dictWord{12, 10, 60},
+ dictWord{12, 10, 437},
+ dictWord{13, 10, 64},
+ dictWord{13, 10, 380},
+ dictWord{142, 10, 430},
+ dictWord{9, 0, 371},
+ dictWord{7, 11, 1591},
+ dictWord{144, 11, 43},
+ dictWord{6, 10, 1758},
+ dictWord{8, 10, 520},
+ dictWord{9, 10, 345},
+ dictWord{9, 10, 403},
+ dictWord{142, 10, 350},
+ dictWord{5, 0, 526},
+ dictWord{10, 10, 242},
+ dictWord{138, 10, 579},
+ dictWord{9, 0, 25},
+ dictWord{10, 0, 467},
+ dictWord{138, 0, 559},
+ dictWord{5, 10, 139},
+ dictWord{7, 10, 1168},
+ dictWord{138, 10, 539},
+ dictWord{4, 0, 335},
+ dictWord{135, 0, 942},
+ dictWord{140, 0, 754},
+ dictWord{132, 11, 365},
+ dictWord{11, 0, 182},
+ dictWord{142, 0, 195},
+ dictWord{142, 11, 29},
+ dictWord{5, 11, 7},
+ dictWord{139, 11, 774},
+ dictWord{4, 11, 746},
+ dictWord{135, 11, 1090},
+ dictWord{8, 0, 39},
+ dictWord{10, 0, 773},
+ dictWord{11, 0, 84},
+ dictWord{12, 0, 205},
+ dictWord{142, 0, 1},
+ dictWord{5, 0, 601},
+ dictWord{5, 0, 870},
+ dictWord{5, 11, 360},
+ dictWord{136, 11, 237},
+ dictWord{132, 0, 181},
+ dictWord{136, 0, 370},
+ dictWord{134, 0, 1652},
+ dictWord{8, 0, 358},
+ dictWord{4, 10, 107},
+ dictWord{7, 10, 613},
+ dictWord{8, 10, 439},
+ dictWord{8, 10, 504},
+ dictWord{9, 10, 501},
+ dictWord{10, 10, 383},
+ dictWord{139, 10, 477},
+ dictWord{132, 10, 229},
+ dictWord{137, 11, 785},
+ dictWord{4, 0, 97},
+ dictWord{5, 0, 147},
+ dictWord{6, 0, 286},
+ dictWord{7, 0, 1362},
+ dictWord{141, 0, 176},
+ dictWord{6, 0, 537},
+ dictWord{7, 0, 788},
+ dictWord{7, 0, 1816},
+ dictWord{132, 10, 903},
+ dictWord{140, 10, 71},
+ dictWord{6, 0, 743},
+ dictWord{134, 0, 1223},
+ dictWord{6, 0, 375},
+ dictWord{7, 0, 169},
+ dictWord{7, 0, 254},
+ dictWord{8, 0, 780},
+ dictWord{135, 11, 1493},
+ dictWord{7, 0, 1714},
+ dictWord{4, 10, 47},
+ dictWord{6, 10, 373},
+ dictWord{7, 10, 452},
+ dictWord{7, 10, 543},
+ dictWord{7, 10, 1856},
+ dictWord{9, 10, 6},
+ dictWord{11, 10, 257},
+ dictWord{139, 10, 391},
+ dictWord{6, 0, 896},
+ dictWord{136, 0, 1003},
+ dictWord{135, 0, 1447},
+ dictWord{137, 11, 341},
+ dictWord{5, 10, 980},
+ dictWord{134, 10, 1754},
+ dictWord{145, 11, 22},
+ dictWord{4, 11, 277},
+ dictWord{5, 11, 608},
+ dictWord{6, 11, 493},
+ dictWord{7, 11, 457},
+ dictWord{140, 11, 384},
+ dictWord{7, 10, 536},
+ dictWord{7, 10, 1331},
+ dictWord{136, 10, 143},
+ dictWord{140, 0, 744},
+ dictWord{7, 11, 27},
+ dictWord{135, 11, 316},
+ dictWord{18, 0, 126},
+ dictWord{5, 10, 19},
+ dictWord{134, 10, 533},
+ dictWord{4, 0, 788},
+ dictWord{11, 0, 41},
+ dictWord{5, 11, 552},
+ dictWord{5, 11, 586},
+ dictWord{5, 11, 676},
+ dictWord{6, 11, 448},
+ dictWord{8, 11, 244},
+ dictWord{11, 11, 1},
+ dictWord{11, 11, 41},
+ dictWord{13, 11, 3},
+ dictWord{16, 11, 54},
+ dictWord{17, 11, 4},
+ dictWord{146, 11, 13},
+ dictWord{4, 0, 985},
+ dictWord{6, 0, 1801},
+ dictWord{4, 11, 401},
+ dictWord{137, 11, 264},
+ dictWord{5, 10, 395},
+ dictWord{5, 10, 951},
+ dictWord{134, 10, 1776},
+ dictWord{5, 0, 629},
+ dictWord{135, 0, 1549},
+ dictWord{11, 10, 663},
+ dictWord{12, 10, 210},
+ dictWord{13, 10, 166},
+ dictWord{13, 10, 310},
+ dictWord{14, 10, 373},
+ dictWord{147, 10, 43},
+ dictWord{9, 11, 543},
+ dictWord{10, 11, 524},
+ dictWord{11, 11, 30},
+ dictWord{12, 11, 524},
+ dictWord{14, 11, 315},
+ dictWord{16, 11, 18},
+ dictWord{20, 11, 26},
+ dictWord{148, 11, 65},
+ dictWord{4, 11, 205},
+ dictWord{5, 11, 623},
+ dictWord{7, 11, 104},
+ dictWord{136, 11, 519},
+ dictWord{5, 0, 293},
+ dictWord{134, 0, 601},
+ dictWord{7, 11, 579},
+ dictWord{9, 11, 41},
+ dictWord{9, 11, 244},
+ dictWord{9, 11, 669},
+ dictWord{10, 11, 5},
+ dictWord{11, 11, 861},
+ dictWord{11, 11, 951},
+ dictWord{139, 11, 980},
+ dictWord{132, 11, 717},
+ dictWord{132, 10, 695},
+ dictWord{7, 10, 497},
+ dictWord{9, 10, 387},
+ dictWord{147, 10, 81},
+ dictWord{132, 0, 420},
+ dictWord{142, 0, 37},
+ dictWord{6, 0, 1134},
+ dictWord{6, 0, 1900},
+ dictWord{12, 0, 830},
+ dictWord{12, 0, 878},
+ dictWord{12, 0, 894},
+ dictWord{15, 0, 221},
+ dictWord{143, 0, 245},
+ dictWord{132, 11, 489},
+ dictWord{7, 0, 1570},
+ dictWord{140, 0, 542},
+ dictWord{8, 0, 933},
+ dictWord{136, 0, 957},
+ dictWord{6, 0, 1371},
+ dictWord{7, 0, 31},
+ dictWord{8, 0, 373},
+ dictWord{5, 10, 284},
+ dictWord{6, 10, 49},
+ dictWord{6, 10, 350},
+ dictWord{7, 10, 377},
+ dictWord{7, 10, 1693},
+ dictWord{8, 10, 678},
+ dictWord{9, 10, 161},
+ dictWord{9, 10, 585},
+ dictWord{9, 10, 671},
+ dictWord{9, 10, 839},
+ dictWord{11, 10, 912},
+ dictWord{141, 10, 427},
+ dictWord{135, 11, 892},
+ dictWord{4, 0, 325},
+ dictWord{138, 0, 125},
+ dictWord{139, 11, 47},
+ dictWord{132, 10, 597},
+ dictWord{138, 0, 323},
+ dictWord{6, 0, 1547},
+ dictWord{7, 11, 1605},
+ dictWord{9, 11, 473},
+ dictWord{11, 11, 962},
+ dictWord{146, 11, 139},
+ dictWord{139, 10, 908},
+ dictWord{7, 11, 819},
+ dictWord{9, 11, 26},
+ dictWord{9, 11, 392},
+ dictWord{10, 11, 152},
+ dictWord{10, 11, 226},
+ dictWord{11, 11, 19},
+ dictWord{12, 11, 276},
+ dictWord{12, 11, 426},
+ dictWord{12, 11, 589},
+ dictWord{13, 11, 460},
+ dictWord{15, 11, 97},
+ dictWord{19, 11, 48},
+ dictWord{148, 11, 104},
+ dictWord{135, 11, 51},
+ dictWord{4, 0, 718},
+ dictWord{135, 0, 1216},
+ dictWord{6, 0, 1896},
+ dictWord{6, 0, 1905},
+ dictWord{6, 0, 1912},
+ dictWord{9, 0, 947},
+ dictWord{9, 0, 974},
+ dictWord{12, 0, 809},
+ dictWord{12, 0, 850},
+ dictWord{12, 0, 858},
+ dictWord{12, 0, 874},
+ dictWord{12, 0, 887},
+ dictWord{12, 0, 904},
+ dictWord{12, 0, 929},
+ dictWord{12, 0, 948},
+ dictWord{12, 0, 952},
+ dictWord{15, 0, 198},
+ dictWord{15, 0, 206},
+ dictWord{15, 0, 220},
+ dictWord{15, 0, 227},
+ dictWord{15, 0, 247},
+ dictWord{18, 0, 188},
+ dictWord{21, 0, 48},
+ dictWord{21, 0, 50},
+ dictWord{24, 0, 25},
+ dictWord{24, 0, 29},
+ dictWord{7, 11, 761},
+ dictWord{7, 11, 1051},
+ dictWord{137, 11, 545},
+ dictWord{5, 0, 124},
+ dictWord{5, 0, 144},
+ dictWord{6, 0, 548},
+ dictWord{7, 0, 15},
+ dictWord{7, 0, 153},
+ dictWord{137, 0, 629},
+ dictWord{135, 11, 606},
+ dictWord{135, 10, 2014},
+ dictWord{7, 10, 2007},
+ dictWord{9, 11, 46},
+ dictWord{9, 10, 101},
+ dictWord{9, 10, 450},
+ dictWord{10, 10, 66},
+ dictWord{10, 10, 842},
+ dictWord{11, 10, 536},
+ dictWord{140, 10, 587},
+ dictWord{6, 0, 75},
+ dictWord{7, 0, 1531},
+ dictWord{8, 0, 416},
+ dictWord{9, 0, 240},
+ dictWord{9, 0, 275},
+ dictWord{10, 0, 100},
+ dictWord{11, 0, 658},
+ dictWord{11, 0, 979},
+ dictWord{12, 0, 86},
+ dictWord{14, 0, 207},
+ dictWord{15, 0, 20},
+ dictWord{143, 0, 25},
+ dictWord{5, 0, 141},
+ dictWord{5, 0, 915},
+ dictWord{6, 0, 1783},
+ dictWord{7, 0, 211},
+ dictWord{7, 0, 698},
+ dictWord{7, 0, 1353},
+ dictWord{9, 0, 83},
+ dictWord{9, 0, 281},
+ dictWord{10, 0, 376},
+ dictWord{10, 0, 431},
+ dictWord{11, 0, 543},
+ dictWord{12, 0, 664},
+ dictWord{13, 0, 280},
+ dictWord{13, 0, 428},
+ dictWord{14, 0, 61},
+ dictWord{14, 0, 128},
+ dictWord{17, 0, 52},
+ dictWord{145, 0, 81},
+ dictWord{132, 11, 674},
+ dictWord{135, 0, 533},
+ dictWord{149, 0, 6},
+ dictWord{132, 11, 770},
+ dictWord{133, 0, 538},
+ dictWord{5, 11, 79},
+ dictWord{7, 11, 1027},
+ dictWord{7, 11, 1477},
+ dictWord{139, 11, 52},
+ dictWord{139, 10, 62},
+ dictWord{4, 0, 338},
+ dictWord{133, 0, 400},
+ dictWord{5, 11, 789},
+ dictWord{134, 11, 195},
+ dictWord{4, 11, 251},
+ dictWord{4, 11, 688},
+ dictWord{7, 11, 513},
+ dictWord{7, 11, 1284},
+ dictWord{9, 11, 87},
+ dictWord{138, 11, 365},
+ dictWord{134, 10, 1766},
+ dictWord{6, 0, 0},
+ dictWord{7, 0, 84},
+ dictWord{11, 0, 895},
+ dictWord{145, 0, 11},
+ dictWord{139, 0, 892},
+ dictWord{4, 0, 221},
+ dictWord{5, 0, 659},
+ dictWord{7, 0, 697},
+ dictWord{7, 0, 1211},
+ dictWord{138, 0, 284},
+ dictWord{133, 0, 989},
+ dictWord{133, 11, 889},
+ dictWord{4, 11, 160},
+ dictWord{5, 11, 330},
+ dictWord{7, 11, 1434},
+ dictWord{136, 11, 174},
+ dictWord{6, 10, 1665},
+ dictWord{7, 10, 256},
+ dictWord{7, 10, 1388},
+ dictWord{10, 10, 499},
+ dictWord{139, 10, 670},
+ dictWord{7, 0, 848},
+ dictWord{4, 10, 22},
+ dictWord{5, 10, 10},
+ dictWord{136, 10, 97},
+ dictWord{138, 0, 507},
+ dictWord{133, 10, 481},
+ dictWord{4, 0, 188},
+ dictWord{135, 0, 805},
+ dictWord{5, 0, 884},
+ dictWord{6, 0, 732},
+ dictWord{139, 0, 991},
+ dictWord{135, 11, 968},
+ dictWord{11, 11, 636},
+ dictWord{15, 11, 145},
+ dictWord{17, 11, 34},
+ dictWord{19, 11, 50},
+ dictWord{151, 11, 20},
+ dictWord{7, 0, 959},
+ dictWord{16, 0, 60},
+ dictWord{6, 10, 134},
+ dictWord{7, 10, 437},
+ dictWord{9, 10, 37},
+ dictWord{14, 10, 285},
+ dictWord{142, 10, 371},
+ dictWord{7, 10, 486},
+ dictWord{8, 10, 155},
+ dictWord{11, 10, 93},
+ dictWord{140, 10, 164},
+ dictWord{134, 0, 1653},
+ dictWord{7, 0, 337},
+ dictWord{133, 10, 591},
+ dictWord{6, 0, 1989},
+ dictWord{8, 0, 922},
+ dictWord{8, 0, 978},
+ dictWord{133, 11, 374},
+ dictWord{132, 0, 638},
+ dictWord{138, 0, 500},
+ dictWord{133, 11, 731},
+ dictWord{5, 10, 380},
+ dictWord{5, 10, 650},
+ dictWord{136, 10, 310},
+ dictWord{138, 11, 381},
+ dictWord{4, 10, 364},
+ dictWord{7, 10, 1156},
+ dictWord{7, 10, 1187},
+ dictWord{137, 10, 409},
+ dictWord{137, 11, 224},
+ dictWord{140, 0, 166},
+ dictWord{134, 10, 482},
+ dictWord{4, 11, 626},
+ dictWord{5, 11, 642},
+ dictWord{6, 11, 425},
+ dictWord{10, 11, 202},
+ dictWord{139, 11, 141},
+ dictWord{4, 10, 781},
+ dictWord{6, 10, 487},
+ dictWord{7, 10, 926},
+ dictWord{8, 10, 263},
+ dictWord{139, 10, 500},
+ dictWord{135, 0, 418},
+ dictWord{4, 10, 94},
+ dictWord{135, 10, 1265},
+ dictWord{136, 0, 760},
+ dictWord{132, 10, 417},
+ dictWord{136, 11, 835},
+ dictWord{5, 10, 348},
+ dictWord{134, 10, 522},
+ dictWord{6, 0, 1277},
+ dictWord{134, 0, 1538},
+ dictWord{139, 11, 541},
+ dictWord{135, 11, 1597},
+ dictWord{5, 11, 384},
+ dictWord{8, 11, 455},
+ dictWord{140, 11, 48},
+ dictWord{136, 0, 770},
+ dictWord{5, 11, 264},
+ dictWord{134, 11, 184},
+ dictWord{4, 0, 89},
+ dictWord{5, 0, 489},
+ dictWord{6, 0, 315},
+ dictWord{7, 0, 553},
+ dictWord{7, 0, 1745},
+ dictWord{138, 0, 243},
+ dictWord{4, 10, 408},
+ dictWord{4, 10, 741},
+ dictWord{135, 10, 500},
+ dictWord{134, 0, 1396},
+ dictWord{133, 0, 560},
+ dictWord{6, 0, 1658},
+ dictWord{9, 0, 3},
+ dictWord{10, 0, 154},
+ dictWord{11, 0, 641},
+ dictWord{13, 0, 85},
+ dictWord{13, 0, 201},
+ dictWord{141, 0, 346},
+ dictWord{135, 11, 1595},
+ dictWord{5, 11, 633},
+ dictWord{6, 11, 28},
+ dictWord{7, 11, 219},
+ dictWord{135, 11, 1323},
+ dictWord{9, 11, 769},
+ dictWord{140, 11, 185},
+ dictWord{135, 11, 785},
+ dictWord{7, 11, 359},
+ dictWord{8, 11, 243},
+ dictWord{140, 11, 175},
+ dictWord{138, 0, 586},
+ dictWord{7, 0, 1271},
+ dictWord{134, 10, 73},
+ dictWord{132, 11, 105},
+ dictWord{4, 0, 166},
+ dictWord{5, 0, 505},
+ dictWord{134, 0, 1670},
+ dictWord{133, 10, 576},
+ dictWord{4, 11, 324},
+ dictWord{138, 11, 104},
+ dictWord{142, 10, 231},
+ dictWord{6, 0, 637},
+ dictWord{7, 10, 1264},
+ dictWord{7, 10, 1678},
+ dictWord{11, 10, 945},
+ dictWord{12, 10, 341},
+ dictWord{12, 10, 471},
+ dictWord{12, 10, 569},
+ dictWord{23, 11, 21},
+ dictWord{151, 11, 23},
+ dictWord{8, 11, 559},
+ dictWord{141, 11, 109},
+ dictWord{134, 0, 1947},
+ dictWord{7, 0, 445},
+ dictWord{8, 0, 307},
+ dictWord{8, 0, 704},
+ dictWord{10, 0, 41},
+ dictWord{10, 0, 439},
+ dictWord{11, 0, 237},
+ dictWord{11, 0, 622},
+ dictWord{140, 0, 201},
+ dictWord{135, 11, 963},
+ dictWord{135, 0, 1977},
+ dictWord{4, 0, 189},
+ dictWord{5, 0, 713},
+ dictWord{136, 0, 57},
+ dictWord{138, 0, 371},
+ dictWord{135, 10, 538},
+ dictWord{132, 0, 552},
+ dictWord{6, 0, 883},
+ dictWord{133, 10, 413},
+ dictWord{6, 0, 923},
+ dictWord{132, 11, 758},
+ dictWord{138, 11, 215},
+ dictWord{136, 10, 495},
+ dictWord{7, 10, 54},
+ dictWord{8, 10, 312},
+ dictWord{10, 10, 191},
+ dictWord{10, 10, 614},
+ dictWord{140, 10, 567},
+ dictWord{7, 11, 351},
+ dictWord{139, 11, 128},
+ dictWord{7, 0, 875},
+ dictWord{6, 10, 468},
+ dictWord{7, 10, 1478},
+ dictWord{8, 10, 530},
+ dictWord{142, 10, 290},
+ dictWord{135, 0, 1788},
+ dictWord{17, 0, 49},
+ dictWord{133, 11, 918},
+ dictWord{12, 11, 398},
+ dictWord{20, 11, 39},
+ dictWord{21, 11, 11},
+ dictWord{150, 11, 41},
+ dictWord{10, 0, 661},
+ dictWord{6, 10, 484},
+ dictWord{135, 10, 822},
+ dictWord{135, 0, 1945},
+ dictWord{134, 0, 794},
+ dictWord{137, 10, 900},
+ dictWord{135, 10, 1335},
+ dictWord{6, 10, 1724},
+ dictWord{135, 10, 2022},
+ dictWord{132, 11, 340},
+ dictWord{134, 0, 1135},
+ dictWord{4, 0, 784},
+ dictWord{133, 0, 745},
+ dictWord{5, 0, 84},
+ dictWord{134, 0, 163},
+ dictWord{133, 0, 410},
+ dictWord{4, 0, 976},
+ dictWord{5, 11, 985},
+ dictWord{7, 11, 509},
+ dictWord{7, 11, 529},
+ dictWord{145, 11, 96},
+ dictWord{132, 10, 474},
+ dictWord{134, 0, 703},
+ dictWord{135, 11, 1919},
+ dictWord{5, 0, 322},
+ dictWord{8, 0, 186},
+ dictWord{9, 0, 262},
+ dictWord{10, 0, 187},
+ dictWord{142, 0, 208},
+ dictWord{135, 10, 1504},
+ dictWord{133, 0, 227},
+ dictWord{9, 0, 560},
+ dictWord{13, 0, 208},
+ dictWord{133, 10, 305},
+ dictWord{132, 11, 247},
+ dictWord{7, 0, 1395},
+ dictWord{8, 0, 486},
+ dictWord{9, 0, 236},
+ dictWord{9, 0, 878},
+ dictWord{10, 0, 218},
+ dictWord{11, 0, 95},
+ dictWord{19, 0, 17},
+ dictWord{147, 0, 31},
+ dictWord{7, 0, 2043},
+ dictWord{8, 0, 672},
+ dictWord{141, 0, 448},
+ dictWord{4, 11, 184},
+ dictWord{5, 11, 390},
+ dictWord{6, 11, 337},
+ dictWord{7, 11, 23},
+ dictWord{7, 11, 494},
+ dictWord{7, 11, 618},
+ dictWord{7, 11, 1456},
+ dictWord{8, 11, 27},
+ dictWord{8, 11, 599},
+ dictWord{10, 11, 153},
+ dictWord{139, 11, 710},
+ dictWord{135, 0, 466},
+ dictWord{135, 10, 1236},
+ dictWord{6, 0, 167},
+ dictWord{7, 0, 186},
+ dictWord{7, 0, 656},
+ dictWord{10, 0, 643},
+ dictWord{4, 10, 480},
+ dictWord{6, 10, 302},
+ dictWord{6, 10, 1642},
+ dictWord{7, 10, 837},
+ dictWord{7, 10, 1547},
+ dictWord{7, 10, 1657},
+ dictWord{8, 10, 429},
+ dictWord{9, 10, 228},
+ dictWord{13, 10, 289},
+ dictWord{13, 10, 343},
+ dictWord{147, 10, 101},
+ dictWord{134, 0, 1428},
+ dictWord{134, 0, 1440},
+ dictWord{5, 0, 412},
+ dictWord{7, 10, 278},
+ dictWord{10, 10, 739},
+ dictWord{11, 10, 708},
+ dictWord{141, 10, 348},
+ dictWord{134, 0, 1118},
+ dictWord{136, 0, 562},
+ dictWord{148, 11, 46},
+ dictWord{9, 0, 316},
+ dictWord{139, 0, 256},
+ dictWord{134, 0, 1771},
+ dictWord{135, 0, 1190},
+ dictWord{137, 0, 132},
+ dictWord{10, 11, 227},
+ dictWord{11, 11, 497},
+ dictWord{11, 11, 709},
+ dictWord{140, 11, 415},
+ dictWord{143, 0, 66},
+ dictWord{6, 11, 360},
+ dictWord{7, 11, 1664},
+ dictWord{136, 11, 478},
+ dictWord{144, 10, 28},
+ dictWord{4, 0, 317},
+ dictWord{135, 0, 1279},
+ dictWord{5, 0, 63},
+ dictWord{133, 0, 509},
+ dictWord{136, 11, 699},
+ dictWord{145, 10, 36},
+ dictWord{134, 0, 1475},
+ dictWord{11, 11, 343},
+ dictWord{142, 11, 127},
+ dictWord{132, 11, 739},
+ dictWord{132, 0, 288},
+ dictWord{135, 11, 1757},
+ dictWord{8, 0, 89},
+ dictWord{8, 0, 620},
+ dictWord{9, 0, 608},
+ dictWord{11, 0, 628},
+ dictWord{12, 0, 322},
+ dictWord{143, 0, 124},
+ dictWord{134, 0, 1225},
+ dictWord{7, 0, 1189},
+ dictWord{4, 11, 67},
+ dictWord{5, 11, 422},
+ dictWord{6, 10, 363},
+ dictWord{7, 11, 1037},
+ dictWord{7, 11, 1289},
+ dictWord{7, 11, 1555},
+ dictWord{7, 10, 1955},
+ dictWord{8, 10, 725},
+ dictWord{9, 11, 741},
+ dictWord{145, 11, 108},
+ dictWord{134, 0, 1468},
+ dictWord{6, 0, 689},
+ dictWord{134, 0, 1451},
+ dictWord{138, 0, 120},
+ dictWord{151, 0, 1},
+ dictWord{137, 10, 805},
+ dictWord{142, 0, 329},
+ dictWord{5, 10, 813},
+ dictWord{135, 10, 2046},
+ dictWord{135, 0, 226},
+ dictWord{138, 11, 96},
+ dictWord{7, 0, 1855},
+ dictWord{5, 10, 712},
+ dictWord{11, 10, 17},
+ dictWord{13, 10, 321},
+ dictWord{144, 10, 67},
+ dictWord{9, 0, 461},
+ dictWord{6, 10, 320},
+ dictWord{7, 10, 781},
+ dictWord{7, 10, 1921},
+ dictWord{9, 10, 55},
+ dictWord{10, 10, 186},
+ dictWord{10, 10, 273},
+ dictWord{10, 10, 664},
+ dictWord{10, 10, 801},
+ dictWord{11, 10, 996},
+ dictWord{11, 10, 997},
+ dictWord{13, 10, 157},
+ dictWord{142, 10, 170},
+ dictWord{8, 11, 203},
+ dictWord{8, 10, 271},
+ dictWord{11, 11, 823},
+ dictWord{11, 11, 846},
+ dictWord{12, 11, 482},
+ dictWord{13, 11, 133},
+ dictWord{13, 11, 277},
+ dictWord{13, 11, 302},
+ dictWord{13, 11, 464},
+ dictWord{14, 11, 205},
+ dictWord{142, 11, 221},
+ dictWord{135, 0, 1346},
+ dictWord{4, 11, 449},
+ dictWord{133, 11, 718},
+ dictWord{134, 0, 85},
+ dictWord{14, 0, 299},
+ dictWord{7, 10, 103},
+ dictWord{7, 10, 863},
+ dictWord{11, 10, 184},
+ dictWord{145, 10, 62},
+ dictWord{4, 11, 355},
+ dictWord{6, 11, 311},
+ dictWord{9, 11, 256},
+ dictWord{138, 11, 404},
+ dictWord{137, 10, 659},
+ dictWord{138, 11, 758},
+ dictWord{133, 11, 827},
+ dictWord{5, 11, 64},
+ dictWord{140, 11, 581},
+ dictWord{134, 0, 1171},
+ dictWord{4, 11, 442},
+ dictWord{7, 11, 1047},
+ dictWord{7, 11, 1352},
+ dictWord{135, 11, 1643},
+ dictWord{132, 0, 980},
+ dictWord{5, 11, 977},
+ dictWord{6, 11, 288},
+ dictWord{7, 11, 528},
+ dictWord{135, 11, 1065},
+ dictWord{5, 0, 279},
+ dictWord{6, 0, 235},
+ dictWord{7, 0, 468},
+ dictWord{8, 0, 446},
+ dictWord{9, 0, 637},
+ dictWord{10, 0, 717},
+ dictWord{11, 0, 738},
+ dictWord{140, 0, 514},
+ dictWord{132, 0, 293},
+ dictWord{11, 10, 337},
+ dictWord{142, 10, 303},
+ dictWord{136, 11, 285},
+ dictWord{5, 0, 17},
+ dictWord{6, 0, 371},
+ dictWord{9, 0, 528},
+ dictWord{12, 0, 364},
+ dictWord{132, 11, 254},
+ dictWord{5, 10, 77},
+ dictWord{7, 10, 1455},
+ dictWord{10, 10, 843},
+ dictWord{147, 10, 73},
+ dictWord{150, 0, 5},
+ dictWord{132, 10, 458},
+ dictWord{6, 11, 12},
+ dictWord{7, 11, 1219},
+ dictWord{145, 11, 73},
+ dictWord{135, 10, 1420},
+ dictWord{6, 10, 109},
+ dictWord{138, 10, 382},
+ dictWord{135, 11, 125},
+ dictWord{6, 10, 330},
+ dictWord{7, 10, 1084},
+ dictWord{139, 10, 142},
+ dictWord{6, 11, 369},
+ dictWord{6, 11, 502},
+ dictWord{7, 11, 1036},
+ dictWord{8, 11, 348},
+ dictWord{9, 11, 452},
+ dictWord{10, 11, 26},
+ dictWord{11, 11, 224},
+ dictWord{11, 11, 387},
+ dictWord{11, 11, 772},
+ dictWord{12, 11, 95},
+ dictWord{12, 11, 629},
+ dictWord{13, 11, 195},
+ dictWord{13, 11, 207},
+ dictWord{13, 11, 241},
+ dictWord{14, 11, 260},
+ dictWord{14, 11, 270},
+ dictWord{143, 11, 140},
+ dictWord{132, 11, 269},
+ dictWord{5, 11, 480},
+ dictWord{7, 11, 532},
+ dictWord{7, 11, 1197},
+ dictWord{7, 11, 1358},
+ dictWord{8, 11, 291},
+ dictWord{11, 11, 349},
+ dictWord{142, 11, 396},
+ dictWord{150, 0, 48},
+ dictWord{10, 0, 601},
+ dictWord{13, 0, 353},
+ dictWord{141, 0, 376},
+ dictWord{5, 0, 779},
+ dictWord{5, 0, 807},
+ dictWord{6, 0, 1655},
+ dictWord{134, 0, 1676},
+ dictWord{142, 11, 223},
+ dictWord{4, 0, 196},
+ dictWord{5, 0, 558},
+ dictWord{133, 0, 949},
+ dictWord{148, 11, 15},
+ dictWord{135, 11, 1764},
+ dictWord{134, 0, 1322},
+ dictWord{132, 0, 752},
+ dictWord{139, 0, 737},
+ dictWord{135, 11, 657},
+ dictWord{136, 11, 533},
+ dictWord{135, 0, 412},
+ dictWord{4, 0, 227},
+ dictWord{5, 0, 159},
+ dictWord{5, 0, 409},
+ dictWord{7, 0, 80},
+ dictWord{8, 0, 556},
+ dictWord{10, 0, 479},
+ dictWord{12, 0, 418},
+ dictWord{14, 0, 50},
+ dictWord{14, 0, 123},
+ dictWord{14, 0, 192},
+ dictWord{14, 0, 249},
+ dictWord{14, 0, 295},
+ dictWord{143, 0, 27},
+ dictWord{7, 0, 1470},
+ dictWord{8, 0, 66},
+ dictWord{8, 0, 137},
+ dictWord{8, 0, 761},
+ dictWord{9, 0, 638},
+ dictWord{11, 0, 80},
+ dictWord{11, 0, 212},
+ dictWord{11, 0, 368},
+ dictWord{11, 0, 418},
+ dictWord{12, 0, 8},
+ dictWord{13, 0, 15},
+ dictWord{16, 0, 61},
+ dictWord{17, 0, 59},
+ dictWord{19, 0, 28},
+ dictWord{148, 0, 84},
+ dictWord{135, 10, 1985},
+ dictWord{4, 11, 211},
+ dictWord{4, 11, 332},
+ dictWord{5, 11, 335},
+ dictWord{6, 11, 238},
+ dictWord{7, 11, 269},
+ dictWord{7, 11, 811},
+ dictWord{7, 11, 1797},
+ dictWord{8, 10, 122},
+ dictWord{8, 11, 836},
+ dictWord{9, 11, 507},
+ dictWord{141, 11, 242},
+ dictWord{6, 0, 683},
+ dictWord{134, 0, 1252},
+ dictWord{4, 0, 873},
+ dictWord{132, 10, 234},
+ dictWord{134, 0, 835},
+ dictWord{6, 0, 38},
+ dictWord{7, 0, 1220},
+ dictWord{8, 0, 185},
+ dictWord{8, 0, 256},
+ dictWord{9, 0, 22},
+ dictWord{9, 0, 331},
+ dictWord{10, 0, 738},
+ dictWord{11, 0, 205},
+ dictWord{11, 0, 540},
+ dictWord{11, 0, 746},
+ dictWord{13, 0, 465},
+ dictWord{14, 0, 88},
+ dictWord{142, 0, 194},
+ dictWord{138, 0, 986},
+ dictWord{5, 11, 1009},
+ dictWord{12, 11, 582},
+ dictWord{146, 11, 131},
+ dictWord{4, 0, 159},
+ dictWord{6, 0, 115},
+ dictWord{7, 0, 252},
+ dictWord{7, 0, 257},
+ dictWord{7, 0, 1928},
+ dictWord{8, 0, 69},
+ dictWord{9, 0, 384},
+ dictWord{10, 0, 91},
+ dictWord{10, 0, 615},
+ dictWord{12, 0, 375},
+ dictWord{14, 0, 235},
+ dictWord{18, 0, 117},
+ dictWord{147, 0, 123},
+ dictWord{133, 0, 911},
+ dictWord{136, 0, 278},
+ dictWord{5, 10, 430},
+ dictWord{5, 10, 932},
+ dictWord{6, 10, 131},
+ dictWord{7, 10, 417},
+ dictWord{9, 10, 522},
+ dictWord{11, 10, 314},
+ dictWord{141, 10, 390},
+ dictWord{14, 10, 149},
+ dictWord{14, 10, 399},
+ dictWord{143, 10, 57},
+ dictWord{4, 0, 151},
+ dictWord{7, 0, 1567},
+ dictWord{136, 0, 749},
+ dictWord{5, 11, 228},
+ dictWord{6, 11, 203},
+ dictWord{7, 11, 156},
+ dictWord{8, 11, 347},
+ dictWord{137, 11, 265},
+ dictWord{132, 10, 507},
+ dictWord{10, 0, 989},
+ dictWord{140, 0, 956},
+ dictWord{133, 0, 990},
+ dictWord{5, 0, 194},
+ dictWord{6, 0, 927},
+ dictWord{7, 0, 1662},
+ dictWord{9, 0, 90},
+ dictWord{140, 0, 564},
+ dictWord{4, 10, 343},
+ dictWord{133, 10, 511},
+ dictWord{133, 0, 425},
+ dictWord{7, 10, 455},
+ dictWord{138, 10, 591},
+ dictWord{4, 0, 774},
+ dictWord{7, 11, 476},
+ dictWord{7, 11, 1592},
+ dictWord{138, 11, 87},
+ dictWord{5, 0, 971},
+ dictWord{135, 10, 1381},
+ dictWord{5, 11, 318},
+ dictWord{147, 11, 121},
+ dictWord{5, 11, 291},
+ dictWord{7, 11, 765},
+ dictWord{9, 11, 389},
+ dictWord{140, 11, 548},
+ dictWord{134, 10, 575},
+ dictWord{4, 0, 827},
+ dictWord{12, 0, 646},
+ dictWord{12, 0, 705},
+ dictWord{12, 0, 712},
+ dictWord{140, 0, 714},
+ dictWord{139, 0, 752},
+ dictWord{137, 0, 662},
+ dictWord{5, 0, 72},
+ dictWord{6, 0, 264},
+ dictWord{7, 0, 21},
+ dictWord{7, 0, 46},
+ dictWord{7, 0, 2013},
+ dictWord{8, 0, 215},
+ dictWord{8, 0, 513},
+ dictWord{10, 0, 266},
+ dictWord{139, 0, 22},
+ dictWord{139, 11, 522},
+ dictWord{6, 0, 239},
+ dictWord{7, 0, 118},
+ dictWord{10, 0, 95},
+ dictWord{11, 0, 603},
+ dictWord{13, 0, 443},
+ dictWord{14, 0, 160},
+ dictWord{143, 0, 4},
+ dictWord{6, 0, 431},
+ dictWord{134, 0, 669},
+ dictWord{7, 10, 1127},
+ dictWord{7, 10, 1572},
+ dictWord{10, 10, 297},
+ dictWord{10, 10, 422},
+ dictWord{11, 10, 764},
+ dictWord{11, 10, 810},
+ dictWord{12, 10, 264},
+ dictWord{13, 10, 102},
+ dictWord{13, 10, 300},
+ dictWord{13, 10, 484},
+ dictWord{14, 10, 147},
+ dictWord{14, 10, 229},
+ dictWord{17, 10, 71},
+ dictWord{18, 10, 118},
+ dictWord{147, 10, 120},
+ dictWord{5, 0, 874},
+ dictWord{6, 0, 1677},
+ dictWord{15, 0, 0},
+ dictWord{10, 11, 525},
+ dictWord{139, 11, 82},
+ dictWord{6, 0, 65},
+ dictWord{7, 0, 939},
+ dictWord{7, 0, 1172},
+ dictWord{7, 0, 1671},
+ dictWord{9, 0, 540},
+ dictWord{10, 0, 696},
+ dictWord{11, 0, 265},
+ dictWord{11, 0, 732},
+ dictWord{11, 0, 928},
+ dictWord{11, 0, 937},
+ dictWord{141, 0, 438},
+ dictWord{134, 0, 1350},
+ dictWord{136, 11, 547},
+ dictWord{132, 11, 422},
+ dictWord{5, 11, 355},
+ dictWord{145, 11, 0},
+ dictWord{137, 11, 905},
+ dictWord{5, 0, 682},
+ dictWord{135, 0, 1887},
+ dictWord{132, 0, 809},
+ dictWord{4, 0, 696},
+ dictWord{133, 11, 865},
+ dictWord{6, 0, 1074},
+ dictWord{6, 0, 1472},
+ dictWord{14, 10, 35},
+ dictWord{142, 10, 191},
+ dictWord{5, 11, 914},
+ dictWord{134, 11, 1625},
+ dictWord{133, 11, 234},
+ dictWord{135, 11, 1383},
+ dictWord{137, 11, 780},
+ dictWord{132, 10, 125},
+ dictWord{4, 0, 726},
+ dictWord{133, 0, 630},
+ dictWord{8, 0, 802},
+ dictWord{136, 0, 838},
+ dictWord{132, 10, 721},
+ dictWord{6, 0, 1337},
+ dictWord{7, 0, 776},
+ dictWord{19, 0, 56},
+ dictWord{136, 10, 145},
+ dictWord{132, 0, 970},
+ dictWord{7, 10, 792},
+ dictWord{8, 10, 147},
+ dictWord{10, 10, 821},
+ dictWord{139, 10, 1021},
+ dictWord{139, 10, 970},
+ dictWord{8, 0, 940},
+ dictWord{137, 0, 797},
+ dictWord{135, 11, 1312},
+ dictWord{9, 0, 248},
+ dictWord{10, 0, 400},
+ dictWord{7, 11, 816},
+ dictWord{7, 11, 1241},
+ dictWord{7, 10, 1999},
+ dictWord{9, 11, 283},
+ dictWord{9, 11, 520},
+ dictWord{10, 11, 213},
+ dictWord{10, 11, 307},
+ dictWord{10, 11, 463},
+ dictWord{10, 11, 671},
+ dictWord{10, 11, 746},
+ dictWord{11, 11, 401},
+ dictWord{11, 11, 794},
+ dictWord{12, 11, 517},
+ dictWord{18, 11, 107},
+ dictWord{147, 11, 115},
+ dictWord{6, 0, 1951},
+ dictWord{134, 0, 2040},
+ dictWord{135, 11, 339},
+ dictWord{13, 0, 41},
+ dictWord{15, 0, 93},
+ dictWord{5, 10, 168},
+ dictWord{5, 10, 930},
+ dictWord{8, 10, 74},
+ dictWord{9, 10, 623},
+ dictWord{12, 10, 500},
+ dictWord{140, 10, 579},
+ dictWord{6, 0, 118},
+ dictWord{7, 0, 215},
+ dictWord{7, 0, 1521},
+ dictWord{140, 0, 11},
+ dictWord{6, 10, 220},
+ dictWord{7, 10, 1101},
+ dictWord{141, 10, 105},
+ dictWord{6, 11, 421},
+ dictWord{7, 11, 61},
+ dictWord{7, 11, 1540},
+ dictWord{10, 11, 11},
+ dictWord{138, 11, 501},
+ dictWord{7, 0, 615},
+ dictWord{138, 0, 251},
+ dictWord{140, 11, 631},
+ dictWord{135, 0, 1044},
+ dictWord{6, 10, 19},
+ dictWord{7, 10, 1413},
+ dictWord{139, 10, 428},
+ dictWord{133, 0, 225},
+ dictWord{7, 10, 96},
+ dictWord{8, 10, 401},
+ dictWord{8, 10, 703},
+ dictWord{137, 10, 896},
+ dictWord{145, 10, 116},
+ dictWord{6, 11, 102},
+ dictWord{7, 11, 72},
+ dictWord{15, 11, 142},
+ dictWord{147, 11, 67},
+ dictWord{7, 10, 1961},
+ dictWord{7, 10, 1965},
+ dictWord{8, 10, 702},
+ dictWord{136, 10, 750},
+ dictWord{7, 10, 2030},
+ dictWord{8, 10, 150},
+ dictWord{8, 10, 737},
+ dictWord{12, 10, 366},
+ dictWord{151, 11, 30},
+ dictWord{4, 0, 370},
+ dictWord{5, 0, 756},
+ dictWord{7, 0, 1326},
+ dictWord{135, 11, 823},
+ dictWord{8, 10, 800},
+ dictWord{9, 10, 148},
+ dictWord{9, 10, 872},
+ dictWord{9, 10, 890},
+ dictWord{11, 10, 309},
+ dictWord{11, 10, 1001},
+ dictWord{13, 10, 267},
+ dictWord{141, 10, 323},
+ dictWord{6, 0, 1662},
+ dictWord{7, 0, 48},
+ dictWord{8, 0, 771},
+ dictWord{10, 0, 116},
+ dictWord{13, 0, 104},
+ dictWord{14, 0, 105},
+ dictWord{14, 0, 184},
+ dictWord{15, 0, 168},
+ dictWord{19, 0, 92},
+ dictWord{148, 0, 68},
+ dictWord{10, 0, 209},
+ dictWord{135, 11, 1870},
+ dictWord{7, 11, 68},
+ dictWord{8, 11, 48},
+ dictWord{8, 11, 88},
+ dictWord{8, 11, 582},
+ dictWord{8, 11, 681},
+ dictWord{9, 11, 373},
+ dictWord{9, 11, 864},
+ dictWord{11, 11, 157},
+ dictWord{11, 11, 336},
+ dictWord{11, 11, 843},
+ dictWord{148, 11, 27},
+ dictWord{134, 0, 930},
+ dictWord{4, 11, 88},
+ dictWord{5, 11, 137},
+ dictWord{5, 11, 174},
+ dictWord{5, 11, 777},
+ dictWord{6, 11, 1664},
+ dictWord{6, 11, 1725},
+ dictWord{7, 11, 77},
+ dictWord{7, 11, 426},
+ dictWord{7, 11, 1317},
+ dictWord{7, 11, 1355},
+ dictWord{8, 11, 126},
+ dictWord{8, 11, 563},
+ dictWord{9, 11, 523},
+ dictWord{9, 11, 750},
+ dictWord{10, 11, 310},
+ dictWord{10, 11, 836},
+ dictWord{11, 11, 42},
+ dictWord{11, 11, 318},
+ dictWord{11, 11, 731},
+ dictWord{12, 11, 68},
+ dictWord{12, 11, 92},
+ dictWord{12, 11, 507},
+ dictWord{12, 11, 692},
+ dictWord{13, 11, 81},
+ dictWord{13, 11, 238},
+ dictWord{13, 11, 374},
+ dictWord{18, 11, 138},
+ dictWord{19, 11, 78},
+ dictWord{19, 11, 111},
+ dictWord{20, 11, 55},
+ dictWord{20, 11, 77},
+ dictWord{148, 11, 92},
+ dictWord{4, 11, 938},
+ dictWord{135, 11, 1831},
+ dictWord{5, 10, 547},
+ dictWord{7, 10, 424},
+ dictWord{8, 11, 617},
+ dictWord{138, 11, 351},
+ dictWord{6, 0, 1286},
+ dictWord{6, 11, 1668},
+ dictWord{7, 11, 1499},
+ dictWord{8, 11, 117},
+ dictWord{9, 11, 314},
+ dictWord{138, 11, 174},
+ dictWord{6, 0, 759},
+ dictWord{6, 0, 894},
+ dictWord{7, 11, 707},
+ dictWord{139, 11, 563},
+ dictWord{4, 0, 120},
+ dictWord{135, 0, 1894},
+ dictWord{9, 0, 385},
+ dictWord{149, 0, 17},
+ dictWord{138, 0, 429},
+ dictWord{133, 11, 403},
+ dictWord{5, 0, 820},
+ dictWord{135, 0, 931},
+ dictWord{10, 0, 199},
+ dictWord{133, 10, 133},
+ dictWord{6, 0, 151},
+ dictWord{6, 0, 1675},
+ dictWord{7, 0, 383},
+ dictWord{151, 0, 10},
+ dictWord{6, 0, 761},
+ dictWord{136, 10, 187},
+ dictWord{8, 0, 365},
+ dictWord{10, 10, 0},
+ dictWord{10, 10, 818},
+ dictWord{139, 10, 988},
+ dictWord{4, 11, 44},
+ dictWord{5, 11, 311},
+ dictWord{6, 11, 156},
+ dictWord{7, 11, 639},
+ dictWord{7, 11, 762},
+ dictWord{7, 11, 1827},
+ dictWord{9, 11, 8},
+ dictWord{9, 11, 462},
+ dictWord{148, 11, 83},
+ dictWord{4, 11, 346},
+ dictWord{7, 11, 115},
+ dictWord{9, 11, 180},
+ dictWord{9, 11, 456},
+ dictWord{138, 11, 363},
+ dictWord{136, 10, 685},
+ dictWord{7, 0, 1086},
+ dictWord{145, 0, 46},
+ dictWord{6, 0, 1624},
+ dictWord{11, 0, 11},
+ dictWord{12, 0, 422},
+ dictWord{13, 0, 444},
+ dictWord{142, 0, 360},
+ dictWord{6, 0, 1020},
+ dictWord{6, 0, 1260},
+ dictWord{134, 0, 1589},
+ dictWord{4, 0, 43},
+ dictWord{5, 0, 344},
+ dictWord{5, 0, 357},
+ dictWord{14, 0, 472},
+ dictWord{150, 0, 58},
+ dictWord{6, 0, 1864},
+ dictWord{6, 0, 1866},
+ dictWord{6, 0, 1868},
+ dictWord{6, 0, 1869},
+ dictWord{6, 0, 1874},
+ dictWord{6, 0, 1877},
+ dictWord{6, 0, 1903},
+ dictWord{6, 0, 1911},
+ dictWord{9, 0, 920},
+ dictWord{9, 0, 921},
+ dictWord{9, 0, 924},
+ dictWord{9, 0, 946},
+ dictWord{9, 0, 959},
+ dictWord{9, 0, 963},
+ dictWord{9, 0, 970},
+ dictWord{9, 0, 997},
+ dictWord{9, 0, 1008},
+ dictWord{9, 0, 1017},
+ dictWord{12, 0, 795},
+ dictWord{12, 0, 797},
+ dictWord{12, 0, 798},
+ dictWord{12, 0, 800},
+ dictWord{12, 0, 803},
+ dictWord{12, 0, 811},
+ dictWord{12, 0, 820},
+ dictWord{12, 0, 821},
+ dictWord{12, 0, 839},
+ dictWord{12, 0, 841},
+ dictWord{12, 0, 848},
+ dictWord{12, 0, 911},
+ dictWord{12, 0, 921},
+ dictWord{12, 0, 922},
+ dictWord{12, 0, 925},
+ dictWord{12, 0, 937},
+ dictWord{12, 0, 944},
+ dictWord{12, 0, 945},
+ dictWord{12, 0, 953},
+ dictWord{15, 0, 184},
+ dictWord{15, 0, 191},
+ dictWord{15, 0, 199},
+ dictWord{15, 0, 237},
+ dictWord{15, 0, 240},
+ dictWord{15, 0, 243},
+ dictWord{15, 0, 246},
+ dictWord{18, 0, 203},
+ dictWord{21, 0, 40},
+ dictWord{21, 0, 52},
+ dictWord{21, 0, 57},
+ dictWord{24, 0, 23},
+ dictWord{24, 0, 28},
+ dictWord{152, 0, 30},
+ dictWord{134, 0, 725},
+ dictWord{145, 11, 58},
+ dictWord{133, 0, 888},
+ dictWord{137, 10, 874},
+ dictWord{4, 0, 711},
+ dictWord{8, 10, 774},
+ dictWord{10, 10, 670},
+ dictWord{140, 10, 51},
+ dictWord{144, 11, 40},
+ dictWord{6, 11, 185},
+ dictWord{7, 11, 1899},
+ dictWord{139, 11, 673},
+ dictWord{137, 10, 701},
+ dictWord{137, 0, 440},
+ dictWord{4, 11, 327},
+ dictWord{5, 11, 478},
+ dictWord{7, 11, 1332},
+ dictWord{8, 11, 753},
+ dictWord{140, 11, 227},
+ dictWord{4, 10, 127},
+ dictWord{5, 10, 350},
+ dictWord{6, 10, 356},
+ dictWord{8, 10, 426},
+ dictWord{9, 10, 572},
+ dictWord{10, 10, 247},
+ dictWord{139, 10, 312},
+ dictWord{5, 11, 1020},
+ dictWord{133, 11, 1022},
+ dictWord{4, 11, 103},
+ dictWord{133, 11, 401},
+ dictWord{6, 0, 1913},
+ dictWord{6, 0, 1926},
+ dictWord{6, 0, 1959},
+ dictWord{9, 0, 914},
+ dictWord{9, 0, 939},
+ dictWord{9, 0, 952},
+ dictWord{9, 0, 979},
+ dictWord{9, 0, 990},
+ dictWord{9, 0, 998},
+ dictWord{9, 0, 1003},
+ dictWord{9, 0, 1023},
+ dictWord{12, 0, 827},
+ dictWord{12, 0, 834},
+ dictWord{12, 0, 845},
+ dictWord{12, 0, 912},
+ dictWord{12, 0, 935},
+ dictWord{12, 0, 951},
+ dictWord{15, 0, 172},
+ dictWord{15, 0, 174},
+ dictWord{18, 0, 198},
+ dictWord{149, 0, 63},
+ dictWord{5, 0, 958},
+ dictWord{5, 0, 987},
+ dictWord{4, 11, 499},
+ dictWord{135, 11, 1421},
+ dictWord{7, 0, 885},
+ dictWord{6, 10, 59},
+ dictWord{6, 10, 1762},
+ dictWord{9, 10, 603},
+ dictWord{141, 10, 397},
+ dictWord{10, 11, 62},
+ dictWord{141, 11, 164},
+ dictWord{4, 0, 847},
+ dictWord{135, 0, 326},
+ dictWord{11, 0, 276},
+ dictWord{142, 0, 293},
+ dictWord{4, 0, 65},
+ dictWord{5, 0, 479},
+ dictWord{5, 0, 1004},
+ dictWord{7, 0, 1913},
+ dictWord{8, 0, 317},
+ dictWord{9, 0, 302},
+ dictWord{10, 0, 612},
+ dictWord{13, 0, 22},
+ dictWord{132, 11, 96},
+ dictWord{4, 0, 261},
+ dictWord{135, 0, 510},
+ dictWord{135, 0, 1514},
+ dictWord{6, 10, 111},
+ dictWord{7, 10, 4},
+ dictWord{8, 10, 163},
+ dictWord{8, 10, 776},
+ dictWord{138, 10, 566},
+ dictWord{4, 0, 291},
+ dictWord{9, 0, 515},
+ dictWord{12, 0, 152},
+ dictWord{12, 0, 443},
+ dictWord{13, 0, 392},
+ dictWord{142, 0, 357},
+ dictWord{7, 11, 399},
+ dictWord{135, 11, 1492},
+ dictWord{4, 0, 589},
+ dictWord{139, 0, 282},
+ dictWord{6, 11, 563},
+ dictWord{135, 10, 1994},
+ dictWord{5, 10, 297},
+ dictWord{135, 10, 1038},
+ dictWord{4, 0, 130},
+ dictWord{7, 0, 843},
+ dictWord{135, 0, 1562},
+ dictWord{5, 0, 42},
+ dictWord{5, 0, 879},
+ dictWord{7, 0, 245},
+ dictWord{7, 0, 324},
+ dictWord{7, 0, 1532},
+ dictWord{11, 0, 463},
+ dictWord{11, 0, 472},
+ dictWord{13, 0, 363},
+ dictWord{144, 0, 52},
+ dictWord{4, 0, 134},
+ dictWord{133, 0, 372},
+ dictWord{133, 0, 680},
+ dictWord{136, 10, 363},
+ dictWord{6, 0, 1997},
+ dictWord{8, 0, 935},
+ dictWord{136, 0, 977},
+ dictWord{4, 0, 810},
+ dictWord{135, 0, 1634},
+ dictWord{135, 10, 1675},
+ dictWord{7, 0, 1390},
+ dictWord{4, 11, 910},
+ dictWord{133, 11, 832},
+ dictWord{7, 10, 808},
+ dictWord{8, 11, 266},
+ dictWord{139, 11, 578},
+ dictWord{132, 0, 644},
+ dictWord{4, 0, 982},
+ dictWord{138, 0, 867},
+ dictWord{132, 10, 280},
+ dictWord{135, 0, 540},
+ dictWord{140, 10, 54},
+ dictWord{135, 0, 123},
+ dictWord{134, 0, 1978},
+ dictWord{4, 10, 421},
+ dictWord{133, 10, 548},
+ dictWord{6, 0, 623},
+ dictWord{136, 0, 789},
+ dictWord{4, 0, 908},
+ dictWord{5, 0, 359},
+ dictWord{5, 0, 508},
+ dictWord{6, 0, 1723},
+ dictWord{7, 0, 343},
+ dictWord{7, 0, 1996},
+ dictWord{135, 0, 2026},
+ dictWord{134, 0, 1220},
+ dictWord{4, 0, 341},
+ dictWord{135, 0, 480},
+ dictWord{6, 10, 254},
+ dictWord{9, 10, 109},
+ dictWord{138, 10, 103},
+ dictWord{134, 0, 888},
+ dictWord{8, 11, 528},
+ dictWord{137, 11, 348},
+ dictWord{7, 0, 1995},
+ dictWord{8, 0, 299},
+ dictWord{11, 0, 890},
+ dictWord{12, 0, 674},
+ dictWord{4, 11, 20},
+ dictWord{133, 11, 616},
+ dictWord{135, 11, 1094},
+ dictWord{134, 10, 1630},
+ dictWord{4, 0, 238},
+ dictWord{5, 0, 503},
+ dictWord{6, 0, 179},
+ dictWord{7, 0, 2003},
+ dictWord{8, 0, 381},
+ dictWord{8, 0, 473},
+ dictWord{9, 0, 149},
+ dictWord{10, 0, 788},
+ dictWord{15, 0, 45},
+ dictWord{15, 0, 86},
+ dictWord{20, 0, 110},
+ dictWord{150, 0, 57},
+ dictWord{133, 10, 671},
+ dictWord{4, 11, 26},
+ dictWord{5, 11, 429},
+ dictWord{6, 11, 245},
+ dictWord{7, 11, 704},
+ dictWord{7, 11, 1379},
+ dictWord{135, 11, 1474},
+ dictWord{4, 0, 121},
+ dictWord{5, 0, 156},
+ dictWord{5, 0, 349},
+ dictWord{9, 0, 431},
+ dictWord{10, 0, 605},
+ dictWord{142, 0, 342},
+ dictWord{7, 11, 943},
+ dictWord{139, 11, 614},
+ dictWord{132, 10, 889},
+ dictWord{132, 11, 621},
+ dictWord{7, 10, 1382},
+ dictWord{7, 11, 1382},
+ dictWord{135, 10, 1910},
+ dictWord{132, 10, 627},
+ dictWord{133, 10, 775},
+ dictWord{133, 11, 542},
+ dictWord{133, 11, 868},
+ dictWord{136, 11, 433},
+ dictWord{6, 0, 1373},
+ dictWord{7, 0, 1011},
+ dictWord{11, 10, 362},
+ dictWord{11, 10, 948},
+ dictWord{140, 10, 388},
+ dictWord{6, 0, 80},
+ dictWord{7, 0, 173},
+ dictWord{9, 0, 547},
+ dictWord{10, 0, 730},
+ dictWord{14, 0, 18},
+ dictWord{22, 0, 39},
+ dictWord{135, 11, 1495},
+ dictWord{6, 0, 1694},
+ dictWord{135, 0, 1974},
+ dictWord{140, 0, 196},
+ dictWord{4, 0, 923},
+ dictWord{6, 0, 507},
+ dictWord{6, 0, 1711},
+ dictWord{7, 10, 451},
+ dictWord{8, 10, 389},
+ dictWord{12, 10, 490},
+ dictWord{13, 10, 16},
+ dictWord{13, 10, 215},
+ dictWord{13, 10, 351},
+ dictWord{18, 10, 132},
+ dictWord{147, 10, 125},
+ dictWord{6, 0, 646},
+ dictWord{134, 0, 1047},
+ dictWord{135, 10, 841},
+ dictWord{136, 10, 566},
+ dictWord{6, 0, 1611},
+ dictWord{135, 0, 1214},
+ dictWord{139, 0, 926},
+ dictWord{132, 11, 525},
+ dictWord{132, 0, 595},
+ dictWord{5, 0, 240},
+ dictWord{6, 0, 459},
+ dictWord{7, 0, 12},
+ dictWord{7, 0, 114},
+ dictWord{7, 0, 949},
+ dictWord{7, 0, 1753},
+ dictWord{7, 0, 1805},
+ dictWord{8, 0, 658},
+ dictWord{9, 0, 1},
+ dictWord{11, 0, 959},
+ dictWord{141, 0, 446},
+ dictWord{5, 10, 912},
+ dictWord{134, 10, 1695},
+ dictWord{132, 0, 446},
+ dictWord{7, 11, 62},
+ dictWord{12, 11, 45},
+ dictWord{147, 11, 112},
+ dictWord{5, 10, 236},
+ dictWord{6, 10, 572},
+ dictWord{8, 10, 492},
+ dictWord{11, 10, 618},
+ dictWord{144, 10, 56},
+ dictWord{5, 10, 190},
+ dictWord{136, 10, 318},
+ dictWord{135, 10, 1376},
+ dictWord{4, 11, 223},
+ dictWord{6, 11, 359},
+ dictWord{11, 11, 3},
+ dictWord{13, 11, 108},
+ dictWord{14, 11, 89},
+ dictWord{144, 11, 22},
+ dictWord{132, 11, 647},
+ dictWord{134, 0, 490},
+ dictWord{134, 0, 491},
+ dictWord{134, 0, 1584},
+ dictWord{135, 11, 685},
+ dictWord{138, 11, 220},
+ dictWord{7, 0, 250},
+ dictWord{136, 0, 507},
+ dictWord{132, 0, 158},
+ dictWord{4, 0, 140},
+ dictWord{7, 0, 362},
+ dictWord{8, 0, 209},
+ dictWord{9, 0, 10},
+ dictWord{9, 0, 160},
+ dictWord{9, 0, 503},
+ dictWord{9, 0, 614},
+ dictWord{10, 0, 689},
+ dictWord{11, 0, 327},
+ dictWord{11, 0, 553},
+ dictWord{11, 0, 725},
+ dictWord{11, 0, 767},
+ dictWord{12, 0, 252},
+ dictWord{12, 0, 583},
+ dictWord{13, 0, 192},
+ dictWord{14, 0, 269},
+ dictWord{14, 0, 356},
+ dictWord{148, 0, 50},
+ dictWord{19, 0, 1},
+ dictWord{19, 0, 26},
+ dictWord{150, 0, 9},
+ dictWord{132, 11, 109},
+ dictWord{6, 0, 228},
+ dictWord{7, 0, 1341},
+ dictWord{9, 0, 408},
+ dictWord{138, 0, 343},
+ dictWord{4, 0, 373},
+ dictWord{5, 0, 283},
+ dictWord{6, 0, 480},
+ dictWord{7, 0, 609},
+ dictWord{10, 0, 860},
+ dictWord{138, 0, 878},
+ dictWord{6, 0, 779},
+ dictWord{134, 0, 1209},
+ dictWord{4, 0, 557},
+ dictWord{7, 11, 263},
+ dictWord{7, 11, 628},
+ dictWord{136, 11, 349},
+ dictWord{132, 0, 548},
+ dictWord{7, 0, 197},
+ dictWord{8, 0, 142},
+ dictWord{8, 0, 325},
+ dictWord{9, 0, 150},
+ dictWord{9, 0, 596},
+ dictWord{10, 0, 350},
+ dictWord{10, 0, 353},
+ dictWord{11, 0, 74},
+ dictWord{11, 0, 315},
+ dictWord{12, 0, 662},
+ dictWord{12, 0, 681},
+ dictWord{14, 0, 423},
+ dictWord{143, 0, 141},
+ dictWord{4, 11, 40},
+ dictWord{10, 11, 67},
+ dictWord{11, 11, 117},
+ dictWord{11, 11, 768},
+ dictWord{139, 11, 935},
+ dictWord{7, 11, 992},
+ dictWord{8, 11, 301},
+ dictWord{9, 11, 722},
+ dictWord{12, 11, 63},
+ dictWord{13, 11, 29},
+ dictWord{14, 11, 161},
+ dictWord{143, 11, 18},
+ dictWord{6, 0, 1490},
+ dictWord{138, 11, 532},
+ dictWord{5, 0, 580},
+ dictWord{7, 0, 378},
+ dictWord{7, 0, 674},
+ dictWord{7, 0, 1424},
+ dictWord{15, 0, 83},
+ dictWord{16, 0, 11},
+ dictWord{15, 11, 83},
+ dictWord{144, 11, 11},
+ dictWord{6, 0, 1057},
+ dictWord{6, 0, 1335},
+ dictWord{10, 0, 316},
+ dictWord{7, 10, 85},
+ dictWord{7, 10, 247},
+ dictWord{8, 10, 585},
+ dictWord{138, 10, 163},
+ dictWord{4, 0, 169},
+ dictWord{5, 0, 83},
+ dictWord{6, 0, 399},
+ dictWord{6, 0, 579},
+ dictWord{6, 0, 1513},
+ dictWord{7, 0, 692},
+ dictWord{7, 0, 846},
+ dictWord{7, 0, 1015},
+ dictWord{7, 0, 1799},
+ dictWord{8, 0, 403},
+ dictWord{9, 0, 394},
+ dictWord{10, 0, 133},
+ dictWord{12, 0, 4},
+ dictWord{12, 0, 297},
+ dictWord{12, 0, 452},
+ dictWord{16, 0, 81},
+ dictWord{18, 0, 25},
+ dictWord{21, 0, 14},
+ dictWord{22, 0, 12},
+ dictWord{151, 0, 18},
+ dictWord{134, 0, 1106},
+ dictWord{7, 0, 1546},
+ dictWord{11, 0, 299},
+ dictWord{142, 0, 407},
+ dictWord{134, 0, 1192},
+ dictWord{132, 0, 177},
+ dictWord{5, 0, 411},
+ dictWord{135, 0, 653},
+ dictWord{7, 0, 439},
+ dictWord{10, 0, 727},
+ dictWord{11, 0, 260},
+ dictWord{139, 0, 684},
+ dictWord{138, 10, 145},
+ dictWord{147, 10, 83},
+ dictWord{5, 0, 208},
+ dictWord{7, 0, 753},
+ dictWord{135, 0, 1528},
+ dictWord{137, 11, 617},
+ dictWord{135, 10, 1922},
+ dictWord{135, 11, 825},
+ dictWord{11, 0, 422},
+ dictWord{13, 0, 389},
+ dictWord{4, 10, 124},
+ dictWord{10, 10, 457},
+ dictWord{11, 10, 121},
+ dictWord{11, 10, 169},
+ dictWord{11, 10, 870},
+ dictWord{12, 10, 214},
+ dictWord{14, 10, 187},
+ dictWord{143, 10, 77},
+ dictWord{11, 0, 615},
+ dictWord{15, 0, 58},
+ dictWord{11, 11, 615},
+ dictWord{143, 11, 58},
+ dictWord{9, 0, 618},
+ dictWord{138, 0, 482},
+ dictWord{6, 0, 1952},
+ dictWord{6, 0, 1970},
+ dictWord{142, 0, 505},
+ dictWord{7, 10, 1193},
+ dictWord{135, 11, 1838},
+ dictWord{133, 0, 242},
+ dictWord{135, 10, 1333},
+ dictWord{6, 10, 107},
+ dictWord{7, 10, 638},
+ dictWord{7, 10, 1632},
+ dictWord{137, 10, 396},
+ dictWord{133, 0, 953},
+ dictWord{5, 10, 370},
+ dictWord{134, 10, 1756},
+ dictWord{5, 11, 28},
+ dictWord{6, 11, 204},
+ dictWord{10, 11, 320},
+ dictWord{10, 11, 583},
+ dictWord{13, 11, 502},
+ dictWord{14, 11, 72},
+ dictWord{14, 11, 274},
+ dictWord{14, 11, 312},
+ dictWord{14, 11, 344},
+ dictWord{15, 11, 159},
+ dictWord{16, 11, 62},
+ dictWord{16, 11, 69},
+ dictWord{17, 11, 30},
+ dictWord{18, 11, 42},
+ dictWord{18, 11, 53},
+ dictWord{18, 11, 84},
+ dictWord{18, 11, 140},
+ dictWord{19, 11, 68},
+ dictWord{19, 11, 85},
+ dictWord{20, 11, 5},
+ dictWord{20, 11, 45},
+ dictWord{20, 11, 101},
+ dictWord{22, 11, 7},
+ dictWord{150, 11, 20},
+ dictWord{4, 11, 558},
+ dictWord{6, 11, 390},
+ dictWord{7, 11, 162},
+ dictWord{7, 11, 689},
+ dictWord{9, 11, 360},
+ dictWord{138, 11, 653},
+ dictWord{11, 0, 802},
+ dictWord{141, 0, 67},
+ dictWord{133, 10, 204},
+ dictWord{133, 0, 290},
+ dictWord{5, 10, 970},
+ dictWord{134, 10, 1706},
+ dictWord{132, 0, 380},
+ dictWord{5, 0, 52},
+ dictWord{7, 0, 277},
+ dictWord{9, 0, 368},
+ dictWord{139, 0, 791},
+ dictWord{5, 11, 856},
+ dictWord{6, 11, 1672},
+ dictWord{6, 11, 1757},
+ dictWord{6, 11, 1781},
+ dictWord{7, 11, 1150},
+ dictWord{7, 11, 1425},
+ dictWord{7, 11, 1453},
+ dictWord{140, 11, 513},
+ dictWord{5, 11, 92},
+ dictWord{7, 10, 3},
+ dictWord{10, 11, 736},
+ dictWord{140, 11, 102},
+ dictWord{4, 0, 112},
+ dictWord{5, 0, 653},
+ dictWord{5, 10, 483},
+ dictWord{5, 10, 685},
+ dictWord{6, 10, 489},
+ dictWord{7, 10, 1204},
+ dictWord{136, 10, 394},
+ dictWord{132, 10, 921},
+ dictWord{6, 0, 1028},
+ dictWord{133, 10, 1007},
+ dictWord{5, 11, 590},
+ dictWord{9, 11, 213},
+ dictWord{145, 11, 91},
+ dictWord{135, 10, 1696},
+ dictWord{10, 0, 138},
+ dictWord{139, 0, 476},
+ dictWord{5, 0, 725},
+ dictWord{5, 0, 727},
+ dictWord{135, 0, 1811},
+ dictWord{4, 0, 979},
+ dictWord{6, 0, 1821},
+ dictWord{6, 0, 1838},
+ dictWord{8, 0, 876},
+ dictWord{8, 0, 883},
+ dictWord{8, 0, 889},
+ dictWord{8, 0, 893},
+ dictWord{8, 0, 895},
+ dictWord{10, 0, 934},
+ dictWord{12, 0, 720},
+ dictWord{14, 0, 459},
+ dictWord{148, 0, 123},
+ dictWord{135, 11, 551},
+ dictWord{4, 0, 38},
+ dictWord{6, 0, 435},
+ dictWord{7, 0, 307},
+ dictWord{7, 0, 999},
+ dictWord{7, 0, 1481},
+ dictWord{7, 0, 1732},
+ dictWord{7, 0, 1738},
+ dictWord{8, 0, 371},
+ dictWord{9, 0, 414},
+ dictWord{11, 0, 316},
+ dictWord{12, 0, 52},
+ dictWord{13, 0, 420},
+ dictWord{147, 0, 100},
+ dictWord{135, 0, 1296},
+ dictWord{132, 10, 712},
+ dictWord{134, 10, 1629},
+ dictWord{133, 0, 723},
+ dictWord{134, 0, 651},
+ dictWord{136, 11, 191},
+ dictWord{9, 11, 791},
+ dictWord{10, 11, 93},
+ dictWord{11, 11, 301},
+ dictWord{16, 11, 13},
+ dictWord{17, 11, 23},
+ dictWord{18, 11, 135},
+ dictWord{19, 11, 12},
+ dictWord{20, 11, 1},
+ dictWord{20, 11, 12},
+ dictWord{148, 11, 14},
+ dictWord{136, 11, 503},
+ dictWord{6, 11, 466},
+ dictWord{135, 11, 671},
+ dictWord{6, 0, 1200},
+ dictWord{134, 0, 1330},
+ dictWord{135, 0, 1255},
+ dictWord{134, 0, 986},
+ dictWord{5, 0, 109},
+ dictWord{6, 0, 1784},
+ dictWord{7, 0, 1895},
+ dictWord{12, 0, 296},
+ dictWord{140, 0, 302},
+ dictWord{135, 11, 983},
+ dictWord{133, 10, 485},
+ dictWord{134, 0, 660},
+ dictWord{134, 0, 800},
+ dictWord{5, 0, 216},
+ dictWord{5, 0, 294},
+ dictWord{6, 0, 591},
+ dictWord{7, 0, 1879},
+ dictWord{9, 0, 141},
+ dictWord{9, 0, 270},
+ dictWord{9, 0, 679},
+ dictWord{10, 0, 159},
+ dictWord{11, 0, 197},
+ dictWord{11, 0, 438},
+ dictWord{12, 0, 538},
+ dictWord{12, 0, 559},
+ dictWord{14, 0, 144},
+ dictWord{14, 0, 167},
+ dictWord{15, 0, 67},
+ dictWord{4, 10, 285},
+ dictWord{5, 10, 317},
+ dictWord{6, 10, 301},
+ dictWord{7, 10, 7},
+ dictWord{8, 10, 153},
+ dictWord{10, 10, 766},
+ dictWord{11, 10, 468},
+ dictWord{12, 10, 467},
+ dictWord{141, 10, 143},
+ dictWord{136, 0, 945},
+ dictWord{134, 0, 1090},
+ dictWord{137, 0, 81},
+ dictWord{12, 11, 468},
+ dictWord{19, 11, 96},
+ dictWord{148, 11, 24},
+ dictWord{134, 0, 391},
+ dictWord{138, 11, 241},
+ dictWord{7, 0, 322},
+ dictWord{136, 0, 249},
+ dictWord{134, 0, 1412},
+ dictWord{135, 11, 795},
+ dictWord{5, 0, 632},
+ dictWord{138, 0, 526},
+ dictWord{136, 10, 819},
+ dictWord{6, 0, 144},
+ dictWord{7, 0, 948},
+ dictWord{7, 0, 1042},
+ dictWord{8, 0, 235},
+ dictWord{8, 0, 461},
+ dictWord{9, 0, 453},
+ dictWord{9, 0, 796},
+ dictWord{10, 0, 354},
+ dictWord{17, 0, 77},
+ dictWord{135, 11, 954},
+ dictWord{139, 10, 917},
+ dictWord{6, 0, 940},
+ dictWord{134, 0, 1228},
+ dictWord{4, 0, 362},
+ dictWord{7, 0, 52},
+ dictWord{135, 0, 303},
+ dictWord{6, 11, 549},
+ dictWord{8, 11, 34},
+ dictWord{8, 11, 283},
+ dictWord{9, 11, 165},
+ dictWord{138, 11, 475},
+ dictWord{7, 11, 370},
+ dictWord{7, 11, 1007},
+ dictWord{7, 11, 1177},
+ dictWord{135, 11, 1565},
+ dictWord{5, 11, 652},
+ dictWord{5, 11, 701},
+ dictWord{135, 11, 449},
+ dictWord{5, 0, 196},
+ dictWord{6, 0, 486},
+ dictWord{7, 0, 212},
+ dictWord{8, 0, 309},
+ dictWord{136, 0, 346},
+ dictWord{6, 10, 1719},
+ dictWord{6, 10, 1735},
+ dictWord{7, 10, 2016},
+ dictWord{7, 10, 2020},
+ dictWord{8, 10, 837},
+ dictWord{137, 10, 852},
+ dictWord{6, 11, 159},
+ dictWord{6, 11, 364},
+ dictWord{7, 11, 516},
+ dictWord{7, 11, 1439},
+ dictWord{137, 11, 518},
+ dictWord{135, 0, 1912},
+ dictWord{135, 0, 1290},
+ dictWord{132, 0, 686},
+ dictWord{141, 11, 151},
+ dictWord{138, 0, 625},
+ dictWord{136, 0, 706},
+ dictWord{138, 10, 568},
+ dictWord{139, 0, 412},
+ dictWord{4, 0, 30},
+ dictWord{133, 0, 43},
+ dictWord{8, 10, 67},
+ dictWord{138, 10, 419},
+ dictWord{7, 0, 967},
+ dictWord{141, 0, 11},
+ dictWord{12, 0, 758},
+ dictWord{14, 0, 441},
+ dictWord{142, 0, 462},
+ dictWord{10, 10, 657},
+ dictWord{14, 10, 297},
+ dictWord{142, 10, 361},
+ dictWord{139, 10, 729},
+ dictWord{4, 0, 220},
+ dictWord{135, 0, 1535},
+ dictWord{7, 11, 501},
+ dictWord{9, 11, 111},
+ dictWord{10, 11, 141},
+ dictWord{11, 11, 332},
+ dictWord{13, 11, 43},
+ dictWord{13, 11, 429},
+ dictWord{14, 11, 130},
+ dictWord{14, 11, 415},
+ dictWord{145, 11, 102},
+ dictWord{4, 0, 950},
+ dictWord{6, 0, 1859},
+ dictWord{7, 0, 11},
+ dictWord{8, 0, 873},
+ dictWord{12, 0, 710},
+ dictWord{12, 0, 718},
+ dictWord{12, 0, 748},
+ dictWord{12, 0, 765},
+ dictWord{148, 0, 124},
+ dictWord{5, 11, 149},
+ dictWord{5, 11, 935},
+ dictWord{136, 11, 233},
+ dictWord{142, 11, 291},
+ dictWord{134, 0, 1579},
+ dictWord{7, 0, 890},
+ dictWord{8, 10, 51},
+ dictWord{9, 10, 868},
+ dictWord{10, 10, 833},
+ dictWord{12, 10, 481},
+ dictWord{12, 10, 570},
+ dictWord{148, 10, 106},
+ dictWord{141, 0, 2},
+ dictWord{132, 10, 445},
+ dictWord{136, 11, 801},
+ dictWord{135, 0, 1774},
+ dictWord{7, 0, 1725},
+ dictWord{138, 0, 393},
+ dictWord{5, 0, 263},
+ dictWord{134, 0, 414},
+ dictWord{132, 11, 322},
+ dictWord{133, 10, 239},
+ dictWord{7, 0, 456},
+ dictWord{7, 10, 1990},
+ dictWord{8, 10, 130},
+ dictWord{139, 10, 720},
+ dictWord{137, 0, 818},
+ dictWord{5, 10, 123},
+ dictWord{6, 10, 530},
+ dictWord{7, 10, 348},
+ dictWord{135, 10, 1419},
+ dictWord{135, 10, 2024},
+ dictWord{6, 0, 178},
+ dictWord{6, 0, 1750},
+ dictWord{8, 0, 251},
+ dictWord{9, 0, 690},
+ dictWord{10, 0, 155},
+ dictWord{10, 0, 196},
+ dictWord{10, 0, 373},
+ dictWord{11, 0, 698},
+ dictWord{13, 0, 155},
+ dictWord{148, 0, 93},
+ dictWord{5, 0, 97},
+ dictWord{137, 0, 393},
+ dictWord{134, 0, 674},
+ dictWord{11, 0, 223},
+ dictWord{140, 0, 168},
+ dictWord{132, 10, 210},
+ dictWord{139, 11, 464},
+ dictWord{6, 0, 1639},
+ dictWord{146, 0, 159},
+ dictWord{139, 11, 2},
+ dictWord{7, 0, 934},
+ dictWord{8, 0, 647},
+ dictWord{17, 0, 97},
+ dictWord{19, 0, 59},
+ dictWord{150, 0, 2},
+ dictWord{132, 0, 191},
+ dictWord{5, 0, 165},
+ dictWord{9, 0, 346},
+ dictWord{10, 0, 655},
+ dictWord{11, 0, 885},
+ dictWord{4, 10, 430},
+ dictWord{135, 11, 357},
+ dictWord{133, 0, 877},
+ dictWord{5, 10, 213},
+ dictWord{133, 11, 406},
+ dictWord{8, 0, 128},
+ dictWord{139, 0, 179},
+ dictWord{6, 11, 69},
+ dictWord{135, 11, 117},
+ dictWord{135, 0, 1297},
+ dictWord{11, 11, 43},
+ dictWord{13, 11, 72},
+ dictWord{141, 11, 142},
+ dictWord{135, 11, 1830},
+ dictWord{142, 0, 164},
+ dictWord{5, 0, 57},
+ dictWord{6, 0, 101},
+ dictWord{6, 0, 586},
+ dictWord{6, 0, 1663},
+ dictWord{7, 0, 132},
+ dictWord{7, 0, 1154},
+ dictWord{7, 0, 1415},
+ dictWord{7, 0, 1507},
+ dictWord{12, 0, 493},
+ dictWord{15, 0, 105},
+ dictWord{151, 0, 15},
+ dictWord{5, 0, 459},
+ dictWord{7, 0, 1073},
+ dictWord{8, 0, 241},
+ dictWord{136, 0, 334},
+ dictWord{133, 11, 826},
+ dictWord{133, 10, 108},
+ dictWord{5, 10, 219},
+ dictWord{10, 11, 132},
+ dictWord{11, 11, 191},
+ dictWord{11, 11, 358},
+ dictWord{139, 11, 460},
+ dictWord{6, 0, 324},
+ dictWord{6, 0, 520},
+ dictWord{7, 0, 338},
+ dictWord{7, 0, 1729},
+ dictWord{8, 0, 228},
+ dictWord{139, 0, 750},
+ dictWord{21, 0, 30},
+ dictWord{22, 0, 53},
+ dictWord{4, 10, 193},
+ dictWord{5, 10, 916},
+ dictWord{7, 10, 364},
+ dictWord{10, 10, 398},
+ dictWord{10, 10, 726},
+ dictWord{11, 10, 317},
+ dictWord{11, 10, 626},
+ dictWord{12, 10, 142},
+ dictWord{12, 10, 288},
+ dictWord{12, 10, 678},
+ dictWord{13, 10, 313},
+ dictWord{15, 10, 113},
+ dictWord{146, 10, 114},
+ dictWord{6, 11, 110},
+ dictWord{135, 11, 1681},
+ dictWord{135, 0, 910},
+ dictWord{6, 10, 241},
+ dictWord{7, 10, 907},
+ dictWord{8, 10, 832},
+ dictWord{9, 10, 342},
+ dictWord{10, 10, 729},
+ dictWord{11, 10, 284},
+ dictWord{11, 10, 445},
+ dictWord{11, 10, 651},
+ dictWord{11, 10, 863},
+ dictWord{13, 10, 398},
+ dictWord{146, 10, 99},
+ dictWord{7, 0, 705},
+ dictWord{9, 0, 734},
+ dictWord{5, 11, 1000},
+ dictWord{7, 11, 733},
+ dictWord{137, 11, 583},
+ dictWord{4, 0, 73},
+ dictWord{6, 0, 612},
+ dictWord{7, 0, 927},
+ dictWord{7, 0, 1822},
+ dictWord{8, 0, 217},
+ dictWord{9, 0, 765},
+ dictWord{9, 0, 766},
+ dictWord{10, 0, 408},
+ dictWord{11, 0, 51},
+ dictWord{11, 0, 793},
+ dictWord{12, 0, 266},
+ dictWord{15, 0, 158},
+ dictWord{20, 0, 89},
+ dictWord{150, 0, 32},
+ dictWord{7, 0, 1330},
+ dictWord{4, 11, 297},
+ dictWord{6, 11, 529},
+ dictWord{7, 11, 152},
+ dictWord{7, 11, 713},
+ dictWord{7, 11, 1845},
+ dictWord{8, 11, 710},
+ dictWord{8, 11, 717},
+ dictWord{140, 11, 639},
+ dictWord{5, 0, 389},
+ dictWord{136, 0, 636},
+ dictWord{134, 0, 1409},
+ dictWord{4, 10, 562},
+ dictWord{9, 10, 254},
+ dictWord{139, 10, 879},
+ dictWord{134, 0, 893},
+ dictWord{132, 10, 786},
+ dictWord{4, 11, 520},
+ dictWord{135, 11, 575},
+ dictWord{136, 0, 21},
+ dictWord{140, 0, 721},
+ dictWord{136, 0, 959},
+ dictWord{7, 11, 1428},
+ dictWord{7, 11, 1640},
+ dictWord{9, 11, 169},
+ dictWord{9, 11, 182},
+ dictWord{9, 11, 367},
+ dictWord{9, 11, 478},
+ dictWord{9, 11, 506},
+ dictWord{9, 11, 551},
+ dictWord{9, 11, 648},
+ dictWord{9, 11, 651},
+ dictWord{9, 11, 697},
+ dictWord{9, 11, 705},
+ dictWord{9, 11, 725},
+ dictWord{9, 11, 787},
+ dictWord{9, 11, 794},
+ dictWord{10, 11, 198},
+ dictWord{10, 11, 214},
+ dictWord{10, 11, 267},
+ dictWord{10, 11, 275},
+ dictWord{10, 11, 456},
+ dictWord{10, 11, 551},
+ dictWord{10, 11, 561},
+ dictWord{10, 11, 613},
+ dictWord{10, 11, 627},
+ dictWord{10, 11, 668},
+ dictWord{10, 11, 675},
+ dictWord{10, 11, 691},
+ dictWord{10, 11, 695},
+ dictWord{10, 11, 707},
+ dictWord{10, 11, 715},
+ dictWord{11, 11, 183},
+ dictWord{11, 11, 201},
+ dictWord{11, 11, 244},
+ dictWord{11, 11, 262},
+ dictWord{11, 11, 352},
+ dictWord{11, 11, 439},
+ dictWord{11, 11, 493},
+ dictWord{11, 11, 572},
+ dictWord{11, 11, 591},
+ dictWord{11, 11, 608},
+ dictWord{11, 11, 611},
+ dictWord{11, 11, 646},
+ dictWord{11, 11, 674},
+ dictWord{11, 11, 711},
+ dictWord{11, 11, 751},
+ dictWord{11, 11, 761},
+ dictWord{11, 11, 776},
+ dictWord{11, 11, 785},
+ dictWord{11, 11, 850},
+ dictWord{11, 11, 853},
+ dictWord{11, 11, 862},
+ dictWord{11, 11, 865},
+ dictWord{11, 11, 868},
+ dictWord{11, 11, 898},
+ dictWord{11, 11, 902},
+ dictWord{11, 11, 903},
+ dictWord{11, 11, 910},
+ dictWord{11, 11, 932},
+ dictWord{11, 11, 942},
+ dictWord{11, 11, 957},
+ dictWord{11, 11, 967},
+ dictWord{11, 11, 972},
+ dictWord{12, 11, 148},
+ dictWord{12, 11, 195},
+ dictWord{12, 11, 220},
+ dictWord{12, 11, 237},
+ dictWord{12, 11, 318},
+ dictWord{12, 11, 339},
+ dictWord{12, 11, 393},
+ dictWord{12, 11, 445},
+ dictWord{12, 11, 450},
+ dictWord{12, 11, 474},
+ dictWord{12, 11, 509},
+ dictWord{12, 11, 533},
+ dictWord{12, 11, 591},
+ dictWord{12, 11, 594},
+ dictWord{12, 11, 597},
+ dictWord{12, 11, 621},
+ dictWord{12, 11, 633},
+ dictWord{12, 11, 642},
+ dictWord{13, 11, 59},
+ dictWord{13, 11, 60},
+ dictWord{13, 11, 145},
+ dictWord{13, 11, 239},
+ dictWord{13, 11, 250},
+ dictWord{13, 11, 273},
+ dictWord{13, 11, 329},
+ dictWord{13, 11, 344},
+ dictWord{13, 11, 365},
+ dictWord{13, 11, 372},
+ dictWord{13, 11, 387},
+ dictWord{13, 11, 403},
+ dictWord{13, 11, 414},
+ dictWord{13, 11, 456},
+ dictWord{13, 11, 478},
+ dictWord{13, 11, 483},
+ dictWord{13, 11, 489},
+ dictWord{14, 11, 55},
+ dictWord{14, 11, 57},
+ dictWord{14, 11, 81},
+ dictWord{14, 11, 90},
+ dictWord{14, 11, 148},
+ dictWord{14, 11, 239},
+ dictWord{14, 11, 266},
+ dictWord{14, 11, 321},
+ dictWord{14, 11, 326},
+ dictWord{14, 11, 327},
+ dictWord{14, 11, 330},
+ dictWord{14, 11, 347},
+ dictWord{14, 11, 355},
+ dictWord{14, 11, 401},
+ dictWord{14, 11, 411},
+ dictWord{14, 11, 414},
+ dictWord{14, 11, 416},
+ dictWord{14, 11, 420},
+ dictWord{15, 11, 61},
+ dictWord{15, 11, 74},
+ dictWord{15, 11, 87},
+ dictWord{15, 11, 88},
+ dictWord{15, 11, 94},
+ dictWord{15, 11, 96},
+ dictWord{15, 11, 116},
+ dictWord{15, 11, 149},
+ dictWord{15, 11, 154},
+ dictWord{16, 11, 50},
+ dictWord{16, 11, 63},
+ dictWord{16, 11, 73},
+ dictWord{17, 11, 2},
+ dictWord{17, 11, 66},
+ dictWord{17, 11, 92},
+ dictWord{17, 11, 103},
+ dictWord{17, 11, 112},
+ dictWord{18, 11, 50},
+ dictWord{18, 11, 54},
+ dictWord{18, 11, 82},
+ dictWord{18, 11, 86},
+ dictWord{18, 11, 90},
+ dictWord{18, 11, 111},
+ dictWord{18, 11, 115},
+ dictWord{18, 11, 156},
+ dictWord{19, 11, 40},
+ dictWord{19, 11, 79},
+ dictWord{20, 11, 78},
+ dictWord{149, 11, 22},
+ dictWord{137, 11, 170},
+ dictWord{134, 0, 1433},
+ dictWord{135, 11, 1307},
+ dictWord{139, 11, 411},
+ dictWord{5, 0, 189},
+ dictWord{7, 0, 442},
+ dictWord{7, 0, 443},
+ dictWord{8, 0, 281},
+ dictWord{12, 0, 174},
+ dictWord{141, 0, 261},
+ dictWord{6, 10, 216},
+ dictWord{7, 10, 901},
+ dictWord{7, 10, 1343},
+ dictWord{136, 10, 493},
+ dictWord{5, 11, 397},
+ dictWord{6, 11, 154},
+ dictWord{7, 10, 341},
+ dictWord{7, 11, 676},
+ dictWord{8, 11, 443},
+ dictWord{8, 11, 609},
+ dictWord{9, 11, 24},
+ dictWord{9, 11, 325},
+ dictWord{10, 11, 35},
+ dictWord{11, 10, 219},
+ dictWord{11, 11, 535},
+ dictWord{11, 11, 672},
+ dictWord{11, 11, 1018},
+ dictWord{12, 11, 637},
+ dictWord{144, 11, 30},
+ dictWord{6, 0, 2},
+ dictWord{7, 0, 191},
+ dictWord{7, 0, 446},
+ dictWord{7, 0, 1262},
+ dictWord{7, 0, 1737},
+ dictWord{8, 0, 22},
+ dictWord{8, 0, 270},
+ dictWord{8, 0, 612},
+ dictWord{9, 0, 4},
+ dictWord{9, 0, 312},
+ dictWord{9, 0, 436},
+ dictWord{9, 0, 626},
+ dictWord{10, 0, 216},
+ dictWord{10, 0, 311},
+ dictWord{10, 0, 521},
+ dictWord{10, 0, 623},
+ dictWord{11, 0, 72},
+ dictWord{11, 0, 330},
+ dictWord{11, 0, 455},
+ dictWord{12, 0, 321},
+ dictWord{12, 0, 504},
+ dictWord{12, 0, 530},
+ dictWord{12, 0, 543},
+ dictWord{13, 0, 17},
+ dictWord{13, 0, 156},
+ dictWord{13, 0, 334},
+ dictWord{14, 0, 131},
+ dictWord{17, 0, 60},
+ dictWord{148, 0, 64},
+ dictWord{7, 0, 354},
+ dictWord{10, 0, 410},
+ dictWord{139, 0, 815},
+ dictWord{139, 10, 130},
+ dictWord{7, 10, 1734},
+ dictWord{137, 11, 631},
+ dictWord{12, 0, 425},
+ dictWord{15, 0, 112},
+ dictWord{10, 10, 115},
+ dictWord{11, 10, 420},
+ dictWord{13, 10, 404},
+ dictWord{14, 10, 346},
+ dictWord{143, 10, 54},
+ dictWord{6, 0, 60},
+ dictWord{6, 0, 166},
+ dictWord{7, 0, 374},
+ dictWord{7, 0, 670},
+ dictWord{7, 0, 1327},
+ dictWord{8, 0, 411},
+ dictWord{8, 0, 435},
+ dictWord{9, 0, 653},
+ dictWord{9, 0, 740},
+ dictWord{10, 0, 385},
+ dictWord{11, 0, 222},
+ dictWord{11, 0, 324},
+ dictWord{11, 0, 829},
+ dictWord{140, 0, 611},
+ dictWord{7, 0, 1611},
+ dictWord{13, 0, 14},
+ dictWord{15, 0, 44},
+ dictWord{19, 0, 13},
+ dictWord{148, 0, 76},
+ dictWord{133, 11, 981},
+ dictWord{4, 11, 56},
+ dictWord{7, 11, 1791},
+ dictWord{8, 11, 607},
+ dictWord{8, 11, 651},
+ dictWord{11, 11, 465},
+ dictWord{11, 11, 835},
+ dictWord{12, 11, 337},
+ dictWord{141, 11, 480},
+ dictWord{6, 0, 1478},
+ dictWord{5, 10, 1011},
+ dictWord{136, 10, 701},
+ dictWord{139, 0, 596},
+ dictWord{5, 0, 206},
+ dictWord{134, 0, 398},
+ dictWord{4, 10, 54},
+ dictWord{5, 10, 666},
+ dictWord{7, 10, 1039},
+ dictWord{7, 10, 1130},
+ dictWord{9, 10, 195},
+ dictWord{138, 10, 302},
+ dictWord{7, 0, 50},
+ dictWord{9, 11, 158},
+ dictWord{138, 11, 411},
+ dictWord{135, 11, 1120},
+ dictWord{6, 0, 517},
+ dictWord{7, 0, 1159},
+ dictWord{10, 0, 621},
+ dictWord{11, 0, 192},
+ dictWord{134, 10, 1669},
+ dictWord{4, 0, 592},
+ dictWord{6, 0, 600},
+ dictWord{135, 0, 1653},
+ dictWord{10, 0, 223},
+ dictWord{139, 0, 645},
+ dictWord{136, 11, 139},
+ dictWord{7, 0, 64},
+ dictWord{136, 0, 245},
+ dictWord{142, 0, 278},
+ dictWord{6, 11, 622},
+ dictWord{135, 11, 1030},
+ dictWord{136, 0, 604},
+ dictWord{134, 0, 1502},
+ dictWord{138, 0, 265},
+ dictWord{141, 11, 168},
+ dictWord{7, 0, 1763},
+ dictWord{140, 0, 310},
+ dictWord{7, 10, 798},
+ dictWord{139, 11, 719},
+ dictWord{7, 11, 160},
+ dictWord{10, 11, 624},
+ dictWord{142, 11, 279},
+ dictWord{132, 11, 363},
+ dictWord{7, 10, 122},
+ dictWord{9, 10, 259},
+ dictWord{10, 10, 84},
+ dictWord{11, 10, 470},
+ dictWord{12, 10, 541},
+ dictWord{141, 10, 379},
+ dictWord{5, 0, 129},
+ dictWord{6, 0, 61},
+ dictWord{135, 0, 947},
+ dictWord{134, 0, 1356},
+ dictWord{135, 11, 1191},
+ dictWord{13, 0, 505},
+ dictWord{141, 0, 506},
+ dictWord{11, 0, 1000},
+ dictWord{5, 10, 82},
+ dictWord{5, 10, 131},
+ dictWord{7, 10, 1755},
+ dictWord{8, 10, 31},
+ dictWord{9, 10, 168},
+ dictWord{9, 10, 764},
+ dictWord{139, 10, 869},
+ dictWord{134, 0, 966},
+ dictWord{134, 10, 605},
+ dictWord{134, 11, 292},
+ dictWord{5, 11, 177},
+ dictWord{6, 11, 616},
+ dictWord{7, 11, 827},
+ dictWord{9, 11, 525},
+ dictWord{138, 11, 656},
+ dictWord{135, 11, 1486},
+ dictWord{138, 11, 31},
+ dictWord{5, 10, 278},
+ dictWord{137, 10, 68},
+ dictWord{4, 10, 163},
+ dictWord{5, 10, 201},
+ dictWord{5, 10, 307},
+ dictWord{5, 10, 310},
+ dictWord{6, 10, 335},
+ dictWord{7, 10, 284},
+ dictWord{136, 10, 165},
+ dictWord{6, 0, 839},
+ dictWord{135, 10, 1660},
+ dictWord{136, 10, 781},
+ dictWord{6, 10, 33},
+ dictWord{135, 10, 1244},
+ dictWord{133, 0, 637},
+ dictWord{4, 11, 161},
+ dictWord{133, 11, 631},
+ dictWord{137, 0, 590},
+ dictWord{7, 10, 1953},
+ dictWord{136, 10, 720},
+ dictWord{5, 0, 280},
+ dictWord{7, 0, 1226},
+ dictWord{138, 10, 203},
+ dictWord{134, 0, 1386},
+ dictWord{5, 0, 281},
+ dictWord{6, 0, 1026},
+ dictWord{6, 10, 326},
+ dictWord{7, 10, 677},
+ dictWord{137, 10, 425},
+ dictWord{7, 11, 1557},
+ dictWord{135, 11, 1684},
+ dictWord{135, 0, 1064},
+ dictWord{9, 11, 469},
+ dictWord{9, 11, 709},
+ dictWord{12, 11, 512},
+ dictWord{14, 11, 65},
+ dictWord{145, 11, 12},
+ dictWord{134, 0, 917},
+ dictWord{10, 11, 229},
+ dictWord{11, 11, 73},
+ dictWord{11, 11, 376},
+ dictWord{139, 11, 433},
+ dictWord{7, 0, 555},
+ dictWord{9, 0, 192},
+ dictWord{13, 0, 30},
+ dictWord{13, 0, 49},
+ dictWord{15, 0, 150},
+ dictWord{16, 0, 76},
+ dictWord{20, 0, 52},
+ dictWord{7, 10, 1316},
+ dictWord{7, 10, 1412},
+ dictWord{7, 10, 1839},
+ dictWord{9, 10, 589},
+ dictWord{11, 10, 241},
+ dictWord{11, 10, 676},
+ dictWord{11, 10, 811},
+ dictWord{11, 10, 891},
+ dictWord{12, 10, 140},
+ dictWord{12, 10, 346},
+ dictWord{12, 10, 479},
+ dictWord{13, 10, 381},
+ dictWord{14, 10, 188},
+ dictWord{146, 10, 30},
+ dictWord{149, 0, 15},
+ dictWord{6, 0, 1882},
+ dictWord{6, 0, 1883},
+ dictWord{6, 0, 1897},
+ dictWord{9, 0, 945},
+ dictWord{9, 0, 1014},
+ dictWord{9, 0, 1020},
+ dictWord{12, 0, 823},
+ dictWord{12, 0, 842},
+ dictWord{12, 0, 866},
+ dictWord{12, 0, 934},
+ dictWord{15, 0, 242},
+ dictWord{146, 0, 208},
+ dictWord{6, 0, 965},
+ dictWord{134, 0, 1499},
+ dictWord{7, 0, 33},
+ dictWord{7, 0, 120},
+ dictWord{8, 0, 489},
+ dictWord{9, 0, 319},
+ dictWord{10, 0, 820},
+ dictWord{11, 0, 1004},
+ dictWord{12, 0, 379},
+ dictWord{12, 0, 679},
+ dictWord{13, 0, 117},
+ dictWord{13, 0, 412},
+ dictWord{14, 0, 25},
+ dictWord{15, 0, 52},
+ dictWord{15, 0, 161},
+ dictWord{16, 0, 47},
+ dictWord{149, 0, 2},
+ dictWord{6, 11, 558},
+ dictWord{7, 11, 651},
+ dictWord{8, 11, 421},
+ dictWord{9, 11, 0},
+ dictWord{138, 11, 34},
+ dictWord{4, 0, 937},
+ dictWord{5, 0, 801},
+ dictWord{7, 0, 473},
+ dictWord{5, 10, 358},
+ dictWord{7, 10, 1184},
+ dictWord{10, 10, 662},
+ dictWord{13, 10, 212},
+ dictWord{13, 10, 304},
+ dictWord{13, 10, 333},
+ dictWord{145, 10, 98},
+ dictWord{132, 0, 877},
+ dictWord{6, 0, 693},
+ dictWord{134, 0, 824},
+ dictWord{132, 0, 365},
+ dictWord{7, 11, 1832},
+ dictWord{138, 11, 374},
+ dictWord{5, 0, 7},
+ dictWord{139, 0, 774},
+ dictWord{4, 0, 734},
+ dictWord{5, 0, 662},
+ dictWord{134, 0, 430},
+ dictWord{4, 0, 746},
+ dictWord{135, 0, 1090},
+ dictWord{5, 0, 360},
+ dictWord{8, 0, 237},
+ dictWord{10, 0, 231},
+ dictWord{147, 0, 124},
+ dictWord{138, 11, 348},
+ dictWord{6, 11, 6},
+ dictWord{7, 11, 81},
+ dictWord{7, 11, 771},
+ dictWord{7, 11, 1731},
+ dictWord{9, 11, 405},
+ dictWord{138, 11, 421},
+ dictWord{6, 0, 740},
+ dictWord{137, 0, 822},
+ dictWord{133, 10, 946},
+ dictWord{7, 0, 1485},
+ dictWord{136, 0, 929},
+ dictWord{7, 10, 411},
+ dictWord{8, 10, 631},
+ dictWord{9, 10, 323},
+ dictWord{10, 10, 355},
+ dictWord{11, 10, 491},
+ dictWord{12, 10, 143},
+ dictWord{12, 10, 402},
+ dictWord{13, 10, 73},
+ dictWord{14, 10, 408},
+ dictWord{15, 10, 107},
+ dictWord{146, 10, 71},
+ dictWord{135, 10, 590},
+ dictWord{5, 11, 881},
+ dictWord{133, 11, 885},
+ dictWord{150, 11, 25},
+ dictWord{4, 0, 852},
+ dictWord{5, 11, 142},
+ dictWord{134, 11, 546},
+ dictWord{7, 10, 1467},
+ dictWord{8, 10, 328},
+ dictWord{10, 10, 544},
+ dictWord{11, 10, 955},
+ dictWord{13, 10, 320},
+ dictWord{145, 10, 83},
+ dictWord{9, 0, 17},
+ dictWord{10, 0, 291},
+ dictWord{11, 10, 511},
+ dictWord{13, 10, 394},
+ dictWord{14, 10, 298},
+ dictWord{14, 10, 318},
+ dictWord{146, 10, 103},
+ dictWord{5, 11, 466},
+ dictWord{11, 11, 571},
+ dictWord{12, 11, 198},
+ dictWord{13, 11, 283},
+ dictWord{14, 11, 186},
+ dictWord{15, 11, 21},
+ dictWord{143, 11, 103},
+ dictWord{134, 0, 1001},
+ dictWord{4, 11, 185},
+ dictWord{5, 11, 257},
+ dictWord{5, 11, 839},
+ dictWord{5, 11, 936},
+ dictWord{7, 11, 171},
+ dictWord{9, 11, 399},
+ dictWord{10, 11, 258},
+ dictWord{10, 11, 395},
+ dictWord{10, 11, 734},
+ dictWord{11, 11, 1014},
+ dictWord{12, 11, 23},
+ dictWord{13, 11, 350},
+ dictWord{14, 11, 150},
+ dictWord{147, 11, 6},
+ dictWord{143, 0, 35},
+ dictWord{132, 0, 831},
+ dictWord{5, 10, 835},
+ dictWord{134, 10, 483},
+ dictWord{4, 0, 277},
+ dictWord{5, 0, 608},
+ dictWord{6, 0, 493},
+ dictWord{7, 0, 457},
+ dictWord{12, 0, 384},
+ dictWord{7, 11, 404},
+ dictWord{7, 11, 1377},
+ dictWord{7, 11, 1430},
+ dictWord{7, 11, 2017},
+ dictWord{8, 11, 149},
+ dictWord{8, 11, 239},
+ dictWord{8, 11, 512},
+ dictWord{8, 11, 793},
+ dictWord{8, 11, 818},
+ dictWord{9, 11, 474},
+ dictWord{9, 11, 595},
+ dictWord{10, 11, 122},
+ dictWord{10, 11, 565},
+ dictWord{10, 11, 649},
+ dictWord{10, 11, 783},
+ dictWord{11, 11, 239},
+ dictWord{11, 11, 295},
+ dictWord{11, 11, 447},
+ dictWord{11, 11, 528},
+ dictWord{11, 11, 639},
+ dictWord{11, 11, 800},
+ dictWord{11, 11, 936},
+ dictWord{12, 11, 25},
+ dictWord{12, 11, 73},
+ dictWord{12, 11, 77},
+ dictWord{12, 11, 157},
+ dictWord{12, 11, 316},
+ dictWord{12, 11, 390},
+ dictWord{12, 11, 391},
+ dictWord{12, 11, 394},
+ dictWord{12, 11, 395},
+ dictWord{12, 11, 478},
+ dictWord{12, 11, 503},
+ dictWord{12, 11, 592},
+ dictWord{12, 11, 680},
+ dictWord{13, 11, 50},
+ dictWord{13, 11, 53},
+ dictWord{13, 11, 132},
+ dictWord{13, 11, 198},
+ dictWord{13, 11, 275},
+ dictWord{13, 11, 322},
+ dictWord{13, 11, 415},
+ dictWord{14, 11, 71},
+ dictWord{14, 11, 257},
+ dictWord{14, 11, 395},
+ dictWord{15, 11, 71},
+ dictWord{15, 11, 136},
+ dictWord{17, 11, 123},
+ dictWord{18, 11, 93},
+ dictWord{147, 11, 58},
+ dictWord{134, 0, 1351},
+ dictWord{7, 0, 27},
+ dictWord{135, 0, 316},
+ dictWord{136, 11, 712},
+ dictWord{136, 0, 984},
+ dictWord{133, 0, 552},
+ dictWord{137, 0, 264},
+ dictWord{132, 0, 401},
+ dictWord{6, 0, 710},
+ dictWord{6, 0, 1111},
+ dictWord{134, 0, 1343},
+ dictWord{134, 0, 1211},
+ dictWord{9, 0, 543},
+ dictWord{10, 0, 524},
+ dictWord{11, 0, 108},
+ dictWord{11, 0, 653},
+ dictWord{12, 0, 524},
+ dictWord{13, 0, 123},
+ dictWord{14, 0, 252},
+ dictWord{16, 0, 18},
+ dictWord{19, 0, 38},
+ dictWord{20, 0, 26},
+ dictWord{20, 0, 65},
+ dictWord{21, 0, 3},
+ dictWord{151, 0, 11},
+ dictWord{4, 0, 205},
+ dictWord{5, 0, 623},
+ dictWord{7, 0, 104},
+ dictWord{8, 0, 519},
+ dictWord{137, 0, 716},
+ dictWord{132, 10, 677},
+ dictWord{4, 11, 377},
+ dictWord{152, 11, 13},
+ dictWord{135, 11, 1673},
+ dictWord{7, 0, 579},
+ dictWord{9, 0, 41},
+ dictWord{9, 0, 244},
+ dictWord{9, 0, 669},
+ dictWord{10, 0, 5},
+ dictWord{11, 0, 861},
+ dictWord{11, 0, 951},
+ dictWord{139, 0, 980},
+ dictWord{132, 0, 717},
+ dictWord{136, 0, 1011},
+ dictWord{132, 0, 805},
+ dictWord{4, 11, 180},
+ dictWord{135, 11, 1906},
+ dictWord{132, 10, 777},
+ dictWord{132, 10, 331},
+ dictWord{132, 0, 489},
+ dictWord{6, 0, 1024},
+ dictWord{4, 11, 491},
+ dictWord{133, 10, 747},
+ dictWord{135, 11, 1182},
+ dictWord{4, 11, 171},
+ dictWord{138, 11, 234},
+ dictWord{4, 11, 586},
+ dictWord{7, 11, 1186},
+ dictWord{138, 11, 631},
+ dictWord{135, 0, 892},
+ dictWord{135, 11, 336},
+ dictWord{9, 11, 931},
+ dictWord{10, 11, 334},
+ dictWord{148, 11, 71},
+ dictWord{137, 0, 473},
+ dictWord{6, 0, 864},
+ dictWord{12, 0, 659},
+ dictWord{139, 11, 926},
+ dictWord{7, 0, 819},
+ dictWord{9, 0, 26},
+ dictWord{9, 0, 392},
+ dictWord{10, 0, 152},
+ dictWord{10, 0, 226},
+ dictWord{11, 0, 19},
+ dictWord{12, 0, 276},
+ dictWord{12, 0, 426},
+ dictWord{12, 0, 589},
+ dictWord{13, 0, 460},
+ dictWord{15, 0, 97},
+ dictWord{19, 0, 48},
+ dictWord{148, 0, 104},
+ dictWord{135, 0, 51},
+ dictWord{133, 10, 326},
+ dictWord{4, 10, 691},
+ dictWord{146, 10, 16},
+ dictWord{9, 0, 130},
+ dictWord{11, 0, 765},
+ dictWord{10, 10, 680},
+ dictWord{10, 10, 793},
+ dictWord{141, 10, 357},
+ dictWord{133, 11, 765},
+ dictWord{8, 0, 229},
+ dictWord{6, 10, 32},
+ dictWord{7, 10, 385},
+ dictWord{7, 10, 757},
+ dictWord{7, 10, 1916},
+ dictWord{8, 10, 94},
+ dictWord{8, 10, 711},
+ dictWord{9, 10, 541},
+ dictWord{10, 10, 162},
+ dictWord{10, 10, 795},
+ dictWord{11, 10, 989},
+ dictWord{11, 10, 1010},
+ dictWord{12, 10, 14},
+ dictWord{142, 10, 308},
+ dictWord{7, 11, 474},
+ dictWord{137, 11, 578},
+ dictWord{132, 0, 674},
+ dictWord{132, 0, 770},
+ dictWord{5, 0, 79},
+ dictWord{7, 0, 1027},
+ dictWord{7, 0, 1477},
+ dictWord{139, 0, 52},
+ dictWord{133, 11, 424},
+ dictWord{134, 0, 1666},
+ dictWord{6, 0, 409},
+ dictWord{6, 10, 349},
+ dictWord{6, 10, 1682},
+ dictWord{7, 10, 1252},
+ dictWord{8, 10, 112},
+ dictWord{8, 11, 714},
+ dictWord{9, 10, 435},
+ dictWord{9, 10, 668},
+ dictWord{10, 10, 290},
+ dictWord{10, 10, 319},
+ dictWord{10, 10, 815},
+ dictWord{11, 10, 180},
+ dictWord{11, 10, 837},
+ dictWord{12, 10, 240},
+ dictWord{13, 10, 152},
+ dictWord{13, 10, 219},
+ dictWord{142, 10, 158},
+ dictWord{5, 0, 789},
+ dictWord{134, 0, 195},
+ dictWord{4, 0, 251},
+ dictWord{4, 0, 688},
+ dictWord{7, 0, 513},
+ dictWord{135, 0, 1284},
+ dictWord{132, 10, 581},
+ dictWord{9, 11, 420},
+ dictWord{10, 11, 269},
+ dictWord{10, 11, 285},
+ dictWord{10, 11, 576},
+ dictWord{11, 11, 397},
+ dictWord{13, 11, 175},
+ dictWord{145, 11, 90},
+ dictWord{6, 10, 126},
+ dictWord{7, 10, 573},
+ dictWord{8, 10, 397},
+ dictWord{142, 10, 44},
+ dictWord{132, 11, 429},
+ dictWord{133, 0, 889},
+ dictWord{4, 0, 160},
+ dictWord{5, 0, 330},
+ dictWord{7, 0, 1434},
+ dictWord{136, 0, 174},
+ dictWord{7, 11, 18},
+ dictWord{7, 11, 699},
+ dictWord{7, 11, 1966},
+ dictWord{8, 11, 752},
+ dictWord{9, 11, 273},
+ dictWord{9, 11, 412},
+ dictWord{9, 11, 703},
+ dictWord{10, 11, 71},
+ dictWord{10, 11, 427},
+ dictWord{10, 11, 508},
+ dictWord{146, 11, 97},
+ dictWord{6, 0, 872},
+ dictWord{134, 0, 899},
+ dictWord{133, 10, 926},
+ dictWord{134, 0, 1126},
+ dictWord{134, 0, 918},
+ dictWord{4, 11, 53},
+ dictWord{5, 11, 186},
+ dictWord{135, 11, 752},
+ dictWord{7, 0, 268},
+ dictWord{136, 0, 569},
+ dictWord{134, 0, 1224},
+ dictWord{6, 0, 1361},
+ dictWord{7, 10, 1232},
+ dictWord{137, 10, 531},
+ dictWord{8, 11, 575},
+ dictWord{10, 11, 289},
+ dictWord{139, 11, 319},
+ dictWord{133, 10, 670},
+ dictWord{132, 11, 675},
+ dictWord{133, 0, 374},
+ dictWord{135, 10, 1957},
+ dictWord{133, 0, 731},
+ dictWord{11, 0, 190},
+ dictWord{15, 0, 49},
+ dictWord{11, 11, 190},
+ dictWord{143, 11, 49},
+ dictWord{4, 0, 626},
+ dictWord{5, 0, 506},
+ dictWord{5, 0, 642},
+ dictWord{6, 0, 425},
+ dictWord{10, 0, 202},
+ dictWord{139, 0, 141},
+ dictWord{137, 0, 444},
+ dictWord{7, 10, 242},
+ dictWord{135, 10, 1942},
+ dictWord{6, 11, 209},
+ dictWord{8, 11, 468},
+ dictWord{9, 11, 210},
+ dictWord{11, 11, 36},
+ dictWord{12, 11, 28},
+ dictWord{12, 11, 630},
+ dictWord{13, 11, 21},
+ dictWord{13, 11, 349},
+ dictWord{14, 11, 7},
+ dictWord{145, 11, 13},
+ dictWord{4, 11, 342},
+ dictWord{135, 11, 1179},
+ dictWord{5, 10, 834},
+ dictWord{7, 10, 1202},
+ dictWord{8, 10, 14},
+ dictWord{9, 10, 481},
+ dictWord{137, 10, 880},
+ dictWord{4, 11, 928},
+ dictWord{133, 11, 910},
+ dictWord{4, 11, 318},
+ dictWord{4, 11, 496},
+ dictWord{7, 11, 856},
+ dictWord{139, 11, 654},
+ dictWord{136, 0, 835},
+ dictWord{7, 0, 1526},
+ dictWord{138, 10, 465},
+ dictWord{151, 0, 17},
+ dictWord{135, 0, 477},
+ dictWord{4, 10, 357},
+ dictWord{6, 10, 172},
+ dictWord{7, 10, 143},
+ dictWord{137, 10, 413},
+ dictWord{6, 0, 1374},
+ dictWord{138, 0, 994},
+ dictWord{18, 0, 76},
+ dictWord{132, 10, 590},
+ dictWord{7, 0, 287},
+ dictWord{8, 0, 355},
+ dictWord{9, 0, 293},
+ dictWord{137, 0, 743},
+ dictWord{134, 0, 1389},
+ dictWord{7, 11, 915},
+ dictWord{8, 11, 247},
+ dictWord{147, 11, 0},
+ dictWord{4, 11, 202},
+ dictWord{5, 11, 382},
+ dictWord{6, 11, 454},
+ dictWord{7, 11, 936},
+ dictWord{7, 11, 1803},
+ dictWord{8, 11, 758},
+ dictWord{9, 11, 375},
+ dictWord{9, 11, 895},
+ dictWord{10, 11, 743},
+ dictWord{10, 11, 792},
+ dictWord{11, 11, 978},
+ dictWord{11, 11, 1012},
+ dictWord{142, 11, 109},
+ dictWord{5, 0, 384},
+ dictWord{8, 0, 455},
+ dictWord{140, 0, 48},
+ dictWord{132, 11, 390},
+ dictWord{5, 10, 169},
+ dictWord{7, 10, 333},
+ dictWord{136, 10, 45},
+ dictWord{5, 0, 264},
+ dictWord{134, 0, 184},
+ dictWord{138, 11, 791},
+ dictWord{133, 11, 717},
+ dictWord{132, 10, 198},
+ dictWord{6, 11, 445},
+ dictWord{7, 11, 332},
+ dictWord{137, 11, 909},
+ dictWord{136, 0, 1001},
+ dictWord{4, 10, 24},
+ dictWord{5, 10, 140},
+ dictWord{5, 10, 185},
+ dictWord{7, 10, 1500},
+ dictWord{11, 10, 565},
+ dictWord{139, 10, 838},
+ dictWord{134, 11, 578},
+ dictWord{5, 0, 633},
+ dictWord{6, 0, 28},
+ dictWord{135, 0, 1323},
+ dictWord{132, 0, 851},
+ dictWord{136, 11, 267},
+ dictWord{7, 0, 359},
+ dictWord{8, 0, 243},
+ dictWord{140, 0, 175},
+ dictWord{4, 10, 334},
+ dictWord{133, 10, 593},
+ dictWord{141, 11, 87},
+ dictWord{136, 11, 766},
+ dictWord{10, 0, 287},
+ dictWord{12, 0, 138},
+ dictWord{10, 11, 287},
+ dictWord{140, 11, 138},
+ dictWord{4, 0, 105},
+ dictWord{132, 0, 740},
+ dictWord{140, 10, 116},
+ dictWord{134, 0, 857},
+ dictWord{135, 11, 1841},
+ dictWord{6, 0, 1402},
+ dictWord{137, 0, 819},
+ dictWord{132, 11, 584},
+ dictWord{132, 10, 709},
+ dictWord{133, 10, 897},
+ dictWord{5, 0, 224},
+ dictWord{13, 0, 174},
+ dictWord{146, 0, 52},
+ dictWord{135, 10, 1840},
+ dictWord{4, 10, 608},
+ dictWord{133, 10, 497},
+ dictWord{139, 11, 60},
+ dictWord{4, 0, 758},
+ dictWord{135, 0, 1649},
+ dictWord{4, 11, 226},
+ dictWord{4, 11, 326},
+ dictWord{135, 11, 1770},
+ dictWord{5, 11, 426},
+ dictWord{8, 11, 30},
+ dictWord{9, 11, 2},
+ dictWord{11, 11, 549},
+ dictWord{147, 11, 122},
+ dictWord{135, 10, 2039},
+ dictWord{6, 10, 540},
+ dictWord{136, 10, 136},
+ dictWord{4, 0, 573},
+ dictWord{8, 0, 655},
+ dictWord{4, 10, 897},
+ dictWord{133, 10, 786},
+ dictWord{7, 0, 351},
+ dictWord{139, 0, 128},
+ dictWord{133, 10, 999},
+ dictWord{4, 10, 299},
+ dictWord{135, 10, 1004},
+ dictWord{133, 0, 918},
+ dictWord{132, 11, 345},
+ dictWord{4, 11, 385},
+ dictWord{7, 11, 265},
+ dictWord{135, 11, 587},
+ dictWord{133, 10, 456},
+ dictWord{136, 10, 180},
+ dictWord{6, 0, 687},
+ dictWord{134, 0, 1537},
+ dictWord{4, 11, 347},
+ dictWord{5, 11, 423},
+ dictWord{5, 11, 996},
+ dictWord{135, 11, 1329},
+ dictWord{132, 10, 755},
+ dictWord{7, 11, 1259},
+ dictWord{9, 11, 125},
+ dictWord{11, 11, 65},
+ dictWord{140, 11, 285},
+ dictWord{5, 11, 136},
+ dictWord{6, 11, 136},
+ dictWord{136, 11, 644},
+ dictWord{134, 0, 1525},
+ dictWord{4, 0, 1009},
+ dictWord{135, 0, 1139},
+ dictWord{139, 10, 338},
+ dictWord{132, 0, 340},
+ dictWord{135, 10, 1464},
+ dictWord{8, 0, 847},
+ dictWord{10, 0, 861},
+ dictWord{10, 0, 876},
+ dictWord{10, 0, 889},
+ dictWord{10, 0, 922},
+ dictWord{10, 0, 929},
+ dictWord{10, 0, 933},
+ dictWord{12, 0, 784},
+ dictWord{140, 0, 791},
+ dictWord{139, 0, 176},
+ dictWord{9, 11, 134},
+ dictWord{10, 11, 2},
+ dictWord{10, 11, 27},
+ dictWord{10, 11, 333},
+ dictWord{11, 11, 722},
+ dictWord{143, 11, 1},
+ dictWord{4, 11, 433},
+ dictWord{133, 11, 719},
+ dictWord{5, 0, 985},
+ dictWord{7, 0, 509},
+ dictWord{7, 0, 529},
+ dictWord{145, 0, 96},
+ dictWord{132, 0, 615},
+ dictWord{4, 10, 890},
+ dictWord{5, 10, 805},
+ dictWord{5, 10, 819},
+ dictWord{5, 10, 961},
+ dictWord{6, 10, 396},
+ dictWord{6, 10, 1631},
+ dictWord{6, 10, 1678},
+ dictWord{7, 10, 1967},
+ dictWord{7, 10, 2041},
+ dictWord{9, 10, 630},
+ dictWord{11, 10, 8},
+ dictWord{11, 10, 1019},
+ dictWord{12, 10, 176},
+ dictWord{13, 10, 225},
+ dictWord{14, 10, 292},
+ dictWord{149, 10, 24},
+ dictWord{135, 0, 1919},
+ dictWord{134, 0, 1131},
+ dictWord{144, 11, 21},
+ dictWord{144, 11, 51},
+ dictWord{135, 10, 1815},
+ dictWord{4, 0, 247},
+ dictWord{7, 10, 1505},
+ dictWord{10, 10, 190},
+ dictWord{10, 10, 634},
+ dictWord{11, 10, 792},
+ dictWord{12, 10, 358},
+ dictWord{140, 10, 447},
+ dictWord{5, 10, 0},
+ dictWord{6, 10, 536},
+ dictWord{7, 10, 604},
+ dictWord{13, 10, 445},
+ dictWord{145, 10, 126},
+ dictWord{4, 0, 184},
+ dictWord{5, 0, 390},
+ dictWord{6, 0, 337},
+ dictWord{7, 0, 23},
+ dictWord{7, 0, 494},
+ dictWord{7, 0, 618},
+ dictWord{7, 0, 1456},
+ dictWord{8, 0, 27},
+ dictWord{8, 0, 599},
+ dictWord{10, 0, 153},
+ dictWord{139, 0, 710},
+ dictWord{6, 10, 232},
+ dictWord{6, 10, 412},
+ dictWord{7, 10, 1074},
+ dictWord{8, 10, 9},
+ dictWord{8, 10, 157},
+ dictWord{8, 10, 786},
+ dictWord{9, 10, 196},
+ dictWord{9, 10, 352},
+ dictWord{9, 10, 457},
+ dictWord{10, 10, 337},
+ dictWord{11, 10, 232},
+ dictWord{11, 10, 877},
+ dictWord{12, 10, 480},
+ dictWord{140, 10, 546},
+ dictWord{13, 0, 38},
+ dictWord{135, 10, 958},
+ dictWord{4, 10, 382},
+ dictWord{136, 10, 579},
+ dictWord{4, 10, 212},
+ dictWord{135, 10, 1206},
+ dictWord{4, 11, 555},
+ dictWord{8, 11, 536},
+ dictWord{138, 11, 288},
+ dictWord{11, 11, 139},
+ dictWord{139, 11, 171},
+ dictWord{9, 11, 370},
+ dictWord{138, 11, 90},
+ dictWord{132, 0, 1015},
+ dictWord{134, 0, 1088},
+ dictWord{5, 10, 655},
+ dictWord{135, 11, 977},
+ dictWord{134, 0, 1585},
+ dictWord{17, 10, 67},
+ dictWord{147, 10, 74},
+ dictWord{10, 0, 227},
+ dictWord{11, 0, 497},
+ dictWord{11, 0, 709},
+ dictWord{140, 0, 415},
+ dictWord{6, 0, 360},
+ dictWord{7, 0, 1664},
+ dictWord{136, 0, 478},
+ dictWord{7, 0, 95},
+ dictWord{6, 10, 231},
+ dictWord{136, 10, 423},
+ dictWord{140, 11, 65},
+ dictWord{4, 11, 257},
+ dictWord{135, 11, 2031},
+ dictWord{135, 11, 1768},
+ dictWord{133, 10, 300},
+ dictWord{139, 11, 211},
+ dictWord{136, 0, 699},
+ dictWord{6, 10, 237},
+ dictWord{7, 10, 611},
+ dictWord{8, 10, 100},
+ dictWord{9, 10, 416},
+ dictWord{11, 10, 335},
+ dictWord{12, 10, 173},
+ dictWord{146, 10, 101},
+ dictWord{14, 0, 26},
+ dictWord{146, 0, 150},
+ dictWord{6, 0, 581},
+ dictWord{135, 0, 1119},
+ dictWord{135, 10, 1208},
+ dictWord{132, 0, 739},
+ dictWord{6, 11, 83},
+ dictWord{6, 11, 1733},
+ dictWord{135, 11, 1389},
+ dictWord{137, 0, 869},
+ dictWord{4, 0, 67},
+ dictWord{5, 0, 422},
+ dictWord{7, 0, 1037},
+ dictWord{7, 0, 1289},
+ dictWord{7, 0, 1555},
+ dictWord{9, 0, 741},
+ dictWord{145, 0, 108},
+ dictWord{133, 10, 199},
+ dictWord{12, 10, 427},
+ dictWord{146, 10, 38},
+ dictWord{136, 0, 464},
+ dictWord{142, 0, 42},
+ dictWord{10, 0, 96},
+ dictWord{8, 11, 501},
+ dictWord{137, 11, 696},
+ dictWord{134, 11, 592},
+ dictWord{4, 0, 512},
+ dictWord{4, 0, 966},
+ dictWord{5, 0, 342},
+ dictWord{6, 0, 1855},
+ dictWord{8, 0, 869},
+ dictWord{8, 0, 875},
+ dictWord{8, 0, 901},
+ dictWord{144, 0, 26},
+ dictWord{8, 0, 203},
+ dictWord{11, 0, 823},
+ dictWord{11, 0, 846},
+ dictWord{12, 0, 482},
+ dictWord{13, 0, 277},
+ dictWord{13, 0, 302},
+ dictWord{13, 0, 464},
+ dictWord{14, 0, 205},
+ dictWord{142, 0, 221},
+ dictWord{4, 0, 449},
+ dictWord{133, 0, 718},
+ dictWord{7, 11, 1718},
+ dictWord{9, 11, 95},
+ dictWord{9, 11, 274},
+ dictWord{10, 11, 279},
+ dictWord{10, 11, 317},
+ dictWord{10, 11, 420},
+ dictWord{11, 11, 303},
+ dictWord{11, 11, 808},
+ dictWord{12, 11, 134},
+ dictWord{12, 11, 367},
+ dictWord{13, 11, 149},
+ dictWord{13, 11, 347},
+ dictWord{14, 11, 349},
+ dictWord{14, 11, 406},
+ dictWord{18, 11, 22},
+ dictWord{18, 11, 89},
+ dictWord{18, 11, 122},
+ dictWord{147, 11, 47},
+ dictWord{133, 11, 26},
+ dictWord{4, 0, 355},
+ dictWord{6, 0, 311},
+ dictWord{9, 0, 256},
+ dictWord{138, 0, 404},
+ dictWord{132, 11, 550},
+ dictWord{10, 0, 758},
+ dictWord{6, 10, 312},
+ dictWord{6, 10, 1715},
+ dictWord{10, 10, 584},
+ dictWord{11, 10, 546},
+ dictWord{11, 10, 692},
+ dictWord{12, 10, 259},
+ dictWord{12, 10, 295},
+ dictWord{13, 10, 46},
+ dictWord{141, 10, 154},
+ dictWord{136, 11, 822},
+ dictWord{5, 0, 827},
+ dictWord{4, 11, 902},
+ dictWord{5, 11, 809},
+ dictWord{6, 11, 122},
+ dictWord{135, 11, 896},
+ dictWord{5, 0, 64},
+ dictWord{140, 0, 581},
+ dictWord{4, 0, 442},
+ dictWord{6, 0, 739},
+ dictWord{7, 0, 1047},
+ dictWord{7, 0, 1352},
+ dictWord{7, 0, 1643},
+ dictWord{7, 11, 1911},
+ dictWord{9, 11, 449},
+ dictWord{10, 11, 192},
+ dictWord{138, 11, 740},
+ dictWord{135, 11, 262},
+ dictWord{132, 10, 588},
+ dictWord{133, 11, 620},
+ dictWord{5, 0, 977},
+ dictWord{6, 0, 288},
+ dictWord{7, 0, 528},
+ dictWord{4, 11, 34},
+ dictWord{5, 11, 574},
+ dictWord{7, 11, 279},
+ dictWord{7, 11, 1624},
+ dictWord{136, 11, 601},
+ dictWord{6, 0, 1375},
+ dictWord{4, 10, 231},
+ dictWord{5, 10, 61},
+ dictWord{6, 10, 104},
+ dictWord{7, 10, 729},
+ dictWord{7, 10, 964},
+ dictWord{7, 10, 1658},
+ dictWord{140, 10, 414},
+ dictWord{6, 10, 263},
+ dictWord{138, 10, 757},
+ dictWord{132, 10, 320},
+ dictWord{4, 0, 254},
+ dictWord{7, 0, 1309},
+ dictWord{5, 11, 332},
+ dictWord{135, 11, 1309},
+ dictWord{6, 11, 261},
+ dictWord{8, 11, 182},
+ dictWord{139, 11, 943},
+ dictWord{132, 10, 225},
+ dictWord{6, 0, 12},
+ dictWord{135, 0, 1219},
+ dictWord{4, 0, 275},
+ dictWord{12, 0, 376},
+ dictWord{6, 11, 1721},
+ dictWord{141, 11, 490},
+ dictWord{4, 11, 933},
+ dictWord{133, 11, 880},
+ dictWord{6, 0, 951},
+ dictWord{6, 0, 1109},
+ dictWord{6, 0, 1181},
+ dictWord{7, 0, 154},
+ dictWord{4, 10, 405},
+ dictWord{7, 10, 817},
+ dictWord{14, 10, 58},
+ dictWord{17, 10, 37},
+ dictWord{146, 10, 124},
+ dictWord{6, 0, 1520},
+ dictWord{133, 10, 974},
+ dictWord{134, 0, 1753},
+ dictWord{6, 0, 369},
+ dictWord{6, 0, 502},
+ dictWord{7, 0, 1036},
+ dictWord{8, 0, 348},
+ dictWord{9, 0, 452},
+ dictWord{10, 0, 26},
+ dictWord{11, 0, 224},
+ dictWord{11, 0, 387},
+ dictWord{11, 0, 772},
+ dictWord{12, 0, 95},
+ dictWord{12, 0, 629},
+ dictWord{13, 0, 195},
+ dictWord{13, 0, 207},
+ dictWord{13, 0, 241},
+ dictWord{14, 0, 260},
+ dictWord{14, 0, 270},
+ dictWord{143, 0, 140},
+ dictWord{132, 0, 269},
+ dictWord{5, 0, 480},
+ dictWord{7, 0, 532},
+ dictWord{7, 0, 1197},
+ dictWord{7, 0, 1358},
+ dictWord{8, 0, 291},
+ dictWord{11, 0, 349},
+ dictWord{142, 0, 396},
+ dictWord{5, 10, 235},
+ dictWord{7, 10, 1239},
+ dictWord{11, 10, 131},
+ dictWord{140, 10, 370},
+ dictWord{7, 10, 956},
+ dictWord{7, 10, 1157},
+ dictWord{7, 10, 1506},
+ dictWord{7, 10, 1606},
+ dictWord{7, 10, 1615},
+ dictWord{7, 10, 1619},
+ dictWord{7, 10, 1736},
+ dictWord{7, 10, 1775},
+ dictWord{8, 10, 590},
+ dictWord{9, 10, 324},
+ dictWord{9, 10, 736},
+ dictWord{9, 10, 774},
+ dictWord{9, 10, 776},
+ dictWord{9, 10, 784},
+ dictWord{10, 10, 567},
+ dictWord{10, 10, 708},
+ dictWord{11, 10, 518},
+ dictWord{11, 10, 613},
+ dictWord{11, 10, 695},
+ dictWord{11, 10, 716},
+ dictWord{11, 10, 739},
+ dictWord{11, 10, 770},
+ dictWord{11, 10, 771},
+ dictWord{11, 10, 848},
+ dictWord{11, 10, 857},
+ dictWord{11, 10, 931},
+ dictWord{11, 10, 947},
+ dictWord{12, 10, 326},
+ dictWord{12, 10, 387},
+ dictWord{12, 10, 484},
+ dictWord{12, 10, 528},
+ dictWord{12, 10, 552},
+ dictWord{12, 10, 613},
+ dictWord{13, 10, 189},
+ dictWord{13, 10, 256},
+ dictWord{13, 10, 340},
+ dictWord{13, 10, 432},
+ dictWord{13, 10, 436},
+ dictWord{13, 10, 440},
+ dictWord{13, 10, 454},
+ dictWord{14, 10, 174},
+ dictWord{14, 10, 220},
+ dictWord{14, 10, 284},
+ dictWord{14, 10, 390},
+ dictWord{145, 10, 121},
+ dictWord{8, 11, 598},
+ dictWord{9, 11, 664},
+ dictWord{138, 11, 441},
+ dictWord{9, 10, 137},
+ dictWord{138, 10, 221},
+ dictWord{133, 11, 812},
+ dictWord{148, 0, 15},
+ dictWord{134, 0, 1341},
+ dictWord{6, 0, 1017},
+ dictWord{4, 11, 137},
+ dictWord{7, 11, 1178},
+ dictWord{135, 11, 1520},
+ dictWord{7, 10, 390},
+ dictWord{138, 10, 140},
+ dictWord{7, 11, 1260},
+ dictWord{135, 11, 1790},
+ dictWord{137, 11, 191},
+ dictWord{135, 10, 1144},
+ dictWord{6, 0, 1810},
+ dictWord{7, 0, 657},
+ dictWord{8, 0, 886},
+ dictWord{10, 0, 857},
+ dictWord{14, 0, 440},
+ dictWord{144, 0, 96},
+ dictWord{8, 0, 533},
+ dictWord{6, 11, 1661},
+ dictWord{7, 11, 1975},
+ dictWord{7, 11, 2009},
+ dictWord{135, 11, 2011},
+ dictWord{6, 0, 1453},
+ dictWord{134, 10, 464},
+ dictWord{132, 11, 715},
+ dictWord{5, 10, 407},
+ dictWord{11, 10, 204},
+ dictWord{11, 10, 243},
+ dictWord{11, 10, 489},
+ dictWord{12, 10, 293},
+ dictWord{19, 10, 37},
+ dictWord{20, 10, 73},
+ dictWord{150, 10, 38},
+ dictWord{133, 11, 703},
+ dictWord{4, 0, 211},
+ dictWord{7, 0, 1483},
+ dictWord{5, 10, 325},
+ dictWord{8, 10, 5},
+ dictWord{8, 10, 227},
+ dictWord{9, 10, 105},
+ dictWord{10, 10, 585},
+ dictWord{140, 10, 614},
+ dictWord{4, 0, 332},
+ dictWord{5, 0, 335},
+ dictWord{6, 0, 238},
+ dictWord{7, 0, 269},
+ dictWord{7, 0, 811},
+ dictWord{7, 0, 1797},
+ dictWord{8, 0, 836},
+ dictWord{9, 0, 507},
+ dictWord{141, 0, 242},
+ dictWord{5, 11, 89},
+ dictWord{7, 11, 1915},
+ dictWord{9, 11, 185},
+ dictWord{9, 11, 235},
+ dictWord{9, 11, 496},
+ dictWord{10, 11, 64},
+ dictWord{10, 11, 270},
+ dictWord{10, 11, 403},
+ dictWord{10, 11, 469},
+ dictWord{10, 11, 529},
+ dictWord{10, 11, 590},
+ dictWord{11, 11, 140},
+ dictWord{11, 11, 860},
+ dictWord{13, 11, 1},
+ dictWord{13, 11, 422},
+ dictWord{14, 11, 341},
+ dictWord{14, 11, 364},
+ dictWord{17, 11, 93},
+ dictWord{18, 11, 113},
+ dictWord{19, 11, 97},
+ dictWord{147, 11, 113},
+ dictWord{133, 11, 695},
+ dictWord{16, 0, 19},
+ dictWord{5, 11, 6},
+ dictWord{6, 11, 183},
+ dictWord{6, 10, 621},
+ dictWord{7, 11, 680},
+ dictWord{7, 11, 978},
+ dictWord{7, 11, 1013},
+ dictWord{7, 11, 1055},
+ dictWord{12, 11, 230},
+ dictWord{13, 11, 172},
+ dictWord{13, 10, 504},
+ dictWord{146, 11, 29},
+ dictWord{136, 0, 156},
+ dictWord{133, 0, 1009},
+ dictWord{6, 11, 29},
+ dictWord{139, 11, 63},
+ dictWord{134, 0, 820},
+ dictWord{134, 10, 218},
+ dictWord{7, 10, 454},
+ dictWord{7, 10, 782},
+ dictWord{8, 10, 768},
+ dictWord{140, 10, 686},
+ dictWord{5, 0, 228},
+ dictWord{6, 0, 203},
+ dictWord{7, 0, 156},
+ dictWord{8, 0, 347},
+ dictWord{9, 0, 265},
+ dictWord{18, 0, 39},
+ dictWord{20, 0, 54},
+ dictWord{21, 0, 31},
+ dictWord{22, 0, 3},
+ dictWord{23, 0, 0},
+ dictWord{15, 11, 8},
+ dictWord{18, 11, 39},
+ dictWord{20, 11, 54},
+ dictWord{21, 11, 31},
+ dictWord{22, 11, 3},
+ dictWord{151, 11, 0},
+ dictWord{7, 0, 1131},
+ dictWord{135, 0, 1468},
+ dictWord{144, 10, 0},
+ dictWord{134, 0, 1276},
+ dictWord{10, 10, 676},
+ dictWord{140, 10, 462},
+ dictWord{132, 11, 311},
+ dictWord{134, 11, 1740},
+ dictWord{7, 11, 170},
+ dictWord{8, 11, 90},
+ dictWord{8, 11, 177},
+ dictWord{8, 11, 415},
+ dictWord{11, 11, 714},
+ dictWord{142, 11, 281},
+ dictWord{134, 10, 164},
+ dictWord{6, 0, 1792},
+ dictWord{138, 0, 849},
+ dictWord{150, 10, 50},
+ dictWord{5, 0, 291},
+ dictWord{5, 0, 318},
+ dictWord{7, 0, 765},
+ dictWord{9, 0, 389},
+ dictWord{12, 0, 548},
+ dictWord{8, 11, 522},
+ dictWord{142, 11, 328},
+ dictWord{11, 11, 91},
+ dictWord{13, 11, 129},
+ dictWord{15, 11, 101},
+ dictWord{145, 11, 125},
+ dictWord{4, 11, 494},
+ dictWord{6, 11, 74},
+ dictWord{7, 11, 44},
+ dictWord{7, 11, 407},
+ dictWord{8, 11, 551},
+ dictWord{12, 11, 17},
+ dictWord{15, 11, 5},
+ dictWord{148, 11, 11},
+ dictWord{4, 11, 276},
+ dictWord{133, 11, 296},
+ dictWord{6, 10, 343},
+ dictWord{7, 10, 195},
+ dictWord{7, 11, 1777},
+ dictWord{9, 10, 226},
+ dictWord{10, 10, 197},
+ dictWord{10, 10, 575},
+ dictWord{11, 10, 502},
+ dictWord{139, 10, 899},
+ dictWord{10, 0, 525},
+ dictWord{139, 0, 82},
+ dictWord{14, 0, 453},
+ dictWord{4, 11, 7},
+ dictWord{5, 11, 90},
+ dictWord{5, 11, 158},
+ dictWord{6, 11, 542},
+ dictWord{7, 11, 221},
+ dictWord{7, 11, 1574},
+ dictWord{9, 11, 490},
+ dictWord{10, 11, 540},
+ dictWord{11, 11, 443},
+ dictWord{139, 11, 757},
+ dictWord{135, 0, 666},
+ dictWord{22, 10, 29},
+ dictWord{150, 11, 29},
+ dictWord{4, 0, 422},
+ dictWord{147, 10, 8},
+ dictWord{5, 0, 355},
+ dictWord{145, 0, 0},
+ dictWord{6, 0, 1873},
+ dictWord{9, 0, 918},
+ dictWord{7, 11, 588},
+ dictWord{9, 11, 175},
+ dictWord{138, 11, 530},
+ dictWord{143, 11, 31},
+ dictWord{11, 0, 165},
+ dictWord{7, 10, 1125},
+ dictWord{9, 10, 143},
+ dictWord{14, 10, 405},
+ dictWord{150, 10, 21},
+ dictWord{9, 0, 260},
+ dictWord{137, 0, 905},
+ dictWord{5, 11, 872},
+ dictWord{6, 11, 57},
+ dictWord{6, 11, 479},
+ dictWord{6, 11, 562},
+ dictWord{7, 11, 471},
+ dictWord{7, 11, 1060},
+ dictWord{9, 11, 447},
+ dictWord{9, 11, 454},
+ dictWord{141, 11, 6},
+ dictWord{138, 11, 704},
+ dictWord{133, 0, 865},
+ dictWord{5, 0, 914},
+ dictWord{134, 0, 1625},
+ dictWord{133, 0, 234},
+ dictWord{7, 0, 1383},
+ dictWord{5, 11, 31},
+ dictWord{6, 11, 614},
+ dictWord{145, 11, 61},
+ dictWord{7, 11, 1200},
+ dictWord{138, 11, 460},
+ dictWord{6, 11, 424},
+ dictWord{135, 11, 1866},
+ dictWord{136, 0, 306},
+ dictWord{5, 10, 959},
+ dictWord{12, 11, 30},
+ dictWord{13, 11, 148},
+ dictWord{14, 11, 87},
+ dictWord{14, 11, 182},
+ dictWord{16, 11, 42},
+ dictWord{18, 11, 92},
+ dictWord{148, 11, 70},
+ dictWord{6, 0, 1919},
+ dictWord{6, 0, 1921},
+ dictWord{9, 0, 923},
+ dictWord{9, 0, 930},
+ dictWord{9, 0, 941},
+ dictWord{9, 0, 949},
+ dictWord{9, 0, 987},
+ dictWord{9, 0, 988},
+ dictWord{9, 0, 992},
+ dictWord{12, 0, 802},
+ dictWord{12, 0, 815},
+ dictWord{12, 0, 856},
+ dictWord{12, 0, 885},
+ dictWord{12, 0, 893},
+ dictWord{12, 0, 898},
+ dictWord{12, 0, 919},
+ dictWord{12, 0, 920},
+ dictWord{12, 0, 941},
+ dictWord{12, 0, 947},
+ dictWord{15, 0, 183},
+ dictWord{15, 0, 185},
+ dictWord{15, 0, 189},
+ dictWord{15, 0, 197},
+ dictWord{15, 0, 202},
+ dictWord{15, 0, 233},
+ dictWord{18, 0, 218},
+ dictWord{18, 0, 219},
+ dictWord{18, 0, 233},
+ dictWord{143, 11, 156},
+ dictWord{135, 10, 1759},
+ dictWord{136, 10, 173},
+ dictWord{13, 0, 163},
+ dictWord{13, 0, 180},
+ dictWord{18, 0, 78},
+ dictWord{20, 0, 35},
+ dictWord{5, 11, 13},
+ dictWord{134, 11, 142},
+ dictWord{134, 10, 266},
+ dictWord{6, 11, 97},
+ dictWord{7, 11, 116},
+ dictWord{8, 11, 322},
+ dictWord{8, 11, 755},
+ dictWord{9, 11, 548},
+ dictWord{10, 11, 714},
+ dictWord{11, 11, 884},
+ dictWord{141, 11, 324},
+ dictWord{135, 0, 1312},
+ dictWord{9, 0, 814},
+ dictWord{137, 11, 676},
+ dictWord{133, 0, 707},
+ dictWord{135, 0, 1493},
+ dictWord{6, 0, 421},
+ dictWord{7, 0, 61},
+ dictWord{7, 0, 1540},
+ dictWord{10, 0, 11},
+ dictWord{138, 0, 501},
+ dictWord{12, 0, 733},
+ dictWord{12, 0, 766},
+ dictWord{7, 11, 866},
+ dictWord{135, 11, 1163},
+ dictWord{137, 0, 341},
+ dictWord{142, 0, 98},
+ dictWord{145, 11, 115},
+ dictWord{135, 11, 1111},
+ dictWord{136, 10, 300},
+ dictWord{136, 0, 1014},
+ dictWord{8, 11, 1},
+ dictWord{9, 11, 112},
+ dictWord{138, 11, 326},
+ dictWord{132, 11, 730},
+ dictWord{5, 11, 488},
+ dictWord{6, 11, 527},
+ dictWord{7, 11, 489},
+ dictWord{7, 11, 1636},
+ dictWord{8, 11, 121},
+ dictWord{8, 11, 144},
+ dictWord{8, 11, 359},
+ dictWord{9, 11, 193},
+ dictWord{9, 11, 241},
+ dictWord{9, 11, 336},
+ dictWord{9, 11, 882},
+ dictWord{11, 11, 266},
+ dictWord{11, 11, 372},
+ dictWord{11, 11, 944},
+ dictWord{12, 11, 401},
+ dictWord{140, 11, 641},
+ dictWord{6, 0, 971},
+ dictWord{134, 0, 1121},
+ dictWord{6, 0, 102},
+ dictWord{7, 0, 72},
+ dictWord{15, 0, 142},
+ dictWord{147, 0, 67},
+ dictWord{151, 0, 30},
+ dictWord{135, 0, 823},
+ dictWord{134, 0, 1045},
+ dictWord{5, 10, 427},
+ dictWord{5, 10, 734},
+ dictWord{7, 10, 478},
+ dictWord{136, 10, 52},
+ dictWord{7, 0, 1930},
+ dictWord{11, 10, 217},
+ dictWord{142, 10, 165},
+ dictWord{6, 0, 1512},
+ dictWord{135, 0, 1870},
+ dictWord{9, 11, 31},
+ dictWord{10, 11, 244},
+ dictWord{10, 11, 699},
+ dictWord{12, 11, 149},
+ dictWord{141, 11, 497},
+ dictWord{133, 11, 377},
+ dictWord{145, 11, 101},
+ dictWord{10, 11, 158},
+ dictWord{13, 11, 13},
+ dictWord{13, 11, 137},
+ dictWord{13, 11, 258},
+ dictWord{14, 11, 111},
+ dictWord{14, 11, 225},
+ dictWord{14, 11, 253},
+ dictWord{14, 11, 304},
+ dictWord{14, 11, 339},
+ dictWord{14, 11, 417},
+ dictWord{146, 11, 33},
+ dictWord{6, 0, 87},
+ dictWord{6, 10, 1734},
+ dictWord{7, 10, 20},
+ dictWord{7, 10, 1056},
+ dictWord{8, 10, 732},
+ dictWord{9, 10, 406},
+ dictWord{9, 10, 911},
+ dictWord{138, 10, 694},
+ dictWord{134, 0, 1243},
+ dictWord{137, 0, 245},
+ dictWord{7, 0, 68},
+ dictWord{8, 0, 48},
+ dictWord{8, 0, 88},
+ dictWord{8, 0, 582},
+ dictWord{8, 0, 681},
+ dictWord{9, 0, 373},
+ dictWord{9, 0, 864},
+ dictWord{11, 0, 157},
+ dictWord{11, 0, 336},
+ dictWord{11, 0, 843},
+ dictWord{148, 0, 27},
+ dictWord{8, 11, 663},
+ dictWord{144, 11, 8},
+ dictWord{133, 10, 613},
+ dictWord{4, 0, 88},
+ dictWord{5, 0, 137},
+ dictWord{5, 0, 174},
+ dictWord{5, 0, 777},
+ dictWord{6, 0, 1664},
+ dictWord{6, 0, 1725},
+ dictWord{7, 0, 77},
+ dictWord{7, 0, 426},
+ dictWord{7, 0, 1317},
+ dictWord{7, 0, 1355},
+ dictWord{8, 0, 126},
+ dictWord{8, 0, 563},
+ dictWord{9, 0, 523},
+ dictWord{9, 0, 750},
+ dictWord{10, 0, 310},
+ dictWord{10, 0, 836},
+ dictWord{11, 0, 42},
+ dictWord{11, 0, 318},
+ dictWord{11, 0, 731},
+ dictWord{12, 0, 68},
+ dictWord{12, 0, 92},
+ dictWord{12, 0, 507},
+ dictWord{12, 0, 692},
+ dictWord{13, 0, 81},
+ dictWord{13, 0, 238},
+ dictWord{13, 0, 374},
+ dictWord{14, 0, 436},
+ dictWord{18, 0, 138},
+ dictWord{19, 0, 78},
+ dictWord{19, 0, 111},
+ dictWord{20, 0, 55},
+ dictWord{20, 0, 77},
+ dictWord{148, 0, 92},
+ dictWord{141, 0, 418},
+ dictWord{4, 0, 938},
+ dictWord{137, 0, 625},
+ dictWord{138, 0, 351},
+ dictWord{5, 11, 843},
+ dictWord{7, 10, 32},
+ dictWord{7, 10, 984},
+ dictWord{8, 10, 85},
+ dictWord{8, 10, 709},
+ dictWord{9, 10, 579},
+ dictWord{9, 10, 847},
+ dictWord{9, 10, 856},
+ dictWord{10, 10, 799},
+ dictWord{11, 10, 258},
+ dictWord{11, 10, 1007},
+ dictWord{12, 10, 331},
+ dictWord{12, 10, 615},
+ dictWord{13, 10, 188},
+ dictWord{13, 10, 435},
+ dictWord{14, 10, 8},
+ dictWord{15, 10, 165},
+ dictWord{16, 10, 27},
+ dictWord{148, 10, 40},
+ dictWord{6, 0, 1668},
+ dictWord{7, 0, 1499},
+ dictWord{8, 0, 117},
+ dictWord{9, 0, 314},
+ dictWord{138, 0, 174},
+ dictWord{135, 0, 707},
+ dictWord{132, 11, 554},
+ dictWord{133, 11, 536},
+ dictWord{5, 0, 403},
+ dictWord{5, 11, 207},
+ dictWord{9, 11, 79},
+ dictWord{11, 11, 625},
+ dictWord{145, 11, 7},
+ dictWord{132, 11, 424},
+ dictWord{136, 11, 785},
+ dictWord{4, 10, 167},
+ dictWord{135, 10, 82},
+ dictWord{9, 0, 7},
+ dictWord{23, 0, 6},
+ dictWord{9, 11, 7},
+ dictWord{151, 11, 6},
+ dictWord{6, 0, 282},
+ dictWord{5, 10, 62},
+ dictWord{6, 10, 534},
+ dictWord{7, 10, 74},
+ dictWord{7, 10, 678},
+ dictWord{7, 10, 684},
+ dictWord{7, 10, 1043},
+ dictWord{7, 10, 1072},
+ dictWord{8, 10, 280},
+ dictWord{8, 10, 541},
+ dictWord{8, 10, 686},
+ dictWord{9, 10, 258},
+ dictWord{10, 10, 519},
+ dictWord{11, 10, 252},
+ dictWord{140, 10, 282},
+ dictWord{138, 10, 33},
+ dictWord{132, 10, 359},
+ dictWord{4, 0, 44},
+ dictWord{5, 0, 311},
+ dictWord{6, 0, 156},
+ dictWord{7, 0, 639},
+ dictWord{7, 0, 762},
+ dictWord{7, 0, 1827},
+ dictWord{9, 0, 8},
+ dictWord{9, 0, 462},
+ dictWord{148, 0, 83},
+ dictWord{7, 11, 769},
+ dictWord{9, 11, 18},
+ dictWord{138, 11, 358},
+ dictWord{4, 0, 346},
+ dictWord{7, 0, 115},
+ dictWord{9, 0, 180},
+ dictWord{9, 0, 456},
+ dictWord{10, 0, 363},
+ dictWord{4, 11, 896},
+ dictWord{134, 11, 1777},
+ dictWord{133, 10, 211},
+ dictWord{7, 0, 761},
+ dictWord{7, 0, 1051},
+ dictWord{137, 0, 545},
+ dictWord{6, 10, 145},
+ dictWord{141, 10, 336},
+ dictWord{7, 11, 750},
+ dictWord{9, 11, 223},
+ dictWord{11, 11, 27},
+ dictWord{11, 11, 466},
+ dictWord{12, 11, 624},
+ dictWord{14, 11, 265},
+ dictWord{146, 11, 61},
+ dictWord{6, 0, 752},
+ dictWord{6, 0, 768},
+ dictWord{6, 0, 1195},
+ dictWord{6, 0, 1254},
+ dictWord{6, 0, 1619},
+ dictWord{137, 0, 835},
+ dictWord{6, 0, 1936},
+ dictWord{8, 0, 930},
+ dictWord{136, 0, 960},
+ dictWord{132, 10, 263},
+ dictWord{132, 11, 249},
+ dictWord{12, 0, 653},
+ dictWord{132, 10, 916},
+ dictWord{4, 11, 603},
+ dictWord{133, 11, 661},
+ dictWord{8, 0, 344},
+ dictWord{4, 11, 11},
+ dictWord{6, 11, 128},
+ dictWord{7, 11, 231},
+ dictWord{7, 11, 1533},
+ dictWord{138, 11, 725},
+ dictWord{134, 0, 1483},
+ dictWord{134, 0, 875},
+ dictWord{6, 0, 185},
+ dictWord{7, 0, 1899},
+ dictWord{9, 0, 875},
+ dictWord{139, 0, 673},
+ dictWord{15, 10, 155},
+ dictWord{144, 10, 79},
+ dictWord{7, 0, 93},
+ dictWord{7, 0, 210},
+ dictWord{7, 0, 1223},
+ dictWord{8, 0, 451},
+ dictWord{8, 0, 460},
+ dictWord{11, 0, 353},
+ dictWord{11, 0, 475},
+ dictWord{4, 10, 599},
+ dictWord{6, 10, 1634},
+ dictWord{7, 10, 67},
+ dictWord{7, 10, 691},
+ dictWord{7, 10, 979},
+ dictWord{7, 10, 1697},
+ dictWord{8, 10, 207},
+ dictWord{8, 10, 214},
+ dictWord{8, 10, 231},
+ dictWord{8, 10, 294},
+ dictWord{8, 10, 336},
+ dictWord{8, 10, 428},
+ dictWord{8, 10, 471},
+ dictWord{8, 10, 622},
+ dictWord{8, 10, 626},
+ dictWord{8, 10, 679},
+ dictWord{8, 10, 759},
+ dictWord{8, 10, 829},
+ dictWord{9, 10, 11},
+ dictWord{9, 10, 246},
+ dictWord{9, 10, 484},
+ dictWord{9, 10, 573},
+ dictWord{9, 10, 706},
+ dictWord{9, 10, 762},
+ dictWord{9, 10, 798},
+ dictWord{9, 10, 855},
+ dictWord{9, 10, 870},
+ dictWord{9, 10, 912},
+ dictWord{10, 10, 303},
+ dictWord{10, 10, 335},
+ dictWord{10, 10, 424},
+ dictWord{10, 10, 461},
+ dictWord{10, 10, 543},
+ dictWord{10, 10, 759},
+ dictWord{10, 10, 814},
+ dictWord{11, 10, 59},
+ dictWord{11, 10, 235},
+ dictWord{11, 10, 590},
+ dictWord{11, 10, 929},
+ dictWord{11, 10, 963},
+ dictWord{11, 10, 987},
+ dictWord{12, 10, 114},
+ dictWord{12, 10, 182},
+ dictWord{12, 10, 226},
+ dictWord{12, 10, 332},
+ dictWord{12, 10, 439},
+ dictWord{12, 10, 575},
+ dictWord{12, 10, 598},
+ dictWord{12, 10, 675},
+ dictWord{13, 10, 8},
+ dictWord{13, 10, 125},
+ dictWord{13, 10, 194},
+ dictWord{13, 10, 287},
+ dictWord{14, 10, 197},
+ dictWord{14, 10, 383},
+ dictWord{15, 10, 53},
+ dictWord{17, 10, 63},
+ dictWord{19, 10, 46},
+ dictWord{19, 10, 98},
+ dictWord{19, 10, 106},
+ dictWord{148, 10, 85},
+ dictWord{132, 11, 476},
+ dictWord{4, 0, 327},
+ dictWord{5, 0, 478},
+ dictWord{7, 0, 1332},
+ dictWord{136, 0, 753},
+ dictWord{5, 0, 1020},
+ dictWord{133, 0, 1022},
+ dictWord{135, 11, 1807},
+ dictWord{4, 0, 103},
+ dictWord{133, 0, 401},
+ dictWord{4, 0, 499},
+ dictWord{135, 0, 1421},
+ dictWord{10, 0, 207},
+ dictWord{13, 0, 164},
+ dictWord{147, 10, 126},
+ dictWord{9, 11, 20},
+ dictWord{10, 11, 324},
+ dictWord{139, 11, 488},
+ dictWord{132, 0, 96},
+ dictWord{9, 11, 280},
+ dictWord{138, 11, 134},
+ dictWord{135, 0, 968},
+ dictWord{133, 10, 187},
+ dictWord{135, 10, 1286},
+ dictWord{5, 11, 112},
+ dictWord{6, 11, 103},
+ dictWord{134, 11, 150},
+ dictWord{8, 0, 914},
+ dictWord{10, 0, 3},
+ dictWord{4, 10, 215},
+ dictWord{9, 10, 38},
+ dictWord{11, 10, 23},
+ dictWord{11, 10, 127},
+ dictWord{139, 10, 796},
+ dictWord{135, 0, 399},
+ dictWord{6, 0, 563},
+ dictWord{137, 0, 224},
+ dictWord{6, 0, 704},
+ dictWord{134, 0, 1214},
+ dictWord{4, 11, 708},
+ dictWord{8, 11, 15},
+ dictWord{9, 11, 50},
+ dictWord{9, 11, 386},
+ dictWord{11, 11, 18},
+ dictWord{11, 11, 529},
+ dictWord{140, 11, 228},
+ dictWord{4, 11, 563},
+ dictWord{7, 11, 109},
+ dictWord{7, 11, 592},
+ dictWord{7, 11, 637},
+ dictWord{7, 11, 770},
+ dictWord{7, 11, 1701},
+ dictWord{8, 11, 436},
+ dictWord{8, 11, 463},
+ dictWord{9, 11, 60},
+ dictWord{9, 11, 335},
+ dictWord{9, 11, 904},
+ dictWord{10, 11, 73},
+ dictWord{11, 11, 434},
+ dictWord{12, 11, 585},
+ dictWord{13, 11, 331},
+ dictWord{18, 11, 110},
+ dictWord{148, 11, 60},
+ dictWord{134, 0, 1559},
+ dictWord{132, 11, 502},
+ dictWord{6, 11, 347},
+ dictWord{138, 11, 161},
+ dictWord{4, 11, 33},
+ dictWord{5, 11, 102},
+ dictWord{5, 11, 500},
+ dictWord{6, 11, 284},
+ dictWord{7, 11, 1079},
+ dictWord{7, 11, 1423},
+ dictWord{7, 11, 1702},
+ dictWord{8, 11, 470},
+ dictWord{9, 11, 554},
+ dictWord{9, 11, 723},
+ dictWord{139, 11, 333},
+ dictWord{7, 11, 246},
+ dictWord{135, 11, 840},
+ dictWord{6, 11, 10},
+ dictWord{8, 11, 571},
+ dictWord{9, 11, 739},
+ dictWord{143, 11, 91},
+ dictWord{8, 0, 861},
+ dictWord{10, 0, 905},
+ dictWord{12, 0, 730},
+ dictWord{12, 0, 789},
+ dictWord{133, 11, 626},
+ dictWord{134, 0, 946},
+ dictWord{5, 0, 746},
+ dictWord{12, 0, 333},
+ dictWord{14, 0, 332},
+ dictWord{12, 11, 333},
+ dictWord{142, 11, 332},
+ dictWord{5, 11, 18},
+ dictWord{6, 11, 526},
+ dictWord{13, 11, 24},
+ dictWord{13, 11, 110},
+ dictWord{19, 11, 5},
+ dictWord{147, 11, 44},
+ dictWord{4, 0, 910},
+ dictWord{5, 0, 832},
+ dictWord{135, 10, 2002},
+ dictWord{10, 11, 768},
+ dictWord{139, 11, 787},
+ dictWord{4, 11, 309},
+ dictWord{5, 11, 462},
+ dictWord{7, 11, 970},
+ dictWord{135, 11, 1097},
+ dictWord{4, 10, 28},
+ dictWord{5, 10, 440},
+ dictWord{7, 10, 248},
+ dictWord{11, 10, 833},
+ dictWord{140, 10, 344},
+ dictWord{134, 10, 1654},
+ dictWord{6, 0, 632},
+ dictWord{6, 0, 652},
+ dictWord{6, 0, 1272},
+ dictWord{6, 0, 1384},
+ dictWord{134, 0, 1560},
+ dictWord{134, 11, 1704},
+ dictWord{6, 0, 1393},
+ dictWord{133, 10, 853},
+ dictWord{6, 10, 249},
+ dictWord{7, 10, 1234},
+ dictWord{139, 10, 573},
+ dictWord{5, 11, 86},
+ dictWord{7, 11, 743},
+ dictWord{9, 11, 85},
+ dictWord{10, 11, 281},
+ dictWord{10, 11, 432},
+ dictWord{11, 11, 490},
+ dictWord{12, 11, 251},
+ dictWord{13, 11, 118},
+ dictWord{14, 11, 378},
+ dictWord{146, 11, 143},
+ dictWord{5, 11, 524},
+ dictWord{133, 11, 744},
+ dictWord{134, 0, 1514},
+ dictWord{10, 0, 201},
+ dictWord{142, 0, 319},
+ dictWord{7, 0, 717},
+ dictWord{10, 0, 510},
+ dictWord{7, 10, 392},
+ dictWord{8, 10, 20},
+ dictWord{8, 10, 172},
+ dictWord{8, 10, 690},
+ dictWord{9, 10, 383},
+ dictWord{9, 10, 845},
+ dictWord{11, 10, 293},
+ dictWord{11, 10, 832},
+ dictWord{11, 10, 920},
+ dictWord{11, 10, 984},
+ dictWord{141, 10, 221},
+ dictWord{134, 0, 1381},
+ dictWord{5, 10, 858},
+ dictWord{133, 10, 992},
+ dictWord{8, 0, 528},
+ dictWord{137, 0, 348},
+ dictWord{10, 11, 107},
+ dictWord{140, 11, 436},
+ dictWord{4, 0, 20},
+ dictWord{133, 0, 616},
+ dictWord{134, 0, 1251},
+ dictWord{132, 11, 927},
+ dictWord{10, 11, 123},
+ dictWord{12, 11, 670},
+ dictWord{13, 11, 371},
+ dictWord{14, 11, 142},
+ dictWord{146, 11, 94},
+ dictWord{134, 0, 1163},
+ dictWord{7, 11, 1149},
+ dictWord{137, 11, 156},
+ dictWord{134, 0, 307},
+ dictWord{133, 11, 778},
+ dictWord{7, 0, 1091},
+ dictWord{135, 0, 1765},
+ dictWord{5, 11, 502},
+ dictWord{6, 10, 268},
+ dictWord{137, 10, 62},
+ dictWord{8, 11, 196},
+ dictWord{10, 11, 283},
+ dictWord{139, 11, 406},
+ dictWord{4, 0, 26},
+ dictWord{5, 0, 429},
+ dictWord{6, 0, 245},
+ dictWord{7, 0, 704},
+ dictWord{7, 0, 1379},
+ dictWord{135, 0, 1474},
+ dictWord{133, 11, 855},
+ dictWord{132, 0, 881},
+ dictWord{4, 0, 621},
+ dictWord{135, 11, 1596},
+ dictWord{7, 11, 1400},
+ dictWord{9, 11, 446},
+ dictWord{138, 11, 45},
+ dictWord{6, 0, 736},
+ dictWord{138, 10, 106},
+ dictWord{133, 0, 542},
+ dictWord{134, 0, 348},
+ dictWord{133, 0, 868},
+ dictWord{136, 0, 433},
+ dictWord{135, 0, 1495},
+ dictWord{138, 0, 771},
+ dictWord{6, 10, 613},
+ dictWord{136, 10, 223},
+ dictWord{138, 0, 215},
+ dictWord{141, 0, 124},
+ dictWord{136, 11, 391},
+ dictWord{135, 11, 172},
+ dictWord{132, 10, 670},
+ dictWord{140, 0, 55},
+ dictWord{9, 10, 40},
+ dictWord{139, 10, 136},
+ dictWord{7, 0, 62},
+ dictWord{147, 0, 112},
+ dictWord{132, 0, 856},
+ dictWord{132, 11, 568},
+ dictWord{12, 0, 270},
+ dictWord{139, 10, 259},
+ dictWord{8, 0, 572},
+ dictWord{137, 0, 698},
+ dictWord{4, 11, 732},
+ dictWord{9, 10, 310},
+ dictWord{137, 10, 682},
+ dictWord{142, 10, 296},
+ dictWord{134, 0, 939},
+ dictWord{136, 11, 733},
+ dictWord{135, 11, 1435},
+ dictWord{7, 10, 1401},
+ dictWord{135, 10, 1476},
+ dictWord{6, 0, 352},
+ dictWord{4, 10, 296},
+ dictWord{7, 10, 401},
+ dictWord{7, 10, 1410},
+ dictWord{7, 10, 1594},
+ dictWord{7, 10, 1674},
+ dictWord{8, 10, 63},
+ dictWord{8, 10, 660},
+ dictWord{137, 10, 74},
+ dictWord{4, 11, 428},
+ dictWord{133, 11, 668},
+ dictWord{4, 10, 139},
+ dictWord{4, 10, 388},
+ dictWord{140, 10, 188},
+ dictWord{7, 11, 2015},
+ dictWord{140, 11, 665},
+ dictWord{132, 0, 647},
+ dictWord{146, 0, 10},
+ dictWord{138, 0, 220},
+ dictWord{142, 0, 464},
+ dictWord{132, 0, 109},
+ dictWord{134, 0, 1746},
+ dictWord{6, 0, 515},
+ dictWord{4, 10, 747},
+ dictWord{6, 11, 1623},
+ dictWord{6, 11, 1681},
+ dictWord{7, 10, 649},
+ dictWord{7, 10, 1479},
+ dictWord{135, 10, 1583},
+ dictWord{133, 10, 232},
+ dictWord{135, 0, 566},
+ dictWord{137, 10, 887},
+ dictWord{4, 0, 40},
+ dictWord{10, 0, 67},
+ dictWord{11, 0, 117},
+ dictWord{11, 0, 768},
+ dictWord{139, 0, 935},
+ dictWord{132, 0, 801},
+ dictWord{7, 0, 992},
+ dictWord{8, 0, 301},
+ dictWord{9, 0, 722},
+ dictWord{12, 0, 63},
+ dictWord{13, 0, 29},
+ dictWord{14, 0, 161},
+ dictWord{143, 0, 18},
+ dictWord{139, 0, 923},
+ dictWord{6, 11, 1748},
+ dictWord{8, 11, 715},
+ dictWord{9, 11, 802},
+ dictWord{10, 11, 46},
+ dictWord{10, 11, 819},
+ dictWord{13, 11, 308},
+ dictWord{14, 11, 351},
+ dictWord{14, 11, 363},
+ dictWord{146, 11, 67},
+ dictWord{137, 11, 745},
+ dictWord{7, 0, 1145},
+ dictWord{4, 10, 14},
+ dictWord{7, 10, 1801},
+ dictWord{10, 10, 748},
+ dictWord{141, 10, 458},
+ dictWord{4, 11, 63},
+ dictWord{5, 11, 347},
+ dictWord{134, 11, 474},
+ dictWord{135, 0, 568},
+ dictWord{4, 10, 425},
+ dictWord{7, 11, 577},
+ dictWord{7, 11, 1432},
+ dictWord{9, 11, 475},
+ dictWord{9, 11, 505},
+ dictWord{9, 11, 526},
+ dictWord{9, 11, 609},
+ dictWord{9, 11, 689},
+ dictWord{9, 11, 726},
+ dictWord{9, 11, 735},
+ dictWord{9, 11, 738},
+ dictWord{10, 11, 556},
+ dictWord{10, 11, 674},
+ dictWord{10, 11, 684},
+ dictWord{11, 11, 89},
+ dictWord{11, 11, 202},
+ dictWord{11, 11, 272},
+ dictWord{11, 11, 380},
+ dictWord{11, 11, 415},
+ dictWord{11, 11, 505},
+ dictWord{11, 11, 537},
+ dictWord{11, 11, 550},
+ dictWord{11, 11, 562},
+ dictWord{11, 11, 640},
+ dictWord{11, 11, 667},
+ dictWord{11, 11, 688},
+ dictWord{11, 11, 847},
+ dictWord{11, 11, 927},
+ dictWord{11, 11, 930},
+ dictWord{11, 11, 940},
+ dictWord{12, 11, 144},
+ dictWord{12, 11, 325},
+ dictWord{12, 11, 329},
+ dictWord{12, 11, 389},
+ dictWord{12, 11, 403},
+ dictWord{12, 11, 451},
+ dictWord{12, 11, 515},
+ dictWord{12, 11, 604},
+ dictWord{12, 11, 616},
+ dictWord{12, 11, 626},
+ dictWord{13, 11, 66},
+ dictWord{13, 11, 131},
+ dictWord{13, 11, 167},
+ dictWord{13, 11, 236},
+ dictWord{13, 11, 368},
+ dictWord{13, 11, 411},
+ dictWord{13, 11, 434},
+ dictWord{13, 11, 453},
+ dictWord{13, 11, 461},
+ dictWord{13, 11, 474},
+ dictWord{14, 11, 59},
+ dictWord{14, 11, 60},
+ dictWord{14, 11, 139},
+ dictWord{14, 11, 152},
+ dictWord{14, 11, 276},
+ dictWord{14, 11, 353},
+ dictWord{14, 11, 402},
+ dictWord{15, 11, 28},
+ dictWord{15, 11, 81},
+ dictWord{15, 11, 123},
+ dictWord{15, 11, 152},
+ dictWord{18, 11, 136},
+ dictWord{148, 11, 88},
+ dictWord{137, 0, 247},
+ dictWord{135, 11, 1622},
+ dictWord{9, 11, 544},
+ dictWord{11, 11, 413},
+ dictWord{144, 11, 25},
+ dictWord{4, 0, 645},
+ dictWord{7, 0, 825},
+ dictWord{6, 10, 1768},
+ dictWord{135, 11, 89},
+ dictWord{140, 0, 328},
+ dictWord{5, 10, 943},
+ dictWord{134, 10, 1779},
+ dictWord{134, 0, 1363},
+ dictWord{5, 10, 245},
+ dictWord{6, 10, 576},
+ dictWord{7, 10, 582},
+ dictWord{136, 10, 225},
+ dictWord{134, 0, 1280},
+ dictWord{5, 11, 824},
+ dictWord{133, 11, 941},
+ dictWord{7, 11, 440},
+ dictWord{8, 11, 230},
+ dictWord{139, 11, 106},
+ dictWord{5, 0, 28},
+ dictWord{6, 0, 204},
+ dictWord{10, 0, 320},
+ dictWord{10, 0, 583},
+ dictWord{13, 0, 502},
+ dictWord{14, 0, 72},
+ dictWord{14, 0, 274},
+ dictWord{14, 0, 312},
+ dictWord{14, 0, 344},
+ dictWord{15, 0, 159},
+ dictWord{16, 0, 62},
+ dictWord{16, 0, 69},
+ dictWord{17, 0, 30},
+ dictWord{18, 0, 42},
+ dictWord{18, 0, 53},
+ dictWord{18, 0, 84},
+ dictWord{18, 0, 140},
+ dictWord{19, 0, 68},
+ dictWord{19, 0, 85},
+ dictWord{20, 0, 5},
+ dictWord{20, 0, 45},
+ dictWord{20, 0, 101},
+ dictWord{22, 0, 7},
+ dictWord{150, 0, 20},
+ dictWord{4, 0, 558},
+ dictWord{6, 0, 390},
+ dictWord{7, 0, 162},
+ dictWord{7, 0, 689},
+ dictWord{9, 0, 360},
+ dictWord{138, 0, 653},
+ dictWord{134, 0, 764},
+ dictWord{6, 0, 862},
+ dictWord{137, 0, 833},
+ dictWord{5, 0, 856},
+ dictWord{6, 0, 1672},
+ dictWord{6, 0, 1757},
+ dictWord{134, 0, 1781},
+ dictWord{5, 0, 92},
+ dictWord{10, 0, 736},
+ dictWord{140, 0, 102},
+ dictWord{6, 0, 1927},
+ dictWord{6, 0, 1944},
+ dictWord{8, 0, 924},
+ dictWord{8, 0, 948},
+ dictWord{10, 0, 967},
+ dictWord{138, 0, 978},
+ dictWord{134, 0, 1479},
+ dictWord{5, 0, 590},
+ dictWord{8, 0, 360},
+ dictWord{9, 0, 213},
+ dictWord{138, 0, 63},
+ dictWord{134, 0, 1521},
+ dictWord{6, 0, 709},
+ dictWord{134, 0, 891},
+ dictWord{132, 10, 443},
+ dictWord{13, 0, 477},
+ dictWord{14, 0, 120},
+ dictWord{148, 0, 61},
+ dictWord{4, 11, 914},
+ dictWord{5, 11, 800},
+ dictWord{133, 11, 852},
+ dictWord{10, 11, 54},
+ dictWord{141, 11, 115},
+ dictWord{4, 11, 918},
+ dictWord{133, 11, 876},
+ dictWord{139, 11, 152},
+ dictWord{4, 11, 92},
+ dictWord{133, 11, 274},
+ dictWord{135, 11, 1901},
+ dictWord{9, 11, 800},
+ dictWord{10, 11, 693},
+ dictWord{11, 11, 482},
+ dictWord{11, 11, 734},
+ dictWord{139, 11, 789},
+ dictWord{9, 0, 483},
+ dictWord{132, 10, 298},
+ dictWord{6, 0, 1213},
+ dictWord{141, 11, 498},
+ dictWord{135, 11, 1451},
+ dictWord{133, 11, 743},
+ dictWord{4, 0, 1022},
+ dictWord{10, 0, 1000},
+ dictWord{12, 0, 957},
+ dictWord{12, 0, 980},
+ dictWord{12, 0, 1013},
+ dictWord{14, 0, 481},
+ dictWord{144, 0, 116},
+ dictWord{8, 0, 503},
+ dictWord{17, 0, 29},
+ dictWord{4, 11, 49},
+ dictWord{7, 11, 280},
+ dictWord{135, 11, 1633},
+ dictWord{135, 0, 1712},
+ dictWord{134, 0, 466},
+ dictWord{136, 11, 47},
+ dictWord{5, 10, 164},
+ dictWord{7, 10, 121},
+ dictWord{142, 10, 189},
+ dictWord{7, 10, 812},
+ dictWord{7, 10, 1261},
+ dictWord{7, 10, 1360},
+ dictWord{9, 10, 632},
+ dictWord{140, 10, 352},
+ dictWord{139, 10, 556},
+ dictWord{132, 0, 731},
+ dictWord{5, 11, 272},
+ dictWord{5, 11, 908},
+ dictWord{5, 11, 942},
+ dictWord{7, 11, 1008},
+ dictWord{7, 11, 1560},
+ dictWord{8, 11, 197},
+ dictWord{9, 11, 47},
+ dictWord{11, 11, 538},
+ dictWord{139, 11, 742},
+ dictWord{4, 10, 172},
+ dictWord{9, 10, 611},
+ dictWord{10, 10, 436},
+ dictWord{12, 10, 673},
+ dictWord{141, 10, 255},
+ dictWord{133, 10, 844},
+ dictWord{10, 0, 484},
+ dictWord{11, 0, 754},
+ dictWord{12, 0, 457},
+ dictWord{14, 0, 171},
+ dictWord{14, 0, 389},
+ dictWord{146, 0, 153},
+ dictWord{9, 10, 263},
+ dictWord{10, 10, 147},
+ dictWord{138, 10, 492},
+ dictWord{137, 11, 891},
+ dictWord{138, 0, 241},
+ dictWord{133, 10, 537},
+ dictWord{6, 0, 2005},
+ dictWord{136, 0, 964},
+ dictWord{137, 10, 842},
+ dictWord{151, 11, 8},
+ dictWord{4, 11, 407},
+ dictWord{132, 11, 560},
+ dictWord{135, 11, 1884},
+ dictWord{6, 0, 1100},
+ dictWord{134, 0, 1242},
+ dictWord{135, 0, 954},
+ dictWord{5, 10, 230},
+ dictWord{5, 10, 392},
+ dictWord{6, 10, 420},
+ dictWord{9, 10, 568},
+ dictWord{140, 10, 612},
+ dictWord{4, 11, 475},
+ dictWord{11, 11, 35},
+ dictWord{11, 11, 90},
+ dictWord{13, 11, 7},
+ dictWord{13, 11, 71},
+ dictWord{13, 11, 177},
+ dictWord{142, 11, 422},
+ dictWord{136, 11, 332},
+ dictWord{135, 0, 1958},
+ dictWord{6, 0, 549},
+ dictWord{8, 0, 34},
+ dictWord{8, 0, 283},
+ dictWord{9, 0, 165},
+ dictWord{138, 0, 475},
+ dictWord{10, 0, 952},
+ dictWord{12, 0, 966},
+ dictWord{140, 0, 994},
+ dictWord{5, 0, 652},
+ dictWord{5, 0, 701},
+ dictWord{135, 0, 449},
+ dictWord{4, 0, 655},
+ dictWord{7, 0, 850},
+ dictWord{17, 0, 75},
+ dictWord{146, 0, 137},
+ dictWord{4, 0, 146},
+ dictWord{7, 0, 1618},
+ dictWord{8, 0, 670},
+ dictWord{5, 10, 41},
+ dictWord{7, 10, 1459},
+ dictWord{7, 10, 1469},
+ dictWord{7, 10, 1859},
+ dictWord{9, 10, 549},
+ dictWord{139, 10, 905},
+ dictWord{133, 10, 696},
+ dictWord{6, 0, 159},
+ dictWord{6, 0, 364},
+ dictWord{7, 0, 516},
+ dictWord{137, 0, 518},
+ dictWord{135, 0, 1439},
+ dictWord{6, 11, 222},
+ dictWord{7, 11, 636},
+ dictWord{7, 11, 1620},
+ dictWord{8, 11, 409},
+ dictWord{9, 11, 693},
+ dictWord{139, 11, 77},
+ dictWord{13, 0, 151},
+ dictWord{141, 11, 45},
+ dictWord{6, 0, 1027},
+ dictWord{4, 11, 336},
+ dictWord{132, 10, 771},
+ dictWord{139, 11, 392},
+ dictWord{10, 11, 121},
+ dictWord{11, 11, 175},
+ dictWord{149, 11, 16},
+ dictWord{8, 0, 950},
+ dictWord{138, 0, 983},
+ dictWord{133, 10, 921},
+ dictWord{135, 0, 993},
+ dictWord{6, 10, 180},
+ dictWord{7, 10, 1137},
+ dictWord{8, 10, 751},
+ dictWord{139, 10, 805},
+ dictWord{7, 0, 501},
+ dictWord{9, 0, 111},
+ dictWord{10, 0, 141},
+ dictWord{11, 0, 332},
+ dictWord{13, 0, 43},
+ dictWord{13, 0, 429},
+ dictWord{14, 0, 130},
+ dictWord{14, 0, 415},
+ dictWord{145, 0, 102},
+ dictWord{4, 10, 183},
+ dictWord{5, 11, 882},
+ dictWord{7, 10, 271},
+ dictWord{11, 10, 824},
+ dictWord{11, 10, 952},
+ dictWord{13, 10, 278},
+ dictWord{13, 10, 339},
+ dictWord{13, 10, 482},
+ dictWord{14, 10, 424},
+ dictWord{148, 10, 99},
+ dictWord{4, 10, 19},
+ dictWord{5, 10, 477},
+ dictWord{5, 10, 596},
+ dictWord{6, 10, 505},
+ dictWord{7, 10, 1221},
+ dictWord{11, 10, 907},
+ dictWord{12, 10, 209},
+ dictWord{141, 10, 214},
+ dictWord{135, 10, 1215},
+ dictWord{133, 0, 452},
+ dictWord{132, 11, 426},
+ dictWord{5, 0, 149},
+ dictWord{136, 0, 233},
+ dictWord{133, 0, 935},
+ dictWord{6, 11, 58},
+ dictWord{7, 11, 654},
+ dictWord{7, 11, 745},
+ dictWord{7, 11, 1969},
+ dictWord{8, 11, 240},
+ dictWord{8, 11, 675},
+ dictWord{9, 11, 479},
+ dictWord{9, 11, 731},
+ dictWord{10, 11, 330},
+ dictWord{10, 11, 593},
+ dictWord{10, 11, 817},
+ dictWord{11, 11, 32},
+ dictWord{11, 11, 133},
+ dictWord{11, 11, 221},
+ dictWord{145, 11, 68},
+ dictWord{12, 0, 582},
+ dictWord{18, 0, 131},
+ dictWord{7, 11, 102},
+ dictWord{137, 11, 538},
+ dictWord{136, 0, 801},
+ dictWord{134, 10, 1645},
+ dictWord{132, 0, 70},
+ dictWord{6, 10, 92},
+ dictWord{6, 10, 188},
+ dictWord{7, 10, 1269},
+ dictWord{7, 10, 1524},
+ dictWord{7, 10, 1876},
+ dictWord{10, 10, 228},
+ dictWord{139, 10, 1020},
+ dictWord{4, 10, 459},
+ dictWord{133, 10, 966},
+ dictWord{138, 0, 369},
+ dictWord{16, 0, 36},
+ dictWord{140, 10, 330},
+ dictWord{141, 11, 366},
+ dictWord{7, 0, 721},
+ dictWord{10, 0, 236},
+ dictWord{12, 0, 204},
+ dictWord{6, 10, 18},
+ dictWord{7, 10, 932},
+ dictWord{8, 10, 757},
+ dictWord{9, 10, 54},
+ dictWord{9, 10, 65},
+ dictWord{9, 10, 844},
+ dictWord{10, 10, 113},
+ dictWord{10, 10, 315},
+ dictWord{10, 10, 798},
+ dictWord{11, 10, 153},
+ dictWord{12, 10, 151},
+ dictWord{12, 10, 392},
+ dictWord{12, 10, 666},
+ dictWord{142, 10, 248},
+ dictWord{7, 0, 241},
+ dictWord{10, 0, 430},
+ dictWord{8, 10, 548},
+ dictWord{9, 10, 532},
+ dictWord{10, 10, 117},
+ dictWord{11, 10, 351},
+ dictWord{11, 10, 375},
+ dictWord{143, 10, 23},
+ dictWord{134, 10, 1742},
+ dictWord{133, 10, 965},
+ dictWord{133, 11, 566},
+ dictWord{6, 11, 48},
+ dictWord{135, 11, 63},
+ dictWord{134, 10, 182},
+ dictWord{10, 10, 65},
+ dictWord{10, 10, 488},
+ dictWord{138, 10, 497},
+ dictWord{6, 11, 114},
+ dictWord{7, 11, 1224},
+ dictWord{7, 11, 1556},
+ dictWord{136, 11, 3},
+ dictWord{134, 0, 1817},
+ dictWord{8, 11, 576},
+ dictWord{137, 11, 267},
+ dictWord{6, 0, 1078},
+ dictWord{144, 0, 16},
+ dictWord{9, 10, 588},
+ dictWord{138, 10, 260},
+ dictWord{138, 0, 1021},
+ dictWord{5, 0, 406},
+ dictWord{134, 0, 2022},
+ dictWord{133, 11, 933},
+ dictWord{6, 0, 69},
+ dictWord{135, 0, 117},
+ dictWord{7, 0, 1830},
+ dictWord{136, 11, 427},
+ dictWord{4, 0, 432},
+ dictWord{135, 0, 824},
+ dictWord{134, 10, 1786},
+ dictWord{133, 0, 826},
+ dictWord{139, 11, 67},
+ dictWord{133, 11, 759},
+ dictWord{135, 10, 308},
+ dictWord{137, 0, 816},
+ dictWord{133, 0, 1000},
+ dictWord{4, 0, 297},
+ dictWord{6, 0, 529},
+ dictWord{7, 0, 152},
+ dictWord{7, 0, 713},
+ dictWord{7, 0, 1845},
+ dictWord{8, 0, 710},
+ dictWord{8, 0, 717},
+ dictWord{12, 0, 639},
+ dictWord{140, 0, 685},
+ dictWord{7, 0, 423},
+ dictWord{136, 10, 588},
+ dictWord{136, 10, 287},
+ dictWord{136, 0, 510},
+ dictWord{134, 0, 1048},
+ dictWord{6, 0, 618},
+ dictWord{7, 11, 56},
+ dictWord{7, 11, 1989},
+ dictWord{8, 11, 337},
+ dictWord{8, 11, 738},
+ dictWord{9, 11, 600},
+ dictWord{10, 11, 483},
+ dictWord{12, 11, 37},
+ dictWord{13, 11, 447},
+ dictWord{142, 11, 92},
+ dictWord{4, 0, 520},
+ dictWord{135, 0, 575},
+ dictWord{8, 0, 990},
+ dictWord{138, 0, 977},
+ dictWord{135, 11, 774},
+ dictWord{9, 11, 347},
+ dictWord{11, 11, 24},
+ dictWord{140, 11, 170},
+ dictWord{136, 11, 379},
+ dictWord{140, 10, 290},
+ dictWord{132, 11, 328},
+ dictWord{4, 0, 321},
+ dictWord{134, 0, 569},
+ dictWord{4, 11, 101},
+ dictWord{135, 11, 1171},
+ dictWord{7, 0, 723},
+ dictWord{7, 0, 1135},
+ dictWord{5, 11, 833},
+ dictWord{136, 11, 744},
+ dictWord{7, 10, 719},
+ dictWord{8, 10, 809},
+ dictWord{136, 10, 834},
+ dictWord{8, 0, 921},
+ dictWord{136, 10, 796},
+ dictWord{5, 10, 210},
+ dictWord{6, 10, 213},
+ dictWord{7, 10, 60},
+ dictWord{10, 10, 364},
+ dictWord{139, 10, 135},
+ dictWord{5, 0, 397},
+ dictWord{6, 0, 154},
+ dictWord{7, 0, 676},
+ dictWord{8, 0, 443},
+ dictWord{8, 0, 609},
+ dictWord{9, 0, 24},
+ dictWord{9, 0, 325},
+ dictWord{10, 0, 35},
+ dictWord{11, 0, 535},
+ dictWord{11, 0, 672},
+ dictWord{11, 0, 1018},
+ dictWord{12, 0, 637},
+ dictWord{16, 0, 30},
+ dictWord{5, 10, 607},
+ dictWord{8, 10, 326},
+ dictWord{136, 10, 490},
+ dictWord{4, 10, 701},
+ dictWord{5, 10, 472},
+ dictWord{6, 11, 9},
+ dictWord{6, 11, 397},
+ dictWord{7, 11, 53},
+ dictWord{7, 11, 1742},
+ dictWord{9, 10, 758},
+ dictWord{10, 11, 632},
+ dictWord{11, 11, 828},
+ dictWord{140, 11, 146},
+ dictWord{135, 10, 380},
+ dictWord{135, 10, 1947},
+ dictWord{148, 11, 109},
+ dictWord{10, 10, 278},
+ dictWord{138, 11, 278},
+ dictWord{134, 0, 856},
+ dictWord{7, 0, 139},
+ dictWord{4, 10, 386},
+ dictWord{8, 10, 405},
+ dictWord{8, 10, 728},
+ dictWord{9, 10, 497},
+ dictWord{11, 10, 110},
+ dictWord{11, 10, 360},
+ dictWord{15, 10, 37},
+ dictWord{144, 10, 84},
+ dictWord{141, 0, 282},
+ dictWord{133, 0, 981},
+ dictWord{5, 0, 288},
+ dictWord{7, 10, 1452},
+ dictWord{7, 10, 1480},
+ dictWord{8, 10, 634},
+ dictWord{140, 10, 472},
+ dictWord{7, 0, 1890},
+ dictWord{8, 11, 367},
+ dictWord{10, 11, 760},
+ dictWord{14, 11, 79},
+ dictWord{20, 11, 17},
+ dictWord{152, 11, 0},
+ dictWord{4, 10, 524},
+ dictWord{136, 10, 810},
+ dictWord{4, 0, 56},
+ dictWord{7, 0, 1791},
+ dictWord{8, 0, 607},
+ dictWord{8, 0, 651},
+ dictWord{11, 0, 465},
+ dictWord{11, 0, 835},
+ dictWord{12, 0, 337},
+ dictWord{141, 0, 480},
+ dictWord{10, 10, 238},
+ dictWord{141, 10, 33},
+ dictWord{11, 11, 417},
+ dictWord{12, 11, 223},
+ dictWord{140, 11, 265},
+ dictWord{9, 0, 158},
+ dictWord{10, 0, 411},
+ dictWord{140, 0, 261},
+ dictWord{133, 10, 532},
+ dictWord{133, 10, 997},
+ dictWord{12, 11, 186},
+ dictWord{12, 11, 292},
+ dictWord{14, 11, 100},
+ dictWord{146, 11, 70},
+ dictWord{6, 0, 1403},
+ dictWord{136, 0, 617},
+ dictWord{134, 0, 1205},
+ dictWord{139, 0, 563},
+ dictWord{4, 0, 242},
+ dictWord{134, 0, 333},
+ dictWord{4, 11, 186},
+ dictWord{5, 11, 157},
+ dictWord{8, 11, 168},
+ dictWord{138, 11, 6},
+ dictWord{132, 0, 369},
+ dictWord{133, 11, 875},
+ dictWord{5, 10, 782},
+ dictWord{5, 10, 829},
+ dictWord{134, 10, 1738},
+ dictWord{134, 0, 622},
+ dictWord{135, 11, 1272},
+ dictWord{6, 0, 1407},
+ dictWord{7, 11, 111},
+ dictWord{136, 11, 581},
+ dictWord{7, 10, 1823},
+ dictWord{139, 10, 693},
+ dictWord{7, 0, 160},
+ dictWord{10, 0, 624},
+ dictWord{142, 0, 279},
+ dictWord{132, 0, 363},
+ dictWord{10, 11, 589},
+ dictWord{12, 11, 111},
+ dictWord{13, 11, 260},
+ dictWord{14, 11, 82},
+ dictWord{18, 11, 63},
+ dictWord{147, 11, 45},
+ dictWord{7, 11, 1364},
+ dictWord{7, 11, 1907},
+ dictWord{141, 11, 158},
+ dictWord{4, 11, 404},
+ dictWord{4, 11, 659},
+ dictWord{135, 11, 675},
+ dictWord{13, 11, 211},
+ dictWord{14, 11, 133},
+ dictWord{14, 11, 204},
+ dictWord{15, 11, 64},
+ dictWord{15, 11, 69},
+ dictWord{15, 11, 114},
+ dictWord{16, 11, 10},
+ dictWord{19, 11, 23},
+ dictWord{19, 11, 35},
+ dictWord{19, 11, 39},
+ dictWord{19, 11, 51},
+ dictWord{19, 11, 71},
+ dictWord{19, 11, 75},
+ dictWord{152, 11, 15},
+ dictWord{4, 10, 78},
+ dictWord{5, 10, 96},
+ dictWord{5, 10, 182},
+ dictWord{7, 10, 1724},
+ dictWord{7, 10, 1825},
+ dictWord{10, 10, 394},
+ dictWord{10, 10, 471},
+ dictWord{11, 10, 532},
+ dictWord{14, 10, 340},
+ dictWord{145, 10, 88},
+ dictWord{135, 10, 1964},
+ dictWord{133, 11, 391},
+ dictWord{11, 11, 887},
+ dictWord{14, 11, 365},
+ dictWord{142, 11, 375},
+ dictWord{5, 11, 540},
+ dictWord{6, 11, 1697},
+ dictWord{7, 11, 222},
+ dictWord{136, 11, 341},
+ dictWord{134, 11, 78},
+ dictWord{9, 0, 601},
+ dictWord{9, 0, 619},
+ dictWord{10, 0, 505},
+ dictWord{10, 0, 732},
+ dictWord{11, 0, 355},
+ dictWord{140, 0, 139},
+ dictWord{134, 0, 292},
+ dictWord{139, 0, 174},
+ dictWord{5, 0, 177},
+ dictWord{6, 0, 616},
+ dictWord{7, 0, 827},
+ dictWord{9, 0, 525},
+ dictWord{138, 0, 656},
+ dictWord{10, 0, 31},
+ dictWord{6, 10, 215},
+ dictWord{7, 10, 1028},
+ dictWord{7, 10, 1473},
+ dictWord{7, 10, 1721},
+ dictWord{9, 10, 424},
+ dictWord{138, 10, 779},
+ dictWord{135, 10, 584},
+ dictWord{136, 11, 293},
+ dictWord{134, 0, 685},
+ dictWord{135, 11, 1868},
+ dictWord{133, 11, 460},
+ dictWord{7, 0, 647},
+ dictWord{6, 10, 67},
+ dictWord{7, 10, 1630},
+ dictWord{9, 10, 354},
+ dictWord{9, 10, 675},
+ dictWord{10, 10, 830},
+ dictWord{14, 10, 80},
+ dictWord{145, 10, 80},
+ dictWord{4, 0, 161},
+ dictWord{133, 0, 631},
+ dictWord{6, 10, 141},
+ dictWord{7, 10, 225},
+ dictWord{9, 10, 59},
+ dictWord{9, 10, 607},
+ dictWord{10, 10, 312},
+ dictWord{11, 10, 687},
+ dictWord{12, 10, 555},
+ dictWord{13, 10, 373},
+ dictWord{13, 10, 494},
+ dictWord{148, 10, 58},
+ dictWord{7, 11, 965},
+ dictWord{7, 11, 1460},
+ dictWord{135, 11, 1604},
+ dictWord{136, 10, 783},
+ dictWord{134, 11, 388},
+ dictWord{6, 0, 722},
+ dictWord{6, 0, 1267},
+ dictWord{4, 11, 511},
+ dictWord{9, 11, 333},
+ dictWord{9, 11, 379},
+ dictWord{10, 11, 602},
+ dictWord{11, 11, 441},
+ dictWord{11, 11, 723},
+ dictWord{11, 11, 976},
+ dictWord{140, 11, 357},
+ dictWord{134, 0, 1797},
+ dictWord{135, 0, 1684},
+ dictWord{9, 0, 469},
+ dictWord{9, 0, 709},
+ dictWord{12, 0, 512},
+ dictWord{14, 0, 65},
+ dictWord{17, 0, 12},
+ dictWord{5, 11, 938},
+ dictWord{136, 11, 707},
+ dictWord{7, 0, 1230},
+ dictWord{136, 0, 531},
+ dictWord{10, 0, 229},
+ dictWord{11, 0, 73},
+ dictWord{11, 0, 376},
+ dictWord{139, 0, 433},
+ dictWord{12, 0, 268},
+ dictWord{12, 0, 640},
+ dictWord{142, 0, 119},
+ dictWord{7, 10, 430},
+ dictWord{139, 10, 46},
+ dictWord{6, 0, 558},
+ dictWord{7, 0, 651},
+ dictWord{8, 0, 421},
+ dictWord{9, 0, 0},
+ dictWord{10, 0, 34},
+ dictWord{139, 0, 1008},
+ dictWord{6, 0, 106},
+ dictWord{7, 0, 1786},
+ dictWord{7, 0, 1821},
+ dictWord{9, 0, 102},
+ dictWord{9, 0, 763},
+ dictWord{5, 10, 602},
+ dictWord{7, 10, 2018},
+ dictWord{137, 10, 418},
+ dictWord{5, 0, 65},
+ dictWord{6, 0, 416},
+ dictWord{7, 0, 1720},
+ dictWord{7, 0, 1924},
+ dictWord{10, 0, 109},
+ dictWord{11, 0, 14},
+ dictWord{11, 0, 70},
+ dictWord{11, 0, 569},
+ dictWord{11, 0, 735},
+ dictWord{15, 0, 153},
+ dictWord{20, 0, 80},
+ dictWord{136, 10, 677},
+ dictWord{135, 11, 1625},
+ dictWord{137, 11, 772},
+ dictWord{136, 0, 595},
+ dictWord{6, 11, 469},
+ dictWord{7, 11, 1709},
+ dictWord{138, 11, 515},
+ dictWord{7, 0, 1832},
+ dictWord{138, 0, 374},
+ dictWord{9, 0, 106},
+ dictWord{9, 0, 163},
+ dictWord{9, 0, 296},
+ dictWord{10, 0, 167},
+ dictWord{10, 0, 172},
+ dictWord{10, 0, 777},
+ dictWord{139, 0, 16},
+ dictWord{6, 0, 6},
+ dictWord{7, 0, 81},
+ dictWord{7, 0, 771},
+ dictWord{7, 0, 1731},
+ dictWord{9, 0, 405},
+ dictWord{138, 0, 421},
+ dictWord{4, 11, 500},
+ dictWord{135, 11, 938},
+ dictWord{5, 11, 68},
+ dictWord{134, 11, 383},
+ dictWord{5, 0, 881},
+ dictWord{133, 0, 885},
+ dictWord{6, 0, 854},
+ dictWord{6, 0, 1132},
+ dictWord{6, 0, 1495},
+ dictWord{6, 0, 1526},
+ dictWord{6, 0, 1533},
+ dictWord{134, 0, 1577},
+ dictWord{4, 11, 337},
+ dictWord{6, 11, 353},
+ dictWord{7, 11, 1934},
+ dictWord{8, 11, 488},
+ dictWord{137, 11, 429},
+ dictWord{7, 11, 236},
+ dictWord{7, 11, 1795},
+ dictWord{8, 11, 259},
+ dictWord{9, 11, 135},
+ dictWord{9, 11, 177},
+ dictWord{10, 11, 825},
+ dictWord{11, 11, 115},
+ dictWord{11, 11, 370},
+ dictWord{11, 11, 405},
+ dictWord{11, 11, 604},
+ dictWord{12, 11, 10},
+ dictWord{12, 11, 667},
+ dictWord{12, 11, 669},
+ dictWord{13, 11, 76},
+ dictWord{14, 11, 310},
+ dictWord{15, 11, 76},
+ dictWord{15, 11, 147},
+ dictWord{148, 11, 23},
+ dictWord{5, 0, 142},
+ dictWord{134, 0, 546},
+ dictWord{4, 11, 15},
+ dictWord{5, 11, 22},
+ dictWord{6, 11, 244},
+ dictWord{7, 11, 40},
+ dictWord{7, 11, 200},
+ dictWord{7, 11, 906},
+ dictWord{7, 11, 1199},
+ dictWord{9, 11, 616},
+ dictWord{10, 11, 716},
+ dictWord{11, 11, 635},
+ dictWord{11, 11, 801},
+ dictWord{140, 11, 458},
+ dictWord{5, 0, 466},
+ dictWord{11, 0, 571},
+ dictWord{12, 0, 198},
+ dictWord{13, 0, 283},
+ dictWord{14, 0, 186},
+ dictWord{15, 0, 21},
+ dictWord{15, 0, 103},
+ dictWord{135, 10, 329},
+ dictWord{4, 0, 185},
+ dictWord{5, 0, 257},
+ dictWord{5, 0, 839},
+ dictWord{5, 0, 936},
+ dictWord{9, 0, 399},
+ dictWord{10, 0, 258},
+ dictWord{10, 0, 395},
+ dictWord{10, 0, 734},
+ dictWord{11, 0, 1014},
+ dictWord{12, 0, 23},
+ dictWord{13, 0, 350},
+ dictWord{14, 0, 150},
+ dictWord{19, 0, 6},
+ dictWord{135, 11, 1735},
+ dictWord{12, 11, 36},
+ dictWord{141, 11, 337},
+ dictWord{5, 11, 598},
+ dictWord{7, 11, 791},
+ dictWord{8, 11, 108},
+ dictWord{137, 11, 123},
+ dictWord{132, 10, 469},
+ dictWord{7, 0, 404},
+ dictWord{7, 0, 1377},
+ dictWord{7, 0, 1430},
+ dictWord{7, 0, 2017},
+ dictWord{8, 0, 149},
+ dictWord{8, 0, 239},
+ dictWord{8, 0, 512},
+ dictWord{8, 0, 793},
+ dictWord{8, 0, 818},
+ dictWord{9, 0, 474},
+ dictWord{9, 0, 595},
+ dictWord{10, 0, 122},
+ dictWord{10, 0, 565},
+ dictWord{10, 0, 649},
+ dictWord{10, 0, 783},
+ dictWord{11, 0, 239},
+ dictWord{11, 0, 295},
+ dictWord{11, 0, 447},
+ dictWord{11, 0, 528},
+ dictWord{11, 0, 639},
+ dictWord{11, 0, 800},
+ dictWord{12, 0, 25},
+ dictWord{12, 0, 77},
+ dictWord{12, 0, 157},
+ dictWord{12, 0, 256},
+ dictWord{12, 0, 316},
+ dictWord{12, 0, 390},
+ dictWord{12, 0, 391},
+ dictWord{12, 0, 395},
+ dictWord{12, 0, 478},
+ dictWord{12, 0, 503},
+ dictWord{12, 0, 592},
+ dictWord{12, 0, 680},
+ dictWord{13, 0, 50},
+ dictWord{13, 0, 53},
+ dictWord{13, 0, 132},
+ dictWord{13, 0, 198},
+ dictWord{13, 0, 322},
+ dictWord{13, 0, 415},
+ dictWord{13, 0, 511},
+ dictWord{14, 0, 71},
+ dictWord{14, 0, 395},
+ dictWord{15, 0, 71},
+ dictWord{15, 0, 136},
+ dictWord{17, 0, 123},
+ dictWord{18, 0, 93},
+ dictWord{147, 0, 58},
+ dictWord{136, 0, 712},
+ dictWord{134, 10, 1743},
+ dictWord{5, 10, 929},
+ dictWord{6, 10, 340},
+ dictWord{8, 10, 376},
+ dictWord{136, 10, 807},
+ dictWord{6, 0, 1848},
+ dictWord{8, 0, 860},
+ dictWord{10, 0, 856},
+ dictWord{10, 0, 859},
+ dictWord{10, 0, 925},
+ dictWord{10, 0, 941},
+ dictWord{140, 0, 762},
+ dictWord{6, 0, 629},
+ dictWord{6, 0, 906},
+ dictWord{9, 0, 810},
+ dictWord{140, 0, 652},
+ dictWord{5, 10, 218},
+ dictWord{7, 10, 1610},
+ dictWord{138, 10, 83},
+ dictWord{7, 10, 1512},
+ dictWord{135, 10, 1794},
+ dictWord{4, 0, 377},
+ dictWord{24, 0, 13},
+ dictWord{4, 11, 155},
+ dictWord{7, 11, 1689},
+ dictWord{11, 10, 0},
+ dictWord{144, 10, 78},
+ dictWord{4, 11, 164},
+ dictWord{5, 11, 151},
+ dictWord{5, 11, 730},
+ dictWord{5, 11, 741},
+ dictWord{7, 11, 498},
+ dictWord{7, 11, 870},
+ dictWord{7, 11, 1542},
+ dictWord{12, 11, 213},
+ dictWord{14, 11, 36},
+ dictWord{14, 11, 391},
+ dictWord{17, 11, 111},
+ dictWord{18, 11, 6},
+ dictWord{18, 11, 46},
+ dictWord{18, 11, 151},
+ dictWord{19, 11, 36},
+ dictWord{20, 11, 32},
+ dictWord{20, 11, 56},
+ dictWord{20, 11, 69},
+ dictWord{20, 11, 102},
+ dictWord{21, 11, 4},
+ dictWord{22, 11, 8},
+ dictWord{22, 11, 10},
+ dictWord{22, 11, 14},
+ dictWord{150, 11, 31},
+ dictWord{7, 0, 1842},
+ dictWord{133, 10, 571},
+ dictWord{4, 10, 455},
+ dictWord{4, 11, 624},
+ dictWord{135, 11, 1752},
+ dictWord{134, 0, 1501},
+ dictWord{4, 11, 492},
+ dictWord{5, 11, 451},
+ dictWord{6, 10, 161},
+ dictWord{7, 10, 372},
+ dictWord{137, 10, 597},
+ dictWord{132, 10, 349},
+ dictWord{4, 0, 180},
+ dictWord{135, 0, 1906},
+ dictWord{135, 11, 835},
+ dictWord{141, 11, 70},
+ dictWord{132, 0, 491},
+ dictWord{137, 10, 751},
+ dictWord{6, 10, 432},
+ dictWord{139, 10, 322},
+ dictWord{4, 0, 171},
+ dictWord{138, 0, 234},
+ dictWord{6, 11, 113},
+ dictWord{135, 11, 436},
+ dictWord{4, 0, 586},
+ dictWord{7, 0, 1186},
+ dictWord{138, 0, 631},
+ dictWord{5, 10, 468},
+ dictWord{10, 10, 325},
+ dictWord{11, 10, 856},
+ dictWord{12, 10, 345},
+ dictWord{143, 10, 104},
+ dictWord{5, 10, 223},
+ dictWord{10, 11, 592},
+ dictWord{10, 11, 753},
+ dictWord{12, 11, 317},
+ dictWord{12, 11, 355},
+ dictWord{12, 11, 465},
+ dictWord{12, 11, 469},
+ dictWord{12, 11, 560},
+ dictWord{12, 11, 578},
+ dictWord{141, 11, 243},
+ dictWord{132, 10, 566},
+ dictWord{135, 11, 520},
+ dictWord{4, 10, 59},
+ dictWord{135, 10, 1394},
+ dictWord{6, 10, 436},
+ dictWord{139, 10, 481},
+ dictWord{9, 0, 931},
+ dictWord{10, 0, 334},
+ dictWord{20, 0, 71},
+ dictWord{4, 10, 48},
+ dictWord{5, 10, 271},
+ dictWord{7, 10, 953},
+ dictWord{135, 11, 1878},
+ dictWord{11, 0, 170},
+ dictWord{5, 10, 610},
+ dictWord{136, 10, 457},
+ dictWord{133, 10, 755},
+ dictWord{6, 0, 1587},
+ dictWord{135, 10, 1217},
+ dictWord{4, 10, 197},
+ dictWord{149, 11, 26},
+ dictWord{133, 11, 585},
+ dictWord{137, 11, 521},
+ dictWord{133, 0, 765},
+ dictWord{133, 10, 217},
+ dictWord{139, 11, 586},
+ dictWord{133, 0, 424},
+ dictWord{9, 11, 752},
+ dictWord{12, 11, 610},
+ dictWord{13, 11, 431},
+ dictWord{16, 11, 59},
+ dictWord{146, 11, 109},
+ dictWord{136, 0, 714},
+ dictWord{7, 0, 685},
+ dictWord{132, 11, 307},
+ dictWord{9, 0, 420},
+ dictWord{10, 0, 269},
+ dictWord{10, 0, 285},
+ dictWord{10, 0, 576},
+ dictWord{11, 0, 397},
+ dictWord{13, 0, 175},
+ dictWord{145, 0, 90},
+ dictWord{132, 0, 429},
+ dictWord{133, 11, 964},
+ dictWord{9, 11, 463},
+ dictWord{138, 11, 595},
+ dictWord{7, 0, 18},
+ dictWord{7, 0, 699},
+ dictWord{7, 0, 1966},
+ dictWord{8, 0, 752},
+ dictWord{9, 0, 273},
+ dictWord{9, 0, 412},
+ dictWord{9, 0, 703},
+ dictWord{10, 0, 71},
+ dictWord{10, 0, 427},
+ dictWord{138, 0, 508},
+ dictWord{4, 10, 165},
+ dictWord{7, 10, 1398},
+ dictWord{135, 10, 1829},
+ dictWord{4, 0, 53},
+ dictWord{5, 0, 186},
+ dictWord{7, 0, 752},
+ dictWord{7, 0, 828},
+ dictWord{142, 0, 116},
+ dictWord{8, 0, 575},
+ dictWord{10, 0, 289},
+ dictWord{139, 0, 319},
+ dictWord{132, 0, 675},
+ dictWord{134, 0, 1424},
+ dictWord{4, 11, 75},
+ dictWord{5, 11, 180},
+ dictWord{6, 11, 500},
+ dictWord{7, 11, 58},
+ dictWord{7, 11, 710},
+ dictWord{138, 11, 645},
+ dictWord{133, 11, 649},
+ dictWord{6, 11, 276},
+ dictWord{7, 11, 282},
+ dictWord{7, 11, 879},
+ dictWord{7, 11, 924},
+ dictWord{8, 11, 459},
+ dictWord{9, 11, 599},
+ dictWord{9, 11, 754},
+ dictWord{11, 11, 574},
+ dictWord{12, 11, 128},
+ dictWord{12, 11, 494},
+ dictWord{13, 11, 52},
+ dictWord{13, 11, 301},
+ dictWord{15, 11, 30},
+ dictWord{143, 11, 132},
+ dictWord{6, 0, 647},
+ dictWord{134, 0, 1095},
+ dictWord{5, 10, 9},
+ dictWord{7, 10, 297},
+ dictWord{7, 10, 966},
+ dictWord{140, 10, 306},
+ dictWord{132, 11, 200},
+ dictWord{134, 0, 1334},
+ dictWord{5, 10, 146},
+ dictWord{6, 10, 411},
+ dictWord{138, 10, 721},
+ dictWord{6, 0, 209},
+ dictWord{6, 0, 1141},
+ dictWord{6, 0, 1288},
+ dictWord{8, 0, 468},
+ dictWord{9, 0, 210},
+ dictWord{11, 0, 36},
+ dictWord{12, 0, 28},
+ dictWord{12, 0, 630},
+ dictWord{13, 0, 21},
+ dictWord{13, 0, 349},
+ dictWord{14, 0, 7},
+ dictWord{145, 0, 13},
+ dictWord{6, 10, 177},
+ dictWord{135, 10, 467},
+ dictWord{4, 0, 342},
+ dictWord{135, 0, 1179},
+ dictWord{10, 11, 454},
+ dictWord{140, 11, 324},
+ dictWord{4, 0, 928},
+ dictWord{133, 0, 910},
+ dictWord{7, 0, 1838},
+ dictWord{6, 11, 225},
+ dictWord{137, 11, 211},
+ dictWord{16, 0, 101},
+ dictWord{20, 0, 115},
+ dictWord{20, 0, 118},
+ dictWord{148, 0, 122},
+ dictWord{4, 0, 496},
+ dictWord{135, 0, 856},
+ dictWord{4, 0, 318},
+ dictWord{11, 0, 654},
+ dictWord{7, 11, 718},
+ dictWord{139, 11, 102},
+ dictWord{8, 11, 58},
+ dictWord{9, 11, 724},
+ dictWord{11, 11, 809},
+ dictWord{13, 11, 113},
+ dictWord{145, 11, 72},
+ dictWord{5, 10, 200},
+ dictWord{6, 11, 345},
+ dictWord{135, 11, 1247},
+ dictWord{8, 11, 767},
+ dictWord{8, 11, 803},
+ dictWord{9, 11, 301},
+ dictWord{137, 11, 903},
+ dictWord{7, 0, 915},
+ dictWord{8, 0, 247},
+ dictWord{19, 0, 0},
+ dictWord{7, 11, 1949},
+ dictWord{136, 11, 674},
+ dictWord{4, 0, 202},
+ dictWord{5, 0, 382},
+ dictWord{6, 0, 454},
+ dictWord{7, 0, 936},
+ dictWord{7, 0, 1803},
+ dictWord{8, 0, 758},
+ dictWord{9, 0, 375},
+ dictWord{9, 0, 895},
+ dictWord{10, 0, 743},
+ dictWord{10, 0, 792},
+ dictWord{11, 0, 978},
+ dictWord{11, 0, 1012},
+ dictWord{142, 0, 109},
+ dictWord{7, 0, 1150},
+ dictWord{7, 0, 1425},
+ dictWord{7, 0, 1453},
+ dictWord{140, 0, 513},
+ dictWord{134, 11, 259},
+ dictWord{138, 0, 791},
+ dictWord{11, 0, 821},
+ dictWord{12, 0, 110},
+ dictWord{12, 0, 153},
+ dictWord{18, 0, 41},
+ dictWord{150, 0, 19},
+ dictWord{134, 10, 481},
+ dictWord{132, 0, 796},
+ dictWord{6, 0, 445},
+ dictWord{9, 0, 909},
+ dictWord{136, 11, 254},
+ dictWord{10, 0, 776},
+ dictWord{13, 0, 345},
+ dictWord{142, 0, 425},
+ dictWord{4, 10, 84},
+ dictWord{7, 10, 1482},
+ dictWord{10, 10, 76},
+ dictWord{138, 10, 142},
+ dictWord{135, 11, 742},
+ dictWord{6, 0, 578},
+ dictWord{133, 10, 1015},
+ dictWord{6, 0, 1387},
+ dictWord{4, 10, 315},
+ dictWord{5, 10, 507},
+ dictWord{135, 10, 1370},
+ dictWord{4, 0, 438},
+ dictWord{133, 0, 555},
+ dictWord{136, 0, 766},
+ dictWord{133, 11, 248},
+ dictWord{134, 10, 1722},
+ dictWord{4, 11, 116},
+ dictWord{5, 11, 95},
+ dictWord{5, 11, 445},
+ dictWord{7, 11, 1688},
+ dictWord{8, 11, 29},
+ dictWord{9, 11, 272},
+ dictWord{11, 11, 509},
+ dictWord{139, 11, 915},
+ dictWord{135, 0, 541},
+ dictWord{133, 11, 543},
+ dictWord{8, 10, 222},
+ dictWord{8, 10, 476},
+ dictWord{9, 10, 238},
+ dictWord{11, 10, 516},
+ dictWord{11, 10, 575},
+ dictWord{15, 10, 109},
+ dictWord{146, 10, 100},
+ dictWord{6, 0, 880},
+ dictWord{134, 0, 1191},
+ dictWord{5, 11, 181},
+ dictWord{136, 11, 41},
+ dictWord{134, 0, 1506},
+ dictWord{132, 11, 681},
+ dictWord{7, 11, 25},
+ dictWord{8, 11, 202},
+ dictWord{138, 11, 536},
+ dictWord{139, 0, 983},
+ dictWord{137, 0, 768},
+ dictWord{132, 0, 584},
+ dictWord{9, 11, 423},
+ dictWord{140, 11, 89},
+ dictWord{8, 11, 113},
+ dictWord{9, 11, 877},
+ dictWord{10, 11, 554},
+ dictWord{11, 11, 83},
+ dictWord{12, 11, 136},
+ dictWord{147, 11, 109},
+ dictWord{7, 10, 706},
+ dictWord{7, 10, 1058},
+ dictWord{138, 10, 538},
+ dictWord{133, 11, 976},
+ dictWord{4, 11, 206},
+ dictWord{135, 11, 746},
+ dictWord{136, 11, 526},
+ dictWord{140, 0, 737},
+ dictWord{11, 10, 92},
+ dictWord{11, 10, 196},
+ dictWord{11, 10, 409},
+ dictWord{11, 10, 450},
+ dictWord{11, 10, 666},
+ dictWord{11, 10, 777},
+ dictWord{12, 10, 262},
+ dictWord{13, 10, 385},
+ dictWord{13, 10, 393},
+ dictWord{15, 10, 115},
+ dictWord{16, 10, 45},
+ dictWord{145, 10, 82},
+ dictWord{4, 0, 226},
+ dictWord{4, 0, 326},
+ dictWord{7, 0, 1770},
+ dictWord{4, 11, 319},
+ dictWord{5, 11, 699},
+ dictWord{138, 11, 673},
+ dictWord{6, 10, 40},
+ dictWord{135, 10, 1781},
+ dictWord{5, 0, 426},
+ dictWord{8, 0, 30},
+ dictWord{9, 0, 2},
+ dictWord{11, 0, 549},
+ dictWord{147, 0, 122},
+ dictWord{6, 0, 1161},
+ dictWord{134, 0, 1329},
+ dictWord{138, 10, 97},
+ dictWord{6, 10, 423},
+ dictWord{7, 10, 665},
+ dictWord{135, 10, 1210},
+ dictWord{7, 11, 13},
+ dictWord{8, 11, 226},
+ dictWord{10, 11, 537},
+ dictWord{11, 11, 570},
+ dictWord{11, 11, 605},
+ dictWord{11, 11, 799},
+ dictWord{11, 11, 804},
+ dictWord{12, 11, 85},
+ dictWord{12, 11, 516},
+ dictWord{12, 11, 623},
+ dictWord{13, 11, 112},
+ dictWord{13, 11, 361},
+ dictWord{14, 11, 77},
+ dictWord{14, 11, 78},
+ dictWord{17, 11, 28},
+ dictWord{147, 11, 110},
+ dictWord{132, 11, 769},
+ dictWord{132, 11, 551},
+ dictWord{132, 11, 728},
+ dictWord{147, 0, 117},
+ dictWord{9, 11, 57},
+ dictWord{9, 11, 459},
+ dictWord{10, 11, 425},
+ dictWord{11, 11, 119},
+ dictWord{12, 11, 184},
+ dictWord{12, 11, 371},
+ dictWord{13, 11, 358},
+ dictWord{145, 11, 51},
+ dictWord{5, 11, 188},
+ dictWord{5, 11, 814},
+ dictWord{8, 11, 10},
+ dictWord{9, 11, 421},
+ dictWord{9, 11, 729},
+ dictWord{10, 11, 609},
+ dictWord{139, 11, 689},
+ dictWord{134, 11, 624},
+ dictWord{135, 11, 298},
+ dictWord{135, 0, 462},
+ dictWord{4, 0, 345},
+ dictWord{139, 10, 624},
+ dictWord{136, 10, 574},
+ dictWord{4, 0, 385},
+ dictWord{7, 0, 265},
+ dictWord{135, 0, 587},
+ dictWord{6, 0, 808},
+ dictWord{132, 11, 528},
+ dictWord{133, 0, 398},
+ dictWord{132, 10, 354},
+ dictWord{4, 0, 347},
+ dictWord{5, 0, 423},
+ dictWord{5, 0, 996},
+ dictWord{135, 0, 1329},
+ dictWord{135, 10, 1558},
+ dictWord{7, 0, 1259},
+ dictWord{9, 0, 125},
+ dictWord{139, 0, 65},
+ dictWord{5, 0, 136},
+ dictWord{6, 0, 136},
+ dictWord{136, 0, 644},
+ dictWord{5, 11, 104},
+ dictWord{6, 11, 173},
+ dictWord{135, 11, 1631},
+ dictWord{135, 0, 469},
+ dictWord{133, 10, 830},
+ dictWord{4, 0, 278},
+ dictWord{5, 0, 465},
+ dictWord{135, 0, 1367},
+ dictWord{7, 11, 810},
+ dictWord{8, 11, 138},
+ dictWord{8, 11, 342},
+ dictWord{9, 11, 84},
+ dictWord{10, 11, 193},
+ dictWord{11, 11, 883},
+ dictWord{140, 11, 359},
+ dictWord{5, 10, 496},
+ dictWord{135, 10, 203},
+ dictWord{4, 0, 433},
+ dictWord{133, 0, 719},
+ dictWord{6, 11, 95},
+ dictWord{134, 10, 547},
+ dictWord{5, 10, 88},
+ dictWord{137, 10, 239},
+ dictWord{6, 11, 406},
+ dictWord{10, 11, 409},
+ dictWord{10, 11, 447},
+ dictWord{11, 11, 44},
+ dictWord{140, 11, 100},
+ dictWord{134, 0, 1423},
+ dictWord{7, 10, 650},
+ dictWord{135, 10, 1310},
+ dictWord{134, 0, 749},
+ dictWord{135, 11, 1243},
+ dictWord{135, 0, 1363},
+ dictWord{6, 0, 381},
+ dictWord{7, 0, 645},
+ dictWord{7, 0, 694},
+ dictWord{8, 0, 546},
+ dictWord{7, 10, 1076},
+ dictWord{9, 10, 80},
+ dictWord{11, 10, 78},
+ dictWord{11, 10, 421},
+ dictWord{11, 10, 534},
+ dictWord{140, 10, 545},
+ dictWord{134, 11, 1636},
+ dictWord{135, 11, 1344},
+ dictWord{12, 0, 277},
+ dictWord{7, 10, 274},
+ dictWord{11, 10, 479},
+ dictWord{139, 10, 507},
+ dictWord{6, 0, 705},
+ dictWord{6, 0, 783},
+ dictWord{6, 0, 1275},
+ dictWord{6, 0, 1481},
+ dictWord{4, 11, 282},
+ dictWord{7, 11, 1034},
+ dictWord{11, 11, 398},
+ dictWord{11, 11, 634},
+ dictWord{12, 11, 1},
+ dictWord{12, 11, 79},
+ dictWord{12, 11, 544},
+ dictWord{14, 11, 237},
+ dictWord{17, 11, 10},
+ dictWord{146, 11, 20},
+ dictWord{134, 0, 453},
+ dictWord{4, 0, 555},
+ dictWord{8, 0, 536},
+ dictWord{10, 0, 288},
+ dictWord{11, 0, 1005},
+ dictWord{4, 10, 497},
+ dictWord{135, 10, 1584},
+ dictWord{5, 11, 118},
+ dictWord{5, 11, 499},
+ dictWord{6, 11, 476},
+ dictWord{7, 11, 600},
+ dictWord{7, 11, 888},
+ dictWord{135, 11, 1096},
+ dictWord{138, 0, 987},
+ dictWord{7, 0, 1107},
+ dictWord{7, 10, 261},
+ dictWord{7, 10, 1115},
+ dictWord{7, 10, 1354},
+ dictWord{7, 10, 1588},
+ dictWord{7, 10, 1705},
+ dictWord{7, 10, 1902},
+ dictWord{9, 10, 465},
+ dictWord{10, 10, 248},
+ dictWord{10, 10, 349},
+ dictWord{10, 10, 647},
+ dictWord{11, 10, 527},
+ dictWord{11, 10, 660},
+ dictWord{11, 10, 669},
+ dictWord{12, 10, 529},
+ dictWord{141, 10, 305},
+ dictWord{7, 11, 296},
+ dictWord{7, 11, 596},
+ dictWord{8, 11, 560},
+ dictWord{8, 11, 586},
+ dictWord{9, 11, 612},
+ dictWord{11, 11, 100},
+ dictWord{11, 11, 304},
+ dictWord{12, 11, 46},
+ dictWord{13, 11, 89},
+ dictWord{14, 11, 112},
+ dictWord{145, 11, 122},
+ dictWord{9, 0, 370},
+ dictWord{138, 0, 90},
+ dictWord{136, 10, 13},
+ dictWord{132, 0, 860},
+ dictWord{7, 10, 642},
+ dictWord{8, 10, 250},
+ dictWord{11, 10, 123},
+ dictWord{11, 10, 137},
+ dictWord{13, 10, 48},
+ dictWord{142, 10, 95},
+ dictWord{135, 10, 1429},
+ dictWord{137, 11, 321},
+ dictWord{132, 0, 257},
+ dictWord{135, 0, 2031},
+ dictWord{7, 0, 1768},
+ dictWord{7, 11, 1599},
+ dictWord{7, 11, 1723},
+ dictWord{8, 11, 79},
+ dictWord{8, 11, 106},
+ dictWord{8, 11, 190},
+ dictWord{8, 11, 302},
+ dictWord{8, 11, 383},
+ dictWord{9, 11, 119},
+ dictWord{9, 11, 233},
+ dictWord{9, 11, 298},
+ dictWord{9, 11, 419},
+ dictWord{9, 11, 471},
+ dictWord{10, 11, 181},
+ dictWord{10, 11, 406},
+ dictWord{11, 11, 57},
+ dictWord{11, 11, 85},
+ dictWord{11, 11, 120},
+ dictWord{11, 11, 177},
+ dictWord{11, 11, 296},
+ dictWord{11, 11, 382},
+ dictWord{11, 11, 454},
+ dictWord{11, 11, 758},
+ dictWord{11, 11, 999},
+ dictWord{12, 11, 27},
+ dictWord{12, 11, 98},
+ dictWord{12, 11, 131},
+ dictWord{12, 11, 245},
+ dictWord{12, 11, 312},
+ dictWord{12, 11, 446},
+ dictWord{12, 11, 454},
+ dictWord{13, 11, 25},
+ dictWord{13, 11, 98},
+ dictWord{13, 11, 426},
+ dictWord{13, 11, 508},
+ dictWord{14, 11, 6},
+ dictWord{14, 11, 163},
+ dictWord{14, 11, 272},
+ dictWord{14, 11, 277},
+ dictWord{14, 11, 370},
+ dictWord{15, 11, 95},
+ dictWord{15, 11, 138},
+ dictWord{15, 11, 167},
+ dictWord{17, 11, 18},
+ dictWord{17, 11, 38},
+ dictWord{20, 11, 96},
+ dictWord{149, 11, 32},
+ dictWord{5, 11, 722},
+ dictWord{134, 11, 1759},
+ dictWord{145, 11, 16},
+ dictWord{6, 0, 1071},
+ dictWord{134, 0, 1561},
+ dictWord{10, 10, 545},
+ dictWord{140, 10, 301},
+ dictWord{6, 0, 83},
+ dictWord{6, 0, 1733},
+ dictWord{135, 0, 1389},
+ dictWord{4, 0, 835},
+ dictWord{135, 0, 1818},
+ dictWord{133, 11, 258},
+ dictWord{4, 10, 904},
+ dictWord{133, 10, 794},
+ dictWord{134, 0, 2006},
+ dictWord{5, 11, 30},
+ dictWord{7, 11, 495},
+ dictWord{8, 11, 134},
+ dictWord{9, 11, 788},
+ dictWord{140, 11, 438},
+ dictWord{135, 11, 2004},
+ dictWord{137, 0, 696},
+ dictWord{5, 11, 50},
+ dictWord{6, 11, 439},
+ dictWord{7, 11, 780},
+ dictWord{135, 11, 1040},
+ dictWord{7, 11, 772},
+ dictWord{7, 11, 1104},
+ dictWord{7, 11, 1647},
+ dictWord{11, 11, 269},
+ dictWord{11, 11, 539},
+ dictWord{11, 11, 607},
+ dictWord{11, 11, 627},
+ dictWord{11, 11, 706},
+ dictWord{11, 11, 975},
+ dictWord{12, 11, 248},
+ dictWord{12, 11, 311},
+ dictWord{12, 11, 434},
+ dictWord{12, 11, 600},
+ dictWord{12, 11, 622},
+ dictWord{13, 11, 297},
+ dictWord{13, 11, 367},
+ dictWord{13, 11, 485},
+ dictWord{14, 11, 69},
+ dictWord{14, 11, 409},
+ dictWord{143, 11, 108},
+ dictWord{5, 11, 1},
+ dictWord{6, 11, 81},
+ dictWord{138, 11, 520},
+ dictWord{7, 0, 1718},
+ dictWord{9, 0, 95},
+ dictWord{9, 0, 274},
+ dictWord{10, 0, 279},
+ dictWord{10, 0, 317},
+ dictWord{10, 0, 420},
+ dictWord{11, 0, 303},
+ dictWord{11, 0, 808},
+ dictWord{12, 0, 134},
+ dictWord{12, 0, 367},
+ dictWord{13, 0, 149},
+ dictWord{13, 0, 347},
+ dictWord{14, 0, 349},
+ dictWord{14, 0, 406},
+ dictWord{18, 0, 22},
+ dictWord{18, 0, 89},
+ dictWord{18, 0, 122},
+ dictWord{147, 0, 47},
+ dictWord{5, 11, 482},
+ dictWord{8, 11, 98},
+ dictWord{9, 11, 172},
+ dictWord{10, 11, 222},
+ dictWord{10, 11, 700},
+ dictWord{10, 11, 822},
+ dictWord{11, 11, 302},
+ dictWord{11, 11, 778},
+ dictWord{12, 11, 50},
+ dictWord{12, 11, 127},
+ dictWord{12, 11, 396},
+ dictWord{13, 11, 62},
+ dictWord{13, 11, 328},
+ dictWord{14, 11, 122},
+ dictWord{147, 11, 72},
+ dictWord{7, 10, 386},
+ dictWord{138, 10, 713},
+ dictWord{6, 10, 7},
+ dictWord{6, 10, 35},
+ dictWord{7, 10, 147},
+ dictWord{7, 10, 1069},
+ dictWord{7, 10, 1568},
+ dictWord{7, 10, 1575},
+ dictWord{7, 10, 1917},
+ dictWord{8, 10, 43},
+ dictWord{8, 10, 208},
+ dictWord{9, 10, 128},
+ dictWord{9, 10, 866},
+ dictWord{10, 10, 20},
+ dictWord{11, 10, 981},
+ dictWord{147, 10, 33},
+ dictWord{133, 0, 26},
+ dictWord{132, 0, 550},
+ dictWord{5, 11, 2},
+ dictWord{7, 11, 1494},
+ dictWord{136, 11, 589},
+ dictWord{6, 11, 512},
+ dictWord{7, 11, 797},
+ dictWord{8, 11, 253},
+ dictWord{9, 11, 77},
+ dictWord{10, 11, 1},
+ dictWord{10, 11, 129},
+ dictWord{10, 11, 225},
+ dictWord{11, 11, 118},
+ dictWord{11, 11, 226},
+ dictWord{11, 11, 251},
+ dictWord{11, 11, 430},
+ dictWord{11, 11, 701},
+ dictWord{11, 11, 974},
+ dictWord{11, 11, 982},
+ dictWord{12, 11, 64},
+ dictWord{12, 11, 260},
+ dictWord{12, 11, 488},
+ dictWord{140, 11, 690},
+ dictWord{7, 10, 893},
+ dictWord{141, 10, 424},
+ dictWord{134, 0, 901},
+ dictWord{136, 0, 822},
+ dictWord{4, 0, 902},
+ dictWord{5, 0, 809},
+ dictWord{134, 0, 122},
+ dictWord{6, 0, 807},
+ dictWord{134, 0, 1366},
+ dictWord{7, 0, 262},
+ dictWord{5, 11, 748},
+ dictWord{134, 11, 553},
+ dictWord{133, 0, 620},
+ dictWord{4, 0, 34},
+ dictWord{5, 0, 574},
+ dictWord{7, 0, 279},
+ dictWord{7, 0, 1624},
+ dictWord{136, 0, 601},
+ dictWord{9, 0, 170},
+ dictWord{6, 10, 322},
+ dictWord{9, 10, 552},
+ dictWord{11, 10, 274},
+ dictWord{13, 10, 209},
+ dictWord{13, 10, 499},
+ dictWord{14, 10, 85},
+ dictWord{15, 10, 126},
+ dictWord{145, 10, 70},
+ dictWord{132, 0, 537},
+ dictWord{4, 11, 12},
+ dictWord{7, 11, 420},
+ dictWord{7, 11, 522},
+ dictWord{7, 11, 809},
+ dictWord{8, 11, 797},
+ dictWord{141, 11, 88},
+ dictWord{133, 0, 332},
+ dictWord{8, 10, 83},
+ dictWord{8, 10, 742},
+ dictWord{8, 10, 817},
+ dictWord{9, 10, 28},
+ dictWord{9, 10, 29},
+ dictWord{9, 10, 885},
+ dictWord{10, 10, 387},
+ dictWord{11, 10, 633},
+ dictWord{11, 10, 740},
+ dictWord{13, 10, 235},
+ dictWord{13, 10, 254},
+ dictWord{15, 10, 143},
+ dictWord{143, 10, 146},
+ dictWord{6, 0, 1909},
+ dictWord{9, 0, 964},
+ dictWord{12, 0, 822},
+ dictWord{12, 0, 854},
+ dictWord{12, 0, 865},
+ dictWord{12, 0, 910},
+ dictWord{12, 0, 938},
+ dictWord{15, 0, 169},
+ dictWord{15, 0, 208},
+ dictWord{15, 0, 211},
+ dictWord{18, 0, 205},
+ dictWord{18, 0, 206},
+ dictWord{18, 0, 220},
+ dictWord{18, 0, 223},
+ dictWord{152, 0, 24},
+ dictWord{140, 10, 49},
+ dictWord{5, 11, 528},
+ dictWord{135, 11, 1580},
+ dictWord{6, 0, 261},
+ dictWord{8, 0, 182},
+ dictWord{139, 0, 943},
+ dictWord{134, 0, 1721},
+ dictWord{4, 0, 933},
+ dictWord{133, 0, 880},
+ dictWord{136, 11, 321},
+ dictWord{5, 11, 266},
+ dictWord{9, 11, 290},
+ dictWord{9, 11, 364},
+ dictWord{10, 11, 293},
+ dictWord{11, 11, 606},
+ dictWord{142, 11, 45},
+ dictWord{6, 0, 1609},
+ dictWord{4, 11, 50},
+ dictWord{6, 11, 510},
+ dictWord{6, 11, 594},
+ dictWord{9, 11, 121},
+ dictWord{10, 11, 49},
+ dictWord{10, 11, 412},
+ dictWord{139, 11, 834},
+ dictWord{7, 0, 895},
+ dictWord{136, 11, 748},
+ dictWord{132, 11, 466},
+ dictWord{4, 10, 110},
+ dictWord{10, 10, 415},
+ dictWord{10, 10, 597},
+ dictWord{142, 10, 206},
+ dictWord{133, 0, 812},
+ dictWord{135, 11, 281},
+ dictWord{6, 0, 1890},
+ dictWord{6, 0, 1902},
+ dictWord{6, 0, 1916},
+ dictWord{9, 0, 929},
+ dictWord{9, 0, 942},
+ dictWord{9, 0, 975},
+ dictWord{9, 0, 984},
+ dictWord{9, 0, 986},
+ dictWord{9, 0, 1011},
+ dictWord{9, 0, 1019},
+ dictWord{12, 0, 804},
+ dictWord{12, 0, 851},
+ dictWord{12, 0, 867},
+ dictWord{12, 0, 916},
+ dictWord{12, 0, 923},
+ dictWord{15, 0, 194},
+ dictWord{15, 0, 204},
+ dictWord{15, 0, 210},
+ dictWord{15, 0, 222},
+ dictWord{15, 0, 223},
+ dictWord{15, 0, 229},
+ dictWord{15, 0, 250},
+ dictWord{18, 0, 179},
+ dictWord{18, 0, 186},
+ dictWord{18, 0, 192},
+ dictWord{7, 10, 205},
+ dictWord{135, 10, 2000},
+ dictWord{132, 11, 667},
+ dictWord{135, 0, 778},
+ dictWord{4, 0, 137},
+ dictWord{7, 0, 1178},
+ dictWord{135, 0, 1520},
+ dictWord{134, 0, 1314},
+ dictWord{4, 11, 242},
+ dictWord{134, 11, 333},
+ dictWord{6, 0, 1661},
+ dictWord{7, 0, 1975},
+ dictWord{7, 0, 2009},
+ dictWord{135, 0, 2011},
+ dictWord{134, 0, 1591},
+ dictWord{4, 10, 283},
+ dictWord{135, 10, 1194},
+ dictWord{11, 0, 820},
+ dictWord{150, 0, 51},
+ dictWord{4, 11, 39},
+ dictWord{5, 11, 36},
+ dictWord{7, 11, 1843},
+ dictWord{8, 11, 407},
+ dictWord{11, 11, 144},
+ dictWord{140, 11, 523},
+ dictWord{134, 10, 1720},
+ dictWord{4, 11, 510},
+ dictWord{7, 11, 29},
+ dictWord{7, 11, 66},
+ dictWord{7, 11, 1980},
+ dictWord{10, 11, 487},
+ dictWord{10, 11, 809},
+ dictWord{146, 11, 9},
+ dictWord{5, 0, 89},
+ dictWord{7, 0, 1915},
+ dictWord{9, 0, 185},
+ dictWord{9, 0, 235},
+ dictWord{10, 0, 64},
+ dictWord{10, 0, 270},
+ dictWord{10, 0, 403},
+ dictWord{10, 0, 469},
+ dictWord{10, 0, 529},
+ dictWord{10, 0, 590},
+ dictWord{11, 0, 140},
+ dictWord{11, 0, 860},
+ dictWord{13, 0, 1},
+ dictWord{13, 0, 422},
+ dictWord{14, 0, 341},
+ dictWord{14, 0, 364},
+ dictWord{17, 0, 93},
+ dictWord{18, 0, 113},
+ dictWord{19, 0, 97},
+ dictWord{147, 0, 113},
+ dictWord{133, 0, 695},
+ dictWord{6, 0, 987},
+ dictWord{134, 0, 1160},
+ dictWord{5, 0, 6},
+ dictWord{6, 0, 183},
+ dictWord{7, 0, 680},
+ dictWord{7, 0, 978},
+ dictWord{7, 0, 1013},
+ dictWord{7, 0, 1055},
+ dictWord{12, 0, 230},
+ dictWord{13, 0, 172},
+ dictWord{146, 0, 29},
+ dictWord{134, 11, 570},
+ dictWord{132, 11, 787},
+ dictWord{134, 11, 518},
+ dictWord{6, 0, 29},
+ dictWord{139, 0, 63},
+ dictWord{132, 11, 516},
+ dictWord{136, 11, 821},
+ dictWord{132, 0, 311},
+ dictWord{134, 0, 1740},
+ dictWord{7, 0, 170},
+ dictWord{8, 0, 90},
+ dictWord{8, 0, 177},
+ dictWord{8, 0, 415},
+ dictWord{11, 0, 714},
+ dictWord{14, 0, 281},
+ dictWord{136, 10, 735},
+ dictWord{134, 0, 1961},
+ dictWord{135, 11, 1405},
+ dictWord{4, 11, 10},
+ dictWord{7, 11, 917},
+ dictWord{139, 11, 786},
+ dictWord{5, 10, 132},
+ dictWord{9, 10, 486},
+ dictWord{9, 10, 715},
+ dictWord{10, 10, 458},
+ dictWord{11, 10, 373},
+ dictWord{11, 10, 668},
+ dictWord{11, 10, 795},
+ dictWord{11, 10, 897},
+ dictWord{12, 10, 272},
+ dictWord{12, 10, 424},
+ dictWord{12, 10, 539},
+ dictWord{12, 10, 558},
+ dictWord{14, 10, 245},
+ dictWord{14, 10, 263},
+ dictWord{14, 10, 264},
+ dictWord{14, 10, 393},
+ dictWord{142, 10, 403},
+ dictWord{11, 0, 91},
+ dictWord{13, 0, 129},
+ dictWord{15, 0, 101},
+ dictWord{145, 0, 125},
+ dictWord{135, 0, 1132},
+ dictWord{4, 0, 494},
+ dictWord{6, 0, 74},
+ dictWord{7, 0, 44},
+ dictWord{7, 0, 407},
+ dictWord{12, 0, 17},
+ dictWord{15, 0, 5},
+ dictWord{148, 0, 11},
+ dictWord{133, 10, 379},
+ dictWord{5, 0, 270},
+ dictWord{5, 11, 684},
+ dictWord{6, 10, 89},
+ dictWord{6, 10, 400},
+ dictWord{7, 10, 1569},
+ dictWord{7, 10, 1623},
+ dictWord{7, 10, 1850},
+ dictWord{8, 10, 218},
+ dictWord{8, 10, 422},
+ dictWord{9, 10, 570},
+ dictWord{138, 10, 626},
+ dictWord{4, 0, 276},
+ dictWord{133, 0, 296},
+ dictWord{6, 0, 1523},
+ dictWord{134, 11, 27},
+ dictWord{6, 10, 387},
+ dictWord{7, 10, 882},
+ dictWord{141, 10, 111},
+ dictWord{6, 10, 224},
+ dictWord{7, 10, 877},
+ dictWord{137, 10, 647},
+ dictWord{135, 10, 790},
+ dictWord{4, 0, 7},
+ dictWord{5, 0, 90},
+ dictWord{5, 0, 158},
+ dictWord{6, 0, 542},
+ dictWord{7, 0, 221},
+ dictWord{7, 0, 1574},
+ dictWord{9, 0, 490},
+ dictWord{10, 0, 540},
+ dictWord{11, 0, 443},
+ dictWord{139, 0, 757},
+ dictWord{7, 0, 588},
+ dictWord{9, 0, 175},
+ dictWord{138, 0, 530},
+ dictWord{135, 10, 394},
+ dictWord{142, 11, 23},
+ dictWord{134, 0, 786},
+ dictWord{135, 0, 580},
+ dictWord{7, 0, 88},
+ dictWord{136, 0, 627},
+ dictWord{5, 0, 872},
+ dictWord{6, 0, 57},
+ dictWord{7, 0, 471},
+ dictWord{9, 0, 447},
+ dictWord{137, 0, 454},
+ dictWord{6, 11, 342},
+ dictWord{6, 11, 496},
+ dictWord{8, 11, 275},
+ dictWord{137, 11, 206},
+ dictWord{4, 11, 909},
+ dictWord{133, 11, 940},
+ dictWord{6, 0, 735},
+ dictWord{132, 11, 891},
+ dictWord{8, 0, 845},
+ dictWord{8, 0, 916},
+ dictWord{135, 10, 1409},
+ dictWord{5, 0, 31},
+ dictWord{134, 0, 614},
+ dictWord{11, 0, 458},
+ dictWord{12, 0, 15},
+ dictWord{140, 0, 432},
+ dictWord{8, 0, 330},
+ dictWord{140, 0, 477},
+ dictWord{4, 0, 530},
+ dictWord{5, 0, 521},
+ dictWord{7, 0, 1200},
+ dictWord{10, 0, 460},
+ dictWord{132, 11, 687},
+ dictWord{6, 0, 424},
+ dictWord{135, 0, 1866},
+ dictWord{9, 0, 569},
+ dictWord{12, 0, 12},
+ dictWord{12, 0, 81},
+ dictWord{12, 0, 319},
+ dictWord{13, 0, 69},
+ dictWord{14, 0, 259},
+ dictWord{16, 0, 87},
+ dictWord{17, 0, 1},
+ dictWord{17, 0, 21},
+ dictWord{17, 0, 24},
+ dictWord{18, 0, 15},
+ dictWord{18, 0, 56},
+ dictWord{18, 0, 59},
+ dictWord{18, 0, 127},
+ dictWord{18, 0, 154},
+ dictWord{19, 0, 19},
+ dictWord{148, 0, 31},
+ dictWord{7, 0, 1302},
+ dictWord{136, 10, 38},
+ dictWord{134, 11, 253},
+ dictWord{5, 10, 261},
+ dictWord{7, 10, 78},
+ dictWord{7, 10, 199},
+ dictWord{8, 10, 815},
+ dictWord{9, 10, 126},
+ dictWord{138, 10, 342},
+ dictWord{5, 0, 595},
+ dictWord{135, 0, 1863},
+ dictWord{6, 11, 41},
+ dictWord{141, 11, 160},
+ dictWord{5, 0, 13},
+ dictWord{134, 0, 142},
+ dictWord{6, 0, 97},
+ dictWord{7, 0, 116},
+ dictWord{8, 0, 322},
+ dictWord{8, 0, 755},
+ dictWord{9, 0, 548},
+ dictWord{10, 0, 714},
+ dictWord{11, 0, 884},
+ dictWord{13, 0, 324},
+ dictWord{7, 11, 1304},
+ dictWord{138, 11, 477},
+ dictWord{132, 10, 628},
+ dictWord{134, 11, 1718},
+ dictWord{7, 10, 266},
+ dictWord{136, 10, 804},
+ dictWord{135, 10, 208},
+ dictWord{7, 0, 1021},
+ dictWord{6, 10, 79},
+ dictWord{135, 10, 1519},
+ dictWord{7, 0, 1472},
+ dictWord{135, 0, 1554},
+ dictWord{6, 11, 362},
+ dictWord{146, 11, 51},
+ dictWord{7, 0, 1071},
+ dictWord{7, 0, 1541},
+ dictWord{7, 0, 1767},
+ dictWord{7, 0, 1806},
+ dictWord{11, 0, 162},
+ dictWord{11, 0, 242},
+ dictWord{11, 0, 452},
+ dictWord{12, 0, 605},
+ dictWord{15, 0, 26},
+ dictWord{144, 0, 44},
+ dictWord{136, 10, 741},
+ dictWord{133, 11, 115},
+ dictWord{145, 0, 115},
+ dictWord{134, 10, 376},
+ dictWord{6, 0, 1406},
+ dictWord{134, 0, 1543},
+ dictWord{5, 11, 193},
+ dictWord{12, 11, 178},
+ dictWord{13, 11, 130},
+ dictWord{145, 11, 84},
+ dictWord{135, 0, 1111},
+ dictWord{8, 0, 1},
+ dictWord{9, 0, 650},
+ dictWord{10, 0, 326},
+ dictWord{5, 11, 705},
+ dictWord{137, 11, 606},
+ dictWord{5, 0, 488},
+ dictWord{6, 0, 527},
+ dictWord{7, 0, 489},
+ dictWord{7, 0, 1636},
+ dictWord{8, 0, 121},
+ dictWord{8, 0, 144},
+ dictWord{8, 0, 359},
+ dictWord{9, 0, 193},
+ dictWord{9, 0, 241},
+ dictWord{9, 0, 336},
+ dictWord{9, 0, 882},
+ dictWord{11, 0, 266},
+ dictWord{11, 0, 372},
+ dictWord{11, 0, 944},
+ dictWord{12, 0, 401},
+ dictWord{140, 0, 641},
+ dictWord{135, 11, 174},
+ dictWord{6, 0, 267},
+ dictWord{7, 10, 244},
+ dictWord{7, 10, 632},
+ dictWord{7, 10, 1609},
+ dictWord{8, 10, 178},
+ dictWord{8, 10, 638},
+ dictWord{141, 10, 58},
+ dictWord{134, 0, 1983},
+ dictWord{134, 0, 1155},
+ dictWord{134, 0, 1575},
+ dictWord{134, 0, 1438},
+ dictWord{9, 0, 31},
+ dictWord{10, 0, 244},
+ dictWord{10, 0, 699},
+ dictWord{12, 0, 149},
+ dictWord{141, 0, 497},
+ dictWord{133, 0, 377},
+ dictWord{4, 11, 122},
+ dictWord{5, 11, 796},
+ dictWord{5, 11, 952},
+ dictWord{6, 11, 1660},
+ dictWord{6, 11, 1671},
+ dictWord{8, 11, 567},
+ dictWord{9, 11, 687},
+ dictWord{9, 11, 742},
+ dictWord{10, 11, 686},
+ dictWord{11, 11, 356},
+ dictWord{11, 11, 682},
+ dictWord{140, 11, 281},
+ dictWord{145, 0, 101},
+ dictWord{11, 11, 0},
+ dictWord{144, 11, 78},
+ dictWord{5, 11, 179},
+ dictWord{5, 10, 791},
+ dictWord{7, 11, 1095},
+ dictWord{135, 11, 1213},
+ dictWord{8, 11, 372},
+ dictWord{9, 11, 122},
+ dictWord{138, 11, 175},
+ dictWord{7, 10, 686},
+ dictWord{8, 10, 33},
+ dictWord{8, 10, 238},
+ dictWord{10, 10, 616},
+ dictWord{11, 10, 467},
+ dictWord{11, 10, 881},
+ dictWord{13, 10, 217},
+ dictWord{13, 10, 253},
+ dictWord{142, 10, 268},
+ dictWord{9, 0, 476},
+ dictWord{4, 11, 66},
+ dictWord{7, 11, 722},
+ dictWord{135, 11, 904},
+ dictWord{7, 11, 352},
+ dictWord{137, 11, 684},
+ dictWord{135, 0, 2023},
+ dictWord{135, 0, 1836},
+ dictWord{132, 10, 447},
+ dictWord{5, 0, 843},
+ dictWord{144, 0, 35},
+ dictWord{137, 11, 779},
+ dictWord{141, 11, 35},
+ dictWord{4, 10, 128},
+ dictWord{5, 10, 415},
+ dictWord{6, 10, 462},
+ dictWord{7, 10, 294},
+ dictWord{7, 10, 578},
+ dictWord{10, 10, 710},
+ dictWord{139, 10, 86},
+ dictWord{132, 0, 554},
+ dictWord{133, 0, 536},
+ dictWord{136, 10, 587},
+ dictWord{5, 0, 207},
+ dictWord{9, 0, 79},
+ dictWord{11, 0, 625},
+ dictWord{145, 0, 7},
+ dictWord{7, 0, 1371},
+ dictWord{6, 10, 427},
+ dictWord{138, 10, 692},
+ dictWord{4, 0, 424},
+ dictWord{4, 10, 195},
+ dictWord{135, 10, 802},
+ dictWord{8, 0, 785},
+ dictWord{133, 11, 564},
+ dictWord{135, 0, 336},
+ dictWord{4, 0, 896},
+ dictWord{6, 0, 1777},
+ dictWord{134, 11, 556},
+ dictWord{137, 11, 103},
+ dictWord{134, 10, 1683},
+ dictWord{7, 11, 544},
+ dictWord{8, 11, 719},
+ dictWord{138, 11, 61},
+ dictWord{138, 10, 472},
+ dictWord{4, 11, 5},
+ dictWord{5, 11, 498},
+ dictWord{136, 11, 637},
+ dictWord{7, 0, 750},
+ dictWord{9, 0, 223},
+ dictWord{11, 0, 27},
+ dictWord{11, 0, 466},
+ dictWord{12, 0, 624},
+ dictWord{14, 0, 265},
+ dictWord{146, 0, 61},
+ dictWord{12, 0, 238},
+ dictWord{18, 0, 155},
+ dictWord{12, 11, 238},
+ dictWord{146, 11, 155},
+ dictWord{151, 10, 28},
+ dictWord{133, 11, 927},
+ dictWord{12, 0, 383},
+ dictWord{5, 10, 3},
+ dictWord{8, 10, 578},
+ dictWord{9, 10, 118},
+ dictWord{10, 10, 705},
+ dictWord{141, 10, 279},
+ dictWord{4, 11, 893},
+ dictWord{5, 11, 780},
+ dictWord{133, 11, 893},
+ dictWord{4, 0, 603},
+ dictWord{133, 0, 661},
+ dictWord{4, 0, 11},
+ dictWord{6, 0, 128},
+ dictWord{7, 0, 231},
+ dictWord{7, 0, 1533},
+ dictWord{10, 0, 725},
+ dictWord{5, 10, 229},
+ dictWord{5, 11, 238},
+ dictWord{135, 11, 1350},
+ dictWord{8, 10, 102},
+ dictWord{10, 10, 578},
+ dictWord{10, 10, 672},
+ dictWord{12, 10, 496},
+ dictWord{13, 10, 408},
+ dictWord{14, 10, 121},
+ dictWord{145, 10, 106},
+ dictWord{132, 0, 476},
+ dictWord{134, 0, 1552},
+ dictWord{134, 11, 1729},
+ dictWord{8, 10, 115},
+ dictWord{8, 10, 350},
+ dictWord{9, 10, 489},
+ dictWord{10, 10, 128},
+ dictWord{11, 10, 306},
+ dictWord{12, 10, 373},
+ dictWord{14, 10, 30},
+ dictWord{17, 10, 79},
+ dictWord{19, 10, 80},
+ dictWord{150, 10, 55},
+ dictWord{135, 0, 1807},
+ dictWord{4, 0, 680},
+ dictWord{4, 11, 60},
+ dictWord{7, 11, 760},
+ dictWord{7, 11, 1800},
+ dictWord{8, 11, 314},
+ dictWord{9, 11, 700},
+ dictWord{139, 11, 487},
+ dictWord{4, 10, 230},
+ dictWord{5, 10, 702},
+ dictWord{148, 11, 94},
+ dictWord{132, 11, 228},
+ dictWord{139, 0, 435},
+ dictWord{9, 0, 20},
+ dictWord{10, 0, 324},
+ dictWord{10, 0, 807},
+ dictWord{139, 0, 488},
+ dictWord{6, 10, 1728},
+ dictWord{136, 11, 419},
+ dictWord{4, 10, 484},
+ dictWord{18, 10, 26},
+ dictWord{19, 10, 42},
+ dictWord{20, 10, 43},
+ dictWord{21, 10, 0},
+ dictWord{23, 10, 27},
+ dictWord{152, 10, 14},
+ dictWord{135, 0, 1431},
+ dictWord{133, 11, 828},
+ dictWord{5, 0, 112},
+ dictWord{6, 0, 103},
+ dictWord{6, 0, 150},
+ dictWord{7, 0, 1303},
+ dictWord{9, 0, 292},
+ dictWord{10, 0, 481},
+ dictWord{20, 0, 13},
+ dictWord{7, 11, 176},
+ dictWord{7, 11, 178},
+ dictWord{7, 11, 1110},
+ dictWord{10, 11, 481},
+ dictWord{148, 11, 13},
+ dictWord{138, 0, 356},
+ dictWord{4, 11, 51},
+ dictWord{5, 11, 39},
+ dictWord{6, 11, 4},
+ dictWord{7, 11, 591},
+ dictWord{7, 11, 849},
+ dictWord{7, 11, 951},
+ dictWord{7, 11, 1129},
+ dictWord{7, 11, 1613},
+ dictWord{7, 11, 1760},
+ dictWord{7, 11, 1988},
+ dictWord{9, 11, 434},
+ dictWord{10, 11, 754},
+ dictWord{11, 11, 25},
+ dictWord{11, 11, 37},
+ dictWord{139, 11, 414},
+ dictWord{6, 0, 1963},
+ dictWord{134, 0, 2000},
+ dictWord{132, 10, 633},
+ dictWord{6, 0, 1244},
+ dictWord{133, 11, 902},
+ dictWord{135, 11, 928},
+ dictWord{140, 0, 18},
+ dictWord{138, 0, 204},
+ dictWord{135, 11, 1173},
+ dictWord{134, 0, 867},
+ dictWord{4, 0, 708},
+ dictWord{8, 0, 15},
+ dictWord{9, 0, 50},
+ dictWord{9, 0, 386},
+ dictWord{11, 0, 18},
+ dictWord{11, 0, 529},
+ dictWord{140, 0, 228},
+ dictWord{134, 11, 270},
+ dictWord{4, 0, 563},
+ dictWord{7, 0, 109},
+ dictWord{7, 0, 592},
+ dictWord{7, 0, 637},
+ dictWord{7, 0, 770},
+ dictWord{8, 0, 463},
+ dictWord{9, 0, 60},
+ dictWord{9, 0, 335},
+ dictWord{9, 0, 904},
+ dictWord{10, 0, 73},
+ dictWord{11, 0, 434},
+ dictWord{12, 0, 585},
+ dictWord{13, 0, 331},
+ dictWord{18, 0, 110},
+ dictWord{148, 0, 60},
+ dictWord{132, 0, 502},
+ dictWord{14, 11, 359},
+ dictWord{19, 11, 52},
+ dictWord{148, 11, 47},
+ dictWord{6, 11, 377},
+ dictWord{7, 11, 1025},
+ dictWord{9, 11, 613},
+ dictWord{145, 11, 104},
+ dictWord{6, 0, 347},
+ dictWord{10, 0, 161},
+ dictWord{5, 10, 70},
+ dictWord{5, 10, 622},
+ dictWord{6, 10, 334},
+ dictWord{7, 10, 1032},
+ dictWord{9, 10, 171},
+ dictWord{11, 10, 26},
+ dictWord{11, 10, 213},
+ dictWord{11, 10, 637},
+ dictWord{11, 10, 707},
+ dictWord{12, 10, 202},
+ dictWord{12, 10, 380},
+ dictWord{13, 10, 226},
+ dictWord{13, 10, 355},
+ dictWord{14, 10, 222},
+ dictWord{145, 10, 42},
+ dictWord{132, 11, 416},
+ dictWord{4, 0, 33},
+ dictWord{5, 0, 102},
+ dictWord{6, 0, 284},
+ dictWord{7, 0, 1079},
+ dictWord{7, 0, 1423},
+ dictWord{7, 0, 1702},
+ dictWord{8, 0, 470},
+ dictWord{9, 0, 554},
+ dictWord{9, 0, 723},
+ dictWord{11, 0, 333},
+ dictWord{142, 11, 372},
+ dictWord{5, 11, 152},
+ dictWord{5, 11, 197},
+ dictWord{7, 11, 340},
+ dictWord{7, 11, 867},
+ dictWord{10, 11, 548},
+ dictWord{10, 11, 581},
+ dictWord{11, 11, 6},
+ dictWord{12, 11, 3},
+ dictWord{12, 11, 19},
+ dictWord{14, 11, 110},
+ dictWord{142, 11, 289},
+ dictWord{7, 0, 246},
+ dictWord{135, 0, 840},
+ dictWord{6, 0, 10},
+ dictWord{8, 0, 571},
+ dictWord{9, 0, 739},
+ dictWord{143, 0, 91},
+ dictWord{6, 0, 465},
+ dictWord{7, 0, 1465},
+ dictWord{4, 10, 23},
+ dictWord{4, 10, 141},
+ dictWord{5, 10, 313},
+ dictWord{5, 10, 1014},
+ dictWord{6, 10, 50},
+ dictWord{7, 10, 142},
+ dictWord{7, 10, 559},
+ dictWord{8, 10, 640},
+ dictWord{9, 10, 460},
+ dictWord{9, 10, 783},
+ dictWord{11, 10, 741},
+ dictWord{12, 10, 183},
+ dictWord{141, 10, 488},
+ dictWord{133, 0, 626},
+ dictWord{136, 0, 614},
+ dictWord{138, 0, 237},
+ dictWord{7, 11, 34},
+ dictWord{7, 11, 190},
+ dictWord{8, 11, 28},
+ dictWord{8, 11, 141},
+ dictWord{8, 11, 444},
+ dictWord{8, 11, 811},
+ dictWord{9, 11, 468},
+ dictWord{11, 11, 334},
+ dictWord{12, 11, 24},
+ dictWord{12, 11, 386},
+ dictWord{140, 11, 576},
+ dictWord{133, 11, 757},
+ dictWord{5, 0, 18},
+ dictWord{6, 0, 526},
+ dictWord{13, 0, 24},
+ dictWord{13, 0, 110},
+ dictWord{19, 0, 5},
+ dictWord{147, 0, 44},
+ dictWord{6, 0, 506},
+ dictWord{134, 11, 506},
+ dictWord{135, 11, 1553},
+ dictWord{4, 0, 309},
+ dictWord{5, 0, 462},
+ dictWord{7, 0, 970},
+ dictWord{7, 0, 1097},
+ dictWord{22, 0, 30},
+ dictWord{22, 0, 33},
+ dictWord{7, 11, 1385},
+ dictWord{11, 11, 582},
+ dictWord{11, 11, 650},
+ dictWord{11, 11, 901},
+ dictWord{11, 11, 949},
+ dictWord{12, 11, 232},
+ dictWord{12, 11, 236},
+ dictWord{13, 11, 413},
+ dictWord{13, 11, 501},
+ dictWord{146, 11, 116},
+ dictWord{9, 0, 140},
+ dictWord{5, 10, 222},
+ dictWord{138, 10, 534},
+ dictWord{6, 0, 1056},
+ dictWord{137, 10, 906},
+ dictWord{134, 0, 1704},
+ dictWord{138, 10, 503},
+ dictWord{134, 0, 1036},
+ dictWord{5, 10, 154},
+ dictWord{7, 10, 1491},
+ dictWord{10, 10, 379},
+ dictWord{138, 10, 485},
+ dictWord{4, 11, 383},
+ dictWord{133, 10, 716},
+ dictWord{134, 0, 1315},
+ dictWord{5, 0, 86},
+ dictWord{7, 0, 743},
+ dictWord{9, 0, 85},
+ dictWord{10, 0, 281},
+ dictWord{10, 0, 432},
+ dictWord{11, 0, 825},
+ dictWord{12, 0, 251},
+ dictWord{13, 0, 118},
+ dictWord{142, 0, 378},
+ dictWord{8, 0, 264},
+ dictWord{4, 10, 91},
+ dictWord{5, 10, 388},
+ dictWord{5, 10, 845},
+ dictWord{6, 10, 206},
+ dictWord{6, 10, 252},
+ dictWord{6, 10, 365},
+ dictWord{7, 10, 136},
+ dictWord{7, 10, 531},
+ dictWord{136, 10, 621},
+ dictWord{5, 0, 524},
+ dictWord{133, 0, 744},
+ dictWord{5, 11, 277},
+ dictWord{141, 11, 247},
+ dictWord{132, 11, 435},
+ dictWord{10, 0, 107},
+ dictWord{140, 0, 436},
+ dictWord{132, 0, 927},
+ dictWord{10, 0, 123},
+ dictWord{12, 0, 670},
+ dictWord{146, 0, 94},
+ dictWord{7, 0, 1149},
+ dictWord{9, 0, 156},
+ dictWord{138, 0, 957},
+ dictWord{5, 11, 265},
+ dictWord{6, 11, 212},
+ dictWord{135, 11, 28},
+ dictWord{133, 0, 778},
+ dictWord{133, 0, 502},
+ dictWord{8, 0, 196},
+ dictWord{10, 0, 283},
+ dictWord{139, 0, 406},
+ dictWord{135, 10, 576},
+ dictWord{136, 11, 535},
+ dictWord{134, 0, 1312},
+ dictWord{5, 10, 771},
+ dictWord{5, 10, 863},
+ dictWord{5, 10, 898},
+ dictWord{6, 10, 1632},
+ dictWord{6, 10, 1644},
+ dictWord{134, 10, 1780},
+ dictWord{5, 0, 855},
+ dictWord{5, 10, 331},
+ dictWord{135, 11, 1487},
+ dictWord{132, 11, 702},
+ dictWord{5, 11, 808},
+ dictWord{135, 11, 2045},
+ dictWord{7, 0, 1400},
+ dictWord{9, 0, 446},
+ dictWord{138, 0, 45},
+ dictWord{140, 10, 632},
+ dictWord{132, 0, 1003},
+ dictWord{5, 11, 166},
+ dictWord{8, 11, 739},
+ dictWord{140, 11, 511},
+ dictWord{5, 10, 107},
+ dictWord{7, 10, 201},
+ dictWord{136, 10, 518},
+ dictWord{6, 10, 446},
+ dictWord{135, 10, 1817},
+ dictWord{134, 0, 1532},
+ dictWord{134, 0, 1097},
+ dictWord{4, 11, 119},
+ dictWord{5, 11, 170},
+ dictWord{5, 11, 447},
+ dictWord{7, 11, 1708},
+ dictWord{7, 11, 1889},
+ dictWord{9, 11, 357},
+ dictWord{9, 11, 719},
+ dictWord{12, 11, 486},
+ dictWord{140, 11, 596},
+ dictWord{9, 10, 851},
+ dictWord{141, 10, 510},
+ dictWord{7, 0, 612},
+ dictWord{8, 0, 545},
+ dictWord{8, 0, 568},
+ dictWord{8, 0, 642},
+ dictWord{9, 0, 717},
+ dictWord{10, 0, 541},
+ dictWord{10, 0, 763},
+ dictWord{11, 0, 449},
+ dictWord{12, 0, 489},
+ dictWord{13, 0, 153},
+ dictWord{13, 0, 296},
+ dictWord{14, 0, 138},
+ dictWord{14, 0, 392},
+ dictWord{15, 0, 50},
+ dictWord{16, 0, 6},
+ dictWord{16, 0, 12},
+ dictWord{20, 0, 9},
+ dictWord{132, 10, 504},
+ dictWord{4, 11, 450},
+ dictWord{135, 11, 1158},
+ dictWord{11, 0, 54},
+ dictWord{13, 0, 173},
+ dictWord{13, 0, 294},
+ dictWord{5, 10, 883},
+ dictWord{5, 10, 975},
+ dictWord{8, 10, 392},
+ dictWord{148, 10, 7},
+ dictWord{13, 0, 455},
+ dictWord{15, 0, 99},
+ dictWord{15, 0, 129},
+ dictWord{144, 0, 68},
+ dictWord{135, 0, 172},
+ dictWord{132, 11, 754},
+ dictWord{5, 10, 922},
+ dictWord{134, 10, 1707},
+ dictWord{134, 0, 1029},
+ dictWord{17, 11, 39},
+ dictWord{148, 11, 36},
+ dictWord{4, 0, 568},
+ dictWord{5, 10, 993},
+ dictWord{7, 10, 515},
+ dictWord{137, 10, 91},
+ dictWord{132, 0, 732},
+ dictWord{10, 0, 617},
+ dictWord{138, 11, 617},
+ dictWord{134, 0, 974},
+ dictWord{7, 0, 989},
+ dictWord{10, 0, 377},
+ dictWord{12, 0, 363},
+ dictWord{13, 0, 68},
+ dictWord{13, 0, 94},
+ dictWord{14, 0, 108},
+ dictWord{142, 0, 306},
+ dictWord{136, 0, 733},
+ dictWord{132, 0, 428},
+ dictWord{7, 0, 1789},
+ dictWord{135, 11, 1062},
+ dictWord{7, 0, 2015},
+ dictWord{140, 0, 665},
+ dictWord{135, 10, 1433},
+ dictWord{5, 0, 287},
+ dictWord{7, 10, 921},
+ dictWord{8, 10, 580},
+ dictWord{8, 10, 593},
+ dictWord{8, 10, 630},
+ dictWord{138, 10, 28},
+ dictWord{138, 0, 806},
+ dictWord{4, 10, 911},
+ dictWord{5, 10, 867},
+ dictWord{5, 10, 1013},
+ dictWord{7, 10, 2034},
+ dictWord{8, 10, 798},
+ dictWord{136, 10, 813},
+ dictWord{134, 0, 1539},
+ dictWord{8, 11, 523},
+ dictWord{150, 11, 34},
+ dictWord{135, 11, 740},
+ dictWord{7, 11, 238},
+ dictWord{7, 11, 2033},
+ dictWord{8, 11, 120},
+ dictWord{8, 11, 188},
+ dictWord{8, 11, 659},
+ dictWord{9, 11, 598},
+ dictWord{10, 11, 466},
+ dictWord{12, 11, 342},
+ dictWord{12, 11, 588},
+ dictWord{13, 11, 503},
+ dictWord{14, 11, 246},
+ dictWord{143, 11, 92},
+ dictWord{7, 0, 1563},
+ dictWord{141, 0, 182},
+ dictWord{5, 10, 135},
+ dictWord{6, 10, 519},
+ dictWord{7, 10, 1722},
+ dictWord{10, 10, 271},
+ dictWord{11, 10, 261},
+ dictWord{145, 10, 54},
+ dictWord{14, 10, 338},
+ dictWord{148, 10, 81},
+ dictWord{7, 0, 484},
+ dictWord{4, 10, 300},
+ dictWord{133, 10, 436},
+ dictWord{145, 11, 114},
+ dictWord{6, 0, 1623},
+ dictWord{134, 0, 1681},
+ dictWord{133, 11, 640},
+ dictWord{4, 11, 201},
+ dictWord{7, 11, 1744},
+ dictWord{8, 11, 602},
+ dictWord{11, 11, 247},
+ dictWord{11, 11, 826},
+ dictWord{145, 11, 65},
+ dictWord{8, 11, 164},
+ dictWord{146, 11, 62},
+ dictWord{6, 0, 1833},
+ dictWord{6, 0, 1861},
+ dictWord{136, 0, 878},
+ dictWord{134, 0, 1569},
+ dictWord{8, 10, 357},
+ dictWord{10, 10, 745},
+ dictWord{14, 10, 426},
+ dictWord{17, 10, 94},
+ dictWord{147, 10, 57},
+ dictWord{12, 0, 93},
+ dictWord{12, 0, 501},
+ dictWord{13, 0, 362},
+ dictWord{14, 0, 151},
+ dictWord{15, 0, 40},
+ dictWord{15, 0, 59},
+ dictWord{16, 0, 46},
+ dictWord{17, 0, 25},
+ dictWord{18, 0, 14},
+ dictWord{18, 0, 134},
+ dictWord{19, 0, 25},
+ dictWord{19, 0, 69},
+ dictWord{20, 0, 16},
+ dictWord{20, 0, 19},
+ dictWord{20, 0, 66},
+ dictWord{21, 0, 23},
+ dictWord{21, 0, 25},
+ dictWord{150, 0, 42},
+ dictWord{6, 0, 1748},
+ dictWord{8, 0, 715},
+ dictWord{9, 0, 802},
+ dictWord{10, 0, 46},
+ dictWord{10, 0, 819},
+ dictWord{13, 0, 308},
+ dictWord{14, 0, 351},
+ dictWord{14, 0, 363},
+ dictWord{146, 0, 67},
+ dictWord{132, 0, 994},
+ dictWord{4, 0, 63},
+ dictWord{133, 0, 347},
+ dictWord{132, 0, 591},
+ dictWord{133, 0, 749},
+ dictWord{7, 11, 1577},
+ dictWord{10, 11, 304},
+ dictWord{10, 11, 549},
+ dictWord{11, 11, 424},
+ dictWord{12, 11, 365},
+ dictWord{13, 11, 220},
+ dictWord{13, 11, 240},
+ dictWord{142, 11, 33},
+ dictWord{133, 0, 366},
+ dictWord{7, 0, 557},
+ dictWord{12, 0, 547},
+ dictWord{14, 0, 86},
+ dictWord{133, 10, 387},
+ dictWord{135, 0, 1747},
+ dictWord{132, 11, 907},
+ dictWord{5, 11, 100},
+ dictWord{10, 11, 329},
+ dictWord{12, 11, 416},
+ dictWord{149, 11, 29},
+ dictWord{4, 10, 6},
+ dictWord{5, 10, 708},
+ dictWord{136, 10, 75},
+ dictWord{7, 10, 1351},
+ dictWord{9, 10, 581},
+ dictWord{10, 10, 639},
+ dictWord{11, 10, 453},
+ dictWord{140, 10, 584},
+ dictWord{7, 0, 89},
+ dictWord{132, 10, 303},
+ dictWord{138, 10, 772},
+ dictWord{132, 11, 176},
+ dictWord{5, 11, 636},
+ dictWord{5, 11, 998},
+ dictWord{8, 11, 26},
+ dictWord{137, 11, 358},
+ dictWord{7, 11, 9},
+ dictWord{7, 11, 1508},
+ dictWord{9, 11, 317},
+ dictWord{10, 11, 210},
+ dictWord{10, 11, 292},
+ dictWord{10, 11, 533},
+ dictWord{11, 11, 555},
+ dictWord{12, 11, 526},
+ dictWord{12, 11, 607},
+ dictWord{13, 11, 263},
+ dictWord{13, 11, 459},
+ dictWord{142, 11, 271},
+ dictWord{134, 0, 1463},
+ dictWord{6, 0, 772},
+ dictWord{6, 0, 1137},
+ dictWord{139, 11, 595},
+ dictWord{7, 0, 977},
+ dictWord{139, 11, 66},
+ dictWord{138, 0, 893},
+ dictWord{20, 0, 48},
+ dictWord{148, 11, 48},
+ dictWord{5, 0, 824},
+ dictWord{133, 0, 941},
+ dictWord{134, 11, 295},
+ dictWord{7, 0, 1543},
+ dictWord{7, 0, 1785},
+ dictWord{10, 0, 690},
+ dictWord{4, 10, 106},
+ dictWord{139, 10, 717},
+ dictWord{7, 0, 440},
+ dictWord{8, 0, 230},
+ dictWord{139, 0, 106},
+ dictWord{5, 10, 890},
+ dictWord{133, 10, 988},
+ dictWord{6, 10, 626},
+ dictWord{142, 10, 431},
+ dictWord{10, 11, 127},
+ dictWord{141, 11, 27},
+ dictWord{17, 0, 32},
+ dictWord{10, 10, 706},
+ dictWord{150, 10, 44},
+ dictWord{132, 0, 216},
+ dictWord{137, 0, 332},
+ dictWord{4, 10, 698},
+ dictWord{136, 11, 119},
+ dictWord{139, 11, 267},
+ dictWord{138, 10, 17},
+ dictWord{11, 11, 526},
+ dictWord{11, 11, 939},
+ dictWord{141, 11, 290},
+ dictWord{7, 11, 1167},
+ dictWord{11, 11, 934},
+ dictWord{13, 11, 391},
+ dictWord{145, 11, 76},
+ dictWord{139, 11, 39},
+ dictWord{134, 10, 84},
+ dictWord{4, 0, 914},
+ dictWord{5, 0, 800},
+ dictWord{133, 0, 852},
+ dictWord{10, 0, 416},
+ dictWord{141, 0, 115},
+ dictWord{7, 0, 564},
+ dictWord{142, 0, 168},
+ dictWord{4, 0, 918},
+ dictWord{133, 0, 876},
+ dictWord{134, 0, 1764},
+ dictWord{152, 0, 3},
+ dictWord{4, 0, 92},
+ dictWord{5, 0, 274},
+ dictWord{7, 11, 126},
+ dictWord{136, 11, 84},
+ dictWord{140, 10, 498},
+ dictWord{136, 11, 790},
+ dictWord{8, 0, 501},
+ dictWord{5, 10, 986},
+ dictWord{6, 10, 130},
+ dictWord{7, 10, 1582},
+ dictWord{8, 10, 458},
+ dictWord{10, 10, 101},
+ dictWord{10, 10, 318},
+ dictWord{138, 10, 823},
+ dictWord{6, 11, 64},
+ dictWord{12, 11, 377},
+ dictWord{141, 11, 309},
+ dictWord{5, 0, 743},
+ dictWord{138, 0, 851},
+ dictWord{4, 0, 49},
+ dictWord{7, 0, 280},
+ dictWord{135, 0, 1633},
+ dictWord{134, 0, 879},
+ dictWord{136, 0, 47},
+ dictWord{7, 10, 1644},
+ dictWord{137, 10, 129},
+ dictWord{132, 0, 865},
+ dictWord{134, 0, 1202},
+ dictWord{9, 11, 34},
+ dictWord{139, 11, 484},
+ dictWord{135, 10, 997},
+ dictWord{5, 0, 272},
+ dictWord{5, 0, 908},
+ dictWord{5, 0, 942},
+ dictWord{8, 0, 197},
+ dictWord{9, 0, 47},
+ dictWord{11, 0, 538},
+ dictWord{139, 0, 742},
+ dictWord{6, 11, 1700},
+ dictWord{7, 11, 26},
+ dictWord{7, 11, 293},
+ dictWord{7, 11, 382},
+ dictWord{7, 11, 1026},
+ dictWord{7, 11, 1087},
+ dictWord{7, 11, 2027},
+ dictWord{8, 11, 24},
+ dictWord{8, 11, 114},
+ dictWord{8, 11, 252},
+ dictWord{8, 11, 727},
+ dictWord{8, 11, 729},
+ dictWord{9, 11, 30},
+ dictWord{9, 11, 199},
+ dictWord{9, 11, 231},
+ dictWord{9, 11, 251},
+ dictWord{9, 11, 334},
+ dictWord{9, 11, 361},
+ dictWord{9, 11, 488},
+ dictWord{9, 11, 712},
+ dictWord{10, 11, 55},
+ dictWord{10, 11, 60},
+ dictWord{10, 11, 232},
+ dictWord{10, 11, 332},
+ dictWord{10, 11, 384},
+ dictWord{10, 11, 396},
+ dictWord{10, 11, 504},
+ dictWord{10, 11, 542},
+ dictWord{10, 11, 652},
+ dictWord{11, 11, 20},
+ dictWord{11, 11, 48},
+ dictWord{11, 11, 207},
+ dictWord{11, 11, 291},
+ dictWord{11, 11, 298},
+ dictWord{11, 11, 342},
+ dictWord{11, 11, 365},
+ dictWord{11, 11, 394},
+ dictWord{11, 11, 620},
+ dictWord{11, 11, 705},
+ dictWord{11, 11, 1017},
+ dictWord{12, 11, 123},
+ dictWord{12, 11, 340},
+ dictWord{12, 11, 406},
+ dictWord{12, 11, 643},
+ dictWord{13, 11, 61},
+ dictWord{13, 11, 269},
+ dictWord{13, 11, 311},
+ dictWord{13, 11, 319},
+ dictWord{13, 11, 486},
+ dictWord{14, 11, 234},
+ dictWord{15, 11, 62},
+ dictWord{15, 11, 85},
+ dictWord{16, 11, 71},
+ dictWord{18, 11, 119},
+ dictWord{148, 11, 105},
+ dictWord{6, 0, 1455},
+ dictWord{150, 11, 37},
+ dictWord{135, 10, 1927},
+ dictWord{135, 0, 1911},
+ dictWord{137, 0, 891},
+ dictWord{7, 10, 1756},
+ dictWord{137, 10, 98},
+ dictWord{7, 10, 1046},
+ dictWord{139, 10, 160},
+ dictWord{132, 0, 761},
+ dictWord{6, 11, 379},
+ dictWord{7, 11, 270},
+ dictWord{7, 11, 1116},
+ dictWord{8, 11, 176},
+ dictWord{8, 11, 183},
+ dictWord{9, 11, 432},
+ dictWord{9, 11, 661},
+ dictWord{12, 11, 247},
+ dictWord{12, 11, 617},
+ dictWord{146, 11, 125},
+ dictWord{6, 10, 45},
+ dictWord{7, 10, 433},
+ dictWord{8, 10, 129},
+ dictWord{9, 10, 21},
+ dictWord{10, 10, 392},
+ dictWord{11, 10, 79},
+ dictWord{12, 10, 499},
+ dictWord{13, 10, 199},
+ dictWord{141, 10, 451},
+ dictWord{4, 0, 407},
+ dictWord{5, 11, 792},
+ dictWord{133, 11, 900},
+ dictWord{132, 0, 560},
+ dictWord{135, 0, 183},
+ dictWord{13, 0, 490},
+ dictWord{7, 10, 558},
+ dictWord{136, 10, 353},
+ dictWord{4, 0, 475},
+ dictWord{6, 0, 731},
+ dictWord{11, 0, 35},
+ dictWord{13, 0, 71},
+ dictWord{13, 0, 177},
+ dictWord{14, 0, 422},
+ dictWord{133, 10, 785},
+ dictWord{8, 10, 81},
+ dictWord{9, 10, 189},
+ dictWord{9, 10, 201},
+ dictWord{11, 10, 478},
+ dictWord{11, 10, 712},
+ dictWord{141, 10, 338},
+ dictWord{4, 0, 418},
+ dictWord{4, 0, 819},
+ dictWord{133, 10, 353},
+ dictWord{151, 10, 26},
+ dictWord{4, 11, 901},
+ dictWord{133, 11, 776},
+ dictWord{132, 0, 575},
+ dictWord{7, 0, 818},
+ dictWord{16, 0, 92},
+ dictWord{17, 0, 14},
+ dictWord{17, 0, 45},
+ dictWord{18, 0, 75},
+ dictWord{148, 0, 18},
+ dictWord{6, 0, 222},
+ dictWord{7, 0, 636},
+ dictWord{7, 0, 1620},
+ dictWord{8, 0, 409},
+ dictWord{9, 0, 693},
+ dictWord{139, 0, 77},
+ dictWord{6, 10, 25},
+ dictWord{7, 10, 855},
+ dictWord{7, 10, 1258},
+ dictWord{144, 10, 32},
+ dictWord{6, 0, 1880},
+ dictWord{6, 0, 1887},
+ dictWord{6, 0, 1918},
+ dictWord{6, 0, 1924},
+ dictWord{9, 0, 967},
+ dictWord{9, 0, 995},
+ dictWord{9, 0, 1015},
+ dictWord{12, 0, 826},
+ dictWord{12, 0, 849},
+ dictWord{12, 0, 857},
+ dictWord{12, 0, 860},
+ dictWord{12, 0, 886},
+ dictWord{12, 0, 932},
+ dictWord{18, 0, 228},
+ dictWord{18, 0, 231},
+ dictWord{146, 0, 240},
+ dictWord{134, 0, 633},
+ dictWord{134, 0, 1308},
+ dictWord{4, 11, 37},
+ dictWord{5, 11, 334},
+ dictWord{135, 11, 1253},
+ dictWord{10, 0, 86},
+ dictWord{4, 10, 4},
+ dictWord{7, 10, 1118},
+ dictWord{7, 10, 1320},
+ dictWord{7, 10, 1706},
+ dictWord{8, 10, 277},
+ dictWord{9, 10, 622},
+ dictWord{11, 10, 724},
+ dictWord{12, 10, 350},
+ dictWord{12, 10, 397},
+ dictWord{13, 10, 28},
+ dictWord{13, 10, 159},
+ dictWord{15, 10, 89},
+ dictWord{18, 10, 5},
+ dictWord{19, 10, 9},
+ dictWord{20, 10, 34},
+ dictWord{150, 10, 47},
+ dictWord{132, 11, 508},
+ dictWord{137, 11, 448},
+ dictWord{12, 11, 107},
+ dictWord{146, 11, 31},
+ dictWord{132, 0, 817},
+ dictWord{134, 0, 663},
+ dictWord{133, 0, 882},
+ dictWord{134, 0, 914},
+ dictWord{132, 11, 540},
+ dictWord{132, 11, 533},
+ dictWord{136, 11, 608},
+ dictWord{8, 0, 885},
+ dictWord{138, 0, 865},
+ dictWord{132, 0, 426},
+ dictWord{6, 0, 58},
+ dictWord{7, 0, 745},
+ dictWord{7, 0, 1969},
+ dictWord{8, 0, 399},
+ dictWord{8, 0, 675},
+ dictWord{9, 0, 479},
+ dictWord{9, 0, 731},
+ dictWord{10, 0, 330},
+ dictWord{10, 0, 593},
+ dictWord{10, 0, 817},
+ dictWord{11, 0, 32},
+ dictWord{11, 0, 133},
+ dictWord{11, 0, 221},
+ dictWord{145, 0, 68},
+ dictWord{134, 10, 255},
+ dictWord{7, 0, 102},
+ dictWord{137, 0, 538},
+ dictWord{137, 10, 216},
+ dictWord{7, 11, 253},
+ dictWord{136, 11, 549},
+ dictWord{135, 11, 912},
+ dictWord{9, 10, 183},
+ dictWord{139, 10, 286},
+ dictWord{11, 10, 956},
+ dictWord{151, 10, 3},
+ dictWord{8, 11, 527},
+ dictWord{18, 11, 60},
+ dictWord{147, 11, 24},
+ dictWord{4, 10, 536},
+ dictWord{7, 10, 1141},
+ dictWord{10, 10, 723},
+ dictWord{139, 10, 371},
+ dictWord{133, 11, 920},
+ dictWord{7, 0, 876},
+ dictWord{135, 10, 285},
+ dictWord{135, 10, 560},
+ dictWord{132, 10, 690},
+ dictWord{142, 11, 126},
+ dictWord{11, 10, 33},
+ dictWord{12, 10, 571},
+ dictWord{149, 10, 1},
+ dictWord{133, 0, 566},
+ dictWord{9, 0, 139},
+ dictWord{10, 0, 399},
+ dictWord{11, 0, 469},
+ dictWord{12, 0, 634},
+ dictWord{13, 0, 223},
+ dictWord{132, 11, 483},
+ dictWord{6, 0, 48},
+ dictWord{135, 0, 63},
+ dictWord{18, 0, 12},
+ dictWord{7, 10, 1862},
+ dictWord{12, 10, 491},
+ dictWord{12, 10, 520},
+ dictWord{13, 10, 383},
+ dictWord{142, 10, 244},
+ dictWord{135, 11, 1665},
+ dictWord{132, 11, 448},
+ dictWord{9, 11, 495},
+ dictWord{146, 11, 104},
+ dictWord{6, 0, 114},
+ dictWord{7, 0, 1224},
+ dictWord{7, 0, 1556},
+ dictWord{136, 0, 3},
+ dictWord{4, 10, 190},
+ dictWord{133, 10, 554},
+ dictWord{8, 0, 576},
+ dictWord{9, 0, 267},
+ dictWord{133, 10, 1001},
+ dictWord{133, 10, 446},
+ dictWord{133, 0, 933},
+ dictWord{139, 11, 1009},
+ dictWord{8, 11, 653},
+ dictWord{13, 11, 93},
+ dictWord{147, 11, 14},
+ dictWord{6, 0, 692},
+ dictWord{6, 0, 821},
+ dictWord{134, 0, 1077},
+ dictWord{5, 11, 172},
+ dictWord{135, 11, 801},
+ dictWord{138, 0, 752},
+ dictWord{4, 0, 375},
+ dictWord{134, 0, 638},
+ dictWord{134, 0, 1011},
+ dictWord{140, 11, 540},
+ dictWord{9, 0, 96},
+ dictWord{133, 11, 260},
+ dictWord{139, 11, 587},
+ dictWord{135, 10, 1231},
+ dictWord{12, 0, 30},
+ dictWord{13, 0, 148},
+ dictWord{14, 0, 87},
+ dictWord{14, 0, 182},
+ dictWord{16, 0, 42},
+ dictWord{20, 0, 70},
+ dictWord{132, 10, 304},
+ dictWord{6, 0, 1398},
+ dictWord{7, 0, 56},
+ dictWord{7, 0, 1989},
+ dictWord{8, 0, 337},
+ dictWord{8, 0, 738},
+ dictWord{9, 0, 600},
+ dictWord{12, 0, 37},
+ dictWord{13, 0, 447},
+ dictWord{142, 0, 92},
+ dictWord{138, 0, 666},
+ dictWord{5, 0, 394},
+ dictWord{7, 0, 487},
+ dictWord{136, 0, 246},
+ dictWord{9, 0, 437},
+ dictWord{6, 10, 53},
+ dictWord{6, 10, 199},
+ dictWord{7, 10, 1408},
+ dictWord{8, 10, 32},
+ dictWord{8, 10, 93},
+ dictWord{10, 10, 397},
+ dictWord{10, 10, 629},
+ dictWord{11, 10, 593},
+ dictWord{11, 10, 763},
+ dictWord{13, 10, 326},
+ dictWord{145, 10, 35},
+ dictWord{134, 10, 105},
+ dictWord{9, 0, 320},
+ dictWord{10, 0, 506},
+ dictWord{138, 10, 794},
+ dictWord{7, 11, 57},
+ dictWord{8, 11, 167},
+ dictWord{8, 11, 375},
+ dictWord{9, 11, 82},
+ dictWord{9, 11, 561},
+ dictWord{10, 11, 620},
+ dictWord{10, 11, 770},
+ dictWord{11, 10, 704},
+ dictWord{141, 10, 396},
+ dictWord{6, 0, 1003},
+ dictWord{5, 10, 114},
+ dictWord{5, 10, 255},
+ dictWord{141, 10, 285},
+ dictWord{7, 0, 866},
+ dictWord{135, 0, 1163},
+ dictWord{133, 11, 531},
+ dictWord{132, 0, 328},
+ dictWord{7, 10, 2035},
+ dictWord{8, 10, 19},
+ dictWord{9, 10, 89},
+ dictWord{138, 10, 831},
+ dictWord{8, 11, 194},
+ dictWord{136, 11, 756},
+ dictWord{136, 0, 1000},
+ dictWord{5, 11, 453},
+ dictWord{134, 11, 441},
+ dictWord{4, 0, 101},
+ dictWord{5, 0, 833},
+ dictWord{7, 0, 1171},
+ dictWord{136, 0, 744},
+ dictWord{133, 0, 726},
+ dictWord{136, 10, 746},
+ dictWord{138, 0, 176},
+ dictWord{6, 0, 9},
+ dictWord{6, 0, 397},
+ dictWord{7, 0, 53},
+ dictWord{7, 0, 1742},
+ dictWord{10, 0, 632},
+ dictWord{11, 0, 828},
+ dictWord{140, 0, 146},
+ dictWord{135, 11, 22},
+ dictWord{145, 11, 64},
+ dictWord{132, 0, 839},
+ dictWord{11, 0, 417},
+ dictWord{12, 0, 223},
+ dictWord{140, 0, 265},
+ dictWord{4, 11, 102},
+ dictWord{7, 11, 815},
+ dictWord{7, 11, 1699},
+ dictWord{139, 11, 964},
+ dictWord{5, 10, 955},
+ dictWord{136, 10, 814},
+ dictWord{6, 0, 1931},
+ dictWord{6, 0, 2007},
+ dictWord{18, 0, 246},
+ dictWord{146, 0, 247},
+ dictWord{8, 0, 198},
+ dictWord{11, 0, 29},
+ dictWord{140, 0, 534},
+ dictWord{135, 0, 1771},
+ dictWord{6, 0, 846},
+ dictWord{7, 11, 1010},
+ dictWord{11, 11, 733},
+ dictWord{11, 11, 759},
+ dictWord{12, 11, 563},
+ dictWord{13, 11, 34},
+ dictWord{14, 11, 101},
+ dictWord{18, 11, 45},
+ dictWord{146, 11, 129},
+ dictWord{4, 0, 186},
+ dictWord{5, 0, 157},
+ dictWord{8, 0, 168},
+ dictWord{138, 0, 6},
+ dictWord{132, 11, 899},
+ dictWord{133, 10, 56},
+ dictWord{148, 10, 100},
+ dictWord{133, 0, 875},
+ dictWord{5, 0, 773},
+ dictWord{5, 0, 991},
+ dictWord{6, 0, 1635},
+ dictWord{134, 0, 1788},
+ dictWord{6, 0, 1274},
+ dictWord{9, 0, 477},
+ dictWord{141, 0, 78},
+ dictWord{4, 0, 639},
+ dictWord{7, 0, 111},
+ dictWord{8, 0, 581},
+ dictWord{12, 0, 177},
+ dictWord{6, 11, 52},
+ dictWord{9, 11, 104},
+ dictWord{9, 11, 559},
+ dictWord{10, 10, 4},
+ dictWord{10, 10, 13},
+ dictWord{11, 10, 638},
+ dictWord{12, 11, 308},
+ dictWord{19, 11, 87},
+ dictWord{148, 10, 57},
+ dictWord{132, 11, 604},
+ dictWord{4, 11, 301},
+ dictWord{133, 10, 738},
+ dictWord{133, 10, 758},
+ dictWord{134, 0, 1747},
+ dictWord{7, 11, 1440},
+ dictWord{11, 11, 854},
+ dictWord{11, 11, 872},
+ dictWord{11, 11, 921},
+ dictWord{12, 11, 551},
+ dictWord{13, 11, 472},
+ dictWord{142, 11, 367},
+ dictWord{7, 0, 1364},
+ dictWord{7, 0, 1907},
+ dictWord{141, 0, 158},
+ dictWord{134, 0, 873},
+ dictWord{4, 0, 404},
+ dictWord{4, 0, 659},
+ dictWord{7, 0, 552},
+ dictWord{135, 0, 675},
+ dictWord{135, 10, 1112},
+ dictWord{139, 10, 328},
+ dictWord{7, 11, 508},
+ dictWord{137, 10, 133},
+ dictWord{133, 0, 391},
+ dictWord{5, 10, 110},
+ dictWord{6, 10, 169},
+ dictWord{6, 10, 1702},
+ dictWord{7, 10, 400},
+ dictWord{8, 10, 538},
+ dictWord{9, 10, 184},
+ dictWord{9, 10, 524},
+ dictWord{140, 10, 218},
+ dictWord{6, 11, 310},
+ dictWord{7, 11, 1849},
+ dictWord{8, 11, 72},
+ dictWord{8, 11, 272},
+ dictWord{8, 11, 431},
+ dictWord{9, 11, 12},
+ dictWord{9, 11, 351},
+ dictWord{10, 11, 563},
+ dictWord{10, 11, 630},
+ dictWord{10, 11, 810},
+ dictWord{11, 11, 367},
+ dictWord{11, 11, 599},
+ dictWord{11, 11, 686},
+ dictWord{140, 11, 672},
+ dictWord{5, 0, 540},
+ dictWord{6, 0, 1697},
+ dictWord{136, 0, 668},
+ dictWord{132, 0, 883},
+ dictWord{134, 0, 78},
+ dictWord{12, 0, 628},
+ dictWord{18, 0, 79},
+ dictWord{6, 10, 133},
+ dictWord{9, 10, 353},
+ dictWord{139, 10, 993},
+ dictWord{6, 11, 181},
+ dictWord{7, 11, 537},
+ dictWord{8, 11, 64},
+ dictWord{9, 11, 127},
+ dictWord{10, 11, 496},
+ dictWord{12, 11, 510},
+ dictWord{141, 11, 384},
+ dictWord{6, 10, 93},
+ dictWord{7, 10, 1422},
+ dictWord{7, 10, 1851},
+ dictWord{8, 10, 673},
+ dictWord{9, 10, 529},
+ dictWord{140, 10, 43},
+ dictWord{137, 10, 371},
+ dictWord{134, 0, 1460},
+ dictWord{134, 0, 962},
+ dictWord{4, 11, 244},
+ dictWord{135, 11, 233},
+ dictWord{9, 10, 25},
+ dictWord{10, 10, 467},
+ dictWord{138, 10, 559},
+ dictWord{4, 10, 335},
+ dictWord{135, 10, 942},
+ dictWord{133, 0, 460},
+ dictWord{135, 11, 334},
+ dictWord{134, 11, 1650},
+ dictWord{4, 0, 199},
+ dictWord{139, 0, 34},
+ dictWord{5, 10, 601},
+ dictWord{8, 10, 39},
+ dictWord{10, 10, 773},
+ dictWord{11, 10, 84},
+ dictWord{12, 10, 205},
+ dictWord{142, 10, 1},
+ dictWord{133, 10, 870},
+ dictWord{134, 0, 388},
+ dictWord{14, 0, 474},
+ dictWord{148, 0, 120},
+ dictWord{133, 11, 369},
+ dictWord{139, 0, 271},
+ dictWord{4, 0, 511},
+ dictWord{9, 0, 333},
+ dictWord{9, 0, 379},
+ dictWord{10, 0, 602},
+ dictWord{11, 0, 441},
+ dictWord{11, 0, 723},
+ dictWord{11, 0, 976},
+ dictWord{12, 0, 357},
+ dictWord{132, 10, 181},
+ dictWord{134, 0, 608},
+ dictWord{134, 10, 1652},
+ dictWord{22, 0, 49},
+ dictWord{137, 11, 338},
+ dictWord{140, 0, 988},
+ dictWord{134, 0, 617},
+ dictWord{5, 0, 938},
+ dictWord{136, 0, 707},
+ dictWord{132, 10, 97},
+ dictWord{5, 10, 147},
+ dictWord{6, 10, 286},
+ dictWord{7, 10, 1362},
+ dictWord{141, 10, 176},
+ dictWord{6, 0, 756},
+ dictWord{134, 0, 1149},
+ dictWord{133, 11, 896},
+ dictWord{6, 10, 375},
+ dictWord{7, 10, 169},
+ dictWord{7, 10, 254},
+ dictWord{136, 10, 780},
+ dictWord{134, 0, 1583},
+ dictWord{135, 10, 1447},
+ dictWord{139, 0, 285},
+ dictWord{7, 11, 1117},
+ dictWord{8, 11, 393},
+ dictWord{136, 11, 539},
+ dictWord{135, 0, 344},
+ dictWord{6, 0, 469},
+ dictWord{7, 0, 1709},
+ dictWord{138, 0, 515},
+ dictWord{5, 10, 629},
+ dictWord{135, 10, 1549},
+ dictWord{5, 11, 4},
+ dictWord{5, 11, 810},
+ dictWord{6, 11, 13},
+ dictWord{6, 11, 538},
+ dictWord{6, 11, 1690},
+ dictWord{6, 11, 1726},
+ dictWord{7, 11, 499},
+ dictWord{7, 11, 1819},
+ dictWord{8, 11, 148},
+ dictWord{8, 11, 696},
+ dictWord{8, 11, 791},
+ dictWord{12, 11, 125},
+ dictWord{13, 11, 54},
+ dictWord{143, 11, 9},
+ dictWord{135, 11, 1268},
+ dictWord{137, 0, 404},
+ dictWord{132, 0, 500},
+ dictWord{5, 0, 68},
+ dictWord{134, 0, 383},
+ dictWord{11, 0, 216},
+ dictWord{139, 0, 340},
+ dictWord{4, 11, 925},
+ dictWord{5, 11, 803},
+ dictWord{8, 11, 698},
+ dictWord{138, 11, 828},
+ dictWord{4, 0, 337},
+ dictWord{6, 0, 353},
+ dictWord{7, 0, 1934},
+ dictWord{8, 0, 488},
+ dictWord{137, 0, 429},
+ dictWord{7, 0, 236},
+ dictWord{7, 0, 1795},
+ dictWord{8, 0, 259},
+ dictWord{9, 0, 135},
+ dictWord{9, 0, 177},
+ dictWord{9, 0, 860},
+ dictWord{10, 0, 825},
+ dictWord{11, 0, 115},
+ dictWord{11, 0, 370},
+ dictWord{11, 0, 405},
+ dictWord{11, 0, 604},
+ dictWord{12, 0, 10},
+ dictWord{12, 0, 667},
+ dictWord{12, 0, 669},
+ dictWord{13, 0, 76},
+ dictWord{14, 0, 310},
+ dictWord{15, 0, 76},
+ dictWord{15, 0, 147},
+ dictWord{148, 0, 23},
+ dictWord{4, 0, 15},
+ dictWord{4, 0, 490},
+ dictWord{5, 0, 22},
+ dictWord{6, 0, 244},
+ dictWord{7, 0, 40},
+ dictWord{7, 0, 200},
+ dictWord{7, 0, 906},
+ dictWord{7, 0, 1199},
+ dictWord{9, 0, 616},
+ dictWord{10, 0, 716},
+ dictWord{11, 0, 635},
+ dictWord{11, 0, 801},
+ dictWord{140, 0, 458},
+ dictWord{12, 0, 756},
+ dictWord{132, 10, 420},
+ dictWord{134, 0, 1504},
+ dictWord{6, 0, 757},
+ dictWord{133, 11, 383},
+ dictWord{6, 0, 1266},
+ dictWord{135, 0, 1735},
+ dictWord{5, 0, 598},
+ dictWord{7, 0, 791},
+ dictWord{8, 0, 108},
+ dictWord{9, 0, 123},
+ dictWord{7, 10, 1570},
+ dictWord{140, 10, 542},
+ dictWord{142, 11, 410},
+ dictWord{9, 11, 660},
+ dictWord{138, 11, 347},
+}
diff --git a/vendor/github.com/andybalholm/brotli/symbol_list.go b/vendor/github.com/andybalholm/brotli/symbol_list.go
new file mode 100644
index 0000000..c5cb49e
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/symbol_list.go
@@ -0,0 +1,22 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Utilities for building Huffman decoding tables. */
+
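+// A symbolList is a window into a shared storage slice: element i of the
+// list lives at storage[i+offset], so several lists can alias one backing
+// array at different offsets.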
+type symbolList struct {
+ storage []uint16
+ offset int
+}
+
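+// symbolListGet returns element i of sl, indexed relative to sl.offset.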
+func symbolListGet(sl symbolList, i int) uint16 {
+ return sl.storage[i+sl.offset]
+}
+
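+// symbolListPut stores val as element i of sl, indexed relative to sl.offset.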
+func symbolListPut(sl symbolList, i int, val uint16) {
+ sl.storage[i+sl.offset] = val
+}
diff --git a/vendor/github.com/andybalholm/brotli/transform.go b/vendor/github.com/andybalholm/brotli/transform.go
new file mode 100644
index 0000000..d2c043a
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/transform.go
@@ -0,0 +1,641 @@
+package brotli
+
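+// Transform type codes for dictionary words: copy the word unchanged
+// (identity), omit its first or last 1-9 bytes, uppercase its first or all
+// characters, or apply one of the two shift variants.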
+const (
+ transformIdentity = 0
+ transformOmitLast1 = 1
+ transformOmitLast2 = 2
+ transformOmitLast3 = 3
+ transformOmitLast4 = 4
+ transformOmitLast5 = 5
+ transformOmitLast6 = 6
+ transformOmitLast7 = 7
+ transformOmitLast8 = 8
+ transformOmitLast9 = 9
+ transformUppercaseFirst = 10
+ transformUppercaseAll = 11
+ transformOmitFirst1 = 12
+ transformOmitFirst2 = 13
+ transformOmitFirst3 = 14
+ transformOmitFirst4 = 15
+ transformOmitFirst5 = 16
+ transformOmitFirst6 = 17
+ transformOmitFirst7 = 18
+ transformOmitFirst8 = 19
+ transformOmitFirst9 = 20
+ transformShiftFirst = 21
+	transformShiftAll = 22 + iota - 22 // still 22; keeps iota live so numTransformTypes below evaluates to 23
+ numTransformTypes
+)
+
+const transformsMaxCutOff = transformOmitLast9
+
+type transforms struct {
+ prefix_suffix_size uint16
+ prefix_suffix []byte
+ prefix_suffix_map []uint16
+ num_transforms uint32
+ transforms []byte
+ params []byte
+ cutOffTransforms [transformsMaxCutOff + 1]int16
+}
+
+func transformPrefixId(t *transforms, I int) byte {
+ return t.transforms[(I*3)+0]
+}
+
+func transformType(t *transforms, I int) byte {
+ return t.transforms[(I*3)+1]
+}
+
+func transformSuffixId(t *transforms, I int) byte {
+ return t.transforms[(I*3)+2]
+}
+
+func transformPrefix(t *transforms, I int) []byte {
+ return t.prefix_suffix[t.prefix_suffix_map[transformPrefixId(t, I)]:]
+}
+
+func transformSuffix(t *transforms, I int) []byte {
+ return t.prefix_suffix[t.prefix_suffix_map[transformSuffixId(t, I)]:]
+}
+
+/* RFC 7932 transforms string data */
+const kPrefixSuffix string = "\001 \002, \010 of the \004 of \002s \001.\005 and \004 " + "in \001\"\004 to \002\">\001\n\002. \001]\005 for \003 a \006 " + "that \001'\006 with \006 from \004 by \001(\006. T" + "he \004 on \004 as \004 is \004ing \002\n\t\001:\003ed " + "\002=\"\004 at \003ly \001,\002='\005.com/\007. This \005" + " not \003er \003al \004ful \004ive \005less \004es" + "t \004ize \002\xc2\xa0\004ous \005 the \002e \000"
+
+var kPrefixSuffixMap = [50]uint16{
+ 0x00,
+ 0x02,
+ 0x05,
+ 0x0E,
+ 0x13,
+ 0x16,
+ 0x18,
+ 0x1E,
+ 0x23,
+ 0x25,
+ 0x2A,
+ 0x2D,
+ 0x2F,
+ 0x32,
+ 0x34,
+ 0x3A,
+ 0x3E,
+ 0x45,
+ 0x47,
+ 0x4E,
+ 0x55,
+ 0x5A,
+ 0x5C,
+ 0x63,
+ 0x68,
+ 0x6D,
+ 0x72,
+ 0x77,
+ 0x7A,
+ 0x7C,
+ 0x80,
+ 0x83,
+ 0x88,
+ 0x8C,
+ 0x8E,
+ 0x91,
+ 0x97,
+ 0x9F,
+ 0xA5,
+ 0xA9,
+ 0xAD,
+ 0xB2,
+ 0xB7,
+ 0xBD,
+ 0xC2,
+ 0xC7,
+ 0xCA,
+ 0xCF,
+ 0xD5,
+ 0xD8,
+}
+
+/* RFC 7932 transforms */
+var kTransformsData = []byte{
+ 49,
+ transformIdentity,
+ 49,
+ 49,
+ transformIdentity,
+ 0,
+ 0,
+ transformIdentity,
+ 0,
+ 49,
+ transformOmitFirst1,
+ 49,
+ 49,
+ transformUppercaseFirst,
+ 0,
+ 49,
+ transformIdentity,
+ 47,
+ 0,
+ transformIdentity,
+ 49,
+ 4,
+ transformIdentity,
+ 0,
+ 49,
+ transformIdentity,
+ 3,
+ 49,
+ transformUppercaseFirst,
+ 49,
+ 49,
+ transformIdentity,
+ 6,
+ 49,
+ transformOmitFirst2,
+ 49,
+ 49,
+ transformOmitLast1,
+ 49,
+ 1,
+ transformIdentity,
+ 0,
+ 49,
+ transformIdentity,
+ 1,
+ 0,
+ transformUppercaseFirst,
+ 0,
+ 49,
+ transformIdentity,
+ 7,
+ 49,
+ transformIdentity,
+ 9,
+ 48,
+ transformIdentity,
+ 0,
+ 49,
+ transformIdentity,
+ 8,
+ 49,
+ transformIdentity,
+ 5,
+ 49,
+ transformIdentity,
+ 10,
+ 49,
+ transformIdentity,
+ 11,
+ 49,
+ transformOmitLast3,
+ 49,
+ 49,
+ transformIdentity,
+ 13,
+ 49,
+ transformIdentity,
+ 14,
+ 49,
+ transformOmitFirst3,
+ 49,
+ 49,
+ transformOmitLast2,
+ 49,
+ 49,
+ transformIdentity,
+ 15,
+ 49,
+ transformIdentity,
+ 16,
+ 0,
+ transformUppercaseFirst,
+ 49,
+ 49,
+ transformIdentity,
+ 12,
+ 5,
+ transformIdentity,
+ 49,
+ 0,
+ transformIdentity,
+ 1,
+ 49,
+ transformOmitFirst4,
+ 49,
+ 49,
+ transformIdentity,
+ 18,
+ 49,
+ transformIdentity,
+ 17,
+ 49,
+ transformIdentity,
+ 19,
+ 49,
+ transformIdentity,
+ 20,
+ 49,
+ transformOmitFirst5,
+ 49,
+ 49,
+ transformOmitFirst6,
+ 49,
+ 47,
+ transformIdentity,
+ 49,
+ 49,
+ transformOmitLast4,
+ 49,
+ 49,
+ transformIdentity,
+ 22,
+ 49,
+ transformUppercaseAll,
+ 49,
+ 49,
+ transformIdentity,
+ 23,
+ 49,
+ transformIdentity,
+ 24,
+ 49,
+ transformIdentity,
+ 25,
+ 49,
+ transformOmitLast7,
+ 49,
+ 49,
+ transformOmitLast1,
+ 26,
+ 49,
+ transformIdentity,
+ 27,
+ 49,
+ transformIdentity,
+ 28,
+ 0,
+ transformIdentity,
+ 12,
+ 49,
+ transformIdentity,
+ 29,
+ 49,
+ transformOmitFirst9,
+ 49,
+ 49,
+ transformOmitFirst7,
+ 49,
+ 49,
+ transformOmitLast6,
+ 49,
+ 49,
+ transformIdentity,
+ 21,
+ 49,
+ transformUppercaseFirst,
+ 1,
+ 49,
+ transformOmitLast8,
+ 49,
+ 49,
+ transformIdentity,
+ 31,
+ 49,
+ transformIdentity,
+ 32,
+ 47,
+ transformIdentity,
+ 3,
+ 49,
+ transformOmitLast5,
+ 49,
+ 49,
+ transformOmitLast9,
+ 49,
+ 0,
+ transformUppercaseFirst,
+ 1,
+ 49,
+ transformUppercaseFirst,
+ 8,
+ 5,
+ transformIdentity,
+ 21,
+ 49,
+ transformUppercaseAll,
+ 0,
+ 49,
+ transformUppercaseFirst,
+ 10,
+ 49,
+ transformIdentity,
+ 30,
+ 0,
+ transformIdentity,
+ 5,
+ 35,
+ transformIdentity,
+ 49,
+ 47,
+ transformIdentity,
+ 2,
+ 49,
+ transformUppercaseFirst,
+ 17,
+ 49,
+ transformIdentity,
+ 36,
+ 49,
+ transformIdentity,
+ 33,
+ 5,
+ transformIdentity,
+ 0,
+ 49,
+ transformUppercaseFirst,
+ 21,
+ 49,
+ transformUppercaseFirst,
+ 5,
+ 49,
+ transformIdentity,
+ 37,
+ 0,
+ transformIdentity,
+ 30,
+ 49,
+ transformIdentity,
+ 38,
+ 0,
+ transformUppercaseAll,
+ 0,
+ 49,
+ transformIdentity,
+ 39,
+ 0,
+ transformUppercaseAll,
+ 49,
+ 49,
+ transformIdentity,
+ 34,
+ 49,
+ transformUppercaseAll,
+ 8,
+ 49,
+ transformUppercaseFirst,
+ 12,
+ 0,
+ transformIdentity,
+ 21,
+ 49,
+ transformIdentity,
+ 40,
+ 0,
+ transformUppercaseFirst,
+ 12,
+ 49,
+ transformIdentity,
+ 41,
+ 49,
+ transformIdentity,
+ 42,
+ 49,
+ transformUppercaseAll,
+ 17,
+ 49,
+ transformIdentity,
+ 43,
+ 0,
+ transformUppercaseFirst,
+ 5,
+ 49,
+ transformUppercaseAll,
+ 10,
+ 0,
+ transformIdentity,
+ 34,
+ 49,
+ transformUppercaseFirst,
+ 33,
+ 49,
+ transformIdentity,
+ 44,
+ 49,
+ transformUppercaseAll,
+ 5,
+ 45,
+ transformIdentity,
+ 49,
+ 0,
+ transformIdentity,
+ 33,
+ 49,
+ transformUppercaseFirst,
+ 30,
+ 49,
+ transformUppercaseAll,
+ 30,
+ 49,
+ transformIdentity,
+ 46,
+ 49,
+ transformUppercaseAll,
+ 1,
+ 49,
+ transformUppercaseFirst,
+ 34,
+ 0,
+ transformUppercaseFirst,
+ 33,
+ 0,
+ transformUppercaseAll,
+ 30,
+ 0,
+ transformUppercaseAll,
+ 1,
+ 49,
+ transformUppercaseAll,
+ 33,
+ 49,
+ transformUppercaseAll,
+ 21,
+ 49,
+ transformUppercaseAll,
+ 12,
+ 0,
+ transformUppercaseAll,
+ 5,
+ 49,
+ transformUppercaseAll,
+ 34,
+ 0,
+ transformUppercaseAll,
+ 12,
+ 0,
+ transformUppercaseFirst,
+ 30,
+ 0,
+ transformUppercaseAll,
+ 34,
+ 0,
+ transformUppercaseFirst,
+ 34,
+}
+
+var kBrotliTransforms = transforms{
+ 217,
+ []byte(kPrefixSuffix),
+ kPrefixSuffixMap[:],
+ 121,
+ kTransformsData,
+ nil, /* no extra parameters */
+ [transformsMaxCutOff + 1]int16{0, 12, 27, 23, 42, 63, 56, 48, 59, 64},
+}
+
+func getTransforms() *transforms {
+ return &kBrotliTransforms
+}
+
+func toUpperCase(p []byte) int {
+ if p[0] < 0xC0 {
+ if p[0] >= 'a' && p[0] <= 'z' {
+ p[0] ^= 32
+ }
+
+ return 1
+ }
+
+ /* An overly simplified uppercasing model for UTF-8. */
+ if p[0] < 0xE0 {
+ p[1] ^= 32
+ return 2
+ }
+
+ /* An arbitrary transform for three byte characters. */
+ p[2] ^= 5
+
+ return 3
+}
+
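+/* shiftTransform adds the limited-sign-extended parameter to the Unicode
+   scalar value of the first rune of word, re-encoding it in place, and
+   returns the number of bytes that rune occupies. */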
+func shiftTransform(word []byte, word_len int, parameter uint16) int {
+ /* Limited sign extension: scalar < (1 << 24). */
+ var scalar uint32 = (uint32(parameter) & 0x7FFF) + (0x1000000 - (uint32(parameter) & 0x8000))
+ if word[0] < 0x80 {
+ /* 1-byte rune / 0sssssss / 7 bit scalar (ASCII). */
+ scalar += uint32(word[0])
+
+ word[0] = byte(scalar & 0x7F)
+ return 1
+ } else if word[0] < 0xC0 {
+ /* Continuation / 10AAAAAA. */
+ return 1
+ } else if word[0] < 0xE0 {
+ /* 2-byte rune / 110sssss AAssssss / 11 bit scalar. */
+ if word_len < 2 {
+ return 1
+ }
+ scalar += uint32(word[1]&0x3F | (word[0]&0x1F)<<6)
+ word[0] = byte(0xC0 | (scalar>>6)&0x1F)
+ word[1] = byte(uint32(word[1]&0xC0) | scalar&0x3F)
+ return 2
+ } else if word[0] < 0xF0 {
+ /* 3-byte rune / 1110ssss AAssssss BBssssss / 16 bit scalar. */
+ if word_len < 3 {
+ return word_len
+ }
+ scalar += uint32(word[2])&0x3F | uint32(word[1]&0x3F)<<6 | uint32(word[0]&0x0F)<<12
+ word[0] = byte(0xE0 | (scalar>>12)&0x0F)
+ word[1] = byte(uint32(word[1]&0xC0) | (scalar>>6)&0x3F)
+ word[2] = byte(uint32(word[2]&0xC0) | scalar&0x3F)
+ return 3
+ } else if word[0] < 0xF8 {
+ /* 4-byte rune / 11110sss AAssssss BBssssss CCssssss / 21 bit scalar. */
+ if word_len < 4 {
+ return word_len
+ }
+ scalar += uint32(word[3])&0x3F | uint32(word[2]&0x3F)<<6 | uint32(word[1]&0x3F)<<12 | uint32(word[0]&0x07)<<18
+ word[0] = byte(0xF0 | (scalar>>18)&0x07)
+ word[1] = byte(uint32(word[1]&0xC0) | (scalar>>12)&0x3F)
+ word[2] = byte(uint32(word[2]&0xC0) | (scalar>>6)&0x3F)
+ word[3] = byte(uint32(word[3]&0xC0) | scalar&0x3F)
+ return 4
+ }
+
+ return 1
+}
+
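+/* transformDictionaryWord applies transform number transform_idx from trans
+   to the first len bytes of word, writing prefix + transformed word + suffix
+   into dst and returning the number of bytes written. Transform 0, for
+   example, is the identity with an empty prefix and suffix. */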
+func transformDictionaryWord(dst []byte, word []byte, len int, trans *transforms, transform_idx int) int {
+ var idx int = 0
+ var prefix []byte = transformPrefix(trans, transform_idx)
+ var type_ byte = transformType(trans, transform_idx)
+ var suffix []byte = transformSuffix(trans, transform_idx)
+ {
+ var prefix_len int = int(prefix[0])
+ prefix = prefix[1:]
+ for {
+ tmp1 := prefix_len
+ prefix_len--
+ if tmp1 == 0 {
+ break
+ }
+ dst[idx] = prefix[0]
+ idx++
+ prefix = prefix[1:]
+ }
+ }
+ {
+ var t int = int(type_)
+ var i int = 0
+ if t <= transformOmitLast9 {
+ len -= t
+ } else if t >= transformOmitFirst1 && t <= transformOmitFirst9 {
+ var skip int = t - (transformOmitFirst1 - 1)
+ word = word[skip:]
+ len -= skip
+ }
+
+ for i < len {
+ dst[idx] = word[i]
+ idx++
+ i++
+ }
+ if t == transformUppercaseFirst {
+ toUpperCase(dst[idx-len:])
+ } else if t == transformUppercaseAll {
+ var uppercase []byte = dst
+ uppercase = uppercase[idx-len:]
+ for len > 0 {
+ var step int = toUpperCase(uppercase)
+ uppercase = uppercase[step:]
+ len -= step
+ }
+ } else if t == transformShiftFirst {
+ var param uint16 = uint16(trans.params[transform_idx*2]) + uint16(trans.params[transform_idx*2+1])<<8
+ shiftTransform(dst[idx-len:], int(len), param)
+ } else if t == transformShiftAll {
+ var param uint16 = uint16(trans.params[transform_idx*2]) + uint16(trans.params[transform_idx*2+1])<<8
+ var shift []byte = dst
+ shift = shift[idx-len:]
+ for len > 0 {
+ var step int = shiftTransform(shift, int(len), param)
+ shift = shift[step:]
+ len -= step
+ }
+ }
+ }
+ {
+ var suffix_len int = int(suffix[0])
+ suffix = suffix[1:]
+ for {
+ tmp2 := suffix_len
+ suffix_len--
+ if tmp2 == 0 {
+ break
+ }
+ dst[idx] = suffix[0]
+ idx++
+ suffix = suffix[1:]
+ }
+ return idx
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/utf8_util.go b/vendor/github.com/andybalholm/brotli/utf8_util.go
new file mode 100644
index 0000000..3244247
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/utf8_util.go
@@ -0,0 +1,70 @@
+package brotli
+
+/* Copyright 2013 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Heuristics for deciding about the UTF8-ness of strings. */
+
+const kMinUTF8Ratio float64 = 0.75
+
+/* Parses the next symbol from input as UTF8 and stores it in *symbol,
+   returning the number of bytes consumed. Sequences that are not valid
+   UTF8 yield a marker symbol above the UTF8 code space and consume one
+   byte. */
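+/* For example, the bytes 0xC3 0xA9 ("é") decode to *symbol == 0xE9 with a
+   return value of 2. */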
+func parseAsUTF8(symbol *int, input []byte, size uint) uint {
+ /* ASCII */
+ if input[0]&0x80 == 0 {
+ *symbol = int(input[0])
+ if *symbol > 0 {
+ return 1
+ }
+ }
+
+ /* 2-byte UTF8 */
+ if size > 1 && input[0]&0xE0 == 0xC0 && input[1]&0xC0 == 0x80 {
+ *symbol = (int(input[0])&0x1F)<<6 | int(input[1])&0x3F
+ if *symbol > 0x7F {
+ return 2
+ }
+ }
+
+	/* 3-byte UTF8 */
+ if size > 2 && input[0]&0xF0 == 0xE0 && input[1]&0xC0 == 0x80 && input[2]&0xC0 == 0x80 {
+ *symbol = (int(input[0])&0x0F)<<12 | (int(input[1])&0x3F)<<6 | int(input[2])&0x3F
+ if *symbol > 0x7FF {
+ return 3
+ }
+ }
+
+	/* 4-byte UTF8 */
+ if size > 3 && input[0]&0xF8 == 0xF0 && input[1]&0xC0 == 0x80 && input[2]&0xC0 == 0x80 && input[3]&0xC0 == 0x80 {
+ *symbol = (int(input[0])&0x07)<<18 | (int(input[1])&0x3F)<<12 | (int(input[2])&0x3F)<<6 | int(input[3])&0x3F
+ if *symbol > 0xFFFF && *symbol <= 0x10FFFF {
+ return 4
+ }
+ }
+
+ /* Not UTF8, emit a special symbol above the UTF8-code space */
+ *symbol = 0x110000 | int(input[0])
+
+ return 1
+}
+
+/* Returns true if at least min_fraction of the bytes between pos and
+   pos+length in the (data, mask) ring-buffer are UTF8-encoded. */
+func isMostlyUTF8(data []byte, pos uint, mask uint, length uint, min_fraction float64) bool {
+ var size_utf8 uint = 0
+ var i uint = 0
+ for i < length {
+ var symbol int
+ current_data := data[(pos+i)&mask:]
+ var bytes_read uint = parseAsUTF8(&symbol, current_data, length-i)
+ i += bytes_read
+ if symbol < 0x110000 {
+ size_utf8 += bytes_read
+ }
+ }
+
+ return float64(size_utf8) > min_fraction*float64(length)
+}
diff --git a/vendor/github.com/andybalholm/brotli/util.go b/vendor/github.com/andybalholm/brotli/util.go
new file mode 100644
index 0000000..a84553a
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/util.go
@@ -0,0 +1,7 @@
+package brotli
+
+func assert(cond bool) {
+ if !cond {
+ panic("assertion failure")
+ }
+}
diff --git a/vendor/github.com/andybalholm/brotli/write_bits.go b/vendor/github.com/andybalholm/brotli/write_bits.go
new file mode 100644
index 0000000..8729901
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/write_bits.go
@@ -0,0 +1,52 @@
+package brotli
+
+import "encoding/binary"
+
+/* Copyright 2010 Google Inc. All Rights Reserved.
+
+ Distributed under MIT license.
+ See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+*/
+
+/* Write bits into a byte array. */
+
+/* This function writes bits into bytes in increasing addresses, and within
+ a byte least-significant-bit first.
+
+   The function can write up to 56 bits in one go.
+ Example: let's assume that 3 bits (Rs below) have been written already:
+
+ BYTE-0 BYTE+1 BYTE+2
+
+ 0000 0RRR 0000 0000 0000 0000
+
+   Now, we could write 5 or fewer bits in MSB by just shifting by 3
+   and OR'ing into BYTE-0.
+
+ For n bits, we take the last 5 bits, OR that with high bits in BYTE-0,
+ and locate the rest in BYTE+1, BYTE+2, etc. */
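+/* Example: with *pos == 3, writeBits(3, 0x5, &pos, array) ORs 0x5<<3 (0x28)
+   into array[0] and advances *pos to 6. (A sketch of the behavior; the slice
+   must have at least 8 bytes available at the write position, since the
+   store is a 64-bit little-endian write.) */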
+func writeBits(n_bits uint, bits uint64, pos *uint, array []byte) {
+ /* This branch of the code can write up to 56 bits at a time,
+ 7 bits are lost by being perhaps already in *p and at least
+ 1 bit is needed to initialize the bit-stream ahead (i.e. if 7
+ bits are in *p and we write 57 bits, then the next write will
+ access a byte that was never initialized). */
+ p := array[*pos>>3:]
+ v := uint64(p[0])
+ v |= bits << (*pos & 7)
+ binary.LittleEndian.PutUint64(p, v)
+ *pos += n_bits
+}
+
+func writeSingleBit(bit bool, pos *uint, array []byte) {
+ if bit {
+ writeBits(1, 1, pos, array)
+ } else {
+ writeBits(1, 0, pos, array)
+ }
+}
+
+func writeBitsPrepareStorage(pos uint, array []byte) {
+ assert(pos&7 == 0)
+ array[pos>>3] = 0
+}
diff --git a/vendor/github.com/andybalholm/brotli/writer.go b/vendor/github.com/andybalholm/brotli/writer.go
new file mode 100644
index 0000000..39feaef
--- /dev/null
+++ b/vendor/github.com/andybalholm/brotli/writer.go
@@ -0,0 +1,119 @@
+package brotli
+
+import (
+ "errors"
+ "io"
+)
+
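+// Compression levels accepted by NewWriterLevel and WriterOptions.Quality.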
+const (
+ BestSpeed = 0
+ BestCompression = 11
+ DefaultCompression = 6
+)
+
+// WriterOptions configures Writer.
+type WriterOptions struct {
+ // Quality controls the compression-speed vs compression-density trade-offs.
+ // The higher the quality, the slower the compression. Range is 0 to 11.
+ Quality int
+ // LGWin is the base 2 logarithm of the sliding window size.
+ // Range is 10 to 24. 0 indicates automatic configuration based on Quality.
+ LGWin int
+}
+
+var (
+ errEncode = errors.New("brotli: encode error")
+ errWriterClosed = errors.New("brotli: Writer is closed")
+)
+
+// NewWriter returns a new Writer whose writes are compressed and written to dst.
+// It is the caller's responsibility to call Close on the Writer when done.
+// Writes may be buffered and not flushed until Close.
+func NewWriter(dst io.Writer) *Writer {
+ return NewWriterLevel(dst, DefaultCompression)
+}
+
+// NewWriterLevel is like NewWriter but specifies the compression level instead
+// of assuming DefaultCompression.
+// The compression level can be DefaultCompression or any integer value between
+// BestSpeed and BestCompression inclusive.
+func NewWriterLevel(dst io.Writer, level int) *Writer {
+ return NewWriterOptions(dst, WriterOptions{
+ Quality: level,
+ })
+}
+
+// NewWriterOptions is like NewWriter but specifies WriterOptions.
+func NewWriterOptions(dst io.Writer, options WriterOptions) *Writer {
+ w := new(Writer)
+ w.options = options
+ w.Reset(dst)
+ return w
+}
+
+// Reset discards the Writer's state and makes it equivalent to a new Writer,
+// as returned by NewWriter or NewWriterLevel, but writing to dst instead.
+// This permits reusing a Writer rather than allocating a new one.
+func (w *Writer) Reset(dst io.Writer) {
+ encoderInitState(w)
+ w.params.quality = w.options.Quality
+ if w.options.LGWin > 0 {
+ w.params.lgwin = uint(w.options.LGWin)
+ }
+ w.dst = dst
+ w.err = nil
+}
+
+func (w *Writer) writeChunk(p []byte, op int) (n int, err error) {
+ if w.dst == nil {
+ return 0, errWriterClosed
+ }
+ if w.err != nil {
+ return 0, w.err
+ }
+
+ for {
+ availableIn := uint(len(p))
+ nextIn := p
+ success := encoderCompressStream(w, op, &availableIn, &nextIn)
+ bytesConsumed := len(p) - int(availableIn)
+ p = p[bytesConsumed:]
+ n += bytesConsumed
+ if !success {
+ return n, errEncode
+ }
+
+ if len(p) == 0 || w.err != nil {
+ return n, w.err
+ }
+ }
+}
+
+// Flush outputs encoded data for all input provided to Write. The resulting
+// output can be decoded to match all input before Flush, but the stream is
+// not yet complete until after Close.
+// Flush has a negative impact on compression.
+func (w *Writer) Flush() error {
+ _, err := w.writeChunk(nil, operationFlush)
+ return err
+}
+
+// Close flushes any pending data to the underlying writer and finishes the stream.
+func (w *Writer) Close() error {
+ // If stream is already closed, it is reported by `writeChunk`.
+ _, err := w.writeChunk(nil, operationFinish)
+ w.dst = nil
+ return err
+}
+
+// Write implements io.Writer. Flush or Close must be called to ensure that the
+// encoded bytes are actually flushed to the underlying Writer.
+func (w *Writer) Write(p []byte) (n int, err error) {
+ return w.writeChunk(p, operationProcess)
+}
+
+type nopCloser struct {
+ io.Writer
+}
+
+func (nopCloser) Close() error { return nil }
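+
+// Example usage (a minimal sketch; the data and quality level are illustrative):
+//
+//	var buf bytes.Buffer
+//	w := NewWriterOptions(&buf, WriterOptions{Quality: 5})
+//	if _, err := w.Write([]byte("hello, brotli")); err != nil {
+//		// handle the error
+//	}
+//	if err := w.Close(); err != nil {
+//		// handle the error
+//	}
+//	// buf now contains the complete compressed stream.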
diff --git a/vendor/github.com/antlr4-go/antlr/v4/.gitignore b/vendor/github.com/antlr4-go/antlr/v4/.gitignore
new file mode 100644
index 0000000..38ea34f
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/.gitignore
@@ -0,0 +1,18 @@
+### Go template
+
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+
+# Go workspace file
+go.work
+
+# No Goland stuff in this repo
+.idea
diff --git a/vendor/github.com/antlr4-go/antlr/v4/LICENSE b/vendor/github.com/antlr4-go/antlr/v4/LICENSE
new file mode 100644
index 0000000..a22292e
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/LICENSE
@@ -0,0 +1,28 @@
+Copyright (c) 2012-2023 The ANTLR Project. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+3. Neither name of copyright holders nor the names of its contributors
+may be used to endorse or promote products derived from this software
+without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/antlr4-go/antlr/v4/README.md b/vendor/github.com/antlr4-go/antlr/v4/README.md
new file mode 100644
index 0000000..03e5b83
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/README.md
@@ -0,0 +1,54 @@
+[![Go Report Card](https://goreportcard.com/badge/github.com/antlr4-go/antlr?style=flat-square)](https://goreportcard.com/report/github.com/antlr4-go/antlr)
+[![PkgGoDev](https://pkg.go.dev/badge/github.com/antlr4-go/antlr)](https://pkg.go.dev/github.com/antlr4-go/antlr)
+[![Release](https://img.shields.io/github/v/release/antlr4-go/antlr?sort=semver&style=flat-square)](https://github.com/antlr4-go/antlr/releases/latest)
+[![Release](https://img.shields.io/github/go-mod/go-version/antlr4-go/antlr?style=flat-square)](https://github.com/antlr4-go/antlr/releases/latest)
+[![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg?style=flat-square)](https://github.com/antlr4-go/antlr/commit-activity)
+[![License](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
+[![GitHub stars](https://img.shields.io/github/stars/antlr4-go/antlr?style=flat-square&label=Star&maxAge=2592000)](https://GitHub.com/Naereen/StrapDown.js/stargazers/)
+# ANTLR4 Go Runtime Module Repo
+
+IMPORTANT: Please submit PRs via a clone of the https://github.com/antlr/antlr4 repo, and not here.
+
+ - Do not submit PRs or any change requests to this repo
+ - This repo is read only and is updated by the ANTLR team to create a new release of the Go Runtime for ANTLR
+ - This repo contains the Go runtime that your generated projects should import
+
+## Introduction
+
+This repo contains the official modules for the Go Runtime for ANTLR. It is a copy of the runtime maintained
+at: https://github.com/antlr/antlr4/tree/master/runtime/Go/antlr and is automatically updated by the ANTLR team to create
+the official Go runtime release only. No development work is carried out in this repo and PRs are not accepted here.
+
+The dev branch of this repo is kept in sync with the dev branch of the main ANTLR repo and is updated periodically.
+
+### Why?
+
+The `go get` command is unable to retrieve the Go runtime when it is embedded so
+deeply in the main repo. A `go get` against the `antlr/antlr4` repo, while retrieving the correct source code for the runtime,
+does not correctly resolve tags and will create a reference in your `go.mod` file that is unclear, will not upgrade smoothly and
+causes confusion.
+
+For instance, the current Go runtime release, which is tagged with v4.13.0 in `antlr/antlr4`, is retrieved by go get as:
+
+```sh
+require (
+ github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230219212500-1f9a474cc2dc
+)
+```
+
+Where you would expect to see:
+
+```sh
+require (
+ github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.13.0
+)
+```
+
+The decision was taken to create a separate org and repo to hold the official Go runtime for ANTLR,
+from which users can expect `go get` to behave as expected.
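+
+For example, fetching the runtime from this repo resolves to a clean semantic version:
+
+```sh
+go get github.com/antlr4-go/antlr/v4
+```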
+
+
+# Documentation
+Please read the official documentation at: https://github.com/antlr/antlr4/blob/master/doc/index.md for tips on
+migrating existing projects to use the new module location and for information on how to use the Go runtime in
+general.
diff --git a/vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go b/vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go
new file mode 100644
index 0000000..3bb4fd7
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go
@@ -0,0 +1,102 @@
+/*
+Package antlr implements the Go version of the ANTLR 4 runtime.
+
+# The ANTLR Tool
+
+ANTLR (ANother Tool for Language Recognition) is a powerful parser generator for reading, processing, executing,
+or translating structured text or binary files. It's widely used to build languages, tools, and frameworks.
+From a grammar, ANTLR generates a parser that can build parse trees and also generates a listener interface
+(or visitor) that makes it easy to respond to the recognition of phrases of interest.
+
+# Go Runtime
+
+At version 4.11.x and prior, the Go runtime was not properly versioned for go modules. After this point, the runtime
+source code to be imported was held in the `runtime/Go/antlr/v4` directory, and the go.mod file was updated to reflect the version of
+ANTLR4 that it is compatible with (i.e. it uses the /v4 path).
+
+However, this was found to be problematic, as it meant that with the runtime embedded so far underneath the root
+of the repo, the `go get` and related commands could not properly resolve the location of the go runtime source code.
+This meant that the reference to the runtime in your `go.mod` file would refer to the correct source code, but would not
+list the release tag such as @4.12.0 - this was confusing, to say the least.
+
+As of 4.12.1, the runtime is now available as a go module in its own repo, and can be imported as `github.com/antlr4-go/antlr/v4`
+(the go get command should also be used with this path). See the main documentation for the ANTLR4 project for more information,
+which is available at [ANTLR docs]. The documentation for using the Go runtime is available at [Go runtime docs].
+
+This means that if you are using the source code without modules, you should also use the source code in the [new repo],
+though we highly recommend that you use go modules, as they are now idiomatic for Go.
+
+I am aware that this change will prove Hyrum's Law, but am prepared to live with it for the common good.
+
+Go runtime author: [Jim Idle] jimi@idle.ws
+
+# Code Generation
+
+ANTLR supports the generation of code in a number of [target languages], and the generated code is supported by a
+runtime library, written specifically to support the generated code in the target language. This library is the
+runtime for the Go target.
+
+To generate code for the go target, it is generally recommended to place the source grammar files in a package of
+their own, and use the `.sh` script method of generating code, using the go generate directive. In that same directory
+it is usual, though not required, to place the antlr tool that should be used to generate the code. That does mean
+that the antlr tool JAR file will be checked in to your source code control though, so you are, of course, free to use any other
+way of specifying the version of the ANTLR tool to use, such as aliasing in `.zshrc` or equivalent, or a profile in
+your IDE, or configuration in your CI system. Checking in the jar does mean that it is easy to reproduce the build as
+it was at any point in its history.
+
+Here is a general/recommended template for an ANTLR based recognizer in Go:
+
+ .
+ ├── parser
+ │ ├── mygrammar.g4
+ │ ├── antlr-4.12.1-complete.jar
+ │ ├── generate.go
+ │ └── generate.sh
+ ├── parsing - generated code goes here
+ │ └── error_listeners.go
+ ├── go.mod
+ ├── go.sum
+ ├── main.go
+ └── main_test.go
+
+Make sure that the package statement in your grammar file(s) reflects the go package the generated code will exist in.
+
+The generate.go file then looks like this:
+
+ package parser
+
+ //go:generate ./generate.sh
+
+And the generate.sh file will look similar to this:
+
+ #!/bin/sh
+
+	alias antlr4='java -Xmx500M -cp "./antlr-4.12.1-complete.jar:$CLASSPATH" org.antlr.v4.Tool'
+ antlr4 -Dlanguage=Go -no-visitor -package parsing *.g4
+
+depending on whether you want visitors or listeners or any other ANTLR options. Note that another option here
+is to generate the code into a package of your own choosing.
+
+From the command line at the root of your source package (location of go.mod) you can then simply issue the command:
+
+ go generate ./...
+
+Which will generate the code for the parser, and place it in the parsing package. You can then use the generated code
+by importing the parsing package.
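+
+A main.go in a hypothetical module named myproject would then use the generated code by importing it:
+
+	import "myproject/parsing"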
+
+There are no hard and fast rules on this. It is just a recommendation. You can generate the code in any way and to anywhere you like.
+
+# Copyright Notice
+
+Copyright (c) 2012-2023 The ANTLR Project. All rights reserved.
+
+Use of this file is governed by the BSD 3-clause license, which can be found in the [LICENSE.txt] file in the project root.
+
+[target languages]: https://github.com/antlr/antlr4/tree/master/runtime
+[LICENSE.txt]: https://github.com/antlr/antlr4/blob/master/LICENSE.txt
+[ANTLR docs]: https://github.com/antlr/antlr4/blob/master/doc/index.md
+[new repo]: https://github.com/antlr4-go/antlr
+[Jim Idle]: https://github.com/jimidle
+[Go runtime docs]: https://github.com/antlr/antlr4/blob/master/doc/go-target.md
+*/
+package antlr
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn.go b/vendor/github.com/antlr4-go/antlr/v4/atn.go
new file mode 100644
index 0000000..cdeefed
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/atn.go
@@ -0,0 +1,179 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import "sync"
+
+// ATNInvalidAltNumber is used to represent an ALT number that has yet to be calculated or
+// which is invalid for a particular struct such as [*antlr.BaseRuleContext]
+var ATNInvalidAltNumber int
+
+// ATN represents an “[Augmented Transition Network]”, though in general ANTLR uses the term
+// “Augmented Recursive Transition Network”; there are also some descriptions of “[Recursive Transition Network]”
+// in existence.
+//
+// ATNs represent the main networks in the system and are serialized by the code generator and support [ALL(*)].
+//
+// [Augmented Transition Network]: https://en.wikipedia.org/wiki/Augmented_transition_network
+// [ALL(*)]: https://www.antlr.org/papers/allstar-techreport.pdf
+// [Recursive Transition Network]: https://en.wikipedia.org/wiki/Recursive_transition_network
+type ATN struct {
+
+ // DecisionToState is the decision points for all rules, sub-rules, optional
+ // blocks, ()+, ()*, etc. Each sub-rule/rule is a decision point, and we must track them, so we
+ // can go back later and build DFA predictors for them. This includes
+ // all the rules, sub-rules, optional blocks, ()+, ()* etc...
+ DecisionToState []DecisionState
+
+ // grammarType is the ATN type and is used for deserializing ATNs from strings.
+ grammarType int
+
+ // lexerActions is referenced by action transitions in the ATN for lexer ATNs.
+ lexerActions []LexerAction
+
+ // maxTokenType is the maximum value for any symbol recognized by a transition in the ATN.
+ maxTokenType int
+
+ modeNameToStartState map[string]*TokensStartState
+
+ modeToStartState []*TokensStartState
+
+ // ruleToStartState maps from rule index to starting state number.
+ ruleToStartState []*RuleStartState
+
+ // ruleToStopState maps from rule index to stop state number.
+ ruleToStopState []*RuleStopState
+
+ // ruleToTokenType maps the rule index to the resulting token type for lexer
+ // ATNs. For parser ATNs, it maps the rule index to the generated bypass token
+ // type if ATNDeserializationOptions.isGenerateRuleBypassTransitions was
+ // specified, and otherwise is nil.
+ ruleToTokenType []int
+
+	// states is a list of all states in the ATN, ordered by state number.
+	states []ATNState
+
+ mu sync.Mutex
+ stateMu sync.RWMutex
+ edgeMu sync.RWMutex
+}
+
+// NewATN returns a new ATN struct representing the given grammarType and is used
+// for runtime deserialization of ATNs from the code generated by the ANTLR tool
+func NewATN(grammarType int, maxTokenType int) *ATN {
+ return &ATN{
+ grammarType: grammarType,
+ maxTokenType: maxTokenType,
+ modeNameToStartState: make(map[string]*TokensStartState),
+ }
+}
+
+// NextTokensInContext computes and returns the set of valid tokens that can occur starting
+// in state s. If ctx is nil, the set of tokens will not include what can follow
+// the rule surrounding s. In other words, the set will be restricted to tokens
+// reachable staying within the rule of s.
+func (a *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
+ return NewLL1Analyzer(a).Look(s, nil, ctx)
+}
+
+// NextTokensNoContext computes and returns the set of valid tokens that can occur starting
+// in state s and staying in same rule. [antlr.Token.EPSILON] is in set if we reach end of
+// rule.
+func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
+ a.mu.Lock()
+ defer a.mu.Unlock()
+ iset := s.GetNextTokenWithinRule()
+ if iset == nil {
+ iset = a.NextTokensInContext(s, nil)
+ iset.readOnly = true
+ s.SetNextTokenWithinRule(iset)
+ }
+ return iset
+}
+
+// NextTokens computes and returns the set of valid tokens starting in state s, by
+// calling either [NextTokensNoContext] (ctx == nil) or [NextTokensInContext] (ctx != nil).
+func (a *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
+ if ctx == nil {
+ return a.NextTokensNoContext(s)
+ }
+
+ return a.NextTokensInContext(s, ctx)
+}
+
+func (a *ATN) addState(state ATNState) {
+ if state != nil {
+ state.SetATN(a)
+ state.SetStateNumber(len(a.states))
+ }
+
+ a.states = append(a.states, state)
+}
+
+func (a *ATN) removeState(state ATNState) {
+ a.states[state.GetStateNumber()] = nil // Just free the memory; don't shift states in the slice
+}
+
+func (a *ATN) defineDecisionState(s DecisionState) int {
+ a.DecisionToState = append(a.DecisionToState, s)
+ s.setDecision(len(a.DecisionToState) - 1)
+
+ return s.getDecision()
+}
+
+func (a *ATN) getDecisionState(decision int) DecisionState {
+ if len(a.DecisionToState) == 0 {
+ return nil
+ }
+
+ return a.DecisionToState[decision]
+}
+
+// getExpectedTokens computes the set of input symbols which could follow ATN
+// state number stateNumber in the specified full parse context ctx and returns
+// the set of potentially valid input symbols which could follow the specified
+// state in the specified context. This method considers the complete parser
+// context, but does not evaluate semantic predicates (i.e. all predicates
+// encountered during the calculation are assumed true). If a path in the ATN
+// exists from the starting state to the RuleStopState of the outermost context
+// without Matching any symbols, Token.EOF is added to the returned set.
+//
+// A nil ctx defaults to ParserRuleContext.EMPTY.
+//
+// It panics if the ATN does not contain state stateNumber.
+func (a *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSet {
+ if stateNumber < 0 || stateNumber >= len(a.states) {
+ panic("Invalid state number.")
+ }
+
+ s := a.states[stateNumber]
+ following := a.NextTokens(s, nil)
+
+ if !following.contains(TokenEpsilon) {
+ return following
+ }
+
+ expected := NewIntervalSet()
+
+ expected.addSet(following)
+ expected.removeOne(TokenEpsilon)
+
+ for ctx != nil && ctx.GetInvokingState() >= 0 && following.contains(TokenEpsilon) {
+ invokingState := a.states[ctx.GetInvokingState()]
+ rt := invokingState.GetTransitions()[0]
+
+ following = a.NextTokens(rt.(*RuleTransition).followState, nil)
+ expected.addSet(following)
+ expected.removeOne(TokenEpsilon)
+ ctx = ctx.GetParent().(RuleContext)
+ }
+
+ if following.contains(TokenEpsilon) {
+ expected.addOne(TokenEOF)
+ }
+
+ return expected
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_config.go b/vendor/github.com/antlr4-go/antlr/v4/atn_config.go
new file mode 100644
index 0000000..a83f25d
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/atn_config.go
@@ -0,0 +1,335 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+)
+
+const (
+ lexerConfig = iota // Indicates that this ATNConfig is for a lexer
+ parserConfig // Indicates that this ATNConfig is for a parser
+)
+
+// ATNConfig is a tuple: (ATN state, predicted alt, syntactic, semantic
+// context). The syntactic context is a graph-structured stack node whose
+// path(s) to the root is the rule invocation(s) chain used to arrive in the
+// state. The semantic context is the tree of semantic predicates encountered
+// before reaching an ATN state.
+type ATNConfig struct {
+ precedenceFilterSuppressed bool
+ state ATNState
+ alt int
+ context *PredictionContext
+ semanticContext SemanticContext
+ reachesIntoOuterContext int
+ cType int // lexerConfig or parserConfig
+ lexerActionExecutor *LexerActionExecutor
+ passedThroughNonGreedyDecision bool
+}
+
+// NewATNConfig6 creates a new ATNConfig instance given a state, alt and context only
+func NewATNConfig6(state ATNState, alt int, context *PredictionContext) *ATNConfig {
+ return NewATNConfig5(state, alt, context, SemanticContextNone)
+}
+
+// NewATNConfig5 creates a new ATNConfig instance given a state, alt, context and semantic context
+func NewATNConfig5(state ATNState, alt int, context *PredictionContext, semanticContext SemanticContext) *ATNConfig {
+ if semanticContext == nil {
+ panic("semanticContext cannot be nil") // TODO: Necessary?
+ }
+
+ pac := &ATNConfig{}
+ pac.state = state
+ pac.alt = alt
+ pac.context = context
+ pac.semanticContext = semanticContext
+ pac.cType = parserConfig
+ return pac
+}
+
+// NewATNConfig4 creates a new ATNConfig instance given an existing config, and a state only
+func NewATNConfig4(c *ATNConfig, state ATNState) *ATNConfig {
+ return NewATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
+}
+
+// NewATNConfig3 creates a new ATNConfig instance given an existing config, a state and a semantic context
+func NewATNConfig3(c *ATNConfig, state ATNState, semanticContext SemanticContext) *ATNConfig {
+ return NewATNConfig(c, state, c.GetContext(), semanticContext)
+}
+
+// NewATNConfig2 creates a new ATNConfig instance given an existing config, and a context only
+func NewATNConfig2(c *ATNConfig, semanticContext SemanticContext) *ATNConfig {
+ return NewATNConfig(c, c.GetState(), c.GetContext(), semanticContext)
+}
+
+// NewATNConfig1 creates a new ATNConfig instance given an existing config, a state, and a context only
+func NewATNConfig1(c *ATNConfig, state ATNState, context *PredictionContext) *ATNConfig {
+ return NewATNConfig(c, state, context, c.GetSemanticContext())
+}
+
+// NewATNConfig creates a new ATNConfig instance given an existing config, a state, a context and a semantic context, other 'constructors'
+// are just wrappers around this one.
+func NewATNConfig(c *ATNConfig, state ATNState, context *PredictionContext, semanticContext SemanticContext) *ATNConfig {
+ if semanticContext == nil {
+ panic("semanticContext cannot be nil") // TODO: Remove this - probably put here for some bug that is now fixed
+ }
+ b := &ATNConfig{}
+ b.InitATNConfig(c, state, c.GetAlt(), context, semanticContext)
+ b.cType = parserConfig
+ return b
+}
+
+func (a *ATNConfig) InitATNConfig(c *ATNConfig, state ATNState, alt int, context *PredictionContext, semanticContext SemanticContext) {
+
+ a.state = state
+ a.alt = alt
+ a.context = context
+ a.semanticContext = semanticContext
+ a.reachesIntoOuterContext = c.GetReachesIntoOuterContext()
+ a.precedenceFilterSuppressed = c.getPrecedenceFilterSuppressed()
+}
+
+func (a *ATNConfig) getPrecedenceFilterSuppressed() bool {
+ return a.precedenceFilterSuppressed
+}
+
+func (a *ATNConfig) setPrecedenceFilterSuppressed(v bool) {
+ a.precedenceFilterSuppressed = v
+}
+
+// GetState returns the ATN state associated with this configuration
+func (a *ATNConfig) GetState() ATNState {
+ return a.state
+}
+
+// GetAlt returns the alternative associated with this configuration
+func (a *ATNConfig) GetAlt() int {
+ return a.alt
+}
+
+// SetContext sets the rule invocation stack associated with this configuration
+func (a *ATNConfig) SetContext(v *PredictionContext) {
+ a.context = v
+}
+
+// GetContext returns the rule invocation stack associated with this configuration
+func (a *ATNConfig) GetContext() *PredictionContext {
+ return a.context
+}
+
+// GetSemanticContext returns the semantic context associated with this configuration
+func (a *ATNConfig) GetSemanticContext() SemanticContext {
+ return a.semanticContext
+}
+
+// GetReachesIntoOuterContext returns the count of references to an outer context from this configuration
+func (a *ATNConfig) GetReachesIntoOuterContext() int {
+ return a.reachesIntoOuterContext
+}
+
+// SetReachesIntoOuterContext sets the count of references to an outer context from this configuration
+func (a *ATNConfig) SetReachesIntoOuterContext(v int) {
+ a.reachesIntoOuterContext = v
+}
+
+// Equals is the default comparison function for an ATNConfig when no specialist implementation is required
+// for a collection.
+//
+// An ATN configuration is equal to another if both have the same state, they
+// predict the same alternative, and syntactic/semantic contexts are the same.
+func (a *ATNConfig) Equals(o Collectable[*ATNConfig]) bool {
+ switch a.cType {
+ case lexerConfig:
+ return a.LEquals(o)
+ case parserConfig:
+ return a.PEquals(o)
+ default:
+ panic("Invalid ATNConfig type")
+ }
+}
+
+// PEquals is the default comparison function for a Parser ATNConfig when no specialist implementation is required
+// for a collection.
+//
+// An ATN configuration is equal to another if both have the same state, they
+// predict the same alternative, and syntactic/semantic contexts are the same.
+func (a *ATNConfig) PEquals(o Collectable[*ATNConfig]) bool {
+ var other, ok = o.(*ATNConfig)
+
+ if !ok {
+ return false
+ }
+ if a == other {
+ return true
+ } else if other == nil {
+ return false
+ }
+
+ var equal bool
+
+ if a.context == nil {
+ equal = other.context == nil
+ } else {
+ equal = a.context.Equals(other.context)
+ }
+
+ var (
+ nums = a.state.GetStateNumber() == other.state.GetStateNumber()
+ alts = a.alt == other.alt
+ cons = a.semanticContext.Equals(other.semanticContext)
+ sups = a.precedenceFilterSuppressed == other.precedenceFilterSuppressed
+ )
+
+ return nums && alts && cons && sups && equal
+}
+
+// Hash is the default hash function for a parser ATNConfig, when no specialist hash function
+// is required for a collection
+func (a *ATNConfig) Hash() int {
+ switch a.cType {
+ case lexerConfig:
+ return a.LHash()
+ case parserConfig:
+ return a.PHash()
+ default:
+ panic("Invalid ATNConfig type")
+ }
+}
+
+// PHash is the default hash function for a parser ATNConfig, when no specialist hash function
+// is required for a collection
+func (a *ATNConfig) PHash() int {
+ var c int
+ if a.context != nil {
+ c = a.context.Hash()
+ }
+
+ h := murmurInit(7)
+ h = murmurUpdate(h, a.state.GetStateNumber())
+ h = murmurUpdate(h, a.alt)
+ h = murmurUpdate(h, c)
+ h = murmurUpdate(h, a.semanticContext.Hash())
+ return murmurFinish(h, 4)
+}
+
+// String returns a string representation of the ATNConfig, usually used for debugging purposes
+func (a *ATNConfig) String() string {
+ var s1, s2, s3 string
+
+ if a.context != nil {
+ s1 = ",[" + fmt.Sprint(a.context) + "]"
+ }
+
+ if a.semanticContext != SemanticContextNone {
+ s2 = "," + fmt.Sprint(a.semanticContext)
+ }
+
+ if a.reachesIntoOuterContext > 0 {
+ s3 = ",up=" + fmt.Sprint(a.reachesIntoOuterContext)
+ }
+
+ return fmt.Sprintf("(%v,%v%v%v%v)", a.state, a.alt, s1, s2, s3)
+}
+
+func NewLexerATNConfig6(state ATNState, alt int, context *PredictionContext) *ATNConfig {
+ lac := &ATNConfig{}
+ lac.state = state
+ lac.alt = alt
+ lac.context = context
+ lac.semanticContext = SemanticContextNone
+ lac.cType = lexerConfig
+ return lac
+}
+
+func NewLexerATNConfig4(c *ATNConfig, state ATNState) *ATNConfig {
+ lac := &ATNConfig{}
+ lac.lexerActionExecutor = c.lexerActionExecutor
+ lac.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
+ lac.InitATNConfig(c, state, c.GetAlt(), c.GetContext(), c.GetSemanticContext())
+ lac.cType = lexerConfig
+ return lac
+}
+
+func NewLexerATNConfig3(c *ATNConfig, state ATNState, lexerActionExecutor *LexerActionExecutor) *ATNConfig {
+ lac := &ATNConfig{}
+ lac.lexerActionExecutor = lexerActionExecutor
+ lac.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
+ lac.InitATNConfig(c, state, c.GetAlt(), c.GetContext(), c.GetSemanticContext())
+ lac.cType = lexerConfig
+ return lac
+}
+
+func NewLexerATNConfig2(c *ATNConfig, state ATNState, context *PredictionContext) *ATNConfig {
+ lac := &ATNConfig{}
+ lac.lexerActionExecutor = c.lexerActionExecutor
+ lac.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
+ lac.InitATNConfig(c, state, c.GetAlt(), context, c.GetSemanticContext())
+ lac.cType = lexerConfig
+ return lac
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func NewLexerATNConfig1(state ATNState, alt int, context *PredictionContext) *ATNConfig {
+ lac := &ATNConfig{}
+ lac.state = state
+ lac.alt = alt
+ lac.context = context
+ lac.semanticContext = SemanticContextNone
+ lac.cType = lexerConfig
+ return lac
+}
+
+// LHash is the default hash function for Lexer ATNConfig objects, it can be used directly or via
+// the default comparator [ObjEqComparator].
+func (a *ATNConfig) LHash() int {
+ var f int
+ if a.passedThroughNonGreedyDecision {
+ f = 1
+ } else {
+ f = 0
+ }
+ h := murmurInit(7)
+ h = murmurUpdate(h, a.state.GetStateNumber())
+ h = murmurUpdate(h, a.alt)
+ h = murmurUpdate(h, a.context.Hash())
+ h = murmurUpdate(h, a.semanticContext.Hash())
+ h = murmurUpdate(h, f)
+ h = murmurUpdate(h, a.lexerActionExecutor.Hash())
+ h = murmurFinish(h, 6)
+ return h
+}
+
+// LEquals is the default comparison function for Lexer ATNConfig objects, it can be used directly or via
+// the default comparator [ObjEqComparator].
+func (a *ATNConfig) LEquals(other Collectable[*ATNConfig]) bool {
+ var otherT, ok = other.(*ATNConfig)
+ if !ok {
+ return false
+ } else if a == otherT {
+ return true
+ } else if a.passedThroughNonGreedyDecision != otherT.passedThroughNonGreedyDecision {
+ return false
+ }
+
+ switch {
+ case a.lexerActionExecutor == nil && otherT.lexerActionExecutor == nil:
+ return true
+ case a.lexerActionExecutor != nil && otherT.lexerActionExecutor != nil:
+ if !a.lexerActionExecutor.Equals(otherT.lexerActionExecutor) {
+ return false
+ }
+ default:
+		return false // One, but not both, is nil
+ }
+
+ return a.PEquals(otherT)
+}
+
+func checkNonGreedyDecision(source *ATNConfig, target ATNState) bool {
+ var ds, ok = target.(DecisionState)
+
+ return source.passedThroughNonGreedyDecision || (ok && ds.getNonGreedy())
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go b/vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go
new file mode 100644
index 0000000..52dbaf8
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go
@@ -0,0 +1,301 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+)
+
+// ATNConfigSet is a specialized set of ATNConfig that tracks information
+// about its elements and can combine similar configurations using a
+// graph-structured stack.
+type ATNConfigSet struct {
+ cachedHash int
+
+ // configLookup is used to determine whether two ATNConfigSets are equal. We
+ // need all configurations with the same (s, i, _, semctx) to be equal. A key
+ // effectively doubles the number of objects associated with ATNConfigs. All
+ // keys are hashed by (s, i, _, pi), not including the context. Wiped out when
+ // read-only because a set becomes a DFA state.
+ configLookup *JStore[*ATNConfig, Comparator[*ATNConfig]]
+
+	// configs holds the added elements that did not match an existing key in configLookup
+ configs []*ATNConfig
+
+ // TODO: These fields make me pretty uncomfortable, but it is nice to pack up
+ // info together because it saves re-computation. Can we track conflicts as they
+ // are added to save scanning configs later?
+ conflictingAlts *BitSet
+
+ // dipsIntoOuterContext is used by parsers and lexers. In a lexer, it indicates
+ // we hit a pred while computing a closure operation. Do not make a DFA state
+ // from the ATNConfigSet in this case. TODO: How is this used by parsers?
+ dipsIntoOuterContext bool
+
+ // fullCtx is whether it is part of a full context LL prediction. Used to
+ // determine how to merge $. It is a wildcard with SLL, but not for an LL
+ // context merge.
+ fullCtx bool
+
+ // Used in parser and lexer. In lexer, it indicates we hit a pred
+ // while computing a closure operation. Don't make a DFA state from this set.
+ hasSemanticContext bool
+
+ // readOnly is whether it is read-only. Do not
+ // allow any code to manipulate the set if true because DFA states will point at
+	// sets and those must not change. If not, protect other fields; conflictingAlts
+ // in particular, which is assigned after readOnly.
+ readOnly bool
+
+ // TODO: These fields make me pretty uncomfortable, but it is nice to pack up
+ // info together because it saves re-computation. Can we track conflicts as they
+ // are added to save scanning configs later?
+ uniqueAlt int
+}
+
+// Alts returns the combined set of alts for all the configurations in this set.
+func (b *ATNConfigSet) Alts() *BitSet {
+ alts := NewBitSet()
+ for _, it := range b.configs {
+ alts.add(it.GetAlt())
+ }
+ return alts
+}
+
+// NewATNConfigSet creates a new ATNConfigSet instance.
+func NewATNConfigSet(fullCtx bool) *ATNConfigSet {
+ return &ATNConfigSet{
+ cachedHash: -1,
+ configLookup: NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfCompInst, ATNConfigLookupCollection, "NewATNConfigSet()"),
+ fullCtx: fullCtx,
+ }
+}
+
+// Add merges contexts with existing configs for (s, i, pi, _),
+// where 's' is the ATNConfig.state, 'i' is the ATNConfig.alt, and
+// 'pi' is the [ATNConfig].semanticContext.
+//
+// We use (s,i,pi) as the key.
+// Updates dipsIntoOuterContext and hasSemanticContext when necessary.
+func (b *ATNConfigSet) Add(config *ATNConfig, mergeCache *JPCMap) bool {
+ if b.readOnly {
+ panic("set is read-only")
+ }
+
+ if config.GetSemanticContext() != SemanticContextNone {
+ b.hasSemanticContext = true
+ }
+
+ if config.GetReachesIntoOuterContext() > 0 {
+ b.dipsIntoOuterContext = true
+ }
+
+ existing, present := b.configLookup.Put(config)
+
+ // The config was not already in the set
+ //
+ if !present {
+ b.cachedHash = -1
+ b.configs = append(b.configs, config) // Track order here
+ return true
+ }
+
+ // Merge a previous (s, i, pi, _) with it and save the result
+ rootIsWildcard := !b.fullCtx
+ merged := merge(existing.GetContext(), config.GetContext(), rootIsWildcard, mergeCache)
+
+ // No need to check for existing.context because config.context is in the cache,
+ // since the only way to create new graphs is the "call rule" and here. We cache
+ // at both places.
+ existing.SetReachesIntoOuterContext(intMax(existing.GetReachesIntoOuterContext(), config.GetReachesIntoOuterContext()))
+
+ // Preserve the precedence filter suppression during the merge
+ if config.getPrecedenceFilterSuppressed() {
+ existing.setPrecedenceFilterSuppressed(true)
+ }
+
+ // Replace the context because there is no need to do alt mapping
+ existing.SetContext(merged)
+
+ return true
+}
+
+// GetStates returns the set of states represented by all configurations in this config set
+func (b *ATNConfigSet) GetStates() *JStore[ATNState, Comparator[ATNState]] {
+
+ // states uses the standard comparator and Hash() provided by the ATNState instance
+ //
+ states := NewJStore[ATNState, Comparator[ATNState]](aStateEqInst, ATNStateCollection, "ATNConfigSet.GetStates()")
+
+ for i := 0; i < len(b.configs); i++ {
+ states.Put(b.configs[i].GetState())
+ }
+
+ return states
+}
+
+func (b *ATNConfigSet) GetPredicates() []SemanticContext {
+ predicates := make([]SemanticContext, 0)
+
+ for i := 0; i < len(b.configs); i++ {
+ c := b.configs[i].GetSemanticContext()
+
+ if c != SemanticContextNone {
+ predicates = append(predicates, c)
+ }
+ }
+
+ return predicates
+}
+
+func (b *ATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) {
+ if b.readOnly {
+ panic("set is read-only")
+ }
+
+	// Empty indicates no optimization is possible
+ if b.configLookup == nil || b.configLookup.Len() == 0 {
+ return
+ }
+
+ for i := 0; i < len(b.configs); i++ {
+ config := b.configs[i]
+ config.SetContext(interpreter.getCachedContext(config.GetContext()))
+ }
+}
+
+func (b *ATNConfigSet) AddAll(coll []*ATNConfig) bool {
+ for i := 0; i < len(coll); i++ {
+ b.Add(coll[i], nil)
+ }
+
+ return false
+}
+
+// Compare returns true only if the configs are in the same order and their Equals function returns true.
+// Java uses ArrayList.equals(), which requires the same order.
+func (b *ATNConfigSet) Compare(bs *ATNConfigSet) bool {
+ if len(b.configs) != len(bs.configs) {
+ return false
+ }
+ for i := 0; i < len(b.configs); i++ {
+ if !b.configs[i].Equals(bs.configs[i]) {
+ return false
+ }
+ }
+
+ return true
+}
+
+func (b *ATNConfigSet) Equals(other Collectable[ATNConfig]) bool {
+ if b == other {
+ return true
+ } else if _, ok := other.(*ATNConfigSet); !ok {
+ return false
+ }
+
+ other2 := other.(*ATNConfigSet)
+ var eca bool
+ switch {
+ case b.conflictingAlts == nil && other2.conflictingAlts == nil:
+ eca = true
+ case b.conflictingAlts != nil && other2.conflictingAlts != nil:
+ eca = b.conflictingAlts.equals(other2.conflictingAlts)
+ }
+ return b.configs != nil &&
+ b.fullCtx == other2.fullCtx &&
+ b.uniqueAlt == other2.uniqueAlt &&
+ eca &&
+ b.hasSemanticContext == other2.hasSemanticContext &&
+ b.dipsIntoOuterContext == other2.dipsIntoOuterContext &&
+ b.Compare(other2)
+}
+
+func (b *ATNConfigSet) Hash() int {
+ if b.readOnly {
+ if b.cachedHash == -1 {
+ b.cachedHash = b.hashCodeConfigs()
+ }
+
+ return b.cachedHash
+ }
+
+ return b.hashCodeConfigs()
+}
+
+func (b *ATNConfigSet) hashCodeConfigs() int {
+ h := 1
+ for _, config := range b.configs {
+ h = 31*h + config.Hash()
+ }
+ return h
+}
+
+func (b *ATNConfigSet) Contains(item *ATNConfig) bool {
+ if b.readOnly {
+ panic("not implemented for read-only sets")
+ }
+ if b.configLookup == nil {
+ return false
+ }
+ return b.configLookup.Contains(item)
+}
+
+func (b *ATNConfigSet) ContainsFast(item *ATNConfig) bool {
+ return b.Contains(item)
+}
+
+func (b *ATNConfigSet) Clear() {
+ if b.readOnly {
+ panic("set is read-only")
+ }
+ b.configs = make([]*ATNConfig, 0)
+ b.cachedHash = -1
+ b.configLookup = NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfCompInst, ATNConfigLookupCollection, "NewATNConfigSet()")
+}
+
+func (b *ATNConfigSet) String() string {
+
+ s := "["
+
+ for i, c := range b.configs {
+ s += c.String()
+
+ if i != len(b.configs)-1 {
+ s += ", "
+ }
+ }
+
+ s += "]"
+
+ if b.hasSemanticContext {
+ s += ",hasSemanticContext=" + fmt.Sprint(b.hasSemanticContext)
+ }
+
+ if b.uniqueAlt != ATNInvalidAltNumber {
+ s += ",uniqueAlt=" + fmt.Sprint(b.uniqueAlt)
+ }
+
+ if b.conflictingAlts != nil {
+ s += ",conflictingAlts=" + b.conflictingAlts.String()
+ }
+
+ if b.dipsIntoOuterContext {
+ s += ",dipsIntoOuterContext"
+ }
+
+ return s
+}
+
+// NewOrderedATNConfigSet creates a config set with a slightly different Hash/Equal pair
+// for use in lexers.
+func NewOrderedATNConfigSet() *ATNConfigSet {
+ return &ATNConfigSet{
+ cachedHash: -1,
+ // This set uses the standard Hash() and Equals() from ATNConfig
+ configLookup: NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ATNConfigCollection, "ATNConfigSet.NewOrderedATNConfigSet()"),
+ fullCtx: false,
+ }
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go b/vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go
new file mode 100644
index 0000000..bdb30b3
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go
@@ -0,0 +1,62 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import "errors"
+
+var defaultATNDeserializationOptions = ATNDeserializationOptions{true, true, false}
+
+type ATNDeserializationOptions struct {
+ readOnly bool
+ verifyATN bool
+ generateRuleBypassTransitions bool
+}
+
+func (opts *ATNDeserializationOptions) ReadOnly() bool {
+ return opts.readOnly
+}
+
+func (opts *ATNDeserializationOptions) SetReadOnly(readOnly bool) {
+ if opts.readOnly {
+ panic(errors.New("cannot mutate read only ATNDeserializationOptions"))
+ }
+ opts.readOnly = readOnly
+}
+
+func (opts *ATNDeserializationOptions) VerifyATN() bool {
+ return opts.verifyATN
+}
+
+func (opts *ATNDeserializationOptions) SetVerifyATN(verifyATN bool) {
+ if opts.readOnly {
+ panic(errors.New("cannot mutate read only ATNDeserializationOptions"))
+ }
+ opts.verifyATN = verifyATN
+}
+
+func (opts *ATNDeserializationOptions) GenerateRuleBypassTransitions() bool {
+ return opts.generateRuleBypassTransitions
+}
+
+func (opts *ATNDeserializationOptions) SetGenerateRuleBypassTransitions(generateRuleBypassTransitions bool) {
+ if opts.readOnly {
+ panic(errors.New("cannot mutate read only ATNDeserializationOptions"))
+ }
+ opts.generateRuleBypassTransitions = generateRuleBypassTransitions
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func DefaultATNDeserializationOptions() *ATNDeserializationOptions {
+ return NewATNDeserializationOptions(&defaultATNDeserializationOptions)
+}
+
+func NewATNDeserializationOptions(other *ATNDeserializationOptions) *ATNDeserializationOptions {
+ o := new(ATNDeserializationOptions)
+ if other != nil {
+ *o = *other
+ o.readOnly = false
+ }
+ return o
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go b/vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go
new file mode 100644
index 0000000..2dcb9ae
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go
@@ -0,0 +1,684 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "strconv"
+)
+
+const serializedVersion = 4
+
+type loopEndStateIntPair struct {
+ item0 *LoopEndState
+ item1 int
+}
+
+type blockStartStateIntPair struct {
+ item0 BlockStartState
+ item1 int
+}
+
+type ATNDeserializer struct {
+ options *ATNDeserializationOptions
+ data []int32
+ pos int
+}
+
+func NewATNDeserializer(options *ATNDeserializationOptions) *ATNDeserializer {
+ if options == nil {
+ options = &defaultATNDeserializationOptions
+ }
+
+ return &ATNDeserializer{options: options}
+}
+
+//goland:noinspection GoUnusedFunction
+func stringInSlice(a string, list []string) int {
+ for i, b := range list {
+ if b == a {
+ return i
+ }
+ }
+
+ return -1
+}
+
+func (a *ATNDeserializer) Deserialize(data []int32) *ATN {
+ a.data = data
+ a.pos = 0
+ a.checkVersion()
+
+ atn := a.readATN()
+
+ a.readStates(atn)
+ a.readRules(atn)
+ a.readModes(atn)
+
+ sets := a.readSets(atn, nil)
+
+ a.readEdges(atn, sets)
+ a.readDecisions(atn)
+ a.readLexerActions(atn)
+ a.markPrecedenceDecisions(atn)
+ a.verifyATN(atn)
+
+ if a.options.GenerateRuleBypassTransitions() && atn.grammarType == ATNTypeParser {
+ a.generateRuleBypassTransitions(atn)
+ // Re-verify after modification
+ a.verifyATN(atn)
+ }
+
+ return atn
+}
+
+func (a *ATNDeserializer) checkVersion() {
+ version := a.readInt()
+
+ if version != serializedVersion {
+ panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(serializedVersion) + ").")
+ }
+}
+
+func (a *ATNDeserializer) readATN() *ATN {
+ grammarType := a.readInt()
+ maxTokenType := a.readInt()
+
+ return NewATN(grammarType, maxTokenType)
+}
+
+func (a *ATNDeserializer) readStates(atn *ATN) {
+ nstates := a.readInt()
+
+ // Allocate worst case size.
+ loopBackStateNumbers := make([]loopEndStateIntPair, 0, nstates)
+ endStateNumbers := make([]blockStartStateIntPair, 0, nstates)
+
+ // Preallocate states slice.
+ atn.states = make([]ATNState, 0, nstates)
+
+ for i := 0; i < nstates; i++ {
+ stype := a.readInt()
+
+ // Ignore bad types of states
+ if stype == ATNStateInvalidType {
+ atn.addState(nil)
+ continue
+ }
+
+ ruleIndex := a.readInt()
+
+ s := a.stateFactory(stype, ruleIndex)
+
+ if stype == ATNStateLoopEnd {
+ loopBackStateNumber := a.readInt()
+
+ loopBackStateNumbers = append(loopBackStateNumbers, loopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
+ } else if s2, ok := s.(BlockStartState); ok {
+ endStateNumber := a.readInt()
+
+ endStateNumbers = append(endStateNumbers, blockStartStateIntPair{s2, endStateNumber})
+ }
+
+ atn.addState(s)
+ }
+
+ // Delay the assignment of loop back and end states until we know all the state
+ // instances have been initialized
+ for _, pair := range loopBackStateNumbers {
+ pair.item0.loopBackState = atn.states[pair.item1]
+ }
+
+ for _, pair := range endStateNumbers {
+ pair.item0.setEndState(atn.states[pair.item1].(*BlockEndState))
+ }
+
+ numNonGreedyStates := a.readInt()
+ for j := 0; j < numNonGreedyStates; j++ {
+ stateNumber := a.readInt()
+
+ atn.states[stateNumber].(DecisionState).setNonGreedy(true)
+ }
+
+ numPrecedenceStates := a.readInt()
+ for j := 0; j < numPrecedenceStates; j++ {
+ stateNumber := a.readInt()
+
+ atn.states[stateNumber].(*RuleStartState).isPrecedenceRule = true
+ }
+}
+
+func (a *ATNDeserializer) readRules(atn *ATN) {
+ nrules := a.readInt()
+
+ if atn.grammarType == ATNTypeLexer {
+ atn.ruleToTokenType = make([]int, nrules)
+ }
+
+ atn.ruleToStartState = make([]*RuleStartState, nrules)
+
+ for i := range atn.ruleToStartState {
+ s := a.readInt()
+ startState := atn.states[s].(*RuleStartState)
+
+ atn.ruleToStartState[i] = startState
+
+ if atn.grammarType == ATNTypeLexer {
+ tokenType := a.readInt()
+
+ atn.ruleToTokenType[i] = tokenType
+ }
+ }
+
+ atn.ruleToStopState = make([]*RuleStopState, nrules)
+
+ for _, state := range atn.states {
+ if s2, ok := state.(*RuleStopState); ok {
+ atn.ruleToStopState[s2.ruleIndex] = s2
+ atn.ruleToStartState[s2.ruleIndex].stopState = s2
+ }
+ }
+}
+
+func (a *ATNDeserializer) readModes(atn *ATN) {
+ nmodes := a.readInt()
+ atn.modeToStartState = make([]*TokensStartState, nmodes)
+
+ for i := range atn.modeToStartState {
+ s := a.readInt()
+
+ atn.modeToStartState[i] = atn.states[s].(*TokensStartState)
+ }
+}
+
+func (a *ATNDeserializer) readSets(_ *ATN, sets []*IntervalSet) []*IntervalSet {
+ m := a.readInt()
+
+ // Preallocate the needed capacity.
+ if cap(sets)-len(sets) < m {
+ isets := make([]*IntervalSet, len(sets), len(sets)+m)
+ copy(isets, sets)
+ sets = isets
+ }
+
+ for i := 0; i < m; i++ {
+ iset := NewIntervalSet()
+
+ sets = append(sets, iset)
+
+ n := a.readInt()
+ containsEOF := a.readInt()
+
+ if containsEOF != 0 {
+ iset.addOne(-1)
+ }
+
+ for j := 0; j < n; j++ {
+ i1 := a.readInt()
+ i2 := a.readInt()
+
+ iset.addRange(i1, i2)
+ }
+ }
+
+ return sets
+}
+
+func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
+ nedges := a.readInt()
+
+ for i := 0; i < nedges; i++ {
+ var (
+ src = a.readInt()
+ trg = a.readInt()
+ ttype = a.readInt()
+ arg1 = a.readInt()
+ arg2 = a.readInt()
+ arg3 = a.readInt()
+ trans = a.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
+ srcState = atn.states[src]
+ )
+
+ srcState.AddTransition(trans, -1)
+ }
+
+ // Edges for rule stop states can be derived, so they are not serialized
+ for _, state := range atn.states {
+ for _, t := range state.GetTransitions() {
+ var rt, ok = t.(*RuleTransition)
+
+ if !ok {
+ continue
+ }
+
+ outermostPrecedenceReturn := -1
+
+ if atn.ruleToStartState[rt.getTarget().GetRuleIndex()].isPrecedenceRule {
+ if rt.precedence == 0 {
+ outermostPrecedenceReturn = rt.getTarget().GetRuleIndex()
+ }
+ }
+
+ trans := NewEpsilonTransition(rt.followState, outermostPrecedenceReturn)
+
+ atn.ruleToStopState[rt.getTarget().GetRuleIndex()].AddTransition(trans, -1)
+ }
+ }
+
+ for _, state := range atn.states {
+ if s2, ok := state.(BlockStartState); ok {
+ // We need to know the end state to set its start state
+ if s2.getEndState() == nil {
+ panic("IllegalState")
+ }
+
+ // Block end states can only be associated to a single block start state
+ if s2.getEndState().startState != nil {
+ panic("IllegalState")
+ }
+
+ s2.getEndState().startState = state
+ }
+
+ if s2, ok := state.(*PlusLoopbackState); ok {
+ for _, t := range s2.GetTransitions() {
+ if t2, ok := t.getTarget().(*PlusBlockStartState); ok {
+ t2.loopBackState = state
+ }
+ }
+ } else if s2, ok := state.(*StarLoopbackState); ok {
+ for _, t := range s2.GetTransitions() {
+ if t2, ok := t.getTarget().(*StarLoopEntryState); ok {
+ t2.loopBackState = state
+ }
+ }
+ }
+ }
+}
+
+func (a *ATNDeserializer) readDecisions(atn *ATN) {
+ ndecisions := a.readInt()
+
+ for i := 0; i < ndecisions; i++ {
+ s := a.readInt()
+ decState := atn.states[s].(DecisionState)
+
+ atn.DecisionToState = append(atn.DecisionToState, decState)
+ decState.setDecision(i)
+ }
+}
+
+func (a *ATNDeserializer) readLexerActions(atn *ATN) {
+ if atn.grammarType == ATNTypeLexer {
+ count := a.readInt()
+
+ atn.lexerActions = make([]LexerAction, count)
+
+ for i := range atn.lexerActions {
+ actionType := a.readInt()
+ data1 := a.readInt()
+ data2 := a.readInt()
+ atn.lexerActions[i] = a.lexerActionFactory(actionType, data1, data2)
+ }
+ }
+}
+
+func (a *ATNDeserializer) generateRuleBypassTransitions(atn *ATN) {
+ count := len(atn.ruleToStartState)
+
+ for i := 0; i < count; i++ {
+ atn.ruleToTokenType[i] = atn.maxTokenType + i + 1
+ }
+
+ for i := 0; i < count; i++ {
+ a.generateRuleBypassTransition(atn, i)
+ }
+}
+
+func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
+ bypassStart := NewBasicBlockStartState()
+
+ bypassStart.ruleIndex = idx
+ atn.addState(bypassStart)
+
+ bypassStop := NewBlockEndState()
+
+ bypassStop.ruleIndex = idx
+ atn.addState(bypassStop)
+
+ bypassStart.endState = bypassStop
+
+ atn.defineDecisionState(&bypassStart.BaseDecisionState)
+
+ bypassStop.startState = bypassStart
+
+ var excludeTransition Transition
+ var endState ATNState
+
+ if atn.ruleToStartState[idx].isPrecedenceRule {
+ // Wrap from the beginning of the rule to the StarLoopEntryState
+ endState = nil
+
+ for i := 0; i < len(atn.states); i++ {
+ state := atn.states[i]
+
+ if a.stateIsEndStateFor(state, idx) != nil {
+ endState = state
+ excludeTransition = state.(*StarLoopEntryState).loopBackState.GetTransitions()[0]
+
+ break
+ }
+ }
+
+ if excludeTransition == nil {
+ panic("Couldn't identify final state of the precedence rule prefix section.")
+ }
+ } else {
+ endState = atn.ruleToStopState[idx]
+ }
+
+ // All non-excluded transitions that currently target end state need to target
+ // blockEnd instead
+ for i := 0; i < len(atn.states); i++ {
+ state := atn.states[i]
+
+ for j := 0; j < len(state.GetTransitions()); j++ {
+ transition := state.GetTransitions()[j]
+
+ if transition == excludeTransition {
+ continue
+ }
+
+ if transition.getTarget() == endState {
+ transition.setTarget(bypassStop)
+ }
+ }
+ }
+
+ // All transitions leaving the rule start state need to leave blockStart instead
+ ruleToStartState := atn.ruleToStartState[idx]
+ count := len(ruleToStartState.GetTransitions())
+
+	// Move each transition from the rule start state onto bypassStart, removing
+	// it from the start state as we go so that the loop terminates (this mirrors
+	// the Java runtime's removeTransition of the last element).
+	for count > 0 {
+		count--
+		bypassStart.AddTransition(ruleToStartState.GetTransitions()[count], -1)
+		ruleToStartState.SetTransitions(ruleToStartState.GetTransitions()[:count])
+	}
+
+ // Link the new states
+ atn.ruleToStartState[idx].AddTransition(NewEpsilonTransition(bypassStart, -1), -1)
+ bypassStop.AddTransition(NewEpsilonTransition(endState, -1), -1)
+
+ MatchState := NewBasicState()
+
+ atn.addState(MatchState)
+ MatchState.AddTransition(NewAtomTransition(bypassStop, atn.ruleToTokenType[idx]), -1)
+ bypassStart.AddTransition(NewEpsilonTransition(MatchState, -1), -1)
+}
+
+func (a *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
+ if state.GetRuleIndex() != idx {
+ return nil
+ }
+
+ if _, ok := state.(*StarLoopEntryState); !ok {
+ return nil
+ }
+
+ maybeLoopEndState := state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
+
+ if _, ok := maybeLoopEndState.(*LoopEndState); !ok {
+ return nil
+ }
+
+ var _, ok = maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)
+
+ if maybeLoopEndState.(*LoopEndState).epsilonOnlyTransitions && ok {
+ return state
+ }
+
+ return nil
+}
+
+// markPrecedenceDecisions analyzes the StarLoopEntryState states in the
+// specified ATN to set the StarLoopEntryState.precedenceRuleDecision field to
+// the correct value.
+func (a *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
+ for _, state := range atn.states {
+ if _, ok := state.(*StarLoopEntryState); !ok {
+ continue
+ }
+
+ // We analyze the [ATN] to determine if an ATN decision state is the
+ // decision for the closure block that determines whether a
+ // precedence rule should continue or complete.
+ if atn.ruleToStartState[state.GetRuleIndex()].isPrecedenceRule {
+ maybeLoopEndState := state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
+
+ if s3, ok := maybeLoopEndState.(*LoopEndState); ok {
+ var _, ok2 = maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)
+
+ if s3.epsilonOnlyTransitions && ok2 {
+ state.(*StarLoopEntryState).precedenceRuleDecision = true
+ }
+ }
+ }
+ }
+}
+
+func (a *ATNDeserializer) verifyATN(atn *ATN) {
+ if !a.options.VerifyATN() {
+ return
+ }
+
+ // Verify assumptions
+ for _, state := range atn.states {
+ if state == nil {
+ continue
+ }
+
+ a.checkCondition(state.GetEpsilonOnlyTransitions() || len(state.GetTransitions()) <= 1, "")
+
+ switch s2 := state.(type) {
+ case *PlusBlockStartState:
+ a.checkCondition(s2.loopBackState != nil, "")
+
+ case *StarLoopEntryState:
+ a.checkCondition(s2.loopBackState != nil, "")
+ a.checkCondition(len(s2.GetTransitions()) == 2, "")
+
+ switch s2.transitions[0].getTarget().(type) {
+ case *StarBlockStartState:
+ _, ok := s2.transitions[1].getTarget().(*LoopEndState)
+
+ a.checkCondition(ok, "")
+ a.checkCondition(!s2.nonGreedy, "")
+
+ case *LoopEndState:
+ var _, ok = s2.transitions[1].getTarget().(*StarBlockStartState)
+
+ a.checkCondition(ok, "")
+ a.checkCondition(s2.nonGreedy, "")
+
+ default:
+ panic("IllegalState")
+ }
+
+ case *StarLoopbackState:
+ a.checkCondition(len(state.GetTransitions()) == 1, "")
+
+ var _, ok = state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
+
+ a.checkCondition(ok, "")
+
+ case *LoopEndState:
+ a.checkCondition(s2.loopBackState != nil, "")
+
+ case *RuleStartState:
+ a.checkCondition(s2.stopState != nil, "")
+
+ case BlockStartState:
+ a.checkCondition(s2.getEndState() != nil, "")
+
+ case *BlockEndState:
+ a.checkCondition(s2.startState != nil, "")
+
+ case DecisionState:
+ a.checkCondition(len(s2.GetTransitions()) <= 1 || s2.getDecision() >= 0, "")
+
+ default:
+ var _, ok = s2.(*RuleStopState)
+
+ a.checkCondition(len(s2.GetTransitions()) <= 1 || ok, "")
+ }
+ }
+}
+
+func (a *ATNDeserializer) checkCondition(condition bool, message string) {
+ if !condition {
+ if message == "" {
+ message = "IllegalState"
+ }
+
+ panic(message)
+ }
+}
+
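+// readInt returns the next value from the serialized ATN data and advances the
+// read position.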
+func (a *ATNDeserializer) readInt() int {
+ v := a.data[a.pos]
+
+ a.pos++
+
+ return int(v) // data is 32 bits but int is at least that big
+}
+
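+// edgeFactory constructs the Transition implementation encoded by typeIndex,
+// interpreting arg1..arg3 as the type-specific operands (range bounds, rule
+// index and precedence, set index, and so on).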
+func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, _, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {
+ target := atn.states[trg]
+
+ switch typeIndex {
+ case TransitionEPSILON:
+ return NewEpsilonTransition(target, -1)
+
+ case TransitionRANGE:
+ if arg3 != 0 {
+ return NewRangeTransition(target, TokenEOF, arg2)
+ }
+
+ return NewRangeTransition(target, arg1, arg2)
+
+ case TransitionRULE:
+ return NewRuleTransition(atn.states[arg1], arg2, arg3, target)
+
+ case TransitionPREDICATE:
+ return NewPredicateTransition(target, arg1, arg2, arg3 != 0)
+
+ case TransitionPRECEDENCE:
+ return NewPrecedencePredicateTransition(target, arg1)
+
+ case TransitionATOM:
+ if arg3 != 0 {
+ return NewAtomTransition(target, TokenEOF)
+ }
+
+ return NewAtomTransition(target, arg1)
+
+ case TransitionACTION:
+ return NewActionTransition(target, arg1, arg2, arg3 != 0)
+
+ case TransitionSET:
+ return NewSetTransition(target, sets[arg1])
+
+ case TransitionNOTSET:
+ return NewNotSetTransition(target, sets[arg1])
+
+ case TransitionWILDCARD:
+ return NewWildcardTransition(target)
+ }
+
+ panic("The specified transition type is not valid.")
+}
+
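+// stateFactory allocates the ATNState implementation for the given serialized
+// state type and stamps it with the owning rule index.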
+func (a *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
+ var s ATNState
+
+ switch typeIndex {
+ case ATNStateInvalidType:
+ return nil
+
+ case ATNStateBasic:
+ s = NewBasicState()
+
+ case ATNStateRuleStart:
+ s = NewRuleStartState()
+
+ case ATNStateBlockStart:
+ s = NewBasicBlockStartState()
+
+ case ATNStatePlusBlockStart:
+ s = NewPlusBlockStartState()
+
+ case ATNStateStarBlockStart:
+ s = NewStarBlockStartState()
+
+ case ATNStateTokenStart:
+ s = NewTokensStartState()
+
+ case ATNStateRuleStop:
+ s = NewRuleStopState()
+
+ case ATNStateBlockEnd:
+ s = NewBlockEndState()
+
+ case ATNStateStarLoopBack:
+ s = NewStarLoopbackState()
+
+ case ATNStateStarLoopEntry:
+ s = NewStarLoopEntryState()
+
+ case ATNStatePlusLoopBack:
+ s = NewPlusLoopbackState()
+
+ case ATNStateLoopEnd:
+ s = NewLoopEndState()
+
+ default:
+ panic(fmt.Sprintf("state type %d is invalid", typeIndex))
+ }
+
+ s.SetRuleIndex(ruleIndex)
+
+ return s
+}
+
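+// lexerActionFactory constructs the LexerAction encoded by typeIndex, using
+// data1 and data2 as the action-specific operands (channel, mode, token type,
+// and so on).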
+func (a *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) LexerAction {
+ switch typeIndex {
+ case LexerActionTypeChannel:
+ return NewLexerChannelAction(data1)
+
+ case LexerActionTypeCustom:
+ return NewLexerCustomAction(data1, data2)
+
+ case LexerActionTypeMode:
+ return NewLexerModeAction(data1)
+
+ case LexerActionTypeMore:
+ return LexerMoreActionINSTANCE
+
+ case LexerActionTypePopMode:
+ return LexerPopModeActionINSTANCE
+
+ case LexerActionTypePushMode:
+ return NewLexerPushModeAction(data1)
+
+ case LexerActionTypeSkip:
+ return LexerSkipActionINSTANCE
+
+ case LexerActionTypeType:
+ return NewLexerTypeAction(data1)
+
+ default:
+ panic(fmt.Sprintf("lexer action %d is invalid", typeIndex))
+ }
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go b/vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go
new file mode 100644
index 0000000..afe6c9f
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go
@@ -0,0 +1,41 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewATNConfigSet(false))
+
+type IATNSimulator interface {
+ SharedContextCache() *PredictionContextCache
+ ATN() *ATN
+ DecisionToDFA() []*DFA
+}
+
+type BaseATNSimulator struct {
+ atn *ATN
+ sharedContextCache *PredictionContextCache
+ decisionToDFA []*DFA
+}
+
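+// getCachedContext canonicalizes context against the shared context cache, so
+// that structurally equal prediction contexts end up sharing one instance.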
+func (b *BaseATNSimulator) getCachedContext(context *PredictionContext) *PredictionContext {
+ if b.sharedContextCache == nil {
+ return context
+ }
+
+ //visited := NewJMap[*PredictionContext, *PredictionContext, Comparator[*PredictionContext]](pContextEqInst, PredictionVisitedCollection, "Visit map in getCachedContext()")
+ visited := NewVisitRecord()
+ return getCachedBasePredictionContext(context, b.sharedContextCache, visited)
+}
+
+func (b *BaseATNSimulator) SharedContextCache() *PredictionContextCache {
+ return b.sharedContextCache
+}
+
+func (b *BaseATNSimulator) ATN() *ATN {
+ return b.atn
+}
+
+func (b *BaseATNSimulator) DecisionToDFA() []*DFA {
+ return b.decisionToDFA
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_state.go b/vendor/github.com/antlr4-go/antlr/v4/atn_state.go
new file mode 100644
index 0000000..2ae5807
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/atn_state.go
@@ -0,0 +1,461 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "os"
+ "strconv"
+)
+
+// Constants for serialization.
+const (
+ ATNStateInvalidType = 0
+ ATNStateBasic = 1
+ ATNStateRuleStart = 2
+ ATNStateBlockStart = 3
+ ATNStatePlusBlockStart = 4
+ ATNStateStarBlockStart = 5
+ ATNStateTokenStart = 6
+ ATNStateRuleStop = 7
+ ATNStateBlockEnd = 8
+ ATNStateStarLoopBack = 9
+ ATNStateStarLoopEntry = 10
+ ATNStatePlusLoopBack = 11
+ ATNStateLoopEnd = 12
+
+ ATNStateInvalidStateNumber = -1
+)
+
+//goland:noinspection GoUnusedGlobalVariable
+var ATNStateInitialNumTransitions = 4
+
+type ATNState interface {
+ GetEpsilonOnlyTransitions() bool
+
+ GetRuleIndex() int
+ SetRuleIndex(int)
+
+ GetNextTokenWithinRule() *IntervalSet
+ SetNextTokenWithinRule(*IntervalSet)
+
+ GetATN() *ATN
+ SetATN(*ATN)
+
+ GetStateType() int
+
+ GetStateNumber() int
+ SetStateNumber(int)
+
+ GetTransitions() []Transition
+ SetTransitions([]Transition)
+ AddTransition(Transition, int)
+
+ String() string
+ Hash() int
+ Equals(Collectable[ATNState]) bool
+}
+
+type BaseATNState struct {
+ // NextTokenWithinRule caches lookahead during parsing. Not used during construction.
+ NextTokenWithinRule *IntervalSet
+
+ // atn is the current ATN.
+ atn *ATN
+
+ epsilonOnlyTransitions bool
+
+ // ruleIndex tracks the Rule index because there are no Rule objects at runtime.
+ ruleIndex int
+
+ stateNumber int
+
+ stateType int
+
+ // Track the transitions emanating from this ATN state.
+ transitions []Transition
+}
+
+func NewATNState() *BaseATNState {
+ return &BaseATNState{stateNumber: ATNStateInvalidStateNumber, stateType: ATNStateInvalidType}
+}
+
+func (as *BaseATNState) GetRuleIndex() int {
+ return as.ruleIndex
+}
+
+func (as *BaseATNState) SetRuleIndex(v int) {
+ as.ruleIndex = v
+}
+func (as *BaseATNState) GetEpsilonOnlyTransitions() bool {
+ return as.epsilonOnlyTransitions
+}
+
+func (as *BaseATNState) GetATN() *ATN {
+ return as.atn
+}
+
+func (as *BaseATNState) SetATN(atn *ATN) {
+ as.atn = atn
+}
+
+func (as *BaseATNState) GetTransitions() []Transition {
+ return as.transitions
+}
+
+func (as *BaseATNState) SetTransitions(t []Transition) {
+ as.transitions = t
+}
+
+func (as *BaseATNState) GetStateType() int {
+ return as.stateType
+}
+
+func (as *BaseATNState) GetStateNumber() int {
+ return as.stateNumber
+}
+
+func (as *BaseATNState) SetStateNumber(stateNumber int) {
+ as.stateNumber = stateNumber
+}
+
+func (as *BaseATNState) GetNextTokenWithinRule() *IntervalSet {
+ return as.NextTokenWithinRule
+}
+
+func (as *BaseATNState) SetNextTokenWithinRule(v *IntervalSet) {
+ as.NextTokenWithinRule = v
+}
+
+func (as *BaseATNState) Hash() int {
+ return as.stateNumber
+}
+
+func (as *BaseATNState) String() string {
+ return strconv.Itoa(as.stateNumber)
+}
+
+func (as *BaseATNState) Equals(other Collectable[ATNState]) bool {
+ if ot, ok := other.(ATNState); ok {
+ return as.stateNumber == ot.GetStateNumber()
+ }
+
+ return false
+}
+
+func (as *BaseATNState) isNonGreedyExitState() bool {
+ return false
+}
+
+func (as *BaseATNState) AddTransition(trans Transition, index int) {
+ if len(as.transitions) == 0 {
+ as.epsilonOnlyTransitions = trans.getIsEpsilon()
+ } else if as.epsilonOnlyTransitions != trans.getIsEpsilon() {
+		// Diagnostics belong on stderr, not stdin.
+		_, _ = fmt.Fprintf(os.Stderr, "ATN state %d has both epsilon and non-epsilon transitions.\n", as.stateNumber)
+ as.epsilonOnlyTransitions = false
+ }
+
+ // TODO: Check code for already present compared to the Java equivalent
+ //alreadyPresent := false
+ //for _, t := range as.transitions {
+ // if t.getTarget().GetStateNumber() == trans.getTarget().GetStateNumber() {
+ // if t.getLabel() != nil && trans.getLabel() != nil && trans.getLabel().Equals(t.getLabel()) {
+ // alreadyPresent = true
+ // break
+ // }
+ // } else if t.getIsEpsilon() && trans.getIsEpsilon() {
+ // alreadyPresent = true
+ // break
+ // }
+ //}
+ //if !alreadyPresent {
+ if index == -1 {
+ as.transitions = append(as.transitions, trans)
+ } else {
+ as.transitions = append(as.transitions[:index], append([]Transition{trans}, as.transitions[index:]...)...)
+ // TODO: as.transitions.splice(index, 1, trans)
+ }
+ //} else {
+ // _, _ = fmt.Fprintf(os.Stderr, "Transition already present in state %d\n", as.stateNumber)
+ //}
+}
+
+type BasicState struct {
+ BaseATNState
+}
+
+func NewBasicState() *BasicState {
+ return &BasicState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateBasic,
+ },
+ }
+}
+
+type DecisionState interface {
+ ATNState
+
+ getDecision() int
+ setDecision(int)
+
+ getNonGreedy() bool
+ setNonGreedy(bool)
+}
+
+type BaseDecisionState struct {
+ BaseATNState
+ decision int
+ nonGreedy bool
+}
+
+func NewBaseDecisionState() *BaseDecisionState {
+ return &BaseDecisionState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateBasic,
+ },
+ decision: -1,
+ }
+}
+
+func (s *BaseDecisionState) getDecision() int {
+ return s.decision
+}
+
+func (s *BaseDecisionState) setDecision(b int) {
+ s.decision = b
+}
+
+func (s *BaseDecisionState) getNonGreedy() bool {
+ return s.nonGreedy
+}
+
+func (s *BaseDecisionState) setNonGreedy(b bool) {
+ s.nonGreedy = b
+}
+
+type BlockStartState interface {
+ DecisionState
+
+ getEndState() *BlockEndState
+ setEndState(*BlockEndState)
+}
+
+// BaseBlockStartState is the start of a regular (...) block.
+type BaseBlockStartState struct {
+ BaseDecisionState
+ endState *BlockEndState
+}
+
+func NewBlockStartState() *BaseBlockStartState {
+ return &BaseBlockStartState{
+ BaseDecisionState: BaseDecisionState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateBasic,
+ },
+ decision: -1,
+ },
+ }
+}
+
+func (s *BaseBlockStartState) getEndState() *BlockEndState {
+ return s.endState
+}
+
+func (s *BaseBlockStartState) setEndState(b *BlockEndState) {
+ s.endState = b
+}
+
+type BasicBlockStartState struct {
+ BaseBlockStartState
+}
+
+func NewBasicBlockStartState() *BasicBlockStartState {
+ return &BasicBlockStartState{
+ BaseBlockStartState: BaseBlockStartState{
+ BaseDecisionState: BaseDecisionState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateBlockStart,
+ },
+ },
+ },
+ }
+}
+
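+// Compile-time check that BasicBlockStartState satisfies BlockStartState.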
+var _ BlockStartState = &BasicBlockStartState{}
+
+// BlockEndState is a terminal node of a simple (a|b|c) block.
+type BlockEndState struct {
+ BaseATNState
+ startState ATNState
+}
+
+func NewBlockEndState() *BlockEndState {
+ return &BlockEndState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateBlockEnd,
+ },
+ startState: nil,
+ }
+}
+
+// RuleStopState is the last node in the ATN for a rule, unless that rule is the
+// start symbol. In that case, there is one transition to EOF. Later, we might
+// encode references to all calls to this rule to compute FOLLOW sets for error
+// handling.
+type RuleStopState struct {
+ BaseATNState
+}
+
+func NewRuleStopState() *RuleStopState {
+ return &RuleStopState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateRuleStop,
+ },
+ }
+}
+
+type RuleStartState struct {
+ BaseATNState
+ stopState ATNState
+ isPrecedenceRule bool
+}
+
+func NewRuleStartState() *RuleStartState {
+ return &RuleStartState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateRuleStart,
+ },
+ }
+}
+
+// PlusLoopbackState is a decision state for A+ and (A|B)+. It has two
+// transitions: one to the loop back to start of the block, and one to exit.
+type PlusLoopbackState struct {
+ BaseDecisionState
+}
+
+func NewPlusLoopbackState() *PlusLoopbackState {
+ return &PlusLoopbackState{
+ BaseDecisionState: BaseDecisionState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStatePlusLoopBack,
+ },
+ },
+ }
+}
+
+// PlusBlockStartState is the start of a (A|B|...)+ loop. Technically it is a
+// decision state; we don't use it for code generation. Somebody might need it,
+// so it is included for completeness. In reality, PlusLoopbackState is the real
+// decision-making node for A+.
+type PlusBlockStartState struct {
+ BaseBlockStartState
+ loopBackState ATNState
+}
+
+func NewPlusBlockStartState() *PlusBlockStartState {
+ return &PlusBlockStartState{
+ BaseBlockStartState: BaseBlockStartState{
+ BaseDecisionState: BaseDecisionState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStatePlusBlockStart,
+ },
+ },
+ },
+ }
+}
+
+var _ BlockStartState = &PlusBlockStartState{}
+
+// StarBlockStartState is the block that begins a closure loop.
+type StarBlockStartState struct {
+ BaseBlockStartState
+}
+
+func NewStarBlockStartState() *StarBlockStartState {
+ return &StarBlockStartState{
+ BaseBlockStartState: BaseBlockStartState{
+ BaseDecisionState: BaseDecisionState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateStarBlockStart,
+ },
+ },
+ },
+ }
+}
+
+var _ BlockStartState = &StarBlockStartState{}
+
+type StarLoopbackState struct {
+ BaseATNState
+}
+
+func NewStarLoopbackState() *StarLoopbackState {
+ return &StarLoopbackState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateStarLoopBack,
+ },
+ }
+}
+
+type StarLoopEntryState struct {
+ BaseDecisionState
+ loopBackState ATNState
+ precedenceRuleDecision bool
+}
+
+func NewStarLoopEntryState() *StarLoopEntryState {
+	// precedenceRuleDecision is false by default; it indicates whether this state can benefit from a precedence DFA during SLL decision making.
+ return &StarLoopEntryState{
+ BaseDecisionState: BaseDecisionState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateStarLoopEntry,
+ },
+ },
+ }
+}
+
+// LoopEndState marks the end of a * or + loop.
+type LoopEndState struct {
+ BaseATNState
+ loopBackState ATNState
+}
+
+func NewLoopEndState() *LoopEndState {
+ return &LoopEndState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateLoopEnd,
+ },
+ }
+}
+
+// TokensStartState is the Tokens rule start state linking to each lexer rule start state.
+type TokensStartState struct {
+ BaseDecisionState
+}
+
+func NewTokensStartState() *TokensStartState {
+ return &TokensStartState{
+ BaseDecisionState: BaseDecisionState{
+ BaseATNState: BaseATNState{
+ stateNumber: ATNStateInvalidStateNumber,
+ stateType: ATNStateTokenStart,
+ },
+ },
+ }
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_type.go b/vendor/github.com/antlr4-go/antlr/v4/atn_type.go
new file mode 100644
index 0000000..3a515a1
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/atn_type.go
@@ -0,0 +1,11 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+// Represents the type of recognizer an ATN applies to.
+const (
+ ATNTypeLexer = 0
+ ATNTypeParser = 1
+)
diff --git a/vendor/github.com/antlr4-go/antlr/v4/char_stream.go b/vendor/github.com/antlr4-go/antlr/v4/char_stream.go
new file mode 100644
index 0000000..bd8127b
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/char_stream.go
@@ -0,0 +1,12 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+type CharStream interface {
+ IntStream
+ GetText(int, int) string
+ GetTextFromTokens(start, end Token) string
+ GetTextFromInterval(Interval) string
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/common_token_factory.go b/vendor/github.com/antlr4-go/antlr/v4/common_token_factory.go
new file mode 100644
index 0000000..1bb0314
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/common_token_factory.go
@@ -0,0 +1,56 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+// TokenFactory creates CommonToken objects.
+type TokenFactory interface {
+ Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token
+}
+
+// CommonTokenFactory is the default TokenFactory implementation.
+type CommonTokenFactory struct {
+ // copyText indicates whether CommonToken.setText should be called after
+ // constructing tokens to explicitly set the text. This is useful for cases
+ // where the input stream might not be able to provide arbitrary substrings of
+ // text from the input after the lexer creates a token (e.g. the
+	// implementation of CharStream.GetText in UnbufferedCharStream panics with an
+	// UnsupportedOperationException). Explicitly setting the token text allows
+ // Token.GetText to be called at any time regardless of the input stream
+ // implementation.
+ //
+ // The default value is false to avoid the performance and memory overhead of
+ // copying text for every token unless explicitly requested.
+ copyText bool
+}
+
+func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
+ return &CommonTokenFactory{copyText: copyText}
+}
+
+// CommonTokenFactoryDEFAULT is the default CommonTokenFactory. It does not
+// explicitly copy token text when constructing tokens.
+var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
+
+func (c *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token {
+ t := NewCommonToken(source, ttype, channel, start, stop)
+
+ t.line = line
+ t.column = column
+
+ if text != "" {
+ t.SetText(text)
+ } else if c.copyText && source.charStream != nil {
+ t.SetText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
+ }
+
+ return t
+}
+
+func (c *CommonTokenFactory) createThin(ttype int, text string) Token {
+ t := NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
+ t.SetText(text)
+
+ return t
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go b/vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go
new file mode 100644
index 0000000..b75da9d
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go
@@ -0,0 +1,450 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "strconv"
+)
+
+// CommonTokenStream is an implementation of TokenStream that loads tokens from
+// a TokenSource on-demand and places the tokens in a buffer to provide access
+// to any previous token by index. This token stream ignores the value of
+// Token.getChannel. If your parser requires the token stream to filter tokens
+// down to only those on a particular channel, such as Token.DEFAULT_CHANNEL or
+// Token.HIDDEN_CHANNEL, use a filtering token stream such as this CommonTokenStream.
+type CommonTokenStream struct {
+ channel int
+
+ // fetchedEOF indicates whether the Token.EOF token has been fetched from
+ // tokenSource and added to tokens. This field improves performance for the
+ // following cases:
+ //
+	// consume: The lookahead check in consume to prevent consuming the EOF symbol is
+	// optimized by checking the values of fetchedEOF and p instead of calling LA.
+	//
+	// fetch: The check to prevent adding multiple EOF symbols into tokens is
+	// trivial with this field.
+ fetchedEOF bool
+
+ // index into [tokens] of the current token (next token to consume).
+ // tokens[p] should be LT(1). It is set to -1 when the stream is first
+ // constructed or when SetTokenSource is called, indicating that the first token
+ // has not yet been fetched from the token source. For additional information,
+ // see the documentation of [IntStream] for a description of initializing methods.
+ index int
+
+	// tokenSource is the [TokenSource] from which tokens for this stream are
+ // fetched.
+ tokenSource TokenSource
+
+ // tokens contains all tokens fetched from the token source. The list is considered a
+ // complete view of the input once fetchedEOF is set to true.
+ tokens []Token
+}
+
+// NewCommonTokenStream creates a new CommonTokenStream instance using the supplied lexer to produce
+// tokens and will pull tokens from the given lexer channel.
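+//
+// A minimal wiring sketch (parser.NewMyLexer stands in for a generated lexer
+// and is hypothetical; the stream only needs a Lexer as its token source):
+//
+//	input := antlr.NewInputStream("a + b")
+//	lexer := parser.NewMyLexer(input)
+//	tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
+//	tokens.Fill()
+//	for _, t := range tokens.GetAllTokens() {
+//		fmt.Println(t.GetText())
+//	}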
+func NewCommonTokenStream(lexer Lexer, channel int) *CommonTokenStream {
+ return &CommonTokenStream{
+ channel: channel,
+ index: -1,
+ tokenSource: lexer,
+ tokens: make([]Token, 0),
+ }
+}
+
+// GetAllTokens returns all tokens currently pulled from the token source.
+func (c *CommonTokenStream) GetAllTokens() []Token {
+ return c.tokens
+}
+
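+// Mark is a no-op for CommonTokenStream, which buffers the whole token stream:
+// the returned marker is a dummy value, and Release likewise does nothing.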
+func (c *CommonTokenStream) Mark() int {
+ return 0
+}
+
+func (c *CommonTokenStream) Release(_ int) {}
+
+func (c *CommonTokenStream) Reset() {
+ c.fetchedEOF = false
+ c.tokens = make([]Token, 0)
+ c.Seek(0)
+}
+
+func (c *CommonTokenStream) Seek(index int) {
+ c.lazyInit()
+ c.index = c.adjustSeekIndex(index)
+}
+
+func (c *CommonTokenStream) Get(index int) Token {
+ c.lazyInit()
+
+ return c.tokens[index]
+}
+
+func (c *CommonTokenStream) Consume() {
+ SkipEOFCheck := false
+
+ if c.index >= 0 {
+ if c.fetchedEOF {
+			// The last token in tokens is EOF. Skip the check if p indexes any fetched
+			// token except the last.
+ SkipEOFCheck = c.index < len(c.tokens)-1
+ } else {
+ // No EOF token in tokens. Skip the check if p indexes a fetched token.
+ SkipEOFCheck = c.index < len(c.tokens)
+ }
+ } else {
+ // Not yet initialized
+ SkipEOFCheck = false
+ }
+
+ if !SkipEOFCheck && c.LA(1) == TokenEOF {
+ panic("cannot consume EOF")
+ }
+
+ if c.Sync(c.index + 1) {
+ c.index = c.adjustSeekIndex(c.index + 1)
+ }
+}
+
+// Sync makes sure index i in tokens has a token, returning true if a token is
+// located at index i and false otherwise.
+func (c *CommonTokenStream) Sync(i int) bool {
+ n := i - len(c.tokens) + 1 // How many more elements do we need?
+
+ if n > 0 {
+ fetched := c.fetch(n)
+ return fetched >= n
+ }
+
+ return true
+}
+
+// fetch adds n elements to buffer and returns the actual number of elements
+// added to the buffer.
+func (c *CommonTokenStream) fetch(n int) int {
+ if c.fetchedEOF {
+ return 0
+ }
+
+ for i := 0; i < n; i++ {
+ t := c.tokenSource.NextToken()
+
+ t.SetTokenIndex(len(c.tokens))
+ c.tokens = append(c.tokens, t)
+
+ if t.GetTokenType() == TokenEOF {
+ c.fetchedEOF = true
+
+ return i + 1
+ }
+ }
+
+ return n
+}
+
+// GetTokens gets all tokens from start to stop inclusive.
+func (c *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []Token {
+ if start < 0 || stop < 0 {
+ return nil
+ }
+
+ c.lazyInit()
+
+ subset := make([]Token, 0)
+
+ if stop >= len(c.tokens) {
+ stop = len(c.tokens) - 1
+ }
+
+	// The interval is inclusive of stop, per the doc comment above (and the Java runtime).
+	for i := start; i <= stop; i++ {
+ t := c.tokens[i]
+
+ if t.GetTokenType() == TokenEOF {
+ break
+ }
+
+ if types == nil || types.contains(t.GetTokenType()) {
+ subset = append(subset, t)
+ }
+ }
+
+ return subset
+}
+
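+// LA returns the token type of the ith lookahead token on this stream's channel.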
+func (c *CommonTokenStream) LA(i int) int {
+ return c.LT(i).GetTokenType()
+}
+
+func (c *CommonTokenStream) lazyInit() {
+ if c.index == -1 {
+ c.setup()
+ }
+}
+
+func (c *CommonTokenStream) setup() {
+ c.Sync(0)
+ c.index = c.adjustSeekIndex(0)
+}
+
+func (c *CommonTokenStream) GetTokenSource() TokenSource {
+ return c.tokenSource
+}
+
+// SetTokenSource resets the c token stream by setting its token source.
+func (c *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {
+ c.tokenSource = tokenSource
+ c.tokens = make([]Token, 0)
+ c.index = -1
+ c.fetchedEOF = false
+}
+
+// NextTokenOnChannel returns the index of the next token on channel given a
+// starting index. Returns i if tokens[i] is on channel. Returns -1 if there are
+// no tokens on channel between 'i' and [TokenEOF].
+func (c *CommonTokenStream) NextTokenOnChannel(i, _ int) int {
+ c.Sync(i)
+
+ if i >= len(c.tokens) {
+ return -1
+ }
+
+ token := c.tokens[i]
+
+ for token.GetChannel() != c.channel {
+ if token.GetTokenType() == TokenEOF {
+ return -1
+ }
+
+ i++
+ c.Sync(i)
+ token = c.tokens[i]
+ }
+
+ return i
+}
+
+// previousTokenOnChannel returns the index of the previous token on channel
+// given a starting index. Returns i if tokens[i] is on channel. Returns -1 if
+// there are no tokens on channel between i and 0.
+func (c *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
+ for i >= 0 && c.tokens[i].GetChannel() != channel {
+ i--
+ }
+
+ return i
+}
+
+// GetHiddenTokensToRight collects all tokens on a specified channel to the
+// right of the current token up until we see a token on DEFAULT_TOKEN_CHANNEL
+// or EOF. If channel is -1, it finds any non-default channel token.
+func (c *CommonTokenStream) GetHiddenTokensToRight(tokenIndex, channel int) []Token {
+ c.lazyInit()
+
+ if tokenIndex < 0 || tokenIndex >= len(c.tokens) {
+ panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
+ }
+
+ nextOnChannel := c.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
+ from := tokenIndex + 1
+
+ // If no onChannel to the right, then nextOnChannel == -1, so set 'to' to the last token
+ var to int
+
+ if nextOnChannel == -1 {
+ to = len(c.tokens) - 1
+ } else {
+ to = nextOnChannel
+ }
+
+ return c.filterForChannel(from, to, channel)
+}
+
+// GetHiddenTokensToLeft collects all tokens on channel to the left of the
+// current token until we see a token on DEFAULT_TOKEN_CHANNEL. If channel is
+// -1, it finds any non default channel token.
+func (c *CommonTokenStream) GetHiddenTokensToLeft(tokenIndex, channel int) []Token {
+ c.lazyInit()
+
+ if tokenIndex < 0 || tokenIndex >= len(c.tokens) {
+ panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
+ }
+
+ prevOnChannel := c.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
+
+ if prevOnChannel == tokenIndex-1 {
+ return nil
+ }
+
+ // If there are none on channel to the left and prevOnChannel == -1 then from = 0
+ from := prevOnChannel + 1
+ to := tokenIndex - 1
+
+ return c.filterForChannel(from, to, channel)
+}
+
+func (c *CommonTokenStream) filterForChannel(left, right, channel int) []Token {
+ hidden := make([]Token, 0)
+
+ for i := left; i < right+1; i++ {
+ t := c.tokens[i]
+
+ if channel == -1 {
+ if t.GetChannel() != LexerDefaultTokenChannel {
+ hidden = append(hidden, t)
+ }
+ } else if t.GetChannel() == channel {
+ hidden = append(hidden, t)
+ }
+ }
+
+ if len(hidden) == 0 {
+ return nil
+ }
+
+ return hidden
+}
+
+func (c *CommonTokenStream) GetSourceName() string {
+ return c.tokenSource.GetSourceName()
+}
+
+func (c *CommonTokenStream) Size() int {
+ return len(c.tokens)
+}
+
+func (c *CommonTokenStream) Index() int {
+ return c.index
+}
+
+func (c *CommonTokenStream) GetAllText() string {
+ c.Fill()
+ return c.GetTextFromInterval(NewInterval(0, len(c.tokens)-1))
+}
+
+func (c *CommonTokenStream) GetTextFromTokens(start, end Token) string {
+ if start == nil || end == nil {
+ return ""
+ }
+
+ return c.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
+}
+
+func (c *CommonTokenStream) GetTextFromRuleContext(interval RuleContext) string {
+ return c.GetTextFromInterval(interval.GetSourceInterval())
+}
+
+func (c *CommonTokenStream) GetTextFromInterval(interval Interval) string {
+ c.lazyInit()
+ c.Sync(interval.Stop)
+
+ start := interval.Start
+ stop := interval.Stop
+
+ if start < 0 || stop < 0 {
+ return ""
+ }
+
+ if stop >= len(c.tokens) {
+ stop = len(c.tokens) - 1
+ }
+
+ s := ""
+
+ for i := start; i < stop+1; i++ {
+ t := c.tokens[i]
+
+ if t.GetTokenType() == TokenEOF {
+ break
+ }
+
+ s += t.GetText()
+ }
+
+ return s
+}
+
+// Fill gets all tokens from the lexer until EOF.
+func (c *CommonTokenStream) Fill() {
+ c.lazyInit()
+
+ for c.fetch(1000) == 1000 {
+ continue
+ }
+}
+
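+// adjustSeekIndex advances i to the next on-channel token, so the stream index
+// always refers to a token on the configured channel.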
+func (c *CommonTokenStream) adjustSeekIndex(i int) int {
+ return c.NextTokenOnChannel(i, c.channel)
+}
+
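+// LB returns the kth on-channel token to the left of the current index, or nil
+// if k is 0 or the walk would run off the start of the buffer.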
+func (c *CommonTokenStream) LB(k int) Token {
+ if k == 0 || c.index-k < 0 {
+ return nil
+ }
+
+ i := c.index
+ n := 1
+
+ // Find k good tokens looking backward
+ for n <= k {
+ // Skip off-channel tokens
+ i = c.previousTokenOnChannel(i-1, c.channel)
+ n++
+ }
+
+ if i < 0 {
+ return nil
+ }
+
+ return c.tokens[i]
+}
+
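+// LT returns the kth lookahead token on this stream's channel: LT(1) is the
+// current token, k == 0 yields nil, and a negative k delegates to LB to look
+// backwards.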
+func (c *CommonTokenStream) LT(k int) Token {
+ c.lazyInit()
+
+ if k == 0 {
+ return nil
+ }
+
+ if k < 0 {
+ return c.LB(-k)
+ }
+
+ i := c.index
+	n := 1 // We know the current token, tokens[i], is valid
+
+ // Find k good tokens
+ for n < k {
+ // Skip off-channel tokens, but make sure to not look past EOF
+ if c.Sync(i + 1) {
+ i = c.NextTokenOnChannel(i+1, c.channel)
+ }
+
+ n++
+ }
+
+ return c.tokens[i]
+}
+
+// getNumberOfOnChannelTokens returns the number of on-channel tokens in the
+// stream, counting EOF once.
+func (c *CommonTokenStream) getNumberOfOnChannelTokens() int {
+ var n int
+
+ c.Fill()
+
+ for i := 0; i < len(c.tokens); i++ {
+ t := c.tokens[i]
+
+ if t.GetChannel() == c.channel {
+ n++
+ }
+
+ if t.GetTokenType() == TokenEOF {
+ break
+ }
+ }
+
+ return n
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/comparators.go b/vendor/github.com/antlr4-go/antlr/v4/comparators.go
new file mode 100644
index 0000000..7467e9b
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/comparators.go
@@ -0,0 +1,150 @@
+package antlr
+
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+// This file contains all the implementations of custom comparators used for generic collections when the
+// Hash() and Equals() funcs supplied by the struct objects themselves need to be overridden. Normally, we would
+// put the comparators in the source files for the structs themselves, but given that the organization of this code is
+// sorta kinda based upon the Java code, I found it confusing trying to find out which comparator was where and used by
+// which instantiation of a collection. For instance, an Array2DHashSet in the Java source, when used with ATNConfig
+// collections, requires three different comparators depending on what the collection is being used for. Collecting - pun intended -
+// all the comparators here makes it much easier to see which implementation of hash and equals is used by which collection.
+// It also makes it easy to verify that the Hash() and Equals() functions marry up with the Java implementations.
+
+// ObjEqComparator is the equivalent of the Java ObjectEqualityComparator, which is the default instance of
+// Equality comparator. We do not have inheritance in Go, only interfaces, so we use generics to enforce some
+// type safety and avoid having to implement this for every type that we want to perform comparison on.
+//
+// This comparator works by using the standard Hash() and Equals() methods of the type T that is being compared. Which
+// allows us to use it in any collection instance that does not require a special hash or equals implementation.
+type ObjEqComparator[T Collectable[T]] struct{}
+
+var (
+ aStateEqInst = &ObjEqComparator[ATNState]{}
+ aConfEqInst = &ObjEqComparator[*ATNConfig]{}
+
+ // aConfCompInst is the comparator used for the ATNConfigSet for the configLookup cache
+ aConfCompInst = &ATNConfigComparator[*ATNConfig]{}
+ atnConfCompInst = &BaseATNConfigComparator[*ATNConfig]{}
+ dfaStateEqInst = &ObjEqComparator[*DFAState]{}
+ semctxEqInst = &ObjEqComparator[SemanticContext]{}
+ atnAltCfgEqInst = &ATNAltConfigComparator[*ATNConfig]{}
+ pContextEqInst = &ObjEqComparator[*PredictionContext]{}
+)
+
+// Equals2 delegates to the Equals() method of type T
+func (c *ObjEqComparator[T]) Equals2(o1, o2 T) bool {
+ return o1.Equals(o2)
+}
+
+// Hash1 delegates to the Hash() method of type T
+func (c *ObjEqComparator[T]) Hash1(o T) int {
+
+ return o.Hash()
+}
+
+type SemCComparator[T Collectable[T]] struct{}
+
+// ATNConfigComparator is used as the comparator for the configLookup field of an ATNConfigSet
+// and has a custom Equals() and Hash() implementation, because equality is not based on the
+// standard Hash() and Equals() methods of the ATNConfig type.
+type ATNConfigComparator[T Collectable[T]] struct {
+}
+
+// Equals2 is a custom comparator for ATNConfigs specifically for configLookup
+func (c *ATNConfigComparator[T]) Equals2(o1, o2 *ATNConfig) bool {
+
+ // Same pointer, must be equal, even if both nil
+ //
+ if o1 == o2 {
+ return true
+
+ }
+
+ // If either are nil, but not both, then the result is false
+ //
+ if o1 == nil || o2 == nil {
+ return false
+ }
+
+ return o1.GetState().GetStateNumber() == o2.GetState().GetStateNumber() &&
+ o1.GetAlt() == o2.GetAlt() &&
+ o1.GetSemanticContext().Equals(o2.GetSemanticContext())
+}
+
+// Hash1 is custom hash implementation for ATNConfigs specifically for configLookup
+func (c *ATNConfigComparator[T]) Hash1(o *ATNConfig) int {
+
+ hash := 7
+ hash = 31*hash + o.GetState().GetStateNumber()
+ hash = 31*hash + o.GetAlt()
+ hash = 31*hash + o.GetSemanticContext().Hash()
+ return hash
+}
+
+// ATNAltConfigComparator is used as the comparator for mapping configs to Alt Bitsets
+type ATNAltConfigComparator[T Collectable[T]] struct {
+}
+
+// Equals2 is a custom comparator for ATNConfigs specifically for configLookup
+func (c *ATNAltConfigComparator[T]) Equals2(o1, o2 *ATNConfig) bool {
+
+ // Same pointer, must be equal, even if both nil
+ //
+ if o1 == o2 {
+ return true
+
+ }
+
+ // If either are nil, but not both, then the result is false
+ //
+ if o1 == nil || o2 == nil {
+ return false
+ }
+
+ return o1.GetState().GetStateNumber() == o2.GetState().GetStateNumber() &&
+ o1.GetContext().Equals(o2.GetContext())
+}
+
+// Hash1 is custom hash implementation for ATNConfigs specifically for configLookup
+func (c *ATNAltConfigComparator[T]) Hash1(o *ATNConfig) int {
+ h := murmurInit(7)
+ h = murmurUpdate(h, o.GetState().GetStateNumber())
+ h = murmurUpdate(h, o.GetContext().Hash())
+ return murmurFinish(h, 2)
+}
+
+// BaseATNConfigComparator is used as the comparator for the configLookup field of a ATNConfigSet
+// and has a custom Equals() and Hash() implementation, because equality is not based on the
+// standard Hash() and Equals() methods of the ATNConfig type.
+type BaseATNConfigComparator[T Collectable[T]] struct {
+}
+
+// Equals2 is a custom comparator for ATNConfigs specifically for baseATNConfigSet
+func (c *BaseATNConfigComparator[T]) Equals2(o1, o2 *ATNConfig) bool {
+
+ // Same pointer, must be equal, even if both nil
+ //
+ if o1 == o2 {
+ return true
+
+ }
+
+ // If either are nil, but not both, then the result is false
+ //
+ if o1 == nil || o2 == nil {
+ return false
+ }
+
+ return o1.GetState().GetStateNumber() == o2.GetState().GetStateNumber() &&
+ o1.GetAlt() == o2.GetAlt() &&
+ o1.GetSemanticContext().Equals(o2.GetSemanticContext())
+}
+
+// Hash1 is custom hash implementation for ATNConfigs specifically for configLookup, but in fact just
+// delegates to the standard Hash() method of the ATNConfig type.
+func (c *BaseATNConfigComparator[T]) Hash1(o *ATNConfig) int {
+ return o.Hash()
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/configuration.go b/vendor/github.com/antlr4-go/antlr/v4/configuration.go
new file mode 100644
index 0000000..c2b7245
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/configuration.go
@@ -0,0 +1,214 @@
+package antlr
+
+type runtimeConfiguration struct {
+ statsTraceStacks bool
+ lexerATNSimulatorDebug bool
+ lexerATNSimulatorDFADebug bool
+ parserATNSimulatorDebug bool
+ parserATNSimulatorTraceATNSim bool
+ parserATNSimulatorDFADebug bool
+ parserATNSimulatorRetryDebug bool
+ lRLoopEntryBranchOpt bool
+ memoryManager bool
+}
+
+// Global runtime configuration
+var runtimeConfig = runtimeConfiguration{
+ lRLoopEntryBranchOpt: true,
+}
+
+type runtimeOption func(*runtimeConfiguration) error
+
+// ConfigureRuntime allows the runtime to be configured globally setting things like trace and statistics options.
+// It uses the functional options pattern for go. This is a package global function as it operates on the runtime
+// configuration regardless of the instantiation of anything higher up such as a parser or lexer. Generally this is
+// used for debugging/tracing/statistics options, which are usually used by the runtime maintainers (or rather the
+// only maintainer). However, it is possible that you might want to use this to set a global option concerning the
+// memory allocation type used by the runtime such as sync.Pool or not.
+//
+// The options are applied in the order they are passed in, so the last option will override any previous options.
+//
+// For example, if you want to turn on the collection create point stack flag to true, you can do:
+//
+// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(true))
+//
+// If you want to turn it off, you can do:
+//
+// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(false))
+func ConfigureRuntime(options ...runtimeOption) error {
+ for _, option := range options {
+ err := option(&runtimeConfig)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// WithStatsTraceStacks sets the global flag indicating whether to collect stack traces at the create-point of
+// certain structs, such as collections, or the use point of certain methods such as Put().
+// Because this can be expensive, it is turned off by default. However, it
+// can be useful to track down exactly where memory is being created and used.
+//
+// Use:
+//
+// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(true))
+//
+// You can turn it off at any time using:
+//
+// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(false))
+func WithStatsTraceStacks(trace bool) runtimeOption {
+ return func(config *runtimeConfiguration) error {
+ config.statsTraceStacks = trace
+ return nil
+ }
+}
+
+// WithLexerATNSimulatorDebug sets the global flag indicating whether to log debug information from the lexer [ATN]
+// simulator. This is useful for debugging lexer issues by comparing the output with the Java runtime. Only useful
+// to the runtime maintainers.
+//
+// Use:
+//
+// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDebug(true))
+//
+// You can turn it off at any time using:
+//
+// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDebug(false))
+func WithLexerATNSimulatorDebug(debug bool) runtimeOption {
+ return func(config *runtimeConfiguration) error {
+ config.lexerATNSimulatorDebug = debug
+ return nil
+ }
+}
+
+// WithLexerATNSimulatorDFADebug sets the global flag indicating whether to log debug information from the lexer [ATN] [DFA]
+// simulator. This is useful for debugging lexer issues by comparing the output with the Java runtime. Only useful
+// to the runtime maintainers.
+//
+// Use:
+//
+// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDFADebug(true))
+//
+// You can turn it off at any time using:
+//
+// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDFADebug(false))
+func WithLexerATNSimulatorDFADebug(debug bool) runtimeOption {
+ return func(config *runtimeConfiguration) error {
+ config.lexerATNSimulatorDFADebug = debug
+ return nil
+ }
+}
+
+// WithParserATNSimulatorDebug sets the global flag indicating whether to log debug information from the parser [ATN]
+// simulator. This is useful for debugging parser issues by comparing the output with the Java runtime. Only useful
+// to the runtime maintainers.
+//
+// Use:
+//
+// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDebug(true))
+//
+// You can turn it off at any time using:
+//
+// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDebug(false))
+func WithParserATNSimulatorDebug(debug bool) runtimeOption {
+ return func(config *runtimeConfiguration) error {
+ config.parserATNSimulatorDebug = debug
+ return nil
+ }
+}
+
+// WithParserATNSimulatorTraceATNSim sets the global flag indicating whether to log trace information from the parser [ATN] simulator
+// [DFA]. This is useful for debugging parser issues by comparing the output with the Java runtime. Only useful
+// to the runtime maintainers.
+//
+// Use:
+//
+// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorTraceATNSim(true))
+//
+// You can turn it off at any time using:
+//
+// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorTraceATNSim(false))
+func WithParserATNSimulatorTraceATNSim(trace bool) runtimeOption {
+ return func(config *runtimeConfiguration) error {
+ config.parserATNSimulatorTraceATNSim = trace
+ return nil
+ }
+}
+
+// WithParserATNSimulatorDFADebug sets the global flag indicating whether to log debug information from the parser [ATN] [DFA]
+// simulator. This is useful for debugging parser issues by comparing the output with the Java runtime. Only useful
+// to the runtime maintainers.
+//
+// Use:
+//
+// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDFADebug(true))
+//
+// You can turn it off at any time using:
+//
+// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDFADebug(false))
+func WithParserATNSimulatorDFADebug(debug bool) runtimeOption {
+ return func(config *runtimeConfiguration) error {
+ config.parserATNSimulatorDFADebug = debug
+ return nil
+ }
+}
+
+// WithParserATNSimulatorRetryDebug sets the global flag indicating whether to log debug information from the parser [ATN] [DFA]
+// simulator when retrying a decision. This is useful for debugging parser issues by comparing the output with the Java runtime.
+// Only useful to the runtime maintainers.
+//
+// Use:
+//
+// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorRetryDebug(true))
+//
+// You can turn it off at any time using:
+//
+// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorRetryDebug(false))
+func WithParserATNSimulatorRetryDebug(debug bool) runtimeOption {
+ return func(config *runtimeConfiguration) error {
+ config.parserATNSimulatorRetryDebug = debug
+ return nil
+ }
+}
+
+// WithLRLoopEntryBranchOpt sets the global flag indicating whether left-recursive loop operations should be
+// optimized or not. This is useful for debugging parser issues by comparing the output with the Java runtime.
+// It turns off the functionality of [canDropLoopEntryEdgeInLeftRecursiveRule] in [ParserATNSimulator].
+//
+// Note that the default is to use this optimization.
+//
+// Use:
+//
+// antlr.ConfigureRuntime(antlr.WithLRLoopEntryBranchOpt(true))
+//
+// You can turn it off at any time using:
+//
+// antlr.ConfigureRuntime(antlr.WithLRLoopEntryBranchOpt(false))
+func WithLRLoopEntryBranchOpt(off bool) runtimeOption {
+ return func(config *runtimeConfiguration) error {
+ config.lRLoopEntryBranchOpt = off
+ return nil
+ }
+}
+
+// WithMemoryManager sets the global flag indicating whether to use the memory manager or not. This is useful
+// for poorly constructed grammars that create a lot of garbage. It turns on the functionality of [memoryManager], which
+// will intercept garbage collection and cause available memory to be reused. At the end of the day, this is no substitute
+// for fixing your grammar by ridding yourself of extreme ambiguity. But if you are just trying to reuse an open-source
+// grammar, this may help make it more practical.
+//
+// Note that the default is to use normal Go memory allocation and not pool memory.
+//
+// Use:
+//
+// antlr.ConfigureRuntime(antlr.WithMemoryManager(true))
+//
+// Note that if you turn this on, you should probably leave it on. You should use only one memory strategy or the other
+// and should remember to nil out any references to the parser or lexer when you are done with them.
+func WithMemoryManager(use bool) runtimeOption {
+ return func(config *runtimeConfiguration) error {
+ config.memoryManager = use
+ return nil
+ }
+}
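+
+// As an editorial illustration (not part of the upstream source): because each
+// option is a runtimeOption closure, several flags can be set in a single
+// ConfigureRuntime call, assuming ConfigureRuntime is variadic over these
+// options as the functional-option pattern suggests, and the returned error
+// should be checked:
+//
+//	if err := antlr.ConfigureRuntime(
+//	    antlr.WithLexerATNSimulatorDebug(true),
+//	    antlr.WithParserATNSimulatorDebug(true),
+//	    antlr.WithMemoryManager(false),
+//	); err != nil {
+//	    panic(err)
+//	}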
diff --git a/vendor/github.com/antlr4-go/antlr/v4/dfa.go b/vendor/github.com/antlr4-go/antlr/v4/dfa.go
new file mode 100644
index 0000000..6b63eb1
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/dfa.go
@@ -0,0 +1,175 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+// DFA represents the Deterministic Finite Automaton used by the recognizer, including all the states it can
+// reach and the transitions between them.
+type DFA struct {
+ // atnStartState is the ATN state in which this was created
+ atnStartState DecisionState
+
+ decision int
+
+ // states is all the DFA states. Use Map to get the old state back; Set can only
+ // indicate whether it is there. Go maps implement key hash collisions and so on and are very
+ // good, but the DFAState is an object and can't be used directly as the key as it can in, say, Java
+ // and C#, whereby if the hashcode is the same for two objects, then Equals() is called against them
+ // to see if they really are the same object. Hence, we have our own map storage.
+ //
+ states *JStore[*DFAState, *ObjEqComparator[*DFAState]]
+
+ numstates int
+
+ s0 *DFAState
+
+ // precedenceDfa is the backing field for isPrecedenceDfa and setPrecedenceDfa.
+ // True if the DFA is for a precedence decision and false otherwise.
+ precedenceDfa bool
+}
+
+func NewDFA(atnStartState DecisionState, decision int) *DFA {
+ dfa := &DFA{
+ atnStartState: atnStartState,
+ decision: decision,
+ states: nil, // Lazy initialize
+ }
+ if s, ok := atnStartState.(*StarLoopEntryState); ok && s.precedenceRuleDecision {
+ dfa.precedenceDfa = true
+ dfa.s0 = NewDFAState(-1, NewATNConfigSet(false))
+ dfa.s0.isAcceptState = false
+ dfa.s0.requiresFullContext = false
+ }
+ return dfa
+}
+
+// getPrecedenceStartState gets the start state for the current precedence and
+// returns the start state corresponding to the specified precedence if a start
+// state exists for the specified precedence and nil otherwise. d must be a
+// precedence DFA. See also isPrecedenceDfa.
+func (d *DFA) getPrecedenceStartState(precedence int) *DFAState {
+ if !d.getPrecedenceDfa() {
+ panic("only precedence DFAs may contain a precedence start state")
+ }
+
+ // s0.edges is never nil for a precedence DFA
+ if precedence < 0 || precedence >= len(d.getS0().getEdges()) {
+ return nil
+ }
+
+ return d.getS0().getIthEdge(precedence)
+}
+
+// setPrecedenceStartState sets the start state for the current precedence. d
+// must be a precedence DFA. See also isPrecedenceDfa.
+func (d *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
+ if !d.getPrecedenceDfa() {
+ panic("only precedence DFAs may contain a precedence start state")
+ }
+
+ if precedence < 0 {
+ return
+ }
+
+ // Synchronization on s0 here is ok. When the DFA is turned into a
+ // precedence DFA, s0 will be initialized once and not updated again. s0.edges
+ // is never nil for a precedence DFA.
+ s0 := d.getS0()
+ if precedence >= s0.numEdges() {
+ edges := append(s0.getEdges(), make([]*DFAState, precedence+1-s0.numEdges())...)
+ s0.setEdges(edges)
+ d.setS0(s0)
+ }
+
+ s0.setIthEdge(precedence, startState)
+}
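+
+// To illustrate the scheme above (an editorial note, not upstream
+// documentation): for a precedence DFA, s0.edges is reused as a growable table
+// keyed by precedence level rather than by input symbol, so after
+// setPrecedenceStartState(3, st) the table holds at least four entries and
+// getPrecedenceStartState(3) returns st.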
+
+func (d *DFA) getPrecedenceDfa() bool {
+ return d.precedenceDfa
+}
+
+// setPrecedenceDfa sets whether d is a precedence DFA. If precedenceDfa differs
+// from the current DFA configuration, then d.states is cleared, d.precedenceDfa is
+// updated, and the initial state s0 is set either to a new DFAState with an empty
+// outgoing DFAState.edges slice (used to store the start states for individual
+// precedence values) when precedenceDfa is true, or to nil otherwise.
+func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
+ if d.getPrecedenceDfa() != precedenceDfa {
+ d.states = nil // Lazy initialize
+ d.numstates = 0
+
+ if precedenceDfa {
+ precedenceState := NewDFAState(-1, NewATNConfigSet(false))
+ precedenceState.setEdges(make([]*DFAState, 0))
+ precedenceState.isAcceptState = false
+ precedenceState.requiresFullContext = false
+ d.setS0(precedenceState)
+ } else {
+ d.setS0(nil)
+ }
+
+ d.precedenceDfa = precedenceDfa
+ }
+}
+
+// Len returns the number of states in d. We use this instead of accessing states directly so that we can implement lazy
+// instantiation of the states JStore.
+func (d *DFA) Len() int {
+ if d.states == nil {
+ return 0
+ }
+ return d.states.Len()
+}
+
+// Get returns a state that matches s if it is present in the DFA state set. We defer to this
+// function instead of accessing states directly so that we can implement lazy instantiation of the states JStore.
+func (d *DFA) Get(s *DFAState) (*DFAState, bool) {
+ if d.states == nil {
+ return nil, false
+ }
+ return d.states.Get(s)
+}
+
+func (d *DFA) Put(s *DFAState) (*DFAState, bool) {
+ if d.states == nil {
+ d.states = NewJStore[*DFAState, *ObjEqComparator[*DFAState]](dfaStateEqInst, DFAStateCollection, "DFA via DFA.Put")
+ }
+ return d.states.Put(s)
+}
+
+func (d *DFA) getS0() *DFAState {
+ return d.s0
+}
+
+func (d *DFA) setS0(s *DFAState) {
+ d.s0 = s
+}
+
+// sortedStates returns the states in d sorted by their state number, or an empty set if d.states is nil.
+func (d *DFA) sortedStates() []*DFAState {
+ if d.states == nil {
+ return []*DFAState{}
+ }
+ vs := d.states.SortedSlice(func(i, j *DFAState) bool {
+ return i.stateNumber < j.stateNumber
+ })
+
+ return vs
+}
+
+func (d *DFA) String(literalNames []string, symbolicNames []string) string {
+ if d.getS0() == nil {
+ return ""
+ }
+
+ return NewDFASerializer(d, literalNames, symbolicNames).String()
+}
+
+func (d *DFA) ToLexerString() string {
+ if d.getS0() == nil {
+ return ""
+ }
+
+ return NewLexerDFASerializer(d).String()
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go b/vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go
new file mode 100644
index 0000000..0e11009
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go
@@ -0,0 +1,158 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// DFASerializer is a DFA walker that knows how to dump the DFA states to serialized
+// strings.
+type DFASerializer struct {
+ dfa *DFA
+ literalNames []string
+ symbolicNames []string
+}
+
+func NewDFASerializer(dfa *DFA, literalNames, symbolicNames []string) *DFASerializer {
+ if literalNames == nil {
+ literalNames = make([]string, 0)
+ }
+
+ if symbolicNames == nil {
+ symbolicNames = make([]string, 0)
+ }
+
+ return &DFASerializer{
+ dfa: dfa,
+ literalNames: literalNames,
+ symbolicNames: symbolicNames,
+ }
+}
+
+func (d *DFASerializer) String() string {
+ if d.dfa.getS0() == nil {
+ return ""
+ }
+
+ buf := ""
+ states := d.dfa.sortedStates()
+
+ for _, s := range states {
+ if s.edges != nil {
+ n := len(s.edges)
+
+ for j := 0; j < n; j++ {
+ t := s.edges[j]
+
+ if t != nil && t.stateNumber != 0x7FFFFFFF {
+ buf += d.GetStateString(s)
+ buf += "-"
+ buf += d.getEdgeLabel(j)
+ buf += "->"
+ buf += d.GetStateString(t)
+ buf += "\n"
+ }
+ }
+ }
+ }
+
+ if len(buf) == 0 {
+ return ""
+ }
+
+ return buf
+}
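+
+// To illustrate the output format above (an editorial note, not upstream
+// documentation): each line is "<state>-<edgeLabel>-><state>", where
+// GetStateString prefixes accept states with ':' and suffixes full-context
+// states with '^', and accept states append "=>" plus the predicted
+// alternative or predicates. A small decision DFA might therefore serialize as:
+//
+//	s0-ID->:s1=>1
+//	s0-INT->:s2=>2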
+
+func (d *DFASerializer) getEdgeLabel(i int) string {
+ if i == 0 {
+ return "EOF"
+ } else if d.literalNames != nil && i-1 < len(d.literalNames) {
+ return d.literalNames[i-1]
+ } else if d.symbolicNames != nil && i-1 < len(d.symbolicNames) {
+ return d.symbolicNames[i-1]
+ }
+
+ return strconv.Itoa(i - 1)
+}
+
+func (d *DFASerializer) GetStateString(s *DFAState) string {
+ var a, b string
+
+ if s.isAcceptState {
+ a = ":"
+ }
+
+ if s.requiresFullContext {
+ b = "^"
+ }
+
+ baseStateStr := a + "s" + strconv.Itoa(s.stateNumber) + b
+
+ if s.isAcceptState {
+ if s.predicates != nil {
+ return baseStateStr + "=>" + fmt.Sprint(s.predicates)
+ }
+
+ return baseStateStr + "=>" + fmt.Sprint(s.prediction)
+ }
+
+ return baseStateStr
+}
+
+type LexerDFASerializer struct {
+ *DFASerializer
+}
+
+func NewLexerDFASerializer(dfa *DFA) *LexerDFASerializer {
+ return &LexerDFASerializer{DFASerializer: NewDFASerializer(dfa, nil, nil)}
+}
+
+func (l *LexerDFASerializer) getEdgeLabel(i int) string {
+ var sb strings.Builder
+ sb.Grow(6)
+ sb.WriteByte('\'')
+ sb.WriteRune(rune(i))
+ sb.WriteByte('\'')
+ return sb.String()
+}
+
+func (l *LexerDFASerializer) String() string {
+ if l.dfa.getS0() == nil {
+ return ""
+ }
+
+ buf := ""
+ states := l.dfa.sortedStates()
+
+ for i := 0; i < len(states); i++ {
+ s := states[i]
+
+ if s.edges != nil {
+ n := len(s.edges)
+
+ for j := 0; j < n; j++ {
+ t := s.edges[j]
+
+ if t != nil && t.stateNumber != 0x7FFFFFFF {
+ buf += l.GetStateString(s)
+ buf += "-"
+ buf += l.getEdgeLabel(j)
+ buf += "->"
+ buf += l.GetStateString(t)
+ buf += "\n"
+ }
+ }
+ }
+ }
+
+ if len(buf) == 0 {
+ return ""
+ }
+
+ return buf
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/dfa_state.go b/vendor/github.com/antlr4-go/antlr/v4/dfa_state.go
new file mode 100644
index 0000000..6541430
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/dfa_state.go
@@ -0,0 +1,170 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+)
+
+// PredPrediction maps a predicate to a predicted alternative.
+type PredPrediction struct {
+ alt int
+ pred SemanticContext
+}
+
+func NewPredPrediction(pred SemanticContext, alt int) *PredPrediction {
+ return &PredPrediction{alt: alt, pred: pred}
+}
+
+func (p *PredPrediction) String() string {
+ return "(" + fmt.Sprint(p.pred) + ", " + fmt.Sprint(p.alt) + ")"
+}
+
+// DFAState represents a set of possible [ATN] configurations. As Aho, Sethi,
+// Ullman p. 117 says: "The DFA uses its state to keep track of all possible
+// states the ATN can be in after reading each input symbol. That is to say,
+// after reading input a1, a2,..an, the DFA is in a state that represents the
+// subset T of the states of the ATN that are reachable from the ATN's start
+// state along some path labeled a1a2..an."
+//
+// In conventional NFA-to-DFA conversion, therefore, the subset T would be a bitset representing the set of
+// states the [ATN] could be in. We need to track the alt predicted by each state
+// as well, however. More importantly, we need to maintain a stack of states,
+// tracking the closure operations as they jump from rule to rule, emulating
+// rule invocations (method calls). I have to add a stack to simulate the proper
+// lookahead sequences for the underlying LL grammar from which the ATN was
+// derived.
+//
+// I use a set of [ATNConfig] objects, not simple states. An [ATNConfig] is both a
+// state (ala normal conversion) and a [RuleContext] describing the chain of rules
+// (if any) followed to arrive at that state.
+//
+// A [DFAState] may have multiple references to a particular state, but with
+// different [ATN] contexts (with same or different alts) meaning that state was
+// reached via a different set of rule invocations.
+type DFAState struct {
+ stateNumber int
+ configs *ATNConfigSet
+
+ // edges elements point to the target of the symbol. Shift up by 1 so that
+ // (-1) Token.EOF maps to the first element.
+ edges []*DFAState
+
+ isAcceptState bool
+
+ // prediction is the 'ttype' we match or alt we predict if the state is 'accept'.
+ // Set to ATN.INVALID_ALT_NUMBER when predicates != nil or
+ // requiresFullContext.
+ prediction int
+
+ lexerActionExecutor *LexerActionExecutor
+
+ // requiresFullContext indicates it was created during an SLL prediction that
+ // discovered a conflict between the configurations in the state. Future
+ // ParserATNSimulator.execATN invocations immediately jump to doing
+ // full context prediction if true.
+ requiresFullContext bool
+
+ // predicates is the predicates associated with the ATN configurations of the
+ // DFA state during SLL parsing. When we have predicates, requiresFullContext
+ // is false, since full context prediction evaluates predicates on-the-fly. If
+ // this list is not nil, then prediction is ATN.INVALID_ALT_NUMBER.
+ //
+ // We only use these for non-requiresFullContext but conflicting states. That
+ // means we know from the context (it's $ or we don't dip into outer context)
+ // that it's an ambiguity not a conflict.
+ //
+ // This list is computed by
+ // ParserATNSimulator.predicateDFAState.
+ predicates []*PredPrediction
+}
+
+func NewDFAState(stateNumber int, configs *ATNConfigSet) *DFAState {
+ if configs == nil {
+ configs = NewATNConfigSet(false)
+ }
+
+ return &DFAState{configs: configs, stateNumber: stateNumber}
+}
+
+// GetAltSet gets the set of all alts mentioned by all ATN configurations in d.
+func (d *DFAState) GetAltSet() []int {
+ var alts []int
+
+ if d.configs != nil {
+ for _, c := range d.configs.configs {
+ alts = append(alts, c.GetAlt())
+ }
+ }
+
+ if len(alts) == 0 {
+ return nil
+ }
+
+ return alts
+}
+
+func (d *DFAState) getEdges() []*DFAState {
+ return d.edges
+}
+
+func (d *DFAState) numEdges() int {
+ return len(d.edges)
+}
+
+func (d *DFAState) getIthEdge(i int) *DFAState {
+ return d.edges[i]
+}
+
+func (d *DFAState) setEdges(newEdges []*DFAState) {
+ d.edges = newEdges
+}
+
+func (d *DFAState) setIthEdge(i int, edge *DFAState) {
+ d.edges[i] = edge
+}
+
+func (d *DFAState) setPrediction(v int) {
+ d.prediction = v
+}
+
+func (d *DFAState) String() string {
+ var s string
+ if d.isAcceptState {
+ if d.predicates != nil {
+ s = "=>" + fmt.Sprint(d.predicates)
+ } else {
+ s = "=>" + fmt.Sprint(d.prediction)
+ }
+ }
+
+ return fmt.Sprintf("%d:%s%s", d.stateNumber, fmt.Sprint(d.configs), s)
+}
+
+func (d *DFAState) Hash() int {
+ h := murmurInit(7)
+ h = murmurUpdate(h, d.configs.Hash())
+ return murmurFinish(h, 1)
+}
+
+// Equals returns whether d equals other. Two DFAStates are equal if their ATN
+// configuration sets are the same. This method is used to see if a state
+// already exists.
+//
+// Because the number of alternatives and number of ATN configurations are
+// finite, there is a finite number of DFA states that can be processed. This is
+// necessary to show that the algorithm terminates.
+//
+// Cannot test the DFA state numbers here because in
+// ParserATNSimulator.addDFAState we need to know if any other state exists that
+// has this exact set of ATN configurations. The stateNumber is irrelevant.
+func (d *DFAState) Equals(o Collectable[*DFAState]) bool {
+ if d == o {
+ return true
+ }
+
+ return d.configs.Equals(o.(*DFAState).configs)
+}
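+
+// As an editorial illustration of the contract above (not upstream code): two
+// states built from the same configuration set are Equal and produce the same
+// Hash regardless of their state numbers, which is what lets DFA.Put return an
+// existing equivalent state. A minimal sketch, assuming configs is an
+// already-populated *ATNConfigSet:
+//
+//	a := NewDFAState(1, configs)
+//	b := NewDFAState(2, configs)
+//	fmt.Println(a.Equals(b))          // true: only the config sets are compared
+//	fmt.Println(a.Hash() == b.Hash()) // true: hash is derived from the config set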
diff --git a/vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go b/vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go
new file mode 100644
index 0000000..bd2cd8b
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go
@@ -0,0 +1,110 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "strconv"
+)
+
+// DiagnosticErrorListener is an implementation of [ErrorListener] that can be used to identify
+// certain potential correctness and performance problems in grammars. "Reports"
+// are made by calling [Parser.NotifyErrorListeners] with the appropriate message:
+//
+//   - Ambiguities: These are cases where more than one path through the
+//     grammar can Match the input.
+//   - Weak context sensitivity: These are cases where full-context
+//     prediction resolved an SLL conflict to a unique alternative which equaled the
+//     minimum alternative of the SLL conflict.
+//   - Strong (forced) context sensitivity: These are cases where the
+//     full-context prediction resolved an SLL conflict to a unique alternative,
+//     and the minimum alternative of the SLL conflict was found to not be
+//     a truly viable alternative. Two-stage parsing cannot be used for inputs where
+//     this situation occurs.
+type DiagnosticErrorListener struct {
+ *DefaultErrorListener
+
+ exactOnly bool
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func NewDiagnosticErrorListener(exactOnly bool) *DiagnosticErrorListener {
+
+ n := new(DiagnosticErrorListener)
+
+ // whether all ambiguities or only exact ambiguities are Reported.
+ n.exactOnly = exactOnly
+ return n
+}
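+
+// A minimal sketch of wiring this listener into a generated parser (an
+// editorial illustration; NewMyParser is a hypothetical generated constructor):
+//
+//	p := NewMyParser(tokenStream)
+//	p.RemoveErrorListeners()
+//	p.AddErrorListener(antlr.NewDiagnosticErrorListener(true))
+//	// Exact-ambiguity reports are normally paired with full-context prediction:
+//	p.GetInterpreter().SetPredictionMode(antlr.PredictionModeLLExactAmbigDetection)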
+
+func (d *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
+ if d.exactOnly && !exact {
+ return
+ }
+ msg := "reportAmbiguity d=" +
+ d.getDecisionDescription(recognizer, dfa) +
+ ": ambigAlts=" +
+ d.getConflictingAlts(ambigAlts, configs).String() +
+ ", input='" +
+ recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
+ recognizer.NotifyErrorListeners(msg, nil, nil)
+}
+
+func (d *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, _ *BitSet, _ *ATNConfigSet) {
+
+ msg := "reportAttemptingFullContext d=" +
+ d.getDecisionDescription(recognizer, dfa) +
+ ", input='" +
+ recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
+ recognizer.NotifyErrorListeners(msg, nil, nil)
+}
+
+func (d *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, _ int, _ *ATNConfigSet) {
+ msg := "reportContextSensitivity d=" +
+ d.getDecisionDescription(recognizer, dfa) +
+ ", input='" +
+ recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
+ recognizer.NotifyErrorListeners(msg, nil, nil)
+}
+
+func (d *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, dfa *DFA) string {
+ decision := dfa.decision
+ ruleIndex := dfa.atnStartState.GetRuleIndex()
+
+ ruleNames := recognizer.GetRuleNames()
+ if ruleIndex < 0 || ruleIndex >= len(ruleNames) {
+ return strconv.Itoa(decision)
+ }
+ ruleName := ruleNames[ruleIndex]
+ if ruleName == "" {
+ return strconv.Itoa(decision)
+ }
+ return strconv.Itoa(decision) + " (" + ruleName + ")"
+}
+
+// getConflictingAlts computes the set of conflicting or ambiguous alternatives from a
+// configuration set, if that information was not already provided by the
+// parser. It returns ReportedAlts if it is not nil, otherwise it returns the
+// set of alternatives represented in the given configuration set.
+func (d *DiagnosticErrorListener) getConflictingAlts(ReportedAlts *BitSet, set *ATNConfigSet) *BitSet {
+ if ReportedAlts != nil {
+ return ReportedAlts
+ }
+ result := NewBitSet()
+ for _, c := range set.configs {
+ result.add(c.GetAlt())
+ }
+
+ return result
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/error_listener.go b/vendor/github.com/antlr4-go/antlr/v4/error_listener.go
new file mode 100644
index 0000000..21a0216
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/error_listener.go
@@ -0,0 +1,100 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "os"
+ "strconv"
+)
+
+// ErrorListener is the interface for syntax error and ambiguity reporting.
+// DefaultErrorListener below provides an empty default implementation: each
+// method does nothing, but can be overridden as necessary.
+
+type ErrorListener interface {
+ SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException)
+ ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet)
+ ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet)
+ ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet)
+}
+
+type DefaultErrorListener struct {
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func NewDefaultErrorListener() *DefaultErrorListener {
+ return new(DefaultErrorListener)
+}
+
+func (d *DefaultErrorListener) SyntaxError(_ Recognizer, _ interface{}, _, _ int, _ string, _ RecognitionException) {
+}
+
+func (d *DefaultErrorListener) ReportAmbiguity(_ Parser, _ *DFA, _, _ int, _ bool, _ *BitSet, _ *ATNConfigSet) {
+}
+
+func (d *DefaultErrorListener) ReportAttemptingFullContext(_ Parser, _ *DFA, _, _ int, _ *BitSet, _ *ATNConfigSet) {
+}
+
+func (d *DefaultErrorListener) ReportContextSensitivity(_ Parser, _ *DFA, _, _, _ int, _ *ATNConfigSet) {
+}
+
+type ConsoleErrorListener struct {
+ *DefaultErrorListener
+}
+
+func NewConsoleErrorListener() *ConsoleErrorListener {
+ return new(ConsoleErrorListener)
+}
+
+// ConsoleErrorListenerINSTANCE provides a default instance of [ConsoleErrorListener].
+var ConsoleErrorListenerINSTANCE = NewConsoleErrorListener()
+
+// SyntaxError prints messages to os.Stderr containing the
+// values of line, column, and msg using
+// the following format:
+//
+//	line <line>:<column> <msg>
+func (c *ConsoleErrorListener) SyntaxError(_ Recognizer, _ interface{}, line, column int, msg string, _ RecognitionException) {
+ _, _ = fmt.Fprintln(os.Stderr, "line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg)
+}
+
+type ProxyErrorListener struct {
+ *DefaultErrorListener
+ delegates []ErrorListener
+}
+
+func NewProxyErrorListener(delegates []ErrorListener) *ProxyErrorListener {
+ if delegates == nil {
+ panic("delegates is not provided")
+ }
+ l := new(ProxyErrorListener)
+ l.delegates = delegates
+ return l
+}
+
+func (p *ProxyErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
+ for _, d := range p.delegates {
+ d.SyntaxError(recognizer, offendingSymbol, line, column, msg, e)
+ }
+}
+
+func (p *ProxyErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
+ for _, d := range p.delegates {
+ d.ReportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
+ }
+}
+
+func (p *ProxyErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) {
+ for _, d := range p.delegates {
+ d.ReportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)
+ }
+}
+
+func (p *ProxyErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) {
+ for _, d := range p.delegates {
+ d.ReportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)
+ }
+}
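+
+// A custom listener typically embeds DefaultErrorListener and overrides only
+// the callbacks it cares about, exactly as ConsoleErrorListener does above. A
+// minimal editorial sketch (errorCollector is a hypothetical name) that
+// records syntax errors for later inspection:
+//
+//	type errorCollector struct {
+//	    *antlr.DefaultErrorListener
+//	    errs []string
+//	}
+//
+//	func (c *errorCollector) SyntaxError(_ antlr.Recognizer, _ interface{},
+//	    line, column int, msg string, _ antlr.RecognitionException) {
+//	    c.errs = append(c.errs, fmt.Sprintf("%d:%d %s", line, column, msg))
+//	}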
diff --git a/vendor/github.com/antlr4-go/antlr/v4/error_strategy.go b/vendor/github.com/antlr4-go/antlr/v4/error_strategy.go
new file mode 100644
index 0000000..9db2be1
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/error_strategy.go
@@ -0,0 +1,702 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+)
+
+type ErrorStrategy interface {
+ reset(Parser)
+ RecoverInline(Parser) Token
+ Recover(Parser, RecognitionException)
+ Sync(Parser)
+ InErrorRecoveryMode(Parser) bool
+ ReportError(Parser, RecognitionException)
+ ReportMatch(Parser)
+}
+
+// DefaultErrorStrategy is the default implementation of ANTLRErrorStrategy used for
+// error reporting and recovery in ANTLR parsers.
+type DefaultErrorStrategy struct {
+ errorRecoveryMode bool
+ lastErrorIndex int
+ lastErrorStates *IntervalSet
+}
+
+var _ ErrorStrategy = &DefaultErrorStrategy{}
+
+func NewDefaultErrorStrategy() *DefaultErrorStrategy {
+
+ d := new(DefaultErrorStrategy)
+
+ // Indicates whether the error strategy is currently "recovering from an
+ // error". This is used to suppress Reporting multiple error messages while
+ // attempting to recover from a detected syntax error.
+ //
+ // See also: [InErrorRecoveryMode]
+ //
+ d.errorRecoveryMode = false
+
+ // The index into the input stream where the last error occurred.
+ // This is used to prevent infinite loops where an error is found
+ // but no token is consumed during recovery...another error is found,
+ // ad nauseam. This is a failsafe mechanism to guarantee that at least
+ // one token/tree node is consumed for two errors.
+ //
+ d.lastErrorIndex = -1
+ d.lastErrorStates = nil
+ return d
+}
+
+// reset is the default implementation, which simply calls endErrorCondition to
+// ensure that the handler is not in error recovery mode.
+func (d *DefaultErrorStrategy) reset(recognizer Parser) {
+ d.endErrorCondition(recognizer)
+}
+
+// This method is called to enter error recovery mode when a recognition
+// exception is Reported.
+func (d *DefaultErrorStrategy) beginErrorCondition(_ Parser) {
+ d.errorRecoveryMode = true
+}
+
+func (d *DefaultErrorStrategy) InErrorRecoveryMode(_ Parser) bool {
+ return d.errorRecoveryMode
+}
+
+// This method is called to leave error recovery mode after recovering from
+// a recognition exception.
+func (d *DefaultErrorStrategy) endErrorCondition(_ Parser) {
+ d.errorRecoveryMode = false
+ d.lastErrorStates = nil
+ d.lastErrorIndex = -1
+}
+
+// ReportMatch is the default implementation of error matching and simply calls endErrorCondition.
+func (d *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
+ d.endErrorCondition(recognizer)
+}
+
+// ReportError is the default implementation of error reporting.
+// It returns immediately if the handler is already
+// in error recovery mode. Otherwise, it calls [beginErrorCondition]
+// and dispatches the Reporting task based on the runtime type of e
+// according to the following table.
+//
+// [NoViableAltException] : Dispatches the call to [ReportNoViableAlternative]
+// [InputMisMatchException] : Dispatches the call to [ReportInputMisMatch]
+// [FailedPredicateException] : Dispatches the call to [ReportFailedPredicate]
+// All other types : Calls [NotifyErrorListeners] to Report the exception
+func (d *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionException) {
+ // if we've already Reported an error and have not Matched a token
+ // yet successfully, don't Report any errors.
+ if d.InErrorRecoveryMode(recognizer) {
+ return // don't Report spurious errors
+ }
+ d.beginErrorCondition(recognizer)
+
+ switch t := e.(type) {
+ default:
+ fmt.Println("unknown recognition error type: " + reflect.TypeOf(e).Name())
+ // fmt.Println(e.stack)
+ recognizer.NotifyErrorListeners(e.GetMessage(), e.GetOffendingToken(), e)
+ case *NoViableAltException:
+ d.ReportNoViableAlternative(recognizer, t)
+ case *InputMisMatchException:
+ d.ReportInputMisMatch(recognizer, t)
+ case *FailedPredicateException:
+ d.ReportFailedPredicate(recognizer, t)
+ }
+}
+
+// Recover is the default recovery implementation.
+// It reSynchronizes the parser by consuming tokens until we find one in the reSynchronization set -
+// loosely the set of tokens that can follow the current rule.
+func (d *DefaultErrorStrategy) Recover(recognizer Parser, _ RecognitionException) {
+
+ if d.lastErrorIndex == recognizer.GetInputStream().Index() &&
+ d.lastErrorStates != nil && d.lastErrorStates.contains(recognizer.GetState()) {
+ // uh oh, another error at same token index and previously-visited
+ // state in ATN must be a case where LT(1) is in the recovery
+ // token set so nothing got consumed. Consume a single token
+ // at least to prevent an infinite loop; this is a failsafe.
+ recognizer.Consume()
+ }
+ d.lastErrorIndex = recognizer.GetInputStream().Index()
+ if d.lastErrorStates == nil {
+ d.lastErrorStates = NewIntervalSet()
+ }
+ d.lastErrorStates.addOne(recognizer.GetState())
+ followSet := d.GetErrorRecoverySet(recognizer)
+ d.consumeUntil(recognizer, followSet)
+}
+
+// Sync is the default implementation of error strategy synchronization.
+//
+// This Sync makes sure that the current lookahead symbol is consistent with what we were expecting
+// at this point in the [ATN]. You can call this anytime but ANTLR only
+// generates code to check before sub-rules/loops and each iteration.
+//
+// Implements [Jim Idle]'s magic Sync mechanism in closures and optional
+// sub-rules. E.g.:
+//
+// a : Sync ( stuff Sync )*
+// Sync : {consume to what can follow Sync}
+//
+// At the start of a sub-rule upon error, Sync performs single
+// token deletion, if possible. If it can't do that, it bails on the current
+// rule and uses the default error recovery, which consumes until the
+// reSynchronization set of the current rule.
+//
+// If the sub-rule is optional
+//
+//	(...)? or (...)*
+//
+// or a block with an empty alternative, then the expected set includes what follows
+// the sub-rule.
+//
+// During loop iteration, it consumes until it sees a token that can start a
+// sub-rule or what follows the loop. Yes, that is pretty aggressive. We opt to
+// stay in the loop as long as possible.
+//
+// # Origins
+//
+// Previous versions of ANTLR did a poor job of their recovery within loops.
+// A single mismatched token or missing token would force the parser to bail
+// out of the entire rule surrounding the loop. So, for rule:
+//
+// classfunc : 'class' ID '{' member* '}'
+//
+// input with an extra token between members would force the parser to
+// consume until it found the next class definition rather than the next
+// member definition of the current class.
+//
+// This functionality cost a bit of effort because the parser has to
+// compare the token set at the start of the loop and at each iteration. If for
+// some reason speed is suffering for you, you can turn off this
+// functionality by simply overriding this method as empty:
+//
+// { }
+//
+// [Jim Idle]: https://github.com/jimidle
+func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
+ // If already recovering, don't try to Sync
+ if d.InErrorRecoveryMode(recognizer) {
+ return
+ }
+
+ s := recognizer.GetInterpreter().atn.states[recognizer.GetState()]
+ la := recognizer.GetTokenStream().LA(1)
+
+ // try cheaper subset first might get lucky. seems to shave a wee bit off
+ nextTokens := recognizer.GetATN().NextTokens(s, nil)
+ if nextTokens.contains(TokenEpsilon) || nextTokens.contains(la) {
+ return
+ }
+
+ switch s.GetStateType() {
+ case ATNStateBlockStart, ATNStateStarBlockStart, ATNStatePlusBlockStart, ATNStateStarLoopEntry:
+ // Report error and recover if possible
+ if d.SingleTokenDeletion(recognizer) != nil {
+ return
+ }
+ recognizer.SetError(NewInputMisMatchException(recognizer))
+ case ATNStatePlusLoopBack, ATNStateStarLoopBack:
+ d.ReportUnwantedToken(recognizer)
+ expecting := NewIntervalSet()
+ expecting.addSet(recognizer.GetExpectedTokens())
+ whatFollowsLoopIterationOrRule := expecting.addSet(d.GetErrorRecoverySet(recognizer))
+ d.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
+ default:
+ // do nothing if we can't identify the exact kind of ATN state
+ }
+}
+
+// ReportNoViableAlternative is called by [ReportError] when the exception is a [NoViableAltException].
+//
+// See also [ReportError]
+func (d *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *NoViableAltException) {
+ tokens := recognizer.GetTokenStream()
+ var input string
+ if tokens != nil {
+ if e.startToken.GetTokenType() == TokenEOF {
+ input = "<EOF>"
+ } else {
+ input = tokens.GetTextFromTokens(e.startToken, e.offendingToken)
+ }
+ } else {
+ input = "<unknown input>"
+ }
+ msg := "no viable alternative at input " + d.escapeWSAndQuote(input)
+ recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
+}
+
+// ReportInputMisMatch is called by [ReportError] when the exception is an [InputMisMatchException]
+//
+// See also: [ReportError]
+func (d *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *InputMisMatchException) {
+ msg := "mismatched input " + d.GetTokenErrorDisplay(e.offendingToken) +
+ " expecting " + e.getExpectedTokens().StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
+ recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
+}
+
+// ReportFailedPredicate is called by [ReportError] when the exception is a [FailedPredicateException].
+//
+// See also: [ReportError]
+func (d *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *FailedPredicateException) {
+ ruleName := recognizer.GetRuleNames()[recognizer.GetParserRuleContext().GetRuleIndex()]
+ msg := "rule " + ruleName + " " + e.message
+ recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
+}
+
+// ReportUnwantedToken is called to report a syntax error that requires the removal
+// of a token from the input stream. At the time this method is called, the
+// erroneous symbol is the current LT(1) symbol and has not yet been
+// removed from the input stream. When this method returns,
+// recognizer is in error recovery mode.
+//
+// This method is called when singleTokenDeletion identifies
+// single-token deletion as a viable recovery strategy for a mismatched
+// input error.
+//
+// The default implementation simply returns if the handler is already in
+// error recovery mode. Otherwise, it calls beginErrorCondition to
+// enter error recovery mode, followed by calling
+// [NotifyErrorListeners]
+func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
+ if d.InErrorRecoveryMode(recognizer) {
+ return
+ }
+ d.beginErrorCondition(recognizer)
+ t := recognizer.GetCurrentToken()
+ tokenName := d.GetTokenErrorDisplay(t)
+ expecting := d.GetExpectedTokens(recognizer)
+ msg := "extraneous input " + tokenName + " expecting " +
+ expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
+ recognizer.NotifyErrorListeners(msg, t, nil)
+}
+
+// ReportMissingToken is called to report a syntax error which requires the
+// insertion of a missing token into the input stream. At the time this
+// method is called, the missing token has not yet been inserted. When this
+// method returns, recognizer is in error recovery mode.
+//
+// This method is called when singleTokenInsertion identifies
+// single-token insertion as a viable recovery strategy for a mismatched
+// input error.
+//
+// The default implementation simply returns if the handler is already in
+// error recovery mode. Otherwise, it calls beginErrorCondition to
+// enter error recovery mode, followed by calling [NotifyErrorListeners]
+func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
+ if d.InErrorRecoveryMode(recognizer) {
+ return
+ }
+ d.beginErrorCondition(recognizer)
+ t := recognizer.GetCurrentToken()
+ expecting := d.GetExpectedTokens(recognizer)
+ msg := "missing " + expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false) +
+ " at " + d.GetTokenErrorDisplay(t)
+ recognizer.NotifyErrorListeners(msg, t, nil)
+}
+
+// The RecoverInline default implementation attempts to recover from the mismatched input
+// by using single token insertion and deletion as described below. If the
+// recovery attempt fails, this method panics with [InputMisMatchException].
+// TODO: Not sure that panic() is the right thing to do here - JI
+//
+// # EXTRA TOKEN (single token deletion)
+//
+// LA(1) is not what we are looking for. If LA(2) has the
+// right token, however, then assume LA(1) is some extra spurious
+// token and delete it. Then consume and return the next token (which was
+// the LA(2) token) as the successful result of the Match operation.
+//
+// # This recovery strategy is implemented by singleTokenDeletion
+//
+// # MISSING TOKEN (single token insertion)
+//
+// If current token -at LA(1) - is consistent with what could come
+// after the expected LA(1) token, then assume the token is missing
+// and use the parser's [TokenFactory] to create it on the fly. The
+// “insertion” is performed by returning the created token as the successful
+// result of the Match operation.
+//
+// This recovery strategy is implemented by [SingleTokenInsertion].
+//
+// # Example
+//
+// For example, input i=(3 is clearly missing the ')'. When
+// the parser returns from the nested call to expr, it will have
+// called the chain:
+//
+// stat → expr → atom
+//
+// and it will be trying to Match the ')' at this point in the
+// derivation:
+//
+// : ID '=' '(' INT ')' ('+' atom)* ';'
+// ^
+//
+// The attempt to [Match] ')' will fail when it sees ';' and
+// call [RecoverInline]. To recover, it sees that LA(1)==';'
+// is in the set of tokens that can follow the ')' token reference
+// in rule atom. It can assume that you forgot the ')'.
+func (d *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
+ // SINGLE TOKEN DELETION
+ MatchedSymbol := d.SingleTokenDeletion(recognizer)
+ if MatchedSymbol != nil {
+ // we have deleted the extra token.
+ // now, move past ttype token as if all were ok
+ recognizer.Consume()
+ return MatchedSymbol
+ }
+ // SINGLE TOKEN INSERTION
+ if d.SingleTokenInsertion(recognizer) {
+ return d.GetMissingSymbol(recognizer)
+ }
+ // even that didn't work must panic the exception
+ recognizer.SetError(NewInputMisMatchException(recognizer))
+ return nil
+}
+
+// SingleTokenInsertion implements the single-token insertion inline error recovery
+// strategy. It is called by [RecoverInline] if the single-token
+// deletion strategy fails to recover from the mismatched input. If this
+// method returns true, recognizer will be in error recovery
+// mode.
+//
+// This method determines whether single-token insertion is viable by
+// checking if the LA(1) input symbol could be successfully Matched
+// if it were instead the LA(2) symbol. If this method returns
+// true, the caller is responsible for creating and inserting a
+// token with the correct type to produce this behavior.
+//
+// This func returns true if single-token insertion is a viable recovery
+// strategy for the current mismatched input.
+func (d *DefaultErrorStrategy) SingleTokenInsertion(recognizer Parser) bool {
+ currentSymbolType := recognizer.GetTokenStream().LA(1)
+ // if current token is consistent with what could come after current
+ // ATN state, then we know we're missing a token; error recovery
+ // is free to conjure up and insert the missing token
+ atn := recognizer.GetInterpreter().atn
+ currentState := atn.states[recognizer.GetState()]
+ next := currentState.GetTransitions()[0].getTarget()
+ expectingAtLL2 := atn.NextTokens(next, recognizer.GetParserRuleContext())
+ if expectingAtLL2.contains(currentSymbolType) {
+ d.ReportMissingToken(recognizer)
+ return true
+ }
+
+ return false
+}
+
+// SingleTokenDeletion implements the single-token deletion inline error recovery
+// strategy. It is called by [RecoverInline] to attempt to recover
+// from mismatched input. If this method returns nil, the parser and error
+// handler state will not have changed. If this method returns non-nil,
+// recognizer will not be in error recovery mode since the
+// returned token was a successful Match.
+//
+// If the single-token deletion is successful, this method calls
+// [ReportUnwantedToken] to Report the error, followed by
+// [Consume] to actually “delete” the extraneous token. Then,
+// before returning, [ReportMatch] is called to signal a successful
+// Match.
+//
+// The func returns the successfully Matched [Token] instance if single-token
+// deletion successfully recovers from the mismatched input, otherwise nil.
+func (d *DefaultErrorStrategy) SingleTokenDeletion(recognizer Parser) Token {
+ NextTokenType := recognizer.GetTokenStream().LA(2)
+ expecting := d.GetExpectedTokens(recognizer)
+ if expecting.contains(NextTokenType) {
+ d.ReportUnwantedToken(recognizer)
+ // print("recoverFromMisMatchedToken deleting " \
+ // + str(recognizer.GetTokenStream().LT(1)) \
+ // + " since " + str(recognizer.GetTokenStream().LT(2)) \
+ // + " is what we want", file=sys.stderr)
+ recognizer.Consume() // simply delete extra token
+ // we want to return the token we're actually Matching
+ MatchedSymbol := recognizer.GetCurrentToken()
+ d.ReportMatch(recognizer) // we know current token is correct
+ return MatchedSymbol
+ }
+
+ return nil
+}
+
+// GetMissingSymbol conjures up a missing token during error recovery.
+//
+// The recognizer attempts to recover from single missing
+// symbols. But, actions might refer to that missing symbol.
+// For example:
+//
+// x=ID {f($x)}.
+//
+// The action clearly assumes
+// that there has been an identifier Matched previously and that
+// $x points at that token. If that token is missing, but
+// the next token in the stream is what we want we assume that
+// this token is missing, and we keep going. Because we
+// have to return some token to replace the missing token,
+// we have to conjure one up. This method gives the user control
+// over the tokens returned for missing tokens. Mostly,
+// you will want to create something special for identifier
+// tokens. For literals such as '{' and ',', the default
+// action in the parser or tree parser works. It simply creates
+// a [CommonToken] of the appropriate type. The text will be the token name.
+// If you need to change which tokens must be created by the lexer,
+// override this method to create the appropriate tokens.
+func (d *DefaultErrorStrategy) GetMissingSymbol(recognizer Parser) Token {
+ currentSymbol := recognizer.GetCurrentToken()
+ expecting := d.GetExpectedTokens(recognizer)
+ expectedTokenType := expecting.first()
+ var tokenText string
+
+ if expectedTokenType == TokenEOF {
+ tokenText = "<missing EOF>"
+ } else {
+ ln := recognizer.GetLiteralNames()
+ if expectedTokenType > 0 && expectedTokenType < len(ln) {
+ tokenText = "<missing " + ln[expectedTokenType] + ">"
+ } else {
+ tokenText = "<missing undefined>" // TODO: matches the JS impl
+ }
+ }
+ current := currentSymbol
+ lookback := recognizer.GetTokenStream().LT(-1)
+ if current.GetTokenType() == TokenEOF && lookback != nil {
+ current = lookback
+ }
+
+ tf := recognizer.GetTokenFactory()
+
+ return tf.Create(current.GetSource(), expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.GetLine(), current.GetColumn())
+}
+
+func (d *DefaultErrorStrategy) GetExpectedTokens(recognizer Parser) *IntervalSet {
+ return recognizer.GetExpectedTokens()
+}
+
+// GetTokenErrorDisplay determines how a token should be displayed in an error message.
+// The default is to display just the text, but during development you might
+// want to have a lot of information spit out. Override this func in that case
+// to use t.String() (which, for [CommonToken], dumps everything about
+// the token). This is better than forcing you to override a method in
+// your token objects because you don't have to go modify your lexer
+// so that it creates a new type.
+func (d *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
+ if t == nil {
+ return "<no token>"
+ }
+ s := t.GetText()
+ if s == "" {
+ if t.GetTokenType() == TokenEOF {
+ s = "<EOF>"
+ } else {
+ s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
+ }
+ }
+ return d.escapeWSAndQuote(s)
+}
+
+func (d *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
+ s = strings.Replace(s, "\t", "\\t", -1)
+ s = strings.Replace(s, "\n", "\\n", -1)
+ s = strings.Replace(s, "\r", "\\r", -1)
+ return "'" + s + "'"
+}
+
+// GetErrorRecoverySet computes the error recovery set for the current rule. During
+// rule invocation, the parser pushes the set of tokens that can
+// follow that rule reference on the stack. This amounts to
+// computing FIRST of what follows the rule reference in the
+// enclosing rule. See LinearApproximator.FIRST().
+//
+// This local follow set only includes tokens
+// from within the rule i.e., the FIRST computation done by
+// ANTLR stops at the end of a rule.
+//
+// # Example
+//
+// When you find a "no viable alt exception", the input is not
+// consistent with any of the alternatives for rule r. The best
+// thing to do is to consume tokens until you see something that
+// can legally follow a call to r or any rule that called r.
+// You don't want the exact set of viable next tokens because the
+// input might just be missing a token--you might consume the
+// rest of the input looking for one of the missing tokens.
+//
+// Consider the grammar:
+//
+// a : '[' b ']'
+// | '(' b ')'
+// ;
+//
+// b : c '^' INT
+// ;
+//
+// c : ID
+// | INT
+// ;
+//
+// At each rule invocation, the set of tokens that could follow
+// that rule is pushed on a stack. Here are the various
+// context-sensitive follow sets:
+//
+// FOLLOW(b1_in_a) = FIRST(']') = ']'
+// FOLLOW(b2_in_a) = FIRST(')') = ')'
+// FOLLOW(c_in_b) = FIRST('^') = '^'
+//
+// Upon erroneous input “[]”, the call chain is
+//
+// a → b → c
+//
+// and, hence, the follow context stack is:
+//
+// Depth Follow set Start of rule execution
+// 0 a (from main())
+// 1 ']' b
+// 2 '^' c
+//
+// Notice that ')' is not included, because b would have to have
+// been called from a different context in rule a for ')' to be
+// included.
+//
+// For error recovery, we cannot consider FOLLOW(c)
+// (context-sensitive or otherwise). We need the combined set of
+// all context-sensitive FOLLOW sets - the set of all tokens that
+// could follow any reference in the call chain. We need to
+// reSync to one of those tokens. Note that FOLLOW(c)='^' and if
+// we reSync'd to that token, we'd consume until EOF. We need to
+// Sync to context-sensitive FOLLOWs for a, b, and c:
+//
+// {']','^'}
+//
+// In this case, for input "[]", LA(1) is ']' and in the set, so we would
+// not consume anything. After printing an error, rule c would
+// return normally. Rule b would not find the required '^' though.
+// At this point, it gets a mismatched token error and panics an
+// exception (since LA(1) is not in the viable following token
+// set). The rule exception handler tries to recover, but finds
+// the same recovery set and doesn't consume anything. Rule b
+// exits normally returning to rule a. Now it finds the ']' (and
+// with the successful Match exits errorRecovery mode).
+//
+// So, you can see that the parser walks up the call chain looking
+// for the token that was a member of the recovery set.
+//
+// Errors are not generated in errorRecovery mode.
+//
+// ANTLR's error recovery mechanism is based upon original ideas:
+//
+// [Algorithms + Data Structures = Programs] by Niklaus Wirth and
+// [A note on error recovery in recursive descent parsers].
+//
+// Later, Josef Grosch had some good ideas in [Efficient and Comfortable Error Recovery in Recursive Descent
+// Parsers]
+//
+// Like Grosch I implement context-sensitive FOLLOW sets that are combined at run-time upon error to avoid overhead
+// during parsing. Later, the runtime Sync was improved for loops/sub-rules see [Sync] docs
+//
+// [A note on error recovery in recursive descent parsers]: http://portal.acm.org/citation.cfm?id=947902.947905
+// [Algorithms + Data Structures = Programs]: https://t.ly/5QzgE
+// [Efficient and Comfortable Error Recovery in Recursive Descent Parsers]: ftp://www.cocolab.com/products/cocktail/doca4.ps/ell.ps.zip
+func (d *DefaultErrorStrategy) GetErrorRecoverySet(recognizer Parser) *IntervalSet {
+ atn := recognizer.GetInterpreter().atn
+ ctx := recognizer.GetParserRuleContext()
+ recoverSet := NewIntervalSet()
+ for ctx != nil && ctx.GetInvokingState() >= 0 {
+ // compute what follows who invoked us
+ invokingState := atn.states[ctx.GetInvokingState()]
+ rt := invokingState.GetTransitions()[0]
+ follow := atn.NextTokens(rt.(*RuleTransition).followState, nil)
+ recoverSet.addSet(follow)
+ ctx = ctx.GetParent().(ParserRuleContext)
+ }
+ recoverSet.removeOne(TokenEpsilon)
+ return recoverSet
+}
+
+// consumeUntil consumes tokens until one Matches the given token set.
+func (d *DefaultErrorStrategy) consumeUntil(recognizer Parser, set *IntervalSet) {
+ ttype := recognizer.GetTokenStream().LA(1)
+ for ttype != TokenEOF && !set.contains(ttype) {
+ recognizer.Consume()
+ ttype = recognizer.GetTokenStream().LA(1)
+ }
+}
+
+// The BailErrorStrategy implementation of ANTLRErrorStrategy responds to syntax errors
+// by immediately canceling the parse operation with a
+// [ParseCancellationException]. The implementation ensures that the
+// exception field of [ParserRuleContext] is set for all parse tree nodes
+// that were not completed prior to encountering the error.
+//
+// This error strategy is useful in the following scenarios.
+//
+// - Two-stage parsing: This error strategy allows the first
+// stage of two-stage parsing to immediately terminate if an error is
+// encountered, and immediately fall back to the second stage. In addition to
+// avoiding wasted work by attempting to recover from errors here, the empty
+// implementation of [BailErrorStrategy.Sync] improves the performance of
+// the first stage.
+//
+// - Silent validation: When syntax errors are not being
+// Reported or logged, and the parse result is simply ignored if errors occur,
+// the [BailErrorStrategy] avoids wasting work on recovering from errors
+// when the result will be ignored either way.
+//
+// myparser.SetErrorHandler(NewBailErrorStrategy())
+//
+// See also: [Parser.SetErrorHandler(ANTLRErrorStrategy)]
+type BailErrorStrategy struct {
+ *DefaultErrorStrategy
+}
+
+var _ ErrorStrategy = &BailErrorStrategy{}
+
+//goland:noinspection GoUnusedExportedFunction
+func NewBailErrorStrategy() *BailErrorStrategy {
+
+ b := new(BailErrorStrategy)
+
+ b.DefaultErrorStrategy = NewDefaultErrorStrategy()
+
+ return b
+}
+
+// Recover, instead of recovering from exception e, re-panics it wrapped
+// in a [ParseCancellationException] so it is not caught by the
+// rule function catches. Use Exception.GetCause() to get the
+// original [RecognitionException].
+func (b *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
+ context := recognizer.GetParserRuleContext()
+ for context != nil {
+ context.SetException(e)
+ if parent, ok := context.GetParent().(ParserRuleContext); ok {
+ context = parent
+ } else {
+ context = nil
+ }
+ }
+ recognizer.SetError(NewParseCancellationException()) // TODO: we don't emit e properly
+}
+
+// RecoverInline makes sure we don't attempt to recover inline; if the parser
+// successfully recovers, it won't panic an exception.
+func (b *BailErrorStrategy) RecoverInline(recognizer Parser) Token {
+ b.Recover(recognizer, NewInputMisMatchException(recognizer))
+
+ return nil
+}
+
+// Sync makes sure we don't attempt to recover from problems in sub-rules.
+func (b *BailErrorStrategy) Sync(_ Parser) {
+}
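+
+// A common use of BailErrorStrategy is two-stage parsing: try fast SLL
+// prediction with the bail strategy first, and only reparse with full LL and
+// the default strategy when that fails. A minimal editorial sketch (p is a
+// hypothetical generated parser with start rule Document; how failure is
+// surfaced depends on your error handling):
+//
+//	p.GetInterpreter().SetPredictionMode(antlr.PredictionModeSLL)
+//	p.SetErrorHandler(antlr.NewBailErrorStrategy())
+//	tree := p.Document()
+//	if parseFailed {
+//	    tokens.Seek(0)
+//	    p.SetInputStream(tokens) // re-prime the parser on the same tokens
+//	    p.SetErrorHandler(antlr.NewDefaultErrorStrategy())
+//	    p.GetInterpreter().SetPredictionMode(antlr.PredictionModeLL)
+//	    tree = p.Document()
+//	}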
diff --git a/vendor/github.com/antlr4-go/antlr/v4/errors.go b/vendor/github.com/antlr4-go/antlr/v4/errors.go
new file mode 100644
index 0000000..8f0f2f6
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/errors.go
@@ -0,0 +1,259 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+// The root of the ANTLR exception hierarchy. In general, ANTLR tracks just
+// 3 kinds of errors: prediction errors, failed predicate errors, and
+// mismatched input errors. In each case, the parser knows where it is
+// in the input, where it is in the ATN, the rule invocation stack,
+// and what kind of problem occurred.
+
+type RecognitionException interface {
+ GetOffendingToken() Token
+ GetMessage() string
+ GetInputStream() IntStream
+}
+
+type BaseRecognitionException struct {
+ message string
+ recognizer Recognizer
+ offendingToken Token
+ offendingState int
+ ctx RuleContext
+ input IntStream
+}
+
+func NewBaseRecognitionException(message string, recognizer Recognizer, input IntStream, ctx RuleContext) *BaseRecognitionException {
+
+ // TODO: may be able to use - "runtime" func Stack(buf []byte, all bool) int
+
+ t := new(BaseRecognitionException)
+
+ t.message = message
+ t.recognizer = recognizer
+ t.input = input
+ t.ctx = ctx
+
+ // The current Token when an error occurred. Since not all streams
+ // support accessing symbols by index, we have to track the {@link Token}
+ // instance itself.
+ //
+ t.offendingToken = nil
+
+ // Get the ATN state number the parser was in at the time the error
+ // occurred. For NoViableAltException and LexerNoViableAltException exceptions, this is the
+ // DecisionState number. For others, it is the state whose outgoing edge we couldn't Match.
+ //
+ t.offendingState = -1
+ if t.recognizer != nil {
+ t.offendingState = t.recognizer.GetState()
+ }
+
+ return t
+}
+
+func (b *BaseRecognitionException) GetMessage() string {
+ return b.message
+}
+
+func (b *BaseRecognitionException) GetOffendingToken() Token {
+ return b.offendingToken
+}
+
+func (b *BaseRecognitionException) GetInputStream() IntStream {
+ return b.input
+}
+
+// If the state number is not known, this method returns -1.
+
+// getExpectedTokens gets the set of input symbols which could potentially follow the
+// previously Matched symbol at the time this exception was raised.
+//
+// If the set of expected tokens is not known and could not be computed,
+// this method returns nil.
+//
+// The func returns the set of token types that could potentially follow the current
+// state in the [ATN], or nil if the information is not available.
+
+func (b *BaseRecognitionException) getExpectedTokens() *IntervalSet {
+ if b.recognizer != nil {
+ return b.recognizer.GetATN().getExpectedTokens(b.offendingState, b.ctx)
+ }
+
+ return nil
+}
+
+func (b *BaseRecognitionException) String() string {
+ return b.message
+}
+
+type LexerNoViableAltException struct {
+ *BaseRecognitionException
+
+ startIndex int
+ deadEndConfigs *ATNConfigSet
+}
+
+func NewLexerNoViableAltException(lexer Lexer, input CharStream, startIndex int, deadEndConfigs *ATNConfigSet) *LexerNoViableAltException {
+
+ l := new(LexerNoViableAltException)
+
+ l.BaseRecognitionException = NewBaseRecognitionException("", lexer, input, nil)
+
+ l.startIndex = startIndex
+ l.deadEndConfigs = deadEndConfigs
+
+ return l
+}
+
+func (l *LexerNoViableAltException) String() string {
+ symbol := ""
+ if l.startIndex >= 0 && l.startIndex < l.input.Size() {
+ symbol = l.input.(CharStream).GetTextFromInterval(NewInterval(l.startIndex, l.startIndex))
+ }
+ return "LexerNoViableAltException" + symbol
+}
+
+type NoViableAltException struct {
+ *BaseRecognitionException
+
+ startToken Token
+ offendingToken Token
+ ctx ParserRuleContext
+ deadEndConfigs *ATNConfigSet
+}
+
+// NewNoViableAltException creates an exception indicating that the parser could not decide which of two or more paths
+// to take based upon the remaining input. It tracks the starting token
+// of the offending input and also knows where the parser was
+// in the various paths when the error occurred.
+//
+// Reported by [ReportNoViableAlternative]
+func NewNoViableAltException(recognizer Parser, input TokenStream, startToken Token, offendingToken Token, deadEndConfigs *ATNConfigSet, ctx ParserRuleContext) *NoViableAltException {
+
+ if ctx == nil {
+ ctx = recognizer.GetParserRuleContext()
+ }
+
+ if offendingToken == nil {
+ offendingToken = recognizer.GetCurrentToken()
+ }
+
+ if startToken == nil {
+ startToken = recognizer.GetCurrentToken()
+ }
+
+ if input == nil {
+ input = recognizer.GetInputStream().(TokenStream)
+ }
+
+ n := new(NoViableAltException)
+ n.BaseRecognitionException = NewBaseRecognitionException("", recognizer, input, ctx)
+
+ // Which configurations did we try at input.Index() that couldn't Match
+ // input.LT(1)
+ n.deadEndConfigs = deadEndConfigs
+
+	// The token object at the start index; the input stream might
+	// not be buffering tokens, so get a reference to it.
+ //
+ // At the time the error occurred, of course the stream needs to keep a
+ // buffer of all the tokens, but later we might not have access to those.
+ n.startToken = startToken
+ n.offendingToken = offendingToken
+
+ return n
+}
+
+type InputMisMatchException struct {
+ *BaseRecognitionException
+}
+
+// NewInputMisMatchException creates an exception that signifies any kind of mismatched input exception, such as
+// when the current input does not Match the expected token.
+func NewInputMisMatchException(recognizer Parser) *InputMisMatchException {
+
+ i := new(InputMisMatchException)
+ i.BaseRecognitionException = NewBaseRecognitionException("", recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
+
+ i.offendingToken = recognizer.GetCurrentToken()
+
+ return i
+
+}
+
+// FailedPredicateException indicates that a semantic predicate failed during validation. Validation of predicates
+// occurs while the alternative is being parsed normally, just as when Matching a token.
+// Disambiguating predicate evaluation occurs when we test a predicate during
+// prediction.
+type FailedPredicateException struct {
+ *BaseRecognitionException
+
+ ruleIndex int
+ predicateIndex int
+ predicate string
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func NewFailedPredicateException(recognizer Parser, predicate string, message string) *FailedPredicateException {
+
+ f := new(FailedPredicateException)
+
+ f.BaseRecognitionException = NewBaseRecognitionException(f.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
+
+ s := recognizer.GetInterpreter().atn.states[recognizer.GetState()]
+ trans := s.GetTransitions()[0]
+ if trans2, ok := trans.(*PredicateTransition); ok {
+ f.ruleIndex = trans2.ruleIndex
+ f.predicateIndex = trans2.predIndex
+ } else {
+ f.ruleIndex = 0
+ f.predicateIndex = 0
+ }
+ f.predicate = predicate
+ f.offendingToken = recognizer.GetCurrentToken()
+
+ return f
+}
+
+func (f *FailedPredicateException) formatMessage(predicate, message string) string {
+ if message != "" {
+ return message
+ }
+
+ return "failed predicate: {" + predicate + "}?"
+}
+
+type ParseCancellationException struct {
+}
+
+func (p ParseCancellationException) GetOffendingToken() Token {
+ //TODO implement me
+ panic("implement me")
+}
+
+func (p ParseCancellationException) GetMessage() string {
+ //TODO implement me
+ panic("implement me")
+}
+
+func (p ParseCancellationException) GetInputStream() IntStream {
+ //TODO implement me
+ panic("implement me")
+}
+
+func NewParseCancellationException() *ParseCancellationException {
+ return new(ParseCancellationException)
+}
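+
+// The sketch below is editorial, not part of the upstream file. It shows one
+// plausible way client code might convert the panic-based error model into an
+// error value, assuming a parse callback driven by a bail-out strategy that
+// panics with a *ParseCancellationException on the first syntax error (the
+// "errors" package import is assumed).
+//
+//	func parseWithBail(parse func()) (err error) {
+//		defer func() {
+//			if r := recover(); r != nil {
+//				if _, ok := r.(*ParseCancellationException); ok {
+//					err = errors.New("parse cancelled: syntax error")
+//					return
+//				}
+//				panic(r) // not our panic: re-raise it
+//			}
+//		}()
+//		parse()
+//		return nil
+//	}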
diff --git a/vendor/github.com/antlr4-go/antlr/v4/file_stream.go b/vendor/github.com/antlr4-go/antlr/v4/file_stream.go
new file mode 100644
index 0000000..5f65f80
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/file_stream.go
@@ -0,0 +1,67 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "bufio"
+ "os"
+)
+
+// This is an InputStream that is loaded from a file all at once
+// when you construct the object.
+
+type FileStream struct {
+ InputStream
+ filename string
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func NewFileStream(fileName string) (*FileStream, error) {
+
+ f, err := os.Open(fileName)
+ if err != nil {
+ return nil, err
+ }
+
+	// The file is read fully below, so an error on Close is deliberately ignored.
+	defer func(f *os.File) {
+		_ = f.Close()
+	}(f)
+
+ reader := bufio.NewReader(f)
+ fInfo, err := f.Stat()
+ if err != nil {
+ return nil, err
+ }
+
+ fs := &FileStream{
+ InputStream: InputStream{
+ index: 0,
+ name: fileName,
+ },
+ filename: fileName,
+ }
+
+ // Pre-build the buffer and read runes efficiently
+ //
+ fs.data = make([]rune, 0, fInfo.Size())
+ for {
+ r, _, err := reader.ReadRune()
+ if err != nil {
+ break
+ }
+ fs.data = append(fs.data, r)
+ }
+ fs.size = len(fs.data) // Size in runes
+
+ // All done.
+ //
+ return fs, nil
+}
+
+func (f *FileStream) GetSourceName() string {
+ return f.filename
+}
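+
+// Editorial usage sketch, not upstream code: loading a file into a
+// FileStream. The path "input.txt" is purely illustrative, and the "fmt"
+// and "log" imports are assumed.
+//
+//	fs, err := NewFileStream("input.txt")
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	fmt.Println(fs.GetSourceName(), "holds", fs.Size(), "runes")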
diff --git a/vendor/github.com/antlr4-go/antlr/v4/input_stream.go b/vendor/github.com/antlr4-go/antlr/v4/input_stream.go
new file mode 100644
index 0000000..b737fe8
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/input_stream.go
@@ -0,0 +1,157 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "bufio"
+ "io"
+)
+
+type InputStream struct {
+ name string
+ index int
+ data []rune
+ size int
+}
+
+// NewIoStream creates a new input stream from the given io.Reader reader.
+// Note that the reader is read completely into memory and so it must actually
+// have a stopping point - you cannot pass in a reader on an open-ended source such
+// as a socket for instance.
+func NewIoStream(reader io.Reader) *InputStream {
+
+ rReader := bufio.NewReader(reader)
+
+ is := &InputStream{
+ name: "",
+ index: 0,
+ }
+
+ // Pre-build the buffer and read runes reasonably efficiently given that
+ // we don't exactly know how big the input is.
+ //
+ is.data = make([]rune, 0, 512)
+ for {
+ r, _, err := rReader.ReadRune()
+ if err != nil {
+ break
+ }
+ is.data = append(is.data, r)
+ }
+ is.size = len(is.data) // number of runes
+ return is
+}
+
+// NewInputStream creates a new input stream from the given string
+func NewInputStream(data string) *InputStream {
+
+ is := &InputStream{
+ name: "",
+ index: 0,
+ data: []rune(data), // This is actually the most efficient way
+ }
+ is.size = len(is.data) // number of runes, but we could also use len(data), which is efficient too
+ return is
+}
+
+func (is *InputStream) reset() {
+ is.index = 0
+}
+
+// Consume moves the input pointer to the next character in the input stream
+func (is *InputStream) Consume() {
+ if is.index >= is.size {
+ // assert is.LA(1) == TokenEOF
+ panic("cannot consume EOF")
+ }
+ is.index++
+}
+
+// LA returns the character at the given lookahead offset relative to the
+// current position; offset 1 is the current character.
+func (is *InputStream) LA(offset int) int {
+
+ if offset == 0 {
+ return 0 // nil
+ }
+ if offset < 0 {
+ offset++ // e.g., translate LA(-1) to use offset=0
+ }
+ pos := is.index + offset - 1
+
+ if pos < 0 || pos >= is.size { // invalid
+ return TokenEOF
+ }
+
+ return int(is.data[pos])
+}
+
+// LT returns the character at the given lookahead offset; it is identical to [InputStream.LA].
+func (is *InputStream) LT(offset int) int {
+ return is.LA(offset)
+}
+
+// Index returns the current offset into the input stream
+func (is *InputStream) Index() int {
+ return is.index
+}
+
+// Size returns the total number of characters in the input stream
+func (is *InputStream) Size() int {
+ return is.size
+}
+
+// Mark does nothing here as we have the entire buffer
+func (is *InputStream) Mark() int {
+ return -1
+}
+
+// Release does nothing here as we have the entire buffer
+func (is *InputStream) Release(_ int) {
+}
+
+// Seek moves the input cursor to the provided index
+func (is *InputStream) Seek(index int) {
+ if index <= is.index {
+ is.index = index // just jump don't update stream state (line,...)
+ return
+ }
+ // seek forward
+ is.index = intMin(index, is.size)
+}
+
+// GetText returns the text from the input stream from the start to the stop index
+func (is *InputStream) GetText(start int, stop int) string {
+ if stop >= is.size {
+ stop = is.size - 1
+ }
+ if start >= is.size {
+ return ""
+ }
+
+ return string(is.data[start : stop+1])
+}
+
+// GetTextFromTokens returns the text from the input stream from the first character of the start token to the last
+// character of the stop token
+func (is *InputStream) GetTextFromTokens(start, stop Token) string {
+ if start != nil && stop != nil {
+ return is.GetTextFromInterval(NewInterval(start.GetTokenIndex(), stop.GetTokenIndex()))
+ }
+
+ return ""
+}
+
+func (is *InputStream) GetTextFromInterval(i Interval) string {
+ return is.GetText(i.Start, i.Stop)
+}
+
+func (*InputStream) GetSourceName() string {
+ return ""
+}
+
+// String returns the entire input stream as a string
+func (is *InputStream) String() string {
+ return string(is.data)
+}
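+
+// Editorial sketch of the lookahead contract, not upstream code: LA(1) is
+// the character at the cursor, LA(-1) the one before it, and positions past
+// either end yield TokenEOF.
+//
+//	is := NewInputStream("ab")
+//	_ = is.LA(1)  // 'a'
+//	is.Consume()
+//	_ = is.LA(1)  // 'b'
+//	_ = is.LA(-1) // 'a'
+//	is.Consume()
+//	_ = is.LA(1)  // TokenEOF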
diff --git a/vendor/github.com/antlr4-go/antlr/v4/int_stream.go b/vendor/github.com/antlr4-go/antlr/v4/int_stream.go
new file mode 100644
index 0000000..4778878
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/int_stream.go
@@ -0,0 +1,16 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+type IntStream interface {
+ Consume()
+ LA(int) int
+ Mark() int
+ Release(marker int)
+ Index() int
+ Seek(index int)
+ Size() int
+ GetSourceName() string
+}
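+
+// Editorial note, not upstream code: *InputStream satisfies this interface,
+// so it can be used anywhere only cursor operations are required.
+//
+//	var _ IntStream = (*InputStream)(nil) // compile-time interface check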
diff --git a/vendor/github.com/antlr4-go/antlr/v4/interval_set.go b/vendor/github.com/antlr4-go/antlr/v4/interval_set.go
new file mode 100644
index 0000000..cc50660
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/interval_set.go
@@ -0,0 +1,330 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "strconv"
+ "strings"
+)
+
+type Interval struct {
+ Start int
+ Stop int
+}
+
+// NewInterval creates a new interval with the given start and stop values.
+func NewInterval(start, stop int) Interval {
+ return Interval{
+ Start: start,
+ Stop: stop,
+ }
+}
+
+// Contains returns true if the given item is contained within the interval.
+func (i Interval) Contains(item int) bool {
+ return item >= i.Start && item < i.Stop
+}
+
+// String generates a string representation of the interval.
+func (i Interval) String() string {
+ if i.Start == i.Stop-1 {
+ return strconv.Itoa(i.Start)
+ }
+
+ return strconv.Itoa(i.Start) + ".." + strconv.Itoa(i.Stop-1)
+}
+
+// Length returns the length of the interval.
+func (i Interval) Length() int {
+ return i.Stop - i.Start
+}
+
+// IntervalSet represents a collection of [Intervals], which may be read-only.
+type IntervalSet struct {
+ intervals []Interval
+ readOnly bool
+}
+
+// NewIntervalSet creates a new empty, writable, interval set.
+func NewIntervalSet() *IntervalSet {
+
+ i := new(IntervalSet)
+
+ i.intervals = nil
+ i.readOnly = false
+
+ return i
+}
+
+func (i *IntervalSet) Equals(other *IntervalSet) bool {
+ if len(i.intervals) != len(other.intervals) {
+ return false
+ }
+
+ for k, v := range i.intervals {
+ if v.Start != other.intervals[k].Start || v.Stop != other.intervals[k].Stop {
+ return false
+ }
+ }
+
+ return true
+}
+
+func (i *IntervalSet) first() int {
+ if len(i.intervals) == 0 {
+ return TokenInvalidType
+ }
+
+ return i.intervals[0].Start
+}
+
+func (i *IntervalSet) addOne(v int) {
+ i.addInterval(NewInterval(v, v+1))
+}
+
+func (i *IntervalSet) addRange(l, h int) {
+ i.addInterval(NewInterval(l, h+1))
+}
+
+func (i *IntervalSet) addInterval(v Interval) {
+ if i.intervals == nil {
+ i.intervals = make([]Interval, 0)
+ i.intervals = append(i.intervals, v)
+ } else {
+ // find insert pos
+ for k, interval := range i.intervals {
+ // distinct range -> insert
+ if v.Stop < interval.Start {
+ i.intervals = append(i.intervals[0:k], append([]Interval{v}, i.intervals[k:]...)...)
+ return
+ } else if v.Stop == interval.Start {
+ i.intervals[k].Start = v.Start
+ return
+ } else if v.Start <= interval.Stop {
+ i.intervals[k] = NewInterval(intMin(interval.Start, v.Start), intMax(interval.Stop, v.Stop))
+
+ // if not applying to end, merge potential overlaps
+ if k < len(i.intervals)-1 {
+ l := i.intervals[k]
+ r := i.intervals[k+1]
+ // if r contained in l
+ if l.Stop >= r.Stop {
+ i.intervals = append(i.intervals[0:k+1], i.intervals[k+2:]...)
+ } else if l.Stop >= r.Start { // partial overlap
+ i.intervals[k] = NewInterval(l.Start, r.Stop)
+ i.intervals = append(i.intervals[0:k+1], i.intervals[k+2:]...)
+ }
+ }
+ return
+ }
+ }
+		// greater than any existing interval, so append
+ i.intervals = append(i.intervals, v)
+ }
+}
+
+func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
+ if other.intervals != nil {
+ for k := 0; k < len(other.intervals); k++ {
+ i2 := other.intervals[k]
+ i.addInterval(NewInterval(i2.Start, i2.Stop))
+ }
+ }
+ return i
+}
+
+func (i *IntervalSet) complement(start int, stop int) *IntervalSet {
+ result := NewIntervalSet()
+ result.addInterval(NewInterval(start, stop+1))
+ for j := 0; j < len(i.intervals); j++ {
+ result.removeRange(i.intervals[j])
+ }
+ return result
+}
+
+func (i *IntervalSet) contains(item int) bool {
+ if i.intervals == nil {
+ return false
+ }
+ for k := 0; k < len(i.intervals); k++ {
+ if i.intervals[k].Contains(item) {
+ return true
+ }
+ }
+ return false
+}
+
+func (i *IntervalSet) length() int {
+ iLen := 0
+
+ for _, v := range i.intervals {
+ iLen += v.Length()
+ }
+
+ return iLen
+}
+
+func (i *IntervalSet) removeRange(v Interval) {
+ if v.Start == v.Stop-1 {
+ i.removeOne(v.Start)
+ } else if i.intervals != nil {
+ k := 0
+ for n := 0; n < len(i.intervals); n++ {
+ ni := i.intervals[k]
+ // intervals are ordered
+ if v.Stop <= ni.Start {
+ return
+ } else if v.Start > ni.Start && v.Stop < ni.Stop {
+ i.intervals[k] = NewInterval(ni.Start, v.Start)
+ x := NewInterval(v.Stop, ni.Stop)
+ // i.intervals.splice(k, 0, x)
+ i.intervals = append(i.intervals[0:k], append([]Interval{x}, i.intervals[k:]...)...)
+ return
+ } else if v.Start <= ni.Start && v.Stop >= ni.Stop {
+ // i.intervals.splice(k, 1)
+ i.intervals = append(i.intervals[0:k], i.intervals[k+1:]...)
+ k = k - 1 // need another pass
+ } else if v.Start < ni.Stop {
+ i.intervals[k] = NewInterval(ni.Start, v.Start)
+ } else if v.Stop < ni.Stop {
+ i.intervals[k] = NewInterval(v.Stop, ni.Stop)
+ }
+ k++
+ }
+ }
+}
+
+func (i *IntervalSet) removeOne(v int) {
+ if i.intervals != nil {
+ for k := 0; k < len(i.intervals); k++ {
+ ki := i.intervals[k]
+			// intervals are ordered
+ if v < ki.Start {
+ return
+ } else if v == ki.Start && v == ki.Stop-1 {
+ // i.intervals.splice(k, 1)
+ i.intervals = append(i.intervals[0:k], i.intervals[k+1:]...)
+ return
+ } else if v == ki.Start {
+ i.intervals[k] = NewInterval(ki.Start+1, ki.Stop)
+ return
+ } else if v == ki.Stop-1 {
+ i.intervals[k] = NewInterval(ki.Start, ki.Stop-1)
+ return
+ } else if v < ki.Stop-1 {
+ x := NewInterval(ki.Start, v)
+ ki.Start = v + 1
+ // i.intervals.splice(k, 0, x)
+ i.intervals = append(i.intervals[0:k], append([]Interval{x}, i.intervals[k:]...)...)
+ return
+ }
+ }
+ }
+}
+
+func (i *IntervalSet) String() string {
+ return i.StringVerbose(nil, nil, false)
+}
+
+func (i *IntervalSet) StringVerbose(literalNames []string, symbolicNames []string, elemsAreChar bool) string {
+
+ if i.intervals == nil {
+ return "{}"
+ } else if literalNames != nil || symbolicNames != nil {
+ return i.toTokenString(literalNames, symbolicNames)
+ } else if elemsAreChar {
+ return i.toCharString()
+ }
+
+ return i.toIndexString()
+}
+
+func (i *IntervalSet) GetIntervals() []Interval {
+ return i.intervals
+}
+
+func (i *IntervalSet) toCharString() string {
+	names := make([]string, 0, len(i.intervals))
+
+ var sb strings.Builder
+
+ for j := 0; j < len(i.intervals); j++ {
+ v := i.intervals[j]
+ if v.Stop == v.Start+1 {
+ if v.Start == TokenEOF {
+				names = append(names, "<EOF>")
+ } else {
+ sb.WriteByte('\'')
+ sb.WriteRune(rune(v.Start))
+ sb.WriteByte('\'')
+ names = append(names, sb.String())
+ sb.Reset()
+ }
+ } else {
+ sb.WriteByte('\'')
+ sb.WriteRune(rune(v.Start))
+ sb.WriteString("'..'")
+ sb.WriteRune(rune(v.Stop - 1))
+ sb.WriteByte('\'')
+ names = append(names, sb.String())
+ sb.Reset()
+ }
+ }
+ if len(names) > 1 {
+ return "{" + strings.Join(names, ", ") + "}"
+ }
+
+ return names[0]
+}
+
+func (i *IntervalSet) toIndexString() string {
+
+ names := make([]string, 0)
+ for j := 0; j < len(i.intervals); j++ {
+ v := i.intervals[j]
+ if v.Stop == v.Start+1 {
+ if v.Start == TokenEOF {
+				names = append(names, "<EOF>")
+ } else {
+ names = append(names, strconv.Itoa(v.Start))
+ }
+ } else {
+ names = append(names, strconv.Itoa(v.Start)+".."+strconv.Itoa(v.Stop-1))
+ }
+ }
+ if len(names) > 1 {
+ return "{" + strings.Join(names, ", ") + "}"
+ }
+
+ return names[0]
+}
+
+func (i *IntervalSet) toTokenString(literalNames []string, symbolicNames []string) string {
+ names := make([]string, 0)
+ for _, v := range i.intervals {
+ for j := v.Start; j < v.Stop; j++ {
+ names = append(names, i.elementName(literalNames, symbolicNames, j))
+ }
+ }
+ if len(names) > 1 {
+ return "{" + strings.Join(names, ", ") + "}"
+ }
+
+ return names[0]
+}
+
+func (i *IntervalSet) elementName(literalNames []string, symbolicNames []string, a int) string {
+	if a == TokenEOF {
+		return "<EOF>"
+	} else if a == TokenEpsilon {
+		return "<EPSILON>"
+ } else {
+ if a < len(literalNames) && literalNames[a] != "" {
+ return literalNames[a]
+ }
+
+ return symbolicNames[a]
+ }
+}
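+
+// Editorial sketch of the merge behavior, not upstream code: overlapping and
+// adjacent ranges collapse into a single interval. The "fmt" import is
+// assumed.
+//
+//	s := NewIntervalSet()
+//	s.addRange(1, 3)                          // {1..3}
+//	s.addRange(5, 7)                          // {1..3, 5..7}
+//	s.addRange(4, 4)                          // bridges the gap
+//	fmt.Println(s.String())                   // 1..7
+//	fmt.Println(s.contains(6), s.contains(8)) // true false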
diff --git a/vendor/github.com/antlr4-go/antlr/v4/jcollect.go b/vendor/github.com/antlr4-go/antlr/v4/jcollect.go
new file mode 100644
index 0000000..ceccd96
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/jcollect.go
@@ -0,0 +1,685 @@
+package antlr
+
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+import (
+ "container/list"
+ "runtime/debug"
+ "sort"
+ "sync"
+)
+
+// Collectable is an interface that a struct should implement if it is to be
+// usable as a key in these collections.
+type Collectable[T any] interface {
+ Hash() int
+ Equals(other Collectable[T]) bool
+}
+
+type Comparator[T any] interface {
+ Hash1(o T) int
+ Equals2(T, T) bool
+}
+
+type CollectionSource int
+type CollectionDescriptor struct {
+ SybolicName string
+ Description string
+}
+
+const (
+ UnknownCollection CollectionSource = iota
+ ATNConfigLookupCollection
+ ATNStateCollection
+ DFAStateCollection
+ ATNConfigCollection
+ PredictionContextCollection
+ SemanticContextCollection
+ ClosureBusyCollection
+ PredictionVisitedCollection
+ MergeCacheCollection
+ PredictionContextCacheCollection
+ AltSetCollection
+ ReachSetCollection
+)
+
+var CollectionDescriptors = map[CollectionSource]CollectionDescriptor{
+ UnknownCollection: {
+ SybolicName: "UnknownCollection",
+ Description: "Unknown collection type. Only used if the target author thought it was an unimportant collection.",
+ },
+ ATNConfigCollection: {
+ SybolicName: "ATNConfigCollection",
+ Description: "ATNConfig collection. Used to store the ATNConfigs for a particular state in the ATN." +
+ "For instance, it is used to store the results of the closure() operation in the ATN.",
+ },
+ ATNConfigLookupCollection: {
+ SybolicName: "ATNConfigLookupCollection",
+ Description: "ATNConfigLookup collection. Used to store the ATNConfigs for a particular state in the ATN." +
+ "This is used to prevent duplicating equivalent states in an ATNConfigurationSet.",
+ },
+ ATNStateCollection: {
+ SybolicName: "ATNStateCollection",
+ Description: "ATNState collection. This is used to store the states of the ATN.",
+ },
+ DFAStateCollection: {
+ SybolicName: "DFAStateCollection",
+ Description: "DFAState collection. This is used to store the states of the DFA.",
+ },
+ PredictionContextCollection: {
+ SybolicName: "PredictionContextCollection",
+ Description: "PredictionContext collection. This is used to store the prediction contexts of the ATN and cache computes.",
+ },
+ SemanticContextCollection: {
+ SybolicName: "SemanticContextCollection",
+ Description: "SemanticContext collection. This is used to store the semantic contexts of the ATN.",
+ },
+ ClosureBusyCollection: {
+ SybolicName: "ClosureBusyCollection",
+ Description: "ClosureBusy collection. This is used to check and prevent infinite recursion right recursive rules." +
+ "It stores ATNConfigs that are currently being processed in the closure() operation.",
+ },
+ PredictionVisitedCollection: {
+ SybolicName: "PredictionVisitedCollection",
+ Description: "A map that records whether we have visited a particular context when searching through cached entries.",
+ },
+ MergeCacheCollection: {
+ SybolicName: "MergeCacheCollection",
+ Description: "A map that records whether we have already merged two particular contexts and can save effort by not repeating it.",
+ },
+ PredictionContextCacheCollection: {
+ SybolicName: "PredictionContextCacheCollection",
+ Description: "A map that records whether we have already created a particular context and can save effort by not computing it again.",
+ },
+ AltSetCollection: {
+ SybolicName: "AltSetCollection",
+ Description: "Used to eliminate duplicate alternatives in an ATN config set.",
+ },
+ ReachSetCollection: {
+ SybolicName: "ReachSetCollection",
+ Description: "Used as merge cache to prevent us needing to compute the merge of two states if we have already done it.",
+ },
+}
+
+// JStore implements a container that allows the use of a struct to calculate the key
+// for a collection of values akin to map. This is not meant to be a full-blown HashMap but just
+// serve the needs of the ANTLR Go runtime.
+//
+// For ease of porting the logic of the runtime from the master target (Java), this collection
+// operates in a similar way to Java, in that it can use any struct that supplies a Hash() and Equals()
+// function as the key. The values are stored in a standard go map which internally is a form of hashmap
+// itself, the key for the go map is the hash supplied by the key object. The collection is able to deal with
+// hash conflicts by using a simple slice of values associated with the hash code indexed bucket. That isn't
+// particularly efficient, but it is simple, and it works. As this is specifically for the ANTLR runtime, and
+// we understand the requirements, then this is fine - this is not a general purpose collection.
+type JStore[T any, C Comparator[T]] struct {
+ store map[int][]T
+ len int
+ comparator Comparator[T]
+ stats *JStatRec
+}
+
+func NewJStore[T any, C Comparator[T]](comparator Comparator[T], cType CollectionSource, desc string) *JStore[T, C] {
+
+ if comparator == nil {
+ panic("comparator cannot be nil")
+ }
+
+ s := &JStore[T, C]{
+ store: make(map[int][]T, 1),
+ comparator: comparator,
+ }
+ if collectStats {
+ s.stats = &JStatRec{
+ Source: cType,
+ Description: desc,
+ }
+
+ // Track where we created it from if we are being asked to do so
+ if runtimeConfig.statsTraceStacks {
+ s.stats.CreateStack = debug.Stack()
+ }
+ Statistics.AddJStatRec(s.stats)
+ }
+ return s
+}
+
+// Put will store given value in the collection. Note that the key for storage is generated from
+// the value itself - this is specifically because that is what ANTLR needs - this would not be useful
+// as any kind of general collection.
+//
+// If the key has a hash conflict, then the value will be added to the slice of values associated with the
+// hash, unless the value is already in the slice, in which case the existing value is returned. Value equivalence is
+// tested by calling the equals() method on the key.
+//
+// If the given value is already present in the store, then the existing value is returned as v, and exists is set to true.
+//
+// If the given value is not present in the store, then the value is added to the store and returned as v and exists is set to false.
+func (s *JStore[T, C]) Put(value T) (v T, exists bool) {
+
+ if collectStats {
+ s.stats.Puts++
+ }
+ kh := s.comparator.Hash1(value)
+
+ var hClash bool
+ for _, v1 := range s.store[kh] {
+ hClash = true
+ if s.comparator.Equals2(value, v1) {
+ if collectStats {
+ s.stats.PutHits++
+ s.stats.PutHashConflicts++
+ }
+ return v1, true
+ }
+ if collectStats {
+ s.stats.PutMisses++
+ }
+ }
+ if collectStats && hClash {
+ s.stats.PutHashConflicts++
+ }
+ s.store[kh] = append(s.store[kh], value)
+
+ if collectStats {
+ if len(s.store[kh]) > s.stats.MaxSlotSize {
+ s.stats.MaxSlotSize = len(s.store[kh])
+ }
+ }
+ s.len++
+ if collectStats {
+ s.stats.CurSize = s.len
+ if s.len > s.stats.MaxSize {
+ s.stats.MaxSize = s.len
+ }
+ }
+ return value, false
+}
+
+// Get will return the value associated with the key - the type of the key is the same type as the value
+// which would not generally be useful, but this is a specific thing for ANTLR where the key is
+// generated using the object we are going to store.
+func (s *JStore[T, C]) Get(key T) (T, bool) {
+ if collectStats {
+ s.stats.Gets++
+ }
+ kh := s.comparator.Hash1(key)
+ var hClash bool
+ for _, v := range s.store[kh] {
+ hClash = true
+ if s.comparator.Equals2(key, v) {
+ if collectStats {
+ s.stats.GetHits++
+ s.stats.GetHashConflicts++
+ }
+ return v, true
+ }
+ if collectStats {
+ s.stats.GetMisses++
+ }
+ }
+ if collectStats {
+ if hClash {
+ s.stats.GetHashConflicts++
+ }
+ s.stats.GetNoEnt++
+ }
+ return key, false
+}
+
+// Contains returns true if the given key is present in the store
+func (s *JStore[T, C]) Contains(key T) bool {
+ _, present := s.Get(key)
+ return present
+}
+
+func (s *JStore[T, C]) SortedSlice(less func(i, j T) bool) []T {
+ vs := make([]T, 0, len(s.store))
+ for _, v := range s.store {
+ vs = append(vs, v...)
+ }
+ sort.Slice(vs, func(i, j int) bool {
+ return less(vs[i], vs[j])
+ })
+
+ return vs
+}
+
+func (s *JStore[T, C]) Each(f func(T) bool) {
+ for _, e := range s.store {
+ for _, v := range e {
+ f(v)
+ }
+ }
+}
+
+func (s *JStore[T, C]) Len() int {
+ return s.len
+}
+
+func (s *JStore[T, C]) Values() []T {
+ vs := make([]T, 0, len(s.store))
+ for _, e := range s.store {
+ vs = append(vs, e...)
+ }
+ return vs
+}
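+
+// Editorial sketch of the JStore contract, not upstream code. The intCmp
+// comparator is hypothetical (declared at package level in real code); its
+// deliberately weak Hash1 forces bucket collisions, which Put then resolves
+// with Equals2.
+//
+//	type intCmp struct{}
+//
+//	func (intCmp) Hash1(o int) int       { return o % 2 }
+//	func (intCmp) Equals2(a, b int) bool { return a == b }
+//
+//	s := NewJStore[int, intCmp](intCmp{}, UnknownCollection, "demo")
+//	_, exists := s.Put(1) // exists == false: newly stored
+//	_, exists = s.Put(3)  // same bucket as 1, not Equals2, so stored too
+//	_, exists = s.Put(1)  // exists == true: the existing value is returned
+//	fmt.Println(s.Len())  // 2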
+
+type entry[K, V any] struct {
+ key K
+ val V
+}
+
+type JMap[K, V any, C Comparator[K]] struct {
+ store map[int][]*entry[K, V]
+ len int
+ comparator Comparator[K]
+ stats *JStatRec
+}
+
+func NewJMap[K, V any, C Comparator[K]](comparator Comparator[K], cType CollectionSource, desc string) *JMap[K, V, C] {
+ m := &JMap[K, V, C]{
+ store: make(map[int][]*entry[K, V], 1),
+ comparator: comparator,
+ }
+ if collectStats {
+ m.stats = &JStatRec{
+ Source: cType,
+ Description: desc,
+ }
+ // Track where we created it from if we are being asked to do so
+ if runtimeConfig.statsTraceStacks {
+ m.stats.CreateStack = debug.Stack()
+ }
+ Statistics.AddJStatRec(m.stats)
+ }
+ return m
+}
+
+func (m *JMap[K, V, C]) Put(key K, val V) (V, bool) {
+ if collectStats {
+ m.stats.Puts++
+ }
+ kh := m.comparator.Hash1(key)
+
+ var hClash bool
+ for _, e := range m.store[kh] {
+ hClash = true
+ if m.comparator.Equals2(e.key, key) {
+ if collectStats {
+ m.stats.PutHits++
+ m.stats.PutHashConflicts++
+ }
+ return e.val, true
+ }
+ if collectStats {
+ m.stats.PutMisses++
+ }
+ }
+ if collectStats {
+ if hClash {
+ m.stats.PutHashConflicts++
+ }
+ }
+ m.store[kh] = append(m.store[kh], &entry[K, V]{key, val})
+ if collectStats {
+ if len(m.store[kh]) > m.stats.MaxSlotSize {
+ m.stats.MaxSlotSize = len(m.store[kh])
+ }
+ }
+ m.len++
+ if collectStats {
+ m.stats.CurSize = m.len
+ if m.len > m.stats.MaxSize {
+ m.stats.MaxSize = m.len
+ }
+ }
+ return val, false
+}
+
+func (m *JMap[K, V, C]) Values() []V {
+ vs := make([]V, 0, len(m.store))
+ for _, e := range m.store {
+ for _, v := range e {
+ vs = append(vs, v.val)
+ }
+ }
+ return vs
+}
+
+func (m *JMap[K, V, C]) Get(key K) (V, bool) {
+ if collectStats {
+ m.stats.Gets++
+ }
+ var none V
+ kh := m.comparator.Hash1(key)
+ var hClash bool
+ for _, e := range m.store[kh] {
+ hClash = true
+ if m.comparator.Equals2(e.key, key) {
+ if collectStats {
+ m.stats.GetHits++
+ m.stats.GetHashConflicts++
+ }
+ return e.val, true
+ }
+ if collectStats {
+ m.stats.GetMisses++
+ }
+ }
+ if collectStats {
+ if hClash {
+ m.stats.GetHashConflicts++
+ }
+ m.stats.GetNoEnt++
+ }
+ return none, false
+}
+
+func (m *JMap[K, V, C]) Len() int {
+ return m.len
+}
+
+func (m *JMap[K, V, C]) Delete(key K) {
+ kh := m.comparator.Hash1(key)
+ for i, e := range m.store[kh] {
+ if m.comparator.Equals2(e.key, key) {
+ m.store[kh] = append(m.store[kh][:i], m.store[kh][i+1:]...)
+ m.len--
+ return
+ }
+ }
+}
+
+func (m *JMap[K, V, C]) Clear() {
+ m.store = make(map[int][]*entry[K, V])
+}
+
+type JPCMap struct {
+ store *JMap[*PredictionContext, *JMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]], *ObjEqComparator[*PredictionContext]]
+ size int
+ stats *JStatRec
+}
+
+func NewJPCMap(cType CollectionSource, desc string) *JPCMap {
+ m := &JPCMap{
+ store: NewJMap[*PredictionContext, *JMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]], *ObjEqComparator[*PredictionContext]](pContextEqInst, cType, desc),
+ }
+ if collectStats {
+ m.stats = &JStatRec{
+ Source: cType,
+ Description: desc,
+ }
+ // Track where we created it from if we are being asked to do so
+ if runtimeConfig.statsTraceStacks {
+ m.stats.CreateStack = debug.Stack()
+ }
+ Statistics.AddJStatRec(m.stats)
+ }
+ return m
+}
+
+func (pcm *JPCMap) Get(k1, k2 *PredictionContext) (*PredictionContext, bool) {
+ if collectStats {
+ pcm.stats.Gets++
+ }
+ // Do we have a map stored by k1?
+ //
+ m2, present := pcm.store.Get(k1)
+ if present {
+ if collectStats {
+ pcm.stats.GetHits++
+ }
+ // We found a map of values corresponding to k1, so now we need to look up k2 in that map
+ //
+ return m2.Get(k2)
+ }
+ if collectStats {
+ pcm.stats.GetMisses++
+ }
+ return nil, false
+}
+
+func (pcm *JPCMap) Put(k1, k2, v *PredictionContext) {
+
+ if collectStats {
+ pcm.stats.Puts++
+ }
+	// First, does a map already exist for k1?
+ //
+ if m2, present := pcm.store.Get(k1); present {
+ if collectStats {
+ pcm.stats.PutHits++
+ }
+ _, present = m2.Put(k2, v)
+ if !present {
+ pcm.size++
+ if collectStats {
+ pcm.stats.CurSize = pcm.size
+ if pcm.size > pcm.stats.MaxSize {
+ pcm.stats.MaxSize = pcm.size
+ }
+ }
+ }
+ } else {
+		// No map found for k1, so we create it, add in our value, then store it
+ //
+ if collectStats {
+ pcm.stats.PutMisses++
+ m2 = NewJMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]](pContextEqInst, pcm.stats.Source, pcm.stats.Description+" map entry")
+ } else {
+ m2 = NewJMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]](pContextEqInst, PredictionContextCacheCollection, "map entry")
+ }
+
+ m2.Put(k2, v)
+ pcm.store.Put(k1, m2)
+ pcm.size++
+ }
+}
+
+type JPCMap2 struct {
+ store map[int][]JPCEntry
+ size int
+ stats *JStatRec
+}
+
+type JPCEntry struct {
+ k1, k2, v *PredictionContext
+}
+
+func NewJPCMap2(cType CollectionSource, desc string) *JPCMap2 {
+ m := &JPCMap2{
+ store: make(map[int][]JPCEntry, 1000),
+ }
+ if collectStats {
+ m.stats = &JStatRec{
+ Source: cType,
+ Description: desc,
+ }
+ // Track where we created it from if we are being asked to do so
+ if runtimeConfig.statsTraceStacks {
+ m.stats.CreateStack = debug.Stack()
+ }
+ Statistics.AddJStatRec(m.stats)
+ }
+ return m
+}
+
+// dHash computes a composite hash for the ordered key pair (k1, k2).
+func dHash(k1, k2 *PredictionContext) int {
+ return k1.cachedHash*31 + k2.cachedHash
+}
+
+func (pcm *JPCMap2) Get(k1, k2 *PredictionContext) (*PredictionContext, bool) {
+ if collectStats {
+ pcm.stats.Gets++
+ }
+
+ h := dHash(k1, k2)
+ var hClash bool
+ for _, e := range pcm.store[h] {
+ hClash = true
+ if e.k1.Equals(k1) && e.k2.Equals(k2) {
+ if collectStats {
+ pcm.stats.GetHits++
+ pcm.stats.GetHashConflicts++
+ }
+ return e.v, true
+ }
+ if collectStats {
+ pcm.stats.GetMisses++
+ }
+ }
+ if collectStats {
+ if hClash {
+ pcm.stats.GetHashConflicts++
+ }
+ pcm.stats.GetNoEnt++
+ }
+ return nil, false
+}
+
+func (pcm *JPCMap2) Put(k1, k2, v *PredictionContext) (*PredictionContext, bool) {
+ if collectStats {
+ pcm.stats.Puts++
+ }
+ h := dHash(k1, k2)
+ var hClash bool
+ for _, e := range pcm.store[h] {
+ hClash = true
+ if e.k1.Equals(k1) && e.k2.Equals(k2) {
+ if collectStats {
+ pcm.stats.PutHits++
+ pcm.stats.PutHashConflicts++
+ }
+ return e.v, true
+ }
+ if collectStats {
+ pcm.stats.PutMisses++
+ }
+ }
+ if collectStats {
+ if hClash {
+ pcm.stats.PutHashConflicts++
+ }
+ }
+ pcm.store[h] = append(pcm.store[h], JPCEntry{k1, k2, v})
+ pcm.size++
+ if collectStats {
+ pcm.stats.CurSize = pcm.size
+ if pcm.size > pcm.stats.MaxSize {
+ pcm.stats.MaxSize = pcm.size
+ }
+ }
+ return nil, false
+}
+
+type VisitEntry struct {
+ k *PredictionContext
+ v *PredictionContext
+}
+type VisitRecord struct {
+ store map[*PredictionContext]*PredictionContext
+ len int
+ stats *JStatRec
+}
+
+type VisitList struct {
+ cache *list.List
+ lock sync.RWMutex
+}
+
+var visitListPool = VisitList{
+ cache: list.New(),
+ lock: sync.RWMutex{},
+}
+
+// NewVisitRecord returns a new VisitRecord instance from the pool if available.
+// Note that this "map" uses a pointer as a key because we are emulating the behavior of
+// IdentityHashMap in Java, which uses the `==` operator to compare whether the keys are equal,
+// that is, whether the key is the same object reference, rather than whether
+// it is .equals() to another object.
+func NewVisitRecord() *VisitRecord {
+ visitListPool.lock.Lock()
+ el := visitListPool.cache.Front()
+ defer visitListPool.lock.Unlock()
+ var vr *VisitRecord
+ if el == nil {
+ vr = &VisitRecord{
+ store: make(map[*PredictionContext]*PredictionContext),
+ }
+ if collectStats {
+ vr.stats = &JStatRec{
+ Source: PredictionContextCacheCollection,
+ Description: "VisitRecord",
+ }
+ // Track where we created it from if we are being asked to do so
+ if runtimeConfig.statsTraceStacks {
+ vr.stats.CreateStack = debug.Stack()
+ }
+ }
+ } else {
+ vr = el.Value.(*VisitRecord)
+ visitListPool.cache.Remove(el)
+ vr.store = make(map[*PredictionContext]*PredictionContext)
+ }
+ if collectStats {
+ Statistics.AddJStatRec(vr.stats)
+ }
+ return vr
+}
+
+func (vr *VisitRecord) Release() {
+ vr.len = 0
+ vr.store = nil
+ if collectStats {
+ vr.stats.MaxSize = 0
+ vr.stats.CurSize = 0
+ vr.stats.Gets = 0
+ vr.stats.GetHits = 0
+ vr.stats.GetMisses = 0
+ vr.stats.GetHashConflicts = 0
+ vr.stats.GetNoEnt = 0
+ vr.stats.Puts = 0
+ vr.stats.PutHits = 0
+ vr.stats.PutMisses = 0
+ vr.stats.PutHashConflicts = 0
+ vr.stats.MaxSlotSize = 0
+ }
+ visitListPool.lock.Lock()
+ visitListPool.cache.PushBack(vr)
+ visitListPool.lock.Unlock()
+}
+
+func (vr *VisitRecord) Get(k *PredictionContext) (*PredictionContext, bool) {
+ if collectStats {
+ vr.stats.Gets++
+ }
+ v := vr.store[k]
+ if v != nil {
+ if collectStats {
+ vr.stats.GetHits++
+ }
+ return v, true
+ }
+ if collectStats {
+ vr.stats.GetNoEnt++
+ }
+ return nil, false
+}
+
+func (vr *VisitRecord) Put(k, v *PredictionContext) (*PredictionContext, bool) {
+ if collectStats {
+ vr.stats.Puts++
+ }
+ vr.store[k] = v
+ vr.len++
+ if collectStats {
+ vr.stats.CurSize = vr.len
+ if vr.len > vr.stats.MaxSize {
+ vr.stats.MaxSize = vr.len
+ }
+ }
+ return v, false
+}
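+
+// Editorial sketch of the VisitRecord pool, not upstream code. Records are
+// identity-keyed maps recycled through visitListPool, so every call to
+// NewVisitRecord should be paired with a Release.
+//
+//	vr := NewVisitRecord()
+//	defer vr.Release() // return the record to the pool for reuse
+//
+//	var ctx *PredictionContext // some context being visited (non-nil in real use)
+//	if _, seen := vr.Get(ctx); !seen {
+//		vr.Put(ctx, ctx) // mark this exact pointer as visited
+//	}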
diff --git a/vendor/github.com/antlr4-go/antlr/v4/lexer.go b/vendor/github.com/antlr4-go/antlr/v4/lexer.go
new file mode 100644
index 0000000..3c7896a
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/lexer.go
@@ -0,0 +1,426 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "strconv"
+)
+
+// A Lexer is a recognizer that draws input symbols from a character stream.
+// Lexer grammars result in a subclass of this object. A Lexer object
+// uses simplified Match() and error recovery mechanisms in the interest
+// of speed.
+
+type Lexer interface {
+ TokenSource
+ Recognizer
+
+ Emit() Token
+
+ SetChannel(int)
+ PushMode(int)
+ PopMode() int
+ SetType(int)
+ SetMode(int)
+}
+
+type BaseLexer struct {
+ *BaseRecognizer
+
+ Interpreter ILexerATNSimulator
+ TokenStartCharIndex int
+ TokenStartLine int
+ TokenStartColumn int
+ ActionType int
+ Virt Lexer // The most derived lexer implementation. Allows virtual method calls.
+
+ input CharStream
+ factory TokenFactory
+ tokenFactorySourcePair *TokenSourceCharStreamPair
+ token Token
+ hitEOF bool
+ channel int
+ thetype int
+ modeStack IntStack
+ mode int
+ text string
+}
+
+func NewBaseLexer(input CharStream) *BaseLexer {
+
+ lexer := new(BaseLexer)
+
+ lexer.BaseRecognizer = NewBaseRecognizer()
+
+ lexer.input = input
+ lexer.factory = CommonTokenFactoryDEFAULT
+ lexer.tokenFactorySourcePair = &TokenSourceCharStreamPair{lexer, input}
+
+ lexer.Virt = lexer
+
+ lexer.Interpreter = nil // child classes must populate it
+
+ // The goal of all lexer rules/methods is to create a token object.
+	// The token field is an instance variable because multiple rules may
+	// collaborate to create a single token. NextToken will return this
+	// object after Matching lexer rule(s). If you subclass to allow
+	// multiple token emissions, then set it to the last token to be
+	// Matched, or to something non-nil, so that the auto token emit
+	// mechanism will not emit another token.
+ lexer.token = nil
+
+ // What character index in the stream did the current token start at?
+ // Needed, for example, to get the text for current token. Set at
+ // the start of NextToken.
+ lexer.TokenStartCharIndex = -1
+
+	// The line on which the first character of the token resides.
+ lexer.TokenStartLine = -1
+
+	// The character position of the first character within the line.
+ lexer.TokenStartColumn = -1
+
+ // Once we see EOF on char stream, next token will be EOF.
+ // If you have DONE : EOF then you see DONE EOF.
+ lexer.hitEOF = false
+
+	// The channel number for the current token.
+ lexer.channel = TokenDefaultChannel
+
+	// The token type for the current token.
+ lexer.thetype = TokenInvalidType
+
+ lexer.modeStack = make([]int, 0)
+ lexer.mode = LexerDefaultMode
+
+	// You can set the text for the current token to override what is in
+	// the input char buffer. Use SetText(), or set this instance variable
+	// directly.
+ lexer.text = ""
+
+ return lexer
+}
+
+const (
+ LexerDefaultMode = 0
+ LexerMore = -2
+ LexerSkip = -3
+)
+
+//goland:noinspection GoUnusedConst
+const (
+ LexerDefaultTokenChannel = TokenDefaultChannel
+ LexerHidden = TokenHiddenChannel
+ LexerMinCharValue = 0x0000
+ LexerMaxCharValue = 0x10FFFF
+)
+
+func (b *BaseLexer) Reset() {
+	// Reset all lexer state variables
+ if b.input != nil {
+ b.input.Seek(0) // rewind the input
+ }
+ b.token = nil
+ b.thetype = TokenInvalidType
+ b.channel = TokenDefaultChannel
+ b.TokenStartCharIndex = -1
+ b.TokenStartColumn = -1
+ b.TokenStartLine = -1
+ b.text = ""
+
+ b.hitEOF = false
+ b.mode = LexerDefaultMode
+ b.modeStack = make([]int, 0)
+
+ b.Interpreter.reset()
+}
+
+func (b *BaseLexer) GetInterpreter() ILexerATNSimulator {
+ return b.Interpreter
+}
+
+func (b *BaseLexer) GetInputStream() CharStream {
+ return b.input
+}
+
+func (b *BaseLexer) GetSourceName() string {
+ return b.GrammarFileName
+}
+
+func (b *BaseLexer) SetChannel(v int) {
+ b.channel = v
+}
+
+func (b *BaseLexer) GetTokenFactory() TokenFactory {
+ return b.factory
+}
+
+func (b *BaseLexer) setTokenFactory(f TokenFactory) {
+ b.factory = f
+}
+
+func (b *BaseLexer) safeMatch() (ret int) {
+ defer func() {
+ if e := recover(); e != nil {
+ if re, ok := e.(RecognitionException); ok {
+ b.notifyListeners(re) // Report error
+ b.Recover(re)
+ ret = LexerSkip // default
+ }
+ }
+ }()
+
+ return b.Interpreter.Match(b.input, b.mode)
+}
+
+// NextToken returns a token from the lexer input source, i.e. it Matches a token on the source char stream.
+func (b *BaseLexer) NextToken() Token {
+ if b.input == nil {
+ panic("NextToken requires a non-nil input stream.")
+ }
+
+ tokenStartMarker := b.input.Mark()
+
+ // previously in finally block
+ defer func() {
+ // make sure we release marker after Match or
+ // unbuffered char stream will keep buffering
+ b.input.Release(tokenStartMarker)
+ }()
+
+ for {
+ if b.hitEOF {
+ b.EmitEOF()
+ return b.token
+ }
+ b.token = nil
+ b.channel = TokenDefaultChannel
+ b.TokenStartCharIndex = b.input.Index()
+ b.TokenStartColumn = b.Interpreter.GetCharPositionInLine()
+ b.TokenStartLine = b.Interpreter.GetLine()
+ b.text = ""
+ continueOuter := false
+ for {
+ b.thetype = TokenInvalidType
+
+ ttype := b.safeMatch()
+
+ if b.input.LA(1) == TokenEOF {
+ b.hitEOF = true
+ }
+ if b.thetype == TokenInvalidType {
+ b.thetype = ttype
+ }
+ if b.thetype == LexerSkip {
+ continueOuter = true
+ break
+ }
+ if b.thetype != LexerMore {
+ break
+ }
+ }
+
+ if continueOuter {
+ continue
+ }
+ if b.token == nil {
+ b.Virt.Emit()
+ }
+ return b.token
+ }
+}
+
+// Skip instructs the lexer to Skip creating a token for current lexer rule
+// and look for another token. [NextToken] knows to keep looking when
+// a lexer rule finishes with token set to [SKIPTOKEN]. Recall that
+// if token==nil at end of any token rule, it creates one for you
+// and emits it.
+func (b *BaseLexer) Skip() {
+ b.thetype = LexerSkip
+}
+
+func (b *BaseLexer) More() {
+ b.thetype = LexerMore
+}
+
+// SetMode changes the lexer to a new mode. The lexer will use this mode from here on in, and the rules for that mode
+// will be in force.
+func (b *BaseLexer) SetMode(m int) {
+ b.mode = m
+}
+
+// PushMode saves the current lexer mode so that it can be restored later
+// (see [PopMode]), then sets the current lexer mode to the supplied mode m.
+func (b *BaseLexer) PushMode(m int) {
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Println("pushMode " + strconv.Itoa(m))
+ }
+ b.modeStack.Push(b.mode)
+ b.mode = m
+}
+
+// PopMode restores the lexer mode saved by a call to [PushMode]. It is a panic error if there is no saved mode to
+// return to.
+func (b *BaseLexer) PopMode() int {
+ if len(b.modeStack) == 0 {
+ panic("Empty Stack")
+ }
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Println("popMode back to " + fmt.Sprint(b.modeStack[0:len(b.modeStack)-1]))
+ }
+ i, _ := b.modeStack.Pop()
+ b.mode = i
+ return b.mode
+}
+
+func (b *BaseLexer) inputStream() CharStream {
+ return b.input
+}
+
+// SetInputStream resets the lexer input stream and associated lexer state.
+func (b *BaseLexer) SetInputStream(input CharStream) {
+ b.input = nil
+ b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
+ b.Reset()
+ b.input = input
+ b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
+}
+
+func (b *BaseLexer) GetTokenSourceCharStreamPair() *TokenSourceCharStreamPair {
+ return b.tokenFactorySourcePair
+}
+
+// EmitToken by default does not support multiple emits per [NextToken] invocation
+// for efficiency reasons. Subclass and override this func, [NextToken],
+// and [GetToken] (to push tokens into a list and pull from that list
+// rather than a single variable as this implementation does).
+func (b *BaseLexer) EmitToken(token Token) {
+ b.token = token
+}
+
+// Emit is the standard method called to automatically emit a token at the
+// outermost lexical rule. The token object should point into the
+// char buffer start..stop. If there is a text override in 'text',
+// use that to set the token's text. Override this method to emit
+// custom [Token] objects or provide a new factory.
+func (b *BaseLexer) Emit() Token {
+ t := b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.GetCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
+ b.EmitToken(t)
+ return t
+}
+
+// EmitEOF emits an EOF token. By default, this is the last token emitted
+func (b *BaseLexer) EmitEOF() Token {
+ cpos := b.GetCharPositionInLine()
+ lpos := b.GetLine()
+ eof := b.factory.Create(b.tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b.input.Index(), b.input.Index()-1, lpos, cpos)
+ b.EmitToken(eof)
+ return eof
+}
+
+// GetCharPositionInLine returns the current position in the current line as far as the lexer is concerned.
+func (b *BaseLexer) GetCharPositionInLine() int {
+ return b.Interpreter.GetCharPositionInLine()
+}
+
+func (b *BaseLexer) GetLine() int {
+ return b.Interpreter.GetLine()
+}
+
+func (b *BaseLexer) GetType() int {
+ return b.thetype
+}
+
+func (b *BaseLexer) SetType(t int) {
+ b.thetype = t
+}
+
+// GetCharIndex returns the index of the current character of lookahead
+func (b *BaseLexer) GetCharIndex() int {
+ return b.input.Index()
+}
+
+// GetText returns the text Matched so far for the current token or any text override.
+func (b *BaseLexer) GetText() string {
+ if b.text != "" {
+ return b.text
+ }
+
+ return b.Interpreter.GetText(b.input)
+}
+
+// SetText sets the complete text of this token; it wipes any previous changes to the text.
+func (b *BaseLexer) SetText(text string) {
+ b.text = text
+}
+
+// GetATN returns the ATN used by the lexer.
+func (b *BaseLexer) GetATN() *ATN {
+ return b.Interpreter.ATN()
+}
+
+// GetAllTokens returns a list of all [Token] objects in input char stream.
+// Forces a load of all tokens that can be made from the input char stream.
+//
+// Does not include EOF token.
+func (b *BaseLexer) GetAllTokens() []Token {
+ vl := b.Virt
+ tokens := make([]Token, 0)
+ t := vl.NextToken()
+ for t.GetTokenType() != TokenEOF {
+ tokens = append(tokens, t)
+ t = vl.NextToken()
+ }
+ return tokens
+}
+
+func (b *BaseLexer) notifyListeners(e RecognitionException) {
+ start := b.TokenStartCharIndex
+ stop := b.input.Index()
+ text := b.input.GetTextFromInterval(NewInterval(start, stop))
+ msg := "token recognition error at: '" + text + "'"
+ listener := b.GetErrorListenerDispatch()
+ listener.SyntaxError(b, nil, b.TokenStartLine, b.TokenStartColumn, msg, e)
+}
+
+func (b *BaseLexer) getErrorDisplayForChar(c rune) string {
+ if c == TokenEOF {
+		return "<EOF>"
+ } else if c == '\n' {
+ return "\\n"
+ } else if c == '\t' {
+ return "\\t"
+ } else if c == '\r' {
+ return "\\r"
+ } else {
+ return string(c)
+ }
+}
+
+func (b *BaseLexer) getCharErrorDisplay(c rune) string {
+ return "'" + b.getErrorDisplayForChar(c) + "'"
+}
+
+// Recover handles a recognition error. A lexer can normally Match any char
+// in its vocabulary after Matching a token, so here we do the easy thing and
+// just kill a character and hope it all works out. You can instead use the
+// rule invocation stack to do sophisticated error recovery if you are in a
+// fragment rule.
+//
+// In general, lexers should not need to recover and should have rules that cover any eventuality, such as
+// a character that makes no sense to the recognizer.
+func (b *BaseLexer) Recover(re RecognitionException) {
+ if b.input.LA(1) != TokenEOF {
+ if _, ok := re.(*LexerNoViableAltException); ok {
+ // Skip a char and try again
+ b.Interpreter.Consume(b.input)
+ } else {
+ // TODO: Do we lose character or line position information?
+ b.input.Consume()
+ }
+ }
+}
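+
+// Editorial usage sketch, not upstream code: driving a lexer by hand. The
+// NewMyLexer constructor stands in for a lexer generated by the ANTLR tool,
+// and the "fmt" import is assumed.
+//
+//	input := NewInputStream("1 + 2")
+//	lex := NewMyLexer(input) // hypothetical generated lexer
+//	for _, tok := range lex.GetAllTokens() {
+//		fmt.Printf("%d %q\n", tok.GetTokenType(), tok.GetText())
+//	}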
diff --git a/vendor/github.com/antlr4-go/antlr/v4/lexer_action.go b/vendor/github.com/antlr4-go/antlr/v4/lexer_action.go
new file mode 100644
index 0000000..eaa7393
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/lexer_action.go
@@ -0,0 +1,452 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import "strconv"
+
+const (
+ // LexerActionTypeChannel represents a [LexerChannelAction] action.
+ LexerActionTypeChannel = 0
+
+ // LexerActionTypeCustom represents a [LexerCustomAction] action.
+ LexerActionTypeCustom = 1
+
+ // LexerActionTypeMode represents a [LexerModeAction] action.
+ LexerActionTypeMode = 2
+
+ // LexerActionTypeMore represents a [LexerMoreAction] action.
+ LexerActionTypeMore = 3
+
+ // LexerActionTypePopMode represents a [LexerPopModeAction] action.
+ LexerActionTypePopMode = 4
+
+ // LexerActionTypePushMode represents a [LexerPushModeAction] action.
+ LexerActionTypePushMode = 5
+
+ // LexerActionTypeSkip represents a [LexerSkipAction] action.
+ LexerActionTypeSkip = 6
+
+ // LexerActionTypeType represents a [LexerTypeAction] action.
+ LexerActionTypeType = 7
+)
+
+type LexerAction interface {
+ getActionType() int
+ getIsPositionDependent() bool
+ execute(lexer Lexer)
+ Hash() int
+ Equals(other LexerAction) bool
+}
+
+type BaseLexerAction struct {
+ actionType int
+ isPositionDependent bool
+}
+
+func NewBaseLexerAction(action int) *BaseLexerAction {
+ la := new(BaseLexerAction)
+
+ la.actionType = action
+ la.isPositionDependent = false
+
+ return la
+}
+
+func (b *BaseLexerAction) execute(_ Lexer) {
+ panic("Not implemented")
+}
+
+func (b *BaseLexerAction) getActionType() int {
+ return b.actionType
+}
+
+func (b *BaseLexerAction) getIsPositionDependent() bool {
+ return b.isPositionDependent
+}
+
+func (b *BaseLexerAction) Hash() int {
+ h := murmurInit(0)
+ h = murmurUpdate(h, b.actionType)
+ return murmurFinish(h, 1)
+}
+
+func (b *BaseLexerAction) Equals(other LexerAction) bool {
+ return b.actionType == other.getActionType()
+}
+
+// LexerSkipAction implements the [BaseLexerAction.Skip] lexer action by calling [Lexer.Skip].
+//
+// The Skip command does not have any parameters, so this action is
+// implemented as a singleton instance exposed by [LexerSkipActionINSTANCE].
+type LexerSkipAction struct {
+ *BaseLexerAction
+}
+
+func NewLexerSkipAction() *LexerSkipAction {
+ la := new(LexerSkipAction)
+ la.BaseLexerAction = NewBaseLexerAction(LexerActionTypeSkip)
+ return la
+}
+
+// LexerSkipActionINSTANCE provides a singleton instance of this parameterless lexer action.
+var LexerSkipActionINSTANCE = NewLexerSkipAction()
+
+func (l *LexerSkipAction) execute(lexer Lexer) {
+ lexer.Skip()
+}
+
+// String returns a string representation of the current [LexerSkipAction].
+func (l *LexerSkipAction) String() string {
+ return "skip"
+}
+
+func (b *LexerSkipAction) Equals(other LexerAction) bool {
+ return other.getActionType() == LexerActionTypeSkip
+}
+
+// LexerTypeAction implements the type lexer action by calling [Lexer.SetType]
+// with the assigned type.
+type LexerTypeAction struct {
+ *BaseLexerAction
+
+ thetype int
+}
+
+func NewLexerTypeAction(thetype int) *LexerTypeAction {
+ l := new(LexerTypeAction)
+ l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeType)
+ l.thetype = thetype
+ return l
+}
+
+func (l *LexerTypeAction) execute(lexer Lexer) {
+ lexer.SetType(l.thetype)
+}
+
+func (l *LexerTypeAction) Hash() int {
+ h := murmurInit(0)
+ h = murmurUpdate(h, l.actionType)
+ h = murmurUpdate(h, l.thetype)
+ return murmurFinish(h, 2)
+}
+
+func (l *LexerTypeAction) Equals(other LexerAction) bool {
+ if l == other {
+ return true
+ } else if _, ok := other.(*LexerTypeAction); !ok {
+ return false
+ } else {
+ return l.thetype == other.(*LexerTypeAction).thetype
+ }
+}
+
+func (l *LexerTypeAction) String() string {
+ return "actionType(" + strconv.Itoa(l.thetype) + ")"
+}
+
+// LexerPushModeAction implements the pushMode lexer action by calling
+// [Lexer.PushMode] with the assigned mode.
+type LexerPushModeAction struct {
+ *BaseLexerAction
+ mode int
+}
+
+func NewLexerPushModeAction(mode int) *LexerPushModeAction {
+
+ l := new(LexerPushModeAction)
+ l.BaseLexerAction = NewBaseLexerAction(LexerActionTypePushMode)
+
+ l.mode = mode
+ return l
+}
+
+// This action is implemented by calling [Lexer.PushMode] with the
+// value provided by the mode field.
+func (l *LexerPushModeAction) execute(lexer Lexer) {
+ lexer.PushMode(l.mode)
+}
+
+func (l *LexerPushModeAction) Hash() int {
+ h := murmurInit(0)
+ h = murmurUpdate(h, l.actionType)
+ h = murmurUpdate(h, l.mode)
+ return murmurFinish(h, 2)
+}
+
+func (l *LexerPushModeAction) Equals(other LexerAction) bool {
+ if l == other {
+ return true
+ } else if _, ok := other.(*LexerPushModeAction); !ok {
+ return false
+ } else {
+ return l.mode == other.(*LexerPushModeAction).mode
+ }
+}
+
+func (l *LexerPushModeAction) String() string {
+ return "pushMode(" + strconv.Itoa(l.mode) + ")"
+}
+
+// LexerPopModeAction implements the popMode lexer action by calling [Lexer.PopMode].
+//
+// The popMode command does not have any parameters, so this action is
+// implemented as a singleton instance exposed by [LexerPopModeActionINSTANCE]
+type LexerPopModeAction struct {
+ *BaseLexerAction
+}
+
+func NewLexerPopModeAction() *LexerPopModeAction {
+
+ l := new(LexerPopModeAction)
+
+ l.BaseLexerAction = NewBaseLexerAction(LexerActionTypePopMode)
+
+ return l
+}
+
+var LexerPopModeActionINSTANCE = NewLexerPopModeAction()
+
+// This action is implemented by calling [Lexer.PopMode].
+func (l *LexerPopModeAction) execute(lexer Lexer) {
+ lexer.PopMode()
+}
+
+func (l *LexerPopModeAction) String() string {
+ return "popMode"
+}
+
+// LexerMoreAction implements the more lexer action by calling [Lexer.More].
+//
+// The more command does not have any parameters, so this action is
+// implemented as a singleton instance exposed by [LexerMoreActionINSTANCE].
+type LexerMoreAction struct {
+ *BaseLexerAction
+}
+
+func NewLexerMoreAction() *LexerMoreAction {
+ l := new(LexerMoreAction)
+ l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMore)
+
+ return l
+}
+
+var LexerMoreActionINSTANCE = NewLexerMoreAction()
+
+// This action is implemented by calling [Lexer.More].
+func (l *LexerMoreAction) execute(lexer Lexer) {
+ lexer.More()
+}
+
+func (l *LexerMoreAction) String() string {
+ return "more"
+}
+
+// LexerModeAction implements the mode lexer action by calling [Lexer.SetMode] with
+// the assigned mode.
+type LexerModeAction struct {
+ *BaseLexerAction
+ mode int
+}
+
+func NewLexerModeAction(mode int) *LexerModeAction {
+ l := new(LexerModeAction)
+ l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMode)
+ l.mode = mode
+ return l
+}
+
+// This action is implemented by calling [Lexer.SetMode] with the
+// value provided by the mode field.
+func (l *LexerModeAction) execute(lexer Lexer) {
+ lexer.SetMode(l.mode)
+}
+
+func (l *LexerModeAction) Hash() int {
+ h := murmurInit(0)
+ h = murmurUpdate(h, l.actionType)
+ h = murmurUpdate(h, l.mode)
+ return murmurFinish(h, 2)
+}
+
+func (l *LexerModeAction) Equals(other LexerAction) bool {
+ if l == other {
+ return true
+ } else if _, ok := other.(*LexerModeAction); !ok {
+ return false
+ } else {
+ return l.mode == other.(*LexerModeAction).mode
+ }
+}
+
+func (l *LexerModeAction) String() string {
+ return "mode(" + strconv.Itoa(l.mode) + ")"
+}
+
+// LexerCustomAction executes a custom lexer action by calling [Recognizer.Action] with the
+// rule and action indexes assigned to the custom action. The implementation of
+// a custom action is added to the generated code for the lexer in an override
+// of [Recognizer.Action] when the grammar is compiled.
+//
+// This class may represent embedded actions created with the {...}
+// syntax in ANTLR 4, as well as actions created for lexer commands where the
+// command argument could not be evaluated when the grammar was compiled.
+type LexerCustomAction struct {
+ *BaseLexerAction
+ ruleIndex, actionIndex int
+}
+
+// NewLexerCustomAction constructs a custom lexer action with the specified
+// rule and action indexes, which are used for calls to [Lexer.Action].
+func NewLexerCustomAction(ruleIndex, actionIndex int) *LexerCustomAction {
+ l := new(LexerCustomAction)
+ l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeCustom)
+ l.ruleIndex = ruleIndex
+ l.actionIndex = actionIndex
+ l.isPositionDependent = true
+ return l
+}
+
+// Custom actions are implemented by calling [Lexer.Action] with the
+// appropriate rule and action indexes.
+func (l *LexerCustomAction) execute(lexer Lexer) {
+ lexer.Action(nil, l.ruleIndex, l.actionIndex)
+}
+
+func (l *LexerCustomAction) Hash() int {
+ h := murmurInit(0)
+ h = murmurUpdate(h, l.actionType)
+ h = murmurUpdate(h, l.ruleIndex)
+ h = murmurUpdate(h, l.actionIndex)
+ return murmurFinish(h, 3)
+}
+
+func (l *LexerCustomAction) Equals(other LexerAction) bool {
+ if l == other {
+ return true
+ } else if _, ok := other.(*LexerCustomAction); !ok {
+ return false
+ } else {
+ return l.ruleIndex == other.(*LexerCustomAction).ruleIndex &&
+ l.actionIndex == other.(*LexerCustomAction).actionIndex
+ }
+}
+
+// LexerChannelAction implements the channel lexer action by calling
+// [Lexer.SetChannel] with the assigned channel.
+type LexerChannelAction struct {
+ *BaseLexerAction
+ channel int
+}
+
+// NewLexerChannelAction creates a channel lexer action by calling
+// [Lexer.SetChannel] with the assigned channel.
+//
+// Constructs a new channel action with the specified channel value.
+func NewLexerChannelAction(channel int) *LexerChannelAction {
+ l := new(LexerChannelAction)
+ l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeChannel)
+ l.channel = channel
+ return l
+}
+
+// This action is implemented by calling [Lexer.SetChannel] with the
+// assigned channel value.
+func (l *LexerChannelAction) execute(lexer Lexer) {
+ lexer.SetChannel(l.channel)
+}
+
+func (l *LexerChannelAction) Hash() int {
+ h := murmurInit(0)
+ h = murmurUpdate(h, l.actionType)
+ h = murmurUpdate(h, l.channel)
+ return murmurFinish(h, 2)
+}
+
+func (l *LexerChannelAction) Equals(other LexerAction) bool {
+ if l == other {
+ return true
+ } else if _, ok := other.(*LexerChannelAction); !ok {
+ return false
+ } else {
+ return l.channel == other.(*LexerChannelAction).channel
+ }
+}
+
+func (l *LexerChannelAction) String() string {
+ return "channel(" + strconv.Itoa(l.channel) + ")"
+}
+
+// LexerIndexedCustomAction is an implementation of [LexerAction] used for tracking input offsets
+// for position-dependent actions within a [LexerActionExecutor].
+//
+// This action is not serialized as part of the ATN, and is only required for
+// position-dependent lexer actions which appear at a location other than the
+// end of a rule. For more information about DFA optimizations employed for
+// lexer actions, see [LexerActionExecutorappend] and
+// [LexerActionExecutor.fixOffsetBeforeMatch].
+type LexerIndexedCustomAction struct {
+ *BaseLexerAction
+ offset int
+ lexerAction LexerAction
+ isPositionDependent bool
+}
+
+// NewLexerIndexedCustomAction constructs a new indexed custom action by associating a character offset
+// with a [LexerAction].
+//
+// Note: This class is only required for lexer actions for which
+// [LexerAction.isPositionDependent] returns true.
+//
+// The offset points into the input [CharStream], relative to
+// the token start index, at which the specified lexerAction should be
+// executed.
+func NewLexerIndexedCustomAction(offset int, lexerAction LexerAction) *LexerIndexedCustomAction {
+
+ l := new(LexerIndexedCustomAction)
+ l.BaseLexerAction = NewBaseLexerAction(lexerAction.getActionType())
+
+ l.offset = offset
+ l.lexerAction = lexerAction
+ l.isPositionDependent = true
+
+ return l
+}
+
+// This method calls execute on the wrapped lexer action
+// using the provided lexer.
+func (l *LexerIndexedCustomAction) execute(lexer Lexer) {
+ // assume the input stream position was properly set by the calling code
+ l.lexerAction.execute(lexer)
+}
+
+func (l *LexerIndexedCustomAction) Hash() int {
+ h := murmurInit(0)
+ h = murmurUpdate(h, l.offset)
+ h = murmurUpdate(h, l.lexerAction.Hash())
+ return murmurFinish(h, 2)
+}
+
+func (l *LexerIndexedCustomAction) Equals(other LexerAction) bool {
+ if l == other {
+ return true
+ } else if _, ok := other.(*LexerIndexedCustomAction); !ok {
+ return false
+ } else {
+ return l.offset == other.(*LexerIndexedCustomAction).offset &&
+ l.lexerAction.Equals(other.(*LexerIndexedCustomAction).lexerAction)
+ }
+}
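+
+// A minimal sketch of how these pieces compose (the rule/action indexes and
+// the offset of 2 are hypothetical values, not taken from upstream docs):
+//
+//	custom := NewLexerCustomAction(0, 0) // position-dependent by definition
+//	indexed := NewLexerIndexedCustomAction(2, custom)
+//	// indexed.execute(lexer) runs the wrapped action and assumes the caller
+//	// has already positioned the input stream; see LexerActionExecutor.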
diff --git a/vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go b/vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go
new file mode 100644
index 0000000..dfc28c3
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go
@@ -0,0 +1,173 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import "golang.org/x/exp/slices"
+
+// LexerActionExecutor represents an executor for a sequence of lexer actions which were traversed during
+// the Matching operation of a lexer rule (token).
+//
+// The executor tracks position information for position-dependent lexer actions
+// efficiently, ensuring that actions appearing only at the end of the rule do
+// not cause bloating of the [DFA] created for the lexer.
+type LexerActionExecutor struct {
+ lexerActions []LexerAction
+ cachedHash int
+}
+
+func NewLexerActionExecutor(lexerActions []LexerAction) *LexerActionExecutor {
+
+ if lexerActions == nil {
+ lexerActions = make([]LexerAction, 0)
+ }
+
+ l := new(LexerActionExecutor)
+
+ l.lexerActions = lexerActions
+
+ // Caches the result of Hash since the hash code is an element
+ // of the performance-critical ATNConfig.Hash operation.
+ l.cachedHash = murmurInit(0)
+ for _, a := range lexerActions {
+ l.cachedHash = murmurUpdate(l.cachedHash, a.Hash())
+ }
+ l.cachedHash = murmurFinish(l.cachedHash, len(lexerActions))
+
+ return l
+}
+
+// LexerActionExecutorappend creates a [LexerActionExecutor] which executes the actions for
+// the input [LexerActionExecutor] followed by a specified
+// [LexerAction].
+// TODO: This does not match the Java code
+func LexerActionExecutorappend(lexerActionExecutor *LexerActionExecutor, lexerAction LexerAction) *LexerActionExecutor {
+ if lexerActionExecutor == nil {
+ return NewLexerActionExecutor([]LexerAction{lexerAction})
+ }
+
+ return NewLexerActionExecutor(append(lexerActionExecutor.lexerActions, lexerAction))
+}
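+
+// A composition sketch (the actions chosen here are arbitrary singletons,
+// purely for illustration):
+//
+//	exec := LexerActionExecutorappend(nil, LexerMoreActionINSTANCE)
+//	exec = LexerActionExecutorappend(exec, LexerPopModeActionINSTANCE)
+//	// exec now executes more() and then popMode(), in that order.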
+
+// fixOffsetBeforeMatch creates a [LexerActionExecutor] which encodes the current offset
+// for position-dependent lexer actions.
+//
+// Normally, when the executor encounters lexer actions where
+// [LexerAction.isPositionDependent] returns true, it calls
+// [IntStream.Seek] on the input [CharStream] to set the input
+// position to the end of the current token. This behavior provides
+// for efficient [DFA] representation of lexer actions which appear at the end
+// of a lexer rule, even when the lexer rule Matches a variable number of
+// characters.
+//
+// Prior to traversing a Match transition in the [ATN], the current offset
+// from the token start index is assigned to all position-dependent lexer
+// actions which have not already been assigned a fixed offset. By storing
+// the offsets relative to the token start index, the [DFA] representation of
+// lexer actions which appear in the middle of tokens remains efficient due
+// to sharing among tokens of the same Length, regardless of their absolute
+// position in the input stream.
+//
+// If the current executor already has offsets assigned to all
+// position-dependent lexer actions, the method returns this instance.
+//
+// The offset is assigned to all position-dependent
+// lexer actions which do not already have offsets assigned.
+//
+// The func returns a [LexerActionExecutor] that stores input stream offsets
+// for all position-dependent lexer actions.
+func (l *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecutor {
+ var updatedLexerActions []LexerAction
+ for i := 0; i < len(l.lexerActions); i++ {
+ _, ok := l.lexerActions[i].(*LexerIndexedCustomAction)
+ if l.lexerActions[i].getIsPositionDependent() && !ok {
+ if updatedLexerActions == nil {
+ updatedLexerActions = make([]LexerAction, 0, len(l.lexerActions))
+ updatedLexerActions = append(updatedLexerActions, l.lexerActions...)
+ }
+ updatedLexerActions[i] = NewLexerIndexedCustomAction(offset, l.lexerActions[i])
+ }
+ }
+ if updatedLexerActions == nil {
+ return l
+ }
+
+ return NewLexerActionExecutor(updatedLexerActions)
+}
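+
+// A hedged sketch of the copy-on-write behavior above: only executors that
+// contain an un-indexed position-dependent action are rebuilt.
+//
+//	exec := NewLexerActionExecutor([]LexerAction{
+//		LexerMoreActionINSTANCE,    // not position-dependent, left as-is
+//		NewLexerCustomAction(0, 0), // position-dependent, gets wrapped
+//	})
+//	fixed := exec.fixOffsetBeforeMatch(3)
+//	// fixed is a new executor whose custom action is now a
+//	// LexerIndexedCustomAction carrying offset 3; exec itself is unchanged.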
+
+// execute runs the actions encapsulated by this executor within the context of a
+// particular [Lexer].
+//
+// This method calls [IntStream.Seek] to set the position of the
+// input [CharStream] prior to calling
+// [LexerAction.execute] on a position-dependent action. Before the
+// method returns, the input position will be restored to the same position
+// it was in when the method was invoked.
+//
+// Parameter lexer is the lexer instance, and input is the stream which is the
+// source for the current token. When this method is called, the current
+// [IntStream.Index] for input should be the start of the following token,
+// i.e. 1 character past the end of the current token. Parameter startIndex
+// is the token start index; it may be passed to [IntStream.Seek] to set the
+// input position to the beginning of the token.
+func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex int) {
+ requiresSeek := false
+ stopIndex := input.Index()
+
+ defer func() {
+ if requiresSeek {
+ input.Seek(stopIndex)
+ }
+ }()
+
+ for i := 0; i < len(l.lexerActions); i++ {
+ lexerAction := l.lexerActions[i]
+ if la, ok := lexerAction.(*LexerIndexedCustomAction); ok {
+ offset := la.offset
+ input.Seek(startIndex + offset)
+ lexerAction = la.lexerAction
+ requiresSeek = (startIndex + offset) != stopIndex
+ } else if lexerAction.getIsPositionDependent() {
+ input.Seek(stopIndex)
+ requiresSeek = false
+ }
+ lexerAction.execute(lexer)
+ }
+}
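+
+// A call-site sketch (lexer, input, and startIndex are assumed to come from
+// the simulator's accept handling); the deferred Seek above guarantees the
+// stream position is restored afterwards:
+//
+//	exec.execute(lexer, input, startIndex)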
+
+func (l *LexerActionExecutor) Hash() int {
+ if l == nil {
+ // TODO: Why is this here? l should not be nil
+ return 61
+ }
+
+ // TODO: This is created from the action itself when the struct is created - will this be an issue at some point? Java uses the runtime assign hashcode
+ return l.cachedHash
+}
+
+func (l *LexerActionExecutor) Equals(other interface{}) bool {
+ if l == other {
+ return true
+ }
+ othert, ok := other.(*LexerActionExecutor)
+ if !ok {
+ return false
+ }
+ if othert == nil {
+ return false
+ }
+ if l.cachedHash != othert.cachedHash {
+ return false
+ }
+ if len(l.lexerActions) != len(othert.lexerActions) {
+ return false
+ }
+ return slices.EqualFunc(l.lexerActions, othert.lexerActions, func(i, j LexerAction) bool {
+ return i.Equals(j)
+ })
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go b/vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go
new file mode 100644
index 0000000..fe938b0
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go
@@ -0,0 +1,677 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+//goland:noinspection GoUnusedGlobalVariable
+var (
+ LexerATNSimulatorMinDFAEdge = 0
+ LexerATNSimulatorMaxDFAEdge = 127 // forces unicode to stay in ATN
+
+ LexerATNSimulatorMatchCalls = 0
+)
+
+type ILexerATNSimulator interface {
+ IATNSimulator
+
+ reset()
+ Match(input CharStream, mode int) int
+ GetCharPositionInLine() int
+ GetLine() int
+ GetText(input CharStream) string
+ Consume(input CharStream)
+}
+
+type LexerATNSimulator struct {
+ BaseATNSimulator
+
+ recog Lexer
+ predictionMode int
+ mergeCache *JPCMap2
+ startIndex int
+ Line int
+ CharPositionInLine int
+ mode int
+ prevAccept *SimState
+ MatchCalls int
+}
+
+func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *LexerATNSimulator {
+ l := &LexerATNSimulator{
+ BaseATNSimulator: BaseATNSimulator{
+ atn: atn,
+ sharedContextCache: sharedContextCache,
+ },
+ }
+
+ l.decisionToDFA = decisionToDFA
+ l.recog = recog
+
+ // The current token's starting index into the character stream.
+ // Shared across DFA to ATN simulation in case the ATN fails and the
+ // DFA did not have a previous accept state. In this case, we use the
+ // ATN-generated exception object.
+ l.startIndex = -1
+
+ // line number 1..n within the input
+ l.Line = 1
+
+ // The index of the character relative to the beginning of the line
+ // 0..n-1
+ l.CharPositionInLine = 0
+
+ l.mode = LexerDefaultMode
+
+ // Used during DFA/ATN exec to record the most recent accept configuration
+ // info
+ l.prevAccept = NewSimState()
+
+ return l
+}
+
+func (l *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
+ l.CharPositionInLine = simulator.CharPositionInLine
+ l.Line = simulator.Line
+ l.mode = simulator.mode
+ l.startIndex = simulator.startIndex
+}
+
+func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
+ l.MatchCalls++
+ l.mode = mode
+ mark := input.Mark()
+
+ defer func() {
+ input.Release(mark)
+ }()
+
+ l.startIndex = input.Index()
+ l.prevAccept.reset()
+
+ dfa := l.decisionToDFA[mode]
+
+ var s0 *DFAState
+ l.atn.stateMu.RLock()
+ s0 = dfa.getS0()
+ l.atn.stateMu.RUnlock()
+
+ if s0 == nil {
+ return l.MatchATN(input)
+ }
+
+ return l.execATN(input, s0)
+}
+
+func (l *LexerATNSimulator) reset() {
+ l.prevAccept.reset()
+ l.startIndex = -1
+ l.Line = 1
+ l.CharPositionInLine = 0
+ l.mode = LexerDefaultMode
+}
+
+func (l *LexerATNSimulator) MatchATN(input CharStream) int {
+ startState := l.atn.modeToStartState[l.mode]
+
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Println("MatchATN mode " + strconv.Itoa(l.mode) + " start: " + startState.String())
+ }
+ oldMode := l.mode
+ s0Closure := l.computeStartState(input, startState)
+ suppressEdge := s0Closure.hasSemanticContext
+ s0Closure.hasSemanticContext = false
+
+ next := l.addDFAState(s0Closure, suppressEdge)
+
+ predict := l.execATN(input, next)
+
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Println("DFA after MatchATN: " + l.decisionToDFA[oldMode].ToLexerString())
+ }
+ return predict
+}
+
+func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
+
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Println("start state closure=" + ds0.configs.String())
+ }
+ if ds0.isAcceptState {
+ // allow zero-Length tokens
+ l.captureSimState(l.prevAccept, input, ds0)
+ }
+ t := input.LA(1)
+ s := ds0 // s is current/from DFA state
+
+ for { // while more work
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Println("execATN loop starting closure: " + s.configs.String())
+ }
+
+ // As we move src->trg, src->trg, we keep track of the previous trg to
+ // avoid looking up the DFA state again, which is expensive.
+ // If the previous target was already part of the DFA, we might
+ // be able to avoid doing a reach operation upon t. If s!=nil,
+ // it means that semantic predicates didn't prevent us from
+ // creating a DFA state. Once we know s!=nil, we check to see if
+ // the DFA state has an edge already for t. If so, we can just reuse
+ // its configuration set; there's no point in re-computing it.
+ // This is kind of like doing DFA simulation within the ATN
+ // simulation because DFA simulation is really just a way to avoid
+ // computing reach/closure sets. Technically, once we know that
+ // we have a previously added DFA state, we could jump over to
+ // the DFA simulator. But, that would mean popping back and forth
+ // a lot and making things more complicated algorithmically.
+ // This optimization makes a lot of sense for loops within DFA.
+ // A character will take us back to an existing DFA state
+ // that already has lots of edges out of it. e.g., .* in comments.
+ target := l.getExistingTargetState(s, t)
+ if target == nil {
+ target = l.computeTargetState(input, s, t)
+ // print("Computed:" + str(target))
+ }
+ if target == ATNSimulatorError {
+ break
+ }
+ // If this is a consumable input element, make sure to consume before
+ // capturing the accept state so the input index, line, and char
+ // position accurately reflect the state of the interpreter at the
+ // end of the token.
+ if t != TokenEOF {
+ l.Consume(input)
+ }
+ if target.isAcceptState {
+ l.captureSimState(l.prevAccept, input, target)
+ if t == TokenEOF {
+ break
+ }
+ }
+ t = input.LA(1)
+ s = target // flip current DFA target becomes new src/from state
+ }
+
+ return l.failOrAccept(l.prevAccept, input, s.configs, t)
+}
+
+// getExistingTargetState gets an existing target state for an edge in the DFA. If the target state
+// for the edge has not yet been computed or is otherwise not available,
+// this method returns nil.
+//
+// Parameter s is the current DFA state and t is the next input symbol.
+// The func returns the existing target DFA state for the given input
+// symbol t, or nil if the target state for this edge is not already cached.
+func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
+ if t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
+ return nil
+ }
+
+ l.atn.edgeMu.RLock()
+ defer l.atn.edgeMu.RUnlock()
+ if s.getEdges() == nil {
+ return nil
+ }
+ target := s.getIthEdge(t - LexerATNSimulatorMinDFAEdge)
+ if runtimeConfig.lexerATNSimulatorDebug && target != nil {
+ fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
+ }
+ return target
+}
+
+// computeTargetState computes a target state for an edge in the [DFA], and attempt to add the
+// computed state and corresponding edge to the [DFA].
+//
+// The func returns the computed target [DFA] state for the given input symbol t.
+// If this does not lead to a valid [DFA] state, this method
+// returns ATNSimulatorError.
+func (l *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t int) *DFAState {
+ reach := NewOrderedATNConfigSet()
+
+ // if we don't find an existing DFA state
+ // Fill reach starting from closure, following t transitions
+ l.getReachableConfigSet(input, s.configs, reach, t)
+
+ if len(reach.configs) == 0 { // we got nowhere on t from s
+ if !reach.hasSemanticContext {
+ // we got nowhere on t; don't panic out this knowledge, it'd
+ // cause a fail-over from DFA later.
+ l.addDFAEdge(s, t, ATNSimulatorError, nil)
+ }
+ // stop when we can't Match any more char
+ return ATNSimulatorError
+ }
+ // Add an edge from s to target DFA found/created for reach
+ return l.addDFAEdge(s, t, nil, reach)
+}
+
+func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach *ATNConfigSet, t int) int {
+ if l.prevAccept.dfaState != nil {
+ lexerActionExecutor := prevAccept.dfaState.lexerActionExecutor
+ l.accept(input, lexerActionExecutor, l.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
+ return prevAccept.dfaState.prediction
+ }
+
+ // if no accept and EOF is first char, return EOF
+ if t == TokenEOF && input.Index() == l.startIndex {
+ return TokenEOF
+ }
+
+ panic(NewLexerNoViableAltException(l.recog, input, l.startIndex, reach))
+}
+
+// getReachableConfigSet when given a starting configuration set, figures out all [ATN] configurations
+// we can reach upon input t.
+//
+// Parameter reach is a return parameter.
+func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *ATNConfigSet, reach *ATNConfigSet, t int) {
+ // this is used to skip processing for configs which have a lower priority
+ // than a config that already reached an accept state for the same rule
+ SkipAlt := ATNInvalidAltNumber
+
+ for _, cfg := range closure.configs {
+ currentAltReachedAcceptState := cfg.GetAlt() == SkipAlt
+ if currentAltReachedAcceptState && cfg.passedThroughNonGreedyDecision {
+ continue
+ }
+
+ if runtimeConfig.lexerATNSimulatorDebug {
+
+ fmt.Printf("testing %s at %s\n", l.GetTokenName(t), cfg.String())
+ }
+
+ for _, trans := range cfg.GetState().GetTransitions() {
+ target := l.getReachableTarget(trans, t)
+ if target != nil {
+ lexerActionExecutor := cfg.lexerActionExecutor
+ if lexerActionExecutor != nil {
+ lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.Index() - l.startIndex)
+ }
+ treatEOFAsEpsilon := t == TokenEOF
+ config := NewLexerATNConfig3(cfg, target, lexerActionExecutor)
+ if l.closure(input, config, reach,
+ currentAltReachedAcceptState, true, treatEOFAsEpsilon) {
+ // any remaining configs for this alt have a lower priority
+ // than the one that just reached an accept state.
+ SkipAlt = cfg.GetAlt()
+ }
+ }
+ }
+ }
+}
+
+func (l *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Printf("ACTION %v\n", lexerActionExecutor)
+ }
+ // seek to after last char in token
+ input.Seek(index)
+ l.Line = line
+ l.CharPositionInLine = charPos
+ if lexerActionExecutor != nil && l.recog != nil {
+ lexerActionExecutor.execute(l.recog, input, startIndex)
+ }
+}
+
+func (l *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNState {
+ if trans.Matches(t, 0, LexerMaxCharValue) {
+ return trans.getTarget()
+ }
+
+ return nil
+}
+
+func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *ATNConfigSet {
+ configs := NewOrderedATNConfigSet()
+ for i := 0; i < len(p.GetTransitions()); i++ {
+ target := p.GetTransitions()[i].getTarget()
+ cfg := NewLexerATNConfig6(target, i+1, BasePredictionContextEMPTY)
+ l.closure(input, cfg, configs, false, false, false)
+ }
+
+ return configs
+}
+
+// closure since the alternatives within any lexer decision are ordered by
+// preference, this method stops pursuing the closure as soon as an accept
+// state is reached. After the first accept state is reached by depth-first
+// search from config, all other (potentially reachable) states for
+// this rule would have a lower priority.
+//
+// The func returns true if an accept state is reached.
+func (l *LexerATNSimulator) closure(input CharStream, config *ATNConfig, configs *ATNConfigSet,
+ currentAltReachedAcceptState, speculative, treatEOFAsEpsilon bool) bool {
+
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Println("closure(" + config.String() + ")")
+ }
+
+ _, ok := config.state.(*RuleStopState)
+ if ok {
+
+ if runtimeConfig.lexerATNSimulatorDebug {
+ if l.recog != nil {
+ fmt.Printf("closure at %s rule stop %s\n", l.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
+ } else {
+ fmt.Printf("closure at rule stop %s\n", config)
+ }
+ }
+
+ if config.context == nil || config.context.hasEmptyPath() {
+ if config.context == nil || config.context.isEmpty() {
+ configs.Add(config, nil)
+ return true
+ }
+
+ configs.Add(NewLexerATNConfig2(config, config.state, BasePredictionContextEMPTY), nil)
+ currentAltReachedAcceptState = true
+ }
+ if config.context != nil && !config.context.isEmpty() {
+ for i := 0; i < config.context.length(); i++ {
+ if config.context.getReturnState(i) != BasePredictionContextEmptyReturnState {
+ newContext := config.context.GetParent(i) // "pop" return state
+ returnState := l.atn.states[config.context.getReturnState(i)]
+ cfg := NewLexerATNConfig2(config, returnState, newContext)
+ currentAltReachedAcceptState = l.closure(input, cfg, configs, currentAltReachedAcceptState, speculative, treatEOFAsEpsilon)
+ }
+ }
+ }
+ return currentAltReachedAcceptState
+ }
+ // optimization
+ if !config.state.GetEpsilonOnlyTransitions() {
+ if !currentAltReachedAcceptState || !config.passedThroughNonGreedyDecision {
+ configs.Add(config, nil)
+ }
+ }
+ for j := 0; j < len(config.state.GetTransitions()); j++ {
+ trans := config.state.GetTransitions()[j]
+ cfg := l.getEpsilonTarget(input, config, trans, configs, speculative, treatEOFAsEpsilon)
+ if cfg != nil {
+ currentAltReachedAcceptState = l.closure(input, cfg, configs,
+ currentAltReachedAcceptState, speculative, treatEOFAsEpsilon)
+ }
+ }
+ return currentAltReachedAcceptState
+}
+
+// side-effect: can alter configs.hasSemanticContext
+func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *ATNConfig, trans Transition,
+ configs *ATNConfigSet, speculative, treatEOFAsEpsilon bool) *ATNConfig {
+
+ var cfg *ATNConfig
+
+ if trans.getSerializationType() == TransitionRULE {
+
+ rt := trans.(*RuleTransition)
+ newContext := SingletonBasePredictionContextCreate(config.context, rt.followState.GetStateNumber())
+ cfg = NewLexerATNConfig2(config, trans.getTarget(), newContext)
+
+ } else if trans.getSerializationType() == TransitionPRECEDENCE {
+ panic("Precedence predicates are not supported in lexers.")
+ } else if trans.getSerializationType() == TransitionPREDICATE {
+ // Track traversing semantic predicates. If we traverse,
+ // we cannot add a DFA state for this "reach" computation
+ // because the DFA would not test the predicate again in the
+ // future. Rather than creating collections of semantic predicates
+ // like v3 and testing them on prediction, v4 will test them on the
+ // fly all the time using the ATN not the DFA. This is slower but
+ // semantically it's not used that often. One of the key elements to
+ // this predicate mechanism is not adding DFA states that see
+ // predicates immediately afterwards in the ATN. For example,
+
+ // a : ID {p1}? | ID {p2}?
+
+ // should create the start state for rule 'a' (to save start state
+ // competition), but should not create target of ID state. The
+ // collection of ATN states the following ID references includes
+ // states reached by traversing predicates. Since this is when we
+ // test them, we cannot cache the DFA state target of ID.
+
+ pt := trans.(*PredicateTransition)
+
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Println("EVAL rule " + strconv.Itoa(trans.(*PredicateTransition).ruleIndex) + ":" + strconv.Itoa(pt.predIndex))
+ }
+ configs.hasSemanticContext = true
+ if l.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative) {
+ cfg = NewLexerATNConfig4(config, trans.getTarget())
+ }
+ } else if trans.getSerializationType() == TransitionACTION {
+ if config.context == nil || config.context.hasEmptyPath() {
+ // execute actions anywhere in the start rule for a token.
+ //
+ // TODO: if the entry rule is invoked recursively, some
+ // actions may be executed during the recursive call. The
+ // problem can appear when hasEmptyPath() is true but
+ // isEmpty() is false. In this case, the config needs to be
+ // split into two contexts - one with just the empty path
+ // and another with everything but the empty path.
+ // Unfortunately, the current algorithm does not allow
+ // getEpsilonTarget to return two configurations, so
+ // additional modifications are needed before we can support
+ // the split operation.
+ lexerActionExecutor := LexerActionExecutorappend(config.lexerActionExecutor, l.atn.lexerActions[trans.(*ActionTransition).actionIndex])
+ cfg = NewLexerATNConfig3(config, trans.getTarget(), lexerActionExecutor)
+ } else {
+ // ignore actions in referenced rules
+ cfg = NewLexerATNConfig4(config, trans.getTarget())
+ }
+ } else if trans.getSerializationType() == TransitionEPSILON {
+ cfg = NewLexerATNConfig4(config, trans.getTarget())
+ } else if trans.getSerializationType() == TransitionATOM ||
+ trans.getSerializationType() == TransitionRANGE ||
+ trans.getSerializationType() == TransitionSET {
+ if treatEOFAsEpsilon {
+ if trans.Matches(TokenEOF, 0, LexerMaxCharValue) {
+ cfg = NewLexerATNConfig4(config, trans.getTarget())
+ }
+ }
+ }
+ return cfg
+}
+
+// evaluatePredicate evaluates a predicate specified in the lexer.
+//
+// If speculative is true, this method was called before
+// [consume] for the Matched character. This method should call
+// [consume] before evaluating the predicate to ensure position
+// sensitive values, including [GetText], [GetLine],
+// and [GetColumn], properly reflect the current
+// lexer state. This method should restore input and the simulator
+// to the original state before returning, i.e. undo the actions made by the
+// call to [Consume].
+//
+// The func returns true if the specified predicate evaluates to true.
+func (l *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predIndex int, speculative bool) bool {
+ // assume true if no recognizer was provided
+ if l.recog == nil {
+ return true
+ }
+ if !speculative {
+ return l.recog.Sempred(nil, ruleIndex, predIndex)
+ }
+ savedcolumn := l.CharPositionInLine
+ savedLine := l.Line
+ index := input.Index()
+ marker := input.Mark()
+
+ defer func() {
+ l.CharPositionInLine = savedcolumn
+ l.Line = savedLine
+ input.Seek(index)
+ input.Release(marker)
+ }()
+
+ l.Consume(input)
+ return l.recog.Sempred(nil, ruleIndex, predIndex)
+}
+
+func (l *LexerATNSimulator) captureSimState(settings *SimState, input CharStream, dfaState *DFAState) {
+ settings.index = input.Index()
+ settings.line = l.Line
+ settings.column = l.CharPositionInLine
+ settings.dfaState = dfaState
+}
+
+func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfgs *ATNConfigSet) *DFAState {
+ if to == nil && cfgs != nil {
+ // leading up to this call, ATNConfigSet.hasSemanticContext is used as a
+ // marker indicating dynamic predicate evaluation makes this edge
+ // dependent on the specific input sequence, so the static edge in the
+ // DFA should be omitted. The target DFAState is still created since
+ // execATN has the ability to re-synchronize with the DFA state cache
+ // following the predicate evaluation step.
+ //
+ // TJP notes: next time through the DFA, we see a pred again and eval.
+ // If that gets us to a previously created (but dangling) DFA
+ // state, we can continue in pure DFA mode from there.
+ //
+ suppressEdge := cfgs.hasSemanticContext
+ cfgs.hasSemanticContext = false
+ to = l.addDFAState(cfgs, true)
+
+ if suppressEdge {
+ return to
+ }
+ }
+ // add the edge
+ if tk < LexerATNSimulatorMinDFAEdge || tk > LexerATNSimulatorMaxDFAEdge {
+ // Only track edges within the DFA bounds
+ return to
+ }
+ if runtimeConfig.lexerATNSimulatorDebug {
+ fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
+ }
+ l.atn.edgeMu.Lock()
+ defer l.atn.edgeMu.Unlock()
+ if from.getEdges() == nil {
+ // make room for tokens 1..n and -1 masquerading as index 0
+ from.setEdges(make([]*DFAState, LexerATNSimulatorMaxDFAEdge-LexerATNSimulatorMinDFAEdge+1))
+ }
+ from.setIthEdge(tk-LexerATNSimulatorMinDFAEdge, to) // connect
+
+ return to
+}
+
+// Add a new DFA state if there isn't one with this set of
+// configurations already. This method also detects the first
+// configuration containing an ATN rule stop state. Later, when
+// traversing the DFA, we will know which rule to accept.
+func (l *LexerATNSimulator) addDFAState(configs *ATNConfigSet, suppressEdge bool) *DFAState {
+
+ proposed := NewDFAState(-1, configs)
+ var firstConfigWithRuleStopState *ATNConfig
+
+ for _, cfg := range configs.configs {
+ _, ok := cfg.GetState().(*RuleStopState)
+
+ if ok {
+ firstConfigWithRuleStopState = cfg
+ break
+ }
+ }
+ if firstConfigWithRuleStopState != nil {
+ proposed.isAcceptState = true
+ proposed.lexerActionExecutor = firstConfigWithRuleStopState.lexerActionExecutor
+ proposed.setPrediction(l.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()])
+ }
+ dfa := l.decisionToDFA[l.mode]
+
+ l.atn.stateMu.Lock()
+ defer l.atn.stateMu.Unlock()
+ existing, present := dfa.Get(proposed)
+ if present {
+
+ // This state was already present, so just return it.
+ //
+ proposed = existing
+ } else {
+
+ // We need to add the new state
+ //
+ proposed.stateNumber = dfa.Len()
+ configs.readOnly = true
+ configs.configLookup = nil // Not needed now
+ proposed.configs = configs
+ dfa.Put(proposed)
+ }
+ if !suppressEdge {
+ dfa.setS0(proposed)
+ }
+ return proposed
+}
+
+func (l *LexerATNSimulator) getDFA(mode int) *DFA {
+ return l.decisionToDFA[mode]
+}
+
+// GetText returns the text [Match]ed so far for the current token.
+func (l *LexerATNSimulator) GetText(input CharStream) string {
+ // index is first lookahead char, don't include.
+ return input.GetTextFromInterval(NewInterval(l.startIndex, input.Index()-1))
+}
+
+func (l *LexerATNSimulator) Consume(input CharStream) {
+ curChar := input.LA(1)
+ if curChar == int('\n') {
+ l.Line++
+ l.CharPositionInLine = 0
+ } else {
+ l.CharPositionInLine++
+ }
+ input.Consume()
+}
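+
+// For example (a sketch; NewInputStream is the runtime's string-backed
+// CharStream), consuming the input "a\nb" tracks positions as follows:
+//
+//	is := NewInputStream("a\nb")
+//	sim.Consume(is) // consumed 'a':  Line stays 1, CharPositionInLine -> 1
+//	sim.Consume(is) // consumed '\n': Line -> 2, CharPositionInLine -> 0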
+
+func (l *LexerATNSimulator) GetCharPositionInLine() int {
+ return l.CharPositionInLine
+}
+
+func (l *LexerATNSimulator) GetLine() int {
+ return l.Line
+}
+
+func (l *LexerATNSimulator) GetTokenName(tt int) string {
+ if tt == -1 {
+ return "EOF"
+ }
+
+ var sb strings.Builder
+ sb.Grow(6)
+ sb.WriteByte('\'')
+ sb.WriteRune(rune(tt))
+ sb.WriteByte('\'')
+
+ return sb.String()
+}
+
+func resetSimState(sim *SimState) {
+ sim.index = -1
+ sim.line = 0
+ sim.column = -1
+ sim.dfaState = nil
+}
+
+type SimState struct {
+ index int
+ line int
+ column int
+ dfaState *DFAState
+}
+
+func NewSimState() *SimState {
+ s := new(SimState)
+ resetSimState(s)
+ return s
+}
+
+func (s *SimState) reset() {
+ resetSimState(s)
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go b/vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go
new file mode 100644
index 0000000..4955ac8
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go
@@ -0,0 +1,218 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+type LL1Analyzer struct {
+ atn *ATN
+}
+
+func NewLL1Analyzer(atn *ATN) *LL1Analyzer {
+ la := new(LL1Analyzer)
+ la.atn = atn
+ return la
+}
+
+const (
+ // LL1AnalyzerHitPred is a special value added to the lookahead sets to indicate that we hit
+ // a predicate during analysis if
+ //
+ // seeThruPreds==false
+ LL1AnalyzerHitPred = TokenInvalidType
+)
+
+// getDecisionLookahead calculates the SLL(1) expected lookahead set for each outgoing transition
+// of an [ATNState]. The returned array has one element for each
+// outgoing transition in s. If the closure from transition
+// i leads to a semantic predicate before Matching a symbol, the
+// element at index i of the result will be nil.
+func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
+ if s == nil {
+ return nil
+ }
+ count := len(s.GetTransitions())
+ look := make([]*IntervalSet, count)
+ for alt := 0; alt < count; alt++ {
+
+ look[alt] = NewIntervalSet()
+ lookBusy := NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ClosureBusyCollection, "LL1Analyzer.getDecisionLookahead for lookBusy")
+ la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), false, false)
+
+ // Wipe out lookahead for this alternative if we found nothing,
+ // or we had a predicate when we !seeThruPreds
+ if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHitPred) {
+ look[alt] = nil
+ }
+ }
+ return look
+}
+
+// Look computes the set of tokens that can follow s in the [ATN] in the
+// specified ctx.
+//
+// If ctx is nil and the end of the rule containing
+// s is reached, [EPSILON] is added to the result set.
+//
+// If ctx is not nil and the end of the outermost rule is
+// reached, [EOF] is added to the result set.
+//
+// Parameter s the ATN state, and stopState is the ATN state to stop at. This can be a
+// [BlockEndState] to detect epsilon paths through a closure.
+//
+// Parameter ctx is the complete parser context, or nil if the context
+// should be ignored
+//
+// The func returns the set of tokens that can follow s in the [ATN] in the
+// specified ctx.
+func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet {
+ r := NewIntervalSet()
+ var lookContext *PredictionContext
+ if ctx != nil {
+ lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
+ }
+ la.look1(s, stopState, lookContext, r, NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ClosureBusyCollection, "LL1Analyzer.Look for la.look1()"),
+ NewBitSet(), true, true)
+ return r
+}
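+
+// A usage sketch (atn and state are assumed to be available, e.g. from a
+// recognizer; a nil ctx means the outer context is ignored):
+//
+//	analyzer := NewLL1Analyzer(atn)
+//	follow := analyzer.Look(state, nil, nil)
+//	// follow is the IntervalSet of token types that can appear next.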
+
+// look1 computes the set of tokens that can follow s in the ATN in the
+// specified ctx, accumulating them into look.
+//
+// If ctx is nil and stopState or the end of the
+// rule containing s is reached, [TokenEpsilon] is added to
+// the result set. If ctx is not nil, addEOF is
+// true, and stopState or the end of the outermost rule is
+// reached, [TokenEOF] is added to the result set.
+//
+// Parameter s is the ATN state, and stopState is the ATN state to stop at,
+// which can be a [BlockEndState] to detect epsilon paths through a closure.
+// Parameter ctx is the outer context, or nil if the outer context should
+// not be used. look is the result lookahead set. lookBusy is a set used to
+// prevent epsilon closures in the ATN from causing a stack overflow;
+// outside code should pass a fresh [JStore] for this argument.
+// calledRuleStack is a set used to prevent left recursion in the ATN from
+// causing a stack overflow; outside code should pass NewBitSet() for this
+// argument. seeThruPreds is true to treat semantic predicates as implicitly
+// true and "see through them", otherwise false to treat them as opaque and
+// add [LL1AnalyzerHitPred] to the result if one is encountered. addEOF adds
+// [TokenEOF] to the result if the end of the outermost context is reached;
+// this parameter has no effect if ctx is nil.
+
+func (la *LL1Analyzer) look2(_, stopState ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *JStore[*ATNConfig, Comparator[*ATNConfig]],
+ calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) {
+
+ returnState := la.atn.states[ctx.getReturnState(i)]
+ la.look1(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+
+}
+
+func (la *LL1Analyzer) look1(s, stopState ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *JStore[*ATNConfig, Comparator[*ATNConfig]], calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
+
+ c := NewATNConfig6(s, 0, ctx)
+
+ if lookBusy.Contains(c) {
+ return
+ }
+
+ _, present := lookBusy.Put(c)
+ if present {
+ return
+ }
+ if s == stopState {
+ if ctx == nil {
+ look.addOne(TokenEpsilon)
+ return
+ } else if ctx.isEmpty() && addEOF {
+ look.addOne(TokenEOF)
+ return
+ }
+ }
+
+ _, ok := s.(*RuleStopState)
+
+ if ok {
+ if ctx == nil {
+ look.addOne(TokenEpsilon)
+ return
+ } else if ctx.isEmpty() && addEOF {
+ look.addOne(TokenEOF)
+ return
+ }
+
+ if ctx.pcType != PredictionContextEmpty {
+ removed := calledRuleStack.contains(s.GetRuleIndex())
+ defer func() {
+ if removed {
+ calledRuleStack.add(s.GetRuleIndex())
+ }
+ }()
+ calledRuleStack.remove(s.GetRuleIndex())
+ // run thru all possible stack tops in ctx
+ for i := 0; i < ctx.length(); i++ {
+ returnState := la.atn.states[ctx.getReturnState(i)]
+ la.look2(returnState, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
+ }
+ return
+ }
+ }
+
+ n := len(s.GetTransitions())
+
+ for i := 0; i < n; i++ {
+ t := s.GetTransitions()[i]
+
+ if t1, ok := t.(*RuleTransition); ok {
+ if calledRuleStack.contains(t1.getTarget().GetRuleIndex()) {
+ continue
+ }
+
+ newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
+ la.look3(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
+ } else if t2, ok := t.(AbstractPredicateTransition); ok {
+ if seeThruPreds {
+ la.look1(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+ } else {
+ look.addOne(LL1AnalyzerHitPred)
+ }
+ } else if t.getIsEpsilon() {
+ la.look1(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+ } else if _, ok := t.(*WildcardTransition); ok {
+ look.addRange(TokenMinUserTokenType, la.atn.maxTokenType)
+ } else {
+ set := t.getLabel()
+ if set != nil {
+ if _, ok := t.(*NotSetTransition); ok {
+ set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType)
+ }
+ look.addSet(set)
+ }
+ }
+ }
+}
+
+func (la *LL1Analyzer) look3(stopState ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *JStore[*ATNConfig, Comparator[*ATNConfig]],
+ calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
+
+ newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
+
+ defer func() {
+ calledRuleStack.remove(t1.getTarget().GetRuleIndex())
+ }()
+
+ calledRuleStack.add(t1.getTarget().GetRuleIndex())
+ la.look1(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/nostatistics.go b/vendor/github.com/antlr4-go/antlr/v4/nostatistics.go
new file mode 100644
index 0000000..923c7b5
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/nostatistics.go
@@ -0,0 +1,47 @@
+//go:build !antlr.stats
+
+package antlr
+
+// This file is compiled when the build configuration antlr.stats is not enabled,
+// which then allows the compiler to optimize out all the code that is not used.
+const collectStats = false
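+
+// A build that does want statistics enables the tag instead (a sketch of
+// standard Go tooling usage):
+//
+//	go build -tags antlr.stats ./...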
+
+// goRunStats is a dummy struct used when build configuration antlr.stats is not enabled.
+type goRunStats struct {
+}
+
+var Statistics = &goRunStats{}
+
+func (s *goRunStats) AddJStatRec(_ *JStatRec) {
+ // Do nothing - compiler will optimize this out (hopefully)
+}
+
+func (s *goRunStats) CollectionAnomalies() {
+ // Do nothing - compiler will optimize this out (hopefully)
+}
+
+func (s *goRunStats) Reset() {
+ // Do nothing - compiler will optimize this out (hopefully)
+}
+
+func (s *goRunStats) Report(dir string, prefix string) error {
+ // Do nothing - compiler will optimize this out (hopefully)
+ return nil
+}
+
+func (s *goRunStats) Analyze() {
+ // Do nothing - compiler will optimize this out (hopefully)
+}
+
+type statsOption func(*goRunStats) error
+
+func (s *goRunStats) Configure(options ...statsOption) error {
+ // Do nothing - compiler will optimize this out (hopefully)
+ return nil
+}
+
+func WithTopN(topN int) statsOption {
+ return func(s *goRunStats) error {
+ return nil
+ }
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/parser.go b/vendor/github.com/antlr4-go/antlr/v4/parser.go
new file mode 100644
index 0000000..fb57ac1
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/parser.go
@@ -0,0 +1,700 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "strconv"
+)
+
+type Parser interface {
+ Recognizer
+
+ GetInterpreter() *ParserATNSimulator
+
+ GetTokenStream() TokenStream
+ GetTokenFactory() TokenFactory
+ GetParserRuleContext() ParserRuleContext
+ SetParserRuleContext(ParserRuleContext)
+ Consume() Token
+ GetParseListeners() []ParseTreeListener
+
+ GetErrorHandler() ErrorStrategy
+ SetErrorHandler(ErrorStrategy)
+ GetInputStream() IntStream
+ GetCurrentToken() Token
+ GetExpectedTokens() *IntervalSet
+ NotifyErrorListeners(string, Token, RecognitionException)
+ IsExpectedToken(int) bool
+ GetPrecedence() int
+ GetRuleInvocationStack(ParserRuleContext) []string
+}
+
+type BaseParser struct {
+ *BaseRecognizer
+
+ Interpreter *ParserATNSimulator
+ BuildParseTrees bool
+
+ input TokenStream
+ errHandler ErrorStrategy
+ precedenceStack IntStack
+ ctx ParserRuleContext
+
+ tracer *TraceListener
+ parseListeners []ParseTreeListener
+ _SyntaxErrors int
+}
+
+// NewBaseParser contains all the parsing support code to embed in parsers. Essentially most of it is error
+// recovery stuff.
+//
+//goland:noinspection GoUnusedExportedFunction
+func NewBaseParser(input TokenStream) *BaseParser {
+
+ p := new(BaseParser)
+
+ p.BaseRecognizer = NewBaseRecognizer()
+
+ // The input stream.
+ p.input = nil
+
+ // The error handling strategy for the parser. The default value is a new
+ // instance of [DefaultErrorStrategy].
+ p.errHandler = NewDefaultErrorStrategy()
+ p.precedenceStack = make([]int, 0)
+ p.precedenceStack.Push(0)
+
+ // The ParserRuleContext object for the currently executing rule.
+ // This is always non-nil during the parsing process.
+ p.ctx = nil
+
+ // Specifies whether the parser should construct a parse tree during
+ // the parsing process. The default value is true.
+ p.BuildParseTrees = true
+
+ // When setTrace(true) is called, a reference to the
+ // TraceListener is stored here, so it can be easily removed in a
+ // later call to setTrace(false). The listener itself is
+ // implemented as a parser listener so this field is not directly used by
+ // other parser methods.
+ p.tracer = nil
+
+ // The list of ParseTreeListener listeners registered to receive
+ // events during the parse.
+ p.parseListeners = nil
+
+ // The number of syntax errors Reported during parsing. This value is
+ // incremented each time NotifyErrorListeners is called.
+ p._SyntaxErrors = 0
+ p.SetInputStream(input)
+
+ return p
+}
+
+// This field maps from the serialized ATN string to the deserialized [ATN] with
+// bypass alternatives.
+//
+// [ATNDeserializationOptions.isGenerateRuleBypassTransitions]
+//
+//goland:noinspection GoUnusedGlobalVariable
+var bypassAltsAtnCache = make(map[string]int)
+
+// reset the parser's state.
+func (p *BaseParser) reset() {
+ if p.input != nil {
+ p.input.Seek(0)
+ }
+ p.errHandler.reset(p)
+ p.ctx = nil
+ p._SyntaxErrors = 0
+ p.SetTrace(nil)
+ p.precedenceStack = make([]int, 0)
+ p.precedenceStack.Push(0)
+ if p.Interpreter != nil {
+ p.Interpreter.reset()
+ }
+}
+
+func (p *BaseParser) GetErrorHandler() ErrorStrategy {
+ return p.errHandler
+}
+
+func (p *BaseParser) SetErrorHandler(e ErrorStrategy) {
+ p.errHandler = e
+}
+
+// Match the current input symbol against ttype. If the symbol type
+// Matches, [ErrorStrategy.ReportMatch] and [Consume] are
+// called to complete the Match process.
+//
+// If the symbol type does not Match,
+// [ErrorStrategy.RecoverInline] is called on the current error
+// strategy to attempt recovery. If BuildParseTrees is
+// true and the token index of the symbol returned by
+// [ErrorStrategy.RecoverInline] is -1, the symbol is added to
+// the parse tree by calling [ParserRuleContext.AddErrorNode].
+//
+// The func returns the Matched symbol, and panics with a
+// [RecognitionException] if the current input symbol did not Match
+// ttype and the error strategy could not recover from the
+// mismatched symbol.
+func (p *BaseParser) Match(ttype int) Token {
+
+ t := p.GetCurrentToken()
+
+ if t.GetTokenType() == ttype {
+ p.errHandler.ReportMatch(p)
+ p.Consume()
+ } else {
+ t = p.errHandler.RecoverInline(p)
+ if p.HasError() {
+ return nil
+ }
+ if p.BuildParseTrees && t.GetTokenIndex() == -1 {
+
+ // we must have conjured up a new token during single token
+ // insertion if it's not the current symbol
+ p.ctx.AddErrorNode(t)
+ }
+ }
+
+ return t
+}
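+
+// A usage sketch from inside a generated rule method (MyParserLPAREN is a
+// hypothetical generated token-type constant):
+//
+//	tok := p.Match(MyParserLPAREN)
+//	if p.HasError() {
+//		// tok is nil; the error strategy could not recover inline
+//	}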
+
+// MatchWildcard matches the current input symbol as a wildcard. If the symbol type Matches
+// (i.e. has a value greater than 0), [ErrorStrategy.ReportMatch]
+// and [Consume] are called to complete the Match process.
+//
+// If the symbol type does not Match,
+// [ErrorStrategy.RecoverInline] is called on the current error
+// strategy to attempt recovery. If BuildParseTrees is
+// true and the token index of the symbol returned by
+// [ErrorStrategy.RecoverInline] is -1, the symbol is added to
+// the parse tree by calling [ParserRuleContext.AddErrorNode].
+//
+// The func returns the Matched symbol, and panics with a
+// [RecognitionException] if the current input symbol did not Match
+// a wildcard and the error strategy could not recover from the
+// mismatched symbol.
+func (p *BaseParser) MatchWildcard() Token {
+ t := p.GetCurrentToken()
+ if t.GetTokenType() > 0 {
+ p.errHandler.ReportMatch(p)
+ p.Consume()
+ } else {
+ t = p.errHandler.RecoverInline(p)
+ if p.BuildParseTrees && t.GetTokenIndex() == -1 {
+ // we must have conjured up a new token during single token
+ // insertion if it's not the current symbol
+ p.ctx.AddErrorNode(t)
+ }
+ }
+ return t
+}
+
+func (p *BaseParser) GetParserRuleContext() ParserRuleContext {
+ return p.ctx
+}
+
+func (p *BaseParser) SetParserRuleContext(v ParserRuleContext) {
+ p.ctx = v
+}
+
+func (p *BaseParser) GetParseListeners() []ParseTreeListener {
+ if p.parseListeners == nil {
+ return make([]ParseTreeListener, 0)
+ }
+ return p.parseListeners
+}
+
+// AddParseListener registers listener to receive events during the parsing process.
+//
+// To support output-preserving grammar transformations (including but not
+// limited to left-recursion removal, automated left-factoring, and
+// optimized code generation), calls to listener methods during the parse
+// may differ substantially from calls made by
+// [ParseTreeWalker.DEFAULT] used after the parse is complete. In
+// particular, rule entry and exit events may occur in a different order
+// during the parse than after the parse is complete. In addition, calls to certain
+// rule entry methods may be omitted.
+//
+// With the following specific exceptions, calls to listener events are
+// deterministic, i.e. for identical input the calls to listener
+// methods will be the same.
+//
+// - Alterations to the grammar used to generate code may change the
+// behavior of the listener calls.
+// - Alterations to the command line options passed to ANTLR 4 when
+// generating the parser may change the behavior of the listener calls.
+// - Changing the version of the ANTLR Tool used to generate the parser
+// may change the behavior of the listener calls.
+func (p *BaseParser) AddParseListener(listener ParseTreeListener) {
+ if listener == nil {
+ panic("listener")
+ }
+ if p.parseListeners == nil {
+ p.parseListeners = make([]ParseTreeListener, 0)
+ }
+ p.parseListeners = append(p.parseListeners, listener)
+}
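+
+// A registration sketch (myListener is an assumed [ParseTreeListener]
+// implementation):
+//
+//	p.AddParseListener(myListener)
+//	// ... parse ...
+//	p.RemoveParseListener(myListener)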
+
+// RemoveParseListener removes listener from the list of parse listeners.
+//
+// If listener is nil or has not been added as a parse
+// listener, this func does nothing.
+func (p *BaseParser) RemoveParseListener(listener ParseTreeListener) {
+
+ if p.parseListeners != nil {
+
+ idx := -1
+ for i, v := range p.parseListeners {
+ if v == listener {
+ idx = i
+ break
+ }
+ }
+
+ if idx == -1 {
+ return
+ }
+
+ // remove the listener from the slice
+ p.parseListeners = append(p.parseListeners[0:idx], p.parseListeners[idx+1:]...)
+
+ if len(p.parseListeners) == 0 {
+ p.parseListeners = nil
+ }
+ }
+}
+
+// Remove all parse listeners.
+func (p *BaseParser) removeParseListeners() {
+ p.parseListeners = nil
+}
+
+// TriggerEnterRuleEvent notifies all parse listeners of an enter rule event.
+func (p *BaseParser) TriggerEnterRuleEvent() {
+ if p.parseListeners != nil {
+ ctx := p.ctx
+ for _, listener := range p.parseListeners {
+ listener.EnterEveryRule(ctx)
+ ctx.EnterRule(listener)
+ }
+ }
+}
+
+// TriggerExitRuleEvent notifies any parse listeners of an exit rule event.
+func (p *BaseParser) TriggerExitRuleEvent() {
+ if p.parseListeners != nil {
+ // reverse order walk of listeners
+ ctx := p.ctx
+ l := len(p.parseListeners) - 1
+
+ for i := range p.parseListeners {
+ listener := p.parseListeners[l-i]
+ ctx.ExitRule(listener)
+ listener.ExitEveryRule(ctx)
+ }
+ }
+}
+
+func (p *BaseParser) GetInterpreter() *ParserATNSimulator {
+ return p.Interpreter
+}
+
+func (p *BaseParser) GetATN() *ATN {
+ return p.Interpreter.atn
+}
+
+func (p *BaseParser) GetTokenFactory() TokenFactory {
+ return p.input.GetTokenSource().GetTokenFactory()
+}
+
+// setTokenFactory is used to tell our token source and error strategy about a new way to create tokens.
+func (p *BaseParser) setTokenFactory(factory TokenFactory) {
+ p.input.GetTokenSource().setTokenFactory(factory)
+}
+
+// GetATNWithBypassAlts - the ATN with bypass alternatives is expensive to create, so we create it
+// lazily.
+func (p *BaseParser) GetATNWithBypassAlts() {
+
+ // TODO - Implement this?
+ panic("Not implemented!")
+
+ // serializedAtn := p.getSerializedATN()
+ // if (serializedAtn == nil) {
+ // panic("The current parser does not support an ATN with bypass alternatives.")
+ // }
+ // result := p.bypassAltsAtnCache[serializedAtn]
+ // if (result == nil) {
+ // deserializationOptions := NewATNDeserializationOptions(nil)
+ // deserializationOptions.generateRuleBypassTransitions = true
+ // result = NewATNDeserializer(deserializationOptions).deserialize(serializedAtn)
+ // p.bypassAltsAtnCache[serializedAtn] = result
+ // }
+ // return result
+}
+
+// The preferred method of getting a tree pattern. For example, here's a
+// sample use:
+//
+//
+// ParseTree t = parser.expr()
+// ParseTreePattern p = parser.compileParseTreePattern("<ID>+0",
+// MyParser.RULE_expr)
+// ParseTreeMatch m = p.Match(t)
+// String id = m.Get("ID")
+//
+
+//goland:noinspection GoUnusedParameter
+func (p *BaseParser) compileParseTreePattern(pattern string, patternRuleIndex int, lexer Lexer) {
+
+ panic("NewParseTreePatternMatcher not implemented!")
+ //
+ // if (lexer == nil) {
+ // if (p.GetTokenStream() != nil) {
+ // tokenSource := p.GetTokenStream().GetTokenSource()
+ // if _, ok := tokenSource.(ILexer); ok {
+ // lexer = tokenSource
+ // }
+ // }
+ // }
+ // if (lexer == nil) {
+ // panic("Parser can't discover a lexer to use")
+ // }
+
+ // m := NewParseTreePatternMatcher(lexer, p)
+ // return m.compile(pattern, patternRuleIndex)
+}
+
+func (p *BaseParser) GetInputStream() IntStream {
+ return p.GetTokenStream()
+}
+
+func (p *BaseParser) SetInputStream(input TokenStream) {
+ p.SetTokenStream(input)
+}
+
+func (p *BaseParser) GetTokenStream() TokenStream {
+ return p.input
+}
+
+// SetTokenStream installs input as the token stream and resets the parser.
+func (p *BaseParser) SetTokenStream(input TokenStream) {
+ p.input = nil
+ p.reset()
+ p.input = input
+}
+
+// GetCurrentToken returns the current token at LT(1).
+//
+// [Match] needs to return the current input symbol, which gets put
+// into the label for the associated token ref e.g., x=ID.
+func (p *BaseParser) GetCurrentToken() Token {
+ return p.input.LT(1)
+}
+
+func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken Token, err RecognitionException) {
+ if offendingToken == nil {
+ offendingToken = p.GetCurrentToken()
+ }
+ p._SyntaxErrors++
+ line := offendingToken.GetLine()
+ column := offendingToken.GetColumn()
+ listener := p.GetErrorListenerDispatch()
+ listener.SyntaxError(p, offendingToken, line, column, msg, err)
+}
+
+func (p *BaseParser) Consume() Token {
+ o := p.GetCurrentToken()
+ if o.GetTokenType() != TokenEOF {
+ p.GetInputStream().Consume()
+ }
+ hasListener := p.parseListeners != nil && len(p.parseListeners) > 0
+ if p.BuildParseTrees || hasListener {
+ if p.errHandler.InErrorRecoveryMode(p) {
+ node := p.ctx.AddErrorNode(o)
+ if p.parseListeners != nil {
+ for _, l := range p.parseListeners {
+ l.VisitErrorNode(node)
+ }
+ }
+
+ } else {
+ node := p.ctx.AddTokenNode(o)
+ if p.parseListeners != nil {
+ for _, l := range p.parseListeners {
+ l.VisitTerminal(node)
+ }
+ }
+ }
+ // node.invokingState = p.state
+ }
+
+ return o
+}
+
+func (p *BaseParser) addContextToParseTree() {
+ // add current context to parent if we have a parent
+ if p.ctx.GetParent() != nil {
+ p.ctx.GetParent().(ParserRuleContext).AddChild(p.ctx)
+ }
+}
+
+func (p *BaseParser) EnterRule(localctx ParserRuleContext, state, _ int) {
+ p.SetState(state)
+ p.ctx = localctx
+ p.ctx.SetStart(p.input.LT(1))
+ if p.BuildParseTrees {
+ p.addContextToParseTree()
+ }
+ if p.parseListeners != nil {
+ p.TriggerEnterRuleEvent()
+ }
+}
+
+func (p *BaseParser) ExitRule() {
+ p.ctx.SetStop(p.input.LT(-1))
+ // trigger event on ctx, before it reverts to parent
+ if p.parseListeners != nil {
+ p.TriggerExitRuleEvent()
+ }
+ p.SetState(p.ctx.GetInvokingState())
+ if p.ctx.GetParent() != nil {
+ p.ctx = p.ctx.GetParent().(ParserRuleContext)
+ } else {
+ p.ctx = nil
+ }
+}
+
+func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
+ localctx.SetAltNumber(altNum)
+ // if we have a new localctx, make sure we replace the existing ctx
+ // that is the previous child of the parse tree
+ if p.BuildParseTrees && p.ctx != localctx {
+ if p.ctx.GetParent() != nil {
+ p.ctx.GetParent().(ParserRuleContext).RemoveLastChild()
+ p.ctx.GetParent().(ParserRuleContext).AddChild(localctx)
+ }
+ }
+ p.ctx = localctx
+}
+
+// GetPrecedence returns the precedence level for the topmost precedence rule,
+// or -1 if the parser context is not nested within a precedence rule.
+func (p *BaseParser) GetPrecedence() int {
+ if len(p.precedenceStack) == 0 {
+ return -1
+ }
+
+ return p.precedenceStack[len(p.precedenceStack)-1]
+}
+
+func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, _, precedence int) {
+ p.SetState(state)
+ p.precedenceStack.Push(precedence)
+ p.ctx = localctx
+ p.ctx.SetStart(p.input.LT(1))
+ if p.parseListeners != nil {
+ p.TriggerEnterRuleEvent() // simulates rule entry for
+ // left-recursive rules
+ }
+}
+
+// PushNewRecursionContext is like [EnterRule] but for recursive rules.
+func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state, _ int) {
+ previous := p.ctx
+ previous.SetParent(localctx)
+ previous.SetInvokingState(state)
+ previous.SetStop(p.input.LT(-1))
+
+ p.ctx = localctx
+ p.ctx.SetStart(previous.GetStart())
+ if p.BuildParseTrees {
+ p.ctx.AddChild(previous)
+ }
+ if p.parseListeners != nil {
+ p.TriggerEnterRuleEvent() // simulates rule entry for
+ // left-recursive rules
+ }
+}
+
+func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
+ _, _ = p.precedenceStack.Pop()
+ p.ctx.SetStop(p.input.LT(-1))
+ retCtx := p.ctx // save current ctx (return value)
+ // unroll so ctx is as it was before call to recursive method
+ if p.parseListeners != nil {
+ for p.ctx != parentCtx {
+ p.TriggerExitRuleEvent()
+ p.ctx = p.ctx.GetParent().(ParserRuleContext)
+ }
+ } else {
+ p.ctx = parentCtx
+ }
+ // hook into tree
+ retCtx.SetParent(parentCtx)
+ if p.BuildParseTrees && parentCtx != nil {
+ // add return ctx into invoking rule's tree
+ parentCtx.AddChild(retCtx)
+ }
+}
+
+func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext {
+ ctx := p.ctx
+ for ctx != nil {
+ if ctx.GetRuleIndex() == ruleIndex {
+ return ctx
+ }
+ ctx = ctx.GetParent().(ParserRuleContext)
+ }
+ return nil
+}
+
+func (p *BaseParser) Precpred(_ RuleContext, precedence int) bool {
+ return precedence >= p.precedenceStack[len(p.precedenceStack)-1]
+}
+
+//goland:noinspection GoUnusedParameter
+func (p *BaseParser) inContext(context ParserRuleContext) bool {
+ // TODO: useful in parser?
+ return false
+}
+
+// IsExpectedToken checks whether symbol can follow the current state in the
+// [ATN]. The behavior of this method is equivalent to the following, but is
+// implemented such that the complete context-sensitive follow set does not
+// need to be explicitly constructed.
+//
+// return getExpectedTokens().contains(symbol)
+func (p *BaseParser) IsExpectedToken(symbol int) bool {
+ atn := p.Interpreter.atn
+ ctx := p.ctx
+ s := atn.states[p.state]
+ following := atn.NextTokens(s, nil)
+ if following.contains(symbol) {
+ return true
+ }
+ if !following.contains(TokenEpsilon) {
+ return false
+ }
+ for ctx != nil && ctx.GetInvokingState() >= 0 && following.contains(TokenEpsilon) {
+ invokingState := atn.states[ctx.GetInvokingState()]
+ rt := invokingState.GetTransitions()[0]
+ following = atn.NextTokens(rt.(*RuleTransition).followState, nil)
+ if following.contains(symbol) {
+ return true
+ }
+ ctx = ctx.GetParent().(ParserRuleContext)
+ }
+ if following.contains(TokenEpsilon) && symbol == TokenEOF {
+ return true
+ }
+
+ return false
+}
+
+// GetExpectedTokens returns the set of input symbols which could follow the
+// current parser state and context, as given by [GetState] and [GetContext],
+// respectively.
+func (p *BaseParser) GetExpectedTokens() *IntervalSet {
+ return p.Interpreter.atn.getExpectedTokens(p.state, p.ctx)
+}
+
+func (p *BaseParser) GetExpectedTokensWithinCurrentRule() *IntervalSet {
+ atn := p.Interpreter.atn
+ s := atn.states[p.state]
+ return atn.NextTokens(s, nil)
+}
+
+// GetRuleIndex returns a rule's index (i.e., the RULE_ruleName field) or -1 if not found.
+func (p *BaseParser) GetRuleIndex(ruleName string) int {
+ var ruleIndex, ok = p.GetRuleIndexMap()[ruleName]
+ if ok {
+ return ruleIndex
+ }
+
+ return -1
+}
+
+// GetRuleInvocationStack returns a list of the rule names in your parser instance
+// leading up to a call to the current rule. You could override if
+// you want more details such as the file/line info of where
+// in the ATN a rule is invoked.
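+//
+// For example, called while parsing deep inside a chain like
+// prog -> stat -> expr, it would return a slice of the form (illustrative
+// only; the actual names come from GetRuleNames):
+//
+//	["expr", "stat", "prog"]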
+func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string {
+ if c == nil {
+ c = p.ctx
+ }
+ stack := make([]string, 0)
+ for c != nil {
+ // compute what follows who invoked us
+ ruleIndex := c.GetRuleIndex()
+ if ruleIndex < 0 {
+ stack = append(stack, "n/a")
+ } else {
+ stack = append(stack, p.GetRuleNames()[ruleIndex])
+ }
+
+ vp := c.GetParent()
+
+ if vp == nil {
+ break
+ }
+
+ c = vp.(ParserRuleContext)
+ }
+ return stack
+}
+
+// GetDFAStrings returns a string representation of all decision DFAs, for debugging purposes.
+func (p *BaseParser) GetDFAStrings() string {
+ return fmt.Sprint(p.Interpreter.decisionToDFA)
+}
+
+// DumpDFA prints the whole of the DFA for debugging
+func (p *BaseParser) DumpDFA() {
+ seenOne := false
+ for _, dfa := range p.Interpreter.decisionToDFA {
+ if dfa.Len() > 0 {
+ if seenOne {
+ fmt.Println()
+ }
+ fmt.Println("Decision " + strconv.Itoa(dfa.decision) + ":")
+ fmt.Print(dfa.String(p.LiteralNames, p.SymbolicNames))
+ seenOne = true
+ }
+ }
+}
+
+func (p *BaseParser) GetSourceName() string {
+ return p.GrammarFileName
+}
+
+// SetTrace installs a trace listener for the parse.
+//
+// During a parse it is sometimes useful to listen in on the rule entry and exit
+// events as well as token matches. This is for quick and dirty debugging.
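+//
+// A minimal usage sketch (note that any non-nil argument simply enables
+// tracing with a fresh listener; the supplied value itself is not installed):
+//
+//	p.SetTrace(NewTraceListener(p)) // enable rule entry/exit tracing
+//	p.SetTrace(nil)                 // disable tracing again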
+func (p *BaseParser) SetTrace(trace *TraceListener) {
+ if trace == nil {
+ p.RemoveParseListener(p.tracer)
+ p.tracer = nil
+ } else {
+ if p.tracer != nil {
+ p.RemoveParseListener(p.tracer)
+ }
+ p.tracer = NewTraceListener(p)
+ p.AddParseListener(p.tracer)
+ }
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go b/vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go
new file mode 100644
index 0000000..ae28696
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go
@@ -0,0 +1,1668 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+
+// ClosureBusy is a store of ATNConfigs and is a tiny abstraction layer over
+// a standard JStore so that we can use Lazy instantiation of the JStore, mostly
+// to avoid polluting the stats module with a ton of JStore instances with nothing in them.
+type ClosureBusy struct {
+ bMap *JStore[*ATNConfig, Comparator[*ATNConfig]]
+ desc string
+}
+
+// NewClosureBusy creates a new ClosureBusy instance used to avoid infinite recursion for right-recursive rules
+func NewClosureBusy(desc string) *ClosureBusy {
+ return &ClosureBusy{
+ desc: desc,
+ }
+}
+
+func (c *ClosureBusy) Put(config *ATNConfig) (*ATNConfig, bool) {
+ if c.bMap == nil {
+ c.bMap = NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ClosureBusyCollection, c.desc)
+ }
+ return c.bMap.Put(config)
+}
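+
+// A usage sketch (illustrative only): the underlying JStore is allocated on
+// the first call to Put, so a ClosureBusy can be created per closure operation
+// without paying for an empty store when nothing is ever added:
+//
+//	busy := NewClosureBusy("example closure")
+//	if _, present := busy.Put(cfg); present {
+//		return // cfg was already seen; avoid infinite recursion
+//	}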
+
+type ParserATNSimulator struct {
+ BaseATNSimulator
+
+ parser Parser
+ predictionMode int
+ input TokenStream
+ startIndex int
+ dfa *DFA
+ mergeCache *JPCMap
+ outerContext ParserRuleContext
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *ParserATNSimulator {
+
+ p := &ParserATNSimulator{
+ BaseATNSimulator: BaseATNSimulator{
+ atn: atn,
+ sharedContextCache: sharedContextCache,
+ },
+ }
+
+ p.parser = parser
+ p.decisionToDFA = decisionToDFA
+ // SLL, LL, or LL + exact ambiguity detection?
+ p.predictionMode = PredictionModeLL
+ // LAME globals to avoid parameters!!!!! I need these down deep in predTransition
+ p.input = nil
+ p.startIndex = 0
+ p.outerContext = nil
+ p.dfa = nil
+ // Each prediction operation uses a cache for merge of prediction contexts.
+ // Don't keep around as it wastes huge amounts of memory. [JPCMap]
+ // isn't Synchronized, but we're ok since two threads shouldn't reuse same
+ // parser/atn-simulator object because it can only handle one input at a time.
+ // This maps graphs a and b to merged result c. (a,b) -> c. We can avoid
+ // the merge if we ever see a and b again. Note that (b,a) -> c should
+ // also be examined during cache lookup.
+ //
+ p.mergeCache = nil
+
+ return p
+}
+
+func (p *ParserATNSimulator) GetPredictionMode() int {
+ return p.predictionMode
+}
+
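+// SetPredictionMode selects SLL, LL, or LL with exact ambiguity detection.
+// For example, a caller wanting faster, SLL-only prediction might write
+// (a sketch; Interpreter is the parser's ParserATNSimulator):
+//
+//	p.Interpreter.SetPredictionMode(PredictionModeSLL)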
+func (p *ParserATNSimulator) SetPredictionMode(v int) {
+ p.predictionMode = v
+}
+
+func (p *ParserATNSimulator) reset() {
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) AdaptivePredict(parser *BaseParser, input TokenStream, decision int, outerContext ParserRuleContext) int {
+ if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("adaptivePredict decision " + strconv.Itoa(decision) +
+ " exec LA(1)==" + p.getLookaheadName(input) +
+ " line " + strconv.Itoa(input.LT(1).GetLine()) + ":" +
+ strconv.Itoa(input.LT(1).GetColumn()))
+ }
+ p.input = input
+ p.startIndex = input.Index()
+ p.outerContext = outerContext
+
+ dfa := p.decisionToDFA[decision]
+ p.dfa = dfa
+ m := input.Mark()
+ index := input.Index()
+
+ defer func() {
+ p.dfa = nil
+ p.mergeCache = nil // whack cache after each prediction
+ // Do not attempt to run a GC now that we're done with the cache, as doing so makes the
+ // GC overhead terrible for badly formed grammars, and it has little effect on well-formed
+ // grammars.
+ // I have made some extra effort to try and reduce memory pressure by reusing allocations when
+ // possible. However, it can only have a limited effect. The real solution is to encourage grammar
+ // authors to think more carefully about their grammar and to use the new antlr.stats tag to inspect
+ // what is happening at runtime, along with using the error listener to report ambiguities.
+
+ input.Seek(index)
+ input.Release(m)
+ }()
+
+ // Now we are certain to have a specific decision's DFA,
+ // but do we still need an initial state?
+ var s0 *DFAState
+ p.atn.stateMu.RLock()
+ if dfa.getPrecedenceDfa() {
+ p.atn.edgeMu.RLock()
+ // the start state for a precedence DFA depends on the current
+ // parser precedence, and is provided by a DFA method.
+ s0 = dfa.getPrecedenceStartState(p.parser.GetPrecedence())
+ p.atn.edgeMu.RUnlock()
+ } else {
+ // the start state for a "regular" DFA is just s0
+ s0 = dfa.getS0()
+ }
+ p.atn.stateMu.RUnlock()
+
+ if s0 == nil {
+ if outerContext == nil {
+ outerContext = ParserRuleContextEmpty
+ }
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("predictATN decision " + strconv.Itoa(dfa.decision) +
+ " exec LA(1)==" + p.getLookaheadName(input) +
+ ", outerContext=" + outerContext.String(p.parser.GetRuleNames(), nil))
+ }
+ fullCtx := false
+ s0Closure := p.computeStartState(dfa.atnStartState, ParserRuleContextEmpty, fullCtx)
+
+ p.atn.stateMu.Lock()
+ if dfa.getPrecedenceDfa() {
+ // If this is a precedence DFA, we use applyPrecedenceFilter
+ // to convert the computed start state to a precedence start
+ // state. We then use DFA.setPrecedenceStartState to set the
+ // appropriate start state for the precedence level rather
+ // than simply setting DFA.s0.
+ //
+ dfa.s0.configs = s0Closure
+ s0Closure = p.applyPrecedenceFilter(s0Closure)
+ s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
+ p.atn.edgeMu.Lock()
+ dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0)
+ p.atn.edgeMu.Unlock()
+ } else {
+ s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
+ dfa.setS0(s0)
+ }
+ p.atn.stateMu.Unlock()
+ }
+
+ alt, re := p.execATN(dfa, s0, input, index, outerContext)
+ parser.SetError(re)
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("DFA after predictATN: " + dfa.String(p.parser.GetLiteralNames(), nil))
+ }
+ return alt
+
+}
+
+// execATN performs ATN simulation to compute a predicted alternative based
+// upon the remaining input, but also updates the DFA cache to avoid
+// having to traverse the ATN again for the same input sequence.
+//
+// There are some key conditions we're looking for after computing a new
+// set of ATN configs (proposed DFA state):
+//
+// - If the set is empty, there is no viable alternative for current symbol
+// - Does the state uniquely predict an alternative?
+// - Does the state have a conflict that would prevent us from
+// putting it on the work list?
+//
+// We also have some key operations to do:
+//
+// - Add an edge from the previous DFA state to the potentially new DFA state, D,
+// upon the current symbol, but only if adding to the work list, which means in
+// all cases except no viable alternative (and possibly non-greedy decisions?)
+// - Collecting predicates and adding semantic context to DFA accept states
+// - adding rule context to context-sensitive DFA accept states
+// - Consuming an input symbol
+// - Reporting a conflict
+// - Reporting an ambiguity
+// - Reporting a context sensitivity
+// - Reporting insufficient predicates
+//
+// Cover these cases:
+//
+// - dead end
+// - single alt
+// - single alt + predicates
+// - conflict
+// - conflict + predicates
+//
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, startIndex int, outerContext ParserRuleContext) (int, RecognitionException) {
+
+ if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("execATN decision " + strconv.Itoa(dfa.decision) +
+ ", DFA state " + s0.String() +
+ ", LA(1)==" + p.getLookaheadName(input) +
+ " line " + strconv.Itoa(input.LT(1).GetLine()) + ":" + strconv.Itoa(input.LT(1).GetColumn()))
+ }
+
+ previousD := s0
+
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("s0 = " + s0.String())
+ }
+ t := input.LA(1)
+ for { // for more work
+ D := p.getExistingTargetState(previousD, t)
+ if D == nil {
+ D = p.computeTargetState(dfa, previousD, t)
+ }
+ if D == ATNSimulatorError {
+ // If any configs in previous dipped into the outer context, that
+ // means that input up to t actually finished the entry rule,
+ // at least for the SLL decision. Full LL doesn't dip into the outer
+ // context, so it doesn't need this special case.
+ // We will get an error no matter what, so delay until after the
+ // decision for a better error message. Also, no reachable target
+ // ATN states in SLL implies LL will also get nowhere.
+ // If there is a conflict in states that dip out, choose the min alt, since we
+ // will get an error no matter what.
+ e := p.noViableAlt(input, outerContext, previousD.configs, startIndex)
+ input.Seek(startIndex)
+ alt := p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configs, outerContext)
+ if alt != ATNInvalidAltNumber {
+ return alt, nil
+ }
+ p.parser.SetError(e)
+ return ATNInvalidAltNumber, e
+ }
+ if D.requiresFullContext && p.predictionMode != PredictionModeSLL {
+ // IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error)
+ conflictingAlts := D.configs.conflictingAlts
+ if D.predicates != nil {
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("DFA state has preds in DFA sim LL fail-over")
+ }
+ conflictIndex := input.Index()
+ if conflictIndex != startIndex {
+ input.Seek(startIndex)
+ }
+ conflictingAlts = p.evalSemanticContext(D.predicates, outerContext, true)
+ if conflictingAlts.length() == 1 {
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("Full LL avoided")
+ }
+ return conflictingAlts.minValue(), nil
+ }
+ if conflictIndex != startIndex {
+ // restore the index so Reporting the fallback to full
+ // context occurs with the index at the correct spot
+ input.Seek(conflictIndex)
+ }
+ }
+ if runtimeConfig.parserATNSimulatorDFADebug {
+ fmt.Println("ctx sensitive state " + outerContext.String(nil, nil) + " in " + D.String())
+ }
+ fullCtx := true
+ s0Closure := p.computeStartState(dfa.atnStartState, outerContext, fullCtx)
+ p.ReportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.Index())
+ alt, re := p.execATNWithFullContext(dfa, D, s0Closure, input, startIndex, outerContext)
+ return alt, re
+ }
+ if D.isAcceptState {
+ if D.predicates == nil {
+ return D.prediction, nil
+ }
+ stopIndex := input.Index()
+ input.Seek(startIndex)
+ alts := p.evalSemanticContext(D.predicates, outerContext, true)
+
+ switch alts.length() {
+ case 0:
+ return ATNInvalidAltNumber, p.noViableAlt(input, outerContext, D.configs, startIndex)
+ case 1:
+ return alts.minValue(), nil
+ default:
+ // Report ambiguity after predicate evaluation to make sure the correct set of ambig alts is Reported.
+ p.ReportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configs)
+ return alts.minValue(), nil
+ }
+ }
+ previousD = D
+
+ if t != TokenEOF {
+ input.Consume()
+ t = input.LA(1)
+ }
+ }
+}
+
+// getExistingTargetState returns the existing target state for an edge in the
+// DFA. If the target state for the edge has not yet been computed or is
+// otherwise not available, this method returns nil.
+//
+// previousD is the current DFA state and t is the next input symbol. The
+// result is the existing target DFA state for the given input symbol t, or
+// nil if the target state for this edge is not already cached.
+func (p *ParserATNSimulator) getExistingTargetState(previousD *DFAState, t int) *DFAState {
+ if t+1 < 0 {
+ return nil
+ }
+
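+ // Edges are indexed by t+1 so that t == TokenEOF (-1) lands in slot 0.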
+ p.atn.edgeMu.RLock()
+ defer p.atn.edgeMu.RUnlock()
+ edges := previousD.getEdges()
+ if edges == nil || t+1 >= len(edges) {
+ return nil
+ }
+ return previousD.getIthEdge(t + 1)
+}
+
+// computeTargetState computes a target state for an edge in the DFA, and
+// attempts to add the computed state and corresponding edge to the DFA.
+//
+// dfa is the DFA, previousD is the current DFA state, and t is the next input
+// symbol. The result is the computed target DFA state for the given input
+// symbol t. If t does not lead to a valid DFA state, this method returns
+// ATNSimulatorError.
+//
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) computeTargetState(dfa *DFA, previousD *DFAState, t int) *DFAState {
+ reach := p.computeReachSet(previousD.configs, t, false)
+
+ if reach == nil {
+ p.addDFAEdge(dfa, previousD, t, ATNSimulatorError)
+ return ATNSimulatorError
+ }
+ // create new target state we'll add to DFA after it's complete
+ D := NewDFAState(-1, reach)
+
+ predictedAlt := p.getUniqueAlt(reach)
+
+ if runtimeConfig.parserATNSimulatorDebug {
+ altSubSets := PredictionModegetConflictingAltSubsets(reach)
+ fmt.Println("SLL altSubSets=" + fmt.Sprint(altSubSets) +
+ ", previous=" + previousD.configs.String() +
+ ", configs=" + reach.String() +
+ ", predict=" + strconv.Itoa(predictedAlt) +
+ ", allSubsetsConflict=" +
+ fmt.Sprint(PredictionModeallSubsetsConflict(altSubSets)) +
+ ", conflictingAlts=" + p.getConflictingAlts(reach).String())
+ }
+ if predictedAlt != ATNInvalidAltNumber {
+ // NO CONFLICT, UNIQUELY PREDICTED ALT
+ D.isAcceptState = true
+ D.configs.uniqueAlt = predictedAlt
+ D.setPrediction(predictedAlt)
+ } else if PredictionModehasSLLConflictTerminatingPrediction(p.predictionMode, reach) {
+ // MORE THAN ONE VIABLE ALTERNATIVE
+ D.configs.conflictingAlts = p.getConflictingAlts(reach)
+ D.requiresFullContext = true
+ // in SLL-only mode, we will stop at p state and return the minimum alt
+ D.isAcceptState = true
+ D.setPrediction(D.configs.conflictingAlts.minValue())
+ }
+ if D.isAcceptState && D.configs.hasSemanticContext {
+ p.predicateDFAState(D, p.atn.getDecisionState(dfa.decision))
+ if D.predicates != nil {
+ D.setPrediction(ATNInvalidAltNumber)
+ }
+ }
+ // all adds to dfa are done after we've created full D state
+ D = p.addDFAEdge(dfa, previousD, t, D)
+ return D
+}
+
+func (p *ParserATNSimulator) predicateDFAState(dfaState *DFAState, decisionState DecisionState) {
+ // We need to test all predicates, even in DFA states that
+ // uniquely predict alternative.
+ nalts := len(decisionState.GetTransitions())
+ // Update DFA so reach becomes accept state with (predicate,alt)
+ // pairs if preds found for conflicting alts
+ altsToCollectPredsFrom := p.getConflictingAltsOrUniqueAlt(dfaState.configs)
+ altToPred := p.getPredsForAmbigAlts(altsToCollectPredsFrom, dfaState.configs, nalts)
+ if altToPred != nil {
+ dfaState.predicates = p.getPredicatePredictions(altsToCollectPredsFrom, altToPred)
+ dfaState.setPrediction(ATNInvalidAltNumber) // make sure we use preds
+ } else {
+ // There are preds in configs but they might go away
+ // when OR'd together like {p}? || NONE == NONE. If neither
+ // alt has preds, resolve to min alt
+ dfaState.setPrediction(altsToCollectPredsFrom.minValue())
+ }
+}
+
+// comes back with reach.uniqueAlt set to a valid alt
+//
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 *ATNConfigSet, input TokenStream, startIndex int, outerContext ParserRuleContext) (int, RecognitionException) {
+
+ if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("execATNWithFullContext " + s0.String())
+ }
+
+ fullCtx := true
+ foundExactAmbig := false
+ var reach *ATNConfigSet
+ previous := s0
+ input.Seek(startIndex)
+ t := input.LA(1)
+ predictedAlt := -1
+
+ for { // for more work
+ reach = p.computeReachSet(previous, t, fullCtx)
+ if reach == nil {
+ // If any configs in previous dipped into the outer context, that
+ // means that input up to t actually finished the entry rule,
+ // at least for the LL decision. Full LL doesn't dip into the outer
+ // context, so it doesn't need this special case.
+ // We will get an error no matter what, so delay until after the
+ // decision for a better error message. Also, no reachable target
+ // ATN states in SLL implies LL will also get nowhere.
+ // If there is a conflict in states that dip out, choose the min alt, since we
+ // will get an error no matter what.
+ input.Seek(startIndex)
+ alt := p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext)
+ if alt != ATNInvalidAltNumber {
+ return alt, nil
+ }
+ return alt, p.noViableAlt(input, outerContext, previous, startIndex)
+ }
+ altSubSets := PredictionModegetConflictingAltSubsets(reach)
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("LL altSubSets=" + fmt.Sprint(altSubSets) + ", predict=" +
+ strconv.Itoa(PredictionModegetUniqueAlt(altSubSets)) + ", resolvesToJustOneViableAlt=" +
+ fmt.Sprint(PredictionModeresolvesToJustOneViableAlt(altSubSets)))
+ }
+ reach.uniqueAlt = p.getUniqueAlt(reach)
+ // unique prediction?
+ if reach.uniqueAlt != ATNInvalidAltNumber {
+ predictedAlt = reach.uniqueAlt
+ break
+ }
+ if p.predictionMode != PredictionModeLLExactAmbigDetection {
+ predictedAlt = PredictionModeresolvesToJustOneViableAlt(altSubSets)
+ if predictedAlt != ATNInvalidAltNumber {
+ break
+ }
+ } else {
+ // In exact ambiguity mode, we never try to terminate early.
+ // Just keeps scarfing until we know what the conflict is
+ if PredictionModeallSubsetsConflict(altSubSets) && PredictionModeallSubsetsEqual(altSubSets) {
+ foundExactAmbig = true
+ predictedAlt = PredictionModegetSingleViableAlt(altSubSets)
+ break
+ }
+ // else there are multiple non-conflicting subsets or
+ // we're not sure what the ambiguity is yet.
+ // So, keep going.
+ }
+ previous = reach
+ if t != TokenEOF {
+ input.Consume()
+ t = input.LA(1)
+ }
+ }
+ // If the configuration set uniquely predicts an alternative,
+ // without conflict, then we know that it's a full LL decision
+ // not SLL.
+ if reach.uniqueAlt != ATNInvalidAltNumber {
+ p.ReportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.Index())
+ return predictedAlt, nil
+ }
+ // We do not check predicates here because we have checked them
+ // on-the-fly when doing full context prediction.
+
+ //
+ // In non-exact ambiguity detection mode, we might actually be able to
+ // detect an exact ambiguity, but I'm not going to spend the cycles
+ // needed to check. We only emit ambiguity warnings in exact ambiguity
+ // mode.
+ //
+ // For example, we might know that we have conflicting configurations.
+ // But, that does not mean that there is no way forward without a
+ // conflict. It's possible to have non-conflicting alt subsets as in:
+ //
+ // altSubSets=[{1, 2}, {1, 2}, {1}, {1, 2}]
+ //
+ // from
+ //
+ // [(17,1,[5 $]), (13,1,[5 10 $]), (21,1,[5 10 $]), (11,1,[$]),
+ // (13,2,[5 10 $]), (21,2,[5 10 $]), (11,2,[$])]
+ //
+ // In this case, (17,1,[5 $]) indicates there is some next sequence that
+ // would resolve this without conflict to alternative 1. Any other viable
+ // next sequence, however, is associated with a conflict. We stop
+ // looking for input because no amount of further lookahead will alter
+ // the fact that we should predict alternative 1. We just can't say for
+ // sure that there is an ambiguity without looking further.
+
+ p.ReportAmbiguity(dfa, D, startIndex, input.Index(), foundExactAmbig, reach.Alts(), reach)
+
+ return predictedAlt, nil
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) computeReachSet(closure *ATNConfigSet, t int, fullCtx bool) *ATNConfigSet {
+ if p.mergeCache == nil {
+ p.mergeCache = NewJPCMap(ReachSetCollection, "Merge cache for computeReachSet()")
+ }
+ intermediate := NewATNConfigSet(fullCtx)
+
+ // Configurations already in a rule stop state indicate reaching the end
+ // of the decision rule (local context) or end of the start rule (full
+ // context). Once reached, these configurations are never updated by a
+ // closure operation, so they are handled separately for the performance
+ // advantage of having a smaller intermediate set when calling closure.
+ //
+ // For full-context reach operations, separate handling is required to
+ // ensure that the alternative matching the longest overall sequence is
+ // chosen when multiple such configurations can match the input.
+
+ var skippedStopStates []*ATNConfig
+
+ // First figure out where we can reach on input t
+ for _, c := range closure.configs {
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("testing " + p.GetTokenName(t) + " at " + c.String())
+ }
+
+ if _, ok := c.GetState().(*RuleStopState); ok {
+ if fullCtx || t == TokenEOF {
+ skippedStopStates = append(skippedStopStates, c)
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("added " + c.String() + " to SkippedStopStates")
+ }
+ }
+ continue
+ }
+
+ for _, trans := range c.GetState().GetTransitions() {
+ target := p.getReachableTarget(trans, t)
+ if target != nil {
+ cfg := NewATNConfig4(c, target)
+ intermediate.Add(cfg, p.mergeCache)
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("added " + cfg.String() + " to intermediate")
+ }
+ }
+ }
+ }
+
+ // Now figure out where the reach operation can take us...
+ var reach *ATNConfigSet
+
+ // This block optimizes the reach operation for intermediate sets which
+ // trivially indicate a termination state for the overall
+ // AdaptivePredict operation.
+ //
+ // The conditions assume that intermediate
+ // contains all configurations relevant to the reach set, but this
+ // condition is not true when one or more configurations have been
+ // withheld in SkippedStopStates, or when the current symbol is EOF.
+ //
+ if skippedStopStates == nil && t != TokenEOF {
+ if len(intermediate.configs) == 1 {
+ // Don't pursue the closure if there is just one state.
+ // It can only have one alternative; just add it to the result.
+ reach = intermediate
+ } else if p.getUniqueAlt(intermediate) != ATNInvalidAltNumber {
+ // Also don't pursue the closure if there is unique alternative
+ // among the configurations.
+ reach = intermediate
+ }
+ }
+ // If the reach set could not be trivially determined, perform a closure
+ // operation on the intermediate set to compute its initial value.
+ //
+ if reach == nil {
+ reach = NewATNConfigSet(fullCtx)
+ closureBusy := NewClosureBusy("ParserATNSimulator.computeReachSet() make a closureBusy")
+ treatEOFAsEpsilon := t == TokenEOF
+ amount := len(intermediate.configs)
+ for k := 0; k < amount; k++ {
+ p.closure(intermediate.configs[k], reach, closureBusy, false, fullCtx, treatEOFAsEpsilon)
+ }
+ }
+ if t == TokenEOF {
+ // After consuming EOF no additional input is possible, so we are
+ // only interested in configurations which reached the end of the
+ // decision rule (local context) or end of the start rule (full
+ // context). Update reach to contain only these configurations. This
+ // handles both explicit EOF transitions in the grammar and implicit
+ // EOF transitions following the end of the decision or start rule.
+ //
+ // When reach==intermediate, no closure operation was performed. In
+ // this case, removeAllConfigsNotInRuleStopState needs to check for
+ // reachable rule stop states as well as configurations already in
+ // a rule stop state.
+ //
+ // This is handled before the configurations in SkippedStopStates,
+ // because any configurations potentially added from that list are
+ // already guaranteed to meet this condition whether or not it is
+ // required.
+ //
+ reach = p.removeAllConfigsNotInRuleStopState(reach, reach.Equals(intermediate))
+ }
+ // If SkippedStopStates!=nil, then it contains at least one
+ // configuration. For full-context reach operations, these
+ // configurations reached the end of the start rule, in which case we
+ // only add them back to reach if no configuration during the current
+ // closure operation reached such a state. This ensures AdaptivePredict
+ // chooses an alternative matching the longest overall sequence when
+ // multiple alternatives are viable.
+ //
+ if skippedStopStates != nil && ((!fullCtx) || (!PredictionModehasConfigInRuleStopState(reach))) {
+ for l := 0; l < len(skippedStopStates); l++ {
+ reach.Add(skippedStopStates[l], p.mergeCache)
+ }
+ }
+
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("computeReachSet " + closure.String() + " -> " + reach.String())
+ }
+
+ if len(reach.configs) == 0 {
+ return nil
+ }
+
+ return reach
+}
+
+// removeAllConfigsNotInRuleStopState returns a configuration set containing only the configurations from
+// configs which are in a [RuleStopState]. If all
+// configurations in configs are already in a rule stop state, this
+// method simply returns configs.
+//
+// When lookToEndOfRule is true, this method uses
+// [ATN].[NextTokens] for each configuration in configs which is
+// not already in a rule stop state to see if a rule stop state is reachable
+// from the configuration via epsilon-only transitions.
+//
+// The func returns configs if all configurations in configs are in a
+// rule stop state; otherwise it returns a new configuration set containing only
+// the configurations from configs which are in a rule stop state.
+func (p *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs *ATNConfigSet, lookToEndOfRule bool) *ATNConfigSet {
+ if PredictionModeallConfigsInRuleStopStates(configs) {
+ return configs
+ }
+ result := NewATNConfigSet(configs.fullCtx)
+ for _, config := range configs.configs {
+ if _, ok := config.GetState().(*RuleStopState); ok {
+ result.Add(config, p.mergeCache)
+ continue
+ }
+ if lookToEndOfRule && config.GetState().GetEpsilonOnlyTransitions() {
+ NextTokens := p.atn.NextTokens(config.GetState(), nil)
+ if NextTokens.contains(TokenEpsilon) {
+ endOfRuleState := p.atn.ruleToStopState[config.GetState().GetRuleIndex()]
+ result.Add(NewATNConfig4(config, endOfRuleState), p.mergeCache)
+ }
+ }
+ }
+ return result
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) computeStartState(a ATNState, ctx RuleContext, fullCtx bool) *ATNConfigSet {
+ // always at least the implicit call to start rule
+ initialContext := predictionContextFromRuleContext(p.atn, ctx)
+ configs := NewATNConfigSet(fullCtx)
+ if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("computeStartState from ATN state " + a.String() +
+ " initialContext=" + initialContext.String())
+ }
+
+ for i := 0; i < len(a.GetTransitions()); i++ {
+ target := a.GetTransitions()[i].getTarget()
+ c := NewATNConfig6(target, i+1, initialContext)
+ closureBusy := NewClosureBusy("ParserATNSimulator.computeStartState() make a closureBusy")
+ p.closure(c, configs, closureBusy, true, fullCtx, false)
+ }
+ return configs
+}
+
+// applyPrecedenceFilter transforms the start state computed by
+// [computeStartState] to the special start state used by a
+// precedence [DFA] for a particular precedence value. The transformation
+// process applies the following changes to the start state's configuration
+// set.
+//
+// 1. Evaluate the precedence predicates for each configuration using
+// [SemanticContext].evalPrecedence.
+// 2. Remove all configurations which predict an alternative greater than
+// 1, for which another configuration that predicts alternative 1 is in the
+// same ATN state with the same prediction context.
+//
+// Transformation 2 is valid for the following reasons:
+//
+// - The closure block cannot contain any epsilon transitions which bypass
+// the body of the closure, so all states reachable via alternative 1 are
+// part of the precedence alternatives of the transformed left-recursive
+// rule.
+// - The "primary" portion of a left recursive rule cannot contain an
+// epsilon transition, so the only way an alternative other than 1 can exist
+// in a state that is also reachable via alternative 1 is by nesting calls
+// to the left-recursive rule, with the outer calls not being at the
+// preferred precedence level.
+//
+// The prediction context must be considered by this filter to address
+// situations like the following:
+//
+// grammar TA
+// prog: statement* EOF
+// statement: letterA | statement letterA 'b'
+// letterA: 'a'
+//
+// In the above grammar, the [ATN] state immediately before the token
+// reference 'a' in letterA is reachable from the left edge
+// of both the primary and closure blocks of the left-recursive rule
+// statement. The prediction context associated with each of these
+// configurations distinguishes between them, and prevents the alternative
+// which stepped out to prog, and then back in to statement
+// from being eliminated by the filter.
+//
+// The func returns the transformed configuration set representing the start state
+// for a precedence [DFA] at a particular precedence level (determined by
+// calling [Parser].getPrecedence).
+func (p *ParserATNSimulator) applyPrecedenceFilter(configs *ATNConfigSet) *ATNConfigSet {
+
+ statesFromAlt1 := make(map[int]*PredictionContext)
+ configSet := NewATNConfigSet(configs.fullCtx)
+
+ for _, config := range configs.configs {
+ // handle alt 1 first
+ if config.GetAlt() != 1 {
+ continue
+ }
+ updatedContext := config.GetSemanticContext().evalPrecedence(p.parser, p.outerContext)
+ if updatedContext == nil {
+ // the configuration was eliminated
+ continue
+ }
+ statesFromAlt1[config.GetState().GetStateNumber()] = config.GetContext()
+ if updatedContext != config.GetSemanticContext() {
+ configSet.Add(NewATNConfig2(config, updatedContext), p.mergeCache)
+ } else {
+ configSet.Add(config, p.mergeCache)
+ }
+ }
+ for _, config := range configs.configs {
+
+ if config.GetAlt() == 1 {
+ // already handled
+ continue
+ }
+ // In the future, this elimination step could be updated to also
+ // filter the prediction context for alternatives predicting alt>1
+ // (basically a graph subtraction algorithm).
+ if !config.getPrecedenceFilterSuppressed() {
+ context := statesFromAlt1[config.GetState().GetStateNumber()]
+ if context != nil && context.Equals(config.GetContext()) {
+ // eliminated
+ continue
+ }
+ }
+ configSet.Add(config, p.mergeCache)
+ }
+ return configSet
+}
+
+func (p *ParserATNSimulator) getReachableTarget(trans Transition, ttype int) ATNState {
+ if trans.Matches(ttype, 0, p.atn.maxTokenType) {
+ return trans.getTarget()
+ }
+
+ return nil
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs *ATNConfigSet, nalts int) []SemanticContext {
+
+ altToPred := make([]SemanticContext, nalts+1)
+ for _, c := range configs.configs {
+ if ambigAlts.contains(c.GetAlt()) {
+ altToPred[c.GetAlt()] = SemanticContextorContext(altToPred[c.GetAlt()], c.GetSemanticContext())
+ }
+ }
+ nPredAlts := 0
+ for i := 1; i <= nalts; i++ {
+ pred := altToPred[i]
+ if pred == nil {
+ altToPred[i] = SemanticContextNone
+ } else if pred != SemanticContextNone {
+ nPredAlts++
+ }
+ }
+ // unambiguous alts are nil in altToPred
+ if nPredAlts == 0 {
+ altToPred = nil
+ }
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("getPredsForAmbigAlts result " + fmt.Sprint(altToPred))
+ }
+ return altToPred
+}
+
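+// getPredicatePredictions maps each ambiguous alternative to its semantic
+// predicate as a (pred, alt) pair for later evaluation. It returns nil when
+// no alternative actually carries a predicate (every entry is
+// SemanticContextNone).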
+func (p *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altToPred []SemanticContext) []*PredPrediction {
+ pairs := make([]*PredPrediction, 0)
+ containsPredicate := false
+ for i := 1; i < len(altToPred); i++ {
+ pred := altToPred[i]
+ // an un-predicated alt is indicated by SemanticContextNone
+ if ambigAlts != nil && ambigAlts.contains(i) {
+ pairs = append(pairs, NewPredPrediction(pred, i))
+ }
+ if pred != SemanticContextNone {
+ containsPredicate = true
+ }
+ }
+ if !containsPredicate {
+ return nil
+ }
+ return pairs
+}
+
+// getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule is used to improve the localization of error messages by
+// choosing an alternative rather than panicking with a [NoViableAltException] in particular prediction scenarios where the
+// ERROR state was reached during [ATN] simulation.
+//
+// The default implementation of this method uses the following
+// algorithm to identify an [ATN] configuration which successfully parsed the
+// decision entry rule. Choosing such an alternative ensures that the
+// [ParserRuleContext] returned by the calling rule will be complete
+// and valid, and the syntax error will be Reported later at a more
+// localized location.
+//
+// - If a syntactically valid path or paths reach the end of the decision rule, and
+// they are semantically valid if predicated, return the min associated alt.
+// - Else, if a semantically invalid but syntactically valid path or paths
+// exist, return the minimum associated alt.
+// - Otherwise, return [ATNInvalidAltNumber].
+//
+// In some scenarios, the algorithm described above could predict an
+// alternative which will result in a [FailedPredicateException] in
+// the parser. Specifically, this could occur if the only configuration
+// capable of successfully parsing to the end of the decision rule is
+// blocked by a semantic predicate. By choosing this alternative within
+// [AdaptivePredict] instead of panicking with a [NoViableAltException], the resulting
+// [FailedPredicateException] in the parser will identify the specific
+// predicate which is preventing the parser from successfully parsing the
+// decision rule, which helps developers identify and correct logic errors
+// in semantic predicates.
+//
+// Pass in configs holding the ATN configurations which were valid immediately before
+// the ERROR state was reached, and outerContext as the initial parser context from the paper,
+// or the parser stack at the instant before prediction commences.
+//
+// The func returns the value to return from [AdaptivePredict], or
+// [ATNInvalidAltNumber] if a suitable alternative was not
+// identified and [AdaptivePredict] should report an error instead.
+func (p *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(configs *ATNConfigSet, outerContext ParserRuleContext) int {
+ cfgs := p.splitAccordingToSemanticValidity(configs, outerContext)
+ semValidConfigs := cfgs[0]
+ semInvalidConfigs := cfgs[1]
+ alt := p.GetAltThatFinishedDecisionEntryRule(semValidConfigs)
+ if alt != ATNInvalidAltNumber { // semantically/syntactically viable path exists
+ return alt
+ }
+ // Is there a syntactically valid path with a failed pred?
+ if len(semInvalidConfigs.configs) > 0 {
+ alt = p.GetAltThatFinishedDecisionEntryRule(semInvalidConfigs)
+ if alt != ATNInvalidAltNumber { // syntactically viable path exists
+ return alt
+ }
+ }
+ return ATNInvalidAltNumber
+}
+
+func (p *ParserATNSimulator) GetAltThatFinishedDecisionEntryRule(configs *ATNConfigSet) int {
+ alts := NewIntervalSet()
+
+ for _, c := range configs.configs {
+ _, ok := c.GetState().(*RuleStopState)
+
+ if c.GetReachesIntoOuterContext() > 0 || (ok && c.GetContext().hasEmptyPath()) {
+ alts.addOne(c.GetAlt())
+ }
+ }
+ if alts.length() == 0 {
+ return ATNInvalidAltNumber
+ }
+
+ return alts.first()
+}
+
+// Walk the list of configurations and split them according to
+// those that have preds evaluating to true/false. If no pred, assume
+// true pred and include in the succeeded set. Returns a pair of sets.
+//
+// Creates new sets so as not to alter the incoming parameter.
+//
+// Assumption: the input stream has been restored to the starting point of
+// prediction, which is where predicates need to be evaluated.
+
+type ATNConfigSetPair struct {
+ item0, item1 *ATNConfigSet
+}
+
+func (p *ParserATNSimulator) splitAccordingToSemanticValidity(configs *ATNConfigSet, outerContext ParserRuleContext) []*ATNConfigSet {
+ succeeded := NewATNConfigSet(configs.fullCtx)
+ failed := NewATNConfigSet(configs.fullCtx)
+
+ for _, c := range configs.configs {
+ if c.GetSemanticContext() != SemanticContextNone {
+ predicateEvaluationResult := c.GetSemanticContext().evaluate(p.parser, outerContext)
+ if predicateEvaluationResult {
+ succeeded.Add(c, nil)
+ } else {
+ failed.Add(c, nil)
+ }
+ } else {
+ succeeded.Add(c, nil)
+ }
+ }
+ return []*ATNConfigSet{succeeded, failed}
+}
+
+// evalSemanticContext looks through a list of predicate/alt pairs, returning alts for the
+// pairs that win. A [SemanticContextNone] predicate indicates an alt containing an
+// un-predicated config which behaves as "always true." If !complete,
+// then we stop at the first predicate that evaluates to true. This
+// includes pairs with nil predicates.
+//
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPrediction, outerContext ParserRuleContext, complete bool) *BitSet {
+ predictions := NewBitSet()
+ for i := 0; i < len(predPredictions); i++ {
+ pair := predPredictions[i]
+ if pair.pred == SemanticContextNone {
+ predictions.add(pair.alt)
+ if !complete {
+ break
+ }
+ continue
+ }
+
+ predicateEvaluationResult := pair.pred.evaluate(p.parser, outerContext)
+ if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorDFADebug {
+ fmt.Println("eval pred " + pair.String() + "=" + fmt.Sprint(predicateEvaluationResult))
+ }
+ if predicateEvaluationResult {
+ if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorDFADebug {
+ fmt.Println("PREDICT " + fmt.Sprint(pair.alt))
+ }
+ predictions.add(pair.alt)
+ if !complete {
+ break
+ }
+ }
+ }
+ return predictions
+}
+
+func (p *ParserATNSimulator) closure(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx, treatEOFAsEpsilon bool) {
+ initialDepth := 0
+ p.closureCheckingStopState(config, configs, closureBusy, collectPredicates,
+ fullCtx, initialDepth, treatEOFAsEpsilon)
+}
+
+func (p *ParserATNSimulator) closureCheckingStopState(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("closure(" + config.String() + ")")
+ }
+
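+ // This implementation is iterative rather than recursive: it keeps an
+ // explicit work stack and a visited set (compare
+ // closureCheckingStopStateRecursive below), which bounds Go stack growth
+ // when prediction contexts nest deeply.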
+ var stack []*ATNConfig
+ visited := make(map[*ATNConfig]bool)
+
+ stack = append(stack, config)
+
+ for len(stack) > 0 {
+ currConfig := stack[len(stack)-1]
+ stack = stack[:len(stack)-1]
+
+ if _, ok := visited[currConfig]; ok {
+ continue
+ }
+ visited[currConfig] = true
+
+ if _, ok := currConfig.GetState().(*RuleStopState); ok {
+ // We hit rule end. If we have context info, use it
+ // run thru all possible stack tops in ctx
+ if !currConfig.GetContext().isEmpty() {
+ for i := 0; i < currConfig.GetContext().length(); i++ {
+ if currConfig.GetContext().getReturnState(i) == BasePredictionContextEmptyReturnState {
+ if fullCtx {
+ nb := NewATNConfig1(currConfig, currConfig.GetState(), BasePredictionContextEMPTY)
+ configs.Add(nb, p.mergeCache)
+ continue
+ } else {
+ // we have no context info, just chase follow links (if greedy)
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("FALLING off rule " + p.getRuleName(currConfig.GetState().GetRuleIndex()))
+ }
+ p.closureWork(currConfig, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
+ }
+ continue
+ }
+ returnState := p.atn.states[currConfig.GetContext().getReturnState(i)]
+ newContext := currConfig.GetContext().GetParent(i) // "pop" return state
+
+ c := NewATNConfig5(returnState, currConfig.GetAlt(), newContext, currConfig.GetSemanticContext())
+ // While we have context to pop back from, we may have
+ // gotten that context AFTER having fallen off a rule.
+ // Make sure we track that we are now out of context.
+ c.SetReachesIntoOuterContext(currConfig.GetReachesIntoOuterContext())
+
+ stack = append(stack, c)
+ }
+ continue
+ } else if fullCtx {
+ // reached end of start rule
+ configs.Add(currConfig, p.mergeCache)
+ continue
+ } else {
+ // else if we have no context info, just chase follow links (if greedy)
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("FALLING off rule " + p.getRuleName(currConfig.GetState().GetRuleIndex()))
+ }
+ }
+ }
+
+ p.closureWork(currConfig, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
+ }
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) closureCheckingStopStateRecursive(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("closure(" + config.String() + ")")
+ }
+
+ if _, ok := config.GetState().(*RuleStopState); ok {
+ // We hit rule end. If we have context info, use it
+ // run thru all possible stack tops in ctx
+ if !config.GetContext().isEmpty() {
+ for i := 0; i < config.GetContext().length(); i++ {
+ if config.GetContext().getReturnState(i) == BasePredictionContextEmptyReturnState {
+ if fullCtx {
+ nb := NewATNConfig1(config, config.GetState(), BasePredictionContextEMPTY)
+ configs.Add(nb, p.mergeCache)
+ continue
+ } else {
+ // we have no context info, just chase follow links (if greedy)
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex()))
+ }
+ p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
+ }
+ continue
+ }
+ returnState := p.atn.states[config.GetContext().getReturnState(i)]
+ newContext := config.GetContext().GetParent(i) // "pop" return state
+
+ c := NewATNConfig5(returnState, config.GetAlt(), newContext, config.GetSemanticContext())
+ // While we have context to pop back from, we may have
+ // gotten that context AFTER having fallen off a rule.
+ // Make sure we track that we are now out of context.
+ c.SetReachesIntoOuterContext(config.GetReachesIntoOuterContext())
+ p.closureCheckingStopState(c, configs, closureBusy, collectPredicates, fullCtx, depth-1, treatEOFAsEpsilon)
+ }
+ return
+ } else if fullCtx {
+ // reached end of start rule
+ configs.Add(config, p.mergeCache)
+ return
+ } else {
+ // else if we have no context info, just chase follow links (if greedy)
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex()))
+ }
+ }
+ }
+ p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
+}
+
+// Do the actual work of walking epsilon edges
+//
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) closureWork(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
+ state := config.GetState()
+ // optimization
+ if !state.GetEpsilonOnlyTransitions() {
+ configs.Add(config, p.mergeCache)
+ // make sure to not return here, because EOF transitions can act as
+ // both epsilon transitions and non-epsilon transitions.
+ }
+ for i := 0; i < len(state.GetTransitions()); i++ {
+ if i == 0 && p.canDropLoopEntryEdgeInLeftRecursiveRule(config) {
+ continue
+ }
+
+ t := state.GetTransitions()[i]
+ _, ok := t.(*ActionTransition)
+ continueCollecting := collectPredicates && !ok
+ c := p.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEOFAsEpsilon)
+ if c != nil {
+ newDepth := depth
+
+ if _, ok := config.GetState().(*RuleStopState); ok {
+ // target fell off the end of a rule; mark resulting c as having dipped into outer context
+ // We can't get here if incoming config was rule stop and we had context
+ // track how far we dip into outer context. Might
+ // come in handy and we avoid evaluating context dependent
+ // preds if this is > 0.
+
+ if p.dfa != nil && p.dfa.getPrecedenceDfa() {
+ if t.(*EpsilonTransition).outermostPrecedenceReturn == p.dfa.atnStartState.GetRuleIndex() {
+ c.setPrecedenceFilterSuppressed(true)
+ }
+ }
+
+ c.SetReachesIntoOuterContext(c.GetReachesIntoOuterContext() + 1)
+
+ _, present := closureBusy.Put(c)
+ if present {
+ // avoid infinite recursion for right-recursive rules
+ continue
+ }
+
+ configs.dipsIntoOuterContext = true // TODO: can remove? only care when we add to set per middle of this method
+ newDepth--
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("dips into outer ctx: " + c.String())
+ }
+ } else {
+
+ if !t.getIsEpsilon() {
+ _, present := closureBusy.Put(c)
+ if present {
+ // avoid infinite recursion for EOF* and EOF+
+ continue
+ }
+ }
+ if _, ok := t.(*RuleTransition); ok {
+ // latch when newDepth goes negative - once we step out of the entry context we can't return
+ if newDepth >= 0 {
+ newDepth++
+ }
+ }
+ }
+ p.closureCheckingStopState(c, configs, closureBusy, continueCollecting, fullCtx, newDepth, treatEOFAsEpsilon)
+ }
+ }
+}
+
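+// canDropLoopEntryEdgeInLeftRecursiveRule reports whether the loop-entry
+// branch (transition 0 of a StarLoopEntryState generated by left-recursion
+// elimination) can be skipped during closure. This is safe only when every
+// stack context provably returns to this loop entry/exit region through
+// epsilon-only edges without leaving the rule, as checked case by case below.
+//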
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) canDropLoopEntryEdgeInLeftRecursiveRule(config *ATNConfig) bool {
+ if !runtimeConfig.lRLoopEntryBranchOpt {
+ return false
+ }
+
+ _p := config.GetState()
+
+ // First check to see if we are in StarLoopEntryState generated during
+ // left-recursion elimination. For efficiency, also check if
+ // the context has an empty stack case. If so, it would mean
+ // global FOLLOW so we can't perform optimization
+ if _p.GetStateType() != ATNStateStarLoopEntry {
+ return false
+ }
+ startLoop, ok := _p.(*StarLoopEntryState)
+ if !ok {
+ return false
+ }
+ if !startLoop.precedenceRuleDecision ||
+ config.GetContext().isEmpty() ||
+ config.GetContext().hasEmptyPath() {
+ return false
+ }
+
+ // Require all return states to return back to the same rule
+ // that _p is in.
+ numCtxs := config.GetContext().length()
+ for i := 0; i < numCtxs; i++ {
+ returnState := p.atn.states[config.GetContext().getReturnState(i)]
+ if returnState.GetRuleIndex() != _p.GetRuleIndex() {
+ return false
+ }
+ }
+ x := _p.GetTransitions()[0].getTarget()
+ decisionStartState := x.(BlockStartState)
+ blockEndStateNum := decisionStartState.getEndState().stateNumber
+ blockEndState := p.atn.states[blockEndStateNum].(*BlockEndState)
+
+ // Verify that the top of each stack context leads to loop entry/exit
+ // state through epsilon edges and w/o leaving rule.
+
+ for i := 0; i < numCtxs; i++ { // for each stack context
+ returnStateNumber := config.GetContext().getReturnState(i)
+ returnState := p.atn.states[returnStateNumber]
+
+ // all states must have single outgoing epsilon edge
+ if len(returnState.GetTransitions()) != 1 || !returnState.GetTransitions()[0].getIsEpsilon() {
+ return false
+ }
+
+ // Look for prefix op case like 'not expr', (' type ')' expr
+ returnStateTarget := returnState.GetTransitions()[0].getTarget()
+ if returnState.GetStateType() == ATNStateBlockEnd && returnStateTarget == _p {
+ continue
+ }
+
+ // Look for 'expr op expr' or case where expr's return state is block end
+ // of (...)* internal block; the block end points to loop back
+ // which points to _p, but we don't need to check that
+ if returnState == blockEndState {
+ continue
+ }
+
+ // Look for ternary expr ? expr : expr. The return state points at block end,
+ // which points at loop entry state
+ if returnStateTarget == blockEndState {
+ continue
+ }
+
+ // Look for complex prefix 'between expr and expr' case where 2nd expr's
+ // return state points at block end state of (...)* internal block
+ if returnStateTarget.GetStateType() == ATNStateBlockEnd &&
+ len(returnStateTarget.GetTransitions()) == 1 &&
+ returnStateTarget.GetTransitions()[0].getIsEpsilon() &&
+ returnStateTarget.GetTransitions()[0].getTarget() == _p {
+ continue
+ }
+
+ // anything else ain't conforming
+ return false
+ }
+
+ return true
+}
+
+func (p *ParserATNSimulator) getRuleName(index int) string {
+ if p.parser != nil && index >= 0 {
+ return p.parser.GetRuleNames()[index]
+ }
+ var sb strings.Builder
+ sb.Grow(32)
+
+ sb.WriteString("')
+ return sb.String()
+}
+
+func (p *ParserATNSimulator) getEpsilonTarget(config *ATNConfig, t Transition, collectPredicates, inContext, fullCtx, treatEOFAsEpsilon bool) *ATNConfig {
+
+ switch t.getSerializationType() {
+ case TransitionRULE:
+ return p.ruleTransition(config, t.(*RuleTransition))
+ case TransitionPRECEDENCE:
+ return p.precedenceTransition(config, t.(*PrecedencePredicateTransition), collectPredicates, inContext, fullCtx)
+ case TransitionPREDICATE:
+ return p.predTransition(config, t.(*PredicateTransition), collectPredicates, inContext, fullCtx)
+ case TransitionACTION:
+ return p.actionTransition(config, t.(*ActionTransition))
+ case TransitionEPSILON:
+ return NewATNConfig4(config, t.getTarget())
+ case TransitionATOM, TransitionRANGE, TransitionSET:
+ // EOF transitions act like epsilon transitions after the first EOF
+ // transition is traversed
+ if treatEOFAsEpsilon {
+ if t.Matches(TokenEOF, 0, 1) {
+ return NewATNConfig4(config, t.getTarget())
+ }
+ }
+ return nil
+ default:
+ return nil
+ }
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) actionTransition(config *ATNConfig, t *ActionTransition) *ATNConfig {
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("ACTION edge " + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.actionIndex))
+ }
+ return NewATNConfig4(config, t.getTarget())
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) precedenceTransition(config *ATNConfig,
+ pt *PrecedencePredicateTransition, collectPredicates, inContext, fullCtx bool) *ATNConfig {
+
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("PRED (collectPredicates=" + fmt.Sprint(collectPredicates) + ") " +
+ strconv.Itoa(pt.precedence) + ">=_p, ctx dependent=true")
+ if p.parser != nil {
+ fmt.Println("context surrounding pred is " + fmt.Sprint(p.parser.GetRuleInvocationStack(nil)))
+ }
+ }
+ var c *ATNConfig
+ if collectPredicates && inContext {
+ if fullCtx {
+ // In full context mode, we can evaluate predicates on-the-fly
+ // during closure, which dramatically reduces the size of
+ // the config sets. It also obviates the need to test predicates
+ // later during conflict resolution.
+ currentPosition := p.input.Index()
+ p.input.Seek(p.startIndex)
+ predSucceeds := pt.getPredicate().evaluate(p.parser, p.outerContext)
+ p.input.Seek(currentPosition)
+ if predSucceeds {
+ c = NewATNConfig4(config, pt.getTarget()) // no pred context
+ }
+ } else {
+ newSemCtx := SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate())
+ c = NewATNConfig3(config, pt.getTarget(), newSemCtx)
+ }
+ } else {
+ c = NewATNConfig4(config, pt.getTarget())
+ }
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("runtimeConfig from pred transition=" + c.String())
+ }
+ return c
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) predTransition(config *ATNConfig, pt *PredicateTransition, collectPredicates, inContext, fullCtx bool) *ATNConfig {
+
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("PRED (collectPredicates=" + fmt.Sprint(collectPredicates) + ") " + strconv.Itoa(pt.ruleIndex) +
+ ":" + strconv.Itoa(pt.predIndex) + ", ctx dependent=" + fmt.Sprint(pt.isCtxDependent))
+ if p.parser != nil {
+ fmt.Println("context surrounding pred is " + fmt.Sprint(p.parser.GetRuleInvocationStack(nil)))
+ }
+ }
+ var c *ATNConfig
+ if collectPredicates && (!pt.isCtxDependent || inContext) {
+ if fullCtx {
+ // In full context mode, we can evaluate predicates on-the-fly
+ // during closure, which dramatically reduces the size of
+ // the config sets. It also obviates the need to test predicates
+ // later during conflict resolution.
+ currentPosition := p.input.Index()
+ p.input.Seek(p.startIndex)
+ predSucceeds := pt.getPredicate().evaluate(p.parser, p.outerContext)
+ p.input.Seek(currentPosition)
+ if predSucceeds {
+ c = NewATNConfig4(config, pt.getTarget()) // no pred context
+ }
+ } else {
+ newSemCtx := SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate())
+ c = NewATNConfig3(config, pt.getTarget(), newSemCtx)
+ }
+ } else {
+ c = NewATNConfig4(config, pt.getTarget())
+ }
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("config from pred transition=" + c.String())
+ }
+ return c
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) ruleTransition(config *ATNConfig, t *RuleTransition) *ATNConfig {
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("CALL rule " + p.getRuleName(t.getTarget().GetRuleIndex()) + ", ctx=" + config.GetContext().String())
+ }
+ returnState := t.followState
+ newContext := SingletonBasePredictionContextCreate(config.GetContext(), returnState.GetStateNumber())
+ return NewATNConfig1(config, t.getTarget(), newContext)
+}
+
+func (p *ParserATNSimulator) getConflictingAlts(configs *ATNConfigSet) *BitSet {
+ altsets := PredictionModegetConflictingAltSubsets(configs)
+ return PredictionModeGetAlts(altsets)
+}
+
+// getConflictingAltsOrUniqueAlt Sam pointed out a problem with the previous definition, v3, of
+// ambiguous states. If we have another state associated with conflicting
+// alternatives, we should keep going. For example, the following grammar
+//
+// s : (ID | ID ID?) ;
+//
+// When the [ATN] simulation reaches the state before ;, it has a [DFA]
+// state that looks like:
+//
+// [12|1|[], 6|2|[], 12|2|[]].
+//
+// Naturally
+//
+// 12|1|[] and 12|2|[]
+//
+// conflict, but we cannot stop processing this node
+// because alternative two has another way to continue, via
+//
+// [6|2|[]].
+//
+// The key is that we have a single state that has configs only associated
+// with a single alternative, 2, and crucially the state transitions
+// among the configurations are all non-epsilon transitions. That means
+// we don't consider any conflicts that include alternative 2. So, we
+// ignore the conflict between alts 1 and 2. We ignore a set of
+// conflicting alts when there is an intersection with an alternative
+// associated with a single alt state in the state config-list map.
+//
+// It's also the case that we might have two conflicting configurations but
+// also a 3rd non-conflicting configuration for a different alternative:
+//
+// [1|1|[], 1|2|[], 8|3|[]].
+//
+// This can come about from grammar:
+//
+// a : A | A | A B
+//
+// After matching input A, we reach the stop state for rule A, state 1.
+// State 8 is the state right before B. Clearly alternatives 1 and 2
+// conflict and no amount of further lookahead will separate the two.
+// However, alternative 3 will be able to continue, so we do not
+// stop working on this state.
+//
+// In the previous example, we're concerned
+// with states associated with the conflicting alternatives. Here alt
+// 3 is not associated with the conflicting configs, but since we can continue
+// looking for input reasonably, I don't declare the state done. We
+// ignore a set of conflicting alts when we have an alternative
+// that we still need to pursue.
+func (p *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs *ATNConfigSet) *BitSet {
+ var conflictingAlts *BitSet
+ if configs.uniqueAlt != ATNInvalidAltNumber {
+ conflictingAlts = NewBitSet()
+ conflictingAlts.add(configs.uniqueAlt)
+ } else {
+ conflictingAlts = configs.conflictingAlts
+ }
+ return conflictingAlts
+}
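+
+// Illustrative sketch (editor's addition, not part of the runtime API): the
+// rule above means callers can treat a settled prediction and a true conflict
+// uniformly:
+//
+//	alts := p.getConflictingAltsOrUniqueAlt(configs)
+//	if alts != nil && alts.length() == 1 {
+//		// exactly one alternative remains viable; prediction is settled
+//	}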
+
+func (p *ParserATNSimulator) GetTokenName(t int) string {
+ if t == TokenEOF {
+ return "EOF"
+ }
+
+ if p.parser != nil && p.parser.GetLiteralNames() != nil && t < len(p.parser.GetLiteralNames()) {
+ return p.parser.GetLiteralNames()[t] + "<" + strconv.Itoa(t) + ">"
+ }
+
+	if p.parser != nil && p.parser.GetSymbolicNames() != nil && t < len(p.parser.GetSymbolicNames()) {
+ return p.parser.GetSymbolicNames()[t] + "<" + strconv.Itoa(t) + ">"
+ }
+
+ return strconv.Itoa(t)
+}
+
+func (p *ParserATNSimulator) getLookaheadName(input TokenStream) string {
+ return p.GetTokenName(input.LA(1))
+}
+
+// Used for debugging in [AdaptivePredict] around [execATN], but I cut
+// it out for clarity now that alg. works well. We can leave this
+// "dead" code for a bit.
+func (p *ParserATNSimulator) dumpDeadEndConfigs(_ *NoViableAltException) {
+
+ panic("Not implemented")
+
+ // fmt.Println("dead end configs: ")
+ // var decs = nvae.deadEndConfigs
+ //
+	// for i := 0; i < len(decs); i++ {
+	//
+	// 	c := decs[i]
+	// 	var trans = "no edges"
+	// 	if len(c.state.GetTransitions()) > 0 {
+ // var t = c.state.GetTransitions()[0]
+ // if t2, ok := t.(*AtomTransition); ok {
+ // trans = "Atom "+ p.GetTokenName(t2.label)
+ // } else if t3, ok := t.(SetTransition); ok {
+ // _, ok := t.(*NotSetTransition)
+ //
+ // var s string
+ // if (ok){
+ // s = "~"
+ // }
+ //
+ // trans = s + "Set " + t3.set
+ // }
+ // }
+ // fmt.Errorf(c.String(p.parser, true) + ":" + trans)
+ // }
+}
+
+func (p *ParserATNSimulator) noViableAlt(input TokenStream, outerContext ParserRuleContext, configs *ATNConfigSet, startIndex int) *NoViableAltException {
+ return NewNoViableAltException(p.parser, input, input.Get(startIndex), input.LT(1), configs, outerContext)
+}
+
+func (p *ParserATNSimulator) getUniqueAlt(configs *ATNConfigSet) int {
+ alt := ATNInvalidAltNumber
+ for _, c := range configs.configs {
+ if alt == ATNInvalidAltNumber {
+ alt = c.GetAlt() // found first alt
+ } else if c.GetAlt() != alt {
+ return ATNInvalidAltNumber
+ }
+ }
+ return alt
+}
+
+// addDFAEdge adds an edge to the DFA, if possible. This method calls
+// addDFAState to ensure the to state is present in the DFA. If from is nil,
+// or if t is outside the range of edges that can be represented in the DFA
+// tables, this method returns without adding the edge to the DFA.
+//
+// If to is nil, this method returns nil. Otherwise, it returns the [DFAState]
+// returned by calling addDFAState for the to state.
+//
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFAState) *DFAState {
+ if runtimeConfig.parserATNSimulatorDebug {
+ fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + p.GetTokenName(t))
+ }
+ if to == nil {
+ return nil
+ }
+ p.atn.stateMu.Lock()
+	to = p.addDFAState(dfa, to) // use the existing state if possible, not the incoming one
+ p.atn.stateMu.Unlock()
+ if from == nil || t < -1 || t > p.atn.maxTokenType {
+ return to
+ }
+ p.atn.edgeMu.Lock()
+ if from.getEdges() == nil {
+ from.setEdges(make([]*DFAState, p.atn.maxTokenType+1+1))
+ }
+ from.setIthEdge(t+1, to) // connect
+ p.atn.edgeMu.Unlock()
+
+ if runtimeConfig.parserATNSimulatorDebug {
+ var names []string
+ if p.parser != nil {
+ names = p.parser.GetLiteralNames()
+ }
+
+ fmt.Println("DFA=\n" + dfa.String(names, nil))
+ }
+ return to
+}
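+
+// Editor's note on the indexing above: the input symbol t may be -1 (EOF), so
+// edges are stored at index t+1 and the edge table is sized maxTokenType+1+1
+// to leave room for both EOF and the largest token type. A lookup mirroring
+// the storage scheme would be:
+//
+//	edge := from.getEdges()[t+1] // t == -1 (EOF) maps to slot 0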
+
+// addDFAState adds state D to the [DFA] if it is not already present, and returns
+// the actual instance stored in the [DFA]. If a state equivalent to D
+// is already in the [DFA], the existing state is returned. Otherwise, this
+// method returns D after adding it to the [DFA].
+//
+// If D is [ATNSimulatorError], this method returns [ATNSimulatorError] and
+// does not change the DFA.
+//
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) addDFAState(dfa *DFA, d *DFAState) *DFAState {
+ if d == ATNSimulatorError {
+ return d
+ }
+
+ existing, present := dfa.Get(d)
+ if present {
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Print("addDFAState " + d.String() + " exists")
+ }
+ return existing
+ }
+
+ // The state will be added if not already there or we will be given back the existing state struct
+ // if it is present.
+ //
+ d.stateNumber = dfa.Len()
+ if !d.configs.readOnly {
+ d.configs.OptimizeConfigs(&p.BaseATNSimulator)
+ d.configs.readOnly = true
+ d.configs.configLookup = nil
+ }
+ dfa.Put(d)
+
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("addDFAState new " + d.String())
+ }
+
+ return d
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) ReportAttemptingFullContext(dfa *DFA, conflictingAlts *BitSet, configs *ATNConfigSet, startIndex, stopIndex int) {
+ if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorRetryDebug {
+ interval := NewInterval(startIndex, stopIndex+1)
+ fmt.Println("ReportAttemptingFullContext decision=" + strconv.Itoa(dfa.decision) + ":" + configs.String() +
+ ", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval))
+ }
+ if p.parser != nil {
+ p.parser.GetErrorListenerDispatch().ReportAttemptingFullContext(p.parser, dfa, startIndex, stopIndex, conflictingAlts, configs)
+ }
+}
+
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) ReportContextSensitivity(dfa *DFA, prediction int, configs *ATNConfigSet, startIndex, stopIndex int) {
+ if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorRetryDebug {
+ interval := NewInterval(startIndex, stopIndex+1)
+ fmt.Println("ReportContextSensitivity decision=" + strconv.Itoa(dfa.decision) + ":" + configs.String() +
+ ", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval))
+ }
+ if p.parser != nil {
+ p.parser.GetErrorListenerDispatch().ReportContextSensitivity(p.parser, dfa, startIndex, stopIndex, prediction, configs)
+ }
+}
+
+// ReportAmbiguity reports an ambiguity in the parse, which shows that the parser will explore a different route.
+//
+// If we are parsing with context sensitivity, we know it's an ambiguity rather than a conflict or error, but we can report it to the developer
+// so that they can see that this is happening and can take action if they want to.
+//
+//goland:noinspection GoBoolExpressions
+func (p *ParserATNSimulator) ReportAmbiguity(dfa *DFA, _ *DFAState, startIndex, stopIndex int,
+ exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
+ if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorRetryDebug {
+ interval := NewInterval(startIndex, stopIndex+1)
+ fmt.Println("ReportAmbiguity " + ambigAlts.String() + ":" + configs.String() +
+ ", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval))
+ }
+ if p.parser != nil {
+ p.parser.GetErrorListenerDispatch().ReportAmbiguity(p.parser, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
+ }
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go b/vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go
new file mode 100644
index 0000000..c249bc1
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go
@@ -0,0 +1,421 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "reflect"
+ "strconv"
+)
+
+type ParserRuleContext interface {
+ RuleContext
+
+ SetException(RecognitionException)
+
+ AddTokenNode(token Token) *TerminalNodeImpl
+ AddErrorNode(badToken Token) *ErrorNodeImpl
+
+ EnterRule(listener ParseTreeListener)
+ ExitRule(listener ParseTreeListener)
+
+ SetStart(Token)
+ GetStart() Token
+
+ SetStop(Token)
+ GetStop() Token
+
+ AddChild(child RuleContext) RuleContext
+ RemoveLastChild()
+}
+
+type BaseParserRuleContext struct {
+ parentCtx RuleContext
+ invokingState int
+ RuleIndex int
+
+ start, stop Token
+ exception RecognitionException
+ children []Tree
+}
+
+func NewBaseParserRuleContext(parent ParserRuleContext, invokingStateNumber int) *BaseParserRuleContext {
+ prc := new(BaseParserRuleContext)
+ InitBaseParserRuleContext(prc, parent, invokingStateNumber)
+ return prc
+}
+
+func InitBaseParserRuleContext(prc *BaseParserRuleContext, parent ParserRuleContext, invokingStateNumber int) {
+	// What context invoked this rule?
+ prc.parentCtx = parent
+
+	// What state invoked the rule associated with this context?
+	// The "return address" is the followState of invokingState.
+	// If parent is nil, invokingState should be -1.
+ if parent == nil {
+ prc.invokingState = -1
+ } else {
+ prc.invokingState = invokingStateNumber
+ }
+
+ prc.RuleIndex = -1
+	// If we are debugging or building a parse tree for a Visitor,
+	// we need to track all of the tokens and rule invocations associated
+	// with this rule's context. This is empty when parsing without tree
+	// construction because we don't need to track the details about
+	// how we parse this rule.
+ prc.children = nil
+ prc.start = nil
+ prc.stop = nil
+	// The exception that forced this rule to return. If the rule successfully
+	// completed, this is nil.
+ prc.exception = nil
+}
+
+func (prc *BaseParserRuleContext) SetException(e RecognitionException) {
+ prc.exception = e
+}
+
+func (prc *BaseParserRuleContext) GetChildren() []Tree {
+ return prc.children
+}
+
+func (prc *BaseParserRuleContext) CopyFrom(ctx *BaseParserRuleContext) {
+ // from RuleContext
+ prc.parentCtx = ctx.parentCtx
+ prc.invokingState = ctx.invokingState
+ prc.children = nil
+ prc.start = ctx.start
+ prc.stop = ctx.stop
+}
+
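+// GetText returns the combined text of all child nodes. This method only
+// considers tokens which have been added to the parse tree.
+//
+// Since tokens on hidden channels (e.g. whitespace or comments) are not
+// added to the parse trees, they will not appear in the output of this
+// method.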
+func (prc *BaseParserRuleContext) GetText() string {
+ if prc.GetChildCount() == 0 {
+ return ""
+ }
+
+ var s string
+ for _, child := range prc.children {
+ s += child.(ParseTree).GetText()
+ }
+
+ return s
+}
+
+// EnterRule is called when any rule is entered.
+func (prc *BaseParserRuleContext) EnterRule(_ ParseTreeListener) {
+}
+
+// ExitRule is called when any rule is exited.
+func (prc *BaseParserRuleContext) ExitRule(_ ParseTreeListener) {
+}
+
+// addTerminalNodeChild does not set the parent link; other add methods do that.
+func (prc *BaseParserRuleContext) addTerminalNodeChild(child TerminalNode) TerminalNode {
+ if prc.children == nil {
+ prc.children = make([]Tree, 0)
+ }
+ if child == nil {
+ panic("Child may not be null")
+ }
+ prc.children = append(prc.children, child)
+ return child
+}
+
+func (prc *BaseParserRuleContext) AddChild(child RuleContext) RuleContext {
+ if prc.children == nil {
+ prc.children = make([]Tree, 0)
+ }
+ if child == nil {
+ panic("Child may not be null")
+ }
+ prc.children = append(prc.children, child)
+ return child
+}
+
+// RemoveLastChild is used by [EnterOuterAlt] to toss out a [RuleContext] previously added as
+// we entered a rule. If we have a label, we will need to remove
+// the generic ruleContext object.
+func (prc *BaseParserRuleContext) RemoveLastChild() {
+ if prc.children != nil && len(prc.children) > 0 {
+ prc.children = prc.children[0 : len(prc.children)-1]
+ }
+}
+
+func (prc *BaseParserRuleContext) AddTokenNode(token Token) *TerminalNodeImpl {
+
+ node := NewTerminalNodeImpl(token)
+ prc.addTerminalNodeChild(node)
+ node.parentCtx = prc
+ return node
+
+}
+
+func (prc *BaseParserRuleContext) AddErrorNode(badToken Token) *ErrorNodeImpl {
+ node := NewErrorNodeImpl(badToken)
+ prc.addTerminalNodeChild(node)
+ node.parentCtx = prc
+ return node
+}
+
+func (prc *BaseParserRuleContext) GetChild(i int) Tree {
+	if prc.children != nil && len(prc.children) > i {
+ return prc.children[i]
+ }
+
+ return nil
+}
+
+func (prc *BaseParserRuleContext) GetChildOfType(i int, childType reflect.Type) RuleContext {
+ if childType == nil {
+ return prc.GetChild(i).(RuleContext)
+ }
+
+ for j := 0; j < len(prc.children); j++ {
+ child := prc.children[j]
+ if reflect.TypeOf(child) == childType {
+ if i == 0 {
+ return child.(RuleContext)
+ }
+
+ i--
+ }
+ }
+
+ return nil
+}
+
+func (prc *BaseParserRuleContext) ToStringTree(ruleNames []string, recog Recognizer) string {
+ return TreesStringTree(prc, ruleNames, recog)
+}
+
+func (prc *BaseParserRuleContext) GetRuleContext() RuleContext {
+ return prc
+}
+
+func (prc *BaseParserRuleContext) Accept(visitor ParseTreeVisitor) interface{} {
+ return visitor.VisitChildren(prc)
+}
+
+func (prc *BaseParserRuleContext) SetStart(t Token) {
+ prc.start = t
+}
+
+func (prc *BaseParserRuleContext) GetStart() Token {
+ return prc.start
+}
+
+func (prc *BaseParserRuleContext) SetStop(t Token) {
+ prc.stop = t
+}
+
+func (prc *BaseParserRuleContext) GetStop() Token {
+ return prc.stop
+}
+
+func (prc *BaseParserRuleContext) GetToken(ttype int, i int) TerminalNode {
+
+ for j := 0; j < len(prc.children); j++ {
+ child := prc.children[j]
+ if c2, ok := child.(TerminalNode); ok {
+ if c2.GetSymbol().GetTokenType() == ttype {
+ if i == 0 {
+ return c2
+ }
+
+ i--
+ }
+ }
+ }
+ return nil
+}
+
+func (prc *BaseParserRuleContext) GetTokens(ttype int) []TerminalNode {
+ if prc.children == nil {
+ return make([]TerminalNode, 0)
+ }
+
+ tokens := make([]TerminalNode, 0)
+
+ for j := 0; j < len(prc.children); j++ {
+ child := prc.children[j]
+ if tchild, ok := child.(TerminalNode); ok {
+ if tchild.GetSymbol().GetTokenType() == ttype {
+ tokens = append(tokens, tchild)
+ }
+ }
+ }
+
+ return tokens
+}
+
+func (prc *BaseParserRuleContext) GetPayload() interface{} {
+ return prc
+}
+
+func (prc *BaseParserRuleContext) getChild(ctxType reflect.Type, i int) RuleContext {
+ if prc.children == nil || i < 0 || i >= len(prc.children) {
+ return nil
+ }
+
+ j := -1 // what element have we found with ctxType?
+ for _, o := range prc.children {
+
+ childType := reflect.TypeOf(o)
+
+ if childType.Implements(ctxType) {
+ j++
+ if j == i {
+ return o.(RuleContext)
+ }
+ }
+ }
+ return nil
+}
+
+// Go lacks generics, so it's not possible for us to return the child with the correct type, but we do
+// check for convertibility
+
+func (prc *BaseParserRuleContext) GetTypedRuleContext(ctxType reflect.Type, i int) RuleContext {
+ return prc.getChild(ctxType, i)
+}
+
+func (prc *BaseParserRuleContext) GetTypedRuleContexts(ctxType reflect.Type) []RuleContext {
+ if prc.children == nil {
+ return make([]RuleContext, 0)
+ }
+
+ contexts := make([]RuleContext, 0)
+
+ for _, child := range prc.children {
+ childType := reflect.TypeOf(child)
+
+ if childType.ConvertibleTo(ctxType) {
+ contexts = append(contexts, child.(RuleContext))
+ }
+ }
+ return contexts
+}
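+
+// Hedged usage sketch (editor's addition; IExpressionContext is a hypothetical
+// generated context interface): generated parsers call these helpers with a
+// reflected interface type, e.g.
+//
+//	t := reflect.TypeOf((*IExpressionContext)(nil)).Elem()
+//	first := prc.GetTypedRuleContext(t, 0) // first child implementing t
+//	all := prc.GetTypedRuleContexts(t)     // every child convertible to t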
+
+func (prc *BaseParserRuleContext) GetChildCount() int {
+ if prc.children == nil {
+ return 0
+ }
+
+ return len(prc.children)
+}
+
+func (prc *BaseParserRuleContext) GetSourceInterval() Interval {
+ if prc.start == nil || prc.stop == nil {
+ return TreeInvalidInterval
+ }
+
+ return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
+}
+
+// need to manage circular dependencies, so export now
+
+// String prints out a whole tree, not just a node, in LISP format
+// (root child1 .. childN). It prints just a node if this context is a leaf.
+
+func (prc *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) string {
+
+ var p ParserRuleContext = prc
+ s := "["
+ for p != nil && p != stop {
+ if ruleNames == nil {
+ if !p.IsEmpty() {
+ s += strconv.Itoa(p.GetInvokingState())
+ }
+ } else {
+ ri := p.GetRuleIndex()
+ var ruleName string
+ if ri >= 0 && ri < len(ruleNames) {
+ ruleName = ruleNames[ri]
+ } else {
+ ruleName = strconv.Itoa(ri)
+ }
+ s += ruleName
+ }
+ if p.GetParent() != nil && (ruleNames != nil || !p.GetParent().(ParserRuleContext).IsEmpty()) {
+ s += " "
+ }
+ pi := p.GetParent()
+ if pi != nil {
+ p = pi.(ParserRuleContext)
+ } else {
+ p = nil
+ }
+ }
+ s += "]"
+ return s
+}
+
+func (prc *BaseParserRuleContext) SetParent(v Tree) {
+ if v == nil {
+ prc.parentCtx = nil
+ } else {
+ prc.parentCtx = v.(RuleContext)
+ }
+}
+
+func (prc *BaseParserRuleContext) GetInvokingState() int {
+ return prc.invokingState
+}
+
+func (prc *BaseParserRuleContext) SetInvokingState(t int) {
+ prc.invokingState = t
+}
+
+func (prc *BaseParserRuleContext) GetRuleIndex() int {
+ return prc.RuleIndex
+}
+
+func (prc *BaseParserRuleContext) GetAltNumber() int {
+ return ATNInvalidAltNumber
+}
+
+func (prc *BaseParserRuleContext) SetAltNumber(_ int) {}
+
+// IsEmpty returns true if this context is empty.
+//
+// A context is empty if there is no invoking state, meaning nobody called the
+// current context.
+func (prc *BaseParserRuleContext) IsEmpty() bool {
+ return prc.invokingState == -1
+}
+
+// GetParent returns the parent context of this context, or nil if this
+// context has no parent (i.e. it is the root of the context tree).
+func (prc *BaseParserRuleContext) GetParent() Tree {
+ return prc.parentCtx
+}
+
+var ParserRuleContextEmpty = NewBaseParserRuleContext(nil, -1)
+
+type InterpreterRuleContext interface {
+ ParserRuleContext
+}
+
+type BaseInterpreterRuleContext struct {
+ *BaseParserRuleContext
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func NewBaseInterpreterRuleContext(parent BaseInterpreterRuleContext, invokingStateNumber, ruleIndex int) *BaseInterpreterRuleContext {
+
+ prc := new(BaseInterpreterRuleContext)
+
+ prc.BaseParserRuleContext = NewBaseParserRuleContext(parent, invokingStateNumber)
+
+ prc.RuleIndex = ruleIndex
+
+ return prc
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/prediction_context.go b/vendor/github.com/antlr4-go/antlr/v4/prediction_context.go
new file mode 100644
index 0000000..c1b80cc
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/prediction_context.go
@@ -0,0 +1,727 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "golang.org/x/exp/slices"
+ "strconv"
+)
+
+var _emptyPredictionContextHash int
+
+func init() {
+ _emptyPredictionContextHash = murmurInit(1)
+ _emptyPredictionContextHash = murmurFinish(_emptyPredictionContextHash, 0)
+}
+
+func calculateEmptyHash() int {
+ return _emptyPredictionContextHash
+}
+
+const (
+	// BasePredictionContextEmptyReturnState represents $ in an array in full context mode, where $
+	// doesn't mean wildcard:
+ //
+ // $ + x = [$,x]
+ //
+ // Here,
+ //
+ // $ = EmptyReturnState
+ BasePredictionContextEmptyReturnState = 0x7FFFFFFF
+)
+
+// TODO: JI These are meant to be atomics - this does not seem to match the Java runtime here
+//
+//goland:noinspection GoUnusedGlobalVariable
+var (
+ BasePredictionContextglobalNodeCount = 1
+ BasePredictionContextid = BasePredictionContextglobalNodeCount
+)
+
+const (
+ PredictionContextEmpty = iota
+ PredictionContextSingleton
+ PredictionContextArray
+)
+
+// PredictionContext is a Go-idiomatic implementation of PredictionContext that does not try to
+// emulate inheritance from Java, and can be used without an interface definition. An interface
+// is not required because no user code will ever need to implement this interface.
+type PredictionContext struct {
+ cachedHash int
+ pcType int
+ parentCtx *PredictionContext
+ returnState int
+ parents []*PredictionContext
+ returnStates []int
+}
+
+func NewEmptyPredictionContext() *PredictionContext {
+ nep := &PredictionContext{}
+ nep.cachedHash = calculateEmptyHash()
+ nep.pcType = PredictionContextEmpty
+ nep.returnState = BasePredictionContextEmptyReturnState
+ return nep
+}
+
+func NewBaseSingletonPredictionContext(parent *PredictionContext, returnState int) *PredictionContext {
+ pc := &PredictionContext{}
+ pc.pcType = PredictionContextSingleton
+ pc.returnState = returnState
+ pc.parentCtx = parent
+ if parent != nil {
+ pc.cachedHash = calculateHash(parent, returnState)
+ } else {
+ pc.cachedHash = calculateEmptyHash()
+ }
+ return pc
+}
+
+func SingletonBasePredictionContextCreate(parent *PredictionContext, returnState int) *PredictionContext {
+ if returnState == BasePredictionContextEmptyReturnState && parent == nil {
+ // someone can pass in the bits of an array ctx that mean $
+ return BasePredictionContextEMPTY
+ }
+ return NewBaseSingletonPredictionContext(parent, returnState)
+}
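+
+// Minimal sketch (editor's addition): building the context for "return to
+// state 7, with nothing above it on the stack":
+//
+//	ctx := SingletonBasePredictionContextCreate(BasePredictionContextEMPTY, 7)
+//	// ctx.getReturnState(0) == 7; ctx.GetParent(0) is $ (empty)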
+
+func NewArrayPredictionContext(parents []*PredictionContext, returnStates []int) *PredictionContext {
+	// Parent can be nil only if full ctx mode and we make an array
+	// from EMPTY and non-empty. We merge EMPTY by using a nil parent and
+	// returnState == BasePredictionContextEmptyReturnState.
+ hash := murmurInit(1)
+ for _, parent := range parents {
+ hash = murmurUpdate(hash, parent.Hash())
+ }
+ for _, returnState := range returnStates {
+ hash = murmurUpdate(hash, returnState)
+ }
+ hash = murmurFinish(hash, len(parents)<<1)
+
+ nec := &PredictionContext{}
+ nec.cachedHash = hash
+ nec.pcType = PredictionContextArray
+ nec.parents = parents
+ nec.returnStates = returnStates
+ return nec
+}
+
+func (p *PredictionContext) Hash() int {
+ return p.cachedHash
+}
+
+func (p *PredictionContext) Equals(other Collectable[*PredictionContext]) bool {
+ switch p.pcType {
+	case PredictionContextEmpty:
+		// The comma-ok assertion prevents a panic when other is nil or a
+		// foreign type; both compare equal to the empty context.
+		otherP, ok := other.(*PredictionContext)
+		return !ok || otherP == nil || otherP.isEmpty()
+ case PredictionContextSingleton:
+ return p.SingletonEquals(other)
+ case PredictionContextArray:
+ return p.ArrayEquals(other)
+ }
+ return false
+}
+
+func (p *PredictionContext) ArrayEquals(o Collectable[*PredictionContext]) bool {
+ if o == nil {
+ return false
+ }
+ other := o.(*PredictionContext)
+ if other == nil || other.pcType != PredictionContextArray {
+ return false
+ }
+ if p.cachedHash != other.Hash() {
+ return false // can't be same if hash is different
+ }
+
+ // Must compare the actual array elements and not just the array address
+ //
+ return slices.Equal(p.returnStates, other.returnStates) &&
+ slices.EqualFunc(p.parents, other.parents, func(x, y *PredictionContext) bool {
+ return x.Equals(y)
+ })
+}
+
+func (p *PredictionContext) SingletonEquals(other Collectable[*PredictionContext]) bool {
+ if other == nil {
+ return false
+ }
+ otherP := other.(*PredictionContext)
+ if otherP == nil {
+ return false
+ }
+
+ if p.cachedHash != otherP.Hash() {
+ return false // Can't be same if hash is different
+ }
+
+ if p.returnState != otherP.getReturnState(0) {
+ return false
+ }
+
+ // Both parents must be nil if one is
+ if p.parentCtx == nil {
+ return otherP.parentCtx == nil
+ }
+
+ return p.parentCtx.Equals(otherP.parentCtx)
+}
+
+func (p *PredictionContext) GetParent(i int) *PredictionContext {
+ switch p.pcType {
+ case PredictionContextEmpty:
+ return nil
+ case PredictionContextSingleton:
+ return p.parentCtx
+ case PredictionContextArray:
+ return p.parents[i]
+ }
+ return nil
+}
+
+func (p *PredictionContext) getReturnState(i int) int {
+ switch p.pcType {
+ case PredictionContextArray:
+ return p.returnStates[i]
+ default:
+ return p.returnState
+ }
+}
+
+func (p *PredictionContext) GetReturnStates() []int {
+ switch p.pcType {
+ case PredictionContextArray:
+ return p.returnStates
+ default:
+ return []int{p.returnState}
+ }
+}
+
+func (p *PredictionContext) length() int {
+ switch p.pcType {
+ case PredictionContextArray:
+ return len(p.returnStates)
+ default:
+ return 1
+ }
+}
+
+func (p *PredictionContext) hasEmptyPath() bool {
+ switch p.pcType {
+ case PredictionContextSingleton:
+ return p.returnState == BasePredictionContextEmptyReturnState
+ }
+ return p.getReturnState(p.length()-1) == BasePredictionContextEmptyReturnState
+}
+
+func (p *PredictionContext) String() string {
+ switch p.pcType {
+ case PredictionContextEmpty:
+ return "$"
+ case PredictionContextSingleton:
+ var up string
+
+ if p.parentCtx == nil {
+ up = ""
+ } else {
+ up = p.parentCtx.String()
+ }
+
+ if len(up) == 0 {
+ if p.returnState == BasePredictionContextEmptyReturnState {
+ return "$"
+ }
+
+ return strconv.Itoa(p.returnState)
+ }
+
+ return strconv.Itoa(p.returnState) + " " + up
+ case PredictionContextArray:
+ if p.isEmpty() {
+ return "[]"
+ }
+
+ s := "["
+ for i := 0; i < len(p.returnStates); i++ {
+ if i > 0 {
+ s = s + ", "
+ }
+ if p.returnStates[i] == BasePredictionContextEmptyReturnState {
+ s = s + "$"
+ continue
+ }
+ s = s + strconv.Itoa(p.returnStates[i])
+ if !p.parents[i].isEmpty() {
+ s = s + " " + p.parents[i].String()
+ } else {
+ s = s + "nil"
+ }
+ }
+ return s + "]"
+
+ default:
+ return "unknown"
+ }
+}
+
+func (p *PredictionContext) isEmpty() bool {
+ switch p.pcType {
+ case PredictionContextEmpty:
+ return true
+ case PredictionContextArray:
+ // since EmptyReturnState can only appear in the last position, we
+ // don't need to verify that size==1
+ return p.returnStates[0] == BasePredictionContextEmptyReturnState
+ default:
+ return false
+ }
+}
+
+func (p *PredictionContext) Type() int {
+ return p.pcType
+}
+
+func calculateHash(parent *PredictionContext, returnState int) int {
+ h := murmurInit(1)
+ h = murmurUpdate(h, parent.Hash())
+ h = murmurUpdate(h, returnState)
+ return murmurFinish(h, 2)
+}
+
+// predictionContextFromRuleContext converts a [RuleContext] tree to a [PredictionContext] graph.
+// It returns [BasePredictionContextEMPTY] if outerContext is empty or nil.
+func predictionContextFromRuleContext(a *ATN, outerContext RuleContext) *PredictionContext {
+ if outerContext == nil {
+ outerContext = ParserRuleContextEmpty
+ }
+ // if we are in RuleContext of start rule, s, then BasePredictionContext
+ // is EMPTY. Nobody called us. (if we are empty, return empty)
+ if outerContext.GetParent() == nil || outerContext == ParserRuleContextEmpty {
+ return BasePredictionContextEMPTY
+ }
+ // If we have a parent, convert it to a BasePredictionContext graph
+ parent := predictionContextFromRuleContext(a, outerContext.GetParent().(RuleContext))
+ state := a.states[outerContext.GetInvokingState()]
+ transition := state.GetTransitions()[0]
+
+ return SingletonBasePredictionContextCreate(parent, transition.(*RuleTransition).followState.GetStateNumber())
+}
+
+func merge(a, b *PredictionContext, rootIsWildcard bool, mergeCache *JPCMap) *PredictionContext {
+
+ // Share same graph if both same
+ //
+ if a == b || a.Equals(b) {
+ return a
+ }
+
+ if a.pcType == PredictionContextSingleton && b.pcType == PredictionContextSingleton {
+ return mergeSingletons(a, b, rootIsWildcard, mergeCache)
+ }
+ // At least one of a or b is array
+ // If one is $ and rootIsWildcard, return $ as wildcard
+ if rootIsWildcard {
+ if a.isEmpty() {
+ return a
+ }
+ if b.isEmpty() {
+ return b
+ }
+ }
+
+ // Convert either Singleton or Empty to arrays, so that we can merge them
+ //
+ ara := convertToArray(a)
+ arb := convertToArray(b)
+ return mergeArrays(ara, arb, rootIsWildcard, mergeCache)
+}
+
+func convertToArray(pc *PredictionContext) *PredictionContext {
+ switch pc.Type() {
+ case PredictionContextEmpty:
+ return NewArrayPredictionContext([]*PredictionContext{}, []int{})
+ case PredictionContextSingleton:
+ return NewArrayPredictionContext([]*PredictionContext{pc.GetParent(0)}, []int{pc.getReturnState(0)})
+ default:
+ // Already an array
+ }
+ return pc
+}
+
+// mergeSingletons merges two Singleton [PredictionContext] instances.
+//
+// The original Javadoc illustrated each case with a diagram; the diagrams are
+// omitted here, but the cases are:
+//
+//   - Stack tops equal, parents merge is same: return left graph.
+//   - Same stack top, parents differ: merge parents giving array node, then
+//     remainders of those graphs. A new root node is created to point to the
+//     merged parents.
+//   - Different stack tops pointing to same parent: make array node for the
+//     root where both elements in the root point to the same (original) parent.
+//   - Different stack tops pointing to different parents: make array node for
+//     the root where each element points to the corresponding original parent.
+//
+// The parameter a is the first Singleton [PredictionContext] and b is the
+// second. rootIsWildcard is true if this is a local-context merge, otherwise
+// false to indicate a full-context merge.
+func mergeSingletons(a, b *PredictionContext, rootIsWildcard bool, mergeCache *JPCMap) *PredictionContext {
+ if mergeCache != nil {
+ previous, present := mergeCache.Get(a, b)
+ if present {
+ return previous
+ }
+ previous, present = mergeCache.Get(b, a)
+ if present {
+ return previous
+ }
+ }
+
+ rootMerge := mergeRoot(a, b, rootIsWildcard)
+ if rootMerge != nil {
+ if mergeCache != nil {
+ mergeCache.Put(a, b, rootMerge)
+ }
+ return rootMerge
+ }
+ if a.returnState == b.returnState {
+ parent := merge(a.parentCtx, b.parentCtx, rootIsWildcard, mergeCache)
+ // if parent is same as existing a or b parent or reduced to a parent,
+ // return it
+ if parent.Equals(a.parentCtx) {
+ return a // ax + bx = ax, if a=b
+ }
+ if parent.Equals(b.parentCtx) {
+ return b // ax + bx = bx, if a=b
+ }
+ // else: ax + ay = a'[x,y]
+ // merge parents x and y, giving array node with x,y then remainders
+ // of those graphs. dup a, a' points at merged array.
+ // New joined parent so create a new singleton pointing to it, a'
+ spc := SingletonBasePredictionContextCreate(parent, a.returnState)
+ if mergeCache != nil {
+ mergeCache.Put(a, b, spc)
+ }
+ return spc
+ }
+ // a != b payloads differ
+ // see if we can collapse parents due to $+x parents if local ctx
+ var singleParent *PredictionContext
+	if a.Equals(b) || (a.parentCtx != nil && a.parentCtx.Equals(b.parentCtx)) { // ax + bx = [a,b]x
+ singleParent = a.parentCtx
+ }
+ if singleParent != nil { // parents are same
+ // sort payloads and use same parent
+ payloads := []int{a.returnState, b.returnState}
+ if a.returnState > b.returnState {
+ payloads[0] = b.returnState
+ payloads[1] = a.returnState
+ }
+ parents := []*PredictionContext{singleParent, singleParent}
+ apc := NewArrayPredictionContext(parents, payloads)
+ if mergeCache != nil {
+ mergeCache.Put(a, b, apc)
+ }
+ return apc
+ }
+ // parents differ and can't merge them. Just pack together
+ // into array can't merge.
+ // ax + by = [ax,by]
+ payloads := []int{a.returnState, b.returnState}
+ parents := []*PredictionContext{a.parentCtx, b.parentCtx}
+ if a.returnState > b.returnState { // sort by payload
+ payloads[0] = b.returnState
+ payloads[1] = a.returnState
+ parents = []*PredictionContext{b.parentCtx, a.parentCtx}
+ }
+ apc := NewArrayPredictionContext(parents, payloads)
+ if mergeCache != nil {
+ mergeCache.Put(a, b, apc)
+ }
+ return apc
+}
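+
+// Worked micro-example (editor's sketch): merging ax and ay, where the return
+// state is shared but the parents x and y differ, yields a new singleton whose
+// parent is the merged array [x,y] (the ax + ay = a'[x,y] case above):
+//
+//	x := SingletonBasePredictionContextCreate(BasePredictionContextEMPTY, 3)
+//	y := SingletonBasePredictionContextCreate(BasePredictionContextEMPTY, 5)
+//	ax := SingletonBasePredictionContextCreate(x, 9)
+//	ay := SingletonBasePredictionContextCreate(y, 9)
+//	m := mergeSingletons(ax, ay, true, nil) // singleton 9 over array parent [3,5]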
+
+// mergeRoot handles the case where at least one of a or b is
+// [BasePredictionContextEMPTY]. In the following descriptions, the symbol $
+// is used to represent [BasePredictionContextEMPTY]; the original Javadoc
+// diagrams are omitted.
+//
+// # Local-Context Merges
+//
+// These local-context merge operations are used when rootIsWildcard is true:
+//
+//   - $ is a superset of any graph: return $.
+//   - $ and anything is $, so the merged parent is $: return left graph.
+//   - Special case of last merge if local context.
+//
+// # Full-Context Merges
+//
+// These full-context merge operations are used when rootIsWildcard is false:
+//
+//   - Must keep all contexts; $ in an array is a special value (with a nil
+//     parent).
+//
+// The parameter a is the first Singleton [PredictionContext] and b is the
+// second. rootIsWildcard is true if this is a local-context merge, otherwise
+// false to indicate a full-context merge.
+func mergeRoot(a, b *PredictionContext, rootIsWildcard bool) *PredictionContext {
+ if rootIsWildcard {
+ if a.pcType == PredictionContextEmpty {
+ return BasePredictionContextEMPTY // // + b =//
+ }
+ if b.pcType == PredictionContextEmpty {
+ return BasePredictionContextEMPTY // a +// =//
+ }
+ } else {
+ if a.isEmpty() && b.isEmpty() {
+ return BasePredictionContextEMPTY // $ + $ = $
+ } else if a.isEmpty() { // $ + x = [$,x]
+ payloads := []int{b.getReturnState(-1), BasePredictionContextEmptyReturnState}
+ parents := []*PredictionContext{b.GetParent(-1), nil}
+ return NewArrayPredictionContext(parents, payloads)
+ } else if b.isEmpty() { // x + $ = [$,x] ($ is always first if present)
+ payloads := []int{a.getReturnState(-1), BasePredictionContextEmptyReturnState}
+ parents := []*PredictionContext{a.GetParent(-1), nil}
+ return NewArrayPredictionContext(parents, payloads)
+ }
+ }
+ return nil
+}
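+
+// Worked micro-example (editor's sketch): in full-context mode, merging $ with
+// a non-empty singleton yields an array in which $ is the special value with a
+// nil parent:
+//
+//	x := SingletonBasePredictionContextCreate(BasePredictionContextEMPTY, 4)
+//	m := mergeRoot(BasePredictionContextEMPTY, x, false)
+//	// m.GetReturnStates() == [4, BasePredictionContextEmptyReturnState]
+//	// m's parents are [$, nil]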
+
+// mergeArrays merges two Array [PredictionContext] instances.
+//
+// The original Javadoc illustrated each case with a diagram; the diagrams are
+// omitted here, but the cases are:
+//
+//   - Different tops, different parents.
+//   - Shared top, same parents.
+//   - Shared top, different parents.
+//   - Shared top, all shared parents.
+//   - Equal tops, merge parents and reduce top to a Singleton
+//     [PredictionContext].
+//
+//goland:noinspection GoBoolExpressions
+func mergeArrays(a, b *PredictionContext, rootIsWildcard bool, mergeCache *JPCMap) *PredictionContext {
+ if mergeCache != nil {
+ previous, present := mergeCache.Get(a, b)
+ if present {
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> previous")
+ }
+ return previous
+ }
+ previous, present = mergeCache.Get(b, a)
+ if present {
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> previous")
+ }
+ return previous
+ }
+ }
+ // merge sorted payloads a + b => M
+ i := 0 // walks a
+ j := 0 // walks b
+ k := 0 // walks target M array
+
+ mergedReturnStates := make([]int, len(a.returnStates)+len(b.returnStates))
+ mergedParents := make([]*PredictionContext, len(a.returnStates)+len(b.returnStates))
+ // walk and merge to yield mergedParents, mergedReturnStates
+ for i < len(a.returnStates) && j < len(b.returnStates) {
+ aParent := a.parents[i]
+ bParent := b.parents[j]
+ if a.returnStates[i] == b.returnStates[j] {
+ // same payload (stack tops are equal), must yield merged singleton
+ payload := a.returnStates[i]
+ // $+$ = $
+ bothDollars := payload == BasePredictionContextEmptyReturnState && aParent == nil && bParent == nil
+		axAX := aParent != nil && bParent != nil && aParent.Equals(bParent) // ax+ax -> ax
+ if bothDollars || axAX {
+ mergedParents[k] = aParent // choose left
+ mergedReturnStates[k] = payload
+ } else { // ax+ay -> a'[x,y]
+ mergedParent := merge(aParent, bParent, rootIsWildcard, mergeCache)
+ mergedParents[k] = mergedParent
+ mergedReturnStates[k] = payload
+ }
+ i++ // hop over left one as usual
+ j++ // but also Skip one in right side since we merge
+ } else if a.returnStates[i] < b.returnStates[j] { // copy a[i] to M
+ mergedParents[k] = aParent
+ mergedReturnStates[k] = a.returnStates[i]
+ i++
+ } else { // b > a, copy b[j] to M
+ mergedParents[k] = bParent
+ mergedReturnStates[k] = b.returnStates[j]
+ j++
+ }
+ k++
+ }
+ // copy over any payloads remaining in either array
+ if i < len(a.returnStates) {
+ for p := i; p < len(a.returnStates); p++ {
+ mergedParents[k] = a.parents[p]
+ mergedReturnStates[k] = a.returnStates[p]
+ k++
+ }
+ } else {
+ for p := j; p < len(b.returnStates); p++ {
+ mergedParents[k] = b.parents[p]
+ mergedReturnStates[k] = b.returnStates[p]
+ k++
+ }
+ }
+ // trim merged if we combined a few that had same stack tops
+ if k < len(mergedParents) { // write index < last position trim
+ if k == 1 { // for just one merged element, return singleton top
+ pc := SingletonBasePredictionContextCreate(mergedParents[0], mergedReturnStates[0])
+ if mergeCache != nil {
+ mergeCache.Put(a, b, pc)
+ }
+ return pc
+ }
+ mergedParents = mergedParents[0:k]
+ mergedReturnStates = mergedReturnStates[0:k]
+ }
+
+ M := NewArrayPredictionContext(mergedParents, mergedReturnStates)
+
+ // if we created same array as a or b, return that instead
+ // TODO: JI track whether this is possible above during merge sort for speed and possibly avoid an allocation
+ if M.Equals(a) {
+ if mergeCache != nil {
+ mergeCache.Put(a, b, a)
+ }
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> a")
+ }
+ return a
+ }
+ if M.Equals(b) {
+ if mergeCache != nil {
+ mergeCache.Put(a, b, b)
+ }
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> b")
+ }
+ return b
+ }
+ combineCommonParents(&mergedParents)
+
+ if mergeCache != nil {
+ mergeCache.Put(a, b, M)
+ }
+ if runtimeConfig.parserATNSimulatorTraceATNSim {
+ fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> " + M.String())
+ }
+ return M
+}
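+
+// Worked micro-example (editor's sketch): merging the sorted return-state
+// arrays [3,5] and [4] (all parents $) walks both like a merge sort and yields
+// the combined array [3,4,5]:
+//
+//	e := BasePredictionContextEMPTY
+//	a := NewArrayPredictionContext([]*PredictionContext{e, e}, []int{3, 5})
+//	b := NewArrayPredictionContext([]*PredictionContext{e}, []int{4})
+//	m := mergeArrays(a, b, true, nil) // m.GetReturnStates() == [3, 4, 5]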
+
+// combineCommonParents makes a pass over all M parents and merges any that are Equals().
+// Note that we pass a pointer to the slice as we want to modify it in place.
+//
+//goland:noinspection GoUnusedFunction
+func combineCommonParents(parents *[]*PredictionContext) {
+ uniqueParents := NewJStore[*PredictionContext, Comparator[*PredictionContext]](pContextEqInst, PredictionContextCollection, "combineCommonParents for PredictionContext")
+
+ for p := 0; p < len(*parents); p++ {
+ parent := (*parents)[p]
+ _, _ = uniqueParents.Put(parent)
+ }
+ for q := 0; q < len(*parents); q++ {
+ pc, _ := uniqueParents.Get((*parents)[q])
+ (*parents)[q] = pc
+ }
+}
+
+func getCachedBasePredictionContext(context *PredictionContext, contextCache *PredictionContextCache, visited *VisitRecord) *PredictionContext {
+ if context.isEmpty() {
+ return context
+ }
+ existing, present := visited.Get(context)
+ if present {
+ return existing
+ }
+
+ existing, present = contextCache.Get(context)
+ if present {
+ visited.Put(context, existing)
+ return existing
+ }
+ changed := false
+ parents := make([]*PredictionContext, context.length())
+ for i := 0; i < len(parents); i++ {
+ parent := getCachedBasePredictionContext(context.GetParent(i), contextCache, visited)
+ if changed || !parent.Equals(context.GetParent(i)) {
+ if !changed {
+ parents = make([]*PredictionContext, context.length())
+ for j := 0; j < context.length(); j++ {
+ parents[j] = context.GetParent(j)
+ }
+ changed = true
+ }
+ parents[i] = parent
+ }
+ }
+ if !changed {
+ contextCache.add(context)
+ visited.Put(context, context)
+ return context
+ }
+ var updated *PredictionContext
+ if len(parents) == 0 {
+ updated = BasePredictionContextEMPTY
+ } else if len(parents) == 1 {
+ updated = SingletonBasePredictionContextCreate(parents[0], context.getReturnState(0))
+ } else {
+ updated = NewArrayPredictionContext(parents, context.GetReturnStates())
+ }
+ contextCache.add(updated)
+ visited.Put(updated, updated)
+ visited.Put(context, updated)
+
+ return updated
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go b/vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go
new file mode 100644
index 0000000..25dfb11
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go
@@ -0,0 +1,48 @@
+package antlr
+
+var BasePredictionContextEMPTY = &PredictionContext{
+ cachedHash: calculateEmptyHash(),
+ pcType: PredictionContextEmpty,
+ returnState: BasePredictionContextEmptyReturnState,
+}
+
+// PredictionContextCache is used to cache [PredictionContext] objects. It is used for the shared
+// context cache associated with contexts in DFA states. This cache
+// can be used for both lexers and parsers.
+type PredictionContextCache struct {
+ cache *JMap[*PredictionContext, *PredictionContext, Comparator[*PredictionContext]]
+}
+
+func NewPredictionContextCache() *PredictionContextCache {
+ return &PredictionContextCache{
+ cache: NewJMap[*PredictionContext, *PredictionContext, Comparator[*PredictionContext]](pContextEqInst, PredictionContextCacheCollection, "NewPredictionContextCache()"),
+ }
+}
+
+// Add a context to the cache and return it. If the context already exists,
+// return that one instead and do not add a new context to the cache.
+// Protect shared cache from unsafe thread access.
+func (p *PredictionContextCache) add(ctx *PredictionContext) *PredictionContext {
+ if ctx.isEmpty() {
+ return BasePredictionContextEMPTY
+ }
+
+ // Put will return the existing entry if it is present (note this is done via Equals, not whether it is
+ // the same pointer), otherwise it will add the new entry and return that.
+ //
+ existing, present := p.cache.Get(ctx)
+ if present {
+ return existing
+ }
+ p.cache.Put(ctx, ctx)
+ return ctx
+}
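+
+// Usage sketch (editor's addition; ctx stands for any *PredictionContext the
+// caller has built): always keep the returned pointer, which is the canonical
+// cached instance:
+//
+//	cache := NewPredictionContextCache()
+//	canonical := cache.add(ctx) // returns an existing equal context if one is cached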
+
+func (p *PredictionContextCache) Get(ctx *PredictionContext) (*PredictionContext, bool) {
+ pc, exists := p.cache.Get(ctx)
+ return pc, exists
+}
+
+func (p *PredictionContextCache) length() int {
+ return p.cache.Len()
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go b/vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go
new file mode 100644
index 0000000..3f85a6a
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go
@@ -0,0 +1,536 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+// This enumeration defines the prediction modes available in ANTLR 4 along with
+// utility methods for analyzing configuration sets for conflicts and/or
+// ambiguities.
+
+const (
+ // PredictionModeSLL represents the SLL(*) prediction mode.
+ // This prediction mode ignores the current
+ // parser context when making predictions. This is the fastest prediction
+ // mode, and provides correct results for many grammars. This prediction
+ // mode is more powerful than the prediction mode provided by ANTLR 3, but
+ // may result in syntax errors for grammar and input combinations which are
+ // not SLL.
+ //
+ // When using this prediction mode, the parser will either return a correct
+ // parse tree (i.e. the same parse tree that would be returned with the
+ // [PredictionModeLL] prediction mode), or it will Report a syntax error. If a
+ // syntax error is encountered when using the SLL prediction mode,
+ // it may be due to either an actual syntax error in the input or indicate
+ // that the particular combination of grammar and input requires the more
+ // powerful LL prediction abilities to complete successfully.
+ //
+ // This prediction mode does not provide any guarantees for prediction
+ // behavior for syntactically-incorrect inputs.
+ //
+ PredictionModeSLL = 0
+
+ // PredictionModeLL represents the LL(*) prediction mode.
+ // This prediction mode allows the current parser
+ // context to be used for resolving SLL conflicts that occur during
+ // prediction. This is the fastest prediction mode that guarantees correct
+ // parse results for all combinations of grammars with syntactically correct
+ // inputs.
+ //
+ // When using this prediction mode, the parser will make correct decisions
+ // for all syntactically-correct grammar and input combinations. However, in
+ // cases where the grammar is truly ambiguous this prediction mode might not
+ // report a precise answer for exactly which alternatives are
+ // ambiguous.
+ //
+ // This prediction mode does not provide any guarantees for prediction
+ // behavior for syntactically-incorrect inputs.
+ //
+ PredictionModeLL = 1
+
+ // PredictionModeLLExactAmbigDetection represents the LL(*) prediction mode
+ // with exact ambiguity detection.
+ //
+ // In addition to the correctness guarantees provided by the [PredictionModeLL] prediction mode,
+ // this prediction mode instructs the prediction algorithm to determine the
+ // complete and exact set of ambiguous alternatives for every ambiguous
+ // decision encountered while parsing.
+ //
+ // This prediction mode may be used for diagnosing ambiguities during
+ // grammar development. Due to the performance overhead of calculating sets
+ // of ambiguous alternatives, this prediction mode should be avoided when
+ // the exact results are not necessary.
+ //
+ // This prediction mode does not provide any guarantees for prediction
+ // behavior for syntactically-incorrect inputs.
+ //
+ PredictionModeLLExactAmbigDetection = 2
+)
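+
+// Hedged usage sketch (editor's addition; the parser value is assumed to be an
+// ANTLR-generated parser): a common two-stage strategy is to try the fast SLL
+// mode first and fall back to LL only when SLL reports a syntax error:
+//
+//	parser.GetInterpreter().SetPredictionMode(PredictionModeSLL)
+//	// ... if parsing fails, rewind the token stream and retry with:
+//	parser.GetInterpreter().SetPredictionMode(PredictionModeLL)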
+
+// PredictionModehasSLLConflictTerminatingPrediction computes the SLL prediction termination condition.
+//
+// This method computes the SLL prediction termination condition for both of
+// the following cases:
+//
+// - The usual SLL+LL fallback upon SLL conflict
+// - Pure SLL without LL fallback
+//
+// # Combined SLL+LL Parsing
+//
+// When LL-fallback is enabled upon SLL conflict, correct predictions are
+// ensured regardless of how the termination condition is computed by this
+// method. Due to the substantially higher cost of LL prediction, the
+// prediction should only fall back to LL when the additional lookahead
+// cannot lead to a unique SLL prediction.
+//
+// Assuming combined SLL+LL parsing, an SLL configuration set with only
+// conflicting subsets should fall back to full LL, even if the
+// configuration sets don't resolve to the same alternative, e.g.
+//
+// {1,2} and {3,4}
+//
+// If there is at least one non-conflicting
+// configuration, SLL could continue with the hopes that more lookahead will
+// resolve via one of those non-conflicting configurations.
+//
+// Here's the prediction termination rule then: SLL (for SLL+LL parsing)
+// stops when it sees only conflicting configuration subsets. In contrast,
+// full LL keeps going when there is uncertainty.
+//
+// # Heuristic
+//
+// As a heuristic, we stop prediction when we see any conflicting subset
+// unless we see a state that only has one alternative associated with it.
+// The single-alt-state thing lets prediction continue upon rules like
+// (otherwise, it would admit defeat too soon):
+//
+//	s : (ID | ID ID?) ';' ;
+//
+// When the [ATN] simulation reaches the state before ';', it has a
+// [DFA] state that looks like:
+//
+// [12|1|[], 6|2|[], 12|2|[]]
+//
+// Naturally
+//
+// 12|1|[] and 12|2|[]
+//
+// conflict, but we cannot stop processing this node because alternative two has another way to continue,
+// via
+//
+// [6|2|[]]
+//
+// It also lets us continue for this rule:
+//
+// [1|1|[], 1|2|[], 8|3|[]] a : A | A | A B ;
+//
+// After matching input A, we reach the stop state for rule A, state 1.
+// State 8 is the state immediately before B. Clearly alternatives 1 and 2
+// conflict and no amount of further lookahead will separate the two.
+// However, alternative 3 will be able to continue, and so we do not stop
+// working on this state. In the previous example, we're concerned with
+// states associated with the conflicting alternatives. Here alt 3 is not
+// associated with the conflicting configs, but since we can continue
+// looking for input reasonably, don't declare the state done.
+//
+// # Pure SLL Parsing
+//
+// To handle pure SLL parsing, all we have to do is make sure that we
+// combine stack contexts for configurations that differ only by semantic
+// predicate. From there, we can do the usual SLL termination heuristic.
+//
+// # Predicates in SLL+LL Parsing
+//
+// SLL decisions don't evaluate predicates until after they reach [DFA] stop
+// states because they need to create the [DFA] cache that works in all
+// semantic situations. In contrast, full LL evaluates predicates collected
+// during start state computation, so it can ignore predicates thereafter.
+// This means that SLL termination detection can totally ignore semantic
+// predicates.
+//
+// Implementation-wise, [ATNConfigSet] combines stack contexts but not
+// semantic predicate contexts, so we might see two configurations like the
+// following:
+//
+// (s, 1, x, {}), (s, 1, x', {p})
+//
+// Before testing these configurations against others, we have to merge
+// x and x' (without modifying the existing configurations).
+// For example, we test (x+x')==x” when looking for conflicts in
+// the following configurations:
+//
+// (s, 1, x, {}), (s, 1, x', {p}), (s, 2, x”, {})
+//
+// If the configuration set has predicates (as indicated by
+// [ATNConfigSet.hasSemanticContext]), this algorithm makes a copy of
+// the configurations to strip out all the predicates so that a standard
+// [ATNConfigSet] will merge everything ignoring predicates.
+func PredictionModehasSLLConflictTerminatingPrediction(mode int, configs *ATNConfigSet) bool {
+
+ // Configs in rule stop states indicate reaching the end of the decision
+ // rule (local context) or end of start rule (full context). If all
+ // configs meet this condition, then none of the configurations is able
+ // to Match additional input, so we terminate prediction.
+ //
+ if PredictionModeallConfigsInRuleStopStates(configs) {
+ return true
+ }
+
+ // pure SLL mode parsing
+ if mode == PredictionModeSLL {
+ // Don't bother with combining configs from different semantic
+		// contexts if we can fail over to full LL; it costs more time
+ // since we'll often fail over anyway.
+ if configs.hasSemanticContext {
+ // dup configs, tossing out semantic predicates
+ dup := NewATNConfigSet(false)
+ for _, c := range configs.configs {
+
+ // NewATNConfig({semanticContext:}, c)
+ c = NewATNConfig2(c, SemanticContextNone)
+ dup.Add(c, nil)
+ }
+ configs = dup
+ }
+ // now we have combined contexts for configs with dissimilar predicates
+ }
+ // pure SLL or combined SLL+LL mode parsing
+ altsets := PredictionModegetConflictingAltSubsets(configs)
+ return PredictionModehasConflictingAltSet(altsets) && !PredictionModehasStateAssociatedWithOneAlt(configs)
+}
+
+// PredictionModehasConfigInRuleStopState checks if any configuration in the given configs is in a
+// [RuleStopState]. Configurations meeting this condition have reached
+// the end of the decision rule (local context) or end of start rule (full
+// context).
+//
+// The func returns true if any configuration in the supplied configs is in a [RuleStopState]
+func PredictionModehasConfigInRuleStopState(configs *ATNConfigSet) bool {
+ for _, c := range configs.configs {
+ if _, ok := c.GetState().(*RuleStopState); ok {
+ return true
+ }
+ }
+ return false
+}
+
+// PredictionModeallConfigsInRuleStopStates checks if all configurations in configs are in a
+// [RuleStopState]. Configurations meeting this condition have reached
+// the end of the decision rule (local context) or end of start rule (full
+// context).
+//
+// the func returns true if all configurations in configs are in a
+// [RuleStopState]
+func PredictionModeallConfigsInRuleStopStates(configs *ATNConfigSet) bool {
+
+ for _, c := range configs.configs {
+ if _, ok := c.GetState().(*RuleStopState); !ok {
+ return false
+ }
+ }
+ return true
+}
+
+// PredictionModeresolvesToJustOneViableAlt checks full LL prediction termination.
+//
+// Can we stop looking ahead during [ATN] simulation or is there some
+// uncertainty as to which alternative we will ultimately pick, after
+// consuming more input? Even if there are partial conflicts, we might know
+// that everything is going to resolve to the same minimum alternative. That
+// means we can stop since no more lookahead will change that fact. On the
+// other hand, there might be multiple conflicts that resolve to different
+// minimums. That means we need more lookahead to decide which of those
+// alternatives we should predict.
+//
+// The basic idea is to split the set of configurations 'C', into
+// conflicting subsets (s, _, ctx, _) and singleton subsets with
+// non-conflicting configurations. Two configurations conflict if they have
+// identical [ATNConfig].state and [ATNConfig].context values
+// but a different [ATNConfig].alt value, e.g.
+//
+// (s, i, ctx, _)
+//
+// and
+//
+// (s, j, ctx, _) ; for i != j
+//
+// Reduce these configuration subsets to the set of possible alternatives.
+// You can compute the alternative subsets in one pass as follows:
+//
+// A_s,ctx = {i | (s, i, ctx, _)}
+//
+// for each configuration in C holding s and ctx fixed.
+//
+// Or in pseudo-code:
+//
+// for each configuration c in C:
+//	   map[c] U= c.ATNConfig.alt // map hash/equals uses s and x, not alt and not pred
+//
+// The values in map are the set of
+//
+// A_s,ctx
+//
+// sets.
+//
+// If
+//
+// |A_s,ctx| = 1
+//
+// then there is no conflict associated with s and ctx.
+//
+// Reduce the subsets to singletons by choosing a minimum of each subset. If
+// the union of these alternative subsets is a singleton, then no amount of
+// further lookahead will help us. We will always pick that alternative. If,
+// however, there is more than one alternative, then we are uncertain which
+// alternative to predict and must continue looking for resolution. We may
+// or may not discover an ambiguity in the future, even if there are no
+// conflicting subsets this round.
+//
+// The biggest sin is to terminate early because it means we've made a
+// decision but were uncertain as to the eventual outcome. We haven't used
+// enough lookahead. On the other hand, announcing a conflict too late is no
+// big deal; you will still have the conflict. It's just inefficient, and it
+// might make us look all the way to the end of the file before reporting it.
+//
+// No special consideration for semantic predicates is required because
+// predicates are evaluated on-the-fly for full LL prediction, ensuring that
+// no configuration contains a semantic context during the termination
+// check.
+//
+// # Conflicting Configs
+//
+// Two configurations:
+//
+// (s, i, x) and (s, j, x')
+//
+// conflict when i != j but x = x'. Because we merge all
+// (s, i, _) configurations together, that means that there are at
+// most n configurations associated with state s for
+// n possible alternatives in the decision. The merged stacks
+// complicate the comparison of configuration contexts x and x'.
+//
+// Sam checks to see if one is a subset of the other by calling
+// merge and checking to see if the merged result is either x or x'.
+// If the x associated with lowest alternative i
+// is the superset, then i is the only possible prediction since the
+// others resolve to min(i) as well. However, if x is
+// associated with j > i then at least one stack configuration for
+// j is not in conflict with alternative i. The algorithm
+// should keep going, looking for more lookahead due to the uncertainty.
+//
+// For simplicity, I'm doing an equality check between x and
+// x', which lets the algorithm continue to consume lookahead longer
+// than necessary. The reason I like the equality is of course the
+// simplicity but also because that is the test you need to detect the
+// alternatives that are actually in conflict.
+//
+// # Continue/Stop Rule
+//
+// Continue if the union of resolved alternative sets from non-conflicting and
+// conflicting alternative subsets has more than one alternative. We are
+// uncertain about which alternative to predict.
+//
+// The complete set of alternatives,
+//
+// [i for (_, i, _)]
+//
+// tells us which alternatives are still in the running for the amount of input we've
+// consumed at this point. The conflicting sets let us strip away
+// configurations that won't lead to more states because we resolve
+// conflicts to the configuration with a minimum alternate for the
+// conflicting set.
+//
+// # Cases
+//
+// - no conflicts and more than 1 alternative in set => continue
+// - (s, 1, x), (s, 2, x), (s, 3, z), (s', 1, y), (s', 2, y) yields non-conflicting set
+// {3} ∪ conflicting sets min({1,2}) ∪ min({1,2}) = {1,3} => continue
+// - (s, 1, x), (s, 2, x), (s', 1, y), (s', 2, y), (s'', 1, z) yields non-conflicting set
+// {1} ∪ conflicting sets min({1,2}) ∪ min({1,2}) = {1} => stop and predict 1
+// - (s, 1, x), (s, 2, x), (s', 1, y), (s', 2, y) yields conflicting, reduced sets
+// {1} ∪ {1} = {1} => stop and predict 1, can announce ambiguity {1,2}
+// - (s, 1, x), (s, 2, x), (s', 2, y), (s', 3, y) yields conflicting, reduced sets
+// {1} ∪ {2} = {1,2} => continue
+// - (s, 1, x), (s, 2, x), (s', 3, y), (s', 4, y) yields conflicting, reduced sets
+// {1} ∪ {3} = {1,3} => continue
+//
+// # Exact Ambiguity Detection
+//
+// If all states report the same conflicting set of alternatives, then we
+// know we have the exact ambiguity set:
+//
+// |A_i| > 1
+//
+// and
+//
+// A_i = A_j ; for all i, j
+//
+// In other words, we continue examining lookahead until all A_i
+// have more than one alternative and all A_i are the same. If
+//
+// A={{1,2}, {1,3}}
+//
+// then regular LL prediction would terminate because the resolved set is {1}.
+// To determine what the real ambiguity is, we have to know whether the ambiguity is between one and
+// two or one and three, so we keep going. When we need exact ambiguity
+// detection, we can stop prediction only when the sets look like:
+//
+// A={{1,2}}
+//
+// or
+//
+// {{1,2},{1,2}}, etc...
+func PredictionModeresolvesToJustOneViableAlt(altsets []*BitSet) int {
+ return PredictionModegetSingleViableAlt(altsets)
+}
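+
+// Editor's illustrative sketch (not part of the upstream runtime): with the
+// subsets {1,2} and {1,3}, every subset resolves to the minimum alternative 1,
+// so prediction can stop and pick alternative 1 even though the subsets differ.
+func examplePredictionModeResolvesToJustOneViableAlt() int {
+ a := NewBitSet()
+ a.add(1)
+ a.add(2)
+ b := NewBitSet()
+ b.add(1)
+ b.add(3)
+ return PredictionModeresolvesToJustOneViableAlt([]*BitSet{a, b}) // returns 1
+}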
+
+// PredictionModeallSubsetsConflict determines if every alternative subset in altsets contains more
+// than one alternative.
+//
+// The func returns true if every [BitSet] in altsets has
+// [BitSet].cardinality cardinality > 1
+func PredictionModeallSubsetsConflict(altsets []*BitSet) bool {
+ return !PredictionModehasNonConflictingAltSet(altsets)
+}
+
+// PredictionModehasNonConflictingAltSet determines if any single alternative subset in altsets contains
+// exactly one alternative.
+//
+// The func returns true if altsets contains at least one [BitSet] with
+// [BitSet].cardinality cardinality 1
+func PredictionModehasNonConflictingAltSet(altsets []*BitSet) bool {
+ for i := 0; i < len(altsets); i++ {
+ alts := altsets[i]
+ if alts.length() == 1 {
+ return true
+ }
+ }
+ return false
+}
+
+// PredictionModehasConflictingAltSet determines if any single alternative subset in altsets contains
+// more than one alternative.
+//
+// The func returns true if altsets contains a [BitSet] with
+// [BitSet].cardinality cardinality > 1, otherwise false
+func PredictionModehasConflictingAltSet(altsets []*BitSet) bool {
+ for i := 0; i < len(altsets); i++ {
+ alts := altsets[i]
+ if alts.length() > 1 {
+ return true
+ }
+ }
+ return false
+}
+
+// PredictionModeallSubsetsEqual determines if every alternative subset in altsets is equivalent.
+//
+// The func returns true if every member of altsets is equal to the others.
+func PredictionModeallSubsetsEqual(altsets []*BitSet) bool {
+ var first *BitSet
+
+ for i := 0; i < len(altsets); i++ {
+ alts := altsets[i]
+ if first == nil {
+ first = alts
+ } else if alts != first { // NOTE: compares pointer identity, not set contents
+ return false
+ }
+ }
+
+ return true
+}
+
+// PredictionModegetUniqueAlt returns the unique alternative predicted by all alternative subsets in
+// altsets. If no such alternative exists, this method returns
+// [ATNInvalidAltNumber].
+func PredictionModegetUniqueAlt(altsets []*BitSet) int {
+ all := PredictionModeGetAlts(altsets)
+ if all.length() == 1 {
+ return all.minValue()
+ }
+
+ return ATNInvalidAltNumber
+}
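+
+// Editor's illustrative sketch (not part of the upstream runtime): the unique
+// alternative exists only when the union of all the subsets is a singleton.
+func examplePredictionModeGetUniqueAlt() {
+ a := NewBitSet()
+ a.add(3)
+ b := NewBitSet()
+ b.add(3)
+ _ = PredictionModegetUniqueAlt([]*BitSet{a, b}) // 3: the union is {3}
+ b.add(1)
+ _ = PredictionModegetUniqueAlt([]*BitSet{a, b}) // ATNInvalidAltNumber: the union is {1,3}
+}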
+
+// PredictionModeGetAlts returns the complete set of represented alternatives for a collection of
+// alternative subsets. This method returns the union of each [BitSet]
+// in altsets, being the set of represented alternatives in altsets.
+func PredictionModeGetAlts(altsets []*BitSet) *BitSet {
+ all := NewBitSet()
+ for _, alts := range altsets {
+ all.or(alts)
+ }
+ return all
+}
+
+// PredictionModegetConflictingAltSubsets gets the conflicting alt subsets from a configuration set.
+//
+// for each configuration c in configs:
+// map[c] U= c.ATNConfig.alt // map hash/equals uses s and x, not alt and not pred
+func PredictionModegetConflictingAltSubsets(configs *ATNConfigSet) []*BitSet {
+ configToAlts := NewJMap[*ATNConfig, *BitSet, *ATNAltConfigComparator[*ATNConfig]](atnAltCfgEqInst, AltSetCollection, "PredictionModegetConflictingAltSubsets()")
+
+ for _, c := range configs.configs {
+
+ alts, ok := configToAlts.Get(c)
+ if !ok {
+ alts = NewBitSet()
+ configToAlts.Put(c, alts)
+ }
+ alts.add(c.GetAlt())
+ }
+
+ return configToAlts.Values()
+}
+
+// PredictionModeGetStateToAltMap gets a map from state to alt subset from a configuration set.
+//
+// for each configuration c in configs:
+// map[c.ATNConfig.state] U= c.ATNConfig.alt
+func PredictionModeGetStateToAltMap(configs *ATNConfigSet) *AltDict {
+ m := NewAltDict()
+
+ for _, c := range configs.configs {
+ alts := m.Get(c.GetState().String())
+ if alts == nil {
+ alts = NewBitSet()
+ m.put(c.GetState().String(), alts)
+ }
+ alts.(*BitSet).add(c.GetAlt())
+ }
+ return m
+}
+
+func PredictionModehasStateAssociatedWithOneAlt(configs *ATNConfigSet) bool {
+ values := PredictionModeGetStateToAltMap(configs).values()
+ for i := 0; i < len(values); i++ {
+ if values[i].(*BitSet).length() == 1 {
+ return true
+ }
+ }
+ return false
+}
+
+// PredictionModegetSingleViableAlt gets the single alternative predicted by all alternative subsets in altsets
+// if there is one.
+//
+// TODO: JI - Review this code - it does not seem to do the same thing as the Java code - maybe because [BitSet] is not like the Java utils BitSet
+func PredictionModegetSingleViableAlt(altsets []*BitSet) int {
+ result := ATNInvalidAltNumber
+
+ for i := 0; i < len(altsets); i++ {
+ alts := altsets[i]
+ minAlt := alts.minValue()
+ if result == ATNInvalidAltNumber {
+ result = minAlt
+ } else if result != minAlt { // more than 1 viable alt
+ return ATNInvalidAltNumber
+ }
+ }
+ return result
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/recognizer.go b/vendor/github.com/antlr4-go/antlr/v4/recognizer.go
new file mode 100644
index 0000000..2e0b504
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/recognizer.go
@@ -0,0 +1,241 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "strings"
+
+ "strconv"
+)
+
+type Recognizer interface {
+ GetLiteralNames() []string
+ GetSymbolicNames() []string
+ GetRuleNames() []string
+
+ Sempred(RuleContext, int, int) bool
+ Precpred(RuleContext, int) bool
+
+ GetState() int
+ SetState(int)
+ Action(RuleContext, int, int)
+ AddErrorListener(ErrorListener)
+ RemoveErrorListeners()
+ GetATN() *ATN
+ GetErrorListenerDispatch() ErrorListener
+ HasError() bool
+ GetError() RecognitionException
+ SetError(RecognitionException)
+}
+
+type BaseRecognizer struct {
+ listeners []ErrorListener
+ state int
+
+ RuleNames []string
+ LiteralNames []string
+ SymbolicNames []string
+ GrammarFileName string
+ SynErr RecognitionException
+}
+
+func NewBaseRecognizer() *BaseRecognizer {
+ rec := new(BaseRecognizer)
+ rec.listeners = []ErrorListener{ConsoleErrorListenerINSTANCE}
+ rec.state = -1
+ return rec
+}
+
+//goland:noinspection GoUnusedGlobalVariable
+var tokenTypeMapCache = make(map[string]int)
+
+//goland:noinspection GoUnusedGlobalVariable
+var ruleIndexMapCache = make(map[string]int)
+
+func (b *BaseRecognizer) checkVersion(toolVersion string) {
+ runtimeVersion := "4.12.0"
+ if runtimeVersion != toolVersion {
+ fmt.Println("ANTLR runtime and generated code versions disagree: " + runtimeVersion + "!=" + toolVersion)
+ }
+}
+
+func (b *BaseRecognizer) SetError(err RecognitionException) {
+ b.SynErr = err
+}
+
+func (b *BaseRecognizer) HasError() bool {
+ return b.SynErr != nil
+}
+
+func (b *BaseRecognizer) GetError() RecognitionException {
+ return b.SynErr
+}
+
+func (b *BaseRecognizer) Action(_ RuleContext, _, _ int) {
+ panic("action not implemented on Recognizer!")
+}
+
+func (b *BaseRecognizer) AddErrorListener(listener ErrorListener) {
+ b.listeners = append(b.listeners, listener)
+}
+
+func (b *BaseRecognizer) RemoveErrorListeners() {
+ b.listeners = make([]ErrorListener, 0)
+}
+
+func (b *BaseRecognizer) GetRuleNames() []string {
+ return b.RuleNames
+}
+
+func (b *BaseRecognizer) GetTokenNames() []string {
+ return b.LiteralNames
+}
+
+func (b *BaseRecognizer) GetSymbolicNames() []string {
+ return b.SymbolicNames
+}
+
+func (b *BaseRecognizer) GetLiteralNames() []string {
+ return b.LiteralNames
+}
+
+func (b *BaseRecognizer) GetState() int {
+ return b.state
+}
+
+func (b *BaseRecognizer) SetState(v int) {
+ b.state = v
+}
+
+//func (b *Recognizer) GetTokenTypeMap() {
+// var tokenNames = b.GetTokenNames()
+// if (tokenNames==nil) {
+// panic("The current recognizer does not provide a list of token names.")
+// }
+// var result = tokenTypeMapCache[tokenNames]
+// if(result==nil) {
+// result = tokenNames.reduce(function(o, k, i) { o[k] = i })
+// result.EOF = TokenEOF
+// tokenTypeMapCache[tokenNames] = result
+// }
+// return result
+//}
+
+// GetRuleIndexMap gets a map from rule names to rule indexes.
+//
+// Used for XPath and tree pattern compilation.
+//
+// TODO: JI This is not yet implemented in the Go runtime. Maybe not needed.
+func (b *BaseRecognizer) GetRuleIndexMap() map[string]int {
+
+ panic("Method not defined!")
+ // var ruleNames = b.GetRuleNames()
+ // if (ruleNames==nil) {
+ // panic("The current recognizer does not provide a list of rule names.")
+ // }
+ //
+ // var result = ruleIndexMapCache[ruleNames]
+ // if(result==nil) {
+ // result = ruleNames.reduce(function(o, k, i) { o[k] = i })
+ // ruleIndexMapCache[ruleNames] = result
+ // }
+ // return result
+}
+
+// GetTokenType gets the token type based upon its name.
+func (b *BaseRecognizer) GetTokenType(_ string) int {
+ panic("Method not defined!")
+ // var ttype = b.GetTokenTypeMap()[tokenName]
+ // if (ttype !=nil) {
+ // return ttype
+ // } else {
+ // return TokenInvalidType
+ // }
+}
+
+//func (b *Recognizer) GetTokenTypeMap() map[string]int {
+// Vocabulary vocabulary = getVocabulary()
+//
+// Synchronized (tokenTypeMapCache) {
+// Map result = tokenTypeMapCache.Get(vocabulary)
+// if (result == null) {
+// result = new HashMap()
+// for (int i = 0; i < GetATN().maxTokenType; i++) {
+// String literalName = vocabulary.getLiteralName(i)
+// if (literalName != null) {
+// result.put(literalName, i)
+// }
+//
+// String symbolicName = vocabulary.GetSymbolicName(i)
+// if (symbolicName != null) {
+// result.put(symbolicName, i)
+// }
+// }
+//
+// result.put("EOF", Token.EOF)
+// result = Collections.unmodifiableMap(result)
+// tokenTypeMapCache.put(vocabulary, result)
+// }
+//
+// return result
+// }
+//}
+
+// GetErrorHeader returns the error header, normally line/character position information.
+//
+// Can be overridden in sub structs embedding BaseRecognizer.
+func (b *BaseRecognizer) GetErrorHeader(e RecognitionException) string {
+ line := e.GetOffendingToken().GetLine()
+ column := e.GetOffendingToken().GetColumn()
+ return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
+}
+
+// GetTokenErrorDisplay shows how a token should be displayed in an error message.
+//
+// The default is to display just the text, but during development you might
+// want to have a lot of information spit out. Override in that case
+// to use t.String() (which, for CommonToken, dumps everything about
+// the token). This is better than forcing you to override a method in
+// your token objects because you don't have to go modify your lexer
+// so that it creates a new token type.
+//
+// Deprecated: This method is not called by the ANTLR 4 Runtime. Specific
+// implementations of [ANTLRErrorStrategy] may provide a similar
+// feature when necessary. For example, see [DefaultErrorStrategy].GetTokenErrorDisplay()
+func (b *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
+ if t == nil {
+ return ""
+ }
+ s := t.GetText()
+ if s == "" {
+ if t.GetTokenType() == TokenEOF {
+ s = ""
+ } else {
+ s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
+ }
+ }
+ s = strings.Replace(s, "\t", "\\t", -1)
+ s = strings.Replace(s, "\n", "\\n", -1)
+ s = strings.Replace(s, "\r", "\\r", -1)
+
+ return "'" + s + "'"
+}
+
+func (b *BaseRecognizer) GetErrorListenerDispatch() ErrorListener {
+ return NewProxyErrorListener(b.listeners)
+}
+
+// Sempred embedding structs need to override this if there are sempreds or actions
+// that the ATN interpreter needs to execute
+func (b *BaseRecognizer) Sempred(_ RuleContext, _ int, _ int) bool {
+ return true
+}
+
+// Precpred embedding structs need to override this if there are preceding predicates
+// that the ATN interpreter needs to execute
+func (b *BaseRecognizer) Precpred(_ RuleContext, _ int) bool {
+ return true
+}
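+
+// Editor's illustrative sketch (hypothetical, not generated by ANTLR): a parser
+// that embeds BaseRecognizer overrides Sempred to evaluate the grammar's {...}?
+// predicates, dispatching on the rule and predicate indexes that the generated
+// code passes in.
+type examplePredParser struct {
+ *BaseRecognizer
+ depth int
+}
+
+func (p *examplePredParser) Sempred(_ RuleContext, ruleIndex, predIndex int) bool {
+ if ruleIndex == 0 && predIndex == 0 {
+ return p.depth < 10 // stands in for a real {...}? predicate body
+ }
+ return true
+}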
diff --git a/vendor/github.com/antlr4-go/antlr/v4/rule_context.go b/vendor/github.com/antlr4-go/antlr/v4/rule_context.go
new file mode 100644
index 0000000..f2ad047
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/rule_context.go
@@ -0,0 +1,40 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+// RuleContext is a record of a single rule invocation. It knows
+// which context invoked it, if any. If there is no parent context, then
+// naturally the invoking state is not valid. The parent link
+// provides a chain upwards from the current rule invocation to the root
+// of the invocation tree, forming a stack.
+//
+// We actually carry no information about the rule associated with this context (except
+// when parsing). We keep only the state number of the invoking state from
+// the [ATN] submachine that invoked this. Contrast this with the s
+// pointer inside [ParserRuleContext] that tracks the current state
+// being "executed" for the current rule.
+//
+// The parent contexts are useful for computing lookahead sets and
+// getting error information.
+//
+// These objects are used during parsing and prediction.
+// For the special case of parsers, we use the struct
+// [ParserRuleContext], which embeds a RuleContext.
+type RuleContext interface {
+ RuleNode
+
+ GetInvokingState() int
+ SetInvokingState(int)
+
+ GetRuleIndex() int
+ IsEmpty() bool
+
+ GetAltNumber() int
+ SetAltNumber(altNumber int)
+
+ String([]string, RuleContext) string
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/semantic_context.go b/vendor/github.com/antlr4-go/antlr/v4/semantic_context.go
new file mode 100644
index 0000000..68cb906
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/semantic_context.go
@@ -0,0 +1,464 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "strconv"
+)
+
+// SemanticContext is a tree structure used to record the semantic context in
+// which an ATN configuration is valid. It's either a single predicate,
+// a conjunction p1 && p2, or a sum of products p1 || p2.
+//
+// I have scoped the AND, OR, and Predicate subclasses of
+// [SemanticContext] within the scope of this outer "class".
+type SemanticContext interface {
+ Equals(other Collectable[SemanticContext]) bool
+ Hash() int
+
+ evaluate(parser Recognizer, outerContext RuleContext) bool
+ evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext
+
+ String() string
+}
+
+func SemanticContextandContext(a, b SemanticContext) SemanticContext {
+ if a == nil || a == SemanticContextNone {
+ return b
+ }
+ if b == nil || b == SemanticContextNone {
+ return a
+ }
+ result := NewAND(a, b)
+ if len(result.opnds) == 1 {
+ return result.opnds[0]
+ }
+
+ return result
+}
+
+func SemanticContextorContext(a, b SemanticContext) SemanticContext {
+ if a == nil {
+ return b
+ }
+ if b == nil {
+ return a
+ }
+ if a == SemanticContextNone || b == SemanticContextNone {
+ return SemanticContextNone
+ }
+ result := NewOR(a, b)
+ if len(result.opnds) == 1 {
+ return result.opnds[0]
+ }
+
+ return result
+}
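+
+// Editor's illustrative sketch (not part of the upstream runtime): None is the
+// neutral element for andContext but the absorbing element for orContext, so
+// {true}? && p reduces to p while {true}? || p reduces to {true}?.
+func exampleSemanticContextCombine() {
+ p := NewPredicate(0, 0, false)
+ _ = SemanticContextandContext(p, SemanticContextNone) // == p
+ _ = SemanticContextorContext(p, SemanticContextNone) // == SemanticContextNone
+}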
+
+type Predicate struct {
+ ruleIndex int
+ predIndex int
+ isCtxDependent bool
+}
+
+func NewPredicate(ruleIndex, predIndex int, isCtxDependent bool) *Predicate {
+ p := new(Predicate)
+
+ p.ruleIndex = ruleIndex
+ p.predIndex = predIndex
+ p.isCtxDependent = isCtxDependent // e.g., $i ref in pred
+ return p
+}
+
+// SemanticContextNone is the default [SemanticContext], which is semantically
+// equivalent to a predicate of the form {true}?.
+
+var SemanticContextNone = NewPredicate(-1, -1, false)
+
+func (p *Predicate) evalPrecedence(_ Recognizer, _ RuleContext) SemanticContext {
+ return p
+}
+
+func (p *Predicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
+
+ var localctx RuleContext
+
+ if p.isCtxDependent {
+ localctx = outerContext
+ }
+
+ return parser.Sempred(localctx, p.ruleIndex, p.predIndex)
+}
+
+func (p *Predicate) Equals(other Collectable[SemanticContext]) bool {
+ if p == other {
+ return true
+ } else if _, ok := other.(*Predicate); !ok {
+ return false
+ } else {
+ return p.ruleIndex == other.(*Predicate).ruleIndex &&
+ p.predIndex == other.(*Predicate).predIndex &&
+ p.isCtxDependent == other.(*Predicate).isCtxDependent
+ }
+}
+
+func (p *Predicate) Hash() int {
+ h := murmurInit(0)
+ h = murmurUpdate(h, p.ruleIndex)
+ h = murmurUpdate(h, p.predIndex)
+ if p.isCtxDependent {
+ h = murmurUpdate(h, 1)
+ } else {
+ h = murmurUpdate(h, 0)
+ }
+ return murmurFinish(h, 3)
+}
+
+func (p *Predicate) String() string {
+ return "{" + strconv.Itoa(p.ruleIndex) + ":" + strconv.Itoa(p.predIndex) + "}?"
+}
+
+type PrecedencePredicate struct {
+ precedence int
+}
+
+func NewPrecedencePredicate(precedence int) *PrecedencePredicate {
+
+ p := new(PrecedencePredicate)
+ p.precedence = precedence
+
+ return p
+}
+
+func (p *PrecedencePredicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
+ return parser.Precpred(outerContext, p.precedence)
+}
+
+func (p *PrecedencePredicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
+ if parser.Precpred(outerContext, p.precedence) {
+ return SemanticContextNone
+ }
+
+ return nil
+}
+
+func (p *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {
+ return p.precedence - other.precedence
+}
+
+func (p *PrecedencePredicate) Equals(other Collectable[SemanticContext]) bool {
+
+ var op *PrecedencePredicate
+ var ok bool
+ if op, ok = other.(*PrecedencePredicate); !ok {
+ return false
+ }
+
+ if p == op {
+ return true
+ }
+
+ return p.precedence == other.(*PrecedencePredicate).precedence
+}
+
+func (p *PrecedencePredicate) Hash() int {
+ h := uint32(1)
+ h = 31*h + uint32(p.precedence)
+ return int(h)
+}
+
+func (p *PrecedencePredicate) String() string {
+ return "{" + strconv.Itoa(p.precedence) + ">=prec}?"
+}
+
+func PrecedencePredicatefilterPrecedencePredicates(set *JStore[SemanticContext, Comparator[SemanticContext]]) []*PrecedencePredicate {
+ result := make([]*PrecedencePredicate, 0)
+
+ set.Each(func(v SemanticContext) bool {
+ if c2, ok := v.(*PrecedencePredicate); ok {
+ result = append(result, c2)
+ }
+ return true
+ })
+
+ return result
+}
+
+// A semantic context which is true whenever none of the contained contexts
+// is false.
+
+type AND struct {
+ opnds []SemanticContext
+}
+
+func NewAND(a, b SemanticContext) *AND {
+
+ operands := NewJStore[SemanticContext, Comparator[SemanticContext]](semctxEqInst, SemanticContextCollection, "NewAND() operands")
+ if aa, ok := a.(*AND); ok {
+ for _, o := range aa.opnds {
+ operands.Put(o)
+ }
+ } else {
+ operands.Put(a)
+ }
+
+ if ba, ok := b.(*AND); ok {
+ for _, o := range ba.opnds {
+ operands.Put(o)
+ }
+ } else {
+ operands.Put(b)
+ }
+ precedencePredicates := PrecedencePredicatefilterPrecedencePredicates(operands)
+ if len(precedencePredicates) > 0 {
+ // interested in the transition with the lowest precedence
+ var reduced *PrecedencePredicate
+
+ for _, p := range precedencePredicates {
+ if reduced == nil || p.precedence < reduced.precedence {
+ reduced = p
+ }
+ }
+
+ operands.Put(reduced)
+ }
+
+ vs := operands.Values()
+ opnds := make([]SemanticContext, len(vs))
+ copy(opnds, vs)
+
+ and := new(AND)
+ and.opnds = opnds
+
+ return and
+}
+
+func (a *AND) Equals(other Collectable[SemanticContext]) bool {
+ if a == other {
+ return true
+ }
+ if oa, ok := other.(*AND); !ok {
+ return false
+ } else {
+ // guard the indexed access below: operand slices of different
+ // lengths can never be equal
+ if len(a.opnds) != len(oa.opnds) {
+ return false
+ }
+ for i, v := range oa.opnds {
+ if !a.opnds[i].Equals(v) {
+ return false
+ }
+ }
+ return true
+ }
+}
+
+// The evaluation of predicates by a context is short-circuiting, but
+// unordered.
+func (a *AND) evaluate(parser Recognizer, outerContext RuleContext) bool {
+ for i := 0; i < len(a.opnds); i++ {
+ if !a.opnds[i].evaluate(parser, outerContext) {
+ return false
+ }
+ }
+ return true
+}
+
+func (a *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
+ differs := false
+ operands := make([]SemanticContext, 0)
+
+ for i := 0; i < len(a.opnds); i++ {
+ context := a.opnds[i]
+ evaluated := context.evalPrecedence(parser, outerContext)
+ differs = differs || (evaluated != context)
+ if evaluated == nil {
+ // The AND context is false if any element is false
+ return nil
+ } else if evaluated != SemanticContextNone {
+ // Reduce the result by Skipping true elements
+ operands = append(operands, evaluated)
+ }
+ }
+ if !differs {
+ return a
+ }
+
+ if len(operands) == 0 {
+ // all elements were true, so the AND context is true
+ return SemanticContextNone
+ }
+
+ var result SemanticContext
+
+ for _, o := range operands {
+ if result == nil {
+ result = o
+ } else {
+ result = SemanticContextandContext(result, o)
+ }
+ }
+
+ return result
+}
+
+func (a *AND) Hash() int {
+ h := murmurInit(37) // Init with a value different from OR
+ for _, op := range a.opnds {
+ h = murmurUpdate(h, op.Hash())
+ }
+ return murmurFinish(h, len(a.opnds))
+}
+
+func (o *OR) Hash() int {
+ h := murmurInit(41) // Init with a value different from AND
+ for _, op := range o.opnds {
+ h = murmurUpdate(h, op.Hash())
+ }
+ return murmurFinish(h, len(o.opnds))
+}
+
+func (a *AND) String() string {
+ s := ""
+
+ for _, o := range a.opnds {
+ s += "&& " + fmt.Sprint(o)
+ }
+
+ if len(s) > 3 {
+ return s[3:] // strip the leading "&& " separator
+ }
+
+ return s
+}
+
+//
+// A semantic context which is true whenever at least one of the contained
+// contexts is true.
+//
+
+type OR struct {
+ opnds []SemanticContext
+}
+
+func NewOR(a, b SemanticContext) *OR {
+
+ operands := NewJStore[SemanticContext, Comparator[SemanticContext]](semctxEqInst, SemanticContextCollection, "NewOR() operands")
+ if aa, ok := a.(*OR); ok {
+ for _, o := range aa.opnds {
+ operands.Put(o)
+ }
+ } else {
+ operands.Put(a)
+ }
+
+ if ba, ok := b.(*OR); ok {
+ for _, o := range ba.opnds {
+ operands.Put(o)
+ }
+ } else {
+ operands.Put(b)
+ }
+ precedencePredicates := PrecedencePredicatefilterPrecedencePredicates(operands)
+ if len(precedencePredicates) > 0 {
+ // interested in the transition with the lowest precedence
+ var reduced *PrecedencePredicate
+
+ for _, p := range precedencePredicates {
+ if reduced == nil || p.precedence > reduced.precedence {
+ reduced = p
+ }
+ }
+
+ operands.Put(reduced)
+ }
+
+ vs := operands.Values()
+
+ opnds := make([]SemanticContext, len(vs))
+ copy(opnds, vs)
+
+ o := new(OR)
+ o.opnds = opnds
+
+ return o
+}
+
+func (o *OR) Equals(other Collectable[SemanticContext]) bool {
+ if o == other {
+ return true
+ } else if oo, ok := other.(*OR); !ok {
+ return false
+ } else {
+ // guard the indexed access below: operand slices of different
+ // lengths can never be equal
+ if len(o.opnds) != len(oo.opnds) {
+ return false
+ }
+ for i, v := range oo.opnds {
+ if !o.opnds[i].Equals(v) {
+ return false
+ }
+ }
+ return true
+ }
+}
+
+// The evaluation of predicates by an OR context is short-circuiting, but
+// unordered.
+func (o *OR) evaluate(parser Recognizer, outerContext RuleContext) bool {
+ for i := 0; i < len(o.opnds); i++ {
+ if o.opnds[i].evaluate(parser, outerContext) {
+ return true
+ }
+ }
+ return false
+}
+
+func (o *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
+ differs := false
+ operands := make([]SemanticContext, 0)
+ for i := 0; i < len(o.opnds); i++ {
+ context := o.opnds[i]
+ evaluated := context.evalPrecedence(parser, outerContext)
+ differs = differs || (evaluated != context)
+ if evaluated == SemanticContextNone {
+ // The OR context is true if any element is true
+ return SemanticContextNone
+ } else if evaluated != nil {
+ // Reduce the result by Skipping false elements
+ operands = append(operands, evaluated)
+ }
+ }
+ if !differs {
+ return o
+ }
+ if len(operands) == 0 {
+ // all elements were false, so the OR context is false
+ return nil
+ }
+ var result SemanticContext
+
+ for _, o := range operands {
+ if result == nil {
+ result = o
+ } else {
+ result = SemanticContextorContext(result, o)
+ }
+ }
+
+ return result
+}
+
+func (o *OR) String() string {
+ s := ""
+
+ for _, o := range o.opnds {
+ s += "|| " + fmt.Sprint(o)
+ }
+
+ if len(s) > 3 {
+ return s[3:] // strip the leading "|| " separator
+ }
+
+ return s
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/statistics.go b/vendor/github.com/antlr4-go/antlr/v4/statistics.go
new file mode 100644
index 0000000..70c0673
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/statistics.go
@@ -0,0 +1,281 @@
+//go:build antlr.stats
+
+package antlr
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "sync"
+)
+
+// This file allows the user to collect statistics about the performance of the ANTLR runtime. It is not enabled by default
+// and so incurs no time penalty. To enable it, you must build the runtime with the antlr.stats build tag.
+//
+
+// Tells various components to collect statistics - because it is only true when this file is included, it will
+// allow the compiler to completely eliminate all the code that is only used when collecting statistics.
+const collectStats = true
+
+// goRunStats is a collection of all the various data the ANTLR runtime has collected about a particular run.
+// It is exported so that it can be used by others to look for things that are not already looked for in the
+// runtime statistics.
+type goRunStats struct {
+
+ // jStats is a slice of all the [JStatRec] records that have been created, which is one for EVERY collection created
+ // during a run. It can be inspected to look for things that are not already looked for
+ // within this package.
+ //
+ jStats []*JStatRec
+ jStatsLock sync.RWMutex
+ topN int
+ topNByMax []*JStatRec
+ topNByUsed []*JStatRec
+ unusedCollections map[CollectionSource]int
+ counts map[CollectionSource]int
+}
+
+const (
+ collectionsFile = "collections"
+)
+
+var (
+ Statistics = &goRunStats{
+ topN: 10,
+ }
+)
+
+type statsOption func(*goRunStats) error
+
+// Configure allows the statistics system to be configured as the user wants, overriding the defaults.
+func (s *goRunStats) Configure(options ...statsOption) error {
+ for _, option := range options {
+ err := option(s)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// WithTopN sets the number of things to list in the report when we are concerned with the top N things.
+//
+// For example, if you want to see the top 20 collections by size, you can do:
+//
+// antlr.Statistics.Configure(antlr.WithTopN(20))
+func WithTopN(topN int) statsOption {
+ return func(s *goRunStats) error {
+ s.topN = topN
+ return nil
+ }
+}
+
+// Analyze looks through all the statistical records and computes all the outputs that might be useful to the user.
+//
+// The function gathers and analyzes a number of statistics about any particular run of
+// an ANTLR generated recognizer. In the vast majority of cases, the statistics are only
+// useful to maintainers of ANTLR itself, but they can be useful to users as well. They may be
+// especially useful in tracking down bugs or performance problems when an ANTLR user could
+// supply the output from this package, but cannot supply the grammar file(s) they are using, even
+// privately to the maintainers.
+//
+// The statistics are gathered by the runtime itself, and are not gathered by the parser or lexer, but the user
+// must call this function themselves to analyze the statistics. This is because none of the infrastructure is
+// extant unless the calling program is built with the antlr.stats tag like so:
+//
+// go build -tags antlr.stats .
+//
+// When a program is built with the antlr.stats tag, the Statistics object is created and available outside
+// the package. The user can then call the [Statistics.Analyze] function to analyze the statistics and then call the
+// [Statistics.Report] function to report the statistics.
+//
+// Please forward any questions about this package to the ANTLR discussion groups on GitHub or send them to
+// me [Jim Idle] directly at jimi@idle.ws
+//
+// [Jim Idle]: https://github.com/jim-idle
+func (s *goRunStats) Analyze() {
+
+ // Look for anything that looks strange and record it in our local maps etc for the report to present it
+ //
+ s.CollectionAnomalies()
+ s.TopNCollections()
+}
+
+// TopNCollections looks through all the statistical records and gathers the top ten collections by size.
+func (s *goRunStats) TopNCollections() {
+
+ // Let's sort the stat records by MaxSize
+ //
+ sort.Slice(s.jStats, func(i, j int) bool {
+ return s.jStats[i].MaxSize > s.jStats[j].MaxSize
+ })
+
+ for i := 0; i < len(s.jStats) && i < s.topN; i++ {
+ s.topNByMax = append(s.topNByMax, s.jStats[i])
+ }
+
+ // Sort by the number of times used
+ //
+ sort.Slice(s.jStats, func(i, j int) bool {
+ return s.jStats[i].Gets+s.jStats[i].Puts > s.jStats[j].Gets+s.jStats[j].Puts
+ })
+ for i := 0; i < len(s.jStats) && i < s.topN; i++ {
+ s.topNByUsed = append(s.topNByUsed, s.jStats[i])
+ }
+}
+
+// Report dumps an asciidoc formatted report of all the statistics collected during a run to the given dir output
+// path, which should represent a directory. Generated files will be prefixed with the given prefix and will be
+// given a type name such as `collections` and a .adoc suffix.
+func (s *goRunStats) Report(dir string, prefix string) error {
+
+ isDir, err := isDirectory(dir)
+ switch {
+ case err != nil:
+ return err
+ case !isDir:
+ return fmt.Errorf("output directory `%s` is not a directory", dir)
+ }
+ s.reportCollections(dir, prefix)
+
+ // Clean out any old data in case the user forgets
+ //
+ s.Reset()
+ return nil
+}
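+
+// Editor's illustrative sketch (not part of the upstream runtime): a typical
+// sequence after a parse run, in a binary built with -tags antlr.stats. The
+// output directory is assumed to exist and be writable.
+func exampleStatisticsReport() error {
+ Statistics.Analyze()
+ return Statistics.Report(os.TempDir(), "myparser")
+}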
+
+func (s *goRunStats) Reset() {
+ s.jStats = nil
+ s.topNByUsed = nil
+ s.topNByMax = nil
+}
+
+func (s *goRunStats) reportCollections(dir, prefix string) {
+ cname := filepath.Join(dir, ".asciidoctor")
+ // If the file doesn't exist, create it, or append to the file
+ f, err := os.OpenFile(cname, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
+ if err != nil {
+ log.Fatal(err)
+ }
+ _, _ = f.WriteString(`// .asciidoctorconfig
+++++
+
+++++`)
+ _ = f.Close()
+
+ fname := filepath.Join(dir, prefix+"_"+"_"+collectionsFile+"_"+".adoc")
+ // If the file doesn't exist, create it, or append to the file
+ f, err = os.OpenFile(fname, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer func(f *os.File) {
+ err := f.Close()
+ if err != nil {
+ log.Fatal(err)
+ }
+ }(f)
+ _, _ = f.WriteString("= Collections for " + prefix + "\n\n")
+
+ _, _ = f.WriteString("== Summary\n")
+
+ if s.unusedCollections != nil {
+ _, _ = f.WriteString("=== Unused Collections\n")
+ _, _ = f.WriteString("Unused collections incur a penalty for allocation that makes them a candidate for either\n")
+ _, _ = f.WriteString(" removal or optimization. If you are using a collection that is not used, you should\n")
+ _, _ = f.WriteString(" consider removing it. If you are using a collection that is used, but not very often,\n")
+ _, _ = f.WriteString(" you should consider using lazy initialization to defer the allocation until it is\n")
+ _, _ = f.WriteString(" actually needed.\n\n")
+
+ _, _ = f.WriteString("\n.Unused collections\n")
+ _, _ = f.WriteString(`[cols="<3,>1"]` + "\n\n")
+ _, _ = f.WriteString("|===\n")
+ _, _ = f.WriteString("| Type | Count\n")
+
+ for k, v := range s.unusedCollections {
+ _, _ = f.WriteString("| " + CollectionDescriptors[k].SybolicName + " | " + strconv.Itoa(v) + "\n")
+ }
+ f.WriteString("|===\n\n")
+ }
+
+ _, _ = f.WriteString("\n.Summary of Collections\n")
+ _, _ = f.WriteString(`[cols="<3,>1"]` + "\n\n")
+ _, _ = f.WriteString("|===\n")
+ _, _ = f.WriteString("| Type | Count\n")
+ for k, v := range s.counts {
+ _, _ = f.WriteString("| " + CollectionDescriptors[k].SybolicName + " | " + strconv.Itoa(v) + "\n")
+ }
+ _, _ = f.WriteString("| Total | " + strconv.Itoa(len(s.jStats)) + "\n")
+ _, _ = f.WriteString("|===\n\n")
+
+ _, _ = f.WriteString("\n.Summary of Top " + strconv.Itoa(s.topN) + " Collections by MaxSize\n")
+ _, _ = f.WriteString(`[cols="<1,<3,>1,>1,>1,>1"]` + "\n\n")
+ _, _ = f.WriteString("|===\n")
+ _, _ = f.WriteString("| Source | Description | MaxSize | EndSize | Puts | Gets\n")
+ for _, c := range s.topNByMax {
+ _, _ = f.WriteString("| " + CollectionDescriptors[c.Source].SybolicName + "\n")
+ _, _ = f.WriteString("| " + c.Description + "\n")
+ _, _ = f.WriteString("| " + strconv.Itoa(c.MaxSize) + "\n")
+ _, _ = f.WriteString("| " + strconv.Itoa(c.CurSize) + "\n")
+ _, _ = f.WriteString("| " + strconv.Itoa(c.Puts) + "\n")
+ _, _ = f.WriteString("| " + strconv.Itoa(c.Gets) + "\n")
+ _, _ = f.WriteString("\n")
+ }
+ _, _ = f.WriteString("|===\n\n")
+
+ _, _ = f.WriteString("\n.Summary of Top " + strconv.Itoa(s.topN) + " Collections by Access\n")
+ _, _ = f.WriteString(`[cols="<1,<3,>1,>1,>1,>1,>1"]` + "\n\n")
+ _, _ = f.WriteString("|===\n")
+ _, _ = f.WriteString("| Source | Description | MaxSize | EndSize | Puts | Gets | P+G\n")
+ for _, c := range s.topNByUsed {
+ _, _ = f.WriteString("| " + CollectionDescriptors[c.Source].SybolicName + "\n")
+ _, _ = f.WriteString("| " + c.Description + "\n")
+ _, _ = f.WriteString("| " + strconv.Itoa(c.MaxSize) + "\n")
+ _, _ = f.WriteString("| " + strconv.Itoa(c.CurSize) + "\n")
+ _, _ = f.WriteString("| " + strconv.Itoa(c.Puts) + "\n")
+ _, _ = f.WriteString("| " + strconv.Itoa(c.Gets) + "\n")
+ _, _ = f.WriteString("| " + strconv.Itoa(c.Gets+c.Puts) + "\n")
+ _, _ = f.WriteString("\n")
+ }
+ _, _ = f.WriteString("|===\n\n")
+}
+
+// AddJStatRec adds a [JStatRec] record to the [goRunStats] collection when the antlr.stats build tag is enabled.
+func (s *goRunStats) AddJStatRec(rec *JStatRec) {
+ s.jStatsLock.Lock()
+ defer s.jStatsLock.Unlock()
+ s.jStats = append(s.jStats, rec)
+}
+
+// CollectionAnomalies looks through all the statistical records and gathers any anomalies that have been found.
+func (s *goRunStats) CollectionAnomalies() {
+ s.jStatsLock.RLock()
+ defer s.jStatsLock.RUnlock()
+ s.counts = make(map[CollectionSource]int, len(s.jStats))
+ for _, c := range s.jStats {
+
+ // Accumulate raw counts
+ //
+ s.counts[c.Source]++
+
+ // Look for allocated but unused collections and count them
+ if c.MaxSize == 0 && c.Puts == 0 {
+ if s.unusedCollections == nil {
+ s.unusedCollections = make(map[CollectionSource]int)
+ }
+ s.unusedCollections[c.Source]++
+ }
+ if c.MaxSize > 6000 {
+ fmt.Println("Collection ", c.Description, "accumulated a max size of ", c.MaxSize, " - this is probably too large and indicates a poorly formed grammar")
+ }
+ }
+
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/stats_data.go b/vendor/github.com/antlr4-go/antlr/v4/stats_data.go
new file mode 100644
index 0000000..4d9eb94
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/stats_data.go
@@ -0,0 +1,23 @@
+package antlr
+
+// A JStatRec is a record of a particular use of a [JStore], [JMap] or [JPCMap] collection. Typically, it will be
+// used to look for unused collections that were allocated anyway, problems with hash bucket clashes, and anomalies
+// such as huge numbers of Gets with no entries found (GetNoEnt). You can refer to the CollectionAnomalies() function
+// for ideas on what can be gleaned from these statistics about collections.
+type JStatRec struct {
+ Source CollectionSource
+ MaxSize int
+ CurSize int
+ Gets int
+ GetHits int
+ GetMisses int
+ GetHashConflicts int
+ GetNoEnt int
+ Puts int
+ PutHits int
+ PutMisses int
+ PutHashConflicts int
+ MaxSlotSize int
+ Description string
+ CreateStack []byte
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/token.go b/vendor/github.com/antlr4-go/antlr/v4/token.go
new file mode 100644
index 0000000..9670efb
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/token.go
@@ -0,0 +1,213 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "strconv"
+ "strings"
+)
+
+type TokenSourceCharStreamPair struct {
+ tokenSource TokenSource
+ charStream CharStream
+}
+
+// A token has properties: text, type, line, character position in the line
+// (so we can ignore tabs), token channel, index, and source from which
+// we obtained this token.
+
+type Token interface {
+ GetSource() *TokenSourceCharStreamPair
+ GetTokenType() int
+ GetChannel() int
+ GetStart() int
+ GetStop() int
+ GetLine() int
+ GetColumn() int
+
+ GetText() string
+ SetText(s string)
+
+ GetTokenIndex() int
+ SetTokenIndex(v int)
+
+ GetTokenSource() TokenSource
+ GetInputStream() CharStream
+
+ String() string
+}
+
+type BaseToken struct {
+ source *TokenSourceCharStreamPair
+ tokenType int // token type of the token
+ channel int // The parser ignores everything not on DEFAULT_CHANNEL
+ start int // optional return -1 if not implemented.
+ stop int // optional return -1 if not implemented.
+ tokenIndex int // from 0..n-1 of the token object in the input stream
+ line int // line=1..n of the 1st character
+ column int // beginning of the line at which it occurs, 0..n-1
+ text string // text of the token.
+ readOnly bool
+}
+
+const (
+ TokenInvalidType = 0
+
+ // TokenEpsilon - during lookahead operations, this "token" signifies we hit the rule end [ATN] state
+ // and did not follow it despite needing to.
+ TokenEpsilon = -2
+
+ TokenMinUserTokenType = 1
+
+ TokenEOF = -1
+
+ // TokenDefaultChannel is the default channel upon which tokens are sent to the parser.
+ //
+ // All tokens go to the parser (unless [Skip] is called in the lexer rule)
+ // on a particular "channel". The parser tunes to a particular channel
+ // so that whitespace etc... can go to the parser on a "hidden" channel.
+ TokenDefaultChannel = 0
+
+ // TokenHiddenChannel defines the normal hidden channel - the parser will not see tokens that are not on [TokenDefaultChannel].
+ //
+ // Anything on a different channel than TokenDefaultChannel is not parsed by parser.
+ TokenHiddenChannel = 1
+)
+
+func (b *BaseToken) GetChannel() int {
+ return b.channel
+}
+
+func (b *BaseToken) GetStart() int {
+ return b.start
+}
+
+func (b *BaseToken) GetStop() int {
+ return b.stop
+}
+
+func (b *BaseToken) GetLine() int {
+ return b.line
+}
+
+func (b *BaseToken) GetColumn() int {
+ return b.column
+}
+
+func (b *BaseToken) GetTokenType() int {
+ return b.tokenType
+}
+
+func (b *BaseToken) GetSource() *TokenSourceCharStreamPair {
+ return b.source
+}
+
+func (b *BaseToken) GetTokenIndex() int {
+ return b.tokenIndex
+}
+
+func (b *BaseToken) SetTokenIndex(v int) {
+ b.tokenIndex = v
+}
+
+func (b *BaseToken) GetTokenSource() TokenSource {
+ return b.source.tokenSource
+}
+
+func (b *BaseToken) GetInputStream() CharStream {
+ return b.source.charStream
+}
+
+type CommonToken struct {
+ BaseToken
+}
+
+func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start, stop int) *CommonToken {
+
+ t := &CommonToken{
+ BaseToken: BaseToken{
+ source: source,
+ tokenType: tokenType,
+ channel: channel,
+ start: start,
+ stop: stop,
+ tokenIndex: -1,
+ },
+ }
+
+ if t.source.tokenSource != nil {
+ t.line = source.tokenSource.GetLine()
+ t.column = source.tokenSource.GetCharPositionInLine()
+ } else {
+ t.column = -1
+ }
+ return t
+}
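+
+// Editor's illustrative sketch (not part of the upstream runtime): building a
+// detached token with an empty source pair; line information is unavailable,
+// so the column defaults to -1.
+func exampleNewCommonToken() *CommonToken {
+ src := &TokenSourceCharStreamPair{} // no lexer behind this token
+ t := NewCommonToken(src, TokenMinUserTokenType, TokenDefaultChannel, 0, 4)
+ t.SetText("hello")
+ return t // t.String() renders as [@-1,0:4='hello',<1>,0:-1]
+}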
+
+// clone constructs a new CommonToken as a copy of this token. The new token
+// shares the source pair with the original and copies its type, channel,
+// bounds, index, position, and text.
+func (c *CommonToken) clone() *CommonToken {
+ t := NewCommonToken(c.source, c.tokenType, c.channel, c.start, c.stop)
+ t.tokenIndex = c.GetTokenIndex()
+ t.line = c.GetLine()
+ t.column = c.GetColumn()
+ t.text = c.GetText()
+ return t
+}
+
+func (c *CommonToken) GetText() string {
+ if c.text != "" {
+ return c.text
+ }
+ input := c.GetInputStream()
+ if input == nil {
+ return ""
+ }
+ n := input.Size()
+ if c.start < n && c.stop < n {
+ return input.GetTextFromInterval(NewInterval(c.start, c.stop))
+ }
+ return ""
+}
+
+func (c *CommonToken) SetText(text string) {
+ c.text = text
+}
+
+func (c *CommonToken) String() string {
+ txt := c.GetText()
+ if txt != "" {
+ txt = strings.Replace(txt, "\n", "\\n", -1)
+ txt = strings.Replace(txt, "\r", "\\r", -1)
+ txt = strings.Replace(txt, "\t", "\\t", -1)
+ } else {
+ txt = ""
+ }
+
+ var ch string
+ if c.channel > 0 {
+ ch = ",channel=" + strconv.Itoa(c.channel)
+ } else {
+ ch = ""
+ }
+
+ return "[@" + strconv.Itoa(c.tokenIndex) + "," + strconv.Itoa(c.start) + ":" + strconv.Itoa(c.stop) + "='" +
+ txt + "',<" + strconv.Itoa(c.tokenType) + ">" +
+ ch + "," + strconv.Itoa(c.line) + ":" + strconv.Itoa(c.column) + "]"
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/token_source.go b/vendor/github.com/antlr4-go/antlr/v4/token_source.go
new file mode 100644
index 0000000..a3f36ea
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/token_source.go
@@ -0,0 +1,17 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+type TokenSource interface {
+ NextToken() Token
+ Skip()
+ More()
+ GetLine() int
+ GetCharPositionInLine() int
+ GetInputStream() CharStream
+ GetSourceName() string
+ setTokenFactory(factory TokenFactory)
+ GetTokenFactory() TokenFactory
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/token_stream.go b/vendor/github.com/antlr4-go/antlr/v4/token_stream.go
new file mode 100644
index 0000000..bf4ff66
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/token_stream.go
@@ -0,0 +1,21 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+type TokenStream interface {
+ IntStream
+
+ LT(k int) Token
+ Reset()
+
+ Get(index int) Token
+ GetTokenSource() TokenSource
+ SetTokenSource(TokenSource)
+
+ GetAllText() string
+ GetTextFromInterval(Interval) string
+ GetTextFromRuleContext(RuleContext) string
+ GetTextFromTokens(Token, Token) string
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go b/vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go
new file mode 100644
index 0000000..ccf59b4
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go
@@ -0,0 +1,662 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "bytes"
+ "fmt"
+)
+
+// Useful for rewriting out a buffered input token stream after doing some
+// augmentation or other manipulations on it.
+//
+// You can insert stuff, replace, and delete chunks. Note that the operations
+// are done lazily - only if you convert the buffer to a string with one of
+// the GetText methods. This is very efficient because you are not
+// moving data around all the time. As the buffer of tokens is converted to
+// strings, the GetText method(s) scan the input token stream and
+// check to see if there is an operation at the current index. If so, the
+// operation is done and then normal string rendering continues on the
+// buffer. This is like having multiple Turing machine instruction streams
+// (programs) operating on a single input tape. :)
+//
+// This rewriter makes no modifications to the token stream. It does not ask the
+// stream to fill itself up nor does it advance the input cursor. The token
+// stream index will return the same value before and after any GetText call.
+//
+// The rewriter only works on tokens that you have in the buffer and ignores the
+// current input cursor. If you are buffering tokens on-demand, calling
+// GetText halfway through the input will only do rewrites for those
+// tokens in the first half of the file.
+//
+// Since the operations are done lazily at GetText-time, operations do
+// not screw up the token index values. That is, an insert operation at token
+// index i does not change the index values for tokens i+1..n-1.
+//
+// Because operations never actually alter the buffer, you may always get the
+// original token stream back without undoing anything. Since the instructions
+// are queued up, you can easily simulate transactions and roll back any changes
+// if there is an error just by removing instructions. For example (shown in the
+// original Java runtime syntax; the Go API offers equivalent methods such as
+// InsertAfterToken and the GetText variants):
+//
+// CharStream input = new ANTLRFileStream("input");
+// TLexer lex = new TLexer(input);
+// CommonTokenStream tokens = new CommonTokenStream(lex);
+// T parser = new T(tokens);
+// TokenStreamRewriter rewriter = new TokenStreamRewriter(tokens);
+// parser.startRule();
+//
+// Then in the rules, you can execute (assuming rewriter is visible):
+//
+// Token t,u;
+// ...
+// rewriter.insertAfter(t, "text to put after t");
+// rewriter.insertAfter(u, "text after u");
+// System.out.println(rewriter.getText());
+//
+// You can also have multiple "instruction streams" and get multiple rewrites
+// from a single pass over the input. Just name the instruction streams and use
+// that name again when printing the buffer. This could be useful for generating
+// a C file and also its header file - all from the same buffer:
+//
+// rewriter.insertAfter("pass1", t, "text to put after t");
+// rewriter.insertAfter("pass2", u, "text after u");
+// System.out.println(rewriter.getText("pass1"));
+// System.out.println(rewriter.getText("pass2"));
+//
+// If you don't use named rewrite streams, a "default" stream is used as the
+// first example shows.
+
+const (
+ DefaultProgramName = "default"
+ ProgramInitSize = 100
+ MinTokenIndex = 0
+)
+
+// Define the rewrite operation hierarchy
+
+type RewriteOperation interface {
+
+ // Execute the rewrite operation by possibly adding to the buffer.
+ // Return the index of the next token to operate on.
+ Execute(buffer *bytes.Buffer) int
+ String() string
+ GetInstructionIndex() int
+ GetIndex() int
+ GetText() string
+ GetOpName() string
+ GetTokens() TokenStream
+ SetInstructionIndex(val int)
+ SetIndex(int)
+ SetText(string)
+ SetOpName(string)
+ SetTokens(TokenStream)
+}
+
+type BaseRewriteOperation struct {
+ //Current index of rewrites list
+ instructionIndex int
+ //Token buffer index
+ index int
+ //Substitution text
+ text string
+ //Actual operation name
+ opName string
+ //Pointer to token steam
+ tokens TokenStream
+}
+
+func (op *BaseRewriteOperation) GetInstructionIndex() int {
+ return op.instructionIndex
+}
+
+func (op *BaseRewriteOperation) GetIndex() int {
+ return op.index
+}
+
+func (op *BaseRewriteOperation) GetText() string {
+ return op.text
+}
+
+func (op *BaseRewriteOperation) GetOpName() string {
+ return op.opName
+}
+
+func (op *BaseRewriteOperation) GetTokens() TokenStream {
+ return op.tokens
+}
+
+func (op *BaseRewriteOperation) SetInstructionIndex(val int) {
+ op.instructionIndex = val
+}
+
+func (op *BaseRewriteOperation) SetIndex(val int) {
+ op.index = val
+}
+
+func (op *BaseRewriteOperation) SetText(val string) {
+ op.text = val
+}
+
+func (op *BaseRewriteOperation) SetOpName(val string) {
+ op.opName = val
+}
+
+func (op *BaseRewriteOperation) SetTokens(val TokenStream) {
+ op.tokens = val
+}
+
+func (op *BaseRewriteOperation) Execute(_ *bytes.Buffer) int {
+ return op.index
+}
+
+func (op *BaseRewriteOperation) String() string {
+ return fmt.Sprintf("<%s@%d:\"%s\">",
+ op.opName,
+ op.tokens.Get(op.GetIndex()),
+ op.text,
+ )
+
+}
+
+type InsertBeforeOp struct {
+ BaseRewriteOperation
+}
+
+func NewInsertBeforeOp(index int, text string, stream TokenStream) *InsertBeforeOp {
+ return &InsertBeforeOp{BaseRewriteOperation: BaseRewriteOperation{
+ index: index,
+ text: text,
+ opName: "InsertBeforeOp",
+ tokens: stream,
+ }}
+}
+
+func (op *InsertBeforeOp) Execute(buffer *bytes.Buffer) int {
+ buffer.WriteString(op.text)
+ if op.tokens.Get(op.index).GetTokenType() != TokenEOF {
+ buffer.WriteString(op.tokens.Get(op.index).GetText())
+ }
+ return op.index + 1
+}
+
+func (op *InsertBeforeOp) String() string {
+ return op.BaseRewriteOperation.String()
+}
+
+// InsertAfterOp distinguishes between insert after/before to do the "insert after" instructions
+// first and then the "insert before" instructions at same index. Implementation
+// of "insert after" is "insert before index+1".
+type InsertAfterOp struct {
+ BaseRewriteOperation
+}
+
+func NewInsertAfterOp(index int, text string, stream TokenStream) *InsertAfterOp {
+ return &InsertAfterOp{
+ BaseRewriteOperation: BaseRewriteOperation{
+ index: index + 1, // insert after index == insert before index+1
+ text: text,
+ opName: "InsertAfterOp",
+ tokens: stream,
+ },
+ }
+}
+
+func (op *InsertAfterOp) Execute(buffer *bytes.Buffer) int {
+ buffer.WriteString(op.text)
+ if op.tokens.Get(op.index).GetTokenType() != TokenEOF {
+ buffer.WriteString(op.tokens.Get(op.index).GetText())
+ }
+ return op.index + 1
+}
+
+func (op *InsertAfterOp) String() string {
+ return op.BaseRewriteOperation.String()
+}
+
+// ReplaceOp replaces the range of tokens from x..y with the given text; a
+// single instruction covers all (y-x)+1 tokens in the range.
+type ReplaceOp struct {
+ BaseRewriteOperation
+ LastIndex int
+}
+
+func NewReplaceOp(from, to int, text string, stream TokenStream) *ReplaceOp {
+ return &ReplaceOp{
+ BaseRewriteOperation: BaseRewriteOperation{
+ index: from,
+ text: text,
+ opName: "ReplaceOp",
+ tokens: stream,
+ },
+ LastIndex: to,
+ }
+}
+
+func (op *ReplaceOp) Execute(buffer *bytes.Buffer) int {
+ if op.text != "" {
+ buffer.WriteString(op.text)
+ }
+ return op.LastIndex + 1
+}
+
+func (op *ReplaceOp) String() string {
+ if op.text == "" {
+ return fmt.Sprintf("",
+ op.tokens.Get(op.index), op.tokens.Get(op.LastIndex))
+ }
+ return fmt.Sprintf("",
+ op.tokens.Get(op.index), op.tokens.Get(op.LastIndex), op.text)
+}
+
+type TokenStreamRewriter struct {
+ //Our source stream
+ tokens TokenStream
+ // You may have multiple, named streams of rewrite operations.
+ // I'm calling these things "programs."
+ // Maps String (name) → rewrite (List)
+ programs map[string][]RewriteOperation
+ lastRewriteTokenIndexes map[string]int
+}
+
+func NewTokenStreamRewriter(tokens TokenStream) *TokenStreamRewriter {
+ return &TokenStreamRewriter{
+ tokens: tokens,
+ programs: map[string][]RewriteOperation{
+ DefaultProgramName: make([]RewriteOperation, 0, ProgramInitSize),
+ },
+ lastRewriteTokenIndexes: map[string]int{},
+ }
+}
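+
+// Editor's illustrative sketch (not part of the upstream runtime): queueing
+// lazy edits against a stream assumed to hold at least seven tokens. Nothing
+// touches the stream itself; the instructions are applied only when the
+// rewritten text is rendered via the GetText methods later in this file.
+func exampleRewriterUsage(tokens TokenStream) *TokenStreamRewriter {
+ rw := NewTokenStreamRewriter(tokens)
+ rw.InsertBeforeDefault(0, "// generated\n") // prepend text to token 0
+ rw.ReplaceDefaultPos(3, "newText") // replace token 3
+ rw.DeleteDefault(5, 6) // drop tokens 5..6
+ return rw
+}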
+
+func (tsr *TokenStreamRewriter) GetTokenStream() TokenStream {
+ return tsr.tokens
+}
+
+// Rollback the instruction stream for a program so that
+// the indicated instruction (via instructionIndex) is no
+// longer in the stream. UNTESTED!
+func (tsr *TokenStreamRewriter) Rollback(programName string, instructionIndex int) {
+ is, ok := tsr.programs[programName]
+ if ok {
+ tsr.programs[programName] = is[MinTokenIndex:instructionIndex]
+ }
+}
+
+func (tsr *TokenStreamRewriter) RollbackDefault(instructionIndex int) {
+ tsr.Rollback(DefaultProgramName, instructionIndex)
+}
+
+// DeleteProgram Reset the program so that no instructions exist
+func (tsr *TokenStreamRewriter) DeleteProgram(programName string) {
+ tsr.Rollback(programName, MinTokenIndex) // TODO: double test on that because the lower bound is not included
+}
+
+func (tsr *TokenStreamRewriter) DeleteProgramDefault() {
+ tsr.DeleteProgram(DefaultProgramName)
+}
+
+func (tsr *TokenStreamRewriter) InsertAfter(programName string, index int, text string) {
+ // to insert after, just insert before next index (even if past end)
+ var op RewriteOperation = NewInsertAfterOp(index, text, tsr.tokens)
+ rewrites := tsr.GetProgram(programName)
+ op.SetInstructionIndex(len(rewrites))
+ tsr.AddToProgram(programName, op)
+}
+
+func (tsr *TokenStreamRewriter) InsertAfterDefault(index int, text string) {
+ tsr.InsertAfter(DefaultProgramName, index, text)
+}
+
+func (tsr *TokenStreamRewriter) InsertAfterToken(programName string, token Token, text string) {
+ tsr.InsertAfter(programName, token.GetTokenIndex(), text)
+}
+
+func (tsr *TokenStreamRewriter) InsertBefore(programName string, index int, text string) {
+ var op RewriteOperation = NewInsertBeforeOp(index, text, tsr.tokens)
+ rewrites := tsr.GetProgram(programName)
+ op.SetInstructionIndex(len(rewrites))
+ tsr.AddToProgram(programName, op)
+}
+
+func (tsr *TokenStreamRewriter) InsertBeforeDefault(index int, text string) {
+ tsr.InsertBefore(DefaultProgramName, index, text)
+}
+
+func (tsr *TokenStreamRewriter) InsertBeforeToken(programName string, token Token, text string) {
+ tsr.InsertBefore(programName, token.GetTokenIndex(), text)
+}
+
+func (tsr *TokenStreamRewriter) Replace(programName string, from, to int, text string) {
+ if from > to || from < 0 || to < 0 || to >= tsr.tokens.Size() {
+ panic(fmt.Sprintf("replace: range invalid: %d..%d(size=%d)",
+ from, to, tsr.tokens.Size()))
+ }
+ var op RewriteOperation = NewReplaceOp(from, to, text, tsr.tokens)
+ rewrites := tsr.GetProgram(programName)
+ op.SetInstructionIndex(len(rewrites))
+ tsr.AddToProgram(programName, op)
+}
+
+func (tsr *TokenStreamRewriter) ReplaceDefault(from, to int, text string) {
+ tsr.Replace(DefaultProgramName, from, to, text)
+}
+
+func (tsr *TokenStreamRewriter) ReplaceDefaultPos(index int, text string) {
+ tsr.ReplaceDefault(index, index, text)
+}
+
+func (tsr *TokenStreamRewriter) ReplaceToken(programName string, from, to Token, text string) {
+ tsr.Replace(programName, from.GetTokenIndex(), to.GetTokenIndex(), text)
+}
+
+func (tsr *TokenStreamRewriter) ReplaceTokenDefault(from, to Token, text string) {
+ tsr.ReplaceToken(DefaultProgramName, from, to, text)
+}
+
+func (tsr *TokenStreamRewriter) ReplaceTokenDefaultPos(index Token, text string) {
+ tsr.ReplaceTokenDefault(index, index, text)
+}
+
+func (tsr *TokenStreamRewriter) Delete(programName string, from, to int) {
+ tsr.Replace(programName, from, to, "")
+}
+
+func (tsr *TokenStreamRewriter) DeleteDefault(from, to int) {
+ tsr.Delete(DefaultProgramName, from, to)
+}
+
+func (tsr *TokenStreamRewriter) DeleteDefaultPos(index int) {
+ tsr.DeleteDefault(index, index)
+}
+
+func (tsr *TokenStreamRewriter) DeleteToken(programName string, from, to Token) {
+ tsr.ReplaceToken(programName, from, to, "")
+}
+
+func (tsr *TokenStreamRewriter) DeleteTokenDefault(from, to Token) {
+ tsr.DeleteToken(DefaultProgramName, from, to)
+}
+
+func (tsr *TokenStreamRewriter) GetLastRewriteTokenIndex(programName string) int {
+ i, ok := tsr.lastRewriteTokenIndexes[programName]
+ if !ok {
+ return -1
+ }
+ return i
+}
+
+func (tsr *TokenStreamRewriter) GetLastRewriteTokenIndexDefault() int {
+ return tsr.GetLastRewriteTokenIndex(DefaultProgramName)
+}
+
+func (tsr *TokenStreamRewriter) SetLastRewriteTokenIndex(programName string, i int) {
+ tsr.lastRewriteTokenIndexes[programName] = i
+}
+
+func (tsr *TokenStreamRewriter) InitializeProgram(name string) []RewriteOperation {
+ is := make([]RewriteOperation, 0, ProgramInitSize)
+ tsr.programs[name] = is
+ return is
+}
+
+func (tsr *TokenStreamRewriter) AddToProgram(name string, op RewriteOperation) {
+ is := tsr.GetProgram(name)
+ is = append(is, op)
+ tsr.programs[name] = is
+}
+
+func (tsr *TokenStreamRewriter) GetProgram(name string) []RewriteOperation {
+ is, ok := tsr.programs[name]
+ if !ok {
+ is = tsr.InitializeProgram(name)
+ }
+ return is
+}
+
+// GetTextDefault returns the text from the original tokens altered per the
+// instructions given to this rewriter.
+func (tsr *TokenStreamRewriter) GetTextDefault() string {
+ return tsr.GetText(
+ DefaultProgramName,
+ NewInterval(0, tsr.tokens.Size()-1))
+}
+
+// GetText returns the text from the original tokens altered per the
+// instructions given to this rewriter.
+func (tsr *TokenStreamRewriter) GetText(programName string, interval Interval) string {
+ rewrites := tsr.programs[programName]
+ start := interval.Start
+ stop := interval.Stop
+ // ensure start/end are in range
+ stop = min(stop, tsr.tokens.Size()-1)
+ start = max(start, 0)
+ if len(rewrites) == 0 {
+ return tsr.tokens.GetTextFromInterval(interval) // no instructions to execute
+ }
+ buf := bytes.Buffer{}
+ // First, optimize instruction stream
+ indexToOp := reduceToSingleOperationPerIndex(rewrites)
+ // Walk buffer, executing instructions and emitting tokens
+ for i := start; i <= stop && i < tsr.tokens.Size(); {
+ op := indexToOp[i]
+ delete(indexToOp, i) // remove so any left have index size-1
+ t := tsr.tokens.Get(i)
+ if op == nil {
+ // no operation at that index, just dump token
+ if t.GetTokenType() != TokenEOF {
+ buf.WriteString(t.GetText())
+ }
+ i++ // move to next token
+ } else {
+ i = op.Execute(&buf) // execute operation and skip
+ }
+ }
+ // include stuff after end if it's last index in buffer
+ // So, if they did an insertAfter(lastValidIndex, "foo"), include
+ // foo if end==lastValidIndex.
+ if stop == tsr.tokens.Size()-1 {
+ // Scan any remaining operations after last token
+ // should be included (they will be inserts).
+ for _, op := range indexToOp {
+ if op.GetIndex() >= tsr.tokens.Size()-1 {
+ buf.WriteString(op.GetText())
+ }
+ }
+ }
+ return buf.String()
+}
+
+// reduceToSingleOperationPerIndex combines operations and reports invalid operations (like
+// overlapping replaces that are not completely nested). Inserts at the
+// same index need to be combined, etc.
+//
+// Here are the cases:
+//
+// I.i.u I.j.v leave alone, non-overlapping
+// I.i.u I.i.v combine: Iivu
+//
+// R.i-j.u R.x-y.v | i-j in x-y delete first R
+// R.i-j.u R.i-j.v delete first R
+// R.i-j.u R.x-y.v | x-y in i-j ERROR
+// R.i-j.u R.x-y.v | boundaries overlap ERROR
+//
+// Delete is a special case of replace (text == ""):
+// D.i-j.u D.x-y.v | boundaries overlap combine to min(left)..max(right)
+//
+// I.i.u R.x-y.v | i in (x+1)-y delete I (since insert before
+// we're not deleting i)
+// I.i.u R.x-y.v | i not in (x+1)-y leave alone, non-overlapping
+// R.x-y.v I.i.u | i in x-y ERROR
+// R.x-y.v I.x.u R.x-y.uv (combine, delete I)
+// R.x-y.v I.i.u | i not in x-y leave alone, non-overlapping
+//
+// I.i.u = insert u before op @ index i
+// R.x-y.u = replace x-y indexed tokens with u
+//
+// First we need to examine replaces. For any replace op:
+//
+// 1. wipe out any insertions before op within that range.
+// 2. drop any replace op before that is contained completely within
+// that range.
+// 3. panic upon boundary overlap with any previous replace.
+//
+// Then we can deal with inserts:
+//
+// 1. for any inserts at the same index, combine even if not adjacent.
+// 2. for any prior replace with the same left boundary, combine this
+// insert with the replace and delete the insert.
+// 3. panic if the index is in the same range as a previous replace.
+//
+// Don't actually delete; set the op to nil in the list. It is easier to walk the list.
+// Later we can panic as we add to the index → op map.
+//
+// Note that I.2 R.2-2 will wipe out I.2 even though, technically, the
+// inserted stuff would be before the 'replace' range. But, if you
+// add tokens in front of a method body '{' and then delete the method
+// body, I think the stuff before the '{' you added should disappear too.
+//
+// The func returns a map from token index to operation.
+func reduceToSingleOperationPerIndex(rewrites []RewriteOperation) map[int]RewriteOperation {
+ // WALK REPLACES
+ for i := 0; i < len(rewrites); i++ {
+ op := rewrites[i]
+ if op == nil {
+ continue
+ }
+ rop, ok := op.(*ReplaceOp)
+ if !ok {
+ continue
+ }
+ // Wipe prior inserts within range
+ for j := 0; j < i && j < len(rewrites); j++ {
+ if iop, ok := rewrites[j].(*InsertBeforeOp); ok {
+ if iop.index == rop.index {
+ // E.g., insert before 2, delete 2..2; update replace
+ // text to include insert before, kill insert
+ rewrites[iop.instructionIndex] = nil
+ if rop.text != "" {
+ rop.text = iop.text + rop.text
+ } else {
+ rop.text = iop.text
+ }
+ } else if iop.index > rop.index && iop.index <= rop.LastIndex {
+ // delete insert as it's a no-op.
+ rewrites[iop.instructionIndex] = nil
+ }
+ }
+ }
+ // Drop any prior replaces contained within
+ for j := 0; j < i && j < len(rewrites); j++ {
+ if prevop, ok := rewrites[j].(*ReplaceOp); ok {
+ if prevop.index >= rop.index && prevop.LastIndex <= rop.LastIndex {
+ // delete replace as it's a no-op.
+ rewrites[prevop.instructionIndex] = nil
+ continue
+ }
+ // throw exception unless disjoint or identical
+ disjoint := prevop.LastIndex < rop.index || prevop.index > rop.LastIndex
+ // Delete is a special case of replace (text == ""):
+ // D.i-j.u D.x-y.v | boundaries overlap combine to min(left)..max(right)
+ if prevop.text == "" && rop.text == "" && !disjoint {
+ rewrites[prevop.instructionIndex] = nil
+ rop.index = min(prevop.index, rop.index)
+ rop.LastIndex = max(prevop.LastIndex, rop.LastIndex)
+ } else if !disjoint {
+ panic("replace op boundaries of " + rop.String() + " overlap with previous " + prevop.String())
+ }
+ }
+ }
+ }
+ // WALK INSERTS
+ for i := 0; i < len(rewrites); i++ {
+ op := rewrites[i]
+ if op == nil {
+ continue
+ }
+ // hack to replicate inheritance via composition
+ _, iok := rewrites[i].(*InsertBeforeOp)
+ _, aok := rewrites[i].(*InsertAfterOp)
+ if !iok && !aok {
+ continue
+ }
+ iop := rewrites[i]
+ // combine current insert with prior if any at same index
+ // deviating a bit from TokenStreamRewriter.java - hard to incorporate inheritance logic
+ for j := 0; j < i && j < len(rewrites); j++ {
+ if nextIop, ok := rewrites[j].(*InsertAfterOp); ok {
+ if nextIop.index == iop.GetIndex() {
+ iop.SetText(nextIop.text + iop.GetText())
+ rewrites[j] = nil
+ }
+ }
+ if prevIop, ok := rewrites[j].(*InsertBeforeOp); ok {
+ if prevIop.index == iop.GetIndex() {
+ iop.SetText(iop.GetText() + prevIop.text)
+ rewrites[prevIop.instructionIndex] = nil
+ }
+ }
+ }
+ // look for replaces where iop.index is in range; error
+ for j := 0; j < i && j < len(rewrites); j++ {
+ if rop, ok := rewrites[j].(*ReplaceOp); ok {
+ if iop.GetIndex() == rop.index {
+ rop.text = iop.GetText() + rop.text
+ rewrites[i] = nil
+ continue
+ }
+ if iop.GetIndex() >= rop.index && iop.GetIndex() <= rop.LastIndex {
+ panic("insert op " + iop.String() + " within boundaries of previous " + rop.String())
+ }
+ }
+ }
+ }
+ m := map[int]RewriteOperation{}
+ for i := 0; i < len(rewrites); i++ {
+ op := rewrites[i]
+ if op == nil {
+ continue
+ }
+ if _, ok := m[op.GetIndex()]; ok {
+ panic("should only be one op per index")
+ }
+ m[op.GetIndex()] = op
+ }
+ return m
+}
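+
+// An illustrative sketch (comment only) of the reduction: given the program
+//
+//	InsertBeforeDefault(2, "a") // I.2.a
+//	ReplaceDefault(2, 4, "x")   // R.2-4.x
+//
+// the insert at index 2 is folded into the replace, leaving a single
+// R.2-4 op with text "ax" in the resulting index → op map.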
+
+/*
+ min and max work around Go's lack of overloads (Go 1.21 later added
+ built-in generic min and max).
+*/
+
+func max(a, b int) int {
+ if a > b {
+ return a
+ } else {
+ return b
+ }
+}
+func min(a, b int) int {
+ if a < b {
+ return a
+ } else {
+ return b
+ }
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/trace_listener.go b/vendor/github.com/antlr4-go/antlr/v4/trace_listener.go
new file mode 100644
index 0000000..7b663bf
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/trace_listener.go
@@ -0,0 +1,32 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import "fmt"
+
+type TraceListener struct {
+ parser *BaseParser
+}
+
+func NewTraceListener(parser *BaseParser) *TraceListener {
+ tl := new(TraceListener)
+ tl.parser = parser
+ return tl
+}
+
+func (t *TraceListener) VisitErrorNode(_ ErrorNode) {
+}
+
+func (t *TraceListener) EnterEveryRule(ctx ParserRuleContext) {
+ fmt.Println("enter " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser.input.LT(1).GetText())
+}
+
+func (t *TraceListener) VisitTerminal(node TerminalNode) {
+ fmt.Println("consume " + fmt.Sprint(node.GetSymbol()) + " rule " + t.parser.GetRuleNames()[t.parser.ctx.GetRuleIndex()])
+}
+
+func (t *TraceListener) ExitEveryRule(ctx ParserRuleContext) {
+ fmt.Println("exit " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser.input.LT(1).GetText())
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/transition.go b/vendor/github.com/antlr4-go/antlr/v4/transition.go
new file mode 100644
index 0000000..313b0fc
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/transition.go
@@ -0,0 +1,439 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// atom, set, epsilon, action, predicate, rule transitions.
+//
+// This is a one-way link. It emanates from a state (usually via a list of
+// transitions) and has a target state.
+//
+// Since we never have to change the ATN transitions once we construct the
+// ATN, we can construct them as fixed, specific types. The DFA, by contrast,
+// must update its labels as it adds edges to states. We'll use the term Edge
+// for the DFA to distinguish them from ATN transitions.
+
+type Transition interface {
+ getTarget() ATNState
+ setTarget(ATNState)
+ getIsEpsilon() bool
+ getLabel() *IntervalSet
+ getSerializationType() int
+ Matches(int, int, int) bool
+}
+
+type BaseTransition struct {
+ target ATNState
+ isEpsilon bool
+ label int
+ intervalSet *IntervalSet
+ serializationType int
+}
+
+func NewBaseTransition(target ATNState) *BaseTransition {
+
+ if target == nil {
+ panic("target cannot be nil.")
+ }
+
+ t := new(BaseTransition)
+
+ t.target = target
+ // Are we epsilon, action, sempred?
+ t.isEpsilon = false
+ t.intervalSet = nil
+
+ return t
+}
+
+func (t *BaseTransition) getTarget() ATNState {
+ return t.target
+}
+
+func (t *BaseTransition) setTarget(s ATNState) {
+ t.target = s
+}
+
+func (t *BaseTransition) getIsEpsilon() bool {
+ return t.isEpsilon
+}
+
+func (t *BaseTransition) getLabel() *IntervalSet {
+ return t.intervalSet
+}
+
+func (t *BaseTransition) getSerializationType() int {
+ return t.serializationType
+}
+
+func (t *BaseTransition) Matches(_, _, _ int) bool {
+ panic("Not implemented")
+}
+
+const (
+ TransitionEPSILON = 1
+ TransitionRANGE = 2
+ TransitionRULE = 3
+ TransitionPREDICATE = 4 // e.g., {isType(input.LT(1))}?
+ TransitionATOM = 5
+ TransitionACTION = 6
+ TransitionSET = 7 // ~(A|B) or ~atom, wildcard, which convert to next 2
+ TransitionNOTSET = 8
+ TransitionWILDCARD = 9
+ TransitionPRECEDENCE = 10
+)
+
+//goland:noinspection GoUnusedGlobalVariable
+var TransitionserializationNames = []string{
+ "INVALID",
+ "EPSILON",
+ "RANGE",
+ "RULE",
+ "PREDICATE",
+ "ATOM",
+ "ACTION",
+ "SET",
+ "NOT_SET",
+ "WILDCARD",
+ "PRECEDENCE",
+}
+
+//var TransitionserializationTypes struct {
+// EpsilonTransition int
+// RangeTransition int
+// RuleTransition int
+// PredicateTransition int
+// AtomTransition int
+// ActionTransition int
+// SetTransition int
+// NotSetTransition int
+// WildcardTransition int
+// PrecedencePredicateTransition int
+//}{
+// TransitionEPSILON,
+// TransitionRANGE,
+// TransitionRULE,
+// TransitionPREDICATE,
+// TransitionATOM,
+// TransitionACTION,
+// TransitionSET,
+// TransitionNOTSET,
+// TransitionWILDCARD,
+// TransitionPRECEDENCE
+//}
+
+// AtomTransition
+// TODO: make all transitions sets? no, should remove set edges
+type AtomTransition struct {
+ BaseTransition
+}
+
+func NewAtomTransition(target ATNState, intervalSet int) *AtomTransition {
+ t := &AtomTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ serializationType: TransitionATOM,
+ label: intervalSet,
+ isEpsilon: false,
+ },
+ }
+ t.intervalSet = t.makeLabel()
+
+ return t
+}
+
+func (t *AtomTransition) makeLabel() *IntervalSet {
+ s := NewIntervalSet()
+ s.addOne(t.label)
+ return s
+}
+
+func (t *AtomTransition) Matches(symbol, _, _ int) bool {
+ return t.label == symbol
+}
+
+func (t *AtomTransition) String() string {
+ return strconv.Itoa(t.label)
+}
+
+type RuleTransition struct {
+ BaseTransition
+ followState ATNState
+ ruleIndex, precedence int
+}
+
+func NewRuleTransition(ruleStart ATNState, ruleIndex, precedence int, followState ATNState) *RuleTransition {
+ return &RuleTransition{
+ BaseTransition: BaseTransition{
+ target: ruleStart,
+ isEpsilon: true,
+ serializationType: TransitionRULE,
+ },
+ ruleIndex: ruleIndex,
+ precedence: precedence,
+ followState: followState,
+ }
+}
+
+func (t *RuleTransition) Matches(_, _, _ int) bool {
+ return false
+}
+
+type EpsilonTransition struct {
+ BaseTransition
+ outermostPrecedenceReturn int
+}
+
+func NewEpsilonTransition(target ATNState, outermostPrecedenceReturn int) *EpsilonTransition {
+ return &EpsilonTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ serializationType: TransitionEPSILON,
+ isEpsilon: true,
+ },
+ outermostPrecedenceReturn: outermostPrecedenceReturn,
+ }
+}
+
+func (t *EpsilonTransition) Matches(_, _, _ int) bool {
+ return false
+}
+
+func (t *EpsilonTransition) String() string {
+ return "epsilon"
+}
+
+type RangeTransition struct {
+ BaseTransition
+ start, stop int
+}
+
+func NewRangeTransition(target ATNState, start, stop int) *RangeTransition {
+ t := &RangeTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ serializationType: TransitionRANGE,
+ isEpsilon: false,
+ },
+ start: start,
+ stop: stop,
+ }
+ t.intervalSet = t.makeLabel()
+ return t
+}
+
+func (t *RangeTransition) makeLabel() *IntervalSet {
+ s := NewIntervalSet()
+ s.addRange(t.start, t.stop)
+ return s
+}
+
+func (t *RangeTransition) Matches(symbol, _, _ int) bool {
+ return symbol >= t.start && symbol <= t.stop
+}
+
+func (t *RangeTransition) String() string {
+ var sb strings.Builder
+ sb.WriteByte('\'')
+ sb.WriteRune(rune(t.start))
+ sb.WriteString("'..'")
+ sb.WriteRune(rune(t.stop))
+ sb.WriteByte('\'')
+ return sb.String()
+}
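+
+// An illustrative sketch (comment only; `target` is a hypothetical ATNState):
+//
+//	t := NewRangeTransition(target, 'a', 'z')
+//	t.Matches('m', 0, 0xFFFF) // true: 'm' lies in 'a'..'z'
+//	t.String()                // "'a'..'z'"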
+
+type AbstractPredicateTransition interface {
+ Transition
+ IAbstractPredicateTransitionFoo()
+}
+
+type BaseAbstractPredicateTransition struct {
+ BaseTransition
+}
+
+func NewBasePredicateTransition(target ATNState) *BaseAbstractPredicateTransition {
+ return &BaseAbstractPredicateTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ },
+ }
+}
+
+func (a *BaseAbstractPredicateTransition) IAbstractPredicateTransitionFoo() {}
+
+type PredicateTransition struct {
+ BaseAbstractPredicateTransition
+ isCtxDependent bool
+ ruleIndex, predIndex int
+}
+
+func NewPredicateTransition(target ATNState, ruleIndex, predIndex int, isCtxDependent bool) *PredicateTransition {
+ return &PredicateTransition{
+ BaseAbstractPredicateTransition: BaseAbstractPredicateTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ serializationType: TransitionPREDICATE,
+ isEpsilon: true,
+ },
+ },
+ isCtxDependent: isCtxDependent,
+ ruleIndex: ruleIndex,
+ predIndex: predIndex,
+ }
+}
+
+func (t *PredicateTransition) Matches(_, _, _ int) bool {
+ return false
+}
+
+func (t *PredicateTransition) getPredicate() *Predicate {
+ return NewPredicate(t.ruleIndex, t.predIndex, t.isCtxDependent)
+}
+
+func (t *PredicateTransition) String() string {
+ return "pred_" + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.predIndex)
+}
+
+type ActionTransition struct {
+ BaseTransition
+ isCtxDependent bool
+ ruleIndex, actionIndex, predIndex int
+}
+
+func NewActionTransition(target ATNState, ruleIndex, actionIndex int, isCtxDependent bool) *ActionTransition {
+ return &ActionTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ serializationType: TransitionACTION,
+ isEpsilon: true,
+ },
+ isCtxDependent: isCtxDependent,
+ ruleIndex: ruleIndex,
+ actionIndex: actionIndex,
+ }
+}
+
+func (t *ActionTransition) Matches(_, _, _ int) bool {
+ return false
+}
+
+func (t *ActionTransition) String() string {
+ return "action_" + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.actionIndex)
+}
+
+type SetTransition struct {
+ BaseTransition
+}
+
+func NewSetTransition(target ATNState, set *IntervalSet) *SetTransition {
+ t := &SetTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ serializationType: TransitionSET,
+ },
+ }
+
+ if set != nil {
+ t.intervalSet = set
+ } else {
+ t.intervalSet = NewIntervalSet()
+ t.intervalSet.addOne(TokenInvalidType)
+ }
+ return t
+}
+
+func (t *SetTransition) Matches(symbol, _, _ int) bool {
+ return t.intervalSet.contains(symbol)
+}
+
+func (t *SetTransition) String() string {
+ return t.intervalSet.String()
+}
+
+type NotSetTransition struct {
+ SetTransition
+}
+
+func NewNotSetTransition(target ATNState, set *IntervalSet) *NotSetTransition {
+ t := &NotSetTransition{
+ SetTransition: SetTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ serializationType: TransitionNOTSET,
+ },
+ },
+ }
+ if set != nil {
+ t.intervalSet = set
+ } else {
+ t.intervalSet = NewIntervalSet()
+ t.intervalSet.addOne(TokenInvalidType)
+ }
+
+ return t
+}
+
+func (t *NotSetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
+ return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !t.intervalSet.contains(symbol)
+}
+
+func (t *NotSetTransition) String() string {
+ return "~" + t.intervalSet.String()
+}
+
+type WildcardTransition struct {
+ BaseTransition
+}
+
+func NewWildcardTransition(target ATNState) *WildcardTransition {
+ return &WildcardTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ serializationType: TransitionWILDCARD,
+ },
+ }
+}
+
+func (t *WildcardTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
+ return symbol >= minVocabSymbol && symbol <= maxVocabSymbol
+}
+
+func (t *WildcardTransition) String() string {
+ return "."
+}
+
+type PrecedencePredicateTransition struct {
+ BaseAbstractPredicateTransition
+ precedence int
+}
+
+func NewPrecedencePredicateTransition(target ATNState, precedence int) *PrecedencePredicateTransition {
+ return &PrecedencePredicateTransition{
+ BaseAbstractPredicateTransition: BaseAbstractPredicateTransition{
+ BaseTransition: BaseTransition{
+ target: target,
+ serializationType: TransitionPRECEDENCE,
+ isEpsilon: true,
+ },
+ },
+ precedence: precedence,
+ }
+}
+
+func (t *PrecedencePredicateTransition) Matches(_, _, _ int) bool {
+ return false
+}
+
+func (t *PrecedencePredicateTransition) getPredicate() *PrecedencePredicate {
+ return NewPrecedencePredicate(t.precedence)
+}
+
+func (t *PrecedencePredicateTransition) String() string {
+ return fmt.Sprint(t.precedence) + " >= _p"
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/tree.go b/vendor/github.com/antlr4-go/antlr/v4/tree.go
new file mode 100644
index 0000000..c288420
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/tree.go
@@ -0,0 +1,304 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+// The basic notion of a tree has a parent, a payload, and a list of children.
+// It is the most abstract interface for all the trees used by ANTLR.
+
+var TreeInvalidInterval = NewInterval(-1, -2)
+
+type Tree interface {
+ GetParent() Tree
+ SetParent(Tree)
+ GetPayload() interface{}
+ GetChild(i int) Tree
+ GetChildCount() int
+ GetChildren() []Tree
+}
+
+type SyntaxTree interface {
+ Tree
+ GetSourceInterval() Interval
+}
+
+type ParseTree interface {
+ SyntaxTree
+ Accept(Visitor ParseTreeVisitor) interface{}
+ GetText() string
+ ToStringTree([]string, Recognizer) string
+}
+
+type RuleNode interface {
+ ParseTree
+ GetRuleContext() RuleContext
+}
+
+type TerminalNode interface {
+ ParseTree
+ GetSymbol() Token
+}
+
+type ErrorNode interface {
+ TerminalNode
+
+ errorNode()
+}
+
+type ParseTreeVisitor interface {
+ Visit(tree ParseTree) interface{}
+ VisitChildren(node RuleNode) interface{}
+ VisitTerminal(node TerminalNode) interface{}
+ VisitErrorNode(node ErrorNode) interface{}
+}
+
+type BaseParseTreeVisitor struct{}
+
+var _ ParseTreeVisitor = &BaseParseTreeVisitor{}
+
+func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return tree.Accept(v) }
+func (v *BaseParseTreeVisitor) VisitChildren(_ RuleNode) interface{} { return nil }
+func (v *BaseParseTreeVisitor) VisitTerminal(_ TerminalNode) interface{} { return nil }
+func (v *BaseParseTreeVisitor) VisitErrorNode(_ ErrorNode) interface{} { return nil }
+
+// TODO: Implement this?
+//func (this ParseTreeVisitor) Visit(ctx) {
+// if (Utils.isArray(ctx)) {
+// self := this
+// return ctx.map(function(child) { return VisitAtom(self, child)})
+// } else {
+// return VisitAtom(this, ctx)
+// }
+//}
+//
+//func VisitAtom(Visitor, ctx) {
+// if (ctx.parser == nil) { //is terminal
+// return
+// }
+//
+// name := ctx.parser.ruleNames[ctx.ruleIndex]
+// funcName := "Visit" + Utils.titleCase(name)
+//
+// return Visitor[funcName](ctx)
+//}
+
+type ParseTreeListener interface {
+ VisitTerminal(node TerminalNode)
+ VisitErrorNode(node ErrorNode)
+ EnterEveryRule(ctx ParserRuleContext)
+ ExitEveryRule(ctx ParserRuleContext)
+}
+
+type BaseParseTreeListener struct{}
+
+var _ ParseTreeListener = &BaseParseTreeListener{}
+
+func (l *BaseParseTreeListener) VisitTerminal(_ TerminalNode) {}
+func (l *BaseParseTreeListener) VisitErrorNode(_ ErrorNode) {}
+func (l *BaseParseTreeListener) EnterEveryRule(_ ParserRuleContext) {}
+func (l *BaseParseTreeListener) ExitEveryRule(_ ParserRuleContext) {}
+
+type TerminalNodeImpl struct {
+ parentCtx RuleContext
+ symbol Token
+}
+
+var _ TerminalNode = &TerminalNodeImpl{}
+
+func NewTerminalNodeImpl(symbol Token) *TerminalNodeImpl {
+ tn := new(TerminalNodeImpl)
+
+ tn.parentCtx = nil
+ tn.symbol = symbol
+
+ return tn
+}
+
+func (t *TerminalNodeImpl) GetChild(_ int) Tree {
+ return nil
+}
+
+func (t *TerminalNodeImpl) GetChildren() []Tree {
+ return nil
+}
+
+func (t *TerminalNodeImpl) SetChildren(_ []Tree) {
+ panic("Cannot set children on terminal node")
+}
+
+func (t *TerminalNodeImpl) GetSymbol() Token {
+ return t.symbol
+}
+
+func (t *TerminalNodeImpl) GetParent() Tree {
+ return t.parentCtx
+}
+
+func (t *TerminalNodeImpl) SetParent(tree Tree) {
+ t.parentCtx = tree.(RuleContext)
+}
+
+func (t *TerminalNodeImpl) GetPayload() interface{} {
+ return t.symbol
+}
+
+func (t *TerminalNodeImpl) GetSourceInterval() Interval {
+ if t.symbol == nil {
+ return TreeInvalidInterval
+ }
+ tokenIndex := t.symbol.GetTokenIndex()
+ return NewInterval(tokenIndex, tokenIndex)
+}
+
+func (t *TerminalNodeImpl) GetChildCount() int {
+ return 0
+}
+
+func (t *TerminalNodeImpl) Accept(v ParseTreeVisitor) interface{} {
+ return v.VisitTerminal(t)
+}
+
+func (t *TerminalNodeImpl) GetText() string {
+ return t.symbol.GetText()
+}
+
+func (t *TerminalNodeImpl) String() string {
+ if t.symbol.GetTokenType() == TokenEOF {
+ return ""
+ }
+
+ return t.symbol.GetText()
+}
+
+func (t *TerminalNodeImpl) ToStringTree(_ []string, _ Recognizer) string {
+ return t.String()
+}
+
+// Represents a token that was consumed during resynchronization
+// rather than during a valid Match operation. For example,
+// we will create this kind of a node during single token insertion
+// and deletion as well as during "consume until error recovery set"
+// upon no viable alternative exceptions.
+
+type ErrorNodeImpl struct {
+ *TerminalNodeImpl
+}
+
+var _ ErrorNode = &ErrorNodeImpl{}
+
+func NewErrorNodeImpl(token Token) *ErrorNodeImpl {
+ en := new(ErrorNodeImpl)
+ en.TerminalNodeImpl = NewTerminalNodeImpl(token)
+ return en
+}
+
+func (e *ErrorNodeImpl) errorNode() {}
+
+func (e *ErrorNodeImpl) Accept(v ParseTreeVisitor) interface{} {
+ return v.VisitErrorNode(e)
+}
+
+type ParseTreeWalker struct {
+}
+
+func NewParseTreeWalker() *ParseTreeWalker {
+ return new(ParseTreeWalker)
+}
+
+// Walk performs a walk on the given parse tree starting at the root and going down recursively
+// with depth-first search. On each node, [EnterRule] is called before
+// recursively walking down into child nodes, then [ExitRule] is called after the recursive call to wind up.
+func (p *ParseTreeWalker) Walk(listener ParseTreeListener, t Tree) {
+ switch tt := t.(type) {
+ case ErrorNode:
+ listener.VisitErrorNode(tt)
+ case TerminalNode:
+ listener.VisitTerminal(tt)
+ default:
+ p.EnterRule(listener, t.(RuleNode))
+ for i := 0; i < t.GetChildCount(); i++ {
+ child := t.GetChild(i)
+ p.Walk(listener, child)
+ }
+ p.ExitRule(listener, t.(RuleNode))
+ }
+}
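+
+// A minimal usage sketch (comment only; assumes `listener` implements
+// ParseTreeListener and `tree` is a parsed rule context):
+//
+//	walker := NewParseTreeWalker() // or use ParseTreeWalkerDefault
+//	walker.Walk(listener, tree)    // fires Enter/Exit and Visit* callbacks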
+
+// EnterRule enters a grammar rule by first triggering the generic event [ParseTreeListener].[EnterEveryRule]
+// then by triggering the event specific to the given parse tree node
+func (p *ParseTreeWalker) EnterRule(listener ParseTreeListener, r RuleNode) {
+ ctx := r.GetRuleContext().(ParserRuleContext)
+ listener.EnterEveryRule(ctx)
+ ctx.EnterRule(listener)
+}
+
+// ExitRule exits a grammar rule by first triggering the event specific to the given parse tree node
+// then by triggering the generic event [ParseTreeListener].ExitEveryRule
+func (p *ParseTreeWalker) ExitRule(listener ParseTreeListener, r RuleNode) {
+ ctx := r.GetRuleContext().(ParserRuleContext)
+ ctx.ExitRule(listener)
+ listener.ExitEveryRule(ctx)
+}
+
+//goland:noinspection GoUnusedGlobalVariable
+var ParseTreeWalkerDefault = NewParseTreeWalker()
+
+type IterativeParseTreeWalker struct {
+ *ParseTreeWalker
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func NewIterativeParseTreeWalker() *IterativeParseTreeWalker {
+ return new(IterativeParseTreeWalker)
+}
+
+func (i *IterativeParseTreeWalker) Walk(listener ParseTreeListener, t Tree) {
+ var stack []Tree
+ var indexStack []int
+ currentNode := t
+ currentIndex := 0
+
+ for currentNode != nil {
+ // pre-order visit
+ switch tt := currentNode.(type) {
+ case ErrorNode:
+ listener.VisitErrorNode(tt)
+ case TerminalNode:
+ listener.VisitTerminal(tt)
+ default:
+ i.EnterRule(listener, currentNode.(RuleNode))
+ }
+ // Move down to first child, if exists
+ if currentNode.GetChildCount() > 0 {
+ stack = append(stack, currentNode)
+ indexStack = append(indexStack, currentIndex)
+ currentIndex = 0
+ currentNode = currentNode.GetChild(0)
+ continue
+ }
+
+ for {
+ // post-order visit
+ if ruleNode, ok := currentNode.(RuleNode); ok {
+ i.ExitRule(listener, ruleNode)
+ }
+ // No parent, so no siblings
+ if len(stack) == 0 {
+ currentNode = nil
+ currentIndex = 0
+ break
+ }
+ // Move to next sibling if possible
+ currentIndex++
+ if stack[len(stack)-1].GetChildCount() > currentIndex {
+ currentNode = stack[len(stack)-1].GetChild(currentIndex)
+ break
+ }
+ // No next sibling, so move up
+ currentNode, stack = stack[len(stack)-1], stack[:len(stack)-1]
+ currentIndex, indexStack = indexStack[len(indexStack)-1], indexStack[:len(indexStack)-1]
+ }
+ }
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/trees.go b/vendor/github.com/antlr4-go/antlr/v4/trees.go
new file mode 100644
index 0000000..f44c05d
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/trees.go
@@ -0,0 +1,142 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import "fmt"
+
+/** A set of utility routines useful for all kinds of ANTLR trees. */
+
+// TreesStringTree prints out a whole tree in LISP form. [TreesGetNodeText] is used on the
+// node payloads to get the text for the nodes. It detects parse trees and extracts data appropriately.
+func TreesStringTree(tree Tree, ruleNames []string, recog Recognizer) string {
+
+ if recog != nil {
+ ruleNames = recog.GetRuleNames()
+ }
+
+ s := TreesGetNodeText(tree, ruleNames, nil)
+
+ s = EscapeWhitespace(s, false)
+ c := tree.GetChildCount()
+ if c == 0 {
+ return s
+ }
+ res := "(" + s + " "
+ if c > 0 {
+ s = TreesStringTree(tree.GetChild(0), ruleNames, nil)
+ res += s
+ }
+ for i := 1; i < c; i++ {
+ s = TreesStringTree(tree.GetChild(i), ruleNames, nil)
+ res += " " + s
+ }
+ res += ")"
+ return res
+}
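+
+// An illustrative sketch (comment only; names are hypothetical): for a rule
+// `expr` containing the single token `42`,
+//
+//	TreesStringTree(tree, nil, parser) // "(expr 42)"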
+
+func TreesGetNodeText(t Tree, ruleNames []string, recog Parser) string {
+ if recog != nil {
+ ruleNames = recog.GetRuleNames()
+ }
+
+ if ruleNames != nil {
+ switch t2 := t.(type) {
+ case RuleNode:
+ t3 := t2.GetRuleContext()
+ altNumber := t3.GetAltNumber()
+
+ if altNumber != ATNInvalidAltNumber {
+ return fmt.Sprintf("%s:%d", ruleNames[t3.GetRuleIndex()], altNumber)
+ }
+ return ruleNames[t3.GetRuleIndex()]
+ case ErrorNode:
+ return fmt.Sprint(t2)
+ case TerminalNode:
+ if t2.GetSymbol() != nil {
+ return t2.GetSymbol().GetText()
+ }
+ }
+ }
+
+ // no recognition for rule names
+ payload := t.GetPayload()
+ if p2, ok := payload.(Token); ok {
+ return p2.GetText()
+ }
+
+ return fmt.Sprint(t.GetPayload())
+}
+
+// TreesGetChildren returns an ordered list of all children of this node
+//
+//goland:noinspection GoUnusedExportedFunction
+func TreesGetChildren(t Tree) []Tree {
+ list := make([]Tree, 0)
+ for i := 0; i < t.GetChildCount(); i++ {
+ list = append(list, t.GetChild(i))
+ }
+ return list
+}
+
+// TreesgetAncestors returns a list of all ancestors of this node. The first node of the list
+// is the root, and the last node is the parent of this node.
+//
+//goland:noinspection GoUnusedExportedFunction
+func TreesgetAncestors(t Tree) []Tree {
+ ancestors := make([]Tree, 0)
+ t = t.GetParent()
+ for t != nil {
+ f := []Tree{t}
+ ancestors = append(f, ancestors...)
+ t = t.GetParent()
+ }
+ return ancestors
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func TreesFindAllTokenNodes(t ParseTree, ttype int) []ParseTree {
+ return TreesfindAllNodes(t, ttype, true)
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func TreesfindAllRuleNodes(t ParseTree, ruleIndex int) []ParseTree {
+ return TreesfindAllNodes(t, ruleIndex, false)
+}
+
+func TreesfindAllNodes(t ParseTree, index int, findTokens bool) []ParseTree {
+ nodes := make([]ParseTree, 0)
+ treesFindAllNodes(t, index, findTokens, &nodes)
+ return nodes
+}
+
+func treesFindAllNodes(t ParseTree, index int, findTokens bool, nodes *[]ParseTree) {
+ // check this node (the root) first
+
+ t2, ok := t.(TerminalNode)
+ t3, ok2 := t.(ParserRuleContext)
+
+ if findTokens && ok {
+ if t2.GetSymbol().GetTokenType() == index {
+ *nodes = append(*nodes, t2)
+ }
+ } else if !findTokens && ok2 {
+ if t3.GetRuleIndex() == index {
+ *nodes = append(*nodes, t3)
+ }
+ }
+ // check children
+ for i := 0; i < t.GetChildCount(); i++ {
+ treesFindAllNodes(t.GetChild(i).(ParseTree), index, findTokens, nodes)
+ }
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func TreesDescendants(t ParseTree) []ParseTree {
+ nodes := []ParseTree{t}
+ for i := 0; i < t.GetChildCount(); i++ {
+ nodes = append(nodes, TreesDescendants(t.GetChild(i).(ParseTree))...)
+ }
+ return nodes
+}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/utils.go b/vendor/github.com/antlr4-go/antlr/v4/utils.go
new file mode 100644
index 0000000..733d7df
--- /dev/null
+++ b/vendor/github.com/antlr4-go/antlr/v4/utils.go
@@ -0,0 +1,328 @@
+// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
+// Use of this file is governed by the BSD 3-clause license that
+// can be found in the LICENSE.txt file in the project root.
+
+package antlr
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "math/bits"
+ "os"
+ "strconv"
+ "strings"
+ "syscall"
+)
+
+func intMin(a, b int) int {
+ if a < b {
+ return a
+ }
+ return b
+}
+
+func intMax(a, b int) int {
+ if a > b {
+ return a
+ }
+ return b
+}
+
+// A simple integer stack
+
+type IntStack []int
+
+var ErrEmptyStack = errors.New("stack is empty")
+
+func (s *IntStack) Pop() (int, error) {
+ l := len(*s) - 1
+ if l < 0 {
+ return 0, ErrEmptyStack
+ }
+ v := (*s)[l]
+ *s = (*s)[0:l]
+ return v, nil
+}
+
+func (s *IntStack) Push(e int) {
+ *s = append(*s, e)
+}
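+
+// A minimal usage sketch:
+//
+//	var s IntStack
+//	s.Push(7)
+//	v, err := s.Pop() // v == 7, err == nil
+//	_, err = s.Pop()  // err == ErrEmptyStack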
+
+const bitsPerWord = 64
+
+func indexForBit(bit int) int {
+ return bit / bitsPerWord
+}
+
+//goland:noinspection GoUnusedExportedFunction,GoUnusedFunction
+func wordForBit(data []uint64, bit int) uint64 {
+ idx := indexForBit(bit)
+ if idx >= len(data) {
+ return 0
+ }
+ return data[idx]
+}
+
+func maskForBit(bit int) uint64 {
+ return uint64(1) << (bit % bitsPerWord)
+}
+
+func wordsNeeded(bit int) int {
+ return indexForBit(bit) + 1
+}
+
+type BitSet struct {
+ data []uint64
+}
+
+// NewBitSet creates a new bitwise set
+// TODO: See if we can replace with the standard library's BitSet
+func NewBitSet() *BitSet {
+ return &BitSet{}
+}
+
+func (b *BitSet) add(value int) {
+ idx := indexForBit(value)
+ if idx >= len(b.data) {
+ size := wordsNeeded(value)
+ data := make([]uint64, size)
+ copy(data, b.data)
+ b.data = data
+ }
+ b.data[idx] |= maskForBit(value)
+}
+
+func (b *BitSet) clear(index int) {
+ idx := indexForBit(index)
+ if idx >= len(b.data) {
+ return
+ }
+ b.data[idx] &= ^maskForBit(index)
+}
+
+func (b *BitSet) or(set *BitSet) {
+ // Get min size necessary to represent the bits in both sets.
+ bLen := b.minLen()
+ setLen := set.minLen()
+ maxLen := intMax(bLen, setLen)
+ if maxLen > len(b.data) {
+ // Increase the size of len(b.data) to represent the bits in both sets.
+ data := make([]uint64, maxLen)
+ copy(data, b.data)
+ b.data = data
+ }
+ // len(b.data) is at least setLen.
+ for i := 0; i < setLen; i++ {
+ b.data[i] |= set.data[i]
+ }
+}
+
+func (b *BitSet) remove(value int) {
+ b.clear(value)
+}
+
+func (b *BitSet) contains(value int) bool {
+ idx := indexForBit(value)
+ if idx >= len(b.data) {
+ return false
+ }
+ return (b.data[idx] & maskForBit(value)) != 0
+}
+
+func (b *BitSet) minValue() int {
+ for i, v := range b.data {
+ if v == 0 {
+ continue
+ }
+ return i*bitsPerWord + bits.TrailingZeros64(v)
+ }
+ return 2147483647
+}
+
+func (b *BitSet) equals(other interface{}) bool {
+ otherBitSet, ok := other.(*BitSet)
+ if !ok {
+ return false
+ }
+
+ if b == otherBitSet {
+ return true
+ }
+
+ // We only compare set bits, so we cannot rely on the two slices having the same size. It's
+ // possible for two BitSets to have different slice lengths but the same set bits. So we only
+ // compare the relevant words and ignore the trailing zeros.
+ bLen := b.minLen()
+ otherLen := otherBitSet.minLen()
+
+ if bLen != otherLen {
+ return false
+ }
+
+ for i := 0; i < bLen; i++ {
+ if b.data[i] != otherBitSet.data[i] {
+ return false
+ }
+ }
+
+ return true
+}
+
+func (b *BitSet) minLen() int {
+ for i := len(b.data); i > 0; i-- {
+ if b.data[i-1] != 0 {
+ return i
+ }
+ }
+ return 0
+}
+
+func (b *BitSet) length() int {
+ cnt := 0
+ for _, val := range b.data {
+ cnt += bits.OnesCount64(val)
+ }
+ return cnt
+}
+
+func (b *BitSet) String() string {
+ vals := make([]string, 0, b.length())
+
+ for i, v := range b.data {
+ for v != 0 {
+ n := bits.TrailingZeros64(v)
+ vals = append(vals, strconv.Itoa(i*bitsPerWord+n))
+ v &= ^(uint64(1) << n)
+ }
+ }
+
+ return "{" + strings.Join(vals, ", ") + "}"
+}
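+
+// A minimal usage sketch:
+//
+//	b := NewBitSet()
+//	b.add(3)
+//	b.add(65)      // grows the backing slice to a second 64-bit word
+//	b.contains(3)  // true
+//	b.remove(3)
+//	fmt.Println(b) // {65}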
+
+type AltDict struct {
+ data map[string]interface{}
+}
+
+func NewAltDict() *AltDict {
+ d := new(AltDict)
+ d.data = make(map[string]interface{})
+ return d
+}
+
+func (a *AltDict) Get(key string) interface{} {
+ key = "k-" + key
+ return a.data[key]
+}
+
+func (a *AltDict) put(key string, value interface{}) {
+ key = "k-" + key
+ a.data[key] = value
+}
+
+func (a *AltDict) values() []interface{} {
+ vs := make([]interface{}, len(a.data))
+ i := 0
+ for _, v := range a.data {
+ vs[i] = v
+ i++
+ }
+ return vs
+}
+
+func EscapeWhitespace(s string, escapeSpaces bool) string {
+
+ s = strings.Replace(s, "\t", "\\t", -1)
+ s = strings.Replace(s, "\n", "\\n", -1)
+ s = strings.Replace(s, "\r", "\\r", -1)
+ if escapeSpaces {
+ s = strings.Replace(s, " ", "\u00B7", -1)
+ }
+ return s
+}
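+
+// For example:
+//
+//	EscapeWhitespace("a\tb\n", false) // "a\\tb\\n"
+//	EscapeWhitespace("a b", true)     // "a·b" (space rendered as U+00B7)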
+
+//goland:noinspection GoUnusedExportedFunction
+func TerminalNodeToStringArray(sa []TerminalNode) []string {
+ st := make([]string, len(sa))
+
+ for i, s := range sa {
+ st[i] = fmt.Sprintf("%v", s)
+ }
+
+ return st
+}
+
+//goland:noinspection GoUnusedExportedFunction
+func PrintArrayJavaStyle(sa []string) string {
+ var buffer bytes.Buffer
+
+ buffer.WriteString("[")
+
+ for i, s := range sa {
+ buffer.WriteString(s)
+ if i != len(sa)-1 {
+ buffer.WriteString(", ")
+ }
+ }
+
+ buffer.WriteString("]")
+
+ return buffer.String()
+}
+
+// MurmurHash3 (32-bit) helpers: init/update/finish over 32-bit words
+func murmurInit(seed int) int {
+ return seed
+}
+
+func murmurUpdate(h int, value int) int {
+ const c1 uint32 = 0xCC9E2D51
+ const c2 uint32 = 0x1B873593
+ const r1 uint32 = 15
+ const r2 uint32 = 13
+ const m uint32 = 5
+ const n uint32 = 0xE6546B64
+
+ k := uint32(value)
+ k *= c1
+ k = (k << r1) | (k >> (32 - r1))
+ k *= c2
+
+ hash := uint32(h) ^ k
+ hash = (hash << r2) | (hash >> (32 - r2))
+ hash = hash*m + n
+ return int(hash)
+}
+
+func murmurFinish(h int, numberOfWords int) int {
+ var hash = uint32(h)
+ hash ^= uint32(numberOfWords) << 2
+ hash ^= hash >> 16
+ hash *= 0x85ebca6b
+ hash ^= hash >> 13
+ hash *= 0xc2b2ae35
+ hash ^= hash >> 16
+
+ return int(hash)
+}
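+
+// A minimal sketch of how the three helpers compose:
+//
+//	h := murmurInit(0)      // seed
+//	h = murmurUpdate(h, 17) // mix in one 32-bit word
+//	h = murmurUpdate(h, 42)
+//	h = murmurFinish(h, 2)  // finalize; 2 = number of words mixed in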
+
+func isDirectory(dir string) (bool, error) {
+ fileInfo, err := os.Stat(dir)
+ if err != nil {
+ switch {
+ case errors.Is(err, syscall.ENOENT):
+ // The given directory does not exist, so we will try to create it
+ //
+ err = os.MkdirAll(dir, 0755)
+ if err != nil {
+ return false, err
+ }
+
+ return true, nil
+ case err != nil:
+ return false, err
+ default:
+ }
+ }
+ return fileInfo.IsDir(), err
+}
diff --git a/vendor/github.com/araddon/dateparse/.travis.yml b/vendor/github.com/araddon/dateparse/.travis.yml
new file mode 100644
index 0000000..3b4b177
--- /dev/null
+++ b/vendor/github.com/araddon/dateparse/.travis.yml
@@ -0,0 +1,13 @@
+language: go
+
+go:
+ - 1.13.x
+
+before_install:
+ - go get -t -v ./...
+
+script:
+ - go test -race -coverprofile=coverage.txt -covermode=atomic
+
+after_success:
+ - bash <(curl -s https://codecov.io/bash)
diff --git a/vendor/github.com/araddon/dateparse/LICENSE b/vendor/github.com/araddon/dateparse/LICENSE
new file mode 100644
index 0000000..f675ed3
--- /dev/null
+++ b/vendor/github.com/araddon/dateparse/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015-2017 Aaron Raddon
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/araddon/dateparse/README.md b/vendor/github.com/araddon/dateparse/README.md
new file mode 100644
index 0000000..fe682dd
--- /dev/null
+++ b/vendor/github.com/araddon/dateparse/README.md
@@ -0,0 +1,323 @@
+Go Date Parser
+---------------------------
+
+Parse many date strings without knowing the format in advance. Uses a scanner to read bytes and a state machine to find the format. Much faster than shotgun-based parse methods. See [bench_test.go](https://github.com/araddon/dateparse/blob/master/bench_test.go) for a performance comparison.
+
+
+[![Code Coverage](https://codecov.io/gh/araddon/dateparse/branch/master/graph/badge.svg)](https://codecov.io/gh/araddon/dateparse)
+[![GoDoc](https://godoc.org/github.com/araddon/dateparse?status.svg)](http://godoc.org/github.com/araddon/dateparse)
+[![Build Status](https://travis-ci.org/araddon/dateparse.svg?branch=master)](https://travis-ci.org/araddon/dateparse)
+[![Go ReportCard](https://goreportcard.com/badge/araddon/dateparse)](https://goreportcard.com/report/araddon/dateparse)
+
+**MM/DD/YYYY VS DD/MM/YYYY** Right now this uses mm/dd/yyyy when ambiguous. If this is not the desired behavior, use `ParseStrict`, which will fail on ambiguous date strings.
+
+**Timezones** The location your server is configured with affects the results! See the example, https://play.golang.org/p/IDHRalIyXh, and the last paragraph of https://golang.org/pkg/time/#Parse.
+
+
+```go
+
+// Normal parse. Equivalent Timezone rules as time.Parse()
+t, err := dateparse.ParseAny("3/1/2014")
+
+// Parse Strict, error on ambiguous mm/dd vs dd/mm dates
+t, err := dateparse.ParseStrict("3/1/2014")
+> returns error
+
+// Return a string that represents the layout to parse the given date-time.
+layout, err := dateparse.ParseFormat("May 8, 2009 5:57:51 PM")
+> "Jan 2, 2006 3:04:05 PM"
+
+```
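+
+A minimal sketch of location-aware parsing (assumes the IANA zone database
+is available on your system):
+
+```go
+// Zone abbreviations in the input are interpreted using Denver's rules.
+denver, _ := time.LoadLocation("America/Denver")
+t, err := dateparse.ParseIn("2014-12-16 06:20:00 MST", denver)
+```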
+
+CLI tool for testing date formats
+----------------------------------
+
+[Date Parse CLI](https://github.com/araddon/dateparse/blob/master/dateparse)
+
+
+Extended example
+-------------------
+
+https://github.com/araddon/dateparse/blob/master/example/main.go
+
+```go
+package main
+
+import (
+ "flag"
+ "fmt"
+ "time"
+
+ "github.com/scylladb/termtables"
+ "github.com/araddon/dateparse"
+)
+
+var examples = []string{
+ "May 8, 2009 5:57:51 PM",
+ "oct 7, 1970",
+ "oct 7, '70",
+ "oct. 7, 1970",
+ "oct. 7, 70",
+ "Mon Jan 2 15:04:05 2006",
+ "Mon Jan 2 15:04:05 MST 2006",
+ "Mon Jan 02 15:04:05 -0700 2006",
+ "Monday, 02-Jan-06 15:04:05 MST",
+ "Mon, 02 Jan 2006 15:04:05 MST",
+ "Tue, 11 Jul 2017 16:28:13 +0200 (CEST)",
+ "Mon, 02 Jan 2006 15:04:05 -0700",
+ "Mon 30 Sep 2018 09:09:09 PM UTC",
+ "Mon Aug 10 15:44:11 UTC+0100 2015",
+ "Thu, 4 Jan 2018 17:53:36 +0000",
+ "Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)",
+ "Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00)",
+ "September 17, 2012 10:09am",
+ "September 17, 2012 at 10:09am PST-08",
+ "September 17, 2012, 10:10:09",
+ "October 7, 1970",
+ "October 7th, 1970",
+ "12 Feb 2006, 19:17",
+ "12 Feb 2006 19:17",
+ "14 May 2019 19:11:40.164",
+ "7 oct 70",
+ "7 oct 1970",
+ "03 February 2013",
+ "1 July 2013",
+ "2013-Feb-03",
+ // dd/Mon/yyy alpha Months
+ "06/Jan/2008:15:04:05 -0700",
+ "06/Jan/2008 15:04:05 -0700",
+ // mm/dd/yy
+ "3/31/2014",
+ "03/31/2014",
+ "08/21/71",
+ "8/1/71",
+ "4/8/2014 22:05",
+ "04/08/2014 22:05",
+ "4/8/14 22:05",
+ "04/2/2014 03:00:51",
+ "8/8/1965 12:00:00 AM",
+ "8/8/1965 01:00:01 PM",
+ "8/8/1965 01:00 PM",
+ "8/8/1965 1:00 PM",
+ "8/8/1965 12:00 AM",
+ "4/02/2014 03:00:51",
+ "03/19/2012 10:11:59",
+ "03/19/2012 10:11:59.3186369",
+ // yyyy/mm/dd
+ "2014/3/31",
+ "2014/03/31",
+ "2014/4/8 22:05",
+ "2014/04/08 22:05",
+ "2014/04/2 03:00:51",
+ "2014/4/02 03:00:51",
+ "2012/03/19 10:11:59",
+ "2012/03/19 10:11:59.3186369",
+ // yyyy:mm:dd
+ "2014:3:31",
+ "2014:03:31",
+ "2014:4:8 22:05",
+ "2014:04:08 22:05",
+ "2014:04:2 03:00:51",
+ "2014:4:02 03:00:51",
+ "2012:03:19 10:11:59",
+ "2012:03:19 10:11:59.3186369",
+ // Chinese
+ "2014年04月08日",
+ // yyyy-mm-ddThh
+ "2006-01-02T15:04:05+0000",
+ "2009-08-12T22:15:09-07:00",
+ "2009-08-12T22:15:09",
+ "2009-08-12T22:15:09.988",
+ "2009-08-12T22:15:09Z",
+ "2017-07-19T03:21:51:897+0100",
+ "2019-05-29T08:41-04", // no seconds, 2 digit TZ offset
+ // yyyy-mm-dd hh:mm:ss
+ "2014-04-26 17:24:37.3186369",
+ "2012-08-03 18:31:59.257000000",
+ "2014-04-26 17:24:37.123",
+ "2013-04-01 22:43",
+ "2013-04-01 22:43:22",
+ "2014-12-16 06:20:00 UTC",
+ "2014-12-16 06:20:00 GMT",
+ "2014-04-26 05:24:37 PM",
+ "2014-04-26 13:13:43 +0800",
+ "2014-04-26 13:13:43 +0800 +08",
+ "2014-04-26 13:13:44 +09:00",
+ "2012-08-03 18:31:59.257000000 +0000 UTC",
+ "2015-09-30 18:48:56.35272715 +0000 UTC",
+ "2015-02-18 00:12:00 +0000 GMT",
+ "2015-02-18 00:12:00 +0000 UTC",
+ "2015-02-08 03:02:00 +0300 MSK m=+0.000000001",
+ "2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001",
+ "2017-07-19 03:21:51+00:00",
+ "2014-04-26",
+ "2014-04",
+ "2014",
+ "2014-05-11 08:20:13,787",
+ // yyyy-mm-dd-07:00
+ "2020-07-20+08:00",
+ // mm.dd.yy
+ "3.31.2014",
+ "03.31.2014",
+ "08.21.71",
+ "2014.03",
+ "2014.03.30",
+ // yyyymmdd and similar
+ "20140601",
+ "20140722105203",
+ // yymmdd hh:mm:yy mysql log
+ // 080313 05:21:55 mysqld started
+ "171113 14:14:20",
+ // unix seconds, ms, micro, nano
+ "1332151919",
+ "1384216367189",
+ "1384216367111222",
+ "1384216367111222333",
+}
+
+var (
+ timezone = ""
+)
+
+func main() {
+ flag.StringVar(&timezone, "timezone", "UTC", "Timezone aka `America/Los_Angeles` formatted time-zone")
+ flag.Parse()
+
+ if timezone != "" {
+ // NOTE: This is very, very important to understand
+ // time-parsing in go
+ loc, err := time.LoadLocation(timezone)
+ if err != nil {
+ panic(err.Error())
+ }
+ time.Local = loc
+ }
+
+ table := termtables.CreateTable()
+
+ table.AddHeaders("Input", "Parsed, and Output as %v")
+ for _, dateExample := range examples {
+ t, err := dateparse.ParseLocal(dateExample)
+ if err != nil {
+ panic(err.Error())
+ }
+ table.AddRow(dateExample, fmt.Sprintf("%v", t))
+ }
+ fmt.Println(table.Render())
+}
+
+/*
++-------------------------------------------------------+-----------------------------------------+
+| Input | Parsed, and Output as %v |
++-------------------------------------------------------+-----------------------------------------+
+| May 8, 2009 5:57:51 PM | 2009-05-08 17:57:51 +0000 UTC |
+| oct 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
+| oct 7, '70 | 1970-10-07 00:00:00 +0000 UTC |
+| oct. 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
+| oct. 7, 70 | 1970-10-07 00:00:00 +0000 UTC |
+| Mon Jan 2 15:04:05 2006 | 2006-01-02 15:04:05 +0000 UTC |
+| Mon Jan 2 15:04:05 MST 2006 | 2006-01-02 15:04:05 +0000 MST |
+| Mon Jan 02 15:04:05 -0700 2006 | 2006-01-02 15:04:05 -0700 -0700 |
+| Monday, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST |
+| Mon, 02 Jan 2006 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST |
+| Tue, 11 Jul 2017 16:28:13 +0200 (CEST) | 2017-07-11 16:28:13 +0200 +0200 |
+| Mon, 02 Jan 2006 15:04:05 -0700 | 2006-01-02 15:04:05 -0700 -0700 |
+| Mon 30 Sep 2018 09:09:09 PM UTC | 2018-09-30 21:09:09 +0000 UTC |
+| Mon Aug 10 15:44:11 UTC+0100 2015 | 2015-08-10 15:44:11 +0000 UTC |
+| Thu, 4 Jan 2018 17:53:36 +0000 | 2018-01-04 17:53:36 +0000 UTC |
+| Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) | 2015-07-03 18:04:07 +0100 GMT |
+| Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00) | 2021-01-03 00:12:23 +0800 +0800 |
+| September 17, 2012 10:09am | 2012-09-17 10:09:00 +0000 UTC |
+| September 17, 2012 at 10:09am PST-08 | 2012-09-17 10:09:00 -0800 PST |
+| September 17, 2012, 10:10:09 | 2012-09-17 10:10:09 +0000 UTC |
+| October 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
+| October 7th, 1970 | 1970-10-07 00:00:00 +0000 UTC |
+| 12 Feb 2006, 19:17 | 2006-02-12 19:17:00 +0000 UTC |
+| 12 Feb 2006 19:17 | 2006-02-12 19:17:00 +0000 UTC |
+| 14 May 2019 19:11:40.164 | 2019-05-14 19:11:40.164 +0000 UTC |
+| 7 oct 70 | 1970-10-07 00:00:00 +0000 UTC |
+| 7 oct 1970 | 1970-10-07 00:00:00 +0000 UTC |
+| 03 February 2013 | 2013-02-03 00:00:00 +0000 UTC |
+| 1 July 2013 | 2013-07-01 00:00:00 +0000 UTC |
+| 2013-Feb-03 | 2013-02-03 00:00:00 +0000 UTC |
+| 06/Jan/2008:15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
+| 06/Jan/2008 15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
+| 3/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
+| 03/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
+| 08/21/71 | 1971-08-21 00:00:00 +0000 UTC |
+| 8/1/71 | 1971-08-01 00:00:00 +0000 UTC |
+| 4/8/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC |
+| 04/08/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC |
+| 4/8/14 22:05 | 2014-04-08 22:05:00 +0000 UTC |
+| 04/2/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
+| 8/8/1965 12:00:00 AM | 1965-08-08 00:00:00 +0000 UTC |
+| 8/8/1965 01:00:01 PM | 1965-08-08 13:00:01 +0000 UTC |
+| 8/8/1965 01:00 PM | 1965-08-08 13:00:00 +0000 UTC |
+| 8/8/1965 1:00 PM | 1965-08-08 13:00:00 +0000 UTC |
+| 8/8/1965 12:00 AM | 1965-08-08 00:00:00 +0000 UTC |
+| 4/02/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
+| 03/19/2012 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
+| 03/19/2012 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
+| 2014/3/31 | 2014-03-31 00:00:00 +0000 UTC |
+| 2014/03/31 | 2014-03-31 00:00:00 +0000 UTC |
+| 2014/4/8 22:05 | 2014-04-08 22:05:00 +0000 UTC |
+| 2014/04/08 22:05 | 2014-04-08 22:05:00 +0000 UTC |
+| 2014/04/2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
+| 2014/4/02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
+| 2012/03/19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
+| 2012/03/19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
+| 2014:3:31 | 2014-03-31 00:00:00 +0000 UTC |
+| 2014:03:31 | 2014-03-31 00:00:00 +0000 UTC |
+| 2014:4:8 22:05 | 2014-04-08 22:05:00 +0000 UTC |
+| 2014:04:08 22:05 | 2014-04-08 22:05:00 +0000 UTC |
+| 2014:04:2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
+| 2014:4:02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
+| 2012:03:19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
+| 2012:03:19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
+| 2014年04月08日 | 2014-04-08 00:00:00 +0000 UTC |
+| 2006-01-02T15:04:05+0000 | 2006-01-02 15:04:05 +0000 UTC |
+| 2009-08-12T22:15:09-07:00 | 2009-08-12 22:15:09 -0700 -0700 |
+| 2009-08-12T22:15:09 | 2009-08-12 22:15:09 +0000 UTC |
+| 2009-08-12T22:15:09.988 | 2009-08-12 22:15:09.988 +0000 UTC |
+| 2009-08-12T22:15:09Z | 2009-08-12 22:15:09 +0000 UTC |
+| 2017-07-19T03:21:51:897+0100 | 2017-07-19 03:21:51.897 +0100 +0100 |
+| 2019-05-29T08:41-04 | 2019-05-29 08:41:00 -0400 -0400 |
+| 2014-04-26 17:24:37.3186369 | 2014-04-26 17:24:37.3186369 +0000 UTC |
+| 2012-08-03 18:31:59.257000000 | 2012-08-03 18:31:59.257 +0000 UTC |
+| 2014-04-26 17:24:37.123 | 2014-04-26 17:24:37.123 +0000 UTC |
+| 2013-04-01 22:43 | 2013-04-01 22:43:00 +0000 UTC |
+| 2013-04-01 22:43:22 | 2013-04-01 22:43:22 +0000 UTC |
+| 2014-12-16 06:20:00 UTC | 2014-12-16 06:20:00 +0000 UTC |
+| 2014-12-16 06:20:00 GMT | 2014-12-16 06:20:00 +0000 UTC |
+| 2014-04-26 05:24:37 PM | 2014-04-26 17:24:37 +0000 UTC |
+| 2014-04-26 13:13:43 +0800 | 2014-04-26 13:13:43 +0800 +0800 |
+| 2014-04-26 13:13:43 +0800 +08 | 2014-04-26 13:13:43 +0800 +0800 |
+| 2014-04-26 13:13:44 +09:00 | 2014-04-26 13:13:44 +0900 +0900 |
+| 2012-08-03 18:31:59.257000000 +0000 UTC | 2012-08-03 18:31:59.257 +0000 UTC |
+| 2015-09-30 18:48:56.35272715 +0000 UTC | 2015-09-30 18:48:56.35272715 +0000 UTC |
+| 2015-02-18 00:12:00 +0000 GMT | 2015-02-18 00:12:00 +0000 UTC |
+| 2015-02-18 00:12:00 +0000 UTC | 2015-02-18 00:12:00 +0000 UTC |
+| 2015-02-08 03:02:00 +0300 MSK m=+0.000000001 | 2015-02-08 03:02:00 +0300 +0300 |
+| 2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001 | 2015-02-08 03:02:00.001 +0300 +0300 |
+| 2017-07-19 03:21:51+00:00 | 2017-07-19 03:21:51 +0000 UTC |
+| 2014-04-26 | 2014-04-26 00:00:00 +0000 UTC |
+| 2014-04 | 2014-04-01 00:00:00 +0000 UTC |
+| 2014 | 2014-01-01 00:00:00 +0000 UTC |
+| 2014-05-11 08:20:13,787 | 2014-05-11 08:20:13.787 +0000 UTC |
+| 2020-07-20+08:00 | 2020-07-20 00:00:00 +0800 +0800 |
+| 3.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
+| 03.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
+| 08.21.71 | 1971-08-21 00:00:00 +0000 UTC |
+| 2014.03 | 2014-03-01 00:00:00 +0000 UTC |
+| 2014.03.30 | 2014-03-30 00:00:00 +0000 UTC |
+| 20140601 | 2014-06-01 00:00:00 +0000 UTC |
+| 20140722105203 | 2014-07-22 10:52:03 +0000 UTC |
+| 171113 14:14:20 | 2017-11-13 14:14:20 +0000 UTC |
+| 1332151919 | 2012-03-19 10:11:59 +0000 UTC |
+| 1384216367189 | 2013-11-12 00:32:47.189 +0000 UTC |
+| 1384216367111222 | 2013-11-12 00:32:47.111222 +0000 UTC |
+| 1384216367111222333 | 2013-11-12 00:32:47.111222333 +0000 UTC |
++-------------------------------------------------------+-----------------------------------------+
+*/
+
+```
diff --git a/vendor/github.com/araddon/dateparse/parseany.go b/vendor/github.com/araddon/dateparse/parseany.go
new file mode 100644
index 0000000..b9668b2
--- /dev/null
+++ b/vendor/github.com/araddon/dateparse/parseany.go
@@ -0,0 +1,2189 @@
+// Package dateparse parses date strings without knowing the format
+// in advance, using a fast lex-based approach to eliminate shotgun
+// attempts. It leans towards US-style dates when there is a conflict.
+package dateparse
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+ "time"
+ "unicode"
+ "unicode/utf8"
+)
+
+// func init() {
+// gou.SetupLogging("debug")
+// gou.SetColorOutput()
+// }
+
+var days = []string{
+ "mon",
+ "tue",
+ "wed",
+ "thu",
+ "fri",
+ "sat",
+ "sun",
+ "monday",
+ "tuesday",
+ "wednesday",
+ "thursday",
+ "friday",
+ "saturday",
+ "sunday",
+}
+
+var months = []string{
+ "january",
+ "february",
+ "march",
+ "april",
+ "may",
+ "june",
+ "july",
+ "august",
+ "september",
+ "october",
+ "november",
+ "december",
+}
+
+type dateState uint8
+type timeState uint8
+
+const (
+ dateStart dateState = iota // 0
+ dateDigit
+ dateDigitSt
+ dateYearDash
+ dateYearDashAlphaDash
+ dateYearDashDash
+ dateYearDashDashWs // 5
+ dateYearDashDashT
+ dateYearDashDashOffset
+ dateDigitDash
+ dateDigitDashAlpha
+ dateDigitDashAlphaDash // 10
+ dateDigitDot
+ dateDigitDotDot
+ dateDigitSlash
+ dateDigitYearSlash
+ dateDigitSlashAlpha // 15
+ dateDigitColon
+ dateDigitChineseYear
+ dateDigitChineseYearWs
+ dateDigitWs
+ dateDigitWsMoYear // 20
+ dateDigitWsMolong
+ dateAlpha
+ dateAlphaWs
+ dateAlphaWsDigit
+ dateAlphaWsDigitMore // 25
+ dateAlphaWsDigitMoreWs
+ dateAlphaWsDigitMoreWsYear
+ dateAlphaWsMonth
+ dateAlphaWsDigitYearmaybe
+ dateAlphaWsMonthMore
+ dateAlphaWsMonthSuffix
+ dateAlphaWsMore
+ dateAlphaWsAtTime
+ dateAlphaWsAlpha
+ dateAlphaWsAlphaYearmaybe // 35
+ dateAlphaPeriodWsDigit
+ dateWeekdayComma
+ dateWeekdayAbbrevComma
+)
+const (
+ // Time state
+ timeIgnore timeState = iota // 0
+ timeStart
+ timeWs
+ timeWsAlpha
+ timeWsAlphaWs
+ timeWsAlphaZoneOffset // 5
+ timeWsAlphaZoneOffsetWs
+ timeWsAlphaZoneOffsetWsYear
+ timeWsAlphaZoneOffsetWsExtra
+ timeWsAMPMMaybe
+ timeWsAMPM // 10
+ timeWsOffset
+ timeWsOffsetWs // 12
+ timeWsOffsetColonAlpha
+ timeWsOffsetColon
+ timeWsYear // 15
+ timeOffset
+ timeOffsetColon
+ timeAlpha
+ timePeriod
+ timePeriodOffset // 20
+ timePeriodOffsetColon
+ timePeriodOffsetColonWs
+ timePeriodWs
+ timePeriodWsAlpha
+ timePeriodWsOffset // 25
+ timePeriodWsOffsetWs
+ timePeriodWsOffsetWsAlpha
+ timePeriodWsOffsetColon
+ timePeriodWsOffsetColonAlpha
+ timeZ
+ timeZDigit
+)
+
+var (
+ // ErrAmbiguousMMDD is returned for date formats such as 04/02/2014, where
+ // mm/dd vs dd/mm is ambiguous; under strict parse rules this is an error.
+ ErrAmbiguousMMDD = fmt.Errorf("This date has ambiguous mm/dd vs dd/mm type format")
+)
+
+func unknownErr(datestr string) error {
+ return fmt.Errorf("Could not find format for %q", datestr)
+}
+
+// ParseAny parses an unknown date format and detects the layout.
+// Normal parse: equivalent timezone rules as time.Parse().
+// NOTE: please see the README on mm/dd vs dd/mm ambiguous dates.
+func ParseAny(datestr string, opts ...ParserOption) (time.Time, error) {
+ p, err := parseTime(datestr, nil, opts...)
+ if err != nil {
+ return time.Time{}, err
+ }
+ return p.parse()
+}
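+
+// For example (a sketch; the layout is detected per call, so no format string
+// is supplied):
+//
+//	t, err := dateparse.ParseAny("3/1/2014")
+//	// t is 2014-03-01 00:00:00 +0000 UTC (US-style mm/dd is preferred)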
+
+// ParseIn parses with a Location, using timezone/offset rules equivalent to
+// time.ParseInLocation(). If timezone/offset info exists in the datestring,
+// the given location's rules are used for any zone interpretation. That is,
+// MST means one thing when using America/Denver and something else in other
+// locations.
+func ParseIn(datestr string, loc *time.Location, opts ...ParserOption) (time.Time, error) {
+ p, err := parseTime(datestr, loc, opts...)
+ if err != nil {
+ return time.Time{}, err
+ }
+ return p.parse()
+}
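+
+// For example (a sketch; assumes the IANA zone database is available to
+// time.LoadLocation):
+//
+//	denver, _ := time.LoadLocation("America/Denver")
+//	t, err := dateparse.ParseIn("2017-09-04 17:57:51 MST", denver)
+//	// MST in the input is interpreted using America/Denver's rules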
+
+// ParseLocal detects the layout of an unknown date format and parses it
+// using time.Local.
+//
+// Set Location to time.Local. Same as ParseIn Location but lazily uses
+// the global time.Local variable for Location argument.
+//
+// denverLoc, _ := time.LoadLocation("America/Denver")
+// time.Local = denverLoc
+//
+// t, err := dateparse.ParseLocal("3/1/2014")
+//
+// Equivalent to:
+//
+// t, err := dateparse.ParseIn("3/1/2014", denverLoc)
+//
+func ParseLocal(datestr string, opts ...ParserOption) (time.Time, error) {
+ p, err := parseTime(datestr, time.Local, opts...)
+ if err != nil {
+ return time.Time{}, err
+ }
+ return p.parse()
+}
+
+// MustParse parses a date and panics if it cannot be parsed. Used for testing.
+// Not recommended for most use-cases.
+func MustParse(datestr string, opts ...ParserOption) time.Time {
+ p, err := parseTime(datestr, nil, opts...)
+ if err != nil {
+ panic(err.Error())
+ }
+ t, err := p.parse()
+ if err != nil {
+ panic(err.Error())
+ }
+ return t
+}
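+
+// For example, pinning a fixture in a test (a sketch):
+//
+//	var expected = dateparse.MustParse("2009-08-12T22:15:09-07:00")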
+
+// ParseFormat parses an unknown date-time string and returns a layout string
+// that can parse this string (and others in the exact same format).
+//
+// layout, err := dateparse.ParseFormat("2013-02-01 00:00:00")
+// // layout = "2006-01-02 15:04:05"
+//
+func ParseFormat(datestr string, opts ...ParserOption) (string, error) {
+ p, err := parseTime(datestr, nil, opts...)
+ if err != nil {
+ return "", err
+ }
+ _, err = p.parse()
+ if err != nil {
+ return "", err
+ }
+ return string(p.format), nil
+}
+
+// ParseStrict parses an unknown date format, returning an error if the date
+// is ambiguous mm/dd vs dd/mm (for example, 3.3.2014 or 8/8/71).
+func ParseStrict(datestr string, opts ...ParserOption) (time.Time, error) {
+ p, err := parseTime(datestr, nil, opts...)
+ if err != nil {
+ return time.Time{}, err
+ }
+ if p.ambiguousMD {
+ return time.Time{}, ErrAmbiguousMMDD
+ }
+ return p.parse()
+}
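+
+// For example, rejecting ambiguous input instead of guessing (a sketch):
+//
+//	if _, err := dateparse.ParseStrict("3.3.2014"); err != nil {
+//		// err is ErrAmbiguousMMDD; ask the caller to disambiguate
+//	}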
+
+func parseTime(datestr string, loc *time.Location, opts ...ParserOption) (p *parser, err error) {
+
+ p = newParser(datestr, loc, opts...)
+ if p.retryAmbiguousDateWithSwap {
+ // "month out of range" signifies that a day/month swap is the correct solution
+ // for an ambiguous date: a day was interpreted as a month and overflowed the
+ // month's valid range. By retrying in this case, we can fix a common situation
+ // with no assumptions.
+ defer func() {
+ if p != nil && p.ambiguousMD {
+ // if parsing errors out with "month out of range", swap the day/month
+ // preference before we return, to limit the scope the swap applies to
+ _, err := p.parse()
+ if err != nil && strings.Contains(err.Error(), "month out of range") {
+ // create the option to reverse the preference
+ preferMonthFirst := PreferMonthFirst(!p.preferMonthFirst)
+ // turn off the retry to avoid endless recursion
+ retryAmbiguousDateWithSwap := RetryAmbiguousDateWithSwap(false)
+ modifiedOpts := append(opts, preferMonthFirst, retryAmbiguousDateWithSwap)
+ p, err = parseTime(datestr, time.Local, modifiedOpts...)
+ }
+ }
+
+ }()
+ }
+
+ i := 0
+
+ // The general strategy is to read rune by rune through the date looking for
+ // certain hints of what type of date we are dealing with.
+ // Hopefully we only need to read about 5 or 6 bytes before
+ // we figure it out and can then attempt a parse.
+iterRunes:
+ for ; i < len(datestr); i++ {
+ //r := rune(datestr[i])
+ r, bytesConsumed := utf8.DecodeRuneInString(datestr[i:])
+ if bytesConsumed > 1 {
+ i += (bytesConsumed - 1)
+ }
+
+ // gou.Debugf("i=%d r=%s state=%d %s", i, string(r), p.stateDate, datestr)
+ switch p.stateDate {
+ case dateStart:
+ if unicode.IsDigit(r) {
+ p.stateDate = dateDigit
+ } else if unicode.IsLetter(r) {
+ p.stateDate = dateAlpha
+ } else {
+ return nil, unknownErr(datestr)
+ }
+ case dateDigit:
+
+ switch r {
+ case '-', '\u2212':
+ // 2006-01-02
+ // 2013-Feb-03
+ // 13-Feb-03
+ // 29-Jun-2016
+ if i == 4 {
+ p.stateDate = dateYearDash
+ p.yeari = 0
+ p.yearlen = i
+ p.moi = i + 1
+ p.set(0, "2006")
+ } else {
+ p.stateDate = dateDigitDash
+ }
+ case '/':
+ // 08/May/2005
+ // 03/31/2005
+ // 2014/02/24
+ p.stateDate = dateDigitSlash
+ if i == 4 {
+ // 2014/02/24 - Year first /
+ p.yearlen = i // since it was start of datestr, i=len
+ p.moi = i + 1
+ p.setYear()
+ p.stateDate = dateDigitYearSlash
+ } else {
+ // Either Ambiguous dd/mm vs mm/dd OR dd/month/yy
+ // 08/May/2005
+ // 03/31/2005
+ // 31/03/2005
+ if i+2 < len(p.datestr) && unicode.IsLetter(rune(datestr[i+1])) {
+ // 08/May/2005
+ p.stateDate = dateDigitSlashAlpha
+ p.moi = i + 1
+ p.daylen = 2
+ p.dayi = 0
+ p.setDay()
+ continue
+ }
+ // Ambiguous dd/mm vs mm/dd the bane of date-parsing
+ // 03/31/2005
+ // 31/03/2005
+ p.ambiguousMD = true
+ if p.preferMonthFirst {
+ if p.molen == 0 {
+ // 03/31/2005
+ p.molen = i
+ p.setMonth()
+ p.dayi = i + 1
+ }
+ } else {
+ if p.daylen == 0 {
+ p.daylen = i
+ p.setDay()
+ p.moi = i + 1
+ }
+ }
+
+ }
+
+ case ':':
+ // 03/31/2005
+ // 2014/02/24
+ p.stateDate = dateDigitColon
+ if i == 4 {
+ p.yearlen = i
+ p.moi = i + 1
+ p.setYear()
+ } else {
+ p.ambiguousMD = true
+ if p.preferMonthFirst {
+ if p.molen == 0 {
+ p.molen = i
+ p.setMonth()
+ p.dayi = i + 1
+ }
+ }
+ }
+
+ case '.':
+ // 3.31.2014
+ // 08.21.71
+ // 2014.05
+ p.stateDate = dateDigitDot
+ if i == 4 {
+ p.yearlen = i
+ p.moi = i + 1
+ p.setYear()
+ } else {
+ p.ambiguousMD = true
+ p.moi = 0
+ p.molen = i
+ p.setMonth()
+ p.dayi = i + 1
+ }
+
+ case ' ':
+ // 18 January 2018
+ // 8 January 2018
+ // 8 jan 2018
+ // 02 Jan 2018 23:59
+ // 02 Jan 2018 23:59:34
+ // 12 Feb 2006, 19:17
+ // 12 Feb 2006, 19:17:22
+ if i == 6 {
+ p.stateDate = dateDigitSt
+ } else {
+ p.stateDate = dateDigitWs
+ p.dayi = 0
+ p.daylen = i
+ }
+ case '年':
+ // Chinese Year
+ p.stateDate = dateDigitChineseYear
+ case ',':
+ return nil, unknownErr(datestr)
+ default:
+ continue
+ }
+ p.part1Len = i
+
+ case dateDigitSt:
+ p.set(0, "060102")
+ i = i - 1
+ p.stateTime = timeStart
+ break iterRunes
+ case dateYearDash:
+ // dateYearDashDashT
+ // 2006-01-02T15:04:05Z07:00
+ // 2020-08-17T17:00:00:000+0100
+ // dateYearDashDashWs
+ // 2013-04-01 22:43:22
+ // dateYearDashAlphaDash
+ // 2013-Feb-03
+ switch r {
+ case '-':
+ p.molen = i - p.moi
+ p.dayi = i + 1
+ p.stateDate = dateYearDashDash
+ p.setMonth()
+ default:
+ if unicode.IsLetter(r) {
+ p.stateDate = dateYearDashAlphaDash
+ }
+ }
+
+ case dateYearDashDash:
+ // dateYearDashDashT
+ // 2006-01-02T15:04:05Z07:00
+ // dateYearDashDashWs
+ // 2013-04-01 22:43:22
+ // dateYearDashDashOffset
+ // 2020-07-20+00:00
+ switch r {
+ case '+', '-':
+ p.offseti = i
+ p.daylen = i - p.dayi
+ p.stateDate = dateYearDashDashOffset
+ p.setDay()
+ case ' ':
+ p.daylen = i - p.dayi
+ p.stateDate = dateYearDashDashWs
+ p.stateTime = timeStart
+ p.setDay()
+ break iterRunes
+ case 'T':
+ p.daylen = i - p.dayi
+ p.stateDate = dateYearDashDashT
+ p.stateTime = timeStart
+ p.setDay()
+ break iterRunes
+ }
+
+ case dateYearDashDashT:
+ // dateYearDashDashT
+ // 2006-01-02T15:04:05Z07:00
+ // 2020-08-17T17:00:00:000+0100
+
+ case dateYearDashDashOffset:
+ // 2020-07-20+00:00
+ switch r {
+ case ':':
+ p.set(p.offseti, "-07:00")
+ // case ' ':
+ // return nil, unknownErr(datestr)
+ }
+
+ case dateYearDashAlphaDash:
+ // 2013-Feb-03
+ switch r {
+ case '-':
+ p.molen = i - p.moi
+ p.set(p.moi, "Jan")
+ p.dayi = i + 1
+ }
+ case dateDigitDash:
+ // 13-Feb-03
+ // 29-Jun-2016
+ if unicode.IsLetter(r) {
+ p.stateDate = dateDigitDashAlpha
+ p.moi = i
+ } else {
+ return nil, unknownErr(datestr)
+ }
+ case dateDigitDashAlpha:
+ // 13-Feb-03
+ // 28-Feb-03
+ // 29-Jun-2016
+ switch r {
+ case '-':
+ p.molen = i - p.moi
+ p.set(p.moi, "Jan")
+ p.yeari = i + 1
+ p.stateDate = dateDigitDashAlphaDash
+ }
+
+ case dateDigitDashAlphaDash:
+ // 13-Feb-03 ambiguous
+ // 28-Feb-03 ambiguous
+ // 29-Jun-2016 dd-month(alpha)-yyyy
+ switch r {
+ case ' ':
+ // we need to find if this was 4 digits, aka year
+ // or 2 digits which makes it ambiguous year/day
+ length := i - (p.moi + p.molen + 1)
+ if length == 4 {
+ p.yearlen = 4
+ p.set(p.yeari, "2006")
+ // We now also know that part1 was the day
+ p.dayi = 0
+ p.daylen = p.part1Len
+ p.setDay()
+ } else if length == 2 {
+ // We have no idea if this is
+ // yy-mon-dd OR dd-mon-yy
+ //
+ // We are going to ASSUME (bad, bad) that it is dd-mon-yy, which is a horrible assumption
+ p.ambiguousMD = true
+ p.yearlen = 2
+ p.set(p.yeari, "06")
+ // We now also know that part1 was the day
+ p.dayi = 0
+ p.daylen = p.part1Len
+ p.setDay()
+ }
+ p.stateTime = timeStart
+ break iterRunes
+ }
+
+ case dateDigitYearSlash:
+ // 2014/07/10 06:55:38.156283
+ // I honestly don't know if this format ever shows up as yyyy/
+
+ switch r {
+ case ' ', ':':
+ p.stateTime = timeStart
+ if p.daylen == 0 {
+ p.daylen = i - p.dayi
+ p.setDay()
+ }
+ break iterRunes
+ case '/':
+ if p.molen == 0 {
+ p.molen = i - p.moi
+ p.setMonth()
+ p.dayi = i + 1
+ }
+ }
+
+ case dateDigitSlashAlpha:
+ // 06/May/2008
+
+ switch r {
+ case '/':
+ // |
+ // 06/May/2008
+ if p.molen == 0 {
+ p.set(p.moi, "Jan")
+ p.yeari = i + 1
+ }
+ // We aren't breaking because we are going to re-use this case
+ // to find where the date starts and where a possible time begins
+ case ' ', ':':
+ p.stateTime = timeStart
+ if p.yearlen == 0 {
+ p.yearlen = i - p.yeari
+ p.setYear()
+ }
+ break iterRunes
+ }
+
+ case dateDigitSlash:
+ // 03/19/2012 10:11:59
+ // 04/2/2014 03:00:37
+ // 3/1/2012 10:11:59
+ // 4/8/2014 22:05
+ // 3/1/2014
+ // 10/13/2014
+ // 01/02/2006
+ // 1/2/06
+
+ switch r {
+ case '/':
+ // This is the 2nd / so now we should know start pts of all of the dd, mm, yy
+ if p.preferMonthFirst {
+ if p.daylen == 0 {
+ p.daylen = i - p.dayi
+ p.setDay()
+ p.yeari = i + 1
+ }
+ } else {
+ if p.molen == 0 {
+ p.molen = i - p.moi
+ p.setMonth()
+ p.yeari = i + 1
+ }
+ }
+ // Note no break, we are going to pass by and re-enter this dateDigitSlash
+ // and look for ending (space) or not (just date)
+ case ' ':
+ p.stateTime = timeStart
+ if p.yearlen == 0 {
+ p.yearlen = i - p.yeari
+ p.setYear()
+ }
+ break iterRunes
+ }
+
+ case dateDigitColon:
+ // 2014:07:10 06:55:38.156283
+ // 03:19:2012 10:11:59
+ // 04:2:2014 03:00:37
+ // 3:1:2012 10:11:59
+ // 4:8:2014 22:05
+ // 3:1:2014
+ // 10:13:2014
+ // 01:02:2006
+ // 1:2:06
+
+ switch r {
+ case ' ':
+ p.stateTime = timeStart
+ if p.yearlen == 0 {
+ p.yearlen = i - p.yeari
+ p.setYear()
+ } else if p.daylen == 0 {
+ p.daylen = i - p.dayi
+ p.setDay()
+ }
+ break iterRunes
+ case ':':
+ if p.yearlen > 0 {
+ // 2014:07:10 06:55:38.156283
+ if p.molen == 0 {
+ p.molen = i - p.moi
+ p.setMonth()
+ p.dayi = i + 1
+ }
+ } else if p.preferMonthFirst {
+ if p.daylen == 0 {
+ p.daylen = i - p.dayi
+ p.setDay()
+ p.yeari = i + 1
+ }
+ }
+ }
+
+ case dateDigitWs:
+ // 18 January 2018
+ // 8 January 2018
+ // 8 jan 2018
+ // 1 jan 18
+ // 02 Jan 2018 23:59
+ // 02 Jan 2018 23:59:34
+ // 12 Feb 2006, 19:17
+ // 12 Feb 2006, 19:17:22
+ switch r {
+ case ' ':
+ p.yeari = i + 1
+ //p.yearlen = 4
+ p.dayi = 0
+ p.daylen = p.part1Len
+ p.setDay()
+ p.stateTime = timeStart
+ if i > p.daylen+len(" Sep") { // November etc
+ // If len is greater than space + 3 it must be a full month name
+ p.stateDate = dateDigitWsMolong
+ } else {
+ // If len=3, this might be Feb or May, i.e. an ambiguous abbreviation, but
+ // we can parse May with either. BUT, that means the
+ // format may not be correct?
+ // mo := strings.ToLower(datestr[p.daylen+1 : i])
+ p.moi = p.daylen + 1
+ p.molen = i - p.moi
+ p.set(p.moi, "Jan")
+ p.stateDate = dateDigitWsMoYear
+ }
+ }
+
+ case dateDigitWsMoYear:
+ // 8 jan 2018
+ // 02 Jan 2018 23:59
+ // 02 Jan 2018 23:59:34
+ // 12 Feb 2006, 19:17
+ // 12 Feb 2006, 19:17:22
+ switch r {
+ case ',':
+ p.yearlen = i - p.yeari
+ p.setYear()
+ i++
+ break iterRunes
+ case ' ':
+ p.yearlen = i - p.yeari
+ p.setYear()
+ break iterRunes
+ }
+ case dateDigitWsMolong:
+ // 18 January 2018
+ // 8 January 2018
+
+ case dateDigitChineseYear:
+ // dateDigitChineseYear
+ // 2014年04月08日
+ // weekday %Y年%m月%e日 %A %I:%M %p
+ // 2013年07月18日 星期四 10:27 上午
+ if r == ' ' {
+ p.stateDate = dateDigitChineseYearWs
+ break
+ }
+ case dateDigitDot:
+ // This is the 2nd period
+ // 3.31.2014
+ // 08.21.71
+ // 2014.05
+ // 2018.09.30
+ if r == '.' {
+ if p.moi == 0 {
+ // 3.31.2014
+ p.daylen = i - p.dayi
+ p.yeari = i + 1
+ p.setDay()
+ p.stateDate = dateDigitDotDot
+ } else {
+ // 2018.09.30
+ //p.molen = 2
+ p.molen = i - p.moi
+ p.dayi = i + 1
+ p.setMonth()
+ p.stateDate = dateDigitDotDot
+ }
+ }
+ case dateDigitDotDot:
+ // iterate all the way through
+ case dateAlpha:
+ // dateAlphaWS
+ // Mon Jan _2 15:04:05 2006
+ // Mon Jan _2 15:04:05 MST 2006
+ // Mon Jan 02 15:04:05 -0700 2006
+ // Mon Aug 10 15:44:11 UTC+0100 2015
+ // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)
+ // dateAlphaWSDigit
+ // May 8, 2009 5:57:51 PM
+ // oct 1, 1970
+ // dateAlphaWsMonth
+ // April 8, 2009
+ // dateAlphaWsMore
+ // dateAlphaWsAtTime
+ // January 02, 2006 at 3:04pm MST-07
+ //
+ // dateAlphaPeriodWsDigit
+ // oct. 1, 1970
+ // dateWeekdayComma
+ // Monday, 02 Jan 2006 15:04:05 MST
+ // Monday, 02-Jan-06 15:04:05 MST
+ // Monday, 02 Jan 2006 15:04:05 -0700
+ // Monday, 02 Jan 2006 15:04:05 +0100
+ // dateWeekdayAbbrevComma
+ // Mon, 02 Jan 2006 15:04:05 MST
+ // Mon, 02 Jan 2006 15:04:05 -0700
+ // Thu, 13 Jul 2017 08:58:40 +0100
+ // Tue, 11 Jul 2017 16:28:13 +0200 (CEST)
+ // Mon, 02-Jan-06 15:04:05 MST
+ switch {
+ case r == ' ':
+ // X
+ // April 8, 2009
+ if i > 3 {
+ // Check to see if the alpha is name of month? or Day?
+ month := strings.ToLower(datestr[0:i])
+ if isMonthFull(month) {
+ p.fullMonth = month
+ // len(" 31, 2018") = 9
+ if len(datestr[i:]) < 10 {
+ // April 8, 2009
+ p.stateDate = dateAlphaWsMonth
+ } else {
+ p.stateDate = dateAlphaWsMore
+ }
+ p.dayi = i + 1
+ break
+ }
+
+ } else {
+ // This is possibly ambiguous? May will parse as either though.
+ // So, it could return an incorrect format.
+ // dateAlphaWs
+ // May 05, 2005, 05:05:05
+ // May 05 2005, 05:05:05
+ // Jul 05, 2005, 05:05:05
+ // May 8 17:57:51 2009
+ // May 8 17:57:51 2009
+ // skip & return to dateStart
+ // Tue 05 May 2020, 05:05:05
+ // Mon Jan 2 15:04:05 2006
+
+ maybeDay := strings.ToLower(datestr[0:i])
+ if isDay(maybeDay) {
+ // using skip throws off indices used by other code; saner to restart
+ return parseTime(datestr[i+1:], loc)
+ }
+ p.stateDate = dateAlphaWs
+ }
+
+ case r == ',':
+ // Mon, 02 Jan 2006
+
+ if i == 3 {
+ p.stateDate = dateWeekdayAbbrevComma
+ p.set(0, "Mon")
+ } else {
+ p.stateDate = dateWeekdayComma
+ p.skip = i + 2
+ i++
+ // TODO: let's just make this "skip" as we don't need
+ // the mon/monday; they are all superfluous and not needed,
+ // just lay down the skip, no need to fill and then skip
+ }
+ case r == '.':
+ // sept. 28, 2017
+ // jan. 28, 2017
+ p.stateDate = dateAlphaPeriodWsDigit
+ if i == 3 {
+ p.molen = i
+ p.set(0, "Jan")
+ } else if i == 4 {
+ // gross
+ datestr = datestr[0:i-1] + datestr[i:]
+ return parseTime(datestr, loc, opts...)
+ } else {
+ return nil, unknownErr(datestr)
+ }
+ }
+
+ case dateAlphaWs:
+ // dateAlphaWsAlpha
+ // Mon Jan _2 15:04:05 2006
+ // Mon Jan _2 15:04:05 MST 2006
+ // Mon Jan 02 15:04:05 -0700 2006
+ // Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)
+ // Mon Aug 10 15:44:11 UTC+0100 2015
+ // dateAlphaWsDigit
+ // May 8, 2009 5:57:51 PM
+ // May 8 2009 5:57:51 PM
+ // May 8 17:57:51 2009
+ // May 8 17:57:51 2009
+ // May 08 17:57:51 2009
+ // oct 1, 1970
+ // oct 7, '70
+ switch {
+ case unicode.IsLetter(r):
+ p.set(0, "Mon")
+ p.stateDate = dateAlphaWsAlpha
+ p.set(i, "Jan")
+ case unicode.IsDigit(r):
+ p.set(0, "Jan")
+ p.stateDate = dateAlphaWsDigit
+ p.dayi = i
+ }
+
+ case dateAlphaWsDigit:
+ // May 8, 2009 5:57:51 PM
+ // May 8 2009 5:57:51 PM
+ // oct 1, 1970
+ // oct 7, '70
+ // oct. 7, 1970
+ // May 8 17:57:51 2009
+ // May 8 17:57:51 2009
+ // May 08 17:57:51 2009
+ if r == ',' {
+ p.daylen = i - p.dayi
+ p.setDay()
+ p.stateDate = dateAlphaWsDigitMore
+ } else if r == ' ' {
+ p.daylen = i - p.dayi
+ p.setDay()
+ p.yeari = i + 1
+ p.stateDate = dateAlphaWsDigitYearmaybe
+ p.stateTime = timeStart
+ } else if unicode.IsLetter(r) {
+ p.stateDate = dateAlphaWsMonthSuffix
+ i--
+ }
+ case dateAlphaWsDigitYearmaybe:
+ // x
+ // May 8 2009 5:57:51 PM
+ // May 8 17:57:51 2009
+ // May 8 17:57:51 2009
+ // May 08 17:57:51 2009
+ // Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)
+ if r == ':' {
+ // Guessed wrong; was not a year
+ i = i - 3
+ p.stateDate = dateAlphaWsDigit
+ p.yeari = 0
+ break iterRunes
+ } else if r == ' ' {
+ // must be year format, not 15:04
+ p.yearlen = i - p.yeari
+ p.setYear()
+ break iterRunes
+ }
+ case dateAlphaWsDigitMore:
+ // x
+ // May 8, 2009 5:57:51 PM
+ // May 05, 2005, 05:05:05
+ // May 05 2005, 05:05:05
+ // oct 1, 1970
+ // oct 7, '70
+ if r == ' ' {
+ p.yeari = i + 1
+ p.stateDate = dateAlphaWsDigitMoreWs
+ }
+ case dateAlphaWsDigitMoreWs:
+ // x
+ // May 8, 2009 5:57:51 PM
+ // May 05, 2005, 05:05:05
+ // oct 1, 1970
+ // oct 7, '70
+ switch r {
+ case '\'':
+ p.yeari = i + 1
+ case ' ', ',':
+ // x
+ // May 8, 2009 5:57:51 PM
+ // x
+ // May 8, 2009, 5:57:51 PM
+ p.stateDate = dateAlphaWsDigitMoreWsYear
+ p.yearlen = i - p.yeari
+ p.setYear()
+ p.stateTime = timeStart
+ break iterRunes
+ }
+
+ case dateAlphaWsMonth:
+ // April 8, 2009
+ // April 8 2009
+ switch r {
+ case ' ', ',':
+ // x
+ // June 8, 2009
+ // x
+ // June 8 2009
+ if p.daylen == 0 {
+ p.daylen = i - p.dayi
+ p.setDay()
+ }
+ case 's', 'S', 'r', 'R', 't', 'T', 'n', 'N':
+ // st, rd, nd, st
+ i--
+ p.stateDate = dateAlphaWsMonthSuffix
+ default:
+ if p.daylen > 0 && p.yeari == 0 {
+ p.yeari = i
+ }
+ }
+ case dateAlphaWsMonthMore:
+ // X
+ // January 02, 2006, 15:04:05
+ // January 02 2006, 15:04:05
+ // January 02, 2006 15:04:05
+ // January 02 2006 15:04:05
+ switch r {
+ case ',':
+ p.yearlen = i - p.yeari
+ p.setYear()
+ p.stateTime = timeStart
+ i++
+ break iterRunes
+ case ' ':
+ p.yearlen = i - p.yeari
+ p.setYear()
+ p.stateTime = timeStart
+ break iterRunes
+ }
+ case dateAlphaWsMonthSuffix:
+ // x
+ // April 8th, 2009
+ // April 8th 2009
+ switch r {
+ case 't', 'T':
+ if p.nextIs(i, 'h') || p.nextIs(i, 'H') {
+ if len(datestr) > i+2 {
+ return parseTime(fmt.Sprintf("%s%s", p.datestr[0:i], p.datestr[i+2:]), loc, opts...)
+ }
+ }
+ case 'n', 'N':
+ if p.nextIs(i, 'd') || p.nextIs(i, 'D') {
+ if len(datestr) > i+2 {
+ return parseTime(fmt.Sprintf("%s%s", p.datestr[0:i], p.datestr[i+2:]), loc, opts...)
+ }
+ }
+ case 's', 'S':
+ if p.nextIs(i, 't') || p.nextIs(i, 'T') {
+ if len(datestr) > i+2 {
+ return parseTime(fmt.Sprintf("%s%s", p.datestr[0:i], p.datestr[i+2:]), loc, opts...)
+ }
+ }
+ case 'r', 'R':
+ if p.nextIs(i, 'd') || p.nextIs(i, 'D') {
+ if len(datestr) > i+2 {
+ return parseTime(fmt.Sprintf("%s%s", p.datestr[0:i], p.datestr[i+2:]), loc, opts...)
+ }
+ }
+ }
+ case dateAlphaWsMore:
+ // January 02, 2006, 15:04:05
+ // January 02 2006, 15:04:05
+ // January 2nd, 2006, 15:04:05
+ // January 2nd 2006, 15:04:05
+ // September 17, 2012 at 5:00pm UTC-05
+ switch {
+ case r == ',':
+ // x
+ // January 02, 2006, 15:04:05
+ if p.nextIs(i, ' ') {
+ p.daylen = i - p.dayi
+ p.setDay()
+ p.yeari = i + 2
+ p.stateDate = dateAlphaWsMonthMore
+ i++
+ }
+
+ case r == ' ':
+ // x
+ // January 02 2006, 15:04:05
+ p.daylen = i - p.dayi
+ p.setDay()
+ p.yeari = i + 1
+ p.stateDate = dateAlphaWsMonthMore
+ case unicode.IsDigit(r):
+ // XX
+ // January 02, 2006, 15:04:05
+ continue
+ case unicode.IsLetter(r):
+ // X
+ // January 2nd, 2006, 15:04:05
+ p.daylen = i - p.dayi
+ p.setDay()
+ p.stateDate = dateAlphaWsMonthSuffix
+ i--
+ }
+
+ case dateAlphaPeriodWsDigit:
+ // oct. 7, '70
+ switch {
+ case r == ' ':
+ // continue
+ case unicode.IsDigit(r):
+ p.stateDate = dateAlphaWsDigit
+ p.dayi = i
+ default:
+ return p, unknownErr(datestr)
+ }
+ case dateWeekdayComma:
+ // Monday, 02 Jan 2006 15:04:05 MST
+ // Monday, 02 Jan 2006 15:04:05 -0700
+ // Monday, 02 Jan 2006 15:04:05 +0100
+ // Monday, 02-Jan-06 15:04:05 MST
+ if p.dayi == 0 {
+ p.dayi = i
+ }
+ switch r {
+ case ' ', '-':
+ if p.moi == 0 {
+ p.moi = i + 1
+ p.daylen = i - p.dayi
+ p.setDay()
+ } else if p.yeari == 0 {
+ p.yeari = i + 1
+ p.molen = i - p.moi
+ p.set(p.moi, "Jan")
+ } else {
+ p.stateTime = timeStart
+ break iterRunes
+ }
+ }
+ case dateWeekdayAbbrevComma:
+ // Mon, 02 Jan 2006 15:04:05 MST
+ // Mon, 02 Jan 2006 15:04:05 -0700
+ // Thu, 13 Jul 2017 08:58:40 +0100
+ // Thu, 4 Jan 2018 17:53:36 +0000
+ // Tue, 11 Jul 2017 16:28:13 +0200 (CEST)
+ // Mon, 02-Jan-06 15:04:05 MST
+ switch r {
+ case ' ', '-':
+ if p.dayi == 0 {
+ p.dayi = i + 1
+ } else if p.moi == 0 {
+ p.daylen = i - p.dayi
+ p.setDay()
+ p.moi = i + 1
+ } else if p.yeari == 0 {
+ p.molen = i - p.moi
+ p.set(p.moi, "Jan")
+ p.yeari = i + 1
+ } else {
+ p.yearlen = i - p.yeari
+ p.setYear()
+ p.stateTime = timeStart
+ break iterRunes
+ }
+ }
+
+ default:
+ break iterRunes
+ }
+ }
+ p.coalesceDate(i)
+ if p.stateTime == timeStart {
+ // increment first one, since the i++ occurs at end of loop
+ if i < len(p.datestr) {
+ i++
+ }
+ // ensure we skip any whitespace prefix
+ for ; i < len(datestr); i++ {
+ r := rune(datestr[i])
+ if r != ' ' {
+ break
+ }
+ }
+
+ iterTimeRunes:
+ for ; i < len(datestr); i++ {
+ r := rune(datestr[i])
+
+ // gou.Debugf("i=%d r=%s state=%d iterTimeRunes %s %s", i, string(r), p.stateTime, p.ds(), p.ts())
+
+ switch p.stateTime {
+ case timeStart:
+ // 22:43:22
+ // 22:43
+ // timeComma
+ // 08:20:13,787
+ // timeWs
+ // 05:24:37 PM
+ // 06:20:00 UTC
+ // 06:20:00 UTC-05
+ // 00:12:00 +0000 UTC
+ // 22:18:00 +0000 UTC m=+0.000000001
+ // 15:04:05 -0700
+ // 15:04:05 -07:00
+ // 15:04:05 2008
+ // timeOffset
+ // 03:21:51+00:00
+ // 19:55:00+0100
+ // timePeriod
+ // 17:24:37.3186369
+ // 00:07:31.945167
+ // 18:31:59.257000000
+ // 00:00:00.000
+ // timePeriodOffset
+ // 19:55:00.799+0100
+ // timePeriodOffsetColon
+ // 15:04:05.999-07:00
+ // timePeriodWs
+ // timePeriodWsOffset
+ // 00:07:31.945167 +0000
+ // 00:00:00.000 +0000
+ // timePeriodWsOffsetAlpha
+ // 00:07:31.945167 +0000 UTC
+ // 22:18:00.001 +0000 UTC m=+0.000000001
+ // 00:00:00.000 +0000 UTC
+ // timePeriodWsAlpha
+ // 06:20:00.000 UTC
+ if p.houri == 0 {
+ p.houri = i
+ }
+ switch r {
+ case ',':
+ // hm, let's just swap out the comma for a period. For some reason Go
+ // won't parse it otherwise.
+ // 2014-05-11 08:20:13,787
+ ds := []byte(p.datestr)
+ ds[i] = '.'
+ return parseTime(string(ds), loc, opts...)
+ case '-', '+':
+ // 03:21:51+00:00
+ p.stateTime = timeOffset
+ if p.seci == 0 {
+ // 22:18+0530
+ p.minlen = i - p.mini
+ } else {
+ if p.seclen == 0 {
+ p.seclen = i - p.seci
+ }
+ if p.msi > 0 && p.mslen == 0 {
+ p.mslen = i - p.msi
+ }
+ }
+ p.offseti = i
+ case '.':
+ p.stateTime = timePeriod
+ p.seclen = i - p.seci
+ p.msi = i + 1
+ case 'Z':
+ p.stateTime = timeZ
+ if p.seci == 0 {
+ p.minlen = i - p.mini
+ } else {
+ p.seclen = i - p.seci
+ }
+ // (Z)ulu time
+ p.loc = time.UTC
+ case 'a', 'A':
+ if p.nextIs(i, 't') || p.nextIs(i, 'T') {
+ // x
+ // September 17, 2012 at 5:00pm UTC-05
+ i++ // skip t
+ if p.nextIs(i, ' ') {
+ // x
+ // September 17, 2012 at 5:00pm UTC-05
+ i++ // skip '
+ p.houri = 0 // reset hour
+ }
+ } else {
+ switch {
+ case r == 'a' && p.nextIs(i, 'm'):
+ p.coalesceTime(i)
+ p.set(i, "am")
+ case r == 'A' && p.nextIs(i, 'M'):
+ p.coalesceTime(i)
+ p.set(i, "PM")
+ }
+ }
+
+ case 'p', 'P':
+ // Could be AM/PM
+ switch {
+ case r == 'p' && p.nextIs(i, 'm'):
+ p.coalesceTime(i)
+ p.set(i, "pm")
+ case r == 'P' && p.nextIs(i, 'M'):
+ p.coalesceTime(i)
+ p.set(i, "PM")
+ }
+ case ' ':
+ p.coalesceTime(i)
+ p.stateTime = timeWs
+ case ':':
+ if p.mini == 0 {
+ p.mini = i + 1
+ p.hourlen = i - p.houri
+ } else if p.seci == 0 {
+ p.seci = i + 1
+ p.minlen = i - p.mini
+ } else if p.seci > 0 {
+ // 18:31:59:257 ms uses colon, wtf
+ p.seclen = i - p.seci
+ p.set(p.seci, "05")
+ p.msi = i + 1
+
+ // gross, gross, gross. manipulating the datestr is horrible.
+ // https://github.com/araddon/dateparse/issues/117
+ // Could not get the parsing to work using golang time.Parse() without
+ // replacing that colon with period.
+ p.set(i, ".")
+ datestr = datestr[0:i] + "." + datestr[i+1:]
+ p.datestr = datestr
+ }
+ }
+ case timeOffset:
+ // 19:55:00+0100
+ // timeOffsetColon
+ // 15:04:05+07:00
+ // 15:04:05-07:00
+ if r == ':' {
+ p.stateTime = timeOffsetColon
+ }
+ case timeWs:
+ // timeWsAlpha
+ // 06:20:00 UTC
+ // 06:20:00 UTC-05
+ // 15:44:11 UTC+0100 2015
+ // 18:04:07 GMT+0100 (GMT Daylight Time)
+ // 17:57:51 MST 2009
+ // timeWsAMPMMaybe
+ // 05:24:37 PM
+ // timeWsOffset
+ // 15:04:05 -0700
+ // 00:12:00 +0000 UTC
+ // timeWsOffsetColon
+ // 15:04:05 -07:00
+ // 17:57:51 -0700 2009
+ // timeWsOffsetColonAlpha
+ // 00:12:00 +00:00 UTC
+ // timeWsYear
+ // 00:12:00 2008
+ // timeZ
+ // 15:04:05.99Z
+ switch r {
+ case 'A', 'P':
+ // Could be AM/PM or could be PST or similar
+ p.tzi = i
+ p.stateTime = timeWsAMPMMaybe
+ case '+', '-':
+ p.offseti = i
+ p.stateTime = timeWsOffset
+ default:
+ if unicode.IsLetter(r) {
+ // 06:20:00 UTC
+ // 06:20:00 UTC-05
+ // 15:44:11 UTC+0100 2015
+ // 17:57:51 MST 2009
+ p.tzi = i
+ p.stateTime = timeWsAlpha
+ } else if unicode.IsDigit(r) {
+ // 00:12:00 2008
+ p.stateTime = timeWsYear
+ p.yeari = i
+ }
+ }
+ case timeWsAlpha:
+ // 06:20:00 UTC
+ // 06:20:00 UTC-05
+ // timeWsAlphaWs
+ // 17:57:51 MST 2009
+ // timeWsAlphaZoneOffset
+ // timeWsAlphaZoneOffsetWs
+ // timeWsAlphaZoneOffsetWsExtra
+ // 18:04:07 GMT+0100 (GMT Daylight Time)
+ // timeWsAlphaZoneOffsetWsYear
+ // 15:44:11 UTC+0100 2015
+ switch r {
+ case '+', '-':
+ p.tzlen = i - p.tzi
+ if p.tzlen == 4 {
+ p.set(p.tzi, " MST")
+ } else if p.tzlen == 3 {
+ p.set(p.tzi, "MST")
+ }
+ p.stateTime = timeWsAlphaZoneOffset
+ p.offseti = i
+ case ' ':
+ // 17:57:51 MST 2009
+ // 17:57:51 MST
+ p.tzlen = i - p.tzi
+ if p.tzlen == 4 {
+ p.set(p.tzi, " MST")
+ } else if p.tzlen == 3 {
+ p.set(p.tzi, "MST")
+ }
+ p.stateTime = timeWsAlphaWs
+ p.yeari = i + 1
+ }
+ case timeWsAlphaWs:
+ // 17:57:51 MST 2009
+
+ case timeWsAlphaZoneOffset:
+ // 06:20:00 UTC-05
+ // timeWsAlphaZoneOffset
+ // timeWsAlphaZoneOffsetWs
+ // timeWsAlphaZoneOffsetWsExtra
+ // 18:04:07 GMT+0100 (GMT Daylight Time)
+ // timeWsAlphaZoneOffsetWsYear
+ // 15:44:11 UTC+0100 2015
+ switch r {
+ case ' ':
+ p.set(p.offseti, "-0700")
+ if p.yeari == 0 {
+ p.yeari = i + 1
+ }
+ p.stateTime = timeWsAlphaZoneOffsetWs
+ }
+ case timeWsAlphaZoneOffsetWs:
+ // timeWsAlphaZoneOffsetWs
+ // timeWsAlphaZoneOffsetWsExtra
+ // 18:04:07 GMT+0100 (GMT Daylight Time)
+ // timeWsAlphaZoneOffsetWsYear
+ // 15:44:11 UTC+0100 2015
+ if unicode.IsDigit(r) {
+ p.stateTime = timeWsAlphaZoneOffsetWsYear
+ } else {
+ p.extra = i - 1
+ p.stateTime = timeWsAlphaZoneOffsetWsExtra
+ }
+ case timeWsAlphaZoneOffsetWsYear:
+ // 15:44:11 UTC+0100 2015
+ if unicode.IsDigit(r) {
+ p.yearlen = i - p.yeari + 1
+ if p.yearlen == 4 {
+ p.setYear()
+ }
+ }
+ case timeWsAMPMMaybe:
+ // timeWsAMPMMaybe
+ // timeWsAMPM
+ // 05:24:37 PM
+ // timeWsAlpha
+ // 00:12:00 PST
+ // 15:44:11 UTC+0100 2015
+ if r == 'M' {
+ //return parse("2006-01-02 03:04:05 PM", datestr, loc)
+ p.stateTime = timeWsAMPM
+ p.set(i-1, "PM")
+ if p.hourlen == 2 {
+ p.set(p.houri, "03")
+ } else if p.hourlen == 1 {
+ p.set(p.houri, "3")
+ }
+ } else {
+ p.stateTime = timeWsAlpha
+ }
+
+ case timeWsOffset:
+ // timeWsOffset
+ // 15:04:05 -0700
+ // timeWsOffsetWsOffset
+ // 17:57:51 -0700 -07
+ // timeWsOffsetWs
+ // 17:57:51 -0700 2009
+ // 00:12:00 +0000 UTC
+ // timeWsOffsetColon
+ // 15:04:05 -07:00
+ // timeWsOffsetColonAlpha
+ // 00:12:00 +00:00 UTC
+ switch r {
+ case ':':
+ p.stateTime = timeWsOffsetColon
+ case ' ':
+ p.set(p.offseti, "-0700")
+ p.yeari = i + 1
+ p.stateTime = timeWsOffsetWs
+ }
+ case timeWsOffsetWs:
+ // 17:57:51 -0700 2009
+ // 00:12:00 +0000 UTC
+ // 22:18:00.001 +0000 UTC m=+0.000000001
+ // w Extra
+ // 17:57:51 -0700 -07
+ switch r {
+ case '=':
+ // eff you golang
+ if datestr[i-1] == 'm' {
+ p.extra = i - 2
+ p.trimExtra()
+ break
+ }
+ case '+', '-', '(':
+ // This really doesn't seem valid, but for some reason when round-tripping a Go date
+ // there is an extra +03 printed out. Seems like a Go bug to me, but, parsing anyway.
+ // 00:00:00 +0300 +03
+ // 00:00:00 +0300 +0300
+ p.extra = i - 1
+ p.stateTime = timeWsOffset
+ p.trimExtra()
+ break
+ default:
+ switch {
+ case unicode.IsDigit(r):
+ p.yearlen = i - p.yeari + 1
+ if p.yearlen == 4 {
+ p.setYear()
+ }
+ case unicode.IsLetter(r):
+ // 15:04:05 -0700 MST
+ if p.tzi == 0 {
+ p.tzi = i
+ }
+ }
+ }
+
+ case timeWsOffsetColon:
+ // timeWsOffsetColon
+ // 15:04:05 -07:00
+ // timeWsOffsetColonAlpha
+ // 2015-02-18 00:12:00 +00:00 UTC
+ if unicode.IsLetter(r) {
+ // 2015-02-18 00:12:00 +00:00 UTC
+ p.stateTime = timeWsOffsetColonAlpha
+ break iterTimeRunes
+ }
+ case timePeriod:
+ // 15:04:05.999999999+07:00
+ // 15:04:05.999999999-07:00
+ // 15:04:05.999999+07:00
+ // 15:04:05.999999-07:00
+ // 15:04:05.999+07:00
+ // 15:04:05.999-07:00
+ // timePeriod
+ // 17:24:37.3186369
+ // 00:07:31.945167
+ // 18:31:59.257000000
+ // 00:00:00.000
+ // timePeriodOffset
+ // 19:55:00.799+0100
+ // timePeriodOffsetColon
+ // 15:04:05.999-07:00
+ // timePeriodWs
+ // timePeriodWsOffset
+ // 00:07:31.945167 +0000
+ // 00:00:00.000 +0000
+ // With Extra
+ // 00:00:00.000 +0300 +03
+ // timePeriodWsOffsetAlpha
+ // 00:07:31.945167 +0000 UTC
+ // 00:00:00.000 +0000 UTC
+ // 22:18:00.001 +0000 UTC m=+0.000000001
+ // timePeriodWsAlpha
+ // 06:20:00.000 UTC
+ switch r {
+ case ' ':
+ p.mslen = i - p.msi
+ p.stateTime = timePeriodWs
+ case '+', '-':
+ // This really shouldn't happen
+ p.mslen = i - p.msi
+ p.offseti = i
+ p.stateTime = timePeriodOffset
+ default:
+ if unicode.IsLetter(r) {
+ // 06:20:00.000 UTC
+ p.mslen = i - p.msi
+ p.stateTime = timePeriodWsAlpha
+ }
+ }
+ case timePeriodOffset:
+ // timePeriodOffset
+ // 19:55:00.799+0100
+ // timePeriodOffsetColon
+ // 15:04:05.999-07:00
+ // 13:31:51.999-07:00 MST
+ if r == ':' {
+ p.stateTime = timePeriodOffsetColon
+ }
+ case timePeriodOffsetColon:
+ // timePeriodOffset
+ // timePeriodOffsetColon
+ // 15:04:05.999-07:00
+ // 13:31:51.999 -07:00 MST
+ switch r {
+ case ' ':
+ p.set(p.offseti, "-07:00")
+ p.stateTime = timePeriodOffsetColonWs
+ p.tzi = i + 1
+ }
+ case timePeriodOffsetColonWs:
+ // continue
+ case timePeriodWs:
+ // timePeriodWs
+ // timePeriodWsOffset
+ // 00:07:31.945167 +0000
+ // 00:00:00.000 +0000
+ // timePeriodWsOffsetAlpha
+ // 00:07:31.945167 +0000 UTC
+ // 00:00:00.000 +0000 UTC
+ // timePeriodWsOffsetColon
+ // 13:31:51.999 -07:00 MST
+ // timePeriodWsAlpha
+ // 06:20:00.000 UTC
+ if p.offseti == 0 {
+ p.offseti = i
+ }
+ switch r {
+ case '+', '-':
+ p.mslen = i - p.msi - 1
+ p.stateTime = timePeriodWsOffset
+ default:
+ if unicode.IsLetter(r) {
+ // 00:07:31.945167 +0000 UTC
+ // 00:00:00.000 +0000 UTC
+ p.stateTime = timePeriodWsOffsetWsAlpha
+ break iterTimeRunes
+ }
+ }
+
+ case timePeriodWsOffset:
+ // timePeriodWs
+ // timePeriodWsOffset
+ // 00:07:31.945167 +0000
+ // 00:00:00.000 +0000
+ // With Extra
+ // 00:00:00.000 +0300 +03
+ // timePeriodWsOffsetAlpha
+ // 00:07:31.945167 +0000 UTC
+ // 00:00:00.000 +0000 UTC
+ // 03:02:00.001 +0300 MSK m=+0.000000001
+ // timePeriodWsOffsetColon
+ // 13:31:51.999 -07:00 MST
+ // timePeriodWsAlpha
+ // 06:20:00.000 UTC
+ switch r {
+ case ':':
+ p.stateTime = timePeriodWsOffsetColon
+ case ' ':
+ p.set(p.offseti, "-0700")
+ case '+', '-':
+ // This really doesn't seem valid, but for some reason when round-tripping a Go date
+ // there is an extra +03 printed out. Seems like a Go bug to me, but, parsing anyway.
+ // 00:00:00.000 +0300 +03
+ // 00:00:00.000 +0300 +0300
+ p.extra = i - 1
+ p.trimExtra()
+ break
+ default:
+ if unicode.IsLetter(r) {
+ // 00:07:31.945167 +0000 UTC
+ // 00:00:00.000 +0000 UTC
+ // 03:02:00.001 +0300 MSK m=+0.000000001
+ p.stateTime = timePeriodWsOffsetWsAlpha
+ }
+ }
+ case timePeriodWsOffsetWsAlpha:
+ // 03:02:00.001 +0300 MSK m=+0.000000001
+ // eff you golang
+ if r == '=' && datestr[i-1] == 'm' {
+ p.extra = i - 2
+ p.trimExtra()
+ break
+ }
+
+ case timePeriodWsOffsetColon:
+ // 13:31:51.999 -07:00 MST
+ switch r {
+ case ' ':
+ p.set(p.offseti, "-07:00")
+ default:
+ if unicode.IsLetter(r) {
+ // 13:31:51.999 -07:00 MST
+ p.tzi = i
+ p.stateTime = timePeriodWsOffsetColonAlpha
+ }
+ }
+ case timePeriodWsOffsetColonAlpha:
+ // continue
+ case timeZ:
+ // timeZ
+ // 15:04:05.99Z
+ // With a time-zone at end after Z
+ // 2006-01-02T15:04:05.999999999Z07:00
+ // 2006-01-02T15:04:05Z07:00
+ // RFC3339 = "2006-01-02T15:04:05Z07:00"
+ // RFC3339Nano = "2006-01-02T15:04:05.999999999Z07:00"
+ if unicode.IsDigit(r) {
+ p.stateTime = timeZDigit
+ }
+
+ }
+ }
+
+ switch p.stateTime {
+ case timeWsAlpha:
+ switch len(p.datestr) - p.tzi {
+ case 3:
+ // 13:31:51.999 +01:00 CET
+ p.set(p.tzi, "MST")
+ case 4:
+ p.set(p.tzi, "MST")
+ p.extra = len(p.datestr) - 1
+ p.trimExtra()
+ }
+
+ case timeWsAlphaWs:
+ p.yearlen = i - p.yeari
+ p.setYear()
+ case timeWsYear:
+ p.yearlen = i - p.yeari
+ p.setYear()
+ case timeWsAlphaZoneOffsetWsExtra:
+ p.trimExtra()
+ case timeWsAlphaZoneOffset:
+ // 06:20:00 UTC-05
+ if i-p.offseti < 4 {
+ p.set(p.offseti, "-07")
+ } else {
+ p.set(p.offseti, "-0700")
+ }
+
+ case timePeriod:
+ p.mslen = i - p.msi
+ case timeOffset:
+
+ switch len(p.datestr) - p.offseti {
+ case 0, 1, 2, 4:
+ return p, fmt.Errorf("TZ offset not recognized %q near %q (must be 2 or 4 digits optional colon)", datestr, string(datestr[p.offseti:]))
+ case 3:
+ // 19:55:00+01
+ p.set(p.offseti, "-07")
+ case 5:
+ // 19:55:00+0100
+ p.set(p.offseti, "-0700")
+ }
+
+ case timeWsOffset:
+ p.set(p.offseti, "-0700")
+ case timeWsOffsetWs:
+ // 17:57:51 -0700 2009
+ // 00:12:00 +0000 UTC
+ if p.tzi > 0 {
+ switch len(p.datestr) - p.tzi {
+ case 3:
+ // 13:31:51.999 +01:00 CET
+ p.set(p.tzi, "MST")
+ case 4:
+ // 13:31:51.999 +01:00 CEST
+ p.set(p.tzi, "MST ")
+ }
+
+ }
+ case timeWsOffsetColon:
+ // 17:57:51 -07:00
+ p.set(p.offseti, "-07:00")
+ case timeOffsetColon:
+ // 15:04:05+07:00
+ p.set(p.offseti, "-07:00")
+ case timePeriodOffset:
+ // 19:55:00.799+0100
+ p.set(p.offseti, "-0700")
+ case timePeriodOffsetColon:
+ p.set(p.offseti, "-07:00")
+ case timePeriodWsOffsetColonAlpha:
+ p.tzlen = i - p.tzi
+ switch p.tzlen {
+ case 3:
+ p.set(p.tzi, "MST")
+ case 4:
+ p.set(p.tzi, "MST ")
+ }
+ case timePeriodWsOffset:
+ p.set(p.offseti, "-0700")
+ }
+ p.coalesceTime(i)
+ }
+
+ switch p.stateDate {
+ case dateDigit:
+ // unixy timestamps ish
+ // example ct type
+ // 1499979655583057426 19 nanoseconds
+ // 1499979795437000 16 micro-seconds
+ // 20180722105203 14 yyyyMMddhhmmss
+ // 1499979795437 13 milliseconds
+ // 1332151919 10 seconds
+ // 20140601 8 yyyymmdd
+ // 2014 4 yyyy
+ t := time.Time{}
+ if len(datestr) == len("1499979655583057426") { // 19
+ // nano-seconds
+ if nanoSecs, err := strconv.ParseInt(datestr, 10, 64); err == nil {
+ t = time.Unix(0, nanoSecs)
+ }
+ } else if len(datestr) == len("1499979795437000") { // 16
+ // micro-seconds
+ if microSecs, err := strconv.ParseInt(datestr, 10, 64); err == nil {
+ t = time.Unix(0, microSecs*1000)
+ }
+ } else if len(datestr) == len("yyyyMMddhhmmss") { // 14
+ // yyyyMMddhhmmss
+ p.format = []byte("20060102150405")
+ return p, nil
+ } else if len(datestr) == len("1332151919000") { // 13
+ if milliSecs, err := strconv.ParseInt(datestr, 10, 64); err == nil {
+ t = time.Unix(0, milliSecs*1000*1000)
+ }
+ } else if len(datestr) == len("1332151919") { //10
+ if secs, err := strconv.ParseInt(datestr, 10, 64); err == nil {
+ t = time.Unix(secs, 0)
+ }
+ } else if len(datestr) == len("20140601") {
+ p.format = []byte("20060102")
+ return p, nil
+ } else if len(datestr) == len("2014") {
+ p.format = []byte("2006")
+ return p, nil
+ } else if len(datestr) < 4 {
+ return nil, fmt.Errorf("unrecognized format, too short %v", datestr)
+ }
+ if !t.IsZero() {
+ if loc == nil {
+ p.t = &t
+ return p, nil
+ }
+ t = t.In(loc)
+ p.t = &t
+ return p, nil
+ }
+ case dateDigitSt:
+ // 171113 14:14:20
+ return p, nil
+
+ case dateYearDash:
+ // 2006-01
+ return p, nil
+
+ case dateYearDashDash:
+ // 2006-01-02
+ // 2006-1-02
+ // 2006-1-2
+ // 2006-01-2
+ return p, nil
+
+ case dateYearDashDashOffset:
+ // 2020-07-20+00:00
+ switch len(p.datestr) - p.offseti {
+ case 5:
+ p.set(p.offseti, "-0700")
+ case 6:
+ p.set(p.offseti, "-07:00")
+ }
+ return p, nil
+
+ case dateYearDashAlphaDash:
+ // 2013-Feb-03
+ // 2013-Feb-3
+ p.daylen = i - p.dayi
+ p.setDay()
+ return p, nil
+
+ case dateYearDashDashWs:
+ // 2013-04-01
+ return p, nil
+
+ case dateYearDashDashT:
+ return p, nil
+
+ case dateDigitDashAlphaDash:
+ // 13-Feb-03 ambiguous
+ // 28-Feb-03 ambiguous
+ // 29-Jun-2016
+ length := len(datestr) - (p.moi + p.molen + 1)
+ if length == 4 {
+ p.yearlen = 4
+ p.set(p.yeari, "2006")
+ // We now also know that part1 was the day
+ p.dayi = 0
+ p.daylen = p.part1Len
+ p.setDay()
+ } else if length == 2 {
+ // We have no idea if this is
+ // yy-mon-dd OR dd-mon-yy
+ //
+ // We are going to ASSUME (bad, bad) that it is dd-mon-yy, which is a horrible assumption
+ p.ambiguousMD = true
+ p.yearlen = 2
+ p.set(p.yeari, "06")
+ // We now also know that part1 was the day
+ p.dayi = 0
+ p.daylen = p.part1Len
+ p.setDay()
+ }
+
+ return p, nil
+
+ case dateDigitDot:
+ // 2014.05
+ p.molen = i - p.moi
+ p.setMonth()
+ return p, nil
+
+ case dateDigitDotDot:
+ // 03.31.1981
+ // 3.31.2014
+ // 3.2.1981
+ // 3.2.81
+ // 08.21.71
+ // 2018.09.30
+ return p, nil
+
+ case dateDigitWsMoYear:
+ // 2 Jan 2018
+ // 2 Jan 18
+ // 2 Jan 2018 23:59
+ // 02 Jan 2018 23:59
+ // 12 Feb 2006, 19:17
+ return p, nil
+
+ case dateDigitWsMolong:
+ // 18 January 2018
+ // 8 January 2018
+ if p.daylen == 2 {
+ p.format = []byte("02 January 2006")
+ return p, nil
+ }
+ p.format = []byte("2 January 2006")
+ return p, nil // parse("2 January 2006", datestr, loc)
+
+ case dateAlphaWsMonth:
+ p.yearlen = i - p.yeari
+ p.setYear()
+ return p, nil
+
+ case dateAlphaWsMonthMore:
+ return p, nil
+
+ case dateAlphaWsDigitMoreWs:
+ // oct 1, 1970
+ p.yearlen = i - p.yeari
+ p.setYear()
+ return p, nil
+
+ case dateAlphaWsDigitMoreWsYear:
+ // May 8, 2009 5:57:51 PM
+ // Jun 7, 2005, 05:57:51
+ return p, nil
+
+ case dateAlphaWsAlpha:
+ return p, nil
+
+ case dateAlphaWsDigit:
+ return p, nil
+
+ case dateAlphaWsDigitYearmaybe:
+ return p, nil
+
+ case dateDigitSlash:
+ // 3/1/2014
+ // 10/13/2014
+ // 01/02/2006
+ return p, nil
+
+ case dateDigitSlashAlpha:
+ // 03/Jun/2014
+ return p, nil
+
+ case dateDigitYearSlash:
+ // 2014/10/13
+ return p, nil
+
+ case dateDigitColon:
+ // 3:1:2014
+ // 10:13:2014
+ // 01:02:2006
+ // 2014:10:13
+ return p, nil
+
+ case dateDigitChineseYear:
+ // dateDigitChineseYear
+ // 2014年04月08日
+ p.format = []byte("2006年01月02日")
+ return p, nil
+
+ case dateDigitChineseYearWs:
+ p.format = []byte("2006年01月02日 15:04:05")
+ return p, nil
+
+ case dateWeekdayComma:
+ // Monday, 02 Jan 2006 15:04:05 -0700
+ // Monday, 02 Jan 2006 15:04:05 +0100
+ // Monday, 02-Jan-06 15:04:05 MST
+ return p, nil
+
+ case dateWeekdayAbbrevComma:
+ // Mon, 02-Jan-06 15:04:05 MST
+ // Mon, 02 Jan 2006 15:04:05 MST
+ return p, nil
+
+ }
+
+ return nil, unknownErr(datestr)
+}
+
+type parser struct {
+ loc *time.Location
+ preferMonthFirst bool
+ retryAmbiguousDateWithSwap bool
+ ambiguousMD bool
+ stateDate dateState
+ stateTime timeState
+ format []byte
+ datestr string
+ fullMonth string
+ skip int
+ extra int
+ part1Len int
+ yeari int
+ yearlen int
+ moi int
+ molen int
+ dayi int
+ daylen int
+ houri int
+ hourlen int
+ mini int
+ minlen int
+ seci int
+ seclen int
+ msi int
+ mslen int
+ offseti int
+ offsetlen int
+ tzi int
+ tzlen int
+ t *time.Time
+}
+
+// ParserOption defines a function signature implemented by options.
+// Options defined like this accept the parser and operate on the data within.
+type ParserOption func(*parser) error
+
+// PreferMonthFirst is an option that allows preferMonthFirst to be changed from its default
+func PreferMonthFirst(preferMonthFirst bool) ParserOption {
+ return func(p *parser) error {
+ p.preferMonthFirst = preferMonthFirst
+ return nil
+ }
+}
+
+// RetryAmbiguousDateWithSwap is an option that allows retryAmbiguousDateWithSwap to be changed from its default
+func RetryAmbiguousDateWithSwap(retryAmbiguousDateWithSwap bool) ParserOption {
+ return func(p *parser) error {
+ p.retryAmbiguousDateWithSwap = retryAmbiguousDateWithSwap
+ return nil
+ }
+}
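+
+// For example, options can be combined per call (a sketch; each option simply
+// mutates a parser field, so order does not matter):
+//
+//	t, err := dateparse.ParseAny("18/02/2021",
+//		dateparse.PreferMonthFirst(false),
+//		dateparse.RetryAmbiguousDateWithSwap(true))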
+
+func newParser(dateStr string, loc *time.Location, opts ...ParserOption) *parser {
+ p := &parser{
+ stateDate: dateStart,
+ stateTime: timeIgnore,
+ datestr: dateStr,
+ loc: loc,
+ preferMonthFirst: true,
+ retryAmbiguousDateWithSwap: false,
+ }
+ p.format = []byte(dateStr)
+
+ // allow the options to mutate the parser fields from their defaults
+ for _, option := range opts {
+ option(p)
+ }
+ return p
+}
+
+func (p *parser) nextIs(i int, b byte) bool {
+ return len(p.datestr) > i+1 && p.datestr[i+1] == b
+}
+
+func (p *parser) set(start int, val string) {
+ if start < 0 {
+ return
+ }
+ if len(p.format) < start+len(val) {
+ return
+ }
+ for i, r := range val {
+ p.format[start+i] = byte(r)
+ }
+}
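+
+// For example, given datestr "2014-04-26", set(0, "2006") rewrites the first
+// four bytes of the working format buffer to the year layout, yielding
+// "2006-04-26"; later calls lay down "01" and "02" the same way until the
+// buffer is a complete time.Parse layout.
+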
+func (p *parser) setMonth() {
+ if p.molen == 2 {
+ p.set(p.moi, "01")
+ } else if p.molen == 1 {
+ p.set(p.moi, "1")
+ }
+}
+
+func (p *parser) setDay() {
+ if p.daylen == 2 {
+ p.set(p.dayi, "02")
+ } else if p.daylen == 1 {
+ p.set(p.dayi, "2")
+ }
+}
+func (p *parser) setYear() {
+ if p.yearlen == 2 {
+ p.set(p.yeari, "06")
+ } else if p.yearlen == 4 {
+ p.set(p.yeari, "2006")
+ }
+}
+func (p *parser) coalesceDate(end int) {
+ if p.yeari > 0 {
+ if p.yearlen == 0 {
+ p.yearlen = end - p.yeari
+ }
+ p.setYear()
+ }
+ if p.moi > 0 && p.molen == 0 {
+ p.molen = end - p.moi
+ p.setMonth()
+ }
+ if p.dayi > 0 && p.daylen == 0 {
+ p.daylen = end - p.dayi
+ p.setDay()
+ }
+}
+func (p *parser) ts() string {
+ return fmt.Sprintf("h:(%d:%d) m:(%d:%d) s:(%d:%d)", p.houri, p.hourlen, p.mini, p.minlen, p.seci, p.seclen)
+}
+func (p *parser) ds() string {
+ return fmt.Sprintf("%s d:(%d:%d) m:(%d:%d) y:(%d:%d)", p.datestr, p.dayi, p.daylen, p.moi, p.molen, p.yeari, p.yearlen)
+}
+func (p *parser) coalesceTime(end int) {
+ // 03:04:05
+ // 15:04:05
+ // 3:04:05
+ // 3:4:5
+ // 15:04:05.00
+ if p.houri > 0 {
+ if p.hourlen == 2 {
+ p.set(p.houri, "15")
+ } else if p.hourlen == 1 {
+ p.set(p.houri, "3")
+ }
+ }
+ if p.mini > 0 {
+ if p.minlen == 0 {
+ p.minlen = end - p.mini
+ }
+ if p.minlen == 2 {
+ p.set(p.mini, "04")
+ } else {
+ p.set(p.mini, "4")
+ }
+ }
+ if p.seci > 0 {
+ if p.seclen == 0 {
+ p.seclen = end - p.seci
+ }
+ if p.seclen == 2 {
+ p.set(p.seci, "05")
+ } else {
+ p.set(p.seci, "5")
+ }
+ }
+
+ if p.msi > 0 {
+ for i := 0; i < p.mslen; i++ {
+ p.format[p.msi+i] = '0'
+ }
+ }
+}
+func (p *parser) setFullMonth(month string) {
+ if p.moi == 0 {
+ p.format = []byte(fmt.Sprintf("%s%s", "January", p.format[len(month):]))
+ }
+}
+
+func (p *parser) trimExtra() {
+ if p.extra > 0 && len(p.format) > p.extra {
+ p.format = p.format[0:p.extra]
+ p.datestr = p.datestr[0:p.extra]
+ }
+}
+
+// func (p *parser) remove(i, length int) {
+// if len(p.format) > i+length {
+// //append(a[:i], a[j:]...)
+// p.format = append(p.format[0:i], p.format[i+length:]...)
+// }
+// if len(p.datestr) > i+length {
+// //append(a[:i], a[j:]...)
+// p.datestr = fmt.Sprintf("%s%s", p.datestr[0:i], p.datestr[i+length:])
+// }
+// }
+
+func (p *parser) parse() (time.Time, error) {
+ if p.t != nil {
+ return *p.t, nil
+ }
+ if len(p.fullMonth) > 0 {
+ p.setFullMonth(p.fullMonth)
+ }
+ if p.skip > 0 && len(p.format) > p.skip {
+ p.format = p.format[p.skip:]
+ p.datestr = p.datestr[p.skip:]
+ }
+
+ if p.loc == nil {
+ // gou.Debugf("parse layout=%q input=%q \ntx, err := time.Parse(%q, %q)", string(p.format), p.datestr, string(p.format), p.datestr)
+ return time.Parse(string(p.format), p.datestr)
+ }
+ //gou.Debugf("parse layout=%q input=%q \ntx, err := time.ParseInLocation(%q, %q, %v)", string(p.format), p.datestr, string(p.format), p.datestr, p.loc)
+ return time.ParseInLocation(string(p.format), p.datestr, p.loc)
+}
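+
+// Note: by this point, an input such as "2014-04-26 17:24:37" has had its
+// format buffer rewritten in place to "2006-01-02 15:04:05", which is the
+// layout handed to time.Parse/time.ParseInLocation above.
+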
+func isDay(alpha string) bool {
+ for _, day := range days {
+ if alpha == day {
+ return true
+ }
+ }
+ return false
+}
+func isMonthFull(alpha string) bool {
+ for _, month := range months {
+ if alpha == month {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/bits-and-blooms/bitset/.gitignore b/vendor/github.com/bits-and-blooms/bitset/.gitignore
new file mode 100644
index 0000000..5c204d2
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/.gitignore
@@ -0,0 +1,26 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
+
+target
diff --git a/vendor/github.com/bits-and-blooms/bitset/.travis.yml b/vendor/github.com/bits-and-blooms/bitset/.travis.yml
new file mode 100644
index 0000000..094aa5c
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/.travis.yml
@@ -0,0 +1,37 @@
+language: go
+
+sudo: false
+
+branches:
+ except:
+ - release
+
+branches:
+ only:
+ - master
+ - travis
+
+go:
+ - "1.11.x"
+ - tip
+
+matrix:
+ allow_failures:
+ - go: tip
+
+before_install:
+ - if [ -n "$GH_USER" ]; then git config --global github.user ${GH_USER}; fi;
+ - if [ -n "$GH_TOKEN" ]; then git config --global github.token ${GH_TOKEN}; fi;
+ - go get github.com/mattn/goveralls
+
+before_script:
+ - make deps
+
+script:
+ - make qa
+
+after_failure:
+ - cat ./target/test/report.xml
+
+after_success:
+ - if [ "$TRAVIS_GO_VERSION" = "1.11.1" ]; then $HOME/gopath/bin/goveralls -covermode=count -coverprofile=target/report/coverage.out -service=travis-ci; fi;
diff --git a/vendor/github.com/bits-and-blooms/bitset/LICENSE b/vendor/github.com/bits-and-blooms/bitset/LICENSE
new file mode 100644
index 0000000..59cab8a
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2014 Will Fitzgerald. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/bits-and-blooms/bitset/README.md b/vendor/github.com/bits-and-blooms/bitset/README.md
new file mode 100644
index 0000000..fe7bca6
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/README.md
@@ -0,0 +1,159 @@
+# bitset
+
+*Go language library to map between non-negative integers and boolean values*
+
+[![Test](https://github.com/bits-and-blooms/bitset/workflows/Test/badge.svg)](https://github.com/willf/bitset/actions?query=workflow%3ATest)
+[![Go Report Card](https://goreportcard.com/badge/github.com/willf/bitset)](https://goreportcard.com/report/github.com/willf/bitset)
+[![PkgGoDev](https://pkg.go.dev/badge/github.com/bits-and-blooms/bitset?tab=doc)](https://pkg.go.dev/github.com/bits-and-blooms/bitset?tab=doc)
+
+
+This library is part of the [awesome go collection](https://github.com/avelino/awesome-go). It is used in production by several important systems:
+
+* [beego](https://github.com/beego/beego)
+* [CubeFS](https://github.com/cubefs/cubefs)
+* [Amazon EKS Distro](https://github.com/aws/eks-distro)
+* [sourcegraph](https://github.com/sourcegraph/sourcegraph)
+* [torrent](https://github.com/anacrolix/torrent)
+
+
+## Description
+
+Package bitset implements bitsets, a mapping between non-negative integers and boolean values.
+It should be more efficient than map[uint] bool.
+
+It provides methods for setting, clearing, flipping, and testing individual integers.
+
+But it also provides set intersection, union, difference, complement, and symmetric difference operations, as well as tests to check whether any, all, or no bits are set, and querying a bitset's current length and number of positive bits.
+
+BitSets are expanded to the size of the largest set bit; the memory allocation is approximately Max bits, where Max is the largest set bit. BitSets are never shrunk. On creation, a hint can be given for the number of bits that will be used.
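+
+For example, the creation hint (a sketch; the hint only pre-sizes the backing
+slice, it is not a hard limit):
+
+```go
+b := bitset.New(10000) // expect bits up to roughly 10,000
+b.Set(9999)            // no regrowth needed
+```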
+
+Many of the methods, including Set, Clear, and Flip, return a BitSet pointer, which allows for chaining.
+
+### Example use:
+
+```go
+package main
+
+import (
+ "fmt"
+ "math/rand"
+
+ "github.com/bits-and-blooms/bitset"
+)
+
+func main() {
+ fmt.Printf("Hello from BitSet!\n")
+ var b bitset.BitSet
+ // play some Go Fish
+ for i := 0; i < 100; i++ {
+ card1 := uint(rand.Intn(52))
+ card2 := uint(rand.Intn(52))
+ b.Set(card1)
+ if b.Test(card2) {
+ fmt.Println("Go Fish!")
+ }
+ b.Clear(card1)
+ }
+
+ // Chaining
+ b.Set(10).Set(11)
+
+ for i, e := b.NextSet(0); e; i, e = b.NextSet(i + 1) {
+ fmt.Println("The following bit is set:", i)
+ }
+ if b.Intersection(bitset.New(100).Set(10)).Count() == 1 {
+ fmt.Println("Intersection works.")
+ } else {
+ fmt.Println("Intersection doesn't work???")
+ }
+}
+```
+
+
+Package documentation is at: https://pkg.go.dev/github.com/bits-and-blooms/bitset?tab=doc
+
+## Serialization
+
+
+You may serialize a bitset safely and portably to a stream
+of bytes as follows:
+```Go
+ const length = 9585
+ const oneEvery = 97
+ bs := bitset.New(length)
+ // Add some bits
+ for i := uint(0); i < length; i += oneEvery {
+ bs = bs.Set(i)
+ }
+
+ var buf bytes.Buffer
+ n, err := bs.WriteTo(&buf)
+ if err != nil {
+ // failure
+ }
+ // Here n == buf.Len()
+```
+You can later deserialize the result as follows:
+
+```Go
+ // Read back from buf
+ bs = bitset.New()
+ n, err = bs.ReadFrom(&buf)
+ if err != nil {
+ // error
+ }
+ // n is the number of bytes read
+```
+
+The `ReadFrom` function attempts to read the data into the existing
+BitSet instance, to minimize memory allocations.
+
+
+*Performance tip*:
+When reading and writing to a file or a network connection, you may get better performance by
+wrapping your streams with `bufio` instances.
+
+E.g.,
+```Go
+ f, err := os.Create("myfile")
+ w := bufio.NewWriter(f)
+```
+```Go
+ f, err := os.Open("myfile")
+ r := bufio.NewReader(f)
+```
+
+## Memory Usage
+
+The memory usage of a bitset using `N` bits is at least `N/8` bytes. The number of bits in a bitset is at least as large as one plus the greatest bit index you have accessed. Thus it is possible to run out of memory while using a bitset. If you have lots of bits, you might prefer compressed bitsets, like the [Roaring bitmaps](http://roaringbitmap.org) and its [Go implementation](https://github.com/RoaringBitmap/roaring).
+
+The `roaring` library allows you to go back and forth between compressed Roaring bitmaps and the conventional bitset instances:
+```Go
+ mybitset := roaringbitmap.ToBitSet()
+ newroaringbitmap := roaring.FromBitSet(mybitset)
+```
+
+
+## Implementation Note
+
+Go 1.9 introduced a native `math/bits` library. We provide backward compatibility down to Go 1.7, though this support might be removed in the future.
+
+It is possible that a later version will match the `math/bits` return signature for counts (which is `int`, rather than our library's `uint64`). If so, the version will be bumped.
+
+## Installation
+
+```bash
+go get github.com/bits-and-blooms/bitset
+```
+
+## Contributing
+
+If you wish to contribute to this project, please branch and issue a pull request against master ("[GitHub Flow](https://guides.github.com/introduction/flow/)")
+
+## Running all tests
+
+Before committing the code, please check if it passes tests, has adequate coverage, etc.
+```bash
+go test
+go test -cover
+```
diff --git a/vendor/github.com/bits-and-blooms/bitset/SECURITY.md b/vendor/github.com/bits-and-blooms/bitset/SECURITY.md
new file mode 100644
index 0000000..f888420
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/SECURITY.md
@@ -0,0 +1,5 @@
+# Security Policy
+
+## Reporting a Vulnerability
+
+You can privately report a vulnerability by email at daniel@lemire.me (current maintainer).
diff --git a/vendor/github.com/bits-and-blooms/bitset/azure-pipelines.yml b/vendor/github.com/bits-and-blooms/bitset/azure-pipelines.yml
new file mode 100644
index 0000000..f9b2959
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/azure-pipelines.yml
@@ -0,0 +1,39 @@
+# Go
+# Build your Go project.
+# Add steps that test, save build artifacts, deploy, and more:
+# https://docs.microsoft.com/azure/devops/pipelines/languages/go
+
+trigger:
+- master
+
+pool:
+ vmImage: 'Ubuntu-16.04'
+
+variables:
+ GOBIN: '$(GOPATH)/bin' # Go binaries path
+ GOROOT: '/usr/local/go1.11' # Go installation path
+ GOPATH: '$(system.defaultWorkingDirectory)/gopath' # Go workspace path
+ modulePath: '$(GOPATH)/src/github.com/$(build.repository.name)' # Path to the module's code
+
+steps:
+- script: |
+ mkdir -p '$(GOBIN)'
+ mkdir -p '$(GOPATH)/pkg'
+ mkdir -p '$(modulePath)'
+ shopt -s extglob
+ shopt -s dotglob
+ mv !(gopath) '$(modulePath)'
+ echo '##vso[task.prependpath]$(GOBIN)'
+ echo '##vso[task.prependpath]$(GOROOT)/bin'
+ displayName: 'Set up the Go workspace'
+
+- script: |
+ go version
+ go get -v -t -d ./...
+ if [ -f Gopkg.toml ]; then
+ curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
+ dep ensure
+ fi
+ go build -v .
+ workingDirectory: '$(modulePath)'
+ displayName: 'Get dependencies, then build'
diff --git a/vendor/github.com/bits-and-blooms/bitset/bitset.go b/vendor/github.com/bits-and-blooms/bitset/bitset.go
new file mode 100644
index 0000000..9f38ed3
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/bitset.go
@@ -0,0 +1,1184 @@
+/*
+Package bitset implements bitsets, a mapping
+between non-negative integers and boolean values. It should be more
+efficient than map[uint] bool.
+
+It provides methods for setting, clearing, flipping, and testing
+individual integers.
+
+But it also provides set intersection, union, difference,
+complement, and symmetric operations, as well as tests to
+check whether any, all, or no bits are set, and querying a
+bitset's current length and number of positive bits.
+
+BitSets are expanded to the size of the largest set bit; the
+memory allocation is approximately Max bits, where Max is
+the largest set bit. BitSets are never shrunk. On creation,
+a hint can be given for the number of bits that will be used.
+
+Many of the methods, including Set, Clear, and Flip, return
+a BitSet pointer, which allows for chaining.
+
+Example use:
+
+ import "bitset"
+ var b bitset.BitSet
+ b.Set(10).Set(11)
+ if b.Test(1000) {
+ b.Clear(1000)
+ }
+ if b.Intersection(bitset.New(100).Set(10)).Count() > 0 {
+ fmt.Println("Intersection works.")
+ }
+
+As an alternative to BitSets, one should check out the 'big' package,
+which provides a (less set-theoretical) view of bitsets.
+*/
+package bitset
+
+import (
+ "bytes"
+ "encoding/base64"
+ "encoding/binary"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "strconv"
+)
+
+// the wordSize of a bit set
+const wordSize = uint(64)
+
+// the wordSize of a bit set in bytes
+const wordBytes = wordSize / 8
+
+// log2WordSize is lg(wordSize)
+const log2WordSize = uint(6)
+
+// allBits has every bit set
+const allBits uint64 = 0xffffffffffffffff
+
+// default binary BigEndian
+var binaryOrder binary.ByteOrder = binary.BigEndian
+
+// default json encoding base64.URLEncoding
+var base64Encoding = base64.URLEncoding
+
+// Base64StdEncoding Marshal/Unmarshal BitSet with base64.StdEncoding (default: base64.URLEncoding)
+func Base64StdEncoding() { base64Encoding = base64.StdEncoding }
+
+// LittleEndian Marshal/Unmarshal Binary as Little Endian (default: binary.BigEndian)
+func LittleEndian() { binaryOrder = binary.LittleEndian }
+
+// A BitSet is a set of bits. The zero value of a BitSet is an empty set of length 0.
+type BitSet struct {
+ length uint
+ set []uint64
+}
+
+// Error is used to distinguish errors (panics) generated in this package.
+type Error string
+
+// safeSet will fixup b.set to be non-nil and return the field value
+func (b *BitSet) safeSet() []uint64 {
+ if b.set == nil {
+ b.set = make([]uint64, wordsNeeded(0))
+ }
+ return b.set
+}
+
+// SetBitsetFrom fills the bitset with an array of integers without creating a new BitSet instance
+func (b *BitSet) SetBitsetFrom(buf []uint64) {
+ b.length = uint(len(buf)) * 64
+ b.set = buf
+}
+
+// From is a constructor used to create a BitSet from an array of words
+func From(buf []uint64) *BitSet {
+ return FromWithLength(uint(len(buf))*64, buf)
+}
+
+// FromWithLength constructs from an array of words and length.
+func FromWithLength(len uint, set []uint64) *BitSet {
+ return &BitSet{len, set}
+}
+
+// Bytes returns the bitset as array of words
+func (b *BitSet) Bytes() []uint64 {
+ return b.set
+}
+
+// wordsNeeded calculates the number of words needed for i bits
+func wordsNeeded(i uint) int {
+ if i > (Cap() - wordSize + 1) {
+ return int(Cap() >> log2WordSize)
+ }
+ return int((i + (wordSize - 1)) >> log2WordSize)
+}
+
+// wordsNeededUnbound calculates the number of words needed for i bits, possibly exceeding the capacity.
+// This function is useful if you know that the capacity cannot be exceeded (e.g., you have an existing bitmap).
+func wordsNeededUnbound(i uint) int {
+ return int((i + (wordSize - 1)) >> log2WordSize)
+}
+
+// wordsIndex calculates the position of the bit i within its 64-bit word
+func wordsIndex(i uint) uint {
+ return i & (wordSize - 1)
+}
+
+// New creates a new BitSet with a hint that length bits will be required
+func New(length uint) (bset *BitSet) {
+ defer func() {
+ if r := recover(); r != nil {
+ bset = &BitSet{
+ 0,
+ make([]uint64, 0),
+ }
+ }
+ }()
+
+ bset = &BitSet{
+ length,
+ make([]uint64, wordsNeeded(length)),
+ }
+
+ return bset
+}
+
+// Cap returns the total possible capacity, or number of bits
+func Cap() uint {
+ return ^uint(0)
+}
+
+// Len returns the number of bits in the BitSet.
+// Note the difference from the Count method; see the example.
+func (b *BitSet) Len() uint {
+ return b.length
+}
+
+// extendSet adds additional words to incorporate new bits if needed
+func (b *BitSet) extendSet(i uint) {
+ if i >= Cap() {
+ panic("You are exceeding the capacity")
+ }
+ nsize := wordsNeeded(i + 1)
+ if b.set == nil {
+ b.set = make([]uint64, nsize)
+ } else if cap(b.set) >= nsize {
+ b.set = b.set[:nsize] // fast resize
+ } else if len(b.set) < nsize {
+ newset := make([]uint64, nsize, 2*nsize) // increase capacity 2x
+ copy(newset, b.set)
+ b.set = newset
+ }
+ b.length = i + 1
+}
+
+// Test whether bit i is set.
+func (b *BitSet) Test(i uint) bool {
+ if i >= b.length {
+ return false
+ }
+ return b.set[i>>log2WordSize]&(1<<wordsIndex(i)) != 0
+}
+
+// Set bit i to 1, the capacity of the bitset is automatically
+// increased accordingly.
+// If i>= Cap(), this function will panic.
+// Warning: using a very large value for 'i'
+// may lead to a memory shortage and a panic: the caller is responsible
+// for providing sensible parameters in line with their memory capacity.
+func (b *BitSet) Set(i uint) *BitSet {
+ if i >= b.length { // if we need more bits, make 'em
+ b.extendSet(i)
+ }
+ b.set[i>>log2WordSize] |= 1 << wordsIndex(i)
+ return b
+}
+
+// Clear bit i to 0
+func (b *BitSet) Clear(i uint) *BitSet {
+ if i >= b.length {
+ return b
+ }
+ b.set[i>>log2WordSize] &^= 1 << wordsIndex(i)
+ return b
+}
+
+// SetTo sets bit i to value.
+// If i>= Cap(), this function will panic.
+// Warning: using a very large value for 'i'
+// may lead to a memory shortage and a panic: the caller is responsible
+// for providing sensible parameters in line with their memory capacity.
+func (b *BitSet) SetTo(i uint, value bool) *BitSet {
+ if value {
+ return b.Set(i)
+ }
+ return b.Clear(i)
+}
+
+// Flip bit at i.
+// If i>= Cap(), this function will panic.
+// Warning: using a very large value for 'i'
+// may lead to a memory shortage and a panic: the caller is responsible
+// for providing sensible parameters in line with their memory capacity.
+func (b *BitSet) Flip(i uint) *BitSet {
+ if i >= b.length {
+ return b.Set(i)
+ }
+ b.set[i>>log2WordSize] ^= 1 << wordsIndex(i)
+ return b
+}
+
+// FlipRange bit in [start, end).
+// If end>= Cap(), this function will panic.
+// Warning: using a very large value for 'end'
+// may lead to a memory shortage and a panic: the caller is responsible
+// for providing sensible parameters in line with their memory capacity.
+func (b *BitSet) FlipRange(start, end uint) *BitSet {
+ if start >= end {
+ return b
+ }
+ if end-1 >= b.length { // if we need more bits, make 'em
+ b.extendSet(end - 1)
+ }
+ var startWord uint = start >> log2WordSize
+ var endWord uint = end >> log2WordSize
+ b.set[startWord] ^= ^(^uint64(0) << wordsIndex(start))
+ if endWord > 0 {
+ // bounds check elimination
+ data := b.set
+ _ = data[endWord-1]
+ for i := startWord; i < endWord; i++ {
+ data[i] = ^data[i]
+ }
+ }
+ if end&(wordSize-1) != 0 {
+ b.set[endWord] ^= ^uint64(0) >> wordsIndex(-end)
+ }
+ return b
+}
+
+// Shrink shrinks BitSet so that the provided value is the last possible
+// set value. It clears all bits > the provided index and reduces the size
+// and length of the set.
+//
+// Note that the parameter value is not the new length in bits: it is the
+// maximal value that can be stored in the bitset after the function call.
+// The new length in bits is the parameter value + 1. Thus it is not possible
+// to use this function to set the length to 0, the minimal value of the length
+// after this function call is 1.
+//
+// A new slice is allocated to store the new bits, so you may see an increase in
+// memory usage until the GC runs. Normally this should not be a problem, but if you
+// have an extremely large BitSet it's important to understand that the old BitSet will
+// remain in memory until the GC frees it.
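+//
+// For example, b.Shrink(63) results in a BitSet of length 64 that is backed by
+// a single 64-bit word.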
+func (b *BitSet) Shrink(lastbitindex uint) *BitSet {
+ length := lastbitindex + 1
+ idx := wordsNeeded(length)
+ if idx > len(b.set) {
+ return b
+ }
+ shrunk := make([]uint64, idx)
+ copy(shrunk, b.set[:idx])
+ b.set = shrunk
+ b.length = length
+ lastWordUsedBits := length % 64
+ if lastWordUsedBits != 0 {
+ b.set[idx-1] &= allBits >> uint64(64-wordsIndex(lastWordUsedBits))
+ }
+ return b
+}
+
+// Compact shrinks BitSet so that we preserve all set bits, while minimizing
+// memory usage. Compact calls Shrink.
+func (b *BitSet) Compact() *BitSet {
+ idx := len(b.set) - 1
+ for ; idx >= 0 && b.set[idx] == 0; idx-- {
+ }
+ newlength := uint((idx + 1) << log2WordSize)
+ if newlength >= b.length {
+ return b // nothing to do
+ }
+ if newlength > 0 {
+ return b.Shrink(newlength - 1)
+ }
+ // We preserve one word
+ return b.Shrink(63)
+}
+
+// InsertAt takes an index which indicates where a bit should be
+// inserted. Then it shifts all the bits in the set to the left by 1, starting
+// from the given index position, and sets the index position to 0.
+//
+// Depending on the size of your BitSet, and where you are inserting the new entry,
+// this method could be extremely slow and in some cases might cause the entire BitSet
+// to be recopied.
+func (b *BitSet) InsertAt(idx uint) *BitSet {
+ insertAtElement := idx >> log2WordSize
+
+ // if length of set is a multiple of wordSize we need to allocate more space first
+ if b.isLenExactMultiple() {
+ b.set = append(b.set, uint64(0))
+ }
+
+ var i uint
+ for i = uint(len(b.set) - 1); i > insertAtElement; i-- {
+ // all elements above the position where we want to insert can simply by shifted
+ b.set[i] <<= 1
+
+ // we take the most significant bit of the previous element and set it as
+ // the least significant bit of the current element
+ b.set[i] |= (b.set[i-1] & 0x8000000000000000) >> 63
+ }
+
+ // generate a mask to extract the data that we need to shift left
+ // within the element where we insert a bit
+ dataMask := uint64(1)<<uint64(wordsIndex(idx)) - 1
+
+ // extract the data that we'll shift left from the slice element
+ data := b.set[insertAtElement] & dataMask
+
+ // set the positions of the data mask to 0 in the slice element where we insert
+ b.set[insertAtElement] &= ^dataMask
+
+ // shift the extracted data to the left and then set it in the previously
+ // masked area
+ b.set[insertAtElement] |= data << 1
+
+ // add a bit to the length of the bitset
+ b.length++
+
+ return b
+}
+
+// String creates a string representation of the BitSet
+func (b *BitSet) String() string {
+ // follows code from https://github.com/RoaringBitmap/roaring
+ var buffer bytes.Buffer
+ start := []byte("{")
+ buffer.Write(start)
+ counter := 0
+ i, e := b.NextSet(0)
+ for e {
+ counter = counter + 1
+ // to avoid exhausting the memory
+ if counter > 0x40000 {
+ buffer.WriteString("...")
+ break
+ }
+ buffer.WriteString(strconv.FormatInt(int64(i), 10))
+ i, e = b.NextSet(i + 1)
+ if e {
+ buffer.WriteString(",")
+ }
+ }
+ buffer.WriteString("}")
+ return buffer.String()
+}
+
+// DeleteAt deletes the bit at the given index position from
+// within the bitset
+// All the bits residing on the left of the deleted bit get
+// shifted right by 1
+// The running time of this operation may potentially be
+// relatively slow, O(length)
+func (b *BitSet) DeleteAt(i uint) *BitSet {
+ // the index of the slice element where we'll delete a bit
+ deleteAtElement := i >> log2WordSize
+
+ // generate a mask for the data that needs to be shifted right
+ // within that slice element that gets modified
+ dataMask := ^((uint64(1) << wordsIndex(i)) - 1)
+
+ // extract the data that we'll shift right from the slice element
+ data := b.set[deleteAtElement] & dataMask
+
+ // set the masked area to 0 while leaving the rest as it is
+ b.set[deleteAtElement] &= ^dataMask
+
+ // shift the previously extracted data to the right and then
+ // set it in the previously masked area
+ b.set[deleteAtElement] |= (data >> 1) & dataMask
+
+ // loop over all the consecutive slice elements to copy each
+ // lowest bit into the highest position of the previous element,
+ // then shift the entire content to the right by 1
+ for i := int(deleteAtElement) + 1; i < len(b.set); i++ {
+ b.set[i-1] |= (b.set[i] & 1) << 63
+ b.set[i] >>= 1
+ }
+
+ b.length = b.length - 1
+
+ return b
+}
+
+// NextSet returns the next bit set from the specified index,
+// including possibly the current index
+// along with an error code (true = valid, false = no set bit found)
+// for i,e := v.NextSet(0); e; i,e = v.NextSet(i + 1) {...}
+//
+// Users concerned with performance may want to use NextSetMany to
+// retrieve several values at once.
+func (b *BitSet) NextSet(i uint) (uint, bool) {
+ x := int(i >> log2WordSize)
+ if x >= len(b.set) {
+ return 0, false
+ }
+ w := b.set[x]
+ w = w >> wordsIndex(i)
+ if w != 0 {
+ return i + trailingZeroes64(w), true
+ }
+ x++
+ // bounds check elimination in the loop
+ if x < 0 {
+ return 0, false
+ }
+ for x < len(b.set) {
+ if b.set[x] != 0 {
+ return uint(x)*wordSize + trailingZeroes64(b.set[x]), true
+ }
+ x++
+
+ }
+ return 0, false
+}
+
+// NextSetMany returns many next bit sets from the specified index,
+// including possibly the current index and up to cap(buffer).
+// If the returned slice has len zero, then no more set bits were found
+//
+// buffer := make([]uint, 256) // this should be reused
+// j := uint(0)
+// j, buffer = bitmap.NextSetMany(j, buffer)
+// for ; len(buffer) > 0; j, buffer = bitmap.NextSetMany(j,buffer) {
+// for k := range buffer {
+// do something with buffer[k]
+// }
+// j += 1
+// }
+//
+// It is possible to retrieve all set bits as follows:
+//
+// indices := make([]uint, bitmap.Count())
+// bitmap.NextSetMany(0, indices)
+//
+// However if bitmap.Count() is large, it might be preferable to
+// use several calls to NextSetMany, for performance reasons.
+func (b *BitSet) NextSetMany(i uint, buffer []uint) (uint, []uint) {
+ myanswer := buffer
+ capacity := cap(buffer)
+ x := int(i >> log2WordSize)
+ if x >= len(b.set) || capacity == 0 {
+ return 0, myanswer[:0]
+ }
+ skip := wordsIndex(i)
+ word := b.set[x] >> skip
+ myanswer = myanswer[:capacity]
+ size := int(0)
+ for word != 0 {
+ r := trailingZeroes64(word)
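+ // t isolates the lowest set bit of word (two's-complement identity: word & -word)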
+ t := word & ((^word) + 1)
+ myanswer[size] = r + i
+ size++
+ if size == capacity {
+ goto End
+ }
+ word = word ^ t
+ }
+ x++
+ for idx, word := range b.set[x:] {
+ for word != 0 {
+ r := trailingZeroes64(word)
+ t := word & ((^word) + 1)
+ myanswer[size] = r + (uint(x+idx) << 6)
+ size++
+ if size == capacity {
+ goto End
+ }
+ word = word ^ t
+ }
+ }
+End:
+ if size > 0 {
+ return myanswer[size-1], myanswer[:size]
+ }
+ return 0, myanswer[:0]
+}
+
+// NextClear returns the next clear bit from the specified index,
+// including possibly the current index
+// along with an error code (true = valid, false = no bit found i.e. all bits are set)
+func (b *BitSet) NextClear(i uint) (uint, bool) {
+ x := int(i >> log2WordSize)
+ if x >= len(b.set) {
+ return 0, false
+ }
+ w := b.set[x]
+ w = w >> wordsIndex(i)
+ wA := allBits >> wordsIndex(i)
+ index := i + trailingZeroes64(^w)
+ if w != wA && index < b.length {
+ return index, true
+ }
+ x++
+ // bounds check elimination in the loop
+ if x < 0 {
+ return 0, false
+ }
+ for x < len(b.set) {
+ if b.set[x] != allBits {
+ index = uint(x)*wordSize + trailingZeroes64(^b.set[x])
+ if index < b.length {
+ return index, true
+ }
+ }
+ x++
+ }
+ return 0, false
+}
+
+// ClearAll clears the entire BitSet
+func (b *BitSet) ClearAll() *BitSet {
+ if b != nil && b.set != nil {
+ for i := range b.set {
+ b.set[i] = 0
+ }
+ }
+ return b
+}
+
+// SetAll sets the entire BitSet
+func (b *BitSet) SetAll() *BitSet {
+ if b != nil && b.set != nil {
+ for i := range b.set {
+ b.set[i] = allBits
+ }
+
+ b.cleanLastWord()
+ }
+ return b
+}
+
+// wordCount returns the number of words used in a bit set
+func (b *BitSet) wordCount() int {
+ return wordsNeededUnbound(b.length)
+}
+
+// Clone this BitSet
+func (b *BitSet) Clone() *BitSet {
+ c := New(b.length)
+ if b.set != nil { // Clone should not modify current object
+ copy(c.set, b.set)
+ }
+ return c
+}
+
+// Copy into a destination BitSet using the Go array copy semantics:
+// the number of bits copied is the minimum of the number of bits in the current
+// BitSet (Len()) and the destination Bitset.
+// We return the number of bits copied in the destination BitSet.
+func (b *BitSet) Copy(c *BitSet) (count uint) {
+ if c == nil {
+ return
+ }
+ if b.set != nil { // Copy should not modify current object
+ copy(c.set, b.set)
+ }
+ count = c.length
+ if b.length < c.length {
+ count = b.length
+ }
+ // Cleaning the last word is needed to keep the invariant, required by other
+ // functions such as Count, that any bits in the last word beyond the length
+ // of the bitmask are set to 0.
+ c.cleanLastWord()
+ return
+}
+
+// CopyFull copies into a destination BitSet such that the destination is
+// identical to the source after the operation, allocating memory if necessary.
+func (b *BitSet) CopyFull(c *BitSet) {
+ if c == nil {
+ return
+ }
+ c.length = b.length
+ if len(b.set) == 0 {
+ if c.set != nil {
+ c.set = c.set[:0]
+ }
+ } else {
+ if cap(c.set) < len(b.set) {
+ c.set = make([]uint64, len(b.set))
+ } else {
+ c.set = c.set[:len(b.set)]
+ }
+ copy(c.set, b.set)
+ }
+}
+
+// Count (number of set bits).
+// Also known as "popcount" or "population count".
+func (b *BitSet) Count() uint {
+ if b != nil && b.set != nil {
+ return uint(popcntSlice(b.set))
+ }
+ return 0
+}
+
+// Equal tests the equivalence of two BitSets.
+// False if they are of different sizes, otherwise true
+// only if all the same bits are set
+func (b *BitSet) Equal(c *BitSet) bool {
+ if c == nil || b == nil {
+ return c == b
+ }
+ if b.length != c.length {
+ return false
+ }
+ if b.length == 0 { // if both have length == 0, then they could have a nil set
+ return true
+ }
+ wn := b.wordCount()
+ // bounds check elimination
+ if wn <= 0 {
+ return true
+ }
+ _ = b.set[wn-1]
+ _ = c.set[wn-1]
+ for p := 0; p < wn; p++ {
+ if c.set[p] != b.set[p] {
+ return false
+ }
+ }
+ return true
+}
+
+func panicIfNull(b *BitSet) {
+ if b == nil {
+ panic(Error("BitSet must not be null"))
+ }
+}
+
+// Difference of base set and other set
+// This is the BitSet equivalent of &^ (and not)
+func (b *BitSet) Difference(compare *BitSet) (result *BitSet) {
+ panicIfNull(b)
+ panicIfNull(compare)
+ result = b.Clone() // clone b (in case b is bigger than compare)
+ l := compare.wordCount()
+ if l > b.wordCount() {
+ l = b.wordCount()
+ }
+ for i := 0; i < l; i++ {
+ result.set[i] = b.set[i] &^ compare.set[i]
+ }
+ return
+}
+
+// DifferenceCardinality computes the cardinality of the difference
+func (b *BitSet) DifferenceCardinality(compare *BitSet) uint {
+ panicIfNull(b)
+ panicIfNull(compare)
+ l := compare.wordCount()
+ if l > b.wordCount() {
+ l = b.wordCount()
+ }
+ cnt := uint64(0)
+ cnt += popcntMaskSlice(b.set[:l], compare.set[:l])
+ cnt += popcntSlice(b.set[l:])
+ return uint(cnt)
+}
+
+// InPlaceDifference computes the difference of base set and other set
+// This is the BitSet equivalent of &^ (and not)
+func (b *BitSet) InPlaceDifference(compare *BitSet) {
+ panicIfNull(b)
+ panicIfNull(compare)
+ l := compare.wordCount()
+ if l > b.wordCount() {
+ l = b.wordCount()
+ }
+ if l <= 0 {
+ return
+ }
+ // bounds check elimination
+ data, cmpData := b.set, compare.set
+ _ = data[l-1]
+ _ = cmpData[l-1]
+ for i := 0; i < l; i++ {
+ data[i] &^= cmpData[i]
+ }
+}
+
+// Convenience function: return two bitsets ordered by
+// increasing length. Note: neither can be nil
+func sortByLength(a *BitSet, b *BitSet) (ap *BitSet, bp *BitSet) {
+ if a.length <= b.length {
+ ap, bp = a, b
+ } else {
+ ap, bp = b, a
+ }
+ return
+}
+
+// Intersection of base set and other set
+// This is the BitSet equivalent of & (and)
+func (b *BitSet) Intersection(compare *BitSet) (result *BitSet) {
+ panicIfNull(b)
+ panicIfNull(compare)
+ b, compare = sortByLength(b, compare)
+ result = New(b.length)
+ for i, word := range b.set {
+ result.set[i] = word & compare.set[i]
+ }
+ return
+}
+
+// IntersectionCardinality computes the cardinality of the intersection
+func (b *BitSet) IntersectionCardinality(compare *BitSet) uint {
+ panicIfNull(b)
+ panicIfNull(compare)
+ b, compare = sortByLength(b, compare)
+ cnt := popcntAndSlice(b.set, compare.set)
+ return uint(cnt)
+}
+
+// InPlaceIntersection destructively computes the intersection of
+// base set and the compare set.
+// This is the BitSet equivalent of & (and)
+func (b *BitSet) InPlaceIntersection(compare *BitSet) {
+ panicIfNull(b)
+ panicIfNull(compare)
+ l := compare.wordCount()
+ if l > b.wordCount() {
+ l = b.wordCount()
+ }
+ if l > 0 {
+ // bounds check elimination
+ data, cmpData := b.set, compare.set
+ _ = data[l-1]
+ _ = cmpData[l-1]
+
+ for i := 0; i < l; i++ {
+ data[i] &= cmpData[i]
+ }
+ }
+ if l >= 0 {
+ for i := l; i < len(b.set); i++ {
+ b.set[i] = 0
+ }
+ }
+ if compare.length > 0 {
+ if compare.length-1 >= b.length {
+ b.extendSet(compare.length - 1)
+ }
+ }
+}
+
+// Union of base set and other set
+// This is the BitSet equivalent of | (or)
+func (b *BitSet) Union(compare *BitSet) (result *BitSet) {
+ panicIfNull(b)
+ panicIfNull(compare)
+ b, compare = sortByLength(b, compare)
+ result = compare.Clone()
+ for i, word := range b.set {
+ result.set[i] = word | compare.set[i]
+ }
+ return
+}
+
+// UnionCardinality computes the cardinality of the union of the base set
+// and the compare set.
+func (b *BitSet) UnionCardinality(compare *BitSet) uint {
+ panicIfNull(b)
+ panicIfNull(compare)
+ b, compare = sortByLength(b, compare)
+ cnt := popcntOrSlice(b.set, compare.set)
+ if len(compare.set) > len(b.set) {
+ cnt += popcntSlice(compare.set[len(b.set):])
+ }
+ return uint(cnt)
+}
+
+// InPlaceUnion creates the destructive union of base set and compare set.
+// This is the BitSet equivalent of | (or).
+func (b *BitSet) InPlaceUnion(compare *BitSet) {
+ panicIfNull(b)
+ panicIfNull(compare)
+ l := compare.wordCount()
+ if l > b.wordCount() {
+ l = b.wordCount()
+ }
+ if compare.length > 0 && compare.length-1 >= b.length {
+ b.extendSet(compare.length - 1)
+ }
+ if l > 0 {
+ // bounds check elimination
+ data, cmpData := b.set, compare.set
+ _ = data[l-1]
+ _ = cmpData[l-1]
+
+ for i := 0; i < l; i++ {
+ data[i] |= cmpData[i]
+ }
+ }
+ if len(compare.set) > l {
+ for i := l; i < len(compare.set); i++ {
+ b.set[i] = compare.set[i]
+ }
+ }
+}
+
+// SymmetricDifference of base set and other set
+// This is the BitSet equivalent of ^ (xor)
+func (b *BitSet) SymmetricDifference(compare *BitSet) (result *BitSet) {
+ panicIfNull(b)
+ panicIfNull(compare)
+ b, compare = sortByLength(b, compare)
+ // compare is bigger, so clone it
+ result = compare.Clone()
+ for i, word := range b.set {
+ result.set[i] = word ^ compare.set[i]
+ }
+ return
+}
+
+// SymmetricDifferenceCardinality computes the cardinality of the symmetric difference
+func (b *BitSet) SymmetricDifferenceCardinality(compare *BitSet) uint {
+ panicIfNull(b)
+ panicIfNull(compare)
+ b, compare = sortByLength(b, compare)
+ cnt := popcntXorSlice(b.set, compare.set)
+ if len(compare.set) > len(b.set) {
+ cnt += popcntSlice(compare.set[len(b.set):])
+ }
+ return uint(cnt)
+}
+
+// InPlaceSymmetricDifference creates the destructive SymmetricDifference of base set and other set
+// This is the BitSet equivalent of ^ (xor)
+func (b *BitSet) InPlaceSymmetricDifference(compare *BitSet) {
+ panicIfNull(b)
+ panicIfNull(compare)
+ l := compare.wordCount()
+ if l > b.wordCount() {
+ l = b.wordCount()
+ }
+ if compare.length > 0 && compare.length-1 >= b.length {
+ b.extendSet(compare.length - 1)
+ }
+ if l > 0 {
+ // bounds check elimination
+ data, cmpData := b.set, compare.set
+ _ = data[l-1]
+ _ = cmpData[l-1]
+ for i := 0; i < l; i++ {
+ data[i] ^= cmpData[i]
+ }
+ }
+ if len(compare.set) > l {
+ for i := l; i < len(compare.set); i++ {
+ b.set[i] = compare.set[i]
+ }
+ }
+}
+
+// Is the length an exact multiple of word sizes?
+func (b *BitSet) isLenExactMultiple() bool {
+ return wordsIndex(b.length) == 0
+}
+
+// Clean last word by setting unused bits to 0
+func (b *BitSet) cleanLastWord() {
+ if !b.isLenExactMultiple() {
+ b.set[len(b.set)-1] &= allBits >> (wordSize - wordsIndex(b.length))
+ }
+}
+
+// Complement computes the (local) complement of a bitset (up to length bits)
+func (b *BitSet) Complement() (result *BitSet) {
+ panicIfNull(b)
+ result = New(b.length)
+ for i, word := range b.set {
+ result.set[i] = ^word
+ }
+ result.cleanLastWord()
+ return
+}
+
+// All returns true if all bits are set, false otherwise. Returns true for
+// empty sets.
+func (b *BitSet) All() bool {
+ panicIfNull(b)
+ return b.Count() == b.length
+}
+
+// None returns true if no bit is set, false otherwise. Returns true for
+// empty sets.
+func (b *BitSet) None() bool {
+ panicIfNull(b)
+ if b != nil && b.set != nil {
+ for _, word := range b.set {
+ if word > 0 {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+// Any returns true if any bit is set, false otherwise
+func (b *BitSet) Any() bool {
+ panicIfNull(b)
+ return !b.None()
+}
+
+// IsSuperSet returns true if this is a superset of the other set
+func (b *BitSet) IsSuperSet(other *BitSet) bool {
+ l := other.wordCount()
+ if b.wordCount() < l {
+ l = b.wordCount()
+ }
+ for i, word := range other.set[:l] {
+ if b.set[i]&word != word {
+ return false
+ }
+ }
+ return popcntSlice(other.set[l:]) == 0
+}
+
+// IsStrictSuperSet returns true if this is a strict superset of the other set
+func (b *BitSet) IsStrictSuperSet(other *BitSet) bool {
+ return b.Count() > other.Count() && b.IsSuperSet(other)
+}
+
+// DumpAsBits dumps a bit set as a string of bits. Following the usual convention in Go,
+// the least significant bits are printed last (index 0 is at the end of the string).
+func (b *BitSet) DumpAsBits() string {
+ if b.set == nil {
+ return "."
+ }
+ buffer := bytes.NewBufferString("")
+ i := len(b.set) - 1
+ for ; i >= 0; i-- {
+ fmt.Fprintf(buffer, "%064b.", b.set[i])
+ }
+ return buffer.String()
+}
+
+// BinaryStorageSize returns the binary storage requirements (see WriteTo) in bytes.
+func (b *BitSet) BinaryStorageSize() int {
+ return int(wordBytes + wordBytes*uint(b.wordCount()))
+}
+
+func readUint64Array(reader io.Reader, data []uint64) error {
+ length := len(data)
+ bufferSize := 128
+ buffer := make([]byte, bufferSize*int(wordBytes))
+ for i := 0; i < length; i += bufferSize {
+ end := i + bufferSize
+ if end > length {
+ end = length
+ buffer = buffer[:wordBytes*uint(end-i)]
+ }
+ chunk := data[i:end]
+ if _, err := io.ReadFull(reader, buffer); err != nil {
+ return err
+ }
+ for i := range chunk {
+ chunk[i] = uint64(binaryOrder.Uint64(buffer[8*i:]))
+ }
+ }
+ return nil
+}
+
+func writeUint64Array(writer io.Writer, data []uint64) error {
+ bufferSize := 128
+ buffer := make([]byte, bufferSize*int(wordBytes))
+ for i := 0; i < len(data); i += bufferSize {
+ end := i + bufferSize
+ if end > len(data) {
+ end = len(data)
+ buffer = buffer[:wordBytes*uint(end-i)]
+ }
+ chunk := data[i:end]
+ for i, x := range chunk {
+ binaryOrder.PutUint64(buffer[8*i:], x)
+ }
+ _, err := writer.Write(buffer)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// WriteTo writes a BitSet to a stream. The format is:
+// 1. uint64 length
+// 2. []uint64 set
+// Upon success, the number of bytes written is returned.
+//
+// Performance: if this function is used to write to a disk or network
+// connection, it might be beneficial to wrap the stream in a bufio.Writer.
+// E.g.,
+//
+// f, err := os.Create("myfile")
+// w := bufio.NewWriter(f)
+func (b *BitSet) WriteTo(stream io.Writer) (int64, error) {
+ length := uint64(b.length)
+ // Write length
+ err := binary.Write(stream, binaryOrder, &length)
+ if err != nil {
+ // Upon failure, we do not guarantee that we
+ // return the number of bytes written.
+ return int64(0), err
+ }
+ err = writeUint64Array(stream, b.set[:b.wordCount()])
+ if err != nil {
+ // Upon failure, we do not guarantee that we
+ // return the number of bytes written.
+ return int64(wordBytes), err
+ }
+ return int64(b.BinaryStorageSize()), nil
+}
+
+// ReadFrom reads a BitSet from a stream written using WriteTo
+// The format is:
+// 1. uint64 length
+// 2. []uint64 set
+// Upon success, the number of bytes read is returned.
+// If the current BitSet is not large enough to hold the data,
+// it is extended. In case of error, the BitSet is either
+// left unchanged or made empty if the error occurs too late
+// to preserve the content.
+//
+// Performance: if this function is used to read from a disk or network
+// connection, it might be beneficial to wrap the stream in a bufio.Reader.
+// E.g.,
+//
+// f, err := os.Open("myfile")
+// r := bufio.NewReader(f)
+func (b *BitSet) ReadFrom(stream io.Reader) (int64, error) {
+ var length uint64
+ err := binary.Read(stream, binaryOrder, &length)
+ if err != nil {
+ if err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ return 0, err
+ }
+ newlength := uint(length)
+
+ if uint64(newlength) != length {
+ return 0, errors.New("unmarshalling error: type mismatch")
+ }
+ nWords := wordsNeeded(uint(newlength))
+ if cap(b.set) >= nWords {
+ b.set = b.set[:nWords]
+ } else {
+ b.set = make([]uint64, nWords)
+ }
+
+ b.length = newlength
+
+ err = readUint64Array(stream, b.set)
+ if err != nil {
+ if err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ // We do not want to leave the BitSet partially filled as
+ // it is error prone.
+ b.set = b.set[:0]
+ b.length = 0
+ return 0, err
+ }
+
+ return int64(b.BinaryStorageSize()), nil
+}
+
+// MarshalBinary encodes a BitSet into a binary form and returns the result.
+func (b *BitSet) MarshalBinary() ([]byte, error) {
+ var buf bytes.Buffer
+ _, err := b.WriteTo(&buf)
+ if err != nil {
+ return []byte{}, err
+ }
+
+ return buf.Bytes(), err
+}
+
+// UnmarshalBinary decodes the binary form generated by MarshalBinary.
+func (b *BitSet) UnmarshalBinary(data []byte) error {
+ buf := bytes.NewReader(data)
+ _, err := b.ReadFrom(buf)
+ return err
+}
+
+// MarshalJSON marshals a BitSet as a JSON structure
+func (b BitSet) MarshalJSON() ([]byte, error) {
+ buffer := bytes.NewBuffer(make([]byte, 0, b.BinaryStorageSize()))
+ _, err := b.WriteTo(buffer)
+ if err != nil {
+ return nil, err
+ }
+
+ // URLEncode all bytes
+ return json.Marshal(base64Encoding.EncodeToString(buffer.Bytes()))
+}
+
+// UnmarshalJSON unmarshals a BitSet from JSON created using MarshalJSON
+func (b *BitSet) UnmarshalJSON(data []byte) error {
+ // Unmarshal as string
+ var s string
+ err := json.Unmarshal(data, &s)
+ if err != nil {
+ return err
+ }
+
+ // URLDecode string
+ buf, err := base64Encoding.DecodeString(s)
+ if err != nil {
+ return err
+ }
+
+ _, err = b.ReadFrom(bytes.NewReader(buf))
+ return err
+}
+
+// Rank returns the number of set bits up to and including the index
+// that are set in the bitset.
+// See https://en.wikipedia.org/wiki/Ranking#Ranking_in_statistics
+func (b *BitSet) Rank(index uint) uint {
+ if index >= b.length {
+ return b.Count()
+ }
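+ // count the full words with a vectorized popcount, then add the set bits
+ // of the final, partially covered word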
+ leftover := (index + 1) & 63
+ answer := uint(popcntSlice(b.set[:(index+1)>>6]))
+ if leftover != 0 {
+ answer += uint(popcount(b.set[(index+1)>>6] << (64 - leftover)))
+ }
+ return answer
+}
+
+// Select returns the index of the jth set bit, where j is the argument.
+// The caller is responsible for ensuring that 0 <= j < Count(): when j is
+// out of range, the function returns the length of the bitset (b.length).
+//
+// Note that this function differs in convention from the Rank function which
+// returns 1 when ranking the smallest value. We follow the conventional
+// textbook definition of Select and Rank.
+func (b *BitSet) Select(index uint) uint {
+ leftover := index
+ for idx, word := range b.set {
+ w := uint(popcount(word))
+ if w > leftover {
+ return uint(idx)*64 + select64(word, leftover)
+ }
+ leftover -= w
+ }
+ return b.length
+}
diff --git a/vendor/github.com/bits-and-blooms/bitset/popcnt.go b/vendor/github.com/bits-and-blooms/bitset/popcnt.go
new file mode 100644
index 0000000..76577a8
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/popcnt.go
@@ -0,0 +1,53 @@
+package bitset
+
+// bit population count, taken from
+// https://code.google.com/p/go/issues/detail?id=4988#c11
+// credit: https://code.google.com/u/arnehormann/
+func popcount(x uint64) (n uint64) {
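+ // SWAR: sum adjacent bit pairs, then 4-bit groups, then bytes; the final
+ // multiply gathers the byte sums into the top byte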
+ x -= (x >> 1) & 0x5555555555555555
+ x = (x>>2)&0x3333333333333333 + x&0x3333333333333333
+ x += x >> 4
+ x &= 0x0f0f0f0f0f0f0f0f
+ x *= 0x0101010101010101
+ return x >> 56
+}
+
+func popcntSliceGo(s []uint64) uint64 {
+ cnt := uint64(0)
+ for _, x := range s {
+ cnt += popcount(x)
+ }
+ return cnt
+}
+
+func popcntMaskSliceGo(s, m []uint64) uint64 {
+ cnt := uint64(0)
+ for i := range s {
+ cnt += popcount(s[i] &^ m[i])
+ }
+ return cnt
+}
+
+func popcntAndSliceGo(s, m []uint64) uint64 {
+ cnt := uint64(0)
+ for i := range s {
+ cnt += popcount(s[i] & m[i])
+ }
+ return cnt
+}
+
+func popcntOrSliceGo(s, m []uint64) uint64 {
+ cnt := uint64(0)
+ for i := range s {
+ cnt += popcount(s[i] | m[i])
+ }
+ return cnt
+}
+
+func popcntXorSliceGo(s, m []uint64) uint64 {
+ cnt := uint64(0)
+ for i := range s {
+ cnt += popcount(s[i] ^ m[i])
+ }
+ return cnt
+}
diff --git a/vendor/github.com/bits-and-blooms/bitset/popcnt_19.go b/vendor/github.com/bits-and-blooms/bitset/popcnt_19.go
new file mode 100644
index 0000000..7855c04
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/popcnt_19.go
@@ -0,0 +1,62 @@
+//go:build go1.9
+// +build go1.9
+
+package bitset
+
+import "math/bits"
+
+func popcntSlice(s []uint64) uint64 {
+ var cnt int
+ for _, x := range s {
+ cnt += bits.OnesCount64(x)
+ }
+ return uint64(cnt)
+}
+
+func popcntMaskSlice(s, m []uint64) uint64 {
+ var cnt int
+ // this explicit check eliminates a bounds check in the loop
+ if len(m) < len(s) {
+ panic("mask slice is too short")
+ }
+ for i := range s {
+ cnt += bits.OnesCount64(s[i] &^ m[i])
+ }
+ return uint64(cnt)
+}
+
+func popcntAndSlice(s, m []uint64) uint64 {
+ var cnt int
+ // this explicit check eliminates a bounds check in the loop
+ if len(m) < len(s) {
+ panic("mask slice is too short")
+ }
+ for i := range s {
+ cnt += bits.OnesCount64(s[i] & m[i])
+ }
+ return uint64(cnt)
+}
+
+func popcntOrSlice(s, m []uint64) uint64 {
+ var cnt int
+ // this explicit check eliminates a bounds check in the loop
+ if len(m) < len(s) {
+ panic("mask slice is too short")
+ }
+ for i := range s {
+ cnt += bits.OnesCount64(s[i] | m[i])
+ }
+ return uint64(cnt)
+}
+
+func popcntXorSlice(s, m []uint64) uint64 {
+ var cnt int
+ // this explicit check eliminates a bounds check in the loop
+ if len(m) < len(s) {
+ panic("mask slice is too short")
+ }
+ for i := range s {
+ cnt += bits.OnesCount64(s[i] ^ m[i])
+ }
+ return uint64(cnt)
+}
diff --git a/vendor/github.com/bits-and-blooms/bitset/popcnt_amd64.go b/vendor/github.com/bits-and-blooms/bitset/popcnt_amd64.go
new file mode 100644
index 0000000..116e044
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/popcnt_amd64.go
@@ -0,0 +1,68 @@
+//go:build !go1.9 && amd64 && !appengine
+// +build !go1.9,amd64,!appengine
+
+package bitset
+
+// *** the following functions are defined in popcnt_amd64.s
+
+//go:noescape
+func hasAsm() bool
+
+// useAsm is a flag used to select the GO or ASM implementation of the popcnt function
+var useAsm = hasAsm()
+
+//go:noescape
+func popcntSliceAsm(s []uint64) uint64
+
+//go:noescape
+func popcntMaskSliceAsm(s, m []uint64) uint64
+
+//go:noescape
+func popcntAndSliceAsm(s, m []uint64) uint64
+
+//go:noescape
+func popcntOrSliceAsm(s, m []uint64) uint64
+
+//go:noescape
+func popcntXorSliceAsm(s, m []uint64) uint64
+
+func popcntSlice(s []uint64) uint64 {
+ if useAsm {
+ return popcntSliceAsm(s)
+ }
+ return popcntSliceGo(s)
+}
+
+func popcntMaskSlice(s, m []uint64) uint64 {
+ if useAsm {
+ return popcntMaskSliceAsm(s, m)
+ }
+ return popcntMaskSliceGo(s, m)
+}
+
+func popcntAndSlice(s, m []uint64) uint64 {
+ if useAsm {
+ return popcntAndSliceAsm(s, m)
+ }
+ return popcntAndSliceGo(s, m)
+}
+
+func popcntOrSlice(s, m []uint64) uint64 {
+ if useAsm {
+ return popcntOrSliceAsm(s, m)
+ }
+ return popcntOrSliceGo(s, m)
+}
+
+func popcntXorSlice(s, m []uint64) uint64 {
+ if useAsm {
+ return popcntXorSliceAsm(s, m)
+ }
+ return popcntXorSliceGo(s, m)
+}
diff --git a/vendor/github.com/bits-and-blooms/bitset/popcnt_amd64.s b/vendor/github.com/bits-and-blooms/bitset/popcnt_amd64.s
new file mode 100644
index 0000000..666c0dc
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/popcnt_amd64.s
@@ -0,0 +1,104 @@
+// +build !go1.9
+// +build amd64,!appengine
+
+TEXT ·hasAsm(SB),4,$0-1
+MOVQ $1, AX
+CPUID
+SHRQ $23, CX
+ANDQ $1, CX
+MOVB CX, ret+0(FP)
+RET
+
+#define POPCNTQ_DX_DX BYTE $0xf3; BYTE $0x48; BYTE $0x0f; BYTE $0xb8; BYTE $0xd2
+
+TEXT ·popcntSliceAsm(SB),4,$0-32
+XORQ AX, AX
+MOVQ s+0(FP), SI
+MOVQ s_len+8(FP), CX
+TESTQ CX, CX
+JZ popcntSliceEnd
+popcntSliceLoop:
+BYTE $0xf3; BYTE $0x48; BYTE $0x0f; BYTE $0xb8; BYTE $0x16 // POPCNTQ (SI), DX
+ADDQ DX, AX
+ADDQ $8, SI
+LOOP popcntSliceLoop
+popcntSliceEnd:
+MOVQ AX, ret+24(FP)
+RET
+
+TEXT ·popcntMaskSliceAsm(SB),4,$0-56
+XORQ AX, AX
+MOVQ s+0(FP), SI
+MOVQ s_len+8(FP), CX
+TESTQ CX, CX
+JZ popcntMaskSliceEnd
+MOVQ m+24(FP), DI
+popcntMaskSliceLoop:
+MOVQ (DI), DX
+NOTQ DX
+ANDQ (SI), DX
+POPCNTQ_DX_DX
+ADDQ DX, AX
+ADDQ $8, SI
+ADDQ $8, DI
+LOOP popcntMaskSliceLoop
+popcntMaskSliceEnd:
+MOVQ AX, ret+48(FP)
+RET
+
+TEXT ·popcntAndSliceAsm(SB),4,$0-56
+XORQ AX, AX
+MOVQ s+0(FP), SI
+MOVQ s_len+8(FP), CX
+TESTQ CX, CX
+JZ popcntAndSliceEnd
+MOVQ m+24(FP), DI
+popcntAndSliceLoop:
+MOVQ (DI), DX
+ANDQ (SI), DX
+POPCNTQ_DX_DX
+ADDQ DX, AX
+ADDQ $8, SI
+ADDQ $8, DI
+LOOP popcntAndSliceLoop
+popcntAndSliceEnd:
+MOVQ AX, ret+48(FP)
+RET
+
+TEXT ·popcntOrSliceAsm(SB),4,$0-56
+XORQ AX, AX
+MOVQ s+0(FP), SI
+MOVQ s_len+8(FP), CX
+TESTQ CX, CX
+JZ popcntOrSliceEnd
+MOVQ m+24(FP), DI
+popcntOrSliceLoop:
+MOVQ (DI), DX
+ORQ (SI), DX
+POPCNTQ_DX_DX
+ADDQ DX, AX
+ADDQ $8, SI
+ADDQ $8, DI
+LOOP popcntOrSliceLoop
+popcntOrSliceEnd:
+MOVQ AX, ret+48(FP)
+RET
+
+TEXT ·popcntXorSliceAsm(SB),4,$0-56
+XORQ AX, AX
+MOVQ s+0(FP), SI
+MOVQ s_len+8(FP), CX
+TESTQ CX, CX
+JZ popcntXorSliceEnd
+MOVQ m+24(FP), DI
+popcntXorSliceLoop:
+MOVQ (DI), DX
+XORQ (SI), DX
+POPCNTQ_DX_DX
+ADDQ DX, AX
+ADDQ $8, SI
+ADDQ $8, DI
+LOOP popcntXorSliceLoop
+popcntXorSliceEnd:
+MOVQ AX, ret+48(FP)
+RET
diff --git a/vendor/github.com/bits-and-blooms/bitset/popcnt_generic.go b/vendor/github.com/bits-and-blooms/bitset/popcnt_generic.go
new file mode 100644
index 0000000..9e0ad46
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/popcnt_generic.go
@@ -0,0 +1,25 @@
+//go:build !go1.9 && (!amd64 || appengine)
+// +build !go1.9
+// +build !amd64 appengine
+
+package bitset
+
+func popcntSlice(s []uint64) uint64 {
+ return popcntSliceGo(s)
+}
+
+func popcntMaskSlice(s, m []uint64) uint64 {
+ return popcntMaskSliceGo(s, m)
+}
+
+func popcntAndSlice(s, m []uint64) uint64 {
+ return popcntAndSliceGo(s, m)
+}
+
+func popcntOrSlice(s, m []uint64) uint64 {
+ return popcntOrSliceGo(s, m)
+}
+
+func popcntXorSlice(s, m []uint64) uint64 {
+ return popcntXorSliceGo(s, m)
+}
diff --git a/vendor/github.com/bits-and-blooms/bitset/select.go b/vendor/github.com/bits-and-blooms/bitset/select.go
new file mode 100644
index 0000000..f15e74a
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/select.go
@@ -0,0 +1,45 @@
+package bitset
+
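+// select64 returns the position of the (j+1)-th set bit in w (j is 0-based),
+// narrowing the search by popcounting successively smaller halves of the word.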
+func select64(w uint64, j uint) uint {
+ seen := 0
+ // Divide 64bit
+ part := w & 0xFFFFFFFF
+ n := uint(popcount(part))
+ if n <= j {
+ part = w >> 32
+ seen += 32
+ j -= n
+ }
+ ww := part
+
+ // Divide 32bit
+ part = ww & 0xFFFF
+
+ n = uint(popcount(part))
+ if n <= j {
+ part = ww >> 16
+ seen += 16
+ j -= n
+ }
+ ww = part
+
+ // Divide 16bit
+ part = ww & 0xFF
+ n = uint(popcount(part))
+ if n <= j {
+ part = ww >> 8
+ seen += 8
+ j -= n
+ }
+ ww = part
+
+ // Lookup in final byte
+ counter := 0
+ for ; counter < 8; counter++ {
+ j -= uint((ww >> counter) & 1)
+ if j+1 == 0 {
+ break
+ }
+ }
+ return uint(seen + counter)
+}
diff --git a/vendor/github.com/bits-and-blooms/bitset/trailing_zeros_18.go b/vendor/github.com/bits-and-blooms/bitset/trailing_zeros_18.go
new file mode 100644
index 0000000..12336e7
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/trailing_zeros_18.go
@@ -0,0 +1,15 @@
+//go:build !go1.9
+// +build !go1.9
+
+package bitset
+
+var deBruijn = [...]byte{
+ 0, 1, 56, 2, 57, 49, 28, 3, 61, 58, 42, 50, 38, 29, 17, 4,
+ 62, 47, 59, 36, 45, 43, 51, 22, 53, 39, 33, 30, 24, 18, 12, 5,
+ 63, 55, 48, 27, 60, 41, 37, 16, 46, 35, 44, 21, 52, 32, 23, 11,
+ 54, 26, 40, 15, 34, 20, 31, 10, 25, 14, 19, 9, 13, 8, 7, 6,
+}
+
+func trailingZeroes64(v uint64) uint {
+ return uint(deBruijn[((v&-v)*0x03f79d71b4ca8b09)>>58])
+}
diff --git a/vendor/github.com/bits-and-blooms/bitset/trailing_zeros_19.go b/vendor/github.com/bits-and-blooms/bitset/trailing_zeros_19.go
new file mode 100644
index 0000000..cfb0a84
--- /dev/null
+++ b/vendor/github.com/bits-and-blooms/bitset/trailing_zeros_19.go
@@ -0,0 +1,10 @@
+//go:build go1.9
+// +build go1.9
+
+package bitset
+
+import "math/bits"
+
+func trailingZeroes64(v uint64) uint {
+ return uint(bits.TrailingZeros64(v))
+}
diff --git a/vendor/github.com/blevesearch/mmap-go/.gitignore b/vendor/github.com/blevesearch/mmap-go/.gitignore
new file mode 100644
index 0000000..0c0a5e4
--- /dev/null
+++ b/vendor/github.com/blevesearch/mmap-go/.gitignore
@@ -0,0 +1,10 @@
+*.out
+*.5
+*.6
+*.8
+*.swp
+_obj
+_test
+testdata
+/.idea
+*.iml
\ No newline at end of file
diff --git a/vendor/github.com/blevesearch/mmap-go/.travis.yml b/vendor/github.com/blevesearch/mmap-go/.travis.yml
new file mode 100644
index 0000000..169eb1f
--- /dev/null
+++ b/vendor/github.com/blevesearch/mmap-go/.travis.yml
@@ -0,0 +1,16 @@
+language: go
+os:
+ - linux
+ - osx
+ - windows
+go:
+ - 1.11.4
+env:
+ global:
+ - GO111MODULE=on
+install:
+ - go mod download
+ - go get github.com/mattn/goveralls
+script:
+ - go test -v -covermode=count -coverprofile=coverage.out -bench . -cpu 1,4
+ - '[ "${TRAVIS_PULL_REQUEST}" = "false" ] && $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci -repotoken $COVERALLS_TOKEN || true'
diff --git a/vendor/github.com/blevesearch/mmap-go/LICENSE b/vendor/github.com/blevesearch/mmap-go/LICENSE
new file mode 100644
index 0000000..8f05f33
--- /dev/null
+++ b/vendor/github.com/blevesearch/mmap-go/LICENSE
@@ -0,0 +1,25 @@
+Copyright (c) 2011, Evan Shaw
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the copyright holder nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
diff --git a/vendor/github.com/blevesearch/mmap-go/README.md b/vendor/github.com/blevesearch/mmap-go/README.md
new file mode 100644
index 0000000..4cc2bfe
--- /dev/null
+++ b/vendor/github.com/blevesearch/mmap-go/README.md
@@ -0,0 +1,12 @@
+mmap-go
+=======
+
+mmap-go is a portable mmap package for the [Go programming language](http://golang.org).
+It has been tested on Linux (386, amd64), OS X, and Windows (386). It should also
+work on other Unix-like platforms, but hasn't been tested with them. I'm interested
+to hear about the results.
+
+I haven't been able to add more features without adding significant complexity,
+so mmap-go doesn't support mprotect, mincore, and maybe a few other things.
+If you're running on a Unix-like platform and need some of these features,
+I suggest Gustavo Niemeyer's [gommap](http://labix.org/gommap).
diff --git a/vendor/github.com/blevesearch/mmap-go/mmap.go b/vendor/github.com/blevesearch/mmap-go/mmap.go
new file mode 100644
index 0000000..29655bd
--- /dev/null
+++ b/vendor/github.com/blevesearch/mmap-go/mmap.go
@@ -0,0 +1,117 @@
+// Copyright 2011 Evan Shaw. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file defines the common package interface and contains a little bit of
+// factored out logic.
+
+// Package mmap allows mapping files into memory. It tries to provide a simple, reasonably portable interface,
+// but doesn't go out of its way to abstract away every little platform detail.
+// This specifically means:
+// * forked processes may or may not inherit mappings
+// * a file's timestamp may or may not be updated by writes through mappings
+// * specifying a size larger than the file's actual size can increase the file's size
+// * if the mapped file is being modified by another process while your program's running, don't expect consistent results between platforms
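+//
+// A minimal usage sketch (error handling elided; "data.bin" stands in for a
+// real file):
+//
+// f, _ := os.Open("data.bin")
+// m, _ := mmap.Map(f, mmap.RDONLY, 0)
+// defer m.Unmap()
+// first := m[0] // read the first mapped byte through the slice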
+package mmap
+
+import (
+ "errors"
+ "os"
+ "reflect"
+ "unsafe"
+)
+
+const (
+ // RDONLY maps the memory read-only.
+ // Attempts to write to the MMap object will result in undefined behavior.
+ RDONLY = 0
+ // RDWR maps the memory as read-write. Writes to the MMap object will update the
+ // underlying file.
+ RDWR = 1 << iota
+ // COPY maps the memory as copy-on-write. Writes to the MMap object will affect
+ // memory, but the underlying file will remain unchanged.
+ COPY
+ // If EXEC is set, the mapped memory is marked as executable.
+ EXEC
+)
+
+const (
+ // If the ANON flag is set, the mapped memory will not be backed by a file.
+ ANON = 1 << iota
+)
+
+// MMap represents a file mapped into memory.
+type MMap []byte
+
+// Map maps an entire file into memory.
+// If ANON is set in flags, f is ignored.
+func Map(f *os.File, prot, flags int) (MMap, error) {
+ return MapRegion(f, -1, prot, flags, 0)
+}
+
+// MapRegion maps part of a file into memory.
+// The offset parameter must be a multiple of the system's page size.
+// If length < 0, the entire file will be mapped.
+// If ANON is set in flags, f is ignored.
+func MapRegion(f *os.File, length int, prot, flags int, offset int64) (MMap, error) {
+ if offset%int64(os.Getpagesize()) != 0 {
+ return nil, errors.New("offset parameter must be a multiple of the system's page size")
+ }
+
+ var fd uintptr
+ if flags&ANON == 0 {
+ fd = uintptr(f.Fd())
+ if length < 0 {
+ fi, err := f.Stat()
+ if err != nil {
+ return nil, err
+ }
+ length = int(fi.Size())
+ }
+ } else {
+ if length <= 0 {
+ return nil, errors.New("anonymous mapping requires non-zero length")
+ }
+ fd = ^uintptr(0)
+ }
+ return mmap(length, uintptr(prot), uintptr(flags), fd, offset)
+}
+
+func (m *MMap) header() *reflect.SliceHeader {
+ return (*reflect.SliceHeader)(unsafe.Pointer(m))
+}
+
+func (m *MMap) addrLen() (uintptr, uintptr) {
+ header := m.header()
+ return header.Data, uintptr(header.Len)
+}
+
+// Lock keeps the mapped region in physical memory, ensuring that it will not be
+// swapped out.
+func (m MMap) Lock() error {
+ return m.lock()
+}
+
+// Unlock reverses the effect of Lock, allowing the mapped region to potentially
+// be swapped out.
+// If m is already unlocked, an error will result.
+func (m MMap) Unlock() error {
+ return m.unlock()
+}
+
+// Flush synchronizes the mapping's contents to the file's contents on disk.
+func (m MMap) Flush() error {
+ return m.flush()
+}
+
+// Unmap deletes the memory mapped region, flushes any remaining changes, and sets
+// m to nil.
+// Trying to read or write any remaining references to m after Unmap is called will
+// result in undefined behavior.
+// Unmap should only be called on the slice value that was originally returned from
+// a call to Map. Calling Unmap on a derived slice may cause errors.
+func (m *MMap) Unmap() error {
+ err := m.unmap()
+ *m = nil
+ return err
+}
diff --git a/vendor/github.com/blevesearch/mmap-go/mmap_unix.go b/vendor/github.com/blevesearch/mmap-go/mmap_unix.go
new file mode 100644
index 0000000..25b13e5
--- /dev/null
+++ b/vendor/github.com/blevesearch/mmap-go/mmap_unix.go
@@ -0,0 +1,51 @@
+// Copyright 2011 Evan Shaw. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin dragonfly freebsd linux openbsd solaris netbsd
+
+package mmap
+
+import (
+ "golang.org/x/sys/unix"
+)
+
+func mmap(len int, inprot, inflags, fd uintptr, off int64) ([]byte, error) {
+ flags := unix.MAP_SHARED
+ prot := unix.PROT_READ
+ switch {
+ case inprot&COPY != 0:
+ prot |= unix.PROT_WRITE
+ flags = unix.MAP_PRIVATE
+ case inprot&RDWR != 0:
+ prot |= unix.PROT_WRITE
+ }
+ if inprot&EXEC != 0 {
+ prot |= unix.PROT_EXEC
+ }
+ if inflags&ANON != 0 {
+ flags |= unix.MAP_ANON
+ }
+
+ b, err := unix.Mmap(int(fd), off, len, prot, flags)
+ if err != nil {
+ return nil, err
+ }
+ return b, nil
+}
+
+func (m MMap) flush() error {
+ return unix.Msync([]byte(m), unix.MS_SYNC)
+}
+
+func (m MMap) lock() error {
+ return unix.Mlock([]byte(m))
+}
+
+func (m MMap) unlock() error {
+ return unix.Munlock([]byte(m))
+}
+
+func (m MMap) unmap() error {
+ return unix.Munmap([]byte(m))
+}
diff --git a/vendor/github.com/blevesearch/mmap-go/mmap_windows.go b/vendor/github.com/blevesearch/mmap-go/mmap_windows.go
new file mode 100644
index 0000000..631b382
--- /dev/null
+++ b/vendor/github.com/blevesearch/mmap-go/mmap_windows.go
@@ -0,0 +1,153 @@
+// Copyright 2011 Evan Shaw. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package mmap
+
+import (
+ "errors"
+ "os"
+ "sync"
+
+ "golang.org/x/sys/windows"
+)
+
+// mmap on Windows is a two-step process.
+// First, we call CreateFileMapping to get a handle.
+// Then, we call MapViewOfFile to get an actual pointer into memory.
+// Because we want to emulate a POSIX-style mmap, we don't want to expose
+// the handle -- only the pointer. We also want to return only a byte slice,
+// not a struct, so it's convenient to manipulate.
+
+// We keep this map so that we can get back the original handle from the memory address.
+
+type addrinfo struct {
+ file windows.Handle
+ mapview windows.Handle
+ writable bool
+}
+
+var handleLock sync.Mutex
+var handleMap = map[uintptr]*addrinfo{}
+
+func mmap(len int, prot, flags, hfile uintptr, off int64) ([]byte, error) {
+ flProtect := uint32(windows.PAGE_READONLY)
+ dwDesiredAccess := uint32(windows.FILE_MAP_READ)
+ writable := false
+ switch {
+ case prot&COPY != 0:
+ flProtect = windows.PAGE_WRITECOPY
+ dwDesiredAccess = windows.FILE_MAP_COPY
+ writable = true
+ case prot&RDWR != 0:
+ flProtect = windows.PAGE_READWRITE
+ dwDesiredAccess = windows.FILE_MAP_WRITE
+ writable = true
+ }
+ if prot&EXEC != 0 {
+ flProtect <<= 4
+ dwDesiredAccess |= windows.FILE_MAP_EXECUTE
+ }
+
+ // The maximum size is the area of the file, starting from 0,
+ // that we wish to allow to be mappable. It is the sum of
+ // the length the user requested, plus the offset where that length
+ // is starting from. This does not map the data into memory.
+ maxSizeHigh := uint32((off + int64(len)) >> 32)
+ maxSizeLow := uint32((off + int64(len)) & 0xFFFFFFFF)
+ // TODO: Do we need to set some security attributes? It might help portability.
+ h, errno := windows.CreateFileMapping(windows.Handle(hfile), nil, flProtect, maxSizeHigh, maxSizeLow, nil)
+ if h == 0 {
+ return nil, os.NewSyscallError("CreateFileMapping", errno)
+ }
+
+ // Actually map a view of the data into memory. The view's size
+ // is the length the user requested.
+ fileOffsetHigh := uint32(off >> 32)
+ fileOffsetLow := uint32(off & 0xFFFFFFFF)
+ addr, errno := windows.MapViewOfFile(h, dwDesiredAccess, fileOffsetHigh, fileOffsetLow, uintptr(len))
+ if addr == 0 {
+ return nil, os.NewSyscallError("MapViewOfFile", errno)
+ }
+ handleLock.Lock()
+ handleMap[addr] = &addrinfo{
+ file: windows.Handle(hfile),
+ mapview: h,
+ writable: writable,
+ }
+ handleLock.Unlock()
+
+ m := MMap{}
+ dh := m.header()
+ dh.Data = addr
+ dh.Len = len
+ dh.Cap = dh.Len
+
+ return m, nil
+}
+
+func (m MMap) flush() error {
+ addr, len := m.addrLen()
+ errno := windows.FlushViewOfFile(addr, len)
+ if errno != nil {
+ return os.NewSyscallError("FlushViewOfFile", errno)
+ }
+
+ handleLock.Lock()
+ defer handleLock.Unlock()
+ handle, ok := handleMap[addr]
+ if !ok {
+ // should be impossible; we would've errored above
+ return errors.New("unknown base address")
+ }
+
+ if handle.writable {
+ if err := windows.FlushFileBuffers(handle.file); err != nil {
+ return os.NewSyscallError("FlushFileBuffers", err)
+ }
+ }
+
+ return nil
+}
+
+func (m MMap) lock() error {
+ addr, len := m.addrLen()
+ errno := windows.VirtualLock(addr, len)
+ return os.NewSyscallError("VirtualLock", errno)
+}
+
+func (m MMap) unlock() error {
+ addr, len := m.addrLen()
+ errno := windows.VirtualUnlock(addr, len)
+ return os.NewSyscallError("VirtualUnlock", errno)
+}
+
+func (m MMap) unmap() error {
+ err := m.flush()
+ if err != nil {
+ return err
+ }
+
+ addr := m.header().Data
+ // Lock the UnmapViewOfFile along with the handleMap deletion.
+ // As soon as we unmap the view, the OS is free to give the
+ // same addr to another new map. We don't want another goroutine
+ // to insert and remove the same addr into handleMap while
+ // we're trying to remove our old addr/handle pair.
+ handleLock.Lock()
+ defer handleLock.Unlock()
+ err = windows.UnmapViewOfFile(addr)
+ if err != nil {
+ return err
+ }
+
+ handle, ok := handleMap[addr]
+ if !ok {
+ // should be impossible; we would've errored above
+ return errors.New("unknown base address")
+ }
+ delete(handleMap, addr)
+
+ e := windows.CloseHandle(windows.Handle(handle.mapview))
+ return os.NewSyscallError("CloseHandle", e)
+}
diff --git a/vendor/github.com/blevesearch/vellum/CONTRIBUTING.md b/vendor/github.com/blevesearch/vellum/CONTRIBUTING.md
new file mode 100644
index 0000000..b85ec82
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/CONTRIBUTING.md
@@ -0,0 +1,16 @@
+# Contributing to Vellum
+
+We look forward to your contributions, but ask that you first review these guidelines.
+
+### Sign the CLA
+
+As Vellum is a Couchbase project we require contributors accept the [Couchbase Contributor License Agreement](http://review.couchbase.org/static/individual_agreement.html). To sign this agreement, log into the Couchbase [code review tool](http://review.couchbase.org/). The Vellum project does not use this code review tool but it is still used to track acceptance of the contributor license agreements.
+
+### Submitting a Pull Request
+
+All types of contributions are welcome, but please keep the following in mind:
+
+- If you're planning a large change, you should really discuss it in a GitHub issue first. This helps avoid duplicate effort and spending time on something that may not be merged.
+- Existing tests should continue to pass, new tests for the contribution are nice to have.
+- All code should have gone through `go fmt`
+- All code should pass `go vet`
diff --git a/vendor/github.com/blevesearch/vellum/LICENSE b/vendor/github.com/blevesearch/vellum/LICENSE
new file mode 100644
index 0000000..7a4a3ea
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
\ No newline at end of file
diff --git a/vendor/github.com/blevesearch/vellum/README.md b/vendor/github.com/blevesearch/vellum/README.md
new file mode 100644
index 0000000..e5c4a8b
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/README.md
@@ -0,0 +1,183 @@
+# ![vellum](docs/logo.png) vellum
+
+[![Tests](https://github.com/couchbase/vellum/workflows/Tests/badge.svg?branch=master&event=push)](https://github.com/couchbase/vellum/actions?query=workflow%3ATests+event%3Apush+branch%3Amaster)
+[![Coverage Status](https://coveralls.io/repos/github/couchbase/vellum/badge.svg?branch=master)](https://coveralls.io/github/couchbase/vellum?branch=master)
+[![GoDoc](https://godoc.org/github.com/couchbase/vellum?status.svg)](https://godoc.org/github.com/couchbase/vellum)
+[![Go Report Card](https://goreportcard.com/badge/github.com/couchbase/vellum)](https://goreportcard.com/report/github.com/couchbase/vellum)
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
+
+A Go library implementing an FST (finite state transducer) capable of:
+ - mapping between keys ([]byte) and a value (uint64)
+ - enumerating keys in lexicographic order
+
+Some additional goals of this implementation:
+ - bounded memory use while building the FST
+ - streaming out FST data while building
+ - mmap-ing the FST at runtime to support very large FSTs (optional)
+
+## Usage
+
+### Building an FST
+
+To build an FST, create a new builder using the `New()` method. This method takes an `io.Writer` as an argument. As the FST is being built, data will be streamed to the writer as soon as possible. With this builder you **MUST** insert keys in lexicographic order. Inserting keys out of order will result in an error. After inserting the last key into the builder, you **MUST** call `Close()` on the builder. This will flush all remaining data to the underlying writer.
+
+In memory:
+```go
+ var buf bytes.Buffer
+ builder, err := vellum.New(&buf, nil)
+ if err != nil {
+ log.Fatal(err)
+ }
+```
+
+To disk:
+```go
+ f, err := os.Create("/tmp/vellum.fst")
+ if err != nil {
+ log.Fatal(err)
+ }
+ builder, err := vellum.New(f, nil)
+ if err != nil {
+ log.Fatal(err)
+ }
+```
+
+**MUST** insert keys in lexicographic order:
+```go
+err = builder.Insert([]byte("cat"), 1)
+if err != nil {
+ log.Fatal(err)
+}
+
+err = builder.Insert([]byte("dog"), 2)
+if err != nil {
+ log.Fatal(err)
+}
+
+err = builder.Insert([]byte("fish"), 3)
+if err != nil {
+ log.Fatal(err)
+}
+
+err = builder.Close()
+if err != nil {
+ log.Fatal(err)
+}
+```
+
+### Using an FST
+
+After closing the builder, the data can be used to instantiate an FST. If the data was written to disk, you can use the `Open()` method to mmap the file. If the data is already in memory, or you wish to load/mmap the data yourself, you can instantiate the FST with the `Load()` method.
+
+Load in memory:
+```go
+ fst, err := vellum.Load(buf.Bytes())
+ if err != nil {
+ log.Fatal(err)
+ }
+```
+
+Open from disk:
+```go
+ fst, err := vellum.Open("/tmp/vellum.fst")
+ if err != nil {
+ log.Fatal(err)
+ }
+```
+
+Get key/value:
+```go
+ val, exists, err := fst.Get([]byte("dog"))
+ if err != nil {
+ log.Fatal(err)
+ }
+ if exists {
+ fmt.Printf("contains dog with val: %d\n", val)
+ } else {
+ fmt.Printf("does not contain dog")
+ }
+```
+
+Iterate key/values:
+```go
+ itr, err := fst.Iterator(startKeyInclusive, endKeyExclusive)
+ for err == nil {
+ key, val := itr.Current()
+ fmt.Printf("contains key: %s val: %d", key, val)
+ err = itr.Next()
+ }
+ if err != nil {
+ log.Fatal(err)
+ }
+```
+
+### How does the FST get built?
+
+A full description of the implementation is beyond the scope of this README, but let's consider a small example where we want to insert 3 key/value pairs.
+
+First we insert "are" with the value 4.
+
+![step1](docs/demo1.png)
+
+Next, we insert "ate" with the value 2.
+
+![step2](docs/demo2.png)
+
+Notice how the values associated with the transitions were adjusted so that by summing them while traversing we still get the expected value.
+
+At this point, we see that state 5 looks like state 3, and state 4 looks like state 2. But we cannot yet combine them, because future inserts could change this.
+
+Now, we insert "see" with value 3. Once it has been added, we now know that states 5 and 4 can longer change. Since they are identical to 3 and 2, we replace them.
+
+![step3](docs/demo3.png)
+
+Again, we see that states 7 and 8 appear to be identical to 2 and 3.
+
+Having inserted our last key, we call `Close()` on the builder.
+
+![step4](docs/demo4.png)
+
+Now, states 7 and 8 can safely be replaced with 2 and 3.
+
+For additional information, see the references at the bottom of this document.
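+
+A minimal end-to-end sketch of this walkthrough using the API shown above (assuming the `bytes`, `fmt`, and `log` imports; the output redistribution happens internally, so lookups still sum back to the inserted values):
+
+```go
+var buf bytes.Buffer
+builder, err := vellum.New(&buf, nil)
+if err != nil {
+ log.Fatal(err)
+}
+
+// keys in lexicographic order: "are" < "ate" < "see"
+err = builder.Insert([]byte("are"), 4)
+if err != nil {
+ log.Fatal(err)
+}
+err = builder.Insert([]byte("ate"), 2)
+if err != nil {
+ log.Fatal(err)
+}
+err = builder.Insert([]byte("see"), 3)
+if err != nil {
+ log.Fatal(err)
+}
+err = builder.Close()
+if err != nil {
+ log.Fatal(err)
+}
+
+fst, err := vellum.Load(buf.Bytes())
+if err != nil {
+ log.Fatal(err)
+}
+val, exists, err := fst.Get([]byte("ate"))
+if err != nil {
+ log.Fatal(err)
+}
+if exists {
+ fmt.Printf("ate -> %d\n", val) // prints: ate -> 2
+}
+```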
+
+### What does the serialized format look like?
+
+We've broken out a separate document on the [vellum disk format v1](docs/format.md).
+
+### What if I want to use this on a system that doesn't have mmap?
+
+The mmap library itself is guarded with system/architecture build tags, but we've also added an additional build tag in vellum. If you'd like to `Open()` a file-based representation of an FST without using mmap, you can build the library with the `nommap` build tag. NOTE: if you do this, the entire FST will be read into memory.
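+
+For example:
+
+    $ go build -tags nommap ./...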
+
+### Can I use this with Unicode strings?
+
+Yes, however this implementation is only aware of the byte representation you choose. In order to find matches, you must work with some canonical byte representation of the string. In the future, some encoding-aware traversals may be possible on top of the lower-level byte transitions.
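+
+For example, Go string literals are already UTF-8 bytes, so the raw bytes of a string are one such canonical representation (continuing the builder sketch above):
+
+```go
+// "café" is inserted as its UTF-8 bytes: 0x63 0x61 0x66 0xc3 0xa9
+err = builder.Insert([]byte("café"), 1)
+if err != nil {
+ log.Fatal(err)
+}
+```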
+
+### How did this library come to be?
+
+In my work on the [Bleve](https://github.com/blevesearch/bleve) project I became aware of the power of the FST for many search-related tasks. The obvious starting point for such a thing in Go was the [mafsa](https://github.com/smartystreets/mafsa) project. While working with mafsa I encountered some issues. First, it did not stream data to disk while building. Second, it chose to use a rune as the fundamental unit of transition in the FST, but I felt using a byte would be more powerful in the end. My hope is that higher-level encoding-aware traversals will be possible when necessary. Finally, as I reported bugs and submitted PRs, I learned that the mafsa project was mainly a research project and was no longer being maintained. I wanted to build something that could be used in production. As the project advanced, more and more techniques from the [BurntSushi/fst](https://github.com/BurntSushi/fst) library were adapted to our implementation.
+
+### Are there tools to work with vellum files?
+
+Under the cmd/vellum subdirectory, there's a command-line tool with
+subcommands that let you create, inspect, and query vellum files.
+
+### How can I generate a state transition diagram from a vellum file?
+
+The vellum command-line tool has a "dot" subcommand that can emit
+Graphviz dot output from an input vellum file. The dot file can in
+turn be converted into an image using Graphviz tools. For example:
+
+ $ vellum dot myFile.vellum > output.dot
+ $ dot -Tpng output.dot -o output.png
+
+## Related Work
+
+Much credit goes to two existing projects:
+ - [mafsa](https://github.com/smartystreets/mafsa)
+ - [BurntSushi/fst](https://github.com/BurntSushi/fst)
+
+Most of the original implementation here started with my digging into the internals of mafsa. As the implementation progressed, I continued to borrow ideas/approaches from the BurntSushi/fst library as well.
+
+For a great introduction to this topic, please read the blog post [Index 1,600,000,000 Keys with Automata and Rust](http://blog.burntsushi.net/transducers/).
diff --git a/vendor/github.com/blevesearch/vellum/automaton.go b/vendor/github.com/blevesearch/vellum/automaton.go
new file mode 100644
index 0000000..70398f2
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/automaton.go
@@ -0,0 +1,85 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+// Automaton represents the general contract of a byte-based finite automaton
+type Automaton interface {
+
+ // Start returns the start state
+ Start() int
+
+ // IsMatch returns true if and only if the state is a match
+ IsMatch(int) bool
+
+ // CanMatch returns true if and only if it is possible to reach a match
+ // in zero or more steps
+ CanMatch(int) bool
+
+ // WillAlwaysMatch returns true if and only if the current state matches
+ // and will always match no matter what steps are taken
+ WillAlwaysMatch(int) bool
+
+ // Accept returns the next state given the input to the specified state
+ Accept(int, byte) int
+}
+
+// AutomatonContains implements a generic Contains() method which works
+// on any implementation of Automaton
+func AutomatonContains(a Automaton, k []byte) bool {
+ i := 0
+ curr := a.Start()
+ for a.CanMatch(curr) && i < len(k) {
+ curr = a.Accept(curr, k[i])
+ if curr == noneAddr {
+ break
+ }
+ i++
+ }
+ if i != len(k) {
+ return false
+ }
+ return a.IsMatch(curr)
+}
+
+// AlwaysMatch is an Automaton implementation which always matches
+type AlwaysMatch struct{}
+
+// Start returns the AlwaysMatch start state
+func (m *AlwaysMatch) Start() int {
+ return 0
+}
+
+// IsMatch always returns true
+func (m *AlwaysMatch) IsMatch(int) bool {
+ return true
+}
+
+// CanMatch always returns true
+func (m *AlwaysMatch) CanMatch(int) bool {
+ return true
+}
+
+// WillAlwaysMatch always returns true
+func (m *AlwaysMatch) WillAlwaysMatch(int) bool {
+ return true
+}
+
+// Accept returns the next AlwaysMatch state
+func (m *AlwaysMatch) Accept(int, byte) int {
+ return 0
+}
+
+// alwaysMatchAutomaton is created once to avoid unnecessary repeated allocations.
+var alwaysMatchAutomaton = &AlwaysMatch{}
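+
+// prefixAut is an illustrative sketch (editor's addition, not part of the
+// original library) of a custom Automaton that matches exactly the keys
+// beginning with a fixed prefix. State values are opaque ints chosen by the
+// implementation; we encode them as -(bytesMatched + 1) so they can never
+// collide with the noneAddr sentinel (1), which AutomatonContains treats as
+// a dead state. For example:
+//
+//	AutomatonContains(prefixAut{prefix: []byte("ca")}, []byte("cat")) // true
+type prefixAut struct{ prefix []byte }
+
+func (p prefixAut) pos(state int) int { return -state - 1 }
+
+// Start returns the state in which zero bytes have been matched
+func (p prefixAut) Start() int { return -1 }
+
+// IsMatch returns whether the whole prefix has been consumed
+func (p prefixAut) IsMatch(s int) bool { return p.pos(s) >= len(p.prefix) }
+
+// CanMatch returns whether the state is still alive
+func (p prefixAut) CanMatch(s int) bool { return s != noneAddr }
+
+// WillAlwaysMatch returns whether every extension of the input seen so far matches
+func (p prefixAut) WillAlwaysMatch(s int) bool { return p.pos(s) >= len(p.prefix) }
+
+// Accept advances one byte, moving to the dead state on a prefix mismatch
+func (p prefixAut) Accept(s int, b byte) int {
+ i := p.pos(s)
+ if i < len(p.prefix) && p.prefix[i] != b {
+  return noneAddr
+ }
+ return -(i + 1) - 1
+}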
diff --git a/vendor/github.com/blevesearch/vellum/builder.go b/vendor/github.com/blevesearch/vellum/builder.go
new file mode 100644
index 0000000..7e545cb
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/builder.go
@@ -0,0 +1,447 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+import (
+ "bytes"
+ "io"
+)
+
+var defaultBuilderOpts = &BuilderOpts{
+ Encoder: 1,
+ RegistryTableSize: 10000,
+ RegistryMRUSize: 2,
+}
+
+// A Builder is used to build a new FST. Data is streamed out to the
+// underlying Writer as soon as possible.
+type Builder struct {
+ unfinished *unfinishedNodes
+ registry *registry
+ last []byte
+ len int
+
+ lastAddr int
+
+ encoder encoder
+ opts *BuilderOpts
+
+ builderNodePool *builderNodePool
+}
+
+const noneAddr = 1
+const emptyAddr = 0
+
+// newBuilder returns a new Builder which will stream out the
+// underlying representation to the provided Writer as the set is built.
+func newBuilder(w io.Writer, opts *BuilderOpts) (*Builder, error) {
+ if opts == nil {
+ opts = defaultBuilderOpts
+ }
+ builderNodePool := &builderNodePool{}
+ rv := &Builder{
+ unfinished: newUnfinishedNodes(builderNodePool),
+ registry: newRegistry(builderNodePool, opts.RegistryTableSize, opts.RegistryMRUSize),
+ builderNodePool: builderNodePool,
+ opts: opts,
+ lastAddr: noneAddr,
+ }
+
+ var err error
+ rv.encoder, err = loadEncoder(opts.Encoder, w)
+ if err != nil {
+ return nil, err
+ }
+ err = rv.encoder.start()
+ if err != nil {
+ return nil, err
+ }
+ return rv, nil
+}
+
+func (b *Builder) Reset(w io.Writer) error {
+ b.unfinished.Reset()
+ b.registry.Reset()
+ b.lastAddr = noneAddr
+ b.encoder.reset(w)
+ b.last = nil
+ b.len = 0
+
+ err := b.encoder.start()
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+// Insert adds the provided key/value pair to the set being built.
+// NOTE: keys must be inserted in lexicographic order.
+func (b *Builder) Insert(key []byte, val uint64) error {
+ // ensure items are added in lexicographic order
+ if bytes.Compare(key, b.last) < 0 {
+ return ErrOutOfOrder
+ }
+ if len(key) == 0 {
+ b.len = 1
+ b.unfinished.setRootOutput(val)
+ return nil
+ }
+
+ prefixLen, out := b.unfinished.findCommonPrefixAndSetOutput(key, val)
+ b.len++
+ err := b.compileFrom(prefixLen)
+ if err != nil {
+ return err
+ }
+ b.copyLastKey(key)
+ b.unfinished.addSuffix(key[prefixLen:], out)
+
+ return nil
+}
+
+func (b *Builder) copyLastKey(key []byte) {
+ if b.last == nil {
+ b.last = make([]byte, 0, 64)
+ } else {
+ b.last = b.last[:0]
+ }
+ b.last = append(b.last, key...)
+}
+
+// Close MUST be called after inserting all values.
+func (b *Builder) Close() error {
+ err := b.compileFrom(0)
+ if err != nil {
+ return err
+ }
+ root := b.unfinished.popRoot()
+ rootAddr, err := b.compile(root)
+ if err != nil {
+ return err
+ }
+ return b.encoder.finish(b.len, rootAddr)
+}
+
+func (b *Builder) compileFrom(iState int) error {
+ addr := noneAddr
+ for iState+1 < len(b.unfinished.stack) {
+ var node *builderNode
+ if addr == noneAddr {
+ node = b.unfinished.popEmpty()
+ } else {
+ node = b.unfinished.popFreeze(addr)
+ }
+ var err error
+ addr, err = b.compile(node)
+ if err != nil {
+ return err
+ }
+ }
+ b.unfinished.topLastFreeze(addr)
+ return nil
+}
+
+func (b *Builder) compile(node *builderNode) (int, error) {
+ if node.final && len(node.trans) == 0 &&
+ node.finalOutput == 0 {
+ return 0, nil
+ }
+ found, addr, entry := b.registry.entry(node)
+ if found {
+ return addr, nil
+ }
+ addr, err := b.encoder.encodeState(node, b.lastAddr)
+ if err != nil {
+ return 0, err
+ }
+
+ b.lastAddr = addr
+ entry.addr = addr
+ return addr, nil
+}
+
+type unfinishedNodes struct {
+ stack []*builderNodeUnfinished
+
+ // cache allocates a reasonable number of builderNodeUnfinished
+ // objects up front and tries to keep reusing them.
+ // Because the main data structure is a stack, we assume the
+ // same access pattern and don't track items separately.
+ // This means calls to get() and pushXYZ() must be paired,
+ // as must calls to put() and popXYZ().
+ cache []builderNodeUnfinished
+
+ builderNodePool *builderNodePool
+}
+
+func (u *unfinishedNodes) Reset() {
+ u.stack = u.stack[:0]
+ for i := 0; i < len(u.cache); i++ {
+ u.cache[i] = builderNodeUnfinished{}
+ }
+ u.pushEmpty(false)
+}
+
+func newUnfinishedNodes(p *builderNodePool) *unfinishedNodes {
+ rv := &unfinishedNodes{
+ stack: make([]*builderNodeUnfinished, 0, 64),
+ cache: make([]builderNodeUnfinished, 64),
+ builderNodePool: p,
+ }
+ rv.pushEmpty(false)
+ return rv
+}
+
+// get returns a builderNodeUnfinished, reusing the cache if possible
+func (u *unfinishedNodes) get() *builderNodeUnfinished {
+ if len(u.stack) < len(u.cache) {
+ return &u.cache[len(u.stack)]
+ }
+ // cache is full; allocate a new one
+ return &builderNodeUnfinished{}
+}
+
+// put returns a builderNodeUnfinished to the cache, clearing it for reuse
+func (u *unfinishedNodes) put() {
+ if len(u.stack) >= len(u.cache) {
+ // do nothing; not part of the cache
+ return
+ }
+ u.cache[len(u.stack)] = builderNodeUnfinished{}
+}
+
+func (u *unfinishedNodes) findCommonPrefixAndSetOutput(key []byte,
+ out uint64) (int, uint64) {
+ var i int
+ for i < len(key) {
+ if i >= len(u.stack) {
+ break
+ }
+ var addPrefix uint64
+ if !u.stack[i].hasLastT {
+ break
+ }
+ if u.stack[i].lastIn == key[i] {
+ commonPre := outputPrefix(u.stack[i].lastOut, out)
+ addPrefix = outputSub(u.stack[i].lastOut, commonPre)
+ out = outputSub(out, commonPre)
+ u.stack[i].lastOut = commonPre
+ i++
+ } else {
+ break
+ }
+
+ if addPrefix != 0 {
+ u.stack[i].addOutputPrefix(addPrefix)
+ }
+ }
+
+ return i, out
+}
+
+func (u *unfinishedNodes) pushEmpty(final bool) {
+ next := u.get()
+ next.node = u.builderNodePool.Get()
+ next.node.final = final
+ u.stack = append(u.stack, next)
+}
+
+func (u *unfinishedNodes) popRoot() *builderNode {
+ l := len(u.stack)
+ var unfinished *builderNodeUnfinished
+ u.stack, unfinished = u.stack[:l-1], u.stack[l-1]
+ rv := unfinished.node
+ u.put()
+ return rv
+}
+
+func (u *unfinishedNodes) popFreeze(addr int) *builderNode {
+ l := len(u.stack)
+ var unfinished *builderNodeUnfinished
+ u.stack, unfinished = u.stack[:l-1], u.stack[l-1]
+ unfinished.lastCompiled(addr)
+ rv := unfinished.node
+ u.put()
+ return rv
+}
+
+func (u *unfinishedNodes) popEmpty() *builderNode {
+ l := len(u.stack)
+ var unfinished *builderNodeUnfinished
+ u.stack, unfinished = u.stack[:l-1], u.stack[l-1]
+ rv := unfinished.node
+ u.put()
+ return rv
+}
+
+func (u *unfinishedNodes) setRootOutput(out uint64) {
+ u.stack[0].node.final = true
+ u.stack[0].node.finalOutput = out
+}
+
+func (u *unfinishedNodes) topLastFreeze(addr int) {
+ last := len(u.stack) - 1
+ u.stack[last].lastCompiled(addr)
+}
+
+func (u *unfinishedNodes) addSuffix(bs []byte, out uint64) {
+ if len(bs) == 0 {
+ return
+ }
+ last := len(u.stack) - 1
+ u.stack[last].hasLastT = true
+ u.stack[last].lastIn = bs[0]
+ u.stack[last].lastOut = out
+ for _, b := range bs[1:] {
+ next := u.get()
+ next.node = u.builderNodePool.Get()
+ next.hasLastT = true
+ next.lastIn = b
+ next.lastOut = 0
+ u.stack = append(u.stack, next)
+ }
+ u.pushEmpty(true)
+}
+
+type builderNodeUnfinished struct {
+ node *builderNode
+ lastOut uint64
+ lastIn byte
+ hasLastT bool
+}
+
+func (b *builderNodeUnfinished) lastCompiled(addr int) {
+ if b.hasLastT {
+ transIn := b.lastIn
+ transOut := b.lastOut
+ b.hasLastT = false
+ b.lastOut = 0
+ b.node.trans = append(b.node.trans, transition{
+ in: transIn,
+ out: transOut,
+ addr: addr,
+ })
+ }
+}
+
+func (b *builderNodeUnfinished) addOutputPrefix(prefix uint64) {
+ if b.node.final {
+ b.node.finalOutput = outputCat(prefix, b.node.finalOutput)
+ }
+ for i := range b.node.trans {
+ b.node.trans[i].out = outputCat(prefix, b.node.trans[i].out)
+ }
+ if b.hasLastT {
+ b.lastOut = outputCat(prefix, b.lastOut)
+ }
+}
+
+type builderNode struct {
+ finalOutput uint64
+ trans []transition
+ final bool
+
+ // intrusive linked list
+ next *builderNode
+}
+
+// reset resets the receiver builderNode to a re-usable state.
+func (n *builderNode) reset() {
+ n.final = false
+ n.finalOutput = 0
+ n.trans = n.trans[:0]
+ n.next = nil
+}
+
+func (n *builderNode) equiv(o *builderNode) bool {
+ if n.final != o.final {
+ return false
+ }
+ if n.finalOutput != o.finalOutput {
+ return false
+ }
+ if len(n.trans) != len(o.trans) {
+ return false
+ }
+ for i, ntrans := range n.trans {
+ otrans := o.trans[i]
+ if ntrans.in != otrans.in {
+ return false
+ }
+ if ntrans.addr != otrans.addr {
+ return false
+ }
+ if ntrans.out != otrans.out {
+ return false
+ }
+ }
+ return true
+}
+
+type transition struct {
+ out uint64
+ addr int
+ in byte
+}
+
+func outputPrefix(l, r uint64) uint64 {
+ if l < r {
+ return l
+ }
+ return r
+}
+
+func outputSub(l, r uint64) uint64 {
+ return l - r
+}
+
+func outputCat(l, r uint64) uint64 {
+ return l + r
+}
+
+// builderNodePool pools builderNodes using a singly linked list.
+//
+// NB: builderNode lifecycle is described by the following interactions -
+// +------------------------+ +----------------------+
+// | Unfinished Nodes | Transfer once | Registry |
+// |(not frozen builderNode)|-----builderNode is ------->| (frozen builderNode) |
+// +------------------------+ marked frozen +----------------------+
+// ^ |
+// | |
+// | Put()
+// | Get() on +-------------------+ when
+// +-new char--------| builderNode Pool |<-----------evicted
+// +-------------------+
+type builderNodePool struct {
+ head *builderNode
+}
+
+func (p *builderNodePool) Get() *builderNode {
+ if p.head == nil {
+ return &builderNode{}
+ }
+ head := p.head
+ p.head = p.head.next
+ return head
+}
+
+func (p *builderNodePool) Put(v *builderNode) {
+ if v == nil {
+ return
+ }
+ v.reset()
+ v.next = p.head
+ p.head = v
+}
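+
+// Editor's note (worked example, not part of the original file): the
+// outputPrefix/outputSub/outputCat helpers implement the value
+// redistribution that findCommonPrefixAndSetOutput performs, matching the
+// README walkthrough where "are" was inserted with output 4 (carried
+// entirely on the shared 'a' transition) and "ate" then arrives with
+// output 2:
+//
+//	commonPre := outputPrefix(4, 2) // min -> 2 stays on the shared 'a'
+//	addPrefix := outputSub(4, 2)    // 2 is pushed down the existing "re" branch
+//	out := outputSub(2, 2)          // 0 remains for the new "te" suffix
+//
+// Summing along either path still yields the original values:
+// 2+2+0 = 4 for "are" and 2+0 = 2 for "ate".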
diff --git a/vendor/github.com/blevesearch/vellum/common.go b/vendor/github.com/blevesearch/vellum/common.go
new file mode 100644
index 0000000..cd3e6a0
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/common.go
@@ -0,0 +1,547 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+const maxCommon = 1<<6 - 1
+
+func encodeCommon(in byte) byte {
+ val := byte((int(commonInputs[in]) + 1) % 256)
+ if val > maxCommon {
+ return 0
+ }
+ return val
+}
+
+func decodeCommon(in byte) byte {
+ return commonInputsInv[in-1]
+}
+
+var commonInputs = []byte{
+ 84, // '\x00'
+ 85, // '\x01'
+ 86, // '\x02'
+ 87, // '\x03'
+ 88, // '\x04'
+ 89, // '\x05'
+ 90, // '\x06'
+ 91, // '\x07'
+ 92, // '\x08'
+ 93, // '\t'
+ 94, // '\n'
+ 95, // '\x0b'
+ 96, // '\x0c'
+ 97, // '\r'
+ 98, // '\x0e'
+ 99, // '\x0f'
+ 100, // '\x10'
+ 101, // '\x11'
+ 102, // '\x12'
+ 103, // '\x13'
+ 104, // '\x14'
+ 105, // '\x15'
+ 106, // '\x16'
+ 107, // '\x17'
+ 108, // '\x18'
+ 109, // '\x19'
+ 110, // '\x1a'
+ 111, // '\x1b'
+ 112, // '\x1c'
+ 113, // '\x1d'
+ 114, // '\x1e'
+ 115, // '\x1f'
+ 116, // ' '
+ 80, // '!'
+ 117, // '"'
+ 118, // '#'
+ 79, // '$'
+ 39, // '%'
+ 30, // '&'
+ 81, // "'"
+ 75, // '('
+ 74, // ')'
+ 82, // '*'
+ 57, // '+'
+ 66, // ','
+ 16, // '-'
+ 12, // '.'
+ 2, // '/'
+ 19, // '0'
+ 20, // '1'
+ 21, // '2'
+ 27, // '3'
+ 32, // '4'
+ 29, // '5'
+ 35, // '6'
+ 36, // '7'
+ 37, // '8'
+ 34, // '9'
+ 24, // ':'
+ 73, // ';'
+ 119, // '<'
+ 23, // '='
+ 120, // '>'
+ 40, // '?'
+ 83, // '@'
+ 44, // 'A'
+ 48, // 'B'
+ 42, // 'C'
+ 43, // 'D'
+ 49, // 'E'
+ 46, // 'F'
+ 62, // 'G'
+ 61, // 'H'
+ 47, // 'I'
+ 69, // 'J'
+ 68, // 'K'
+ 58, // 'L'
+ 56, // 'M'
+ 55, // 'N'
+ 59, // 'O'
+ 51, // 'P'
+ 72, // 'Q'
+ 54, // 'R'
+ 45, // 'S'
+ 52, // 'T'
+ 64, // 'U'
+ 65, // 'V'
+ 63, // 'W'
+ 71, // 'X'
+ 67, // 'Y'
+ 70, // 'Z'
+ 77, // '['
+ 121, // '\\'
+ 78, // ']'
+ 122, // '^'
+ 31, // '_'
+ 123, // '`'
+ 4, // 'a'
+ 25, // 'b'
+ 9, // 'c'
+ 17, // 'd'
+ 1, // 'e'
+ 26, // 'f'
+ 22, // 'g'
+ 13, // 'h'
+ 7, // 'i'
+ 50, // 'j'
+ 38, // 'k'
+ 14, // 'l'
+ 15, // 'm'
+ 10, // 'n'
+ 3, // 'o'
+ 8, // 'p'
+ 60, // 'q'
+ 6, // 'r'
+ 5, // 's'
+ 0, // 't'
+ 18, // 'u'
+ 33, // 'v'
+ 11, // 'w'
+ 41, // 'x'
+ 28, // 'y'
+ 53, // 'z'
+ 124, // '{'
+ 125, // '|'
+ 126, // '}'
+ 76, // '~'
+ 127, // '\x7f'
+ 128, // '\x80'
+ 129, // '\x81'
+ 130, // '\x82'
+ 131, // '\x83'
+ 132, // '\x84'
+ 133, // '\x85'
+ 134, // '\x86'
+ 135, // '\x87'
+ 136, // '\x88'
+ 137, // '\x89'
+ 138, // '\x8a'
+ 139, // '\x8b'
+ 140, // '\x8c'
+ 141, // '\x8d'
+ 142, // '\x8e'
+ 143, // '\x8f'
+ 144, // '\x90'
+ 145, // '\x91'
+ 146, // '\x92'
+ 147, // '\x93'
+ 148, // '\x94'
+ 149, // '\x95'
+ 150, // '\x96'
+ 151, // '\x97'
+ 152, // '\x98'
+ 153, // '\x99'
+ 154, // '\x9a'
+ 155, // '\x9b'
+ 156, // '\x9c'
+ 157, // '\x9d'
+ 158, // '\x9e'
+ 159, // '\x9f'
+ 160, // '\xa0'
+ 161, // '¡'
+ 162, // '¢'
+ 163, // '£'
+ 164, // '¤'
+ 165, // '¥'
+ 166, // '¦'
+ 167, // '§'
+ 168, // '¨'
+ 169, // '©'
+ 170, // 'ª'
+ 171, // '«'
+ 172, // '¬'
+ 173, // '\xad'
+ 174, // '®'
+ 175, // '¯'
+ 176, // '°'
+ 177, // '±'
+ 178, // '²'
+ 179, // '³'
+ 180, // '´'
+ 181, // 'µ'
+ 182, // '¶'
+ 183, // '·'
+ 184, // '¸'
+ 185, // '¹'
+ 186, // 'º'
+ 187, // '»'
+ 188, // '¼'
+ 189, // '½'
+ 190, // '¾'
+ 191, // '¿'
+ 192, // 'À'
+ 193, // 'Á'
+ 194, // 'Â'
+ 195, // 'Ã'
+ 196, // 'Ä'
+ 197, // 'Å'
+ 198, // 'Æ'
+ 199, // 'Ç'
+ 200, // 'È'
+ 201, // 'É'
+ 202, // 'Ê'
+ 203, // 'Ë'
+ 204, // 'Ì'
+ 205, // 'Í'
+ 206, // 'Î'
+ 207, // 'Ï'
+ 208, // 'Ð'
+ 209, // 'Ñ'
+ 210, // 'Ò'
+ 211, // 'Ó'
+ 212, // 'Ô'
+ 213, // 'Õ'
+ 214, // 'Ö'
+ 215, // '×'
+ 216, // 'Ø'
+ 217, // 'Ù'
+ 218, // 'Ú'
+ 219, // 'Û'
+ 220, // 'Ü'
+ 221, // 'Ý'
+ 222, // 'Þ'
+ 223, // 'ß'
+ 224, // 'à'
+ 225, // 'á'
+ 226, // 'â'
+ 227, // 'ã'
+ 228, // 'ä'
+ 229, // 'å'
+ 230, // 'æ'
+ 231, // 'ç'
+ 232, // 'è'
+ 233, // 'é'
+ 234, // 'ê'
+ 235, // 'ë'
+ 236, // 'ì'
+ 237, // 'í'
+ 238, // 'î'
+ 239, // 'ï'
+ 240, // 'ð'
+ 241, // 'ñ'
+ 242, // 'ò'
+ 243, // 'ó'
+ 244, // 'ô'
+ 245, // 'õ'
+ 246, // 'ö'
+ 247, // '÷'
+ 248, // 'ø'
+ 249, // 'ù'
+ 250, // 'ú'
+ 251, // 'û'
+ 252, // 'ü'
+ 253, // 'ý'
+ 254, // 'þ'
+ 255, // 'ÿ'
+}
+
+var commonInputsInv = []byte{
+ 't',
+ 'e',
+ '/',
+ 'o',
+ 'a',
+ 's',
+ 'r',
+ 'i',
+ 'p',
+ 'c',
+ 'n',
+ 'w',
+ '.',
+ 'h',
+ 'l',
+ 'm',
+ '-',
+ 'd',
+ 'u',
+ '0',
+ '1',
+ '2',
+ 'g',
+ '=',
+ ':',
+ 'b',
+ 'f',
+ '3',
+ 'y',
+ '5',
+ '&',
+ '_',
+ '4',
+ 'v',
+ '9',
+ '6',
+ '7',
+ '8',
+ 'k',
+ '%',
+ '?',
+ 'x',
+ 'C',
+ 'D',
+ 'A',
+ 'S',
+ 'F',
+ 'I',
+ 'B',
+ 'E',
+ 'j',
+ 'P',
+ 'T',
+ 'z',
+ 'R',
+ 'N',
+ 'M',
+ '+',
+ 'L',
+ 'O',
+ 'q',
+ 'H',
+ 'G',
+ 'W',
+ 'U',
+ 'V',
+ ',',
+ 'Y',
+ 'K',
+ 'J',
+ 'Z',
+ 'X',
+ 'Q',
+ ';',
+ ')',
+ '(',
+ '~',
+ '[',
+ ']',
+ '$',
+ '!',
+ '\'',
+ '*',
+ '@',
+ '\x00',
+ '\x01',
+ '\x02',
+ '\x03',
+ '\x04',
+ '\x05',
+ '\x06',
+ '\x07',
+ '\x08',
+ '\t',
+ '\n',
+ '\x0b',
+ '\x0c',
+ '\r',
+ '\x0e',
+ '\x0f',
+ '\x10',
+ '\x11',
+ '\x12',
+ '\x13',
+ '\x14',
+ '\x15',
+ '\x16',
+ '\x17',
+ '\x18',
+ '\x19',
+ '\x1a',
+ '\x1b',
+ '\x1c',
+ '\x1d',
+ '\x1e',
+ '\x1f',
+ ' ',
+ '"',
+ '#',
+ '<',
+ '>',
+ '\\',
+ '^',
+ '`',
+ '{',
+ '|',
+ '}',
+ '\x7f',
+ '\x80',
+ '\x81',
+ '\x82',
+ '\x83',
+ '\x84',
+ '\x85',
+ '\x86',
+ '\x87',
+ '\x88',
+ '\x89',
+ '\x8a',
+ '\x8b',
+ '\x8c',
+ '\x8d',
+ '\x8e',
+ '\x8f',
+ '\x90',
+ '\x91',
+ '\x92',
+ '\x93',
+ '\x94',
+ '\x95',
+ '\x96',
+ '\x97',
+ '\x98',
+ '\x99',
+ '\x9a',
+ '\x9b',
+ '\x9c',
+ '\x9d',
+ '\x9e',
+ '\x9f',
+ '\xa0',
+ '\xa1',
+ '\xa2',
+ '\xa3',
+ '\xa4',
+ '\xa5',
+ '\xa6',
+ '\xa7',
+ '\xa8',
+ '\xa9',
+ '\xaa',
+ '\xab',
+ '\xac',
+ '\xad',
+ '\xae',
+ '\xaf',
+ '\xb0',
+ '\xb1',
+ '\xb2',
+ '\xb3',
+ '\xb4',
+ '\xb5',
+ '\xb6',
+ '\xb7',
+ '\xb8',
+ '\xb9',
+ '\xba',
+ '\xbb',
+ '\xbc',
+ '\xbd',
+ '\xbe',
+ '\xbf',
+ '\xc0',
+ '\xc1',
+ '\xc2',
+ '\xc3',
+ '\xc4',
+ '\xc5',
+ '\xc6',
+ '\xc7',
+ '\xc8',
+ '\xc9',
+ '\xca',
+ '\xcb',
+ '\xcc',
+ '\xcd',
+ '\xce',
+ '\xcf',
+ '\xd0',
+ '\xd1',
+ '\xd2',
+ '\xd3',
+ '\xd4',
+ '\xd5',
+ '\xd6',
+ '\xd7',
+ '\xd8',
+ '\xd9',
+ '\xda',
+ '\xdb',
+ '\xdc',
+ '\xdd',
+ '\xde',
+ '\xdf',
+ '\xe0',
+ '\xe1',
+ '\xe2',
+ '\xe3',
+ '\xe4',
+ '\xe5',
+ '\xe6',
+ '\xe7',
+ '\xe8',
+ '\xe9',
+ '\xea',
+ '\xeb',
+ '\xec',
+ '\xed',
+ '\xee',
+ '\xef',
+ '\xf0',
+ '\xf1',
+ '\xf2',
+ '\xf3',
+ '\xf4',
+ '\xf5',
+ '\xf6',
+ '\xf7',
+ '\xf8',
+ '\xf9',
+ '\xfa',
+ '\xfb',
+ '\xfc',
+ '\xfd',
+ '\xfe',
+ '\xff',
+}
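+
+// Editor's note (illustrative sketch, not part of the original file): the
+// mapping above round-trips for frequent bytes and falls back to 0 for rare
+// ones, signalling that the raw byte must be written separately:
+//
+//	encodeCommon('t') // == 1: commonInputs['t'] == 0, and 1 <= maxCommon (63)
+//	decodeCommon(1)   // == 't': commonInputsInv[0]
+//	encodeCommon('{') // == 0: commonInputs['{'] == 124, which exceeds maxCommon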
diff --git a/vendor/github.com/blevesearch/vellum/decoder_v1.go b/vendor/github.com/blevesearch/vellum/decoder_v1.go
new file mode 100644
index 0000000..d56e61d
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/decoder_v1.go
@@ -0,0 +1,314 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "strconv"
+)
+
+func init() {
+ registerDecoder(versionV1, func(data []byte) decoder {
+ return newDecoderV1(data)
+ })
+}
+
+type decoderV1 struct {
+ data []byte
+}
+
+func newDecoderV1(data []byte) *decoderV1 {
+ return &decoderV1{
+ data: data,
+ }
+}
+
+func (d *decoderV1) getRoot() int {
+ if len(d.data) < footerSizeV1 {
+ return noneAddr
+ }
+ footer := d.data[len(d.data)-footerSizeV1:]
+ root := binary.LittleEndian.Uint64(footer[8:])
+ return int(root)
+}
+
+func (d *decoderV1) getLen() int {
+ if len(d.data) < footerSizeV1 {
+ return 0
+ }
+ footer := d.data[len(d.data)-footerSizeV1:]
+ dlen := binary.LittleEndian.Uint64(footer)
+ return int(dlen)
+}
+
+func (d *decoderV1) stateAt(addr int, prealloc fstState) (fstState, error) {
+ state, ok := prealloc.(*fstStateV1)
+ if ok && state != nil {
+ *state = fstStateV1{} // clear the struct
+ } else {
+ state = &fstStateV1{}
+ }
+ err := state.at(d.data, addr)
+ if err != nil {
+ return nil, err
+ }
+ return state, nil
+}
+
+type fstStateV1 struct {
+ data []byte
+ top int
+ bottom int
+ numTrans int
+
+ // single trans only
+ singleTransChar byte
+ singleTransNext bool
+ singleTransAddr uint64
+ singleTransOut uint64
+
+ // shared
+ transSize int
+ outSize int
+
+ // multiple trans only
+ final bool
+ transTop int
+ transBottom int
+ destTop int
+ destBottom int
+ outTop int
+ outBottom int
+ outFinal int
+}
+
+func (f *fstStateV1) isEncodedSingle() bool {
+ return f.data[f.top]>>7 > 0
+}
+
+func (f *fstStateV1) at(data []byte, addr int) error {
+ f.data = data
+ if addr == emptyAddr {
+ return f.atZero()
+ } else if addr == noneAddr {
+ return f.atNone()
+ }
+ if addr > len(data) || addr < 16 {
+ return fmt.Errorf("invalid address %d/%d", addr, len(data))
+ }
+ f.top = addr
+ f.bottom = addr
+ if f.isEncodedSingle() {
+ return f.atSingle(data, addr)
+ }
+ return f.atMulti(data, addr)
+}
+
+func (f *fstStateV1) atZero() error {
+ f.top = 0
+ f.bottom = 1
+ f.numTrans = 0
+ f.final = true
+ f.outFinal = 0
+ return nil
+}
+
+func (f *fstStateV1) atNone() error {
+ f.top = 0
+ f.bottom = 1
+ f.numTrans = 0
+ f.final = false
+ f.outFinal = 0
+ return nil
+}
+
+func (f *fstStateV1) atSingle(data []byte, addr int) error {
+ // handle single transition case
+ f.numTrans = 1
+ f.singleTransNext = data[f.top]&transitionNext > 0
+ f.singleTransChar = data[f.top] & maxCommon
+ if f.singleTransChar == 0 {
+ f.bottom-- // extra byte for uncommon
+ f.singleTransChar = data[f.bottom]
+ } else {
+ f.singleTransChar = decodeCommon(f.singleTransChar)
+ }
+ if f.singleTransNext {
+ // now we know the bottom, can compute next addr
+ f.singleTransAddr = uint64(f.bottom - 1)
+ f.singleTransOut = 0
+ } else {
+ f.bottom-- // extra byte with pack sizes
+ f.transSize, f.outSize = decodePackSize(data[f.bottom])
+ f.bottom -= f.transSize // exactly one trans
+ f.singleTransAddr = readPackedUint(data[f.bottom : f.bottom+f.transSize])
+ if f.outSize > 0 {
+ f.bottom -= f.outSize // exactly one out (could be length 0 though)
+ f.singleTransOut = readPackedUint(data[f.bottom : f.bottom+f.outSize])
+ } else {
+ f.singleTransOut = 0
+ }
+ // need to wait till we know bottom
+ if f.singleTransAddr != 0 {
+ f.singleTransAddr = uint64(f.bottom) - f.singleTransAddr
+ }
+ }
+ return nil
+}
+
+func (f *fstStateV1) atMulti(data []byte, addr int) error {
+ // handle multiple transitions case
+ f.final = data[f.top]&stateFinal > 0
+ f.numTrans = int(data[f.top] & maxNumTrans)
+ if f.numTrans == 0 {
+ f.bottom-- // extra byte for number of trans
+ f.numTrans = int(data[f.bottom])
+ if f.numTrans == 1 {
+ // can't really be 1 here; this is a special case that means 256
+ f.numTrans = 256
+ }
+ }
+ f.bottom-- // extra byte with pack sizes
+ f.transSize, f.outSize = decodePackSize(data[f.bottom])
+
+ f.transTop = f.bottom
+ f.bottom -= f.numTrans // one byte for each transition
+ f.transBottom = f.bottom
+
+ f.destTop = f.bottom
+ f.bottom -= f.numTrans * f.transSize
+ f.destBottom = f.bottom
+
+ if f.outSize > 0 {
+ f.outTop = f.bottom
+ f.bottom -= f.numTrans * f.outSize
+ f.outBottom = f.bottom
+ if f.final {
+ f.bottom -= f.outSize
+ f.outFinal = f.bottom
+ }
+ }
+ return nil
+}
+
+func (f *fstStateV1) Address() int {
+ return f.top
+}
+
+func (f *fstStateV1) Final() bool {
+ return f.final
+}
+
+func (f *fstStateV1) FinalOutput() uint64 {
+ if f.final && f.outSize > 0 {
+ return readPackedUint(f.data[f.outFinal : f.outFinal+f.outSize])
+ }
+ return 0
+}
+
+func (f *fstStateV1) NumTransitions() int {
+ return f.numTrans
+}
+
+func (f *fstStateV1) TransitionAt(i int) byte {
+ if f.isEncodedSingle() {
+ return f.singleTransChar
+ }
+ transitionKeys := f.data[f.transBottom:f.transTop]
+ return transitionKeys[f.numTrans-i-1]
+}
+
+func (f *fstStateV1) TransitionFor(b byte) (int, int, uint64) {
+ if f.isEncodedSingle() {
+ if f.singleTransChar == b {
+ return 0, int(f.singleTransAddr), f.singleTransOut
+ }
+ return -1, noneAddr, 0
+ }
+ transitionKeys := f.data[f.transBottom:f.transTop]
+ pos := bytes.IndexByte(transitionKeys, b)
+ if pos < 0 {
+ return -1, noneAddr, 0
+ }
+ transDests := f.data[f.destBottom:f.destTop]
+ dest := int(readPackedUint(transDests[pos*f.transSize : pos*f.transSize+f.transSize]))
+ if dest > 0 {
+ // convert delta
+ dest = f.bottom - dest
+ }
+ transVals := f.data[f.outBottom:f.outTop]
+ var out uint64
+ if f.outSize > 0 {
+ out = readPackedUint(transVals[pos*f.outSize : pos*f.outSize+f.outSize])
+ }
+ return f.numTrans - pos - 1, dest, out
+}
+
+func (f *fstStateV1) String() string {
+ rv := ""
+ rv += fmt.Sprintf("State: %d (%#x)", f.top, f.top)
+ if f.final {
+ rv += " final"
+ fout := f.FinalOutput()
+ if fout != 0 {
+ rv += fmt.Sprintf(" (%d)", fout)
+ }
+ }
+ rv += "\n"
+ rv += fmt.Sprintf("Data: % x\n", f.data[f.bottom:f.top+1])
+
+ for i := 0; i < f.numTrans; i++ {
+ transChar := f.TransitionAt(i)
+ _, transDest, transOut := f.TransitionFor(transChar)
+ rv += fmt.Sprintf(" - %d (%#x) '%s' ---> %d (%#x) with output: %d", transChar, transChar, string(transChar), transDest, transDest, transOut)
+ rv += "\n"
+ }
+ if f.numTrans == 0 {
+ rv += "\n"
+ }
+ return rv
+}
+
+func (f *fstStateV1) DotString(num int) string {
+ rv := ""
+ label := fmt.Sprintf("%d", num)
+ final := ""
+ if f.final {
+ final = ",peripheries=2"
+ }
+ rv += fmt.Sprintf(" %d [label=\"%s\"%s];\n", f.top, label, final)
+
+ for i := 0; i < f.numTrans; i++ {
+ transChar := f.TransitionAt(i)
+ _, transDest, transOut := f.TransitionFor(transChar)
+ out := ""
+ if transOut != 0 {
+ out = fmt.Sprintf("/%d", transOut)
+ }
+ rv += fmt.Sprintf(" %d -> %d [label=\"%s%s\"];\n", f.top, transDest, escapeInput(transChar), out)
+ }
+
+ return rv
+}
+
+func escapeInput(b byte) string {
+ x := strconv.AppendQuoteRune(nil, rune(b))
+ return string(x[1:(len(x) - 1)])
+}
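+
+// exampleFooterLayout is an illustrative sketch (editor's addition, not part
+// of the original file) of the 16-byte v1 footer read by getLen and getRoot
+// above: two little-endian uint64s, the entry count followed by the root
+// state address, exactly as written by encoderV1.finish.
+func exampleFooterLayout() {
+ footer := make([]byte, footerSizeV1)
+ binary.LittleEndian.PutUint64(footer[0:8], 3)   // entry count -> getLen()
+ binary.LittleEndian.PutUint64(footer[8:16], 42) // root address -> getRoot()
+ fmt.Println(binary.LittleEndian.Uint64(footer[0:8]))  // 3
+ fmt.Println(binary.LittleEndian.Uint64(footer[8:16])) // 42
+}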
diff --git a/vendor/github.com/blevesearch/vellum/encoder_v1.go b/vendor/github.com/blevesearch/vellum/encoder_v1.go
new file mode 100644
index 0000000..0651fc8
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/encoder_v1.go
@@ -0,0 +1,227 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+import (
+ "encoding/binary"
+ "fmt"
+ "io"
+)
+
+const versionV1 = 1
+const oneTransition = 1 << 7
+const transitionNext = 1 << 6
+const stateFinal = 1 << 6
+const footerSizeV1 = 16
+
+func init() {
+ registerEncoder(versionV1, func(w io.Writer) encoder {
+ return newEncoderV1(w)
+ })
+}
+
+type encoderV1 struct {
+ bw *writer
+}
+
+func newEncoderV1(w io.Writer) *encoderV1 {
+ return &encoderV1{
+ bw: newWriter(w),
+ }
+}
+
+func (e *encoderV1) reset(w io.Writer) {
+ e.bw.Reset(w)
+}
+
+func (e *encoderV1) start() error {
+ header := make([]byte, headerSize)
+ binary.LittleEndian.PutUint64(header, versionV1)
+ binary.LittleEndian.PutUint64(header[8:], uint64(0)) // type
+ n, err := e.bw.Write(header)
+ if err != nil {
+ return err
+ }
+ if n != headerSize {
+ return fmt.Errorf("short write of header %d/%d", n, headerSize)
+ }
+ return nil
+}
+
+func (e *encoderV1) encodeState(s *builderNode, lastAddr int) (int, error) {
+ if len(s.trans) == 0 && s.final && s.finalOutput == 0 {
+ return 0, nil
+ } else if len(s.trans) != 1 || s.final {
+ return e.encodeStateMany(s)
+ } else if !s.final && s.trans[0].out == 0 && s.trans[0].addr == lastAddr {
+ return e.encodeStateOneFinish(s, transitionNext)
+ }
+ return e.encodeStateOne(s)
+}
+
+func (e *encoderV1) encodeStateOne(s *builderNode) (int, error) {
+ start := uint64(e.bw.counter)
+ outPackSize := 0
+ if s.trans[0].out != 0 {
+ outPackSize = packedSize(s.trans[0].out)
+ err := e.bw.WritePackedUintIn(s.trans[0].out, outPackSize)
+ if err != nil {
+ return 0, err
+ }
+ }
+ delta := deltaAddr(start, uint64(s.trans[0].addr))
+ transPackSize := packedSize(delta)
+ err := e.bw.WritePackedUintIn(delta, transPackSize)
+ if err != nil {
+ return 0, err
+ }
+
+ packSize := encodePackSize(transPackSize, outPackSize)
+ err = e.bw.WriteByte(packSize)
+ if err != nil {
+ return 0, err
+ }
+
+ return e.encodeStateOneFinish(s, 0)
+}
+
+func (e *encoderV1) encodeStateOneFinish(s *builderNode, next byte) (int, error) {
+ enc := encodeCommon(s.trans[0].in)
+
+ // not a common input
+ if enc == 0 {
+ err := e.bw.WriteByte(s.trans[0].in)
+ if err != nil {
+ return 0, err
+ }
+ }
+ err := e.bw.WriteByte(oneTransition | next | enc)
+ if err != nil {
+ return 0, err
+ }
+
+ return e.bw.counter - 1, nil
+}
+
+func (e *encoderV1) encodeStateMany(s *builderNode) (int, error) {
+ start := uint64(e.bw.counter)
+ transPackSize := 0
+ outPackSize := packedSize(s.finalOutput)
+ anyOutputs := s.finalOutput != 0
+ for i := range s.trans {
+ delta := deltaAddr(start, uint64(s.trans[i].addr))
+ tsize := packedSize(delta)
+ if tsize > transPackSize {
+ transPackSize = tsize
+ }
+ osize := packedSize(s.trans[i].out)
+ if osize > outPackSize {
+ outPackSize = osize
+ }
+ anyOutputs = anyOutputs || s.trans[i].out != 0
+ }
+ if !anyOutputs {
+ outPackSize = 0
+ }
+
+ if anyOutputs {
+ // output final value
+ if s.final {
+ err := e.bw.WritePackedUintIn(s.finalOutput, outPackSize)
+ if err != nil {
+ return 0, err
+ }
+ }
+ // output transition values (in reverse)
+ for j := len(s.trans) - 1; j >= 0; j-- {
+ err := e.bw.WritePackedUintIn(s.trans[j].out, outPackSize)
+ if err != nil {
+ return 0, err
+ }
+ }
+ }
+
+ // output transition dests (in reverse)
+ for j := len(s.trans) - 1; j >= 0; j-- {
+ delta := deltaAddr(start, uint64(s.trans[j].addr))
+ err := e.bw.WritePackedUintIn(delta, transPackSize)
+ if err != nil {
+ return 0, err
+ }
+ }
+
+ // output transition keys (in reverse)
+ for j := len(s.trans) - 1; j >= 0; j-- {
+ err := e.bw.WriteByte(s.trans[j].in)
+ if err != nil {
+ return 0, err
+ }
+ }
+
+ packSize := encodePackSize(transPackSize, outPackSize)
+ err := e.bw.WriteByte(packSize)
+ if err != nil {
+ return 0, err
+ }
+
+ numTrans := encodeNumTrans(len(s.trans))
+
+ // if the number of transitions won't fit in the edge header byte,
+ // write it out separately
+ if numTrans == 0 {
+ if len(s.trans) == 256 {
+ // this wouldn't fit in single byte, but reuse value 1
+ // which would have always fit in the edge header instead
+ err = e.bw.WriteByte(1)
+ if err != nil {
+ return 0, err
+ }
+ } else {
+ err = e.bw.WriteByte(byte(len(s.trans)))
+ if err != nil {
+ return 0, err
+ }
+ }
+ }
+
+ // finally write edge header
+ if s.final {
+ numTrans |= stateFinal
+ }
+ err = e.bw.WriteByte(numTrans)
+ if err != nil {
+ return 0, err
+ }
+
+ return e.bw.counter - 1, nil
+}
+
+func (e *encoderV1) finish(count, rootAddr int) error {
+ footer := make([]byte, footerSizeV1)
+ binary.LittleEndian.PutUint64(footer, uint64(count)) // entry count
+ binary.LittleEndian.PutUint64(footer[8:], uint64(rootAddr)) // root addr
+ n, err := e.bw.Write(footer)
+ if err != nil {
+ return err
+ }
+ if n != footerSizeV1 {
+ return fmt.Errorf("short write of footer %d/%d", n, footerSizeV1)
+ }
+ err = e.bw.Flush()
+ if err != nil {
+ return err
+ }
+ return nil
+}
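+
+// exampleEdgeHeader is an illustrative sketch (editor's addition, not part
+// of the original file) of the one-transition edge header byte written by
+// encodeStateOneFinish and read back by fstStateV1: bit 7 (oneTransition)
+// selects the single-transition encoding, bit 6 (transitionNext) means the
+// destination is the state encoded immediately before this one, and the low
+// six bits carry the common-input code (0 means the raw input byte was
+// written separately).
+func exampleEdgeHeader() {
+ header := byte(oneTransition) | transitionNext | encodeCommon('t')
+ fmt.Println(header>>7 > 0)                         // true: single transition
+ fmt.Println(header&transitionNext > 0)             // true: implicit next addr
+ fmt.Printf("%c\n", decodeCommon(header&maxCommon)) // t
+}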
diff --git a/vendor/github.com/blevesearch/vellum/encoding.go b/vendor/github.com/blevesearch/vellum/encoding.go
new file mode 100644
index 0000000..988d486
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/encoding.go
@@ -0,0 +1,87 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+import (
+ "encoding/binary"
+ "fmt"
+ "io"
+)
+
+const headerSize = 16
+
+type encoderConstructor func(w io.Writer) encoder
+type decoderConstructor func([]byte) decoder
+
+var encoders = map[int]encoderConstructor{}
+var decoders = map[int]decoderConstructor{}
+
+type encoder interface {
+ start() error
+ encodeState(s *builderNode, addr int) (int, error)
+ finish(count, rootAddr int) error
+ reset(w io.Writer)
+}
+
+func loadEncoder(ver int, w io.Writer) (encoder, error) {
+ if cons, ok := encoders[ver]; ok {
+ return cons(w), nil
+ }
+ return nil, fmt.Errorf("no encoder for version %d registered", ver)
+}
+
+func registerEncoder(ver int, cons encoderConstructor) {
+ encoders[ver] = cons
+}
+
+type decoder interface {
+ getRoot() int
+ getLen() int
+ stateAt(addr int, prealloc fstState) (fstState, error)
+}
+
+func loadDecoder(ver int, data []byte) (decoder, error) {
+ if cons, ok := decoders[ver]; ok {
+ return cons(data), nil
+ }
+ return nil, fmt.Errorf("no decoder for version %d registered", ver)
+}
+
+func registerDecoder(ver int, cons decoderConstructor) {
+ decoders[ver] = cons
+}
+
+func decodeHeader(header []byte) (ver int, typ int, err error) {
+ if len(header) < headerSize {
+ err = fmt.Errorf("invalid header < 16 bytes")
+ return
+ }
+ ver = int(binary.LittleEndian.Uint64(header[0:8]))
+ typ = int(binary.LittleEndian.Uint64(header[8:16]))
+ return
+}
+
+// fstState represents a state inside the FST runtime.
+// It is the main contract between the FST impl and the decoder
+// The FST impl should work only with this interface, while only the decoder
+// impl knows the physical representation.
+type fstState interface {
+ Address() int
+ Final() bool
+ FinalOutput() uint64
+ NumTransitions() int
+ TransitionFor(b byte) (int, int, uint64)
+ TransitionAt(i int) byte
+}
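+
+// exampleHeader is an illustrative sketch (editor's addition, not part of
+// the original file) of the 16-byte header consumed by decodeHeader: two
+// little-endian uint64s, the encoding version followed by the FST type.
+func exampleHeader() {
+ header := make([]byte, headerSize)
+ binary.LittleEndian.PutUint64(header[0:8], versionV1) // version 1
+ binary.LittleEndian.PutUint64(header[8:16], 0)        // type
+ ver, typ, err := decodeHeader(header)
+ fmt.Println(ver, typ, err) // 1 0 <nil>
+}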
diff --git a/vendor/github.com/blevesearch/vellum/fst.go b/vendor/github.com/blevesearch/vellum/fst.go
new file mode 100644
index 0000000..3140042
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/fst.go
@@ -0,0 +1,300 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+import (
+ "io"
+
+ "github.com/bits-and-blooms/bitset"
+)
+
+// FST is an in-memory representation of a finite state transducer,
+// capable of returning the uint64 value associated with
+// each []byte key stored, as well as enumerating all of the keys
+// in order.
+type FST struct {
+ f io.Closer
+ ver int
+ len int
+ typ int
+ data []byte
+ decoder decoder
+}
+
+func new(data []byte, f io.Closer) (rv *FST, err error) {
+ rv = &FST{
+ data: data,
+ f: f,
+ }
+
+ rv.ver, rv.typ, err = decodeHeader(data)
+ if err != nil {
+ return nil, err
+ }
+
+ rv.decoder, err = loadDecoder(rv.ver, rv.data)
+ if err != nil {
+ return nil, err
+ }
+
+ rv.len = rv.decoder.getLen()
+
+ return rv, nil
+}
+
+// Contains returns true if this FST contains the specified key.
+func (f *FST) Contains(val []byte) (bool, error) {
+ _, exists, err := f.Get(val)
+ return exists, err
+}
+
+// Get returns the value associated with the key. NOTE: a value of zero
+// does not imply the key does not exist, you must consult the second
+// return value as well.
+func (f *FST) Get(input []byte) (uint64, bool, error) {
+ return f.get(input, nil)
+}
+
+func (f *FST) get(input []byte, prealloc fstState) (uint64, bool, error) {
+ var total uint64
+ curr := f.decoder.getRoot()
+ state, err := f.decoder.stateAt(curr, prealloc)
+ if err != nil {
+ return 0, false, err
+ }
+ for _, c := range input {
+ _, curr, output := state.TransitionFor(c)
+ if curr == noneAddr {
+ return 0, false, nil
+ }
+
+ state, err = f.decoder.stateAt(curr, state)
+ if err != nil {
+ return 0, false, err
+ }
+
+ total += output
+ }
+
+ if state.Final() {
+ total += state.FinalOutput()
+ return total, true, nil
+ }
+ return 0, false, nil
+}
+
+// Version returns the encoding version used by this FST instance.
+func (f *FST) Version() int {
+ return f.ver
+}
+
+// Len returns the number of entries in this FST instance.
+func (f *FST) Len() int {
+ return f.len
+}
+
+// Type returns the type of this FST instance.
+func (f *FST) Type() int {
+ return f.typ
+}
+
+// Close will unmap any mmap'd data (if managed by vellum) and it will close
+// the backing file (if managed by vellum). You MUST call Close() for any
+// FST instance that is created.
+func (f *FST) Close() error {
+ if f.f != nil {
+ err := f.f.Close()
+ if err != nil {
+ return err
+ }
+ }
+ f.data = nil
+ f.decoder = nil
+ return nil
+}
+
+// Start returns the start state of this Automaton
+func (f *FST) Start() int {
+ return f.decoder.getRoot()
+}
+
+// IsMatch returns if this state is a matching state in this Automaton
+func (f *FST) IsMatch(addr int) bool {
+ match, _ := f.IsMatchWithVal(addr)
+ return match
+}
+
+// CanMatch returns if this state can ever transition to a matching state
+// in this Automaton
+func (f *FST) CanMatch(addr int) bool {
+ if addr == noneAddr {
+ return false
+ }
+ return true
+}
+
+// WillAlwaysMatch returns if from this state the Automaton will always
+// be in a matching state
+func (f *FST) WillAlwaysMatch(int) bool {
+ return false
+}
+
+// Accept returns the next state for this Automaton on input of byte b
+func (f *FST) Accept(addr int, b byte) int {
+ next, _ := f.AcceptWithVal(addr, b)
+ return next
+}
+
+// IsMatchWithVal returns if this state is a matching state in this Automaton
+// and also returns the final output value for this state
+func (f *FST) IsMatchWithVal(addr int) (bool, uint64) {
+ s, err := f.decoder.stateAt(addr, nil)
+ if err != nil {
+ return false, 0
+ }
+ return s.Final(), s.FinalOutput()
+}
+
+// AcceptWithVal returns the next state for this Automaton on input of byte b
+// and also returns the output value for the transition
+func (f *FST) AcceptWithVal(addr int, b byte) (int, uint64) {
+ s, err := f.decoder.stateAt(addr, nil)
+ if err != nil {
+ return noneAddr, 0
+ }
+ _, next, output := s.TransitionFor(b)
+ return next, output
+}
+
+// Iterator returns a new Iterator capable of enumerating the key/value pairs
+// between the provided startKeyInclusive and endKeyExclusive.
+func (f *FST) Iterator(startKeyInclusive, endKeyExclusive []byte) (*FSTIterator, error) {
+ return newIterator(f, startKeyInclusive, endKeyExclusive, nil)
+}
+
+// Search returns a new Iterator capable of enumerating the key/value pairs
+// between the provided startKeyInclusive and endKeyExclusive that also
+// satisfy the provided automaton.
+func (f *FST) Search(aut Automaton, startKeyInclusive, endKeyExclusive []byte) (*FSTIterator, error) {
+ return newIterator(f, startKeyInclusive, endKeyExclusive, aut)
+}
+
+// Debug is only intended for debug purposes. It simply asks the underlying
+// decoder to visit each state, passing each one to the provided callback.
+func (f *FST) Debug(callback func(int, interface{}) error) error {
+
+ addr := f.decoder.getRoot()
+ set := bitset.New(uint(addr))
+ stack := addrStack{addr}
+
+ stateNumber := 0
+ stack, addr = stack.Pop()
+ for addr != noneAddr {
+ if set.Test(uint(addr)) {
+ stack, addr = stack.Pop()
+ continue
+ }
+ set.Set(uint(addr))
+ state, err := f.decoder.stateAt(addr, nil)
+ if err != nil {
+ return err
+ }
+ err = callback(stateNumber, state)
+ if err != nil {
+ return err
+ }
+ for i := 0; i < state.NumTransitions(); i++ {
+ tchar := state.TransitionAt(i)
+ _, dest, _ := state.TransitionFor(tchar)
+ stack = append(stack, dest)
+ }
+ stateNumber++
+ stack, addr = stack.Pop()
+ }
+
+ return nil
+}
+
+type addrStack []int
+
+func (a addrStack) Pop() (addrStack, int) {
+ l := len(a)
+ if l < 1 {
+ return a, noneAddr
+ }
+ return a[:l-1], a[l-1]
+}
+
+// Reader() returns a Reader instance that a single thread may use to
+// retrieve data from the FST
+func (f *FST) Reader() (*Reader, error) {
+ return &Reader{f: f}, nil
+}
+
+func (f *FST) GetMinKey() ([]byte, error) {
+ var rv []byte
+
+ curr := f.decoder.getRoot()
+ state, err := f.decoder.stateAt(curr, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ for !state.Final() {
+ nextTrans := state.TransitionAt(0)
+ _, curr, _ = state.TransitionFor(nextTrans)
+ state, err = f.decoder.stateAt(curr, state)
+ if err != nil {
+ return nil, err
+ }
+
+ rv = append(rv, nextTrans)
+ }
+
+ return rv, nil
+}
+
+func (f *FST) GetMaxKey() ([]byte, error) {
+ var rv []byte
+
+ curr := f.decoder.getRoot()
+ state, err := f.decoder.stateAt(curr, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ for state.NumTransitions() > 0 {
+ nextTrans := state.TransitionAt(state.NumTransitions() - 1)
+ _, curr, _ = state.TransitionFor(nextTrans)
+ state, err = f.decoder.stateAt(curr, state)
+ if err != nil {
+ return nil, err
+ }
+
+ rv = append(rv, nextTrans)
+ }
+
+ return rv, nil
+}
+
+// A Reader is meant for single-threaded use
+type Reader struct {
+ f *FST
+ prealloc fstStateV1
+}
+
+func (r *Reader) Get(input []byte) (uint64, bool, error) {
+ return r.f.get(input, &r.prealloc)
+}
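+
+// A minimal sketch of single-threaded reuse via Reader; the keys slice and
+// process callback are assumptions for illustration:
+//
+//	r, _ := fst.Reader()
+//	for _, k := range keys {
+//		if v, ok, err := r.Get(k); err == nil && ok {
+//			process(k, v)
+//		}
+//	}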
diff --git a/vendor/github.com/blevesearch/vellum/fst_iterator.go b/vendor/github.com/blevesearch/vellum/fst_iterator.go
new file mode 100644
index 0000000..2c6b0d6
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/fst_iterator.go
@@ -0,0 +1,303 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+import (
+ "bytes"
+)
+
+// Iterator represents a means of visiting key/value pairs in order.
+type Iterator interface {
+
+ // Current() returns the key/value pair currently pointed to.
+ // The []byte of the key is ONLY guaranteed to be valid until
+ // another call to Next/Seek/Close. If you need it beyond that
+ // point you MUST make a copy.
+ Current() ([]byte, uint64)
+
+ // Next() advances the iterator to the next key/value pair.
+ // If no more key/value pairs exist, ErrIteratorDone is returned.
+ Next() error
+
+ // Seek() advances the iterator to the specified key, or to the next key
+ // if it does not exist.
+ // If no keys exist after that point, ErrIteratorDone is returned.
+ Seek(key []byte) error
+
+ // Reset resets the Iterator's internal state to allow for iterator
+ // reuse (e.g. pooling).
+ Reset(f *FST, startKeyInclusive, endKeyExclusive []byte, aut Automaton) error
+
+ // Close() frees any resources held by this iterator.
+ Close() error
+}
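+
+// A typical enumeration loop over any Iterator implementation looks like
+// this sketch; ErrIteratorDone signals normal exhaustion rather than a
+// failure:
+//
+//	itr, err := fst.Iterator(nil, nil)
+//	for err == nil {
+//		k, v := itr.Current()
+//		// copy k if it must outlive the next call to Next/Seek/Close
+//		err = itr.Next()
+//	}
+//	if err != ErrIteratorDone {
+//		// handle the real error
+//	}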
+
+// FSTIterator is a structure for iterating key/value pairs in this FST in
+// lexicographic order. Iterators should be constructed with the FSTIterator
+// method on the parent FST structure.
+type FSTIterator struct {
+ f *FST
+ aut Automaton
+
+ startKeyInclusive []byte
+ endKeyExclusive []byte
+
+ statesStack []fstState
+ keysStack []byte
+ keysPosStack []int
+ valsStack []uint64
+ autStatesStack []int
+
+ nextStart []byte
+}
+
+func newIterator(f *FST, startKeyInclusive, endKeyExclusive []byte,
+ aut Automaton) (*FSTIterator, error) {
+
+ rv := &FSTIterator{}
+ err := rv.Reset(f, startKeyInclusive, endKeyExclusive, aut)
+ if err != nil {
+ return nil, err
+ }
+ return rv, nil
+}
+
+// Reset resets the Iterator's internal state to allow for iterator
+// reuse (e.g. pooling).
+func (i *FSTIterator) Reset(f *FST,
+ startKeyInclusive, endKeyExclusive []byte, aut Automaton) error {
+ if aut == nil {
+ aut = alwaysMatchAutomaton
+ }
+
+ i.f = f
+ i.startKeyInclusive = startKeyInclusive
+ i.endKeyExclusive = endKeyExclusive
+ i.aut = aut
+
+ return i.pointTo(startKeyInclusive)
+}
+
+// pointTo attempts to point us to the specified location
+func (i *FSTIterator) pointTo(key []byte) error {
+ // tried to seek before start
+ if bytes.Compare(key, i.startKeyInclusive) < 0 {
+ key = i.startKeyInclusive
+ }
+
+ // tried to seek past end
+ if i.endKeyExclusive != nil &&
+ bytes.Compare(key, i.endKeyExclusive) > 0 {
+ key = i.endKeyExclusive
+ }
+
+ // reset any state, pointTo always starts over
+ i.statesStack = i.statesStack[:0]
+ i.keysStack = i.keysStack[:0]
+ i.keysPosStack = i.keysPosStack[:0]
+ i.valsStack = i.valsStack[:0]
+ i.autStatesStack = i.autStatesStack[:0]
+
+ root, err := i.f.decoder.stateAt(i.f.decoder.getRoot(), nil)
+ if err != nil {
+ return err
+ }
+
+ autStart := i.aut.Start()
+
+ maxQ := -1
+ // root is always part of the path
+ i.statesStack = append(i.statesStack, root)
+ i.autStatesStack = append(i.autStatesStack, autStart)
+ for j := 0; j < len(key); j++ {
+ keyJ := key[j]
+ curr := i.statesStack[len(i.statesStack)-1]
+ autCurr := i.autStatesStack[len(i.autStatesStack)-1]
+
+ pos, nextAddr, nextVal := curr.TransitionFor(keyJ)
+ if nextAddr == noneAddr {
+ // needed transition doesn't exist
+ // find last trans before the one we needed
+ for q := curr.NumTransitions() - 1; q >= 0; q-- {
+ if curr.TransitionAt(q) < keyJ {
+ maxQ = q
+ break
+ }
+ }
+ break
+ }
+ autNext := i.aut.Accept(autCurr, keyJ)
+
+ next, err := i.f.decoder.stateAt(nextAddr, nil)
+ if err != nil {
+ return err
+ }
+
+ i.statesStack = append(i.statesStack, next)
+ i.keysStack = append(i.keysStack, keyJ)
+ i.keysPosStack = append(i.keysPosStack, pos)
+ i.valsStack = append(i.valsStack, nextVal)
+ i.autStatesStack = append(i.autStatesStack, autNext)
+ }
+
+ if !i.statesStack[len(i.statesStack)-1].Final() ||
+ !i.aut.IsMatch(i.autStatesStack[len(i.autStatesStack)-1]) ||
+ bytes.Compare(i.keysStack, key) < 0 {
+ return i.next(maxQ)
+ }
+
+ return nil
+}
+
+// Current returns the key and value currently pointed to by the iterator.
+// If the iterator is not pointing at a valid value (because Iterator/Next/Seek
+// returned an error previously), it may return nil, 0.
+func (i *FSTIterator) Current() ([]byte, uint64) {
+ curr := i.statesStack[len(i.statesStack)-1]
+ if curr.Final() {
+ var total uint64
+ for _, v := range i.valsStack {
+ total += v
+ }
+ total += curr.FinalOutput()
+ return i.keysStack, total
+ }
+ return nil, 0
+}
+
+// Next advances this iterator to the next key/value pair. If there is none
+// or the advancement goes beyond the configured endKeyExclusive, then
+// ErrIteratorDone is returned.
+func (i *FSTIterator) Next() error {
+ return i.next(-1)
+}
+
+func (i *FSTIterator) next(lastOffset int) error {
+ // remember where we started with keysStack in this next() call
+ i.nextStart = append(i.nextStart[:0], i.keysStack...)
+
+ nextOffset := lastOffset + 1
+ allowCompare := false
+
+OUTER:
+ for {
+ curr := i.statesStack[len(i.statesStack)-1]
+ autCurr := i.autStatesStack[len(i.autStatesStack)-1]
+
+ if curr.Final() && i.aut.IsMatch(autCurr) && allowCompare {
+ // check to see if new keystack might have gone too far
+ if i.endKeyExclusive != nil &&
+ bytes.Compare(i.keysStack, i.endKeyExclusive) >= 0 {
+ return ErrIteratorDone
+ }
+
+ cmp := bytes.Compare(i.keysStack, i.nextStart)
+ if cmp > 0 {
+ // in final state greater than start key
+ return nil
+ }
+ }
+
+ numTrans := curr.NumTransitions()
+
+ INNER:
+ for nextOffset < numTrans {
+ t := curr.TransitionAt(nextOffset)
+
+ autNext := i.aut.Accept(autCurr, t)
+ if !i.aut.CanMatch(autNext) {
+ // TODO: potential optimization to skip nextOffset
+ // forwards more directly to something that the
+ // automaton likes rather than a linear scan?
+ nextOffset += 1
+ continue INNER
+ }
+
+ pos, nextAddr, v := curr.TransitionFor(t)
+
+ // the next slot in the statesStack might have an
+ // fstState instance that we can reuse
+ var nextPrealloc fstState
+ if len(i.statesStack) < cap(i.statesStack) {
+ nextPrealloc = i.statesStack[0:cap(i.statesStack)][len(i.statesStack)]
+ }
+
+ // push onto stack
+ next, err := i.f.decoder.stateAt(nextAddr, nextPrealloc)
+ if err != nil {
+ return err
+ }
+
+ i.statesStack = append(i.statesStack, next)
+ i.keysStack = append(i.keysStack, t)
+ i.keysPosStack = append(i.keysPosStack, pos)
+ i.valsStack = append(i.valsStack, v)
+ i.autStatesStack = append(i.autStatesStack, autNext)
+
+ nextOffset = 0
+ allowCompare = true
+
+ continue OUTER
+ }
+
+ // no more transitions, so need to backtrack and stack pop
+ if len(i.statesStack) <= 1 {
+ // stack len is 1 (root), can't go back further, we're done
+ break
+ }
+
+ // if the top of the stack represents a linear chain of states
+ // (i.e., a suffix of nodes linked by single transitions),
+ // then optimize by popping the suffix in one shot without
+ // going back all the way to the OUTER loop
+ var popNum int
+ for j := len(i.statesStack) - 1; j > 0; j-- {
+ if j == 1 || i.statesStack[j].NumTransitions() != 1 {
+ popNum = len(i.statesStack) - 1 - j
+ break
+ }
+ }
+ if popNum < 1 { // always pop at least 1 entry from the stacks
+ popNum = 1
+ }
+
+ nextOffset = i.keysPosStack[len(i.keysPosStack)-popNum] + 1
+ allowCompare = false
+
+ i.statesStack = i.statesStack[:len(i.statesStack)-popNum]
+ i.keysStack = i.keysStack[:len(i.keysStack)-popNum]
+ i.keysPosStack = i.keysPosStack[:len(i.keysPosStack)-popNum]
+ i.valsStack = i.valsStack[:len(i.valsStack)-popNum]
+ i.autStatesStack = i.autStatesStack[:len(i.autStatesStack)-popNum]
+ }
+
+ return ErrIteratorDone
+}
+
+// Seek advances this iterator to the specified key/value pair. If this key
+// is not in the FST, Current() will return the next largest key. If this
+// seek operation would go past the last key, or outside the configured
+// startKeyInclusive/endKeyExclusive then ErrIteratorDone is returned.
+func (i *FSTIterator) Seek(key []byte) error {
+ return i.pointTo(key)
+}
+
+// Close will free any resources held by this iterator.
+func (i *FSTIterator) Close() error {
+ // at the moment we don't do anything,
+ // but wanted this for API completeness
+ return nil
+}
diff --git a/vendor/github.com/blevesearch/vellum/merge_iterator.go b/vendor/github.com/blevesearch/vellum/merge_iterator.go
new file mode 100644
index 0000000..f00f778
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/merge_iterator.go
@@ -0,0 +1,188 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+import (
+ "bytes"
+)
+
+// MergeFunc is used to choose the new value for a key when merging a slice
+// of iterators, and the same key is observed with multiple values.
+// Values presented to the MergeFunc will be in the same order as the
+// original slice creating the MergeIterator. This allows some MergeFunc
+// implementations to prioritize one iterator over another.
+type MergeFunc func([]uint64) uint64
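+
+// Because values are presented in the order of the original iterator
+// slice, a MergeFunc can encode precedence. A minimal sketch that always
+// prefers the first (e.g. newest) iterator:
+//
+//	var mergeFirst MergeFunc = func(vals []uint64) uint64 {
+//		return vals[0]
+//	}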
+
+// MergeIterator implements the Iterator interface by traversing a slice
+// of iterators and merging their contents. If the same key exists
+// in multiple underlying iterators, a user-provided MergeFunc will be
+// invoked to choose the new value.
+type MergeIterator struct {
+ itrs []Iterator
+ f MergeFunc
+ currKs [][]byte
+ currVs []uint64
+
+ lowK []byte
+ lowV uint64
+ lowIdxs []int
+
+ mergeV []uint64
+}
+
+// NewMergeIterator creates a new MergeIterator over the provided slice of
+// Iterators and with the specified MergeFunc to resolve duplicate keys.
+func NewMergeIterator(itrs []Iterator, f MergeFunc) (*MergeIterator, error) {
+ rv := &MergeIterator{
+ itrs: itrs,
+ f: f,
+ currKs: make([][]byte, len(itrs)),
+ currVs: make([]uint64, len(itrs)),
+ lowIdxs: make([]int, 0, len(itrs)),
+ mergeV: make([]uint64, 0, len(itrs)),
+ }
+ rv.init()
+ if rv.lowK == nil {
+ return rv, ErrIteratorDone
+ }
+ return rv, nil
+}
+
+func (m *MergeIterator) init() {
+ for i, itr := range m.itrs {
+ m.currKs[i], m.currVs[i] = itr.Current()
+ }
+ m.updateMatches()
+}
+
+func (m *MergeIterator) updateMatches() {
+ if len(m.itrs) < 1 {
+ return
+ }
+ m.lowK = m.currKs[0]
+ m.lowIdxs = m.lowIdxs[:0]
+ m.lowIdxs = append(m.lowIdxs, 0)
+ for i := 1; i < len(m.itrs); i++ {
+ if m.currKs[i] == nil {
+ continue
+ }
+ cmp := bytes.Compare(m.currKs[i], m.lowK)
+ if m.lowK == nil || cmp < 0 {
+ // reached a new low
+ m.lowK = m.currKs[i]
+ m.lowIdxs = m.lowIdxs[:0]
+ m.lowIdxs = append(m.lowIdxs, i)
+ } else if cmp == 0 {
+ m.lowIdxs = append(m.lowIdxs, i)
+ }
+ }
+ if len(m.lowIdxs) > 1 {
+ // merge multiple values
+ m.mergeV = m.mergeV[:0]
+ for _, vi := range m.lowIdxs {
+ m.mergeV = append(m.mergeV, m.currVs[vi])
+ }
+ m.lowV = m.f(m.mergeV)
+ } else if len(m.lowIdxs) == 1 {
+ m.lowV = m.currVs[m.lowIdxs[0]]
+ }
+}
+
+// Current returns the key and value currently pointed to by this iterator.
+// If the iterator is not pointing at a valid value (because Iterator/Next/Seek
+// returned an error previously), it may return nil, 0.
+func (m *MergeIterator) Current() ([]byte, uint64) {
+ return m.lowK, m.lowV
+}
+
+// Next advances this iterator to the next key/value pair. If there is none,
+// then ErrIteratorDone is returned.
+func (m *MergeIterator) Next() error {
+ // move all the current low iterators to next
+ for _, vi := range m.lowIdxs {
+ err := m.itrs[vi].Next()
+ if err != nil && err != ErrIteratorDone {
+ return err
+ }
+ m.currKs[vi], m.currVs[vi] = m.itrs[vi].Current()
+ }
+ m.updateMatches()
+ if m.lowK == nil {
+ return ErrIteratorDone
+ }
+ return nil
+}
+
+// Seek advances this iterator to the specified key/value pair. If this key
+// is not in the FST, Current() will return the next largest key. If this
+// seek operation would go past the last key, then ErrIteratorDone is returned.
+func (m *MergeIterator) Seek(key []byte) error {
+ for i := range m.itrs {
+ err := m.itrs[i].Seek(key)
+ if err != nil && err != ErrIteratorDone {
+ return err
+ }
+ }
+ m.updateMatches()
+ if m.lowK == nil {
+ return ErrIteratorDone
+ }
+ return nil
+}
+
+// Close will attempt to close all the underlying Iterators. If any errors
+// are encountered, the first will be returned.
+func (m *MergeIterator) Close() error {
+ var rv error
+ for i := range m.itrs {
+ // close all iterators, return first error if any
+ err := m.itrs[i].Close()
+ if rv == nil {
+ rv = err
+ }
+ }
+ return rv
+}
+
+// MergeMin chooses the minimum value
+func MergeMin(vals []uint64) uint64 {
+ rv := vals[0]
+ for _, v := range vals[1:] {
+ if v < rv {
+ rv = v
+ }
+ }
+ return rv
+}
+
+// MergeMax chooses the maximum value
+func MergeMax(vals []uint64) uint64 {
+ rv := vals[0]
+ for _, v := range vals[1:] {
+ if v > rv {
+ rv = v
+ }
+ }
+ return rv
+}
+
+// MergeSum sums the values
+func MergeSum(vals []uint64) uint64 {
+ rv := vals[0]
+ for _, v := range vals[1:] {
+ rv += v
+ }
+ return rv
+}
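+
+// Illustrative sketch combining two already-constructed Iterators (itrA
+// and itrB are assumptions for illustration):
+//
+//	mi, err := NewMergeIterator([]Iterator{itrA, itrB}, MergeSum)
+//	for err == nil {
+//		k, v := mi.Current()
+//		// duplicate keys have been resolved through MergeSum
+//		_, _ = k, v
+//		err = mi.Next()
+//	}
+//	if err != ErrIteratorDone {
+//		// handle the real error
+//	}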
diff --git a/vendor/github.com/blevesearch/vellum/pack.go b/vendor/github.com/blevesearch/vellum/pack.go
new file mode 100644
index 0000000..78f3dcd
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/pack.go
@@ -0,0 +1,55 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+func deltaAddr(base, trans uint64) uint64 {
+ // transition dest of 0 is special case
+ if trans == 0 {
+ return 0
+ }
+ return base - trans
+}
+
+const packOutMask = 1<<4 - 1
+
+func encodePackSize(transSize, outSize int) byte {
+ var rv byte
+ rv = byte(transSize << 4)
+ rv |= byte(outSize)
+ return rv
+}
+
+func decodePackSize(pack byte) (transSize int, packSize int) {
+ transSize = int(pack >> 4)
+ packSize = int(pack & packOutMask)
+ return
+}
+
+const maxNumTrans = 1<<6 - 1
+
+func encodeNumTrans(n int) byte {
+ if n <= maxNumTrans {
+ return byte(n)
+ }
+ return 0
+}
+
+func readPackedUint(data []byte) (rv uint64) {
+ for i := range data {
+ shifted := uint64(data[i]) << uint(i*8)
+ rv |= shifted
+ }
+ return
+}
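+
+// A worked example of the helpers above, verifiable by hand: packed
+// integers are little-endian, and the pack-size byte splits 4/4 between
+// the transition-address width and the output width.
+//
+//	readPackedUint([]byte{0x34, 0x12}) == 0x1234
+//	encodePackSize(2, 3)               == 0x23
+//	decodePackSize(0x23)               == (2, 3)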
diff --git a/vendor/github.com/blevesearch/vellum/registry.go b/vendor/github.com/blevesearch/vellum/registry.go
new file mode 100644
index 0000000..f5b9b4d
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/registry.go
@@ -0,0 +1,114 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+type registryCell struct {
+ addr int
+ node *builderNode
+}
+
+type registry struct {
+ builderNodePool *builderNodePool
+ table []registryCell
+ tableSize uint
+ mruSize uint
+}
+
+func newRegistry(p *builderNodePool, tableSize, mruSize int) *registry {
+ nsize := tableSize * mruSize
+ rv := &registry{
+ builderNodePool: p,
+ table: make([]registryCell, nsize),
+ tableSize: uint(tableSize),
+ mruSize: uint(mruSize),
+ }
+ return rv
+}
+
+func (r *registry) Reset() {
+ var empty registryCell
+ for i := range r.table {
+ r.builderNodePool.Put(r.table[i].node)
+ r.table[i] = empty
+ }
+}
+
+func (r *registry) entry(node *builderNode) (bool, int, *registryCell) {
+ if len(r.table) == 0 {
+ return false, 0, nil
+ }
+ bucket := r.hash(node)
+ start := r.mruSize * uint(bucket)
+ end := start + r.mruSize
+ rc := registryCache(r.table[start:end])
+ return rc.entry(node, r.builderNodePool)
+}
+
+const fnvPrime = 1099511628211
+
+func (r *registry) hash(b *builderNode) int {
+ var final uint64
+ if b.final {
+ final = 1
+ }
+
+ var h uint64 = 14695981039346656037
+ h = (h ^ final) * fnvPrime
+ h = (h ^ b.finalOutput) * fnvPrime
+ for _, t := range b.trans {
+ h = (h ^ uint64(t.in)) * fnvPrime
+ h = (h ^ t.out) * fnvPrime
+ h = (h ^ uint64(t.addr)) * fnvPrime
+ }
+ return int(h % uint64(r.tableSize))
+}
+
+type registryCache []registryCell
+
+func (r registryCache) entry(node *builderNode, pool *builderNodePool) (bool, int, *registryCell) {
+ if len(r) == 1 {
+ if r[0].node != nil && r[0].node.equiv(node) {
+ return true, r[0].addr, nil
+ }
+ pool.Put(r[0].node)
+ r[0].node = node
+ return false, 0, &r[0]
+ }
+ for i := range r {
+ if r[i].node != nil && r[i].node.equiv(node) {
+ addr := r[i].addr
+ r.promote(i)
+ return true, addr, nil
+ }
+ }
+ // no match
+ last := len(r) - 1
+ pool.Put(r[last].node)
+ r[last].node = node // discard LRU
+ r.promote(last)
+ return false, 0, &r[0]
+}
+
+func (r registryCache) promote(i int) {
+ for i > 0 {
+ r.swap(i-1, i)
+ i--
+ }
+}
+
+func (r registryCache) swap(i, j int) {
+ r[i], r[j] = r[j], r[i]
+}
diff --git a/vendor/github.com/blevesearch/vellum/transducer.go b/vendor/github.com/blevesearch/vellum/transducer.go
new file mode 100644
index 0000000..753c422
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/transducer.go
@@ -0,0 +1,55 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+// Transducer represents the general contract of a byte-based finite transducer
+type Transducer interface {
+
+ // all transducers are also automatons
+ Automaton
+
+ // IsMatchWithVal returns true if and only if the state is a match;
+ // additionally it returns the state's final value (if any)
+ IsMatchWithVal(int) (bool, uint64)
+
+ // AcceptWithVal returns the next state given the input to the specified
+ // state; additionally it returns the value associated with the transition
+ AcceptWithVal(int, byte) (int, uint64)
+}
+
+// TransducerGet implements a generic Get() method which works
+// on any implementation of Transducer.
+// The caller MUST check the boolean return value for a match.
+// Zero is a valid value regardless of match status,
+// and if it is NOT a match, the value collected so far is returned.
+func TransducerGet(t Transducer, k []byte) (bool, uint64) {
+ var total uint64
+ i := 0
+ curr := t.Start()
+ for t.CanMatch(curr) && i < len(k) {
+ var transVal uint64
+ curr, transVal = t.AcceptWithVal(curr, k[i])
+ if curr == noneAddr {
+ break
+ }
+ total += transVal
+ i++
+ }
+ if i != len(k) {
+ return false, total
+ }
+ match, finalVal := t.IsMatchWithVal(curr)
+ return match, total + finalVal
+}
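+
+// Since *FST implements both Automaton and the two *WithVal methods, it
+// satisfies Transducer, so a generic lookup can be sketched as:
+//
+//	match, val := TransducerGet(fst, []byte("saturn"))
+//	if match {
+//		// val is the summed transition outputs plus the final output
+//	}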
diff --git a/vendor/github.com/blevesearch/vellum/vellum.go b/vendor/github.com/blevesearch/vellum/vellum.go
new file mode 100644
index 0000000..b2537b3
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/vellum.go
@@ -0,0 +1,111 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package vellum is a library for building, serializing and executing an FST (finite
+state transducer).
+
+There are two distinct phases, building an FST and using it.
+
+When building an FST, you insert keys ([]byte) and their associated value
+(uint64). Insert operations MUST be done in lexicographic order. While
+building the FST, data is streamed to an underlying Writer. At the conclusion
+of building, you MUST call Close() on the builder.
+
+After completion of the build phase, you can Open() the FST if you
+serialized it to disk. Alternatively, if you already have the bytes in
+memory, you can use Load(). By default, Open() will use mmap to avoid loading
+the entire file into memory.
+
+Once the FST is ready, you can use the Contains() method to see if a key is
+in the FST. You can use the Get() method to see if a key is in the FST and
+retrieve its associated value. And, you can use the Iterator method to
+enumerate key/value pairs within a specified range.
+
+*/
+package vellum
+
+import (
+ "errors"
+ "io"
+)
+
+// ErrOutOfOrder is returned when values are not inserted in
+// lexicographic order.
+var ErrOutOfOrder = errors.New("values not inserted in lexicographic order")
+
+// ErrIteratorDone is returned by Iterator/Next/Seek methods when the
+// Current() value pointed to by the iterator is greater than the last
+// key in this FST, or outside the configured startKeyInclusive/endKeyExclusive
+// range of the Iterator.
+var ErrIteratorDone = errors.New("iterator-done")
+
+// BuilderOpts is a structure to let advanced users customize the behavior
+// of the builder and some aspects of the generated FST.
+type BuilderOpts struct {
+ Encoder int
+ RegistryTableSize int
+ RegistryMRUSize int
+}
+
+// New returns a new Builder which will stream out the
+// underlying representation to the provided Writer as the set is built.
+func New(w io.Writer, opts *BuilderOpts) (*Builder, error) {
+ return newBuilder(w, opts)
+}
+
+// Open loads the FST stored in the provided path
+func Open(path string) (*FST, error) {
+ return open(path)
+}
+
+// Load will return the FST represented by the provided byte slice.
+func Load(data []byte) (*FST, error) {
+ return new(data, nil)
+}
+
+// Merge will iterate through the provided Iterators, merge duplicate keys
+// with the provided MergeFunc, and build a new FST to the provided Writer.
+func Merge(w io.Writer, opts *BuilderOpts, itrs []Iterator, f MergeFunc) error {
+ builder, err := New(w, opts)
+ if err != nil {
+ return err
+ }
+
+ itr, err := NewMergeIterator(itrs, f)
+ for err == nil {
+ k, v := itr.Current()
+ err = builder.Insert(k, v)
+ if err != nil {
+ return err
+ }
+ err = itr.Next()
+ }
+
+ if err != nil && err != ErrIteratorDone {
+ return err
+ }
+
+ err = itr.Close()
+ if err != nil {
+ return err
+ }
+
+ err = builder.Close()
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
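+
+// Illustrative sketch of merging two serialized FSTs into a new file; the
+// paths are hypothetical, and passing nil for opts is assumed to select
+// the builder defaults:
+//
+//	a, _ := Open("a.fst")
+//	b, _ := Open("b.fst")
+//	ia, _ := a.Iterator(nil, nil)
+//	ib, _ := b.Iterator(nil, nil)
+//	out, _ := os.Create("merged.fst")
+//	err := Merge(out, nil, []Iterator{ia, ib}, MergeMax)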
diff --git a/vendor/github.com/blevesearch/vellum/vellum_mmap.go b/vendor/github.com/blevesearch/vellum/vellum_mmap.go
new file mode 100644
index 0000000..81ea165
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/vellum_mmap.go
@@ -0,0 +1,60 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build !nommap
+
+package vellum
+
+import (
+ "os"
+
+ mmap "github.com/blevesearch/mmap-go"
+)
+
+type mmapWrapper struct {
+ f *os.File
+ mm mmap.MMap
+}
+
+func (m *mmapWrapper) Close() (err error) {
+ if m.mm != nil {
+ err = m.mm.Unmap()
+ }
+ // try to close file even if unmap failed
+ if m.f != nil {
+ err2 := m.f.Close()
+ if err == nil {
+ // try to return first error
+ err = err2
+ }
+ }
+ return
+}
+
+func open(path string) (*FST, error) {
+ f, err := os.Open(path)
+ if err != nil {
+ return nil, err
+ }
+ mm, err := mmap.Map(f, mmap.RDONLY, 0)
+ if err != nil {
+ // mmap failed, try to close the file
+ _ = f.Close()
+ return nil, err
+ }
+ return new(mm, &mmapWrapper{
+ f: f,
+ mm: mm,
+ })
+}
diff --git a/vendor/github.com/blevesearch/vellum/vellum_nommap.go b/vendor/github.com/blevesearch/vellum/vellum_nommap.go
new file mode 100644
index 0000000..e985272
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/vellum_nommap.go
@@ -0,0 +1,27 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build nommap
+
+package vellum
+
+import "io/ioutil"
+
+func open(path string) (*FST, error) {
+ data, err := ioutil.ReadFile(path)
+ if err != nil {
+ return nil, err
+ }
+ return new(data, nil)
+}
diff --git a/vendor/github.com/blevesearch/vellum/writer.go b/vendor/github.com/blevesearch/vellum/writer.go
new file mode 100644
index 0000000..d655d47
--- /dev/null
+++ b/vendor/github.com/blevesearch/vellum/writer.go
@@ -0,0 +1,92 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vellum
+
+import (
+ "bufio"
+ "io"
+)
+
+// A writer is a buffered writer used by vellum. It counts how many bytes have
+// been written and has some convenience methods used for encoding the data.
+type writer struct {
+ w *bufio.Writer
+ counter int
+}
+
+func newWriter(w io.Writer) *writer {
+ return &writer{
+ w: bufio.NewWriter(w),
+ }
+}
+
+func (w *writer) Reset(newWriter io.Writer) {
+ w.w.Reset(newWriter)
+ w.counter = 0
+}
+
+func (w *writer) WriteByte(c byte) error {
+ err := w.w.WriteByte(c)
+ if err != nil {
+ return err
+ }
+ w.counter++
+ return nil
+}
+
+func (w *writer) Write(p []byte) (int, error) {
+ n, err := w.w.Write(p)
+ w.counter += n
+ return n, err
+}
+
+func (w *writer) Flush() error {
+ return w.w.Flush()
+}
+
+func (w *writer) WritePackedUintIn(v uint64, n int) error {
+ for shift := uint(0); shift < uint(n*8); shift += 8 {
+ err := w.WriteByte(byte(v >> shift))
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (w *writer) WritePackedUint(v uint64) error {
+ n := packedSize(v)
+ return w.WritePackedUintIn(v, n)
+}
+
+func packedSize(n uint64) int {
+ if n < 1<<8 {
+ return 1
+ } else if n < 1<<16 {
+ return 2
+ } else if n < 1<<24 {
+ return 3
+ } else if n < 1<<32 {
+ return 4
+ } else if n < 1<<40 {
+ return 5
+ } else if n < 1<<48 {
+ return 6
+ } else if n < 1<<56 {
+ return 7
+ }
+ return 8
+}
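+
+// packedSize pairs with WritePackedUintIn and readPackedUint (pack.go): a
+// value is written little-endian in the minimal number of bytes. Worked
+// examples:
+//
+//	packedSize(255)   == 1
+//	packedSize(256)   == 2
+//	packedSize(1<<40) == 6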
diff --git a/vendor/github.com/davecgh/go-spew/LICENSE b/vendor/github.com/davecgh/go-spew/LICENSE
new file mode 100644
index 0000000..bc52e96
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/LICENSE
@@ -0,0 +1,15 @@
+ISC License
+
+Copyright (c) 2012-2016 Dave Collins <dave@davec.name>
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/vendor/github.com/davecgh/go-spew/spew/bypass.go b/vendor/github.com/davecgh/go-spew/spew/bypass.go
new file mode 100644
index 0000000..7929947
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/bypass.go
@@ -0,0 +1,145 @@
+// Copyright (c) 2015-2016 Dave Collins <dave@davec.name>
+//
+// Permission to use, copy, modify, and distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+// NOTE: Due to the following build constraints, this file will only be compiled
+// when the code is not running on Google App Engine, compiled by GopherJS, and
+// "-tags safe" is not added to the go build command line. The "disableunsafe"
+// tag is deprecated and thus should not be used.
+// Go versions prior to 1.4 are disabled because they use a different layout
+// for interfaces, which makes the implementation of unsafeReflectValue more complex.
+// +build !js,!appengine,!safe,!disableunsafe,go1.4
+
+package spew
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+const (
+ // UnsafeDisabled is a build-time constant which specifies whether or
+ // not access to the unsafe package is available.
+ UnsafeDisabled = false
+
+ // ptrSize is the size of a pointer on the current arch.
+ ptrSize = unsafe.Sizeof((*byte)(nil))
+)
+
+type flag uintptr
+
+var (
+ // flagRO indicates whether the value field of a reflect.Value
+ // is read-only.
+ flagRO flag
+
+ // flagAddr indicates whether the address of the reflect.Value's
+ // value may be taken.
+ flagAddr flag
+)
+
+// flagKindMask holds the bits that make up the kind
+// part of the flags field. In all the supported versions,
+// it is in the lower 5 bits.
+const flagKindMask = flag(0x1f)
+
+// Different versions of Go have used different
+// bit layouts for the flags type. This table
+// records the known combinations.
+var okFlags = []struct {
+ ro, addr flag
+}{{
+ // From Go 1.4 to 1.5
+ ro: 1 << 5,
+ addr: 1 << 7,
+}, {
+ // Up to Go tip.
+ ro: 1<<5 | 1<<6,
+ addr: 1 << 8,
+}}
+
+var flagValOffset = func() uintptr {
+ field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag")
+ if !ok {
+ panic("reflect.Value has no flag field")
+ }
+ return field.Offset
+}()
+
+// flagField returns a pointer to the flag field of a reflect.Value.
+func flagField(v *reflect.Value) *flag {
+ return (*flag)(unsafe.Pointer(uintptr(unsafe.Pointer(v)) + flagValOffset))
+}
+
+// unsafeReflectValue converts the passed reflect.Value into one that bypasses
+// the typical safety restrictions preventing access to unaddressable and
+// unexported data. It works by digging the raw pointer to the underlying
+// value out of the protected value and generating a new unprotected (unsafe)
+// reflect.Value to it.
+//
+// This allows us to check for implementations of the Stringer and error
+// interfaces to be used for pretty printing ordinarily unaddressable and
+// inaccessible values such as unexported struct fields.
+func unsafeReflectValue(v reflect.Value) reflect.Value {
+ if !v.IsValid() || (v.CanInterface() && v.CanAddr()) {
+ return v
+ }
+ flagFieldPtr := flagField(&v)
+ *flagFieldPtr &^= flagRO
+ *flagFieldPtr |= flagAddr
+ return v
+}
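+
+// A minimal sketch of the problem this solves; the wrapped type below is
+// hypothetical. reflect normally refuses Interface() on values read from
+// unexported fields:
+//
+//	type wrapped struct{ inner error }
+//	v := reflect.ValueOf(wrapped{inner: io.EOF}).Field(0)
+//	// v.Interface() would panic with "cannot return value obtained
+//	// from unexported field or method"
+//	u := unsafeReflectValue(v)
+//	_ = u.Interface() // allowed, so handleMethods can call Error()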
+
+// Sanity checks against future reflect package changes
+// to the type or semantics of the Value.flag field.
+func init() {
+ field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag")
+ if !ok {
+ panic("reflect.Value has no flag field")
+ }
+ if field.Type.Kind() != reflect.TypeOf(flag(0)).Kind() {
+ panic("reflect.Value flag field has changed kind")
+ }
+ type t0 int
+ var t struct {
+ A t0
+ // t0 will have flagEmbedRO set.
+ t0
+ // a will have flagStickyRO set
+ a t0
+ }
+ vA := reflect.ValueOf(t).FieldByName("A")
+ va := reflect.ValueOf(t).FieldByName("a")
+ vt0 := reflect.ValueOf(t).FieldByName("t0")
+
+ // Infer flagRO from the difference between the flags
+ // for the (otherwise identical) fields in t.
+ flagPublic := *flagField(&vA)
+ flagWithRO := *flagField(&va) | *flagField(&vt0)
+ flagRO = flagPublic ^ flagWithRO
+
+ // Infer flagAddr from the difference between a value
+ // taken from a pointer and not.
+ vPtrA := reflect.ValueOf(&t).Elem().FieldByName("A")
+ flagNoPtr := *flagField(&vA)
+ flagPtr := *flagField(&vPtrA)
+ flagAddr = flagNoPtr ^ flagPtr
+
+ // Check that the inferred flags tally with one of the known versions.
+ for _, f := range okFlags {
+ if flagRO == f.ro && flagAddr == f.addr {
+ return
+ }
+ }
+ panic("reflect.Value read-only flag has changed semantics")
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go
new file mode 100644
index 0000000..205c28d
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go
@@ -0,0 +1,38 @@
+// Copyright (c) 2015-2016 Dave Collins <dave@davec.name>
+//
+// Permission to use, copy, modify, and distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+// NOTE: Due to the following build constraints, this file will only be compiled
+// when the code is running on Google App Engine, compiled by GopherJS, or
+// "-tags safe" is added to the go build command line. The "disableunsafe"
+// tag is deprecated and thus should not be used.
+// +build js appengine safe disableunsafe !go1.4
+
+package spew
+
+import "reflect"
+
+const (
+ // UnsafeDisabled is a build-time constant which specifies whether or
+ // not access to the unsafe package is available.
+ UnsafeDisabled = true
+)
+
+// unsafeReflectValue typically converts the passed reflect.Value into one
+// that bypasses the typical safety restrictions preventing access to
+// unaddressable and unexported data. However, doing this relies on access to
+// the unsafe package. This is a stub version which simply returns the passed
+// reflect.Value when the unsafe package is not available.
+func unsafeReflectValue(v reflect.Value) reflect.Value {
+ return v
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/common.go b/vendor/github.com/davecgh/go-spew/spew/common.go
new file mode 100644
index 0000000..1be8ce9
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/common.go
@@ -0,0 +1,341 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "reflect"
+ "sort"
+ "strconv"
+)
+
+// Some constants in the form of bytes to avoid string overhead. This mirrors
+// the technique used in the fmt package.
+var (
+ panicBytes = []byte("(PANIC=")
+ plusBytes = []byte("+")
+ iBytes = []byte("i")
+ trueBytes = []byte("true")
+ falseBytes = []byte("false")
+ interfaceBytes = []byte("(interface {})")
+ commaNewlineBytes = []byte(",\n")
+ newlineBytes = []byte("\n")
+ openBraceBytes = []byte("{")
+ openBraceNewlineBytes = []byte("{\n")
+ closeBraceBytes = []byte("}")
+ asteriskBytes = []byte("*")
+ colonBytes = []byte(":")
+ colonSpaceBytes = []byte(": ")
+ openParenBytes = []byte("(")
+ closeParenBytes = []byte(")")
+ spaceBytes = []byte(" ")
+ pointerChainBytes = []byte("->")
+ nilAngleBytes = []byte("<nil>")
+ maxNewlineBytes = []byte("<max depth reached>\n")
+ maxShortBytes = []byte("<max>")
+ circularBytes = []byte("<shown>")
+ circularShortBytes = []byte("<shown>")
+ invalidAngleBytes = []byte("<invalid>")
+ openBracketBytes = []byte("[")
+ closeBracketBytes = []byte("]")
+ percentBytes = []byte("%")
+ precisionBytes = []byte(".")
+ openAngleBytes = []byte("<")
+ closeAngleBytes = []byte(">")
+ openMapBytes = []byte("map[")
+ closeMapBytes = []byte("]")
+ lenEqualsBytes = []byte("len=")
+ capEqualsBytes = []byte("cap=")
+)
+
+// hexDigits is used to map a decimal value to a hex digit.
+var hexDigits = "0123456789abcdef"
+
+// catchPanic handles any panics that might occur during the handleMethods
+// calls.
+func catchPanic(w io.Writer, v reflect.Value) {
+ if err := recover(); err != nil {
+ w.Write(panicBytes)
+ fmt.Fprintf(w, "%v", err)
+ w.Write(closeParenBytes)
+ }
+}
+
+// handleMethods attempts to call the Error and String methods on the underlying
+// type the passed reflect.Value represents and outputs the result to Writer w.
+//
+// It handles panics in any called methods by catching and displaying the error
+// as the formatted value.
+func handleMethods(cs *ConfigState, w io.Writer, v reflect.Value) (handled bool) {
+ // We need an interface to check if the type implements the error or
+ // Stringer interface. However, the reflect package won't give us an
+ // interface on certain things like unexported struct fields in order
+ // to enforce visibility rules. We use unsafe, when it's available,
+ // to bypass these restrictions since this package does not mutate the
+ // values.
+ if !v.CanInterface() {
+ if UnsafeDisabled {
+ return false
+ }
+
+ v = unsafeReflectValue(v)
+ }
+
+ // Choose whether or not to do error and Stringer interface lookups against
+ // the base type or a pointer to the base type depending on settings.
+ // Technically calling one of these methods with a pointer receiver can
+ // mutate the value, however, types which choose to satisfy an error or
+ // Stringer interface with a pointer receiver should not be mutating their
+ // state inside these interface methods.
+ if !cs.DisablePointerMethods && !UnsafeDisabled && !v.CanAddr() {
+ v = unsafeReflectValue(v)
+ }
+ if v.CanAddr() {
+ v = v.Addr()
+ }
+
+ // Is it an error or Stringer?
+ switch iface := v.Interface().(type) {
+ case error:
+ defer catchPanic(w, v)
+ if cs.ContinueOnMethod {
+ w.Write(openParenBytes)
+ w.Write([]byte(iface.Error()))
+ w.Write(closeParenBytes)
+ w.Write(spaceBytes)
+ return false
+ }
+
+ w.Write([]byte(iface.Error()))
+ return true
+
+ case fmt.Stringer:
+ defer catchPanic(w, v)
+ if cs.ContinueOnMethod {
+ w.Write(openParenBytes)
+ w.Write([]byte(iface.String()))
+ w.Write(closeParenBytes)
+ w.Write(spaceBytes)
+ return false
+ }
+ w.Write([]byte(iface.String()))
+ return true
+ }
+ return false
+}
+
+// printBool outputs a boolean value as true or false to Writer w.
+func printBool(w io.Writer, val bool) {
+ if val {
+ w.Write(trueBytes)
+ } else {
+ w.Write(falseBytes)
+ }
+}
+
+// printInt outputs a signed integer value to Writer w.
+func printInt(w io.Writer, val int64, base int) {
+ w.Write([]byte(strconv.FormatInt(val, base)))
+}
+
+// printUint outputs an unsigned integer value to Writer w.
+func printUint(w io.Writer, val uint64, base int) {
+ w.Write([]byte(strconv.FormatUint(val, base)))
+}
+
+// printFloat outputs a floating point value using the specified precision,
+// which is expected to be 32 or 64bit, to Writer w.
+func printFloat(w io.Writer, val float64, precision int) {
+ w.Write([]byte(strconv.FormatFloat(val, 'g', -1, precision)))
+}
+
+// printComplex outputs a complex value using the specified float precision
+// for the real and imaginary parts to Writer w.
+func printComplex(w io.Writer, c complex128, floatPrecision int) {
+ r := real(c)
+ w.Write(openParenBytes)
+ w.Write([]byte(strconv.FormatFloat(r, 'g', -1, floatPrecision)))
+ i := imag(c)
+ if i >= 0 {
+ w.Write(plusBytes)
+ }
+ w.Write([]byte(strconv.FormatFloat(i, 'g', -1, floatPrecision)))
+ w.Write(iBytes)
+ w.Write(closeParenBytes)
+}
+
+// printHexPtr outputs a uintptr formatted as hexadecimal with a leading '0x'
+// prefix to Writer w.
+func printHexPtr(w io.Writer, p uintptr) {
+ // Null pointer.
+ num := uint64(p)
+ if num == 0 {
+ w.Write(nilAngleBytes)
+ return
+ }
+
+ // Max uint64 is 16 bytes in hex + 2 bytes for '0x' prefix
+ buf := make([]byte, 18)
+
+ // It's simpler to construct the hex string right to left.
+ base := uint64(16)
+ i := len(buf) - 1
+ for num >= base {
+ buf[i] = hexDigits[num%base]
+ num /= base
+ i--
+ }
+ buf[i] = hexDigits[num]
+
+ // Add '0x' prefix.
+ i--
+ buf[i] = 'x'
+ i--
+ buf[i] = '0'
+
+ // Strip unused leading bytes.
+ buf = buf[i:]
+ w.Write(buf)
+}
+
+// valuesSorter implements sort.Interface to allow a slice of reflect.Value
+// elements to be sorted.
+type valuesSorter struct {
+ values []reflect.Value
+ strings []string // either nil or same len as values
+ cs *ConfigState
+}
+
+// newValuesSorter initializes a valuesSorter instance, which holds a set of
+// surrogate keys on which the data should be sorted. It uses flags in
+// ConfigState to decide if and how to populate those surrogate keys.
+func newValuesSorter(values []reflect.Value, cs *ConfigState) sort.Interface {
+ vs := &valuesSorter{values: values, cs: cs}
+ if canSortSimply(vs.values[0].Kind()) {
+ return vs
+ }
+ if !cs.DisableMethods {
+ vs.strings = make([]string, len(values))
+ for i := range vs.values {
+ b := bytes.Buffer{}
+ if !handleMethods(cs, &b, vs.values[i]) {
+ vs.strings = nil
+ break
+ }
+ vs.strings[i] = b.String()
+ }
+ }
+ if vs.strings == nil && cs.SpewKeys {
+ vs.strings = make([]string, len(values))
+ for i := range vs.values {
+ vs.strings[i] = Sprintf("%#v", vs.values[i].Interface())
+ }
+ }
+ return vs
+}
+
+// canSortSimply tests whether a reflect.Kind is a primitive that can be sorted
+// directly, or whether it should be considered for sorting by surrogate keys
+// (if the ConfigState allows it).
+func canSortSimply(kind reflect.Kind) bool {
+ // This switch parallels valueSortLess, except for the default case.
+ switch kind {
+ case reflect.Bool:
+ return true
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ return true
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
+ return true
+ case reflect.Float32, reflect.Float64:
+ return true
+ case reflect.String:
+ return true
+ case reflect.Uintptr:
+ return true
+ case reflect.Array:
+ return true
+ }
+ return false
+}
+
+// Len returns the number of values in the slice. It is part of the
+// sort.Interface implementation.
+func (s *valuesSorter) Len() int {
+ return len(s.values)
+}
+
+// Swap swaps the values at the passed indices. It is part of the
+// sort.Interface implementation.
+func (s *valuesSorter) Swap(i, j int) {
+ s.values[i], s.values[j] = s.values[j], s.values[i]
+ if s.strings != nil {
+ s.strings[i], s.strings[j] = s.strings[j], s.strings[i]
+ }
+}
+
+// valueSortLess returns whether the first value should sort before the second
+// value. It is used by valuesSorter.Less as part of the sort.Interface
+// implementation.
+func valueSortLess(a, b reflect.Value) bool {
+ switch a.Kind() {
+ case reflect.Bool:
+ return !a.Bool() && b.Bool()
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ return a.Int() < b.Int()
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
+ return a.Uint() < b.Uint()
+ case reflect.Float32, reflect.Float64:
+ return a.Float() < b.Float()
+ case reflect.String:
+ return a.String() < b.String()
+ case reflect.Uintptr:
+ return a.Uint() < b.Uint()
+ case reflect.Array:
+ // Compare the contents of both arrays.
+ l := a.Len()
+ for i := 0; i < l; i++ {
+ av := a.Index(i)
+ bv := b.Index(i)
+ if av.Interface() == bv.Interface() {
+ continue
+ }
+ return valueSortLess(av, bv)
+ }
+ }
+ return a.String() < b.String()
+}
+
+// Less returns whether the value at index i should sort before the
+// value at index j. It is part of the sort.Interface implementation.
+func (s *valuesSorter) Less(i, j int) bool {
+ if s.strings == nil {
+ return valueSortLess(s.values[i], s.values[j])
+ }
+ return s.strings[i] < s.strings[j]
+}
+
+// sortValues is a sort function that handles both native types and any type that
+// can be converted to error or Stringer. Other inputs are sorted according to
+// their Value.String() value to ensure display stability.
+func sortValues(values []reflect.Value, cs *ConfigState) {
+ if len(values) == 0 {
+ return
+ }
+ sort.Sort(newValuesSorter(values, cs))
+}
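+
+// Illustrative sketch of the intended call site, stabilizing map keys
+// before printing (v and cs stand in for a reflect.Value of Kind Map and
+// the active *ConfigState):
+//
+//	keys := v.MapKeys()
+//	if cs.SortKeys {
+//		sortValues(keys, cs)
+//	}
+//	for _, key := range keys {
+//		// print key, then v.MapIndex(key)
+//	}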
diff --git a/vendor/github.com/davecgh/go-spew/spew/config.go b/vendor/github.com/davecgh/go-spew/spew/config.go
new file mode 100644
index 0000000..2e3d22f
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/config.go
@@ -0,0 +1,306 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "os"
+)
+
+// ConfigState houses the configuration options used by spew to format and
+// display values. There is a global instance, Config, that is used to control
+// all top-level Formatter and Dump functionality. Each ConfigState instance
+// provides methods equivalent to the top-level functions.
+//
+// The zero value for ConfigState provides no indentation. You would typically
+// want to set it to a space or a tab.
+//
+// Alternatively, you can use NewDefaultConfig to get a ConfigState instance
+// with default settings. See the documentation of NewDefaultConfig for default
+// values.
+type ConfigState struct {
+ // Indent specifies the string to use for each indentation level. The
+ // global config instance that all top-level functions use set this to a
+ // single space by default. If you would like more indentation, you might
+ // set this to a tab with "\t" or perhaps two spaces with " ".
+ Indent string
+
+ // MaxDepth controls the maximum number of levels to descend into nested
+ // data structures. The default, 0, means there is no limit.
+ //
+ // NOTE: Circular data structures are properly detected, so it is not
+ // necessary to set this value unless you specifically want to limit deeply
+ // nested data structures.
+ MaxDepth int
+
+ // DisableMethods specifies whether or not error and Stringer interfaces are
+ // invoked for types that implement them.
+ DisableMethods bool
+
+ // DisablePointerMethods specifies whether or not to check for and invoke
+ // error and Stringer interfaces on types which only accept a pointer
+ // receiver when the current type is not a pointer.
+ //
+ // NOTE: This might be an unsafe action since calling one of these methods
+ // with a pointer receiver could technically mutate the value, however,
+ // in practice, types which choose to satisfy an error or Stringer
+ // interface with a pointer receiver should not be mutating their state
+ // inside these interface methods. As a result, this option relies on
+ // access to the unsafe package, so it will not have any effect when
+ // running in environments without access to the unsafe package such as
+ // Google App Engine or with the "safe" build tag specified.
+ DisablePointerMethods bool
+
+ // DisablePointerAddresses specifies whether to disable the printing of
+ // pointer addresses. This is useful when diffing data structures in tests.
+ DisablePointerAddresses bool
+
+ // DisableCapacities specifies whether to disable the printing of capacities
+ // for arrays, slices, maps and channels. This is useful when diffing
+ // data structures in tests.
+ DisableCapacities bool
+
+ // ContinueOnMethod specifies whether or not recursion should continue once
+ // a custom error or Stringer interface is invoked. The default, false,
+ // means it will print the results of invoking the custom error or Stringer
+ // interface and return immediately instead of continuing to recurse into
+ // the internals of the data type.
+ //
+ // NOTE: This flag does not have any effect if method invocation is disabled
+ // via the DisableMethods or DisablePointerMethods options.
+ ContinueOnMethod bool
+
+	// SortKeys specifies that map keys should be sorted before being printed. Use
+ // this to have a more deterministic, diffable output. Note that only
+ // native types (bool, int, uint, floats, uintptr and string) and types
+ // that support the error or Stringer interfaces (if methods are
+ // enabled) are supported, with other types sorted according to the
+ // reflect.Value.String() output which guarantees display stability.
+ SortKeys bool
+
+ // SpewKeys specifies that, as a last resort attempt, map keys should
+ // be spewed to strings and sorted by those strings. This is only
+ // considered if SortKeys is true.
+ SpewKeys bool
+}
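+
+// A hedged usage sketch (myVar is a placeholder, not part of this package):
+// a dedicated instance gives deterministic, diff-friendly output in tests.
+//
+//	cfg := spew.ConfigState{Indent: "\t", SortKeys: true, DisablePointerAddresses: true}
+//	cfg.Dump(myVar)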
+
+// Config is the active configuration of the top-level functions.
+// The configuration can be changed by modifying the contents of spew.Config.
+var Config = ConfigState{Indent: " "}
+
+// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the formatted string as a value that satisfies error. See NewFormatter
+// for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Errorf(format, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Errorf(format string, a ...interface{}) (err error) {
+ return fmt.Errorf(format, c.convertArgs(a)...)
+}
+
+// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Fprint(w, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Fprint(w io.Writer, a ...interface{}) (n int, err error) {
+ return fmt.Fprint(w, c.convertArgs(a)...)
+}
+
+// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Fprintf(w, format, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
+ return fmt.Fprintf(w, format, c.convertArgs(a)...)
+}
+
+// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it
+// were passed with a Formatter interface returned by c.NewFormatter. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Fprintln(w, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
+ return fmt.Fprintln(w, c.convertArgs(a)...)
+}
+
+// Print is a wrapper for fmt.Print that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Print(c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Print(a ...interface{}) (n int, err error) {
+ return fmt.Print(c.convertArgs(a)...)
+}
+
+// Printf is a wrapper for fmt.Printf that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Printf(format, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Printf(format string, a ...interface{}) (n int, err error) {
+ return fmt.Printf(format, c.convertArgs(a)...)
+}
+
+// Println is a wrapper for fmt.Println that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Println(c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Println(a ...interface{}) (n int, err error) {
+ return fmt.Println(c.convertArgs(a)...)
+}
+
+// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the resulting string. See NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Sprint(c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Sprint(a ...interface{}) string {
+ return fmt.Sprint(c.convertArgs(a)...)
+}
+
+// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the resulting string. See NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Sprintf(format, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Sprintf(format string, a ...interface{}) string {
+ return fmt.Sprintf(format, c.convertArgs(a)...)
+}
+
+// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it
+// were passed with a Formatter interface returned by c.NewFormatter. It
+// returns the resulting string. See NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Sprintln(c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Sprintln(a ...interface{}) string {
+ return fmt.Sprintln(c.convertArgs(a)...)
+}
+
+/*
+NewFormatter returns a custom formatter that satisfies the fmt.Formatter
+interface. As a result, it integrates cleanly with standard fmt package
+printing functions. The formatter is useful for inline printing of smaller data
+types similar to the standard %v format specifier.
+
+The custom formatter only responds to the %v (most compact), %+v (adds pointer
+addresses), %#v (adds types), and %#+v (adds types and pointer addresses) verb
+combinations. Any other verbs such as %x and %q will be sent to the
+standard fmt package for formatting. In addition, the custom formatter ignores
+the width and precision arguments (however they will still work on the format
+specifiers not handled by the custom formatter).
+
+Typically this function shouldn't be called directly. It is much easier to make
+use of the custom formatter by calling one of the convenience functions such as
+c.Printf, c.Println, or c.Fprintf.
+*/
+func (c *ConfigState) NewFormatter(v interface{}) fmt.Formatter {
+ return newFormatter(c, v)
+}
+
+// Fdump formats and displays the passed arguments to io.Writer w. It formats
+// exactly the same as Dump.
+func (c *ConfigState) Fdump(w io.Writer, a ...interface{}) {
+ fdump(c, w, a...)
+}
+
+/*
+Dump displays the passed parameters to standard out with newlines, customizable
+indentation, and additional debug information such as complete types and all
+pointer addresses used to indirect to the final value. It provides the
+following features over the built-in printing facilities provided by the fmt
+package:
+
+ * Pointers are dereferenced and followed
+ * Circular data structures are detected and handled properly
+ * Custom Stringer/error interfaces are optionally invoked, including
+ on unexported types
+ * Custom types which only implement the Stringer/error interfaces via
+ a pointer receiver are optionally invoked when passing non-pointer
+ variables
+ * Byte arrays and slices are dumped like the hexdump -C command which
+ includes offsets, byte values in hex, and ASCII output
+
+The configuration options are controlled by modifying the public members
+of c. See ConfigState for options documentation.
+
+See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to
+get the formatted result as a string.
+*/
+func (c *ConfigState) Dump(a ...interface{}) {
+ fdump(c, os.Stdout, a...)
+}
+
+// Sdump returns a string with the passed arguments formatted exactly the same
+// as Dump.
+func (c *ConfigState) Sdump(a ...interface{}) string {
+ var buf bytes.Buffer
+ fdump(c, &buf, a...)
+ return buf.String()
+}
+
+// convertArgs accepts a slice of arguments and returns a slice of the same
+// length with each argument converted to a spew Formatter interface using
+// the ConfigState associated with s.
+func (c *ConfigState) convertArgs(args []interface{}) (formatters []interface{}) {
+ formatters = make([]interface{}, len(args))
+ for index, arg := range args {
+ formatters[index] = newFormatter(c, arg)
+ }
+ return formatters
+}
+
+// NewDefaultConfig returns a ConfigState with the following default settings.
+//
+// Indent: " "
+// MaxDepth: 0
+// DisableMethods: false
+// DisablePointerMethods: false
+// DisablePointerAddresses: false
+// DisableCapacities: false
+// ContinueOnMethod: false
+// SortKeys: false
+// SpewKeys: false
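+//
+// A hedged sketch of tweaking the returned instance (myVar is a
+// placeholder):
+//
+//	c := spew.NewDefaultConfig()
+//	c.MaxDepth = 2
+//	c.Dump(myVar)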
+func NewDefaultConfig() *ConfigState {
+ return &ConfigState{Indent: " "}
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/doc.go b/vendor/github.com/davecgh/go-spew/spew/doc.go
new file mode 100644
index 0000000..aacaac6
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/doc.go
@@ -0,0 +1,211 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+/*
+Package spew implements a deep pretty printer for Go data structures to aid in
+debugging.
+
+A quick overview of the additional features spew provides over the built-in
+printing facilities for Go data types is as follows:
+
+ * Pointers are dereferenced and followed
+ * Circular data structures are detected and handled properly
+ * Custom Stringer/error interfaces are optionally invoked, including
+ on unexported types
+ * Custom types which only implement the Stringer/error interfaces via
+ a pointer receiver are optionally invoked when passing non-pointer
+ variables
+ * Byte arrays and slices are dumped like the hexdump -C command which
+ includes offsets, byte values in hex, and ASCII output (only when using
+ Dump style)
+
+There are two different approaches spew allows for dumping Go data structures:
+
+ * Dump style which prints with newlines, customizable indentation,
+ and additional debug information such as types and all pointer addresses
+ used to indirect to the final value
+ * A custom Formatter interface that integrates cleanly with the standard fmt
+ package and replaces %v, %+v, %#v, and %#+v to provide inline printing
+ similar to the default %v while providing the additional functionality
+ outlined above and passing unsupported format verbs such as %x and %q
+ along to fmt
+
+Quick Start
+
+This section demonstrates how to quickly get started with spew. See the
+sections below for further details on formatting and configuration options.
+
+To dump a variable with full newlines, indentation, type, and pointer
+information use Dump, Fdump, or Sdump:
+ spew.Dump(myVar1, myVar2, ...)
+ spew.Fdump(someWriter, myVar1, myVar2, ...)
+ str := spew.Sdump(myVar1, myVar2, ...)
+
+Alternatively, if you would prefer to use format strings with a compacted inline
+printing style, use the convenience wrappers Printf, Fprintf, etc with
+%v (most compact), %+v (adds pointer addresses), %#v (adds types), or
+%#+v (adds types and pointer addresses):
+ spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+ spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+ spew.Fprintf(someWriter, "myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+ spew.Fprintf(someWriter, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+
+Configuration Options
+
+Configuration of spew is handled by fields in the ConfigState type. For
+convenience, all of the top-level functions use a global state available
+via the spew.Config global.
+
+It is also possible to create a ConfigState instance that provides methods
+equivalent to the top-level functions. This allows concurrent modification
+of configuration options. See the ConfigState documentation for more details.
+
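+For example, a minimal sketch (myVar is a placeholder):
+
+	scs := spew.ConfigState{Indent: "\t"}
+	scs.Dump(myVar)
+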
+The following configuration options are available:
+ * Indent
+ String to use for each indentation level for Dump functions.
+ It is a single space by default. A popular alternative is "\t".
+
+ * MaxDepth
+ Maximum number of levels to descend into nested data structures.
+ There is no limit by default.
+
+ * DisableMethods
+ Disables invocation of error and Stringer interface methods.
+ Method invocation is enabled by default.
+
+ * DisablePointerMethods
+ Disables invocation of error and Stringer interface methods on types
+ which only accept pointer receivers from non-pointer variables.
+ Pointer method invocation is enabled by default.
+
+ * DisablePointerAddresses
+ DisablePointerAddresses specifies whether to disable the printing of
+ pointer addresses. This is useful when diffing data structures in tests.
+
+ * DisableCapacities
+ DisableCapacities specifies whether to disable the printing of
+ capacities for arrays, slices, maps and channels. This is useful when
+ diffing data structures in tests.
+
+ * ContinueOnMethod
+ Enables recursion into types after invoking error and Stringer interface
+ methods. Recursion after method invocation is disabled by default.
+
+ * SortKeys
+ Specifies map keys should be sorted before being printed. Use
+ this to have a more deterministic, diffable output. Note that
+ only native types (bool, int, uint, floats, uintptr and string)
+ and types which implement error or Stringer interfaces are
+      supported, with other types sorted according to the
+ reflect.Value.String() output which guarantees display
+ stability. Natural map order is used by default.
+
+ * SpewKeys
+ Specifies that, as a last resort attempt, map keys should be
+ spewed to strings and sorted by those strings. This is only
+ considered if SortKeys is true.
+
+Dump Usage
+
+Simply call spew.Dump with a list of variables you want to dump:
+
+ spew.Dump(myVar1, myVar2, ...)
+
+You may also call spew.Fdump if you would prefer to output to an arbitrary
+io.Writer. For example, to dump to standard error:
+
+ spew.Fdump(os.Stderr, myVar1, myVar2, ...)
+
+A third option is to call spew.Sdump to get the formatted output as a string:
+
+ str := spew.Sdump(myVar1, myVar2, ...)
+
+Sample Dump Output
+
+See the Dump example for details on the setup of the types and variables being
+shown here.
+
+ (main.Foo) {
+ unexportedField: (*main.Bar)(0xf84002e210)({
+ flag: (main.Flag) flagTwo,
+   data: (uintptr) <nil>
+ }),
+ ExportedField: (map[interface {}]interface {}) (len=1) {
+ (string) (len=3) "one": (bool) true
+ }
+ }
+
+Byte (and uint8) arrays and slices are displayed in the style of the
+hexdump -C command, as shown below.
+ ([]uint8) (len=32 cap=32) {
+ 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20 |............... |
+ 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30 |!"#$%&'()*+,-./0|
+ 00000020 31 32 |12|
+ }
+
+Custom Formatter
+
+Spew provides a custom formatter that implements the fmt.Formatter interface
+so that it integrates cleanly with standard fmt package printing functions. The
+formatter is useful for inline printing of smaller data types similar to the
+standard %v format specifier.
+
+The custom formatter only responds to the %v (most compact), %+v (adds pointer
+addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb
+combinations. Any other verbs such as %x and %q will be sent to the
+standard fmt package for formatting. In addition, the custom formatter ignores
+the width and precision arguments (however they will still work on the format
+specifiers not handled by the custom formatter).
+
+Custom Formatter Usage
+
+The simplest way to make use of the spew custom formatter is to call one of the
+convenience functions such as spew.Printf, spew.Println, or spew.Fprintf. The
+functions have syntax you are most likely already familiar with:
+
+ spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+ spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+ spew.Println(myVar, myVar2)
+ spew.Fprintf(os.Stderr, "myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+ spew.Fprintf(os.Stderr, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+
+See the Index for the full list of convenience functions.
+
+Sample Formatter Output
+
+Double pointer to a uint8:
+ %v: <**>5
+ %+v: <**>(0xf8400420d0->0xf8400420c8)5
+ %#v: (**uint8)5
+ %#+v: (**uint8)(0xf8400420d0->0xf8400420c8)5
+
+Pointer to circular struct with a uint8 field and a pointer to itself:
+ %v: <*>{1 <*>}
+ %+v: <*>(0xf84003e260){ui8:1 c:<*>(0xf84003e260)}
+ %#v: (*main.circular){ui8:(uint8)1 c:(*main.circular)}
+ %#+v: (*main.circular)(0xf84003e260){ui8:(uint8)1 c:(*main.circular)(0xf84003e260)}
+
+See the Printf example for details on the setup of variables being shown
+here.
+
+Errors
+
+Since it is possible for custom Stringer/error interfaces to panic, spew
+detects them and handles them internally by printing the panic information
+inline with the output. Since spew is intended to provide deep pretty printing
+capabilities on structures, it intentionally does not return any errors.
+*/
+package spew
diff --git a/vendor/github.com/davecgh/go-spew/spew/dump.go b/vendor/github.com/davecgh/go-spew/spew/dump.go
new file mode 100644
index 0000000..f78d89f
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/dump.go
@@ -0,0 +1,509 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew
+
+import (
+ "bytes"
+ "encoding/hex"
+ "fmt"
+ "io"
+ "os"
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+var (
+ // uint8Type is a reflect.Type representing a uint8. It is used to
+ // convert cgo types to uint8 slices for hexdumping.
+ uint8Type = reflect.TypeOf(uint8(0))
+
+ // cCharRE is a regular expression that matches a cgo char.
+ // It is used to detect character arrays to hexdump them.
+ cCharRE = regexp.MustCompile(`^.*\._Ctype_char$`)
+
+ // cUnsignedCharRE is a regular expression that matches a cgo unsigned
+ // char. It is used to detect unsigned character arrays to hexdump
+ // them.
+ cUnsignedCharRE = regexp.MustCompile(`^.*\._Ctype_unsignedchar$`)
+
+ // cUint8tCharRE is a regular expression that matches a cgo uint8_t.
+ // It is used to detect uint8_t arrays to hexdump them.
+ cUint8tCharRE = regexp.MustCompile(`^.*\._Ctype_uint8_t$`)
+)
+
+// dumpState contains information about the state of a dump operation.
+type dumpState struct {
+ w io.Writer
+ depth int
+ pointers map[uintptr]int
+ ignoreNextType bool
+ ignoreNextIndent bool
+ cs *ConfigState
+}
+
+// indent performs indentation according to the depth level and cs.Indent
+// option.
+func (d *dumpState) indent() {
+ if d.ignoreNextIndent {
+ d.ignoreNextIndent = false
+ return
+ }
+ d.w.Write(bytes.Repeat([]byte(d.cs.Indent), d.depth))
+}
+
+// unpackValue returns values inside of non-nil interfaces when possible.
+// This is useful for data types like structs, arrays, slices, and maps which
+// can contain varying types packed inside an interface.
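+// For example, a reflect.Value holding interface{}(42) unwraps to the
+// underlying int value, so the concrete type, not "interface {}", is dumped.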
+func (d *dumpState) unpackValue(v reflect.Value) reflect.Value {
+ if v.Kind() == reflect.Interface && !v.IsNil() {
+ v = v.Elem()
+ }
+ return v
+}
+
+// dumpPtr handles formatting of pointers by indirecting them as necessary.
+func (d *dumpState) dumpPtr(v reflect.Value) {
+ // Remove pointers at or below the current depth from map used to detect
+ // circular refs.
+ for k, depth := range d.pointers {
+ if depth >= d.depth {
+ delete(d.pointers, k)
+ }
+ }
+
+ // Keep list of all dereferenced pointers to show later.
+ pointerChain := make([]uintptr, 0)
+
+ // Figure out how many levels of indirection there are by dereferencing
+ // pointers and unpacking interfaces down the chain while detecting circular
+ // references.
+ nilFound := false
+ cycleFound := false
+ indirects := 0
+ ve := v
+ for ve.Kind() == reflect.Ptr {
+ if ve.IsNil() {
+ nilFound = true
+ break
+ }
+ indirects++
+ addr := ve.Pointer()
+ pointerChain = append(pointerChain, addr)
+ if pd, ok := d.pointers[addr]; ok && pd < d.depth {
+ cycleFound = true
+ indirects--
+ break
+ }
+ d.pointers[addr] = d.depth
+
+ ve = ve.Elem()
+ if ve.Kind() == reflect.Interface {
+ if ve.IsNil() {
+ nilFound = true
+ break
+ }
+ ve = ve.Elem()
+ }
+ }
+
+ // Display type information.
+ d.w.Write(openParenBytes)
+ d.w.Write(bytes.Repeat(asteriskBytes, indirects))
+ d.w.Write([]byte(ve.Type().String()))
+ d.w.Write(closeParenBytes)
+
+ // Display pointer information.
+ if !d.cs.DisablePointerAddresses && len(pointerChain) > 0 {
+ d.w.Write(openParenBytes)
+ for i, addr := range pointerChain {
+ if i > 0 {
+ d.w.Write(pointerChainBytes)
+ }
+ printHexPtr(d.w, addr)
+ }
+ d.w.Write(closeParenBytes)
+ }
+
+ // Display dereferenced value.
+ d.w.Write(openParenBytes)
+ switch {
+ case nilFound:
+ d.w.Write(nilAngleBytes)
+
+ case cycleFound:
+ d.w.Write(circularBytes)
+
+ default:
+ d.ignoreNextType = true
+ d.dump(ve)
+ }
+ d.w.Write(closeParenBytes)
+}
+
+// dumpSlice handles formatting of arrays and slices. Byte (uint8 under
+// reflection) arrays and slices are dumped in hexdump -C fashion.
+func (d *dumpState) dumpSlice(v reflect.Value) {
+ // Determine whether this type should be hex dumped or not. Also,
+ // for types which should be hexdumped, try to use the underlying data
+ // first, then fall back to trying to convert them to a uint8 slice.
+ var buf []uint8
+ doConvert := false
+ doHexDump := false
+ numEntries := v.Len()
+ if numEntries > 0 {
+ vt := v.Index(0).Type()
+ vts := vt.String()
+ switch {
+ // C types that need to be converted.
+ case cCharRE.MatchString(vts):
+ fallthrough
+ case cUnsignedCharRE.MatchString(vts):
+ fallthrough
+ case cUint8tCharRE.MatchString(vts):
+ doConvert = true
+
+ // Try to use existing uint8 slices and fall back to converting
+ // and copying if that fails.
+ case vt.Kind() == reflect.Uint8:
+ // We need an addressable interface to convert the type
+ // to a byte slice. However, the reflect package won't
+ // give us an interface on certain things like
+ // unexported struct fields in order to enforce
+ // visibility rules. We use unsafe, when available, to
+ // bypass these restrictions since this package does not
+ // mutate the values.
+ vs := v
+ if !vs.CanInterface() || !vs.CanAddr() {
+ vs = unsafeReflectValue(vs)
+ }
+ if !UnsafeDisabled {
+ vs = vs.Slice(0, numEntries)
+
+ // Use the existing uint8 slice if it can be
+ // type asserted.
+ iface := vs.Interface()
+ if slice, ok := iface.([]uint8); ok {
+ buf = slice
+ doHexDump = true
+ break
+ }
+ }
+
+ // The underlying data needs to be converted if it can't
+ // be type asserted to a uint8 slice.
+ doConvert = true
+ }
+
+ // Copy and convert the underlying type if needed.
+ if doConvert && vt.ConvertibleTo(uint8Type) {
+ // Convert and copy each element into a uint8 byte
+ // slice.
+ buf = make([]uint8, numEntries)
+ for i := 0; i < numEntries; i++ {
+ vv := v.Index(i)
+ buf[i] = uint8(vv.Convert(uint8Type).Uint())
+ }
+ doHexDump = true
+ }
+ }
+
+ // Hexdump the entire slice as needed.
+ if doHexDump {
+ indent := strings.Repeat(d.cs.Indent, d.depth)
+ str := indent + hex.Dump(buf)
+ str = strings.Replace(str, "\n", "\n"+indent, -1)
+ str = strings.TrimRight(str, d.cs.Indent)
+ d.w.Write([]byte(str))
+ return
+ }
+
+ // Recursively call dump for each item.
+ for i := 0; i < numEntries; i++ {
+ d.dump(d.unpackValue(v.Index(i)))
+ if i < (numEntries - 1) {
+ d.w.Write(commaNewlineBytes)
+ } else {
+ d.w.Write(newlineBytes)
+ }
+ }
+}
+
+// dump is the main workhorse for dumping a value. It uses the passed reflect
+// value to figure out what kind of object we are dealing with and formats it
+// appropriately. It is a recursive function, however circular data structures
+// are detected and handled properly.
+func (d *dumpState) dump(v reflect.Value) {
+ // Handle invalid reflect values immediately.
+ kind := v.Kind()
+ if kind == reflect.Invalid {
+ d.w.Write(invalidAngleBytes)
+ return
+ }
+
+ // Handle pointers specially.
+ if kind == reflect.Ptr {
+ d.indent()
+ d.dumpPtr(v)
+ return
+ }
+
+ // Print type information unless already handled elsewhere.
+ if !d.ignoreNextType {
+ d.indent()
+ d.w.Write(openParenBytes)
+ d.w.Write([]byte(v.Type().String()))
+ d.w.Write(closeParenBytes)
+ d.w.Write(spaceBytes)
+ }
+ d.ignoreNextType = false
+
+ // Display length and capacity if the built-in len and cap functions
+ // work with the value's kind and the len/cap itself is non-zero.
+ valueLen, valueCap := 0, 0
+ switch v.Kind() {
+ case reflect.Array, reflect.Slice, reflect.Chan:
+ valueLen, valueCap = v.Len(), v.Cap()
+ case reflect.Map, reflect.String:
+ valueLen = v.Len()
+ }
+ if valueLen != 0 || !d.cs.DisableCapacities && valueCap != 0 {
+ d.w.Write(openParenBytes)
+ if valueLen != 0 {
+ d.w.Write(lenEqualsBytes)
+ printInt(d.w, int64(valueLen), 10)
+ }
+ if !d.cs.DisableCapacities && valueCap != 0 {
+ if valueLen != 0 {
+ d.w.Write(spaceBytes)
+ }
+ d.w.Write(capEqualsBytes)
+ printInt(d.w, int64(valueCap), 10)
+ }
+ d.w.Write(closeParenBytes)
+ d.w.Write(spaceBytes)
+ }
+
+ // Call Stringer/error interfaces if they exist and the handle methods flag
+ // is enabled
+ if !d.cs.DisableMethods {
+ if (kind != reflect.Invalid) && (kind != reflect.Interface) {
+ if handled := handleMethods(d.cs, d.w, v); handled {
+ return
+ }
+ }
+ }
+
+ switch kind {
+ case reflect.Invalid:
+ // Do nothing. We should never get here since invalid has already
+ // been handled above.
+
+ case reflect.Bool:
+ printBool(d.w, v.Bool())
+
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ printInt(d.w, v.Int(), 10)
+
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
+ printUint(d.w, v.Uint(), 10)
+
+ case reflect.Float32:
+ printFloat(d.w, v.Float(), 32)
+
+ case reflect.Float64:
+ printFloat(d.w, v.Float(), 64)
+
+ case reflect.Complex64:
+ printComplex(d.w, v.Complex(), 32)
+
+ case reflect.Complex128:
+ printComplex(d.w, v.Complex(), 64)
+
+ case reflect.Slice:
+ if v.IsNil() {
+ d.w.Write(nilAngleBytes)
+ break
+ }
+ fallthrough
+
+ case reflect.Array:
+ d.w.Write(openBraceNewlineBytes)
+ d.depth++
+ if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) {
+ d.indent()
+ d.w.Write(maxNewlineBytes)
+ } else {
+ d.dumpSlice(v)
+ }
+ d.depth--
+ d.indent()
+ d.w.Write(closeBraceBytes)
+
+ case reflect.String:
+ d.w.Write([]byte(strconv.Quote(v.String())))
+
+ case reflect.Interface:
+ // The only time we should get here is for nil interfaces due to
+ // unpackValue calls.
+ if v.IsNil() {
+ d.w.Write(nilAngleBytes)
+ }
+
+ case reflect.Ptr:
+ // Do nothing. We should never get here since pointers have already
+ // been handled above.
+
+ case reflect.Map:
+ // nil maps should be indicated as different than empty maps
+ if v.IsNil() {
+ d.w.Write(nilAngleBytes)
+ break
+ }
+
+ d.w.Write(openBraceNewlineBytes)
+ d.depth++
+ if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) {
+ d.indent()
+ d.w.Write(maxNewlineBytes)
+ } else {
+ numEntries := v.Len()
+ keys := v.MapKeys()
+ if d.cs.SortKeys {
+ sortValues(keys, d.cs)
+ }
+ for i, key := range keys {
+ d.dump(d.unpackValue(key))
+ d.w.Write(colonSpaceBytes)
+ d.ignoreNextIndent = true
+ d.dump(d.unpackValue(v.MapIndex(key)))
+ if i < (numEntries - 1) {
+ d.w.Write(commaNewlineBytes)
+ } else {
+ d.w.Write(newlineBytes)
+ }
+ }
+ }
+ d.depth--
+ d.indent()
+ d.w.Write(closeBraceBytes)
+
+ case reflect.Struct:
+ d.w.Write(openBraceNewlineBytes)
+ d.depth++
+ if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) {
+ d.indent()
+ d.w.Write(maxNewlineBytes)
+ } else {
+ vt := v.Type()
+ numFields := v.NumField()
+ for i := 0; i < numFields; i++ {
+ d.indent()
+ vtf := vt.Field(i)
+ d.w.Write([]byte(vtf.Name))
+ d.w.Write(colonSpaceBytes)
+ d.ignoreNextIndent = true
+ d.dump(d.unpackValue(v.Field(i)))
+ if i < (numFields - 1) {
+ d.w.Write(commaNewlineBytes)
+ } else {
+ d.w.Write(newlineBytes)
+ }
+ }
+ }
+ d.depth--
+ d.indent()
+ d.w.Write(closeBraceBytes)
+
+ case reflect.Uintptr:
+ printHexPtr(d.w, uintptr(v.Uint()))
+
+ case reflect.UnsafePointer, reflect.Chan, reflect.Func:
+ printHexPtr(d.w, v.Pointer())
+
+ // There were not any other types at the time this code was written, but
+ // fall back to letting the default fmt package handle it in case any new
+ // types are added.
+ default:
+ if v.CanInterface() {
+ fmt.Fprintf(d.w, "%v", v.Interface())
+ } else {
+ fmt.Fprintf(d.w, "%v", v.String())
+ }
+ }
+}
+
+// fdump is a helper function to consolidate the logic from the various public
+// methods which take varying writers and config states.
+func fdump(cs *ConfigState, w io.Writer, a ...interface{}) {
+ for _, arg := range a {
+ if arg == nil {
+ w.Write(interfaceBytes)
+ w.Write(spaceBytes)
+ w.Write(nilAngleBytes)
+ w.Write(newlineBytes)
+ continue
+ }
+
+ d := dumpState{w: w, cs: cs}
+ d.pointers = make(map[uintptr]int)
+ d.dump(reflect.ValueOf(arg))
+ d.w.Write(newlineBytes)
+ }
+}
+
+// Fdump formats and displays the passed arguments to io.Writer w. It formats
+// exactly the same as Dump.
+func Fdump(w io.Writer, a ...interface{}) {
+ fdump(&Config, w, a...)
+}
+
+// Sdump returns a string with the passed arguments formatted exactly the same
+// as Dump.
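+//
+// A hedged sketch of a common use in test failures (got is a placeholder):
+//
+//	t.Errorf("unexpected value:\n%s", spew.Sdump(got))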
+func Sdump(a ...interface{}) string {
+ var buf bytes.Buffer
+ fdump(&Config, &buf, a...)
+ return buf.String()
+}
+
+/*
+Dump displays the passed parameters to standard out with newlines, customizable
+indentation, and additional debug information such as complete types and all
+pointer addresses used to indirect to the final value. It provides the
+following features over the built-in printing facilities provided by the fmt
+package:
+
+ * Pointers are dereferenced and followed
+ * Circular data structures are detected and handled properly
+ * Custom Stringer/error interfaces are optionally invoked, including
+ on unexported types
+ * Custom types which only implement the Stringer/error interfaces via
+ a pointer receiver are optionally invoked when passing non-pointer
+ variables
+ * Byte arrays and slices are dumped like the hexdump -C command which
+ includes offsets, byte values in hex, and ASCII output
+
+The configuration options are controlled by an exported package global,
+spew.Config. See ConfigState for options documentation.
+
+See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to
+get the formatted result as a string.
+*/
+func Dump(a ...interface{}) {
+ fdump(&Config, os.Stdout, a...)
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/format.go b/vendor/github.com/davecgh/go-spew/spew/format.go
new file mode 100644
index 0000000..b04edb7
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/format.go
@@ -0,0 +1,419 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+)
+
+// supportedFlags is a list of all the character flags supported by fmt package.
+const supportedFlags = "0-+# "
+
+// formatState implements the fmt.Formatter interface and contains information
+// about the state of a formatting operation. The NewFormatter function can
+// be used to get a new Formatter which can be used directly as arguments
+// in standard fmt package printing calls.
+type formatState struct {
+ value interface{}
+ fs fmt.State
+ depth int
+ pointers map[uintptr]int
+ ignoreNextType bool
+ cs *ConfigState
+}
+
+// buildDefaultFormat recreates the original format string without precision
+// and width information to pass in to fmt.Sprintf in the case of an
+// unrecognized type. Unless new types are added to the language, this
+// function won't ever be called.
+func (f *formatState) buildDefaultFormat() (format string) {
+ buf := bytes.NewBuffer(percentBytes)
+
+ for _, flag := range supportedFlags {
+ if f.fs.Flag(int(flag)) {
+ buf.WriteRune(flag)
+ }
+ }
+
+ buf.WriteRune('v')
+
+ format = buf.String()
+ return format
+}
+
+// constructOrigFormat recreates the original format string including precision
+// and width information to pass along to the standard fmt package. This allows
+// automatic deferral of all format strings this package doesn't support.
+func (f *formatState) constructOrigFormat(verb rune) (format string) {
+ buf := bytes.NewBuffer(percentBytes)
+
+ for _, flag := range supportedFlags {
+ if f.fs.Flag(int(flag)) {
+ buf.WriteRune(flag)
+ }
+ }
+
+ if width, ok := f.fs.Width(); ok {
+ buf.WriteString(strconv.Itoa(width))
+ }
+
+ if precision, ok := f.fs.Precision(); ok {
+ buf.Write(precisionBytes)
+ buf.WriteString(strconv.Itoa(precision))
+ }
+
+ buf.WriteRune(verb)
+
+ format = buf.String()
+ return format
+}
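+
+// As an illustration, a call like fmt.Fprintf(w, "%-8.3x", NewFormatter(v))
+// is rebuilt here as "%-8.3x" (flag, width, and precision intact) before
+// being handed back to the standard fmt package.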
+
+// unpackValue returns values inside of non-nil interfaces when possible and
+// ensures that types for values which have been unpacked from an interface
+// are displayed when the show types flag is also set.
+// This is useful for data types like structs, arrays, slices, and maps which
+// can contain varying types packed inside an interface.
+func (f *formatState) unpackValue(v reflect.Value) reflect.Value {
+ if v.Kind() == reflect.Interface {
+ f.ignoreNextType = false
+ if !v.IsNil() {
+ v = v.Elem()
+ }
+ }
+ return v
+}
+
+// formatPtr handles formatting of pointers by indirecting them as necessary.
+func (f *formatState) formatPtr(v reflect.Value) {
+ // Display nil if top level pointer is nil.
+ showTypes := f.fs.Flag('#')
+ if v.IsNil() && (!showTypes || f.ignoreNextType) {
+ f.fs.Write(nilAngleBytes)
+ return
+ }
+
+ // Remove pointers at or below the current depth from map used to detect
+ // circular refs.
+ for k, depth := range f.pointers {
+ if depth >= f.depth {
+ delete(f.pointers, k)
+ }
+ }
+
+ // Keep list of all dereferenced pointers to possibly show later.
+ pointerChain := make([]uintptr, 0)
+
+	// Figure out how many levels of indirection there are by dereferencing
+ // pointers and unpacking interfaces down the chain while detecting circular
+ // references.
+ nilFound := false
+ cycleFound := false
+ indirects := 0
+ ve := v
+ for ve.Kind() == reflect.Ptr {
+ if ve.IsNil() {
+ nilFound = true
+ break
+ }
+ indirects++
+ addr := ve.Pointer()
+ pointerChain = append(pointerChain, addr)
+ if pd, ok := f.pointers[addr]; ok && pd < f.depth {
+ cycleFound = true
+ indirects--
+ break
+ }
+ f.pointers[addr] = f.depth
+
+ ve = ve.Elem()
+ if ve.Kind() == reflect.Interface {
+ if ve.IsNil() {
+ nilFound = true
+ break
+ }
+ ve = ve.Elem()
+ }
+ }
+
+ // Display type or indirection level depending on flags.
+ if showTypes && !f.ignoreNextType {
+ f.fs.Write(openParenBytes)
+ f.fs.Write(bytes.Repeat(asteriskBytes, indirects))
+ f.fs.Write([]byte(ve.Type().String()))
+ f.fs.Write(closeParenBytes)
+ } else {
+ if nilFound || cycleFound {
+ indirects += strings.Count(ve.Type().String(), "*")
+ }
+ f.fs.Write(openAngleBytes)
+ f.fs.Write([]byte(strings.Repeat("*", indirects)))
+ f.fs.Write(closeAngleBytes)
+ }
+
+ // Display pointer information depending on flags.
+ if f.fs.Flag('+') && (len(pointerChain) > 0) {
+ f.fs.Write(openParenBytes)
+ for i, addr := range pointerChain {
+ if i > 0 {
+ f.fs.Write(pointerChainBytes)
+ }
+ printHexPtr(f.fs, addr)
+ }
+ f.fs.Write(closeParenBytes)
+ }
+
+ // Display dereferenced value.
+ switch {
+ case nilFound:
+ f.fs.Write(nilAngleBytes)
+
+ case cycleFound:
+ f.fs.Write(circularShortBytes)
+
+ default:
+ f.ignoreNextType = true
+ f.format(ve)
+ }
+}
+
+// format is the main workhorse for providing the Formatter interface. It
+// uses the passed reflect value to figure out what kind of object we are
+// dealing with and formats it appropriately. It is a recursive function,
+// however circular data structures are detected and handled properly.
+func (f *formatState) format(v reflect.Value) {
+ // Handle invalid reflect values immediately.
+ kind := v.Kind()
+ if kind == reflect.Invalid {
+ f.fs.Write(invalidAngleBytes)
+ return
+ }
+
+ // Handle pointers specially.
+ if kind == reflect.Ptr {
+ f.formatPtr(v)
+ return
+ }
+
+ // Print type information unless already handled elsewhere.
+ if !f.ignoreNextType && f.fs.Flag('#') {
+ f.fs.Write(openParenBytes)
+ f.fs.Write([]byte(v.Type().String()))
+ f.fs.Write(closeParenBytes)
+ }
+ f.ignoreNextType = false
+
+ // Call Stringer/error interfaces if they exist and the handle methods
+ // flag is enabled.
+ if !f.cs.DisableMethods {
+ if (kind != reflect.Invalid) && (kind != reflect.Interface) {
+ if handled := handleMethods(f.cs, f.fs, v); handled {
+ return
+ }
+ }
+ }
+
+ switch kind {
+ case reflect.Invalid:
+ // Do nothing. We should never get here since invalid has already
+ // been handled above.
+
+ case reflect.Bool:
+ printBool(f.fs, v.Bool())
+
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ printInt(f.fs, v.Int(), 10)
+
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
+ printUint(f.fs, v.Uint(), 10)
+
+ case reflect.Float32:
+ printFloat(f.fs, v.Float(), 32)
+
+ case reflect.Float64:
+ printFloat(f.fs, v.Float(), 64)
+
+ case reflect.Complex64:
+ printComplex(f.fs, v.Complex(), 32)
+
+ case reflect.Complex128:
+ printComplex(f.fs, v.Complex(), 64)
+
+ case reflect.Slice:
+ if v.IsNil() {
+ f.fs.Write(nilAngleBytes)
+ break
+ }
+ fallthrough
+
+ case reflect.Array:
+ f.fs.Write(openBracketBytes)
+ f.depth++
+ if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) {
+ f.fs.Write(maxShortBytes)
+ } else {
+ numEntries := v.Len()
+ for i := 0; i < numEntries; i++ {
+ if i > 0 {
+ f.fs.Write(spaceBytes)
+ }
+ f.ignoreNextType = true
+ f.format(f.unpackValue(v.Index(i)))
+ }
+ }
+ f.depth--
+ f.fs.Write(closeBracketBytes)
+
+ case reflect.String:
+ f.fs.Write([]byte(v.String()))
+
+ case reflect.Interface:
+ // The only time we should get here is for nil interfaces due to
+ // unpackValue calls.
+ if v.IsNil() {
+ f.fs.Write(nilAngleBytes)
+ }
+
+ case reflect.Ptr:
+ // Do nothing. We should never get here since pointers have already
+ // been handled above.
+
+ case reflect.Map:
+ // nil maps should be indicated as different than empty maps
+ if v.IsNil() {
+ f.fs.Write(nilAngleBytes)
+ break
+ }
+
+ f.fs.Write(openMapBytes)
+ f.depth++
+ if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) {
+ f.fs.Write(maxShortBytes)
+ } else {
+ keys := v.MapKeys()
+ if f.cs.SortKeys {
+ sortValues(keys, f.cs)
+ }
+ for i, key := range keys {
+ if i > 0 {
+ f.fs.Write(spaceBytes)
+ }
+ f.ignoreNextType = true
+ f.format(f.unpackValue(key))
+ f.fs.Write(colonBytes)
+ f.ignoreNextType = true
+ f.format(f.unpackValue(v.MapIndex(key)))
+ }
+ }
+ f.depth--
+ f.fs.Write(closeMapBytes)
+
+ case reflect.Struct:
+ numFields := v.NumField()
+ f.fs.Write(openBraceBytes)
+ f.depth++
+ if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) {
+ f.fs.Write(maxShortBytes)
+ } else {
+ vt := v.Type()
+ for i := 0; i < numFields; i++ {
+ if i > 0 {
+ f.fs.Write(spaceBytes)
+ }
+ vtf := vt.Field(i)
+ if f.fs.Flag('+') || f.fs.Flag('#') {
+ f.fs.Write([]byte(vtf.Name))
+ f.fs.Write(colonBytes)
+ }
+ f.format(f.unpackValue(v.Field(i)))
+ }
+ }
+ f.depth--
+ f.fs.Write(closeBraceBytes)
+
+ case reflect.Uintptr:
+ printHexPtr(f.fs, uintptr(v.Uint()))
+
+ case reflect.UnsafePointer, reflect.Chan, reflect.Func:
+ printHexPtr(f.fs, v.Pointer())
+
+ // There were not any other types at the time this code was written, but
+ // fall back to letting the default fmt package handle it if any get added.
+ default:
+ format := f.buildDefaultFormat()
+ if v.CanInterface() {
+ fmt.Fprintf(f.fs, format, v.Interface())
+ } else {
+ fmt.Fprintf(f.fs, format, v.String())
+ }
+ }
+}
+
+// Format satisfies the fmt.Formatter interface. See NewFormatter for usage
+// details.
+func (f *formatState) Format(fs fmt.State, verb rune) {
+ f.fs = fs
+
+ // Use standard formatting for verbs that are not v.
+ if verb != 'v' {
+ format := f.constructOrigFormat(verb)
+ fmt.Fprintf(fs, format, f.value)
+ return
+ }
+
+ if f.value == nil {
+ if fs.Flag('#') {
+ fs.Write(interfaceBytes)
+ }
+ fs.Write(nilAngleBytes)
+ return
+ }
+
+ f.format(reflect.ValueOf(f.value))
+}
+
+// newFormatter is a helper function to consolidate the logic from the various
+// public methods which take varying config states.
+func newFormatter(cs *ConfigState, v interface{}) fmt.Formatter {
+ fs := &formatState{value: v, cs: cs}
+ fs.pointers = make(map[uintptr]int)
+ return fs
+}
+
+/*
+NewFormatter returns a custom formatter that satisfies the fmt.Formatter
+interface. As a result, it integrates cleanly with standard fmt package
+printing functions. The formatter is useful for inline printing of smaller data
+types similar to the standard %v format specifier.
+
+The custom formatter only responds to the %v (most compact), %+v (adds pointer
+addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb
+combinations. Any other verbs such as %x and %q will be sent to the
+standard fmt package for formatting. In addition, the custom formatter ignores
+the width and precision arguments (however they will still work on the format
+specifiers not handled by the custom formatter).
+
+Typically this function shouldn't be called directly. It is much easier to make
+use of the custom formatter by calling one of the convenience functions such as
+Printf, Println, or Fprintf.
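+
+A hedged one-line sketch (myVar is a placeholder):
+
+	fmt.Printf("%#+v\n", spew.NewFormatter(myVar))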
+*/
+func NewFormatter(v interface{}) fmt.Formatter {
+ return newFormatter(&Config, v)
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/spew.go b/vendor/github.com/davecgh/go-spew/spew/spew.go
new file mode 100644
index 0000000..32c0e33
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/spew.go
@@ -0,0 +1,148 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew
+
+import (
+ "fmt"
+ "io"
+)
+
+// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were
+// passed with a default Formatter interface returned by NewFormatter. It
+// returns the formatted string as a value that satisfies error. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Errorf(format, spew.NewFormatter(a), spew.NewFormatter(b))
+func Errorf(format string, a ...interface{}) (err error) {
+ return fmt.Errorf(format, convertArgs(a)...)
+}
+
+// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were
+// passed with a default Formatter interface returned by NewFormatter. It
+// returns the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Fprint(w, spew.NewFormatter(a), spew.NewFormatter(b))
+func Fprint(w io.Writer, a ...interface{}) (n int, err error) {
+ return fmt.Fprint(w, convertArgs(a)...)
+}
+
+// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were
+// passed with a default Formatter interface returned by NewFormatter. It
+// returns the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Fprintf(w, format, spew.NewFormatter(a), spew.NewFormatter(b))
+func Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
+ return fmt.Fprintf(w, format, convertArgs(a)...)
+}
+
+// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it
+// were passed with a default Formatter interface returned by NewFormatter. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Fprintln(w, spew.NewFormatter(a), spew.NewFormatter(b))
+func Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
+ return fmt.Fprintln(w, convertArgs(a)...)
+}
+
+// Print is a wrapper for fmt.Print that treats each argument as if it were
+// passed with a default Formatter interface returned by NewFormatter. It
+// returns the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Print(spew.NewFormatter(a), spew.NewFormatter(b))
+func Print(a ...interface{}) (n int, err error) {
+ return fmt.Print(convertArgs(a)...)
+}
+
+// Printf is a wrapper for fmt.Printf that treats each argument as if it were
+// passed with a default Formatter interface returned by NewFormatter. It
+// returns the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Printf(format, spew.NewFormatter(a), spew.NewFormatter(b))
+func Printf(format string, a ...interface{}) (n int, err error) {
+ return fmt.Printf(format, convertArgs(a)...)
+}
+
+// Println is a wrapper for fmt.Println that treats each argument as if it were
+// passed with a default Formatter interface returned by NewFormatter. It
+// returns the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Println(spew.NewFormatter(a), spew.NewFormatter(b))
+func Println(a ...interface{}) (n int, err error) {
+ return fmt.Println(convertArgs(a)...)
+}
+
+// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were
+// passed with a default Formatter interface returned by NewFormatter. It
+// returns the resulting string. See NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Sprint(spew.NewFormatter(a), spew.NewFormatter(b))
+func Sprint(a ...interface{}) string {
+ return fmt.Sprint(convertArgs(a)...)
+}
+
+// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were
+// passed with a default Formatter interface returned by NewFormatter. It
+// returns the resulting string. See NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Sprintf(format, spew.NewFormatter(a), spew.NewFormatter(b))
+func Sprintf(format string, a ...interface{}) string {
+ return fmt.Sprintf(format, convertArgs(a)...)
+}
+
+// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it
+// were passed with a default Formatter interface returned by NewFormatter. It
+// returns the resulting string. See NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Sprintln(spew.NewFormatter(a), spew.NewFormatter(b))
+func Sprintln(a ...interface{}) string {
+ return fmt.Sprintln(convertArgs(a)...)
+}
+
+// convertArgs accepts a slice of arguments and returns a slice of the same
+// length with each argument converted to a default spew Formatter interface.
+func convertArgs(args []interface{}) (formatters []interface{}) {
+ formatters = make([]interface{}, len(args))
+ for index, arg := range args {
+ formatters[index] = NewFormatter(arg)
+ }
+ return formatters
+}
diff --git a/vendor/github.com/fsnotify/fsnotify/.cirrus.yml b/vendor/github.com/fsnotify/fsnotify/.cirrus.yml
new file mode 100644
index 0000000..ffc7b99
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/.cirrus.yml
@@ -0,0 +1,13 @@
+freebsd_task:
+ name: 'FreeBSD'
+ freebsd_instance:
+ image_family: freebsd-13-2
+ install_script:
+ - pkg update -f
+ - pkg install -y go
+ test_script:
+ # run tests as user "cirrus" instead of root
+ - pw useradd cirrus -m
+ - chown -R cirrus:cirrus .
+ - FSNOTIFY_BUFFER=4096 sudo --preserve-env=FSNOTIFY_BUFFER -u cirrus go test -parallel 1 -race ./...
+ - sudo --preserve-env=FSNOTIFY_BUFFER -u cirrus go test -parallel 1 -race ./...
diff --git a/vendor/github.com/fsnotify/fsnotify/.editorconfig b/vendor/github.com/fsnotify/fsnotify/.editorconfig
new file mode 100644
index 0000000..fad8958
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/.editorconfig
@@ -0,0 +1,12 @@
+root = true
+
+[*.go]
+indent_style = tab
+indent_size = 4
+insert_final_newline = true
+
+[*.{yml,yaml}]
+indent_style = space
+indent_size = 2
+insert_final_newline = true
+trim_trailing_whitespace = true
diff --git a/vendor/github.com/fsnotify/fsnotify/.gitattributes b/vendor/github.com/fsnotify/fsnotify/.gitattributes
new file mode 100644
index 0000000..32f1001
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/.gitattributes
@@ -0,0 +1 @@
+go.sum linguist-generated
diff --git a/vendor/github.com/fsnotify/fsnotify/.gitignore b/vendor/github.com/fsnotify/fsnotify/.gitignore
new file mode 100644
index 0000000..391cc07
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/.gitignore
@@ -0,0 +1,7 @@
+# go test -c output
+*.test
+*.test.exe
+
+# Output of go build ./cmd/fsnotify
+/fsnotify
+/fsnotify.exe
diff --git a/vendor/github.com/fsnotify/fsnotify/.mailmap b/vendor/github.com/fsnotify/fsnotify/.mailmap
new file mode 100644
index 0000000..a04f290
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/.mailmap
@@ -0,0 +1,2 @@
+Chris Howey
+Nathan Youngman <4566+nathany@users.noreply.github.com>
diff --git a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md
new file mode 100644
index 0000000..e0e5757
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md
@@ -0,0 +1,541 @@
+# Changelog
+
+Unreleased
+----------
+Nothing yet.
+
+1.7.0 - 2023-10-22
+------------------
+This version of fsnotify needs Go 1.17.
+
+### Additions
+
+- illumos: add FEN backend to support illumos and Solaris. ([#371])
+
+- all: add `NewBufferedWatcher()` to use a buffered channel, which can be useful
+ in cases where you can't control the kernel buffer and receive a large number
+ of events in bursts. ([#550], [#572])
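+
+  A hedged sketch (error handling elided):
+
+      w, _ := fsnotify.NewBufferedWatcher(4096) // Events channel holds 4096 events
+      defer w.Close()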
+
+- all: add `AddWith()`, which is identical to `Add()` but allows passing
+ options. ([#521])
+
+- windows: allow setting the ReadDirectoryChangesW() buffer size with
+ `fsnotify.WithBufferSize()`; the default of 64K is the highest value that
+ works on all platforms and is enough for most purposes, but in some cases a
+  higher buffer size is needed. ([#521])
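+
+  A hedged sketch pairing this with `AddWith()` (w is an existing watcher;
+  the path is illustrative):
+
+      err := w.AddWith(`C:\watched`, fsnotify.WithBufferSize(128*1024))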
+
+### Changes and fixes
+
+- inotify: remove watcher if a watched path is renamed ([#518])
+
+  After a rename the reported name wasn't updated, or was even an empty string.
+ Inotify doesn't provide any good facilities to update it, so just remove the
+ watcher. This is already how it worked on kqueue and FEN.
+
+  On Windows this already worked correctly, and it continues to do so.
+
+- windows: don't listen for file attribute changes ([#520])
+
+ File attribute changes are sent as `FILE_ACTION_MODIFIED` by the Windows API,
+ with no way to see if they're a file write or attribute change, so would show
+ up as a fsnotify.Write event. This is never useful, and could result in many
+ spurious Write events.
+
+- windows: return `ErrEventOverflow` if the buffer is full ([#525])
+
+ Before it would merely return "short read", making it hard to detect this
+ error.
+
+- kqueue: make sure events for all files are delivered properly when removing a
+ watched directory ([#526])
+
+ Previously they would get sent with `""` (empty string) or `"."` as the path
+ name.
+
+- kqueue: don't emit spurious Create events for symbolic links ([#524])
+
+ The link would get resolved but kqueue would "forget" it already saw the link
+  itself, resulting in a Create event for every Write event on the directory.
+
+- all: return `ErrClosed` on `Add()` when the watcher is closed ([#516])
+
+- other: add `Watcher.Errors` and `Watcher.Events` to the no-op `Watcher` in
+ `backend_other.go`, making it easier to use on unsupported platforms such as
+ WASM, AIX, etc. ([#528])
+
+- other: use the `backend_other.go` no-op if the `appengine` build tag is set;
+ Google AppEngine forbids usage of the unsafe package so the inotify backend
+ won't compile there.
+
+[#371]: https://github.com/fsnotify/fsnotify/pull/371
+[#516]: https://github.com/fsnotify/fsnotify/pull/516
+[#518]: https://github.com/fsnotify/fsnotify/pull/518
+[#520]: https://github.com/fsnotify/fsnotify/pull/520
+[#521]: https://github.com/fsnotify/fsnotify/pull/521
+[#524]: https://github.com/fsnotify/fsnotify/pull/524
+[#525]: https://github.com/fsnotify/fsnotify/pull/525
+[#526]: https://github.com/fsnotify/fsnotify/pull/526
+[#528]: https://github.com/fsnotify/fsnotify/pull/528
+[#537]: https://github.com/fsnotify/fsnotify/pull/537
+[#550]: https://github.com/fsnotify/fsnotify/pull/550
+[#572]: https://github.com/fsnotify/fsnotify/pull/572
+
+1.6.0 - 2022-10-13
+------------------
+This version of fsnotify needs Go 1.16 (this was already the case since 1.5.1,
+but not documented). It also increases the minimum Linux version to 2.6.32.
+
+### Additions
+
+- all: add `Event.Has()` and `Op.Has()` ([#477])
+
+ This makes checking events a lot easier; for example:
+
+ if event.Op&Write == Write && !(event.Op&Remove == Remove) {
+ }
+
+ Becomes:
+
+ if event.Has(Write) && !event.Has(Remove) {
+ }
+
+- all: add cmd/fsnotify ([#463])
+
+ A command-line utility for testing and some examples.
+
+### Changes and fixes
+
+- inotify: don't ignore events for files that don't exist ([#260], [#470])
+
+ Previously the inotify watcher would call `os.Lstat()` to check if a file
+ still exists before emitting events.
+
+ This was inconsistent with other platforms and resulted in inconsistent event
+ reporting (e.g. when a file is quickly removed and re-created), and generally
+ a source of confusion. It was added in 2013 to fix a memory leak that no
+ longer exists.
+
+- all: return `ErrNonExistentWatch` when `Remove()` is called on a path that's
+ not watched ([#460])
+
+- inotify: replace epoll() with non-blocking inotify ([#434])
+
+ Non-blocking inotify was not generally available at the time this library was
+ written in 2014, but now it is. As a result, the minimum Linux version is
+ bumped from 2.6.27 to 2.6.32. This hugely simplifies the code and is faster.
+
+- kqueue: don't check for events every 100ms ([#480])
+
+ The watcher would wake up every 100ms, even when there was nothing to do. Now
+ it waits until there is something to do.
+
+- macos: retry opening files on EINTR ([#475])
+
+- kqueue: skip unreadable files ([#479])
+
+ kqueue requires a file descriptor for every file in a directory; this would
+ fail if a file was unreadable by the current user. Now these files are simply
+ skipped.
+
+- windows: fix renaming a watched directory if the parent is also watched ([#370])
+
+- windows: increase buffer size from 4K to 64K ([#485])
+
+- windows: close file handle on Remove() ([#288])
+
+- kqueue: put pathname in the error if watching a file fails ([#471])
+
+- inotify, windows: calling Close() more than once could race ([#465])
+
+- kqueue: improve Close() performance ([#233])
+
+- all: various documentation additions and clarifications.
+
+[#233]: https://github.com/fsnotify/fsnotify/pull/233
+[#260]: https://github.com/fsnotify/fsnotify/pull/260
+[#288]: https://github.com/fsnotify/fsnotify/pull/288
+[#370]: https://github.com/fsnotify/fsnotify/pull/370
+[#434]: https://github.com/fsnotify/fsnotify/pull/434
+[#460]: https://github.com/fsnotify/fsnotify/pull/460
+[#463]: https://github.com/fsnotify/fsnotify/pull/463
+[#465]: https://github.com/fsnotify/fsnotify/pull/465
+[#470]: https://github.com/fsnotify/fsnotify/pull/470
+[#471]: https://github.com/fsnotify/fsnotify/pull/471
+[#475]: https://github.com/fsnotify/fsnotify/pull/475
+[#477]: https://github.com/fsnotify/fsnotify/pull/477
+[#479]: https://github.com/fsnotify/fsnotify/pull/479
+[#480]: https://github.com/fsnotify/fsnotify/pull/480
+[#485]: https://github.com/fsnotify/fsnotify/pull/485
+
+## [1.5.4] - 2022-04-25
+
+* Windows: add missing defer to `Watcher.WatchList` [#447](https://github.com/fsnotify/fsnotify/pull/447)
+* go.mod: use latest x/sys [#444](https://github.com/fsnotify/fsnotify/pull/444)
+* Fix compilation for OpenBSD [#443](https://github.com/fsnotify/fsnotify/pull/443)
+
+## [1.5.3] - 2022-04-22
+
+* This version is retracted; an incorrect branch was accidentally published [#445](https://github.com/fsnotify/fsnotify/issues/445)
+
+## [1.5.2] - 2022-04-21
+
+* Add a feature to return the directories and files that are being monitored [#374](https://github.com/fsnotify/fsnotify/pull/374)
+* Fix potential crash on windows if `raw.FileNameLength` exceeds `syscall.MAX_PATH` [#361](https://github.com/fsnotify/fsnotify/pull/361)
+* Allow build on unsupported GOOS [#424](https://github.com/fsnotify/fsnotify/pull/424)
+* Don't set `poller.fd` twice in `newFdPoller` [#406](https://github.com/fsnotify/fsnotify/pull/406)
+* fix go vet warnings: call to `(*T).Fatalf` from a non-test goroutine [#416](https://github.com/fsnotify/fsnotify/pull/416)
+
+## [1.5.1] - 2021-08-24
+
+* Revert Add AddRaw to not follow symlinks [#394](https://github.com/fsnotify/fsnotify/pull/394)
+
+## [1.5.0] - 2021-08-20
+
+* Go: Increase minimum required version to Go 1.12 [#381](https://github.com/fsnotify/fsnotify/pull/381)
+* Feature: Add AddRaw method which does not follow symlinks when adding a watch [#298](https://github.com/fsnotify/fsnotify/pull/298)
+* Windows: Follow symlinks by default like on all other systems [#289](https://github.com/fsnotify/fsnotify/pull/289)
+* CI: Use GitHub Actions for CI and cover go 1.12-1.17
+ [#378](https://github.com/fsnotify/fsnotify/pull/378)
+ [#381](https://github.com/fsnotify/fsnotify/pull/381)
+ [#385](https://github.com/fsnotify/fsnotify/pull/385)
+* Go 1.14+: Fix unsafe pointer conversion [#325](https://github.com/fsnotify/fsnotify/pull/325)
+
+## [1.4.9] - 2020-03-11
+
+* Move example usage to the readme #329. This may resolve #328.
+
+## [1.4.8] - 2020-03-10
+
+* CI: test more go versions (@nathany 1d13583d846ea9d66dcabbfefbfb9d8e6fb05216)
+* Tests: Queued inotify events could have been read by the test before max_queued_events was hit (@matthias-stone #265)
+* Tests: t.Fatalf -> t.Errorf in go routines (@gdey #266)
+* CI: Less verbosity (@nathany #267)
+* Tests: Darwin: Exchangedata is deprecated on 10.13 (@nathany #267)
+* Tests: Check if channels are closed in the example (@alexeykazakov #244)
+* CI: Only run golint on latest version of go and fix issues (@cpuguy83 #284)
+* CI: Add windows to travis matrix (@cpuguy83 #284)
+* Docs: Remove appveyor badge (@nathany 11844c0959f6fff69ba325d097fce35bd85a8e93)
+* Linux: create epoll and pipe fds with close-on-exec (@JohannesEbke #219)
+* Linux: open files with close-on-exec (@linxiulei #273)
+* Docs: Plan to support fanotify (@nathany ab058b44498e8b7566a799372a39d150d9ea0119)
+* Project: Add go.mod (@nathany #309)
+* Project: Revise editor config (@nathany #309)
+* Project: Update copyright for 2019 (@nathany #309)
+* CI: Drop go1.8 from CI matrix (@nathany #309)
+* Docs: Update the FAQ section for supportability with NFS & FUSE filesystems (@Pratik32 4bf2d1fec78374803a39307bfb8d340688f4f28e)
+
+## [1.4.7] - 2018-01-09
+
+* BSD/macOS: Fix possible deadlock on closing the watcher on kqueue (thanks @nhooyr and @glycerine)
+* Tests: Fix missing verb on format string (thanks @rchiossi)
+* Linux: Fix deadlock in Remove (thanks @aarondl)
+* Linux: Watch.Add improvements (avoid race, fix consistency, reduce garbage) (thanks @twpayne)
+* Docs: Moved FAQ into the README (thanks @vahe)
+* Linux: Properly handle inotify's IN_Q_OVERFLOW event (thanks @zeldovich)
+* Docs: replace references to OS X with macOS
+
+## [1.4.2] - 2016-10-10
+
+* Linux: use InotifyInit1 with IN_CLOEXEC to stop leaking a file descriptor to a child process when using fork/exec [#178](https://github.com/fsnotify/fsnotify/pull/178) (thanks @pattyshack)
+
+## [1.4.1] - 2016-10-04
+
+* Fix flaky inotify stress test on Linux [#177](https://github.com/fsnotify/fsnotify/pull/177) (thanks @pattyshack)
+
+## [1.4.0] - 2016-10-01
+
+* add a String() method to Event.Op [#165](https://github.com/fsnotify/fsnotify/pull/165) (thanks @oozie)
+
+## [1.3.1] - 2016-06-28
+
+* Windows: fix for double backslash when watching the root of a drive [#151](https://github.com/fsnotify/fsnotify/issues/151) (thanks @brunoqc)
+
+## [1.3.0] - 2016-04-19
+
+* Support linux/arm64 by [patching](https://go-review.googlesource.com/#/c/21971/) x/sys/unix and switching to it from syscall (thanks @suihkulokki) [#135](https://github.com/fsnotify/fsnotify/pull/135)
+
+## [1.2.10] - 2016-03-02
+
+* Fix golint errors in windows.go [#121](https://github.com/fsnotify/fsnotify/pull/121) (thanks @tiffanyfj)
+
+## [1.2.9] - 2016-01-13
+
+* kqueue: Fix logic for CREATE after REMOVE [#111](https://github.com/fsnotify/fsnotify/pull/111) (thanks @bep)
+
+## [1.2.8] - 2015-12-17
+
+* kqueue: fix race condition in Close [#105](https://github.com/fsnotify/fsnotify/pull/105) (thanks @djui for reporting the issue and @ppknap for writing a failing test)
+* inotify: fix race in test
+* enable race detection for continuous integration (Linux, Mac, Windows)
+
+## [1.2.5] - 2015-10-17
+
+* inotify: use epoll_create1 for arm64 support (requires Linux 2.6.27 or later) [#100](https://github.com/fsnotify/fsnotify/pull/100) (thanks @suihkulokki)
+* inotify: fix path leaks [#73](https://github.com/fsnotify/fsnotify/pull/73) (thanks @chamaken)
+* kqueue: watch for rename events on subdirectories [#83](https://github.com/fsnotify/fsnotify/pull/83) (thanks @guotie)
+* kqueue: avoid infinite loops from symlinks cycles [#101](https://github.com/fsnotify/fsnotify/pull/101) (thanks @illicitonion)
+
+## [1.2.1] - 2015-10-14
+
+* kqueue: don't watch named pipes [#98](https://github.com/fsnotify/fsnotify/pull/98) (thanks @evanphx)
+
+## [1.2.0] - 2015-02-08
+
+* inotify: use epoll to wake up readEvents [#66](https://github.com/fsnotify/fsnotify/pull/66) (thanks @PieterD)
+* inotify: closing watcher should now always shut down goroutine [#63](https://github.com/fsnotify/fsnotify/pull/63) (thanks @PieterD)
+* kqueue: close kqueue after removing watches, fixes [#59](https://github.com/fsnotify/fsnotify/issues/59)
+
+## [1.1.1] - 2015-02-05
+
+* inotify: Retry read on EINTR [#61](https://github.com/fsnotify/fsnotify/issues/61) (thanks @PieterD)
+
+## [1.1.0] - 2014-12-12
+
+* kqueue: rework internals [#43](https://github.com/fsnotify/fsnotify/pull/43)
+ * add low-level functions
+ * only need to store flags on directories
+ * less mutexes [#13](https://github.com/fsnotify/fsnotify/issues/13)
+ * done can be an unbuffered channel
+ * remove calls to os.NewSyscallError
+* More efficient string concatenation for Event.String() [#52](https://github.com/fsnotify/fsnotify/pull/52) (thanks @mdlayher)
+* kqueue: fix regression in rework causing subdirectories to not be watched [#48](https://github.com/fsnotify/fsnotify/issues/48)
+* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51)
+
+## [1.0.4] - 2014-09-07
+
+* kqueue: add dragonfly to the build tags.
+* Rename source code files, rearrange code so exported APIs are at the top.
+* Add done channel to example code. [#37](https://github.com/fsnotify/fsnotify/pull/37) (thanks @chenyukang)
+
+## [1.0.3] - 2014-08-19
+
+* [Fix] Windows MOVED_TO now translates to Create like on BSD and Linux. [#36](https://github.com/fsnotify/fsnotify/issues/36)
+
+## [1.0.2] - 2014-08-17
+
+* [Fix] Missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso)
+* [Fix] Make ./path and path equivalent. (thanks @zhsso)
+
+## [1.0.0] - 2014-08-15
+
+* [API] Remove AddWatch on Windows, use Add.
+* Improve documentation for exported identifiers. [#30](https://github.com/fsnotify/fsnotify/issues/30)
+* Minor updates based on feedback from golint.
+
+## dev / 2014-07-09
+
+* Moved to [github.com/fsnotify/fsnotify](https://github.com/fsnotify/fsnotify).
+* Use os.NewSyscallError instead of returning errno (thanks @hariharan-uno)
+
+## dev / 2014-07-04
+
+* kqueue: fix incorrect mutex used in Close()
+* Update example to demonstrate usage of Op.
+
+## dev / 2014-06-28
+
+* [API] Don't set the Write Op for attribute notifications [#4](https://github.com/fsnotify/fsnotify/issues/4)
+* Fix for String() method on Event (thanks Alex Brainman)
+* Don't build on Plan 9 or Solaris (thanks @4ad)
+
+## dev / 2014-06-21
+
+* Events channel of type Event rather than *Event.
+* [internal] use syscall constants directly for inotify and kqueue.
+* [internal] kqueue: rename events to kevents and fileEvent to event.
+
+## dev / 2014-06-19
+
+* Go 1.3+ required on Windows (uses syscall.ERROR_MORE_DATA internally).
+* [internal] remove cookie from Event struct (unused).
+* [internal] Event struct has the same definition across every OS.
+* [internal] remove internal watch and removeWatch methods.
+
+## dev / 2014-06-12
+
+* [API] Renamed Watch() to Add() and RemoveWatch() to Remove().
+* [API] Pluralized channel names: Events and Errors.
+* [API] Renamed FileEvent struct to Event.
+* [API] Op constants replace methods like IsCreate().
+
+## dev / 2014-06-12
+
+* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98)
+
+## dev / 2014-05-23
+
+* [API] Remove current implementation of WatchFlags.
+ * current implementation doesn't take advantage of OS for efficiency
+ * provides little benefit over filtering events as they are received, but has extra bookkeeping and mutexes
+ * no tests for the current implementation
+ * not fully implemented on Windows [#93](https://github.com/howeyc/fsnotify/issues/93#issuecomment-39285195)
+
+## [0.9.3] - 2014-12-31
+
+* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51)
+
+## [0.9.2] - 2014-08-17
+
+* [Backport] Fix missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso)
+
+## [0.9.1] - 2014-06-12
+
+* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98)
+
+## [0.9.0] - 2014-01-17
+
+* IsAttrib() for events that only concern a file's metadata [#79][] (thanks @abustany)
+* [Fix] kqueue: fix deadlock [#77][] (thanks @cespare)
+* [NOTICE] Development has moved to `code.google.com/p/go.exp/fsnotify` in preparation for inclusion in the Go standard library.
+
+## [0.8.12] - 2013-11-13
+
+* [API] Remove FD_SET and friends from Linux adapter
+
+## [0.8.11] - 2013-11-02
+
+* [Doc] Add Changelog [#72][] (thanks @nathany)
+* [Doc] Spotlight and double modify events on macOS [#62][] (reported by @paulhammond)
+
+## [0.8.10] - 2013-10-19
+
+* [Fix] kqueue: remove file watches when parent directory is removed [#71][] (reported by @mdwhatcott)
+* [Fix] kqueue: race between Close and readEvents [#70][] (reported by @bernerdschaefer)
+* [Doc] specify OS-specific limits in README (thanks @debrando)
+
+## [0.8.9] - 2013-09-08
+
+* [Doc] Contributing (thanks @nathany)
+* [Doc] update package path in example code [#63][] (thanks @paulhammond)
+* [Doc] GoCI badge in README (Linux only) [#60][]
+* [Doc] Cross-platform testing with Vagrant [#59][] (thanks @nathany)
+
+## [0.8.8] - 2013-06-17
+
+* [Fix] Windows: handle `ERROR_MORE_DATA` on Windows [#49][] (thanks @jbowtie)
+
+## [0.8.7] - 2013-06-03
+
+* [API] Make syscall flags internal
+* [Fix] inotify: ignore event changes
+* [Fix] race in symlink test [#45][] (reported by @srid)
+* [Fix] tests on Windows
+* lower case error messages
+
+## [0.8.6] - 2013-05-23
+
+* kqueue: Use EVT_ONLY flag on Darwin
+* [Doc] Update README with full example
+
+## [0.8.5] - 2013-05-09
+
+* [Fix] inotify: allow monitoring of "broken" symlinks (thanks @tsg)
+
+## [0.8.4] - 2013-04-07
+
+* [Fix] kqueue: watch all file events [#40][] (thanks @ChrisBuchholz)
+
+## [0.8.3] - 2013-03-13
+
+* [Fix] inotify/kqueue memory leak [#36][] (reported by @nbkolchin)
+* [Fix] kqueue: use fsnFlags for watching a directory [#33][] (reported by @nbkolchin)
+
+## [0.8.2] - 2013-02-07
+
+* [Doc] add Authors
+* [Fix] fix data races for map access [#29][] (thanks @fsouza)
+
+## [0.8.1] - 2013-01-09
+
+* [Fix] Windows path separators
+* [Doc] BSD License
+
+## [0.8.0] - 2012-11-09
+
+* kqueue: directory watching improvements (thanks @vmirage)
+* inotify: add `IN_MOVED_TO` [#25][] (requested by @cpisto)
+* [Fix] kqueue: deleting watched directory [#24][] (reported by @jakerr)
+
+## [0.7.4] - 2012-10-09
+
+* [Fix] inotify: fixes from https://codereview.appspot.com/5418045/ (ugorji)
+* [Fix] kqueue: preserve watch flags when watching for delete [#21][] (reported by @robfig)
+* [Fix] kqueue: watch the directory even if it isn't a new watch (thanks @robfig)
+* [Fix] kqueue: modify after recreation of file
+
+## [0.7.3] - 2012-09-27
+
+* [Fix] kqueue: watch with an existing folder inside the watched folder (thanks @vmirage)
+* [Fix] kqueue: no longer get duplicate CREATE events
+
+## [0.7.2] - 2012-09-01
+
+* kqueue: events for created directories
+
+## [0.7.1] - 2012-07-14
+
+* [Fix] for renaming files
+
+## [0.7.0] - 2012-07-02
+
+* [Feature] FSNotify flags
+* [Fix] inotify: Added file name back to event path
+
+## [0.6.0] - 2012-06-06
+
+* kqueue: watch files after directory created (thanks @tmc)
+
+## [0.5.1] - 2012-05-22
+
+* [Fix] inotify: remove all watches before Close()
+
+## [0.5.0] - 2012-05-03
+
+* [API] kqueue: return errors during watch instead of sending over channel
+* kqueue: match symlink behavior on Linux
+* inotify: add `DELETE_SELF` (requested by @taralx)
+* [Fix] kqueue: handle EINTR (reported by @robfig)
+* [Doc] Godoc example [#1][] (thanks @davecheney)
+
+## [0.4.0] - 2012-03-30
+
+* Go 1 released: build with go tool
+* [Feature] Windows support using winfsnotify
+* Windows does not have attribute change notifications
+* Roll attribute notifications into IsModify
+
+## [0.3.0] - 2012-02-19
+
+* kqueue: add files when watching a directory
+
+## [0.2.0] - 2011-12-30
+
+* update to latest Go weekly code
+
+## [0.1.0] - 2011-10-19
+
+* kqueue: add watch on file creation to match inotify
+* kqueue: create file event
+* inotify: ignore `IN_IGNORED` events
+* event String()
+* linux: common FileEvent functions
+* initial commit
+
+[#79]: https://github.com/howeyc/fsnotify/pull/79
+[#77]: https://github.com/howeyc/fsnotify/pull/77
+[#72]: https://github.com/howeyc/fsnotify/issues/72
+[#71]: https://github.com/howeyc/fsnotify/issues/71
+[#70]: https://github.com/howeyc/fsnotify/issues/70
+[#63]: https://github.com/howeyc/fsnotify/issues/63
+[#62]: https://github.com/howeyc/fsnotify/issues/62
+[#60]: https://github.com/howeyc/fsnotify/issues/60
+[#59]: https://github.com/howeyc/fsnotify/issues/59
+[#49]: https://github.com/howeyc/fsnotify/issues/49
+[#45]: https://github.com/howeyc/fsnotify/issues/45
+[#40]: https://github.com/howeyc/fsnotify/issues/40
+[#36]: https://github.com/howeyc/fsnotify/issues/36
+[#33]: https://github.com/howeyc/fsnotify/issues/33
+[#29]: https://github.com/howeyc/fsnotify/issues/29
+[#25]: https://github.com/howeyc/fsnotify/issues/25
+[#24]: https://github.com/howeyc/fsnotify/issues/24
+[#21]: https://github.com/howeyc/fsnotify/issues/21
diff --git a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md
new file mode 100644
index 0000000..ea37975
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md
@@ -0,0 +1,26 @@
+Thank you for your interest in contributing to fsnotify! We try to review and
+merge PRs in a reasonable timeframe, but please be aware that:
+
+- To avoid "wasted" work, please discuss changes on the issue tracker first. You
+  can just send PRs, but they may end up being rejected for one reason or
+  another.
+
+- fsnotify is a cross-platform library, and changes must work reasonably well on
+ all supported platforms.
+
+- Changes will need to be compatible; old code should still compile, and the
+ runtime behaviour can't change in ways that are likely to lead to problems for
+ users.
+
+Testing
+-------
+Just `go test ./...` runs all the tests; the CI runs this on all supported
+platforms. Testing different platforms locally can be done with something like
+[goon] or [Vagrant], but this isn't super-easy to set up at the moment.
+
+Use the `-short` flag to make the "stress test" run faster.
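+
+For example:
+
+    % go test -short ./...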
+
+
+[goon]: https://github.com/arp242/goon
+[Vagrant]: https://www.vagrantup.com/
diff --git a/vendor/github.com/fsnotify/fsnotify/LICENSE b/vendor/github.com/fsnotify/fsnotify/LICENSE
new file mode 100644
index 0000000..fb03ade
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/LICENSE
@@ -0,0 +1,25 @@
+Copyright © 2012 The Go Authors. All rights reserved.
+Copyright © fsnotify Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright notice, this
+ list of conditions and the following disclaimer in the documentation and/or
+ other materials provided with the distribution.
+* Neither the name of Google Inc. nor the names of its contributors may be used
+ to endorse or promote products derived from this software without specific
+ prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/fsnotify/fsnotify/README.md b/vendor/github.com/fsnotify/fsnotify/README.md
new file mode 100644
index 0000000..e480733
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/README.md
@@ -0,0 +1,184 @@
+fsnotify is a Go library to provide cross-platform filesystem notifications on
+Windows, Linux, macOS, BSD, and illumos.
+
+Go 1.17 or newer is required; the full documentation is at
+https://pkg.go.dev/github.com/fsnotify/fsnotify
+
+---
+
+Platform support:
+
+| Backend | OS | Status |
+| :-------------------- | :--------- | :------------------------------------------------------------------------ |
+| inotify | Linux | Supported |
+| kqueue | BSD, macOS | Supported |
+| ReadDirectoryChangesW | Windows | Supported |
+| FEN | illumos | Supported |
+| fanotify | Linux 5.9+ | [Not yet](https://github.com/fsnotify/fsnotify/issues/114) |
+| AHAFS | AIX | [aix branch]; experimental due to lack of maintainer and test environment |
+| FSEvents | macOS | [Needs support in x/sys/unix][fsevents] |
+| USN Journals | Windows | [Needs support in x/sys/windows][usn] |
+| Polling | *All* | [Not yet](https://github.com/fsnotify/fsnotify/issues/9) |
+
+Linux and illumos should include Android and Solaris, but these are currently
+untested.
+
+[fsevents]: https://github.com/fsnotify/fsnotify/issues/11#issuecomment-1279133120
+[usn]: https://github.com/fsnotify/fsnotify/issues/53#issuecomment-1279829847
+[aix branch]: https://github.com/fsnotify/fsnotify/issues/353#issuecomment-1284590129
+
+Usage
+-----
+A basic example:
+
+```go
+package main
+
+import (
+ "log"
+
+ "github.com/fsnotify/fsnotify"
+)
+
+func main() {
+ // Create new watcher.
+ watcher, err := fsnotify.NewWatcher()
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer watcher.Close()
+
+ // Start listening for events.
+ go func() {
+ for {
+ select {
+ case event, ok := <-watcher.Events:
+ if !ok {
+ return
+ }
+ log.Println("event:", event)
+ if event.Has(fsnotify.Write) {
+ log.Println("modified file:", event.Name)
+ }
+ case err, ok := <-watcher.Errors:
+ if !ok {
+ return
+ }
+ log.Println("error:", err)
+ }
+ }
+ }()
+
+ // Add a path.
+ err = watcher.Add("/tmp")
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Block main goroutine forever.
+ <-make(chan struct{})
+}
+```
+
+Some more examples can be found in [cmd/fsnotify](cmd/fsnotify), which can be
+run with:
+
+ % go run ./cmd/fsnotify
+
+Further detailed documentation can be found in godoc:
+https://pkg.go.dev/github.com/fsnotify/fsnotify
+
+FAQ
+---
+### Will a file still be watched when it's moved to another directory?
+No, not unless you are watching the location it was moved to.
+
+### Are subdirectories watched?
+No, you must add watches for any directory you want to watch (a recursive
+watcher is on the roadmap: [#18]).
+
+[#18]: https://github.com/fsnotify/fsnotify/issues/18
+
+### Do I have to watch the Error and Event channels in a goroutine?
+Yes. You can read both channels in the same goroutine using `select` (you don't
+need a separate goroutine for both channels; see the example).
+
+### Why don't notifications work with NFS, SMB, FUSE, /proc, or /sys?
+fsnotify requires support from the underlying OS to work. The current NFS and
+SMB protocols do not provide network-level support for file notifications, and
+neither do the /proc and /sys virtual filesystems.
+
+This could be fixed with a polling watcher ([#9]), but it's not yet implemented.
+
+[#9]: https://github.com/fsnotify/fsnotify/issues/9
+
+### Why do I get many Chmod events?
+Some programs may generate a lot of attribute changes; for example Spotlight on
+macOS, anti-virus programs, backup applications, and others are known to do
+this. It's typically best to ignore Chmod events: they're often not useful, and
+tend to cause problems.
+
+Spotlight indexing on macOS can result in multiple events (see [#15]). A
+temporary workaround is to add your folder(s) to the *Spotlight Privacy
+settings* until we have a native FSEvents implementation (see [#11]).
+
+[#11]: https://github.com/fsnotify/fsnotify/issues/11
+[#15]: https://github.com/fsnotify/fsnotify/issues/15
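+
+If you want to drop them, a minimal sketch (a fragment of the event loop from
+the usage example above):
+
+```go
+case event, ok := <-watcher.Events:
+	if !ok {
+		return
+	}
+	if event.Has(fsnotify.Chmod) {
+		continue // Skip attribute-only changes.
+	}
+	log.Println("event:", event)
+```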
+
+### Watching a file doesn't work well
+Watching individual files (rather than directories) is generally not recommended
+as many programs (especially editors) update files atomically: they write to a
+temporary file which is then moved to the destination, overwriting the original
+(or some variant thereof). The watcher on the original file is now lost, as the
+original no longer exists.
+
+The upshot of this is that a power failure or crash won't leave a half-written
+file.
+
+Watch the parent directory and use `Event.Name` to filter out files you're not
+interested in. There is an example of this in `cmd/fsnotify/file.go`.
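+
+A minimal sketch of that filtering, assuming the file of interest is
+`/tmp/file.txt` (real code should also read `watcher.Errors`):
+
+```go
+err := watcher.Add("/tmp") // Watch the parent directory, not the file itself.
+if err != nil {
+	log.Fatal(err)
+}
+for event := range watcher.Events {
+	if event.Name != "/tmp/file.txt" {
+		continue // An event for some other file in /tmp; ignore it.
+	}
+	log.Println("event:", event)
+}
+```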
+
+Platform-specific notes
+-----------------------
+### Linux
+When a file is removed a REMOVE event won't be emitted until all file
+descriptors are closed; it will emit a CHMOD instead:
+
+    fp, _ := os.Open("file")
+    os.Remove("file") // CHMOD
+    fp.Close()        // REMOVE
+
+This is the event that inotify sends, so not much can be changed about this.
+
+The `fs.inotify.max_user_watches` sysctl variable specifies the upper limit for
+the number of watches per user, and `fs.inotify.max_user_instances` specifies
+the maximum number of inotify instances per user. Every Watcher you create is an
+"instance", and every path you add is a "watch".
+
+These are also exposed in `/proc` as `/proc/sys/fs/inotify/max_user_watches` and
+`/proc/sys/fs/inotify/max_user_instances`
+
+To increase them you can use `sysctl` or write the value to the /proc file:
+
+ # The default values on Linux 5.18
+ sysctl fs.inotify.max_user_watches=124983
+ sysctl fs.inotify.max_user_instances=128
+
+To make the changes persist on reboot edit `/etc/sysctl.conf` or
+`/usr/lib/sysctl.d/50-default.conf` (details differ per Linux distro; check your
+distro's documentation):
+
+ fs.inotify.max_user_watches=124983
+ fs.inotify.max_user_instances=128
+
+Reaching the limit will result in a "no space left on device" or "too many open
+files" error.
+
+### kqueue (macOS, all BSD systems)
+kqueue requires opening a file descriptor for every file that's being watched;
+so if you're watching a directory with five files then that's six file
+descriptors. You will run into your system's "max open files" limit faster on
+these platforms.
+
+The sysctl variables `kern.maxfiles` and `kern.maxfilesperproc` can be used to
+control the maximum number of open files.
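+
+For example, to inspect the current values (the exact variable names can differ
+per BSD flavour):
+
+    % sysctl kern.maxfiles kern.maxfilesperproc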
diff --git a/vendor/github.com/fsnotify/fsnotify/backend_fen.go b/vendor/github.com/fsnotify/fsnotify/backend_fen.go
new file mode 100644
index 0000000..28497f1
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/backend_fen.go
@@ -0,0 +1,640 @@
+//go:build solaris
+// +build solaris
+
+// Note: the documentation on the Watcher type and methods is generated from
+// mkdoc.zsh
+
+package fsnotify
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "sync"
+
+ "golang.org/x/sys/unix"
+)
+
+// Watcher watches a set of paths, delivering events on a channel.
+//
+// A watcher should not be copied (e.g. pass it by pointer, rather than by
+// value).
+//
+// # Linux notes
+//
+// When a file is removed a Remove event won't be emitted until all file
+// descriptors are closed, and deletes will always emit a Chmod. For example:
+//
+//	fp, _ := os.Open("file")
+//	os.Remove("file")  // Triggers Chmod
+//	fp.Close()         // Triggers Remove
+//
+// This is the event that inotify sends, so not much can be changed about this.
+//
+// The fs.inotify.max_user_watches sysctl variable specifies the upper limit
+// for the number of watches per user, and fs.inotify.max_user_instances
+// specifies the maximum number of inotify instances per user. Every Watcher you
+// create is an "instance", and every path you add is a "watch".
+//
+// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and
+// /proc/sys/fs/inotify/max_user_instances
+//
+// To increase them you can use sysctl or write the value to the /proc file:
+//
+// # Default values on Linux 5.18
+// sysctl fs.inotify.max_user_watches=124983
+// sysctl fs.inotify.max_user_instances=128
+//
+// To make the changes persist on reboot edit /etc/sysctl.conf or
+// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check
+// your distro's documentation):
+//
+// fs.inotify.max_user_watches=124983
+// fs.inotify.max_user_instances=128
+//
+// Reaching the limit will result in a "no space left on device" or "too many open
+// files" error.
+//
+// # kqueue notes (macOS, BSD)
+//
+// kqueue requires opening a file descriptor for every file that's being watched;
+// so if you're watching a directory with five files then that's six file
+// descriptors. You will run into your system's "max open files" limit faster on
+// these platforms.
+//
+// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to
+// control the maximum number of open files, as well as /etc/login.conf on BSD
+// systems.
+//
+// # Windows notes
+//
+// Paths can be added as "C:\path\to\dir", but forward slashes
+// ("C:/path/to/dir") will also work.
+//
+// When a watched directory is removed it will always send an event for the
+// directory itself, but may not send events for all files in that directory.
+// Sometimes it will send events for all files, sometimes it will send no
+// events, and often only for some files.
+//
+// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest
+// value that is guaranteed to work with SMB filesystems. If you have many
+// events in quick succession this may not be enough, and you will have to use
+// [WithBufferSize] to increase the value.
+type Watcher struct {
+ // Events sends the filesystem change events.
+ //
+ // fsnotify can send the following events; a "path" here can refer to a
+ // file, directory, symbolic link, or special file like a FIFO.
+ //
+ // fsnotify.Create A new path was created; this may be followed by one
+ // or more Write events if data also gets written to a
+ // file.
+ //
+ // fsnotify.Remove A path was removed.
+ //
+ // fsnotify.Rename A path was renamed. A rename is always sent with the
+ // old path as Event.Name, and a Create event will be
+ // sent with the new name. Renames are only sent for
+ // paths that are currently watched; e.g. moving an
+ // unmonitored file into a monitored directory will
+ // show up as just a Create. Similarly, renaming a file
+ // to outside a monitored directory will show up as
+ // only a Rename.
+ //
+ // fsnotify.Write A file or named pipe was written to. A Truncate will
+ // also trigger a Write. A single "write action"
+ // initiated by the user may show up as one or multiple
+ // writes, depending on when the system syncs things to
+ // disk. For example when compiling a large Go program
+ // you may get hundreds of Write events, and you may
+ // want to wait until you've stopped receiving them
+ // (see the dedup example in cmd/fsnotify).
+ //
+ // Some systems may send Write event for directories
+ // when the directory content changes.
+ //
+ // fsnotify.Chmod Attributes were changed. On Linux this is also sent
+ // when a file is removed (or more accurately, when a
+ // link to an inode is removed). On kqueue it's sent
+ // when a file is truncated. On Windows it's never
+ // sent.
+ Events chan Event
+
+ // Errors sends any errors.
+ //
+ // ErrEventOverflow is used to indicate there are too many events:
+ //
+ // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl)
+ // - windows: The buffer size is too small; WithBufferSize() can be used to increase it.
+ // - kqueue, fen: Not used.
+ Errors chan error
+
+ mu sync.Mutex
+ port *unix.EventPort
+ done chan struct{} // Channel for sending a "quit message" to the reader goroutine
+ dirs map[string]struct{} // Explicitly watched directories
+ watches map[string]struct{} // Explicitly watched non-directories
+}
+
+// NewWatcher creates a new Watcher.
+func NewWatcher() (*Watcher, error) {
+ return NewBufferedWatcher(0)
+}
+
+// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events
+// channel.
+//
+// The main use case for this is situations with a very large number of events
+// where the kernel buffer size can't be increased (e.g. due to lack of
+// permissions). An unbuffered Watcher will perform better for almost all use
+// cases, and whenever possible you will be better off increasing the kernel
+// buffers instead of adding a large userspace buffer.
+func NewBufferedWatcher(sz uint) (*Watcher, error) {
+ w := &Watcher{
+ Events: make(chan Event, sz),
+ Errors: make(chan error),
+ dirs: make(map[string]struct{}),
+ watches: make(map[string]struct{}),
+ done: make(chan struct{}),
+ }
+
+ var err error
+ w.port, err = unix.NewEventPort()
+ if err != nil {
+ return nil, fmt.Errorf("fsnotify.NewWatcher: %w", err)
+ }
+
+ go w.readEvents()
+ return w, nil
+}
+
+// sendEvent attempts to send an event to the user, returning true if the event
+// was put in the channel successfully and false if the watcher has been closed.
+func (w *Watcher) sendEvent(name string, op Op) (sent bool) {
+ select {
+ case w.Events <- Event{Name: name, Op: op}:
+ return true
+ case <-w.done:
+ return false
+ }
+}
+
+// sendError attempts to send an error to the user, returning true if the error
+// was put in the channel successfully and false if the watcher has been closed.
+func (w *Watcher) sendError(err error) (sent bool) {
+ select {
+ case w.Errors <- err:
+ return true
+ case <-w.done:
+ return false
+ }
+}
+
+func (w *Watcher) isClosed() bool {
+ select {
+ case <-w.done:
+ return true
+ default:
+ return false
+ }
+}
+
+// Close removes all watches and closes the Events channel.
+func (w *Watcher) Close() error {
+ // Take the lock used by associateFile to prevent lingering events from
+ // being processed after the close
+ w.mu.Lock()
+ defer w.mu.Unlock()
+ if w.isClosed() {
+ return nil
+ }
+ close(w.done)
+ return w.port.Close()
+}
+
+// Add starts monitoring the path for changes.
+//
+// A path can only be watched once; watching it more than once is a no-op and will
+// not return an error. Paths that do not yet exist on the filesystem cannot be
+// watched.
+//
+// A watch will be automatically removed if the watched path is deleted or
+// renamed. The exception is the Windows backend, which doesn't remove the
+// watcher on renames.
+//
+// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special
+// filesystems (/proc, /sys, etc.) generally don't work.
+//
+// Returns [ErrClosed] if [Watcher.Close] was called.
+//
+// See [Watcher.AddWith] for a version that allows adding options.
+//
+// # Watching directories
+//
+// All files in a directory are monitored, including new files that are created
+// after the watcher is started. Subdirectories are not watched (i.e. it's
+// non-recursive).
+//
+// # Watching files
+//
+// Watching individual files (rather than directories) is generally not
+// recommended as many programs (especially editors) update files atomically: it
+// will write to a temporary file which is then moved to the destination,
+// overwriting the original (or some variant thereof). The watcher on the
+// original file is now lost, as that no longer exists.
+//
+// The upshot of this is that a power failure or crash won't leave a
+// half-written file.
+//
+// Watch the parent directory and use Event.Name to filter out files you're not
+// interested in. There is an example of this in cmd/fsnotify/file.go.
+func (w *Watcher) Add(name string) error { return w.AddWith(name) }
+
+// AddWith is like [Watcher.Add], but allows adding options. When using Add()
+// the defaults described below are used.
+//
+// Possible options are:
+//
+// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on
+// other platforms. The default is 64K (65536 bytes).
+func (w *Watcher) AddWith(name string, opts ...addOpt) error {
+ if w.isClosed() {
+ return ErrClosed
+ }
+ if w.port.PathIsWatched(name) {
+ return nil
+ }
+
+ _ = getOptions(opts...)
+
+ // Currently we resolve symlinks that were explicitly requested to be
+ // watched. Otherwise we would use LStat here.
+ stat, err := os.Stat(name)
+ if err != nil {
+ return err
+ }
+
+ // Associate all files in the directory.
+ if stat.IsDir() {
+ err := w.handleDirectory(name, stat, true, w.associateFile)
+ if err != nil {
+ return err
+ }
+
+ w.mu.Lock()
+ w.dirs[name] = struct{}{}
+ w.mu.Unlock()
+ return nil
+ }
+
+ err = w.associateFile(name, stat, true)
+ if err != nil {
+ return err
+ }
+
+ w.mu.Lock()
+ w.watches[name] = struct{}{}
+ w.mu.Unlock()
+ return nil
+}
+
+// Remove stops monitoring the path for changes.
+//
+// Directories are always removed non-recursively. For example, if you added
+// /tmp/dir and /tmp/dir/subdir then you will need to remove both.
+//
+// Removing a path that has not yet been added returns [ErrNonExistentWatch].
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) Remove(name string) error {
+ if w.isClosed() {
+ return nil
+ }
+ if !w.port.PathIsWatched(name) {
+ return fmt.Errorf("%w: %s", ErrNonExistentWatch, name)
+ }
+
+ // The user has expressed an intent. Immediately remove this name from
+ // whichever watch list it might be in. If it's not in there the delete
+ // doesn't cause harm.
+ w.mu.Lock()
+ delete(w.watches, name)
+ delete(w.dirs, name)
+ w.mu.Unlock()
+
+ stat, err := os.Stat(name)
+ if err != nil {
+ return err
+ }
+
+ // Remove associations for every file in the directory.
+ if stat.IsDir() {
+ err := w.handleDirectory(name, stat, false, w.dissociateFile)
+ if err != nil {
+ return err
+ }
+ return nil
+ }
+
+ err = w.port.DissociatePath(name)
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// readEvents contains the main loop that runs in a goroutine watching for events.
+func (w *Watcher) readEvents() {
+ // If this function returns, the watcher has been closed and we can close
+ // these channels
+ defer func() {
+ close(w.Errors)
+ close(w.Events)
+ }()
+
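+	// Fetch up to 8 events per Get() call; Get blocks until at least one event
+	// arrives, and any further pending events are picked up on later
+	// iterations of this loop.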
+ pevents := make([]unix.PortEvent, 8)
+ for {
+ count, err := w.port.Get(pevents, 1, nil)
+ if err != nil && err != unix.ETIME {
+			// Interrupted system call (count should be 0); ignore and continue.
+ if errors.Is(err, unix.EINTR) && count == 0 {
+ continue
+ }
+ // Get failed because we called w.Close()
+ if errors.Is(err, unix.EBADF) && w.isClosed() {
+ return
+ }
+ // There was an error not caused by calling w.Close()
+ if !w.sendError(err) {
+ return
+ }
+ }
+
+ p := pevents[:count]
+ for _, pevent := range p {
+ if pevent.Source != unix.PORT_SOURCE_FILE {
+ // Event from unexpected source received; should never happen.
+				if !w.sendError(errors.New("event from unexpected source received")) {
+ return
+ }
+ continue
+ }
+
+ err = w.handleEvent(&pevent)
+ if err != nil {
+ if !w.sendError(err) {
+ return
+ }
+ }
+ }
+ }
+}
+
+func (w *Watcher) handleDirectory(path string, stat os.FileInfo, follow bool, handler func(string, os.FileInfo, bool) error) error {
+ files, err := os.ReadDir(path)
+ if err != nil {
+ return err
+ }
+
+ // Handle all children of the directory.
+ for _, entry := range files {
+ finfo, err := entry.Info()
+ if err != nil {
+ return err
+ }
+ err = handler(filepath.Join(path, finfo.Name()), finfo, false)
+ if err != nil {
+ return err
+ }
+ }
+
+ // And finally handle the directory itself.
+ return handler(path, stat, follow)
+}
+
+// handleEvent might need to emit more than one fsnotify event if the events
+// bitmap matches more than one event type (e.g. the file was both modified and
+// had the attributes changed between when the association was created and
+// when the event was returned).
+func (w *Watcher) handleEvent(event *unix.PortEvent) error {
+ var (
+ events = event.Events
+ path = event.Path
+ fmode = event.Cookie.(os.FileMode)
+ reRegister = true
+ )
+
+ w.mu.Lock()
+ _, watchedDir := w.dirs[path]
+ _, watchedPath := w.watches[path]
+ w.mu.Unlock()
+ isWatched := watchedDir || watchedPath
+
+ if events&unix.FILE_DELETE != 0 {
+ if !w.sendEvent(path, Remove) {
+ return nil
+ }
+ reRegister = false
+ }
+ if events&unix.FILE_RENAME_FROM != 0 {
+ if !w.sendEvent(path, Rename) {
+ return nil
+ }
+ // Don't keep watching the new file name
+ reRegister = false
+ }
+ if events&unix.FILE_RENAME_TO != 0 {
+ // We don't report a Rename event for this case, because Rename events
+ // are interpreted as referring to the _old_ name of the file, and in
+ // this case the event would refer to the new name of the file. This
+ // type of rename event is not supported by fsnotify.
+
+ // inotify reports a Remove event in this case, so we simulate this
+ // here.
+ if !w.sendEvent(path, Remove) {
+ return nil
+ }
+ // Don't keep watching the file that was removed
+ reRegister = false
+ }
+
+ // The file is gone, nothing left to do.
+ if !reRegister {
+ if watchedDir {
+ w.mu.Lock()
+ delete(w.dirs, path)
+ w.mu.Unlock()
+ }
+ if watchedPath {
+ w.mu.Lock()
+ delete(w.watches, path)
+ w.mu.Unlock()
+ }
+ return nil
+ }
+
+ // If we didn't get a deletion the file still exists and we're going to have
+ // to watch it again. Let's Stat it now so that we can compare permissions
+ // and have what we need to continue watching the file
+
+ stat, err := os.Lstat(path)
+ if err != nil {
+		// This is unexpected, but we should still emit an event. This happens
+		// most often on "rm -r" of a subdirectory inside a watched directory.
+		// We get a modify event of something happening inside, but by the time
+		// we get here the subdirectory is already gone. Clearly we were
+		// watching this path but now it is gone. Let's tell the user that it
+		// was removed.
+ if !w.sendEvent(path, Remove) {
+ return nil
+ }
+ // Suppress extra write events on removed directories; they are not
+ // informative and can be confusing.
+ return nil
+ }
+
+	// Resolve symlinks that were explicitly watched, as we would have at Add()
+	// time. This helps suppress spurious Chmod events on watched symlinks.
+ if isWatched {
+ stat, err = os.Stat(path)
+ if err != nil {
+ // The symlink still exists, but the target is gone. Report the
+ // Remove similar to above.
+ if !w.sendEvent(path, Remove) {
+ return nil
+ }
+ // Don't return the error
+ }
+ }
+
+ if events&unix.FILE_MODIFIED != 0 {
+ if fmode.IsDir() {
+ if watchedDir {
+ if err := w.updateDirectory(path); err != nil {
+ return err
+ }
+ } else {
+ if !w.sendEvent(path, Write) {
+ return nil
+ }
+ }
+ } else {
+ if !w.sendEvent(path, Write) {
+ return nil
+ }
+ }
+ }
+ if events&unix.FILE_ATTRIB != 0 && stat != nil {
+ // Only send Chmod if perms changed
+ if stat.Mode().Perm() != fmode.Perm() {
+ if !w.sendEvent(path, Chmod) {
+ return nil
+ }
+ }
+ }
+
+ if stat != nil {
+ // If we get here, it means we've hit an event above that requires us to
+ // continue watching the file or directory
+ return w.associateFile(path, stat, isWatched)
+ }
+ return nil
+}
+
+func (w *Watcher) updateDirectory(path string) error {
+ // The directory was modified, so we must find unwatched entities and watch
+ // them. If something was removed from the directory, nothing will happen,
+ // as everything else should still be watched.
+ files, err := os.ReadDir(path)
+ if err != nil {
+ return err
+ }
+
+ for _, entry := range files {
+ path := filepath.Join(path, entry.Name())
+ if w.port.PathIsWatched(path) {
+ continue
+ }
+
+ finfo, err := entry.Info()
+ if err != nil {
+ return err
+ }
+ err = w.associateFile(path, finfo, false)
+ if err != nil {
+ if !w.sendError(err) {
+ return nil
+ }
+ }
+ if !w.sendEvent(path, Create) {
+ return nil
+ }
+ }
+ return nil
+}
+
+func (w *Watcher) associateFile(path string, stat os.FileInfo, follow bool) error {
+ if w.isClosed() {
+ return ErrClosed
+ }
+ // This is primarily protecting the call to AssociatePath but it is
+ // important and intentional that the call to PathIsWatched is also
+ // protected by this mutex. Without this mutex, AssociatePath has been seen
+ // to error out that the path is already associated.
+ w.mu.Lock()
+ defer w.mu.Unlock()
+
+ if w.port.PathIsWatched(path) {
+		// Remove the old association in favor of this one. If we get ENOENT,
+ // then while the x/sys/unix wrapper still thought that this path was
+ // associated, the underlying event port did not. This call will have
+ // cleared up that discrepancy. The most likely cause is that the event
+ // has fired but we haven't processed it yet.
+ err := w.port.DissociatePath(path)
+ if err != nil && err != unix.ENOENT {
+ return err
+ }
+ }
+ // FILE_NOFOLLOW means we watch symlinks themselves rather than their
+ // targets.
+ events := unix.FILE_MODIFIED | unix.FILE_ATTRIB | unix.FILE_NOFOLLOW
+ if follow {
+ // We *DO* follow symlinks for explicitly watched entries.
+ events = unix.FILE_MODIFIED | unix.FILE_ATTRIB
+ }
+ return w.port.AssociatePath(path, stat,
+ events,
+ stat.Mode())
+}
+
+func (w *Watcher) dissociateFile(path string, stat os.FileInfo, unused bool) error {
+ if !w.port.PathIsWatched(path) {
+ return nil
+ }
+ return w.port.DissociatePath(path)
+}
+
+// WatchList returns all paths explicitly added with [Watcher.Add] (and are not
+// yet removed).
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) WatchList() []string {
+ if w.isClosed() {
+ return nil
+ }
+
+ w.mu.Lock()
+ defer w.mu.Unlock()
+
+ entries := make([]string, 0, len(w.watches)+len(w.dirs))
+ for pathname := range w.dirs {
+ entries = append(entries, pathname)
+ }
+ for pathname := range w.watches {
+ entries = append(entries, pathname)
+ }
+
+ return entries
+}
diff --git a/vendor/github.com/fsnotify/fsnotify/backend_inotify.go b/vendor/github.com/fsnotify/fsnotify/backend_inotify.go
new file mode 100644
index 0000000..921c1c1
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/backend_inotify.go
@@ -0,0 +1,594 @@
+//go:build linux && !appengine
+// +build linux,!appengine
+
+// Note: the documentation on the Watcher type and methods is generated from
+// mkdoc.zsh
+
+package fsnotify
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+ "unsafe"
+
+ "golang.org/x/sys/unix"
+)
+
+// Watcher watches a set of paths, delivering events on a channel.
+//
+// A watcher should not be copied (e.g. pass it by pointer, rather than by
+// value).
+//
+// # Linux notes
+//
+// When a file is removed a Remove event won't be emitted until all file
+// descriptors are closed, and deletes will always emit a Chmod. For example:
+//
+//	fp, _ := os.Open("file")
+//	os.Remove("file")  // Triggers Chmod
+//	fp.Close()         // Triggers Remove
+//
+// This is the event that inotify sends, so not much can be changed about this.
+//
+// The fs.inotify.max_user_watches sysctl variable specifies the upper limit
+// for the number of watches per user, and fs.inotify.max_user_instances
+// specifies the maximum number of inotify instances per user. Every Watcher you
+// create is an "instance", and every path you add is a "watch".
+//
+// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and
+// /proc/sys/fs/inotify/max_user_instances
+//
+// To increase them you can use sysctl or write the value to the /proc file:
+//
+// # Default values on Linux 5.18
+// sysctl fs.inotify.max_user_watches=124983
+// sysctl fs.inotify.max_user_instances=128
+//
+// To make the changes persist on reboot edit /etc/sysctl.conf or
+// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check
+// your distro's documentation):
+//
+// fs.inotify.max_user_watches=124983
+// fs.inotify.max_user_instances=128
+//
+// Reaching the limit will result in a "no space left on device" or "too many open
+// files" error.
+//
+// # kqueue notes (macOS, BSD)
+//
+// kqueue requires opening a file descriptor for every file that's being watched;
+// so if you're watching a directory with five files then that's six file
+// descriptors. You will run into your system's "max open files" limit faster on
+// these platforms.
+//
+// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to
+// control the maximum number of open files, as well as /etc/login.conf on BSD
+// systems.
+//
+// # Windows notes
+//
+// Paths can be added as "C:\path\to\dir", but forward slashes
+// ("C:/path/to/dir") will also work.
+//
+// When a watched directory is removed it will always send an event for the
+// directory itself, but may not send events for all files in that directory.
+// Sometimes it will send events for all files, sometimes it will send no
+// events, and often only for some files.
+//
+// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest
+// value that is guaranteed to work with SMB filesystems. If you have many
+// events in quick succession this may not be enough, and you will have to use
+// [WithBufferSize] to increase the value.
+type Watcher struct {
+ // Events sends the filesystem change events.
+ //
+ // fsnotify can send the following events; a "path" here can refer to a
+ // file, directory, symbolic link, or special file like a FIFO.
+ //
+ // fsnotify.Create A new path was created; this may be followed by one
+ // or more Write events if data also gets written to a
+ // file.
+ //
+ // fsnotify.Remove A path was removed.
+ //
+ // fsnotify.Rename A path was renamed. A rename is always sent with the
+ // old path as Event.Name, and a Create event will be
+ // sent with the new name. Renames are only sent for
+ // paths that are currently watched; e.g. moving an
+ // unmonitored file into a monitored directory will
+ // show up as just a Create. Similarly, renaming a file
+ // to outside a monitored directory will show up as
+ // only a Rename.
+ //
+ // fsnotify.Write A file or named pipe was written to. A Truncate will
+ // also trigger a Write. A single "write action"
+ // initiated by the user may show up as one or multiple
+ // writes, depending on when the system syncs things to
+ // disk. For example when compiling a large Go program
+ // you may get hundreds of Write events, and you may
+ // want to wait until you've stopped receiving them
+ // (see the dedup example in cmd/fsnotify).
+ //
+ // Some systems may send Write event for directories
+ // when the directory content changes.
+ //
+ // fsnotify.Chmod Attributes were changed. On Linux this is also sent
+ // when a file is removed (or more accurately, when a
+ // link to an inode is removed). On kqueue it's sent
+ // when a file is truncated. On Windows it's never
+ // sent.
+ Events chan Event
+
+ // Errors sends any errors.
+ //
+ // ErrEventOverflow is used to indicate there are too many events:
+ //
+ // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl)
+ // - windows: The buffer size is too small; WithBufferSize() can be used to increase it.
+ // - kqueue, fen: Not used.
+ Errors chan error
+
+ // Store fd here as os.File.Read() will no longer return on close after
+ // calling Fd(). See: https://github.com/golang/go/issues/26439
+ fd int
+ inotifyFile *os.File
+ watches *watches
+ done chan struct{} // Channel for sending a "quit message" to the reader goroutine
+ closeMu sync.Mutex
+ doneResp chan struct{} // Channel to respond to Close
+}
+
+type (
+ watches struct {
+ mu sync.RWMutex
+ wd map[uint32]*watch // wd → watch
+ path map[string]uint32 // pathname → wd
+ }
+ watch struct {
+ wd uint32 // Watch descriptor (as returned by the inotify_add_watch() syscall)
+ flags uint32 // inotify flags of this watch (see inotify(7) for the list of valid flags)
+ path string // Watch path.
+ }
+)
+
+func newWatches() *watches {
+ return &watches{
+ wd: make(map[uint32]*watch),
+ path: make(map[string]uint32),
+ }
+}
+
+func (w *watches) len() int {
+ w.mu.RLock()
+ defer w.mu.RUnlock()
+ return len(w.wd)
+}
+
+func (w *watches) add(ww *watch) {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+ w.wd[ww.wd] = ww
+ w.path[ww.path] = ww.wd
+}
+
+func (w *watches) remove(wd uint32) {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+ delete(w.path, w.wd[wd].path)
+ delete(w.wd, wd)
+}
+
+func (w *watches) removePath(path string) (uint32, bool) {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+
+ wd, ok := w.path[path]
+ if !ok {
+ return 0, false
+ }
+
+ delete(w.path, path)
+ delete(w.wd, wd)
+
+ return wd, true
+}
+
+func (w *watches) byPath(path string) *watch {
+ w.mu.RLock()
+ defer w.mu.RUnlock()
+ return w.wd[w.path[path]]
+}
+
+func (w *watches) byWd(wd uint32) *watch {
+ w.mu.RLock()
+ defer w.mu.RUnlock()
+ return w.wd[wd]
+}
+
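+// updatePath updates the watch registered for path while holding the lock:
+// f is called with the existing watch (nil if the path isn't watched yet) and
+// returns the watch to store; returning a nil watch leaves the maps unchanged.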
+func (w *watches) updatePath(path string, f func(*watch) (*watch, error)) error {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+
+ var existing *watch
+ wd, ok := w.path[path]
+ if ok {
+ existing = w.wd[wd]
+ }
+
+ upd, err := f(existing)
+ if err != nil {
+ return err
+ }
+ if upd != nil {
+ w.wd[upd.wd] = upd
+ w.path[upd.path] = upd.wd
+
+ if upd.wd != wd {
+ delete(w.wd, wd)
+ }
+ }
+
+ return nil
+}
+
+// NewWatcher creates a new Watcher.
+func NewWatcher() (*Watcher, error) {
+ return NewBufferedWatcher(0)
+}
+
+// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events
+// channel.
+//
+// The main use case for this is situations with a very large number of events
+// where the kernel buffer size can't be increased (e.g. due to lack of
+// permissions). An unbuffered Watcher will perform better for almost all use
+// cases, and whenever possible you will be better off increasing the kernel
+// buffers instead of adding a large userspace buffer.
+func NewBufferedWatcher(sz uint) (*Watcher, error) {
+ // Need to set nonblocking mode for SetDeadline to work, otherwise blocking
+ // I/O operations won't terminate on close.
+ fd, errno := unix.InotifyInit1(unix.IN_CLOEXEC | unix.IN_NONBLOCK)
+ if fd == -1 {
+ return nil, errno
+ }
+
+ w := &Watcher{
+ fd: fd,
+ inotifyFile: os.NewFile(uintptr(fd), ""),
+ watches: newWatches(),
+ Events: make(chan Event, sz),
+ Errors: make(chan error),
+ done: make(chan struct{}),
+ doneResp: make(chan struct{}),
+ }
+
+ go w.readEvents()
+ return w, nil
+}
+
+// Returns true if the event was sent, or false if watcher is closed.
+func (w *Watcher) sendEvent(e Event) bool {
+ select {
+ case w.Events <- e:
+ return true
+ case <-w.done:
+ return false
+ }
+}
+
+// Returns true if the error was sent, or false if watcher is closed.
+func (w *Watcher) sendError(err error) bool {
+ select {
+ case w.Errors <- err:
+ return true
+ case <-w.done:
+ return false
+ }
+}
+
+func (w *Watcher) isClosed() bool {
+ select {
+ case <-w.done:
+ return true
+ default:
+ return false
+ }
+}
+
+// Close removes all watches and closes the Events channel.
+func (w *Watcher) Close() error {
+ w.closeMu.Lock()
+ if w.isClosed() {
+ w.closeMu.Unlock()
+ return nil
+ }
+ close(w.done)
+ w.closeMu.Unlock()
+
+ // Causes any blocking reads to return with an error, provided the file
+ // still supports deadline operations.
+ err := w.inotifyFile.Close()
+ if err != nil {
+ return err
+ }
+
+ // Wait for goroutine to close
+ <-w.doneResp
+
+ return nil
+}
+
+// Add starts monitoring the path for changes.
+//
+// A path can only be watched once; watching it more than once is a no-op and will
+// not return an error. Paths that do not yet exist on the filesystem cannot be
+// watched.
+//
+// A watch will be automatically removed if the watched path is deleted or
+// renamed. The exception is the Windows backend, which doesn't remove the
+// watcher on renames.
+//
+// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special
+// filesystems (/proc, /sys, etc.) generally don't work.
+//
+// Returns [ErrClosed] if [Watcher.Close] was called.
+//
+// See [Watcher.AddWith] for a version that allows adding options.
+//
+// # Watching directories
+//
+// All files in a directory are monitored, including new files that are created
+// after the watcher is started. Subdirectories are not watched (i.e. it's
+// non-recursive).
+//
+// # Watching files
+//
+// Watching individual files (rather than directories) is generally not
+// recommended as many programs (especially editors) update files atomically: it
+// will write to a temporary file which is then moved to the destination,
+// overwriting the original (or some variant thereof). The watcher on the
+// original file is now lost, as that no longer exists.
+//
+// The upshot of this is that a power failure or crash won't leave a
+// half-written file.
+//
+// Watch the parent directory and use Event.Name to filter out files you're not
+// interested in. There is an example of this in cmd/fsnotify/file.go.
+func (w *Watcher) Add(name string) error { return w.AddWith(name) }
+
+// AddWith is like [Watcher.Add], but allows adding options. When using Add()
+// the defaults described below are used.
+//
+// Possible options are:
+//
+// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on
+// other platforms. The default is 64K (65536 bytes).
+func (w *Watcher) AddWith(name string, opts ...addOpt) error {
+ if w.isClosed() {
+ return ErrClosed
+ }
+
+ name = filepath.Clean(name)
+ _ = getOptions(opts...)
+
+ var flags uint32 = unix.IN_MOVED_TO | unix.IN_MOVED_FROM |
+ unix.IN_CREATE | unix.IN_ATTRIB | unix.IN_MODIFY |
+ unix.IN_MOVE_SELF | unix.IN_DELETE | unix.IN_DELETE_SELF
+
+ return w.watches.updatePath(name, func(existing *watch) (*watch, error) {
+ if existing != nil {
+ flags |= existing.flags | unix.IN_MASK_ADD
+ }
+
+ wd, err := unix.InotifyAddWatch(w.fd, name, flags)
+ if wd == -1 {
+ return nil, err
+ }
+
+ if existing == nil {
+ return &watch{
+ wd: uint32(wd),
+ path: name,
+ flags: flags,
+ }, nil
+ }
+
+ existing.wd = uint32(wd)
+ existing.flags = flags
+ return existing, nil
+ })
+}
+
+// Remove stops monitoring the path for changes.
+//
+// Directories are always removed non-recursively. For example, if you added
+// /tmp/dir and /tmp/dir/subdir then you will need to remove both.
+//
+// Removing a path that has not yet been added returns [ErrNonExistentWatch].
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) Remove(name string) error {
+ if w.isClosed() {
+ return nil
+ }
+ return w.remove(filepath.Clean(name))
+}
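+
+// For example (a sketch, not upstream documentation): after adding both a
+// directory and one of its subdirectories, each must be removed explicitly:
+//
+//	_ = w.Add("/tmp/dir")
+//	_ = w.Add("/tmp/dir/subdir")
+//	_ = w.Remove("/tmp/dir")        // /tmp/dir/subdir is still watched
+//	_ = w.Remove("/tmp/dir/subdir") // now both watches are gone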
+
+func (w *Watcher) remove(name string) error {
+ wd, ok := w.watches.removePath(name)
+ if !ok {
+ return fmt.Errorf("%w: %s", ErrNonExistentWatch, name)
+ }
+
+ success, errno := unix.InotifyRmWatch(w.fd, wd)
+ if success == -1 {
+ // TODO: Perhaps it's not helpful to return an error here in every case;
+ // The only two possible errors are:
+ //
+ // - EBADF, which happens when w.fd is not a valid file descriptor
+ // of any kind.
+ // - EINVAL, which is when fd is not an inotify descriptor or wd
+ // is not a valid watch descriptor. Watch descriptors are
+ // invalidated when they are removed explicitly or implicitly;
+ // explicitly by inotify_rm_watch, implicitly when the file they
+ // are watching is deleted.
+ return errno
+ }
+ return nil
+}
+
+// WatchList returns all paths explicitly added with [Watcher.Add] (and that
+// have not yet been removed).
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) WatchList() []string {
+ if w.isClosed() {
+ return nil
+ }
+
+ entries := make([]string, 0, w.watches.len())
+ w.watches.mu.RLock()
+ for pathname := range w.watches.path {
+ entries = append(entries, pathname)
+ }
+ w.watches.mu.RUnlock()
+
+ return entries
+}
+
+// readEvents reads from the inotify file descriptor, converts the
+// received events into Event objects, and sends them on the Events channel.
+func (w *Watcher) readEvents() {
+ defer func() {
+ close(w.doneResp)
+ close(w.Errors)
+ close(w.Events)
+ }()
+
+ var (
+ buf [unix.SizeofInotifyEvent * 4096]byte // Buffer for a maximum of 4096 raw events (fewer when events carry filenames)
+ errno error // Syscall errno
+ )
+ for {
+ // See if we have been closed.
+ if w.isClosed() {
+ return
+ }
+
+ n, err := w.inotifyFile.Read(buf[:])
+ switch {
+ case errors.Unwrap(err) == os.ErrClosed:
+ return
+ case err != nil:
+ if !w.sendError(err) {
+ return
+ }
+ continue
+ }
+
+ if n < unix.SizeofInotifyEvent {
+ var err error
+ if n == 0 {
+ err = io.EOF // Read returned EOF; this should really never happen.
+ } else if n < 0 {
+ err = errno // If an error occurred while reading.
+ } else {
+ err = errors.New("notify: short read in readEvents()") // Read was too short.
+ }
+ if !w.sendError(err) {
+ return
+ }
+ continue
+ }
+
+ var offset uint32
+ // We don't know how many events we just read into the buffer.
+ // While the offset points to at least one whole event:
+ for offset <= uint32(n-unix.SizeofInotifyEvent) {
+ var (
+ // Point "raw" to the event in the buffer
+ raw = (*unix.InotifyEvent)(unsafe.Pointer(&buf[offset]))
+ mask = uint32(raw.Mask)
+ nameLen = uint32(raw.Len)
+ )
+
+ if mask&unix.IN_Q_OVERFLOW != 0 {
+ if !w.sendError(ErrEventOverflow) {
+ return
+ }
+ }
+
+ // If the event happened to the watched directory or the watched file, the kernel
+ // doesn't append the filename to the event, but we would like to always fill
+ // the "Name" field with a valid filename. We retrieve the path of the watch from
+ // the "paths" map.
+ watch := w.watches.byWd(uint32(raw.Wd))
+
+ // inotify will automatically remove the watch on deletes; just need
+ // to clean our state here.
+ if watch != nil && mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF {
+ w.watches.remove(watch.wd)
+ }
+ // We can't really update the state when a watched path is moved;
+ // only IN_MOVE_SELF is sent and not IN_MOVED_{FROM,TO}. So remove
+ // the watch.
+ if watch != nil && mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF {
+ err := w.remove(watch.path)
+ if err != nil && !errors.Is(err, ErrNonExistentWatch) {
+ if !w.sendError(err) {
+ return
+ }
+ }
+ }
+
+ var name string
+ if watch != nil {
+ name = watch.path
+ }
+ if nameLen > 0 {
+ // Point "bytes" at the first byte of the filename
+ bytes := (*[unix.PathMax]byte)(unsafe.Pointer(&buf[offset+unix.SizeofInotifyEvent]))[:nameLen:nameLen]
+ // The filename is padded with NULL bytes. TrimRight() gets rid of those.
+ name += "/" + strings.TrimRight(string(bytes[0:nameLen]), "\000")
+ }
+
+ event := w.newEvent(name, mask)
+
+ // Send the events that are not ignored on the events channel
+ if mask&unix.IN_IGNORED == 0 {
+ if !w.sendEvent(event) {
+ return
+ }
+ }
+
+ // Move to the next event in the buffer
+ offset += unix.SizeofInotifyEvent + nameLen
+ }
+ }
+}
+
+// newEvent returns a platform-independent Event based on an inotify mask.
+func (w *Watcher) newEvent(name string, mask uint32) Event {
+ e := Event{Name: name}
+ if mask&unix.IN_CREATE == unix.IN_CREATE || mask&unix.IN_MOVED_TO == unix.IN_MOVED_TO {
+ e.Op |= Create
+ }
+ if mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF || mask&unix.IN_DELETE == unix.IN_DELETE {
+ e.Op |= Remove
+ }
+ if mask&unix.IN_MODIFY == unix.IN_MODIFY {
+ e.Op |= Write
+ }
+ if mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF || mask&unix.IN_MOVED_FROM == unix.IN_MOVED_FROM {
+ e.Op |= Rename
+ }
+ if mask&unix.IN_ATTRIB == unix.IN_ATTRIB {
+ e.Op |= Chmod
+ }
+ return e
+}
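+
+// For example (illustration only): a raw mask of unix.IN_MOVED_FROM maps to
+// Rename on the old name, while the matching unix.IN_MOVED_TO maps to Create
+// on the new name, which is why a rename inside a watched directory shows up
+// as a Rename/Create pair.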
diff --git a/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go b/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go
new file mode 100644
index 0000000..063a091
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go
@@ -0,0 +1,782 @@
+//go:build freebsd || openbsd || netbsd || dragonfly || darwin
+// +build freebsd openbsd netbsd dragonfly darwin
+
+// Note: the documentation on the Watcher type and methods is generated from
+// mkdoc.zsh
+
+package fsnotify
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "sync"
+
+ "golang.org/x/sys/unix"
+)
+
+// Watcher watches a set of paths, delivering events on a channel.
+//
+// A watcher should not be copied (e.g. pass it by pointer, rather than by
+// value).
+//
+// # Linux notes
+//
+// When a file is removed a Remove event won't be emitted until all file
+// descriptors are closed, and deletes will always emit a Chmod. For example:
+//
+// fp := os.Open("file")
+// os.Remove("file") // Triggers Chmod
+// fp.Close() // Triggers Remove
+//
+// This is the event that inotify sends, so not much can be changed about this.
+//
+// The fs.inotify.max_user_watches sysctl variable specifies the upper limit
+// for the number of watches per user, and fs.inotify.max_user_instances
+// specifies the maximum number of inotify instances per user. Every Watcher you
+// create is an "instance", and every path you add is a "watch".
+//
+// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and
+// /proc/sys/fs/inotify/max_user_instances
+//
+// To increase them you can use sysctl or write the value to the /proc file:
+//
+// # Default values on Linux 5.18
+// sysctl fs.inotify.max_user_watches=124983
+// sysctl fs.inotify.max_user_instances=128
+//
+// To make the changes persist on reboot edit /etc/sysctl.conf or
+// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check
+// your distro's documentation):
+//
+// fs.inotify.max_user_watches=124983
+// fs.inotify.max_user_instances=128
+//
+// Reaching the limit will result in a "no space left on device" or "too many open
+// files" error.
+//
+// # kqueue notes (macOS, BSD)
+//
+// kqueue requires opening a file descriptor for every file that's being watched;
+// so if you're watching a directory with five files then that's six file
+// descriptors. You will run into your system's "max open files" limit faster on
+// these platforms.
+//
+// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to
+// control the maximum number of open files, as well as /etc/login.conf on BSD
+// systems.
+//
+// # Windows notes
+//
+// Paths can be added as "C:\path\to\dir", but forward slashes
+// ("C:/path/to/dir") will also work.
+//
+// When a watched directory is removed it will always send an event for the
+// directory itself, but may not send events for all files in that directory.
+// Sometimes it will send events for all files, sometimes it will send no
+// events, and often only for some files.
+//
+// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest
+// value that is guaranteed to work with SMB filesystems. If you have many
+// events in quick succession this may not be enough, and you will have to use
+// [WithBufferSize] to increase the value.
+type Watcher struct {
+ // Events sends the filesystem change events.
+ //
+ // fsnotify can send the following events; a "path" here can refer to a
+ // file, directory, symbolic link, or special file like a FIFO.
+ //
+ // fsnotify.Create A new path was created; this may be followed by one
+ // or more Write events if data also gets written to a
+ // file.
+ //
+ // fsnotify.Remove A path was removed.
+ //
+ // fsnotify.Rename A path was renamed. A rename is always sent with the
+ // old path as Event.Name, and a Create event will be
+ // sent with the new name. Renames are only sent for
+ // paths that are currently watched; e.g. moving an
+ // unmonitored file into a monitored directory will
+ // show up as just a Create. Similarly, renaming a file
+ // to outside a monitored directory will show up as
+ // only a Rename.
+ //
+ // fsnotify.Write A file or named pipe was written to. A Truncate will
+ // also trigger a Write. A single "write action"
+ // initiated by the user may show up as one or multiple
+ // writes, depending on when the system syncs things to
+ // disk. For example when compiling a large Go program
+ // you may get hundreds of Write events, and you may
+ // want to wait until you've stopped receiving them
+ // (see the dedup example in cmd/fsnotify).
+ //
+ // Some systems may send a Write event for directories
+ // when the directory content changes.
+ //
+ // fsnotify.Chmod Attributes were changed. On Linux this is also sent
+ // when a file is removed (or more accurately, when a
+ // link to an inode is removed). On kqueue it's sent
+ // when a file is truncated. On Windows it's never
+ // sent.
+ Events chan Event
+
+ // Errors sends any errors.
+ //
+ // ErrEventOverflow is used to indicate there are too many events:
+ //
+ // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl)
+ // - windows: The buffer size is too small; WithBufferSize() can be used to increase it.
+ // - kqueue, fen: Not used.
+ Errors chan error
+
+ done chan struct{}
+ kq int // File descriptor (as returned by the kqueue() syscall).
+ closepipe [2]int // Pipe used for closing.
+ mu sync.Mutex // Protects access to watcher data
+ watches map[string]int // Watched file descriptors (key: path).
+ watchesByDir map[string]map[int]struct{} // Watched file descriptors indexed by the parent directory (key: dirname(path)).
+ userWatches map[string]struct{} // Watches added with Watcher.Add()
+ dirFlags map[string]uint32 // Watched directories to fflags used in kqueue.
+ paths map[int]pathInfo // File descriptors to path names for processing kqueue events.
+ fileExists map[string]struct{} // Keep track of if we know this file exists (to stop duplicate create events).
+ isClosed bool // Set to true when Close() is first called
+}
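+
+// The Events documentation above suggests waiting until Write events stop
+// arriving (see the dedup example in cmd/fsnotify). A minimal debounce sketch
+// (the 100ms window and the handle callback are hypothetical):
+//
+//	var mu sync.Mutex
+//	timers := map[string]*time.Timer{}
+//	for e := range w.Events {
+//		e := e // copy for the timer callback (pre-Go 1.22 loop semantics)
+//		mu.Lock()
+//		if t, ok := timers[e.Name]; ok {
+//			t.Stop()
+//		}
+//		timers[e.Name] = time.AfterFunc(100*time.Millisecond, func() {
+//			handle(e) // user-supplied handler
+//		})
+//		mu.Unlock()
+//	}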
+
+type pathInfo struct {
+ name string
+ isDir bool
+}
+
+// NewWatcher creates a new Watcher.
+func NewWatcher() (*Watcher, error) {
+ return NewBufferedWatcher(0)
+}
+
+// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events
+// channel.
+//
+// The main use case for this is situations with a very large number of events
+// where the kernel buffer size can't be increased (e.g. due to lack of
+// permissions). An unbuffered Watcher will perform better for almost all use
+// cases, and whenever possible you will be better off increasing the kernel
+// buffers instead of adding a large userspace buffer.
+func NewBufferedWatcher(sz uint) (*Watcher, error) {
+ kq, closepipe, err := newKqueue()
+ if err != nil {
+ return nil, err
+ }
+
+ w := &Watcher{
+ kq: kq,
+ closepipe: closepipe,
+ watches: make(map[string]int),
+ watchesByDir: make(map[string]map[int]struct{}),
+ dirFlags: make(map[string]uint32),
+ paths: make(map[int]pathInfo),
+ fileExists: make(map[string]struct{}),
+ userWatches: make(map[string]struct{}),
+ Events: make(chan Event, sz),
+ Errors: make(chan error),
+ done: make(chan struct{}),
+ }
+
+ go w.readEvents()
+ return w, nil
+}
+
+// newKqueue creates a new kernel event queue and returns a descriptor.
+//
+// This registers a new event on closepipe, which will trigger an event when
+// it's closed. This way we can use kevent() without timeout/polling; without
+// the closepipe, it would block forever and we wouldn't be able to stop it at
+// all.
+func newKqueue() (kq int, closepipe [2]int, err error) {
+ kq, err = unix.Kqueue()
+ if kq == -1 {
+ return kq, closepipe, err
+ }
+
+ // Create the close pipe; it's registered with kqueue below.
+ err = unix.Pipe(closepipe[:])
+ if err != nil {
+ unix.Close(kq)
+ return kq, closepipe, err
+ }
+
+ // Register changes to listen on the closepipe.
+ changes := make([]unix.Kevent_t, 1)
+ // SetKevent converts int to the platform-specific types.
+ unix.SetKevent(&changes[0], closepipe[0], unix.EVFILT_READ,
+ unix.EV_ADD|unix.EV_ENABLE|unix.EV_ONESHOT)
+
+ ok, err := unix.Kevent(kq, changes, nil, nil)
+ if ok == -1 {
+ unix.Close(kq)
+ unix.Close(closepipe[0])
+ unix.Close(closepipe[1])
+ return kq, closepipe, err
+ }
+ return kq, closepipe, nil
+}
+
+// Returns true if the event was sent, or false if the watcher is closed.
+func (w *Watcher) sendEvent(e Event) bool {
+ select {
+ case w.Events <- e:
+ return true
+ case <-w.done:
+ return false
+ }
+}
+
+// Returns true if the error was sent, or false if the watcher is closed.
+func (w *Watcher) sendError(err error) bool {
+ select {
+ case w.Errors <- err:
+ return true
+ case <-w.done:
+ return false
+ }
+}
+
+// Close removes all watches and closes the Events channel.
+func (w *Watcher) Close() error {
+ w.mu.Lock()
+ if w.isClosed {
+ w.mu.Unlock()
+ return nil
+ }
+ w.isClosed = true
+
+ // copy paths to remove while locked
+ pathsToRemove := make([]string, 0, len(w.watches))
+ for name := range w.watches {
+ pathsToRemove = append(pathsToRemove, name)
+ }
+ w.mu.Unlock() // Unlock before calling Remove, which also locks
+ for _, name := range pathsToRemove {
+ w.Remove(name)
+ }
+
+ // Send "quit" message to the reader goroutine.
+ unix.Close(w.closepipe[1])
+ close(w.done)
+
+ return nil
+}
+
+// Add starts monitoring the path for changes.
+//
+// A path can only be watched once; watching it more than once is a no-op and will
+// not return an error. Paths that do not yet exist on the filesystem cannot be
+// watched.
+//
+// A watch will be automatically removed if the watched path is deleted or
+// renamed. The exception is the Windows backend, which doesn't remove the
+// watcher on renames.
+//
+// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special
+// filesystems (/proc, /sys, etc.) generally don't work.
+//
+// Returns [ErrClosed] if [Watcher.Close] was called.
+//
+// See [Watcher.AddWith] for a version that allows adding options.
+//
+// # Watching directories
+//
+// All files in a directory are monitored, including new files that are created
+// after the watcher is started. Subdirectories are not watched (i.e. it's
+// non-recursive).
+//
+// # Watching files
+//
+// Watching individual files (rather than directories) is generally not
+// recommended as many programs (especially editors) update files atomically:
+// they write to a temporary file which is then moved to the destination,
+// overwriting the original (or some variant thereof). The watcher on the
+// original file is now lost, as that file no longer exists.
+//
+// The upshot of this approach is that a power failure or crash won't leave a
+// half-written file.
+//
+// Instead, watch the parent directory and use Event.Name to filter out files
+// you're not interested in. There is an example of this in cmd/fsnotify/file.go.
+func (w *Watcher) Add(name string) error { return w.AddWith(name) }
+
+// AddWith is like [Watcher.Add], but allows adding options. When using Add()
+// the defaults described below are used.
+//
+// Possible options are:
+//
+// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on
+// other platforms. The default is 64K (65536 bytes).
+func (w *Watcher) AddWith(name string, opts ...addOpt) error {
+ _ = getOptions(opts...)
+
+ w.mu.Lock()
+ w.userWatches[name] = struct{}{}
+ w.mu.Unlock()
+ _, err := w.addWatch(name, noteAllEvents)
+ return err
+}
+
+// Remove stops monitoring the path for changes.
+//
+// Directories are always removed non-recursively. For example, if you added
+// /tmp/dir and /tmp/dir/subdir then you will need to remove both.
+//
+// Removing a path that has not yet been added returns [ErrNonExistentWatch].
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) Remove(name string) error {
+ return w.remove(name, true)
+}
+
+func (w *Watcher) remove(name string, unwatchFiles bool) error {
+ name = filepath.Clean(name)
+ w.mu.Lock()
+ if w.isClosed {
+ w.mu.Unlock()
+ return nil
+ }
+ watchfd, ok := w.watches[name]
+ w.mu.Unlock()
+ if !ok {
+ return fmt.Errorf("%w: %s", ErrNonExistentWatch, name)
+ }
+
+ err := w.register([]int{watchfd}, unix.EV_DELETE, 0)
+ if err != nil {
+ return err
+ }
+
+ unix.Close(watchfd)
+
+ w.mu.Lock()
+ isDir := w.paths[watchfd].isDir
+ delete(w.watches, name)
+ delete(w.userWatches, name)
+
+ parentName := filepath.Dir(name)
+ delete(w.watchesByDir[parentName], watchfd)
+
+ if len(w.watchesByDir[parentName]) == 0 {
+ delete(w.watchesByDir, parentName)
+ }
+
+ delete(w.paths, watchfd)
+ delete(w.dirFlags, name)
+ delete(w.fileExists, name)
+ w.mu.Unlock()
+
+ // Find all watched paths in this directory that were added internally (not directly by the user).
+ if unwatchFiles && isDir {
+ var pathsToRemove []string
+ w.mu.Lock()
+ for fd := range w.watchesByDir[name] {
+ path := w.paths[fd]
+ if _, ok := w.userWatches[path.name]; !ok {
+ pathsToRemove = append(pathsToRemove, path.name)
+ }
+ }
+ w.mu.Unlock()
+ for _, name := range pathsToRemove {
+ // Since these are internal, not much sense in propagating error to
+ // the user, as that will just confuse them with an error about a
+ // path they did not explicitly watch themselves.
+ w.Remove(name)
+ }
+ }
+ return nil
+}
+
+// WatchList returns all paths explicitly added with [Watcher.Add] (and that
+// have not yet been removed).
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) WatchList() []string {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+ if w.isClosed {
+ return nil
+ }
+
+ entries := make([]string, 0, len(w.userWatches))
+ for pathname := range w.userWatches {
+ entries = append(entries, pathname)
+ }
+
+ return entries
+}
+
+// Watch all events (except NOTE_EXTEND, NOTE_LINK, NOTE_REVOKE)
+const noteAllEvents = unix.NOTE_DELETE | unix.NOTE_WRITE | unix.NOTE_ATTRIB | unix.NOTE_RENAME
+
+// addWatch adds name to the watched file set; the flags are interpreted as
+// described in kevent(2).
+//
+// Returns the real path to the file which was added, with symlinks resolved.
+func (w *Watcher) addWatch(name string, flags uint32) (string, error) {
+ var isDir bool
+ name = filepath.Clean(name)
+
+ w.mu.Lock()
+ if w.isClosed {
+ w.mu.Unlock()
+ return "", ErrClosed
+ }
+ watchfd, alreadyWatching := w.watches[name]
+ // We already have a watch, but we can still override flags.
+ if alreadyWatching {
+ isDir = w.paths[watchfd].isDir
+ }
+ w.mu.Unlock()
+
+ if !alreadyWatching {
+ fi, err := os.Lstat(name)
+ if err != nil {
+ return "", err
+ }
+
+ // Don't watch sockets or named pipes
+ if (fi.Mode()&os.ModeSocket == os.ModeSocket) || (fi.Mode()&os.ModeNamedPipe == os.ModeNamedPipe) {
+ return "", nil
+ }
+
+ // Follow Symlinks.
+ if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
+ link, err := os.Readlink(name)
+ if err != nil {
+ // Return nil because Linux can add unresolvable symlinks to the
+ // watch list without problems, so maintain consistency with
+ // that. There will be no file events for broken symlinks.
+ // TODO: more specific check; returns os.PathError; ENOENT?
+ return "", nil
+ }
+
+ w.mu.Lock()
+ _, alreadyWatching = w.watches[link]
+ if alreadyWatching {
+ // Add to watches so we don't get spurious Create events later
+ // on when we diff the directories. Write to the maps while still
+ // holding the lock, as they are read concurrently elsewhere.
+ w.watches[name] = 0
+ w.fileExists[name] = struct{}{}
+ }
+ w.mu.Unlock()
+
+ if alreadyWatching {
+ return link, nil
+ }
+
+ name = link
+ fi, err = os.Lstat(name)
+ if err != nil {
+ return "", nil
+ }
+ }
+
+ // Retry on EINTR; open() can return EINTR in practice on macOS.
+ // See #354, and Go issues 11180 and 39237.
+ for {
+ watchfd, err = unix.Open(name, openMode, 0)
+ if err == nil {
+ break
+ }
+ if errors.Is(err, unix.EINTR) {
+ continue
+ }
+
+ return "", err
+ }
+
+ isDir = fi.IsDir()
+ }
+
+ err := w.register([]int{watchfd}, unix.EV_ADD|unix.EV_CLEAR|unix.EV_ENABLE, flags)
+ if err != nil {
+ unix.Close(watchfd)
+ return "", err
+ }
+
+ if !alreadyWatching {
+ w.mu.Lock()
+ parentName := filepath.Dir(name)
+ w.watches[name] = watchfd
+
+ watchesByDir, ok := w.watchesByDir[parentName]
+ if !ok {
+ watchesByDir = make(map[int]struct{}, 1)
+ w.watchesByDir[parentName] = watchesByDir
+ }
+ watchesByDir[watchfd] = struct{}{}
+ w.paths[watchfd] = pathInfo{name: name, isDir: isDir}
+ w.mu.Unlock()
+ }
+
+ if isDir {
+ // Watch the directory if it has not been watched before, or if it was
+ // watched before, but perhaps only a NOTE_DELETE (watchDirectoryFiles)
+ w.mu.Lock()
+
+ watchDir := (flags&unix.NOTE_WRITE) == unix.NOTE_WRITE &&
+ (!alreadyWatching || (w.dirFlags[name]&unix.NOTE_WRITE) != unix.NOTE_WRITE)
+ // Store flags so this watch can be updated later
+ w.dirFlags[name] = flags
+ w.mu.Unlock()
+
+ if watchDir {
+ if err := w.watchDirectoryFiles(name); err != nil {
+ return "", err
+ }
+ }
+ }
+ return name, nil
+}
+
+// readEvents reads from kqueue and converts the received kevents into
+// Event values that it sends down the Events channel.
+func (w *Watcher) readEvents() {
+ defer func() {
+ close(w.Events)
+ close(w.Errors)
+ _ = unix.Close(w.kq)
+ unix.Close(w.closepipe[0])
+ }()
+
+ eventBuffer := make([]unix.Kevent_t, 10)
+ for closed := false; !closed; {
+ kevents, err := w.read(eventBuffer)
+ // EINTR is okay, the syscall was interrupted before timeout expired.
+ if err != nil && err != unix.EINTR {
+ if !w.sendError(fmt.Errorf("fsnotify.readEvents: %w", err)) {
+ closed = true
+ }
+ continue
+ }
+
+ // Flush the events we received to the Events channel
+ for _, kevent := range kevents {
+ var (
+ watchfd = int(kevent.Ident)
+ mask = uint32(kevent.Fflags)
+ )
+
+ // Shut down the loop when the pipe is closed, but only after all
+ // other events have been processed.
+ if watchfd == w.closepipe[0] {
+ closed = true
+ continue
+ }
+
+ w.mu.Lock()
+ path := w.paths[watchfd]
+ w.mu.Unlock()
+
+ event := w.newEvent(path.name, mask)
+
+ if event.Has(Rename) || event.Has(Remove) {
+ w.remove(event.Name, false)
+ w.mu.Lock()
+ delete(w.fileExists, event.Name)
+ w.mu.Unlock()
+ }
+
+ if path.isDir && event.Has(Write) && !event.Has(Remove) {
+ w.sendDirectoryChangeEvents(event.Name)
+ } else {
+ if !w.sendEvent(event) {
+ closed = true
+ continue
+ }
+ }
+
+ if event.Has(Remove) {
+ // Look for a file that may have overwritten this; for example,
+ // mv f1 f2 will delete f2, then create f2.
+ if path.isDir {
+ fileDir := filepath.Clean(event.Name)
+ w.mu.Lock()
+ _, found := w.watches[fileDir]
+ w.mu.Unlock()
+ if found {
+ err := w.sendDirectoryChangeEvents(fileDir)
+ if err != nil {
+ if !w.sendError(err) {
+ closed = true
+ }
+ }
+ }
+ } else {
+ filePath := filepath.Clean(event.Name)
+ if fi, err := os.Lstat(filePath); err == nil {
+ err := w.sendFileCreatedEventIfNew(filePath, fi)
+ if err != nil {
+ if !w.sendError(err) {
+ closed = true
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+// newEvent returns a platform-independent Event based on kqueue Fflags.
+func (w *Watcher) newEvent(name string, mask uint32) Event {
+ e := Event{Name: name}
+ if mask&unix.NOTE_DELETE == unix.NOTE_DELETE {
+ e.Op |= Remove
+ }
+ if mask&unix.NOTE_WRITE == unix.NOTE_WRITE {
+ e.Op |= Write
+ }
+ if mask&unix.NOTE_RENAME == unix.NOTE_RENAME {
+ e.Op |= Rename
+ }
+ if mask&unix.NOTE_ATTRIB == unix.NOTE_ATTRIB {
+ e.Op |= Chmod
+ }
+ // No point sending a write and delete event at the same time: if it's gone,
+ // then it's gone.
+ if e.Op.Has(Write) && e.Op.Has(Remove) {
+ e.Op &^= Write
+ }
+ return e
+}
+
+// watchDirectoryFiles watches all files in a directory, to mimic inotify when adding a watch on a directory.
+func (w *Watcher) watchDirectoryFiles(dirPath string) error {
+ // Get all files
+ files, err := os.ReadDir(dirPath)
+ if err != nil {
+ return err
+ }
+
+ for _, f := range files {
+ path := filepath.Join(dirPath, f.Name())
+
+ fi, err := f.Info()
+ if err != nil {
+ return fmt.Errorf("%q: %w", path, err)
+ }
+
+ cleanPath, err := w.internalWatch(path, fi)
+ if err != nil {
+ // No permission to read the file; that's not a problem: just skip.
+ // But do add it to w.fileExists to prevent it from being picked up
+ // as a "new" file later (it still shows up in the directory
+ // listing).
+ switch {
+ case errors.Is(err, unix.EACCES) || errors.Is(err, unix.EPERM):
+ cleanPath = filepath.Clean(path)
+ default:
+ return fmt.Errorf("%q: %w", path, err)
+ }
+ }
+
+ w.mu.Lock()
+ w.fileExists[cleanPath] = struct{}{}
+ w.mu.Unlock()
+ }
+
+ return nil
+}
+
+// Search the directory for new files and send an event for them.
+//
+// This functionality is to have the BSD watcher match inotify, which sends
+// a create event for files created in a watched directory.
+func (w *Watcher) sendDirectoryChangeEvents(dir string) error {
+ files, err := os.ReadDir(dir)
+ if err != nil {
+ // Directory no longer exists: we can ignore this safely. kqueue will
+ // still give us the correct events.
+ if errors.Is(err, os.ErrNotExist) {
+ return nil
+ }
+ return fmt.Errorf("fsnotify.sendDirectoryChangeEvents: %w", err)
+ }
+
+ for _, f := range files {
+ fi, err := f.Info()
+ if err != nil {
+ return fmt.Errorf("fsnotify.sendDirectoryChangeEvents: %w", err)
+ }
+
+ err = w.sendFileCreatedEventIfNew(filepath.Join(dir, fi.Name()), fi)
+ if err != nil {
+ // Don't need to send an error if this file isn't readable.
+ if errors.Is(err, unix.EACCES) || errors.Is(err, unix.EPERM) {
+ return nil
+ }
+ return fmt.Errorf("fsnotify.sendDirectoryChangeEvents: %w", err)
+ }
+ }
+ return nil
+}
+
+// sendFileCreatedEventIfNew sends a create event if the file isn't already being tracked.
+func (w *Watcher) sendFileCreatedEventIfNew(filePath string, fi os.FileInfo) (err error) {
+ w.mu.Lock()
+ _, doesExist := w.fileExists[filePath]
+ w.mu.Unlock()
+ if !doesExist {
+ if !w.sendEvent(Event{Name: filePath, Op: Create}) {
+ return
+ }
+ }
+
+ // Like watchDirectoryFiles, but without doing another ReadDir.
+ filePath, err = w.internalWatch(filePath, fi)
+ if err != nil {
+ return err
+ }
+
+ w.mu.Lock()
+ w.fileExists[filePath] = struct{}{}
+ w.mu.Unlock()
+
+ return nil
+}
+
+func (w *Watcher) internalWatch(name string, fi os.FileInfo) (string, error) {
+ if fi.IsDir() {
+ // mimic Linux providing delete events for subdirectories, but preserve
+ // the flags used if currently watching subdirectory
+ w.mu.Lock()
+ flags := w.dirFlags[name]
+ w.mu.Unlock()
+
+ flags |= unix.NOTE_DELETE | unix.NOTE_RENAME
+ return w.addWatch(name, flags)
+ }
+
+ // watch file to mimic Linux inotify
+ return w.addWatch(name, noteAllEvents)
+}
+
+// Register events with the queue.
+func (w *Watcher) register(fds []int, flags int, fflags uint32) error {
+ changes := make([]unix.Kevent_t, len(fds))
+ for i, fd := range fds {
+ // SetKevent converts int to the platform-specific types.
+ unix.SetKevent(&changes[i], fd, unix.EVFILT_VNODE, flags)
+ changes[i].Fflags = fflags
+ }
+
+ // Register the events.
+ success, err := unix.Kevent(w.kq, changes, nil, nil)
+ if success == -1 {
+ return err
+ }
+ return nil
+}
+
+// read retrieves pending events, or waits until an event occurs.
+func (w *Watcher) read(events []unix.Kevent_t) ([]unix.Kevent_t, error) {
+ n, err := unix.Kevent(w.kq, nil, events, nil)
+ if err != nil {
+ return nil, err
+ }
+ return events[0:n], nil
+}
diff --git a/vendor/github.com/fsnotify/fsnotify/backend_other.go b/vendor/github.com/fsnotify/fsnotify/backend_other.go
new file mode 100644
index 0000000..d34a23c
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/backend_other.go
@@ -0,0 +1,205 @@
+//go:build appengine || (!darwin && !dragonfly && !freebsd && !openbsd && !linux && !netbsd && !solaris && !windows)
+// +build appengine !darwin,!dragonfly,!freebsd,!openbsd,!linux,!netbsd,!solaris,!windows
+
+// Note: the documentation on the Watcher type and methods is generated from
+// mkdoc.zsh
+
+package fsnotify
+
+import "errors"
+
+// Watcher watches a set of paths, delivering events on a channel.
+//
+// A watcher should not be copied (e.g. pass it by pointer, rather than by
+// value).
+//
+// # Linux notes
+//
+// When a file is removed a Remove event won't be emitted until all file
+// descriptors are closed, and deletes will always emit a Chmod. For example:
+//
+// fp := os.Open("file")
+// os.Remove("file") // Triggers Chmod
+// fp.Close() // Triggers Remove
+//
+// This is the event that inotify sends, so not much can be changed about this.
+//
+// The fs.inotify.max_user_watches sysctl variable specifies the upper limit
+// for the number of watches per user, and fs.inotify.max_user_instances
+// specifies the maximum number of inotify instances per user. Every Watcher you
+// create is an "instance", and every path you add is a "watch".
+//
+// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and
+// /proc/sys/fs/inotify/max_user_instances
+//
+// To increase them you can use sysctl or write the value to the /proc file:
+//
+// # Default values on Linux 5.18
+// sysctl fs.inotify.max_user_watches=124983
+// sysctl fs.inotify.max_user_instances=128
+//
+// To make the changes persist on reboot edit /etc/sysctl.conf or
+// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check
+// your distro's documentation):
+//
+// fs.inotify.max_user_watches=124983
+// fs.inotify.max_user_instances=128
+//
+// Reaching the limit will result in a "no space left on device" or "too many open
+// files" error.
+//
+// # kqueue notes (macOS, BSD)
+//
+// kqueue requires opening a file descriptor for every file that's being watched;
+// so if you're watching a directory with five files then that's six file
+// descriptors. You will run into your system's "max open files" limit faster on
+// these platforms.
+//
+// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to
+// control the maximum number of open files, as well as /etc/login.conf on BSD
+// systems.
+//
+// # Windows notes
+//
+// Paths can be added as "C:\path\to\dir", but forward slashes
+// ("C:/path/to/dir") will also work.
+//
+// When a watched directory is removed it will always send an event for the
+// directory itself, but may not send events for all files in that directory.
+// Sometimes it will send events for all files, sometimes it will send no
+// events, and often only for some files.
+//
+// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest
+// value that is guaranteed to work with SMB filesystems. If you have many
+// events in quick succession this may not be enough, and you will have to use
+// [WithBufferSize] to increase the value.
+type Watcher struct {
+ // Events sends the filesystem change events.
+ //
+ // fsnotify can send the following events; a "path" here can refer to a
+ // file, directory, symbolic link, or special file like a FIFO.
+ //
+ // fsnotify.Create A new path was created; this may be followed by one
+ // or more Write events if data also gets written to a
+ // file.
+ //
+ // fsnotify.Remove A path was removed.
+ //
+ // fsnotify.Rename A path was renamed. A rename is always sent with the
+ // old path as Event.Name, and a Create event will be
+ // sent with the new name. Renames are only sent for
+ // paths that are currently watched; e.g. moving an
+ // unmonitored file into a monitored directory will
+ // show up as just a Create. Similarly, renaming a file
+ // to outside a monitored directory will show up as
+ // only a Rename.
+ //
+ // fsnotify.Write A file or named pipe was written to. A Truncate will
+ // also trigger a Write. A single "write action"
+ // initiated by the user may show up as one or multiple
+ // writes, depending on when the system syncs things to
+ // disk. For example when compiling a large Go program
+ // you may get hundreds of Write events, and you may
+ // want to wait until you've stopped receiving them
+ // (see the dedup example in cmd/fsnotify).
+ //
+ // Some systems may send a Write event for directories
+ // when the directory content changes.
+ //
+ // fsnotify.Chmod Attributes were changed. On Linux this is also sent
+ // when a file is removed (or more accurately, when a
+ // link to an inode is removed). On kqueue it's sent
+ // when a file is truncated. On Windows it's never
+ // sent.
+ Events chan Event
+
+ // Errors sends any errors.
+ //
+ // ErrEventOverflow is used to indicate there are too many events:
+ //
+ // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl)
+ // - windows: The buffer size is too small; WithBufferSize() can be used to increase it.
+ // - kqueue, fen: Not used.
+ Errors chan error
+}
+
+// NewWatcher creates a new Watcher.
+func NewWatcher() (*Watcher, error) {
+ return nil, errors.New("fsnotify not supported on the current platform")
+}
+
+// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events
+// channel.
+//
+// The main use case for this is situations with a very large number of events
+// where the kernel buffer size can't be increased (e.g. due to lack of
+// permissions). An unbuffered Watcher will perform better for almost all use
+// cases, and whenever possible you will be better off increasing the kernel
+// buffers instead of adding a large userspace buffer.
+func NewBufferedWatcher(sz uint) (*Watcher, error) { return NewWatcher() }
+
+// Close removes all watches and closes the Events channel.
+func (w *Watcher) Close() error { return nil }
+
+// WatchList returns all paths explicitly added with [Watcher.Add] (and that
+// have not yet been removed).
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) WatchList() []string { return nil }
+
+// Add starts monitoring the path for changes.
+//
+// A path can only be watched once; watching it more than once is a no-op and will
+// not return an error. Paths that do not yet exist on the filesystem cannot be
+// watched.
+//
+// A watch will be automatically removed if the watched path is deleted or
+// renamed. The exception is the Windows backend, which doesn't remove the
+// watcher on renames.
+//
+// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special
+// filesystems (/proc, /sys, etc.) generally don't work.
+//
+// Returns [ErrClosed] if [Watcher.Close] was called.
+//
+// See [Watcher.AddWith] for a version that allows adding options.
+//
+// # Watching directories
+//
+// All files in a directory are monitored, including new files that are created
+// after the watcher is started. Subdirectories are not watched (i.e. it's
+// non-recursive).
+//
+// # Watching files
+//
+// Watching individual files (rather than directories) is generally not
+// recommended as many programs (especially editors) update files atomically:
+// they write to a temporary file which is then moved to the destination,
+// overwriting the original (or some variant thereof). The watcher on the
+// original file is now lost, as that file no longer exists.
+//
+// The upshot of this approach is that a power failure or crash won't leave a
+// half-written file.
+//
+// Instead, watch the parent directory and use Event.Name to filter out files
+// you're not interested in. There is an example of this in cmd/fsnotify/file.go.
+func (w *Watcher) Add(name string) error { return nil }
+
+// AddWith is like [Watcher.Add], but allows adding options. When using Add()
+// the defaults described below are used.
+//
+// Possible options are:
+//
+// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on
+// other platforms. The default is 64K (65536 bytes).
+func (w *Watcher) AddWith(name string, opts ...addOpt) error { return nil }
+
+// Remove stops monitoring the path for changes.
+//
+// Directories are always removed non-recursively. For example, if you added
+// /tmp/dir and /tmp/dir/subdir then you will need to remove both.
+//
+// Removing a path that has not yet been added returns [ErrNonExistentWatch].
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) Remove(name string) error { return nil }
diff --git a/vendor/github.com/fsnotify/fsnotify/backend_windows.go b/vendor/github.com/fsnotify/fsnotify/backend_windows.go
new file mode 100644
index 0000000..9bc91e5
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/backend_windows.go
@@ -0,0 +1,827 @@
+//go:build windows
+// +build windows
+
+// Windows backend based on ReadDirectoryChangesW()
+//
+// https://learn.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-readdirectorychangesw
+//
+// Note: the documentation on the Watcher type and methods is generated from
+// mkdoc.zsh
+
+package fsnotify
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "strings"
+ "sync"
+ "unsafe"
+
+ "golang.org/x/sys/windows"
+)
+
+// Watcher watches a set of paths, delivering events on a channel.
+//
+// A watcher should not be copied (e.g. pass it by pointer, rather than by
+// value).
+//
+// # Linux notes
+//
+// When a file is removed a Remove event won't be emitted until all file
+// descriptors are closed, and deletes will always emit a Chmod. For example:
+//
+// fp := os.Open("file")
+// os.Remove("file") // Triggers Chmod
+// fp.Close() // Triggers Remove
+//
+// This is the event that inotify sends, so not much can be changed about this.
+//
+// The fs.inotify.max_user_watches sysctl variable specifies the upper limit
+// for the number of watches per user, and fs.inotify.max_user_instances
+// specifies the maximum number of inotify instances per user. Every Watcher you
+// create is an "instance", and every path you add is a "watch".
+//
+// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and
+// /proc/sys/fs/inotify/max_user_instances
+//
+// To increase them you can use sysctl or write the value to the /proc file:
+//
+// # Default values on Linux 5.18
+// sysctl fs.inotify.max_user_watches=124983
+// sysctl fs.inotify.max_user_instances=128
+//
+// To make the changes persist on reboot edit /etc/sysctl.conf or
+// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check
+// your distro's documentation):
+//
+// fs.inotify.max_user_watches=124983
+// fs.inotify.max_user_instances=128
+//
+// Reaching the limit will result in a "no space left on device" or "too many open
+// files" error.
+//
+// # kqueue notes (macOS, BSD)
+//
+// kqueue requires opening a file descriptor for every file that's being watched;
+// so if you're watching a directory with five files then that's six file
+// descriptors. You will run into your system's "max open files" limit faster on
+// these platforms.
+//
+// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to
+// control the maximum number of open files, as well as /etc/login.conf on BSD
+// systems.
+//
+// # Windows notes
+//
+// Paths can be added as "C:\path\to\dir", but forward slashes
+// ("C:/path/to/dir") will also work.
+//
+// When a watched directory is removed it will always send an event for the
+// directory itself, but may not send events for all files in that directory.
+// Sometimes it will send events for all files, sometimes it will send no
+// events, and often only for some files.
+//
+// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest
+// value that is guaranteed to work with SMB filesystems. If you have many
+// events in quick succession this may not be enough, and you will have to use
+// [WithBufferSize] to increase the value.
+type Watcher struct {
+ // Events sends the filesystem change events.
+ //
+ // fsnotify can send the following events; a "path" here can refer to a
+ // file, directory, symbolic link, or special file like a FIFO.
+ //
+ // fsnotify.Create A new path was created; this may be followed by one
+ // or more Write events if data also gets written to a
+ // file.
+ //
+ // fsnotify.Remove A path was removed.
+ //
+ // fsnotify.Rename A path was renamed. A rename is always sent with the
+ // old path as Event.Name, and a Create event will be
+ // sent with the new name. Renames are only sent for
+ // paths that are currently watched; e.g. moving an
+ // unmonitored file into a monitored directory will
+ // show up as just a Create. Similarly, renaming a file
+ // to outside a monitored directory will show up as
+ // only a Rename.
+ //
+ // fsnotify.Write A file or named pipe was written to. A Truncate will
+ // also trigger a Write. A single "write action"
+ // initiated by the user may show up as one or multiple
+ // writes, depending on when the system syncs things to
+ // disk. For example when compiling a large Go program
+ // you may get hundreds of Write events, and you may
+ // want to wait until you've stopped receiving them
+ // (see the dedup example in cmd/fsnotify).
+ //
+ // Some systems may send a Write event for directories
+ // when the directory content changes.
+ //
+ // fsnotify.Chmod Attributes were changed. On Linux this is also sent
+ // when a file is removed (or more accurately, when a
+ // link to an inode is removed). On kqueue it's sent
+ // when a file is truncated. On Windows it's never
+ // sent.
+ Events chan Event
+
+ // Errors sends any errors.
+ //
+ // ErrEventOverflow is used to indicate there are too many events:
+ //
+ // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl)
+ // - windows: The buffer size is too small; WithBufferSize() can be used to increase it.
+ // - kqueue, fen: Not used.
+ Errors chan error
+
+ port windows.Handle // Handle to completion port
+ input chan *input // Inputs to the reader are sent on this channel
+ quit chan chan<- error
+
+ mu sync.Mutex // Protects access to watches, closed
+ watches watchMap // Map of watches (key: i-number)
+ closed bool // Set to true when Close() is first called
+}
+
+// NewWatcher creates a new Watcher.
+func NewWatcher() (*Watcher, error) {
+ return NewBufferedWatcher(50)
+}
+
+// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events
+// channel.
+//
+// The main use case for this is situations with a very large number of events
+// where the kernel buffer size can't be increased (e.g. due to lack of
+// permissions). An unbuffered Watcher will perform better for almost all use
+// cases, and whenever possible you will be better off increasing the kernel
+// buffers instead of adding a large userspace buffer.
+func NewBufferedWatcher(sz uint) (*Watcher, error) {
+ port, err := windows.CreateIoCompletionPort(windows.InvalidHandle, 0, 0, 0)
+ if err != nil {
+ return nil, os.NewSyscallError("CreateIoCompletionPort", err)
+ }
+ w := &Watcher{
+ port: port,
+ watches: make(watchMap),
+ input: make(chan *input, 1),
+ Events: make(chan Event, sz),
+ Errors: make(chan error),
+ quit: make(chan chan<- error, 1),
+ }
+ go w.readEvents()
+ return w, nil
+}
+
+func (w *Watcher) isClosed() bool {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+ return w.closed
+}
+
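+// sendEvent sends the event on the Events channel. It returns false only when
+// the mask is zero (there is nothing to send); if a quit request is pending,
+// the request is put back on the quit channel so the reader loop still sees it.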
+func (w *Watcher) sendEvent(name string, mask uint64) bool {
+ if mask == 0 {
+ return false
+ }
+
+ event := w.newEvent(name, uint32(mask))
+ select {
+ case ch := <-w.quit:
+ w.quit <- ch
+ case w.Events <- event:
+ }
+ return true
+}
+
+// Returns true if the error was sent, or false if the watcher is closed.
+func (w *Watcher) sendError(err error) bool {
+ select {
+ case w.Errors <- err:
+ return true
+ case <-w.quit:
+ }
+ return false
+}
+
+// Close removes all watches and closes the Events channel.
+func (w *Watcher) Close() error {
+ if w.isClosed() {
+ return nil
+ }
+
+ w.mu.Lock()
+ w.closed = true
+ w.mu.Unlock()
+
+ // Send "quit" message to the reader goroutine
+ ch := make(chan error)
+ w.quit <- ch
+ if err := w.wakeupReader(); err != nil {
+ return err
+ }
+ return <-ch
+}
+
+// Add starts monitoring the path for changes.
+//
+// A path can only be watched once; watching it more than once is a no-op and will
+// not return an error. Paths that do not yet exist on the filesystem cannot be
+// watched.
+//
+// A watch will be automatically removed if the watched path is deleted or
+// renamed. The exception is the Windows backend, which doesn't remove the
+// watcher on renames.
+//
+// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special
+// filesystems (/proc, /sys, etc.) generally don't work.
+//
+// Returns [ErrClosed] if [Watcher.Close] was called.
+//
+// See [Watcher.AddWith] for a version that allows adding options.
+//
+// # Watching directories
+//
+// All files in a directory are monitored, including new files that are created
+// after the watcher is started. Subdirectories are not watched (i.e. it's
+// non-recursive).
+//
+// # Watching files
+//
+// Watching individual files (rather than directories) is generally not
+// recommended as many programs (especially editors) update files atomically:
+// they write to a temporary file which is then moved to the destination,
+// overwriting the original (or some variant thereof). The watcher on the
+// original file is now lost, as that file no longer exists.
+//
+// The upshot of this approach is that a power failure or crash won't leave a
+// half-written file.
+//
+// Instead, watch the parent directory and use Event.Name to filter out files
+// you're not interested in. There is an example of this in cmd/fsnotify/file.go.
+func (w *Watcher) Add(name string) error { return w.AddWith(name) }
+
+// AddWith is like [Watcher.Add], but allows adding options. When using Add()
+// the defaults described below are used.
+//
+// Possible options are:
+//
+// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on
+// other platforms. The default is 64K (65536 bytes).
+func (w *Watcher) AddWith(name string, opts ...addOpt) error {
+ if w.isClosed() {
+ return ErrClosed
+ }
+
+ with := getOptions(opts...)
+ if with.bufsize < 4096 {
+ return fmt.Errorf("fsnotify.WithBufferSize: buffer size cannot be smaller than 4096 bytes")
+ }
+
+ in := &input{
+ op: opAddWatch,
+ path: filepath.Clean(name),
+ flags: sysFSALLEVENTS,
+ reply: make(chan error),
+ bufsize: with.bufsize,
+ }
+ w.input <- in
+ if err := w.wakeupReader(); err != nil {
+ return err
+ }
+ return <-in.reply
+}
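+
+// For example (a sketch; the path and the 256 KiB size are arbitrary), a
+// larger buffer can be requested when many events arrive in quick succession:
+//
+//	err := w.AddWith(`C:\path\to\dir`, WithBufferSize(256*1024))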
+
+// Remove stops monitoring the path for changes.
+//
+// Directories are always removed non-recursively. For example, if you added
+// /tmp/dir and /tmp/dir/subdir then you will need to remove both.
+//
+// Removing a path that has not yet been added returns [ErrNonExistentWatch].
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) Remove(name string) error {
+ if w.isClosed() {
+ return nil
+ }
+
+ in := &input{
+ op: opRemoveWatch,
+ path: filepath.Clean(name),
+ reply: make(chan error),
+ }
+ w.input <- in
+ if err := w.wakeupReader(); err != nil {
+ return err
+ }
+ return <-in.reply
+}
+
+// WatchList returns all paths explicitly added with [Watcher.Add] (and that
+// have not yet been removed).
+//
+// Returns nil if [Watcher.Close] was called.
+func (w *Watcher) WatchList() []string {
+ if w.isClosed() {
+ return nil
+ }
+
+ w.mu.Lock()
+ defer w.mu.Unlock()
+
+ entries := make([]string, 0, len(w.watches))
+ for _, entry := range w.watches {
+ for _, watchEntry := range entry {
+ entries = append(entries, watchEntry.path)
+ }
+ }
+
+ return entries
+}
+
+// These options are from the old golang.org/x/exp/winfsnotify, where you could
+// add various options to the watch. This has long since been removed.
+//
+// The "sys" in the name is misleading as they're not part of any "system".
+//
+// This should all be removed at some point, and just use windows.FILE_NOTIFY_*
+const (
+ sysFSALLEVENTS = 0xfff
+ sysFSCREATE = 0x100
+ sysFSDELETE = 0x200
+ sysFSDELETESELF = 0x400
+ sysFSMODIFY = 0x2
+ sysFSMOVE = 0xc0
+ sysFSMOVEDFROM = 0x40
+ sysFSMOVEDTO = 0x80
+ sysFSMOVESELF = 0x800
+ sysFSIGNORED = 0x8000
+)
+
+func (w *Watcher) newEvent(name string, mask uint32) Event {
+ e := Event{Name: name}
+ if mask&sysFSCREATE == sysFSCREATE || mask&sysFSMOVEDTO == sysFSMOVEDTO {
+ e.Op |= Create
+ }
+ if mask&sysFSDELETE == sysFSDELETE || mask&sysFSDELETESELF == sysFSDELETESELF {
+ e.Op |= Remove
+ }
+ if mask&sysFSMODIFY == sysFSMODIFY {
+ e.Op |= Write
+ }
+ if mask&sysFSMOVE == sysFSMOVE || mask&sysFSMOVESELF == sysFSMOVESELF || mask&sysFSMOVEDFROM == sysFSMOVEDFROM {
+ e.Op |= Rename
+ }
+ return e
+}
+
+const (
+ opAddWatch = iota
+ opRemoveWatch
+)
+
+const (
+ provisional uint64 = 1 << (32 + iota)
+)
+
+type input struct {
+ op int
+ path string
+ flags uint32
+ bufsize int
+ reply chan error
+}
+
+type inode struct {
+ handle windows.Handle
+ volume uint32
+ index uint64
+}
+
+type watch struct {
+ ov windows.Overlapped
+ ino *inode // i-number
+ recurse bool // Recursive watch?
+ path string // Directory path
+ mask uint64 // Directory itself is being watched with these notify flags
+ names map[string]uint64 // Map of names being watched and their notify flags
+ rename string // Remembers the old name while renaming a file
+ buf []byte // buffer, allocated later
+}
+
+type (
+ indexMap map[uint64]*watch
+ watchMap map[uint32]indexMap
+)
+
+func (w *Watcher) wakeupReader() error {
+ err := windows.PostQueuedCompletionStatus(w.port, 0, 0, nil)
+ if err != nil {
+ return os.NewSyscallError("PostQueuedCompletionStatus", err)
+ }
+ return nil
+}
+
+func (w *Watcher) getDir(pathname string) (dir string, err error) {
+ attr, err := windows.GetFileAttributes(windows.StringToUTF16Ptr(pathname))
+ if err != nil {
+ return "", os.NewSyscallError("GetFileAttributes", err)
+ }
+ if attr&windows.FILE_ATTRIBUTE_DIRECTORY != 0 {
+ dir = pathname
+ } else {
+ dir, _ = filepath.Split(pathname)
+ dir = filepath.Clean(dir)
+ }
+ return
+}
+
+func (w *Watcher) getIno(path string) (ino *inode, err error) {
+ h, err := windows.CreateFile(windows.StringToUTF16Ptr(path),
+ windows.FILE_LIST_DIRECTORY,
+ windows.FILE_SHARE_READ|windows.FILE_SHARE_WRITE|windows.FILE_SHARE_DELETE,
+ nil, windows.OPEN_EXISTING,
+ windows.FILE_FLAG_BACKUP_SEMANTICS|windows.FILE_FLAG_OVERLAPPED, 0)
+ if err != nil {
+ return nil, os.NewSyscallError("CreateFile", err)
+ }
+
+ var fi windows.ByHandleFileInformation
+ err = windows.GetFileInformationByHandle(h, &fi)
+ if err != nil {
+ windows.CloseHandle(h)
+ return nil, os.NewSyscallError("GetFileInformationByHandle", err)
+ }
+ ino = &inode{
+ handle: h,
+ volume: fi.VolumeSerialNumber,
+ index: uint64(fi.FileIndexHigh)<<32 | uint64(fi.FileIndexLow),
+ }
+ return ino, nil
+}
+
+// Must run within the I/O thread.
+func (m watchMap) get(ino *inode) *watch {
+ if i := m[ino.volume]; i != nil {
+ return i[ino.index]
+ }
+ return nil
+}
+
+// Must run within the I/O thread.
+func (m watchMap) set(ino *inode, watch *watch) {
+ i := m[ino.volume]
+ if i == nil {
+ i = make(indexMap)
+ m[ino.volume] = i
+ }
+ i[ino.index] = watch
+}
+
+// Must run within the I/O thread.
+func (w *Watcher) addWatch(pathname string, flags uint64, bufsize int) error {
+ //pathname, recurse := recursivePath(pathname)
+ recurse := false
+
+ dir, err := w.getDir(pathname)
+ if err != nil {
+ return err
+ }
+
+ ino, err := w.getIno(dir)
+ if err != nil {
+ return err
+ }
+ w.mu.Lock()
+ watchEntry := w.watches.get(ino)
+ w.mu.Unlock()
+ if watchEntry == nil {
+ _, err := windows.CreateIoCompletionPort(ino.handle, w.port, 0, 0)
+ if err != nil {
+ windows.CloseHandle(ino.handle)
+ return os.NewSyscallError("CreateIoCompletionPort", err)
+ }
+ watchEntry = &watch{
+ ino: ino,
+ path: dir,
+ names: make(map[string]uint64),
+ recurse: recurse,
+ buf: make([]byte, bufsize),
+ }
+ w.mu.Lock()
+ w.watches.set(ino, watchEntry)
+ w.mu.Unlock()
+ flags |= provisional
+ } else {
+ windows.CloseHandle(ino.handle)
+ }
+ if pathname == dir {
+ watchEntry.mask |= flags
+ } else {
+ watchEntry.names[filepath.Base(pathname)] |= flags
+ }
+
+ err = w.startRead(watchEntry)
+ if err != nil {
+ return err
+ }
+
+ if pathname == dir {
+ watchEntry.mask &= ^provisional
+ } else {
+ watchEntry.names[filepath.Base(pathname)] &= ^provisional
+ }
+ return nil
+}
+
+// Must run within the I/O thread.
+func (w *Watcher) remWatch(pathname string) error {
+ pathname, recurse := recursivePath(pathname)
+
+ dir, err := w.getDir(pathname)
+ if err != nil {
+ return err
+ }
+ ino, err := w.getIno(dir)
+ if err != nil {
+ return err
+ }
+
+ w.mu.Lock()
+ watch := w.watches.get(ino)
+ w.mu.Unlock()
+
+ // Check watch for nil before dereferencing it; the path may never
+ // have been added.
+ if watch == nil {
+ return fmt.Errorf("%w: %s", ErrNonExistentWatch, pathname)
+ }
+ if recurse && !watch.recurse {
+ return fmt.Errorf("can't use \\... with non-recursive watch %q", pathname)
+ }
+
+ err = windows.CloseHandle(ino.handle)
+ if err != nil {
+ w.sendError(os.NewSyscallError("CloseHandle", err))
+ }
+ if pathname == dir {
+ w.sendEvent(watch.path, watch.mask&sysFSIGNORED)
+ watch.mask = 0
+ } else {
+ name := filepath.Base(pathname)
+ w.sendEvent(filepath.Join(watch.path, name), watch.names[name]&sysFSIGNORED)
+ delete(watch.names, name)
+ }
+
+ return w.startRead(watch)
+}
+
+// Must run within the I/O thread.
+func (w *Watcher) deleteWatch(watch *watch) {
+ for name, mask := range watch.names {
+ if mask&provisional == 0 {
+ w.sendEvent(filepath.Join(watch.path, name), mask&sysFSIGNORED)
+ }
+ delete(watch.names, name)
+ }
+ if watch.mask != 0 {
+ if watch.mask&provisional == 0 {
+ w.sendEvent(watch.path, watch.mask&sysFSIGNORED)
+ }
+ watch.mask = 0
+ }
+}
+
+// Must run within the I/O thread.
+func (w *Watcher) startRead(watch *watch) error {
+ err := windows.CancelIo(watch.ino.handle)
+ if err != nil {
+ w.sendError(os.NewSyscallError("CancelIo", err))
+ w.deleteWatch(watch)
+ }
+ mask := w.toWindowsFlags(watch.mask)
+ for _, m := range watch.names {
+ mask |= w.toWindowsFlags(m)
+ }
+ if mask == 0 {
+ err := windows.CloseHandle(watch.ino.handle)
+ if err != nil {
+ w.sendError(os.NewSyscallError("CloseHandle", err))
+ }
+ w.mu.Lock()
+ delete(w.watches[watch.ino.volume], watch.ino.index)
+ w.mu.Unlock()
+ return nil
+ }
+
+ // We need to pass a pointer to the buffer's underlying array, rather than the slice header.
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&watch.buf))
+ rdErr := windows.ReadDirectoryChanges(watch.ino.handle,
+ (*byte)(unsafe.Pointer(hdr.Data)), uint32(hdr.Len),
+ watch.recurse, mask, nil, &watch.ov, 0)
+ if rdErr != nil {
+ err := os.NewSyscallError("ReadDirectoryChanges", rdErr)
+ if rdErr == windows.ERROR_ACCESS_DENIED && watch.mask&provisional == 0 {
+ // Watched directory was probably removed
+ w.sendEvent(watch.path, watch.mask&sysFSDELETESELF)
+ err = nil
+ }
+ w.deleteWatch(watch)
+ w.startRead(watch)
+ return err
+ }
+ return nil
+}
+
+// readEvents reads from the I/O completion port, converts the
+// received events into Event objects and sends them via the Events channel.
+// Entry point to the I/O thread.
+func (w *Watcher) readEvents() {
+ var (
+ n uint32
+ key uintptr
+ ov *windows.Overlapped
+ )
+ runtime.LockOSThread()
+
+ for {
+ // This error is handled after the watch == nil check below.
+ qErr := windows.GetQueuedCompletionStatus(w.port, &n, &key, &ov, windows.INFINITE)
+
+ watch := (*watch)(unsafe.Pointer(ov))
+ if watch == nil {
+ select {
+ case ch := <-w.quit:
+ w.mu.Lock()
+ var indexes []indexMap
+ for _, index := range w.watches {
+ indexes = append(indexes, index)
+ }
+ w.mu.Unlock()
+ for _, index := range indexes {
+ for _, watch := range index {
+ w.deleteWatch(watch)
+ w.startRead(watch)
+ }
+ }
+
+ err := windows.CloseHandle(w.port)
+ if err != nil {
+ err = os.NewSyscallError("CloseHandle", err)
+ }
+ close(w.Events)
+ close(w.Errors)
+ ch <- err
+ return
+ case in := <-w.input:
+ switch in.op {
+ case opAddWatch:
+ in.reply <- w.addWatch(in.path, uint64(in.flags), in.bufsize)
+ case opRemoveWatch:
+ in.reply <- w.remWatch(in.path)
+ }
+ default:
+ }
+ continue
+ }
+
+ switch qErr {
+ case nil:
+ // No error
+ case windows.ERROR_MORE_DATA:
+ if watch == nil {
+ w.sendError(errors.New("ERROR_MORE_DATA has unexpectedly null lpOverlapped buffer"))
+ } else {
+ // The i/o succeeded but the buffer is full.
+ // In theory we should be building up a full packet.
+ // In practice we can get away with just carrying on.
+ // watch.buf is a slice, so use len for the buffer size;
+ // unsafe.Sizeof would only measure the slice header.
+ n = uint32(len(watch.buf))
+ }
+ case windows.ERROR_ACCESS_DENIED:
+ // Watched directory was probably removed
+ w.sendEvent(watch.path, watch.mask&sysFSDELETESELF)
+ w.deleteWatch(watch)
+ w.startRead(watch)
+ continue
+ case windows.ERROR_OPERATION_ABORTED:
+ // CancelIo was called on this handle
+ continue
+ default:
+ w.sendError(os.NewSyscallError("GetQueuedCompletionPort", qErr))
+ continue
+ }
+
+ var offset uint32
+ for {
+ if n == 0 {
+ w.sendError(ErrEventOverflow)
+ break
+ }
+
+ // Point "raw" to the event in the buffer
+ raw := (*windows.FileNotifyInformation)(unsafe.Pointer(&watch.buf[offset]))
+
+ // Create a buf that is the size of the path name
+ size := int(raw.FileNameLength / 2)
+ var buf []uint16
+ // TODO: Use unsafe.Slice in Go 1.17; https://stackoverflow.com/questions/51187973
+ sh := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ sh.Data = uintptr(unsafe.Pointer(&raw.FileName))
+ sh.Len = size
+ sh.Cap = size
+ name := windows.UTF16ToString(buf)
+ fullname := filepath.Join(watch.path, name)
+
+ var mask uint64
+ switch raw.Action {
+ case windows.FILE_ACTION_REMOVED:
+ mask = sysFSDELETESELF
+ case windows.FILE_ACTION_MODIFIED:
+ mask = sysFSMODIFY
+ case windows.FILE_ACTION_RENAMED_OLD_NAME:
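+ // Remember the old name; the RENAMED_NEW_NAME event that
+ // follows completes the rename pair.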
+ watch.rename = name
+ case windows.FILE_ACTION_RENAMED_NEW_NAME:
+ // Update saved path of all sub-watches.
+ old := filepath.Join(watch.path, watch.rename)
+ w.mu.Lock()
+ for _, watchMap := range w.watches {
+ for _, ww := range watchMap {
+ if strings.HasPrefix(ww.path, old) {
+ ww.path = filepath.Join(fullname, strings.TrimPrefix(ww.path, old))
+ }
+ }
+ }
+ w.mu.Unlock()
+
+ if watch.names[watch.rename] != 0 {
+ watch.names[name] |= watch.names[watch.rename]
+ delete(watch.names, watch.rename)
+ mask = sysFSMOVESELF
+ }
+ }
+
+ sendNameEvent := func() {
+ w.sendEvent(fullname, watch.names[name]&mask)
+ }
+ if raw.Action != windows.FILE_ACTION_RENAMED_NEW_NAME {
+ sendNameEvent()
+ }
+ if raw.Action == windows.FILE_ACTION_REMOVED {
+ w.sendEvent(fullname, watch.names[name]&sysFSIGNORED)
+ delete(watch.names, name)
+ }
+
+ w.sendEvent(fullname, watch.mask&w.toFSnotifyFlags(raw.Action))
+ if raw.Action == windows.FILE_ACTION_RENAMED_NEW_NAME {
+ fullname = filepath.Join(watch.path, watch.rename)
+ sendNameEvent()
+ }
+
+ // Move to the next event in the buffer
+ if raw.NextEntryOffset == 0 {
+ break
+ }
+ offset += raw.NextEntryOffset
+
+ // Error!
+ if offset >= n {
+ //lint:ignore ST1005 Windows should be capitalized
+ w.sendError(errors.New(
+ "Windows system assumed buffer larger than it is, events have likely been missed"))
+ break
+ }
+ }
+
+ if err := w.startRead(watch); err != nil {
+ w.sendError(err)
+ }
+ }
+}
+
+func (w *Watcher) toWindowsFlags(mask uint64) uint32 {
+ var m uint32
+ if mask&sysFSMODIFY != 0 {
+ m |= windows.FILE_NOTIFY_CHANGE_LAST_WRITE
+ }
+ if mask&(sysFSMOVE|sysFSCREATE|sysFSDELETE) != 0 {
+ m |= windows.FILE_NOTIFY_CHANGE_FILE_NAME | windows.FILE_NOTIFY_CHANGE_DIR_NAME
+ }
+ return m
+}
+
+func (w *Watcher) toFSnotifyFlags(action uint32) uint64 {
+ switch action {
+ case windows.FILE_ACTION_ADDED:
+ return sysFSCREATE
+ case windows.FILE_ACTION_REMOVED:
+ return sysFSDELETE
+ case windows.FILE_ACTION_MODIFIED:
+ return sysFSMODIFY
+ case windows.FILE_ACTION_RENAMED_OLD_NAME:
+ return sysFSMOVEDFROM
+ case windows.FILE_ACTION_RENAMED_NEW_NAME:
+ return sysFSMOVEDTO
+ }
+ return 0
+}
diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify.go b/vendor/github.com/fsnotify/fsnotify/fsnotify.go
new file mode 100644
index 0000000..24c99cc
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/fsnotify.go
@@ -0,0 +1,146 @@
+// Package fsnotify provides a cross-platform interface for file system
+// notifications.
+//
+// Currently supported systems:
+//
+// Linux 2.6.32+ via inotify
+// BSD, macOS via kqueue
+// Windows via ReadDirectoryChangesW
+// illumos via FEN
+package fsnotify
+
+import (
+ "errors"
+ "fmt"
+ "path/filepath"
+ "strings"
+)
+
+// Event represents a file system notification.
+type Event struct {
+ // Path to the file or directory.
+ //
+ // Paths are relative to the input; for example with Add("dir") the Name
+ // will be set to "dir/file" if you create that file, but if you use
+ // Add("/path/to/dir") it will be "/path/to/dir/file".
+ Name string
+
+ // File operation that triggered the event.
+ //
+ // This is a bitmask and some systems may send multiple operations at once.
+ // Use the Event.Has() method instead of comparing with ==.
+ Op Op
+}
+
+// Op describes a set of file operations.
+type Op uint32
+
+// The operations fsnotify can trigger; see the documentation on [Watcher] for a
+// full description, and check them with [Event.Has].
+const (
+ // A new pathname was created.
+ Create Op = 1 << iota
+
+ // The pathname was written to; this does *not* mean the write has finished,
+ // and a write can be followed by more writes.
+ Write
+
+ // The path was removed; any watches on it will be removed. Some "remove"
+ // operations may trigger a Rename if the file is actually moved (for
+ // example "remove to trash" is often a rename).
+ Remove
+
+ // The path was renamed to something else; any watches on it will be
+ // removed.
+ Rename
+
+ // File attributes were changed.
+ //
+ // It's generally not recommended to take action on this event, as it may
+ // get triggered very frequently by some software. For example, Spotlight
+ // indexing on macOS, anti-virus software, backup software, etc.
+ Chmod
+)
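+
+// A minimal usage sketch (hypothetical path, error handling elided): because
+// Op is a bitmask, check events with Has rather than comparing with ==.
+//
+//	w, _ := NewWatcher()
+//	_ = w.Add("/some/dir")
+//	for e := range w.Events {
+//		if e.Has(Write) {
+//			// a write happened; other bits may be set on the same event
+//		}
+//	}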
+
+// Common errors that can be reported.
+var (
+ ErrNonExistentWatch = errors.New("fsnotify: can't remove non-existent watch")
+ ErrEventOverflow = errors.New("fsnotify: queue or buffer overflow")
+ ErrClosed = errors.New("fsnotify: watcher already closed")
+)
+
+func (o Op) String() string {
+ var b strings.Builder
+ if o.Has(Create) {
+ b.WriteString("|CREATE")
+ }
+ if o.Has(Remove) {
+ b.WriteString("|REMOVE")
+ }
+ if o.Has(Write) {
+ b.WriteString("|WRITE")
+ }
+ if o.Has(Rename) {
+ b.WriteString("|RENAME")
+ }
+ if o.Has(Chmod) {
+ b.WriteString("|CHMOD")
+ }
+ if b.Len() == 0 {
+ return "[no events]"
+ }
+ return b.String()[1:]
+}
+
+// Has reports whether this operation includes the given operation.
+func (o Op) Has(h Op) bool { return o&h != 0 }
+
+// Has reports if this event has the given operation.
+func (e Event) Has(op Op) bool { return e.Op.Has(op) }
+
+// String returns a string representation of the event with its path.
+func (e Event) String() string {
+ return fmt.Sprintf("%-13s %q", e.Op.String(), e.Name)
+}
+
+type (
+ addOpt func(opt *withOpts)
+ withOpts struct {
+ bufsize int
+ }
+)
+
+var defaultOpts = withOpts{
+ bufsize: 65536, // 64K
+}
+
+func getOptions(opts ...addOpt) withOpts {
+ with := defaultOpts
+ for _, o := range opts {
+ o(&with)
+ }
+ return with
+}
+
+// WithBufferSize sets the [ReadDirectoryChangesW] buffer size.
+//
+// This only has effect on Windows systems, and is a no-op for other backends.
+//
+// The default value is 64K (65536 bytes) which is the highest value that works
+// on all filesystems and should be enough for most applications, but if you
+// have a large burst of events it may not be enough. You can increase it if
+// you're hitting "queue or buffer overflow" errors ([ErrEventOverflow]).
+//
+// [ReadDirectoryChangesW]: https://learn.microsoft.com/en-gb/windows/win32/api/winbase/nf-winbase-readdirectorychangesw
+func WithBufferSize(bytes int) addOpt {
+ return func(opt *withOpts) { opt.bufsize = bytes }
+}
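+
+// For example, a sketch (path hypothetical; AddWith is this package's
+// option-taking variant of Add):
+//
+//	w, _ := NewWatcher()
+//	_ = w.AddWith(`C:\folder`, WithBufferSize(256*1024))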
+
+// Check if this path is recursive (ends with "/..." or "\..."), and return the
+// path with the /... stripped.
+func recursivePath(path string) (string, bool) {
+ if filepath.Base(path) == "..." {
+ return filepath.Dir(path), true
+ }
+ return path, false
+}
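+
+// For example, recursivePath("dir/...") reports ("dir", true), while
+// recursivePath("dir") reports ("dir", false).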
diff --git a/vendor/github.com/fsnotify/fsnotify/mkdoc.zsh b/vendor/github.com/fsnotify/fsnotify/mkdoc.zsh
new file mode 100644
index 0000000..99012ae
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/mkdoc.zsh
@@ -0,0 +1,259 @@
+#!/usr/bin/env zsh
+[ "${ZSH_VERSION:-}" = "" ] && echo >&2 "Only works with zsh" && exit 1
+setopt err_exit no_unset pipefail extended_glob
+
+# Simple script to update the godoc comments on all watchers so you don't need
+# to update the same comment 5 times.
+
+watcher=$(</tmp/x
+ print -r -- $cmt >>/tmp/x
+ tail -n+$(( end + 1 )) $file >>/tmp/x
+ mv /tmp/x $file
+ done
+}
+
+set-cmt '^type Watcher struct ' $watcher
+set-cmt '^func NewWatcher(' $new
+set-cmt '^func NewBufferedWatcher(' $newbuffered
+set-cmt '^func (w \*Watcher) Add(' $add
+set-cmt '^func (w \*Watcher) AddWith(' $addwith
+set-cmt '^func (w \*Watcher) Remove(' $remove
+set-cmt '^func (w \*Watcher) Close(' $close
+set-cmt '^func (w \*Watcher) WatchList(' $watchlist
+set-cmt '^[[:space:]]*Events *chan Event$' $events
+set-cmt '^[[:space:]]*Errors *chan error$' $errors
diff --git a/vendor/github.com/fsnotify/fsnotify/system_bsd.go b/vendor/github.com/fsnotify/fsnotify/system_bsd.go
new file mode 100644
index 0000000..4322b0b
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/system_bsd.go
@@ -0,0 +1,8 @@
+//go:build freebsd || openbsd || netbsd || dragonfly
+// +build freebsd openbsd netbsd dragonfly
+
+package fsnotify
+
+import "golang.org/x/sys/unix"
+
+const openMode = unix.O_NONBLOCK | unix.O_RDONLY | unix.O_CLOEXEC
diff --git a/vendor/github.com/fsnotify/fsnotify/system_darwin.go b/vendor/github.com/fsnotify/fsnotify/system_darwin.go
new file mode 100644
index 0000000..5da5ffa
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/system_darwin.go
@@ -0,0 +1,9 @@
+//go:build darwin
+// +build darwin
+
+package fsnotify
+
+import "golang.org/x/sys/unix"
+
+// note: this constant is not defined on BSD
+const openMode = unix.O_EVTONLY | unix.O_CLOEXEC
diff --git a/vendor/github.com/go-playground/locales/.gitignore b/vendor/github.com/go-playground/locales/.gitignore
new file mode 100644
index 0000000..daf913b
--- /dev/null
+++ b/vendor/github.com/go-playground/locales/.gitignore
@@ -0,0 +1,24 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
diff --git a/vendor/github.com/go-playground/locales/.travis.yml b/vendor/github.com/go-playground/locales/.travis.yml
new file mode 100644
index 0000000..d50237a
--- /dev/null
+++ b/vendor/github.com/go-playground/locales/.travis.yml
@@ -0,0 +1,26 @@
+language: go
+go:
+ - 1.13.1
+ - tip
+matrix:
+ allow_failures:
+ - go: tip
+
+notifications:
+ email:
+ recipients: dean.karn@gmail.com
+ on_success: change
+ on_failure: always
+
+before_install:
+ - go install github.com/mattn/goveralls
+
+# Only clone the most recent commit.
+git:
+ depth: 1
+
+script:
+ - go test -v -race -covermode=atomic -coverprofile=coverage.coverprofile ./...
+
+after_success: |
+ goveralls -coverprofile=coverage.coverprofile -service travis-ci -repotoken $COVERALLS_TOKEN
\ No newline at end of file
diff --git a/vendor/github.com/go-playground/locales/LICENSE b/vendor/github.com/go-playground/locales/LICENSE
new file mode 100644
index 0000000..75854ac
--- /dev/null
+++ b/vendor/github.com/go-playground/locales/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Go Playground
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/go-playground/locales/README.md b/vendor/github.com/go-playground/locales/README.md
new file mode 100644
index 0000000..7b6be2c
--- /dev/null
+++ b/vendor/github.com/go-playground/locales/README.md
@@ -0,0 +1,170 @@
+## locales
+ ![Project status](https://img.shields.io/badge/version-0.14.1-green.svg)
+[![Build Status](https://travis-ci.org/go-playground/locales.svg?branch=master)](https://travis-ci.org/go-playground/locales)
+[![GoDoc](https://godoc.org/github.com/go-playground/locales?status.svg)](https://godoc.org/github.com/go-playground/locales)
+![License](https://img.shields.io/dub/l/vibe-d.svg)
+
+Locales is a set of locales generated from the [Unicode CLDR Project](http://cldr.unicode.org/) which can be used independently or within
+an i18n package; these were built for use with, but not exclusive to, [Universal Translator](https://github.com/go-playground/universal-translator).
+
+Features
+--------
+- [x] Rules generated from the latest [CLDR](http://cldr.unicode.org/index/downloads) data, v36.0.1
+- [x] Contains Cardinal, Ordinal and Range Plural Rules
+- [x] Contains Month, Weekday and Timezone translations built in
+- [x] Contains Date & Time formatting functions
+- [x] Contains Number, Currency, Accounting and Percent formatting functions
+- [x] Supports the "Gregorian" calendar only ( my time isn't unlimited, had to draw the line somewhere )
+
+Full Tests
+--------------------
+I could sure use your help adding tests for every locale; it is a huge undertaking and I just don't have the free time to do it all at the moment.
+Any help would be **greatly appreciated!!!!** Please see [issue](https://github.com/go-playground/locales/issues/1) for details.
+
+Installation
+-----------
+
+Use go get
+
+```shell
+go get github.com/go-playground/locales
+```
+
+NOTES
+--------
+You'll notice most return types are []byte; this is because most of the time the results will be concatenated with a larger body
+of text, and returning a byte slice can avoid some allocations if you are already appending to one. Otherwise just cast to string.
+
+Usage
+-------
+```go
+package main
+
+import (
+ "fmt"
+ "time"
+
+ "github.com/go-playground/locales/currency"
+ "github.com/go-playground/locales/en_CA"
+)
+
+func main() {
+
+ loc, _ := time.LoadLocation("America/Toronto")
+ datetime := time.Date(2016, 02, 03, 9, 0, 1, 0, loc)
+
+ l := en_CA.New()
+
+ // Dates
+ fmt.Println(l.FmtDateFull(datetime))
+ fmt.Println(l.FmtDateLong(datetime))
+ fmt.Println(l.FmtDateMedium(datetime))
+ fmt.Println(l.FmtDateShort(datetime))
+
+ // Times
+ fmt.Println(l.FmtTimeFull(datetime))
+ fmt.Println(l.FmtTimeLong(datetime))
+ fmt.Println(l.FmtTimeMedium(datetime))
+ fmt.Println(l.FmtTimeShort(datetime))
+
+ // Months Wide
+ fmt.Println(l.MonthWide(time.January))
+ fmt.Println(l.MonthWide(time.February))
+ fmt.Println(l.MonthWide(time.March))
+ // ...
+
+ // Months Abbreviated
+ fmt.Println(l.MonthAbbreviated(time.January))
+ fmt.Println(l.MonthAbbreviated(time.February))
+ fmt.Println(l.MonthAbbreviated(time.March))
+ // ...
+
+ // Months Narrow
+ fmt.Println(l.MonthNarrow(time.January))
+ fmt.Println(l.MonthNarrow(time.February))
+ fmt.Println(l.MonthNarrow(time.March))
+ // ...
+
+ // Weekdays Wide
+ fmt.Println(l.WeekdayWide(time.Sunday))
+ fmt.Println(l.WeekdayWide(time.Monday))
+ fmt.Println(l.WeekdayWide(time.Tuesday))
+ // ...
+
+ // Weekdays Abbreviated
+ fmt.Println(l.WeekdayAbbreviated(time.Sunday))
+ fmt.Println(l.WeekdayAbbreviated(time.Monday))
+ fmt.Println(l.WeekdayAbbreviated(time.Tuesday))
+ // ...
+
+ // Weekdays Short
+ fmt.Println(l.WeekdayShort(time.Sunday))
+ fmt.Println(l.WeekdayShort(time.Monday))
+ fmt.Println(l.WeekdayShort(time.Tuesday))
+ // ...
+
+ // Weekdays Narrow
+ fmt.Println(l.WeekdayNarrow(time.Sunday))
+ fmt.Println(l.WeekdayNarrow(time.Monday))
+ fmt.Println(l.WeekdayNarrow(time.Tuesday))
+ // ...
+
+ var f64 float64
+
+ f64 = -10356.4523
+
+ // Number
+ fmt.Println(l.FmtNumber(f64, 2))
+
+ // Currency
+ fmt.Println(l.FmtCurrency(f64, 2, currency.CAD))
+ fmt.Println(l.FmtCurrency(f64, 2, currency.USD))
+
+ // Accounting
+ fmt.Println(l.FmtAccounting(f64, 2, currency.CAD))
+ fmt.Println(l.FmtAccounting(f64, 2, currency.USD))
+
+ f64 = 78.12
+
+ // Percent
+ fmt.Println(l.FmtPercent(f64, 0))
+
+ // Plural Rules for locale, so you know what rules you must cover
+ fmt.Println(l.PluralsCardinal())
+ fmt.Println(l.PluralsOrdinal())
+
+ // Cardinal Plural Rules
+ fmt.Println(l.CardinalPluralRule(1, 0))
+ fmt.Println(l.CardinalPluralRule(1.0, 0))
+ fmt.Println(l.CardinalPluralRule(1.0, 1))
+ fmt.Println(l.CardinalPluralRule(3, 0))
+
+ // Ordinal Plural Rules
+ fmt.Println(l.OrdinalPluralRule(21, 0)) // 21st
+ fmt.Println(l.OrdinalPluralRule(22, 0)) // 22nd
+ fmt.Println(l.OrdinalPluralRule(33, 0)) // 33rd
+ fmt.Println(l.OrdinalPluralRule(34, 0)) // 34th
+
+ // Range Plural Rules
+ fmt.Println(l.RangePluralRule(1, 0, 1, 0)) // 1-1
+ fmt.Println(l.RangePluralRule(1, 0, 2, 0)) // 1-2
+ fmt.Println(l.RangePluralRule(5, 0, 8, 0)) // 5-8
+}
+```
+
+NOTES:
+-------
+These rules were generated from the [Unicode CLDR Project](http://cldr.unicode.org/). If you encounter any issues,
+I strongly encourage contributing to the CLDR project to get the locale information corrected; the next time
+these locales are regenerated, the fix will be included.
+
+I do however realize that time constraints are often important and so there are two options:
+
+1. Create your own locale: copy, paste and modify, and ensure it complies with the `Translator` interface.
+2. Add an exception in the locale generation code directly, and once regenerated the fix will be in place.
+
+Please do not make fixes inside the locale files; they WILL get overwritten when the locales are regenerated.
+
+License
+------
+Distributed under MIT License, please see license file in code for more details.
diff --git a/vendor/github.com/go-playground/locales/currency/currency.go b/vendor/github.com/go-playground/locales/currency/currency.go
new file mode 100644
index 0000000..b5a95fb
--- /dev/null
+++ b/vendor/github.com/go-playground/locales/currency/currency.go
@@ -0,0 +1,311 @@
+package currency
+
+// Type is the currency type associated with the locales currency enum
+type Type int
+
+// locale currencies
+const (
+ ADP Type = iota
+ AED
+ AFA
+ AFN
+ ALK
+ ALL
+ AMD
+ ANG
+ AOA
+ AOK
+ AON
+ AOR
+ ARA
+ ARL
+ ARM
+ ARP
+ ARS
+ ATS
+ AUD
+ AWG
+ AZM
+ AZN
+ BAD
+ BAM
+ BAN
+ BBD
+ BDT
+ BEC
+ BEF
+ BEL
+ BGL
+ BGM
+ BGN
+ BGO
+ BHD
+ BIF
+ BMD
+ BND
+ BOB
+ BOL
+ BOP
+ BOV
+ BRB
+ BRC
+ BRE
+ BRL
+ BRN
+ BRR
+ BRZ
+ BSD
+ BTN
+ BUK
+ BWP
+ BYB
+ BYN
+ BYR
+ BZD
+ CAD
+ CDF
+ CHE
+ CHF
+ CHW
+ CLE
+ CLF
+ CLP
+ CNH
+ CNX
+ CNY
+ COP
+ COU
+ CRC
+ CSD
+ CSK
+ CUC
+ CUP
+ CVE
+ CYP
+ CZK
+ DDM
+ DEM
+ DJF
+ DKK
+ DOP
+ DZD
+ ECS
+ ECV
+ EEK
+ EGP
+ ERN
+ ESA
+ ESB
+ ESP
+ ETB
+ EUR
+ FIM
+ FJD
+ FKP
+ FRF
+ GBP
+ GEK
+ GEL
+ GHC
+ GHS
+ GIP
+ GMD
+ GNF
+ GNS
+ GQE
+ GRD
+ GTQ
+ GWE
+ GWP
+ GYD
+ HKD
+ HNL
+ HRD
+ HRK
+ HTG
+ HUF
+ IDR
+ IEP
+ ILP
+ ILR
+ ILS
+ INR
+ IQD
+ IRR
+ ISJ
+ ISK
+ ITL
+ JMD
+ JOD
+ JPY
+ KES
+ KGS
+ KHR
+ KMF
+ KPW
+ KRH
+ KRO
+ KRW
+ KWD
+ KYD
+ KZT
+ LAK
+ LBP
+ LKR
+ LRD
+ LSL
+ LTL
+ LTT
+ LUC
+ LUF
+ LUL
+ LVL
+ LVR
+ LYD
+ MAD
+ MAF
+ MCF
+ MDC
+ MDL
+ MGA
+ MGF
+ MKD
+ MKN
+ MLF
+ MMK
+ MNT
+ MOP
+ MRO
+ MRU
+ MTL
+ MTP
+ MUR
+ MVP
+ MVR
+ MWK
+ MXN
+ MXP
+ MXV
+ MYR
+ MZE
+ MZM
+ MZN
+ NAD
+ NGN
+ NIC
+ NIO
+ NLG
+ NOK
+ NPR
+ NZD
+ OMR
+ PAB
+ PEI
+ PEN
+ PES
+ PGK
+ PHP
+ PKR
+ PLN
+ PLZ
+ PTE
+ PYG
+ QAR
+ RHD
+ ROL
+ RON
+ RSD
+ RUB
+ RUR
+ RWF
+ SAR
+ SBD
+ SCR
+ SDD
+ SDG
+ SDP
+ SEK
+ SGD
+ SHP
+ SIT
+ SKK
+ SLL
+ SOS
+ SRD
+ SRG
+ SSP
+ STD
+ STN
+ SUR
+ SVC
+ SYP
+ SZL
+ THB
+ TJR
+ TJS
+ TMM
+ TMT
+ TND
+ TOP
+ TPE
+ TRL
+ TRY
+ TTD
+ TWD
+ TZS
+ UAH
+ UAK
+ UGS
+ UGX
+ USD
+ USN
+ USS
+ UYI
+ UYP
+ UYU
+ UYW
+ UZS
+ VEB
+ VEF
+ VES
+ VND
+ VNN
+ VUV
+ WST
+ XAF
+ XAG
+ XAU
+ XBA
+ XBB
+ XBC
+ XBD
+ XCD
+ XDR
+ XEU
+ XFO
+ XFU
+ XOF
+ XPD
+ XPF
+ XPT
+ XRE
+ XSU
+ XTS
+ XUA
+ XXX
+ YDD
+ YER
+ YUD
+ YUM
+ YUN
+ YUR
+ ZAL
+ ZAR
+ ZMK
+ ZMW
+ ZRN
+ ZRZ
+ ZWD
+ ZWL
+ ZWR
+)
diff --git a/vendor/github.com/go-playground/locales/logo.png b/vendor/github.com/go-playground/locales/logo.png
new file mode 100644
index 0000000..3038276
Binary files /dev/null and b/vendor/github.com/go-playground/locales/logo.png differ
diff --git a/vendor/github.com/go-playground/locales/rules.go b/vendor/github.com/go-playground/locales/rules.go
new file mode 100644
index 0000000..9202900
--- /dev/null
+++ b/vendor/github.com/go-playground/locales/rules.go
@@ -0,0 +1,293 @@
+package locales
+
+import (
+ "strconv"
+ "time"
+
+ "github.com/go-playground/locales/currency"
+)
+
+// // ErrBadNumberValue is returned when the number passed for
+// // plural rule determination cannot be parsed
+// type ErrBadNumberValue struct {
+// NumberValue string
+// InnerError error
+// }
+
+// // Error returns ErrBadNumberValue error string
+// func (e *ErrBadNumberValue) Error() string {
+// return fmt.Sprintf("Invalid Number Value '%s' %s", e.NumberValue, e.InnerError)
+// }
+
+// var _ error = new(ErrBadNumberValue)
+
+// PluralRule denotes the type of plural rules
+type PluralRule int
+
+// PluralRule values
+const (
+ PluralRuleUnknown PluralRule = iota
+ PluralRuleZero // zero
+ PluralRuleOne // one - singular
+ PluralRuleTwo // two - dual
+ PluralRuleFew // few - paucal
+ PluralRuleMany // many - also used for fractions if they have a separate class
+ PluralRuleOther // other - required—general plural form—also used if the language only has a single form
+)
+
+const (
+ pluralsString = "UnknownZeroOneTwoFewManyOther"
+)
+
+// Translator encapsulates an instance of a locale
+// NOTE: some values are returned as a []byte just in case the caller
+// wishes to add more and can help avoid allocations; otherwise just cast as string
+type Translator interface {
+
+ // The following Functions are for overriding, debugging or developing
+ // with a Translator Locale
+
+ // Locale returns the string value of the translator
+ Locale() string
+
+ // returns an array of cardinal plural rules associated
+ // with this translator
+ PluralsCardinal() []PluralRule
+
+ // returns an array of ordinal plural rules associated
+ // with this translator
+ PluralsOrdinal() []PluralRule
+
+ // returns an array of range plural rules associated
+ // with this translator
+ PluralsRange() []PluralRule
+
+ // returns the cardinal PluralRule given 'num' and digits/precision of 'v' for locale
+ CardinalPluralRule(num float64, v uint64) PluralRule
+
+ // returns the ordinal PluralRule given 'num' and digits/precision of 'v' for locale
+ OrdinalPluralRule(num float64, v uint64) PluralRule
+
+ // returns the range PluralRule given 'num1', 'num2' and digits/precision of 'v1' and 'v2' for locale
+ RangePluralRule(num1 float64, v1 uint64, num2 float64, v2 uint64) PluralRule
+
+ // returns the locales abbreviated month given the 'month' provided
+ MonthAbbreviated(month time.Month) string
+
+ // returns the locales abbreviated months
+ MonthsAbbreviated() []string
+
+ // returns the locales narrow month given the 'month' provided
+ MonthNarrow(month time.Month) string
+
+ // returns the locales narrow months
+ MonthsNarrow() []string
+
+ // returns the locales wide month given the 'month' provided
+ MonthWide(month time.Month) string
+
+ // returns the locales wide months
+ MonthsWide() []string
+
+ // returns the locales abbreviated weekday given the 'weekday' provided
+ WeekdayAbbreviated(weekday time.Weekday) string
+
+ // returns the locales abbreviated weekdays
+ WeekdaysAbbreviated() []string
+
+ // returns the locales narrow weekday given the 'weekday' provided
+ WeekdayNarrow(weekday time.Weekday) string
+
+ // returns the locales narrow weekdays
+ WeekdaysNarrow() []string
+
+ // returns the locales short weekday given the 'weekday' provided
+ WeekdayShort(weekday time.Weekday) string
+
+ // returns the locales short weekdays
+ WeekdaysShort() []string
+
+ // returns the locales wide weekday given the 'weekday' provided
+ WeekdayWide(weekday time.Weekday) string
+
+ // returns the locales wide weekdays
+ WeekdaysWide() []string
+
+ // The following Functions are common Formatting functions for the Translator's Locale
+
+ // returns 'num' with digits/precision of 'v' for locale and handles both Whole and Real numbers based on 'v'
+ FmtNumber(num float64, v uint64) string
+
+ // returns 'num' with digits/precision of 'v' for locale and handles both Whole and Real numbers based on 'v'
+ // NOTE: 'num' passed into FmtPercent is assumed to be in percent already
+ FmtPercent(num float64, v uint64) string
+
+ // returns the currency representation of 'num' with digits/precision of 'v' for locale
+ FmtCurrency(num float64, v uint64, currency currency.Type) string
+
+ // returns the currency representation of 'num' with digits/precision of 'v' for locale
+ // in accounting notation.
+ FmtAccounting(num float64, v uint64, currency currency.Type) string
+
+ // returns the short date representation of 't' for locale
+ FmtDateShort(t time.Time) string
+
+ // returns the medium date representation of 't' for locale
+ FmtDateMedium(t time.Time) string
+
+ // returns the long date representation of 't' for locale
+ FmtDateLong(t time.Time) string
+
+ // returns the full date representation of 't' for locale
+ FmtDateFull(t time.Time) string
+
+ // returns the short time representation of 't' for locale
+ FmtTimeShort(t time.Time) string
+
+ // returns the medium time representation of 't' for locale
+ FmtTimeMedium(t time.Time) string
+
+ // returns the long time representation of 't' for locale
+ FmtTimeLong(t time.Time) string
+
+ // returns the full time representation of 't' for locale
+ FmtTimeFull(t time.Time) string
+}
+
+// String returns the string value of PluralRule
+func (p PluralRule) String() string {
+
+ switch p {
+ case PluralRuleZero:
+ return pluralsString[7:11]
+ case PluralRuleOne:
+ return pluralsString[11:14]
+ case PluralRuleTwo:
+ return pluralsString[14:17]
+ case PluralRuleFew:
+ return pluralsString[17:20]
+ case PluralRuleMany:
+ return pluralsString[20:24]
+ case PluralRuleOther:
+ return pluralsString[24:]
+ default:
+ return pluralsString[:7]
+ }
+}
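+
+// The slice expressions above index into pluralsString
+// ("UnknownZeroOneTwoFewManyOther"): Unknown is [0:7], Zero [7:11],
+// One [11:14], Two [14:17], Few [17:20], Many [20:24] and Other [24:].
+// Sharing one backing string avoids a separate allocation per rule name.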
+
+//
+// Precision Notes:
+//
+// must specify a precision >= 0, and here is why https://play.golang.org/p/LyL90U0Vyh
+//
+// v := float64(3.141)
+// i := float64(int64(v))
+//
+// fmt.Println(v - i)
+//
+// or
+//
+// s := strconv.FormatFloat(v-i, 'f', -1, 64)
+// fmt.Println(s)
+//
+// these will not print what you'd expect: 0.14100000000000001
+// and so this library requires a precision to be specified, or
+// inaccurate plural rules could be applied.
+//
+//
+//
+// n - absolute value of the source number (integer and decimals).
+// i - integer digits of n.
+// v - number of visible fraction digits in n, with trailing zeros.
+// w - number of visible fraction digits in n, without trailing zeros.
+// f - visible fractional digits in n, with trailing zeros.
+// t - visible fractional digits in n, without trailing zeros.
+//
+//
+// Func(num float64, v uint64) // v = digits/precision and prevents -1 as a special case as this can lead to very unexpected behaviour, see precision notes above.
+//
+// n := math.Abs(num)
+// i := int64(n)
+// v := v
+//
+//
+// w := strconv.FormatFloat(num-float64(i), 'f', int(v), 64) // then parse backwards on string until no more zero's....
+// f := strconv.FormatFloat(n, 'f', int(v), 64) // then turn everything after decimal into an int64
+// t := strconv.FormatFloat(n, 'f', int(v), 64) // then parse backwards on string until no more zero's....
+//
+//
+//
+// General Inclusion Rules
+// - v will always be available inherently
+// - all require n
+// - w requires i
+//
+
+// W returns the number of visible fraction digits in N, without trailing zeros.
+func W(n float64, v uint64) (w int64) {
+
+ s := strconv.FormatFloat(n-float64(int64(n)), 'f', int(v), 64)
+
+ // s will either be '0' or '0.xxxx', so if len(s) is 1 then w is zero,
+ // otherwise we need to parse
+ if len(s) != 1 {
+
+ s = s[2:]
+ end := 0
+
+ // scan backwards past trailing zeros to the last non-zero digit
+ for i := len(s) - 1; i >= 0; i-- {
+ if s[i] != '0' {
+ end = i + 1
+ break
+ }
+ }
+
+ w = int64(len(s[:end]))
+ }
+
+ return
+}
+
+// F returns the visible fractional digits in N, with trailing zeros.
+func F(n float64, v uint64) (f int64) {
+
+ s := strconv.FormatFloat(n-float64(int64(n)), 'f', int(v), 64)
+
+ // s will either be '0' or '0.xxxx', so if len(s) is 1 then f is zero,
+ // otherwise we need to parse
+ if len(s) != 1 {
+
+ // ignoring error, because it can't fail as we generated
+ // the string internally from a real number
+ f, _ = strconv.ParseInt(s[2:], 10, 64)
+ }
+
+ return
+}
+
+// T returns the visible fractional digits in N, without trailing zeros.
+func T(n float64, v uint64) (t int64) {
+
+ s := strconv.FormatFloat(n-float64(int64(n)), 'f', int(v), 64)
+
+ // s will either be '0' or '0.xxxx', so if len(s) is 1 then t is zero,
+ // otherwise we need to parse
+ if len(s) != 1 {
+
+ s = s[2:]
+ end := 0
+
+ // scan backwards past trailing zeros to the last non-zero digit
+ for i := len(s) - 1; i >= 0; i-- {
+ if s[i] != '0' {
+ end = i + 1
+ break
+ }
+ }
+
+ // ignoring error, because it can't fail as we generated
+ // the string internally from a real number
+ t, _ = strconv.ParseInt(s[:end], 10, 64)
+ }
+
+ return
+}
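+
+// Worked example for the helpers above: with n = 1.230 and v = 3 the
+// formatted fraction is "230", so F(1.230, 3) = 230 (trailing zeros kept),
+// T(1.230, 3) = 23 and W(1.230, 3) = 2 (trailing zeros stripped).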
diff --git a/vendor/github.com/go-playground/universal-translator/.gitignore b/vendor/github.com/go-playground/universal-translator/.gitignore
new file mode 100644
index 0000000..bc4e07f
--- /dev/null
+++ b/vendor/github.com/go-playground/universal-translator/.gitignore
@@ -0,0 +1,25 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
+*.coverprofile
\ No newline at end of file
diff --git a/vendor/github.com/go-playground/universal-translator/.travis.yml b/vendor/github.com/go-playground/universal-translator/.travis.yml
new file mode 100644
index 0000000..39b8b92
--- /dev/null
+++ b/vendor/github.com/go-playground/universal-translator/.travis.yml
@@ -0,0 +1,27 @@
+language: go
+go:
+ - 1.13.4
+ - tip
+matrix:
+ allow_failures:
+ - go: tip
+
+notifications:
+ email:
+ recipients: dean.karn@gmail.com
+ on_success: change
+ on_failure: always
+
+before_install:
+ - go install github.com/mattn/goveralls
+
+# Only clone the most recent commit.
+git:
+ depth: 1
+
+script:
+ - go test -v -race -covermode=atomic -coverprofile=coverage.coverprofile ./...
+
+after_success: |
+ [ $TRAVIS_GO_VERSION = 1.13.4 ] &&
+ goveralls -coverprofile=coverage.coverprofile -service travis-ci -repotoken $COVERALLS_TOKEN
\ No newline at end of file
diff --git a/vendor/github.com/go-playground/universal-translator/LICENSE b/vendor/github.com/go-playground/universal-translator/LICENSE
new file mode 100644
index 0000000..8d8aba1
--- /dev/null
+++ b/vendor/github.com/go-playground/universal-translator/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Go Playground
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/go-playground/universal-translator/Makefile b/vendor/github.com/go-playground/universal-translator/Makefile
new file mode 100644
index 0000000..ec3455b
--- /dev/null
+++ b/vendor/github.com/go-playground/universal-translator/Makefile
@@ -0,0 +1,18 @@
+GOCMD=GO111MODULE=on go
+
+linters-install:
+ @golangci-lint --version >/dev/null 2>&1 || { \
+ echo "installing linting tools..."; \
+ curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s v1.41.1; \
+ }
+
+lint: linters-install
+ golangci-lint run
+
+test:
+ $(GOCMD) test -cover -race ./...
+
+bench:
+ $(GOCMD) test -bench=. -benchmem ./...
+
+.PHONY: test lint linters-install
\ No newline at end of file
diff --git a/vendor/github.com/go-playground/universal-translator/README.md b/vendor/github.com/go-playground/universal-translator/README.md
new file mode 100644
index 0000000..d9b6654
--- /dev/null
+++ b/vendor/github.com/go-playground/universal-translator/README.md
@@ -0,0 +1,87 @@
+## universal-translator
+ ![Project status](https://img.shields.io/badge/version-0.18.1-green.svg)
+[![Coverage Status](https://coveralls.io/repos/github/go-playground/universal-translator/badge.svg)](https://coveralls.io/github/go-playground/universal-translator)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-playground/universal-translator)](https://goreportcard.com/report/github.com/go-playground/universal-translator)
+[![GoDoc](https://godoc.org/github.com/go-playground/universal-translator?status.svg)](https://godoc.org/github.com/go-playground/universal-translator)
+![License](https://img.shields.io/dub/l/vibe-d.svg)
+
+Universal Translator is an i18n Translator for Go/Golang using CLDR data + pluralization rules
+
+Why another i18n library?
+--------------------------
+Because none of the plural rules out there seem to be correct, including those in the previous implementation of this package,
+I took it upon myself to create [locales](https://github.com/go-playground/locales) for everyone to use; this package
+is a thin wrapper around [locales](https://github.com/go-playground/locales) in order to store and translate text for
+use in your applications.
+
+Features
+--------
+- [x] Rules generated from the [CLDR](http://cldr.unicode.org/index/downloads) data, v36.0.1
+- [x] Contains Cardinal, Ordinal and Range Plural Rules
+- [x] Contains Month, Weekday and Timezone translations built in
+- [x] Contains Date & Time formatting functions
+- [x] Contains Number, Currency, Accounting and Percent formatting functions
+- [x] Supports the "Gregorian" calendar only ( my time isn't unlimited, had to draw the line somewhere )
+- [x] Support loading translations from files
+- [x] Exporting translations to file(s), mainly for getting them professionally translated
+- [ ] Code Generation for translation files -> Go code, i.e. after it has been professionally translated
+- [ ] Tests for all languages, I need help with this, please see [here](https://github.com/go-playground/locales/issues/1)
+
+Installation
+-----------
+
+Use go get
+
+```shell
+go get github.com/go-playground/universal-translator
+```
+
+Usage & Documentation
+-------
+
+Please see https://godoc.org/github.com/go-playground/universal-translator for usage docs
+
+##### Examples:
+
+- [Basic](https://github.com/go-playground/universal-translator/tree/master/_examples/basic)
+- [Full - no files](https://github.com/go-playground/universal-translator/tree/master/_examples/full-no-files)
+- [Full - with files](https://github.com/go-playground/universal-translator/tree/master/_examples/full-with-files)
+
+File formatting
+--------------
+All translation types (plain substitution, Cardinal, Ordinal and Range) can be contained within the same file(s);
+they are only separated for easy viewing.
+
+##### Examples:
+
+- [Formats](https://github.com/go-playground/universal-translator/tree/master/_examples/file-formats)
+
+##### Basic Makeup
+NOTE: not all fields are needed for all translation types, see [examples](https://github.com/go-playground/universal-translator/tree/master/_examples/file-formats)
+```json
+{
+ "locale": "en",
+ "key": "days-left",
+ "trans": "You have {0} day left.",
+ "type": "Cardinal",
+ "rule": "One",
+ "override": false
+}
+```
+|Field|Description|
+|---|---|
+|locale|The locale for which the translation is for.|
+|key|The translation key that will be used to store and lookup each translation; normally it is a string or integer.|
+|trans|The actual translation text.|
+|type|The type of translation: Cardinal, Ordinal, Range, or "" for a plain substitution (not required to be defined if plain is used)|
+|rule|The plural rule the translation is for, eg. One, Two, Few, Many or Other (not required to be defined if plain is used)|
+|override|If you wish to override an existing translation that has already been registered, set this to 'true'. 99% of the time there is no need to define it.|
+
+Help With Tests
+---------------
+To anyone interested in helping or contributing, I sure could use some help creating tests for each language.
+Please see issue [here](https://github.com/go-playground/locales/issues/1) for details.
+
+License
+------
+Distributed under MIT License, please see license file in code for more details.
diff --git a/vendor/github.com/go-playground/universal-translator/errors.go b/vendor/github.com/go-playground/universal-translator/errors.go
new file mode 100644
index 0000000..38b163b
--- /dev/null
+++ b/vendor/github.com/go-playground/universal-translator/errors.go
@@ -0,0 +1,148 @@
+package ut
+
+import (
+ "errors"
+ "fmt"
+
+ "github.com/go-playground/locales"
+)
+
+var (
+ // ErrUnknowTranslation indicates the translation could not be found
+ ErrUnknowTranslation = errors.New("Unknown Translation")
+)
+
+var _ error = new(ErrConflictingTranslation)
+var _ error = new(ErrRangeTranslation)
+var _ error = new(ErrOrdinalTranslation)
+var _ error = new(ErrCardinalTranslation)
+var _ error = new(ErrMissingPluralTranslation)
+var _ error = new(ErrExistingTranslator)
+
+// ErrExistingTranslator is the error representing a conflicting translator
+type ErrExistingTranslator struct {
+ locale string
+}
+
+// Error returns ErrExistingTranslator's internal error text
+func (e *ErrExistingTranslator) Error() string {
+ return fmt.Sprintf("error: conflicting translator for locale '%s'", e.locale)
+}
+
+// ErrConflictingTranslation is the error representing a conflicting translation
+type ErrConflictingTranslation struct {
+ locale string
+ key interface{}
+ rule locales.PluralRule
+ text string
+}
+
+// Error returns ErrConflictingTranslation's internal error text
+func (e *ErrConflictingTranslation) Error() string {
+
+ if _, ok := e.key.(string); !ok {
+ return fmt.Sprintf("error: conflicting key '%#v' rule '%s' with text '%s' for locale '%s', value being ignored", e.key, e.rule, e.text, e.locale)
+ }
+
+ return fmt.Sprintf("error: conflicting key '%s' rule '%s' with text '%s' for locale '%s', value being ignored", e.key, e.rule, e.text, e.locale)
+}
+
+// ErrRangeTranslation is the error representing a range translation error
+type ErrRangeTranslation struct {
+ text string
+}
+
+// Error returns ErrRangeTranslation's internal error text
+func (e *ErrRangeTranslation) Error() string {
+ return e.text
+}
+
+// ErrOrdinalTranslation is the error representing an ordinal translation error
+type ErrOrdinalTranslation struct {
+ text string
+}
+
+// Error returns ErrOrdinalTranslation's internal error text
+func (e *ErrOrdinalTranslation) Error() string {
+ return e.text
+}
+
+// ErrCardinalTranslation is the error representing a cardinal translation error
+type ErrCardinalTranslation struct {
+ text string
+}
+
+// Error returns ErrCardinalTranslation's internal error text
+func (e *ErrCardinalTranslation) Error() string {
+ return e.text
+}
+
+// ErrMissingPluralTranslation is the error signifying a missing translation given
+// the locales plural rules.
+type ErrMissingPluralTranslation struct {
+ locale string
+ key interface{}
+ rule locales.PluralRule
+ translationType string
+}
+
+// Error returns ErrMissingPluralTranslation's internal error text
+func (e *ErrMissingPluralTranslation) Error() string {
+
+ if _, ok := e.key.(string); !ok {
+ return fmt.Sprintf("error: missing '%s' plural rule '%s' for translation with key '%#v' and locale '%s'", e.translationType, e.rule, e.key, e.locale)
+ }
+
+ return fmt.Sprintf("error: missing '%s' plural rule '%s' for translation with key '%s' and locale '%s'", e.translationType, e.rule, e.key, e.locale)
+}
+
+// ErrMissingBracket is the error representing a missing bracket in a translation
+// eg. This is a {0 <-- missing ending '}'
+type ErrMissingBracket struct {
+ locale string
+ key interface{}
+ text string
+}
+
+// Error returns ErrMissingBracket error message
+func (e *ErrMissingBracket) Error() string {
+ return fmt.Sprintf("error: missing bracket '{}', in translation. locale: '%s' key: '%v' text: '%s'", e.locale, e.key, e.text)
+}
+
+// ErrBadParamSyntax is the error representing a bad parameter definition in a translation
+// eg. This is a {must-be-int}
+type ErrBadParamSyntax struct {
+ locale string
+ param string
+ key interface{}
+ text string
+}
+
+// Error returns ErrBadParamSyntax error message
+func (e *ErrBadParamSyntax) Error() string {
+ return fmt.Sprintf("error: bad parameter syntax, missing parameter '%s' in translation. locale: '%s' key: '%v' text: '%s'", e.param, e.locale, e.key, e.text)
+}
+
+// import/export errors
+
+// ErrMissingLocale is the error representing an expected locale that could
+// not be found aka locale not registered with the UniversalTranslator Instance
+type ErrMissingLocale struct {
+ locale string
+}
+
+// Error returns ErrMissingLocale's internal error text
+func (e *ErrMissingLocale) Error() string {
+ return fmt.Sprintf("error: locale '%s' not registered.", e.locale)
+}
+
+// ErrBadPluralDefinition is the error representing an incorrect plural definition
+// usually found within translations defined within files during the import process.
+type ErrBadPluralDefinition struct {
+ tl translation
+}
+
+// Error returns ErrBadPluralDefinition's internal error text
+func (e *ErrBadPluralDefinition) Error() string {
+ return fmt.Sprintf("error: bad plural definition '%#v'", e.tl)
+}
diff --git a/vendor/github.com/go-playground/universal-translator/import_export.go b/vendor/github.com/go-playground/universal-translator/import_export.go
new file mode 100644
index 0000000..87a1b46
--- /dev/null
+++ b/vendor/github.com/go-playground/universal-translator/import_export.go
@@ -0,0 +1,274 @@
+package ut
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "io"
+
+ "github.com/go-playground/locales"
+)
+
+type translation struct {
+ Locale string `json:"locale"`
+ Key interface{} `json:"key"` // either string or integer
+ Translation string `json:"trans"`
+ PluralType string `json:"type,omitempty"`
+ PluralRule string `json:"rule,omitempty"`
+ OverrideExisting bool `json:"override,omitempty"`
+}
+
+const (
+ cardinalType = "Cardinal"
+ ordinalType = "Ordinal"
+ rangeType = "Range"
+)
+
+// ImportExportFormat is the format of the file import or export
+type ImportExportFormat uint8
+
+// supported Export Formats
+const (
+ FormatJSON ImportExportFormat = iota
+)
+
+// Export writes the translations out to a file on disk.
+//
+// NOTE: this currently only works with string or int translation keys.
+func (t *UniversalTranslator) Export(format ImportExportFormat, dirname string) error {
+
+ _, err := os.Stat(dirname)
+ if err != nil {
+
+ if !os.IsNotExist(err) {
+ return err
+ }
+
+ if err = os.MkdirAll(dirname, 0744); err != nil {
+ return err
+ }
+ }
+
+ // build up translations
+ var trans []translation
+ var b []byte
+ var ext string
+
+ for _, locale := range t.translators {
+
+ for k, v := range locale.(*translator).translations {
+ trans = append(trans, translation{
+ Locale: locale.Locale(),
+ Key: k,
+ Translation: v.text,
+ })
+ }
+
+ for k, pluralTrans := range locale.(*translator).cardinalTanslations {
+
+ for i, plural := range pluralTrans {
+
+ // the slice has a slot for every plural rule,
+ // but not all are set for every language; skip empty slots.
+ if plural == nil {
+ continue
+ }
+
+ trans = append(trans, translation{
+ Locale: locale.Locale(),
+ Key: k.(string),
+ Translation: plural.text,
+ PluralType: cardinalType,
+ PluralRule: locales.PluralRule(i).String(),
+ })
+ }
+ }
+
+ for k, pluralTrans := range locale.(*translator).ordinalTanslations {
+
+ for i, plural := range pluralTrans {
+
+ // the slice has a slot for every plural rule,
+ // but not all are set for every language; skip empty slots.
+ if plural == nil {
+ continue
+ }
+
+ trans = append(trans, translation{
+ Locale: locale.Locale(),
+ Key: k.(string),
+ Translation: plural.text,
+ PluralType: ordinalType,
+ PluralRule: locales.PluralRule(i).String(),
+ })
+ }
+ }
+
+ for k, pluralTrans := range locale.(*translator).rangeTanslations {
+
+ for i, plural := range pluralTrans {
+
+ // the slice has a slot for every plural rule,
+ // but not all are set for every language; skip empty slots.
+ if plural == nil {
+ continue
+ }
+
+ trans = append(trans, translation{
+ Locale: locale.Locale(),
+ Key: k.(string),
+ Translation: plural.text,
+ PluralType: rangeType,
+ PluralRule: locales.PluralRule(i).String(),
+ })
+ }
+ }
+
+ switch format {
+ case FormatJSON:
+ b, err = json.MarshalIndent(trans, "", " ")
+ ext = ".json"
+ }
+
+ if err != nil {
+ return err
+ }
+
+ err = os.WriteFile(filepath.Join(dirname, fmt.Sprintf("%s%s", locale.Locale(), ext)), b, 0644)
+ if err != nil {
+ return err
+ }
+
+ trans = trans[0:0]
+ }
+
+ return nil
+}
+
+// Import reads the translations out of a file or directory on disk.
+//
+// NOTE: this currently only works with string or int translation keys.
+func (t *UniversalTranslator) Import(format ImportExportFormat, dirnameOrFilename string) error {
+
+ fi, err := os.Stat(dirnameOrFilename)
+ if err != nil {
+ return err
+ }
+
+ processFn := func(filename string) error {
+
+ f, err := os.Open(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ return t.ImportByReader(format, f)
+ }
+
+ if !fi.IsDir() {
+ return processFn(dirnameOrFilename)
+ }
+
+ // recursively go through directory
+ walker := func(path string, info os.FileInfo, err error) error {
+
+ if info.IsDir() {
+ return nil
+ }
+
+ switch format {
+ case FormatJSON:
+ // skip non JSON files
+ if filepath.Ext(info.Name()) != ".json" {
+ return nil
+ }
+ }
+
+ return processFn(path)
+ }
+
+ return filepath.Walk(dirnameOrFilename, walker)
+}
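+
+// Usage sketch (uni is assumed to be a *UniversalTranslator with translators
+// already registered and populated):
+//
+//	_ = uni.Export(FormatJSON, "out") // writes one out/<locale>.json per locale
+//	_ = uni.Import(FormatJSON, "out") // reads them back in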
+
+// ImportByReader imports the translations found within the contents read from the supplied reader.
+//
+// NOTE: generally used when assets have been embedded into the binary and are already in memory.
+func (t *UniversalTranslator) ImportByReader(format ImportExportFormat, reader io.Reader) error {
+
+ b, err := io.ReadAll(reader)
+ if err != nil {
+ return err
+ }
+
+ var trans []translation
+
+ switch format {
+ case FormatJSON:
+ err = json.Unmarshal(b, &trans)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ for _, tl := range trans {
+
+ locale, found := t.FindTranslator(tl.Locale)
+ if !found {
+ return &ErrMissingLocale{locale: tl.Locale}
+ }
+
+ pr := stringToPR(tl.PluralRule)
+
+ if pr == locales.PluralRuleUnknown {
+
+ err = locale.Add(tl.Key, tl.Translation, tl.OverrideExisting)
+ if err != nil {
+ return err
+ }
+
+ continue
+ }
+
+ switch tl.PluralType {
+ case cardinalType:
+ err = locale.AddCardinal(tl.Key, tl.Translation, pr, tl.OverrideExisting)
+ case ordinalType:
+ err = locale.AddOrdinal(tl.Key, tl.Translation, pr, tl.OverrideExisting)
+ case rangeType:
+ err = locale.AddRange(tl.Key, tl.Translation, pr, tl.OverrideExisting)
+ default:
+ return &ErrBadPluralDefinition{tl: tl}
+ }
+
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func stringToPR(s string) locales.PluralRule {
+
+ switch s {
+ case "Zero":
+ return locales.PluralRuleZero
+ case "One":
+ return locales.PluralRuleOne
+ case "Two":
+ return locales.PluralRuleTwo
+ case "Few":
+ return locales.PluralRuleFew
+ case "Many":
+ return locales.PluralRuleMany
+ case "Other":
+ return locales.PluralRuleOther
+ default:
+ return locales.PluralRuleUnknown
+ }
+
+}
diff --git a/vendor/github.com/go-playground/universal-translator/logo.png b/vendor/github.com/go-playground/universal-translator/logo.png
new file mode 100644
index 0000000..a37aa8c
Binary files /dev/null and b/vendor/github.com/go-playground/universal-translator/logo.png differ
diff --git a/vendor/github.com/go-playground/universal-translator/translator.go b/vendor/github.com/go-playground/universal-translator/translator.go
new file mode 100644
index 0000000..24b18db
--- /dev/null
+++ b/vendor/github.com/go-playground/universal-translator/translator.go
@@ -0,0 +1,420 @@
+package ut
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "github.com/go-playground/locales"
+)
+
+const (
+ paramZero = "{0}"
+ paramOne = "{1}"
+ unknownTranslation = ""
+)
+
+// Translator is universal translator's
+// translator instance, which is a thin wrapper
+// around a locales.Translator instance providing
+// some extra functionality
+type Translator interface {
+ locales.Translator
+
+ // adds a normal translation for a particular language/locale
+ // {#} is the only replacement type accepted and may be used ad infinitum
+ // eg. one: '{0} day left' other: '{0} days left'
+ Add(key interface{}, text string, override bool) error
+
+ // adds a cardinal plural translation for a particular language/locale
+ // {0} is the only replacement type accepted and only one variable is accepted as
+ // multiple cannot be used for a plural rule determination, unless it is a range;
+ // see AddRange below.
+ // eg. in locale 'en' one: '{0} day left' other: '{0} days left'
+ AddCardinal(key interface{}, text string, rule locales.PluralRule, override bool) error
+
+ // adds an ordinal plural translation for a particular language/locale
+ // {0} is the only replacement type accepted and only one variable is accepted as
+ // multiple cannot be used for a plural rule determination, unless it is a range;
+ // see AddRange below.
+ // eg. in locale 'en' one: '{0}st day of spring' other: '{0}th day of spring'
+ // - 1st, 2nd, 3rd...
+ AddOrdinal(key interface{}, text string, rule locales.PluralRule, override bool) error
+
+ // adds a range plural translation for a particular language/locale
+ // {0} and {1} are the only replacement types accepted and only these are accepted.
+ // eg. in locale 'nl' one: '{0}-{1} day left' other: '{0}-{1} days left'
+ AddRange(key interface{}, text string, rule locales.PluralRule, override bool) error
+
+ // creates the translation for the locale given the 'key' and params passed in
+ T(key interface{}, params ...string) (string, error)
+
+ // creates the cardinal translation for the locale given the 'key', 'num' and 'digit' arguments
+ // and param passed in
+ C(key interface{}, num float64, digits uint64, param string) (string, error)
+
+ // creates the ordinal translation for the locale given the 'key', 'num' and 'digit' arguments
+ // and param passed in
+ O(key interface{}, num float64, digits uint64, param string) (string, error)
+
+ // creates the range translation for the locale given the 'key', 'num1', 'digit1', 'num2' and
+ // 'digit2' arguments and 'param1' and 'param2' passed in
+ R(key interface{}, num1 float64, digits1 uint64, num2 float64, digits2 uint64, param1, param2 string) (string, error)
+
+ // VerifyTranslations checks to ensure that no plural rules have been
+ // missed within the translations.
+ VerifyTranslations() error
+}
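+
+// A minimal usage sketch (assumes trans is an "en" Translator obtained from
+// the enclosing UniversalTranslator, e.g. via GetTranslator):
+//
+//	_ = trans.Add("welcome", "Welcome {0} to the site.", false)
+//	s, _ := trans.T("welcome", "Joe") // "Welcome Joe to the site."
+//	_ = trans.AddCardinal("days", "{0} day left", locales.PluralRuleOne, false)
+//	_ = trans.AddCardinal("days", "{0} days left", locales.PluralRuleOther, false)
+//	s, _ = trans.C("days", 2, 0, "2") // "2 days left"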
+
+var _ Translator = new(translator)
+var _ locales.Translator = new(translator)
+
+type translator struct {
+ locales.Translator
+ translations map[interface{}]*transText
+ cardinalTanslations map[interface{}][]*transText // array index is mapped to locales.PluralRule index + the locales.PluralRuleUnknown
+ ordinalTanslations map[interface{}][]*transText
+ rangeTanslations map[interface{}][]*transText
+}
+
+type transText struct {
+ text string
+ indexes []int
+}
+
+func newTranslator(trans locales.Translator) Translator {
+ return &translator{
+ Translator: trans,
+ translations: make(map[interface{}]*transText), // translation text broken up by byte index
+ cardinalTanslations: make(map[interface{}][]*transText),
+ ordinalTanslations: make(map[interface{}][]*transText),
+ rangeTanslations: make(map[interface{}][]*transText),
+ }
+}
+
+// Add adds a normal translation for a particular language/locale
+// {#} is the only replacement type accepted and may be used ad infinitum
+// eg. one: '{0} day left' other: '{0} days left'
+func (t *translator) Add(key interface{}, text string, override bool) error {
+
+ if _, ok := t.translations[key]; ok && !override {
+ return &ErrConflictingTranslation{locale: t.Locale(), key: key, text: text}
+ }
+
+ lb := strings.Count(text, "{")
+ rb := strings.Count(text, "}")
+
+ if lb != rb {
+ return &ErrMissingBracket{locale: t.Locale(), key: key, text: text}
+ }
+
+ trans := &transText{
+ text: text,
+ }
+
+ var idx int
+
+ for i := 0; i < lb; i++ {
+ s := "{" + strconv.Itoa(i) + "}"
+ idx = strings.Index(text, s)
+ if idx == -1 {
+ return &ErrBadParamSyntax{locale: t.Locale(), param: s, key: key, text: text}
+ }
+
+ trans.indexes = append(trans.indexes, idx)
+ trans.indexes = append(trans.indexes, idx+len(s))
+ }
+
+ t.translations[key] = trans
+
+ return nil
+}
+
+// AddCardinal adds a cardinal plural translation for a particular language/locale
+// {0} is the only replacement type accepted and only one variable is accepted as
+// multiple cannot be used for a plural rule determination, unless it is a range;
+// see AddRange below.
+// eg. in locale 'en' one: '{0} day left' other: '{0} days left'
+func (t *translator) AddCardinal(key interface{}, text string, rule locales.PluralRule, override bool) error {
+
+ var verified bool
+
+ // verify plural rule exists for locale
+ for _, pr := range t.PluralsCardinal() {
+ if pr == rule {
+ verified = true
+ break
+ }
+ }
+
+ if !verified {
+ return &ErrCardinalTranslation{text: fmt.Sprintf("error: cardinal plural rule '%s' does not exist for locale '%s' key: '%v' text: '%s'", rule, t.Locale(), key, text)}
+ }
+
+ tarr, ok := t.cardinalTanslations[key]
+ if ok {
+ // verify not adding a conflicting record
+ if len(tarr) > 0 && tarr[rule] != nil && !override {
+ return &ErrConflictingTranslation{locale: t.Locale(), key: key, rule: rule, text: text}
+ }
+
+ } else {
+ tarr = make([]*transText, 7)
+ t.cardinalTanslations[key] = tarr
+ }
+
+ trans := &transText{
+ text: text,
+ indexes: make([]int, 2),
+ }
+
+ tarr[rule] = trans
+
+ idx := strings.Index(text, paramZero)
+ if idx == -1 {
+ tarr[rule] = nil
+ return &ErrCardinalTranslation{text: fmt.Sprintf("error: parameter '%s' not found, may want to use 'Add' instead of 'AddCardinal'. locale: '%s' key: '%v' text: '%s'", paramZero, t.Locale(), key, text)}
+ }
+
+ trans.indexes[0] = idx
+ trans.indexes[1] = idx + len(paramZero)
+
+ return nil
+}
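+
+// Illustrative sketch (assumes the "en" locale, whose cardinal plural rules
+// are One and Other):
+//
+//	_ = trans.AddCardinal("days", "{0} day left", locales.PluralRuleOne, false)
+//	_ = trans.AddCardinal("days", "{0} days left", locales.PluralRuleOther, false)
+//	s, _ := trans.C("days", 1, 0, "1") // "1 day left"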
+
+// AddOrdinal adds an ordinal plural translation for a particular language/locale.
+// {0} is the only replacement type accepted, and only one variable is accepted,
+// as multiple variables cannot be used in a plural rule determination unless it
+// is a range; see AddRange below.
+// eg. in locale 'en' one: '{0}st day of spring' other: '{0}nd day of spring' - 1st, 2nd, 3rd...
+func (t *translator) AddOrdinal(key interface{}, text string, rule locales.PluralRule, override bool) error {
+
+ var verified bool
+
+ // verify plural rule exists for locale
+ for _, pr := range t.PluralsOrdinal() {
+ if pr == rule {
+ verified = true
+ break
+ }
+ }
+
+ if !verified {
+ return &ErrOrdinalTranslation{text: fmt.Sprintf("error: ordinal plural rule '%s' does not exist for locale '%s' key: '%v' text: '%s'", rule, t.Locale(), key, text)}
+ }
+
+ tarr, ok := t.ordinalTanslations[key]
+ if ok {
+ // verify not adding a conflicting record
+ if len(tarr) > 0 && tarr[rule] != nil && !override {
+ return &ErrConflictingTranslation{locale: t.Locale(), key: key, rule: rule, text: text}
+ }
+
+ } else {
+ tarr = make([]*transText, 7)
+ t.ordinalTanslations[key] = tarr
+ }
+
+ trans := &transText{
+ text: text,
+ indexes: make([]int, 2),
+ }
+
+ tarr[rule] = trans
+
+ idx := strings.Index(text, paramZero)
+ if idx == -1 {
+ tarr[rule] = nil
+ return &ErrOrdinalTranslation{text: fmt.Sprintf("error: parameter '%s' not found, may want to use 'Add' instead of 'AddOrdinal'. locale: '%s' key: '%v' text: '%s'", paramZero, t.Locale(), key, text)}
+ }
+
+ trans.indexes[0] = idx
+ trans.indexes[1] = idx + len(paramZero)
+
+ return nil
+}
+
+// AddRange adds a range plural translation for a particular language/locale.
+// {0} and {1} are the only replacement types accepted, and both are required.
+// eg. in locale 'nl' one: '{0}-{1} day left' other: '{0}-{1} days left'
+func (t *translator) AddRange(key interface{}, text string, rule locales.PluralRule, override bool) error {
+
+ var verified bool
+
+ // verify plural rule exists for locale
+ for _, pr := range t.PluralsRange() {
+ if pr == rule {
+ verified = true
+ break
+ }
+ }
+
+ if !verified {
+ return &ErrRangeTranslation{text: fmt.Sprintf("error: range plural rule '%s' does not exist for locale '%s' key: '%v' text: '%s'", rule, t.Locale(), key, text)}
+ }
+
+ tarr, ok := t.rangeTanslations[key]
+ if ok {
+ // verify not adding a conflicting record
+ if len(tarr) > 0 && tarr[rule] != nil && !override {
+ return &ErrConflictingTranslation{locale: t.Locale(), key: key, rule: rule, text: text}
+ }
+
+ } else {
+ tarr = make([]*transText, 7)
+ t.rangeTanslations[key] = tarr
+ }
+
+ trans := &transText{
+ text: text,
+ indexes: make([]int, 4),
+ }
+
+ tarr[rule] = trans
+
+ idx := strings.Index(text, paramZero)
+ if idx == -1 {
+ tarr[rule] = nil
+ return &ErrRangeTranslation{text: fmt.Sprintf("error: parameter '%s' not found, are you sure you're adding a Range Translation? locale: '%s' key: '%v' text: '%s'", paramZero, t.Locale(), key, text)}
+ }
+
+ trans.indexes[0] = idx
+ trans.indexes[1] = idx + len(paramZero)
+
+ idx = strings.Index(text, paramOne)
+ if idx == -1 {
+ tarr[rule] = nil
+ return &ErrRangeTranslation{text: fmt.Sprintf("error: parameter '%s' not found, a Range Translation requires two parameters. locale: '%s' key: '%v' text: '%s'", paramOne, t.Locale(), key, text)}
+ }
+
+ trans.indexes[2] = idx
+ trans.indexes[3] = idx + len(paramOne)
+
+ return nil
+}
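+
+// Illustrative sketch (assumes a locale whose range plural rules include
+// locales.PluralRuleOther, as "en" does):
+//
+//	_ = trans.AddRange("days_range", "{0}-{1} days left", locales.PluralRuleOther, false)
+//	s, _ := trans.R("days_range", 1, 0, 5, 0, "1", "5") // "1-5 days left"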
+
+// T creates the translation for the locale given the 'key' and params passed in
+func (t *translator) T(key interface{}, params ...string) (string, error) {
+
+ trans, ok := t.translations[key]
+ if !ok {
+ return unknownTranslation, ErrUnknowTranslation
+ }
+
+ b := make([]byte, 0, 64)
+
+ var start, end, count int
+
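+ // trans.indexes holds start/end byte-offset pairs for each {n} placeholder: copy the literal text up to each placeholder, then splice in the matching param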
+ for i := 0; i < len(trans.indexes); i++ {
+ end = trans.indexes[i]
+ b = append(b, trans.text[start:end]...)
+ b = append(b, params[count]...)
+ i++
+ start = trans.indexes[i]
+ count++
+ }
+
+ b = append(b, trans.text[start:]...)
+
+ return string(b), nil
+}
+
+// C creates the cardinal translation for the locale given the 'key', 'num' and 'digits' arguments and the param passed in
+func (t *translator) C(key interface{}, num float64, digits uint64, param string) (string, error) {
+
+ tarr, ok := t.cardinalTanslations[key]
+ if !ok {
+ return unknownTranslation, ErrUnknowTranslation
+ }
+
+ rule := t.CardinalPluralRule(num, digits)
+
+ trans := tarr[rule]
+
+ b := make([]byte, 0, 64)
+ b = append(b, trans.text[:trans.indexes[0]]...)
+ b = append(b, param...)
+ b = append(b, trans.text[trans.indexes[1]:]...)
+
+ return string(b), nil
+}
+
+// O creates the ordinal translation for the locale given the 'key', 'num' and 'digits' arguments and the param passed in
+func (t *translator) O(key interface{}, num float64, digits uint64, param string) (string, error) {
+
+ tarr, ok := t.ordinalTanslations[key]
+ if !ok {
+ return unknownTranslation, ErrUnknowTranslation
+ }
+
+ rule := t.OrdinalPluralRule(num, digits)
+
+ trans := tarr[rule]
+
+ b := make([]byte, 0, 64)
+ b = append(b, trans.text[:trans.indexes[0]]...)
+ b = append(b, param...)
+ b = append(b, trans.text[trans.indexes[1]:]...)
+
+ return string(b), nil
+}
+
+// R creates the range translation for the locale given the 'key', 'num1', 'digits1', 'num2' and 'digits2' arguments
+// and the 'param1' and 'param2' passed in
+func (t *translator) R(key interface{}, num1 float64, digits1 uint64, num2 float64, digits2 uint64, param1, param2 string) (string, error) {
+
+ tarr, ok := t.rangeTanslations[key]
+ if !ok {
+ return unknownTranslation, ErrUnknowTranslation
+ }
+
+ rule := t.RangePluralRule(num1, digits1, num2, digits2)
+
+ trans := tarr[rule]
+
+ b := make([]byte, 0, 64)
+ b = append(b, trans.text[:trans.indexes[0]]...)
+ b = append(b, param1...)
+ b = append(b, trans.text[trans.indexes[1]:trans.indexes[2]]...)
+ b = append(b, param2...)
+ b = append(b, trans.text[trans.indexes[3]:]...)
+
+ return string(b), nil
+}
+
+// VerifyTranslations checks to ensure that no plural rules have been
+// missed within the translations.
+func (t *translator) VerifyTranslations() error {
+
+ for k, v := range t.cardinalTanslations {
+
+ for _, rule := range t.PluralsCardinal() {
+
+ if v[rule] == nil {
+ return &ErrMissingPluralTranslation{locale: t.Locale(), translationType: "plural", rule: rule, key: k}
+ }
+ }
+ }
+
+ for k, v := range t.ordinalTanslations {
+
+ for _, rule := range t.PluralsOrdinal() {
+
+ if v[rule] == nil {
+ return &ErrMissingPluralTranslation{locale: t.Locale(), translationType: "ordinal", rule: rule, key: k}
+ }
+ }
+ }
+
+ for k, v := range t.rangeTanslations {
+
+ for _, rule := range t.PluralsRange() {
+
+ if v[rule] == nil {
+ return &ErrMissingPluralTranslation{locale: t.Locale(), translationType: "range", rule: rule, key: k}
+ }
+ }
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/go-playground/universal-translator/universal_translator.go b/vendor/github.com/go-playground/universal-translator/universal_translator.go
new file mode 100644
index 0000000..dbf707f
--- /dev/null
+++ b/vendor/github.com/go-playground/universal-translator/universal_translator.go
@@ -0,0 +1,113 @@
+package ut
+
+import (
+ "strings"
+
+ "github.com/go-playground/locales"
+)
+
+// UniversalTranslator holds all locale & translation data
+type UniversalTranslator struct {
+ translators map[string]Translator
+ fallback Translator
+}
+
+// New returns a new UniversalTranslator instance set with
+// the fallback locale and locales it should support
+func New(fallback locales.Translator, supportedLocales ...locales.Translator) *UniversalTranslator {
+
+ t := &UniversalTranslator{
+ translators: make(map[string]Translator),
+ }
+
+ for _, v := range supportedLocales {
+
+ trans := newTranslator(v)
+ t.translators[strings.ToLower(trans.Locale())] = trans
+
+ if fallback.Locale() == v.Locale() {
+ t.fallback = trans
+ }
+ }
+
+ if t.fallback == nil && fallback != nil {
+ t.fallback = newTranslator(fallback)
+ }
+
+ return t
+}
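+
+// Illustrative sketch (assumes the github.com/go-playground/locales/en
+// package for the locale data):
+//
+//	en := en.New()                      // locales.Translator for "en"
+//	uni := ut.New(en, en)               // fallback and supported locale
+//	trans, _ := uni.GetTranslator("en") // ut.Translator for "en"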
+
+// FindTranslator tries to find a Translator based on the given list of locales
+// and returns the first one it can find, otherwise returns the
+// fallback translator.
+func (t *UniversalTranslator) FindTranslator(locales ...string) (trans Translator, found bool) {
+
+ for _, locale := range locales {
+
+ if trans, found = t.translators[strings.ToLower(locale)]; found {
+ return
+ }
+ }
+
+ return t.fallback, false
+}
+
+// GetTranslator returns the specified translator for the given locale,
+// or fallback if not found
+func (t *UniversalTranslator) GetTranslator(locale string) (trans Translator, found bool) {
+
+ if trans, found = t.translators[strings.ToLower(locale)]; found {
+ return
+ }
+
+ return t.fallback, false
+}
+
+// GetFallback returns the fallback locale
+func (t *UniversalTranslator) GetFallback() Translator {
+ return t.fallback
+}
+
+// AddTranslator adds the supplied translator; if it already exists the override param
+// will be checked and if false an error will be returned, otherwise the translator will be
+// overridden; if the fallback matches the supplied translator it will be overridden as well
+// NOTE: this is normally only used when translator is embedded within a library
+func (t *UniversalTranslator) AddTranslator(translator locales.Translator, override bool) error {
+
+ lc := strings.ToLower(translator.Locale())
+ _, ok := t.translators[lc]
+ if ok && !override {
+ return &ErrExistingTranslator{locale: translator.Locale()}
+ }
+
+ trans := newTranslator(translator)
+
+ if t.fallback.Locale() == translator.Locale() {
+
+ // because it's optional to have a fallback, I don't impose that limitation
+ // don't know why you wouldn't but...
+ if !override {
+ return &ErrExistingTranslator{locale: translator.Locale()}
+ }
+
+ t.fallback = trans
+ }
+
+ t.translators[lc] = trans
+
+ return nil
+}
+
+// VerifyTranslations runs through all locales and identifies any issues
+// eg. missing plural rules for a locale
+func (t *UniversalTranslator) VerifyTranslations() (err error) {
+
+ for _, trans := range t.translators {
+ err = trans.VerifyTranslations()
+ if err != nil {
+ return
+ }
+ }
+
+ return
+}
diff --git a/vendor/github.com/go-playground/validator/.gitignore b/vendor/github.com/go-playground/validator/.gitignore
new file mode 100644
index 0000000..792ca00
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/.gitignore
@@ -0,0 +1,29 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
+*.out
+*.txt
+cover.html
+README.html
\ No newline at end of file
diff --git a/vendor/github.com/go-playground/validator/LICENSE b/vendor/github.com/go-playground/validator/LICENSE
new file mode 100644
index 0000000..6a2ae9a
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Dean Karn
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/vendor/github.com/go-playground/validator/Makefile b/vendor/github.com/go-playground/validator/Makefile
new file mode 100644
index 0000000..b912cae
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/Makefile
@@ -0,0 +1,18 @@
+GOCMD=go
+
+linters-install:
+ @golangci-lint --version >/dev/null 2>&1 || { \
+ echo "installing linting tools..."; \
+ curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s v1.19.1; \
+ }
+
+lint: linters-install
+ golangci-lint run
+
+test:
+ $(GOCMD) test -cover -race ./...
+
+bench:
+ $(GOCMD) test -bench=. -benchmem ./...
+
+.PHONY: test lint bench linters-install
\ No newline at end of file
diff --git a/vendor/github.com/go-playground/validator/README.md b/vendor/github.com/go-playground/validator/README.md
new file mode 100644
index 0000000..b55f3ec
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/README.md
@@ -0,0 +1,155 @@
+**NOTICE:** v9 has entered maintenance status as of 2019-12-24. Please make all new functionality PRs against master.
+
+Package validator
+================
+ [![Join the chat at https://gitter.im/go-playground/validator](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/go-playground/validator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+![Project status](https://img.shields.io/badge/version-9.31.0-green.svg)
+[![Build Status](https://semaphoreci.com/api/v1/joeybloggs/validator/branches/v9/badge.svg)](https://semaphoreci.com/joeybloggs/validator)
+[![Coverage Status](https://coveralls.io/repos/go-playground/validator/badge.svg?branch=v9&service=github)](https://coveralls.io/github/go-playground/validator?branch=v9)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-playground/validator)](https://goreportcard.com/report/github.com/go-playground/validator)
+[![GoDoc](https://godoc.org/gopkg.in/go-playground/validator.v9?status.svg)](https://godoc.org/gopkg.in/go-playground/validator.v9)
+![License](https://img.shields.io/dub/l/vibe-d.svg)
+
+Package validator implements value validations for structs and individual fields based on tags.
+
+It has the following **unique** features:
+
+- Cross Field and Cross Struct validations by using validation tags or custom validators.
+- Slice, Array and Map diving, which allows any or all levels of a multidimensional field to be validated.
+- Ability to dive into both map keys and values for validation
+- Handles interface types by determining the underlying type prior to validation.
+- Handles custom field types, such as the sql driver [Valuer](https://golang.org/src/database/sql/driver/types.go?s=1210:1293#L29)
+- Alias validation tags, which allow several validations to be mapped to a single tag for easier definition of validations on structs
+- Extraction of a custom-defined field name, e.g. the JSON name can be extracted while validating and made available in the resulting FieldError
+- Customizable i18n aware error messages.
+- Default validator for the [gin](https://github.com/gin-gonic/gin) web framework; upgrading from v8 to v9 in gin see [here](https://github.com/go-playground/validator/tree/v9/_examples/gin-upgrading-overriding)
+
+Installation
+------------
+
+Use go get.
+
+ go get gopkg.in/go-playground/validator.v9
+
+Then import the validator package into your own code.
+
+ import "gopkg.in/go-playground/validator.v9"
+
+Error Return Value
+-------
+
+Validation functions return type error
+
+They return type error to avoid the issue discussed in the following, where err is always != nil:
+
+* http://stackoverflow.com/a/29138676/3158232
+* https://github.com/go-playground/validator/issues/134
+
+The validator returns only InvalidValidationError for bad validation input, nil, or ValidationErrors, all as type error; so in your code all you need to do is check whether the returned error is nil, and if it is not, check whether it is an InvalidValidationError (if necessary; most of the time it isn't) and type-assert it to ValidationErrors like so:
+
+```go
+err := validate.Struct(mystruct)
+validationErrors := err.(validator.ValidationErrors)
+```
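+
+For completeness, a slightly fuller (illustrative) variant that performs the nil and
+InvalidValidationError checks described above might look like:
+
+```go
+err := validate.Struct(mystruct)
+if err != nil {
+	if _, ok := err.(*validator.InvalidValidationError); ok {
+		// bad input to Struct, e.g. a nil or non-struct value
+		return
+	}
+	for _, fieldErr := range err.(validator.ValidationErrors) {
+		fmt.Println(fieldErr.Namespace(), fieldErr.Tag())
+	}
+}
+```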
+
+Usage and documentation
+------
+
+Please see http://godoc.org/gopkg.in/go-playground/validator.v9 for detailed usage docs.
+
+##### Examples:
+
+- [Simple](https://github.com/go-playground/validator/blob/v9/_examples/simple/main.go)
+- [Custom Field Types](https://github.com/go-playground/validator/blob/v9/_examples/custom/main.go)
+- [Struct Level](https://github.com/go-playground/validator/blob/v9/_examples/struct-level/main.go)
+- [Translations & Custom Errors](https://github.com/go-playground/validator/blob/v9/_examples/translations/main.go)
+- [Gin upgrade and/or override validator](https://github.com/go-playground/validator/tree/v9/_examples/gin-upgrading-overriding)
+- [wash - an example application putting it all together](https://github.com/bluesuncorp/wash)
+
+Benchmarks
+------
+###### Run on MacBook Pro (15-inch, 2017) go version go1.10.2 darwin/amd64
+```go
+goos: darwin
+goarch: amd64
+pkg: github.com/go-playground/validator
+BenchmarkFieldSuccess-8 20000000 83.6 ns/op 0 B/op 0 allocs/op
+BenchmarkFieldSuccessParallel-8 50000000 26.8 ns/op 0 B/op 0 allocs/op
+BenchmarkFieldFailure-8 5000000 291 ns/op 208 B/op 4 allocs/op
+BenchmarkFieldFailureParallel-8 20000000 107 ns/op 208 B/op 4 allocs/op
+BenchmarkFieldArrayDiveSuccess-8 2000000 623 ns/op 201 B/op 11 allocs/op
+BenchmarkFieldArrayDiveSuccessParallel-8 10000000 237 ns/op 201 B/op 11 allocs/op
+BenchmarkFieldArrayDiveFailure-8 2000000 859 ns/op 412 B/op 16 allocs/op
+BenchmarkFieldArrayDiveFailureParallel-8 5000000 335 ns/op 413 B/op 16 allocs/op
+BenchmarkFieldMapDiveSuccess-8 1000000 1292 ns/op 432 B/op 18 allocs/op
+BenchmarkFieldMapDiveSuccessParallel-8 3000000 467 ns/op 432 B/op 18 allocs/op
+BenchmarkFieldMapDiveFailure-8 1000000 1082 ns/op 512 B/op 16 allocs/op
+BenchmarkFieldMapDiveFailureParallel-8 5000000 425 ns/op 512 B/op 16 allocs/op
+BenchmarkFieldMapDiveWithKeysSuccess-8 1000000 1539 ns/op 480 B/op 21 allocs/op
+BenchmarkFieldMapDiveWithKeysSuccessParallel-8 3000000 613 ns/op 480 B/op 21 allocs/op
+BenchmarkFieldMapDiveWithKeysFailure-8 1000000 1413 ns/op 721 B/op 21 allocs/op
+BenchmarkFieldMapDiveWithKeysFailureParallel-8 3000000 575 ns/op 721 B/op 21 allocs/op
+BenchmarkFieldCustomTypeSuccess-8 10000000 216 ns/op 32 B/op 2 allocs/op
+BenchmarkFieldCustomTypeSuccessParallel-8 20000000 82.2 ns/op 32 B/op 2 allocs/op
+BenchmarkFieldCustomTypeFailure-8 5000000 274 ns/op 208 B/op 4 allocs/op
+BenchmarkFieldCustomTypeFailureParallel-8 20000000 116 ns/op 208 B/op 4 allocs/op
+BenchmarkFieldOrTagSuccess-8 2000000 740 ns/op 16 B/op 1 allocs/op
+BenchmarkFieldOrTagSuccessParallel-8 3000000 474 ns/op 16 B/op 1 allocs/op
+BenchmarkFieldOrTagFailure-8 3000000 471 ns/op 224 B/op 5 allocs/op
+BenchmarkFieldOrTagFailureParallel-8 3000000 414 ns/op 224 B/op 5 allocs/op
+BenchmarkStructLevelValidationSuccess-8 10000000 213 ns/op 32 B/op 2 allocs/op
+BenchmarkStructLevelValidationSuccessParallel-8 20000000 91.8 ns/op 32 B/op 2 allocs/op
+BenchmarkStructLevelValidationFailure-8 3000000 473 ns/op 304 B/op 8 allocs/op
+BenchmarkStructLevelValidationFailureParallel-8 10000000 234 ns/op 304 B/op 8 allocs/op
+BenchmarkStructSimpleCustomTypeSuccess-8 5000000 385 ns/op 32 B/op 2 allocs/op
+BenchmarkStructSimpleCustomTypeSuccessParallel-8 10000000 161 ns/op 32 B/op 2 allocs/op
+BenchmarkStructSimpleCustomTypeFailure-8 2000000 640 ns/op 424 B/op 9 allocs/op
+BenchmarkStructSimpleCustomTypeFailureParallel-8 5000000 318 ns/op 440 B/op 10 allocs/op
+BenchmarkStructFilteredSuccess-8 2000000 597 ns/op 288 B/op 9 allocs/op
+BenchmarkStructFilteredSuccessParallel-8 10000000 266 ns/op 288 B/op 9 allocs/op
+BenchmarkStructFilteredFailure-8 3000000 454 ns/op 256 B/op 7 allocs/op
+BenchmarkStructFilteredFailureParallel-8 10000000 214 ns/op 256 B/op 7 allocs/op
+BenchmarkStructPartialSuccess-8 3000000 502 ns/op 256 B/op 6 allocs/op
+BenchmarkStructPartialSuccessParallel-8 10000000 225 ns/op 256 B/op 6 allocs/op
+BenchmarkStructPartialFailure-8 2000000 702 ns/op 480 B/op 11 allocs/op
+BenchmarkStructPartialFailureParallel-8 5000000 329 ns/op 480 B/op 11 allocs/op
+BenchmarkStructExceptSuccess-8 2000000 793 ns/op 496 B/op 12 allocs/op
+BenchmarkStructExceptSuccessParallel-8 10000000 193 ns/op 240 B/op 5 allocs/op
+BenchmarkStructExceptFailure-8 2000000 639 ns/op 464 B/op 10 allocs/op
+BenchmarkStructExceptFailureParallel-8 5000000 300 ns/op 464 B/op 10 allocs/op
+BenchmarkStructSimpleCrossFieldSuccess-8 3000000 417 ns/op 72 B/op 3 allocs/op
+BenchmarkStructSimpleCrossFieldSuccessParallel-8 10000000 163 ns/op 72 B/op 3 allocs/op
+BenchmarkStructSimpleCrossFieldFailure-8 2000000 645 ns/op 304 B/op 8 allocs/op
+BenchmarkStructSimpleCrossFieldFailureParallel-8 5000000 285 ns/op 304 B/op 8 allocs/op
+BenchmarkStructSimpleCrossStructCrossFieldSuccess-8 3000000 588 ns/op 80 B/op 4 allocs/op
+BenchmarkStructSimpleCrossStructCrossFieldSuccessParallel-8 10000000 221 ns/op 80 B/op 4 allocs/op
+BenchmarkStructSimpleCrossStructCrossFieldFailure-8 2000000 868 ns/op 320 B/op 9 allocs/op
+BenchmarkStructSimpleCrossStructCrossFieldFailureParallel-8 5000000 337 ns/op 320 B/op 9 allocs/op
+BenchmarkStructSimpleSuccess-8 5000000 260 ns/op 0 B/op 0 allocs/op
+BenchmarkStructSimpleSuccessParallel-8 20000000 90.6 ns/op 0 B/op 0 allocs/op
+BenchmarkStructSimpleFailure-8 2000000 619 ns/op 424 B/op 9 allocs/op
+BenchmarkStructSimpleFailureParallel-8 5000000 296 ns/op 424 B/op 9 allocs/op
+BenchmarkStructComplexSuccess-8 1000000 1454 ns/op 128 B/op 8 allocs/op
+BenchmarkStructComplexSuccessParallel-8 3000000 579 ns/op 128 B/op 8 allocs/op
+BenchmarkStructComplexFailure-8 300000 4140 ns/op 3041 B/op 53 allocs/op
+BenchmarkStructComplexFailureParallel-8 1000000 2127 ns/op 3041 B/op 53 allocs/op
+BenchmarkOneof-8 10000000 140 ns/op 0 B/op 0 allocs/op
+BenchmarkOneofParallel-8 20000000 70.1 ns/op 0 B/op 0 allocs/op
+```
+
+Complementary Software
+----------------------
+
+Here is a list of software that complements using this library either pre or post validation.
+
+* [form](https://github.com/go-playground/form) - Decodes url.Values into Go value(s) and Encodes Go value(s) into url.Values. Dual Array and Full map support.
+* [mold](https://github.com/go-playground/mold) - A general library to help modify or set data within data structures and other objects
+
+How to Contribute
+------
+
+Make a pull request...
+
+License
+------
+Distributed under MIT License, please see license file within the code for more details.
diff --git a/vendor/github.com/go-playground/validator/baked_in.go b/vendor/github.com/go-playground/validator/baked_in.go
new file mode 100644
index 0000000..cfc5686
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/baked_in.go
@@ -0,0 +1,2001 @@
+package validator
+
+import (
+ "bytes"
+ "context"
+ "crypto/sha256"
+ "fmt"
+ "net"
+ "net/url"
+ "os"
+ "reflect"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+ "unicode/utf8"
+
+ urn "github.com/leodido/go-urn"
+)
+
+// Func accepts a FieldLevel interface for all validation needs. The return
+// value should be true when validation succeeds.
+type Func func(fl FieldLevel) bool
+
+// FuncCtx accepts a context.Context and FieldLevel interface for all
+// validation needs. The return value should be true when validation succeeds.
+type FuncCtx func(ctx context.Context, fl FieldLevel) bool
+
+// wrapFunc wraps a normal Func to make it compatible with FuncCtx
+func wrapFunc(fn Func) FuncCtx {
+ if fn == nil {
+ return nil // be sure not to wrap a bad function.
+ }
+ return func(ctx context.Context, fl FieldLevel) bool {
+ return fn(fl)
+ }
+}
+
+var (
+ restrictedTags = map[string]struct{}{
+ diveTag: {},
+ keysTag: {},
+ endKeysTag: {},
+ structOnlyTag: {},
+ omitempty: {},
+ skipValidationTag: {},
+ utf8HexComma: {},
+ utf8Pipe: {},
+ noStructLevelTag: {},
+ requiredTag: {},
+ isdefault: {},
+ }
+
+ // bakedInAliases is a default mapping of a single validation tag that
+ // defines a common or complex set of validation(s) to simplify
+ // adding validation to structs.
+ bakedInAliases = map[string]string{
+ "iscolor": "hexcolor|rgb|rgba|hsl|hsla",
+ }
+
+ // bakedInValidators is the default map of validation functions;
+ // you can add, remove or even replace items to suit your needs,
+ // or even disregard it and use your own map if so desired.
+ bakedInValidators = map[string]Func{
+ "required": hasValue,
+ "required_with": requiredWith,
+ "required_with_all": requiredWithAll,
+ "required_without": requiredWithout,
+ "required_without_all": requiredWithoutAll,
+ "isdefault": isDefault,
+ "len": hasLengthOf,
+ "min": hasMinOf,
+ "max": hasMaxOf,
+ "eq": isEq,
+ "ne": isNe,
+ "lt": isLt,
+ "lte": isLte,
+ "gt": isGt,
+ "gte": isGte,
+ "eqfield": isEqField,
+ "eqcsfield": isEqCrossStructField,
+ "necsfield": isNeCrossStructField,
+ "gtcsfield": isGtCrossStructField,
+ "gtecsfield": isGteCrossStructField,
+ "ltcsfield": isLtCrossStructField,
+ "ltecsfield": isLteCrossStructField,
+ "nefield": isNeField,
+ "gtefield": isGteField,
+ "gtfield": isGtField,
+ "ltefield": isLteField,
+ "ltfield": isLtField,
+ "fieldcontains": fieldContains,
+ "fieldexcludes": fieldExcludes,
+ "alpha": isAlpha,
+ "alphanum": isAlphanum,
+ "alphaunicode": isAlphaUnicode,
+ "alphanumunicode": isAlphanumUnicode,
+ "numeric": isNumeric,
+ "number": isNumber,
+ "hexadecimal": isHexadecimal,
+ "hexcolor": isHEXColor,
+ "rgb": isRGB,
+ "rgba": isRGBA,
+ "hsl": isHSL,
+ "hsla": isHSLA,
+ "e164": isE164,
+ "email": isEmail,
+ "url": isURL,
+ "uri": isURI,
+ "urn_rfc2141": isUrnRFC2141, // RFC 2141
+ "file": isFile,
+ "base64": isBase64,
+ "base64url": isBase64URL,
+ "contains": contains,
+ "containsany": containsAny,
+ "containsrune": containsRune,
+ "excludes": excludes,
+ "excludesall": excludesAll,
+ "excludesrune": excludesRune,
+ "startswith": startsWith,
+ "endswith": endsWith,
+ "isbn": isISBN,
+ "isbn10": isISBN10,
+ "isbn13": isISBN13,
+ "eth_addr": isEthereumAddress,
+ "btc_addr": isBitcoinAddress,
+ "btc_addr_bech32": isBitcoinBech32Address,
+ "uuid": isUUID,
+ "uuid3": isUUID3,
+ "uuid4": isUUID4,
+ "uuid5": isUUID5,
+ "uuid_rfc4122": isUUIDRFC4122,
+ "uuid3_rfc4122": isUUID3RFC4122,
+ "uuid4_rfc4122": isUUID4RFC4122,
+ "uuid5_rfc4122": isUUID5RFC4122,
+ "ascii": isASCII,
+ "printascii": isPrintableASCII,
+ "multibyte": hasMultiByteCharacter,
+ "datauri": isDataURI,
+ "latitude": isLatitude,
+ "longitude": isLongitude,
+ "ssn": isSSN,
+ "ipv4": isIPv4,
+ "ipv6": isIPv6,
+ "ip": isIP,
+ "cidrv4": isCIDRv4,
+ "cidrv6": isCIDRv6,
+ "cidr": isCIDR,
+ "tcp4_addr": isTCP4AddrResolvable,
+ "tcp6_addr": isTCP6AddrResolvable,
+ "tcp_addr": isTCPAddrResolvable,
+ "udp4_addr": isUDP4AddrResolvable,
+ "udp6_addr": isUDP6AddrResolvable,
+ "udp_addr": isUDPAddrResolvable,
+ "ip4_addr": isIP4AddrResolvable,
+ "ip6_addr": isIP6AddrResolvable,
+ "ip_addr": isIPAddrResolvable,
+ "unix_addr": isUnixAddrResolvable,
+ "mac": isMAC,
+ "hostname": isHostnameRFC952, // RFC 952
+ "hostname_rfc1123": isHostnameRFC1123, // RFC 1123
+ "fqdn": isFQDN,
+ "unique": isUnique,
+ "oneof": isOneOf,
+ "html": isHTML,
+ "html_encoded": isHTMLEncoded,
+ "url_encoded": isURLEncoded,
+ "dir": isDir,
+ }
+)
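+
+// Illustrative sketch of how these baked-in validators are referenced from
+// struct tags (hypothetical struct, not part of this package):
+//
+//	type User struct {
+//		Email string `validate:"required,email"`
+//		Age   uint8  `validate:"gte=0,lte=130"`
+//		Color string `validate:"iscolor"` // alias for hexcolor|rgb|rgba|hsl|hsla
+//	}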
+
+var oneofValsCache = map[string][]string{}
+var oneofValsCacheRWLock = sync.RWMutex{}
+
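+// parseOneOfParam2 splits the space-separated 'oneof' parameter into its values, caching the result per parameter string.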
+func parseOneOfParam2(s string) []string {
+ oneofValsCacheRWLock.RLock()
+ vals, ok := oneofValsCache[s]
+ oneofValsCacheRWLock.RUnlock()
+ if !ok {
+ oneofValsCacheRWLock.Lock()
+ vals = strings.Fields(s)
+ oneofValsCache[s] = vals
+ oneofValsCacheRWLock.Unlock()
+ }
+ return vals
+}
+
+func isURLEncoded(fl FieldLevel) bool {
+ return uRLEncodedRegex.MatchString(fl.Field().String())
+}
+
+func isHTMLEncoded(fl FieldLevel) bool {
+ return hTMLEncodedRegex.MatchString(fl.Field().String())
+}
+
+func isHTML(fl FieldLevel) bool {
+ return hTMLRegex.MatchString(fl.Field().String())
+}
+
+// isOneOf is the validation function for validating if the current field's value is one of the space-separated values in the param.
+func isOneOf(fl FieldLevel) bool {
+ vals := parseOneOfParam2(fl.Param())
+
+ field := fl.Field()
+
+ var v string
+ switch field.Kind() {
+ case reflect.String:
+ v = field.String()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ v = strconv.FormatInt(field.Int(), 10)
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ v = strconv.FormatUint(field.Uint(), 10)
+ default:
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+ }
+ for i := 0; i < len(vals); i++ {
+ if vals[i] == v {
+ return true
+ }
+ }
+ return false
+}
+
+// isUnique is the validation function for validating if each array|slice|map value is unique
+func isUnique(fl FieldLevel) bool {
+
+ field := fl.Field()
+ param := fl.Param()
+ v := reflect.ValueOf(struct{}{})
+
+ switch field.Kind() {
+ case reflect.Slice, reflect.Array:
+ if param == "" {
+ m := reflect.MakeMap(reflect.MapOf(field.Type().Elem(), v.Type()))
+
+ for i := 0; i < field.Len(); i++ {
+ m.SetMapIndex(field.Index(i), v)
+ }
+ return field.Len() == m.Len()
+ }
+
+ sf, ok := field.Type().Elem().FieldByName(param)
+ if !ok {
+ panic(fmt.Sprintf("Bad field name %s", param))
+ }
+
+ m := reflect.MakeMap(reflect.MapOf(sf.Type, v.Type()))
+ for i := 0; i < field.Len(); i++ {
+ m.SetMapIndex(field.Index(i).FieldByName(param), v)
+ }
+ return field.Len() == m.Len()
+ case reflect.Map:
+ m := reflect.MakeMap(reflect.MapOf(field.Type().Elem(), v.Type()))
+
+ for _, k := range field.MapKeys() {
+ m.SetMapIndex(field.MapIndex(k), v)
+ }
+ return field.Len() == m.Len()
+ default:
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+ }
+}
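+
+// Illustrative tag usage for the unique validator (hypothetical fields):
+//
+//	Tags  []string `validate:"unique"`      // no duplicate elements
+//	Users []User   `validate:"unique=Name"` // no two elements share the same Name field value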
+
+// IsMAC is the validation function for validating if the field's value is a valid MAC address.
+func isMAC(fl FieldLevel) bool {
+
+ _, err := net.ParseMAC(fl.Field().String())
+
+ return err == nil
+}
+
+// IsCIDRv4 is the validation function for validating if the field's value is a valid v4 CIDR address.
+func isCIDRv4(fl FieldLevel) bool {
+
+ ip, _, err := net.ParseCIDR(fl.Field().String())
+
+ return err == nil && ip.To4() != nil
+}
+
+// IsCIDRv6 is the validation function for validating if the field's value is a valid v6 CIDR address.
+func isCIDRv6(fl FieldLevel) bool {
+
+ ip, _, err := net.ParseCIDR(fl.Field().String())
+
+ return err == nil && ip.To4() == nil
+}
+
+// IsCIDR is the validation function for validating if the field's value is a valid v4 or v6 CIDR address.
+func isCIDR(fl FieldLevel) bool {
+
+ _, _, err := net.ParseCIDR(fl.Field().String())
+
+ return err == nil
+}
+
+// IsIPv4 is the validation function for validating if a value is a valid v4 IP address.
+func isIPv4(fl FieldLevel) bool {
+
+ ip := net.ParseIP(fl.Field().String())
+
+ return ip != nil && ip.To4() != nil
+}
+
+// IsIPv6 is the validation function for validating if the field's value is a valid v6 IP address.
+func isIPv6(fl FieldLevel) bool {
+
+ ip := net.ParseIP(fl.Field().String())
+
+ return ip != nil && ip.To4() == nil
+}
+
+// IsIP is the validation function for validating if the field's value is a valid v4 or v6 IP address.
+func isIP(fl FieldLevel) bool {
+
+ ip := net.ParseIP(fl.Field().String())
+
+ return ip != nil
+}
+
+// IsSSN is the validation function for validating if the field's value is a valid SSN.
+func isSSN(fl FieldLevel) bool {
+
+ field := fl.Field()
+
+ if field.Len() != 11 {
+ return false
+ }
+
+ return sSNRegex.MatchString(field.String())
+}
+
+// IsLongitude is the validation function for validating if the field's value is a valid longitude coordinate.
+func isLongitude(fl FieldLevel) bool {
+ field := fl.Field()
+
+ var v string
+ switch field.Kind() {
+ case reflect.String:
+ v = field.String()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ v = strconv.FormatInt(field.Int(), 10)
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ v = strconv.FormatUint(field.Uint(), 10)
+ case reflect.Float32:
+ v = strconv.FormatFloat(field.Float(), 'f', -1, 32)
+ case reflect.Float64:
+ v = strconv.FormatFloat(field.Float(), 'f', -1, 64)
+ default:
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+ }
+
+ return longitudeRegex.MatchString(v)
+}
+
+// IsLatitude is the validation function for validating if the field's value is a valid latitude coordinate.
+func isLatitude(fl FieldLevel) bool {
+ field := fl.Field()
+
+ var v string
+ switch field.Kind() {
+ case reflect.String:
+ v = field.String()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ v = strconv.FormatInt(field.Int(), 10)
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ v = strconv.FormatUint(field.Uint(), 10)
+ case reflect.Float32:
+ v = strconv.FormatFloat(field.Float(), 'f', -1, 32)
+ case reflect.Float64:
+ v = strconv.FormatFloat(field.Float(), 'f', -1, 64)
+ default:
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+ }
+
+ return latitudeRegex.MatchString(v)
+}
+
+// IsDataURI is the validation function for validating if the field's value is a valid data URI.
+func isDataURI(fl FieldLevel) bool {
+
+ uri := strings.SplitN(fl.Field().String(), ",", 2)
+
+ if len(uri) != 2 {
+ return false
+ }
+
+ if !dataURIRegex.MatchString(uri[0]) {
+ return false
+ }
+
+ return base64Regex.MatchString(uri[1])
+}
+
+// HasMultiByteCharacter is the validation function for validating if the field's value has a multi byte character.
+func hasMultiByteCharacter(fl FieldLevel) bool {
+
+ field := fl.Field()
+
+ if field.Len() == 0 {
+ return true
+ }
+
+ return multibyteRegex.MatchString(field.String())
+}
+
+// IsPrintableASCII is the validation function for validating if the field's value is a valid printable ASCII character.
+func isPrintableASCII(fl FieldLevel) bool {
+ return printableASCIIRegex.MatchString(fl.Field().String())
+}
+
+// IsASCII is the validation function for validating if the field's value is a valid ASCII character.
+func isASCII(fl FieldLevel) bool {
+ return aSCIIRegex.MatchString(fl.Field().String())
+}
+
+// IsUUID5 is the validation function for validating if the field's value is a valid v5 UUID.
+func isUUID5(fl FieldLevel) bool {
+ return uUID5Regex.MatchString(fl.Field().String())
+}
+
+// IsUUID4 is the validation function for validating if the field's value is a valid v4 UUID.
+func isUUID4(fl FieldLevel) bool {
+ return uUID4Regex.MatchString(fl.Field().String())
+}
+
+// IsUUID3 is the validation function for validating if the field's value is a valid v3 UUID.
+func isUUID3(fl FieldLevel) bool {
+ return uUID3Regex.MatchString(fl.Field().String())
+}
+
+// IsUUID is the validation function for validating if the field's value is a valid UUID of any version.
+func isUUID(fl FieldLevel) bool {
+ return uUIDRegex.MatchString(fl.Field().String())
+}
+
+// IsUUID5RFC4122 is the validation function for validating if the field's value is a valid RFC4122 v5 UUID.
+func isUUID5RFC4122(fl FieldLevel) bool {
+ return uUID5RFC4122Regex.MatchString(fl.Field().String())
+}
+
+// IsUUID4RFC4122 is the validation function for validating if the field's value is a valid RFC4122 v4 UUID.
+func isUUID4RFC4122(fl FieldLevel) bool {
+ return uUID4RFC4122Regex.MatchString(fl.Field().String())
+}
+
+// IsUUID3RFC4122 is the validation function for validating if the field's value is a valid RFC4122 v3 UUID.
+func isUUID3RFC4122(fl FieldLevel) bool {
+ return uUID3RFC4122Regex.MatchString(fl.Field().String())
+}
+
+// IsUUIDRFC4122 is the validation function for validating if the field's value is a valid RFC4122 UUID of any version.
+func isUUIDRFC4122(fl FieldLevel) bool {
+ return uUIDRFC4122Regex.MatchString(fl.Field().String())
+}
+
+// IsISBN is the validation function for validating if the field's value is a valid v10 or v13 ISBN.
+func isISBN(fl FieldLevel) bool {
+ return isISBN10(fl) || isISBN13(fl)
+}
+
+// IsISBN13 is the validation function for validating if the field's value is a valid v13 ISBN.
+func isISBN13(fl FieldLevel) bool {
+
+ s := strings.Replace(strings.Replace(fl.Field().String(), "-", "", 4), " ", "", 4)
+
+ if !iSBN13Regex.MatchString(s) {
+ return false
+ }
+
+ var checksum int32
+ var i int32
+
+ factor := []int32{1, 3}
+
+ for i = 0; i < 12; i++ {
+ checksum += factor[i%2] * int32(s[i]-'0')
+ }
+
+ return (int32(s[12]-'0'))-((10-(checksum%10))%10) == 0
+}
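+
+// Worked example (illustrative): for "9780306406157" the weighted sum of the
+// first 12 digits is 9+21+8+0+3+0+6+12+0+18+1+15 = 93, so the expected check
+// digit is (10-93%10)%10 = 7, which matches the last digit.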
+
+// IsISBN10 is the validation function for validating if the field's value is a valid v10 ISBN.
+func isISBN10(fl FieldLevel) bool {
+
+ s := strings.Replace(strings.Replace(fl.Field().String(), "-", "", 3), " ", "", 3)
+
+ if !iSBN10Regex.MatchString(s) {
+ return false
+ }
+
+ var checksum int32
+ var i int32
+
+ for i = 0; i < 9; i++ {
+ checksum += (i + 1) * int32(s[i]-'0')
+ }
+
+ if s[9] == 'X' {
+ checksum += 10 * 10
+ } else {
+ checksum += 10 * int32(s[9]-'0')
+ }
+
+ return checksum%11 == 0
+}
+
+// IsEthereumAddress is the validation function for validating if the field's value is a valid Ethereum address, currently based only on its format
+func isEthereumAddress(fl FieldLevel) bool {
+ address := fl.Field().String()
+
+ if !ethAddressRegex.MatchString(address) {
+ return false
+ }
+
+ if ethaddressRegexUpper.MatchString(address) || ethAddressRegexLower.MatchString(address) {
+ return true
+ }
+
+ // checksum validation is blocked by https://github.com/golang/crypto/pull/28
+
+ return true
+}
+
+// IsBitcoinAddress is the validation function for validating if the field's value is a valid btc address
+func isBitcoinAddress(fl FieldLevel) bool {
+ address := fl.Field().String()
+
+ if !btcAddressRegex.MatchString(address) {
+ return false
+ }
+
+ alphabet := []byte("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz")
+
+ decode := [25]byte{}
+
+ for _, n := range []byte(address) {
+ d := bytes.IndexByte(alphabet, n)
+
+ for i := 24; i >= 0; i-- {
+ d += 58 * int(decode[i])
+ decode[i] = byte(d % 256)
+ d /= 256
+ }
+ }
+
+ h := sha256.New()
+ _, _ = h.Write(decode[:21])
+ d := h.Sum([]byte{})
+ h = sha256.New()
+ _, _ = h.Write(d)
+
+ validchecksum := [4]byte{}
+ computedchecksum := [4]byte{}
+
+ copy(computedchecksum[:], h.Sum(d[:0]))
+ copy(validchecksum[:], decode[21:])
+
+ return validchecksum == computedchecksum
+}
+
+// IsBitcoinBech32Address is the validation function for validating if the field's value is a valid bech32 btc address
+func isBitcoinBech32Address(fl FieldLevel) bool {
+ address := fl.Field().String()
+
+ if !btcLowerAddressRegexBech32.MatchString(address) && !btcUpperAddressRegexBech32.MatchString(address) {
+ return false
+ }
+
+ am := len(address) % 8
+
+ if am == 0 || am == 3 || am == 5 {
+ return false
+ }
+
+ address = strings.ToLower(address)
+
+ alphabet := "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
+
+ hr := []int{3, 3, 0, 2, 3} // the checksum expansion of the human-readable part, which will always be "bc"
+ addr := address[3:]
+ dp := make([]int, 0, len(addr))
+
+ for _, c := range addr {
+ dp = append(dp, strings.IndexRune(alphabet, c))
+ }
+
+ ver := dp[0]
+
+ if ver < 0 || ver > 16 {
+ return false
+ }
+
+ if ver == 0 {
+ if len(address) != 42 && len(address) != 62 {
+ return false
+ }
+ }
+
+ values := append(hr, dp...)
+
+ GEN := []int{0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3}
+
+ p := 1
+
+ for _, v := range values {
+ b := p >> 25
+ p = (p&0x1ffffff)<<5 ^ v
+
+ for i := 0; i < 5; i++ {
+ if (b>>uint(i))&1 == 1 {
+ p ^= GEN[i]
+ }
+ }
+ }
+
+ if p != 1 {
+ return false
+ }
+
+ b := uint(0)
+ acc := 0
+ mv := (1 << 5) - 1
+ var sw []int
+
+ for _, v := range dp[1 : len(dp)-6] {
+ acc = (acc << 5) | v
+ b += 5
+ for b >= 8 {
+ b -= 8
+ sw = append(sw, (acc>>b)&mv)
+ }
+ }
+
+ if len(sw) < 2 || len(sw) > 40 {
+ return false
+ }
+
+ return true
+}
+
+// ExcludesRune is the validation function for validating that the field's value does not contain the rune specified within the param.
+func excludesRune(fl FieldLevel) bool {
+ return !containsRune(fl)
+}
+
+// ExcludesAll is the validation function for validating that the field's value does not contain any of the characters specified within the param.
+func excludesAll(fl FieldLevel) bool {
+ return !containsAny(fl)
+}
+
+// Excludes is the validation function for validating that the field's value does not contain the text specified within the param.
+func excludes(fl FieldLevel) bool {
+ return !contains(fl)
+}
+
+// ContainsRune is the validation function for validating that the field's value contains the rune specified within the param.
+func containsRune(fl FieldLevel) bool {
+
+ r, _ := utf8.DecodeRuneInString(fl.Param())
+
+ return strings.ContainsRune(fl.Field().String(), r)
+}
+
+// ContainsAny is the validation function for validating that the field's value contains any of the characters specified within the param.
+func containsAny(fl FieldLevel) bool {
+ return strings.ContainsAny(fl.Field().String(), fl.Param())
+}
+
+// Contains is the validation function for validating that the field's value contains the text specified within the param.
+func contains(fl FieldLevel) bool {
+ return strings.Contains(fl.Field().String(), fl.Param())
+}
+
+// StartsWith is the validation function for validating that the field's value starts with the text specified within the param.
+func startsWith(fl FieldLevel) bool {
+ return strings.HasPrefix(fl.Field().String(), fl.Param())
+}
+
+// EndsWith is the validation function for validating that the field's value ends with the text specified within the param.
+func endsWith(fl FieldLevel) bool {
+ return strings.HasSuffix(fl.Field().String(), fl.Param())
+}
+
+// FieldContains is the validation function for validating if the current field's value contains the field specified by the param's value.
+func fieldContains(fl FieldLevel) bool {
+ field := fl.Field()
+
+ currentField, _, ok := fl.GetStructFieldOK()
+
+ if !ok {
+ return false
+ }
+
+ return strings.Contains(field.String(), currentField.String())
+}
+
+// FieldExcludes is the validation function for validating if the current field's value excludes the field specified by the param's value.
+func fieldExcludes(fl FieldLevel) bool {
+ field := fl.Field()
+
+ currentField, _, ok := fl.GetStructFieldOK()
+ if !ok {
+ return true
+ }
+
+ return !strings.Contains(field.String(), currentField.String())
+}
+
+// IsNeField is the validation function for validating if the current field's value is not equal to the field specified by the param's value.
+func isNeField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ currentField, currentKind, ok := fl.GetStructFieldOK()
+
+ if !ok || currentKind != kind {
+ return true
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return field.Int() != currentField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return field.Uint() != currentField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+ return field.Float() != currentField.Float()
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ return int64(field.Len()) != int64(currentField.Len())
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != currentField.Type() {
+ return true
+ }
+
+ if fieldType == timeType {
+
+ t := currentField.Interface().(time.Time)
+ fieldTime := field.Interface().(time.Time)
+
+ return !fieldTime.Equal(t)
+ }
+
+ }
+
+ // default reflect.String:
+ return field.String() != currentField.String()
+}
+
+// IsNe is the validation function for validating that the field's value does not equal the provided param value.
+func isNe(fl FieldLevel) bool {
+ return !isEq(fl)
+}
+
+// IsLteCrossStructField is the validation function for validating if the current field's value is less than or equal to the field, within a separate struct, specified by the param's value.
+func isLteCrossStructField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ topField, topKind, ok := fl.GetStructFieldOK()
+ if !ok || topKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return field.Int() <= topField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return field.Uint() <= topField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+ return field.Float() <= topField.Float()
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ return int64(field.Len()) <= int64(topField.Len())
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != topField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ fieldTime := field.Interface().(time.Time)
+ topTime := topField.Interface().(time.Time)
+
+ return fieldTime.Before(topTime) || fieldTime.Equal(topTime)
+ }
+ }
+
+ // default reflect.String:
+ return field.String() <= topField.String()
+}
+
+// IsLtCrossStructField is the validation function for validating if the current field's value is less than the field, within a separate struct, specified by the param's value.
+// NOTE: This is exposed for use within your own custom functions and not intended to be called directly.
+func isLtCrossStructField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ topField, topKind, ok := fl.GetStructFieldOK()
+ if !ok || topKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return field.Int() < topField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return field.Uint() < topField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+ return field.Float() < topField.Float()
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ return int64(field.Len()) < int64(topField.Len())
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != topField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ fieldTime := field.Interface().(time.Time)
+ topTime := topField.Interface().(time.Time)
+
+ return fieldTime.Before(topTime)
+ }
+ }
+
+ // default reflect.String:
+ return field.String() < topField.String()
+}
+
+// IsGteCrossStructField is the validation function for validating if the current field's value is greater than or equal to the field, within a separate struct, specified by the param's value.
+func isGteCrossStructField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ topField, topKind, ok := fl.GetStructFieldOK()
+ if !ok || topKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return field.Int() >= topField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return field.Uint() >= topField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+ return field.Float() >= topField.Float()
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ return int64(field.Len()) >= int64(topField.Len())
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != topField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ fieldTime := field.Interface().(time.Time)
+ topTime := topField.Interface().(time.Time)
+
+ return fieldTime.After(topTime) || fieldTime.Equal(topTime)
+ }
+ }
+
+ // default reflect.String:
+ return field.String() >= topField.String()
+}
+
+// IsGtCrossStructField is the validation function for validating if the current field's value is greater than the field, within a separate struct, specified by the param's value.
+func isGtCrossStructField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ topField, topKind, ok := fl.GetStructFieldOK()
+ if !ok || topKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return field.Int() > topField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return field.Uint() > topField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+ return field.Float() > topField.Float()
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ return int64(field.Len()) > int64(topField.Len())
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != topField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ fieldTime := field.Interface().(time.Time)
+ topTime := topField.Interface().(time.Time)
+
+ return fieldTime.After(topTime)
+ }
+ }
+
+ // default reflect.String:
+ return field.String() > topField.String()
+}
+
+// IsNeCrossStructField is the validation function for validating that the current field's value is not equal to the field, within a separate struct, specified by the param's value.
+func isNeCrossStructField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ topField, currentKind, ok := fl.GetStructFieldOK()
+ if !ok || currentKind != kind {
+ return true
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return topField.Int() != field.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return topField.Uint() != field.Uint()
+
+ case reflect.Float32, reflect.Float64:
+ return topField.Float() != field.Float()
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ return int64(topField.Len()) != int64(field.Len())
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != topField.Type() {
+ return true
+ }
+
+ if fieldType == timeType {
+
+ t := field.Interface().(time.Time)
+ fieldTime := topField.Interface().(time.Time)
+
+ return !fieldTime.Equal(t)
+ }
+ }
+
+ // default reflect.String:
+ return topField.String() != field.String()
+}
+
+// IsEqCrossStructField is the validation function for validating that the current field's value is equal to the field, within a separate struct, specified by the param's value.
+func isEqCrossStructField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ topField, topKind, ok := fl.GetStructFieldOK()
+ if !ok || topKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return topField.Int() == field.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return topField.Uint() == field.Uint()
+
+ case reflect.Float32, reflect.Float64:
+ return topField.Float() == field.Float()
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ return int64(topField.Len()) == int64(field.Len())
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != topField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ t := field.Interface().(time.Time)
+ fieldTime := topField.Interface().(time.Time)
+
+ return fieldTime.Equal(t)
+ }
+ }
+
+ // default reflect.String:
+ return topField.String() == field.String()
+}
+
+// IsEqField is the validation function for validating if the current field's value is equal to the field specified by the param's value.
+func isEqField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ currentField, currentKind, ok := fl.GetStructFieldOK()
+ if !ok || currentKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return field.Int() == currentField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return field.Uint() == currentField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+ return field.Float() == currentField.Float()
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ return int64(field.Len()) == int64(currentField.Len())
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != currentField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ t := currentField.Interface().(time.Time)
+ fieldTime := field.Interface().(time.Time)
+
+ return fieldTime.Equal(t)
+ }
+
+ }
+
+ // default reflect.String:
+ return field.String() == currentField.String()
+}
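+
+// Illustrative tag usage for field comparison (hypothetical fields):
+//
+//	Password        string `validate:"required"`
+//	ConfirmPassword string `validate:"eqfield=Password"`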
+
+// IsEq is the validation function for validating if the current field's value is equal to the param's value.
+func isEq(fl FieldLevel) bool {
+
+ field := fl.Field()
+ param := fl.Param()
+
+ switch field.Kind() {
+
+ case reflect.String:
+ return field.String() == param
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ p := asInt(param)
+
+ return int64(field.Len()) == p
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ p := asInt(param)
+
+ return field.Int() == p
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ p := asUint(param)
+
+ return field.Uint() == p
+
+ case reflect.Float32, reflect.Float64:
+ p := asFloat(param)
+
+ return field.Float() == p
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
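+
+// Illustrative tag usage showing how the eq parameter is interpreted per kind
+// (hypothetical fields): for strings it compares the text, for numbers the
+// value, and for slices/maps/arrays the length:
+//
+//	Mode  string   `validate:"eq=fast"`
+//	Limit int      `validate:"eq=10"`
+//	Pair  []string `validate:"eq=2"` // exactly two elements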
+
+// IsBase64 is the validation function for validating if the current field's value is a valid base 64.
+func isBase64(fl FieldLevel) bool {
+ return base64Regex.MatchString(fl.Field().String())
+}
+
+// IsBase64URL is the validation function for validating if the current field's value is a valid base64 URL safe string.
+func isBase64URL(fl FieldLevel) bool {
+ return base64URLRegex.MatchString(fl.Field().String())
+}
+
+// IsURI is the validation function for validating if the current field's value is a valid URI.
+func isURI(fl FieldLevel) bool {
+
+ field := fl.Field()
+
+ switch field.Kind() {
+
+ case reflect.String:
+
+ s := field.String()
+
+ // checks needed as of Go 1.6 because of change https://github.com/golang/go/commit/617c93ce740c3c3cc28cdd1a0d712be183d0b328#diff-6c2d018290e298803c0c9419d8739885L195
+ // emulate browser and strip the '#' suffix prior to validation. see issue-#237
+ if i := strings.Index(s, "#"); i > -1 {
+ s = s[:i]
+ }
+
+ if len(s) == 0 {
+ return false
+ }
+
+ _, err := url.ParseRequestURI(s)
+
+ return err == nil
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
+
+// IsURL is the validation function for validating if the current field's value is a valid URL.
+func isURL(fl FieldLevel) bool {
+
+ field := fl.Field()
+
+ switch field.Kind() {
+
+ case reflect.String:
+
+ s := field.String()
+
+ // checks needed as of Go 1.6 because of change https://github.com/golang/go/commit/617c93ce740c3c3cc28cdd1a0d712be183d0b328#diff-6c2d018290e298803c0c9419d8739885L195
+ // emulate browser and strip the '#' suffix prior to validation. see issue-#237
+ if i := strings.Index(s, "#"); i > -1 {
+ s = s[:i]
+ }
+
+ if len(s) == 0 {
+ return false
+ }
+
+ u, err := url.ParseRequestURI(s)
+ if err != nil || u.Scheme == "" {
+ return false
+ }
+
+ return true
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
+
+// isUrnRFC2141 is the validation function for validating if the current field's value is a valid URN as per RFC 2141.
+func isUrnRFC2141(fl FieldLevel) bool {
+ field := fl.Field()
+
+ switch field.Kind() {
+
+ case reflect.String:
+
+ str := field.String()
+
+ _, match := urn.Parse([]byte(str))
+
+ return match
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
+
+// IsFile is the validation function for validating if the current field's value is a valid file path.
+func isFile(fl FieldLevel) bool {
+ field := fl.Field()
+
+ switch field.Kind() {
+ case reflect.String:
+ fileInfo, err := os.Stat(field.String())
+ if err != nil {
+ return false
+ }
+
+ return !fileInfo.IsDir()
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
+
+// IsE164 is the validation function for validating if the current field's value is a valid e.164 formatted phone number.
+func isE164(fl FieldLevel) bool {
+ return e164Regex.MatchString(fl.Field().String())
+}
+
+// IsEmail is the validation function for validating if the current field's value is a valid email address.
+func isEmail(fl FieldLevel) bool {
+ return emailRegex.MatchString(fl.Field().String())
+}
+
+// IsHSLA is the validation function for validating if the current field's value is a valid HSLA color.
+func isHSLA(fl FieldLevel) bool {
+ return hslaRegex.MatchString(fl.Field().String())
+}
+
+// IsHSL is the validation function for validating if the current field's value is a valid HSL color.
+func isHSL(fl FieldLevel) bool {
+ return hslRegex.MatchString(fl.Field().String())
+}
+
+// IsRGBA is the validation function for validating if the current field's value is a valid RGBA color.
+func isRGBA(fl FieldLevel) bool {
+ return rgbaRegex.MatchString(fl.Field().String())
+}
+
+// IsRGB is the validation function for validating if the current field's value is a valid RGB color.
+func isRGB(fl FieldLevel) bool {
+ return rgbRegex.MatchString(fl.Field().String())
+}
+
+// IsHEXColor is the validation function for validating if the current field's value is a valid HEX color.
+func isHEXColor(fl FieldLevel) bool {
+ return hexcolorRegex.MatchString(fl.Field().String())
+}
+
+// IsHexadecimal is the validation function for validating if the current field's value is a valid hexadecimal.
+func isHexadecimal(fl FieldLevel) bool {
+ return hexadecimalRegex.MatchString(fl.Field().String())
+}
+
+// IsNumber is the validation function for validating if the current field's value is a valid number.
+func isNumber(fl FieldLevel) bool {
+ switch fl.Field().Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64:
+ return true
+ default:
+ return numberRegex.MatchString(fl.Field().String())
+ }
+}
+
+// IsNumeric is the validation function for validating if the current field's value is a valid numeric value.
+func isNumeric(fl FieldLevel) bool {
+ switch fl.Field().Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64:
+ return true
+ default:
+ return numericRegex.MatchString(fl.Field().String())
+ }
+}
+
+// IsAlphanum is the validation function for validating if the current field's value is a valid alphanumeric value.
+func isAlphanum(fl FieldLevel) bool {
+ return alphaNumericRegex.MatchString(fl.Field().String())
+}
+
+// IsAlpha is the validation function for validating if the current field's value is a valid alpha value.
+func isAlpha(fl FieldLevel) bool {
+ return alphaRegex.MatchString(fl.Field().String())
+}
+
+// IsAlphanumUnicode is the validation function for validating if the current field's value is a valid alphanumeric unicode value.
+func isAlphanumUnicode(fl FieldLevel) bool {
+ return alphaUnicodeNumericRegex.MatchString(fl.Field().String())
+}
+
+// IsAlphaUnicode is the validation function for validating if the current field's value is a valid alpha unicode value.
+func isAlphaUnicode(fl FieldLevel) bool {
+ return alphaUnicodeRegex.MatchString(fl.Field().String())
+}
+
+// isDefault is the opposite of required aka hasValue
+func isDefault(fl FieldLevel) bool {
+ return !hasValue(fl)
+}
+
+// HasValue is the validation function for validating if the current field's value is not the default static value.
+func hasValue(fl FieldLevel) bool {
+ field := fl.Field()
+ switch field.Kind() {
+ case reflect.Slice, reflect.Map, reflect.Ptr, reflect.Interface, reflect.Chan, reflect.Func:
+ return !field.IsNil()
+ default:
+ if fl.(*validate).fldIsPointer && field.Interface() != nil {
+ return true
+ }
+ return field.IsValid() && field.Interface() != reflect.Zero(field.Type()).Interface()
+ }
+}
+
+// requireCheckFieldKind checks whether the field named by param (or the current field when param is empty) is nil or the zero value; defaultNotFoundValue is returned when the field cannot be found.
+func requireCheckFieldKind(fl FieldLevel, param string, defaultNotFoundValue bool) bool {
+ field := fl.Field()
+ kind := field.Kind()
+ var nullable, found bool
+ if len(param) > 0 {
+ field, kind, nullable, found = fl.GetStructFieldOKAdvanced2(fl.Parent(), param)
+ if !found {
+ return defaultNotFoundValue
+ }
+ }
+ switch kind {
+ case reflect.Invalid:
+ return defaultNotFoundValue
+ case reflect.Slice, reflect.Map, reflect.Ptr, reflect.Interface, reflect.Chan, reflect.Func:
+ return field.IsNil()
+ default:
+ if nullable && field.Interface() != nil {
+ return false
+ }
+ return field.IsValid() && field.Interface() == reflect.Zero(field.Type()).Interface()
+ }
+}
+
+// RequiredWith is the validation function
+// The field under validation must be present and not empty only if any of the other specified fields are present.
+func requiredWith(fl FieldLevel) bool {
+ params := parseOneOfParam2(fl.Param())
+ for _, param := range params {
+ if !requireCheckFieldKind(fl, param, true) {
+ return hasValue(fl)
+ }
+ }
+ return true
+}
+
+// RequiredWithAll is the validation function
+// The field under validation must be present and not empty only if all of the other specified fields are present.
+func requiredWithAll(fl FieldLevel) bool {
+ params := parseOneOfParam2(fl.Param())
+ for _, param := range params {
+ if requireCheckFieldKind(fl, param, true) {
+ return true
+ }
+ }
+ return hasValue(fl)
+}
+
+// RequiredWithout is the validation function
+// The field under validation must be present and not empty only when any of the other specified fields are not present.
+func requiredWithout(fl FieldLevel) bool {
+ if requireCheckFieldKind(fl, strings.TrimSpace(fl.Param()), true) {
+ return hasValue(fl)
+ }
+ return true
+}
+
+// RequiredWithoutAll is the validation function
+// The field under validation must be present and not empty only when all of the other specified fields are not present.
+func requiredWithoutAll(fl FieldLevel) bool {
+ params := parseOneOfParam2(fl.Param())
+ for _, param := range params {
+ if !requireCheckFieldKind(fl, param, true) {
+ return true
+ }
+ }
+ return hasValue(fl)
+}
+
+// IsGteField is the validation function for validating if the current field's value is greater than or equal to the field specified by the param's value.
+func isGteField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ currentField, currentKind, ok := fl.GetStructFieldOK()
+ if !ok || currentKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+
+ return field.Int() >= currentField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+
+ return field.Uint() >= currentField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+
+ return field.Float() >= currentField.Float()
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != currentField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ t := currentField.Interface().(time.Time)
+ fieldTime := field.Interface().(time.Time)
+
+ return fieldTime.After(t) || fieldTime.Equal(t)
+ }
+ }
+
+ // default reflect.String
+ return len(field.String()) >= len(currentField.String())
+}
+
+// IsGtField is the validation function for validating if the current field's value is greater than the field specified by the param's value.
+func isGtField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ currentField, currentKind, ok := fl.GetStructFieldOK()
+ if !ok || currentKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+
+ return field.Int() > currentField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+
+ return field.Uint() > currentField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+
+ return field.Float() > currentField.Float()
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != currentField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ t := currentField.Interface().(time.Time)
+ fieldTime := field.Interface().(time.Time)
+
+ return fieldTime.After(t)
+ }
+ }
+
+ // default reflect.String
+ return len(field.String()) > len(currentField.String())
+}
+
+// IsGte is the validation function for validating if the current field's value is greater than or equal to the param's value.
+func isGte(fl FieldLevel) bool {
+
+ field := fl.Field()
+ param := fl.Param()
+
+ switch field.Kind() {
+
+ case reflect.String:
+ p := asInt(param)
+
+ return int64(utf8.RuneCountInString(field.String())) >= p
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ p := asInt(param)
+
+ return int64(field.Len()) >= p
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ p := asInt(param)
+
+ return field.Int() >= p
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ p := asUint(param)
+
+ return field.Uint() >= p
+
+ case reflect.Float32, reflect.Float64:
+ p := asFloat(param)
+
+ return field.Float() >= p
+
+ case reflect.Struct:
+
+ if field.Type() == timeType {
+
+ now := time.Now().UTC()
+ t := field.Interface().(time.Time)
+
+ return t.After(now) || t.Equal(now)
+ }
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
+
+// IsGt is the validation function for validating if the current field's value is greater than the param's value.
+func isGt(fl FieldLevel) bool {
+
+ field := fl.Field()
+ param := fl.Param()
+
+ switch field.Kind() {
+
+ case reflect.String:
+ p := asInt(param)
+
+ return int64(utf8.RuneCountInString(field.String())) > p
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ p := asInt(param)
+
+ return int64(field.Len()) > p
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ p := asInt(param)
+
+ return field.Int() > p
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ p := asUint(param)
+
+ return field.Uint() > p
+
+ case reflect.Float32, reflect.Float64:
+ p := asFloat(param)
+
+ return field.Float() > p
+ case reflect.Struct:
+
+ if field.Type() == timeType {
+
+ return field.Interface().(time.Time).After(time.Now().UTC())
+ }
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
+
+// HasLengthOf is the validation function for validating if the current field's value is equal to the param's value.
+func hasLengthOf(fl FieldLevel) bool {
+
+ field := fl.Field()
+ param := fl.Param()
+
+ switch field.Kind() {
+
+ case reflect.String:
+ p := asInt(param)
+
+ return int64(utf8.RuneCountInString(field.String())) == p
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ p := asInt(param)
+
+ return int64(field.Len()) == p
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ p := asInt(param)
+
+ return field.Int() == p
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ p := asUint(param)
+
+ return field.Uint() == p
+
+ case reflect.Float32, reflect.Float64:
+ p := asFloat(param)
+
+ return field.Float() == p
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
+
+// HasMinOf is the validation function for validating if the current field's value is greater than or equal to the param's value.
+func hasMinOf(fl FieldLevel) bool {
+ return isGte(fl)
+}
+
+// IsLteField is the validation function for validating if the current field's value is less than or equal to the field specified by the param's value.
+func isLteField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ currentField, currentKind, ok := fl.GetStructFieldOK()
+ if !ok || currentKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+
+ return field.Int() <= currentField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+
+ return field.Uint() <= currentField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+
+ return field.Float() <= currentField.Float()
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != currentField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ t := currentField.Interface().(time.Time)
+ fieldTime := field.Interface().(time.Time)
+
+ return fieldTime.Before(t) || fieldTime.Equal(t)
+ }
+ }
+
+ // default reflect.String
+ return len(field.String()) <= len(currentField.String())
+}
+
+// IsLtField is the validation function for validating if the current field's value is less than the field specified by the param's value.
+func isLtField(fl FieldLevel) bool {
+
+ field := fl.Field()
+ kind := field.Kind()
+
+ currentField, currentKind, ok := fl.GetStructFieldOK()
+ if !ok || currentKind != kind {
+ return false
+ }
+
+ switch kind {
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+
+ return field.Int() < currentField.Int()
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+
+ return field.Uint() < currentField.Uint()
+
+ case reflect.Float32, reflect.Float64:
+
+ return field.Float() < currentField.Float()
+
+ case reflect.Struct:
+
+ fieldType := field.Type()
+
+ // Not Same underlying type i.e. struct and time
+ if fieldType != currentField.Type() {
+ return false
+ }
+
+ if fieldType == timeType {
+
+ t := currentField.Interface().(time.Time)
+ fieldTime := field.Interface().(time.Time)
+
+ return fieldTime.Before(t)
+ }
+ }
+
+ // default reflect.String
+ return len(field.String()) < len(currentField.String())
+}
+
+// IsLte is the validation function for validating if the current field's value is less than or equal to the param's value.
+func isLte(fl FieldLevel) bool {
+
+ field := fl.Field()
+ param := fl.Param()
+
+ switch field.Kind() {
+
+ case reflect.String:
+ p := asInt(param)
+
+ return int64(utf8.RuneCountInString(field.String())) <= p
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ p := asInt(param)
+
+ return int64(field.Len()) <= p
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ p := asInt(param)
+
+ return field.Int() <= p
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ p := asUint(param)
+
+ return field.Uint() <= p
+
+ case reflect.Float32, reflect.Float64:
+ p := asFloat(param)
+
+ return field.Float() <= p
+
+ case reflect.Struct:
+
+ if field.Type() == timeType {
+
+ now := time.Now().UTC()
+ t := field.Interface().(time.Time)
+
+ return t.Before(now) || t.Equal(now)
+ }
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
+
+// IsLt is the validation function for validating if the current field's value is less than the param's value.
+func isLt(fl FieldLevel) bool {
+
+ field := fl.Field()
+ param := fl.Param()
+
+ switch field.Kind() {
+
+ case reflect.String:
+ p := asInt(param)
+
+ return int64(utf8.RuneCountInString(field.String())) < p
+
+ case reflect.Slice, reflect.Map, reflect.Array:
+ p := asInt(param)
+
+ return int64(field.Len()) < p
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ p := asInt(param)
+
+ return field.Int() < p
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ p := asUint(param)
+
+ return field.Uint() < p
+
+ case reflect.Float32, reflect.Float64:
+ p := asFloat(param)
+
+ return field.Float() < p
+
+ case reflect.Struct:
+
+ if field.Type() == timeType {
+
+ return field.Interface().(time.Time).Before(time.Now().UTC())
+ }
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
+
+// HasMaxOf is the validation function for validating if the current field's value is less than or equal to the param's value.
+func hasMaxOf(fl FieldLevel) bool {
+ return isLte(fl)
+}
+
+// IsTCP4AddrResolvable is the validation function for validating if the field's value is a resolvable tcp4 address.
+func isTCP4AddrResolvable(fl FieldLevel) bool {
+
+ if !isIP4Addr(fl) {
+ return false
+ }
+
+ _, err := net.ResolveTCPAddr("tcp4", fl.Field().String())
+ return err == nil
+}
+
+// IsTCP6AddrResolvable is the validation function for validating if the field's value is a resolvable tcp6 address.
+func isTCP6AddrResolvable(fl FieldLevel) bool {
+
+ if !isIP6Addr(fl) {
+ return false
+ }
+
+ _, err := net.ResolveTCPAddr("tcp6", fl.Field().String())
+
+ return err == nil
+}
+
+// IsTCPAddrResolvable is the validation function for validating if the field's value is a resolvable tcp address.
+func isTCPAddrResolvable(fl FieldLevel) bool {
+
+ if !isIP4Addr(fl) && !isIP6Addr(fl) {
+ return false
+ }
+
+ _, err := net.ResolveTCPAddr("tcp", fl.Field().String())
+
+ return err == nil
+}
+
+// IsUDP4AddrResolvable is the validation function for validating if the field's value is a resolvable udp4 address.
+func isUDP4AddrResolvable(fl FieldLevel) bool {
+
+ if !isIP4Addr(fl) {
+ return false
+ }
+
+ _, err := net.ResolveUDPAddr("udp4", fl.Field().String())
+
+ return err == nil
+}
+
+// IsUDP6AddrResolvable is the validation function for validating if the field's value is a resolvable udp6 address.
+func isUDP6AddrResolvable(fl FieldLevel) bool {
+
+ if !isIP6Addr(fl) {
+ return false
+ }
+
+ _, err := net.ResolveUDPAddr("udp6", fl.Field().String())
+
+ return err == nil
+}
+
+// IsUDPAddrResolvable is the validation function for validating if the field's value is a resolvable udp address.
+func isUDPAddrResolvable(fl FieldLevel) bool {
+
+ if !isIP4Addr(fl) && !isIP6Addr(fl) {
+ return false
+ }
+
+ _, err := net.ResolveUDPAddr("udp", fl.Field().String())
+
+ return err == nil
+}
+
+// IsIP4AddrResolvable is the validation function for validating if the field's value is a resolvable ip4 address.
+func isIP4AddrResolvable(fl FieldLevel) bool {
+
+ if !isIPv4(fl) {
+ return false
+ }
+
+ _, err := net.ResolveIPAddr("ip4", fl.Field().String())
+
+ return err == nil
+}
+
+// IsIP6AddrResolvable is the validation function for validating if the field's value is a resolvable ip6 address.
+func isIP6AddrResolvable(fl FieldLevel) bool {
+
+ if !isIPv6(fl) {
+ return false
+ }
+
+ _, err := net.ResolveIPAddr("ip6", fl.Field().String())
+
+ return err == nil
+}
+
+// IsIPAddrResolvable is the validation function for validating if the field's value is a resolvable ip address.
+func isIPAddrResolvable(fl FieldLevel) bool {
+
+ if !isIP(fl) {
+ return false
+ }
+
+ _, err := net.ResolveIPAddr("ip", fl.Field().String())
+
+ return err == nil
+}
+
+// IsUnixAddrResolvable is the validation function for validating if the field's value is a resolvable unix address.
+func isUnixAddrResolvable(fl FieldLevel) bool {
+
+ _, err := net.ResolveUnixAddr("unix", fl.Field().String())
+
+ return err == nil
+}
+
+func isIP4Addr(fl FieldLevel) bool {
+
+ val := fl.Field().String()
+
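+	// strip a trailing ":port" so "host:port" forms are validated on the host part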
+ if idx := strings.LastIndex(val, ":"); idx != -1 {
+ val = val[0:idx]
+ }
+
+ ip := net.ParseIP(val)
+
+ return ip != nil && ip.To4() != nil
+}
+
+func isIP6Addr(fl FieldLevel) bool {
+
+ val := fl.Field().String()
+
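+	// strip the port and surrounding brackets from a "[host]:port" form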
+ if idx := strings.LastIndex(val, ":"); idx != -1 {
+ if idx != 0 && val[idx-1:idx] == "]" {
+ val = val[1 : idx-1]
+ }
+ }
+
+ ip := net.ParseIP(val)
+
+ return ip != nil && ip.To4() == nil
+}
+
+func isHostnameRFC952(fl FieldLevel) bool {
+ return hostnameRegexRFC952.MatchString(fl.Field().String())
+}
+
+func isHostnameRFC1123(fl FieldLevel) bool {
+ return hostnameRegexRFC1123.MatchString(fl.Field().String())
+}
+
+func isFQDN(fl FieldLevel) bool {
+ val := fl.Field().String()
+
+ if val == "" {
+ return false
+ }
+
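+	// strip a single trailing dot, as in the canonical FQDN form "example.com."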
+ if val[len(val)-1] == '.' {
+ val = val[0 : len(val)-1]
+ }
+
+ return strings.ContainsAny(val, ".") &&
+ hostnameRegexRFC952.MatchString(val)
+}
+
+// IsDir is the validation function for validating if the current field's value is a valid directory.
+func isDir(fl FieldLevel) bool {
+ field := fl.Field()
+
+ if field.Kind() == reflect.String {
+ fileInfo, err := os.Stat(field.String())
+ if err != nil {
+ return false
+ }
+
+ return fileInfo.IsDir()
+ }
+
+ panic(fmt.Sprintf("Bad field type %T", field.Interface()))
+}
diff --git a/vendor/github.com/go-playground/validator/cache.go b/vendor/github.com/go-playground/validator/cache.go
new file mode 100644
index 0000000..0d18d6e
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/cache.go
@@ -0,0 +1,322 @@
+package validator
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+ "sync"
+ "sync/atomic"
+)
+
+type tagType uint8
+
+const (
+ typeDefault tagType = iota
+ typeOmitEmpty
+ typeIsDefault
+ typeNoStructLevel
+ typeStructOnly
+ typeDive
+ typeOr
+ typeKeys
+ typeEndKeys
+)
+
+const (
+ invalidValidation = "Invalid validation tag on field '%s'"
+ undefinedValidation = "Undefined validation function '%s' on field '%s'"
+ keysTagNotDefined = "'" + endKeysTag + "' tag encountered without a corresponding '" + keysTag + "' tag"
+)
+
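+// structCache is a read-mostly, copy-on-write cache: Get loads the current map
+// from the atomic.Value without locking, while Set publishes a fresh copy of the
+// map with the new entry added. Writers are serialized by the lock field, which
+// callers acquire before calling Set (see extractStructCache). tagCache below
+// follows the same pattern.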
+type structCache struct {
+ lock sync.Mutex
+ m atomic.Value // map[reflect.Type]*cStruct
+}
+
+func (sc *structCache) Get(key reflect.Type) (c *cStruct, found bool) {
+ c, found = sc.m.Load().(map[reflect.Type]*cStruct)[key]
+ return
+}
+
+func (sc *structCache) Set(key reflect.Type, value *cStruct) {
+ m := sc.m.Load().(map[reflect.Type]*cStruct)
+ nm := make(map[reflect.Type]*cStruct, len(m)+1)
+ for k, v := range m {
+ nm[k] = v
+ }
+ nm[key] = value
+ sc.m.Store(nm)
+}
+
+type tagCache struct {
+ lock sync.Mutex
+ m atomic.Value // map[string]*cTag
+}
+
+func (tc *tagCache) Get(key string) (c *cTag, found bool) {
+ c, found = tc.m.Load().(map[string]*cTag)[key]
+ return
+}
+
+func (tc *tagCache) Set(key string, value *cTag) {
+ m := tc.m.Load().(map[string]*cTag)
+ nm := make(map[string]*cTag, len(m)+1)
+ for k, v := range m {
+ nm[k] = v
+ }
+ nm[key] = value
+ tc.m.Store(nm)
+}
+
+type cStruct struct {
+ name string
+ fields []*cField
+ fn StructLevelFuncCtx
+}
+
+type cField struct {
+ idx int
+ name string
+ altName string
+ namesEqual bool
+ cTags *cTag
+}
+
+type cTag struct {
+ tag string
+ aliasTag string
+ actualAliasTag string
+ param string
+ keys *cTag // only populated when using tag's 'keys' and 'endkeys' for map key validation
+ next *cTag
+ fn FuncCtx
+ typeof tagType
+ hasTag bool
+ hasAlias bool
+	hasParam             bool // true if a parameter was provided, i.e. the '=' in 'eq=10' was present
+ isBlockEnd bool // indicates the current tag represents the last validation in the block
+ runValidationWhenNil bool
+}
+
+func (v *Validate) extractStructCache(current reflect.Value, sName string) *cStruct {
+ v.structCache.lock.Lock()
+ defer v.structCache.lock.Unlock() // leave as defer! because if inner panics, it will never get unlocked otherwise!
+
+ typ := current.Type()
+
+	// multiple goroutines could have been waiting to parse; once the first one
+	// finishes, this check ensures the struct isn't parsed again.
+ cs, ok := v.structCache.Get(typ)
+ if ok {
+ return cs
+ }
+
+ cs = &cStruct{name: sName, fields: make([]*cField, 0), fn: v.structLevelFuncs[typ]}
+
+ numFields := current.NumField()
+
+ var ctag *cTag
+ var fld reflect.StructField
+ var tag string
+ var customName string
+
+ for i := 0; i < numFields; i++ {
+
+ fld = typ.Field(i)
+
+ if !fld.Anonymous && len(fld.PkgPath) > 0 {
+ continue
+ }
+
+ tag = fld.Tag.Get(v.tagName)
+
+ if tag == skipValidationTag {
+ continue
+ }
+
+ customName = fld.Name
+
+ if v.hasTagNameFunc {
+ name := v.tagNameFunc(fld)
+ if len(name) > 0 {
+ customName = name
+ }
+ }
+
+		// NOTE: the shared tag cache cannot be used here, because tags may be equal while
+		// things like the alias differ; therefore only struct-level caching can be used,
+		// rather than combining it with field tag caching.
+
+ if len(tag) > 0 {
+ ctag, _ = v.parseFieldTagsRecursive(tag, fld.Name, "", false)
+ } else {
+ // even if field doesn't have validations need cTag for traversing to potential inner/nested
+ // elements of the field.
+ ctag = new(cTag)
+ }
+
+ cs.fields = append(cs.fields, &cField{
+ idx: i,
+ name: fld.Name,
+ altName: customName,
+ cTags: ctag,
+ namesEqual: fld.Name == customName,
+ })
+ }
+ v.structCache.Set(typ, cs)
+ return cs
+}
+
+func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias string, hasAlias bool) (firstCtag *cTag, current *cTag) {
+ var t string
+ noAlias := len(alias) == 0
+ tags := strings.Split(tag, tagSeparator)
+
+ for i := 0; i < len(tags); i++ {
+ t = tags[i]
+ if noAlias {
+ alias = t
+ }
+
+ // check map for alias and process new tags, otherwise process as usual
+ if tagsVal, found := v.aliases[t]; found {
+ if i == 0 {
+ firstCtag, current = v.parseFieldTagsRecursive(tagsVal, fieldName, t, true)
+ } else {
+ next, curr := v.parseFieldTagsRecursive(tagsVal, fieldName, t, true)
+ current.next, current = next, curr
+
+ }
+ continue
+ }
+
+ var prevTag tagType
+
+ if i == 0 {
+ current = &cTag{aliasTag: alias, hasAlias: hasAlias, hasTag: true, typeof: typeDefault}
+ firstCtag = current
+ } else {
+ prevTag = current.typeof
+ current.next = &cTag{aliasTag: alias, hasAlias: hasAlias, hasTag: true}
+ current = current.next
+ }
+
+ switch t {
+ case diveTag:
+ current.typeof = typeDive
+ continue
+
+ case keysTag:
+ current.typeof = typeKeys
+
+ if i == 0 || prevTag != typeDive {
+ panic(fmt.Sprintf("'%s' tag must be immediately preceded by the '%s' tag", keysTag, diveTag))
+ }
+
+ // need to pass along only keys tag
+ // need to increment i to skip over the keys tags
+ b := make([]byte, 0, 64)
+
+ i++
+
+ for ; i < len(tags); i++ {
+
+ b = append(b, tags[i]...)
+ b = append(b, ',')
+
+ if tags[i] == endKeysTag {
+ break
+ }
+ }
+
+ current.keys, _ = v.parseFieldTagsRecursive(string(b[:len(b)-1]), fieldName, "", false)
+ continue
+
+ case endKeysTag:
+ current.typeof = typeEndKeys
+
+			// endkeys must be the last tag parsed within a keys block; if more tags
+			// follow, there was no matching keysTag defined and an error should be thrown
+ if i != len(tags)-1 {
+ panic(keysTagNotDefined)
+ }
+ return
+
+ case omitempty:
+ current.typeof = typeOmitEmpty
+ continue
+
+ case structOnlyTag:
+ current.typeof = typeStructOnly
+ continue
+
+ case noStructLevelTag:
+ current.typeof = typeNoStructLevel
+ continue
+
+ default:
+ if t == isdefault {
+ current.typeof = typeIsDefault
+ }
+ // if a pipe character is needed within the param you must use the utf8Pipe representation "0x7C"
+ orVals := strings.Split(t, orSeparator)
+
+ for j := 0; j < len(orVals); j++ {
+ vals := strings.SplitN(orVals[j], tagKeySeparator, 2)
+ if noAlias {
+ alias = vals[0]
+ current.aliasTag = alias
+ } else {
+ current.actualAliasTag = t
+ }
+
+ if j > 0 {
+ current.next = &cTag{aliasTag: alias, actualAliasTag: current.actualAliasTag, hasAlias: hasAlias, hasTag: true}
+ current = current.next
+ }
+ current.hasParam = len(vals) > 1
+
+ current.tag = vals[0]
+ if len(current.tag) == 0 {
+ panic(strings.TrimSpace(fmt.Sprintf(invalidValidation, fieldName)))
+ }
+
+ if wrapper, ok := v.validations[current.tag]; ok {
+ current.fn = wrapper.fn
+ current.runValidationWhenNil = wrapper.runValidatinOnNil
+ } else {
+ panic(strings.TrimSpace(fmt.Sprintf(undefinedValidation, current.tag, fieldName)))
+ }
+
+ if len(orVals) > 1 {
+ current.typeof = typeOr
+ }
+
+ if len(vals) > 1 {
+ current.param = strings.Replace(strings.Replace(vals[1], utf8HexComma, ",", -1), utf8Pipe, "|", -1)
+ }
+ }
+ current.isBlockEnd = true
+ }
+ }
+ return
+}
+
+func (v *Validate) fetchCacheTag(tag string) *cTag {
+ // find cached tag
+ ctag, found := v.tagCache.Get(tag)
+ if !found {
+ v.tagCache.lock.Lock()
+ defer v.tagCache.lock.Unlock()
+
+		// multiple goroutines could have been waiting to parse; once the first one
+		// finishes, this check ensures the tag isn't parsed again.
+ ctag, found = v.tagCache.Get(tag)
+ if !found {
+ ctag, _ = v.parseFieldTagsRecursive(tag, "", "", false)
+ v.tagCache.Set(tag, ctag)
+ }
+ }
+ return ctag
+}
diff --git a/vendor/github.com/go-playground/validator/doc.go b/vendor/github.com/go-playground/validator/doc.go
new file mode 100644
index 0000000..7ad9dea
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/doc.go
@@ -0,0 +1,1111 @@
+/*
+Package validator implements value validations for structs and individual fields
+based on tags.
+
+It can also handle Cross-Field and Cross-Struct validation for nested structs
+and has the ability to dive into arrays and maps of any type.
+
+See more examples at https://github.com/go-playground/validator/tree/v9/_examples
+
+Validation Functions Return Type error
+
+Doing things this way is actually how the standard library does it as well; see
+the os.Open function here:
+
+ https://golang.org/pkg/os/#Open.
+
+The authors return the type "error" to avoid the issue discussed in the following,
+where err is always != nil:
+
+ http://stackoverflow.com/a/29138676/3158232
+ https://github.com/go-playground/validator/issues/134
+
+Validator returns only InvalidValidationError for bad validation input, nil, or
+ValidationErrors as type error; so, in your code all you need to do is check
+if the error returned is not nil, and if it's not, check whether the error is an
+InvalidValidationError (if necessary; most of the time it isn't) and type assert
+it to type ValidationErrors like so: err.(validator.ValidationErrors).
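+
+For example (assuming an existing validate instance and a struct value mystruct):
+
+ err := validate.Struct(mystruct)
+ validationErrors := err.(validator.ValidationErrors)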
+
+Custom Validation Functions
+
+Custom Validation functions can be added. Example:
+
+ // Structure
+ func customFunc(fl validator.FieldLevel) bool {
+
+ if fl.Field().String() == "invalid" {
+ return false
+ }
+
+ return true
+ }
+
+ validate.RegisterValidation("custom tag name", customFunc)
+ // NOTES: using the same tag name as an existing function
+ // will overwrite the existing one
+
+Cross-Field Validation
+
+Cross-Field Validation can be done via the following tags:
+ - eqfield
+ - nefield
+ - gtfield
+ - gtefield
+ - ltfield
+ - ltefield
+ - eqcsfield
+ - necsfield
+ - gtcsfield
+ - gtecsfield
+ - ltcsfield
+ - ltecsfield
+
+If, however, some custom cross-field validation is required, it can be done
+using a custom validation.
+
+Why not just have cross-struct validation tags (i.e. only eqcsfield and not
+eqfield)?
+
+The reason is efficiency. If you want to check a field within the same struct,
+"eqfield" only has to find the field on the same struct (1 level). But, if we
+used "eqcsfield" it could be multiple levels down. Example:
+
+ type Inner struct {
+ StartDate time.Time
+ }
+
+ type Outer struct {
+ InnerStructField *Inner
+ CreatedAt time.Time `validate:"ltecsfield=InnerStructField.StartDate"`
+ }
+
+ now := time.Now()
+
+ inner := &Inner{
+ StartDate: now,
+ }
+
+ outer := &Outer{
+ InnerStructField: inner,
+ CreatedAt: now,
+ }
+
+ errs := validate.Struct(outer)
+
+ // NOTE: when calling validate.Struct(val) topStruct will be the top level struct passed
+ // into the function
+ // when calling validate.VarWithValue(val, field, tag) val will be
+ // whatever you pass, struct, field...
+ // when calling validate.Field(field, tag) val will be nil
+
+Multiple Validators
+
+Multiple validators on a field will process in the order defined. Example:
+
+ type Test struct {
+ Field `validate:"max=10,min=1"`
+ }
+
+ // max will be checked then min
+
+Bad Validator definitions are not handled by the library. Example:
+
+ type Test struct {
+ Field `validate:"min=10,max=0"`
+ }
+
+ // this definition of min and max will never succeed
+
+Using Validator Tags
+
+Baked In Cross-Field validation only compares fields on the same struct.
+If Cross-Field + Cross-Struct validation is needed you should implement your
+own custom validator.
+
+Comma (",") is the default separator of validation tags. If you wish to
+have a comma included within the parameter (i.e. excludesall=,) you will need to
+use the UTF-8 hex representation 0x2C, which is replaced in the code as a comma,
+so the above will become excludesall=0x2C.
+
+ type Test struct {
+ Field `validate:"excludesall=,"` // BAD! Do not include a comma.
+ Field `validate:"excludesall=0x2C"` // GOOD! Use the UTF-8 hex representation.
+ }
+
+Pipe ("|") is the 'or' validation tags deparator. If you wish to
+have a pipe included within the parameter i.e. excludesall=| you will need to
+use the UTF-8 hex representation 0x7C, which is replaced in the code as a pipe,
+so the above will become excludesall=0x7C
+
+ type Test struct {
+ Field `validate:"excludesall=|"` // BAD! Do not include a a pipe!
+ Field `validate:"excludesall=0x7C"` // GOOD! Use the UTF-8 hex representation.
+ }
+
+
+Baked In Validators and Tags
+
+Here is a list of the current built in validators:
+
+
+Skip Field
+
+Tells the validation to skip this struct field; this is particularly
+handy in ignoring embedded structs from being validated. (Usage: -)
+ Usage: -
+
+
+Or Operator
+
+This is the 'or' operator allowing multiple validators to be used and
+accepted. (Usage: rgb|rgba) <-- this would allow either rgb or rgba
+colors to be accepted. This can also be combined with 'and', for example
+(Usage: omitempty,rgb|rgba)
+
+ Usage: |
+
+StructOnly
+
+When a field that is a nested struct is encountered and contains this flag,
+any validation on the nested struct will be run, but none of the nested
+struct fields will be validated. This is useful if inside of your program
+you know the struct will be valid, but need to verify it has been assigned.
+NOTE: only "required" and "omitempty" can be used on a struct itself.
+
+ Usage: structonly
+
+NoStructLevel
+
+Same as structonly tag except that any struct level validations will not run.
+
+ Usage: nostructlevel
+
+Omit Empty
+
+Allows conditional validation: if a field is not set with a value (determined
+by the "required" validator) then other validation such as min or max won't
+run, but if a value is set validation will run.
+
+ Usage: omitempty
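+
+For example, a sketch of a field that is only validated when set (struct and
+field names are illustrative only):
+
+ type User struct {
+     Nickname string `validate:"omitempty,min=3"`
+ }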
+
+Dive
+
+This tells the validator to dive into a slice, array or map and validate that
+level of the slice, array or map with the validation tags that follow.
+Multidimensional nesting is also supported; each level you wish to dive into
+will require another dive tag. dive has some sub-tags, 'keys' & 'endkeys'; please see
+the Keys & EndKeys section just below.
+
+ Usage: dive
+
+Example #1
+
+ [][]string with validation tag "gt=0,dive,len=1,dive,required"
+ // gt=0 will be applied to []
+ // len=1 will be applied to []string
+ // required will be applied to string
+
+Example #2
+
+ [][]string with validation tag "gt=0,dive,dive,required"
+ // gt=0 will be applied to []
+ // []string will be spared validation
+ // required will be applied to string
+
+Keys & EndKeys
+
+These are to be used together directly after the dive tag and tell the validator
+that anything between 'keys' and 'endkeys' applies to the keys of a map and not the
+values; think of it like the 'dive' tag, but for map keys instead of values.
+Multidimensional nesting is also supported; each level you wish to validate will
+require another 'keys' and 'endkeys' tag. These tags are only valid for maps.
+
+ Usage: dive,keys,othertagvalidation(s),endkeys,valuevalidationtags
+
+Example #1
+
+ map[string]string with validation tag "gt=0,dive,keys,eq=1|eq=2,endkeys,required"
+ // gt=0 will be applied to the map itself
+ // eq=1|eq=2 will be applied to the map keys
+ // required will be applied to map values
+
+Example #2
+
+ map[[2]string]string with validation tag "gt=0,dive,keys,dive,eq=1|eq=2,endkeys,required"
+ // gt=0 will be applied to the map itself
+ // eq=1|eq=2 will be applied to each array element in the map keys
+ // required will be applied to map values
+
+Required
+
+This validates that the value is not the data type's default zero value.
+For numbers ensures value is not zero. For strings ensures value is
+not "". For slices, maps, pointers, interfaces, channels and functions
+ensures the value is not nil.
+
+ Usage: required
+
+Required With
+
+The field under validation must be present and not empty only if any
+of the other specified fields are present. For strings ensures value is
+not "". For slices, maps, pointers, interfaces, channels and functions
+ensures the value is not nil.
+
+ Usage: required_with
+
+Examples:
+
+ // require the field if Field1 is present:
+ Usage: required_with=Field1
+
+ // require the field if Field1 or Field2 is present:
+ Usage: required_with=Field1 Field2
+
+Required With All
+
+The field under validation must be present and not empty only if all
+of the other specified fields are present. For strings ensures value is
+not "". For slices, maps, pointers, interfaces, channels and functions
+ensures the value is not nil.
+
+ Usage: required_with_all
+
+Example:
+
+ // require the field if Field1 and Field2 are present:
+ Usage: required_with_all=Field1 Field2
+
+Required Without
+
+The field under validation must be present and not empty only when any
+of the other specified fields are not present. For strings ensures value is
+not "". For slices, maps, pointers, interfaces, channels and functions
+ensures the value is not nil.
+
+ Usage: required_without
+
+Examples:
+
+ // require the field if Field1 is not present:
+ Usage: required_without=Field1
+
+ // require the field if Field1 or Field2 is not present:
+ Usage: required_without=Field1 Field2
+
+Required Without All
+
+The field under validation must be present and not empty only when all
+of the other specified fields are not present. For strings ensures value is
+not "". For slices, maps, pointers, interfaces, channels and functions
+ensures the value is not nil.
+
+ Usage: required_without_all
+
+Example:
+
+ // require the field if Field1 and Field2 are not present:
+ Usage: required_without_all=Field1 Field2
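+
+For example, a sketch where at least one contact method must be set (struct
+and field names are illustrative only):
+
+ type ContactInfo struct {
+     Email string `validate:"required_without=Phone"`
+     Phone string `validate:"required_without=Email"`
+ }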
+
+Is Default
+
+This validates that the value is the default value and is almost the
+opposite of required.
+
+ Usage: isdefault
+
+Length
+
+For numbers, length will ensure that the value is
+equal to the parameter given. For strings, it checks that
+the string length is exactly that number of characters. For slices,
+arrays, and maps, validates the number of items.
+
+ Usage: len=10
+
+Maximum
+
+For numbers, max will ensure that the value is
+less than or equal to the parameter given. For strings, it checks
+that the string length is at most that number of characters. For
+slices, arrays, and maps, validates the number of items.
+
+ Usage: max=10
+
+Minimum
+
+For numbers, min will ensure that the value is
+greater than or equal to the parameter given. For strings, it checks that
+the string length is at least that number of characters. For slices,
+arrays, and maps, validates the number of items.
+
+ Usage: min=10
+
+Equals
+
+For strings & numbers, eq will ensure that the value is
+equal to the parameter given. For slices, arrays, and maps,
+validates the number of items.
+
+ Usage: eq=10
+
+Not Equal
+
+For strings & numbers, ne will ensure that the value is not
+equal to the parameter given. For slices, arrays, and maps,
+validates the number of items.
+
+ Usage: ne=10
+
+One Of
+
+For strings, ints, and uints, oneof will ensure that the value
+is one of the values in the parameter. The parameter should be
+a list of values separated by whitespace. Values may be
+strings or numbers.
+
+ Usage: oneof=red green
+ oneof=5 7 9
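+
+For example, a sketch combining several of the above tags (struct and field
+names are illustrative only):
+
+ type Order struct {
+     Quantity int    `validate:"min=1,max=100"`
+     Size     string `validate:"oneof=small medium large"`
+     SKU      string `validate:"len=8"`
+ }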
+
+Greater Than
+
+For numbers, this will ensure that the value is greater than the
+parameter given. For strings, it checks that the string length
+is greater than that number of characters. For slices, arrays
+and maps it validates the number of items.
+
+Example #1
+
+ Usage: gt=10
+
+Example #2 (time.Time)
+
+For time.Time ensures the time value is greater than time.Now.UTC().
+
+ Usage: gt
+
+Greater Than or Equal
+
+Same as 'min' above. Kept both to make terminology with 'len' easier.
+
+
+Example #1
+
+ Usage: gte=10
+
+Example #2 (time.Time)
+
+For time.Time ensures the time value is greater than or equal to time.Now.UTC().
+
+ Usage: gte
+
+Less Than
+
+For numbers, this will ensure that the value is less than the parameter given.
+For strings, it checks that the string length is less than that number of
+characters. For slices, arrays, and maps it validates the number of items.
+
+Example #1
+
+ Usage: lt=10
+
+Example #2 (time.Time)
+
+For time.Time ensures the time value is less than time.Now.UTC().
+
+ Usage: lt
+
+Less Than or Equal
+
+Same as 'max' above. Kept both to make terminology with 'len' easier.
+
+Example #1
+
+ Usage: lte=10
+
+Example #2 (time.Time)
+
+For time.Time ensures the time value is less than or equal to time.Now.UTC().
+
+ Usage: lte
+
+Field Equals Another Field
+
+This will validate the field value against another field's value either within
+a struct or passed in field.
+
+Example #1:
+
+ // Validation on Password field using:
+ Usage: eqfield=ConfirmPassword
+
+Example #2:
+
+ // Validating by field:
+ validate.VarWithValue(password, confirmpassword, "eqfield")
+
+Field Equals Another Field (relative)
+
+This does the same as eqfield except that it validates the field provided relative
+to the top level struct.
+
+ Usage: eqcsfield=InnerStructField.Field
+
+Field Does Not Equal Another Field
+
+This will validate the field value against another field's value either within
+a struct or passed in field.
+
+Examples:
+
+ // Confirm two colors are not the same:
+ //
+ // Validation on Color field:
+ Usage: nefield=Color2
+
+ // Validating by field:
+ validate.VarWithValue(color1, color2, "nefield")
+
+Field Does Not Equal Another Field (relative)
+
+This does the same as nefield except that it validates the field provided
+relative to the top level struct.
+
+ Usage: necsfield=InnerStructField.Field
+
+Field Greater Than Another Field
+
+Only valid for Numbers and time.Time types, this will validate the field value
+against another field's value either within a struct or passed in field.
+The usage examples below validate a Start and End date:
+
+Example #1:
+
+ // Validation on End field using:
+ Usage: gtfield=Start
+
+Example #2:
+
+ // Validating by field:
+ validate.VarWithValue(start, end, "gtfield")
+
+
+Field Greater Than Another Relative Field
+
+This does the same as gtfield except that it validates the field provided
+relative to the top level struct.
+
+ Usage: gtcsfield=InnerStructField.Field
+
+Field Greater Than or Equal To Another Field
+
+Only valid for Numbers and time.Time types, this will validate the field value
+against another field's value either within a struct or passed in field.
+The usage examples below validate a Start and End date:
+
+Example #1:
+
+ // Validation on End field using:
+ Usage: gtefield=Start
+
+Example #2:
+
+ // Validating by field:
+ validate.VarWithValue(start, end, "gtefield")
+
+Field Greater Than or Equal To Another Relative Field
+
+This does the same as gtefield except that it validates the field provided relative
+to the top level struct.
+
+ Usage: gtecsfield=InnerStructField.Field
+
+Less Than Another Field
+
+Only valid for Numbers and time.Time types, this will validate the field value
+against another field's value either within a struct or passed in field.
+The usage examples below validate a Start and End date:
+
+Example #1:
+
+ // Validation on End field using:
+ Usage: ltfield=Start
+
+Example #2:
+
+ // Validating by field:
+ validate.VarWithValue(start, end, "ltfield")
+
+Less Than Another Relative Field
+
+This does the same as ltfield except that it validates the field provided relative
+to the top level struct.
+
+ Usage: ltcsfield=InnerStructField.Field
+
+Less Than or Equal To Another Field
+
+Only valid for Numbers and time.Time types, this will validate the field value
+against another field's value either within a struct or passed in field.
+The usage examples below validate a Start and End date:
+
+Example #1:
+
+ // Validation on End field using:
+ Usage: ltefield=Start
+
+Example #2:
+
+ // Validating by field:
+ validate.VarWithValue(start, end, "ltefield")
+
+Less Than or Equal To Another Relative Field
+
+This does the same as ltefield except that it validates the field provided relative
+to the top level struct.
+
+ Usage: ltecsfield=InnerStructField.Field
+
+Field Contains Another Field
+
+This does the same as contains except for struct fields. It should only be used
+with string types. See reflect.Value.String() for the behavior on
+other types.
+
+ Usage: containsfield=InnerStructField.Field
+
+Field Excludes Another Field
+
+This does the same as excludes except for struct fields. It should only be used
+with string types. See reflect.Value.String() for the behavior on
+other types.
+
+ Usage: excludesfield=InnerStructField.Field
+
+Unique
+
+For arrays & slices, unique will ensure that there are no duplicates.
+For maps, unique will ensure that there are no duplicate values.
+For slices of struct, unique will ensure that there are no duplicate values
+in a field of the struct specified via a parameter.
+
+ // For arrays, slices, and maps:
+ Usage: unique
+
+ // For slices of struct:
+ Usage: unique=field
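+
+For example, a sketch of the slice-of-struct form, assuming a Member struct
+with a Name field (names are illustrative only):
+
+ type Team struct {
+     Members []Member `validate:"unique=Name"` // no two members may share a Name
+ }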
+
+Alpha Only
+
+This validates that a string value contains ASCII alpha characters only
+
+ Usage: alpha
+
+Alphanumeric
+
+This validates that a string value contains ASCII alphanumeric characters only
+
+ Usage: alphanum
+
+Alpha Unicode
+
+This validates that a string value contains unicode alpha characters only
+
+ Usage: alphaunicode
+
+Alphanumeric Unicode
+
+This validates that a string value contains unicode alphanumeric characters only
+
+ Usage: alphanumunicode
+
+Numeric
+
+This validates that a string value contains a basic numeric value
+('basic' excludes exponents etc.).
+For integers or floats it returns true.
+
+ Usage: numeric
+
+Hexadecimal String
+
+This validates that a string value contains a valid hexadecimal.
+
+ Usage: hexadecimal
+
+Hexcolor String
+
+This validates that a string value contains a valid hex color including
+hashtag (#)
+
+ Usage: hexcolor
+
+RGB String
+
+This validates that a string value contains a valid rgb color
+
+ Usage: rgb
+
+RGBA String
+
+This validates that a string value contains a valid rgba color
+
+ Usage: rgba
+
+HSL String
+
+This validates that a string value contains a valid hsl color
+
+ Usage: hsl
+
+HSLA String
+
+This validates that a string value contains a valid hsla color
+
+ Usage: hsla
+
+E-mail String
+
+This validates that a string value contains a valid email address.
+This may not conform to every possibility of any RFC standard, but neither
+does any email provider accept all possibilities.
+
+ Usage: email
+
+File path
+
+This validates that a string value contains a valid file path and that
+the file exists on the machine.
+This is done using os.Stat, which is a platform independent function.
+
+ Usage: file
+
+URL String
+
+This validates that a string value contains a valid URL.
+This will accept any URL the golang request URI accepts, but it must contain
+a scheme, for example http:// or rtmp://.
+
+ Usage: url
+
+URI String
+
+This validates that a string value contains a valid URI.
+This will accept any URI the golang request URI accepts.
+
+ Usage: uri
+
+Urn RFC 2141 String
+
+This validates that a string value contains a valid URN
+according to the RFC 2141 spec.
+
+ Usage: urn_rfc2141
+
+Base64 String
+
+This validates that a string value contains a valid base64 value.
+Although an empty string is valid base64, this will report an empty string
+as an error; if you wish to accept an empty string as valid you can use
+this with the omitempty tag.
+
+ Usage: base64
+
+Base64URL String
+
+This validates that a string value contains a valid base64 URL safe value
+according to the RFC 4648 spec.
+Although an empty string is a valid base64 URL safe value, this will report
+an empty string as an error; if you wish to accept an empty string as valid
+you can use this with the omitempty tag.
+
+ Usage: base64url
+
+Bitcoin Address
+
+This validates that a string value contains a valid bitcoin address.
+The format of the string is checked to ensure it matches either the P2PKH or
+P2SH format, and checksum validation is performed.
+
+ Usage: btc_addr
+
+Bitcoin Bech32 Address (segwit)
+
+This validates that a string value contains a valid bitcoin Bech32 address as defined
+by bip-0173 (https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki)
+Special thanks to Pieter Wuille for providing reference implementations.
+
+ Usage: btc_addr_bech32
+
+Ethereum Address
+
+This validates that a string value contains a valid ethereum address.
+The format of the string is checked to ensure it matches the standard Ethereum
+address format. Full validation is blocked by https://github.com/golang/crypto/pull/28
+
+ Usage: eth_addr
+
+Contains
+
+This validates that a string value contains the substring value.
+
+ Usage: contains=@
+
+Contains Any
+
+This validates that a string value contains any Unicode code points
+in the substring value.
+
+ Usage: containsany=!@#?
+
+Contains Rune
+
+This validates that a string value contains the supplied rune value.
+
+ Usage: containsrune=@
+
+Excludes
+
+This validates that a string value does not contain the substring value.
+
+ Usage: excludes=@
+
+Excludes All
+
+This validates that a string value does not contain any Unicode code
+points in the substring value.
+
+ Usage: excludesall=!@#?
+
+Excludes Rune
+
+This validates that a string value does not contain the supplied rune value.
+
+ Usage: excludesrune=@
+
+Starts With
+
+This validates that a string value starts with the supplied string value
+
+ Usage: startswith=hello
+
+Ends With
+
+This validates that a string value ends with the supplied string value
+
+ Usage: endswith=goodbye
+
+International Standard Book Number
+
+This validates that a string value contains a valid isbn10 or isbn13 value.
+
+ Usage: isbn
+
+International Standard Book Number 10
+
+This validates that a string value contains a valid isbn10 value.
+
+ Usage: isbn10
+
+International Standard Book Number 13
+
+This validates that a string value contains a valid isbn13 value.
+
+ Usage: isbn13
+
+Universally Unique Identifier UUID
+
+This validates that a string value contains a valid UUID. Uppercase UUID values will not pass - use `uuid_rfc4122` instead.
+
+ Usage: uuid
+
+Universally Unique Identifier UUID v3
+
+This validates that a string value contains a valid version 3 UUID. Uppercase UUID values will not pass - use `uuid3_rfc4122` instead.
+
+ Usage: uuid3
+
+Universally Unique Identifier UUID v4
+
+This validates that a string value contains a valid version 4 UUID. Uppercase UUID values will not pass - use `uuid4_rfc4122` instead.
+
+ Usage: uuid4
+
+Universally Unique Identifier UUID v5
+
+This validates that a string value contains a valid version 5 UUID. Uppercase UUID values will not pass - use `uuid5_rfc4122` instead.
+
+ Usage: uuid5
+
+ASCII
+
+This validates that a string value contains only ASCII characters.
+NOTE: if the string is blank, this validates as true.
+
+ Usage: ascii
+
+Printable ASCII
+
+This validates that a string value contains only printable ASCII characters.
+NOTE: if the string is blank, this validates as true.
+
+ Usage: printascii
+
+Multi-Byte Characters
+
+This validates that a string value contains one or more multibyte characters.
+NOTE: if the string is blank, this validates as true.
+
+ Usage: multibyte
+
+Data URL
+
+This validates that a string value contains a valid DataURI.
+NOTE: this will also validate that the data portion is valid base64
+
+ Usage: datauri
+
+Latitude
+
+This validates that a string value contains a valid latitude.
+
+ Usage: latitude
+
+Longitude
+
+This validates that a string value contains a valid longitude.
+
+ Usage: longitude
+
+Social Security Number SSN
+
+This validates that a string value contains a valid U.S. Social Security Number.
+
+ Usage: ssn
+
+Internet Protocol Address IP
+
+This validates that a string value contains a valid IP Address.
+
+ Usage: ip
+
+Internet Protocol Address IPv4
+
+This validates that a string value contains a valid v4 IP Address.
+
+ Usage: ipv4
+
+Internet Protocol Address IPv6
+
+This validates that a string value contains a valid v6 IP Address.
+
+ Usage: ipv6
+
+Classless Inter-Domain Routing CIDR
+
+This validates that a string value contains a valid CIDR Address.
+
+ Usage: cidr
+
+Classless Inter-Domain Routing CIDRv4
+
+This validates that a string value contains a valid v4 CIDR Address.
+
+ Usage: cidrv4
+
+Classless Inter-Domain Routing CIDRv6
+
+This validates that a string value contains a valid v6 CIDR Address.
+
+ Usage: cidrv6
+
+Transmission Control Protocol Address TCP
+
+This validates that a string value contains a valid resolvable TCP Address.
+
+ Usage: tcp_addr
+
+Transmission Control Protocol Address TCPv4
+
+This validates that a string value contains a valid resolvable v4 TCP Address.
+
+ Usage: tcp4_addr
+
+Transmission Control Protocol Address TCPv6
+
+This validates that a string value contains a valid resolvable v6 TCP Address.
+
+ Usage: tcp6_addr
+
+User Datagram Protocol Address UDP
+
+This validates that a string value contains a valid resolvable UDP Address.
+
+ Usage: udp_addr
+
+User Datagram Protocol Address UDPv4
+
+This validates that a string value contains a valid resolvable v4 UDP Address.
+
+ Usage: udp4_addr
+
+User Datagram Protocol Address UDPv6
+
+This validates that a string value contains a valid resolvable v6 UDP Address.
+
+ Usage: udp6_addr
+
+Internet Protocol Address IP
+
+This validates that a string value contains a valid resolvable IP Address.
+
+ Usage: ip_addr
+
+Internet Protocol Address IPv4
+
+This validates that a string value contains a valid resolvable v4 IP Address.
+
+ Usage: ip4_addr
+
+Internet Protocol Address IPv6
+
+This validates that a string value contains a valid resolvable v6 IP Address.
+
+ Usage: ip6_addr
+
+Unix domain socket end point Address
+
+This validates that a string value contains a valid Unix Address.
+
+ Usage: unix_addr
+
+Media Access Control Address MAC
+
+This validates that a string value contains a valid MAC Address.
+
+ Usage: mac
+
+Note: See Go's ParseMAC for accepted formats and types:
+
+ http://golang.org/src/net/mac.go?s=866:918#L29
+
+Hostname RFC 952
+
+This validates that a string value is a valid Hostname according to RFC 952 https://tools.ietf.org/html/rfc952
+
+ Usage: hostname
+
+Hostname RFC 1123
+
+This validates that a string value is a valid Hostname according to RFC 1123 https://tools.ietf.org/html/rfc1123
+
+ Usage: hostname_rfc1123 or if you want to continue to use 'hostname' in your tags, create an alias.
+
+Fully Qualified Domain Name (FQDN)
+
+This validates that a string value contains a valid FQDN.
+
+ Usage: fqdn
+
+HTML Tags
+
+This validates that a string value appears to be an HTML element tag
+including those described at https://developer.mozilla.org/en-US/docs/Web/HTML/Element
+
+ Usage: html
+
+HTML Encoded
+
+This validates that a string value is a proper character reference in decimal
+or hexadecimal format
+
+ Usage: html_encoded
+
+URL Encoded
+
+This validates that a string value is percent-encoded (URL encoded) according
+to https://tools.ietf.org/html/rfc3986#section-2.1
+
+ Usage: url_encoded
+
+Directory
+
+This validates that a string value contains a valid directory and that
+it exists on the machine.
+This is done using os.Stat, which is a platform independent function.
+
+ Usage: dir
+
+Alias Validators and Tags
+
+NOTE: When returning an error, the tag returned in "FieldError" will be
+the alias tag unless the dive tag is part of the alias. Everything after the
+dive tag is not reported as the alias tag. Also, in the above case the
+"ActualTag" will be the actual tag within the alias that failed.
+
+Here is a list of the current built in alias tags:
+
+ "iscolor"
+ alias is "hexcolor|rgb|rgba|hsl|hsla" (Usage: iscolor)
+
+Validator notes:
+
+ regex
+ a regex validator won't be added because commas and '=' signs can be part
+ of a regex, which conflicts with the validation definitions. Although
+ workarounds can be made, they take away from using pure regexes.
+ Furthermore, while quick and dirty, such regexes become harder to
+ maintain and are not reusable, so it's as much a programming philosophy
+ as anything.
+
+ In place of this, new validator functions should be created; a regex can
+ be used within the validator function and even be precompiled for better
+ efficiency, as in regexes.go.
+
+ And the best reason: you can submit a pull request and we can keep on
+ adding to the validation library of this package!
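+
+ For example, a custom validation function wrapping a precompiled regex
+ might look like this (tag name and pattern are illustrative only):
+
+     var phoneRegex = regexp.MustCompile("^[0-9]{10}$")
+
+     func isPhone(fl validator.FieldLevel) bool {
+         return phoneRegex.MatchString(fl.Field().String())
+     }
+
+     validate.RegisterValidation("phone", isPhone)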
+
+Non standard validators
+
+A collection of validation rules that are frequently needed but are more
+complex than the ones found in the baked in validators.
+A non standard validator must be registered manually like you would
+with your own custom validation functions.
+
+Example of registration and use:
+
+ type Test struct {
+ TestField string `validate:"yourtag"`
+ }
+
+ t := &Test{
+ TestField: "Test"
+ }
+
+ validate := validator.New()
+ validate.RegisterValidation("yourtag", validators.NotBlank)
+
+Here is a list of the current non standard validators:
+
+ NotBlank
+ This validates that the value is not blank and does not have zero length.
+ For strings, it ensures they do not contain only spaces. For channels, maps,
+ slices and arrays, it ensures they do not have zero length. For other types,
+ a non-empty value is required.
+
+ Usage: notblank
+
+Panics
+
+This package panics when bad input is provided; this is by design, since bad
+code like that should not make it to production.
+
+ type Test struct {
+ TestField string `validate:"nonexistentfunction=1"`
+ }
+
+ t := &Test{
+ TestField: "Test",
+ }
+
+ validate.Struct(t) // this will panic
+*/
+package validator
diff --git a/vendor/github.com/go-playground/validator/errors.go b/vendor/github.com/go-playground/validator/errors.go
new file mode 100644
index 0000000..46c24c9
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/errors.go
@@ -0,0 +1,272 @@
+package validator
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+ "strings"
+
+ ut "github.com/go-playground/universal-translator"
+)
+
+const (
+ fieldErrMsg = "Key: '%s' Error:Field validation for '%s' failed on the '%s' tag"
+)
+
+// ValidationErrorsTranslations is the translation return type
+type ValidationErrorsTranslations map[string]string
+
+// InvalidValidationError describes an invalid argument passed to
+// `Struct`, `StructExcept`, `StructPartial` or `Field`
+type InvalidValidationError struct {
+ Type reflect.Type
+}
+
+// Error returns InvalidValidationError message
+func (e *InvalidValidationError) Error() string {
+
+ if e.Type == nil {
+ return "validator: (nil)"
+ }
+
+ return "validator: (nil " + e.Type.String() + ")"
+}
+
+// ValidationErrors is an array of FieldError values
+// for use in custom error messages post validation.
+type ValidationErrors []FieldError
+
+// Error is intended for use in development + debugging and not intended to be a production error message.
+// It allows ValidationErrors to subscribe to the Error interface.
+// All information to create an error message specific to your application is contained within
+// the FieldError found within the ValidationErrors array
+func (ve ValidationErrors) Error() string {
+
+ buff := bytes.NewBufferString("")
+
+ var fe *fieldError
+
+ for i := 0; i < len(ve); i++ {
+
+ fe = ve[i].(*fieldError)
+ buff.WriteString(fe.Error())
+ buff.WriteString("\n")
+ }
+
+ return strings.TrimSpace(buff.String())
+}
+
+// Translate translates all of the ValidationErrors
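+//
+// A minimal usage sketch, assuming a 'ut.Translator' named trans has been
+// created and the matching translations registered beforehand:
+//
+//	if errs, ok := err.(validator.ValidationErrors); ok {
+//		for field, msg := range errs.Translate(trans) {
+//			fmt.Println(field, msg)
+//		}
+//	}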
+func (ve ValidationErrors) Translate(ut ut.Translator) ValidationErrorsTranslations {
+
+ trans := make(ValidationErrorsTranslations)
+
+ var fe *fieldError
+
+ for i := 0; i < len(ve); i++ {
+ fe = ve[i].(*fieldError)
+
+ trans[fe.ns] = fe.Translate(ut)
+ }
+
+ return trans
+}
+
+// FieldError contains all functions to get error details
+type FieldError interface {
+
+ // returns the validation tag that failed. if the
+ // validation was an alias, this will return the
+ // alias name and not the underlying tag that failed.
+ //
+ // eg. alias "iscolor": "hexcolor|rgb|rgba|hsl|hsla"
+ // will return "iscolor"
+ Tag() string
+
+ // returns the validation tag that failed. If the validation was an
+ // alias, the actual tag within the alias will be returned.
+ // If an 'or' validation fails, the entire or will be returned.
+ //
+ // eg. alias "iscolor": "hexcolor|rgb|rgba|hsl|hsla"
+ // will return "hexcolor|rgb|rgba|hsl|hsla"
+ ActualTag() string
+
+ // returns the namespace for the field error, with the tag
+ // name taking precedence over the field's actual name.
+ //
+ // eg. JSON name "User.fname"
+ //
+ // See StructNamespace() for a version that returns actual names.
+ //
+ // NOTE: this field can be blank when validating a single primitive field
+ // using validate.Field(...) as there is no way to extract its name
+ Namespace() string
+
+ // returns the namespace for the field error, with the field's
+ // actual name.
+ //
+ // eq. "User.FirstName" see Namespace for comparison
+ //
+ // NOTE: this field can be blank when validating a single primitive field
+ // using validate.Field(...) as there is no way to extract its name
+ StructNamespace() string
+
+ // returns the field's name with the tag name taking precedence over the
+ // field's actual name.
+ //
+ // eg. JSON name "fname"
+ // see StructField for comparison
+ Field() string
+
+ // returns the field's actual name from the struct, when able to determine.
+ //
+ // eq. "FirstName"
+ // see Field for comparison
+ StructField() string
+
+ // returns the actual fields value in case needed for creating the error
+ // message
+ Value() interface{}
+
+ // returns the param value, in string form for comparison; this will also
+ // help with generating an error message
+ Param() string
+
+ // Kind returns the Field's reflect Kind
+ //
+ // eg. time.Time's kind is a struct
+ Kind() reflect.Kind
+
+ // Type returns the Field's reflect Type
+ //
+ // eg. time.Time's type is time.Time
+ Type() reflect.Type
+
+ // returns the FieldError's translated error
+ // from the provided 'ut.Translator' and registered 'TranslationFunc'
+ //
+ // NOTE: if no registered translator can be found, it returns the same as
+ // calling fe.Error()
+ Translate(ut ut.Translator) string
+}
+
+// compile time interface checks
+var _ FieldError = new(fieldError)
+var _ error = new(fieldError)
+
+// fieldError contains a single field's validation error along
+// with other properties that may be needed for error message creation
+// it complies with the FieldError interface
+type fieldError struct {
+ v *Validate
+ tag string
+ actualTag string
+ ns string
+ structNs string
+ fieldLen uint8
+ structfieldLen uint8
+ value interface{}
+ param string
+ kind reflect.Kind
+ typ reflect.Type
+}
+
+// Tag returns the validation tag that failed.
+func (fe *fieldError) Tag() string {
+ return fe.tag
+}
+
+// ActualTag returns the validation tag that failed. If the validation was an
+// alias, the actual tag within the alias will be returned.
+func (fe *fieldError) ActualTag() string {
+ return fe.actualTag
+}
+
+// Namespace returns the namespace for the field error, with the tag
+// name taking precedence over the field's actual name.
+func (fe *fieldError) Namespace() string {
+ return fe.ns
+}
+
+// StructNamespace returns the namespace for the field error, with the field's
+// actual name.
+func (fe *fieldError) StructNamespace() string {
+ return fe.structNs
+}
+
+// Field returns the field's name with the tag name taking precedence over the
+// field's actual name.
+func (fe *fieldError) Field() string {
+
+ return fe.ns[len(fe.ns)-int(fe.fieldLen):]
+}
+
+// StructField returns the field's actual name from the struct, when able to determine.
+func (fe *fieldError) StructField() string {
+ return fe.structNs[len(fe.structNs)-int(fe.structfieldLen):]
+}
+
+// Value returns the actual field's value in case needed for creating the error
+// message
+func (fe *fieldError) Value() interface{} {
+ return fe.value
+}
+
+// Param returns the param value, in string form for comparison; this will
+// also help with generating an error message
+func (fe *fieldError) Param() string {
+ return fe.param
+}
+
+// Kind returns the Field's reflect Kind
+func (fe *fieldError) Kind() reflect.Kind {
+ return fe.kind
+}
+
+// Type returns the Field's reflect Type
+func (fe *fieldError) Type() reflect.Type {
+ return fe.typ
+}
+
+// Error returns the fieldError's error message
+func (fe *fieldError) Error() string {
+ return fmt.Sprintf(fieldErrMsg, fe.ns, fe.Field(), fe.tag)
+}
+
+// Translate returns the FieldError's translated error
+// from the provided 'ut.Translator' and registered 'TranslationFunc'
+//
+// NOTE: if no registered translation can be found, it returns the same
+// as calling fe.Error()
+func (fe *fieldError) Translate(ut ut.Translator) string {
+
+ m, ok := fe.v.transTagFunc[ut]
+ if !ok {
+ return fe.Error()
+ }
+
+ fn, ok := m[fe.tag]
+ if !ok {
+ return fe.Error()
+ }
+
+ return fn(ut, fe)
+}
diff --git a/vendor/github.com/go-playground/validator/field_level.go b/vendor/github.com/go-playground/validator/field_level.go
new file mode 100644
index 0000000..f0e2a9a
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/field_level.go
@@ -0,0 +1,119 @@
+package validator
+
+import "reflect"
+
+// FieldLevel contains all the information and helper functions
+// to validate a field
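+//
+// A minimal sketch of a custom validation function consuming FieldLevel
+// (the tag name "is_awesome" is illustrative only):
+//
+//	validate.RegisterValidation("is_awesome", func(fl validator.FieldLevel) bool {
+//		return fl.Field().String() == "awesome"
+//	})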
+type FieldLevel interface {
+ // returns the top level struct, if any
+ Top() reflect.Value
+
+ // returns the current field's parent struct, if any, or
+ // the comparison value if called via 'VarWithValue'
+ Parent() reflect.Value
+
+ // returns current field for validation
+ Field() reflect.Value
+
+ // returns the field's name with the tag
+ // name taking precedence over the field's actual name.
+ FieldName() string
+
+ // returns the struct field's name
+ StructFieldName() string
+
+ // returns param for validation against current field
+ Param() string
+
+ // GetTag returns the current validations tag name
+ GetTag() string
+
+ // ExtractType gets the actual underlying type of field value.
+ // It will dive into pointers and customTypes, and return the
+ // underlying value and its kind.
+ ExtractType(field reflect.Value) (value reflect.Value, kind reflect.Kind, nullable bool)
+
+ // traverses the parent struct to retrieve a specific field denoted by the provided namespace
+ // in the param and returns the field, field kind and whether it was successful in retrieving
+ // the field at all.
+ //
+ // NOTE: when not successful ok will be false, this can happen when a nested struct is nil and so the field
+ // could not be retrieved because it didn't exist.
+ //
+ // Deprecated: Use GetStructFieldOK2() instead, which also returns whether the value is nullable.
+ GetStructFieldOK() (reflect.Value, reflect.Kind, bool)
+
+ // GetStructFieldOKAdvanced is the same as GetStructFieldOK except that it accepts the parent struct to start looking for
+ // the field and namespace allowing more extensibility for validators.
+ //
+ // Deprecated: Use GetStructFieldOKAdvanced2() instead, which also returns whether the value is nullable.
+ GetStructFieldOKAdvanced(val reflect.Value, namespace string) (reflect.Value, reflect.Kind, bool)
+
+ // traverses the parent struct to retrieve a specific field denoted by the provided namespace
+ // in the param and returns the field, field kind, whether it's a nullable type and whether it was successful in retrieving
+ // the field at all.
+ //
+ // NOTE: when not successful ok will be false, this can happen when a nested struct is nil and so the field
+ // could not be retrieved because it didn't exist.
+ GetStructFieldOK2() (reflect.Value, reflect.Kind, bool, bool)
+
+ // GetStructFieldOKAdvanced2 is the same as GetStructFieldOK2 except that it accepts the parent struct to start looking for
+ // the field and namespace, allowing more extensibility for validators.
+ GetStructFieldOKAdvanced2(val reflect.Value, namespace string) (reflect.Value, reflect.Kind, bool, bool)
+}
+
+var _ FieldLevel = new(validate)
+
+// Field returns current field for validation
+func (v *validate) Field() reflect.Value {
+ return v.flField
+}
+
+// FieldName returns the field's name with the tag
+// name taking precedence over the fields actual name.
+func (v *validate) FieldName() string {
+ return v.cf.altName
+}
+
+// GetTag returns the current validations tag name
+func (v *validate) GetTag() string {
+ return v.ct.tag
+}
+
+// StructFieldName returns the struct field's name
+func (v *validate) StructFieldName() string {
+ return v.cf.name
+}
+
+// Param returns param for validation against current field
+func (v *validate) Param() string {
+ return v.ct.param
+}
+
+// GetStructFieldOK traverses the parent struct to retrieve a specific field denoted by the
+// namespace in the param, and returns the field, field kind and whether it was found.
+//
+// Deprecated: Use GetStructFieldOK2() instead, which also returns whether the value is nullable.
+func (v *validate) GetStructFieldOK() (reflect.Value, reflect.Kind, bool) {
+ current, kind, _, found := v.getStructFieldOKInternal(v.slflParent, v.ct.param)
+ return current, kind, found
+}
+
+// GetStructFieldOKAdvanced is the same as GetStructFieldOK except that it accepts the parent struct to start looking for
+// the field and namespace allowing more extensibility for validators.
+//
+// Deprecated: Use GetStructFieldOKAdvanced2() instead, which also returns whether the value is nullable.
+func (v *validate) GetStructFieldOKAdvanced(val reflect.Value, namespace string) (reflect.Value, reflect.Kind, bool) {
+ current, kind, _, found := v.GetStructFieldOKAdvanced2(val, namespace)
+ return current, kind, found
+}
+
+// GetStructFieldOK2 traverses the parent struct to retrieve a specific field denoted by the
+// namespace in the param, and returns the field, field kind, whether it's nullable and whether it was found.
+func (v *validate) GetStructFieldOK2() (reflect.Value, reflect.Kind, bool, bool) {
+ return v.getStructFieldOKInternal(v.slflParent, v.ct.param)
+}
+
+// GetStructFieldOKAdvanced2 is the same as GetStructFieldOK2 except that it accepts the parent struct to start looking for
+// the field and namespace, allowing more extensibility for validators.
+func (v *validate) GetStructFieldOKAdvanced2(val reflect.Value, namespace string) (reflect.Value, reflect.Kind, bool, bool) {
+ return v.getStructFieldOKInternal(val, namespace)
+}
diff --git a/vendor/github.com/go-playground/validator/logo.png b/vendor/github.com/go-playground/validator/logo.png
new file mode 100644
index 0000000..355000f
Binary files /dev/null and b/vendor/github.com/go-playground/validator/logo.png differ
diff --git a/vendor/github.com/go-playground/validator/regexes.go b/vendor/github.com/go-playground/validator/regexes.go
new file mode 100644
index 0000000..7ba7c73
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/regexes.go
@@ -0,0 +1,97 @@
+package validator
+
+import "regexp"
+
+const (
+ alphaRegexString = "^[a-zA-Z]+$"
+ alphaNumericRegexString = "^[a-zA-Z0-9]+$"
+ alphaUnicodeRegexString = "^[\\p{L}]+$"
+ alphaUnicodeNumericRegexString = "^[\\p{L}\\p{N}]+$"
+ numericRegexString = "^[-+]?[0-9]+(?:\\.[0-9]+)?$"
+ numberRegexString = "^[0-9]+$"
+ hexadecimalRegexString = "^[0-9a-fA-F]+$"
+ hexcolorRegexString = "^#(?:[0-9a-fA-F]{3}|[0-9a-fA-F]{6})$"
+ rgbRegexString = "^rgb\\(\\s*(?:(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])|(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%)\\s*\\)$"
+ rgbaRegexString = "^rgba\\(\\s*(?:(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])|(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%)\\s*,\\s*(?:(?:0.[1-9]*)|[01])\\s*\\)$"
+ hslRegexString = "^hsl\\(\\s*(?:0|[1-9]\\d?|[12]\\d\\d|3[0-5]\\d|360)\\s*,\\s*(?:(?:0|[1-9]\\d?|100)%)\\s*,\\s*(?:(?:0|[1-9]\\d?|100)%)\\s*\\)$"
+ hslaRegexString = "^hsla\\(\\s*(?:0|[1-9]\\d?|[12]\\d\\d|3[0-5]\\d|360)\\s*,\\s*(?:(?:0|[1-9]\\d?|100)%)\\s*,\\s*(?:(?:0|[1-9]\\d?|100)%)\\s*,\\s*(?:(?:0.[1-9]*)|[01])\\s*\\)$"
+ emailRegexString = "^(?:(?:(?:(?:[a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(?:\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|(?:(?:\\x22)(?:(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(?:\\x20|\\x09)+)?(?:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(\\x20|\\x09)+)?(?:\\x22))))@(?:(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
+ e164RegexString = "^\\+[1-9]?[0-9]{7,14}$"
+ base64RegexString = "^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=|[A-Za-z0-9+\\/]{4})$"
+ base64URLRegexString = "^(?:[A-Za-z0-9-_]{4})*(?:[A-Za-z0-9-_]{2}==|[A-Za-z0-9-_]{3}=|[A-Za-z0-9-_]{4})$"
+ iSBN10RegexString = "^(?:[0-9]{9}X|[0-9]{10})$"
+ iSBN13RegexString = "^(?:(?:97(?:8|9))[0-9]{10})$"
+ uUID3RegexString = "^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$"
+ uUID4RegexString = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
+ uUID5RegexString = "^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
+ uUIDRegexString = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
+ uUID3RFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-3[0-9a-fA-F]{3}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
+ uUID4RFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$"
+ uUID5RFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-5[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$"
+ uUIDRFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
+ aSCIIRegexString = "^[\x00-\x7F]*$"
+ printableASCIIRegexString = "^[\x20-\x7E]*$"
+ multibyteRegexString = "[^\x00-\x7F]"
+ dataURIRegexString = "^data:.+\\/(.+);base64$"
+ latitudeRegexString = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$"
+ longitudeRegexString = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$"
+ sSNRegexString = `^[0-9]{3}[ -]?(0[1-9]|[1-9][0-9])[ -]?([1-9][0-9]{3}|[0-9][1-9][0-9]{2}|[0-9]{2}[1-9][0-9]|[0-9]{3}[1-9])$`
+ hostnameRegexStringRFC952 = `^[a-zA-Z][a-zA-Z0-9\-\.]+[a-zA-Z0-9]$` // https://tools.ietf.org/html/rfc952
+ hostnameRegexStringRFC1123 = `^[a-zA-Z0-9][a-zA-Z0-9\-\.]+[a-zA-Z0-9]$` // accepts hostname starting with a digit https://tools.ietf.org/html/rfc1123
+ btcAddressRegexString = `^[13][a-km-zA-HJ-NP-Z1-9]{25,34}$` // bitcoin address
+ btcAddressUpperRegexStringBech32 = `^BC1[02-9AC-HJ-NP-Z]{7,76}$` // bitcoin bech32 address https://en.bitcoin.it/wiki/Bech32
+ btcAddressLowerRegexStringBech32 = `^bc1[02-9ac-hj-np-z]{7,76}$` // bitcoin bech32 address https://en.bitcoin.it/wiki/Bech32
+ ethAddressRegexString = `^0x[0-9a-fA-F]{40}$`
+ ethAddressUpperRegexString = `^0x[0-9A-F]{40}$`
+ ethAddressLowerRegexString = `^0x[0-9a-f]{40}$`
+ uRLEncodedRegexString = `(%[A-Fa-f0-9]{2})`
+ hTMLEncodedRegexString = `&#[x]?([0-9a-fA-F]{2})|(&gt)|(&lt)|(&quot)|(&amp)+[;]?`
+ hTMLRegexString = `<[/]?([a-zA-Z]+).*?>`
+)
+
+var (
+ alphaRegex = regexp.MustCompile(alphaRegexString)
+ alphaNumericRegex = regexp.MustCompile(alphaNumericRegexString)
+ alphaUnicodeRegex = regexp.MustCompile(alphaUnicodeRegexString)
+ alphaUnicodeNumericRegex = regexp.MustCompile(alphaUnicodeNumericRegexString)
+ numericRegex = regexp.MustCompile(numericRegexString)
+ numberRegex = regexp.MustCompile(numberRegexString)
+ hexadecimalRegex = regexp.MustCompile(hexadecimalRegexString)
+ hexcolorRegex = regexp.MustCompile(hexcolorRegexString)
+ rgbRegex = regexp.MustCompile(rgbRegexString)
+ rgbaRegex = regexp.MustCompile(rgbaRegexString)
+ hslRegex = regexp.MustCompile(hslRegexString)
+ hslaRegex = regexp.MustCompile(hslaRegexString)
+ e164Regex = regexp.MustCompile(e164RegexString)
+ emailRegex = regexp.MustCompile(emailRegexString)
+ base64Regex = regexp.MustCompile(base64RegexString)
+ base64URLRegex = regexp.MustCompile(base64URLRegexString)
+ iSBN10Regex = regexp.MustCompile(iSBN10RegexString)
+ iSBN13Regex = regexp.MustCompile(iSBN13RegexString)
+ uUID3Regex = regexp.MustCompile(uUID3RegexString)
+ uUID4Regex = regexp.MustCompile(uUID4RegexString)
+ uUID5Regex = regexp.MustCompile(uUID5RegexString)
+ uUIDRegex = regexp.MustCompile(uUIDRegexString)
+ uUID3RFC4122Regex = regexp.MustCompile(uUID3RFC4122RegexString)
+ uUID4RFC4122Regex = regexp.MustCompile(uUID4RFC4122RegexString)
+ uUID5RFC4122Regex = regexp.MustCompile(uUID5RFC4122RegexString)
+ uUIDRFC4122Regex = regexp.MustCompile(uUIDRFC4122RegexString)
+ aSCIIRegex = regexp.MustCompile(aSCIIRegexString)
+ printableASCIIRegex = regexp.MustCompile(printableASCIIRegexString)
+ multibyteRegex = regexp.MustCompile(multibyteRegexString)
+ dataURIRegex = regexp.MustCompile(dataURIRegexString)
+ latitudeRegex = regexp.MustCompile(latitudeRegexString)
+ longitudeRegex = regexp.MustCompile(longitudeRegexString)
+ sSNRegex = regexp.MustCompile(sSNRegexString)
+ hostnameRegexRFC952 = regexp.MustCompile(hostnameRegexStringRFC952)
+ hostnameRegexRFC1123 = regexp.MustCompile(hostnameRegexStringRFC1123)
+ btcAddressRegex = regexp.MustCompile(btcAddressRegexString)
+ btcUpperAddressRegexBech32 = regexp.MustCompile(btcAddressUpperRegexStringBech32)
+ btcLowerAddressRegexBech32 = regexp.MustCompile(btcAddressLowerRegexStringBech32)
+ ethAddressRegex = regexp.MustCompile(ethAddressRegexString)
+ ethaddressRegexUpper = regexp.MustCompile(ethAddressUpperRegexString)
+ ethAddressRegexLower = regexp.MustCompile(ethAddressLowerRegexString)
+ uRLEncodedRegex = regexp.MustCompile(uRLEncodedRegexString)
+ hTMLEncodedRegex = regexp.MustCompile(hTMLEncodedRegexString)
+ hTMLRegex = regexp.MustCompile(hTMLRegexString)
+)
diff --git a/vendor/github.com/go-playground/validator/struct_level.go b/vendor/github.com/go-playground/validator/struct_level.go
new file mode 100644
index 0000000..57691ee
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/struct_level.go
@@ -0,0 +1,175 @@
+package validator
+
+import (
+ "context"
+ "reflect"
+)
+
+// StructLevelFunc accepts all values needed for struct level validation
+type StructLevelFunc func(sl StructLevel)
+
+// StructLevelFuncCtx accepts all values needed for struct level validation
+// but also allows passing of contextual validation information via context.Context.
+type StructLevelFuncCtx func(ctx context.Context, sl StructLevel)
+
+// wrapStructLevelFunc wraps a normal StructLevelFunc, making it compatible with StructLevelFuncCtx
+func wrapStructLevelFunc(fn StructLevelFunc) StructLevelFuncCtx {
+ return func(ctx context.Context, sl StructLevel) {
+ fn(sl)
+ }
+}
+
+// StructLevel contains all the information and helper functions
+// to validate a struct
+type StructLevel interface {
+
+ // returns the main validation object, in case one wants to call validations internally.
+ // this is so you don't have to use anonymous functions to get access to the validate
+ // instance.
+ Validator() *Validate
+
+ // returns the top level struct, if any
+ Top() reflect.Value
+
+ // returns the current fields parent struct, if any
+ Parent() reflect.Value
+
+ // returns the current struct.
+ Current() reflect.Value
+
+ // ExtractType gets the actual underlying type of field value.
+ // It will dive into pointers, customTypes and return you the
+ // underlying value and its kind.
+ ExtractType(field reflect.Value) (value reflect.Value, kind reflect.Kind, nullable bool)
+
+ // reports an error just by passing the field and tag information
+ //
+ // NOTES:
+ //
+ // fieldName and altName get appended to the existing namespace that
+ // validator is on. e.g. pass 'FirstName' or 'Names[0]' depending
+ // on the nesting
+ //
+ // tag can be an existing validation tag or just something you make up
+ // and process on the flip side; it's up to you.
+ ReportError(field interface{}, fieldName, structFieldName string, tag, param string)
+
+ // reports an error just by passing ValidationErrors
+ //
+ // NOTES:
+ //
+ // relativeNamespace and relativeActualNamespace get appended to the
+ // existing namespace that validator is on.
+ // e.g. pass 'User.FirstName' or 'Users[0].FirstName' depending
+ // on the nesting. Most of the time they will be blank, unless you validate
+ // at a level lower than the current field depth
+ ReportValidationErrors(relativeNamespace, relativeActualNamespace string, errs ValidationErrors)
+}
+
+var _ StructLevel = new(validate)
+
+// Top returns the top level struct
+//
+// NOTE: this can be the same as the current struct being validated
+// if it is not a nested struct.
+//
+// this is only called when within Struct and Field Level validation and
+// should not be relied upon for an accurate value otherwise.
+func (v *validate) Top() reflect.Value {
+ return v.top
+}
+
+// Parent returns the current structs parent
+//
+// NOTE: this can be the same as the current struct being validated
+// if it is not a nested struct.
+//
+// this is only called when within Struct and Field Level validation and
+// should not be relied upon for an accurate value otherwise.
+func (v *validate) Parent() reflect.Value {
+ return v.slflParent
+}
+
+// Current returns the current struct.
+func (v *validate) Current() reflect.Value {
+ return v.slCurrent
+}
+
+// Validator returns the main validation object, in case one wants to call validations internally.
+func (v *validate) Validator() *Validate {
+ return v.v
+}
+
+// ExtractType gets the actual underlying type of field value.
+func (v *validate) ExtractType(field reflect.Value) (reflect.Value, reflect.Kind, bool) {
+ return v.extractTypeInternal(field, false)
+}
+
+// ReportError reports an error just by passing the field and tag information
+func (v *validate) ReportError(field interface{}, fieldName, structFieldName, tag, param string) {
+
+ fv, kind, _ := v.extractTypeInternal(reflect.ValueOf(field), false)
+
+ if len(structFieldName) == 0 {
+ structFieldName = fieldName
+ }
+
+ v.str1 = string(append(v.ns, fieldName...))
+
+ if v.v.hasTagNameFunc || fieldName != structFieldName {
+ v.str2 = string(append(v.actualNs, structFieldName...))
+ } else {
+ v.str2 = v.str1
+ }
+
+ if kind == reflect.Invalid {
+
+ v.errs = append(v.errs,
+ &fieldError{
+ v: v.v,
+ tag: tag,
+ actualTag: tag,
+ ns: v.str1,
+ structNs: v.str2,
+ fieldLen: uint8(len(fieldName)),
+ structfieldLen: uint8(len(structFieldName)),
+ param: param,
+ kind: kind,
+ },
+ )
+ return
+ }
+
+ v.errs = append(v.errs,
+ &fieldError{
+ v: v.v,
+ tag: tag,
+ actualTag: tag,
+ ns: v.str1,
+ structNs: v.str2,
+ fieldLen: uint8(len(fieldName)),
+ structfieldLen: uint8(len(structFieldName)),
+ value: fv.Interface(),
+ param: param,
+ kind: kind,
+ typ: fv.Type(),
+ },
+ )
+}
+
+// ReportValidationErrors reports ValidationErrors obtained from running validations within the Struct Level validation.
+//
+// NOTE: this function prepends the current namespace to the relative ones.
+func (v *validate) ReportValidationErrors(relativeNamespace, relativeStructNamespace string, errs ValidationErrors) {
+
+ var err *fieldError
+
+ for i := 0; i < len(errs); i++ {
+
+ err = errs[i].(*fieldError)
+ err.ns = string(append(append(v.ns, relativeNamespace...), err.ns...))
+ err.structNs = string(append(append(v.actualNs, relativeStructNamespace...), err.structNs...))
+
+ v.errs = append(v.errs, err)
+ }
+}
diff --git a/vendor/github.com/go-playground/validator/translations.go b/vendor/github.com/go-playground/validator/translations.go
new file mode 100644
index 0000000..4d9d75c
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/translations.go
@@ -0,0 +1,11 @@
+package validator
+
+import ut "github.com/go-playground/universal-translator"
+
+// TranslationFunc is the function type used to register or override
+// custom translations
+type TranslationFunc func(ut ut.Translator, fe FieldError) string
+
+// RegisterTranslationsFunc allows for registering of translations
+// for a 'ut.Translator' for use within the 'TranslationFunc'
+type RegisterTranslationsFunc func(ut ut.Translator) error
diff --git a/vendor/github.com/go-playground/validator/util.go b/vendor/github.com/go-playground/validator/util.go
new file mode 100644
index 0000000..71acbdc
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/util.go
@@ -0,0 +1,256 @@
+package validator
+
+import (
+ "reflect"
+ "strconv"
+ "strings"
+)
+
+// extractTypeInternal gets the actual underlying type of field value.
+// It will dive into pointers and customTypes, and return the
+// underlying value and its kind.
+func (v *validate) extractTypeInternal(current reflect.Value, nullable bool) (reflect.Value, reflect.Kind, bool) {
+
+BEGIN:
+ switch current.Kind() {
+ case reflect.Ptr:
+
+ nullable = true
+
+ if current.IsNil() {
+ return current, reflect.Ptr, nullable
+ }
+
+ current = current.Elem()
+ goto BEGIN
+
+ case reflect.Interface:
+
+ nullable = true
+
+ if current.IsNil() {
+ return current, reflect.Interface, nullable
+ }
+
+ current = current.Elem()
+ goto BEGIN
+
+ case reflect.Invalid:
+ return current, reflect.Invalid, nullable
+
+ default:
+
+ if v.v.hasCustomFuncs {
+
+ if fn, ok := v.v.customFuncs[current.Type()]; ok {
+ current = reflect.ValueOf(fn(current))
+ goto BEGIN
+ }
+ }
+
+ return current, current.Kind(), nullable
+ }
+}
+
+// getStructFieldOKInternal traverses a struct to retrieve a specific field denoted by the provided namespace and
+// returns the field, field kind and whether it was successful in retrieving the field at all.
+//
+// NOTE: when not successful ok will be false, this can happen when a nested struct is nil and so the field
+// could not be retrieved because it didn't exist.
+func (v *validate) getStructFieldOKInternal(val reflect.Value, namespace string) (current reflect.Value, kind reflect.Kind, nullable bool, found bool) {
+
+BEGIN:
+ current, kind, nullable = v.ExtractType(val)
+ if kind == reflect.Invalid {
+ return
+ }
+
+ if namespace == "" {
+ found = true
+ return
+ }
+
+ switch kind {
+
+ case reflect.Ptr, reflect.Interface:
+ return
+
+ case reflect.Struct:
+
+ typ := current.Type()
+ fld := namespace
+ var ns string
+
+ if typ != timeType {
+
+ idx := strings.Index(namespace, namespaceSeparator)
+
+ if idx != -1 {
+ fld = namespace[:idx]
+ ns = namespace[idx+1:]
+ } else {
+ ns = ""
+ }
+
+ bracketIdx := strings.Index(fld, leftBracket)
+ if bracketIdx != -1 {
+ fld = fld[:bracketIdx]
+
+ ns = namespace[bracketIdx:]
+ }
+
+ val = current.FieldByName(fld)
+ namespace = ns
+ goto BEGIN
+ }
+
+ case reflect.Array, reflect.Slice:
+ idx := strings.Index(namespace, leftBracket)
+ idx2 := strings.Index(namespace, rightBracket)
+
+ arrIdx, _ := strconv.Atoi(namespace[idx+1 : idx2])
+
+ if arrIdx >= current.Len() {
+ return
+ }
+
+ startIdx := idx2 + 1
+
+ if startIdx < len(namespace) {
+ if namespace[startIdx:startIdx+1] == namespaceSeparator {
+ startIdx++
+ }
+ }
+
+ val = current.Index(arrIdx)
+ namespace = namespace[startIdx:]
+ goto BEGIN
+
+ case reflect.Map:
+ idx := strings.Index(namespace, leftBracket) + 1
+ idx2 := strings.Index(namespace, rightBracket)
+
+ endIdx := idx2
+
+ if endIdx+1 < len(namespace) {
+ if namespace[endIdx+1:endIdx+2] == namespaceSeparator {
+ endIdx++
+ }
+ }
+
+ key := namespace[idx:idx2]
+
+ switch current.Type().Key().Kind() {
+ case reflect.Int:
+ i, _ := strconv.Atoi(key)
+ val = current.MapIndex(reflect.ValueOf(i))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Int8:
+ i, _ := strconv.ParseInt(key, 10, 8)
+ val = current.MapIndex(reflect.ValueOf(int8(i)))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Int16:
+ i, _ := strconv.ParseInt(key, 10, 16)
+ val = current.MapIndex(reflect.ValueOf(int16(i)))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Int32:
+ i, _ := strconv.ParseInt(key, 10, 32)
+ val = current.MapIndex(reflect.ValueOf(int32(i)))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Int64:
+ i, _ := strconv.ParseInt(key, 10, 64)
+ val = current.MapIndex(reflect.ValueOf(i))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Uint:
+ i, _ := strconv.ParseUint(key, 10, 0)
+ val = current.MapIndex(reflect.ValueOf(uint(i)))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Uint8:
+ i, _ := strconv.ParseUint(key, 10, 8)
+ val = current.MapIndex(reflect.ValueOf(uint8(i)))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Uint16:
+ i, _ := strconv.ParseUint(key, 10, 16)
+ val = current.MapIndex(reflect.ValueOf(uint16(i)))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Uint32:
+ i, _ := strconv.ParseUint(key, 10, 32)
+ val = current.MapIndex(reflect.ValueOf(uint32(i)))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Uint64:
+ i, _ := strconv.ParseUint(key, 10, 64)
+ val = current.MapIndex(reflect.ValueOf(i))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Float32:
+ f, _ := strconv.ParseFloat(key, 32)
+ val = current.MapIndex(reflect.ValueOf(float32(f)))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Float64:
+ f, _ := strconv.ParseFloat(key, 64)
+ val = current.MapIndex(reflect.ValueOf(f))
+ namespace = namespace[endIdx+1:]
+
+ case reflect.Bool:
+ b, _ := strconv.ParseBool(key)
+ val = current.MapIndex(reflect.ValueOf(b))
+ namespace = namespace[endIdx+1:]
+
+ // the remaining supported key kind is string
+ default:
+ val = current.MapIndex(reflect.ValueOf(key))
+ namespace = namespace[endIdx+1:]
+ }
+
+ goto BEGIN
+ }
+
+ // if got here there was more namespace, cannot go any deeper
+ panic("Invalid field namespace")
+}
+
+// asInt returns the parameter as an int64
+// or panics if it can't convert
+func asInt(param string) int64 {
+
+ i, err := strconv.ParseInt(param, 0, 64)
+ panicIf(err)
+
+ return i
+}
+
+// asUint returns the parameter as a uint64
+// or panics if it can't convert
+func asUint(param string) uint64 {
+
+ i, err := strconv.ParseUint(param, 0, 64)
+ panicIf(err)
+
+ return i
+}
+
+// asFloat returns the parameter as a float64
+// or panics if it can't convert
+func asFloat(param string) float64 {
+
+ i, err := strconv.ParseFloat(param, 64)
+ panicIf(err)
+
+ return i
+}
+
+func panicIf(err error) {
+ if err != nil {
+ panic(err.Error())
+ }
+}
diff --git a/vendor/github.com/go-playground/validator/validator.go b/vendor/github.com/go-playground/validator/validator.go
new file mode 100644
index 0000000..342e72e
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/validator.go
@@ -0,0 +1,477 @@
+package validator
+
+import (
+ "context"
+ "fmt"
+ "reflect"
+ "strconv"
+)
+
+// per validate construct
+type validate struct {
+ v *Validate
+ top reflect.Value
+ ns []byte
+ actualNs []byte
+ errs ValidationErrors
+ includeExclude map[string]struct{} // reset only if StructPartial or StructExcept are called, no need otherwise
+ ffn FilterFunc
+ slflParent reflect.Value // StructLevel & FieldLevel
+ slCurrent reflect.Value // StructLevel & FieldLevel
+ flField reflect.Value // StructLevel & FieldLevel
+ cf *cField // StructLevel & FieldLevel
+ ct *cTag // StructLevel & FieldLevel
+ misc []byte // misc reusable
+ str1 string // misc reusable
+ str2 string // misc reusable
+ fldIsPointer bool // StructLevel & FieldLevel
+ isPartial bool
+ hasExcludes bool
+}
+
+// parent and current will be the same on the first run of validateStruct
+func (v *validate) validateStruct(ctx context.Context, parent reflect.Value, current reflect.Value, typ reflect.Type, ns []byte, structNs []byte, ct *cTag) {
+
+ cs, ok := v.v.structCache.Get(typ)
+ if !ok {
+ cs = v.v.extractStructCache(current, typ.Name())
+ }
+
+ if len(ns) == 0 && len(cs.name) != 0 {
+
+ ns = append(ns, cs.name...)
+ ns = append(ns, '.')
+
+ structNs = append(structNs, cs.name...)
+ structNs = append(structNs, '.')
+ }
+
+ // ct is nil on the top level struct and on structs as fields that have no tag info,
+ // so validate the fields if ct is nil, or if it is not nil and the structonly tag isn't present
+ if ct == nil || ct.typeof != typeStructOnly {
+
+ var f *cField
+
+ for i := 0; i < len(cs.fields); i++ {
+
+ f = cs.fields[i]
+
+ if v.isPartial {
+
+ if v.ffn != nil {
+ // used with StructFiltered
+ if v.ffn(append(structNs, f.name...)) {
+ continue
+ }
+
+ } else {
+ // used with StructPartial & StructExcept
+ _, ok = v.includeExclude[string(append(structNs, f.name...))]
+
+ if (ok && v.hasExcludes) || (!ok && !v.hasExcludes) {
+ continue
+ }
+ }
+ }
+
+ v.traverseField(ctx, parent, current.Field(f.idx), ns, structNs, f, f.cTags)
+ }
+ }
+
+ // check for any struct level validations, after all field validations have already been checked.
+ // the first iteration will have no info about the nostructlevel tag, which is checked prior to
+ // calling the next iteration of validateStruct from traverseField.
+ if cs.fn != nil {
+
+ v.slflParent = parent
+ v.slCurrent = current
+ v.ns = ns
+ v.actualNs = structNs
+
+ cs.fn(ctx, v)
+ }
+}
+
+// traverseField validates any field, be it a struct or single field, ensures its validity and passes it along to be validated via its tag options
+func (v *validate) traverseField(ctx context.Context, parent reflect.Value, current reflect.Value, ns []byte, structNs []byte, cf *cField, ct *cTag) {
+ var typ reflect.Type
+ var kind reflect.Kind
+
+ current, kind, v.fldIsPointer = v.extractTypeInternal(current, false)
+
+ switch kind {
+ case reflect.Ptr, reflect.Interface, reflect.Invalid:
+
+ if ct == nil {
+ return
+ }
+
+ if ct.typeof == typeOmitEmpty || ct.typeof == typeIsDefault {
+ return
+ }
+
+ if ct.hasTag {
+ if kind == reflect.Invalid {
+ v.str1 = string(append(ns, cf.altName...))
+ if v.v.hasTagNameFunc {
+ v.str2 = string(append(structNs, cf.name...))
+ } else {
+ v.str2 = v.str1
+ }
+ v.errs = append(v.errs,
+ &fieldError{
+ v: v.v,
+ tag: ct.aliasTag,
+ actualTag: ct.tag,
+ ns: v.str1,
+ structNs: v.str2,
+ fieldLen: uint8(len(cf.altName)),
+ structfieldLen: uint8(len(cf.name)),
+ param: ct.param,
+ kind: kind,
+ },
+ )
+ return
+ }
+
+ v.str1 = string(append(ns, cf.altName...))
+ if v.v.hasTagNameFunc {
+ v.str2 = string(append(structNs, cf.name...))
+ } else {
+ v.str2 = v.str1
+ }
+ if !ct.runValidationWhenNil {
+ v.errs = append(v.errs,
+ &fieldError{
+ v: v.v,
+ tag: ct.aliasTag,
+ actualTag: ct.tag,
+ ns: v.str1,
+ structNs: v.str2,
+ fieldLen: uint8(len(cf.altName)),
+ structfieldLen: uint8(len(cf.name)),
+ value: current.Interface(),
+ param: ct.param,
+ kind: kind,
+ typ: current.Type(),
+ },
+ )
+ return
+ }
+ }
+
+ case reflect.Struct:
+
+ typ = current.Type()
+
+ if typ != timeType {
+
+ if ct != nil {
+
+ if ct.typeof == typeStructOnly {
+ goto CONTINUE
+ } else if ct.typeof == typeIsDefault {
+ // set Field Level fields
+ v.slflParent = parent
+ v.flField = current
+ v.cf = cf
+ v.ct = ct
+
+ if !ct.fn(ctx, v) {
+ v.str1 = string(append(ns, cf.altName...))
+
+ if v.v.hasTagNameFunc {
+ v.str2 = string(append(structNs, cf.name...))
+ } else {
+ v.str2 = v.str1
+ }
+
+ v.errs = append(v.errs,
+ &fieldError{
+ v: v.v,
+ tag: ct.aliasTag,
+ actualTag: ct.tag,
+ ns: v.str1,
+ structNs: v.str2,
+ fieldLen: uint8(len(cf.altName)),
+ structfieldLen: uint8(len(cf.name)),
+ value: current.Interface(),
+ param: ct.param,
+ kind: kind,
+ typ: typ,
+ },
+ )
+ return
+ }
+ }
+
+ ct = ct.next
+ }
+
+ if ct != nil && ct.typeof == typeNoStructLevel {
+ return
+ }
+
+ CONTINUE:
+ // if len == 0 then validating using 'Var' or 'VarWithValue'
+ // Var - doesn't make much sense to do it that way, should call 'Struct', but no harm...
+ // VarWithValue - this allows for validating each field within the struct against a specific value
+ // pretty handy in certain situations
+ if len(cf.name) > 0 {
+ ns = append(append(ns, cf.altName...), '.')
+ structNs = append(append(structNs, cf.name...), '.')
+ }
+
+ v.validateStruct(ctx, current, current, typ, ns, structNs, ct)
+ return
+ }
+ }
+
+ if !ct.hasTag {
+ return
+ }
+
+ typ = current.Type()
+
+OUTER:
+ for {
+ if ct == nil {
+ return
+ }
+
+ switch ct.typeof {
+
+ case typeOmitEmpty:
+
+ // set Field Level fields
+ v.slflParent = parent
+ v.flField = current
+ v.cf = cf
+ v.ct = ct
+
+ if !v.fldIsPointer && !hasValue(v) {
+ return
+ }
+
+ ct = ct.next
+ continue
+
+ case typeEndKeys:
+ return
+
+ case typeDive:
+
+ ct = ct.next
+
+ // traverse slice or map here
+ // or panic ;)
+ switch kind {
+ case reflect.Slice, reflect.Array:
+
+ var i64 int64
+ reusableCF := &cField{}
+
+ for i := 0; i < current.Len(); i++ {
+
+ i64 = int64(i)
+
+ v.misc = append(v.misc[0:0], cf.name...)
+ v.misc = append(v.misc, '[')
+ v.misc = strconv.AppendInt(v.misc, i64, 10)
+ v.misc = append(v.misc, ']')
+
+ reusableCF.name = string(v.misc)
+
+ if cf.namesEqual {
+ reusableCF.altName = reusableCF.name
+ } else {
+
+ v.misc = append(v.misc[0:0], cf.altName...)
+ v.misc = append(v.misc, '[')
+ v.misc = strconv.AppendInt(v.misc, i64, 10)
+ v.misc = append(v.misc, ']')
+
+ reusableCF.altName = string(v.misc)
+ }
+ v.traverseField(ctx, parent, current.Index(i), ns, structNs, reusableCF, ct)
+ }
+
+ case reflect.Map:
+
+ var pv string
+ reusableCF := &cField{}
+
+ for _, key := range current.MapKeys() {
+
+ pv = fmt.Sprintf("%v", key.Interface())
+
+ v.misc = append(v.misc[0:0], cf.name...)
+ v.misc = append(v.misc, '[')
+ v.misc = append(v.misc, pv...)
+ v.misc = append(v.misc, ']')
+
+ reusableCF.name = string(v.misc)
+
+ if cf.namesEqual {
+ reusableCF.altName = reusableCF.name
+ } else {
+ v.misc = append(v.misc[0:0], cf.altName...)
+ v.misc = append(v.misc, '[')
+ v.misc = append(v.misc, pv...)
+ v.misc = append(v.misc, ']')
+
+ reusableCF.altName = string(v.misc)
+ }
+
+ if ct != nil && ct.typeof == typeKeys && ct.keys != nil {
+ v.traverseField(ctx, parent, key, ns, structNs, reusableCF, ct.keys)
+ // can be nil when just keys being validated
+ if ct.next != nil {
+ v.traverseField(ctx, parent, current.MapIndex(key), ns, structNs, reusableCF, ct.next)
+ }
+ } else {
+ v.traverseField(ctx, parent, current.MapIndex(key), ns, structNs, reusableCF, ct)
+ }
+ }
+
+ default:
+ // throw error, if not a slice or map then should not have gotten here
+ // bad dive tag
+ panic("dive error! can't dive on a non slice or map")
+ }
+
+ return
+
+ case typeOr:
+
+ v.misc = v.misc[0:0]
+
+ for {
+
+ // set Field Level fields
+ v.slflParent = parent
+ v.flField = current
+ v.cf = cf
+ v.ct = ct
+
+ if ct.fn(ctx, v) {
+
+ // drain rest of the 'or' values, then continue or leave
+ for {
+
+ ct = ct.next
+
+ if ct == nil {
+ return
+ }
+
+ if ct.typeof != typeOr {
+ continue OUTER
+ }
+ }
+ }
+
+ v.misc = append(v.misc, '|')
+ v.misc = append(v.misc, ct.tag...)
+
+ if ct.hasParam {
+ v.misc = append(v.misc, '=')
+ v.misc = append(v.misc, ct.param...)
+ }
+
+ if ct.isBlockEnd || ct.next == nil {
+ // if we get here, no valid 'or' value and no more tags
+ v.str1 = string(append(ns, cf.altName...))
+
+ if v.v.hasTagNameFunc {
+ v.str2 = string(append(structNs, cf.name...))
+ } else {
+ v.str2 = v.str1
+ }
+
+ if ct.hasAlias {
+
+ v.errs = append(v.errs,
+ &fieldError{
+ v: v.v,
+ tag: ct.aliasTag,
+ actualTag: ct.actualAliasTag,
+ ns: v.str1,
+ structNs: v.str2,
+ fieldLen: uint8(len(cf.altName)),
+ structfieldLen: uint8(len(cf.name)),
+ value: current.Interface(),
+ param: ct.param,
+ kind: kind,
+ typ: typ,
+ },
+ )
+
+ } else {
+
+ tVal := string(v.misc)[1:]
+
+ v.errs = append(v.errs,
+ &fieldError{
+ v: v.v,
+ tag: tVal,
+ actualTag: tVal,
+ ns: v.str1,
+ structNs: v.str2,
+ fieldLen: uint8(len(cf.altName)),
+ structfieldLen: uint8(len(cf.name)),
+ value: current.Interface(),
+ param: ct.param,
+ kind: kind,
+ typ: typ,
+ },
+ )
+ }
+
+ return
+ }
+
+ ct = ct.next
+ }
+
+ default:
+
+ // set Field Level fields
+ v.slflParent = parent
+ v.flField = current
+ v.cf = cf
+ v.ct = ct
+
+ if !ct.fn(ctx, v) {
+
+ v.str1 = string(append(ns, cf.altName...))
+
+ if v.v.hasTagNameFunc {
+ v.str2 = string(append(structNs, cf.name...))
+ } else {
+ v.str2 = v.str1
+ }
+
+ v.errs = append(v.errs,
+ &fieldError{
+ v: v.v,
+ tag: ct.aliasTag,
+ actualTag: ct.tag,
+ ns: v.str1,
+ structNs: v.str2,
+ fieldLen: uint8(len(cf.altName)),
+ structfieldLen: uint8(len(cf.name)),
+ value: current.Interface(),
+ param: ct.param,
+ kind: kind,
+ typ: typ,
+ },
+ )
+
+ return
+ }
+ ct = ct.next
+ }
+ }
+
+}
diff --git a/vendor/github.com/go-playground/validator/validator_instance.go b/vendor/github.com/go-playground/validator/validator_instance.go
new file mode 100644
index 0000000..4a89d40
--- /dev/null
+++ b/vendor/github.com/go-playground/validator/validator_instance.go
@@ -0,0 +1,615 @@
+package validator
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+ "sync"
+ "time"
+
+ ut "github.com/go-playground/universal-translator"
+)
+
+const (
+ defaultTagName = "validate"
+ utf8HexComma = "0x2C"
+ utf8Pipe = "0x7C"
+ tagSeparator = ","
+ orSeparator = "|"
+ tagKeySeparator = "="
+ structOnlyTag = "structonly"
+ noStructLevelTag = "nostructlevel"
+ omitempty = "omitempty"
+ isdefault = "isdefault"
+ requiredWithoutAllTag = "required_without_all"
+ requiredWithoutTag = "required_without"
+ requiredWithTag = "required_with"
+ requiredWithAllTag = "required_with_all"
+ skipValidationTag = "-"
+ diveTag = "dive"
+ keysTag = "keys"
+ endKeysTag = "endkeys"
+ requiredTag = "required"
+ namespaceSeparator = "."
+ leftBracket = "["
+ rightBracket = "]"
+ restrictedTagChars = ".[],|=+()`~!@#$%^&*\\\"/?<>{}"
+ restrictedAliasErr = "Alias '%s' either contains restricted characters or is the same as a restricted tag needed for normal operation"
+ restrictedTagErr = "Tag '%s' either contains restricted characters or is the same as a restricted tag needed for normal operation"
+)
+
+var (
+ timeType = reflect.TypeOf(time.Time{})
+ defaultCField = &cField{namesEqual: true}
+)
+
+// FilterFunc is the type used to filter fields using the
+// StructFiltered(...) function.
+// returning true results in the field being filtered/skipped from
+// validation
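+//
+// eg. a sketch that skips any field whose namespace ends in "Internal"
+// (the suffix is illustrative only):
+//
+//	err := validate.StructFiltered(user, func(ns []byte) bool {
+//		return bytes.HasSuffix(ns, []byte("Internal"))
+//	})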
+type FilterFunc func(ns []byte) bool
+
+// CustomTypeFunc allows for overriding or adding custom field type handler functions
+// field = field value of the type to return a value to be validated
+// example: Valuer from the sql driver; see https://golang.org/src/database/sql/driver/types.go?s=1210:1293#L29
+type CustomTypeFunc func(field reflect.Value) interface{}
+
+// TagNameFunc allows for adding of a custom tag name parser
+type TagNameFunc func(field reflect.StructField) string
+
+type internalValidationFuncWrapper struct {
+ fn FuncCtx
+ runValidatinOnNil bool
+}
+
+// Validate contains the validator settings and cache
+type Validate struct {
+ tagName string
+ pool *sync.Pool
+ hasCustomFuncs bool
+ hasTagNameFunc bool
+ tagNameFunc TagNameFunc
+ structLevelFuncs map[reflect.Type]StructLevelFuncCtx
+ customFuncs map[reflect.Type]CustomTypeFunc
+ aliases map[string]string
+ validations map[string]internalValidationFuncWrapper
+ transTagFunc map[ut.Translator]map[string]TranslationFunc // map[]map[]TranslationFunc
+ tagCache *tagCache
+ structCache *structCache
+}
+
+// New returns a new instance of 'validate' with sane defaults.
+func New() *Validate {
+
+ tc := new(tagCache)
+ tc.m.Store(make(map[string]*cTag))
+
+ sc := new(structCache)
+ sc.m.Store(make(map[reflect.Type]*cStruct))
+
+ v := &Validate{
+ tagName: defaultTagName,
+ aliases: make(map[string]string, len(bakedInAliases)),
+ validations: make(map[string]internalValidationFuncWrapper, len(bakedInValidators)),
+ tagCache: tc,
+ structCache: sc,
+ }
+
+ // must copy alias validators for separate validations to be used in each validator instance
+ for k, val := range bakedInAliases {
+ v.RegisterAlias(k, val)
+ }
+
+ // must copy validators for separate validations to be used in each instance
+ for k, val := range bakedInValidators {
+
+ switch k {
+ // these require that the validation should run even if the value is nil; omitempty still overrides this behaviour
+ case requiredWithTag, requiredWithAllTag, requiredWithoutTag, requiredWithoutAllTag:
+ _ = v.registerValidation(k, wrapFunc(val), true, true)
+ default:
+ // no need to error check here, baked in will always be valid
+ _ = v.registerValidation(k, wrapFunc(val), true, false)
+ }
+ }
+
+ v.pool = &sync.Pool{
+ New: func() interface{} {
+ return &validate{
+ v: v,
+ ns: make([]byte, 0, 64),
+ actualNs: make([]byte, 0, 64),
+ misc: make([]byte, 32),
+ }
+ },
+ }
+
+ return v
+}
+
+// SetTagName allows for changing of the default tag name of 'validate'
+func (v *Validate) SetTagName(name string) {
+ v.tagName = name
+}
+
+// RegisterTagNameFunc registers a function to get alternate names for StructFields.
+//
+// eg. to use the names which have been specified for JSON representations of structs, rather than normal Go field names:
+//
+// validate.RegisterTagNameFunc(func(fld reflect.StructField) string {
+// name := strings.SplitN(fld.Tag.Get("json"), ",", 2)[0]
+// if name == "-" {
+// return ""
+// }
+// return name
+// })
+func (v *Validate) RegisterTagNameFunc(fn TagNameFunc) {
+ v.tagNameFunc = fn
+ v.hasTagNameFunc = true
+}
+
+// RegisterValidation adds a validation with the given tag
+//
+// NOTES:
+// - if the key already exists, the previous validation function will be replaced.
+// - this method is not thread-safe; it is intended that these all be registered prior to any validation
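+//
+// eg. a minimal sketch (the tag name "notzero" is illustrative only):
+//
+//	_ = validate.RegisterValidation("notzero", func(fl validator.FieldLevel) bool {
+//		return !fl.Field().IsZero()
+//	})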
+func (v *Validate) RegisterValidation(tag string, fn Func, callValidationEvenIfNull ...bool) error {
+ return v.RegisterValidationCtx(tag, wrapFunc(fn), callValidationEvenIfNull...)
+}
+
+// RegisterValidationCtx does the same as RegisterValidation but accepts a FuncCtx validation
+// allowing context.Context validation support.
+func (v *Validate) RegisterValidationCtx(tag string, fn FuncCtx, callValidationEvenIfNull ...bool) error {
+ var nilCheckable bool
+ if len(callValidationEvenIfNull) > 0 {
+ nilCheckable = callValidationEvenIfNull[0]
+ }
+ return v.registerValidation(tag, fn, false, nilCheckable)
+}
+
+func (v *Validate) registerValidation(tag string, fn FuncCtx, bakedIn bool, nilCheckable bool) error {
+ if len(tag) == 0 {
+ return errors.New("Function Key cannot be empty")
+ }
+
+ if fn == nil {
+ return errors.New("Function cannot be empty")
+ }
+
+ _, ok := restrictedTags[tag]
+ if !bakedIn && (ok || strings.ContainsAny(tag, restrictedTagChars)) {
+ panic(fmt.Sprintf(restrictedTagErr, tag))
+ }
+ v.validations[tag] = internalValidationFuncWrapper{fn: fn, runValidatinOnNil: nilCheckable}
+ return nil
+}
+
+// RegisterAlias registers a mapping of a single validation tag that
+// defines a common or complex set of validation(s) to simplify adding validation
+// to structs.
+//
+// NOTE: this function is not thread-safe; it is intended that these all be registered prior to any validation
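+//
+// eg. the built in "iscolor" alias is registered this way:
+//
+//	validate.RegisterAlias("iscolor", "hexcolor|rgb|rgba|hsl|hsla")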
+func (v *Validate) RegisterAlias(alias, tags string) {
+
+ _, ok := restrictedTags[alias]
+
+ if ok || strings.ContainsAny(alias, restrictedTagChars) {
+ panic(fmt.Sprintf(restrictedAliasErr, alias))
+ }
+
+ v.aliases[alias] = tags
+}
+
+// RegisterStructValidation registers a StructLevelFunc against a number of types.
+//
+// NOTE:
+// - this method is not thread-safe; it is intended that these all be registered prior to any validation
+func (v *Validate) RegisterStructValidation(fn StructLevelFunc, types ...interface{}) {
+ v.RegisterStructValidationCtx(wrapStructLevelFunc(fn), types...)
+}
+
+// RegisterStructValidationCtx registers a StructLevelFuncCtx against a number of types and allows passing
+// of contextual validation information via context.Context.
+//
+// NOTE:
+// - this method is not thread-safe; it is intended that these all be registered prior to any validation
+func (v *Validate) RegisterStructValidationCtx(fn StructLevelFuncCtx, types ...interface{}) {
+
+ if v.structLevelFuncs == nil {
+ v.structLevelFuncs = make(map[reflect.Type]StructLevelFuncCtx)
+ }
+
+ for _, t := range types {
+ tv := reflect.ValueOf(t)
+ if tv.Kind() == reflect.Ptr {
+ t = reflect.Indirect(tv).Interface()
+ }
+
+ v.structLevelFuncs[reflect.TypeOf(t)] = fn
+ }
+}
+
+// RegisterCustomTypeFunc registers a CustomTypeFunc against a number of types
+//
+// NOTE: this method is not thread-safe; it is intended that these all be registered prior to any validation
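+//
+// eg. a sketch for database/sql null types via the driver.Valuer interface
+// (imports of database/sql and database/sql/driver assumed):
+//
+//	validate.RegisterCustomTypeFunc(func(field reflect.Value) interface{} {
+//		if valuer, ok := field.Interface().(driver.Valuer); ok {
+//			if val, err := valuer.Value(); err == nil {
+//				return val
+//			}
+//		}
+//		return nil
+//	}, sql.NullString{}, sql.NullInt64{})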
+func (v *Validate) RegisterCustomTypeFunc(fn CustomTypeFunc, types ...interface{}) {
+
+ if v.customFuncs == nil {
+ v.customFuncs = make(map[reflect.Type]CustomTypeFunc)
+ }
+
+ for _, t := range types {
+ v.customFuncs[reflect.TypeOf(t)] = fn
+ }
+
+ v.hasCustomFuncs = true
+}
+
+// RegisterTranslation registers translations against the provided tag.
+func (v *Validate) RegisterTranslation(tag string, trans ut.Translator, registerFn RegisterTranslationsFunc, translationFn TranslationFunc) (err error) {
+
+ if v.transTagFunc == nil {
+ v.transTagFunc = make(map[ut.Translator]map[string]TranslationFunc)
+ }
+
+ if err = registerFn(trans); err != nil {
+ return
+ }
+
+ m, ok := v.transTagFunc[trans]
+ if !ok {
+ m = make(map[string]TranslationFunc)
+ v.transTagFunc[trans] = m
+ }
+
+ m[tag] = translationFn
+
+ return
+}
+
+// Struct validates a struct's exposed fields, and automatically validates nested structs, unless otherwise specified.
+//
+// It returns InvalidValidationError for bad values passed in and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil eg. err.(validator.ValidationErrors) to access the array of errors.
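+//
+// eg. a sketch that assumes a valid struct was passed in (a bad value would
+// instead yield an *InvalidValidationError):
+//
+//	if err := validate.Struct(user); err != nil {
+//		for _, fe := range err.(validator.ValidationErrors) {
+//			fmt.Println(fe.Namespace(), fe.Tag())
+//		}
+//	}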
+func (v *Validate) Struct(s interface{}) error {
+ return v.StructCtx(context.Background(), s)
+}
+
+// StructCtx validates a struct's exposed fields, and automatically validates nested structs, unless otherwise specified
+// and also allows passing of context.Context for contextual validation information.
+//
+// It returns InvalidValidationError for bad values passed in and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil eg. err.(validator.ValidationErrors) to access the array of errors.
+func (v *Validate) StructCtx(ctx context.Context, s interface{}) (err error) {
+
+ val := reflect.ValueOf(s)
+ top := val
+
+ if val.Kind() == reflect.Ptr && !val.IsNil() {
+ val = val.Elem()
+ }
+
+ if val.Kind() != reflect.Struct || val.Type() == timeType {
+ return &InvalidValidationError{Type: reflect.TypeOf(s)}
+ }
+
+ // good to validate
+ vd := v.pool.Get().(*validate)
+ vd.top = top
+ vd.isPartial = false
+ // vd.hasExcludes = false // only need to reset in StructPartial and StructExcept
+
+ vd.validateStruct(ctx, top, val, val.Type(), vd.ns[0:0], vd.actualNs[0:0], nil)
+
+ if len(vd.errs) > 0 {
+ err = vd.errs
+ vd.errs = nil
+ }
+
+ v.pool.Put(vd)
+
+ return
+}
+
+// StructFiltered validates a struct's exposed fields that pass the FilterFunc check, and automatically validates
+// nested structs, unless otherwise specified.
+//
+// It returns InvalidValidationError for bad values passed in and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil eg. err.(validator.ValidationErrors) to access the array of errors.
+func (v *Validate) StructFiltered(s interface{}, fn FilterFunc) error {
+ return v.StructFilteredCtx(context.Background(), s, fn)
+}
+
+// StructFilteredCtx validates a struct's exposed fields that pass the FilterFunc check, and automatically validates
+// nested structs, unless otherwise specified and also allows passing of contextual validation information via
+// context.Context
+//
+// It returns InvalidValidationError for bad values passed in and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil eg. err.(validator.ValidationErrors) to access the array of errors.
+func (v *Validate) StructFilteredCtx(ctx context.Context, s interface{}, fn FilterFunc) (err error) {
+ val := reflect.ValueOf(s)
+ top := val
+
+ if val.Kind() == reflect.Ptr && !val.IsNil() {
+ val = val.Elem()
+ }
+
+ if val.Kind() != reflect.Struct || val.Type() == timeType {
+ return &InvalidValidationError{Type: reflect.TypeOf(s)}
+ }
+
+ // good to validate
+ vd := v.pool.Get().(*validate)
+ vd.top = top
+ vd.isPartial = true
+ vd.ffn = fn
+ // vd.hasExcludes = false // only need to reset in StructPartial and StructExcept
+
+ vd.validateStruct(ctx, top, val, val.Type(), vd.ns[0:0], vd.actualNs[0:0], nil)
+
+ if len(vd.errs) > 0 {
+ err = vd.errs
+ vd.errs = nil
+ }
+
+ v.pool.Put(vd)
+
+ return
+}
+
+// StructPartial validates the fields passed in only, ignoring all others.
+// Fields may be provided in a namespaced fashion relative to the struct provided
+// eg. NestedStruct.Field or NestedArrayField[0].Struct.Name
+//
+// It returns InvalidValidationError for bad values passed in and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil eg. err.(validator.ValidationErrors) to access the array of errors.
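+//
+// eg. (the struct and field names are illustrative only):
+//
+//	err := validate.StructPartial(user, "FirstName", "Address.City")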
+func (v *Validate) StructPartial(s interface{}, fields ...string) error {
+ return v.StructPartialCtx(context.Background(), s, fields...)
+}
+
+// StructPartialCtx validates only the fields passed in, ignoring all others, and allows passing of contextual
+// validation information via context.Context.
+// Fields may be provided in a namespaced fashion relative to the struct provided,
+// e.g. NestedStruct.Field or NestedArrayField[0].Struct.Name
+//
+// It returns InvalidValidationError for bad values passed in, and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil, e.g. err.(validator.ValidationErrors), to access the array of errors.
+func (v *Validate) StructPartialCtx(ctx context.Context, s interface{}, fields ...string) (err error) {
+ val := reflect.ValueOf(s)
+ top := val
+
+ if val.Kind() == reflect.Ptr && !val.IsNil() {
+ val = val.Elem()
+ }
+
+ if val.Kind() != reflect.Struct || val.Type() == timeType {
+ return &InvalidValidationError{Type: reflect.TypeOf(s)}
+ }
+
+ // good to validate
+ vd := v.pool.Get().(*validate)
+ vd.top = top
+ vd.isPartial = true
+ vd.ffn = nil
+ vd.hasExcludes = false
+ vd.includeExclude = make(map[string]struct{})
+
+ typ := val.Type()
+ name := typ.Name()
+
+ for _, k := range fields {
+
+ flds := strings.Split(k, namespaceSeparator)
+ if len(flds) > 0 {
+
+ vd.misc = append(vd.misc[0:0], name...)
+ vd.misc = append(vd.misc, '.')
+
+ for _, s := range flds {
+
+ idx := strings.Index(s, leftBracket)
+
+ if idx != -1 {
+ for idx != -1 {
+ vd.misc = append(vd.misc, s[:idx]...)
+ vd.includeExclude[string(vd.misc)] = struct{}{}
+
+ idx2 := strings.Index(s, rightBracket)
+ idx2++
+ vd.misc = append(vd.misc, s[idx:idx2]...)
+ vd.includeExclude[string(vd.misc)] = struct{}{}
+ s = s[idx2:]
+ idx = strings.Index(s, leftBracket)
+ }
+ } else {
+
+ vd.misc = append(vd.misc, s...)
+ vd.includeExclude[string(vd.misc)] = struct{}{}
+ }
+
+ vd.misc = append(vd.misc, '.')
+ }
+ }
+ }
+
+ vd.validateStruct(ctx, top, val, typ, vd.ns[0:0], vd.actualNs[0:0], nil)
+
+ if len(vd.errs) > 0 {
+ err = vd.errs
+ vd.errs = nil
+ }
+
+ v.pool.Put(vd)
+
+ return
+}
+
+// StructExcept validates all fields except the ones passed in.
+// Fields may be provided in a namespaced fashion relative to the struct provided,
+// i.e. NestedStruct.Field or NestedArrayField[0].Struct.Name
+//
+// It returns InvalidValidationError for bad values passed in, and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil, e.g. err.(validator.ValidationErrors), to access the array of errors.
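+//
+// For example (illustrative; 'user' is assumed to be a populated struct):
+//
+// err := validate.StructExcept(user, "Age", "Address.PostCode")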
+func (v *Validate) StructExcept(s interface{}, fields ...string) error {
+ return v.StructExceptCtx(context.Background(), s, fields...)
+}
+
+// StructExceptCtx validates all fields except the ones passed in, and allows passing of contextual
+// validation information via context.Context.
+// Fields may be provided in a namespaced fashion relative to the struct provided,
+// i.e. NestedStruct.Field or NestedArrayField[0].Struct.Name
+//
+// It returns InvalidValidationError for bad values passed in, and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil, e.g. err.(validator.ValidationErrors), to access the array of errors.
+func (v *Validate) StructExceptCtx(ctx context.Context, s interface{}, fields ...string) (err error) {
+ val := reflect.ValueOf(s)
+ top := val
+
+ if val.Kind() == reflect.Ptr && !val.IsNil() {
+ val = val.Elem()
+ }
+
+ if val.Kind() != reflect.Struct || val.Type() == timeType {
+ return &InvalidValidationError{Type: reflect.TypeOf(s)}
+ }
+
+ // good to validate
+ vd := v.pool.Get().(*validate)
+ vd.top = top
+ vd.isPartial = true
+ vd.ffn = nil
+ vd.hasExcludes = true
+ vd.includeExclude = make(map[string]struct{})
+
+ typ := val.Type()
+ name := typ.Name()
+
+ for _, key := range fields {
+
+ vd.misc = vd.misc[0:0]
+
+ if len(name) > 0 {
+ vd.misc = append(vd.misc, name...)
+ vd.misc = append(vd.misc, '.')
+ }
+
+ vd.misc = append(vd.misc, key...)
+ vd.includeExclude[string(vd.misc)] = struct{}{}
+ }
+
+ vd.validateStruct(ctx, top, val, typ, vd.ns[0:0], vd.actualNs[0:0], nil)
+
+ if len(vd.errs) > 0 {
+ err = vd.errs
+ vd.errs = nil
+ }
+
+ v.pool.Put(vd)
+
+ return
+}
+
+// Var validates a single variable using tag style validation.
+// e.g.
+// var i int
+// validate.Var(i, "gt=1,lt=10")
+//
+// WARNING: a struct can be passed for validation, e.g. time.Time is a struct, or
+// you may have a custom type with a registered custom type handler, so this must be
+// allowed; however, unforeseen validations will occur if you try to validate a
+// struct that is meant to be passed to 'validate.Struct'.
+//
+// It returns InvalidValidationError for bad values passed in, and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil, e.g. err.(validator.ValidationErrors), to access the array of errors.
+// Validating Array, Slice and map fields may produce more than one error.
+func (v *Validate) Var(field interface{}, tag string) error {
+ return v.VarCtx(context.Background(), field, tag)
+}
+
+// VarCtx validates a single variable using tag style validation, and allows passing of contextual
+// validation information via context.Context.
+// e.g.
+// var i int
+// validate.VarCtx(ctx, i, "gt=1,lt=10")
+//
+// WARNING: a struct can be passed for validation, e.g. time.Time is a struct, or
+// you may have a custom type with a registered custom type handler, so this must be
+// allowed; however, unforeseen validations will occur if you try to validate a
+// struct that is meant to be passed to 'validate.Struct'.
+//
+// It returns InvalidValidationError for bad values passed in, and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil, e.g. err.(validator.ValidationErrors), to access the array of errors.
+// Validating Array, Slice and map fields may produce more than one error.
+func (v *Validate) VarCtx(ctx context.Context, field interface{}, tag string) (err error) {
+ if len(tag) == 0 || tag == skipValidationTag {
+ return nil
+ }
+
+ ctag := v.fetchCacheTag(tag)
+ val := reflect.ValueOf(field)
+ vd := v.pool.Get().(*validate)
+ vd.top = val
+ vd.isPartial = false
+ vd.traverseField(ctx, val, val, vd.ns[0:0], vd.actualNs[0:0], defaultCField, ctag)
+
+ if len(vd.errs) > 0 {
+ err = vd.errs
+ vd.errs = nil
+ }
+ v.pool.Put(vd)
+ return
+}
+
+// VarWithValue validates a single variable against another variable/field's value using tag style validation.
+// e.g.
+// s1 := "abcd"
+// s2 := "abcd"
+// validate.VarWithValue(s1, s2, "eqcsfield") // returns nil
+//
+// WARNING: a struct can be passed for validation, e.g. time.Time is a struct, or
+// you may have a custom type with a registered custom type handler, so this must be
+// allowed; however, unforeseen validations will occur if you try to validate a
+// struct that is meant to be passed to 'validate.Struct'.
+//
+// It returns InvalidValidationError for bad values passed in, and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil, e.g. err.(validator.ValidationErrors), to access the array of errors.
+// Validating Array, Slice and map fields may produce more than one error.
+func (v *Validate) VarWithValue(field interface{}, other interface{}, tag string) error {
+ return v.VarWithValueCtx(context.Background(), field, other, tag)
+}
+
+// VarWithValueCtx validates a single variable against another variable/field's value using tag style validation,
+// and allows passing of contextual validation information via context.Context.
+// e.g.
+// s1 := "abcd"
+// s2 := "abcd"
+// validate.VarWithValueCtx(ctx, s1, s2, "eqcsfield") // returns nil
+//
+// WARNING: a struct can be passed for validation, e.g. time.Time is a struct, or
+// you may have a custom type with a registered custom type handler, so this must be
+// allowed; however, unforeseen validations will occur if you try to validate a
+// struct that is meant to be passed to 'validate.Struct'.
+//
+// It returns InvalidValidationError for bad values passed in, and nil or ValidationErrors as error otherwise.
+// You will need to assert the error if it's not nil, e.g. err.(validator.ValidationErrors), to access the array of errors.
+// Validating Array, Slice and map fields may produce more than one error.
+func (v *Validate) VarWithValueCtx(ctx context.Context, field interface{}, other interface{}, tag string) (err error) {
+ if len(tag) == 0 || tag == skipValidationTag {
+ return nil
+ }
+ ctag := v.fetchCacheTag(tag)
+ otherVal := reflect.ValueOf(other)
+ vd := v.pool.Get().(*validate)
+ vd.top = otherVal
+ vd.isPartial = false
+ vd.traverseField(ctx, otherVal, reflect.ValueOf(field), vd.ns[0:0], vd.actualNs[0:0], defaultCField, ctag)
+
+ if len(vd.errs) > 0 {
+ err = vd.errs
+ vd.errs = nil
+ }
+ v.pool.Put(vd)
+ return
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/.editorconfig b/vendor/github.com/gofiber/fiber/v2/.editorconfig
new file mode 100644
index 0000000..6a4ec76
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/.editorconfig
@@ -0,0 +1,8 @@
+; This file is for unifying the coding style for different editors and IDEs.
+; More information at http://editorconfig.org
+; This style originates from https://github.com/fewagency/best-practices
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
diff --git a/vendor/github.com/gofiber/fiber/v2/.gitattributes b/vendor/github.com/gofiber/fiber/v2/.gitattributes
new file mode 100644
index 0000000..963a68e
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/.gitattributes
@@ -0,0 +1,12 @@
+# Handle line endings automatically for files detected as text
+# and leave all files detected as binary untouched.
+* text=auto eol=lf
+
+# Force batch scripts to always use CRLF line endings so that if a repo is accessed
+# in Windows via a file share from Linux, the scripts will work.
+*.{cmd,[cC][mM][dD]} text eol=crlf
+*.{bat,[bB][aA][tT]} text eol=crlf
+
+# Force bash scripts to always use LF line endings so that if a repo is accessed
+# in Unix via a file share from Windows, the scripts will work.
+*.sh text eol=lf
diff --git a/vendor/github.com/gofiber/fiber/v2/.gitignore b/vendor/github.com/gofiber/fiber/v2/.gitignore
new file mode 100644
index 0000000..119b111
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/.gitignore
@@ -0,0 +1,30 @@
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+*.tmp
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# IDE files
+.vscode
+.DS_Store
+.idea
+
+# Misc
+*.fiber.gz
+*.fasthttp.gz
+*.pprof
+*.workspace
+
+# Dependencies
+/vendor/
+vendor/
+vendor
+/Godeps/
\ No newline at end of file
diff --git a/vendor/github.com/gofiber/fiber/v2/.golangci.yml b/vendor/github.com/gofiber/fiber/v2/.golangci.yml
new file mode 100644
index 0000000..c58d525
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/.golangci.yml
@@ -0,0 +1,197 @@
+# Created based on v1.51.0
+# NOTE: Keep this in sync with the version in .github/workflows/linter.yml
+
+run:
+ modules-download-mode: readonly
+ skip-dirs-use-default: false
+ skip-dirs:
+ - internal
+
+output:
+ sort-results: true
+
+linters-settings:
+ errcheck:
+ check-type-assertions: true
+ check-blank: true
+ disable-default-exclusions: true
+
+ errchkjson:
+ report-no-exported: true
+
+ exhaustive:
+ default-signifies-exhaustive: true
+
+ forbidigo:
+ forbid:
+ - ^(fmt\.Print(|f|ln)|print|println)$
+ - 'http\.Default(Client|Transport)'
+ # TODO: Eventually enable these patterns
+ # - 'time\.Sleep'
+ # - 'panic'
+
+ gocritic:
+ disabled-checks:
+ - ifElseChain
+
+ gofumpt:
+ module-path: github.com/gofiber/fiber
+ extra-rules: true
+
+ gosec:
+ config:
+ global:
+ audit: true
+
+ govet:
+ check-shadowing: true
+ enable-all: true
+ disable:
+ - shadow
+ - fieldalignment
+ - loopclosure
+
+ grouper:
+ import-require-single-import: true
+ import-require-grouping: true
+
+ misspell:
+ locale: US
+
+ nolintlint:
+ require-explanation: true
+ require-specific: true
+
+ nonamedreturns:
+ report-error-in-defer: true
+
+ predeclared:
+ q: true
+
+ promlinter:
+ strict: true
+
+ revive:
+ enable-all-rules: true
+ rules:
+ # Provided by gomnd linter
+ - name: add-constant
+ disabled: true
+ - name: argument-limit
+ disabled: true
+ # Provided by bidichk
+ - name: banned-characters
+ disabled: true
+ - name: cognitive-complexity
+ disabled: true
+ - name: cyclomatic
+ disabled: true
+ - name: early-return
+ severity: warning
+ disabled: true
+ - name: exported
+ disabled: true
+ - name: file-header
+ disabled: true
+ - name: function-result-limit
+ disabled: true
+ - name: function-length
+ disabled: true
+ - name: line-length-limit
+ disabled: true
+ - name: max-public-structs
+ disabled: true
+ - name: modifies-parameter
+ disabled: true
+ - name: nested-structs
+ disabled: true
+ - name: package-comments
+ disabled: true
+
+ stylecheck:
+ checks:
+ - all
+ - -ST1000
+ - -ST1020
+ - -ST1021
+ - -ST1022
+
+ tagliatelle:
+ case:
+ rules:
+ json: snake
+
+ #tenv:
+ # all: true
+
+ #unparam:
+ # check-exported: true
+
+ wrapcheck:
+ ignorePackageGlobs:
+ - github.com/gofiber/fiber/*
+ - github.com/valyala/fasthttp
+
+issues:
+ exclude-use-default: false
+
+linters:
+ enable:
+ - asasalint
+ - asciicheck
+ - bidichk
+ - bodyclose
+ - containedctx
+ - contextcheck
+ - depguard
+ - dogsled
+ - durationcheck
+ - errcheck
+ - errchkjson
+ - errname
+ - errorlint
+ - execinquery
+ - exhaustive
+ - exportloopref
+ - forbidigo
+ - forcetypeassert
+ - goconst
+ - gocritic
+ - gofmt
+ - gofumpt
+ - goimports
+ - gomoddirectives
+ - goprintffuncname
+ - gosec
+ - gosimple
+ - govet
+ - grouper
+ - loggercheck
+ - misspell
+ - nakedret
+ - nilerr
+ - nilnil
+ - noctx
+ - nolintlint
+ - nonamedreturns
+ - nosprintfhostport
+ - predeclared
+ - promlinter
+ - reassign
+ - revive
+ - rowserrcheck
+ - sqlclosecheck
+ - staticcheck
+ - stylecheck
+ - tagliatelle
+ # - testpackage # TODO: Enable once https://github.com/gofiber/fiber/issues/2252 is implemented
+ - thelper
+ # - tparallel # TODO: Enable once https://github.com/gofiber/fiber/issues/2254 is implemented
+ - typecheck
+ - unconvert
+ - unparam
+ - unused
+ - usestdlibvars
+ - wastedassign
+ - whitespace
+ - wrapcheck
diff --git a/vendor/github.com/gofiber/fiber/v2/LICENSE b/vendor/github.com/gofiber/fiber/v2/LICENSE
new file mode 100644
index 0000000..5188bb8
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019-present Fenny and Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/gofiber/fiber/v2/app.go b/vendor/github.com/gofiber/fiber/v2/app.go
new file mode 100644
index 0000000..47a01dd
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/app.go
@@ -0,0 +1,1120 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+// Package fiber is an Express inspired web framework built on top of Fasthttp,
+// the fastest HTTP engine for Go. Designed to ease things up for fast
+// development with zero memory allocation and performance in mind.
+package fiber
+
+import (
+ "bufio"
+ "context"
+ "encoding/json"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "net"
+ "net/http"
+ "net/http/httputil"
+ "reflect"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/gofiber/fiber/v2/log"
+ "github.com/gofiber/fiber/v2/utils"
+
+ "github.com/valyala/fasthttp"
+)
+
+// Version of current fiber package
+const Version = "2.51.0"
+
+// Handler defines a function to serve HTTP requests.
+type Handler = func(*Ctx) error
+
+// Map is a shortcut for map[string]interface{}, useful for JSON returns
+type Map map[string]interface{}
+
+// Storage interface for communicating with different database/key-value
+// providers
+type Storage interface {
+ // Get gets the value for the given key.
+ // `nil, nil` is returned when the key does not exist
+ Get(key string) ([]byte, error)
+
+ // Set stores the given value for the given key along
+ // with an expiration value, 0 means no expiration.
+ // Empty key or value will be ignored without an error.
+ Set(key string, val []byte, exp time.Duration) error
+
+ // Delete deletes the value for the given key.
+	// It returns no error if the storage does not contain the key.
+	Delete(key string) error
+
+	// Reset resets the storage and deletes all keys.
+ Reset() error
+
+ // Close closes the storage and will stop any running garbage
+ // collectors and open connections.
+ Close() error
+}
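+
+// A usage sketch for the Storage interface (illustrative only; "store" stands for any
+// concrete implementation):
+//
+//	if err := store.Set("session:123", []byte("payload"), 10*time.Minute); err != nil {
+//	    // handle the error
+//	}
+//	val, err := store.Get("missing") // val == nil, err == nil when the key does not exist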
+
+// ErrorHandler defines a function that will process all errors
+// returned from any handlers in the stack
+//
+// cfg := fiber.Config{}
+// cfg.ErrorHandler = func(c *Ctx, err error) error {
+// code := StatusInternalServerError
+// var e *fiber.Error
+// if errors.As(err, &e) {
+// code = e.Code
+// }
+// c.Set(HeaderContentType, MIMETextPlainCharsetUTF8)
+// return c.Status(code).SendString(err.Error())
+// }
+// app := fiber.New(cfg)
+type ErrorHandler = func(*Ctx, error) error
+
+// Error represents an error that occurred while handling a request.
+type Error struct {
+ Code int `json:"code"`
+ Message string `json:"message"`
+}
+
+// App denotes the Fiber application.
+type App struct {
+ mutex sync.Mutex
+ // Route stack divided by HTTP methods
+ stack [][]*Route
+ // Route stack divided by HTTP methods and route prefixes
+ treeStack []map[string][]*Route
+ // contains the information if the route stack has been changed to build the optimized tree
+ routesRefreshed bool
+ // Amount of registered routes
+ routesCount uint32
+ // Amount of registered handlers
+ handlersCount uint32
+ // Ctx pool
+ pool sync.Pool
+ // Fasthttp server
+ server *fasthttp.Server
+ // App config
+ config Config
+ // Converts string to a byte slice
+ getBytes func(s string) (b []byte)
+ // Converts byte slice to a string
+ getString func(b []byte) string
+ // Hooks
+ hooks *Hooks
+ // Latest route & group
+ latestRoute *Route
+ // TLS handler
+ tlsHandler *TLSHandler
+ // Mount fields
+ mountFields *mountFields
+ // Indicates if the value was explicitly configured
+ configured Config
+}
+
+// Config is a struct holding the server settings.
+type Config struct {
+ // When set to true, this will spawn multiple Go processes listening on the same port.
+ //
+ // Default: false
+ Prefork bool `json:"prefork"`
+
+ // Enables the "Server: value" HTTP header.
+ //
+ // Default: ""
+ ServerHeader string `json:"server_header"`
+
+ // When set to true, the router treats "/foo" and "/foo/" as different.
+ // By default this is disabled and both "/foo" and "/foo/" will execute the same handler.
+ //
+ // Default: false
+ StrictRouting bool `json:"strict_routing"`
+
+ // When set to true, enables case sensitive routing.
+ // E.g. "/FoO" and "/foo" are treated as different routes.
+ // By default this is disabled and both "/FoO" and "/foo" will execute the same handler.
+ //
+ // Default: false
+ CaseSensitive bool `json:"case_sensitive"`
+
+ // When set to true, this relinquishes the 0-allocation promise in certain
+ // cases in order to access the handler values (e.g. request bodies) in an
+ // immutable fashion so that these values are available even if you return
+	// from the handler.
+ //
+ // Default: false
+ Immutable bool `json:"immutable"`
+
+ // When set to true, converts all encoded characters in the route back
+ // before setting the path for the context, so that the routing,
+ // the returning of the current url from the context `ctx.Path()`
+ // and the parameters `ctx.Params(%key%)` with decoded characters will work
+ //
+ // Default: false
+ UnescapePath bool `json:"unescape_path"`
+
+ // Enable or disable ETag header generation, since both weak and strong etags are generated
+ // using the same hashing method (CRC-32). Weak ETags are the default when enabled.
+ //
+ // Default: false
+ ETag bool `json:"etag"`
+
+ // Max body size that the server accepts.
+ // -1 will decline any body size
+ //
+ // Default: 4 * 1024 * 1024
+ BodyLimit int `json:"body_limit"`
+
+ // Maximum number of concurrent connections.
+ //
+ // Default: 256 * 1024
+ Concurrency int `json:"concurrency"`
+
+ // Views is the interface that wraps the Render function.
+ //
+ // Default: nil
+ Views Views `json:"-"`
+
+	// ViewsLayout is the global layout for all templates rendered until overridden in the Render function.
+ //
+ // Default: ""
+ ViewsLayout string `json:"views_layout"`
+
+	// PassLocalsToViews enables passing the locals set on a fiber.Ctx to the template engine.
+ //
+ // Default: false
+ PassLocalsToViews bool `json:"pass_locals_to_views"`
+
+ // The amount of time allowed to read the full request including body.
+ // It is reset after the request handler has returned.
+ // The connection's read deadline is reset when the connection opens.
+ //
+ // Default: unlimited
+ ReadTimeout time.Duration `json:"read_timeout"`
+
+ // The maximum duration before timing out writes of the response.
+ // It is reset after the request handler has returned.
+ //
+ // Default: unlimited
+ WriteTimeout time.Duration `json:"write_timeout"`
+
+ // The maximum amount of time to wait for the next request when keep-alive is enabled.
+ // If IdleTimeout is zero, the value of ReadTimeout is used.
+ //
+ // Default: unlimited
+ IdleTimeout time.Duration `json:"idle_timeout"`
+
+ // Per-connection buffer size for requests' reading.
+ // This also limits the maximum header size.
+ // Increase this buffer if your clients send multi-KB RequestURIs
+ // and/or multi-KB headers (for example, BIG cookies).
+ //
+ // Default: 4096
+ ReadBufferSize int `json:"read_buffer_size"`
+
+ // Per-connection buffer size for responses' writing.
+ //
+ // Default: 4096
+ WriteBufferSize int `json:"write_buffer_size"`
+
+	// CompressedFileSuffix adds a suffix to the original file name and
+ // tries saving the resulting compressed file under the new file name.
+ //
+ // Default: ".fiber.gz"
+ CompressedFileSuffix string `json:"compressed_file_suffix"`
+
+	// ProxyHeader will enable c.IP() to return the value of the given header key.
+	// By default c.IP() will return the Remote IP from the TCP connection.
+ // This property can be useful if you are behind a load balancer: X-Forwarded-*
+ // NOTE: headers are easily spoofed and the detected IP addresses are unreliable.
+ //
+ // Default: ""
+ ProxyHeader string `json:"proxy_header"`
+
+ // GETOnly rejects all non-GET requests if set to true.
+ // This option is useful as anti-DoS protection for servers
+ // accepting only GET requests. The request size is limited
+ // by ReadBufferSize if GETOnly is set.
+ //
+ // Default: false
+ GETOnly bool `json:"get_only"`
+
+ // ErrorHandler is executed when an error is returned from fiber.Handler.
+ //
+ // Default: DefaultErrorHandler
+ ErrorHandler ErrorHandler `json:"-"`
+
+ // When set to true, disables keep-alive connections.
+ // The server will close incoming connections after sending the first response to client.
+ //
+ // Default: false
+ DisableKeepalive bool `json:"disable_keepalive"`
+
+ // When set to true, causes the default date header to be excluded from the response.
+ //
+ // Default: false
+ DisableDefaultDate bool `json:"disable_default_date"`
+
+ // When set to true, causes the default Content-Type header to be excluded from the response.
+ //
+ // Default: false
+ DisableDefaultContentType bool `json:"disable_default_content_type"`
+
+ // When set to true, disables header normalization.
+ // By default all header names are normalized: conteNT-tYPE -> Content-Type.
+ //
+ // Default: false
+ DisableHeaderNormalizing bool `json:"disable_header_normalizing"`
+
+ // When set to true, it will not print out the «Fiber» ASCII art and listening address.
+ //
+ // Default: false
+ DisableStartupMessage bool `json:"disable_startup_message"`
+
+	// AppName allows you to set the app name for the app.
+	//
+	// Default: ""
+ AppName string `json:"app_name"`
+
+ // StreamRequestBody enables request body streaming,
+	// and calls the handler sooner when the given body is
+	// larger than the current limit.
+ StreamRequestBody bool
+
+	// Will not pre-parse Multipart Form data if set to true.
+ //
+ // This option is useful for servers that desire to treat
+ // multipart form data as a binary blob, or choose when to parse the data.
+ //
+	// The server pre-parses multipart form data by default.
+ DisablePreParseMultipartForm bool
+
+ // Aggressively reduces memory usage at the cost of higher CPU usage
+ // if set to true.
+ //
+ // Try enabling this option only if the server consumes too much memory
+ // serving mostly idle keep-alive connections. This may reduce memory
+ // usage by more than 50%.
+ //
+ // Default: false
+ ReduceMemoryUsage bool `json:"reduce_memory_usage"`
+
+ // FEATURE: v2.3.x
+ // The router executes the same handler by default if StrictRouting or CaseSensitive is disabled.
+ // Enabling RedirectFixedPath will change this behavior into a client redirect to the original route path.
+ // Using the status code 301 for GET requests and 308 for all other request methods.
+ //
+ // Default: false
+ // RedirectFixedPath bool
+
+ // When set by an external client of Fiber it will use the provided implementation of a
+ // JSONMarshal
+ //
+ // Allowing for flexibility in using another json library for encoding
+ // Default: json.Marshal
+ JSONEncoder utils.JSONMarshal `json:"-"`
+
+ // When set by an external client of Fiber it will use the provided implementation of a
+ // JSONUnmarshal
+ //
+ // Allowing for flexibility in using another json library for decoding
+ // Default: json.Unmarshal
+ JSONDecoder utils.JSONUnmarshal `json:"-"`
+
+ // XMLEncoder set by an external client of Fiber it will use the provided implementation of a
+ // XMLMarshal
+ //
+ // Allowing for flexibility in using another XML library for encoding
+ // Default: xml.Marshal
+ XMLEncoder utils.XMLMarshal `json:"-"`
+
+ // Known networks are "tcp", "tcp4" (IPv4-only), "tcp6" (IPv6-only)
+	// WARNING: When prefork is set to true, only "tcp4" and "tcp6" can be chosen.
+ //
+ // Default: NetworkTCP4
+ Network string
+
+ // If you find yourself behind some sort of proxy, like a load balancer,
+ // then certain header information may be sent to you using special X-Forwarded-* headers or the Forwarded header.
+ // For example, the Host HTTP header is usually used to return the requested host.
+ // But when you’re behind a proxy, the actual host may be stored in an X-Forwarded-Host header.
+ //
+ // If you are behind a proxy, you should enable TrustedProxyCheck to prevent header spoofing.
+ // If you enable EnableTrustedProxyCheck and leave TrustedProxies empty Fiber will skip
+ // all headers that could be spoofed.
+	// If the request IP is in the TrustedProxies whitelist, then:
+	//   1. c.Protocol() gets its value from the X-Forwarded-Proto, X-Forwarded-Protocol, X-Forwarded-Ssl or X-Url-Scheme header
+	//   2. c.IP() gets its value from the ProxyHeader header.
+	//   3. c.Hostname() gets its value from the X-Forwarded-Host header
+	// But if the request IP is NOT in the TrustedProxies whitelist, then:
+	//   1. c.Protocol() WON'T get its value from the X-Forwarded-Proto, X-Forwarded-Protocol, X-Forwarded-Ssl or X-Url-Scheme header;
+	//      it will return https when the TLS connection is handled by the app, or http otherwise
+	//   2. c.IP() WON'T get its value from the ProxyHeader header; it will return RemoteIP() from the fasthttp context
+	//   3. c.Hostname() WON'T get its value from the X-Forwarded-Host header; fasthttp.Request.URI().Host()
+	//      will be used to get the hostname.
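+	//
+	// A minimal configuration sketch (illustrative only):
+	//
+	//	app := fiber.New(fiber.Config{
+	//	    EnableTrustedProxyCheck: true,
+	//	    TrustedProxies:          []string{"10.8.0.0/16", "192.168.1.1"},
+	//	    ProxyHeader:             fiber.HeaderXForwardedFor,
+	//	})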
+ //
+ // Default: false
+ EnableTrustedProxyCheck bool `json:"enable_trusted_proxy_check"`
+
+ // Read EnableTrustedProxyCheck doc.
+ //
+ // Default: []string
+ TrustedProxies []string `json:"trusted_proxies"`
+ trustedProxiesMap map[string]struct{}
+ trustedProxyRanges []*net.IPNet
+
+ // If set to true, c.IP() and c.IPs() will validate IP addresses before returning them.
+ // Also, c.IP() will return only the first valid IP rather than just the raw header
+ // WARNING: this has a performance cost associated with it.
+ //
+ // Default: false
+ EnableIPValidation bool `json:"enable_ip_validation"`
+
+ // If set to true, will print all routes with their method, path and handler.
+ // Default: false
+ EnablePrintRoutes bool `json:"enable_print_routes"`
+
+ // You can define custom color scheme. They'll be used for startup message, route list and some middlewares.
+ //
+ // Optional. Default: DefaultColors
+ ColorScheme Colors `json:"color_scheme"`
+
+	// RequestMethods provides customizability for HTTP methods. You can add/remove methods as you wish.
+ //
+ // Optional. Default: DefaultMethods
+ RequestMethods []string
+
+ // EnableSplittingOnParsers splits the query/body/header parameters by comma when it's true.
+ // For example, you can use it to parse multiple values from a query parameter like this:
+ // /api?foo=bar,baz == foo[]=bar&foo[]=baz
+ //
+ // Optional. Default: false
+ EnableSplittingOnParsers bool `json:"enable_splitting_on_parsers"`
+}
+
+// Static defines configuration options when defining static assets.
+type Static struct {
+ // When set to true, the server tries minimizing CPU usage by caching compressed files.
+ // This works differently than the github.com/gofiber/compression middleware.
+ // Optional. Default value false
+ Compress bool `json:"compress"`
+
+ // When set to true, enables byte range requests.
+ // Optional. Default value false
+ ByteRange bool `json:"byte_range"`
+
+ // When set to true, enables directory browsing.
+ // Optional. Default value false.
+ Browse bool `json:"browse"`
+
+ // When set to true, enables direct download.
+ // Optional. Default value false.
+ Download bool `json:"download"`
+
+ // The name of the index file for serving a directory.
+ // Optional. Default value "index.html".
+ Index string `json:"index"`
+
+ // Expiration duration for inactive file handlers.
+ // Use a negative time.Duration to disable it.
+ //
+ // Optional. Default value 10 * time.Second.
+ CacheDuration time.Duration `json:"cache_duration"`
+
+ // The value for the Cache-Control HTTP-header
+ // that is set on the file response. MaxAge is defined in seconds.
+ //
+ // Optional. Default value 0.
+ MaxAge int `json:"max_age"`
+
+ // ModifyResponse defines a function that allows you to alter the response.
+ //
+ // Optional. Default: nil
+ ModifyResponse Handler
+
+ // Next defines a function to skip this middleware when returned true.
+ //
+ // Optional. Default: nil
+ Next func(c *Ctx) bool
+}
+
+// RouteMessage contains the route information that is printed when the server starts.
+type RouteMessage struct {
+ name string
+ method string
+ path string
+ handlers string
+}
+
+// Default Config values
+const (
+ DefaultBodyLimit = 4 * 1024 * 1024
+ DefaultConcurrency = 256 * 1024
+ DefaultReadBufferSize = 4096
+ DefaultWriteBufferSize = 4096
+ DefaultCompressedFileSuffix = ".fiber.gz"
+)
+
+// HTTP methods enabled by default
+var DefaultMethods = []string{
+ MethodGet,
+ MethodHead,
+ MethodPost,
+ MethodPut,
+ MethodDelete,
+ MethodConnect,
+ MethodOptions,
+ MethodTrace,
+ MethodPatch,
+}
+
+// DefaultErrorHandler processes errors returned from handlers.
+func DefaultErrorHandler(c *Ctx, err error) error {
+ code := StatusInternalServerError
+ var e *Error
+ if errors.As(err, &e) {
+ code = e.Code
+ }
+ c.Set(HeaderContentType, MIMETextPlainCharsetUTF8)
+ return c.Status(code).SendString(err.Error())
+}
+
+// New creates a new Fiber named instance.
+//
+// app := fiber.New()
+//
+// You can pass optional configuration options by passing a Config struct:
+//
+// app := fiber.New(fiber.Config{
+// Prefork: true,
+// ServerHeader: "Fiber",
+// })
+func New(config ...Config) *App {
+ // Create a new app
+ app := &App{
+ // Create Ctx pool
+ pool: sync.Pool{
+ New: func() interface{} {
+ return new(Ctx)
+ },
+ },
+ // Create config
+ config: Config{},
+ getBytes: utils.UnsafeBytes,
+ getString: utils.UnsafeString,
+ latestRoute: &Route{},
+ }
+
+ // Define hooks
+ app.hooks = newHooks(app)
+
+ // Define mountFields
+ app.mountFields = newMountFields(app)
+
+ // Override config if provided
+ if len(config) > 0 {
+ app.config = config[0]
+ }
+
+ // Initialize configured before defaults are set
+ app.configured = app.config
+
+ if app.config.ETag {
+ if !IsChild() {
+ log.Warn("Config.ETag is deprecated since v2.0.6, please use 'middleware/etag'.")
+ }
+ }
+
+ // Override default values
+ if app.config.BodyLimit == 0 {
+ app.config.BodyLimit = DefaultBodyLimit
+ }
+ if app.config.Concurrency <= 0 {
+ app.config.Concurrency = DefaultConcurrency
+ }
+ if app.config.ReadBufferSize <= 0 {
+ app.config.ReadBufferSize = DefaultReadBufferSize
+ }
+ if app.config.WriteBufferSize <= 0 {
+ app.config.WriteBufferSize = DefaultWriteBufferSize
+ }
+ if app.config.CompressedFileSuffix == "" {
+ app.config.CompressedFileSuffix = DefaultCompressedFileSuffix
+ }
+ if app.config.Immutable {
+ app.getBytes, app.getString = getBytesImmutable, getStringImmutable
+ }
+
+ if app.config.ErrorHandler == nil {
+ app.config.ErrorHandler = DefaultErrorHandler
+ }
+
+ if app.config.JSONEncoder == nil {
+ app.config.JSONEncoder = json.Marshal
+ }
+ if app.config.JSONDecoder == nil {
+ app.config.JSONDecoder = json.Unmarshal
+ }
+ if app.config.XMLEncoder == nil {
+ app.config.XMLEncoder = xml.Marshal
+ }
+ if app.config.Network == "" {
+ app.config.Network = NetworkTCP4
+ }
+ if len(app.config.RequestMethods) == 0 {
+ app.config.RequestMethods = DefaultMethods
+ }
+
+ app.config.trustedProxiesMap = make(map[string]struct{}, len(app.config.TrustedProxies))
+ for _, ipAddress := range app.config.TrustedProxies {
+ app.handleTrustedProxy(ipAddress)
+ }
+
+ // Create router stack
+ app.stack = make([][]*Route, len(app.config.RequestMethods))
+ app.treeStack = make([]map[string][]*Route, len(app.config.RequestMethods))
+
+ // Override colors
+ app.config.ColorScheme = defaultColors(app.config.ColorScheme)
+
+ // Init app
+ app.init()
+
+ // Return app
+ return app
+}
+
+// Adds an IP address to trustedProxyRanges or trustedProxiesMap based on whether it is an IP range or not.
+func (app *App) handleTrustedProxy(ipAddress string) {
+ if strings.Contains(ipAddress, "/") {
+ _, ipNet, err := net.ParseCIDR(ipAddress)
+ if err != nil {
+ log.Warnf("IP range %q could not be parsed: %v", ipAddress, err)
+ } else {
+ app.config.trustedProxyRanges = append(app.config.trustedProxyRanges, ipNet)
+ }
+ } else {
+ app.config.trustedProxiesMap[ipAddress] = struct{}{}
+ }
+}
+
+// SetTLSHandler sets the TLS handler, so you can use ClientHelloInfo when using TLS with Listener.
+func (app *App) SetTLSHandler(tlsHandler *TLSHandler) {
+ // Attach the tlsHandler to the config
+ app.mutex.Lock()
+ app.tlsHandler = tlsHandler
+ app.mutex.Unlock()
+}
+
+// Name assigns a name to the most recently registered route.
+func (app *App) Name(name string) Router {
+ app.mutex.Lock()
+ defer app.mutex.Unlock()
+
+ for _, routes := range app.stack {
+ for _, route := range routes {
+ isMethodValid := route.Method == app.latestRoute.Method || app.latestRoute.use ||
+ (app.latestRoute.Method == MethodGet && route.Method == MethodHead)
+
+ if route.Path == app.latestRoute.Path && isMethodValid {
+ route.Name = name
+ if route.group != nil {
+ route.Name = route.group.name + route.Name
+ }
+ }
+ }
+ }
+
+ if err := app.hooks.executeOnNameHooks(*app.latestRoute); err != nil {
+ panic(err)
+ }
+
+ return app
+}
+
+// GetRoute gets a route by name.
+func (app *App) GetRoute(name string) Route {
+ for _, routes := range app.stack {
+ for _, route := range routes {
+ if route.Name == name {
+ return *route
+ }
+ }
+ }
+
+ return Route{}
+}
+
+// GetRoutes gets all routes. When filterUseOption is true, the routes registered by middleware are filtered out.
+func (app *App) GetRoutes(filterUseOption ...bool) []Route {
+ var rs []Route
+ var filterUse bool
+ if len(filterUseOption) != 0 {
+ filterUse = filterUseOption[0]
+ }
+ for _, routes := range app.stack {
+ for _, route := range routes {
+ if filterUse && route.use {
+ continue
+ }
+ rs = append(rs, *route)
+ }
+ }
+ return rs
+}
+
+// Use registers a middleware route that will match requests
+// with the provided prefix (which is optional and defaults to "/").
+//
+// app.Use(func(c *fiber.Ctx) error {
+// return c.Next()
+// })
+// app.Use("/api", func(c *fiber.Ctx) error {
+// return c.Next()
+// })
+// app.Use("/api", handler, func(c *fiber.Ctx) error {
+// return c.Next()
+// })
+//
+// This method will match all HTTP verbs: GET, POST, PUT, HEAD etc...
+func (app *App) Use(args ...interface{}) Router {
+ var prefix string
+ var prefixes []string
+ var handlers []Handler
+
+ for i := 0; i < len(args); i++ {
+ switch arg := args[i].(type) {
+ case string:
+ prefix = arg
+ case []string:
+ prefixes = arg
+ case Handler:
+ handlers = append(handlers, arg)
+ default:
+ panic(fmt.Sprintf("use: invalid handler %v\n", reflect.TypeOf(arg)))
+ }
+ }
+
+ if len(prefixes) == 0 {
+ prefixes = append(prefixes, prefix)
+ }
+
+ for _, prefix := range prefixes {
+ app.register(methodUse, prefix, nil, handlers...)
+ }
+
+ return app
+}
+
+// Get registers a route for GET methods that requests a representation
+// of the specified resource. Requests using GET should only retrieve data.
+func (app *App) Get(path string, handlers ...Handler) Router {
+ return app.Head(path, handlers...).Add(MethodGet, path, handlers...)
+}
+
+// Head registers a route for HEAD methods that asks for a response identical
+// to that of a GET request, but without the response body.
+func (app *App) Head(path string, handlers ...Handler) Router {
+ return app.Add(MethodHead, path, handlers...)
+}
+
+// Post registers a route for POST methods that is used to submit an entity to the
+// specified resource, often causing a change in state or side effects on the server.
+func (app *App) Post(path string, handlers ...Handler) Router {
+ return app.Add(MethodPost, path, handlers...)
+}
+
+// Put registers a route for PUT methods that replaces all current representations
+// of the target resource with the request payload.
+func (app *App) Put(path string, handlers ...Handler) Router {
+ return app.Add(MethodPut, path, handlers...)
+}
+
+// Delete registers a route for DELETE methods that deletes the specified resource.
+func (app *App) Delete(path string, handlers ...Handler) Router {
+ return app.Add(MethodDelete, path, handlers...)
+}
+
+// Connect registers a route for CONNECT methods that establishes a tunnel to the
+// server identified by the target resource.
+func (app *App) Connect(path string, handlers ...Handler) Router {
+ return app.Add(MethodConnect, path, handlers...)
+}
+
+// Options registers a route for OPTIONS methods that is used to describe the
+// communication options for the target resource.
+func (app *App) Options(path string, handlers ...Handler) Router {
+ return app.Add(MethodOptions, path, handlers...)
+}
+
+// Trace registers a route for TRACE methods that performs a message loop-back
+// test along the path to the target resource.
+func (app *App) Trace(path string, handlers ...Handler) Router {
+ return app.Add(MethodTrace, path, handlers...)
+}
+
+// Patch registers a route for PATCH methods that is used to apply partial
+// modifications to a resource.
+func (app *App) Patch(path string, handlers ...Handler) Router {
+ return app.Add(MethodPatch, path, handlers...)
+}
+
+// Add allows you to specify an HTTP method to register a route.
+func (app *App) Add(method, path string, handlers ...Handler) Router {
+ app.register(method, path, nil, handlers...)
+
+ return app
+}
+
+// Static will create a file server serving static files
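+//
+// A usage sketch (illustrative only):
+//
+//	app.Static("/", "./public", fiber.Static{
+//	    Compress:  true,
+//	    ByteRange: true,
+//	    Index:     "index.html",
+//	})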
+func (app *App) Static(prefix, root string, config ...Static) Router {
+ app.registerStatic(prefix, root, config...)
+
+ return app
+}
+
+// All will register the handler on all HTTP methods
+func (app *App) All(path string, handlers ...Handler) Router {
+ for _, method := range app.config.RequestMethods {
+ _ = app.Add(method, path, handlers...)
+ }
+ return app
+}
+
+// Group is used for Routes with common prefix to define a new sub-router with optional middleware.
+//
+// api := app.Group("/api")
+// api.Get("/users", handler)
+func (app *App) Group(prefix string, handlers ...Handler) Router {
+ grp := &Group{Prefix: prefix, app: app}
+ if len(handlers) > 0 {
+ app.register(methodUse, prefix, grp, handlers...)
+ }
+ if err := app.hooks.executeOnGroupHooks(*grp); err != nil {
+ panic(err)
+ }
+
+ return grp
+}
+
+// Route is used to define routes with a common prefix inside the common function.
+// It uses the Group method to define a new sub-router.
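+//
+// For example (illustrative only; 'handler' is assumed to be defined elsewhere):
+//
+//	app.Route("/api", func(api fiber.Router) {
+//	    api.Get("/users", handler)
+//	}, "api.")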
+func (app *App) Route(prefix string, fn func(router Router), name ...string) Router {
+ // Create new group
+ group := app.Group(prefix)
+ if len(name) > 0 {
+ group.Name(name[0])
+ }
+
+ // Define routes
+ fn(group)
+
+ return group
+}
+
+// Error makes it compatible with the `error` interface.
+func (e *Error) Error() string {
+ return e.Message
+}
+
+// NewError creates a new Error instance with an optional message
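+//
+// For example (illustrative): fiber.NewError(fiber.StatusNotFound, "resource not found")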
+func NewError(code int, message ...string) *Error {
+ err := &Error{
+ Code: code,
+ Message: utils.StatusMessage(code),
+ }
+ if len(message) > 0 {
+ err.Message = message[0]
+ }
+ return err
+}
+
+// Config returns the app config as a value (read-only).
+func (app *App) Config() Config {
+ return app.config
+}
+
+// Handler returns the server handler.
+func (app *App) Handler() fasthttp.RequestHandler { //revive:disable-line:confusing-naming // Having both a Handler() (uppercase) and a handler() (lowercase) is fine. TODO: Use nolint:revive directive instead. See https://github.com/golangci/golangci-lint/issues/3476
+ // prepare the server for the start
+ app.startupProcess()
+ return app.handler
+}
+
+// Stack returns the raw router stack.
+func (app *App) Stack() [][]*Route {
+ return app.stack
+}
+
+// HandlersCount returns the amount of registered handlers.
+func (app *App) HandlersCount() uint32 {
+ return app.handlersCount
+}
+
+// Shutdown gracefully shuts down the server without interrupting any active connections.
+// Shutdown works by first closing all open listeners and then waiting indefinitely for all connections to return to idle before shutting down.
+//
+// Make sure the program doesn't exit and waits instead for Shutdown to return.
+//
+// Shutdown does not close keepalive connections, so it's recommended to set ReadTimeout to something other than 0.
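+//
+// A shutdown-on-interrupt sketch (illustrative only; uses os/signal from the standard library):
+//
+//	c := make(chan os.Signal, 1)
+//	signal.Notify(c, os.Interrupt)
+//	go func() {
+//	    <-c
+//	    _ = app.Shutdown()
+//	}()
+//	_ = app.Listen(":3000")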
+func (app *App) Shutdown() error {
+ return app.ShutdownWithContext(context.Background())
+}
+
+// ShutdownWithTimeout gracefully shuts down the server without interrupting any active connections. However, if the timeout is exceeded,
+// ShutdownWithTimeout will forcefully close any active connections.
+// ShutdownWithTimeout works by first closing all open listeners and then waiting for all connections to return to idle before shutting down.
+//
+// Make sure the program doesn't exit and waits instead for ShutdownWithTimeout to return.
+//
+// ShutdownWithTimeout does not close keepalive connections, so it's recommended to set ReadTimeout to something other than 0.
+func (app *App) ShutdownWithTimeout(timeout time.Duration) error {
+ ctx, cancelFunc := context.WithTimeout(context.Background(), timeout)
+ defer cancelFunc()
+ return app.ShutdownWithContext(ctx)
+}
+
+// ShutdownWithContext shuts down the server including by force if the context's deadline is exceeded.
+//
+// Make sure the program doesn't exit and waits instead for ShutdownWithContext to return.
+//
+// ShutdownWithContext does not close keepalive connections, so it's recommended to set ReadTimeout to something other than 0.
+func (app *App) ShutdownWithContext(ctx context.Context) error {
+ if app.hooks != nil {
+ defer app.hooks.executeOnShutdownHooks()
+ }
+
+ app.mutex.Lock()
+ defer app.mutex.Unlock()
+ if app.server == nil {
+ return fmt.Errorf("shutdown: server is not running")
+ }
+ return app.server.ShutdownWithContext(ctx)
+}
+
+// Server returns the underlying fasthttp server
+func (app *App) Server() *fasthttp.Server {
+ return app.server
+}
+
+// Hooks returns the hook struct to register hooks.
+func (app *App) Hooks() *Hooks {
+ return app.hooks
+}
+
+// Test is used for internal debugging by passing a *http.Request.
+// Timeout is optional and defaults to 1s; passing -1 disables it completely.
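+//
+// A typical usage sketch in a test (illustrative only; uses net/http/httptest):
+//
+//	req := httptest.NewRequest("GET", "/", nil)
+//	resp, err := app.Test(req)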
+func (app *App) Test(req *http.Request, msTimeout ...int) (*http.Response, error) {
+ // Set timeout
+ timeout := 1000
+ if len(msTimeout) > 0 {
+ timeout = msTimeout[0]
+ }
+
+ // Add Content-Length if not provided with body
+ if req.Body != http.NoBody && req.Header.Get(HeaderContentLength) == "" {
+ req.Header.Add(HeaderContentLength, strconv.FormatInt(req.ContentLength, 10))
+ }
+
+ // Dump raw http request
+ dump, err := httputil.DumpRequest(req, true)
+ if err != nil {
+ return nil, fmt.Errorf("failed to dump request: %w", err)
+ }
+
+ // Create test connection
+ conn := new(testConn)
+
+ // Write raw http request
+ if _, err := conn.r.Write(dump); err != nil {
+ return nil, fmt.Errorf("failed to write: %w", err)
+ }
+ // prepare the server for the start
+ app.startupProcess()
+
+ // Serve conn to server
+ channel := make(chan error)
+ go func() {
+ var returned bool
+ defer func() {
+ if !returned {
+ channel <- fmt.Errorf("runtime.Goexit() called in handler or server panic")
+ }
+ }()
+
+ channel <- app.server.ServeConn(conn)
+ returned = true
+ }()
+
+ // Wait for callback
+ if timeout >= 0 {
+ // With timeout
+ select {
+ case err = <-channel:
+ case <-time.After(time.Duration(timeout) * time.Millisecond):
+ return nil, fmt.Errorf("test: timeout error %vms", timeout)
+ }
+ } else {
+ // Without timeout
+ err = <-channel
+ }
+
+ // Check for errors
+ if err != nil && !errors.Is(err, fasthttp.ErrGetOnly) {
+ return nil, err
+ }
+
+ // Read response
+ buffer := bufio.NewReader(&conn.w)
+
+ // Convert raw http response to *http.Response
+ res, err := http.ReadResponse(buffer, req)
+ if err != nil {
+ return nil, fmt.Errorf("failed to read response: %w", err)
+ }
+
+ return res, nil
+}
+
+type disableLogger struct{}
+
+func (*disableLogger) Printf(_ string, _ ...interface{}) {
+ // fmt.Println(fmt.Sprintf(format, args...))
+}
+
+func (app *App) init() *App {
+ // lock application
+ app.mutex.Lock()
+
+ // Only load templates if a view engine is specified
+ if app.config.Views != nil {
+ if err := app.config.Views.Load(); err != nil {
+ log.Warnf("failed to load views: %v", err)
+ }
+ }
+
+ // create fasthttp server
+ app.server = &fasthttp.Server{
+ Logger: &disableLogger{},
+ LogAllErrors: false,
+ ErrorHandler: app.serverErrorHandler,
+ }
+
+ // fasthttp server settings
+ app.server.Handler = app.handler
+ app.server.Name = app.config.ServerHeader
+ app.server.Concurrency = app.config.Concurrency
+ app.server.NoDefaultDate = app.config.DisableDefaultDate
+ app.server.NoDefaultContentType = app.config.DisableDefaultContentType
+ app.server.DisableHeaderNamesNormalizing = app.config.DisableHeaderNormalizing
+ app.server.DisableKeepalive = app.config.DisableKeepalive
+ app.server.MaxRequestBodySize = app.config.BodyLimit
+ app.server.NoDefaultServerHeader = app.config.ServerHeader == ""
+ app.server.ReadTimeout = app.config.ReadTimeout
+ app.server.WriteTimeout = app.config.WriteTimeout
+ app.server.IdleTimeout = app.config.IdleTimeout
+ app.server.ReadBufferSize = app.config.ReadBufferSize
+ app.server.WriteBufferSize = app.config.WriteBufferSize
+ app.server.GetOnly = app.config.GETOnly
+ app.server.ReduceMemoryUsage = app.config.ReduceMemoryUsage
+ app.server.StreamRequestBody = app.config.StreamRequestBody
+ app.server.DisablePreParseMultipartForm = app.config.DisablePreParseMultipartForm
+
+ // unlock application
+ app.mutex.Unlock()
+ return app
+}
+
+// ErrorHandler is the application's method in charge of finding the
+// appropriate error handler for the given request. It searches any mounted
+// sub-fibers by their prefixes and, if it finds a match, uses that
+// error handler. Otherwise it uses the error handler configured for
+// the app, which if not set is the DefaultErrorHandler.
+func (app *App) ErrorHandler(ctx *Ctx, err error) error {
+ var (
+ mountedErrHandler ErrorHandler
+ mountedPrefixParts int
+ )
+
+ for prefix, subApp := range app.mountFields.appList {
+ if prefix != "" && strings.HasPrefix(ctx.path, prefix) {
+ parts := len(strings.Split(prefix, "/"))
+ if mountedPrefixParts <= parts {
+ if subApp.configured.ErrorHandler != nil {
+ mountedErrHandler = subApp.config.ErrorHandler
+ }
+
+ mountedPrefixParts = parts
+ }
+ }
+ }
+
+ if mountedErrHandler != nil {
+ return mountedErrHandler(ctx, err)
+ }
+
+ return app.config.ErrorHandler(ctx, err)
+}
+
+// serverErrorHandler is a wrapper around the application's error handler method
+// used for the fasthttp server configuration. It maps a set of fasthttp errors to fiber
+// errors before calling the application's error handler method.
+func (app *App) serverErrorHandler(fctx *fasthttp.RequestCtx, err error) {
+ c := app.AcquireCtx(fctx)
+ defer app.ReleaseCtx(c)
+
+ var (
+ errNetOP *net.OpError
+ netErr net.Error
+ )
+
+ switch {
+ case errors.As(err, new(*fasthttp.ErrSmallBuffer)):
+ err = ErrRequestHeaderFieldsTooLarge
+ case errors.As(err, &errNetOP) && errNetOP.Timeout():
+ err = ErrRequestTimeout
+ case errors.As(err, &netErr):
+ err = ErrBadGateway
+ case errors.Is(err, fasthttp.ErrBodyTooLarge):
+ err = ErrRequestEntityTooLarge
+ case errors.Is(err, fasthttp.ErrGetOnly):
+ err = ErrMethodNotAllowed
+ case strings.Contains(err.Error(), "timeout"):
+ err = ErrRequestTimeout
+ default:
+ err = NewError(StatusBadRequest, err.Error())
+ }
+
+ if catch := app.ErrorHandler(c, err); catch != nil {
+ log.Errorf("serverErrorHandler: failed to call ErrorHandler: %v", catch)
+ _ = c.SendStatus(StatusInternalServerError) //nolint:errcheck // It is fine to ignore the error here
+ return
+ }
+}
+
+// startupProcess is the method that executes all the necessary processes just before the start of the server.
+func (app *App) startupProcess() *App {
+ app.mutex.Lock()
+ defer app.mutex.Unlock()
+
+ app.mountStartupProcess()
+
+ // build route tree stack
+ app.buildTree()
+
+ return app
+}
+
+// Run onListen hooks. If they return an error, panic.
+func (app *App) runOnListenHooks(listenData ListenData) {
+ if err := app.hooks.executeOnListenHooks(listenData); err != nil {
+ panic(err)
+ }
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/client.go b/vendor/github.com/gofiber/fiber/v2/client.go
new file mode 100644
index 0000000..ee191a6
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/client.go
@@ -0,0 +1,1021 @@
+package fiber
+
+import (
+ "bytes"
+ "crypto/tls"
+ "encoding/json"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "mime/multipart"
+ "os"
+ "path/filepath"
+ "strconv"
+ "sync"
+ "time"
+
+ "github.com/gofiber/fiber/v2/utils"
+
+ "github.com/valyala/fasthttp"
+)
+
+// Request represents an HTTP request.
+//
+// Copying Request instances is forbidden. Create new instances
+// and use CopyTo instead.
+//
+// Request instances MUST NOT be used from concurrently running goroutines.
+// Copied from fasthttp.
+type Request = fasthttp.Request
+
+// Response represents an HTTP response.
+//
+// Copying Response instances is forbidden. Create new instances
+// and use CopyTo instead.
+//
+// Response instances MUST NOT be used from concurrently running goroutines.
+// Copied from fasthttp.
+type Response = fasthttp.Response
+
+// Args represents query arguments.
+//
+// Copying Args instances is forbidden. Create new instances
+// and use CopyTo() instead.
+//
+// Args instances MUST NOT be used from concurrently running goroutines.
+// Copied from fasthttp.
+type Args = fasthttp.Args
+
+// RetryIfFunc is the signature of the retry-if function.
+// The Request argument is passed to RetryIfFunc if there are any request errors.
+// Copied from fasthttp.
+type RetryIfFunc = fasthttp.RetryIfFunc
+
+var defaultClient Client
+
+// Client implements an HTTP client.
+//
+// It is safe to call Client methods from concurrently running goroutines.
+type Client struct {
+ mutex sync.RWMutex
+ // UserAgent is used in User-Agent request header.
+ UserAgent string
+
+ // NoDefaultUserAgentHeader when set to true, causes the default
+ // User-Agent header to be excluded from the Request.
+ NoDefaultUserAgentHeader bool
+
+ // When set by an external client of Fiber it will use the provided implementation of a
+ // JSONMarshal
+ //
+ // Allowing for flexibility in using another json library for encoding
+ JSONEncoder utils.JSONMarshal
+
+ // When set by an external client of Fiber it will use the provided implementation of a
+ // JSONUnmarshal
+ //
+ // Allowing for flexibility in using another json library for decoding
+ JSONDecoder utils.JSONUnmarshal
+}
+
+// Get returns an agent with http method GET.
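+//
+// A typical usage sketch (illustrative only; Agent.String() sends the request and
+// returns the status code, body and any errors):
+//
+//	code, body, errs := fiber.Get("https://example.com").String()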
+func Get(url string) *Agent { return defaultClient.Get(url) }
+
+// Get returns an agent with http method GET.
+func (c *Client) Get(url string) *Agent {
+ return c.createAgent(MethodGet, url)
+}
+
+// Head returns an agent with http method HEAD.
+func Head(url string) *Agent { return defaultClient.Head(url) }
+
+// Head returns an agent with http method HEAD.
+func (c *Client) Head(url string) *Agent {
+ return c.createAgent(MethodHead, url)
+}
+
+// Post sends POST request to the given URL.
+func Post(url string) *Agent { return defaultClient.Post(url) }
+
+// Post sends POST request to the given URL.
+func (c *Client) Post(url string) *Agent {
+ return c.createAgent(MethodPost, url)
+}
+
+// Put sends PUT request to the given URL.
+func Put(url string) *Agent { return defaultClient.Put(url) }
+
+// Put sends PUT request to the given URL.
+func (c *Client) Put(url string) *Agent {
+ return c.createAgent(MethodPut, url)
+}
+
+// Patch sends PATCH request to the given URL.
+func Patch(url string) *Agent { return defaultClient.Patch(url) }
+
+// Patch sends PATCH request to the given URL.
+func (c *Client) Patch(url string) *Agent {
+ return c.createAgent(MethodPatch, url)
+}
+
+// Delete sends DELETE request to the given URL.
+func Delete(url string) *Agent { return defaultClient.Delete(url) }
+
+// Delete sends DELETE request to the given URL.
+func (c *Client) Delete(url string) *Agent {
+ return c.createAgent(MethodDelete, url)
+}
+
+func (c *Client) createAgent(method, url string) *Agent {
+ a := AcquireAgent()
+ a.req.Header.SetMethod(method)
+ a.req.SetRequestURI(url)
+
+ c.mutex.RLock()
+ a.Name = c.UserAgent
+ a.NoDefaultUserAgentHeader = c.NoDefaultUserAgentHeader
+ a.jsonDecoder = c.JSONDecoder
+ a.jsonEncoder = c.JSONEncoder
+ if a.jsonDecoder == nil {
+ a.jsonDecoder = json.Unmarshal
+ }
+ c.mutex.RUnlock()
+
+ if err := a.Parse(); err != nil {
+ a.errs = append(a.errs, err)
+ }
+
+ return a
+}
+
+// Agent is an object storing all request data for the client.
+// Agent instances MUST NOT be used from concurrently running goroutines.
+type Agent struct {
+ // Name is used in User-Agent request header.
+ Name string
+
+ // NoDefaultUserAgentHeader when set to true, causes the default
+ // User-Agent header to be excluded from the Request.
+ NoDefaultUserAgentHeader bool
+
+ // HostClient is an embedded fasthttp HostClient
+ *fasthttp.HostClient
+
+ req *Request
+ resp *Response
+ dest []byte
+ args *Args
+ timeout time.Duration
+ errs []error
+ formFiles []*FormFile
+ debugWriter io.Writer
+ mw multipartWriter
+ jsonEncoder utils.JSONMarshal
+ jsonDecoder utils.JSONUnmarshal
+ maxRedirectsCount int
+ boundary string
+ reuse bool
+ parsed bool
+}
+
+// Parse initializes URI and HostClient.
+func (a *Agent) Parse() error {
+ if a.parsed {
+ return nil
+ }
+ a.parsed = true
+
+ uri := a.req.URI()
+
+ var isTLS bool
+ scheme := uri.Scheme()
+ if bytes.Equal(scheme, []byte(schemeHTTPS)) {
+ isTLS = true
+ } else if !bytes.Equal(scheme, []byte(schemeHTTP)) {
+ return fmt.Errorf("unsupported protocol %q. http and https are supported", scheme)
+ }
+
+ name := a.Name
+ if name == "" && !a.NoDefaultUserAgentHeader {
+ name = defaultUserAgent
+ }
+
+ a.HostClient = &fasthttp.HostClient{
+ Addr: fasthttp.AddMissingPort(string(uri.Host()), isTLS),
+ Name: name,
+ NoDefaultUserAgentHeader: a.NoDefaultUserAgentHeader,
+ IsTLS: isTLS,
+ }
+
+ return nil
+}
+
+/************************** Header Setting **************************/
+
+// Set sets the given 'key: value' header.
+//
+// Use Add for setting multiple header values under the same key.
+func (a *Agent) Set(k, v string) *Agent {
+ a.req.Header.Set(k, v)
+
+ return a
+}
+
+// SetBytesK sets the given 'key: value' header.
+//
+// Use AddBytesK for setting multiple header values under the same key.
+func (a *Agent) SetBytesK(k []byte, v string) *Agent {
+ a.req.Header.SetBytesK(k, v)
+
+ return a
+}
+
+// SetBytesV sets the given 'key: value' header.
+//
+// Use AddBytesV for setting multiple header values under the same key.
+func (a *Agent) SetBytesV(k string, v []byte) *Agent {
+ a.req.Header.SetBytesV(k, v)
+
+ return a
+}
+
+// SetBytesKV sets the given 'key: value' header.
+//
+// Use AddBytesKV for setting multiple header values under the same key.
+func (a *Agent) SetBytesKV(k, v []byte) *Agent {
+ a.req.Header.SetBytesKV(k, v)
+
+ return a
+}
+
+// Add adds the given 'key: value' header.
+//
+// Multiple headers with the same key may be added with this function.
+// Use Set for setting a single header for the given key.
+func (a *Agent) Add(k, v string) *Agent {
+ a.req.Header.Add(k, v)
+
+ return a
+}
+
+// AddBytesK adds the given 'key: value' header.
+//
+// Multiple headers with the same key may be added with this function.
+// Use SetBytesK for setting a single header for the given key.
+func (a *Agent) AddBytesK(k []byte, v string) *Agent {
+ a.req.Header.AddBytesK(k, v)
+
+ return a
+}
+
+// AddBytesV adds the given 'key: value' header.
+//
+// Multiple headers with the same key may be added with this function.
+// Use SetBytesV for setting a single header for the given key.
+func (a *Agent) AddBytesV(k string, v []byte) *Agent {
+ a.req.Header.AddBytesV(k, v)
+
+ return a
+}
+
+// AddBytesKV adds the given 'key: value' header.
+//
+// Multiple headers with the same key may be added with this function.
+// Use SetBytesKV for setting a single header for the given key.
+func (a *Agent) AddBytesKV(k, v []byte) *Agent {
+ a.req.Header.AddBytesKV(k, v)
+
+ return a
+}
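+
+// Example contrasting Set and Add (illustrative sketch, not part of the
+// upstream source; header names and URL are hypothetical):
+//
+//	code, body, errs := fiber.Get("https://example.com").
+//		Set("X-Single", "v").  // overwrites any existing X-Single value
+//		Add("X-Multi", "v1").  // appends, so the key may repeat
+//		Add("X-Multi", "v2").
+//		String()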
+
+// ConnectionClose sets 'Connection: close' header.
+func (a *Agent) ConnectionClose() *Agent {
+ a.req.Header.SetConnectionClose()
+
+ return a
+}
+
+// UserAgent sets User-Agent header value.
+func (a *Agent) UserAgent(userAgent string) *Agent {
+ a.req.Header.SetUserAgent(userAgent)
+
+ return a
+}
+
+// UserAgentBytes sets User-Agent header value.
+func (a *Agent) UserAgentBytes(userAgent []byte) *Agent {
+ a.req.Header.SetUserAgentBytes(userAgent)
+
+ return a
+}
+
+// Cookie sets one 'key: value' cookie.
+func (a *Agent) Cookie(key, value string) *Agent {
+ a.req.Header.SetCookie(key, value)
+
+ return a
+}
+
+// CookieBytesK sets one 'key: value' cookie.
+func (a *Agent) CookieBytesK(key []byte, value string) *Agent {
+ a.req.Header.SetCookieBytesK(key, value)
+
+ return a
+}
+
+// CookieBytesKV sets one 'key: value' cookie.
+func (a *Agent) CookieBytesKV(key, value []byte) *Agent {
+ a.req.Header.SetCookieBytesKV(key, value)
+
+ return a
+}
+
+// Cookies sets multiple 'key: value' cookies.
+func (a *Agent) Cookies(kv ...string) *Agent {
+ for i := 1; i < len(kv); i += 2 {
+ a.req.Header.SetCookie(kv[i-1], kv[i])
+ }
+
+ return a
+}
+
+// CookiesBytesKV sets multiple 'key: value' cookies.
+func (a *Agent) CookiesBytesKV(kv ...[]byte) *Agent {
+ for i := 1; i < len(kv); i += 2 {
+ a.req.Header.SetCookieBytesKV(kv[i-1], kv[i])
+ }
+
+ return a
+}
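+
+// Example (illustrative sketch; cookie names are hypothetical): Cookies
+// consumes key/value pairs, so a trailing unpaired key is ignored.
+//
+//	agent.Cookies("session", "abc123", "theme", "dark")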
+
+// Referer sets Referer header value.
+func (a *Agent) Referer(referer string) *Agent {
+ a.req.Header.SetReferer(referer)
+
+ return a
+}
+
+// RefererBytes sets Referer header value.
+func (a *Agent) RefererBytes(referer []byte) *Agent {
+ a.req.Header.SetRefererBytes(referer)
+
+ return a
+}
+
+// ContentType sets Content-Type header value.
+func (a *Agent) ContentType(contentType string) *Agent {
+ a.req.Header.SetContentType(contentType)
+
+ return a
+}
+
+// ContentTypeBytes sets Content-Type header value.
+func (a *Agent) ContentTypeBytes(contentType []byte) *Agent {
+ a.req.Header.SetContentTypeBytes(contentType)
+
+ return a
+}
+
+/************************** End Header Setting **************************/
+
+/************************** URI Setting **************************/
+
+// Host sets host for the URI.
+func (a *Agent) Host(host string) *Agent {
+ a.req.URI().SetHost(host)
+
+ return a
+}
+
+// HostBytes sets host for the URI.
+func (a *Agent) HostBytes(host []byte) *Agent {
+ a.req.URI().SetHostBytes(host)
+
+ return a
+}
+
+// QueryString sets URI query string.
+func (a *Agent) QueryString(queryString string) *Agent {
+ a.req.URI().SetQueryString(queryString)
+
+ return a
+}
+
+// QueryStringBytes sets URI query string.
+func (a *Agent) QueryStringBytes(queryString []byte) *Agent {
+ a.req.URI().SetQueryStringBytes(queryString)
+
+ return a
+}
+
+// BasicAuth sets URI username and password.
+func (a *Agent) BasicAuth(username, password string) *Agent {
+ a.req.URI().SetUsername(username)
+ a.req.URI().SetPassword(password)
+
+ return a
+}
+
+// BasicAuthBytes sets URI username and password.
+func (a *Agent) BasicAuthBytes(username, password []byte) *Agent {
+ a.req.URI().SetUsernameBytes(username)
+ a.req.URI().SetPasswordBytes(password)
+
+ return a
+}
+
+/************************** End URI Setting **************************/
+
+/************************** Request Setting **************************/
+
+// BodyString sets request body.
+func (a *Agent) BodyString(bodyString string) *Agent {
+ a.req.SetBodyString(bodyString)
+
+ return a
+}
+
+// Body sets request body.
+func (a *Agent) Body(body []byte) *Agent {
+ a.req.SetBody(body)
+
+ return a
+}
+
+// BodyStream sets request body stream and, optionally, body size.
+//
+// If bodySize is >= 0, then the bodyStream must provide exactly bodySize bytes
+// before returning io.EOF.
+//
+// If bodySize < 0, then bodyStream is read until io.EOF.
+//
+// bodyStream.Close() is called after finishing reading all body data
+// if it implements io.Closer.
+//
+// Note that GET and HEAD requests cannot have a body.
+func (a *Agent) BodyStream(bodyStream io.Reader, bodySize int) *Agent {
+ a.req.SetBodyStream(bodyStream, bodySize)
+
+ return a
+}
+
+// JSON sends a JSON request.
+func (a *Agent) JSON(v interface{}, ctype ...string) *Agent {
+ if a.jsonEncoder == nil {
+ a.jsonEncoder = json.Marshal
+ }
+
+ if len(ctype) > 0 {
+ a.req.Header.SetContentType(ctype[0])
+ } else {
+ a.req.Header.SetContentType(MIMEApplicationJSON)
+ }
+
+ if body, err := a.jsonEncoder(v); err != nil {
+ a.errs = append(a.errs, err)
+ } else {
+ a.req.SetBody(body)
+ }
+
+ return a
+}
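+
+// Example (illustrative sketch, not part of the upstream source; the login
+// type and URL are hypothetical):
+//
+//	type login struct {
+//		User string `json:"user"`
+//		Pass string `json:"pass"`
+//	}
+//	code, body, errs := fiber.Post("https://example.com/login").
+//		JSON(login{User: "u", Pass: "p"}).
+//		String()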
+
+// XML sends an XML request.
+func (a *Agent) XML(v interface{}) *Agent {
+ a.req.Header.SetContentType(MIMEApplicationXML)
+
+ if body, err := xml.Marshal(v); err != nil {
+ a.errs = append(a.errs, err)
+ } else {
+ a.req.SetBody(body)
+ }
+
+ return a
+}
+
+// Form sends form request with body if args is non-nil.
+//
+// It is recommended to obtain args via AcquireArgs and to release them
+// manually in performance-critical code.
+func (a *Agent) Form(args *Args) *Agent {
+ a.req.Header.SetContentType(MIMEApplicationForm)
+
+ if args != nil {
+ a.req.SetBody(args.QueryString())
+ }
+
+ return a
+}
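+
+// Example (illustrative sketch, not part of the upstream source; the URL and
+// field name are hypothetical):
+//
+//	args := fiber.AcquireArgs()
+//	args.Set("name", "john")
+//	code, body, errs := fiber.Post("https://example.com/form").
+//		Form(args).
+//		String()
+//	fiber.ReleaseArgs(args)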
+
+// FormFile represents multipart form file
+type FormFile struct {
+ // Fieldname is form file's field name
+ Fieldname string
+ // Name is form file's name
+ Name string
+ // Content is form file's content
+ Content []byte
+ // autoRelease indicates whether the object, when acquired via
+ // AcquireFormFile, is automatically returned to the pool.
+ autoRelease bool
+}
+
+// FileData appends files for multipart form request.
+//
+// It is recommended to obtain formFile via AcquireFormFile and to release it
+// manually in performance-critical code.
+func (a *Agent) FileData(formFiles ...*FormFile) *Agent {
+ a.formFiles = append(a.formFiles, formFiles...)
+
+ return a
+}
+
+// SendFile reads file and appends it to multipart form request.
+func (a *Agent) SendFile(filename string, fieldname ...string) *Agent {
+ content, err := os.ReadFile(filepath.Clean(filename))
+ if err != nil {
+ a.errs = append(a.errs, err)
+ return a
+ }
+
+ ff := AcquireFormFile()
+ if len(fieldname) > 0 && fieldname[0] != "" {
+ ff.Fieldname = fieldname[0]
+ } else {
+ ff.Fieldname = "file" + strconv.Itoa(len(a.formFiles)+1)
+ }
+ ff.Name = filepath.Base(filename)
+ ff.Content = append(ff.Content, content...)
+ ff.autoRelease = true
+
+ a.formFiles = append(a.formFiles, ff)
+
+ return a
+}
+
+// SendFiles reads files and appends them to multipart form request.
+//
+// Examples:
+//
+// SendFiles("/path/to/file1", "fieldname1", "/path/to/file2")
+func (a *Agent) SendFiles(filenamesAndFieldnames ...string) *Agent {
+ pairs := len(filenamesAndFieldnames)
+ if pairs&1 == 1 {
+ filenamesAndFieldnames = append(filenamesAndFieldnames, "")
+ }
+
+ for i := 0; i < pairs; i += 2 {
+ a.SendFile(filenamesAndFieldnames[i], filenamesAndFieldnames[i+1])
+ }
+
+ return a
+}
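+
+// Example (illustrative sketch; paths are hypothetical): the second file has
+// no explicit field name, so it gets an auto-generated one such as "file2".
+//
+//	agent.SendFiles("/path/to/a.txt", "report", "/path/to/b.txt")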
+
+// Boundary sets boundary for multipart form request.
+func (a *Agent) Boundary(boundary string) *Agent {
+ a.boundary = boundary
+
+ return a
+}
+
+// MultipartForm sends multipart form request with k-v and files.
+//
+// It is recommended to obtain args via AcquireArgs and to release them
+// manually in performance-critical code.
+func (a *Agent) MultipartForm(args *Args) *Agent {
+ if a.mw == nil {
+ a.mw = multipart.NewWriter(a.req.BodyWriter())
+ }
+
+ if a.boundary != "" {
+ if err := a.mw.SetBoundary(a.boundary); err != nil {
+ a.errs = append(a.errs, err)
+ return a
+ }
+ }
+
+ a.req.Header.SetMultipartFormBoundary(a.mw.Boundary())
+
+ if args != nil {
+ args.VisitAll(func(key, value []byte) {
+ if err := a.mw.WriteField(utils.UnsafeString(key), utils.UnsafeString(value)); err != nil {
+ a.errs = append(a.errs, err)
+ }
+ })
+ }
+
+ for _, ff := range a.formFiles {
+ w, err := a.mw.CreateFormFile(ff.Fieldname, ff.Name)
+ if err != nil {
+ a.errs = append(a.errs, err)
+ continue
+ }
+ if _, err = w.Write(ff.Content); err != nil {
+ a.errs = append(a.errs, err)
+ }
+ }
+
+ if err := a.mw.Close(); err != nil {
+ a.errs = append(a.errs, err)
+ }
+
+ return a
+}
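+
+// Example (illustrative sketch, not part of the upstream source; the URL and
+// file path are hypothetical):
+//
+//	args := fiber.AcquireArgs()
+//	args.Set("field", "value")
+//	code, body, errs := fiber.Post("https://example.com/upload").
+//		SendFile("/path/to/a.txt", "attachment").
+//		MultipartForm(args).
+//		String()
+//	fiber.ReleaseArgs(args)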
+
+/************************** End Request Setting **************************/
+
+/************************** Agent Setting **************************/
+
+// Debug enables logging of request and response details.
+// Output goes to os.Stdout unless a custom writer is provided.
+func (a *Agent) Debug(w ...io.Writer) *Agent {
+ a.debugWriter = os.Stdout
+ if len(w) > 0 {
+ a.debugWriter = w[0]
+ }
+
+ return a
+}
+
+// Timeout sets request timeout duration.
+func (a *Agent) Timeout(timeout time.Duration) *Agent {
+ a.timeout = timeout
+
+ return a
+}
+
+// Reuse enables the Agent instance to be used again after one request.
+//
+// If agent is reusable, then it should be released manually when it is no
+// longer used.
+func (a *Agent) Reuse() *Agent {
+ a.reuse = true
+
+ return a
+}
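+
+// Example (illustrative sketch; the URL is hypothetical): with Reuse the same
+// Agent may issue another request, but it must then be released manually.
+//
+//	agent := fiber.Get("https://example.com").Reuse()
+//	code1, _, _ := agent.String()
+//	code2, _, _ := agent.String()
+//	fiber.ReleaseAgent(agent)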
+
+// InsecureSkipVerify controls whether the Agent verifies the server
+// certificate chain and host name.
+func (a *Agent) InsecureSkipVerify() *Agent {
+ if a.HostClient.TLSConfig == nil {
+ a.HostClient.TLSConfig = &tls.Config{InsecureSkipVerify: true} //nolint:gosec // We explicitly let the user set insecure mode here
+ } else {
+ a.HostClient.TLSConfig.InsecureSkipVerify = true
+ }
+
+ return a
+}
+
+// TLSConfig sets tls config.
+func (a *Agent) TLSConfig(config *tls.Config) *Agent {
+ a.HostClient.TLSConfig = config
+
+ return a
+}
+
+// MaxRedirectsCount sets max redirect count for GET and HEAD.
+func (a *Agent) MaxRedirectsCount(count int) *Agent {
+ a.maxRedirectsCount = count
+
+ return a
+}
+
+// JSONEncoder sets custom json encoder.
+func (a *Agent) JSONEncoder(jsonEncoder utils.JSONMarshal) *Agent {
+ a.jsonEncoder = jsonEncoder
+
+ return a
+}
+
+// JSONDecoder sets custom json decoder.
+func (a *Agent) JSONDecoder(jsonDecoder utils.JSONUnmarshal) *Agent {
+ a.jsonDecoder = jsonDecoder
+
+ return a
+}
+
+// Request returns Agent request instance.
+func (a *Agent) Request() *Request {
+ return a.req
+}
+
+// SetResponse sets custom response for the Agent instance.
+//
+// It is recommended to obtain a custom response via AcquireResponse and to
+// release it manually in performance-critical code.
+func (a *Agent) SetResponse(customResp *Response) *Agent {
+ a.resp = customResp
+
+ return a
+}
+
+// Dest sets custom dest.
+//
+// The contents of dest will be replaced by the response body; if dest
+// is too small, a new slice will be allocated.
+func (a *Agent) Dest(dest []byte) *Agent {
+ a.dest = dest
+
+ return a
+}
+
+// RetryIf controls whether a retry should be attempted after an error.
+//
+// By default, the isIdempotent function from fasthttp is used.
+func (a *Agent) RetryIf(retryIf RetryIfFunc) *Agent {
+ a.HostClient.RetryIf = retryIf
+ return a
+}
+
+/************************** End Agent Setting **************************/
+
+// Bytes returns the status code, body bytes, and errors of the url.
+//
+// It is not safe to use the Agent after calling [Agent.Bytes].
+func (a *Agent) Bytes() (int, []byte, []error) {
+ defer a.release()
+ return a.bytes()
+}
+
+func (a *Agent) bytes() (code int, body []byte, errs []error) { //nolint:nonamedreturns,revive // We want to overwrite the body in a deferred func. TODO: Check if we really need to do this. We eventually want to get rid of all named returns.
+ if errs = append(errs, a.errs...); len(errs) > 0 {
+ return code, body, errs
+ }
+
+ var (
+ req = a.req
+ resp *Response
+ nilResp bool
+ )
+
+ if a.resp == nil {
+ resp = AcquireResponse()
+ nilResp = true
+ } else {
+ resp = a.resp
+ }
+
+ defer func() {
+ if a.debugWriter != nil {
+ printDebugInfo(req, resp, a.debugWriter)
+ }
+
+ if len(errs) == 0 {
+ code = resp.StatusCode()
+ }
+
+ body = append(a.dest, resp.Body()...) //nolint:gocritic // We want to append to the returned slice here
+
+ if nilResp {
+ ReleaseResponse(resp)
+ }
+ }()
+
+ if a.timeout > 0 {
+ if err := a.HostClient.DoTimeout(req, resp, a.timeout); err != nil {
+ errs = append(errs, err)
+ return code, body, errs
+ }
+ } else if a.maxRedirectsCount > 0 && (string(req.Header.Method()) == MethodGet || string(req.Header.Method()) == MethodHead) {
+ if err := a.HostClient.DoRedirects(req, resp, a.maxRedirectsCount); err != nil {
+ errs = append(errs, err)
+ return code, body, errs
+ }
+ } else if err := a.HostClient.Do(req, resp); err != nil {
+ errs = append(errs, err)
+ }
+
+ return code, body, errs
+}
+
+func printDebugInfo(req *Request, resp *Response, w io.Writer) {
+ msg := fmt.Sprintf("Connected to %s(%s)\r\n\r\n", req.URI().Host(), resp.RemoteAddr())
+ _, _ = w.Write(utils.UnsafeBytes(msg)) //nolint:errcheck // This will never fail
+ _, _ = req.WriteTo(w) //nolint:errcheck // This will never fail
+ _, _ = resp.WriteTo(w) //nolint:errcheck // This will never fail
+}
+
+// String returns the status code, string body, and errors of the url.
+//
+// It is not safe to use the Agent after calling [Agent.String].
+func (a *Agent) String() (int, string, []error) {
+ defer a.release()
+ code, body, errs := a.bytes()
+ // TODO: There might be a data race here on body. Maybe use utils.CopyBytes on it?
+
+ return code, utils.UnsafeString(body), errs
+}
+
+// Struct returns the status code, body bytes, and errors of the URL,
+// and the body bytes will be unmarshalled into the given v.
+//
+// It is not safe to use the Agent after calling [Agent.Struct].
+func (a *Agent) Struct(v interface{}) (int, []byte, []error) {
+ defer a.release()
+
+ code, body, errs := a.bytes()
+ if len(errs) > 0 {
+ return code, body, errs
+ }
+
+ // TODO: This should only be done once
+ if a.jsonDecoder == nil {
+ a.jsonDecoder = json.Unmarshal
+ }
+
+ if err := a.jsonDecoder(body, v); err != nil {
+ errs = append(errs, err)
+ }
+
+ return code, body, errs
+}
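+
+// Example (illustrative sketch; the target struct and URL are hypothetical):
+//
+//	var out struct {
+//		Name string `json:"name"`
+//	}
+//	code, body, errs := fiber.Get("https://example.com/user").Struct(&out)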
+
+func (a *Agent) release() {
+ if !a.reuse {
+ ReleaseAgent(a)
+ } else {
+ a.errs = a.errs[:0]
+ }
+}
+
+func (a *Agent) reset() {
+ a.HostClient = nil
+ a.req.Reset()
+ a.resp = nil
+ a.dest = nil
+ a.timeout = 0
+ a.args = nil
+ a.errs = a.errs[:0]
+ a.debugWriter = nil
+ a.mw = nil
+ a.reuse = false
+ a.parsed = false
+ a.maxRedirectsCount = 0
+ a.boundary = ""
+ a.Name = ""
+ a.NoDefaultUserAgentHeader = false
+ for i, ff := range a.formFiles {
+ if ff.autoRelease {
+ ReleaseFormFile(ff)
+ }
+ a.formFiles[i] = nil
+ }
+ a.formFiles = a.formFiles[:0]
+}
+
+var (
+ clientPool sync.Pool
+ agentPool = sync.Pool{
+ New: func() interface{} {
+ return &Agent{req: &Request{}}
+ },
+ }
+ responsePool sync.Pool
+ argsPool sync.Pool
+ formFilePool sync.Pool
+)
+
+// AcquireClient returns an empty Client instance from client pool.
+//
+// The returned Client instance may be passed to ReleaseClient when it is
+// no longer needed. This allows Client recycling, reduces GC pressure
+// and usually improves performance.
+func AcquireClient() *Client {
+ v := clientPool.Get()
+ if v == nil {
+ return &Client{}
+ }
+ c, ok := v.(*Client)
+ if !ok {
+ panic(fmt.Errorf("failed to type-assert to *Client"))
+ }
+ return c
+}
+
+// ReleaseClient returns c acquired via AcquireClient to client pool.
+//
+// It is forbidden to access c and/or its members after returning
+// it to the client pool.
+func ReleaseClient(c *Client) {
+ c.UserAgent = ""
+ c.NoDefaultUserAgentHeader = false
+ c.JSONEncoder = nil
+ c.JSONDecoder = nil
+
+ clientPool.Put(c)
+}
+
+// AcquireAgent returns an empty Agent instance from Agent pool.
+//
+// The returned Agent instance may be passed to ReleaseAgent when it is
+// no longer needed. This allows Agent recycling, reduces GC pressure
+// and usually improves performance.
+func AcquireAgent() *Agent {
+ a, ok := agentPool.Get().(*Agent)
+ if !ok {
+ panic(fmt.Errorf("failed to type-assert to *Agent"))
+ }
+ return a
+}
+
+// ReleaseAgent returns an Agent acquired via AcquireAgent to the Agent pool.
+//
+// It is forbidden to access a and/or its members after returning
+// it to the Agent pool.
+func ReleaseAgent(a *Agent) {
+ a.reset()
+ agentPool.Put(a)
+}
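+
+// Example (illustrative sketch, not part of the upstream source): the manual
+// Agent lifecycle without the package-level helpers.
+//
+//	a := fiber.AcquireAgent()
+//	req := a.Request()
+//	req.Header.SetMethod(fiber.MethodGet)
+//	req.SetRequestURI("https://example.com")
+//	if err := a.Parse(); err != nil {
+//		panic(err)
+//	}
+//	code, body, errs := a.String() // releases a unless Reuse() was set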
+
+// AcquireResponse returns an empty Response instance from response pool.
+//
+// The returned Response instance may be passed to ReleaseResponse when it is
+// no longer needed. This allows Response recycling, reduces GC pressure
+// and usually improves performance.
+// Copy from fasthttp
+func AcquireResponse() *Response {
+ v := responsePool.Get()
+ if v == nil {
+ return &Response{}
+ }
+ r, ok := v.(*Response)
+ if !ok {
+ panic(fmt.Errorf("failed to type-assert to *Response"))
+ }
+ return r
+}
+
+// ReleaseResponse returns resp acquired via AcquireResponse to the response pool.
+//
+// It is forbidden to access resp and/or its members after returning
+// it to the response pool.
+// Copy from fasthttp
+func ReleaseResponse(resp *Response) {
+ resp.Reset()
+ responsePool.Put(resp)
+}
+
+// AcquireArgs returns an empty Args object from the pool.
+//
+// The returned Args may be returned to the pool with ReleaseArgs
+// when no longer needed. This allows reducing GC load.
+// Copy from fasthttp
+func AcquireArgs() *Args {
+ v := argsPool.Get()
+ if v == nil {
+ return &Args{}
+ }
+ a, ok := v.(*Args)
+ if !ok {
+ panic(fmt.Errorf("failed to type-assert to *Args"))
+ }
+ return a
+}
+
+// ReleaseArgs returns the object acquired via AcquireArgs to the pool.
+//
+// Do not access the released Args object, otherwise data races may occur.
+// Copy from fasthttp
+func ReleaseArgs(a *Args) {
+ a.Reset()
+ argsPool.Put(a)
+}
+
+// AcquireFormFile returns an empty FormFile object from the pool.
+//
+// The returned FormFile may be returned to the pool with ReleaseFormFile
+// when no longer needed. This allows reducing GC load.
+func AcquireFormFile() *FormFile {
+ v := formFilePool.Get()
+ if v == nil {
+ return &FormFile{}
+ }
+ ff, ok := v.(*FormFile)
+ if !ok {
+ panic(fmt.Errorf("failed to type-assert to *FormFile"))
+ }
+ return ff
+}
+
+// ReleaseFormFile returns the object acquired via AcquireFormFile to the pool.
+//
+// Do not access the released FormFile object, otherwise data races may occur.
+func ReleaseFormFile(ff *FormFile) {
+ ff.Fieldname = ""
+ ff.Name = ""
+ ff.Content = ff.Content[:0]
+ ff.autoRelease = false
+
+ formFilePool.Put(ff)
+}
+
+const (
+ defaultUserAgent = "fiber"
+)
+
+type multipartWriter interface {
+ Boundary() string
+ SetBoundary(boundary string) error
+ CreateFormFile(fieldname, filename string) (io.Writer, error)
+ WriteField(fieldname, value string) error
+ Close() error
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/color.go b/vendor/github.com/gofiber/fiber/v2/color.go
new file mode 100644
index 0000000..cbccd2e
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/color.go
@@ -0,0 +1,107 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package fiber
+
+// Colors is a struct to define custom colors for Fiber app and middlewares.
+type Colors struct {
+ // Black color.
+ //
+ // Optional. Default: "\u001b[90m"
+ Black string
+
+ // Red color.
+ //
+ // Optional. Default: "\u001b[91m"
+ Red string
+
+ // Green color.
+ //
+ // Optional. Default: "\u001b[92m"
+ Green string
+
+ // Yellow color.
+ //
+ // Optional. Default: "\u001b[93m"
+ Yellow string
+
+ // Blue color.
+ //
+ // Optional. Default: "\u001b[94m"
+ Blue string
+
+ // Magenta color.
+ //
+ // Optional. Default: "\u001b[95m"
+ Magenta string
+
+ // Cyan color.
+ //
+ // Optional. Default: "\u001b[96m"
+ Cyan string
+
+ // White color.
+ //
+ // Optional. Default: "\u001b[97m"
+ White string
+
+ // Reset color.
+ //
+ // Optional. Default: "\u001b[0m"
+ Reset string
+}
+
+// DefaultColors holds the default color codes.
+var DefaultColors = Colors{
+ Black: "\u001b[90m",
+ Red: "\u001b[91m",
+ Green: "\u001b[92m",
+ Yellow: "\u001b[93m",
+ Blue: "\u001b[94m",
+ Magenta: "\u001b[95m",
+ Cyan: "\u001b[96m",
+ White: "\u001b[97m",
+ Reset: "\u001b[0m",
+}
+
+// defaultColors fills in any unset colors in the config with the defaults.
+func defaultColors(colors Colors) Colors {
+ if colors.Black == "" {
+ colors.Black = DefaultColors.Black
+ }
+
+ if colors.Red == "" {
+ colors.Red = DefaultColors.Red
+ }
+
+ if colors.Green == "" {
+ colors.Green = DefaultColors.Green
+ }
+
+ if colors.Yellow == "" {
+ colors.Yellow = DefaultColors.Yellow
+ }
+
+ if colors.Blue == "" {
+ colors.Blue = DefaultColors.Blue
+ }
+
+ if colors.Magenta == "" {
+ colors.Magenta = DefaultColors.Magenta
+ }
+
+ if colors.Cyan == "" {
+ colors.Cyan = DefaultColors.Cyan
+ }
+
+ if colors.White == "" {
+ colors.White = DefaultColors.White
+ }
+
+ if colors.Reset == "" {
+ colors.Reset = DefaultColors.Reset
+ }
+
+ return colors
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/ctx.go b/vendor/github.com/gofiber/fiber/v2/ctx.go
new file mode 100644
index 0000000..55b81cf
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/ctx.go
@@ -0,0 +1,1989 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package fiber
+
+import (
+ "bytes"
+ "context"
+ "crypto/tls"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "io"
+ "mime/multipart"
+ "net"
+ "net/http"
+ "path/filepath"
+ "reflect"
+ "strconv"
+ "strings"
+ "sync"
+ "text/template"
+ "time"
+
+ "github.com/gofiber/fiber/v2/internal/schema"
+ "github.com/gofiber/fiber/v2/utils"
+
+ "github.com/valyala/bytebufferpool"
+ "github.com/valyala/fasthttp"
+)
+
+const (
+ schemeHTTP = "http"
+ schemeHTTPS = "https"
+)
+
+// maxParams defines the maximum number of parameters per route.
+const maxParams = 30
+
+// Some constants for BodyParser, QueryParser, CookieParser and ReqHeaderParser.
+const (
+ queryTag = "query"
+ reqHeaderTag = "reqHeader"
+ bodyTag = "form"
+ paramsTag = "params"
+ cookieTag = "cookie"
+)
+
+// userContextKey defines the key name for storing context.Context in *fasthttp.RequestCtx
+const userContextKey = "__local_user_context__"
+
+var (
+ // decoderPoolMap helps to improve BodyParser's, QueryParser's, CookieParser's and ReqHeaderParser's performance
+ decoderPoolMap = map[string]*sync.Pool{}
+ // tags is used to classify parser's pool
+ tags = []string{queryTag, bodyTag, reqHeaderTag, paramsTag, cookieTag}
+)
+
+func init() {
+ for _, tag := range tags {
+ decoderPoolMap[tag] = &sync.Pool{New: func() interface{} {
+ return decoderBuilder(ParserConfig{
+ IgnoreUnknownKeys: true,
+ ZeroEmpty: true,
+ })
+ }}
+ }
+}
+
+// SetParserDecoder allows globally changing the form decoder options and updates decoderPoolMap
+func SetParserDecoder(parserConfig ParserConfig) {
+ for _, tag := range tags {
+ decoderPoolMap[tag] = &sync.Pool{New: func() interface{} {
+ return decoderBuilder(parserConfig)
+ }}
+ }
+}
+
+// Ctx represents the Context which holds the HTTP request and response.
+// It has methods for the request query string, parameters, body, HTTP headers and so on.
+type Ctx struct {
+ app *App // Reference to *App
+ route *Route // Reference to *Route
+ indexRoute int // Index of the current route
+ indexHandler int // Index of the current handler
+ method string // HTTP method
+ methodINT int // HTTP method INT equivalent
+ baseURI string // HTTP base uri
+ path string // HTTP path with the modifications by the configuration -> string copy from pathBuffer
+ pathBuffer []byte // HTTP path buffer
+ detectionPath string // Route detection path -> string copy from detectionPathBuffer
+ detectionPathBuffer []byte // HTTP detectionPath buffer
+ treePath string // Path for the search in the tree
+ pathOriginal string // Original HTTP path
+ values [maxParams]string // Route parameter values
+ fasthttp *fasthttp.RequestCtx // Reference to *fasthttp.RequestCtx
+ matched bool // Indicates whether a non-Use route was matched
+ viewBindMap sync.Map // Default view map to bind template engine
+}
+
+// TLSHandler object
+type TLSHandler struct {
+ clientHelloInfo *tls.ClientHelloInfo
+}
+
+// GetClientInfo is a callback that captures the ClientHelloInfo.
+// It must comply with the method structure of https://cs.opensource.google/go/go/+/refs/tags/go1.20:src/crypto/tls/common.go;l=554-563,
+// since we overlay this method onto the tls config in the listener method.
+func (t *TLSHandler) GetClientInfo(info *tls.ClientHelloInfo) (*tls.Certificate, error) {
+ t.clientHelloInfo = info
+ return nil, nil //nolint:nilnil // Not returning anything useful here is probably fine
+}
+
+// Range data for c.Range
+type Range struct {
+ Type string
+ Ranges []RangeSet
+}
+
+// RangeSet represents a single content range from a request.
+type RangeSet struct {
+ Start int
+ End int
+}
+
+// Cookie data for c.Cookie
+type Cookie struct {
+ Name string `json:"name"`
+ Value string `json:"value"`
+ Path string `json:"path"`
+ Domain string `json:"domain"`
+ MaxAge int `json:"max_age"`
+ Expires time.Time `json:"expires"`
+ Secure bool `json:"secure"`
+ HTTPOnly bool `json:"http_only"`
+ SameSite string `json:"same_site"`
+ SessionOnly bool `json:"session_only"`
+}
+
+// Views is the interface that wraps the Render function.
+type Views interface {
+ Load() error
+ Render(io.Writer, string, interface{}, ...string) error
+}
+
+// ParserType requires two elements, a type and a converter, for registration.
+// Use ParserType with BodyParser for parsing custom types in form data.
+type ParserType struct {
+ Customtype interface{}
+ Converter func(string) reflect.Value
+}
+
+// ParserConfig form decoder config for SetParserDecoder
+type ParserConfig struct {
+ IgnoreUnknownKeys bool
+ SetAliasTag string
+ ParserType []ParserType
+ ZeroEmpty bool
+}
+
+// AcquireCtx retrieves a new Ctx from the pool.
+func (app *App) AcquireCtx(fctx *fasthttp.RequestCtx) *Ctx {
+ c, ok := app.pool.Get().(*Ctx)
+ if !ok {
+ panic(fmt.Errorf("failed to type-assert to *Ctx"))
+ }
+ // Set app reference
+ c.app = app
+ // Reset route and handler index
+ c.indexRoute = -1
+ c.indexHandler = 0
+ // Reset matched flag
+ c.matched = false
+ // Set paths
+ c.pathOriginal = app.getString(fctx.URI().PathOriginal())
+ // Set method
+ c.method = app.getString(fctx.Request.Header.Method())
+ c.methodINT = app.methodInt(c.method)
+ // Attach *fasthttp.RequestCtx to ctx
+ c.fasthttp = fctx
+ // reset base uri
+ c.baseURI = ""
+ // Prettify path
+ c.configDependentPaths()
+ return c
+}
+
+// ReleaseCtx releases the ctx back into the pool.
+func (app *App) ReleaseCtx(c *Ctx) {
+ // Reset values
+ c.route = nil
+ c.fasthttp = nil
+ c.viewBindMap = sync.Map{}
+ app.pool.Put(c)
+}
+
+// Accepts checks if the specified extensions or content types are acceptable.
+func (c *Ctx) Accepts(offers ...string) string {
+ return getOffer(c.Get(HeaderAccept), acceptsOfferType, offers...)
+}
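+
+// Example (illustrative sketch): assuming the request carries the header
+// "Accept: text/html":
+//
+//	c.Accepts("html") // "html"
+//	c.Accepts("json") // ""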
+
+// AcceptsCharsets checks if the specified charset is acceptable.
+func (c *Ctx) AcceptsCharsets(offers ...string) string {
+ return getOffer(c.Get(HeaderAcceptCharset), acceptsOffer, offers...)
+}
+
+// AcceptsEncodings checks if the specified encoding is acceptable.
+func (c *Ctx) AcceptsEncodings(offers ...string) string {
+ return getOffer(c.Get(HeaderAcceptEncoding), acceptsOffer, offers...)
+}
+
+// AcceptsLanguages checks if the specified language is acceptable.
+func (c *Ctx) AcceptsLanguages(offers ...string) string {
+ return getOffer(c.Get(HeaderAcceptLanguage), acceptsOffer, offers...)
+}
+
+// App returns the *App reference to the instance of the Fiber application
+func (c *Ctx) App() *App {
+ return c.app
+}
+
+// Append the specified value to the HTTP response header field.
+// If the header is not already set, it creates the header with the specified value.
+func (c *Ctx) Append(field string, values ...string) {
+ if len(values) == 0 {
+ return
+ }
+ h := c.app.getString(c.fasthttp.Response.Header.Peek(field))
+ originalH := h
+ for _, value := range values {
+ if len(h) == 0 {
+ h = value
+ } else if h != value && !strings.HasPrefix(h, value+",") && !strings.HasSuffix(h, " "+value) &&
+ !strings.Contains(h, " "+value+",") {
+ h += ", " + value
+ }
+ }
+ if originalH != h {
+ c.Set(field, h)
+ }
+}
+
+// Attachment sets the HTTP response Content-Disposition header field to attachment.
+func (c *Ctx) Attachment(filename ...string) {
+ if len(filename) > 0 {
+ fname := filepath.Base(filename[0])
+ c.Type(filepath.Ext(fname))
+
+ c.setCanonical(HeaderContentDisposition, `attachment; filename="`+c.app.quoteString(fname)+`"`)
+ return
+ }
+ c.setCanonical(HeaderContentDisposition, "attachment")
+}
+
+// BaseURL returns (protocol + host + base path).
+func (c *Ctx) BaseURL() string {
+ // TODO: Could be improved: 53.8 ns/op 32 B/op 1 allocs/op
+ // Should work like https://codeigniter.com/user_guide/helpers/url_helper.html
+ if c.baseURI != "" {
+ return c.baseURI
+ }
+ c.baseURI = c.Protocol() + "://" + c.Hostname()
+ return c.baseURI
+}
+
+// BodyRaw contains the raw body submitted in a POST request.
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting instead.
+func (c *Ctx) BodyRaw() []byte {
+ return c.fasthttp.Request.Body()
+}
+
+func (c *Ctx) tryDecodeBodyInOrder(
+ originalBody *[]byte,
+ encodings []string,
+) ([]byte, uint8, error) {
+ var (
+ err error
+ body []byte
+ decodesRealized uint8
+ )
+
+ for index, encoding := range encodings {
+ decodesRealized++
+ switch encoding {
+ case StrGzip:
+ body, err = c.fasthttp.Request.BodyGunzip()
+ case StrBr, StrBrotli:
+ body, err = c.fasthttp.Request.BodyUnbrotli()
+ case StrDeflate:
+ body, err = c.fasthttp.Request.BodyInflate()
+ default:
+ decodesRealized--
+ if len(encodings) == 1 {
+ body = c.fasthttp.Request.Body()
+ }
+ return body, decodesRealized, nil
+ }
+
+ if err != nil {
+ return nil, decodesRealized, err
+ }
+
+ // Only execute body raw update if it has a next iteration to try to decode
+ if index < len(encodings)-1 && decodesRealized > 0 {
+ if index == 0 {
+ tempBody := c.fasthttp.Request.Body()
+ *originalBody = make([]byte, len(tempBody))
+ copy(*originalBody, tempBody)
+ }
+ c.fasthttp.Request.SetBodyRaw(body)
+ }
+ }
+
+ return body, decodesRealized, nil
+}
+
+// Body contains the raw body submitted in a POST request.
+// This method will decompress the body if the 'Content-Encoding' header is provided.
+// It returns the original (or decompressed) body data which is valid only within the handler.
+// Don't store direct references to the returned data.
+// If you need to keep the body's data later, make a copy or use the Immutable option.
+func (c *Ctx) Body() []byte {
+ var (
+ err error
+ body, originalBody []byte
+ headerEncoding string
+ encodingOrder = []string{"", "", ""}
+ )
+
+ // faster than peek
+ c.Request().Header.VisitAll(func(key, value []byte) {
+ if c.app.getString(key) == HeaderContentEncoding {
+ headerEncoding = c.app.getString(value)
+ }
+ })
+
+ // Split and get the encodings list, in order to attend the
+ // rule defined at: https://www.rfc-editor.org/rfc/rfc9110#section-8.4-5
+ encodingOrder = getSplicedStrList(headerEncoding, encodingOrder)
+ if len(encodingOrder) == 0 {
+ return c.fasthttp.Request.Body()
+ }
+
+ var decodesRealized uint8
+ body, decodesRealized, err = c.tryDecodeBodyInOrder(&originalBody, encodingOrder)
+
+ // Ensure that the body will be the original
+ if originalBody != nil && decodesRealized > 0 {
+ c.fasthttp.Request.SetBodyRaw(originalBody)
+ }
+ if err != nil {
+ return []byte(err.Error())
+ }
+
+ return body
+}
+
+func decoderBuilder(parserConfig ParserConfig) interface{} {
+ decoder := schema.NewDecoder()
+ decoder.IgnoreUnknownKeys(parserConfig.IgnoreUnknownKeys)
+ if parserConfig.SetAliasTag != "" {
+ decoder.SetAliasTag(parserConfig.SetAliasTag)
+ }
+ for _, v := range parserConfig.ParserType {
+ decoder.RegisterConverter(reflect.ValueOf(v.Customtype).Interface(), v.Converter)
+ }
+ decoder.ZeroEmpty(parserConfig.ZeroEmpty)
+ return decoder
+}
+
+// BodyParser binds the request body to a struct.
+// It supports decoding the following content types based on the Content-Type header:
+// application/json, application/xml, application/x-www-form-urlencoded, multipart/form-data
+// All JSON extension mime types are supported (e.g. application/problem+json).
+// If none of the content types above are matched, it will return an ErrUnprocessableEntity error.
+func (c *Ctx) BodyParser(out interface{}) error {
+ // Get content-type
+ ctype := utils.ToLower(c.app.getString(c.fasthttp.Request.Header.ContentType()))
+
+ ctype = utils.ParseVendorSpecificContentType(ctype)
+
+ // Only use ctype string up to and excluding byte ';'
+ ctypeEnd := strings.IndexByte(ctype, ';')
+ if ctypeEnd != -1 {
+ ctype = ctype[:ctypeEnd]
+ }
+
+ // Parse body accordingly
+ if strings.HasSuffix(ctype, "json") {
+ return c.app.config.JSONDecoder(c.Body(), out)
+ }
+ if strings.HasPrefix(ctype, MIMEApplicationForm) {
+ data := make(map[string][]string)
+ var err error
+
+ c.fasthttp.PostArgs().VisitAll(func(key, val []byte) {
+ if err != nil {
+ return
+ }
+
+ k := c.app.getString(key)
+ v := c.app.getString(val)
+
+ if strings.Contains(k, "[") {
+ k, err = parseParamSquareBrackets(k)
+ }
+
+ if c.app.config.EnableSplittingOnParsers && strings.Contains(v, ",") && equalFieldType(out, reflect.Slice, k, bodyTag) {
+ values := strings.Split(v, ",")
+ for i := 0; i < len(values); i++ {
+ data[k] = append(data[k], values[i])
+ }
+ } else {
+ data[k] = append(data[k], v)
+ }
+ })
+
+ return c.parseToStruct(bodyTag, out, data)
+ }
+ if strings.HasPrefix(ctype, MIMEMultipartForm) {
+ data, err := c.fasthttp.MultipartForm()
+ if err != nil {
+ return err
+ }
+ return c.parseToStruct(bodyTag, out, data.Value)
+ }
+ if strings.HasPrefix(ctype, MIMETextXML) || strings.HasPrefix(ctype, MIMEApplicationXML) {
+ if err := xml.Unmarshal(c.Body(), out); err != nil {
+ return fmt.Errorf("failed to unmarshal: %w", err)
+ }
+ return nil
+ }
+ // No suitable content type found
+ return ErrUnprocessableEntity
+}
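+
+// Example (illustrative sketch, not part of the upstream source; the Person
+// type and route are hypothetical):
+//
+//	type Person struct {
+//		Name string `json:"name" xml:"name" form:"name"`
+//	}
+//	app.Post("/", func(c *fiber.Ctx) error {
+//		p := new(Person)
+//		if err := c.BodyParser(p); err != nil {
+//			return err
+//		}
+//		return c.SendString(p.Name)
+//	})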
+
+// ClearCookie expires a specific cookie by key on the client side.
+// If no key is provided it expires all cookies that came with the request.
+func (c *Ctx) ClearCookie(key ...string) {
+ if len(key) > 0 {
+ for i := range key {
+ c.fasthttp.Response.Header.DelClientCookie(key[i])
+ }
+ return
+ }
+ c.fasthttp.Request.Header.VisitAllCookie(func(k, v []byte) {
+ c.fasthttp.Response.Header.DelClientCookieBytes(k)
+ })
+}
+
+// Context returns *fasthttp.RequestCtx that carries a deadline,
+// a cancellation signal, and other values across API boundaries.
+func (c *Ctx) Context() *fasthttp.RequestCtx {
+ return c.fasthttp
+}
+
+// UserContext returns a context implementation that was set by
+// the user earlier, or returns a non-nil, empty context if it was not set earlier.
+func (c *Ctx) UserContext() context.Context {
+ ctx, ok := c.fasthttp.UserValue(userContextKey).(context.Context)
+ if !ok {
+ ctx = context.Background()
+ c.SetUserContext(ctx)
+ }
+
+ return ctx
+}
+
+// SetUserContext sets a context implementation by user.
+func (c *Ctx) SetUserContext(ctx context.Context) {
+ c.fasthttp.SetUserValue(userContextKey, ctx)
+}
+
+// Cookie sets a cookie by passing a cookie struct.
+func (c *Ctx) Cookie(cookie *Cookie) {
+ fcookie := fasthttp.AcquireCookie()
+ fcookie.SetKey(cookie.Name)
+ fcookie.SetValue(cookie.Value)
+ fcookie.SetPath(cookie.Path)
+ fcookie.SetDomain(cookie.Domain)
+ // only set max age and expiry when SessionOnly is false
+ // i.e. cookie supposed to last beyond browser session
+ // refer: https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#define_the_lifetime_of_a_cookie
+ if !cookie.SessionOnly {
+ fcookie.SetMaxAge(cookie.MaxAge)
+ fcookie.SetExpire(cookie.Expires)
+ }
+ fcookie.SetSecure(cookie.Secure)
+ fcookie.SetHTTPOnly(cookie.HTTPOnly)
+
+ switch utils.ToLower(cookie.SameSite) {
+ case CookieSameSiteStrictMode:
+ fcookie.SetSameSite(fasthttp.CookieSameSiteStrictMode)
+ case CookieSameSiteNoneMode:
+ fcookie.SetSameSite(fasthttp.CookieSameSiteNoneMode)
+ case CookieSameSiteDisabled:
+ fcookie.SetSameSite(fasthttp.CookieSameSiteDisabled)
+ default:
+ fcookie.SetSameSite(fasthttp.CookieSameSiteLaxMode)
+ }
+
+ c.fasthttp.Response.Header.SetCookie(fcookie)
+ fasthttp.ReleaseCookie(fcookie)
+}
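+
+// Example (illustrative sketch; the cookie values are hypothetical):
+//
+//	c.Cookie(&fiber.Cookie{
+//		Name:     "session",
+//		Value:    "abc123",
+//		Expires:  time.Now().Add(24 * time.Hour),
+//		HTTPOnly: true,
+//		SameSite: "Lax",
+//	})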
+
+// Cookies is used for getting a cookie value by key.
+// Defaults to the empty string "" if the cookie doesn't exist.
+// If a default value is given, it will return that value if the cookie doesn't exist.
+// The returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting to use the value outside the Handler.
+func (c *Ctx) Cookies(key string, defaultValue ...string) string {
+ return defaultString(c.app.getString(c.fasthttp.Request.Header.Cookie(key)), defaultValue)
+}
+
+// CookieParser is used to bind cookies to a struct
+func (c *Ctx) CookieParser(out interface{}) error {
+ data := make(map[string][]string)
+ var err error
+
+ // loop through all cookies
+ c.fasthttp.Request.Header.VisitAllCookie(func(key, val []byte) {
+ if err != nil {
+ return
+ }
+
+ k := c.app.getString(key)
+ v := c.app.getString(val)
+
+ if strings.Contains(k, "[") {
+ k, err = parseParamSquareBrackets(k)
+ }
+
+ if c.app.config.EnableSplittingOnParsers && strings.Contains(v, ",") && equalFieldType(out, reflect.Slice, k, cookieTag) {
+ values := strings.Split(v, ",")
+ for i := 0; i < len(values); i++ {
+ data[k] = append(data[k], values[i])
+ }
+ } else {
+ data[k] = append(data[k], v)
+ }
+ })
+ if err != nil {
+ return err
+ }
+
+ return c.parseToStruct(cookieTag, out, data)
+}
+
+// Download transfers the file from path as an attachment.
+// Typically, browsers will prompt the user for download.
+// By default, the Content-Disposition header filename= parameter is the filepath (this typically appears in the browser dialog).
+// Override this default with the filename parameter.
+func (c *Ctx) Download(file string, filename ...string) error {
+ var fname string
+ if len(filename) > 0 {
+ fname = filename[0]
+ } else {
+ fname = filepath.Base(file)
+ }
+ c.setCanonical(HeaderContentDisposition, `attachment; filename="`+c.app.quoteString(fname)+`"`)
+ return c.SendFile(file)
+}
+
+// Request return the *fasthttp.Request object
+// This allows you to use all fasthttp request methods
+// https://godoc.org/github.com/valyala/fasthttp#Request
+func (c *Ctx) Request() *fasthttp.Request {
+ return &c.fasthttp.Request
+}
+
+// Response return the *fasthttp.Response object
+// This allows you to use all fasthttp response methods
+// https://godoc.org/github.com/valyala/fasthttp#Response
+func (c *Ctx) Response() *fasthttp.Response {
+ return &c.fasthttp.Response
+}
+
+// Format performs content-negotiation on the Accept HTTP header.
+// It uses Accepts to select a proper format.
+// If the header is not specified or there is no proper format, text/plain is used.
+func (c *Ctx) Format(body interface{}) error {
+ // Get accepted content type
+ accept := c.Accepts("html", "json", "txt", "xml")
+ // Set accepted content type
+ c.Type(accept)
+ // Type convert provided body
+ var b string
+ switch val := body.(type) {
+ case string:
+ b = val
+ case []byte:
+ b = c.app.getString(val)
+ default:
+ b = fmt.Sprintf("%v", val)
+ }
+
+ // Format based on the accept content type
+ switch accept {
+ case "html":
+ return c.SendString("" + b + "
")
+ case "json":
+ return c.JSON(body)
+ case "txt":
+ return c.SendString(b)
+ case "xml":
+ return c.XML(body)
+ }
+ return c.SendString(b)
+}
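+
+// Example (illustrative sketch): the same handler yields "<p>Hi!</p>" for
+// "Accept: text/html", "\"Hi!\"" for "Accept: application/json", and plain
+// "Hi!" otherwise.
+//
+//	app.Get("/", func(c *fiber.Ctx) error {
+//		return c.Format("Hi!")
+//	})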
+
+// FormFile returns the first file by key from a MultipartForm.
+func (c *Ctx) FormFile(key string) (*multipart.FileHeader, error) {
+ return c.fasthttp.FormFile(key)
+}
+
+// FormValue returns the first value by key from a MultipartForm.
+// Search is performed in QueryArgs, PostArgs, MultipartForm and FormFile in this particular order.
+// Defaults to the empty string "" if the form value doesn't exist.
+// If a default value is given, it will return that value if the form value does not exist.
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting instead.
+func (c *Ctx) FormValue(key string, defaultValue ...string) string {
+ return defaultString(c.app.getString(c.fasthttp.FormValue(key)), defaultValue)
+}
+
+// Fresh returns true when the response is still “fresh” in the client's cache,
+// otherwise false is returned to indicate that the client cache is now stale
+// and the full response should be sent.
+// When a client sends the Cache-Control: no-cache request header to indicate an end-to-end
+// reload request, this module will return false to make handling these requests transparent.
+// https://github.com/jshttp/fresh/blob/10e0471669dbbfbfd8de65bc6efac2ddd0bfa057/index.js#L33
+func (c *Ctx) Fresh() bool {
+ // fields
+ modifiedSince := c.Get(HeaderIfModifiedSince)
+ noneMatch := c.Get(HeaderIfNoneMatch)
+
+ // unconditional request
+ if modifiedSince == "" && noneMatch == "" {
+ return false
+ }
+
+ // Always return stale when Cache-Control: no-cache
+ // to support end-to-end reload requests
+ // https://tools.ietf.org/html/rfc2616#section-14.9.4
+ cacheControl := c.Get(HeaderCacheControl)
+ if cacheControl != "" && isNoCache(cacheControl) {
+ return false
+ }
+
+ // if-none-match
+ if noneMatch != "" && noneMatch != "*" {
+ etag := c.app.getString(c.fasthttp.Response.Header.Peek(HeaderETag))
+ if etag == "" {
+ return false
+ }
+ if c.app.isEtagStale(etag, c.app.getBytes(noneMatch)) {
+ return false
+ }
+
+ if modifiedSince != "" {
+ lastModified := c.app.getString(c.fasthttp.Response.Header.Peek(HeaderLastModified))
+ if lastModified != "" {
+ lastModifiedTime, err := http.ParseTime(lastModified)
+ if err != nil {
+ return false
+ }
+ modifiedSinceTime, err := http.ParseTime(modifiedSince)
+ if err != nil {
+ return false
+ }
+ return lastModifiedTime.Before(modifiedSinceTime)
+ }
+ }
+ }
+ return true
+}
+
+// Get returns the HTTP request header specified by field.
+// Field names are case-insensitive
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting instead.
+func (c *Ctx) Get(key string, defaultValue ...string) string {
+ return defaultString(c.app.getString(c.fasthttp.Request.Header.Peek(key)), defaultValue)
+}
+
+// GetRespHeader returns the HTTP response header specified by field.
+// Field names are case-insensitive
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting instead.
+func (c *Ctx) GetRespHeader(key string, defaultValue ...string) string {
+ return defaultString(c.app.getString(c.fasthttp.Response.Header.Peek(key)), defaultValue)
+}
+
+// GetReqHeaders returns the HTTP request headers.
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting instead.
+func (c *Ctx) GetReqHeaders() map[string][]string {
+ headers := make(map[string][]string)
+ c.Request().Header.VisitAll(func(k, v []byte) {
+ key := c.app.getString(k)
+ headers[key] = append(headers[key], c.app.getString(v))
+ })
+
+ return headers
+}
+
+// GetRespHeaders returns the HTTP response headers.
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting instead.
+func (c *Ctx) GetRespHeaders() map[string][]string {
+ headers := make(map[string][]string)
+ c.Response().Header.VisitAll(func(k, v []byte) {
+ key := c.app.getString(k)
+ headers[key] = append(headers[key], c.app.getString(v))
+ })
+
+ return headers
+}
+
+// Hostname contains the hostname derived from the X-Forwarded-Host or Host HTTP header.
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting instead.
+// Please use Config.EnableTrustedProxyCheck to prevent header spoofing, in case your app is behind a proxy.
+func (c *Ctx) Hostname() string {
+ if c.IsProxyTrusted() {
+ if host := c.Get(HeaderXForwardedHost); len(host) > 0 {
+ commaPos := strings.Index(host, ",")
+ if commaPos != -1 {
+ return host[:commaPos]
+ }
+ return host
+ }
+ }
+ return c.app.getString(c.fasthttp.Request.URI().Host())
+}
+
+// Port returns the remote port of the request.
+func (c *Ctx) Port() string {
+ tcpaddr, ok := c.fasthttp.RemoteAddr().(*net.TCPAddr)
+ if !ok {
+ panic(fmt.Errorf("failed to type-assert to *net.TCPAddr"))
+ }
+ return strconv.Itoa(tcpaddr.Port)
+}
+
+// IP returns the remote IP address of the request.
+// If ProxyHeader and IP Validation is configured, it will parse that header and return the first valid IP address.
+// Please use Config.EnableTrustedProxyCheck to prevent header spoofing, in case your app is behind a proxy.
+func (c *Ctx) IP() string {
+ if c.IsProxyTrusted() && len(c.app.config.ProxyHeader) > 0 {
+ return c.extractIPFromHeader(c.app.config.ProxyHeader)
+ }
+
+ return c.fasthttp.RemoteIP().String()
+}
+
+// extractIPsFromHeader will return a slice of IPs it found given a header name in the order they appear.
+// When IP validation is enabled, any invalid IPs will be omitted.
+func (c *Ctx) extractIPsFromHeader(header string) []string {
+ // TODO: Reuse the c.extractIPFromHeader func somehow in here
+
+ headerValue := c.Get(header)
+
+ // We can't know how many IPs we will return, but we will try to guess with this constant division.
+ // Counting ',' makes the function about 50ns slower in the general case.
+ const maxEstimatedCount = 8
+ estimatedCount := len(headerValue) / maxEstimatedCount
+ if estimatedCount > maxEstimatedCount {
+ estimatedCount = maxEstimatedCount // Avoid big allocation on big header
+ }
+
+ ipsFound := make([]string, 0, estimatedCount)
+
+ i := 0
+ j := -1
+
+iploop:
+ for {
+ var v4, v6 bool
+
+ // Manually splitting string without allocating slice, working with parts directly
+ i, j = j+1, j+2
+
+ if j > len(headerValue) {
+ break
+ }
+
+ for j < len(headerValue) && headerValue[j] != ',' {
+ if headerValue[j] == ':' {
+ v6 = true
+ } else if headerValue[j] == '.' {
+ v4 = true
+ }
+ j++
+ }
+
+ for i < j && (headerValue[i] == ' ' || headerValue[i] == ',') {
+ i++
+ }
+
+ s := utils.TrimRight(headerValue[i:j], ' ')
+
+ if c.app.config.EnableIPValidation {
+ // Skip validation if IP is clearly not IPv4/IPv6, otherwise validate without allocations
+ if (!v6 && !v4) || (v6 && !utils.IsIPv6(s)) || (v4 && !utils.IsIPv4(s)) {
+ continue iploop
+ }
+ }
+
+ ipsFound = append(ipsFound, s)
+ }
+
+ return ipsFound
+}
+
+// extractIPFromHeader will attempt to pull the real client IP from the given header when IP validation is enabled.
+// Currently, it will return the first valid IP address in the header.
+// When IP validation is disabled, it will simply return the value of the header without any inspection.
+// Implementation is almost the same as in extractIPsFromHeader, but without allocation of []string.
+func (c *Ctx) extractIPFromHeader(header string) string {
+ if c.app.config.EnableIPValidation {
+ headerValue := c.Get(header)
+
+ i := 0
+ j := -1
+
+ iploop:
+ for {
+ var v4, v6 bool
+
+ // Manually splitting string without allocating slice, working with parts directly
+ i, j = j+1, j+2
+
+ if j > len(headerValue) {
+ break
+ }
+
+ for j < len(headerValue) && headerValue[j] != ',' {
+ if headerValue[j] == ':' {
+ v6 = true
+ } else if headerValue[j] == '.' {
+ v4 = true
+ }
+ j++
+ }
+
+ for i < j && headerValue[i] == ' ' {
+ i++
+ }
+
+ s := utils.TrimRight(headerValue[i:j], ' ')
+
+ if c.app.config.EnableIPValidation {
+ if (!v6 && !v4) || (v6 && !utils.IsIPv6(s)) || (v4 && !utils.IsIPv4(s)) {
+ continue iploop
+ }
+ }
+
+ return s
+ }
+
+ return c.fasthttp.RemoteIP().String()
+ }
+
+ // default behavior if IP validation is not enabled is just to return whatever value is
+ // in the proxy header. Even if it is empty or invalid
+ return c.Get(c.app.config.ProxyHeader)
+}
+
+// IPs returns a string slice of IP addresses specified in the X-Forwarded-For request header.
+// When IP validation is enabled, only valid IPs are returned.
+func (c *Ctx) IPs() []string {
+ return c.extractIPsFromHeader(HeaderXForwardedFor)
+}
+
+// Is returns true if the incoming request's Content-Type HTTP header field
+// matches the MIME type specified by the extension parameter.
+func (c *Ctx) Is(extension string) bool {
+ extensionHeader := utils.GetMIME(extension)
+ if extensionHeader == "" {
+ return false
+ }
+
+ return strings.HasPrefix(
+ utils.TrimLeft(c.app.getString(c.fasthttp.Request.Header.ContentType()), ' '),
+ extensionHeader,
+ )
+}
+
+// JSON converts any interface or string to JSON.
+// Array and slice values encode as JSON arrays,
+// except that []byte encodes as a base64-encoded string,
+// and a nil slice encodes as the null JSON value.
+// If the ctype parameter is given, this method will set the
+// Content-Type header equal to ctype. If ctype is not given,
+// The Content-Type header will be set to application/json.
+func (c *Ctx) JSON(data interface{}, ctype ...string) error {
+ raw, err := c.app.config.JSONEncoder(data)
+ if err != nil {
+ return err
+ }
+ c.fasthttp.Response.SetBodyRaw(raw)
+ if len(ctype) > 0 {
+ c.fasthttp.Response.Header.SetContentType(ctype[0])
+ } else {
+ c.fasthttp.Response.Header.SetContentType(MIMEApplicationJSON)
+ }
+ return nil
+}
+
+// JSONP sends a JSON response with JSONP support.
+// This method is identical to JSON, except that it opts-in to JSONP callback support.
+// By default, the callback name is simply callback.
+func (c *Ctx) JSONP(data interface{}, callback ...string) error {
+ raw, err := c.app.config.JSONEncoder(data)
+ if err != nil {
+ return err
+ }
+
+ var result, cb string
+
+ if len(callback) > 0 {
+ cb = callback[0]
+ } else {
+ cb = "callback"
+ }
+
+ result = cb + "(" + c.app.getString(raw) + ");"
+
+ c.setCanonical(HeaderXContentTypeOptions, "nosniff")
+ c.fasthttp.Response.Header.SetContentType(MIMETextJavaScriptCharsetUTF8)
+ return c.SendString(result)
+}
+
+// XML converts any interface or string to XML.
+// This method also sets the content header to application/xml.
+func (c *Ctx) XML(data interface{}) error {
+ raw, err := c.app.config.XMLEncoder(data)
+ if err != nil {
+ return err
+ }
+ c.fasthttp.Response.SetBodyRaw(raw)
+ c.fasthttp.Response.Header.SetContentType(MIMEApplicationXML)
+ return nil
+}
+
+// Links joins the links followed by the property to populate the response's Link HTTP header field.
+func (c *Ctx) Links(link ...string) {
+ if len(link) == 0 {
+ return
+ }
+ bb := bytebufferpool.Get()
+ for i := range link {
+ if i%2 == 0 {
+ _ = bb.WriteByte('<') //nolint:errcheck // This will never fail
+ _, _ = bb.WriteString(link[i]) //nolint:errcheck // This will never fail
+ _ = bb.WriteByte('>') //nolint:errcheck // This will never fail
+ } else {
+ _, _ = bb.WriteString(`; rel="` + link[i] + `",`) //nolint:errcheck // This will never fail
+ }
+ }
+ c.setCanonical(HeaderLink, utils.TrimRight(c.app.getString(bb.Bytes()), ','))
+ bytebufferpool.Put(bb)
+}
+
+// Locals makes it possible to pass interface{} values under keys scoped to the request
+// and therefore available to all following routes that match the request.
+func (c *Ctx) Locals(key interface{}, value ...interface{}) interface{} {
+ if len(value) == 0 {
+ return c.fasthttp.UserValue(key)
+ }
+ c.fasthttp.SetUserValue(key, value[0])
+ return value[0]
+}
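+
+// Example (illustrative sketch; the key and values are hypothetical): a
+// middleware stores a value that a later handler reads back.
+//
+//	app.Use(func(c *fiber.Ctx) error {
+//		c.Locals("user", "admin")
+//		return c.Next()
+//	})
+//	app.Get("/", func(c *fiber.Ctx) error {
+//		user, _ := c.Locals("user").(string)
+//		return c.SendString(user)
+//	})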
+
+// Location sets the response Location HTTP header to the specified path parameter.
+func (c *Ctx) Location(path string) {
+ c.setCanonical(HeaderLocation, path)
+}
+
+// Method returns the HTTP request method for the context, optionally overridden by the provided argument.
+// If no override is given or if the provided override is not a valid HTTP method, it returns the current method from the context.
+// Otherwise, it updates the context's method and returns the overridden method as a string.
+func (c *Ctx) Method(override ...string) string {
+ if len(override) == 0 {
+ // Nothing to override, just return current method from context
+ return c.method
+ }
+
+ method := utils.ToUpper(override[0])
+ mINT := c.app.methodInt(method)
+ if mINT == -1 {
+ // Provided override is not a valid HTTP method, so don't override; return current method
+ return c.method
+ }
+
+ c.method = method
+ c.methodINT = mINT
+ return c.method
+}
+
+// MultipartForm parses form entries from binary.
+// This returns a map[string][]string, so given a key the value will be a string slice.
+func (c *Ctx) MultipartForm() (*multipart.Form, error) {
+ return c.fasthttp.MultipartForm()
+}
+
+// ClientHelloInfo return CHI from context
+func (c *Ctx) ClientHelloInfo() *tls.ClientHelloInfo {
+ if c.app.tlsHandler != nil {
+ return c.app.tlsHandler.clientHelloInfo
+ }
+
+ return nil
+}
+
+// Next executes the next method in the stack that matches the current route.
+func (c *Ctx) Next() error {
+ // Increment handler index
+ c.indexHandler++
+ var err error
+ // Did we execute all route handlers?
+ if c.indexHandler < len(c.route.Handlers) {
+ // Continue route stack
+ err = c.route.Handlers[c.indexHandler](c)
+ } else {
+ // Continue handler stack
+ _, err = c.app.next(c)
+ }
+ return err
+}
+
+// RestartRouting restarts route matching from the beginning instead of going
+// to the next handler. This may be useful after changing the request path.
+// Note that handlers might be executed again.
+func (c *Ctx) RestartRouting() error {
+ c.indexRoute = -1
+ _, err := c.app.next(c)
+ return err
+}
+
+// OriginalURL contains the original request URL.
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting to use the value outside the Handler.
+func (c *Ctx) OriginalURL() string {
+ return c.app.getString(c.fasthttp.Request.Header.RequestURI())
+}
+
+// Params is used to get the route parameters.
+// Defaults to empty string "" if the param doesn't exist.
+// If a default value is given, it will return that value if the param doesn't exist.
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting to use the value outside the Handler.
+func (c *Ctx) Params(key string, defaultValue ...string) string {
+ if key == "*" || key == "+" {
+ key += "1"
+ }
+ for i := range c.route.Params {
+ if len(key) != len(c.route.Params[i]) {
+ continue
+ }
+ if c.route.Params[i] == key || (!c.app.config.CaseSensitive && utils.EqualFold(c.route.Params[i], key)) {
+ // in case values are not here
+ if len(c.values) <= i || len(c.values[i]) == 0 {
+ break
+ }
+ return c.values[i]
+ }
+ }
+ return defaultString("", defaultValue)
+}
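+
+// Usage sketch (illustrative routes and values):
+//
+//	// GET /user/fenny
+//	app.Get("/user/:name", func(c *fiber.Ctx) error {
+//		return c.SendString(c.Params("name")) // "fenny"
+//	})
+//	// GET /api/v1/users
+//	app.Get("/api/*", func(c *fiber.Ctx) error {
+//		return c.SendString(c.Params("*")) // "v1/users"
+//	})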
+
+// AllParams returns all route parameters as a map.
+// It uses the Params method to look up each value.
+func (c *Ctx) AllParams() map[string]string {
+ params := make(map[string]string, len(c.route.Params))
+ for _, param := range c.route.Params {
+ params[param] = c.Params(param)
+ }
+
+ return params
+}
+
+// ParamsParser binds the param string to a struct.
+func (c *Ctx) ParamsParser(out interface{}) error {
+ params := make(map[string][]string, len(c.route.Params))
+ for _, param := range c.route.Params {
+ params[param] = append(params[param], c.Params(param))
+ }
+ return c.parseToStruct(paramsTag, out, params)
+}
+
+// ParamsInt is used to get an integer from the route parameters.
+// It defaults to zero if the parameter is not found or if it
+// cannot be converted to an integer.
+// If a default value is given, it will return that value in case the param
+// doesn't exist or cannot be converted to an integer.
+func (c *Ctx) ParamsInt(key string, defaultValue ...int) (int, error) {
+ // Use Atoi to convert the param to an int or return zero and an error
+ value, err := strconv.Atoi(c.Params(key))
+ if err != nil {
+ if len(defaultValue) > 0 {
+ return defaultValue[0], nil
+ }
+ return 0, fmt.Errorf("failed to convert: %w", err)
+ }
+
+ return value, nil
+}
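+
+// Usage sketch (illustrative):
+//
+//	// GET /user/123
+//	app.Get("/user/:id", func(c *fiber.Ctx) error {
+//		id, err := c.ParamsInt("id") // id == 123, err == nil
+//		if err != nil {
+//			return err
+//		}
+//		return c.JSON(id)
+//	})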
+
+// Path returns the path part of the request URL.
+// Optionally, you could override the path.
+func (c *Ctx) Path(override ...string) string {
+ if len(override) != 0 && c.path != override[0] {
+ // Set new path to context
+ c.pathOriginal = override[0]
+
+ // Set new path to request context
+ c.fasthttp.Request.URI().SetPath(c.pathOriginal)
+ // Prettify path
+ c.configDependentPaths()
+ }
+ return c.path
+}
+
+// Protocol contains the request protocol string: http or https for TLS requests.
+// Use Config.EnableTrustedProxyCheck to prevent header spoofing if your app is behind a proxy.
+func (c *Ctx) Protocol() string {
+ if c.fasthttp.IsTLS() {
+ return schemeHTTPS
+ }
+ if !c.IsProxyTrusted() {
+ return schemeHTTP
+ }
+
+ scheme := schemeHTTP
+ const lenXHeaderName = 12
+ c.fasthttp.Request.Header.VisitAll(func(key, val []byte) {
+ if len(key) < lenXHeaderName {
+ return // Neither "X-Forwarded-" nor "X-Url-Scheme"
+ }
+ switch {
+ case bytes.HasPrefix(key, []byte("X-Forwarded-")):
+ if bytes.Equal(key, []byte(HeaderXForwardedProto)) ||
+ bytes.Equal(key, []byte(HeaderXForwardedProtocol)) {
+ v := c.app.getString(val)
+ commaPos := strings.Index(v, ",")
+ if commaPos != -1 {
+ scheme = v[:commaPos]
+ } else {
+ scheme = v
+ }
+ } else if bytes.Equal(key, []byte(HeaderXForwardedSsl)) && bytes.Equal(val, []byte("on")) {
+ scheme = schemeHTTPS
+ }
+
+ case bytes.Equal(key, []byte(HeaderXUrlScheme)):
+ scheme = c.app.getString(val)
+ }
+ })
+ return scheme
+}
+
+// Query returns the query string parameter in the url.
+// Defaults to empty string "" if the query doesn't exist.
+// If a default value is given, it will return that value if the query doesn't exist.
+// Returned value is only valid within the handler. Do not store any references.
+// Make copies or use the Immutable setting to use the value outside the Handler.
+func (c *Ctx) Query(key string, defaultValue ...string) string {
+ return defaultString(c.app.getString(c.fasthttp.QueryArgs().Peek(key)), defaultValue)
+}
+
+// Queries returns a map of query parameters and their values.
+//
+// GET /?name=alex&wanna_cake=2&id=
+// Queries()["name"] == "alex"
+// Queries()["wanna_cake"] == "2"
+// Queries()["id"] == ""
+//
+// GET /?field1=value1&field1=value2&field2=value3
+// Queries()["field1"] == "value2"
+// Queries()["field2"] == "value3"
+//
+// GET /?list_a=1&list_a=2&list_a=3&list_b[]=1&list_b[]=2&list_b[]=3&list_c=1,2,3
+// Queries()["list_a"] == "3"
+// Queries()["list_b[]"] == "3"
+// Queries()["list_c"] == "1,2,3"
+//
+// GET /api/search?filters.author.name=John&filters.category.name=Technology&filters[customer][name]=Alice&filters[status]=pending
+// Queries()["filters.author.name"] == "John"
+// Queries()["filters.category.name"] == "Technology"
+// Queries()["filters[customer][name]"] == "Alice"
+// Queries()["filters[status]"] == "pending"
+func (c *Ctx) Queries() map[string]string {
+ m := make(map[string]string, c.Context().QueryArgs().Len())
+ c.Context().QueryArgs().VisitAll(func(key, value []byte) {
+ m[c.app.getString(key)] = c.app.getString(value)
+ })
+ return m
+}
+
+// QueryInt returns the integer value of the key string parameter in the URL.
+// Defaults to 0 for an empty or invalid key.
+//
+// GET /?name=alex&wanna_cake=2&id=
+// QueryInt("wanna_cake", 1) == 2
+// QueryInt("name", 1) == 1
+// QueryInt("id", 1) == 1
+// QueryInt("id") == 0
+func (c *Ctx) QueryInt(key string, defaultValue ...int) int {
+ // Use Atoi to convert the param to an int or return zero and an error
+ value, err := strconv.Atoi(c.app.getString(c.fasthttp.QueryArgs().Peek(key)))
+ if err != nil {
+ if len(defaultValue) > 0 {
+ return defaultValue[0]
+ }
+ return 0
+ }
+
+ return value
+}
+
+// QueryBool returns the bool value of the key string parameter in the URL.
+// Defaults to false for an empty or invalid key.
+//
+// GET /?name=alex&want_pizza=false&id=
+// QueryBool("want_pizza") == false
+// QueryBool("want_pizza", true) == false
+// QueryBool("name") == false
+// QueryBool("name", true) == true
+// QueryBool("id") == false
+// QueryBool("id", true) == true
+func (c *Ctx) QueryBool(key string, defaultValue ...bool) bool {
+ value, err := strconv.ParseBool(c.app.getString(c.fasthttp.QueryArgs().Peek(key)))
+ if err != nil {
+ if len(defaultValue) > 0 {
+ return defaultValue[0]
+ }
+ return false
+ }
+ return value
+}
+
+// QueryFloat returns the float64 value of the key string parameter in the URL.
+// Defaults to 0 for an empty or invalid key.
+//
+// GET /?name=alex&amount=32.23&id=
+// QueryFloat("amount") = 32.23
+// QueryFloat("amount", 3) = 32.23
+// QueryFloat("name", 1) = 1
+// QueryFloat("name") = 0
+// QueryFloat("id", 3) = 3
+func (c *Ctx) QueryFloat(key string, defaultValue ...float64) float64 {
+ // use strconv.ParseFloat to convert the param to a float or return zero and an error.
+ value, err := strconv.ParseFloat(c.app.getString(c.fasthttp.QueryArgs().Peek(key)), 64)
+ if err != nil {
+ if len(defaultValue) > 0 {
+ return defaultValue[0]
+ }
+ return 0
+ }
+ return value
+}
+
+// QueryParser binds the query string to a struct.
+func (c *Ctx) QueryParser(out interface{}) error {
+ data := make(map[string][]string)
+ var err error
+
+ c.fasthttp.QueryArgs().VisitAll(func(key, val []byte) {
+ if err != nil {
+ return
+ }
+
+ k := c.app.getString(key)
+ v := c.app.getString(val)
+
+ if strings.Contains(k, "[") {
+ k, err = parseParamSquareBrackets(k)
+ }
+
+ if c.app.config.EnableSplittingOnParsers && strings.Contains(v, ",") && equalFieldType(out, reflect.Slice, k, queryTag) {
+ values := strings.Split(v, ",")
+ for i := 0; i < len(values); i++ {
+ data[k] = append(data[k], values[i])
+ }
+ } else {
+ data[k] = append(data[k], v)
+ }
+ })
+
+ if err != nil {
+ return err
+ }
+
+ return c.parseToStruct(queryTag, out, data)
+}
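+
+// Usage sketch inside a handler (illustrative; the Person struct is assumed):
+//
+//	type Person struct {
+//		Name string `query:"name"`
+//		Pass string `query:"pass"`
+//	}
+//
+//	// GET /?name=john&pass=doe
+//	p := new(Person)
+//	if err := c.QueryParser(p); err != nil {
+//		return err
+//	}
+//	// p.Name == "john", p.Pass == "doe"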
+
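+// parseParamSquareBrackets rewrites bracket notation into the dotted form the
+// schema decoder expects, e.g. "filters[customer][name]" becomes
+// "filters.customer.name", while empty brackets as in "list_b[]" are dropped.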
+func parseParamSquareBrackets(k string) (string, error) {
+ bb := bytebufferpool.Get()
+ defer bytebufferpool.Put(bb)
+
+ kbytes := []byte(k)
+
+ for i, b := range kbytes {
+ if b == '[' && i+1 < len(kbytes) && kbytes[i+1] != ']' {
+ if err := bb.WriteByte('.'); err != nil {
+ return "", fmt.Errorf("failed to write: %w", err)
+ }
+ }
+
+ if b == '[' || b == ']' {
+ continue
+ }
+
+ if err := bb.WriteByte(b); err != nil {
+ return "", fmt.Errorf("failed to write: %w", err)
+ }
+ }
+
+ return bb.String(), nil
+}
+
+// ReqHeaderParser binds the request header strings to a struct.
+func (c *Ctx) ReqHeaderParser(out interface{}) error {
+ data := make(map[string][]string)
+ c.fasthttp.Request.Header.VisitAll(func(key, val []byte) {
+ k := c.app.getString(key)
+ v := c.app.getString(val)
+
+ if c.app.config.EnableSplittingOnParsers && strings.Contains(v, ",") && equalFieldType(out, reflect.Slice, k, reqHeaderTag) {
+ values := strings.Split(v, ",")
+ for i := 0; i < len(values); i++ {
+ data[k] = append(data[k], values[i])
+ }
+ } else {
+ data[k] = append(data[k], v)
+ }
+ })
+
+ return c.parseToStruct(reqHeaderTag, out, data)
+}
+
+func (*Ctx) parseToStruct(aliasTag string, out interface{}, data map[string][]string) error {
+ // Get decoder from pool
+ schemaDecoder, ok := decoderPoolMap[aliasTag].Get().(*schema.Decoder)
+ if !ok {
+ panic(fmt.Errorf("failed to type-assert to *schema.Decoder"))
+ }
+ defer decoderPoolMap[aliasTag].Put(schemaDecoder)
+
+ // Set alias tag
+ schemaDecoder.SetAliasTag(aliasTag)
+
+ if err := schemaDecoder.Decode(out, data); err != nil {
+ return fmt.Errorf("failed to decode: %w", err)
+ }
+
+ return nil
+}
+
+func equalFieldType(out interface{}, kind reflect.Kind, key, tag string) bool {
+ // Get type of interface
+ outTyp := reflect.TypeOf(out).Elem()
+ key = utils.ToLower(key)
+ // Must be a struct to match a field
+ if outTyp.Kind() != reflect.Struct {
+ return false
+ }
+ // Copy interface to a value to be used
+ outVal := reflect.ValueOf(out).Elem()
+ // Loop over each field
+ for i := 0; i < outTyp.NumField(); i++ {
+ // Get field value data
+ structField := outVal.Field(i)
+ // Can this field be changed?
+ if !structField.CanSet() {
+ continue
+ }
+ // Get field key data
+ typeField := outTyp.Field(i)
+ // Get type of field key
+ structFieldKind := structField.Kind()
+ // Does the field type equal the input?
+ if structFieldKind != kind {
+ continue
+ }
+ // Get tag from field if it exists
+ inputFieldName := typeField.Tag.Get(tag)
+ if inputFieldName == "" {
+ inputFieldName = typeField.Name
+ } else {
+ inputFieldName = strings.Split(inputFieldName, ",")[0]
+ }
+ // Compare field/tag with provided key
+ if utils.ToLower(inputFieldName) == key {
+ return true
+ }
+ }
+ return false
+}
+
+var (
+ ErrRangeMalformed = errors.New("range: malformed range header string")
+ ErrRangeUnsatisfiable = errors.New("range: unsatisfiable range")
+)
+
+// Range returns a struct containing the type and a slice of ranges.
+func (c *Ctx) Range(size int) (Range, error) {
+ var (
+ rangeData Range
+ ranges string
+ )
+ rangeStr := c.Get(HeaderRange)
+
+ i := strings.IndexByte(rangeStr, '=')
+ if i == -1 || strings.Contains(rangeStr[i+1:], "=") {
+ return rangeData, ErrRangeMalformed
+ }
+ rangeData.Type = rangeStr[:i]
+ ranges = rangeStr[i+1:]
+
+ var (
+ singleRange string
+ moreRanges = ranges
+ )
+ for moreRanges != "" {
+ singleRange = moreRanges
+ if i := strings.IndexByte(moreRanges, ','); i >= 0 {
+ singleRange = moreRanges[:i]
+ moreRanges = moreRanges[i+1:]
+ } else {
+ moreRanges = ""
+ }
+
+ var (
+ startStr, endStr string
+ i int
+ )
+ if i = strings.IndexByte(singleRange, '-'); i == -1 {
+ return rangeData, ErrRangeMalformed
+ }
+ startStr = singleRange[:i]
+ endStr = singleRange[i+1:]
+
+ start, startErr := fasthttp.ParseUint(utils.UnsafeBytes(startStr))
+ end, endErr := fasthttp.ParseUint(utils.UnsafeBytes(endStr))
+ if startErr != nil { // -nnn
+ start = size - end
+ end = size - 1
+ } else if endErr != nil { // nnn-
+ end = size - 1
+ }
+ if end > size-1 { // limit last-byte-pos to current length
+ end = size - 1
+ }
+ if start > end || start < 0 {
+ continue
+ }
+ rangeData.Ranges = append(rangeData.Ranges, struct {
+ Start int
+ End int
+ }{
+ start,
+ end,
+ })
+ }
+ if len(rangeData.Ranges) < 1 {
+ return rangeData, ErrRangeUnsatisfiable
+ }
+
+ return rangeData, nil
+}
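+
+// Usage sketch (illustrative header and size):
+//
+//	// Range: bytes=500-700,601-999
+//	r, err := c.Range(1000)
+//	// r.Type == "bytes"
+//	// r.Ranges == [{Start: 500, End: 700}, {Start: 601, End: 999}]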
+
+// Redirect to the URL derived from the specified path, with specified status.
+// If status is not specified, status defaults to 302 Found.
+func (c *Ctx) Redirect(location string, status ...int) error {
+ c.setCanonical(HeaderLocation, location)
+ if len(status) > 0 {
+ c.Status(status[0])
+ } else {
+ c.Status(StatusFound)
+ }
+ return nil
+}
+
+// Bind adds vars to the default view var map that is bound to the template engine.
+// Variables are read by the Render method and may be overwritten.
+func (c *Ctx) Bind(vars Map) error {
+ // init viewBindMap - lazy map
+ for k, v := range vars {
+ c.viewBindMap.Store(k, v)
+ }
+ return nil
+}
+
+// getLocationFromRoute gets the URL location from the route using the given parameters
+func (c *Ctx) getLocationFromRoute(route Route, params Map) (string, error) {
+ buf := bytebufferpool.Get()
+ for _, segment := range route.routeParser.segs {
+ if !segment.IsParam {
+ _, err := buf.WriteString(segment.Const)
+ if err != nil {
+ return "", fmt.Errorf("failed to write string: %w", err)
+ }
+ continue
+ }
+
+ for key, val := range params {
+ isSame := key == segment.ParamName || (!c.app.config.CaseSensitive && utils.EqualFold(key, segment.ParamName))
+ isGreedy := segment.IsGreedy && len(key) == 1 && isInCharset(key[0], greedyParameters)
+ if isSame || isGreedy {
+ _, err := buf.WriteString(utils.ToString(val))
+ if err != nil {
+ return "", fmt.Errorf("failed to write string: %w", err)
+ }
+ }
+ }
+ }
+ location := buf.String()
+ // release buffer
+ bytebufferpool.Put(buf)
+ return location, nil
+}
+
+// GetRouteURL generates URLs to named routes, with parameters. URLs are relative, for example: "/user/1831"
+func (c *Ctx) GetRouteURL(routeName string, params Map) (string, error) {
+ return c.getLocationFromRoute(c.App().GetRoute(routeName), params)
+}
+
+// RedirectToRoute redirects to the Route registered in the app with appropriate parameters.
+// If status is not specified, status defaults to 302 Found.
+// If you want to send queries to route, you must add "queries" key typed as map[string]string to params.
+func (c *Ctx) RedirectToRoute(routeName string, params Map, status ...int) error {
+ location, err := c.getLocationFromRoute(c.App().GetRoute(routeName), params)
+ if err != nil {
+ return err
+ }
+
+ // Check queries
+ if queries, ok := params["queries"].(map[string]string); ok {
+ queryText := bytebufferpool.Get()
+ defer bytebufferpool.Put(queryText)
+
+ i := 1
+ for k, v := range queries {
+ _, _ = queryText.WriteString(k + "=" + v) //nolint:errcheck // This will never fail
+
+ if i != len(queries) {
+ _, _ = queryText.WriteString("&") //nolint:errcheck // This will never fail
+ }
+ i++
+ }
+
+ return c.Redirect(location+"?"+queryText.String(), status...)
+ }
+ return c.Redirect(location, status...)
+}
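+
+// Usage sketch (illustrative; assumes a route named "user" and a handler):
+//
+//	app.Get("/user/:name", handler).Name("user")
+//
+//	// Inside another handler:
+//	return c.RedirectToRoute("user", fiber.Map{
+//		"name":    "fenny",
+//		"queries": map[string]string{"page": "1"},
+//	})
+//	// => 302 redirect to /user/fenny?page=1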
+
+// RedirectBack redirects back to the URL in the Referer header.
+// If status is not specified, status defaults to 302 Found.
+func (c *Ctx) RedirectBack(fallback string, status ...int) error {
+ location := c.Get(HeaderReferer)
+ if location == "" {
+ location = fallback
+ }
+ return c.Redirect(location, status...)
+}
+
+// Render a template with data and sends a text/html response.
+// We support the following engines: html, amber, handlebars, mustache, pug
+func (c *Ctx) Render(name string, bind interface{}, layouts ...string) error {
+ // Get new buffer from pool
+ buf := bytebufferpool.Get()
+ defer bytebufferpool.Put(buf)
+
+ // Initialize empty bind map if bind is nil
+ if bind == nil {
+ bind = make(Map)
+ }
+
+ // Pass-locals-to-views, bind, appListKeys
+ c.renderExtensions(bind)
+
+ var rendered bool
+ for i := len(c.app.mountFields.appListKeys) - 1; i >= 0; i-- {
+ prefix := c.app.mountFields.appListKeys[i]
+ app := c.app.mountFields.appList[prefix]
+ if prefix == "" || strings.Contains(c.OriginalURL(), prefix) {
+ if len(layouts) == 0 && app.config.ViewsLayout != "" {
+ layouts = []string{
+ app.config.ViewsLayout,
+ }
+ }
+
+ // Render template from Views
+ if app.config.Views != nil {
+ if err := app.config.Views.Render(buf, name, bind, layouts...); err != nil {
+ return fmt.Errorf("failed to render: %w", err)
+ }
+
+ rendered = true
+ break
+ }
+ }
+ }
+
+ if !rendered {
+ // Render raw template using 'name' as filepath if no engine is set
+ var tmpl *template.Template
+ if _, err := readContent(buf, name); err != nil {
+ return err
+ }
+ // Parse template
+ tmpl, err := template.New("").Parse(c.app.getString(buf.Bytes()))
+ if err != nil {
+ return fmt.Errorf("failed to parse: %w", err)
+ }
+ buf.Reset()
+ // Render template
+ if err := tmpl.Execute(buf, bind); err != nil {
+ return fmt.Errorf("failed to execute: %w", err)
+ }
+ }
+
+ // Set Content-Type to text/html
+ c.fasthttp.Response.Header.SetContentType(MIMETextHTMLCharsetUTF8)
+ // Set rendered template to body
+ c.fasthttp.Response.SetBody(buf.Bytes())
+
+ return nil
+}
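+
+// Usage sketch (assumes a template engine configured via Config.Views,
+// or a raw template file at the given path):
+//
+//	app.Get("/", func(c *fiber.Ctx) error {
+//		return c.Render("index", fiber.Map{"Title": "Hello, World!"})
+//	})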
+
+func (c *Ctx) renderExtensions(bind interface{}) {
+ if bindMap, ok := bind.(Map); ok {
+ // Bind view map
+ c.viewBindMap.Range(func(key, value interface{}) bool {
+ keyValue, ok := key.(string)
+ if !ok {
+ return true
+ }
+ if _, ok := bindMap[keyValue]; !ok {
+ bindMap[keyValue] = value
+ }
+ return true
+ })
+
+ // Check if the PassLocalsToViews option is enabled (by default it is disabled)
+ if c.app.config.PassLocalsToViews {
+ // Loop through each local and set it in the map
+ c.fasthttp.VisitUserValues(func(key []byte, val interface{}) {
+ // check if bindMap doesn't contain the key
+ if _, ok := bindMap[c.app.getString(key)]; !ok {
+ // Set the key and value in the bindMap
+ bindMap[c.app.getString(key)] = val
+ }
+ })
+ }
+ }
+
+ if len(c.app.mountFields.appListKeys) == 0 {
+ c.app.generateAppListKeys()
+ }
+}
+
+// Route returns the matched Route struct.
+func (c *Ctx) Route() *Route {
+ if c.route == nil {
+ // Fallback for fasthttp error handler
+ return &Route{
+ path: c.pathOriginal,
+ Path: c.pathOriginal,
+ Method: c.method,
+ Handlers: make([]Handler, 0),
+ Params: make([]string, 0),
+ }
+ }
+ return c.route
+}
+
+// SaveFile saves any multipart file to disk.
+func (*Ctx) SaveFile(fileheader *multipart.FileHeader, path string) error {
+ return fasthttp.SaveMultipartFile(fileheader, path)
+}
+
+// SaveFileToStorage saves any multipart file to an external storage system.
+func (*Ctx) SaveFileToStorage(fileheader *multipart.FileHeader, path string, storage Storage) error {
+ file, err := fileheader.Open()
+ if err != nil {
+ return fmt.Errorf("failed to open: %w", err)
+ }
+
+ content, err := io.ReadAll(file)
+ if err != nil {
+ return fmt.Errorf("failed to read: %w", err)
+ }
+
+ if err := storage.Set(path, content, 0); err != nil {
+ return fmt.Errorf("failed to store: %w", err)
+ }
+
+ return nil
+}
+
+// Secure returns whether a secure connection was established.
+func (c *Ctx) Secure() bool {
+ return c.Protocol() == schemeHTTPS
+}
+
+// Send sets the HTTP response body without copying it.
+// From this point onward the body argument must not be changed.
+func (c *Ctx) Send(body []byte) error {
+ // Write response body
+ c.fasthttp.Response.SetBodyRaw(body)
+ return nil
+}
+
+var (
+ sendFileOnce sync.Once
+ sendFileFS *fasthttp.FS
+ sendFileHandler fasthttp.RequestHandler
+)
+
+// SendFile transfers the file from the given path.
+// The file is not compressed by default; enable compression by passing 'true' as the second argument.
+// Sets the Content-Type response HTTP header field based on the file's extension.
+func (c *Ctx) SendFile(file string, compress ...bool) error {
+ // Save the filename, we will need it in the error message if the file isn't found
+ filename := file
+
+ // https://github.com/valyala/fasthttp/blob/c7576cc10cabfc9c993317a2d3f8355497bea156/fs.go#L129-L134
+ sendFileOnce.Do(func() {
+ const cacheDuration = 10 * time.Second
+ sendFileFS = &fasthttp.FS{
+ Root: "",
+ AllowEmptyRoot: true,
+ GenerateIndexPages: false,
+ AcceptByteRange: true,
+ Compress: true,
+ CompressedFileSuffix: c.app.config.CompressedFileSuffix,
+ CacheDuration: cacheDuration,
+ IndexNames: []string{"index.html"},
+ PathNotFound: func(ctx *fasthttp.RequestCtx) {
+ ctx.Response.SetStatusCode(StatusNotFound)
+ },
+ }
+ sendFileHandler = sendFileFS.NewRequestHandler()
+ })
+
+ // Keep original path for mutable params
+ c.pathOriginal = utils.CopyString(c.pathOriginal)
+ // Disable compression
+ if len(compress) == 0 || !compress[0] {
+ // https://github.com/valyala/fasthttp/blob/7cc6f4c513f9e0d3686142e0a1a5aa2f76b3194a/fs.go#L55
+ c.fasthttp.Request.Header.Del(HeaderAcceptEncoding)
+ }
+ // copy of https://github.com/valyala/fasthttp/blob/7cc6f4c513f9e0d3686142e0a1a5aa2f76b3194a/fs.go#L103-L121 with small adjustments
+ if len(file) == 0 || !filepath.IsAbs(file) {
+ // extend relative path to absolute path
+ hasTrailingSlash := len(file) > 0 && (file[len(file)-1] == '/' || file[len(file)-1] == '\\')
+
+ var err error
+ file = filepath.FromSlash(file)
+ if file, err = filepath.Abs(file); err != nil {
+ return fmt.Errorf("failed to determine abs file path: %w", err)
+ }
+ if hasTrailingSlash {
+ file += "/"
+ }
+ }
+ // convert the path to forward slashes regardless the OS in order to set the URI properly
+ // the handler will convert back to OS path separator before opening the file
+ file = filepath.ToSlash(file)
+
+ // Restore the original requested URL
+ originalURL := utils.CopyString(c.OriginalURL())
+ defer c.fasthttp.Request.SetRequestURI(originalURL)
+ // Set new URI for fileHandler
+ c.fasthttp.Request.SetRequestURI(file)
+ // Save status code
+ status := c.fasthttp.Response.StatusCode()
+ // Serve file
+ sendFileHandler(c.fasthttp)
+ // Get the status code which is set by fasthttp
+ fsStatus := c.fasthttp.Response.StatusCode()
+ // Set the status code set by the user if it is different from the fasthttp status code and 200
+ if status != fsStatus && status != StatusOK {
+ c.Status(status)
+ }
+ // Check for error
+ if status != StatusNotFound && fsStatus == StatusNotFound {
+ return NewError(StatusNotFound, fmt.Sprintf("sendfile: file %s not found", filename))
+ }
+ return nil
+}
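+
+// Usage sketch (illustrative path):
+//
+//	app.Get("/download", func(c *fiber.Ctx) error {
+//		return c.SendFile("./files/report.pdf", true) // serve compressed
+//	})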
+
+// SendStatus sets the HTTP status code and if the response body is empty,
+// it sets the correct status message in the body.
+func (c *Ctx) SendStatus(status int) error {
+ c.Status(status)
+
+ // Only set status body when there is no response body
+ if len(c.fasthttp.Response.Body()) == 0 {
+ return c.SendString(utils.StatusMessage(status))
+ }
+
+ return nil
+}
+
+// SendString sets the HTTP response body for string types.
+// This means no type assertion, recommended for faster performance
+func (c *Ctx) SendString(body string) error {
+ c.fasthttp.Response.SetBodyString(body)
+
+ return nil
+}
+
+// SendStream sets response body stream and optional body size.
+func (c *Ctx) SendStream(stream io.Reader, size ...int) error {
+ if len(size) > 0 && size[0] >= 0 {
+ c.fasthttp.Response.SetBodyStream(stream, size[0])
+ } else {
+ c.fasthttp.Response.SetBodyStream(stream, -1)
+ }
+
+ return nil
+}
+
+// Set sets the response's HTTP header field to the specified key, value.
+func (c *Ctx) Set(key, val string) {
+ c.fasthttp.Response.Header.Set(key, val)
+}
+
+func (c *Ctx) setCanonical(key, val string) {
+ c.fasthttp.Response.Header.SetCanonical(c.app.getBytes(key), c.app.getBytes(val))
+}
+
+// Subdomains returns a string slice of subdomains in the domain name of the request.
+// The subdomain offset, which defaults to 2, is used for determining the beginning of the subdomain segments.
+func (c *Ctx) Subdomains(offset ...int) []string {
+ o := 2
+ if len(offset) > 0 {
+ o = offset[0]
+ }
+ subdomains := strings.Split(c.Hostname(), ".")
+ l := len(subdomains) - o
+ // Check index to avoid slice bounds out of range panic
+ if l < 0 {
+ l = len(subdomains)
+ }
+ subdomains = subdomains[:l]
+ return subdomains
+}
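+
+// For example (illustrative host):
+//
+//	// Host: "tobi.ferrets.example.com"
+//	c.Subdomains()  // ["tobi", "ferrets"]
+//	c.Subdomains(1) // ["tobi", "ferrets", "example"]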
+
+// Stale reports whether the client's cached response is stale (the inverse of Fresh).
+func (c *Ctx) Stale() bool {
+ return !c.Fresh()
+}
+
+// Status sets the HTTP status for the response.
+// This method is chainable.
+func (c *Ctx) Status(status int) *Ctx {
+ c.fasthttp.Response.SetStatusCode(status)
+ return c
+}
+
+// String returns unique string representation of the ctx.
+//
+// The returned value may be useful for logging.
+func (c *Ctx) String() string {
+ return fmt.Sprintf(
+ "#%016X - %s <-> %s - %s %s",
+ c.fasthttp.ID(),
+ c.fasthttp.LocalAddr(),
+ c.fasthttp.RemoteAddr(),
+ c.fasthttp.Request.Header.Method(),
+ c.fasthttp.URI().FullURI(),
+ )
+}
+
+// Type sets the Content-Type HTTP header to the MIME type specified by the file extension.
+func (c *Ctx) Type(extension string, charset ...string) *Ctx {
+ if len(charset) > 0 {
+ c.fasthttp.Response.Header.SetContentType(utils.GetMIME(extension) + "; charset=" + charset[0])
+ } else {
+ c.fasthttp.Response.Header.SetContentType(utils.GetMIME(extension))
+ }
+ return c
+}
+
+// Vary adds the given header field to the Vary response header.
+// The field is appended if not already listed; otherwise it is left in its current position.
+func (c *Ctx) Vary(fields ...string) {
+ c.Append(HeaderVary, fields...)
+}
+
+// Write appends p into response body.
+func (c *Ctx) Write(p []byte) (int, error) {
+ c.fasthttp.Response.AppendBody(p)
+ return len(p), nil
+}
+
+// Writef appends f & a into response body writer.
+func (c *Ctx) Writef(f string, a ...interface{}) (int, error) {
+ //nolint:wrapcheck // This must not be wrapped
+ return fmt.Fprintf(c.fasthttp.Response.BodyWriter(), f, a...)
+}
+
+// WriteString appends s to response body.
+func (c *Ctx) WriteString(s string) (int, error) {
+ c.fasthttp.Response.AppendBodyString(s)
+ return len(s), nil
+}
+
+// XHR returns a Boolean property, that is true, if the request's X-Requested-With header field is XMLHttpRequest,
+// indicating that the request was issued by a client library (such as jQuery).
+func (c *Ctx) XHR() bool {
+ return utils.EqualFoldBytes(c.app.getBytes(c.Get(HeaderXRequestedWith)), []byte("xmlhttprequest"))
+}
+
+// configDependentPaths sets the paths for route recognition and the prepared paths for the user;
+// here the features for caseSensitive, decoded paths, and strict paths are evaluated
+func (c *Ctx) configDependentPaths() {
+ c.pathBuffer = append(c.pathBuffer[0:0], c.pathOriginal...)
+ // If UnescapePath enabled, we decode the path and save it for the framework user
+ if c.app.config.UnescapePath {
+ c.pathBuffer = fasthttp.AppendUnquotedArg(c.pathBuffer[:0], c.pathBuffer)
+ }
+ c.path = c.app.getString(c.pathBuffer)
+
+ // another path is specified which is for routing recognition only
+ // use the path that was changed by the previous configuration flags
+ c.detectionPathBuffer = append(c.detectionPathBuffer[0:0], c.pathBuffer...)
+ // If CaseSensitive is disabled, we lowercase the original path
+ if !c.app.config.CaseSensitive {
+ c.detectionPathBuffer = utils.ToLowerBytes(c.detectionPathBuffer)
+ }
+ // If StrictRouting is disabled, we strip all trailing slashes
+ if !c.app.config.StrictRouting && len(c.detectionPathBuffer) > 1 && c.detectionPathBuffer[len(c.detectionPathBuffer)-1] == '/' {
+ c.detectionPathBuffer = utils.TrimRightBytes(c.detectionPathBuffer, '/')
+ }
+ c.detectionPath = c.app.getString(c.detectionPathBuffer)
+
+ // Define the path for dividing routes into areas for fast tree detection, so that fewer routes need to be traversed,
+ // since the first three characters select a list of routes
+ c.treePath = c.treePath[0:0]
+ const maxDetectionPaths = 3
+ if len(c.detectionPath) >= maxDetectionPaths {
+ c.treePath = c.detectionPath[:maxDetectionPaths]
+ }
+}
+
+func (c *Ctx) IsProxyTrusted() bool {
+ if !c.app.config.EnableTrustedProxyCheck {
+ return true
+ }
+
+ ip := c.fasthttp.RemoteIP()
+
+ if _, trusted := c.app.config.trustedProxiesMap[ip.String()]; trusted {
+ return true
+ }
+
+ for _, ipNet := range c.app.config.trustedProxyRanges {
+ if ipNet.Contains(ip) {
+ return true
+ }
+ }
+
+ return false
+}
+
+var localHosts = [...]string{"127.0.0.1", "::1"}
+
+// isLocalHost returns true if the address is a localhost address.
+func (*Ctx) isLocalHost(address string) bool {
+ for _, h := range localHosts {
+ if address == h {
+ return true
+ }
+ }
+ return false
+}
+
+// IsFromLocal returns true if the request came from localhost.
+func (c *Ctx) IsFromLocal() bool {
+ return c.isLocalHost(c.fasthttp.RemoteIP().String())
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/error.go b/vendor/github.com/gofiber/fiber/v2/error.go
new file mode 100644
index 0000000..e520420
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/error.go
@@ -0,0 +1,40 @@
+package fiber
+
+import (
+ errors "encoding/json"
+
+ "github.com/gofiber/fiber/v2/internal/schema"
+)
+
+type (
+ // ConversionError exposes the internal schema.ConversionError for public use.
+ ConversionError = schema.ConversionError
+ // UnknownKeyError error exposes the internal schema.UnknownKeyError for public use.
+ UnknownKeyError = schema.UnknownKeyError
+ // EmptyFieldError error exposes the internal schema.EmptyFieldError for public use.
+ EmptyFieldError = schema.EmptyFieldError
+ // MultiError error exposes the internal schema.MultiError for public use.
+ MultiError = schema.MultiError
+)
+
+type (
+ // An InvalidUnmarshalError describes an invalid argument passed to Unmarshal.
+ // (The argument to Unmarshal must be a non-nil pointer.)
+ InvalidUnmarshalError = errors.InvalidUnmarshalError
+
+ // A MarshalerError represents an error from calling a MarshalJSON or MarshalText method.
+ MarshalerError = errors.MarshalerError
+
+ // A SyntaxError is a description of a JSON syntax error.
+ SyntaxError = errors.SyntaxError
+
+ // An UnmarshalTypeError describes a JSON value that was
+ // not appropriate for a value of a specific Go type.
+ UnmarshalTypeError = errors.UnmarshalTypeError
+
+ // An UnsupportedTypeError is returned by Marshal when attempting
+ // to encode an unsupported value type.
+ UnsupportedTypeError = errors.UnsupportedTypeError
+
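+ // An UnsupportedValueError is returned by Marshal when attempting
+ // to encode an unsupported value.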
+ UnsupportedValueError = errors.UnsupportedValueError
+)
diff --git a/vendor/github.com/gofiber/fiber/v2/group.go b/vendor/github.com/gofiber/fiber/v2/group.go
new file mode 100644
index 0000000..0e546a3
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/group.go
@@ -0,0 +1,209 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package fiber
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// Group struct
+type Group struct {
+ app *App
+ parentGroup *Group
+ name string
+ anyRouteDefined bool
+
+ Prefix string
+}
+
+// Name assigns a name to the specific route or to the group itself.
+//
+// If this method is used before any route is added to the group, it sets the group name and the OnGroupName hook is used.
+// Otherwise, it sets the route name and the OnName hook is used.
+func (grp *Group) Name(name string) Router {
+ if grp.anyRouteDefined {
+ grp.app.Name(name)
+
+ return grp
+ }
+
+ grp.app.mutex.Lock()
+ if grp.parentGroup != nil {
+ grp.name = grp.parentGroup.name + name
+ } else {
+ grp.name = name
+ }
+
+ if err := grp.app.hooks.executeOnGroupNameHooks(*grp); err != nil {
+ panic(err)
+ }
+ grp.app.mutex.Unlock()
+
+ return grp
+}
+
+// Use registers a middleware route that will match requests
+// with the provided prefix (which is optional and defaults to "/").
+//
+// app.Use(func(c *fiber.Ctx) error {
+// return c.Next()
+// })
+// app.Use("/api", func(c *fiber.Ctx) error {
+// return c.Next()
+// })
+// app.Use("/api", handler, func(c *fiber.Ctx) error {
+// return c.Next()
+// })
+//
+// This method will match all HTTP verbs: GET, POST, PUT, HEAD etc...
+func (grp *Group) Use(args ...interface{}) Router {
+ var prefix string
+ var prefixes []string
+ var handlers []Handler
+
+ for i := 0; i < len(args); i++ {
+ switch arg := args[i].(type) {
+ case string:
+ prefix = arg
+ case []string:
+ prefixes = arg
+ case Handler:
+ handlers = append(handlers, arg)
+ default:
+ panic(fmt.Sprintf("use: invalid handler %v\n", reflect.TypeOf(arg)))
+ }
+ }
+
+ if len(prefixes) == 0 {
+ prefixes = append(prefixes, prefix)
+ }
+
+ for _, prefix := range prefixes {
+ grp.app.register(methodUse, getGroupPath(grp.Prefix, prefix), grp, handlers...)
+ }
+
+ if !grp.anyRouteDefined {
+ grp.anyRouteDefined = true
+ }
+
+ return grp
+}
+
+// Get registers a route for GET methods that requests a representation
+// of the specified resource. Requests using GET should only retrieve data.
+func (grp *Group) Get(path string, handlers ...Handler) Router {
+ grp.Add(MethodHead, path, handlers...)
+ return grp.Add(MethodGet, path, handlers...)
+}
+
+// Head registers a route for HEAD methods that asks for a response identical
+// to that of a GET request, but without the response body.
+func (grp *Group) Head(path string, handlers ...Handler) Router {
+ return grp.Add(MethodHead, path, handlers...)
+}
+
+// Post registers a route for POST methods that is used to submit an entity to the
+// specified resource, often causing a change in state or side effects on the server.
+func (grp *Group) Post(path string, handlers ...Handler) Router {
+ return grp.Add(MethodPost, path, handlers...)
+}
+
+// Put registers a route for PUT methods that replaces all current representations
+// of the target resource with the request payload.
+func (grp *Group) Put(path string, handlers ...Handler) Router {
+ return grp.Add(MethodPut, path, handlers...)
+}
+
+// Delete registers a route for DELETE methods that deletes the specified resource.
+func (grp *Group) Delete(path string, handlers ...Handler) Router {
+ return grp.Add(MethodDelete, path, handlers...)
+}
+
+// Connect registers a route for CONNECT methods that establishes a tunnel to the
+// server identified by the target resource.
+func (grp *Group) Connect(path string, handlers ...Handler) Router {
+ return grp.Add(MethodConnect, path, handlers...)
+}
+
+// Options registers a route for OPTIONS methods that is used to describe the
+// communication options for the target resource.
+func (grp *Group) Options(path string, handlers ...Handler) Router {
+ return grp.Add(MethodOptions, path, handlers...)
+}
+
+// Trace registers a route for TRACE methods that performs a message loop-back
+// test along the path to the target resource.
+func (grp *Group) Trace(path string, handlers ...Handler) Router {
+ return grp.Add(MethodTrace, path, handlers...)
+}
+
+// Patch registers a route for PATCH methods that is used to apply partial
+// modifications to a resource.
+func (grp *Group) Patch(path string, handlers ...Handler) Router {
+ return grp.Add(MethodPatch, path, handlers...)
+}
+
+// Add allows you to specify an HTTP method to register a route
+func (grp *Group) Add(method, path string, handlers ...Handler) Router {
+ grp.app.register(method, getGroupPath(grp.Prefix, path), grp, handlers...)
+ if !grp.anyRouteDefined {
+ grp.anyRouteDefined = true
+ }
+
+ return grp
+}
+
+// Static will create a file server serving static files
+func (grp *Group) Static(prefix, root string, config ...Static) Router {
+ grp.app.registerStatic(getGroupPath(grp.Prefix, prefix), root, config...)
+ if !grp.anyRouteDefined {
+ grp.anyRouteDefined = true
+ }
+
+ return grp
+}
+
+// All will register the handler on all HTTP methods
+func (grp *Group) All(path string, handlers ...Handler) Router {
+ for _, method := range grp.app.config.RequestMethods {
+ _ = grp.Add(method, path, handlers...)
+ }
+ return grp
+}
+
+// Group is used for Routes with common prefix to define a new sub-router with optional middleware.
+//
+// api := app.Group("/api")
+// api.Get("/users", handler)
+func (grp *Group) Group(prefix string, handlers ...Handler) Router {
+ prefix = getGroupPath(grp.Prefix, prefix)
+ if len(handlers) > 0 {
+ grp.app.register(methodUse, prefix, grp, handlers...)
+ }
+
+ // Create new group
+ newGrp := &Group{Prefix: prefix, app: grp.app, parentGroup: grp}
+ if err := grp.app.hooks.executeOnGroupHooks(*newGrp); err != nil {
+ panic(err)
+ }
+
+ return newGrp
+}
+
+// Route is used to define routes with a common prefix inside the supplied function.
+// It uses the Group method to define a new sub-router.
+func (grp *Group) Route(prefix string, fn func(router Router), name ...string) Router {
+ // Create new group
+ group := grp.Group(prefix)
+ if len(name) > 0 {
+ group.Name(name[0])
+ }
+
+ // Define routes
+ fn(group)
+
+ return group
+}
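+
+// Usage sketch (illustrative; handler is assumed):
+//
+//	api := app.Group("/api")
+//	api.Route("/v1", func(r fiber.Router) {
+//		r.Get("/users", handler) // GET /api/v1/users
+//	}, "v1.")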
diff --git a/vendor/github.com/gofiber/fiber/v2/helpers.go b/vendor/github.com/gofiber/fiber/v2/helpers.go
new file mode 100644
index 0000000..dd8de15
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/helpers.go
@@ -0,0 +1,1153 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package fiber
+
+import (
+ "bytes"
+ "crypto/tls"
+ "fmt"
+ "hash/crc32"
+ "io"
+ "net"
+ "os"
+ "path/filepath"
+ "reflect"
+ "strings"
+ "time"
+ "unsafe"
+
+ "github.com/gofiber/fiber/v2/log"
+ "github.com/gofiber/fiber/v2/utils"
+
+ "github.com/valyala/bytebufferpool"
+ "github.com/valyala/fasthttp"
+)
+
+// acceptedType is a struct that holds the parsed value of an Accept header
+// along with quality, specificity, parameters, and order.
+// Used for sorting accepted types.
+type acceptedType struct {
+ spec string
+ quality float64
+ specificity int
+ order int
+ params string
+}
+
+// getTLSConfig returns a net listener's tls config
+func getTLSConfig(ln net.Listener) *tls.Config {
+ // Get listener type
+ pointer := reflect.ValueOf(ln)
+
+ // Is it a tls.listener?
+ if pointer.String() == "<*tls.listener Value>" {
+ // Copy value from pointer
+ if val := reflect.Indirect(pointer); val.Type() != nil {
+ // Get private field from value
+ if field := val.FieldByName("config"); field.Type() != nil {
+ // Copy value from pointer field (unsafe)
+ newval := reflect.NewAt(field.Type(), unsafe.Pointer(field.UnsafeAddr())) //nolint:gosec // Probably the only way to extract the *tls.Config from a net.Listener. TODO: Verify there really is no easier way without using unsafe.
+ if newval.Type() != nil {
+ // Get element from pointer
+ if elem := newval.Elem(); elem.Type() != nil {
+ // Cast value to *tls.Config
+ c, ok := elem.Interface().(*tls.Config)
+ if !ok {
+ panic(fmt.Errorf("failed to type-assert to *tls.Config"))
+ }
+ return c
+ }
+ }
+ }
+ }
+ }
+
+ return nil
+}
+
+// readContent opens a named file and reads content from it
+func readContent(rf io.ReaderFrom, name string) (int64, error) {
+ // Read file
+ f, err := os.Open(filepath.Clean(name))
+ if err != nil {
+ return 0, fmt.Errorf("failed to open: %w", err)
+ }
+ defer func() {
+ if err = f.Close(); err != nil {
+ log.Errorf("Error closing file: %s", err)
+ }
+ }()
+ n, err := rf.ReadFrom(f)
+ if err != nil {
+ return n, fmt.Errorf("failed to read: %w", err)
+ }
+ return n, nil
+}
+
+// quoteString escapes special characters in a given string
+func (app *App) quoteString(raw string) string {
+ bb := bytebufferpool.Get()
+ // quoted := string(fasthttp.AppendQuotedArg(bb.B, getBytes(raw)))
+ quoted := app.getString(fasthttp.AppendQuotedArg(bb.B, app.getBytes(raw)))
+ bytebufferpool.Put(bb)
+ return quoted
+}
+
+// methodExist scans the stack to check whether another method matches the request
+func (app *App) methodExist(ctx *Ctx) bool {
+ var exists bool
+ methods := app.config.RequestMethods
+ for i := 0; i < len(methods); i++ {
+ // Skip original method
+ if ctx.methodINT == i {
+ continue
+ }
+ // Reset stack index
+ indexRoute := -1
+ tree, ok := ctx.app.treeStack[i][ctx.treePath]
+ if !ok {
+ tree = ctx.app.treeStack[i][""]
+ }
+ // Get stack length
+ lenr := len(tree) - 1
+ // Loop over the route stack starting from previous index
+ for indexRoute < lenr {
+ // Increment route index
+ indexRoute++
+ // Get *Route
+ route := tree[indexRoute]
+ // Skip use routes
+ if route.use {
+ continue
+ }
+ // Check if it matches the request path
+ match := route.match(ctx.detectionPath, ctx.path, &ctx.values)
+ // If the route matches, the method exists for this path
+ if match {
+ // We matched
+ exists = true
+ // Add method to Allow header
+ ctx.Append(HeaderAllow, methods[i])
+ // Break stack loop
+ break
+ }
+ }
+ }
+ return exists
+}
+
+// uniqueRouteStack drops all non-unique routes from the slice
+func uniqueRouteStack(stack []*Route) []*Route {
+ var unique []*Route
+ m := make(map[*Route]int)
+ for _, v := range stack {
+ if _, ok := m[v]; !ok {
+ // Unique key found. Record position and collect
+ // in result.
+ m[v] = len(unique)
+ unique = append(unique, v)
+ }
+ }
+
+ return unique
+}
+
+// defaultString returns the value, or the first default value if the value is empty
+func defaultString(value string, defaultValue []string) string {
+ if len(value) == 0 && len(defaultValue) > 0 {
+ return defaultValue[0]
+ }
+ return value
+}
+
+const normalizedHeaderETag = "Etag"
+
+// Generate and set ETag header to response
+func setETag(c *Ctx, weak bool) { //nolint: revive // Accepting a bool param is fine here
+ // Don't generate ETags for invalid responses
+ if c.fasthttp.Response.StatusCode() != StatusOK {
+ return
+ }
+ body := c.fasthttp.Response.Body()
+ // Skips ETag if no response body is present
+ if len(body) == 0 {
+ return
+ }
+ // Get ETag header from request
+ clientEtag := c.Get(HeaderIfNoneMatch)
+
+ // Generate ETag for response
+ const pol = 0xD5828281
+ crc32q := crc32.MakeTable(pol)
+ etag := fmt.Sprintf("\"%d-%v\"", len(body), crc32.Checksum(body, crc32q))
+
+ // Enable weak tag
+ if weak {
+ etag = "W/" + etag
+ }
+
+ // Check if client's ETag is weak
+ if strings.HasPrefix(clientEtag, "W/") {
+ // Check if server's ETag is weak
+ if clientEtag[2:] == etag || clientEtag[2:] == etag[2:] {
+ // W/1 == 1 || W/1 == W/1
+ if err := c.SendStatus(StatusNotModified); err != nil {
+ log.Errorf("setETag: failed to SendStatus: %v", err)
+ }
+ c.fasthttp.ResetBody()
+ return
+ }
+ // W/1 != W/2 || W/1 != 2
+ c.setCanonical(normalizedHeaderETag, etag)
+ return
+ }
+ if strings.Contains(clientEtag, etag) {
+ // 1 == 1
+ if err := c.SendStatus(StatusNotModified); err != nil {
+ log.Errorf("setETag: failed to SendStatus: %v", err)
+ }
+ c.fasthttp.ResetBody()
+ return
+ }
+ // 1 != 2
+ c.setCanonical(normalizedHeaderETag, etag)
+}
+
+func getGroupPath(prefix, path string) string {
+ if len(path) == 0 {
+ return prefix
+ }
+
+ if path[0] != '/' {
+ path = "/" + path
+ }
+
+ return utils.TrimRight(prefix, '/') + path
+}
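+
+// For example (illustrative):
+//
+//	getGroupPath("/api", "v1")  // "/api/v1"
+//	getGroupPath("/api", "/v1") // "/api/v1"
+//	getGroupPath("/api", "")    // "/api"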
+
+// acceptsOffer determines if an offer matches a given specification.
+// It checks if the specification ends with a '*' or if the offer has the prefix of the specification.
+// Returns true if the offer matches the specification, false otherwise.
+func acceptsOffer(spec, offer, _ string) bool {
+ if len(spec) >= 1 && spec[len(spec)-1] == '*' {
+ return true
+ } else if strings.HasPrefix(spec, offer) {
+ return true
+ }
+ return false
+}
+
+// acceptsOfferType determines if an offer type matches a given specification.
+// It checks if the specification is equal to */* (i.e., all types are accepted).
+// It gets the MIME type of the offer (either from the offer itself or by its file extension).
+// It checks if the offer MIME type matches the specification MIME type or if the specification is of the form <MIME_type>/* and the offer has the same <MIME_type>.
+// It checks if the offer contains every parameter present in the specification.
+// Returns true if the offer type matches the specification, false otherwise.
+func acceptsOfferType(spec, offerType, specParams string) bool {
+ var offerMime, offerParams string
+
+ if i := strings.IndexByte(offerType, ';'); i == -1 {
+ offerMime = offerType
+ } else {
+ offerMime = offerType[:i]
+ offerParams = offerType[i:]
+ }
+
+ // Accept: */*
+ if spec == "*/*" {
+ return paramsMatch(specParams, offerParams)
+ }
+
+ var mimetype string
+ if strings.IndexByte(offerMime, '/') != -1 {
+ mimetype = offerMime // MIME type
+ } else {
+ mimetype = utils.GetMIME(offerMime) // extension
+ }
+
+ if spec == mimetype {
+ // Accept: <MIME_type>/<MIME_subtype>
+ return paramsMatch(specParams, offerParams)
+ }
+
+ s := strings.IndexByte(mimetype, '/')
+ // Accept: <MIME_type>/*
+ if strings.HasPrefix(spec, mimetype[:s]) && (spec[s:] == "/*" || mimetype[s:] == "/*") {
+ return paramsMatch(specParams, offerParams)
+ }
+
+ return false
+}
+
+// paramsMatch returns whether offerParams contains all parameters present in specParams.
+// Matching is case insensitive, and surrounding quotes are stripped.
+// To align with the behavior of res.format from Express, the order of parameters is
+// ignored, and if a parameter is specified twice in the incoming Accept, the last
+// provided value is given precedence.
+// In the case of quoted values, RFC 9110 says that we must treat any character escaped
+// by a backslash as equivalent to the character itself (e.g., "a\aa" is equivalent to "aaa").
+// For the sake of simplicity, we forgo this and compare the value as-is. Besides, it would
+// be highly unusual for a client to escape something other than a double quote or backslash.
+// See https://www.rfc-editor.org/rfc/rfc9110#name-parameters
+func paramsMatch(specParamStr, offerParams string) bool {
+ if specParamStr == "" {
+ return true
+ }
+
+ // Preprocess the spec params to more easily test
+ // for out-of-order parameters
+ specParams := make([][2]string, 0, 2)
+ forEachParameter(specParamStr, func(s1, s2 string) bool {
+ if s1 == "q" || s1 == "Q" {
+ return false
+ }
+ for i := range specParams {
+ if utils.EqualFold(s1, specParams[i][0]) {
+ specParams[i][1] = s2
+ return false
+ }
+ }
+ specParams = append(specParams, [2]string{s1, s2})
+ return true
+ })
+
+ allSpecParamsMatch := true
+ for i := range specParams {
+ foundParam := false
+ forEachParameter(offerParams, func(offerParam, offerVal string) bool {
+ if utils.EqualFold(specParams[i][0], offerParam) {
+ foundParam = true
+ allSpecParamsMatch = utils.EqualFold(specParams[i][1], offerVal)
+ return false
+ }
+ return true
+ })
+ if !foundParam || !allSpecParamsMatch {
+ return false
+ }
+ }
+ return allSpecParamsMatch
+}
+
+// getSplicedStrList takes a string and a string slice, splits the string on
+// ',' and stores the resulting elements in the string slice.
+// It returns the populated string slice as an output.
+//
+// If the given slice doesn't have enough space, it will allocate more and return it.
+func getSplicedStrList(headerValue string, dst []string) []string {
+ if headerValue == "" {
+ return nil
+ }
+
+ var (
+ index int
+ character rune
+ lastElementEndsAt int
+ insertIndex int
+ )
+ for index, character = range headerValue + "$" {
+ if character == ',' || index == len(headerValue) {
+ if insertIndex >= len(dst) {
+ oldSlice := dst
+ dst = make([]string, len(dst)+(len(dst)>>1)+2)
+ copy(dst, oldSlice)
+ }
+ dst[insertIndex] = utils.TrimLeft(headerValue[lastElementEndsAt:index], ' ')
+ lastElementEndsAt = index + 1
+ insertIndex++
+ }
+ }
+
+ if len(dst) > insertIndex {
+ dst = dst[:insertIndex]
+ }
+ return dst
+}
+
+// forEachMediaRange parses an Accept or Content-Type header, calling functor
+// on each media range.
+// See: https://www.rfc-editor.org/rfc/rfc9110#name-content-negotiation-fields
+func forEachMediaRange(header string, functor func(string)) {
+ hasDQuote := strings.IndexByte(header, '"') != -1
+
+ for len(header) > 0 {
+ n := 0
+ header = utils.TrimLeft(header, ' ')
+ quotes := 0
+ escaping := false
+
+ if hasDQuote {
+ // Complex case. We need to keep track of quotes and quoted-pairs (i.e., characters escaped with \ )
+ loop:
+ for n < len(header) {
+ switch header[n] {
+ case ',':
+ if quotes%2 == 0 {
+ break loop
+ }
+ case '"':
+ if !escaping {
+ quotes++
+ }
+ case '\\':
+ if quotes%2 == 1 {
+ escaping = !escaping
+ }
+ }
+ n++
+ }
+ } else {
+ // Simple case. Just look for the next comma.
+ if n = strings.IndexByte(header, ','); n == -1 {
+ n = len(header)
+ }
+ }
+
+ functor(header[:n])
+
+ if n >= len(header) {
+ return
+ }
+ header = header[n+1:]
+ }
+}
+
+// forEachParameter parses a given parameter list, calling functor
+// on each valid parameter. If functor returns false, we stop processing.
+// It expects a leading ';'.
+// See: https://www.rfc-editor.org/rfc/rfc9110#section-5.6.6
+// According to RFC-9110 2.4, it is up to our discretion whether
+// to attempt to recover from errors in HTTP semantics. Therefore,
+// we take the simple approach and exit early when a semantic error
+// is detected in the header.
+//
+// parameter = parameter-name "=" parameter-value
+// parameter-name = token
+// parameter-value = ( token / quoted-string )
+// parameters = *( OWS ";" OWS [ parameter ] )
+func forEachParameter(params string, functor func(string, string) bool) {
+ for len(params) > 0 {
+ // eat OWS ";" OWS
+ params = utils.TrimLeft(params, ' ')
+ if len(params) == 0 || params[0] != ';' {
+ return
+ }
+ params = utils.TrimLeft(params[1:], ' ')
+
+ n := 0
+
+ // make sure the parameter is at least one character long
+ if len(params) == 0 || !validHeaderFieldByte(params[n]) {
+ return
+ }
+ n++
+ for n < len(params) && validHeaderFieldByte(params[n]) {
+ n++
+ }
+
+ // We should hit a '=' (that has more characters after it)
+ // If not, the parameter is invalid.
+ // param=foo
+ // ~~~~~^
+ if n >= len(params)-1 || params[n] != '=' {
+ return
+ }
+ param := params[:n]
+ n++
+
+ if params[n] == '"' {
+ // Handle quoted strings and quoted-pairs (i.e., characters escaped with \ )
+ // See: https://www.rfc-editor.org/rfc/rfc9110#section-5.6.4
+ foundEndQuote := false
+ escaping := false
+ n++
+ m := n
+ for ; n < len(params); n++ {
+ if params[n] == '"' && !escaping {
+ foundEndQuote = true
+ break
+ }
+ // Recipients that process the value of a quoted-string MUST handle
+ // a quoted-pair as if it were replaced by the octet following the backslash
+ escaping = params[n] == '\\' && !escaping
+ }
+ if !foundEndQuote {
+ // Not a valid parameter
+ return
+ }
+ if !functor(param, params[m:n]) {
+ return
+ }
+ n++
+ } else if validHeaderFieldByte(params[n]) {
+ // Parse a normal value, which should just be a token.
+ m := n
+ n++
+ for n < len(params) && validHeaderFieldByte(params[n]) {
+ n++
+ }
+ if !functor(param, params[m:n]) {
+ return
+ }
+ } else {
+ // Value was invalid
+ return
+ }
+ params = params[n:]
+ }
+}
+
+// validHeaderFieldByte returns true if a valid tchar
+//
+// tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
+// "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
+//
+// See: https://www.rfc-editor.org/rfc/rfc9110#section-5.6.2
+// Function copied from net/textproto:
+// https://github.com/golang/go/blob/master/src/net/textproto/reader.go#L663
+func validHeaderFieldByte(c byte) bool {
+ // mask is a 128-bit bitmap with 1s for allowed bytes,
+ // so that the byte c can be tested with a shift and an and.
+ // If c >= 128, then 1<<c and 1<<(c-64) will both be zero,
+ // and this function will return false.
+ const mask = 0 |
+ (1<<(10)-1)<<'0' |
+ (1<<26-1)<<'a' |
+ (1<<26-1)<<'A' |
+ 1<<'!' |
+ 1<<'#' |
+ 1<<'$' |
+ 1<<'%' |
+ 1<<'&' |
+ 1<<'\'' |
+ 1<<'*' |
+ 1<<'+' |
+ 1<<'-' |
+ 1<<'.' |
+ 1<<'^' |
+ 1<<'_' |
+ 1<<'`' |
+ 1<<'|' |
+ 1<<'~'
+ return ((uint64(1)<<c)&(mask&(1<<64-1)) |
+ (uint64(1)<<(c-64))&(mask>>64)) != 0
+}
+
+// getOffer return valid offer for header negotiation
+func getOffer(header string, isAccepted func(spec, offer, specParams string) bool, offers ...string) string {
+ if len(offers) == 0 {
+ return ""
+ }
+ if header == "" {
+ return offers[0]
+ }
+
+ acceptedTypes := make([]acceptedType, 0, 8)
+ order := 0
+
+ // Parse header and get accepted types with their quality and specificity
+ // See: https://www.rfc-editor.org/rfc/rfc9110#name-content-negotiation-fields
+ forEachMediaRange(header, func(accept string) {
+ order++
+ spec, quality, params := accept, 1.0, ""
+
+ if i := strings.IndexByte(accept, ';'); i != -1 {
+ spec = accept[:i]
+
+ // The vast majority of requests will have only the q parameter with
+ // no whitespace. Check this first to see if we can skip
+ // the more involved parsing.
+ if strings.HasPrefix(accept[i:], ";q=") && strings.IndexByte(accept[i+3:], ';') == -1 {
+ if q, err := fasthttp.ParseUfloat([]byte(utils.TrimRight(accept[i+3:], ' '))); err == nil {
+ quality = q
+ }
+ } else {
+ hasParams := false
+ forEachParameter(accept[i:], func(param, val string) bool {
+ if param == "q" || param == "Q" {
+ if q, err := fasthttp.ParseUfloat([]byte(val)); err == nil {
+ quality = q
+ }
+ return false
+ }
+ hasParams = true
+ return true
+ })
+ if hasParams {
+ params = accept[i:]
+ }
+ }
+ // Skip this accept type if quality is 0.0
+ // See: https://www.rfc-editor.org/rfc/rfc9110#quality.values
+ if quality == 0.0 {
+ return
+ }
+ }
+
+ spec = utils.TrimRight(spec, ' ')
+
+ // Get specificity
+ var specificity int
+ // check for wildcard this could be a mime */* or a wildcard character *
+ if spec == "*/*" || spec == "*" {
+ specificity = 1
+ } else if strings.HasSuffix(spec, "/*") {
+ specificity = 2
+ } else if strings.IndexByte(spec, '/') != -1 {
+ specificity = 3
+ } else {
+ specificity = 4
+ }
+
+ // Add to accepted types
+ acceptedTypes = append(acceptedTypes, acceptedType{spec, quality, specificity, order, params})
+ })
+
+ if len(acceptedTypes) > 1 {
+ // Sort accepted types by quality and specificity, preserving order of equal elements
+ sortAcceptedTypes(&acceptedTypes)
+ }
+
+ // Find the first offer that matches the accepted types
+ for _, acceptedType := range acceptedTypes {
+ for _, offer := range offers {
+ if len(offer) == 0 {
+ continue
+ }
+ if isAccepted(acceptedType.spec, offer, acceptedType.params) {
+ return offer
+ }
+ }
+ }
+
+ return ""
+}
+
+// sortAcceptedTypes sorts accepted types by quality and specificity, preserving order of equal elements
+// A type with parameters has higher priority than an equivalent one without parameters.
+// e.g., text/html;a=1;b=2 comes before text/html;a=1
+// See: https://www.rfc-editor.org/rfc/rfc9110#name-content-negotiation-fields
+func sortAcceptedTypes(acceptedTypes *[]acceptedType) {
+ if acceptedTypes == nil || len(*acceptedTypes) < 2 {
+ return
+ }
+ at := *acceptedTypes
+
+ for i := 1; i < len(at); i++ {
+ lo, hi := 0, i-1
+ for lo <= hi {
+ mid := (lo + hi) / 2
+ if at[i].quality < at[mid].quality ||
+ (at[i].quality == at[mid].quality && at[i].specificity < at[mid].specificity) ||
+ (at[i].quality == at[mid].quality && at[i].specificity < at[mid].specificity && len(at[i].params) < len(at[mid].params)) ||
+ (at[i].quality == at[mid].quality && at[i].specificity == at[mid].specificity && len(at[i].params) == len(at[mid].params) && at[i].order > at[mid].order) {
+ lo = mid + 1
+ } else {
+ hi = mid - 1
+ }
+ }
+ for j := i; j > lo; j-- {
+ at[j-1], at[j] = at[j], at[j-1]
+ }
+ }
+}
+
+func matchEtag(s, etag string) bool {
+ if s == etag || s == "W/"+etag || "W/"+s == etag {
+ return true
+ }
+
+ return false
+}
+
+func (app *App) isEtagStale(etag string, noneMatchBytes []byte) bool {
+ var start, end int
+
+ // Adapted from:
+ // https://github.com/jshttp/fresh/blob/10e0471669dbbfbfd8de65bc6efac2ddd0bfa057/index.js#L110
+ for i := range noneMatchBytes {
+ switch noneMatchBytes[i] {
+ case 0x20:
+ if start == end {
+ start = i + 1
+ end = i + 1
+ }
+ case 0x2c:
+ if matchEtag(app.getString(noneMatchBytes[start:end]), etag) {
+ return false
+ }
+ start = i + 1
+ end = i + 1
+ default:
+ end = i + 1
+ }
+ }
+
+ return !matchEtag(app.getString(noneMatchBytes[start:end]), etag)
+}
+
+func parseAddr(raw string) (string, string) { //nolint:revive // Returns (host, port)
+ if i := strings.LastIndex(raw, ":"); i != -1 {
+ return raw[:i], raw[i+1:]
+ }
+ return raw, ""
+}
+
+const noCacheValue = "no-cache"
+
+// isNoCache checks if the cacheControl header value contains a `no-cache` directive.
+func isNoCache(cacheControl string) bool {
+ i := strings.Index(cacheControl, noCacheValue)
+ if i == -1 {
+ return false
+ }
+
+ // Xno-cache
+ if i > 0 && !(cacheControl[i-1] == ' ' || cacheControl[i-1] == ',') {
+ return false
+ }
+
+ // bla bla, no-cache
+ if i+len(noCacheValue) == len(cacheControl) {
+ return true
+ }
+
+ // bla bla, no-cacheX
+ if cacheControl[i+len(noCacheValue)] != ',' {
+ return false
+ }
+
+ // OK
+ return true
+}
+
+type testConn struct {
+ r bytes.Buffer
+ w bytes.Buffer
+}
+
+func (c *testConn) Read(b []byte) (int, error) { return c.r.Read(b) } //nolint:wrapcheck // This must not be wrapped
+func (c *testConn) Write(b []byte) (int, error) { return c.w.Write(b) } //nolint:wrapcheck // This must not be wrapped
+func (*testConn) Close() error { return nil }
+
+func (*testConn) LocalAddr() net.Addr { return &net.TCPAddr{Port: 0, Zone: "", IP: net.IPv4zero} }
+func (*testConn) RemoteAddr() net.Addr { return &net.TCPAddr{Port: 0, Zone: "", IP: net.IPv4zero} }
+func (*testConn) SetDeadline(_ time.Time) error { return nil }
+func (*testConn) SetReadDeadline(_ time.Time) error { return nil }
+func (*testConn) SetWriteDeadline(_ time.Time) error { return nil }
+
+func getStringImmutable(b []byte) string {
+ return string(b)
+}
+
+func getBytesImmutable(s string) []byte {
+ return []byte(s)
+}
+
+// HTTP methods and their unique INTs
+func (app *App) methodInt(s string) int {
+ // For better performance
+ if len(app.configured.RequestMethods) == 0 {
+ // TODO: Use iota instead
+ switch s {
+ case MethodGet:
+ return 0
+ case MethodHead:
+ return 1
+ case MethodPost:
+ return 2
+ case MethodPut:
+ return 3
+ case MethodDelete:
+ return 4
+ case MethodConnect:
+ return 5
+ case MethodOptions:
+ return 6
+ case MethodTrace:
+ return 7
+ case MethodPatch:
+ return 8
+ default:
+ return -1
+ }
+ }
+
+ // For method customization
+ for i, v := range app.config.RequestMethods {
+ if s == v {
+ return i
+ }
+ }
+
+ return -1
+}
+
+// IsMethodSafe reports whether the HTTP method is considered safe.
+// See https://datatracker.ietf.org/doc/html/rfc9110#section-9.2.1
+func IsMethodSafe(m string) bool {
+ switch m {
+ case MethodGet,
+ MethodHead,
+ MethodOptions,
+ MethodTrace:
+ return true
+ default:
+ return false
+ }
+}
+
+// IsMethodIdempotent reports whether the HTTP method is considered idempotent.
+// See https://datatracker.ietf.org/doc/html/rfc9110#section-9.2.2
+func IsMethodIdempotent(m string) bool {
+ if IsMethodSafe(m) {
+ return true
+ }
+
+ switch m {
+ case MethodPut, MethodDelete:
+ return true
+ default:
+ return false
+ }
+}
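+// For illustration: every safe method is idempotent, PUT and DELETE are
+// idempotent but not safe, and POST is neither.
+//
+//    IsMethodSafe(MethodGet)        // true
+//    IsMethodSafe(MethodPut)        // false
+//    IsMethodIdempotent(MethodPut)  // true
+//    IsMethodIdempotent(MethodPost) // false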
+
+// HTTP methods were copied from net/http.
+const (
+ MethodGet = "GET" // RFC 7231, 4.3.1
+ MethodHead = "HEAD" // RFC 7231, 4.3.2
+ MethodPost = "POST" // RFC 7231, 4.3.3
+ MethodPut = "PUT" // RFC 7231, 4.3.4
+ MethodPatch = "PATCH" // RFC 5789
+ MethodDelete = "DELETE" // RFC 7231, 4.3.5
+ MethodConnect = "CONNECT" // RFC 7231, 4.3.6
+ MethodOptions = "OPTIONS" // RFC 7231, 4.3.7
+ MethodTrace = "TRACE" // RFC 7231, 4.3.8
+ methodUse = "USE"
+)
+
+// MIME types that are commonly used
+const (
+ MIMETextXML = "text/xml"
+ MIMETextHTML = "text/html"
+ MIMETextPlain = "text/plain"
+ MIMETextJavaScript = "text/javascript"
+ MIMEApplicationXML = "application/xml"
+ MIMEApplicationJSON = "application/json"
+ // Deprecated: use MIMETextJavaScript instead
+ MIMEApplicationJavaScript = "application/javascript"
+ MIMEApplicationForm = "application/x-www-form-urlencoded"
+ MIMEOctetStream = "application/octet-stream"
+ MIMEMultipartForm = "multipart/form-data"
+
+ MIMETextXMLCharsetUTF8 = "text/xml; charset=utf-8"
+ MIMETextHTMLCharsetUTF8 = "text/html; charset=utf-8"
+ MIMETextPlainCharsetUTF8 = "text/plain; charset=utf-8"
+ MIMETextJavaScriptCharsetUTF8 = "text/javascript; charset=utf-8"
+ MIMEApplicationXMLCharsetUTF8 = "application/xml; charset=utf-8"
+ MIMEApplicationJSONCharsetUTF8 = "application/json; charset=utf-8"
+ // Deprecated: use MIMETextJavaScriptCharsetUTF8 instead
+ MIMEApplicationJavaScriptCharsetUTF8 = "application/javascript; charset=utf-8"
+)
+
+// HTTP status codes were copied from net/http with the following updates:
+// - Rename StatusNonAuthoritativeInfo to StatusNonAuthoritativeInformation
+// - Add StatusSwitchProxy (306)
+// NOTE: Keep this list in sync with statusMessage
+const (
+ StatusContinue = 100 // RFC 9110, 15.2.1
+ StatusSwitchingProtocols = 101 // RFC 9110, 15.2.2
+ StatusProcessing = 102 // RFC 2518, 10.1
+ StatusEarlyHints = 103 // RFC 8297
+
+ StatusOK = 200 // RFC 9110, 15.3.1
+ StatusCreated = 201 // RFC 9110, 15.3.2
+ StatusAccepted = 202 // RFC 9110, 15.3.3
+ StatusNonAuthoritativeInformation = 203 // RFC 9110, 15.3.4
+ StatusNoContent = 204 // RFC 9110, 15.3.5
+ StatusResetContent = 205 // RFC 9110, 15.3.6
+ StatusPartialContent = 206 // RFC 9110, 15.3.7
+ StatusMultiStatus = 207 // RFC 4918, 11.1
+ StatusAlreadyReported = 208 // RFC 5842, 7.1
+ StatusIMUsed = 226 // RFC 3229, 10.4.1
+
+ StatusMultipleChoices = 300 // RFC 9110, 15.4.1
+ StatusMovedPermanently = 301 // RFC 9110, 15.4.2
+ StatusFound = 302 // RFC 9110, 15.4.3
+ StatusSeeOther = 303 // RFC 9110, 15.4.4
+ StatusNotModified = 304 // RFC 9110, 15.4.5
+ StatusUseProxy = 305 // RFC 9110, 15.4.6
+ StatusSwitchProxy = 306 // RFC 9110, 15.4.7 (Unused)
+ StatusTemporaryRedirect = 307 // RFC 9110, 15.4.8
+ StatusPermanentRedirect = 308 // RFC 9110, 15.4.9
+
+ StatusBadRequest = 400 // RFC 9110, 15.5.1
+ StatusUnauthorized = 401 // RFC 9110, 15.5.2
+ StatusPaymentRequired = 402 // RFC 9110, 15.5.3
+ StatusForbidden = 403 // RFC 9110, 15.5.4
+ StatusNotFound = 404 // RFC 9110, 15.5.5
+ StatusMethodNotAllowed = 405 // RFC 9110, 15.5.6
+ StatusNotAcceptable = 406 // RFC 9110, 15.5.7
+ StatusProxyAuthRequired = 407 // RFC 9110, 15.5.8
+ StatusRequestTimeout = 408 // RFC 9110, 15.5.9
+ StatusConflict = 409 // RFC 9110, 15.5.10
+ StatusGone = 410 // RFC 9110, 15.5.11
+ StatusLengthRequired = 411 // RFC 9110, 15.5.12
+ StatusPreconditionFailed = 412 // RFC 9110, 15.5.13
+ StatusRequestEntityTooLarge = 413 // RFC 9110, 15.5.14
+ StatusRequestURITooLong = 414 // RFC 9110, 15.5.15
+ StatusUnsupportedMediaType = 415 // RFC 9110, 15.5.16
+ StatusRequestedRangeNotSatisfiable = 416 // RFC 9110, 15.5.17
+ StatusExpectationFailed = 417 // RFC 9110, 15.5.18
+ StatusTeapot = 418 // RFC 9110, 15.5.19 (Unused)
+ StatusMisdirectedRequest = 421 // RFC 9110, 15.5.20
+ StatusUnprocessableEntity = 422 // RFC 9110, 15.5.21
+ StatusLocked = 423 // RFC 4918, 11.3
+ StatusFailedDependency = 424 // RFC 4918, 11.4
+ StatusTooEarly = 425 // RFC 8470, 5.2
+ StatusUpgradeRequired = 426 // RFC 9110, 15.5.22
+ StatusPreconditionRequired = 428 // RFC 6585, 3
+ StatusTooManyRequests = 429 // RFC 6585, 4
+ StatusRequestHeaderFieldsTooLarge = 431 // RFC 6585, 5
+ StatusUnavailableForLegalReasons = 451 // RFC 7725, 3
+
+ StatusInternalServerError = 500 // RFC 9110, 15.6.1
+ StatusNotImplemented = 501 // RFC 9110, 15.6.2
+ StatusBadGateway = 502 // RFC 9110, 15.6.3
+ StatusServiceUnavailable = 503 // RFC 9110, 15.6.4
+ StatusGatewayTimeout = 504 // RFC 9110, 15.6.5
+ StatusHTTPVersionNotSupported = 505 // RFC 9110, 15.6.6
+ StatusVariantAlsoNegotiates = 506 // RFC 2295, 8.1
+ StatusInsufficientStorage = 507 // RFC 4918, 11.5
+ StatusLoopDetected = 508 // RFC 5842, 7.2
+ StatusNotExtended = 510 // RFC 2774, 7
+ StatusNetworkAuthenticationRequired = 511 // RFC 6585, 6
+)
+
+// Errors
+var (
+ ErrBadRequest = NewError(StatusBadRequest) // 400
+ ErrUnauthorized = NewError(StatusUnauthorized) // 401
+ ErrPaymentRequired = NewError(StatusPaymentRequired) // 402
+ ErrForbidden = NewError(StatusForbidden) // 403
+ ErrNotFound = NewError(StatusNotFound) // 404
+ ErrMethodNotAllowed = NewError(StatusMethodNotAllowed) // 405
+ ErrNotAcceptable = NewError(StatusNotAcceptable) // 406
+ ErrProxyAuthRequired = NewError(StatusProxyAuthRequired) // 407
+ ErrRequestTimeout = NewError(StatusRequestTimeout) // 408
+ ErrConflict = NewError(StatusConflict) // 409
+ ErrGone = NewError(StatusGone) // 410
+ ErrLengthRequired = NewError(StatusLengthRequired) // 411
+ ErrPreconditionFailed = NewError(StatusPreconditionFailed) // 412
+ ErrRequestEntityTooLarge = NewError(StatusRequestEntityTooLarge) // 413
+ ErrRequestURITooLong = NewError(StatusRequestURITooLong) // 414
+ ErrUnsupportedMediaType = NewError(StatusUnsupportedMediaType) // 415
+ ErrRequestedRangeNotSatisfiable = NewError(StatusRequestedRangeNotSatisfiable) // 416
+ ErrExpectationFailed = NewError(StatusExpectationFailed) // 417
+ ErrTeapot = NewError(StatusTeapot) // 418
+ ErrMisdirectedRequest = NewError(StatusMisdirectedRequest) // 421
+ ErrUnprocessableEntity = NewError(StatusUnprocessableEntity) // 422
+ ErrLocked = NewError(StatusLocked) // 423
+ ErrFailedDependency = NewError(StatusFailedDependency) // 424
+ ErrTooEarly = NewError(StatusTooEarly) // 425
+ ErrUpgradeRequired = NewError(StatusUpgradeRequired) // 426
+ ErrPreconditionRequired = NewError(StatusPreconditionRequired) // 428
+ ErrTooManyRequests = NewError(StatusTooManyRequests) // 429
+ ErrRequestHeaderFieldsTooLarge = NewError(StatusRequestHeaderFieldsTooLarge) // 431
+ ErrUnavailableForLegalReasons = NewError(StatusUnavailableForLegalReasons) // 451
+
+ ErrInternalServerError = NewError(StatusInternalServerError) // 500
+ ErrNotImplemented = NewError(StatusNotImplemented) // 501
+ ErrBadGateway = NewError(StatusBadGateway) // 502
+ ErrServiceUnavailable = NewError(StatusServiceUnavailable) // 503
+ ErrGatewayTimeout = NewError(StatusGatewayTimeout) // 504
+ ErrHTTPVersionNotSupported = NewError(StatusHTTPVersionNotSupported) // 505
+ ErrVariantAlsoNegotiates = NewError(StatusVariantAlsoNegotiates) // 506
+ ErrInsufficientStorage = NewError(StatusInsufficientStorage) // 507
+ ErrLoopDetected = NewError(StatusLoopDetected) // 508
+ ErrNotExtended = NewError(StatusNotExtended) // 510
+ ErrNetworkAuthenticationRequired = NewError(StatusNetworkAuthenticationRequired) // 511
+)
+
+// HTTP Headers were copied from net/http.
+const (
+ HeaderAuthorization = "Authorization"
+ HeaderProxyAuthenticate = "Proxy-Authenticate"
+ HeaderProxyAuthorization = "Proxy-Authorization"
+ HeaderWWWAuthenticate = "WWW-Authenticate"
+ HeaderAge = "Age"
+ HeaderCacheControl = "Cache-Control"
+ HeaderClearSiteData = "Clear-Site-Data"
+ HeaderExpires = "Expires"
+ HeaderPragma = "Pragma"
+ HeaderWarning = "Warning"
+ HeaderAcceptCH = "Accept-CH"
+ HeaderAcceptCHLifetime = "Accept-CH-Lifetime"
+ HeaderContentDPR = "Content-DPR"
+ HeaderDPR = "DPR"
+ HeaderEarlyData = "Early-Data"
+ HeaderSaveData = "Save-Data"
+ HeaderViewportWidth = "Viewport-Width"
+ HeaderWidth = "Width"
+ HeaderETag = "ETag"
+ HeaderIfMatch = "If-Match"
+ HeaderIfModifiedSince = "If-Modified-Since"
+ HeaderIfNoneMatch = "If-None-Match"
+ HeaderIfUnmodifiedSince = "If-Unmodified-Since"
+ HeaderLastModified = "Last-Modified"
+ HeaderVary = "Vary"
+ HeaderConnection = "Connection"
+ HeaderKeepAlive = "Keep-Alive"
+ HeaderAccept = "Accept"
+ HeaderAcceptCharset = "Accept-Charset"
+ HeaderAcceptEncoding = "Accept-Encoding"
+ HeaderAcceptLanguage = "Accept-Language"
+ HeaderCookie = "Cookie"
+ HeaderExpect = "Expect"
+ HeaderMaxForwards = "Max-Forwards"
+ HeaderSetCookie = "Set-Cookie"
+ HeaderAccessControlAllowCredentials = "Access-Control-Allow-Credentials"
+ HeaderAccessControlAllowHeaders = "Access-Control-Allow-Headers"
+ HeaderAccessControlAllowMethods = "Access-Control-Allow-Methods"
+ HeaderAccessControlAllowOrigin = "Access-Control-Allow-Origin"
+ HeaderAccessControlExposeHeaders = "Access-Control-Expose-Headers"
+ HeaderAccessControlMaxAge = "Access-Control-Max-Age"
+ HeaderAccessControlRequestHeaders = "Access-Control-Request-Headers"
+ HeaderAccessControlRequestMethod = "Access-Control-Request-Method"
+ HeaderOrigin = "Origin"
+ HeaderTimingAllowOrigin = "Timing-Allow-Origin"
+ HeaderXPermittedCrossDomainPolicies = "X-Permitted-Cross-Domain-Policies"
+ HeaderDNT = "DNT"
+ HeaderTk = "Tk"
+ HeaderContentDisposition = "Content-Disposition"
+ HeaderContentEncoding = "Content-Encoding"
+ HeaderContentLanguage = "Content-Language"
+ HeaderContentLength = "Content-Length"
+ HeaderContentLocation = "Content-Location"
+ HeaderContentType = "Content-Type"
+ HeaderForwarded = "Forwarded"
+ HeaderVia = "Via"
+ HeaderXForwardedFor = "X-Forwarded-For"
+ HeaderXForwardedHost = "X-Forwarded-Host"
+ HeaderXForwardedProto = "X-Forwarded-Proto"
+ HeaderXForwardedProtocol = "X-Forwarded-Protocol"
+ HeaderXForwardedSsl = "X-Forwarded-Ssl"
+ HeaderXUrlScheme = "X-Url-Scheme"
+ HeaderLocation = "Location"
+ HeaderFrom = "From"
+ HeaderHost = "Host"
+ HeaderReferer = "Referer"
+ HeaderReferrerPolicy = "Referrer-Policy"
+ HeaderUserAgent = "User-Agent"
+ HeaderAllow = "Allow"
+ HeaderServer = "Server"
+ HeaderAcceptRanges = "Accept-Ranges"
+ HeaderContentRange = "Content-Range"
+ HeaderIfRange = "If-Range"
+ HeaderRange = "Range"
+ HeaderContentSecurityPolicy = "Content-Security-Policy"
+ HeaderContentSecurityPolicyReportOnly = "Content-Security-Policy-Report-Only"
+ HeaderCrossOriginResourcePolicy = "Cross-Origin-Resource-Policy"
+ HeaderExpectCT = "Expect-CT"
+ // Deprecated: use HeaderPermissionsPolicy instead
+ HeaderFeaturePolicy = "Feature-Policy"
+ HeaderPermissionsPolicy = "Permissions-Policy"
+ HeaderPublicKeyPins = "Public-Key-Pins"
+ HeaderPublicKeyPinsReportOnly = "Public-Key-Pins-Report-Only"
+ HeaderStrictTransportSecurity = "Strict-Transport-Security"
+ HeaderUpgradeInsecureRequests = "Upgrade-Insecure-Requests"
+ HeaderXContentTypeOptions = "X-Content-Type-Options"
+ HeaderXDownloadOptions = "X-Download-Options"
+ HeaderXFrameOptions = "X-Frame-Options"
+ HeaderXPoweredBy = "X-Powered-By"
+ HeaderXXSSProtection = "X-XSS-Protection"
+ HeaderLastEventID = "Last-Event-ID"
+ HeaderNEL = "NEL"
+ HeaderPingFrom = "Ping-From"
+ HeaderPingTo = "Ping-To"
+ HeaderReportTo = "Report-To"
+ HeaderTE = "TE"
+ HeaderTrailer = "Trailer"
+ HeaderTransferEncoding = "Transfer-Encoding"
+ HeaderSecWebSocketAccept = "Sec-WebSocket-Accept"
+ HeaderSecWebSocketExtensions = "Sec-WebSocket-Extensions"
+ HeaderSecWebSocketKey = "Sec-WebSocket-Key"
+ HeaderSecWebSocketProtocol = "Sec-WebSocket-Protocol"
+ HeaderSecWebSocketVersion = "Sec-WebSocket-Version"
+ HeaderAcceptPatch = "Accept-Patch"
+ HeaderAcceptPushPolicy = "Accept-Push-Policy"
+ HeaderAcceptSignature = "Accept-Signature"
+ HeaderAltSvc = "Alt-Svc"
+ HeaderDate = "Date"
+ HeaderIndex = "Index"
+ HeaderLargeAllocation = "Large-Allocation"
+ HeaderLink = "Link"
+ HeaderPushPolicy = "Push-Policy"
+ HeaderRetryAfter = "Retry-After"
+ HeaderServerTiming = "Server-Timing"
+ HeaderSignature = "Signature"
+ HeaderSignedHeaders = "Signed-Headers"
+ HeaderSourceMap = "SourceMap"
+ HeaderUpgrade = "Upgrade"
+ HeaderXDNSPrefetchControl = "X-DNS-Prefetch-Control"
+ HeaderXPingback = "X-Pingback"
+ HeaderXRequestID = "X-Request-ID"
+ HeaderXRequestedWith = "X-Requested-With"
+ HeaderXRobotsTag = "X-Robots-Tag"
+ HeaderXUACompatible = "X-UA-Compatible"
+)
+
+// Network types that are commonly used
+const (
+ NetworkTCP = "tcp"
+ NetworkTCP4 = "tcp4"
+ NetworkTCP6 = "tcp6"
+)
+
+// Compression types
+const (
+ StrGzip = "gzip"
+ StrBr = "br"
+ StrDeflate = "deflate"
+ StrBrotli = "brotli"
+)
+
+// Cookie SameSite
+// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis-03#section-4.1.2.7
+const (
+ CookieSameSiteDisabled = "disabled" // not in the RFC; ensures the "SameSite" attribute will not be set.
+ CookieSameSiteLaxMode = "lax"
+ CookieSameSiteStrictMode = "strict"
+ CookieSameSiteNoneMode = "none"
+)
+
+// Route Constraints
+const (
+ ConstraintInt = "int"
+ ConstraintBool = "bool"
+ ConstraintFloat = "float"
+ ConstraintAlpha = "alpha"
+ ConstraintGuid = "guid" //nolint:revive,stylecheck // TODO: Rename to "ConstraintGUID" in v3
+ ConstraintMinLen = "minLen"
+ ConstraintMaxLen = "maxLen"
+ ConstraintLen = "len"
+ ConstraintBetweenLen = "betweenLen"
+ ConstraintMinLenLower = "minlen"
+ ConstraintMaxLenLower = "maxlen"
+ ConstraintBetweenLenLower = "betweenlen"
+ ConstraintMin = "min"
+ ConstraintMax = "max"
+ ConstraintRange = "range"
+ ConstraintDatetime = "datetime"
+ ConstraintRegex = "regex"
+)
+
+// IndexRune reports whether the rune needle is present in str.
+func IndexRune(str string, needle int32) bool {
+ for _, r := range str {
+ if r == needle {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/hooks.go b/vendor/github.com/gofiber/fiber/v2/hooks.go
new file mode 100644
index 0000000..6b0b860
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/hooks.go
@@ -0,0 +1,218 @@
+package fiber
+
+import (
+ "github.com/gofiber/fiber/v2/log"
+)
+
+// OnRouteHandler and the other handler types define the functions used to create hooks for Fiber.
+type (
+ OnRouteHandler = func(Route) error
+ OnNameHandler = OnRouteHandler
+ OnGroupHandler = func(Group) error
+ OnGroupNameHandler = OnGroupHandler
+ OnListenHandler = func(ListenData) error
+ OnShutdownHandler = func() error
+ OnForkHandler = func(int) error
+ OnMountHandler = func(*App) error
+)
+
+// Hooks manages the hooks registered on an App.
+type Hooks struct {
+ // Embed app
+ app *App
+
+ // Hooks
+ onRoute []OnRouteHandler
+ onName []OnNameHandler
+ onGroup []OnGroupHandler
+ onGroupName []OnGroupNameHandler
+ onListen []OnListenHandler
+ onShutdown []OnShutdownHandler
+ onFork []OnForkHandler
+ onMount []OnMountHandler
+}
+
+// ListenData is the data passed to OnListenHandler hooks.
+type ListenData struct {
+ Host string
+ Port string
+ TLS bool
+}
+
+func newHooks(app *App) *Hooks {
+ return &Hooks{
+ app: app,
+ onRoute: make([]OnRouteHandler, 0),
+ onGroup: make([]OnGroupHandler, 0),
+ onGroupName: make([]OnGroupNameHandler, 0),
+ onName: make([]OnNameHandler, 0),
+ onListen: make([]OnListenHandler, 0),
+ onShutdown: make([]OnShutdownHandler, 0),
+ onFork: make([]OnForkHandler, 0),
+ onMount: make([]OnMountHandler, 0),
+ }
+}
+
+// OnRoute is a hook to execute user functions on each route registration.
+// The route's properties are available via the route parameter.
+func (h *Hooks) OnRoute(handler ...OnRouteHandler) {
+ h.app.mutex.Lock()
+ h.onRoute = append(h.onRoute, handler...)
+ h.app.mutex.Unlock()
+}
+
+// OnName is a hook to execute user functions whenever a route is named.
+// The route's properties are available via the route parameter.
+//
+// WARN: OnName only works with naming routes, not groups.
+func (h *Hooks) OnName(handler ...OnNameHandler) {
+ h.app.mutex.Lock()
+ h.onName = append(h.onName, handler...)
+ h.app.mutex.Unlock()
+}
+
+// OnGroup is a hook to execute user functions on each group registration.
+// The group's properties are available via the group parameter.
+func (h *Hooks) OnGroup(handler ...OnGroupHandler) {
+ h.app.mutex.Lock()
+ h.onGroup = append(h.onGroup, handler...)
+ h.app.mutex.Unlock()
+}
+
+// OnGroupName is a hook to execute user functions whenever a group is named.
+// The group's properties are available via the group parameter.
+//
+// WARN: OnGroupName only works with naming groups, not routes.
+func (h *Hooks) OnGroupName(handler ...OnGroupNameHandler) {
+ h.app.mutex.Lock()
+ h.onGroupName = append(h.onGroupName, handler...)
+ h.app.mutex.Unlock()
+}
+
+// OnListen is a hook to execute user functions on Listen, ListenTLS, Listener.
+func (h *Hooks) OnListen(handler ...OnListenHandler) {
+ h.app.mutex.Lock()
+ h.onListen = append(h.onListen, handler...)
+ h.app.mutex.Unlock()
+}
+
+// OnShutdown is a hook to execute user functions after Shutdown.
+func (h *Hooks) OnShutdown(handler ...OnShutdownHandler) {
+ h.app.mutex.Lock()
+ h.onShutdown = append(h.onShutdown, handler...)
+ h.app.mutex.Unlock()
+}
+
+// OnFork is a hook to execute user functions after the fork process.
+func (h *Hooks) OnFork(handler ...OnForkHandler) {
+ h.app.mutex.Lock()
+ h.onFork = append(h.onFork, handler...)
+ h.app.mutex.Unlock()
+}
+
+// OnMount is a hook to execute user functions after the mounting process.
+// The mount event is fired when a sub-app is mounted on a parent app; the parent app is passed as a parameter.
+// It works for both app and group mounting.
+func (h *Hooks) OnMount(handler ...OnMountHandler) {
+ h.app.mutex.Lock()
+ h.onMount = append(h.onMount, handler...)
+ h.app.mutex.Unlock()
+}
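+// A minimal registration sketch from a user's perspective; the app variable
+// is assumed to be a *fiber.App created elsewhere, and every hook type is
+// registered the same way:
+//
+//    app.Hooks().OnListen(func(data fiber.ListenData) error {
+//        log.Infof("listening on %s:%s (tls: %v)", data.Host, data.Port, data.TLS)
+//        return nil
+//    })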
+
+func (h *Hooks) executeOnRouteHooks(route Route) error {
+ // Check mounting
+ if h.app.mountFields.mountPath != "" {
+ route.path = h.app.mountFields.mountPath + route.path
+ route.Path = route.path
+ }
+
+ for _, v := range h.onRoute {
+ if err := v(route); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (h *Hooks) executeOnNameHooks(route Route) error {
+ // Check mounting
+ if h.app.mountFields.mountPath != "" {
+ route.path = h.app.mountFields.mountPath + route.path
+ route.Path = route.path
+ }
+
+ for _, v := range h.onName {
+ if err := v(route); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (h *Hooks) executeOnGroupHooks(group Group) error {
+ // Check mounting
+ if h.app.mountFields.mountPath != "" {
+ group.Prefix = h.app.mountFields.mountPath + group.Prefix
+ }
+
+ for _, v := range h.onGroup {
+ if err := v(group); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (h *Hooks) executeOnGroupNameHooks(group Group) error {
+ // Check mounting
+ if h.app.mountFields.mountPath != "" {
+ group.Prefix = h.app.mountFields.mountPath + group.Prefix
+ }
+
+ for _, v := range h.onGroupName {
+ if err := v(group); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (h *Hooks) executeOnListenHooks(listenData ListenData) error {
+ for _, v := range h.onListen {
+ if err := v(listenData); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (h *Hooks) executeOnShutdownHooks() {
+ for _, v := range h.onShutdown {
+ if err := v(); err != nil {
+ log.Errorf("failed to call shutdown hook: %v", err)
+ }
+ }
+}
+
+func (h *Hooks) executeOnForkHooks(pid int) {
+ for _, v := range h.onFork {
+ if err := v(pid); err != nil {
+ log.Errorf("failed to call fork hook: %v", err)
+ }
+ }
+}
+
+func (h *Hooks) executeOnMountHooks(app *App) error {
+ for _, v := range h.onMount {
+ if err := v(app); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/internal/schema/LICENSE b/vendor/github.com/gofiber/fiber/v2/internal/schema/LICENSE
new file mode 100644
index 0000000..0e5fb87
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/internal/schema/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2012 Rodrigo Moraes. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/gofiber/fiber/v2/internal/schema/cache.go b/vendor/github.com/gofiber/fiber/v2/internal/schema/cache.go
new file mode 100644
index 0000000..bf21697
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/internal/schema/cache.go
@@ -0,0 +1,305 @@
+// Copyright 2012 The Gorilla Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package schema
+
+import (
+ "errors"
+ "reflect"
+ "strconv"
+ "strings"
+ "sync"
+)
+
+var errInvalidPath = errors.New("schema: invalid path")
+
+// newCache returns a new cache.
+func newCache() *cache {
+ c := cache{
+ m: make(map[reflect.Type]*structInfo),
+ regconv: make(map[reflect.Type]Converter),
+ tag: "schema",
+ }
+ return &c
+}
+
+// cache caches meta-data about a struct.
+type cache struct {
+ l sync.RWMutex
+ m map[reflect.Type]*structInfo
+ regconv map[reflect.Type]Converter
+ tag string
+}
+
+// registerConverter registers a converter function for a custom type.
+func (c *cache) registerConverter(value interface{}, converterFunc Converter) {
+ c.regconv[reflect.TypeOf(value)] = converterFunc
+}
+
+// parsePath parses a path in dotted notation verifying that it is a valid
+// path to a struct field.
+//
+// It returns "path parts" which contain indices to fields to be used by
+// reflect.Value.FieldByName(). Multiple parts are required for slices of
+// structs.
+func (c *cache) parsePath(p string, t reflect.Type) ([]pathPart, error) {
+ var struc *structInfo
+ var field *fieldInfo
+ var index64 int64
+ var err error
+ parts := make([]pathPart, 0)
+ path := make([]string, 0)
+ keys := strings.Split(p, ".")
+ for i := 0; i < len(keys); i++ {
+ if t.Kind() != reflect.Struct {
+ return nil, errInvalidPath
+ }
+ if struc = c.get(t); struc == nil {
+ return nil, errInvalidPath
+ }
+ if field = struc.get(keys[i]); field == nil {
+ return nil, errInvalidPath
+ }
+ // Valid field. Append index.
+ path = append(path, field.name)
+ if field.isSliceOfStructs && (!field.unmarshalerInfo.IsValid || (field.unmarshalerInfo.IsValid && field.unmarshalerInfo.IsSliceElement)) {
+ // Parse a special case: slices of structs.
+ // i+1 must be the slice index.
+ //
+ // Now that a struct can implement the TextUnmarshaler interface,
+ // we don't need to force the struct's fields to appear in the path,
+ // so checking i+2 is no longer necessary.
+ i++
+ if i+1 > len(keys) {
+ return nil, errInvalidPath
+ }
+ if index64, err = strconv.ParseInt(keys[i], 10, 0); err != nil {
+ return nil, errInvalidPath
+ }
+ parts = append(parts, pathPart{
+ path: path,
+ field: field,
+ index: int(index64),
+ })
+ path = make([]string, 0)
+
+ // Get the next struct type, dropping ptrs.
+ if field.typ.Kind() == reflect.Ptr {
+ t = field.typ.Elem()
+ } else {
+ t = field.typ
+ }
+ if t.Kind() == reflect.Slice {
+ t = t.Elem()
+ if t.Kind() == reflect.Ptr {
+ t = t.Elem()
+ }
+ }
+ } else if field.typ.Kind() == reflect.Ptr {
+ t = field.typ.Elem()
+ } else {
+ t = field.typ
+ }
+ }
+ // Add the remaining.
+ parts = append(parts, pathPart{
+ path: path,
+ field: field,
+ index: -1,
+ })
+ return parts, nil
+}
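+// For illustration, given a struct with a field Phones []Phone, parsing the
+// path "Phones.0.Number" yields two parts: one for the slice element (path
+// ["Phones"], index 0) and one for the leaf field (path ["Number"], index -1).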
+
+// get returns a cached structInfo, creating it if necessary.
+func (c *cache) get(t reflect.Type) *structInfo {
+ c.l.RLock()
+ info := c.m[t]
+ c.l.RUnlock()
+ if info == nil {
+ info = c.create(t, "")
+ c.l.Lock()
+ c.m[t] = info
+ c.l.Unlock()
+ }
+ return info
+}
+
+// create creates a structInfo with meta-data about a struct.
+func (c *cache) create(t reflect.Type, parentAlias string) *structInfo {
+ info := &structInfo{}
+ var anonymousInfos []*structInfo
+ for i := 0; i < t.NumField(); i++ {
+ if f := c.createField(t.Field(i), parentAlias); f != nil {
+ info.fields = append(info.fields, f)
+ if ft := indirectType(f.typ); ft.Kind() == reflect.Struct && f.isAnonymous {
+ anonymousInfos = append(anonymousInfos, c.create(ft, f.canonicalAlias))
+ }
+ }
+ }
+ for i, a := range anonymousInfos {
+ others := []*structInfo{info}
+ others = append(others, anonymousInfos[:i]...)
+ others = append(others, anonymousInfos[i+1:]...)
+ for _, f := range a.fields {
+ if !containsAlias(others, f.alias) {
+ info.fields = append(info.fields, f)
+ }
+ }
+ }
+ return info
+}
+
+// createField creates a fieldInfo for the given field.
+func (c *cache) createField(field reflect.StructField, parentAlias string) *fieldInfo {
+ alias, options := fieldAlias(field, c.tag)
+ if alias == "-" {
+ // Ignore this field.
+ return nil
+ }
+ canonicalAlias := alias
+ if parentAlias != "" {
+ canonicalAlias = parentAlias + "." + alias
+ }
+ // Check if the type is supported and don't cache it if not.
+ // First let's get the basic type.
+ isSlice, isStruct := false, false
+ ft := field.Type
+ m := isTextUnmarshaler(reflect.Zero(ft))
+ if ft.Kind() == reflect.Ptr {
+ ft = ft.Elem()
+ }
+ if isSlice = ft.Kind() == reflect.Slice; isSlice {
+ ft = ft.Elem()
+ if ft.Kind() == reflect.Ptr {
+ ft = ft.Elem()
+ }
+ }
+ if ft.Kind() == reflect.Array {
+ ft = ft.Elem()
+ if ft.Kind() == reflect.Ptr {
+ ft = ft.Elem()
+ }
+ }
+ if isStruct = ft.Kind() == reflect.Struct; !isStruct {
+ if c.converter(ft) == nil && builtinConverters[ft.Kind()] == nil {
+ // Type is not supported.
+ return nil
+ }
+ }
+
+ return &fieldInfo{
+ typ: field.Type,
+ name: field.Name,
+ alias: alias,
+ canonicalAlias: canonicalAlias,
+ unmarshalerInfo: m,
+ isSliceOfStructs: isSlice && isStruct,
+ isAnonymous: field.Anonymous,
+ isRequired: options.Contains("required"),
+ }
+}
+
+// converter returns the converter for a type.
+func (c *cache) converter(t reflect.Type) Converter {
+ return c.regconv[t]
+}
+
+// ----------------------------------------------------------------------------
+
+type structInfo struct {
+ fields []*fieldInfo
+}
+
+func (i *structInfo) get(alias string) *fieldInfo {
+ for _, field := range i.fields {
+ if strings.EqualFold(field.alias, alias) {
+ return field
+ }
+ }
+ return nil
+}
+
+func containsAlias(infos []*structInfo, alias string) bool {
+ for _, info := range infos {
+ if info.get(alias) != nil {
+ return true
+ }
+ }
+ return false
+}
+
+type fieldInfo struct {
+ typ reflect.Type
+ // name is the field name in the struct.
+ name string
+ alias string
+ // canonicalAlias is almost the same as the alias, but is prefixed with
+ // an embedded struct field alias in dotted notation if this field is
+ // promoted from the struct.
+ // For instance, if the alias is "N" and this field is an embedded field
+ // in a struct "X", canonicalAlias will be "X.N".
+ canonicalAlias string
+ // unmarshalerInfo contains information regarding the
+ // encoding.TextUnmarshaler implementation of the field type.
+ unmarshalerInfo unmarshaler
+ // isSliceOfStructs indicates if the field type is a slice of structs.
+ isSliceOfStructs bool
+ // isAnonymous indicates whether the field is embedded in the struct.
+ isAnonymous bool
+ isRequired bool
+}
+
+func (f *fieldInfo) paths(prefix string) []string {
+ if f.alias == f.canonicalAlias {
+ return []string{prefix + f.alias}
+ }
+ return []string{prefix + f.alias, prefix + f.canonicalAlias}
+}
+
+type pathPart struct {
+ field *fieldInfo
+ path []string // path to the field: walks structs using field names.
+ index int // struct index in slices of structs.
+}
+
+// ----------------------------------------------------------------------------
+
+func indirectType(typ reflect.Type) reflect.Type {
+ if typ.Kind() == reflect.Ptr {
+ return typ.Elem()
+ }
+ return typ
+}
+
+// fieldAlias parses a field tag to get a field alias.
+func fieldAlias(field reflect.StructField, tagName string) (alias string, options tagOptions) {
+ if tag := field.Tag.Get(tagName); tag != "" {
+ alias, options = parseTag(tag)
+ }
+ if alias == "" {
+ alias = field.Name
+ }
+ return alias, options
+}
+
+// tagOptions is the list of options following a comma in a struct field's
+// tag, if any.
+type tagOptions []string
+
+// parseTag splits a struct field's tag into its name and comma-separated
+// options.
+func parseTag(tag string) (string, tagOptions) {
+ s := strings.Split(tag, ",")
+ return s[0], s[1:]
+}
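+// A sketch of expected results:
+//
+//    parseTag("name,required,omitempty") // -> "name", tagOptions{"required", "omitempty"}
+//    parseTag("-")                       // -> "-", tagOptions{}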
+
+// Contains checks whether the tagOptions contains the specified option.
+func (o tagOptions) Contains(option string) bool {
+ for _, s := range o {
+ if s == option {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/internal/schema/converter.go b/vendor/github.com/gofiber/fiber/v2/internal/schema/converter.go
new file mode 100644
index 0000000..4f2116a
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/internal/schema/converter.go
@@ -0,0 +1,145 @@
+// Copyright 2012 The Gorilla Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package schema
+
+import (
+ "reflect"
+ "strconv"
+)
+
+type Converter func(string) reflect.Value
+
+var (
+ invalidValue = reflect.Value{}
+ boolType = reflect.Bool
+ float32Type = reflect.Float32
+ float64Type = reflect.Float64
+ intType = reflect.Int
+ int8Type = reflect.Int8
+ int16Type = reflect.Int16
+ int32Type = reflect.Int32
+ int64Type = reflect.Int64
+ stringType = reflect.String
+ uintType = reflect.Uint
+ uint8Type = reflect.Uint8
+ uint16Type = reflect.Uint16
+ uint32Type = reflect.Uint32
+ uint64Type = reflect.Uint64
+)
+
+// Default converters for basic types.
+var builtinConverters = map[reflect.Kind]Converter{
+ boolType: convertBool,
+ float32Type: convertFloat32,
+ float64Type: convertFloat64,
+ intType: convertInt,
+ int8Type: convertInt8,
+ int16Type: convertInt16,
+ int32Type: convertInt32,
+ int64Type: convertInt64,
+ stringType: convertString,
+ uintType: convertUint,
+ uint8Type: convertUint8,
+ uint16Type: convertUint16,
+ uint32Type: convertUint32,
+ uint64Type: convertUint64,
+}
+
+func convertBool(value string) reflect.Value {
+ if value == "on" {
+ return reflect.ValueOf(true)
+ } else if v, err := strconv.ParseBool(value); err == nil {
+ return reflect.ValueOf(v)
+ }
+ return invalidValue
+}
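+// Note the HTML-checkbox special case above: "on" converts to true even
+// though strconv.ParseBool would reject it. A sketch of expected results:
+//
+//    convertBool("on")    // true
+//    convertBool("true")  // true
+//    convertBool("maybe") // invalidValue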
+
+func convertFloat32(value string) reflect.Value {
+ if v, err := strconv.ParseFloat(value, 32); err == nil {
+ return reflect.ValueOf(float32(v))
+ }
+ return invalidValue
+}
+
+func convertFloat64(value string) reflect.Value {
+ if v, err := strconv.ParseFloat(value, 64); err == nil {
+ return reflect.ValueOf(v)
+ }
+ return invalidValue
+}
+
+func convertInt(value string) reflect.Value {
+ if v, err := strconv.ParseInt(value, 10, 0); err == nil {
+ return reflect.ValueOf(int(v))
+ }
+ return invalidValue
+}
+
+func convertInt8(value string) reflect.Value {
+ if v, err := strconv.ParseInt(value, 10, 8); err == nil {
+ return reflect.ValueOf(int8(v))
+ }
+ return invalidValue
+}
+
+func convertInt16(value string) reflect.Value {
+ if v, err := strconv.ParseInt(value, 10, 16); err == nil {
+ return reflect.ValueOf(int16(v))
+ }
+ return invalidValue
+}
+
+func convertInt32(value string) reflect.Value {
+ if v, err := strconv.ParseInt(value, 10, 32); err == nil {
+ return reflect.ValueOf(int32(v))
+ }
+ return invalidValue
+}
+
+func convertInt64(value string) reflect.Value {
+ if v, err := strconv.ParseInt(value, 10, 64); err == nil {
+ return reflect.ValueOf(v)
+ }
+ return invalidValue
+}
+
+func convertString(value string) reflect.Value {
+ return reflect.ValueOf(value)
+}
+
+func convertUint(value string) reflect.Value {
+ if v, err := strconv.ParseUint(value, 10, 0); err == nil {
+ return reflect.ValueOf(uint(v))
+ }
+ return invalidValue
+}
+
+func convertUint8(value string) reflect.Value {
+ if v, err := strconv.ParseUint(value, 10, 8); err == nil {
+ return reflect.ValueOf(uint8(v))
+ }
+ return invalidValue
+}
+
+func convertUint16(value string) reflect.Value {
+ if v, err := strconv.ParseUint(value, 10, 16); err == nil {
+ return reflect.ValueOf(uint16(v))
+ }
+ return invalidValue
+}
+
+func convertUint32(value string) reflect.Value {
+ if v, err := strconv.ParseUint(value, 10, 32); err == nil {
+ return reflect.ValueOf(uint32(v))
+ }
+ return invalidValue
+}
+
+func convertUint64(value string) reflect.Value {
+ if v, err := strconv.ParseUint(value, 10, 64); err == nil {
+ return reflect.ValueOf(v)
+ }
+ return invalidValue
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/internal/schema/decoder.go b/vendor/github.com/gofiber/fiber/v2/internal/schema/decoder.go
new file mode 100644
index 0000000..b63c45e
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/internal/schema/decoder.go
@@ -0,0 +1,534 @@
+// Copyright 2012 The Gorilla Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package schema
+
+import (
+ "encoding"
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// NewDecoder returns a new Decoder.
+func NewDecoder() *Decoder {
+ return &Decoder{cache: newCache()}
+}
+
+// Decoder decodes values from a map[string][]string to a struct.
+type Decoder struct {
+ cache *cache
+ zeroEmpty bool
+ ignoreUnknownKeys bool
+}
+
+// SetAliasTag changes the tag used to locate custom field aliases.
+// The default tag is "schema".
+func (d *Decoder) SetAliasTag(tag string) {
+ d.cache.tag = tag
+}
+
+// ZeroEmpty controls the behaviour when the decoder encounters empty values
+// in a map.
+// If z is true and a key in the map has the empty string as a value
+// then the corresponding struct field is set to the zero value.
+// If z is false then empty strings are ignored.
+//
+// The default value is false; that is, empty values do not change
+// the value of the struct field.
+func (d *Decoder) ZeroEmpty(z bool) {
+ d.zeroEmpty = z
+}
+
+// IgnoreUnknownKeys controls the behaviour when the decoder encounters unknown
+// keys in the map.
+// If i is true and an unknown field is encountered, it is ignored. This is
+// similar to how unknown keys are handled by encoding/json.
+// If i is false then Decode will return an error. Note that any valid keys
+// will still be decoded into the target struct.
+//
+// To preserve backwards compatibility, the default value is false.
+func (d *Decoder) IgnoreUnknownKeys(i bool) {
+ d.ignoreUnknownKeys = i
+}
+
+// RegisterConverter registers a converter function for a custom type.
+func (d *Decoder) RegisterConverter(value interface{}, converterFunc Converter) {
+ d.cache.registerConverter(value, converterFunc)
+}
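+// A hypothetical converter sketch: parse "YYYY-MM-DD" strings into a custom
+// Date type (the Date type, layout, and decoder variable are illustrative
+// assumptions, not part of this package). Returning the zero reflect.Value
+// signals a conversion error:
+//
+//    type Date struct{ time.Time }
+//
+//    decoder.RegisterConverter(Date{}, func(value string) reflect.Value {
+//        if t, err := time.Parse("2006-01-02", value); err == nil {
+//            return reflect.ValueOf(Date{t})
+//        }
+//        return reflect.Value{}
+//    })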
+
+// Decode decodes a map[string][]string to a struct.
+//
+// The first parameter must be a pointer to a struct.
+//
+// The second parameter is a map, typically url.Values from an HTTP request.
+// Keys are "paths" in dotted notation to the struct fields and nested structs.
+//
+// See the package documentation for a full explanation of the mechanics.
+func (d *Decoder) Decode(dst interface{}, src map[string][]string) error {
+ v := reflect.ValueOf(dst)
+ if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct {
+ return errors.New("schema: interface must be a pointer to struct")
+ }
+ v = v.Elem()
+ t := v.Type()
+ multiError := MultiError{}
+ for path, values := range src {
+ if parts, err := d.cache.parsePath(path, t); err == nil {
+ if err = d.decode(v, path, parts, values); err != nil {
+ multiError[path] = err
+ }
+ } else if !d.ignoreUnknownKeys {
+ multiError[path] = UnknownKeyError{Key: path}
+ }
+ }
+ multiError.merge(d.checkRequired(t, src))
+ if len(multiError) > 0 {
+ return multiError
+ }
+ return nil
+}
+
+// checkRequired checks whether required fields are empty.
+//
+// It checks type t recursively if t has struct fields.
+//
+// src is the source map for decoding; it is used here to see whether the
+// required fields are included in src.
+func (d *Decoder) checkRequired(t reflect.Type, src map[string][]string) MultiError {
+ m, errs := d.findRequiredFields(t, "", "")
+ for key, fields := range m {
+ if isEmptyFields(fields, src) {
+ errs[key] = EmptyFieldError{Key: key}
+ }
+ }
+ return errs
+}
+
+// findRequiredFields recursively searches the struct type t for required fields.
+//
+// canonicalPrefix and searchPrefix are used to resolve full paths in dotted notation
+// for nested struct fields. canonicalPrefix is a complete path which never omits
+// any embedded struct fields. searchPrefix is a user-friendly path which may omit
+// some embedded struct fields to point to promoted fields.
+func (d *Decoder) findRequiredFields(t reflect.Type, canonicalPrefix, searchPrefix string) (map[string][]fieldWithPrefix, MultiError) {
+ struc := d.cache.get(t)
+ if struc == nil {
+ // unexpected: cache.get should never return nil
+ return nil, MultiError{canonicalPrefix + "*": errors.New("cache fail")}
+ }
+
+ m := map[string][]fieldWithPrefix{}
+ errs := MultiError{}
+ for _, f := range struc.fields {
+ if f.typ.Kind() == reflect.Struct {
+ fcprefix := canonicalPrefix + f.canonicalAlias + "."
+ for _, fspath := range f.paths(searchPrefix) {
+ fm, ferrs := d.findRequiredFields(f.typ, fcprefix, fspath+".")
+ for key, fields := range fm {
+ m[key] = append(m[key], fields...)
+ }
+ errs.merge(ferrs)
+ }
+ }
+ if f.isRequired {
+ key := canonicalPrefix + f.canonicalAlias
+ m[key] = append(m[key], fieldWithPrefix{
+ fieldInfo: f,
+ prefix: searchPrefix,
+ })
+ }
+ }
+ return m, errs
+}
+
+type fieldWithPrefix struct {
+ *fieldInfo
+ prefix string
+}
+
+// isEmptyFields returns true if all of the specified fields are empty.
+func isEmptyFields(fields []fieldWithPrefix, src map[string][]string) bool {
+ for _, f := range fields {
+ for _, path := range f.paths(f.prefix) {
+ v, ok := src[path]
+ if ok && !isEmpty(f.typ, v) {
+ return false
+ }
+ for key := range src {
+ // issue references:
+ // https://github.com/gofiber/fiber/issues/1414
+ // https://github.com/gorilla/schema/issues/176
+ nested := strings.IndexByte(key, '.') != -1
+
+ // for non required nested structs
+ c1 := strings.HasSuffix(f.prefix, ".") && key == path
+
+ // for required nested structs
+ c2 := f.prefix == "" && nested && strings.HasPrefix(key, path)
+
+ // for non nested fields
+ c3 := f.prefix == "" && !nested && key == path
+ if !isEmpty(f.typ, src[key]) && (c1 || c2 || c3) {
+ return false
+ }
+ }
+ }
+ }
+ return true
+}
+
+// isEmpty returns true if the value is empty for the given type.
+func isEmpty(t reflect.Type, value []string) bool {
+ if len(value) == 0 {
+ return true
+ }
+ switch t.Kind() {
+ case boolType, float32Type, float64Type, intType, int8Type, int32Type, int64Type, stringType, uint8Type, uint16Type, uint32Type, uint64Type:
+ return len(value[0]) == 0
+ }
+ return false
+}
+
+// decode fills a struct field using a parsed path.
+func (d *Decoder) decode(v reflect.Value, path string, parts []pathPart, values []string) error {
+ // Get the field walking the struct fields by index.
+ for _, name := range parts[0].path {
+ if v.Type().Kind() == reflect.Ptr {
+ if v.IsNil() {
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ v = v.Elem()
+ }
+
+ // alloc embedded structs
+ if v.Type().Kind() == reflect.Struct {
+ for i := 0; i < v.NumField(); i++ {
+ field := v.Field(i)
+ if field.Type().Kind() == reflect.Ptr && field.IsNil() && v.Type().Field(i).Anonymous {
+ field.Set(reflect.New(field.Type().Elem()))
+ }
+ }
+ }
+
+ v = v.FieldByName(name)
+ }
+ // Don't even bother for unexported fields.
+ if !v.CanSet() {
+ return nil
+ }
+
+ // Dereference if needed.
+ t := v.Type()
+ if t.Kind() == reflect.Ptr {
+ t = t.Elem()
+ if v.IsNil() {
+ v.Set(reflect.New(t))
+ }
+ v = v.Elem()
+ }
+
+ // Slice of structs. Let's go recursive.
+ if len(parts) > 1 {
+ idx := parts[0].index
+ if v.IsNil() || v.Len() < idx+1 {
+ value := reflect.MakeSlice(t, idx+1, idx+1)
+ if v.Len() < idx+1 {
+ // Resize it.
+ reflect.Copy(value, v)
+ }
+ v.Set(value)
+ }
+ return d.decode(v.Index(idx), path, parts[1:], values)
+ }
+
+ // Get the converter early in case there is one for a slice type.
+ conv := d.cache.converter(t)
+ m := isTextUnmarshaler(v)
+ if conv == nil && t.Kind() == reflect.Slice && m.IsSliceElement {
+ var items []reflect.Value
+ elemT := t.Elem()
+ isPtrElem := elemT.Kind() == reflect.Ptr
+ if isPtrElem {
+ elemT = elemT.Elem()
+ }
+
+ // Try to get a converter for the element type.
+ conv := d.cache.converter(elemT)
+ if conv == nil {
+ conv = builtinConverters[elemT.Kind()]
+ if conv == nil {
+ // As we are not dealing with slice of structs here, we don't need to check if the type
+ // implements TextUnmarshaler interface
+ return fmt.Errorf("schema: converter not found for %v", elemT)
+ }
+ }
+
+ for key, value := range values {
+ if value == "" {
+ if d.zeroEmpty {
+ items = append(items, reflect.Zero(elemT))
+ }
+ } else if m.IsValid {
+ u := reflect.New(elemT)
+ if m.IsSliceElementPtr {
+ u = reflect.New(reflect.PtrTo(elemT).Elem())
+ }
+ if err := u.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(value)); err != nil {
+ return ConversionError{
+ Key: path,
+ Type: t,
+ Index: key,
+ Err: err,
+ }
+ }
+ if m.IsSliceElementPtr {
+ items = append(items, u.Elem().Addr())
+ } else if u.Kind() == reflect.Ptr {
+ items = append(items, u.Elem())
+ } else {
+ items = append(items, u)
+ }
+ } else if item := conv(value); item.IsValid() {
+ if isPtrElem {
+ ptr := reflect.New(elemT)
+ ptr.Elem().Set(item)
+ item = ptr
+ }
+ if item.Type() != elemT && !isPtrElem {
+ item = item.Convert(elemT)
+ }
+ items = append(items, item)
+ } else {
+ if strings.Contains(value, ",") {
+ values := strings.Split(value, ",")
+ for _, value := range values {
+ if value == "" {
+ if d.zeroEmpty {
+ items = append(items, reflect.Zero(elemT))
+ }
+ } else if item := conv(value); item.IsValid() {
+ if isPtrElem {
+ ptr := reflect.New(elemT)
+ ptr.Elem().Set(item)
+ item = ptr
+ }
+ if item.Type() != elemT && !isPtrElem {
+ item = item.Convert(elemT)
+ }
+ items = append(items, item)
+ } else {
+ return ConversionError{
+ Key: path,
+ Type: elemT,
+ Index: key,
+ }
+ }
+ }
+ } else {
+ return ConversionError{
+ Key: path,
+ Type: elemT,
+ Index: key,
+ }
+ }
+ }
+ }
+ value := reflect.Append(reflect.MakeSlice(t, 0, 0), items...)
+ v.Set(value)
+ } else {
+ val := ""
+ // Use the last value provided if any values were provided
+ if len(values) > 0 {
+ val = values[len(values)-1]
+ }
+
+ if conv != nil {
+ if value := conv(val); value.IsValid() {
+ v.Set(value.Convert(t))
+ } else {
+ return ConversionError{
+ Key: path,
+ Type: t,
+ Index: -1,
+ }
+ }
+ } else if m.IsValid {
+ if m.IsPtr {
+ u := reflect.New(v.Type())
+ if err := u.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(val)); err != nil {
+ return ConversionError{
+ Key: path,
+ Type: t,
+ Index: -1,
+ Err: err,
+ }
+ }
+ v.Set(reflect.Indirect(u))
+ } else {
+ // If the value implements the encoding.TextUnmarshaler interface
+ // apply UnmarshalText as the converter
+ if err := m.Unmarshaler.UnmarshalText([]byte(val)); err != nil {
+ return ConversionError{
+ Key: path,
+ Type: t,
+ Index: -1,
+ Err: err,
+ }
+ }
+ }
+ } else if val == "" {
+ if d.zeroEmpty {
+ v.Set(reflect.Zero(t))
+ }
+ } else if conv := builtinConverters[t.Kind()]; conv != nil {
+ if value := conv(val); value.IsValid() {
+ v.Set(value.Convert(t))
+ } else {
+ return ConversionError{
+ Key: path,
+ Type: t,
+ Index: -1,
+ }
+ }
+ } else {
+ return fmt.Errorf("schema: converter not found for %v", t)
+ }
+ }
+ return nil
+}
+
+func isTextUnmarshaler(v reflect.Value) unmarshaler {
+ // Create a new unmarshaler instance
+ m := unmarshaler{}
+ if m.Unmarshaler, m.IsValid = v.Interface().(encoding.TextUnmarshaler); m.IsValid {
+ return m
+ }
+ // As the UnmarshalText function should be applied to the pointer of the
+ // type, we check that type to see if it implements the necessary
+ // method.
+ if m.Unmarshaler, m.IsValid = reflect.New(v.Type()).Interface().(encoding.TextUnmarshaler); m.IsValid {
+ m.IsPtr = true
+ return m
+ }
+
+ // if v is []T or *[]T create new T
+ t := v.Type()
+ if t.Kind() == reflect.Ptr {
+ t = t.Elem()
+ }
+ if t.Kind() == reflect.Slice {
+ // Check if the slice implements encoding.TextUnmarshaler
+ if m.Unmarshaler, m.IsValid = v.Interface().(encoding.TextUnmarshaler); m.IsValid {
+ return m
+ }
+ // If t is a pointer slice, check if its elements implement
+ // encoding.TextUnmarshaler
+ m.IsSliceElement = true
+ if t = t.Elem(); t.Kind() == reflect.Ptr {
+ t = reflect.PtrTo(t.Elem())
+ v = reflect.Zero(t)
+ m.IsSliceElementPtr = true
+ m.Unmarshaler, m.IsValid = v.Interface().(encoding.TextUnmarshaler)
+ return m
+ }
+ }
+
+ v = reflect.New(t)
+ m.Unmarshaler, m.IsValid = v.Interface().(encoding.TextUnmarshaler)
+ return m
+}
+
+// TextUnmarshaler helpers ----------------------------------------------------
+// unmarshaler contains information about a TextUnmarshaler type.
+type unmarshaler struct {
+ Unmarshaler encoding.TextUnmarshaler
+ // IsValid indicates whether the resolved type indicated by the other
+ // flags implements the encoding.TextUnmarshaler interface.
+ IsValid bool
+ // IsPtr indicates that the resolved type is the pointer of the original
+ // type.
+ IsPtr bool
+ // IsSliceElement indicates that the resolved type is a slice element of
+ // the original type.
+ IsSliceElement bool
+ // IsSliceElementPtr indicates that the resolved type is a pointer to a
+ // slice element of the original type.
+ IsSliceElementPtr bool
+}
+
+// Errors ---------------------------------------------------------------------
+
+// ConversionError stores information about a failed conversion.
+type ConversionError struct {
+ Key string // key from the source map.
+ Type reflect.Type // expected type of elem
+ Index int // index for multi-value fields; -1 for single-value fields.
+ Err error // low-level error (when it exists)
+}
+
+func (e ConversionError) Error() string {
+ var output string
+
+ if e.Index < 0 {
+ output = fmt.Sprintf("schema: error converting value for %q", e.Key)
+ } else {
+ output = fmt.Sprintf("schema: error converting value for index %d of %q",
+ e.Index, e.Key)
+ }
+
+ if e.Err != nil {
+ output = fmt.Sprintf("%s. Details: %s", output, e.Err)
+ }
+
+ return output
+}
+
+// UnknownKeyError stores information about an unknown key in the source map.
+type UnknownKeyError struct {
+ Key string // key from the source map.
+}
+
+func (e UnknownKeyError) Error() string {
+ return fmt.Sprintf("schema: invalid path %q", e.Key)
+}
+
+// EmptyFieldError stores information about an empty required field.
+type EmptyFieldError struct {
+ Key string // required key in the source map.
+}
+
+func (e EmptyFieldError) Error() string {
+ return fmt.Sprintf("%v is empty", e.Key)
+}
+
+// MultiError stores multiple decoding errors.
+//
+// Borrowed from the App Engine SDK.
+type MultiError map[string]error
+
+func (e MultiError) Error() string {
+ s := ""
+ for _, err := range e {
+ s = err.Error()
+ break
+ }
+ switch len(e) {
+ case 0:
+ return "(0 errors)"
+ case 1:
+ return s
+ case 2:
+ return s + " (and 1 other error)"
+ }
+ return fmt.Sprintf("%s (and %d other errors)", s, len(e)-1)
+}
+
+func (e MultiError) merge(errors MultiError) {
+ for key, err := range errors {
+ if e[key] == nil {
+ e[key] = err
+ }
+ }
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/internal/schema/doc.go b/vendor/github.com/gofiber/fiber/v2/internal/schema/doc.go
new file mode 100644
index 0000000..fff0fe7
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/internal/schema/doc.go
@@ -0,0 +1,148 @@
+// Copyright 2012 The Gorilla Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package gorilla/schema fills a struct with form values.
+
+The basic usage is really simple. Given this struct:
+
+ type Person struct {
+ Name string
+ Phone string
+ }
+
+...we can fill it by passing a map to the Decode() function:
+
+ values := map[string][]string{
+ "Name": {"John"},
+ "Phone": {"999-999-999"},
+ }
+ person := new(Person)
+ decoder := schema.NewDecoder()
+ decoder.Decode(person, values)
+
+This is just a simple example, and it doesn't make much sense to create
+the map manually. Typically it will come from an http.Request object and
+will be of type url.Values, http.Request.Form, or http.Request.MultipartForm:
+
+ func MyHandler(w http.ResponseWriter, r *http.Request) {
+ err := r.ParseForm()
+
+ if err != nil {
+ // Handle error
+ }
+
+ decoder := schema.NewDecoder()
+ // r.PostForm is a map of our POST form values
+ err = decoder.Decode(person, r.PostForm)
+
+ if err != nil {
+ // Handle error
+ }
+
+ // Do something with person.Name or person.Phone
+ }
+
+Note: it is a good idea to set a Decoder instance as a package global,
+because it caches meta-data about structs, and an instance can be shared safely:
+
+ var decoder = schema.NewDecoder()
+
+To define custom names for fields, use a struct tag "schema". To not populate
+certain fields, use a dash for the name and it will be ignored:
+
+ type Person struct {
+ Name string `schema:"name"` // custom name
+ Phone string `schema:"phone"` // custom name
+ Admin bool `schema:"-"` // this field is never set
+ }
+
+The supported field types in the destination struct are:
+
+ - bool
+ - float variants (float32, float64)
+ - int variants (int, int8, int16, int32, int64)
+ - string
+ - uint variants (uint, uint8, uint16, uint32, uint64)
+ - struct
+ - a pointer to one of the above types
+ - a slice or a pointer to a slice of one of the above types
+
+Unsupported types are simply ignored; however, custom types can be registered
+to be converted.
+
+To fill nested structs, keys must use a dotted notation as the "path" for the
+field. So for example, to fill the struct Person below:
+
+ type Phone struct {
+ Label string
+ Number string
+ }
+
+ type Person struct {
+ Name string
+ Phone Phone
+ }
+
+...the source map must have the keys "Name", "Phone.Label" and "Phone.Number".
+This means that an HTML form to fill a Person struct must look like this:
+
+	<form>
+		<input type="text" name="Name">
+		<input type="text" name="Phone.Label">
+		<input type="text" name="Phone.Number">
+	</form>
+
+Single values are filled using the last value provided for a key in the source map.
+Slices are filled using all values for a key from the source map. So to fill
+a Person with multiple Phone values, like:
+
+ type Person struct {
+ Name string
+ Phones []Phone
+ }
+
+...an HTML form that accepts three Phone values would look like this:
+
+	<form>
+		<input type="text" name="Name">
+		<input type="text" name="Phones.0.Label">
+		<input type="text" name="Phones.0.Number">
+		<input type="text" name="Phones.1.Label">
+		<input type="text" name="Phones.1.Number">
+		<input type="text" name="Phones.2.Label">
+		<input type="text" name="Phones.2.Number">
+	</form>
+
+Notice that the slice index is required only for slices of structs. This is
+needed for disambiguation: if the nested struct also had a slice field, we
+could not translate multiple values to it without an index for the parent
+struct.
+
+There's also the possibility to create a custom type that implements the
+TextUnmarshaler interface, and in this case there's no need to register
+a converter, like:
+
+ type Person struct {
+ Emails []Email
+ }
+
+ type Email struct {
+ *mail.Address
+ }
+
+ func (e *Email) UnmarshalText(text []byte) (err error) {
+ e.Address, err = mail.ParseAddress(string(text))
+ return
+ }
+
+...an HTML form that accepts three Email values would look like this:
+
+	<form>
+		<input type="email" name="Emails.0">
+		<input type="email" name="Emails.1">
+		<input type="email" name="Emails.2">
+	</form>
+*/
+package schema
diff --git a/vendor/github.com/gofiber/fiber/v2/internal/schema/encoder.go b/vendor/github.com/gofiber/fiber/v2/internal/schema/encoder.go
new file mode 100644
index 0000000..c01de00
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/internal/schema/encoder.go
@@ -0,0 +1,202 @@
+package schema
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strconv"
+)
+
+type encoderFunc func(reflect.Value) string
+
+// Encoder encodes values from a struct into url.Values.
+type Encoder struct {
+ cache *cache
+ regenc map[reflect.Type]encoderFunc
+}
+
+// NewEncoder returns a new Encoder with defaults.
+func NewEncoder() *Encoder {
+ return &Encoder{cache: newCache(), regenc: make(map[reflect.Type]encoderFunc)}
+}
+
+// Encode encodes a struct into map[string][]string.
+//
+// Intended for use with url.Values.
+func (e *Encoder) Encode(src interface{}, dst map[string][]string) error {
+ v := reflect.ValueOf(src)
+
+ return e.encode(v, dst)
+}
+
+// RegisterEncoder registers a converter for encoding a custom type.
+func (e *Encoder) RegisterEncoder(value interface{}, encoder func(reflect.Value) string) {
+ e.regenc[reflect.TypeOf(value)] = encoder
+}
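+// A hypothetical encoder sketch, the encoding counterpart of
+// Decoder.RegisterConverter (the Date type and encoder variable are
+// illustrative assumptions, not part of this package):
+//
+//    encoder.RegisterEncoder(Date{}, func(v reflect.Value) string {
+//        return v.Interface().(Date).Format("2006-01-02")
+//    })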
+
+// SetAliasTag changes the tag used to locate custom field aliases.
+// The default tag is "schema".
+func (e *Encoder) SetAliasTag(tag string) {
+ e.cache.tag = tag
+}
+
+// isValidStructPointer tests whether the input value is a valid struct pointer.
+func isValidStructPointer(v reflect.Value) bool {
+ return v.Type().Kind() == reflect.Ptr && v.Elem().IsValid() && v.Elem().Type().Kind() == reflect.Struct
+}
+
+func isZero(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.Func:
+ case reflect.Map, reflect.Slice:
+ return v.IsNil() || v.Len() == 0
+ case reflect.Array:
+ z := true
+ for i := 0; i < v.Len(); i++ {
+ z = z && isZero(v.Index(i))
+ }
+ return z
+ case reflect.Struct:
+ type zero interface {
+ IsZero() bool
+ }
+ if v.Type().Implements(reflect.TypeOf((*zero)(nil)).Elem()) {
+ iz := v.MethodByName("IsZero").Call([]reflect.Value{})[0]
+ return iz.Interface().(bool)
+ }
+ z := true
+ for i := 0; i < v.NumField(); i++ {
+ z = z && isZero(v.Field(i))
+ }
+ return z
+ }
+ // Compare other types directly:
+ z := reflect.Zero(v.Type())
+ return v.Interface() == z.Interface()
+}
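+// A sketch of expected results:
+//
+//    isZero(reflect.ValueOf(0))       // true
+//    isZero(reflect.ValueOf(""))      // true
+//    isZero(reflect.ValueOf([]int{})) // true (nil or empty slices count as zero)
+//    isZero(reflect.ValueOf(42))      // false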
+
+func (e *Encoder) encode(v reflect.Value, dst map[string][]string) error {
+ if v.Kind() == reflect.Ptr {
+ v = v.Elem()
+ }
+ if v.Kind() != reflect.Struct {
+ return errors.New("schema: interface must be a struct")
+ }
+ t := v.Type()
+
+ errors := MultiError{}
+
+ for i := 0; i < v.NumField(); i++ {
+ name, opts := fieldAlias(t.Field(i), e.cache.tag)
+ if name == "-" {
+ continue
+ }
+
+ // Encode struct pointer types if the field is a valid pointer and a struct.
+ if isValidStructPointer(v.Field(i)) {
+ _ = e.encode(v.Field(i).Elem(), dst)
+ continue
+ }
+
+ encFunc := typeEncoder(v.Field(i).Type(), e.regenc)
+
+ // Encode non-slice types and custom implementations immediately.
+ if encFunc != nil {
+ value := encFunc(v.Field(i))
+ if opts.Contains("omitempty") && isZero(v.Field(i)) {
+ continue
+ }
+
+ dst[name] = append(dst[name], value)
+ continue
+ }
+
+ if v.Field(i).Type().Kind() == reflect.Struct {
+ _ = e.encode(v.Field(i), dst)
+ continue
+ }
+
+ if v.Field(i).Type().Kind() == reflect.Slice {
+ encFunc = typeEncoder(v.Field(i).Type().Elem(), e.regenc)
+ }
+
+ if encFunc == nil {
+ errors[v.Field(i).Type().String()] = fmt.Errorf("schema: encoder not found for %v", v.Field(i))
+ continue
+ }
+
+ // Encode a slice.
+ if v.Field(i).Len() == 0 && opts.Contains("omitempty") {
+ continue
+ }
+
+ dst[name] = []string{}
+ for j := 0; j < v.Field(i).Len(); j++ {
+ dst[name] = append(dst[name], encFunc(v.Field(i).Index(j)))
+ }
+ }
+
+ if len(errors) > 0 {
+ return errors
+ }
+ return nil
+}
+
+func typeEncoder(t reflect.Type, reg map[reflect.Type]encoderFunc) encoderFunc {
+ if f, ok := reg[t]; ok {
+ return f
+ }
+
+ switch t.Kind() {
+ case reflect.Bool:
+ return encodeBool
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return encodeInt
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return encodeUint
+ case reflect.Float32:
+ return encodeFloat32
+ case reflect.Float64:
+ return encodeFloat64
+ case reflect.Ptr:
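+ // nil pointers are encoded as the literal string "null"; otherwise the pointee is encoded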
+ f := typeEncoder(t.Elem(), reg)
+ return func(v reflect.Value) string {
+ if v.IsNil() {
+ return "null"
+ }
+ return f(v.Elem())
+ }
+ case reflect.String:
+ return encodeString
+ default:
+ return nil
+ }
+}
+
+func encodeBool(v reflect.Value) string {
+ return strconv.FormatBool(v.Bool())
+}
+
+func encodeInt(v reflect.Value) string {
+ return strconv.FormatInt(v.Int(), 10)
+}
+
+func encodeUint(v reflect.Value) string {
+ return strconv.FormatUint(v.Uint(), 10)
+}
+
+func encodeFloat(v reflect.Value, bits int) string {
+ return strconv.FormatFloat(v.Float(), 'f', 6, bits)
+}
+
+func encodeFloat32(v reflect.Value) string {
+ return encodeFloat(v, 32)
+}
+
+func encodeFloat64(v reflect.Value) string {
+ return encodeFloat(v, 64)
+}
+
+func encodeString(v reflect.Value) string {
+ return v.String()
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/listen.go b/vendor/github.com/gofiber/fiber/v2/listen.go
new file mode 100644
index 0000000..342b05f
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/listen.go
@@ -0,0 +1,502 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package fiber
+
+import (
+ "crypto/tls"
+ "crypto/x509"
+ "errors"
+ "fmt"
+ "net"
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "sort"
+ "strconv"
+ "strings"
+ "text/tabwriter"
+
+ "github.com/mattn/go-colorable"
+ "github.com/mattn/go-isatty"
+ "github.com/mattn/go-runewidth"
+
+ "github.com/gofiber/fiber/v2/log"
+)
+
+const (
+ globalIpv4Addr = "0.0.0.0"
+)
+
+// Listener can be used to pass a custom listener.
+func (app *App) Listener(ln net.Listener) error {
+ // prepare the server for the start
+ app.startupProcess()
+
+ // run hooks
+ app.runOnListenHooks(app.prepareListenData(ln.Addr().String(), getTLSConfig(ln) != nil))
+
+ // Print startup message
+ if !app.config.DisableStartupMessage {
+ app.startupMessage(ln.Addr().String(), getTLSConfig(ln) != nil, "")
+ }
+
+ // Print routes
+ if app.config.EnablePrintRoutes {
+ app.printRoutesMessage()
+ }
+
+ // Prefork is not supported for custom listeners
+ if app.config.Prefork {
+ log.Warn("Prefork isn't supported for custom listeners.")
+ }
+
+ // Start listening
+ return app.server.Serve(ln)
+}
+
+// Listen serves HTTP requests from the given addr.
+//
+// app.Listen(":8080")
+// app.Listen("127.0.0.1:8080")
+func (app *App) Listen(addr string) error {
+ // Start prefork
+ if app.config.Prefork {
+ return app.prefork(app.config.Network, addr, nil)
+ }
+
+ // Setup listener
+ ln, err := net.Listen(app.config.Network, addr)
+ if err != nil {
+ return fmt.Errorf("failed to listen: %w", err)
+ }
+
+ // prepare the server for the start
+ app.startupProcess()
+
+ // run hooks
+ app.runOnListenHooks(app.prepareListenData(ln.Addr().String(), false))
+
+ // Print startup message
+ if !app.config.DisableStartupMessage {
+ app.startupMessage(ln.Addr().String(), false, "")
+ }
+
+ // Print routes
+ if app.config.EnablePrintRoutes {
+ app.printRoutesMessage()
+ }
+
+ // Start listening
+ return app.server.Serve(ln)
+}
+
+// ListenTLS serves HTTPS requests from the given addr.
+// certFile and keyFile are the paths to TLS certificate and key file:
+//
+// app.ListenTLS(":8080", "./cert.pem", "./cert.key")
+func (app *App) ListenTLS(addr, certFile, keyFile string) error {
+ // Check for valid cert/key path
+ if len(certFile) == 0 || len(keyFile) == 0 {
+ return errors.New("tls: provide a valid cert or key path")
+ }
+
+ // Set TLS config with handler
+ cert, err := tls.LoadX509KeyPair(certFile, keyFile)
+ if err != nil {
+ return fmt.Errorf("tls: cannot load TLS key pair from certFile=%q and keyFile=%q: %w", certFile, keyFile, err)
+ }
+
+ return app.ListenTLSWithCertificate(addr, cert)
+}
+
+// ListenTLSWithCertificate serves HTTPS requests from the given addr.
+// cert is a tls.Certificate
+//
+// app.ListenTLSWithCertificate(":8080", cert)
+func (app *App) ListenTLSWithCertificate(addr string, cert tls.Certificate) error {
+ tlsHandler := &TLSHandler{}
+ config := &tls.Config{
+ MinVersion: tls.VersionTLS12,
+ Certificates: []tls.Certificate{
+ cert,
+ },
+ GetCertificate: tlsHandler.GetClientInfo,
+ }
+
+ // Prefork is supported
+ if app.config.Prefork {
+ return app.prefork(app.config.Network, addr, config)
+ }
+
+ // Setup listener
+ ln, err := net.Listen(app.config.Network, addr)
+ if err != nil {
+ return fmt.Errorf("failed to listen: %w", err)
+ }
+ // wrap with TLS only after a successful Listen
+ ln = tls.NewListener(ln, config)
+
+ // prepare the server for the start
+ app.startupProcess()
+
+ // run hooks
+ app.runOnListenHooks(app.prepareListenData(ln.Addr().String(), getTLSConfig(ln) != nil))
+
+ // Print startup message
+ if !app.config.DisableStartupMessage {
+ app.startupMessage(ln.Addr().String(), true, "")
+ }
+
+ // Print routes
+ if app.config.EnablePrintRoutes {
+ app.printRoutesMessage()
+ }
+
+ // Attach the tlsHandler to the config
+ app.SetTLSHandler(tlsHandler)
+
+ // Start listening
+ return app.server.Serve(ln)
+}
+
+// ListenMutualTLS serves HTTPS requests from the given addr.
+// certFile, keyFile and clientCertFile are the paths to TLS certificate and key file:
+//
+// app.ListenMutualTLS(":8080", "./cert.pem", "./cert.key", "./client.pem")
+func (app *App) ListenMutualTLS(addr, certFile, keyFile, clientCertFile string) error {
+ // Check for valid cert/key path
+ if len(certFile) == 0 || len(keyFile) == 0 {
+ return errors.New("tls: provide a valid cert or key path")
+ }
+
+ cert, err := tls.LoadX509KeyPair(certFile, keyFile)
+ if err != nil {
+ return fmt.Errorf("tls: cannot load TLS key pair from certFile=%q and keyFile=%q: %w", certFile, keyFile, err)
+ }
+
+ clientCACert, err := os.ReadFile(filepath.Clean(clientCertFile))
+ if err != nil {
+ return fmt.Errorf("failed to read file: %w", err)
+ }
+ clientCertPool := x509.NewCertPool()
+ clientCertPool.AppendCertsFromPEM(clientCACert)
+
+ return app.ListenMutualTLSWithCertificate(addr, cert, clientCertPool)
+}
+
+// ListenMutualTLSWithCertificate serves HTTPS requests from the given addr.
+// cert is a tls.Certificate and clientCertPool is a *x509.CertPool:
+//
+// app.ListenMutualTLS(":8080", cert, clientCertPool)
+func (app *App) ListenMutualTLSWithCertificate(addr string, cert tls.Certificate, clientCertPool *x509.CertPool) error {
+ tlsHandler := &TLSHandler{}
+ config := &tls.Config{
+ MinVersion: tls.VersionTLS12,
+ ClientAuth: tls.RequireAndVerifyClientCert,
+ ClientCAs: clientCertPool,
+ Certificates: []tls.Certificate{
+ cert,
+ },
+ GetCertificate: tlsHandler.GetClientInfo,
+ }
+
+ // Prefork is supported
+ if app.config.Prefork {
+ return app.prefork(app.config.Network, addr, config)
+ }
+
+ // Setup listener
+ ln, err := tls.Listen(app.config.Network, addr, config)
+ if err != nil {
+ return fmt.Errorf("failed to listen: %w", err)
+ }
+
+ // prepare the server for the start
+ app.startupProcess()
+
+ // run hooks
+ app.runOnListenHooks(app.prepareListenData(ln.Addr().String(), getTLSConfig(ln) != nil))
+
+ // Print startup message
+ if !app.config.DisableStartupMessage {
+ app.startupMessage(ln.Addr().String(), true, "")
+ }
+
+ // Print routes
+ if app.config.EnablePrintRoutes {
+ app.printRoutesMessage()
+ }
+
+ // Attach the tlsHandler to the config
+ app.SetTLSHandler(tlsHandler)
+
+ // Start listening
+ return app.server.Serve(ln)
+}
+
+// prepareListenData creates a ListenData populated from the given address.
+func (app *App) prepareListenData(addr string, isTLS bool) ListenData { //revive:disable-line:flag-parameter // Accepting a bool param named isTLS is fine here
+ host, port := parseAddr(addr)
+ if host == "" {
+ if app.config.Network == NetworkTCP6 {
+ host = "[::1]"
+ } else {
+ host = globalIpv4Addr
+ }
+ }
+
+ return ListenData{
+ Host: host,
+ Port: port,
+ TLS: isTLS,
+ }
+}
+
+// startupMessage prepares the startup message with the handler count, port, address and other information
+func (app *App) startupMessage(addr string, isTLS bool, pids string) { //nolint: revive // Accepting a bool param named isTLS is fine here
+ // ignore child processes
+ if IsChild() {
+ return
+ }
+
+ // Alias colors
+ colors := app.config.ColorScheme
+
+ value := func(s string, width int) string {
+ pad := width - len(s)
+ str := ""
+ for i := 0; i < pad; i++ {
+ str += "."
+ }
+ if s == "Disabled" {
+ str += " " + s
+ } else {
+ str += fmt.Sprintf(" %s%s%s", colors.Cyan, s, colors.Black)
+ }
+ return str
+ }
+
+ center := func(s string, width int) string {
+ const padDiv = 2
+ pad := strconv.Itoa((width - len(s)) / padDiv)
+ str := fmt.Sprintf("%"+pad+"s", " ")
+ str += s
+ str += fmt.Sprintf("%"+pad+"s", " ")
+ if len(str) < width {
+ str += " "
+ }
+ return str
+ }
+
+ centerValue := func(s string, width int) string {
+ const padDiv = 2
+ pad := strconv.Itoa((width - runewidth.StringWidth(s)) / padDiv)
+ str := fmt.Sprintf("%"+pad+"s", " ")
+ str += fmt.Sprintf("%s%s%s", colors.Cyan, s, colors.Black)
+ str += fmt.Sprintf("%"+pad+"s", " ")
+ if runewidth.StringWidth(s)-10 < width && runewidth.StringWidth(s)%2 == 0 {
+ // add an ending space if the length of str is even and str is not too long
+ str += " "
+ }
+ return str
+ }
+
+ pad := func(s string, width int) string {
+ toAdd := width - len(s)
+ str := s
+ for i := 0; i < toAdd; i++ {
+ str += " "
+ }
+ return str
+ }
+
+ host, port := parseAddr(addr)
+ if host == "" {
+ if app.config.Network == NetworkTCP6 {
+ host = "[::1]"
+ } else {
+ host = globalIpv4Addr
+ }
+ }
+
+ scheme := schemeHTTP
+ if isTLS {
+ scheme = schemeHTTPS
+ }
+
+ isPrefork := "Disabled"
+ if app.config.Prefork {
+ isPrefork = "Enabled"
+ }
+
+ procs := strconv.Itoa(runtime.GOMAXPROCS(0))
+ if !app.config.Prefork {
+ procs = "1"
+ }
+
+ const lineLen = 49
+ mainLogo := colors.Black + " ┌───────────────────────────────────────────────────┐\n"
+ if app.config.AppName != "" {
+ mainLogo += " │ " + centerValue(app.config.AppName, lineLen) + " │\n"
+ }
+ mainLogo += " │ " + centerValue("Fiber v"+Version, lineLen) + " │\n"
+
+ if host == globalIpv4Addr {
+ mainLogo += " │ " + center(fmt.Sprintf("%s://127.0.0.1:%s", scheme, port), lineLen) + " │\n" +
+ " │ " + center(fmt.Sprintf("(bound on host 0.0.0.0 and port %s)", port), lineLen) + " │\n"
+ } else {
+ mainLogo += " │ " + center(fmt.Sprintf("%s://%s:%s", scheme, host, port), lineLen) + " │\n"
+ }
+
+ mainLogo += fmt.Sprintf(
+ " │ │\n"+
+ " │ Handlers %s Processes %s │\n"+
+ " │ Prefork .%s PID ....%s │\n"+
+ " └───────────────────────────────────────────────────┘"+
+ colors.Reset,
+ value(strconv.Itoa(int(app.handlersCount)), 14), value(procs, 12),
+ value(isPrefork, 14), value(strconv.Itoa(os.Getpid()), 14),
+ )
+
+ var childPidsLogo string
+ if app.config.Prefork {
+ var childPidsTemplate string
+ childPidsTemplate += "%s"
+ childPidsTemplate += " ┌───────────────────────────────────────────────────┐\n%s"
+ childPidsTemplate += " └───────────────────────────────────────────────────┘"
+ childPidsTemplate += "%s"
+
+ newLine := " │ %s%s%s │"
+
+ // Turn the `pids` variable (in the form ",a,b,c,d,e,f,etc") into a slice of PIDs
+ var pidSlice []string
+ for _, v := range strings.Split(pids, ",") {
+ if v != "" {
+ pidSlice = append(pidSlice, v)
+ }
+ }
+
+ var lines []string
+ thisLine := "Child PIDs ... "
+ var itemsOnThisLine []string
+
+ const maxLineLen = 49
+
+ addLine := func() {
+ lines = append(lines,
+ fmt.Sprintf(
+ newLine,
+ colors.Black,
+ thisLine+colors.Cyan+pad(strings.Join(itemsOnThisLine, ", "), maxLineLen-len(thisLine)),
+ colors.Black,
+ ),
+ )
+ }
+
+ for _, pid := range pidSlice {
+ if len(thisLine+strings.Join(append(itemsOnThisLine, pid), ", ")) > maxLineLen {
+ addLine()
+ thisLine = ""
+ itemsOnThisLine = []string{pid}
+ } else {
+ itemsOnThisLine = append(itemsOnThisLine, pid)
+ }
+ }
+
+ // Add left over items to their own line
+ if len(itemsOnThisLine) != 0 {
+ addLine()
+ }
+
+ // Form logo
+ childPidsLogo = fmt.Sprintf(childPidsTemplate,
+ colors.Black,
+ strings.Join(lines, "\n")+"\n",
+ colors.Reset,
+ )
+ }
+
+ // Combine both the child PID logo and the main Fiber logo
+
+ // Pad the shorter logo to the length of the longer one
+ splitMainLogo := strings.Split(mainLogo, "\n")
+ splitChildPidsLogo := strings.Split(childPidsLogo, "\n")
+
+ mainLen := len(splitMainLogo)
+ childLen := len(splitChildPidsLogo)
+
+ if mainLen > childLen {
+ diff := mainLen - childLen
+ for i := 0; i < diff; i++ {
+ splitChildPidsLogo = append(splitChildPidsLogo, "")
+ }
+ } else {
+ diff := childLen - mainLen
+ for i := 0; i < diff; i++ {
+ splitMainLogo = append(splitMainLogo, "")
+ }
+ }
+
+ // Combine the two logos, line by line
+ output := "\n"
+ for i := range splitMainLogo {
+ output += colors.Black + splitMainLogo[i] + " " + splitChildPidsLogo[i] + "\n"
+ }
+
+ out := colorable.NewColorableStdout()
+ if os.Getenv("TERM") == "dumb" || os.Getenv("NO_COLOR") == "1" || (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd())) {
+ out = colorable.NewNonColorable(os.Stdout)
+ }
+
+ _, _ = fmt.Fprintln(out, output)
+}
+
+// printRoutesMessage prints all routes with method, path, name and handlers
+// in a table format, like this:
+// method | path | name | handlers
+// GET | / | routeName | github.com/gofiber/fiber/v2.emptyHandler
+// HEAD | / | | github.com/gofiber/fiber/v2.emptyHandler
+func (app *App) printRoutesMessage() {
+ // ignore child processes
+ if IsChild() {
+ return
+ }
+
+ // Alias colors
+ colors := app.config.ColorScheme
+
+ var routes []RouteMessage
+ for _, routeStack := range app.stack {
+ for _, route := range routeStack {
+ var newRoute RouteMessage
+ newRoute.name = route.Name
+ newRoute.method = route.Method
+ newRoute.path = route.Path
+ for _, handler := range route.Handlers {
+ newRoute.handlers += runtime.FuncForPC(reflect.ValueOf(handler).Pointer()).Name() + " "
+ }
+ routes = append(routes, newRoute)
+ }
+ }
+
+ out := colorable.NewColorableStdout()
+ if os.Getenv("TERM") == "dumb" || os.Getenv("NO_COLOR") == "1" || (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd())) {
+ out = colorable.NewNonColorable(os.Stdout)
+ }
+
+ w := tabwriter.NewWriter(out, 1, 1, 1, ' ', 0)
+ // Sort routes by path
+ sort.Slice(routes, func(i, j int) bool {
+ return routes[i].path < routes[j].path
+ })
+
+ _, _ = fmt.Fprintf(w, "%smethod\t%s| %spath\t%s| %sname\t%s| %shandlers\t%s\n", colors.Blue, colors.White, colors.Green, colors.White, colors.Cyan, colors.White, colors.Yellow, colors.Reset)
+ _, _ = fmt.Fprintf(w, "%s------\t%s| %s----\t%s| %s----\t%s| %s--------\t%s\n", colors.Blue, colors.White, colors.Green, colors.White, colors.Cyan, colors.White, colors.Yellow, colors.Reset)
+ for _, route := range routes {
+ _, _ = fmt.Fprintf(w, "%s%s\t%s| %s%s\t%s| %s%s\t%s| %s%s%s\n", colors.Blue, route.method, colors.White, colors.Green, route.path, colors.White, colors.Cyan, route.name, colors.White, colors.Yellow, route.handlers, colors.Reset)
+ }
+
+ _ = w.Flush() //nolint:errcheck // It is fine to ignore the error here
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/log/default.go b/vendor/github.com/gofiber/fiber/v2/log/default.go
new file mode 100644
index 0000000..c898cd6
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/log/default.go
@@ -0,0 +1,209 @@
+package log
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "sync"
+
+ "github.com/valyala/bytebufferpool"
+)
+
+var _ AllLogger = (*defaultLogger)(nil)
+
+type defaultLogger struct {
+ stdlog *log.Logger
+ level Level
+ depth int
+}
+
+// privateLog logs a message at the given level using the default logger.
+// When the level is Fatal, it exits the program.
+func (l *defaultLogger) privateLog(lv Level, fmtArgs []interface{}) {
+ if l.level > lv {
+ return
+ }
+ level := lv.toString()
+ buf := bytebufferpool.Get()
+ _, _ = buf.WriteString(level) //nolint:errcheck // It is fine to ignore the error
+ _, _ = buf.WriteString(fmt.Sprint(fmtArgs...)) //nolint:errcheck // It is fine to ignore the error
+
+ _ = l.stdlog.Output(l.depth, buf.String()) //nolint:errcheck // It is fine to ignore the error
+ buf.Reset()
+ bytebufferpool.Put(buf)
+ if lv == LevelFatal {
+ os.Exit(1) //nolint:revive // we want to exit the program when Fatal is called
+ }
+}
+
+// privateLogf logs a formatted message at the given level using the default logger.
+// When the level is Fatal, it exits the program.
+func (l *defaultLogger) privateLogf(lv Level, format string, fmtArgs []interface{}) {
+ if l.level > lv {
+ return
+ }
+ level := lv.toString()
+ buf := bytebufferpool.Get()
+ _, _ = buf.WriteString(level) //nolint:errcheck // It is fine to ignore the error
+
+ if len(fmtArgs) > 0 {
+ _, _ = fmt.Fprintf(buf, format, fmtArgs...)
+ } else {
+ // no format args: print the format string as-is
+ _, _ = fmt.Fprint(buf, format)
+ }
+ _ = l.stdlog.Output(l.depth, buf.String()) //nolint:errcheck // It is fine to ignore the error
+ buf.Reset()
+ bytebufferpool.Put(buf)
+ if lv == LevelFatal {
+ os.Exit(1) //nolint:revive // we want to exit the program when Fatal is called
+ }
+}
+
+// privateLogw logs a message and key-value pairs at the given level using the
+// default logger. When the level is Fatal, it exits the program.
+func (l *defaultLogger) privateLogw(lv Level, format string, keysAndValues []interface{}) {
+ if l.level > lv {
+ return
+ }
+ level := lv.toString()
+ buf := bytebufferpool.Get()
+ _, _ = buf.WriteString(level) //nolint:errcheck // It is fine to ignore the error
+
+ // Write format privateLog buffer
+ if format != "" {
+ _, _ = buf.WriteString(format) //nolint:errcheck // It is fine to ignore the error
+ }
+ var once sync.Once
+ isFirst := true
+ // Write keys and values privateLog buffer
+ if len(keysAndValues) > 0 {
+ if (len(keysAndValues) & 1) == 1 {
+ keysAndValues = append(keysAndValues, "KEYVALS UNPAIRED")
+ }
+
+ for i := 0; i < len(keysAndValues); i += 2 {
+ if format == "" && isFirst {
+ once.Do(func() {
+ _, _ = fmt.Fprintf(buf, "%s=%v", keysAndValues[i], keysAndValues[i+1])
+ isFirst = false
+ })
+ continue
+ }
+ _, _ = fmt.Fprintf(buf, " %s=%v", keysAndValues[i], keysAndValues[i+1])
+ }
+ }
+
+ _ = l.stdlog.Output(l.depth, buf.String()) //nolint:errcheck // It is fine to ignore the error
+ buf.Reset()
+ bytebufferpool.Put(buf)
+ if lv == LevelFatal {
+ os.Exit(1) //nolint:revive // we want to exit the program when Fatal is called
+ }
+}
+
+func (l *defaultLogger) Trace(v ...interface{}) {
+ l.privateLog(LevelTrace, v)
+}
+
+func (l *defaultLogger) Debug(v ...interface{}) {
+ l.privateLog(LevelDebug, v)
+}
+
+func (l *defaultLogger) Info(v ...interface{}) {
+ l.privateLog(LevelInfo, v)
+}
+
+func (l *defaultLogger) Warn(v ...interface{}) {
+ l.privateLog(LevelWarn, v)
+}
+
+func (l *defaultLogger) Error(v ...interface{}) {
+ l.privateLog(LevelError, v)
+}
+
+func (l *defaultLogger) Fatal(v ...interface{}) {
+ l.privateLog(LevelFatal, v)
+}
+
+func (l *defaultLogger) Panic(v ...interface{}) {
+ l.privateLog(LevelPanic, v)
+}
+
+func (l *defaultLogger) Tracef(format string, v ...interface{}) {
+ l.privateLogf(LevelTrace, format, v)
+}
+
+func (l *defaultLogger) Debugf(format string, v ...interface{}) {
+ l.privateLogf(LevelDebug, format, v)
+}
+
+func (l *defaultLogger) Infof(format string, v ...interface{}) {
+ l.privateLogf(LevelInfo, format, v)
+}
+
+func (l *defaultLogger) Warnf(format string, v ...interface{}) {
+ l.privateLogf(LevelWarn, format, v)
+}
+
+func (l *defaultLogger) Errorf(format string, v ...interface{}) {
+ l.privateLogf(LevelError, format, v)
+}
+
+func (l *defaultLogger) Fatalf(format string, v ...interface{}) {
+ l.privateLogf(LevelFatal, format, v)
+}
+
+func (l *defaultLogger) Panicf(format string, v ...interface{}) {
+ l.privateLogf(LevelPanic, format, v)
+}
+
+func (l *defaultLogger) Tracew(msg string, keysAndValues ...interface{}) {
+ l.privateLogw(LevelTrace, msg, keysAndValues)
+}
+
+func (l *defaultLogger) Debugw(msg string, keysAndValues ...interface{}) {
+ l.privateLogw(LevelDebug, msg, keysAndValues)
+}
+
+func (l *defaultLogger) Infow(msg string, keysAndValues ...interface{}) {
+ l.privateLogw(LevelInfo, msg, keysAndValues)
+}
+
+func (l *defaultLogger) Warnw(msg string, keysAndValues ...interface{}) {
+ l.privateLogw(LevelWarn, msg, keysAndValues)
+}
+
+func (l *defaultLogger) Errorw(msg string, keysAndValues ...interface{}) {
+ l.privateLogw(LevelError, msg, keysAndValues)
+}
+
+func (l *defaultLogger) Fatalw(msg string, keysAndValues ...interface{}) {
+ l.privateLogw(LevelFatal, msg, keysAndValues)
+}
+
+func (l *defaultLogger) Panicw(msg string, keysAndValues ...interface{}) {
+ l.privateLogw(LevelPanic, msg, keysAndValues)
+}
+
+func (l *defaultLogger) WithContext(_ context.Context) CommonLogger {
+ return &defaultLogger{
+ stdlog: l.stdlog,
+ level: l.level,
+ depth: l.depth - 1,
+ }
+}
+
+func (l *defaultLogger) SetLevel(level Level) {
+ l.level = level
+}
+
+func (l *defaultLogger) SetOutput(writer io.Writer) {
+ l.stdlog.SetOutput(writer)
+}
+
+// DefaultLogger returns the default logger.
+func DefaultLogger() AllLogger {
+ return logger
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/log/fiberlog.go b/vendor/github.com/gofiber/fiber/v2/log/fiberlog.go
new file mode 100644
index 0000000..90333ee
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/log/fiberlog.go
@@ -0,0 +1,141 @@
+package log
+
+import (
+ "context"
+ "io"
+)
+
+// Fatal calls the default logger's Fatal method and then os.Exit(1).
+func Fatal(v ...interface{}) {
+ logger.Fatal(v...)
+}
+
+// Error calls the default logger's Error method.
+func Error(v ...interface{}) {
+ logger.Error(v...)
+}
+
+// Warn calls the default logger's Warn method.
+func Warn(v ...interface{}) {
+ logger.Warn(v...)
+}
+
+// Info calls the default logger's Info method.
+func Info(v ...interface{}) {
+ logger.Info(v...)
+}
+
+// Debug calls the default logger's Debug method.
+func Debug(v ...interface{}) {
+ logger.Debug(v...)
+}
+
+// Trace calls the default logger's Trace method.
+func Trace(v ...interface{}) {
+ logger.Trace(v...)
+}
+
+// Panic calls the default logger's Panic method.
+func Panic(v ...interface{}) {
+ logger.Panic(v...)
+}
+
+// Fatalf calls the default logger's Fatalf method and then os.Exit(1).
+func Fatalf(format string, v ...interface{}) {
+ logger.Fatalf(format, v...)
+}
+
+// Errorf calls the default logger's Errorf method.
+func Errorf(format string, v ...interface{}) {
+ logger.Errorf(format, v...)
+}
+
+// Warnf calls the default logger's Warnf method.
+func Warnf(format string, v ...interface{}) {
+ logger.Warnf(format, v...)
+}
+
+// Infof calls the default logger's Infof method.
+func Infof(format string, v ...interface{}) {
+ logger.Infof(format, v...)
+}
+
+// Debugf calls the default logger's Debugf method.
+func Debugf(format string, v ...interface{}) {
+ logger.Debugf(format, v...)
+}
+
+// Tracef calls the default logger's Tracef method.
+func Tracef(format string, v ...interface{}) {
+ logger.Tracef(format, v...)
+}
+
+// Panicf calls the default logger's Panicf method.
+func Panicf(format string, v ...interface{}) {
+ logger.Panicf(format, v...)
+}
+
+// Tracew logs a message with some additional context. The variadic key-value
+// pairs are treated as they are in With.
+func Tracew(msg string, keysAndValues ...interface{}) {
+ logger.Tracew(msg, keysAndValues...)
+}
+
+// Debugw logs a message with some additional context. The variadic key-value
+// pairs are treated as they are in With.
+func Debugw(msg string, keysAndValues ...interface{}) {
+ logger.Debugw(msg, keysAndValues...)
+}
+
+// Infow logs a message with some additional context. The variadic key-value
+// pairs are treated as they are in With.
+func Infow(msg string, keysAndValues ...interface{}) {
+ logger.Infow(msg, keysAndValues...)
+}
+
+// Warnw logs a message with some additional context. The variadic key-value
+// pairs are treated as they are in With.
+func Warnw(msg string, keysAndValues ...interface{}) {
+ logger.Warnw(msg, keysAndValues...)
+}
+
+// Errorw logs a message with some additional context. The variadic key-value
+// pairs are treated as they are in With.
+func Errorw(msg string, keysAndValues ...interface{}) {
+ logger.Errorw(msg, keysAndValues...)
+}
+
+// Fatalw logs a message with some additional context. The variadic key-value
+// pairs are treated as they are in With.
+func Fatalw(msg string, keysAndValues ...interface{}) {
+ logger.Fatalw(msg, keysAndValues...)
+}
+
+// Panicw logs a message with some additional context. The variadic key-value
+// pairs are treated as they are in With.
+func Panicw(msg string, keysAndValues ...interface{}) {
+ logger.Panicw(msg, keysAndValues...)
+}
+
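+// WithContext calls the default logger's WithContext method.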
+func WithContext(ctx context.Context) CommonLogger {
+ return logger.WithContext(ctx)
+}
+
+// SetLogger sets the default logger and the system logger.
+// Note that this method is not concurrent-safe and must not be called
+// after the use of DefaultLogger and the global functions in this package.
+func SetLogger(v AllLogger) {
+ logger = v
+}
+
+// SetOutput sets the output of default logger and system logger. By default, it is stderr.
+func SetOutput(w io.Writer) {
+ logger.SetOutput(w)
+}
+
+// SetLevel sets the level below which log messages will not be output.
+// The default level is LevelTrace.
+// Note that this method is not concurrent-safe.
+func SetLevel(lv Level) {
+ logger.SetLevel(lv)
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/log/log.go b/vendor/github.com/gofiber/fiber/v2/log/log.go
new file mode 100644
index 0000000..31b4cc8
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/log/log.go
@@ -0,0 +1,100 @@
+package log
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "log"
+ "os"
+)
+
+var logger AllLogger = &defaultLogger{
+ stdlog: log.New(os.Stderr, "", log.LstdFlags|log.Lshortfile|log.Lmicroseconds),
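+ // depth is the calldepth handed to log.Output so the logged file:line points at the caller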
+ depth: 4,
+}
+
+// Logger is a logger interface that provides logging function with levels.
+type Logger interface {
+ Trace(v ...interface{})
+ Debug(v ...interface{})
+ Info(v ...interface{})
+ Warn(v ...interface{})
+ Error(v ...interface{})
+ Fatal(v ...interface{})
+ Panic(v ...interface{})
+}
+
+// FormatLogger is a logger interface that outputs logs with a format.
+type FormatLogger interface {
+ Tracef(format string, v ...interface{})
+ Debugf(format string, v ...interface{})
+ Infof(format string, v ...interface{})
+ Warnf(format string, v ...interface{})
+ Errorf(format string, v ...interface{})
+ Fatalf(format string, v ...interface{})
+ Panicf(format string, v ...interface{})
+}
+
+// WithLogger is a logger interface that outputs logs with a message and key-value pairs.
+type WithLogger interface {
+ Tracew(msg string, keysAndValues ...interface{})
+ Debugw(msg string, keysAndValues ...interface{})
+ Infow(msg string, keysAndValues ...interface{})
+ Warnw(msg string, keysAndValues ...interface{})
+ Errorw(msg string, keysAndValues ...interface{})
+ Fatalw(msg string, keysAndValues ...interface{})
+ Panicw(msg string, keysAndValues ...interface{})
+}
+
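+// CommonLogger combines Logger, FormatLogger and WithLogger.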
+type CommonLogger interface {
+ Logger
+ FormatLogger
+ WithLogger
+}
+
+// ControlLogger provides methods to configure a logger.
+type ControlLogger interface {
+ SetLevel(Level)
+ SetOutput(io.Writer)
+}
+
+// AllLogger is the combination of CommonLogger and ControlLogger, plus context support.
+// Custom extensions can be made through AllLogger.
+type AllLogger interface {
+ CommonLogger
+ ControlLogger
+ WithContext(ctx context.Context) CommonLogger
+}
+
+// Level defines the priority of a log message.
+// When a logger is configured with a level, any log message with a lower
+// log level (smaller by integer comparison) will not be output.
+type Level int
+
+// The levels of logs.
+const (
+ LevelTrace Level = iota
+ LevelDebug
+ LevelInfo
+ LevelWarn
+ LevelError
+ LevelFatal
+ LevelPanic
+)
+
+var strs = []string{
+ "[Trace] ",
+ "[Debug] ",
+ "[Info] ",
+ "[Warn] ",
+ "[Error] ",
+ "[Fatal] ",
+ "[Panic] ",
+}
+
+func (lv Level) toString() string {
+ if lv >= LevelTrace && lv <= LevelPanic {
+ return strs[lv]
+ }
+ return fmt.Sprintf("[?%d] ", lv)
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/middleware/cors/cors.go b/vendor/github.com/gofiber/fiber/v2/middleware/cors/cors.go
new file mode 100644
index 0000000..c347e43
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/middleware/cors/cors.go
@@ -0,0 +1,200 @@
+package cors
+
+import (
+ "strconv"
+ "strings"
+
+ "github.com/gofiber/fiber/v2"
+ "github.com/gofiber/fiber/v2/log"
+)
+
+// Config defines the config for middleware.
+type Config struct {
+ // Next defines a function to skip this middleware when returned true.
+ //
+ // Optional. Default: nil
+ Next func(c *fiber.Ctx) bool
+
+ // AllowOriginsFunc defines a function that will set the 'access-control-allow-origin'
+ // response header to the 'origin' request header when returned true.
+ //
+ // Optional. Default: nil
+ AllowOriginsFunc func(origin string) bool
+
+ // AllowOrigin defines a list of origins that may access the resource.
+ //
+ // Optional. Default value "*"
+ AllowOrigins string
+
+ // AllowMethods defines a list of methods allowed when accessing the resource.
+ // This is used in response to a preflight request.
+ //
+ // Optional. Default value "GET,POST,HEAD,PUT,DELETE,PATCH"
+ AllowMethods string
+
+ // AllowHeaders defines a list of request headers that can be used when
+ // making the actual request. This is in response to a preflight request.
+ //
+ // Optional. Default value "".
+ AllowHeaders string
+
+ // AllowCredentials indicates whether or not the response to the request
+ // can be exposed when the credentials flag is true. When used as part of
+ // a response to a preflight request, this indicates whether or not the
+ // actual request can be made using credentials.
+ //
+ // Optional. Default value false.
+ AllowCredentials bool
+
+ // ExposeHeaders defines a whitelist of headers that clients are allowed to
+ // access.
+ //
+ // Optional. Default value "".
+ ExposeHeaders string
+
+ // MaxAge indicates how long (in seconds) the results of a preflight request
+ // can be cached.
+ // If you pass MaxAge 0, the Access-Control-Max-Age header will not be added
+ // and the browser will use its default of 5 seconds.
+ // To disable caching completely, pass a negative MaxAge; this sets the Access-Control-Max-Age header to 0.
+ //
+ // Optional. Default value 0.
+ MaxAge int
+}
+
+// ConfigDefault is the default config
+var ConfigDefault = Config{
+ Next: nil,
+ AllowOriginsFunc: nil,
+ AllowOrigins: "*",
+ AllowMethods: strings.Join([]string{
+ fiber.MethodGet,
+ fiber.MethodPost,
+ fiber.MethodHead,
+ fiber.MethodPut,
+ fiber.MethodDelete,
+ fiber.MethodPatch,
+ }, ","),
+ AllowHeaders: "",
+ AllowCredentials: false,
+ ExposeHeaders: "",
+ MaxAge: 0,
+}
+
+// New creates a new middleware handler
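+//
+// A minimal usage sketch (the origin value is illustrative):
+//
+// app := fiber.New()
+// app.Use(cors.New(cors.Config{
+// AllowOrigins: "https://example.com",
+// AllowCredentials: true,
+// }))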
+func New(config ...Config) fiber.Handler {
+ // Set default config
+ cfg := ConfigDefault
+
+ // Override config if provided
+ if len(config) > 0 {
+ cfg = config[0]
+
+ // Set default values
+ if cfg.AllowMethods == "" {
+ cfg.AllowMethods = ConfigDefault.AllowMethods
+ }
+ if cfg.AllowOrigins == "" {
+ cfg.AllowOrigins = ConfigDefault.AllowOrigins
+ }
+ }
+
+ // Log a warning if both AllowOrigins and AllowOriginsFunc are set
+ if cfg.AllowOrigins != ConfigDefault.AllowOrigins && cfg.AllowOriginsFunc != nil {
+ log.Warn("[CORS] Both 'AllowOrigins' and 'AllowOriginsFunc' have been defined.")
+ }
+
+ // Convert string to slice
+ allowOrigins := strings.Split(strings.ReplaceAll(cfg.AllowOrigins, " ", ""), ",")
+
+ // Strip white spaces
+ allowMethods := strings.ReplaceAll(cfg.AllowMethods, " ", "")
+ allowHeaders := strings.ReplaceAll(cfg.AllowHeaders, " ", "")
+ exposeHeaders := strings.ReplaceAll(cfg.ExposeHeaders, " ", "")
+
+ // Convert int to string
+ maxAge := strconv.Itoa(cfg.MaxAge)
+
+ // Return new handler
+ return func(c *fiber.Ctx) error {
+ // Don't execute middleware if Next returns true
+ if cfg.Next != nil && cfg.Next(c) {
+ return c.Next()
+ }
+
+ // Get origin header
+ origin := c.Get(fiber.HeaderOrigin)
+ allowOrigin := ""
+
+ // Check allowed origins
+ for _, o := range allowOrigins {
+ if o == "*" {
+ allowOrigin = "*"
+ break
+ }
+ if o == origin {
+ allowOrigin = o
+ break
+ }
+ if matchSubdomain(origin, o) {
+ allowOrigin = origin
+ break
+ }
+ }
+
+ // Run AllowOriginsFunc if the logic for
+ // handling the value in 'AllowOrigins' does
+ // not result in allowOrigin being set.
+ if (allowOrigin == "" || allowOrigin == ConfigDefault.AllowOrigins) && cfg.AllowOriginsFunc != nil {
+ if cfg.AllowOriginsFunc(origin) {
+ allowOrigin = origin
+ }
+ }
+
+ // Simple request
+ if c.Method() != fiber.MethodOptions {
+ c.Vary(fiber.HeaderOrigin)
+ c.Set(fiber.HeaderAccessControlAllowOrigin, allowOrigin)
+
+ if cfg.AllowCredentials {
+ c.Set(fiber.HeaderAccessControlAllowCredentials, "true")
+ }
+ if exposeHeaders != "" {
+ c.Set(fiber.HeaderAccessControlExposeHeaders, exposeHeaders)
+ }
+ return c.Next()
+ }
+
+ // Preflight request
+ c.Vary(fiber.HeaderOrigin)
+ c.Vary(fiber.HeaderAccessControlRequestMethod)
+ c.Vary(fiber.HeaderAccessControlRequestHeaders)
+ c.Set(fiber.HeaderAccessControlAllowOrigin, allowOrigin)
+ c.Set(fiber.HeaderAccessControlAllowMethods, allowMethods)
+
+ // Set Allow-Credentials if set to true
+ if cfg.AllowCredentials {
+ c.Set(fiber.HeaderAccessControlAllowCredentials, "true")
+ }
+
+ // Set Allow-Headers if not empty
+ if allowHeaders != "" {
+ c.Set(fiber.HeaderAccessControlAllowHeaders, allowHeaders)
+ } else {
+ h := c.Get(fiber.HeaderAccessControlRequestHeaders)
+ if h != "" {
+ c.Set(fiber.HeaderAccessControlAllowHeaders, h)
+ }
+ }
+
+ // Set Max-Age if MaxAge is set
+ if cfg.MaxAge > 0 {
+ c.Set(fiber.HeaderAccessControlMaxAge, maxAge)
+ } else if cfg.MaxAge < 0 {
+ c.Set(fiber.HeaderAccessControlMaxAge, "0")
+ }
+
+ // Send 204 No Content
+ return c.SendStatus(fiber.StatusNoContent)
+ }
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/middleware/cors/utils.go b/vendor/github.com/gofiber/fiber/v2/middleware/cors/utils.go
new file mode 100644
index 0000000..8b6114b
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/middleware/cors/utils.go
@@ -0,0 +1,56 @@
+package cors
+
+import (
+ "strings"
+)
+
+func matchScheme(domain, pattern string) bool {
+ didx := strings.Index(domain, ":")
+ pidx := strings.Index(pattern, ":")
+ return didx != -1 && pidx != -1 && domain[:didx] == pattern[:pidx]
+}
+
+// matchSubdomain compares authority with wildcard
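+// e.g. the pattern "https://*.example.com" matches the domain "https://api.example.com"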
+func matchSubdomain(domain, pattern string) bool {
+ if !matchScheme(domain, pattern) {
+ return false
+ }
+ didx := strings.Index(domain, "://")
+ pidx := strings.Index(pattern, "://")
+ if didx == -1 || pidx == -1 {
+ return false
+ }
+ domAuth := domain[didx+3:]
+ // avoid a long loop caused by an overly long (invalid) domain
+ const maxDomainLen = 253
+ if len(domAuth) > maxDomainLen {
+ return false
+ }
+ patAuth := pattern[pidx+3:]
+
+ domComp := strings.Split(domAuth, ".")
+ patComp := strings.Split(patAuth, ".")
+ const divHalf = 2
+ for i := len(domComp)/divHalf - 1; i >= 0; i-- {
+ opp := len(domComp) - 1 - i
+ domComp[i], domComp[opp] = domComp[opp], domComp[i]
+ }
+ for i := len(patComp)/divHalf - 1; i >= 0; i-- {
+ opp := len(patComp) - 1 - i
+ patComp[i], patComp[opp] = patComp[opp], patComp[i]
+ }
+
+ for i, v := range domComp {
+ if len(patComp) <= i {
+ return false
+ }
+ p := patComp[i]
+ if p == "*" {
+ return true
+ }
+ if p != v {
+ return false
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/mount.go b/vendor/github.com/gofiber/fiber/v2/mount.go
new file mode 100644
index 0000000..abb5695
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/mount.go
@@ -0,0 +1,230 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package fiber
+
+import (
+ "sort"
+ "strings"
+ "sync"
+ "sync/atomic"
+)
+
+// mountFields holds the fields related to mounting.
+type mountFields struct {
+ // Mounted and main apps
+ appList map[string]*App
+ // Ordered keys of apps (sorted by key length for Render)
+ appListKeys []string
+ // check added routes of sub-apps
+ subAppsRoutesAdded sync.Once
+ // check mounted sub-apps
+ subAppsProcessed sync.Once
+ // Prefix of app if it was mounted
+ mountPath string
+}
+
+// Create empty mountFields instance
+func newMountFields(app *App) *mountFields {
+ return &mountFields{
+ appList: map[string]*App{"": app},
+ appListKeys: make([]string, 0),
+ }
+}
+
+// Mount attaches another app instance as a sub-router along a routing path.
+// It's very useful to split up a large API into many independent routers and
+// compose them as a single service using Mount. The error handler of the
+// mounted fiber app, and those of any of its own sub apps, are added to the
+// application's error handlers and are invoked on errors that happen within
+// the prefix route.
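+//
+// A minimal sketch (handler is hypothetical):
+//
+// api := fiber.New()
+// api.Get("/users", handler)
+// app.Mount("/api", api) // GET /api/users now reaches handler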
+func (app *App) Mount(prefix string, subApp *App) Router {
+ prefix = strings.TrimRight(prefix, "/")
+ if prefix == "" {
+ prefix = "/"
+ }
+
+ // Support for configs of mounted-apps and sub-mounted-apps
+ for mountedPrefixes, subApp := range subApp.mountFields.appList {
+ path := getGroupPath(prefix, mountedPrefixes)
+
+ subApp.mountFields.mountPath = path
+ app.mountFields.appList[path] = subApp
+ }
+
+ // register mounted group
+ mountGroup := &Group{Prefix: prefix, app: subApp}
+ app.register(methodUse, prefix, mountGroup)
+
+ // Execute onMount hooks
+ if err := subApp.hooks.executeOnMountHooks(app); err != nil {
+ panic(err)
+ }
+
+ return app
+}
+
+// Mount attaches another app instance as a sub-router along a routing path.
+// It's very useful to split up a large API into many independent routers and
+// compose them as a single service using Mount.
+func (grp *Group) Mount(prefix string, subApp *App) Router {
+ groupPath := getGroupPath(grp.Prefix, prefix)
+ groupPath = strings.TrimRight(groupPath, "/")
+ if groupPath == "" {
+ groupPath = "/"
+ }
+
+ // Support for configs of mounted-apps and sub-mounted-apps
+ for mountedPrefixes, subApp := range subApp.mountFields.appList {
+ path := getGroupPath(groupPath, mountedPrefixes)
+
+ subApp.mountFields.mountPath = path
+ grp.app.mountFields.appList[path] = subApp
+ }
+
+ // register mounted group
+ mountGroup := &Group{Prefix: groupPath, app: subApp}
+ grp.app.register(methodUse, groupPath, mountGroup)
+
+ // Execute onMount hooks
+ if err := subApp.hooks.executeOnMountHooks(grp.app); err != nil {
+ panic(err)
+ }
+
+ return grp
+}
+
+// MountPath returns the path pattern on which the app was mounted.
+func (app *App) MountPath() string {
+ return app.mountFields.mountPath
+}
+
+// hasMountedApps checks if there are any mounted apps in the current application.
+func (app *App) hasMountedApps() bool {
+ return len(app.mountFields.appList) > 1
+}
+
+// mountStartupProcess handles the startup process of mounted apps by appending sub-app routes, generating app list keys, and processing sub-app routes.
+func (app *App) mountStartupProcess() {
+ if app.hasMountedApps() {
+ // add routes of sub-apps
+ app.mountFields.subAppsProcessed.Do(func() {
+ app.appendSubAppLists(app.mountFields.appList)
+ app.generateAppListKeys()
+ })
+ // adds the routes of the sub-apps to the current application.
+ app.mountFields.subAppsRoutesAdded.Do(func() {
+ app.processSubAppsRoutes()
+ })
+ }
+}
+
+// generateAppListKeys generates app list keys for Render; it must run after appendSubAppLists.
+func (app *App) generateAppListKeys() {
+ for key := range app.mountFields.appList {
+ app.mountFields.appListKeys = append(app.mountFields.appListKeys, key)
+ }
+
+ sort.Slice(app.mountFields.appListKeys, func(i, j int) bool {
+ return len(app.mountFields.appListKeys[i]) < len(app.mountFields.appListKeys[j])
+ })
+}
+
+// appendSubAppLists recursively collects nested sub-apps into the app list.
+func (app *App) appendSubAppLists(appList map[string]*App, parent ...string) {
+ // Optimize: Cache parent prefix
+ parentPrefix := ""
+ if len(parent) > 0 {
+ parentPrefix = parent[0]
+ }
+
+ for prefix, subApp := range appList {
+ // skip real app
+ if prefix == "" {
+ continue
+ }
+
+ if parentPrefix != "" {
+ prefix = getGroupPath(parentPrefix, prefix)
+ }
+
+ if _, ok := app.mountFields.appList[prefix]; !ok {
+ app.mountFields.appList[prefix] = subApp
+ }
+
+ // The first element of appList is always the app itself. If there are no other sub apps, we should skip appending nested apps.
+ if len(subApp.mountFields.appList) > 1 {
+ app.appendSubAppLists(subApp.mountFields.appList, prefix)
+ }
+ }
+}
+
+// processSubAppsRoutes adds routes of sub-apps recursively when the server is started
+func (app *App) processSubAppsRoutes() {
+ for prefix, subApp := range app.mountFields.appList {
+ // skip real app
+ if prefix == "" {
+ continue
+ }
+ // process the inner routes
+ if subApp.hasMountedApps() {
+ subApp.mountFields.subAppsRoutesAdded.Do(func() {
+ subApp.processSubAppsRoutes()
+ })
+ }
+ }
+ var handlersCount uint32
+ var routePos uint32
+ // Iterate over the stack of the parent app
+ for m := range app.stack {
+ // Iterate over each route in the stack
+ stackLen := len(app.stack[m])
+ for i := 0; i < stackLen; i++ {
+ route := app.stack[m][i]
+ // Check if the route has a mounted app
+ if !route.mount {
+ routePos++
+ // If not, update the route's position and continue
+ route.pos = routePos
+ if !route.use || (route.use && m == 0) {
+ handlersCount += uint32(len(route.Handlers))
+ }
+ continue
+ }
+
+ // Create a slice to hold the sub-app's routes
+ subRoutes := make([]*Route, len(route.group.app.stack[m]))
+
+ // Iterate over the sub-app's routes
+ for j, subAppRoute := range route.group.app.stack[m] {
+ // Clone the sub-app's route
+ subAppRouteClone := app.copyRoute(subAppRoute)
+
+ // Add the parent route's path as a prefix to the sub-app's route
+ app.addPrefixToRoute(route.path, subAppRouteClone)
+
+ // Add the cloned sub-app's route to the slice of sub-app routes
+ subRoutes[j] = subAppRouteClone
+ }
+
+ // Insert the sub-app's routes into the parent app's stack
+ newStack := make([]*Route, len(app.stack[m])+len(subRoutes)-1)
+ copy(newStack[:i], app.stack[m][:i])
+ copy(newStack[i:i+len(subRoutes)], subRoutes)
+ copy(newStack[i+len(subRoutes):], app.stack[m][i+1:])
+ app.stack[m] = newStack
+
+ // Decrease the parent app's route count to account for the mounted app's original route
+ atomic.AddUint32(&app.routesCount, ^uint32(0))
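+ // (adding ^uint32(0) atomically subtracts 1 from the unsigned counter)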
+ i--
+ // Increase the parent app's route count to account for the sub-app's routes
+ atomic.AddUint32(&app.routesCount, uint32(len(subRoutes)))
+
+ // Mark the parent app's routes as refreshed
+ app.routesRefreshed = true
+ // update stackLen after appending subRoutes to app.stack[m]
+ stackLen = len(app.stack[m])
+ }
+ }
+ atomic.StoreUint32(&app.handlersCount, handlersCount)
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/path.go b/vendor/github.com/gofiber/fiber/v2/path.go
new file mode 100644
index 0000000..2cf88c7
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/path.go
@@ -0,0 +1,740 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 📄 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+// ⚠️ This path parser was inspired by ucarion/urlpath (MIT License).
+// 💖 Maintained and modified for Fiber by @renewerner87
+
+package fiber
+
+import (
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+ "unicode"
+
+ "github.com/google/uuid"
+
+ "github.com/gofiber/fiber/v2/utils"
+)
+
+// routeParser holds the path segments and param names
+type routeParser struct {
+ segs []*routeSegment // the parsed segments of the route
+ params []string // the parameter names of the parsed route
+ wildCardCount int // number of wildcard parameters, used internally to give the wildcard parameter its number
+ plusCount int // number of plus parameters, used internally to give the plus parameter its number
+}
+
+// routeSegment holds the segment metadata
+type routeSegment struct {
+ // const information
+ Const string // constant part of the route
+ // parameter information
+ IsParam bool // Truth value that indicates whether it is a parameter or a constant part
+ ParamName string // name of the parameter for access to it, for wildcards and plus parameters access iterators starting with 1 are added
+ ComparePart string // search part to find the end of the parameter
+ PartCount int // how often is the search part contained in the non-param segments? -> necessary for greedy search
+ IsGreedy bool // indicates whether the parameter is greedy or not, is used with wildcard and plus
+ IsOptional bool // indicates whether the parameter is optional or not
+ // common information
+ IsLast bool // shows if the segment is the last one for the route
+ HasOptionalSlash bool // segment has the possibility of an optional slash
+ Constraints []*Constraint // Constraint type if segment is a parameter, if not it will be set to noConstraint by default
+ Length int // length of the parameter for the segment; when it's 0 the length is undetermined
+ // future TODO: add support for optional groups "/abc(/def)?"
+}
+
+// different special routing signs
+const (
+ wildcardParam byte = '*' // indicates an optional greedy parameter
+ plusParam byte = '+' // indicates a required greedy parameter
+ optionalParam byte = '?' // concludes a parameter by name and makes it optional
+ paramStarterChar byte = ':' // start character for a parameter with name
+ slashDelimiter byte = '/' // separator for the route, unlike the other delimiters this character at the end can be optional
+ escapeChar byte = '\\' // escape character
+ paramConstraintStart byte = '<' // start of type constraint for a parameter
+ paramConstraintEnd byte = '>' // end of type constraint for a parameter
+ paramConstraintSeparator byte = ';' // separator of type constraints for a parameter
+ paramConstraintDataStart byte = '(' // start of data of type constraint for a parameter
+ paramConstraintDataEnd byte = ')' // end of data of type constraint for a parameter
+ paramConstraintDataSeparator byte = ',' // separator of the data values of a type constraint for a parameter
+)
+
+// TypeConstraint parameter constraint types
+type TypeConstraint int16
+
+type Constraint struct {
+ ID TypeConstraint
+ RegexCompiler *regexp.Regexp
+ Data []string
+}
+
+const (
+ noConstraint TypeConstraint = iota + 1
+ intConstraint
+ boolConstraint
+ floatConstraint
+ alphaConstraint
+ datetimeConstraint
+ guidConstraint
+ minLenConstraint
+ maxLenConstraint
+ lenConstraint
+ betweenLenConstraint
+ minConstraint
+ maxConstraint
+ rangeConstraint
+ regexConstraint
+)
+
+// list of possible parameter and segment delimiter
+var (
+ // slash has a special role, unlike the other parameters it must not be interpreted as a parameter
+ routeDelimiter = []byte{slashDelimiter, '-', '.'}
+ // list of greedy parameters
+ greedyParameters = []byte{wildcardParam, plusParam}
+ // list of chars for the parameter recognizing
+ parameterStartChars = []byte{wildcardParam, plusParam, paramStarterChar}
+ // list of chars of delimiters and the starting parameter name char
+ parameterDelimiterChars = append([]byte{paramStarterChar, escapeChar}, routeDelimiter...)
+ // list of chars to find the end of a parameter
+ parameterEndChars = append([]byte{optionalParam}, parameterDelimiterChars...)
+ // list of parameter constraint start
+ parameterConstraintStartChars = []byte{paramConstraintStart}
+ // list of parameter constraint end
+ parameterConstraintEndChars = []byte{paramConstraintEnd}
+ // list of parameter separator
+ parameterConstraintSeparatorChars = []byte{paramConstraintSeparator}
+ // list of parameter constraint data start
+ parameterConstraintDataStartChars = []byte{paramConstraintDataStart}
+ // list of parameter constraint data end
+ parameterConstraintDataEndChars = []byte{paramConstraintDataEnd}
+ // list of parameter constraint data separator
+ parameterConstraintDataSeparatorChars = []byte{paramConstraintDataSeparator}
+)
+
+// RoutePatternMatch checks if a given path matches a Fiber route pattern.
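+//
+// For example:
+//
+// RoutePatternMatch("/users/123", "/users/:id") // true
+// RoutePatternMatch("/users", "/users/:id") // false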
+func RoutePatternMatch(path, pattern string, cfg ...Config) bool {
+ // See logic in (*Route).match and (*App).register
+ var ctxParams [maxParams]string
+
+ config := Config{}
+ if len(cfg) > 0 {
+ config = cfg[0]
+ }
+
+ if path == "" {
+ path = "/"
+ }
+
+ // Cannot have an empty pattern
+ if pattern == "" {
+ pattern = "/"
+ }
+ // Patterns always start with a '/'
+ if pattern[0] != '/' {
+ pattern = "/" + pattern
+ }
+
+ patternPretty := pattern
+
+ // Case-sensitive routing, all to lowercase
+ if !config.CaseSensitive {
+ patternPretty = utils.ToLower(patternPretty)
+ path = utils.ToLower(path)
+ }
+ // Strict routing, remove trailing slashes
+ if !config.StrictRouting && len(patternPretty) > 1 {
+ patternPretty = utils.TrimRight(patternPretty, '/')
+ }
+
+ parser := parseRoute(patternPretty)
+
+ if patternPretty == "/" && path == "/" {
+ return true
+ // '*' wildcard matches any path
+ } else if patternPretty == "/*" {
+ return true
+ }
+
+ // Does this route have parameters
+ if len(parser.params) > 0 {
+ if match := parser.getMatch(path, path, &ctxParams, false); match {
+ return true
+ }
+ }
+ // Check for a simple match
+ patternPretty = RemoveEscapeChar(patternPretty)
+ if len(patternPretty) == len(path) && patternPretty == path {
+ return true
+ }
+ // No match
+ return false
+}
+
+// parseRoute analyzes the route and divides it into segments for constant areas and parameters,
+// this information is needed later when assigning the requests to the declared routes
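+// e.g. "/api/:user" is divided into a constant segment "/api/" and a parameter segment named "user"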
+func parseRoute(pattern string) routeParser {
+ parser := routeParser{}
+
+ part := ""
+ for len(pattern) > 0 {
+ nextParamPosition := findNextParamPosition(pattern)
+ // handle the parameter part
+ if nextParamPosition == 0 {
+ processedPart, seg := parser.analyseParameterPart(pattern)
+ parser.params, parser.segs, part = append(parser.params, seg.ParamName), append(parser.segs, seg), processedPart
+ } else {
+ processedPart, seg := parser.analyseConstantPart(pattern, nextParamPosition)
+ parser.segs, part = append(parser.segs, seg), processedPart
+ }
+
+ // reduce the pattern by the processed parts
+ if len(part) == len(pattern) {
+ break
+ }
+ pattern = pattern[len(part):]
+ }
+ // mark last segment
+ if len(parser.segs) > 0 {
+ parser.segs[len(parser.segs)-1].IsLast = true
+ }
+ parser.segs = addParameterMetaInfo(parser.segs)
+
+ return parser
+}
+
+// addParameterMetaInfo adds important meta information to the parameter segments
+// to simplify the search for the end of the parameter
+func addParameterMetaInfo(segs []*routeSegment) []*routeSegment {
+ var comparePart string
+ segLen := len(segs)
+ // loop from end to begin
+ for i := segLen - 1; i >= 0; i-- {
+ // set the compare part for the parameter
+ if segs[i].IsParam {
+ // important for finding the end of the parameter
+ segs[i].ComparePart = RemoveEscapeChar(comparePart)
+ } else {
+ comparePart = segs[i].Const
+ if len(comparePart) > 1 {
+ comparePart = utils.TrimRight(comparePart, slashDelimiter)
+ }
+ }
+ }
+
+ // loop from begin to end
+ for i := 0; i < segLen; i++ {
+ // check how often the compare part is in the following const parts
+ if segs[i].IsParam {
+ // check if parameter segments are directly after each other and if one of them is greedy
+ // when neither the current nor the next parameter is greedy, the current one should match exactly one character
+ if segLen > i+1 && !segs[i].IsGreedy && segs[i+1].IsParam && !segs[i+1].IsGreedy {
+ segs[i].Length = 1
+ }
+ if segs[i].ComparePart == "" {
+ continue
+ }
+ for j := i + 1; j <= len(segs)-1; j++ {
+ if !segs[j].IsParam {
+ // count is important for the greedy match
+ segs[i].PartCount += strings.Count(segs[j].Const, segs[i].ComparePart)
+ }
+ }
+ // check if the end of the segment is an optional slash and then if the segment is optional or the last one
+ } else if segs[i].Const[len(segs[i].Const)-1] == slashDelimiter && (segs[i].IsLast || (segLen > i+1 && segs[i+1].IsOptional)) {
+ segs[i].HasOptionalSlash = true
+ }
+ }
+
+ return segs
+}
+
+// findNextParamPosition searches for the next possible parameter start position
+func findNextParamPosition(pattern string) int {
+ nextParamPosition := findNextNonEscapedCharsetPosition(pattern, parameterStartChars)
+ if nextParamPosition != -1 && len(pattern) > nextParamPosition && pattern[nextParamPosition] != wildcardParam {
+ // search for parameter characters for the found parameter start,
+ // if there are more, move the parameter start to the last parameter char
+ for found := findNextNonEscapedCharsetPosition(pattern[nextParamPosition+1:], parameterStartChars); found == 0; {
+ nextParamPosition++
+ if len(pattern) > nextParamPosition {
+ break
+ }
+ }
+ }
+
+ return nextParamPosition
+}
+
+// analyseConstantPart finds the end of the constant part and creates the route segment
+func (*routeParser) analyseConstantPart(pattern string, nextParamPosition int) (string, *routeSegment) {
+ // handle the constant part
+ processedPart := pattern
+ if nextParamPosition != -1 {
+ // remove the constant part until the parameter
+ processedPart = pattern[:nextParamPosition]
+ }
+ constPart := RemoveEscapeChar(processedPart)
+ return processedPart, &routeSegment{
+ Const: constPart,
+ Length: len(constPart),
+ }
+}
+
+// analyseParameterPart finds the parameter end and creates the route segment
+func (routeParser *routeParser) analyseParameterPart(pattern string) (string, *routeSegment) {
+ isWildCard := pattern[0] == wildcardParam
+ isPlusParam := pattern[0] == plusParam
+
+ var parameterEndPosition int
+ if strings.ContainsRune(pattern, rune(paramConstraintStart)) && strings.ContainsRune(pattern, rune(paramConstraintEnd)) {
+ parameterEndPosition = findNextCharsetPositionConstraint(pattern[1:], parameterEndChars)
+ } else {
+ parameterEndPosition = findNextNonEscapedCharsetPosition(pattern[1:], parameterEndChars)
+ }
+
+ parameterConstraintStart := -1
+ parameterConstraintEnd := -1
+ // handle wildcard end
+ switch {
+ case isWildCard, isPlusParam:
+ parameterEndPosition = 0
+ case parameterEndPosition == -1:
+ parameterEndPosition = len(pattern) - 1
+ case !isInCharset(pattern[parameterEndPosition+1], parameterDelimiterChars):
+ parameterEndPosition++
+ }
+
+ // find constraint part if exists in the parameter part and remove it
+ if parameterEndPosition > 0 {
+ parameterConstraintStart = findNextNonEscapedCharsetPosition(pattern[0:parameterEndPosition], parameterConstraintStartChars)
+ parameterConstraintEnd = findLastCharsetPosition(pattern[0:parameterEndPosition+1], parameterConstraintEndChars)
+ }
+
+ // cut params part
+ processedPart := pattern[0 : parameterEndPosition+1]
+ paramName := RemoveEscapeChar(GetTrimmedParam(processedPart))
+
+ // Check has constraint
+ var constraints []*Constraint
+
+ if hasConstraint := parameterConstraintStart != -1 && parameterConstraintEnd != -1; hasConstraint {
+ constraintString := pattern[parameterConstraintStart+1 : parameterConstraintEnd]
+ userConstraints := splitNonEscaped(constraintString, string(parameterConstraintSeparatorChars))
+ constraints = make([]*Constraint, 0, len(userConstraints))
+
+ for _, c := range userConstraints {
+ start := findNextNonEscapedCharsetPosition(c, parameterConstraintDataStartChars)
+ end := findLastCharsetPosition(c, parameterConstraintDataEndChars)
+
+ // Assign constraint
+ if start != -1 && end != -1 {
+ constraint := &Constraint{
+ ID: getParamConstraintType(c[:start]),
+ }
+
+ // remove escapes from data
+ if constraint.ID != regexConstraint {
+ constraint.Data = splitNonEscaped(c[start+1:end], string(parameterConstraintDataSeparatorChars))
+ if len(constraint.Data) == 1 {
+ constraint.Data[0] = RemoveEscapeChar(constraint.Data[0])
+ } else if len(constraint.Data) == 2 { // This is fine, we simply expect two parts
+ constraint.Data[0] = RemoveEscapeChar(constraint.Data[0])
+ constraint.Data[1] = RemoveEscapeChar(constraint.Data[1])
+ }
+ }
+
+ // Precompile regex if has regex constraint
+ if constraint.ID == regexConstraint {
+ constraint.Data = []string{c[start+1 : end]}
+ constraint.RegexCompiler = regexp.MustCompile(constraint.Data[0])
+ }
+
+ constraints = append(constraints, constraint)
+ } else {
+ constraints = append(constraints, &Constraint{
+ ID: getParamConstraintType(c),
+ Data: []string{},
+ })
+ }
+ }
+
+ paramName = RemoveEscapeChar(GetTrimmedParam(pattern[0:parameterConstraintStart]))
+ }
+
+ // add access iterator to wildcard and plus
+ if isWildCard {
+ routeParser.wildCardCount++
+ paramName += strconv.Itoa(routeParser.wildCardCount)
+ } else if isPlusParam {
+ routeParser.plusCount++
+ paramName += strconv.Itoa(routeParser.plusCount)
+ }
+
+ segment := &routeSegment{
+ ParamName: paramName,
+ IsParam: true,
+ IsOptional: isWildCard || pattern[parameterEndPosition] == optionalParam,
+ IsGreedy: isWildCard || isPlusParam,
+ }
+
+ if len(constraints) > 0 {
+ segment.Constraints = constraints
+ }
+
+ return processedPart, segment
+}
+
+// isInCharset reports whether the given character is in the charset list
+func isInCharset(searchChar byte, charset []byte) bool {
+ for _, char := range charset {
+ if char == searchChar {
+ return true
+ }
+ }
+ return false
+}
+
+// findNextCharsetPosition searches for the next position of any char from the charset
+func findNextCharsetPosition(search string, charset []byte) int {
+ nextPosition := -1
+ for _, char := range charset {
+ if pos := strings.IndexByte(search, char); pos != -1 && (pos < nextPosition || nextPosition == -1) {
+ nextPosition = pos
+ }
+ }
+
+ return nextPosition
+}
+
+// findLastCharsetPosition searches for the last position of any char from the charset
+func findLastCharsetPosition(search string, charset []byte) int {
+ lastPosition := -1
+ for _, char := range charset {
+ if pos := strings.LastIndexByte(search, char); pos != -1 && (pos < lastPosition || lastPosition == -1) {
+ lastPosition = pos
+ }
+ }
+
+ return lastPosition
+}
+
+// findNextCharsetPositionConstraint searches for the next position of any char from the charset;
+// unlike findNextCharsetPosition, it respects the constraint start/end chars when parsing the route pattern
+func findNextCharsetPositionConstraint(search string, charset []byte) int {
+ constraintStart := findNextNonEscapedCharsetPosition(search, parameterConstraintStartChars)
+ constraintEnd := findNextNonEscapedCharsetPosition(search, parameterConstraintEndChars)
+ nextPosition := -1
+
+ for _, char := range charset {
+ pos := strings.IndexByte(search, char)
+
+ if pos != -1 && (pos < nextPosition || nextPosition == -1) {
+ if (pos > constraintStart && pos > constraintEnd) || (pos < constraintStart && pos < constraintEnd) {
+ nextPosition = pos
+ }
+ }
+ }
+
+ return nextPosition
+}
+
+// findNextNonEscapedCharsetPosition searches for the next char position from the charset, skipping escaped characters
+func findNextNonEscapedCharsetPosition(search string, charset []byte) int {
+ pos := findNextCharsetPosition(search, charset)
+ for pos > 0 && search[pos-1] == escapeChar {
+ if len(search) == pos+1 {
+ // escaped character is at the end
+ return -1
+ }
+ nextPossiblePos := findNextCharsetPosition(search[pos+1:], charset)
+ if nextPossiblePos == -1 {
+ return -1
+ }
+ // the previous character is taken into consideration
+ pos = nextPossiblePos + pos + 1
+ }
+
+ return pos
+}
+
+// splitNonEscaped slices s into all substrings separated by sep and returns a slice of the substrings between those separators.
+// It also takes care of the escape char when splitting.
+func splitNonEscaped(s, sep string) []string {
+ var result []string
+ i := findNextNonEscapedCharsetPosition(s, []byte(sep))
+
+ for i > -1 {
+ result = append(result, s[:i])
+ s = s[i+len(sep):]
+ i = findNextNonEscapedCharsetPosition(s, []byte(sep))
+ }
+
+ return append(result, s)
+}
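+
+// Illustrative sketch (not part of the upstream source), assuming escapeChar
+// is the backslash:
+//
+//	splitNonEscaped(`a,b\,c`, ",") // => []string{"a", `b\,c`}
+//
+// The escaped comma is not treated as a separator; callers strip the escape
+// char afterwards via RemoveEscapeChar.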
+
+// getMatch parses the passed url, tries to match it against the route segments, and determines the parameter positions
+func (routeParser *routeParser) getMatch(detectionPath, path string, params *[maxParams]string, partialCheck bool) bool { //nolint: revive // Accepting a bool param is fine here
+ var i, paramsIterator, partLen int
+ for _, segment := range routeParser.segs {
+ partLen = len(detectionPath)
+ // check const segment
+ if !segment.IsParam {
+ i = segment.Length
+ // either the part is optional or the const part must match the given string
+ // check if the end of the segment is an optional slash
+ if segment.HasOptionalSlash && partLen == i-1 && detectionPath == segment.Const[:i-1] {
+ i--
+ } else if !(i <= partLen && detectionPath[:i] == segment.Const) {
+ return false
+ }
+ } else {
+ // determine parameter length
+ i = findParamLen(detectionPath, segment)
+ if !segment.IsOptional && i == 0 {
+ return false
+ }
+ // record the parameter value at the current position
+ params[paramsIterator] = path[:i]
+
+ if !(segment.IsOptional && i == 0) {
+ // check constraint
+ for _, c := range segment.Constraints {
+ if matched := c.CheckConstraint(params[paramsIterator]); !matched {
+ return false
+ }
+ }
+ }
+
+ paramsIterator++
+ }
+
+ // strip the matched part from the string
+ if partLen > 0 {
+ detectionPath, path = detectionPath[i:], path[i:]
+ }
+ }
+ if detectionPath != "" && !partialCheck {
+ return false
+ }
+
+ return true
+}
+
+// findParamLen implements the expressjs wildcard behavior (right-to-left greedy):
+// it looks at the other segments and takes what is left for the wildcard, from right to left
+func findParamLen(s string, segment *routeSegment) int {
+ if segment.IsLast {
+ return findParamLenForLastSegment(s, segment)
+ }
+
+ if segment.Length != 0 && len(s) >= segment.Length {
+ return segment.Length
+ } else if segment.IsGreedy {
+ // Search the parameters until the next constant part
+ // special logic for greedy params
+ searchCount := strings.Count(s, segment.ComparePart)
+ if searchCount > 1 {
+ return findGreedyParamLen(s, searchCount, segment)
+ }
+ }
+
+ if len(segment.ComparePart) == 1 {
+ if constPosition := strings.IndexByte(s, segment.ComparePart[0]); constPosition != -1 {
+ return constPosition
+ }
+ } else if constPosition := strings.Index(s, segment.ComparePart); constPosition != -1 {
+ // if the compare part was found, but contains a slash although this part is not greedy, then it must not match
+ // example: /api/:param/fixedEnd -> path: /api/123/456/fixedEnd = no match , /api/123/fixedEnd = match
+ if !segment.IsGreedy && strings.IndexByte(s[:constPosition], slashDelimiter) != -1 {
+ return 0
+ }
+ return constPosition
+ }
+
+ return len(s)
+}
+
+// findParamLenForLastSegment gets the length of the parameter if it is the last segment
+func findParamLenForLastSegment(s string, seg *routeSegment) int {
+ if !seg.IsGreedy {
+ if i := strings.IndexByte(s, slashDelimiter); i != -1 {
+ return i
+ }
+ }
+
+ return len(s)
+}
+
+// findGreedyParamLen gets the length of the parameter for greedy segments from right to left
+func findGreedyParamLen(s string, searchCount int, segment *routeSegment) int {
+ // check all from right to left segments
+ for i := segment.PartCount; i > 0 && searchCount > 0; i-- {
+ searchCount--
+ if constPosition := strings.LastIndex(s, segment.ComparePart); constPosition != -1 {
+ s = s[:constPosition]
+ } else {
+ break
+ }
+ }
+
+ return len(s)
+}
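+
+// Illustrative sketch (not part of the upstream source): for the route
+// "/files/*/delete" and the path "/files/a/b/delete", the wildcard length is
+// resolved right to left, so the "*" captures "a/b" and "/delete" is left
+// for the trailing constant segment.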
+
+// GetTrimmedParam trims the ':' & '?' from a string
+func GetTrimmedParam(param string) string {
+ start := 0
+ end := len(param)
+
+ if end == 0 || param[start] != paramStarterChar { // is not a param
+ return param
+ }
+ start++
+ if param[end-1] == optionalParam { // is ?
+ end--
+ }
+
+ return param[start:end]
+}
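+
+// Illustrative sketch (not part of the upstream source):
+//
+//	GetTrimmedParam(":name?") // => "name"
+//	GetTrimmedParam("name")   // => "name" (no leading ':', returned unchanged)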
+
+// RemoveEscapeChar removes the escape characters
+func RemoveEscapeChar(word string) string {
+ if strings.IndexByte(word, escapeChar) != -1 {
+ return strings.ReplaceAll(word, string(escapeChar), "")
+ }
+ return word
+}
+
+func getParamConstraintType(constraintPart string) TypeConstraint {
+ switch constraintPart {
+ case ConstraintInt:
+ return intConstraint
+ case ConstraintBool:
+ return boolConstraint
+ case ConstraintFloat:
+ return floatConstraint
+ case ConstraintAlpha:
+ return alphaConstraint
+ case ConstraintGuid:
+ return guidConstraint
+ case ConstraintMinLen, ConstraintMinLenLower:
+ return minLenConstraint
+ case ConstraintMaxLen, ConstraintMaxLenLower:
+ return maxLenConstraint
+ case ConstraintLen:
+ return lenConstraint
+ case ConstraintBetweenLen, ConstraintBetweenLenLower:
+ return betweenLenConstraint
+ case ConstraintMin:
+ return minConstraint
+ case ConstraintMax:
+ return maxConstraint
+ case ConstraintRange:
+ return rangeConstraint
+ case ConstraintDatetime:
+ return datetimeConstraint
+ case ConstraintRegex:
+ return regexConstraint
+ default:
+ return noConstraint
+ }
+}
+
+//nolint:errcheck // TODO: Properly check _all_ errors in here, log them & immediately return
+func (c *Constraint) CheckConstraint(param string) bool {
+ var err error
+ var num int
+
+ // check that the required constraint data exists
+ needOneData := []TypeConstraint{minLenConstraint, maxLenConstraint, lenConstraint, minConstraint, maxConstraint, datetimeConstraint, regexConstraint}
+ needTwoData := []TypeConstraint{betweenLenConstraint, rangeConstraint}
+
+ for _, data := range needOneData {
+ if c.ID == data && len(c.Data) == 0 {
+ return false
+ }
+ }
+
+ for _, data := range needTwoData {
+ if c.ID == data && len(c.Data) < 2 {
+ return false
+ }
+ }
+
+ // check constraints
+ switch c.ID {
+ case noConstraint:
+ // Nothing to check
+ case intConstraint:
+ _, err = strconv.Atoi(param)
+ case boolConstraint:
+ _, err = strconv.ParseBool(param)
+ case floatConstraint:
+ _, err = strconv.ParseFloat(param, 32)
+ case alphaConstraint:
+ for _, r := range param {
+ if !unicode.IsLetter(r) {
+ return false
+ }
+ }
+ case guidConstraint:
+ _, err = uuid.Parse(param)
+ case minLenConstraint:
+ data, _ := strconv.Atoi(c.Data[0])
+
+ if len(param) < data {
+ return false
+ }
+ case maxLenConstraint:
+ data, _ := strconv.Atoi(c.Data[0])
+
+ if len(param) > data {
+ return false
+ }
+ case lenConstraint:
+ data, _ := strconv.Atoi(c.Data[0])
+
+ if len(param) != data {
+ return false
+ }
+ case betweenLenConstraint:
+ data, _ := strconv.Atoi(c.Data[0])
+ data2, _ := strconv.Atoi(c.Data[1])
+ length := len(param)
+ if length < data || length > data2 {
+ return false
+ }
+ case minConstraint:
+ data, _ := strconv.Atoi(c.Data[0])
+ num, err = strconv.Atoi(param)
+
+ if num < data {
+ return false
+ }
+ case maxConstraint:
+ data, _ := strconv.Atoi(c.Data[0])
+ num, err = strconv.Atoi(param)
+
+ if num > data {
+ return false
+ }
+ case rangeConstraint:
+ data, _ := strconv.Atoi(c.Data[0])
+ data2, _ := strconv.Atoi(c.Data[1])
+ num, err = strconv.Atoi(param)
+
+ if num < data || num > data2 {
+ return false
+ }
+ case datetimeConstraint:
+ _, err = time.Parse(c.Data[0], param)
+ case regexConstraint:
+ if match := c.RegexCompiler.MatchString(param); !match {
+ return false
+ }
+ }
+
+ return err == nil
+}
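+
+// Illustrative sketch (not part of the upstream source): a manually built
+// constraint equivalent to the ":param<minLen(3)>" route syntax.
+//
+//	c := &Constraint{ID: minLenConstraint, Data: []string{"3"}}
+//	c.CheckConstraint("ab")  // => false (shorter than 3)
+//	c.CheckConstraint("abc") // => true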
diff --git a/vendor/github.com/gofiber/fiber/v2/prefork.go b/vendor/github.com/gofiber/fiber/v2/prefork.go
new file mode 100644
index 0000000..3f64012
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/prefork.go
@@ -0,0 +1,179 @@
+package fiber
+
+import (
+ "crypto/tls"
+ "errors"
+ "fmt"
+ "os"
+ "os/exec"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync/atomic"
+ "time"
+
+ "github.com/valyala/fasthttp/reuseport"
+
+ "github.com/gofiber/fiber/v2/log"
+)
+
+const (
+ envPreforkChildKey = "FIBER_PREFORK_CHILD"
+ envPreforkChildVal = "1"
+)
+
+var (
+ testPreforkMaster = false
+ testOnPrefork = false
+)
+
+// IsChild determines if the current process is a child of Prefork
+func IsChild() bool {
+ return os.Getenv(envPreforkChildKey) == envPreforkChildVal
+}
+
+// prefork manages child processes to make use of the OS REUSEPORT or REUSEADDR feature
+func (app *App) prefork(network, addr string, tlsConfig *tls.Config) error {
+ // 👶 child process 👶
+ if IsChild() {
+ // use 1 cpu core per child process
+ runtime.GOMAXPROCS(1)
+ // Linux will use SO_REUSEPORT and Windows falls back to SO_REUSEADDR
+ // Only tcp4 or tcp6 is supported when preforking; plain dual-stack "tcp" is not
+ ln, err := reuseport.Listen(network, addr)
+ if err != nil {
+ if !app.config.DisableStartupMessage {
+ const sleepDuration = 100 * time.Millisecond
+ time.Sleep(sleepDuration) // avoid colliding with startup message
+ }
+ return fmt.Errorf("prefork: %w", err)
+ }
+ // wrap a tls config around the listener if provided
+ if tlsConfig != nil {
+ ln = tls.NewListener(ln, tlsConfig)
+ }
+
+ // kill current child proc when master exits
+ go watchMaster()
+
+ // prepare the server for the start
+ app.startupProcess()
+
+ // listen for incoming connections
+ return app.server.Serve(ln)
+ }
+
+ // 👮 master process 👮
+ type child struct {
+ pid int
+ err error
+ }
+ // create variables
+ max := runtime.GOMAXPROCS(0)
+ childs := make(map[int]*exec.Cmd)
+ channel := make(chan child, max)
+
+ // kill child procs when master exits
+ defer func() {
+ for _, proc := range childs {
+ if err := proc.Process.Kill(); err != nil {
+ if !errors.Is(err, os.ErrProcessDone) {
+ log.Errorf("prefork: failed to kill child: %v", err)
+ }
+ }
+ }
+ }()
+
+ // collect child pids
+ var pids []string
+
+ // launch child procs
+ for i := 0; i < max; i++ {
+ cmd := exec.Command(os.Args[0], os.Args[1:]...) //nolint:gosec // It's fine to launch the same process again
+ if testPreforkMaster {
+ // When testing the prefork master,
+ // just start the child process with a dummy cmd,
+ // which will exit soon
+ cmd = dummyCmd()
+ }
+ cmd.Stdout = os.Stdout
+ cmd.Stderr = os.Stderr
+
+ // add fiber prefork child flag into child proc env
+ cmd.Env = append(os.Environ(),
+ fmt.Sprintf("%s=%s", envPreforkChildKey, envPreforkChildVal),
+ )
+ if err := cmd.Start(); err != nil {
+ return fmt.Errorf("failed to start a child prefork process, error: %w", err)
+ }
+
+ // store child process
+ pid := cmd.Process.Pid
+ childs[pid] = cmd
+ pids = append(pids, strconv.Itoa(pid))
+
+ // execute fork hook
+ if app.hooks != nil {
+ if testOnPrefork {
+ app.hooks.executeOnForkHooks(dummyPid)
+ } else {
+ app.hooks.executeOnForkHooks(pid)
+ }
+ }
+
+ // notify master if child crashes
+ go func() {
+ channel <- child{pid, cmd.Wait()}
+ }()
+ }
+
+ // Run onListen hooks
+ // Hooks have to be run here, unlike in non-prefork mode, because they must run in the child or the master process
+ app.runOnListenHooks(app.prepareListenData(addr, tlsConfig != nil))
+
+ // Print startup message
+ if !app.config.DisableStartupMessage {
+ app.startupMessage(addr, tlsConfig != nil, ","+strings.Join(pids, ","))
+ }
+
+ // return error if child crashes
+ return (<-channel).err
+}
+
+// watchMaster runs in the child and exits the child when the master process is gone
+func watchMaster() {
+ if runtime.GOOS == "windows" {
+ // finds parent process,
+ // and waits for it to exit
+ p, err := os.FindProcess(os.Getppid())
+ if err == nil {
+ _, _ = p.Wait() //nolint:errcheck // It is fine to ignore the error here
+ }
+ os.Exit(1) //nolint:revive // Calling os.Exit is fine here in the prefork
+ }
+ // on other platforms, poll the parent PID: if it is equal to 1 (the init process ID),
+ // the master process has exited
+ const watchInterval = 500 * time.Millisecond
+ for range time.NewTicker(watchInterval).C {
+ if os.Getppid() == 1 {
+ os.Exit(1) //nolint:revive // Calling os.Exit is fine here in the prefork
+ }
+ }
+}
+
+var (
+ dummyPid = 1
+ dummyChildCmd atomic.Value
+)
+
+// dummyCmd is for internal prefork testing
+func dummyCmd() *exec.Cmd {
+ command := "go"
+ if storeCommand := dummyChildCmd.Load(); storeCommand != nil && storeCommand != "" {
+ command = storeCommand.(string) //nolint:forcetypeassert,errcheck // We always store a string in here
+ }
+ if runtime.GOOS == "windows" {
+ return exec.Command("cmd", "/C", command, "version")
+ }
+ return exec.Command(command, "version")
+}
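+
+// Illustrative sketch (not part of the upstream source): prefork is enabled
+// through the app config; the same binary is then re-executed as worker
+// processes, which can detect their role via IsChild:
+//
+//	app := fiber.New(fiber.Config{Prefork: true})
+//	if fiber.IsChild() {
+//		log.Printf("child worker, pid %d", os.Getpid())
+//	}
+//	log.Fatal(app.Listen(":3000"))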
diff --git a/vendor/github.com/gofiber/fiber/v2/router.go b/vendor/github.com/gofiber/fiber/v2/router.go
new file mode 100644
index 0000000..4afa741
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/router.go
@@ -0,0 +1,518 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package fiber
+
+import (
+ "fmt"
+ "html"
+ "sort"
+ "strconv"
+ "strings"
+ "sync/atomic"
+ "time"
+
+ "github.com/gofiber/fiber/v2/utils"
+
+ "github.com/valyala/fasthttp"
+)
+
+// Router defines the interface shared by all routers, including the app and group routers.
+type Router interface {
+ Use(args ...interface{}) Router
+
+ Get(path string, handlers ...Handler) Router
+ Head(path string, handlers ...Handler) Router
+ Post(path string, handlers ...Handler) Router
+ Put(path string, handlers ...Handler) Router
+ Delete(path string, handlers ...Handler) Router
+ Connect(path string, handlers ...Handler) Router
+ Options(path string, handlers ...Handler) Router
+ Trace(path string, handlers ...Handler) Router
+ Patch(path string, handlers ...Handler) Router
+
+ Add(method, path string, handlers ...Handler) Router
+ Static(prefix, root string, config ...Static) Router
+ All(path string, handlers ...Handler) Router
+
+ Group(prefix string, handlers ...Handler) Router
+
+ Route(prefix string, fn func(router Router), name ...string) Router
+
+ Mount(prefix string, fiber *App) Router
+
+ Name(name string) Router
+}
+
+// Route is a struct that holds all metadata for each registered handler.
+type Route struct {
+ // ### important: always keep in sync with the copy method "app.copyRoute" ###
+ // Data for routing
+ pos uint32 // Position in stack -> important for sorting the matched routes
+ use bool // USE matches path prefixes
+ mount bool // Indicates a mounted app on a specific route
+ star bool // Path equals '*'
+ root bool // Path equals '/'
+ path string // Prettified path
+ routeParser routeParser // Parameter parser
+ group *Group // Group instance, used for routes in groups
+
+ // Public fields
+ Method string `json:"method"` // HTTP method
+ Name string `json:"name"` // Route's name
+ //nolint:revive // Having both a Path (uppercase) and a path (lowercase) is fine
+ Path string `json:"path"` // Original registered route path
+ Params []string `json:"params"` // Case-sensitive param keys
+ Handlers []Handler `json:"-"` // Ctx handlers
+}
+
+func (r *Route) match(detectionPath, path string, params *[maxParams]string) bool {
+ // root detectionPath check
+ if r.root && detectionPath == "/" {
+ return true
+ // '*' wildcard matches any detectionPath
+ } else if r.star {
+ if len(path) > 1 {
+ params[0] = path[1:]
+ } else {
+ params[0] = ""
+ }
+ return true
+ }
+ // Does this route have parameters
+ if len(r.Params) > 0 {
+ // Match params
+ if match := r.routeParser.getMatch(detectionPath, path, params, r.use); match {
+ // Get params from the path detectionPath
+ return match
+ }
+ }
+ // Is this route a Middleware?
+ if r.use {
+ // A single slash will match, or the route path is a prefix of the detectionPath
+ if r.root || strings.HasPrefix(detectionPath, r.path) {
+ return true
+ }
+ // Check for a simple detectionPath match
+ } else if len(r.path) == len(detectionPath) && r.path == detectionPath {
+ return true
+ }
+ // No match
+ return false
+}
+
+func (app *App) next(c *Ctx) (bool, error) {
+ // Get stack length
+ tree, ok := app.treeStack[c.methodINT][c.treePath]
+ if !ok {
+ tree = app.treeStack[c.methodINT][""]
+ }
+ lenTree := len(tree) - 1
+
+ // Loop over the route stack starting from previous index
+ for c.indexRoute < lenTree {
+ // Increment route index
+ c.indexRoute++
+
+ // Get *Route
+ route := tree[c.indexRoute]
+
+ var match bool
+ var err error
+ // skip for mounted apps
+ if route.mount {
+ continue
+ }
+
+ // Check if it matches the request path
+ match = route.match(c.detectionPath, c.path, &c.values)
+ if !match {
+ // No match, next route
+ continue
+ }
+ // Pass route reference and param values
+ c.route = route
+
+ // A non-middleware (non-use) handler matched
+ if !c.matched && !route.use {
+ c.matched = true
+ }
+
+ // Execute first handler of route
+ c.indexHandler = 0
+ if len(route.Handlers) > 0 {
+ err = route.Handlers[0](c)
+ }
+ return match, err // Stop scanning the stack
+ }
+
+ // If c.Next() does not match, return 404
+ err := NewError(StatusNotFound, "Cannot "+c.method+" "+html.EscapeString(c.pathOriginal))
+ if !c.matched && app.methodExist(c) {
+ // If no match, scan stack again if other methods match the request
+ // Moved from app.handler because middleware may break the route chain
+ err = ErrMethodNotAllowed
+ }
+ return false, err
+}
+
+func (app *App) handler(rctx *fasthttp.RequestCtx) { //revive:disable-line:confusing-naming // Having both a Handler() (uppercase) and a handler() (lowercase) is fine. TODO: Use nolint:revive directive instead. See https://github.com/golangci/golangci-lint/issues/3476
+ // Acquire Ctx with fasthttp request from pool
+ c := app.AcquireCtx(rctx)
+ defer app.ReleaseCtx(c)
+
+ // handle invalid http method directly
+ if c.methodINT == -1 {
+ _ = c.Status(StatusBadRequest).SendString("Invalid http method") //nolint:errcheck // It is fine to ignore the error here
+ return
+ }
+
+ // Find match in stack
+ match, err := app.next(c)
+ if err != nil {
+ if catch := c.app.ErrorHandler(c, err); catch != nil {
+ _ = c.SendStatus(StatusInternalServerError) //nolint:errcheck // It is fine to ignore the error here
+ }
+ // TODO: Do we need to return here?
+ }
+ // Generate ETag if enabled
+ if match && app.config.ETag {
+ setETag(c, false)
+ }
+}
+
+func (app *App) addPrefixToRoute(prefix string, route *Route) *Route {
+ prefixedPath := getGroupPath(prefix, route.Path)
+ prettyPath := prefixedPath
+ // Case-insensitive routing: convert the path to lowercase
+ if !app.config.CaseSensitive {
+ prettyPath = utils.ToLower(prettyPath)
+ }
+ // Non-strict routing: remove trailing slashes
+ if !app.config.StrictRouting && len(prettyPath) > 1 {
+ prettyPath = utils.TrimRight(prettyPath, '/')
+ }
+
+ route.Path = prefixedPath
+ route.path = RemoveEscapeChar(prettyPath)
+ route.routeParser = parseRoute(prettyPath)
+ route.root = false
+ route.star = false
+
+ return route
+}
+
+func (*App) copyRoute(route *Route) *Route {
+ return &Route{
+ // Router booleans
+ use: route.use,
+ mount: route.mount,
+ star: route.star,
+ root: route.root,
+
+ // Path data
+ path: route.path,
+ routeParser: route.routeParser,
+
+ // misc
+ pos: route.pos,
+
+ // Public data
+ Path: route.Path,
+ Params: route.Params,
+ Name: route.Name,
+ Method: route.Method,
+ Handlers: route.Handlers,
+ }
+}
+
+func (app *App) register(method, pathRaw string, group *Group, handlers ...Handler) {
+ // Uppercase HTTP methods
+ method = utils.ToUpper(method)
+ // Check if the HTTP method is valid unless it's USE
+ if method != methodUse && app.methodInt(method) == -1 {
+ panic(fmt.Sprintf("add: invalid http method %s\n", method))
+ }
+ // is mounted app
+ isMount := group != nil && group.app != app
+ // A route requires at least one ctx handler
+ if len(handlers) == 0 && !isMount {
+ panic(fmt.Sprintf("missing handler in route: %s\n", pathRaw))
+ }
+ // Cannot have an empty path
+ if pathRaw == "" {
+ pathRaw = "/"
+ }
+ // Path always starts with a '/'
+ if pathRaw[0] != '/' {
+ pathRaw = "/" + pathRaw
+ }
+ // Create a stripped path to handle case-insensitive routing and trailing slashes
+ pathPretty := pathRaw
+ // Case-insensitive routing: convert the path to lowercase
+ if !app.config.CaseSensitive {
+ pathPretty = utils.ToLower(pathPretty)
+ }
+ // Non-strict routing: remove trailing slashes
+ if !app.config.StrictRouting && len(pathPretty) > 1 {
+ pathPretty = utils.TrimRight(pathPretty, '/')
+ }
+ // Is layer a middleware?
+ isUse := method == methodUse
+ // Is path a direct wildcard?
+ isStar := pathPretty == "/*"
+ // Is path a root slash?
+ isRoot := pathPretty == "/"
+ // Parse path parameters
+ parsedRaw := parseRoute(pathRaw)
+ parsedPretty := parseRoute(pathPretty)
+
+ // Create route metadata without pointer
+ route := Route{
+ // Router booleans
+ use: isUse,
+ mount: isMount,
+ star: isStar,
+ root: isRoot,
+
+ // Path data
+ path: RemoveEscapeChar(pathPretty),
+ routeParser: parsedPretty,
+ Params: parsedRaw.params,
+
+ // Group data
+ group: group,
+
+ // Public data
+ Path: pathRaw,
+ Method: method,
+ Handlers: handlers,
+ }
+ // Increment global handler count
+ atomic.AddUint32(&app.handlersCount, uint32(len(handlers)))
+
+ // Middleware route matches all HTTP methods
+ if isUse {
+ // Add route to all HTTP methods stack
+ for _, m := range app.config.RequestMethods {
+ // Create a route copy to avoid duplicates during compression
+ r := route
+ app.addRoute(m, &r, isMount)
+ }
+ } else {
+ // Add route to stack
+ app.addRoute(method, &route, isMount)
+ }
+}
+
+func (app *App) registerStatic(prefix, root string, config ...Static) {
+ // For security, we want to restrict to the current working directory.
+ if root == "" {
+ root = "."
+ }
+ // Cannot have an empty prefix
+ if prefix == "" {
+ prefix = "/"
+ }
+ // Prefix always starts with a '/' or '*'
+ if prefix[0] != '/' {
+ prefix = "/" + prefix
+ }
+ // case-insensitive routing: convert the prefix to lowercase
+ if !app.config.CaseSensitive {
+ prefix = utils.ToLower(prefix)
+ }
+ // Strip trailing slashes from the root path
+ if len(root) > 0 && root[len(root)-1] == '/' {
+ root = root[:len(root)-1]
+ }
+ // Is prefix a direct wildcard?
+ isStar := prefix == "/*"
+ // Is prefix a root slash?
+ isRoot := prefix == "/"
+ // Is prefix a partial wildcard?
+ if strings.Contains(prefix, "*") {
+ // /john* -> /john
+ isStar = true
+ prefix = strings.Split(prefix, "*")[0]
+ // Fix this later
+ }
+ prefixLen := len(prefix)
+ if prefixLen > 1 && prefix[prefixLen-1:] == "/" {
+ // /john/ -> /john
+ prefixLen--
+ prefix = prefix[:prefixLen]
+ }
+ const cacheDuration = 10 * time.Second
+ // Fileserver settings
+ fs := &fasthttp.FS{
+ Root: root,
+ AllowEmptyRoot: true,
+ GenerateIndexPages: false,
+ AcceptByteRange: false,
+ Compress: false,
+ CompressedFileSuffix: app.config.CompressedFileSuffix,
+ CacheDuration: cacheDuration,
+ IndexNames: []string{"index.html"},
+ PathRewrite: func(fctx *fasthttp.RequestCtx) []byte {
+ path := fctx.Path()
+ if len(path) >= prefixLen {
+ if isStar && app.getString(path[0:prefixLen]) == prefix {
+ path = append(path[0:0], '/')
+ } else {
+ path = path[prefixLen:]
+ if len(path) == 0 || path[len(path)-1] != '/' {
+ path = append(path, '/')
+ }
+ }
+ }
+ if len(path) > 0 && path[0] != '/' {
+ path = append([]byte("/"), path...)
+ }
+ return path
+ },
+ PathNotFound: func(fctx *fasthttp.RequestCtx) {
+ fctx.Response.SetStatusCode(StatusNotFound)
+ },
+ }
+
+ // Set config if provided
+ var cacheControlValue string
+ var modifyResponse Handler
+ if len(config) > 0 {
+ maxAge := config[0].MaxAge
+ if maxAge > 0 {
+ cacheControlValue = "public, max-age=" + strconv.Itoa(maxAge)
+ }
+ fs.CacheDuration = config[0].CacheDuration
+ fs.Compress = config[0].Compress
+ fs.AcceptByteRange = config[0].ByteRange
+ fs.GenerateIndexPages = config[0].Browse
+ if config[0].Index != "" {
+ fs.IndexNames = []string{config[0].Index}
+ }
+ modifyResponse = config[0].ModifyResponse
+ }
+ fileHandler := fs.NewRequestHandler()
+ handler := func(c *Ctx) error {
+ // Don't execute middleware if Next returns true
+ if len(config) != 0 && config[0].Next != nil && config[0].Next(c) {
+ return c.Next()
+ }
+ // Serve file
+ fileHandler(c.fasthttp)
+ // Sets the response Content-Disposition header to attachment if the Download option is true
+ if len(config) > 0 && config[0].Download {
+ c.Attachment()
+ }
+ // Return request if found and not forbidden
+ status := c.fasthttp.Response.StatusCode()
+ if status != StatusNotFound && status != StatusForbidden {
+ if len(cacheControlValue) > 0 {
+ c.fasthttp.Response.Header.Set(HeaderCacheControl, cacheControlValue)
+ }
+ if modifyResponse != nil {
+ return modifyResponse(c)
+ }
+ return nil
+ }
+ // Reset response to default
+ c.fasthttp.SetContentType("") // Issue #420
+ c.fasthttp.Response.SetStatusCode(StatusOK)
+ c.fasthttp.Response.SetBodyString("")
+ // Next middleware
+ return c.Next()
+ }
+
+ // Create route metadata without pointer
+ route := Route{
+ // Router booleans
+ use: true,
+ root: isRoot,
+ path: prefix,
+ // Public data
+ Method: MethodGet,
+ Path: prefix,
+ Handlers: []Handler{handler},
+ }
+ // Increment global handler count
+ atomic.AddUint32(&app.handlersCount, 1)
+ // Add route to stack
+ app.addRoute(MethodGet, &route)
+ // Add HEAD route
+ app.addRoute(MethodHead, &route)
+}
+
+func (app *App) addRoute(method string, route *Route, isMounted ...bool) {
+ // Check mounted routes
+ var mounted bool
+ if len(isMounted) > 0 {
+ mounted = isMounted[0]
+ }
+
+ // Get unique HTTP method identifier
+ m := app.methodInt(method)
+
+ // prevent identical route registration
+ l := len(app.stack[m])
+ if l > 0 && app.stack[m][l-1].Path == route.Path && route.use == app.stack[m][l-1].use && !route.mount && !app.stack[m][l-1].mount {
+ preRoute := app.stack[m][l-1]
+ preRoute.Handlers = append(preRoute.Handlers, route.Handlers...)
+ } else {
+ // Increment global route position
+ route.pos = atomic.AddUint32(&app.routesCount, 1)
+ route.Method = method
+ // Add route to the stack
+ app.stack[m] = append(app.stack[m], route)
+ app.routesRefreshed = true
+ }
+
+ // Execute onRoute hooks & change latestRoute if not adding mounted route
+ if !mounted {
+ app.mutex.Lock()
+ app.latestRoute = route
+ if err := app.hooks.executeOnRouteHooks(*route); err != nil {
+ panic(err)
+ }
+ app.mutex.Unlock()
+ }
+}
+
+// buildTree builds the prefix tree from the previously registered routes
+func (app *App) buildTree() *App {
+ if !app.routesRefreshed {
+ return app
+ }
+
+ // loop all the methods and stacks and create the prefix tree
+ for m := range app.config.RequestMethods {
+ tsMap := make(map[string][]*Route)
+ for _, route := range app.stack[m] {
+ treePath := ""
+ if len(route.routeParser.segs) > 0 && len(route.routeParser.segs[0].Const) >= 3 {
+ treePath = route.routeParser.segs[0].Const[:3]
+ }
+ // create tree stack
+ tsMap[treePath] = append(tsMap[treePath], route)
+ }
+ app.treeStack[m] = tsMap
+ }
+
+ // loop the methods and tree stacks and add global stack and sort everything
+ for m := range app.config.RequestMethods {
+ tsMap := app.treeStack[m]
+ for treePart := range tsMap {
+ if treePart != "" {
+ // merge global tree routes in current tree stack
+ tsMap[treePart] = uniqueRouteStack(append(tsMap[treePart], tsMap[""]...))
+ }
+ // sort tree slices with the positions
+ slc := tsMap[treePart]
+ sort.Slice(slc, func(i, j int) bool { return slc[i].pos < slc[j].pos })
+ }
+ }
+ app.routesRefreshed = false
+
+ return app
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/README.md b/vendor/github.com/gofiber/fiber/v2/utils/README.md
new file mode 100644
index 0000000..0276ff3
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/README.md
@@ -0,0 +1,90 @@
+A collection of common functions but with better performance, less allocations and no dependencies created for [Fiber](https://github.com/gofiber/fiber).
+
+```go
+// go test -benchmem -run=^$ -bench=Benchmark_ -count=2
+
+Benchmark_ToLowerBytes/fiber-16 42847654 25.7 ns/op 0 B/op 0 allocs/op
+Benchmark_ToLowerBytes/fiber-16 46143196 25.7 ns/op 0 B/op 0 allocs/op
+Benchmark_ToLowerBytes/default-16 17387322 67.4 ns/op 48 B/op 1 allocs/op
+Benchmark_ToLowerBytes/default-16 17906491 67.4 ns/op 48 B/op 1 allocs/op
+
+Benchmark_ToUpperBytes/fiber-16 46143729 25.7 ns/op 0 B/op 0 allocs/op
+Benchmark_ToUpperBytes/fiber-16 47989250 25.6 ns/op 0 B/op 0 allocs/op
+Benchmark_ToUpperBytes/default-16 15580854 76.7 ns/op 48 B/op 1 allocs/op
+Benchmark_ToUpperBytes/default-16 15381202 76.9 ns/op 48 B/op 1 allocs/op
+
+Benchmark_TrimRightBytes/fiber-16 70572459 16.3 ns/op 8 B/op 1 allocs/op
+Benchmark_TrimRightBytes/fiber-16 74983597 16.3 ns/op 8 B/op 1 allocs/op
+Benchmark_TrimRightBytes/default-16 16212578 74.1 ns/op 40 B/op 2 allocs/op
+Benchmark_TrimRightBytes/default-16 16434686 74.1 ns/op 40 B/op 2 allocs/op
+
+Benchmark_TrimLeftBytes/fiber-16 74983128 16.3 ns/op 8 B/op 1 allocs/op
+Benchmark_TrimLeftBytes/fiber-16 74985002 16.3 ns/op 8 B/op 1 allocs/op
+Benchmark_TrimLeftBytes/default-16 21047868 56.5 ns/op 40 B/op 2 allocs/op
+Benchmark_TrimLeftBytes/default-16 21048015 56.5 ns/op 40 B/op 2 allocs/op
+
+Benchmark_TrimBytes/fiber-16 54533307 21.9 ns/op 16 B/op 1 allocs/op
+Benchmark_TrimBytes/fiber-16 54532812 21.9 ns/op 16 B/op 1 allocs/op
+Benchmark_TrimBytes/default-16 14282517 84.6 ns/op 48 B/op 2 allocs/op
+Benchmark_TrimBytes/default-16 14114508 84.7 ns/op 48 B/op 2 allocs/op
+
+Benchmark_EqualFolds/fiber-16 36355153 32.6 ns/op 0 B/op 0 allocs/op
+Benchmark_EqualFolds/fiber-16 36355593 32.6 ns/op 0 B/op 0 allocs/op
+Benchmark_EqualFolds/default-16 15186220 78.1 ns/op 0 B/op 0 allocs/op
+Benchmark_EqualFolds/default-16 15186412 78.3 ns/op 0 B/op 0 allocs/op
+
+Benchmark_UUID/fiber-16 23994625 49.8 ns/op 48 B/op 1 allocs/op
+Benchmark_UUID/fiber-16 23994768 50.1 ns/op 48 B/op 1 allocs/op
+Benchmark_UUID/default-16 3233772 371 ns/op 208 B/op 6 allocs/op
+Benchmark_UUID/default-16 3251295 370 ns/op 208 B/op 6 allocs/op
+
+Benchmark_GetString/unsafe-16 1000000000 0.709 ns/op 0 B/op 0 allocs/op
+Benchmark_GetString/unsafe-16 1000000000 0.713 ns/op 0 B/op 0 allocs/op
+Benchmark_GetString/default-16 59986202 19.0 ns/op 16 B/op 1 allocs/op
+Benchmark_GetString/default-16 63142939 19.0 ns/op 16 B/op 1 allocs/op
+
+Benchmark_GetBytes/unsafe-16 508360195 2.36 ns/op 0 B/op 0 allocs/op
+Benchmark_GetBytes/unsafe-16 508359979 2.35 ns/op 0 B/op 0 allocs/op
+Benchmark_GetBytes/default-16 46143019 25.7 ns/op 16 B/op 1 allocs/op
+Benchmark_GetBytes/default-16 44434734 25.6 ns/op 16 B/op 1 allocs/op
+
+Benchmark_GetMIME/fiber-16 21423750 56.3 ns/op 0 B/op 0 allocs/op
+Benchmark_GetMIME/fiber-16 21423559 55.4 ns/op 0 B/op 0 allocs/op
+Benchmark_GetMIME/default-16 6735282 173 ns/op 0 B/op 0 allocs/op
+Benchmark_GetMIME/default-16 6895002 172 ns/op 0 B/op 0 allocs/op
+
+Benchmark_StatusMessage/fiber-16 1000000000 0.766 ns/op 0 B/op 0 allocs/op
+Benchmark_StatusMessage/fiber-16 1000000000 0.767 ns/op 0 B/op 0 allocs/op
+Benchmark_StatusMessage/default-16 159538528 7.50 ns/op 0 B/op 0 allocs/op
+Benchmark_StatusMessage/default-16 159750830 7.51 ns/op 0 B/op 0 allocs/op
+
+Benchmark_ToUpper/fiber-16 22217408 53.3 ns/op 48 B/op 1 allocs/op
+Benchmark_ToUpper/fiber-16 22636554 53.2 ns/op 48 B/op 1 allocs/op
+Benchmark_ToUpper/default-16 11108600 108 ns/op 48 B/op 1 allocs/op
+Benchmark_ToUpper/default-16 11108580 108 ns/op 48 B/op 1 allocs/op
+
+Benchmark_ToLower/fiber-16 23994720 49.8 ns/op 48 B/op 1 allocs/op
+Benchmark_ToLower/fiber-16 23994768 50.1 ns/op 48 B/op 1 allocs/op
+Benchmark_ToLower/default-16 10808376 110 ns/op 48 B/op 1 allocs/op
+Benchmark_ToLower/default-16 10617034 110 ns/op 48 B/op 1 allocs/op
+
+Benchmark_TrimRight/fiber-16 413699521 2.94 ns/op 0 B/op 0 allocs/op
+Benchmark_TrimRight/fiber-16 415131687 2.91 ns/op 0 B/op 0 allocs/op
+Benchmark_TrimRight/default-16 23994577 49.1 ns/op 32 B/op 1 allocs/op
+Benchmark_TrimRight/default-16 24484249 49.4 ns/op 32 B/op 1 allocs/op
+
+Benchmark_TrimLeft/fiber-16 379661170 3.13 ns/op 0 B/op 0 allocs/op
+Benchmark_TrimLeft/fiber-16 382079941 3.16 ns/op 0 B/op 0 allocs/op
+Benchmark_TrimLeft/default-16 27900877 41.9 ns/op 32 B/op 1 allocs/op
+Benchmark_TrimLeft/default-16 28564898 42.0 ns/op 32 B/op 1 allocs/op
+
+Benchmark_Trim/fiber-16 236632856 4.96 ns/op 0 B/op 0 allocs/op
+Benchmark_Trim/fiber-16 237570085 4.93 ns/op 0 B/op 0 allocs/op
+Benchmark_Trim/default-16 18457221 66.0 ns/op 32 B/op 1 allocs/op
+Benchmark_Trim/default-16 18177328 65.9 ns/op 32 B/op 1 allocs/op
+Benchmark_Trim/default.trimspace-16 188933770 6.33 ns/op 0 B/op 0 allocs/op
+Benchmark_Trim/default.trimspace-16 184007649 6.42 ns/op 0 B/op 0 allocs/op
+
+Benchmark_ConvertToBytes/fiber-8 43773547 24.43 ns/op 0 B/op 0 allocs/op
+Benchmark_ConvertToBytes/fiber-8 45849477 25.33 ns/op 0 B/op 0 allocs/op
+```
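+
+A minimal usage sketch (not part of the original README; it only assumes the
+exported helpers benchmarked above):
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/gofiber/fiber/v2/utils"
+)
+
+func main() {
+	fmt.Println(utils.ToLower("HELLO"))         // hello
+	fmt.Println(utils.TrimRight("/path/", '/')) // /path
+	fmt.Println(utils.UUID())                   // a random v4-style UUID
+}
+```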
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/assertions.go b/vendor/github.com/gofiber/fiber/v2/utils/assertions.go
new file mode 100644
index 0000000..3682d56
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/assertions.go
@@ -0,0 +1,68 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+import (
+ "bytes"
+ "fmt"
+ "log"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "testing"
+ "text/tabwriter"
+)
+
+// AssertEqual checks if values are equal
+func AssertEqual(tb testing.TB, expected, actual interface{}, description ...string) { //nolint:thelper // TODO: Verify if tb can be nil
+ if tb != nil {
+ tb.Helper()
+ }
+
+ if reflect.DeepEqual(expected, actual) {
+ return
+ }
+
+ aType := ""
+ bType := ""
+
+ if expected != nil {
+ aType = reflect.TypeOf(expected).String()
+ }
+ if actual != nil {
+ bType = reflect.TypeOf(actual).String()
+ }
+
+ testName := "AssertEqual"
+ if tb != nil {
+ testName = tb.Name()
+ }
+
+ _, file, line, _ := runtime.Caller(1)
+
+ var buf bytes.Buffer
+ const pad = 5
+ w := tabwriter.NewWriter(&buf, 0, 0, pad, ' ', 0)
+ _, _ = fmt.Fprintf(w, "\nTest:\t%s", testName)
+ _, _ = fmt.Fprintf(w, "\nTrace:\t%s:%d", filepath.Base(file), line)
+ if len(description) > 0 {
+ _, _ = fmt.Fprintf(w, "\nDescription:\t%s", description[0])
+ }
+ _, _ = fmt.Fprintf(w, "\nExpect:\t%v\t(%s)", expected, aType)
+ _, _ = fmt.Fprintf(w, "\nResult:\t%v\t(%s)", actual, bType)
+
+ var result string
+ if err := w.Flush(); err != nil {
+ result = err.Error()
+ } else {
+ result = buf.String()
+ }
+
+ if tb != nil {
+ tb.Fatal(result)
+ } else {
+ log.Fatal(result) //nolint:revive // tb might be nil, so we need a fallback
+ }
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/bytes.go b/vendor/github.com/gofiber/fiber/v2/utils/bytes.go
new file mode 100644
index 0000000..bd2c87b
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/bytes.go
@@ -0,0 +1,69 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+// ToLowerBytes converts an ASCII slice to lower-case in-place.
+func ToLowerBytes(b []byte) []byte {
+ for i := 0; i < len(b); i++ {
+ b[i] = toLowerTable[b[i]]
+ }
+ return b
+}
+
+// ToUpperBytes converts an ASCII slice to upper-case in-place.
+func ToUpperBytes(b []byte) []byte {
+ for i := 0; i < len(b); i++ {
+ b[i] = toUpperTable[b[i]]
+ }
+ return b
+}
+
+// TrimRightBytes is the equivalent of bytes.TrimRight
+func TrimRightBytes(b []byte, cutset byte) []byte {
+ lenStr := len(b)
+ for lenStr > 0 && b[lenStr-1] == cutset {
+ lenStr--
+ }
+ return b[:lenStr]
+}
+
+// TrimLeftBytes is the equivalent of bytes.TrimLeft
+func TrimLeftBytes(b []byte, cutset byte) []byte {
+ lenStr, start := len(b), 0
+ for start < lenStr && b[start] == cutset {
+ start++
+ }
+ return b[start:]
+}
+
+// TrimBytes is the equivalent of bytes.Trim
+func TrimBytes(b []byte, cutset byte) []byte {
+ i, j := 0, len(b)-1
+ for ; i <= j; i++ {
+ if b[i] != cutset {
+ break
+ }
+ }
+ for ; i < j; j-- {
+ if b[j] != cutset {
+ break
+ }
+ }
+
+ return b[i : j+1]
+}
+
+// EqualFoldBytes tests ASCII slices for case-insensitive equality
+func EqualFoldBytes(b, s []byte) bool {
+ if len(b) != len(s) {
+ return false
+ }
+ for i := len(b) - 1; i >= 0; i-- {
+ if toUpperTable[b[i]] != toUpperTable[s[i]] {
+ return false
+ }
+ }
+ return true
+}
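+
+// Illustrative sketch (not part of the upstream source): the trim helpers take
+// a single cutset byte, and the fold helper compares ASCII case-insensitively.
+//
+//	TrimBytes([]byte("--abc--"), '-')                // => []byte("abc")
+//	EqualFoldBytes([]byte("Fiber"), []byte("fIBER")) // => true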
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/common.go b/vendor/github.com/gofiber/fiber/v2/utils/common.go
new file mode 100644
index 0000000..6c1dd1e
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/common.go
@@ -0,0 +1,160 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+import (
+ "bytes"
+ "crypto/rand"
+ "encoding/binary"
+ "encoding/hex"
+ "math"
+ "net"
+ "os"
+ "reflect"
+ "runtime"
+ "strconv"
+ "sync"
+ "sync/atomic"
+ "unicode"
+
+ googleuuid "github.com/google/uuid"
+)
+
+const (
+ toLowerTable = "\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@abcdefghijklmnopqrstuvwxyz[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ toUpperTable = "\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~\u007f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+)
+
+// Copyright © 2014, Roger Peppe
+// github.com/rogpeppe/fastuuid
+// All rights reserved.
+
+const (
+ emptyUUID = "00000000-0000-0000-0000-000000000000"
+)
+
+var (
+ uuidSeed [24]byte
+ uuidCounter uint64
+ uuidSetup sync.Once
+ unitsSlice = []byte("kmgtp")
+)
+
+// UUID generates a universally unique identifier (UUID)
+func UUID() string {
+ // Setup seed & counter once
+ uuidSetup.Do(func() {
+ if _, err := rand.Read(uuidSeed[:]); err != nil {
+ return
+ }
+ uuidCounter = binary.LittleEndian.Uint64(uuidSeed[:8])
+ })
+ if atomic.LoadUint64(&uuidCounter) <= 0 {
+ return emptyUUID
+ }
+ // first 8 bytes differ, taking a slice of the first 16 bytes
+ x := atomic.AddUint64(&uuidCounter, 1)
+ uuid := uuidSeed
+ binary.LittleEndian.PutUint64(uuid[:8], x)
+ uuid[6], uuid[9] = uuid[9], uuid[6]
+
+ // RFC4122 v4
+ uuid[6] = (uuid[6] & 0x0f) | 0x40
+ uuid[8] = uuid[8]&0x3f | 0x80
+
+ // create UUID representation of the first 128 bits
+ b := make([]byte, 36)
+ hex.Encode(b[0:8], uuid[0:4])
+ b[8] = '-'
+ hex.Encode(b[9:13], uuid[4:6])
+ b[13] = '-'
+ hex.Encode(b[14:18], uuid[6:8])
+ b[18] = '-'
+ hex.Encode(b[19:23], uuid[8:10])
+ b[23] = '-'
+ hex.Encode(b[24:], uuid[10:16])
+
+ return UnsafeString(b)
+}
+
+// UUIDv4 returns a Random (Version 4) UUID.
+// The strength of the UUIDs is based on the strength of the crypto/rand package.
+func UUIDv4() string {
+ token, err := googleuuid.NewRandom()
+ if err != nil {
+ return UUID()
+ }
+ return token.String()
+}
+
+// FunctionName returns the name of the given function
+func FunctionName(fn interface{}) string {
+ t := reflect.ValueOf(fn).Type()
+ if t.Kind() == reflect.Func {
+ return runtime.FuncForPC(reflect.ValueOf(fn).Pointer()).Name()
+ }
+ return t.String()
+}
+
+// GetArgument checks if the key is present in the command-line arguments
+func GetArgument(arg string) bool {
+ for i := range os.Args[1:] {
+ if os.Args[1:][i] == arg {
+ return true
+ }
+ }
+ return false
+}
+
+// IncrementIPRange finds the next available IP address
+func IncrementIPRange(ip net.IP) {
+ for j := len(ip) - 1; j >= 0; j-- {
+ ip[j]++
+ if ip[j] > 0 {
+ break
+ }
+ }
+}
+
+// ConvertToBytes returns the integer number of bytes represented by a human-readable string, e.g. 42kb, 42M.
+// It returns 0 if the string is unrecognized.
+func ConvertToBytes(humanReadableString string) int {
+ strLen := len(humanReadableString)
+ if strLen == 0 {
+ return 0
+ }
+ var unitPrefixPos, lastNumberPos int
+ // loop the string
+ for i := strLen - 1; i >= 0; i-- {
+ // check if the char is a number
+ if unicode.IsDigit(rune(humanReadableString[i])) {
+ lastNumberPos = i
+ break
+ } else if humanReadableString[i] != ' ' {
+ unitPrefixPos = i
+ }
+ }
+
+ if lastNumberPos < 0 {
+ return 0
+ }
+ // fetch the number part and parse it to float
+ size, err := strconv.ParseFloat(humanReadableString[:lastNumberPos+1], 64)
+ if err != nil {
+ return 0
+ }
+
+ // check the multiplier from the string and use it
+ if unitPrefixPos > 0 {
+ // convert the multiplier char to lowercase and check if it exists in the units slice
+ index := bytes.IndexByte(unitsSlice, toLowerTable[humanReadableString[unitPrefixPos]])
+ if index != -1 {
+ const bytesPerKB = 1000
+ size *= math.Pow(bytesPerKB, float64(index+1))
+ }
+ }
+
+ return int(size)
+}
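+
+// Illustrative sketch (not part of the upstream source): units are decimal
+// (powers of 1000) and case-insensitive.
+//
+//	ConvertToBytes("42kb") // => 42000
+//	ConvertToBytes("1.5M") // => 1500000
+//	ConvertToBytes("oops") // => 0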
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/convert.go b/vendor/github.com/gofiber/fiber/v2/utils/convert.go
new file mode 100644
index 0000000..a5317bf
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/convert.go
@@ -0,0 +1,117 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// CopyString copies a string to make it immutable
+func CopyString(s string) string {
+ return string(UnsafeBytes(s))
+}
+
+// CopyBytes copies a slice to make it immutable
+func CopyBytes(b []byte) []byte {
+ tmp := make([]byte, len(b))
+ copy(tmp, b)
+ return tmp
+}
+
+const (
+ uByte = 1 << (10 * iota) // 1 << 10 == 1024
+ uKilobyte
+ uMegabyte
+ uGigabyte
+ uTerabyte
+ uPetabyte
+ uExabyte
+)
+
+// ByteSize returns a human-readable byte string of the form 10M, 12.5K, and so forth.
+// The unit that results in the smallest number greater than or equal to 1 is always chosen.
+func ByteSize(bytes uint64) string {
+ unit := ""
+ value := float64(bytes)
+ switch {
+ case bytes >= uExabyte:
+ unit = "EB"
+ value /= uExabyte
+ case bytes >= uPetabyte:
+ unit = "PB"
+ value /= uPetabyte
+ case bytes >= uTerabyte:
+ unit = "TB"
+ value /= uTerabyte
+ case bytes >= uGigabyte:
+ unit = "GB"
+ value /= uGigabyte
+ case bytes >= uMegabyte:
+ unit = "MB"
+ value /= uMegabyte
+ case bytes >= uKilobyte:
+ unit = "KB"
+ value /= uKilobyte
+ case bytes >= uByte:
+ unit = "B"
+ default:
+ return "0B"
+ }
+ result := strconv.FormatFloat(value, 'f', 1, 64)
+ result = strings.TrimSuffix(result, ".0")
+ return result + unit
+}
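+
+// Illustrative sketch (not part of the upstream source): ByteSize uses binary
+// units (powers of 1024) and trims a trailing ".0".
+//
+//	ByteSize(1536)    // => "1.5KB"
+//	ByteSize(1048576) // => "1MB"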
+
+// ToString converts arg to a string
+func ToString(arg interface{}, timeFormat ...string) string {
+ tmp := reflect.Indirect(reflect.ValueOf(arg)).Interface()
+ switch v := tmp.(type) {
+ case int:
+ return strconv.Itoa(v)
+ case int8:
+ return strconv.FormatInt(int64(v), 10)
+ case int16:
+ return strconv.FormatInt(int64(v), 10)
+ case int32:
+ return strconv.FormatInt(int64(v), 10)
+ case int64:
+ return strconv.FormatInt(v, 10)
+ case uint:
+ return strconv.Itoa(int(v))
+ case uint8:
+ return strconv.FormatInt(int64(v), 10)
+ case uint16:
+ return strconv.FormatInt(int64(v), 10)
+ case uint32:
+ return strconv.FormatInt(int64(v), 10)
+ case uint64:
+ return strconv.FormatInt(int64(v), 10)
+ case string:
+ return v
+ case []byte:
+ return string(v)
+ case bool:
+ return strconv.FormatBool(v)
+ case float32:
+ return strconv.FormatFloat(float64(v), 'f', -1, 32)
+ case float64:
+ return strconv.FormatFloat(v, 'f', -1, 64)
+ case time.Time:
+ if len(timeFormat) > 0 {
+ return v.Format(timeFormat[0])
+ }
+ return v.Format("2006-01-02 15:04:05")
+ case reflect.Value:
+ return ToString(v.Interface(), timeFormat...)
+ case fmt.Stringer:
+ return v.String()
+ default:
+ return ""
+ }
+}
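+
+// Illustrative sketch (not part of the upstream source):
+//
+//	ToString(42)   // => "42"
+//	ToString(3.14) // => "3.14"
+//	ToString(time.Now(), "2006-01-02") // => the current date in the given layout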
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/convert_b2s_new.go b/vendor/github.com/gofiber/fiber/v2/utils/convert_b2s_new.go
new file mode 100644
index 0000000..3fcf7d5
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/convert_b2s_new.go
@@ -0,0 +1,12 @@
+//go:build go1.20
+
+package utils
+
+import (
+ "unsafe"
+)
+
+// UnsafeString returns a string pointer without allocation
+func UnsafeString(b []byte) string {
+ return unsafe.String(unsafe.SliceData(b), len(b))
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/convert_b2s_old.go b/vendor/github.com/gofiber/fiber/v2/utils/convert_b2s_old.go
new file mode 100644
index 0000000..36cbe30
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/convert_b2s_old.go
@@ -0,0 +1,14 @@
+//go:build !go1.20
+
+package utils
+
+import (
+ "unsafe"
+)
+
+// UnsafeString returns a string pointer without allocation
+//
+//nolint:gosec // unsafe is used for better performance here
+func UnsafeString(b []byte) string {
+ return *(*string)(unsafe.Pointer(&b))
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/convert_s2b_new.go b/vendor/github.com/gofiber/fiber/v2/utils/convert_s2b_new.go
new file mode 100644
index 0000000..5da5c81
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/convert_s2b_new.go
@@ -0,0 +1,12 @@
+//go:build go1.20
+
+package utils
+
+import (
+ "unsafe"
+)
+
+// UnsafeBytes returns a byte pointer without allocation.
+func UnsafeBytes(s string) []byte {
+ return unsafe.Slice(unsafe.StringData(s), len(s))
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/convert_s2b_old.go b/vendor/github.com/gofiber/fiber/v2/utils/convert_s2b_old.go
new file mode 100644
index 0000000..c9435bd
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/convert_s2b_old.go
@@ -0,0 +1,24 @@
+//go:build !go1.20
+
+package utils
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+const MaxStringLen = 0x7fff0000 // Maximum string length for UnsafeBytes. (decimal: 2147418112)
+
+// UnsafeBytes returns a byte pointer without allocation.
+// String length shouldn't be more than 2147418112.
+//
+//nolint:gosec // unsafe is used for better performance here
+func UnsafeBytes(s string) []byte {
+ if s == "" {
+ return nil
+ }
+
+ return (*[MaxStringLen]byte)(unsafe.Pointer(
+ (*reflect.StringHeader)(unsafe.Pointer(&s)).Data),
+ )[:len(s):len(s)]
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/deprecated.go b/vendor/github.com/gofiber/fiber/v2/utils/deprecated.go
new file mode 100644
index 0000000..a436e67
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/deprecated.go
@@ -0,0 +1,16 @@
+package utils
+
+// Deprecated: Please use UnsafeString instead
+func GetString(b []byte) string {
+ return UnsafeString(b)
+}
+
+// Deprecated: Please use UnsafeBytes instead
+func GetBytes(s string) []byte {
+ return UnsafeBytes(s)
+}
+
+// Deprecated: Please use CopyString instead
+func ImmutableString(s string) string {
+ return CopyString(s)
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/http.go b/vendor/github.com/gofiber/fiber/v2/utils/http.go
new file mode 100644
index 0000000..fe394f5
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/http.go
@@ -0,0 +1,267 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+import (
+ "mime"
+ "strings"
+)
+
+const MIMEOctetStream = "application/octet-stream"
+
+// GetMIME returns the content-type of a file extension
+func GetMIME(extension string) string {
+ if len(extension) == 0 {
+ return ""
+ }
+ var foundMime string
+ if extension[0] == '.' {
+ foundMime = mimeExtensions[extension[1:]]
+ } else {
+ foundMime = mimeExtensions[extension]
+ }
+
+ if len(foundMime) == 0 {
+ if extension[0] != '.' {
+ foundMime = mime.TypeByExtension("." + extension)
+ } else {
+ foundMime = mime.TypeByExtension(extension)
+ }
+
+ if foundMime == "" {
+ return MIMEOctetStream
+ }
+ }
+ return foundMime
+}
+
+// ParseVendorSpecificContentType checks whether the content type is vendor-specific and,
+// if so, whether it is parsable to any known type. If it is not vendor-specific, it returns
+// the original content type.
+func ParseVendorSpecificContentType(cType string) string {
+ plusIndex := strings.Index(cType, "+")
+
+ if plusIndex == -1 {
+ return cType
+ }
+
+ var parsableType string
+ if semiColonIndex := strings.Index(cType, ";"); semiColonIndex == -1 {
+ parsableType = cType[plusIndex+1:]
+ } else if plusIndex < semiColonIndex {
+ parsableType = cType[plusIndex+1 : semiColonIndex]
+ } else {
+ return cType[:semiColonIndex]
+ }
+
+ slashIndex := strings.Index(cType, "/")
+
+ if slashIndex == -1 {
+ return cType
+ }
+
+ return cType[0:slashIndex+1] + parsableType
+}
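+
+// Illustrative sketch (not part of the upstream source):
+//
+//	ParseVendorSpecificContentType("application/vnd.api+json") // => "application/json"
+//	ParseVendorSpecificContentType("text/html")                // => "text/html" (unchanged)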
+
+// limits for HTTP status codes
+const (
+ statusMessageMin = 100
+ statusMessageMax = 511
+)
+
+// StatusMessage returns the correct message for the provided HTTP status code
+func StatusMessage(status int) string {
+ if status < statusMessageMin || status > statusMessageMax {
+ return ""
+ }
+ return statusMessage[status]
+}
+
+// NOTE: Keep this in sync with the status code list
+var statusMessage = []string{
+ 100: "Continue", // StatusContinue
+ 101: "Switching Protocols", // StatusSwitchingProtocols
+ 102: "Processing", // StatusProcessing
+ 103: "Early Hints", // StatusEarlyHints
+
+ 200: "OK", // StatusOK
+ 201: "Created", // StatusCreated
+ 202: "Accepted", // StatusAccepted
+ 203: "Non-Authoritative Information", // StatusNonAuthoritativeInformation
+ 204: "No Content", // StatusNoContent
+ 205: "Reset Content", // StatusResetContent
+ 206: "Partial Content", // StatusPartialContent
+ 207: "Multi-Status", // StatusMultiStatus
+ 208: "Already Reported", // StatusAlreadyReported
+ 226: "IM Used", // StatusIMUsed
+
+ 300: "Multiple Choices", // StatusMultipleChoices
+ 301: "Moved Permanently", // StatusMovedPermanently
+ 302: "Found", // StatusFound
+ 303: "See Other", // StatusSeeOther
+ 304: "Not Modified", // StatusNotModified
+ 305: "Use Proxy", // StatusUseProxy
+ 306: "Switch Proxy", // StatusSwitchProxy
+ 307: "Temporary Redirect", // StatusTemporaryRedirect
+ 308: "Permanent Redirect", // StatusPermanentRedirect
+
+ 400: "Bad Request", // StatusBadRequest
+ 401: "Unauthorized", // StatusUnauthorized
+ 402: "Payment Required", // StatusPaymentRequired
+ 403: "Forbidden", // StatusForbidden
+ 404: "Not Found", // StatusNotFound
+ 405: "Method Not Allowed", // StatusMethodNotAllowed
+ 406: "Not Acceptable", // StatusNotAcceptable
+ 407: "Proxy Authentication Required", // StatusProxyAuthRequired
+ 408: "Request Timeout", // StatusRequestTimeout
+ 409: "Conflict", // StatusConflict
+ 410: "Gone", // StatusGone
+ 411: "Length Required", // StatusLengthRequired
+ 412: "Precondition Failed", // StatusPreconditionFailed
+ 413: "Request Entity Too Large", // StatusRequestEntityTooLarge
+ 414: "Request URI Too Long", // StatusRequestURITooLong
+ 415: "Unsupported Media Type", // StatusUnsupportedMediaType
+ 416: "Requested Range Not Satisfiable", // StatusRequestedRangeNotSatisfiable
+ 417: "Expectation Failed", // StatusExpectationFailed
+ 418: "I'm a teapot", // StatusTeapot
+ 421: "Misdirected Request", // StatusMisdirectedRequest
+ 422: "Unprocessable Entity", // StatusUnprocessableEntity
+ 423: "Locked", // StatusLocked
+ 424: "Failed Dependency", // StatusFailedDependency
+ 425: "Too Early", // StatusTooEarly
+ 426: "Upgrade Required", // StatusUpgradeRequired
+ 428: "Precondition Required", // StatusPreconditionRequired
+ 429: "Too Many Requests", // StatusTooManyRequests
+ 431: "Request Header Fields Too Large", // StatusRequestHeaderFieldsTooLarge
+ 451: "Unavailable For Legal Reasons", // StatusUnavailableForLegalReasons
+
+ 500: "Internal Server Error", // StatusInternalServerError
+ 501: "Not Implemented", // StatusNotImplemented
+ 502: "Bad Gateway", // StatusBadGateway
+ 503: "Service Unavailable", // StatusServiceUnavailable
+ 504: "Gateway Timeout", // StatusGatewayTimeout
+ 505: "HTTP Version Not Supported", // StatusHTTPVersionNotSupported
+ 506: "Variant Also Negotiates", // StatusVariantAlsoNegotiates
+ 507: "Insufficient Storage", // StatusInsufficientStorage
+ 508: "Loop Detected", // StatusLoopDetected
+ 510: "Not Extended", // StatusNotExtended
+ 511: "Network Authentication Required", // StatusNetworkAuthenticationRequired
+}
+
+// MIME types were copied from https://github.com/nginx/nginx/blob/67d2a9541826ecd5db97d604f23460210fd3e517/conf/mime.types with the following updates:
+// - Use "application/xml" instead of "text/xml" as recommended per https://datatracker.ietf.org/doc/html/rfc7303#section-4.1
+// - Use "text/javascript" instead of "application/javascript" as recommended per https://www.rfc-editor.org/rfc/rfc9239#name-text-javascript
+var mimeExtensions = map[string]string{
+ "html": "text/html",
+ "htm": "text/html",
+ "shtml": "text/html",
+ "css": "text/css",
+ "xml": "application/xml",
+ "gif": "image/gif",
+ "jpeg": "image/jpeg",
+ "jpg": "image/jpeg",
+ "js": "text/javascript",
+ "atom": "application/atom+xml",
+ "rss": "application/rss+xml",
+ "mml": "text/mathml",
+ "txt": "text/plain",
+ "jad": "text/vnd.sun.j2me.app-descriptor",
+ "wml": "text/vnd.wap.wml",
+ "htc": "text/x-component",
+ "avif": "image/avif",
+ "png": "image/png",
+ "svg": "image/svg+xml",
+ "svgz": "image/svg+xml",
+ "tif": "image/tiff",
+ "tiff": "image/tiff",
+ "wbmp": "image/vnd.wap.wbmp",
+ "webp": "image/webp",
+ "ico": "image/x-icon",
+ "jng": "image/x-jng",
+ "bmp": "image/x-ms-bmp",
+ "woff": "font/woff",
+ "woff2": "font/woff2",
+ "jar": "application/java-archive",
+ "war": "application/java-archive",
+ "ear": "application/java-archive",
+ "json": "application/json",
+ "hqx": "application/mac-binhex40",
+ "doc": "application/msword",
+ "pdf": "application/pdf",
+ "ps": "application/postscript",
+ "eps": "application/postscript",
+ "ai": "application/postscript",
+ "rtf": "application/rtf",
+ "m3u8": "application/vnd.apple.mpegurl",
+ "kml": "application/vnd.google-earth.kml+xml",
+ "kmz": "application/vnd.google-earth.kmz",
+ "xls": "application/vnd.ms-excel",
+ "eot": "application/vnd.ms-fontobject",
+ "ppt": "application/vnd.ms-powerpoint",
+ "odg": "application/vnd.oasis.opendocument.graphics",
+ "odp": "application/vnd.oasis.opendocument.presentation",
+ "ods": "application/vnd.oasis.opendocument.spreadsheet",
+ "odt": "application/vnd.oasis.opendocument.text",
+ "pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
+ "xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+ "docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
+ "wmlc": "application/vnd.wap.wmlc",
+ "wasm": "application/wasm",
+ "7z": "application/x-7z-compressed",
+ "cco": "application/x-cocoa",
+ "jardiff": "application/x-java-archive-diff",
+ "jnlp": "application/x-java-jnlp-file",
+ "run": "application/x-makeself",
+ "pl": "application/x-perl",
+ "pm": "application/x-perl",
+ "prc": "application/x-pilot",
+ "pdb": "application/x-pilot",
+ "rar": "application/x-rar-compressed",
+ "rpm": "application/x-redhat-package-manager",
+ "sea": "application/x-sea",
+ "swf": "application/x-shockwave-flash",
+ "sit": "application/x-stuffit",
+ "tcl": "application/x-tcl",
+ "tk": "application/x-tcl",
+ "der": "application/x-x509-ca-cert",
+ "pem": "application/x-x509-ca-cert",
+ "crt": "application/x-x509-ca-cert",
+ "xpi": "application/x-xpinstall",
+ "xhtml": "application/xhtml+xml",
+ "xspf": "application/xspf+xml",
+ "zip": "application/zip",
+ "bin": "application/octet-stream",
+ "exe": "application/octet-stream",
+ "dll": "application/octet-stream",
+ "deb": "application/octet-stream",
+ "dmg": "application/octet-stream",
+ "iso": "application/octet-stream",
+ "img": "application/octet-stream",
+ "msi": "application/octet-stream",
+ "msp": "application/octet-stream",
+ "msm": "application/octet-stream",
+ "mid": "audio/midi",
+ "midi": "audio/midi",
+ "kar": "audio/midi",
+ "mp3": "audio/mpeg",
+ "ogg": "audio/ogg",
+ "m4a": "audio/x-m4a",
+ "ra": "audio/x-realaudio",
+ "3gpp": "video/3gpp",
+ "3gp": "video/3gpp",
+ "ts": "video/mp2t",
+ "mp4": "video/mp4",
+ "mpeg": "video/mpeg",
+ "mpg": "video/mpeg",
+ "mov": "video/quicktime",
+ "webm": "video/webm",
+ "flv": "video/x-flv",
+ "m4v": "video/x-m4v",
+ "mng": "video/x-mng",
+ "asx": "video/x-ms-asf",
+ "asf": "video/x-ms-asf",
+ "wmv": "video/x-ms-wmv",
+ "avi": "video/x-msvideo",
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/ips.go b/vendor/github.com/gofiber/fiber/v2/utils/ips.go
new file mode 100644
index 0000000..4886c11
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/ips.go
@@ -0,0 +1,143 @@
+package utils
+
+import (
+ "net"
+)
+
+// IsIPv4 works the same way as net.ParseIP,
+// but without the IPv6 check and without returning a net.IP slice, so IsIPv4 makes no allocations.
+func IsIPv4(s string) bool {
+ for i := 0; i < net.IPv4len; i++ {
+ if len(s) == 0 {
+ return false
+ }
+
+ if i > 0 {
+ if s[0] != '.' {
+ return false
+ }
+ s = s[1:]
+ }
+
+ n, ci := 0, 0
+
+ for ci = 0; ci < len(s) && '0' <= s[ci] && s[ci] <= '9'; ci++ {
+ n = n*10 + int(s[ci]-'0')
+ if n > 0xFF { // an octet may be at most 255; '>=' would wrongly reject e.g. "255.255.255.255"
+ return false
+ }
+ }
+
+ if ci == 0 || (ci > 1 && s[0] == '0') {
+ return false
+ }
+
+ s = s[ci:]
+ }
+
+ return len(s) == 0
+}
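+
+// Illustrative usage (comment sketch, not part of the upstream file):
+//
+//	IsIPv4("127.0.0.1")   // true
+//	IsIPv4("127.0.0.256") // false: octet out of range
+//	IsIPv4("::1")         // false: IPv6 is handled by IsIPv6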
+
+// IsIPv6 works the same way as net.ParseIP,
+// but without the IPv4 check and without returning a net.IP slice, so IsIPv6 makes no allocations.
+func IsIPv6(s string) bool {
+ ellipsis := -1 // position of ellipsis in ip
+
+ // Might have leading ellipsis
+ if len(s) >= 2 && s[0] == ':' && s[1] == ':' {
+ ellipsis = 0
+ s = s[2:]
+ // Might be only ellipsis
+ if len(s) == 0 {
+ return true
+ }
+ }
+
+ // Loop, parsing hex numbers followed by colon.
+ i := 0
+ for i < net.IPv6len {
+ // Hex number.
+ n, ci := 0, 0
+
+ for ci = 0; ci < len(s); ci++ {
+ if '0' <= s[ci] && s[ci] <= '9' {
+ n *= 16
+ n += int(s[ci] - '0')
+ } else if 'a' <= s[ci] && s[ci] <= 'f' {
+ n *= 16
+ n += int(s[ci]-'a') + 10
+ } else if 'A' <= s[ci] && s[ci] <= 'F' {
+ n *= 16
+ n += int(s[ci]-'A') + 10
+ } else {
+ break
+ }
+ if n > 0xFFFF {
+ return false
+ }
+ }
+ if ci == 0 || n > 0xFFFF {
+ return false
+ }
+
+ if ci < len(s) && s[ci] == '.' {
+ if ellipsis < 0 && i != net.IPv6len-net.IPv4len {
+ return false
+ }
+ if i+net.IPv4len > net.IPv6len {
+ return false
+ }
+
+ if !IsIPv4(s) {
+ return false
+ }
+
+ s = ""
+ i += net.IPv4len
+ break
+ }
+
+ // Save this 16-bit chunk.
+ i += 2
+
+ // Stop at end of string.
+ s = s[ci:]
+ if len(s) == 0 {
+ break
+ }
+
+ // Otherwise must be followed by colon and more.
+ if s[0] != ':' || len(s) == 1 {
+ return false
+ }
+ s = s[1:]
+
+ // Look for ellipsis.
+ if s[0] == ':' {
+ if ellipsis >= 0 { // already have one
+ return false
+ }
+ ellipsis = i
+ s = s[1:]
+ if len(s) == 0 { // can be at end
+ break
+ }
+ }
+ }
+
+ // Must have used entire string.
+ if len(s) != 0 {
+ return false
+ }
+
+ // If didn't parse enough, expand ellipsis.
+ if i < net.IPv6len {
+ if ellipsis < 0 {
+ return false
+ }
+ } else if ellipsis >= 0 {
+ // Ellipsis must represent at least one 0 group.
+ return false
+ }
+ return true
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/json.go b/vendor/github.com/gofiber/fiber/v2/utils/json.go
new file mode 100644
index 0000000..477c8c3
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/json.go
@@ -0,0 +1,9 @@
+package utils
+
+// JSONMarshal returns the JSON encoding of v.
+type JSONMarshal func(v interface{}) ([]byte, error)
+
+// JSONUnmarshal parses the JSON-encoded data and stores the result
+// in the value pointed to by v. If v is nil or not a pointer,
+// Unmarshal returns an InvalidUnmarshalError.
+type JSONUnmarshal func(data []byte, v interface{}) error
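+
+// A minimal wiring sketch (illustrative, not part of the upstream file):
+// fiber.Config exposes hooks of these types, so any compatible JSON
+// implementation can be plugged in, e.g. the standard library:
+//
+//	app := fiber.New(fiber.Config{
+//		JSONEncoder: json.Marshal,
+//		JSONDecoder: json.Unmarshal,
+//	})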
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/strings.go b/vendor/github.com/gofiber/fiber/v2/utils/strings.go
new file mode 100644
index 0000000..109d132
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/strings.go
@@ -0,0 +1,75 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+// ToLower converts ascii string to lower-case
+func ToLower(b string) string {
+ res := make([]byte, len(b))
+ copy(res, b)
+ for i := 0; i < len(res); i++ {
+ res[i] = toLowerTable[res[i]]
+ }
+
+ return UnsafeString(res)
+}
+
+// ToUpper converts ascii string to upper-case
+func ToUpper(b string) string {
+ res := make([]byte, len(b))
+ copy(res, b)
+ for i := 0; i < len(res); i++ {
+ res[i] = toUpperTable[res[i]]
+ }
+
+ return UnsafeString(res)
+}
+
+// TrimLeft is the equivalent of strings.TrimLeft
+func TrimLeft(s string, cutset byte) string {
+ lenStr, start := len(s), 0
+ for start < lenStr && s[start] == cutset {
+ start++
+ }
+ return s[start:]
+}
+
+// Trim is the equivalent of strings.Trim
+func Trim(s string, cutset byte) string {
+ i, j := 0, len(s)-1
+ for ; i <= j; i++ {
+ if s[i] != cutset {
+ break
+ }
+ }
+ for ; i < j; j-- {
+ if s[j] != cutset {
+ break
+ }
+ }
+
+ return s[i : j+1]
+}
+
+// TrimRight is the equivalent of strings.TrimRight
+func TrimRight(s string, cutset byte) string {
+ lenStr := len(s)
+ for lenStr > 0 && s[lenStr-1] == cutset {
+ lenStr--
+ }
+ return s[:lenStr]
+}
+
+// EqualFold tests ascii strings for equality case-insensitively
+func EqualFold(b, s string) bool {
+ if len(b) != len(s) {
+ return false
+ }
+ for i := len(b) - 1; i >= 0; i-- {
+ if toUpperTable[b[i]] != toUpperTable[s[i]] {
+ return false
+ }
+ }
+ return true
+}
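+
+// Illustrative usage (comment sketch, not part of the upstream file):
+//
+//	EqualFold("Content-Type", "content-type") // true, ASCII case-insensitive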
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/time.go b/vendor/github.com/gofiber/fiber/v2/utils/time.go
new file mode 100644
index 0000000..8ea13c2
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/time.go
@@ -0,0 +1,32 @@
+package utils
+
+import (
+ "sync"
+ "sync/atomic"
+ "time"
+)
+
+var (
+ timestampTimer sync.Once
+ // Timestamp is the current Unix time in seconds. Start the updater with
+ // StartTimeStampUpdater before using this value, and read it atomically
+ // with `atomic.LoadUint32(&utils.Timestamp)`.
+ Timestamp uint32
+)
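+
+// Illustrative usage (comment sketch, not part of the upstream file):
+//
+//	StartTimeStampUpdater()
+//	now := atomic.LoadUint32(&Timestamp) // cached Unix time, refreshed once per second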
+
+// StartTimeStampUpdater starts a goroutine that stores the current Unix timestamp
+// in an atomic value once per second, which is much cheaper than calling
+// time.Now on every request
+func StartTimeStampUpdater() {
+ timestampTimer.Do(func() {
+ // set initial value
+ atomic.StoreUint32(&Timestamp, uint32(time.Now().Unix()))
+ go func(sleep time.Duration) {
+ ticker := time.NewTicker(sleep)
+ defer ticker.Stop()
+
+ for t := range ticker.C {
+ // update timestamp
+ atomic.StoreUint32(&Timestamp, uint32(t.Unix()))
+ }
+ }(1 * time.Second) // duration
+ })
+}
diff --git a/vendor/github.com/gofiber/fiber/v2/utils/xml.go b/vendor/github.com/gofiber/fiber/v2/utils/xml.go
new file mode 100644
index 0000000..cc6a024
--- /dev/null
+++ b/vendor/github.com/gofiber/fiber/v2/utils/xml.go
@@ -0,0 +1,4 @@
+package utils
+
+// XMLMarshal returns the XML encoding of v.
+type XMLMarshal func(v interface{}) ([]byte, error)
diff --git a/vendor/github.com/gofiber/template/.gitignore b/vendor/github.com/gofiber/template/.gitignore
new file mode 100644
index 0000000..92956e7
--- /dev/null
+++ b/vendor/github.com/gofiber/template/.gitignore
@@ -0,0 +1,33 @@
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+*.tmp
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# IDE files
+.vscode
+.DS_Store
+.idea
+
+# Misc
+*.fiber.gz
+*.fasthttp.gz
+*.pprof
+*.workspace
+
+# Dependencies
+/vendor/
+vendor/
+vendor
+/Godeps/
+
+# test files
+*/views/ShouldReload.*
diff --git a/vendor/github.com/gofiber/template/.golangci.yml b/vendor/github.com/gofiber/template/.golangci.yml
new file mode 100644
index 0000000..19e18a5
--- /dev/null
+++ b/vendor/github.com/gofiber/template/.golangci.yml
@@ -0,0 +1,284 @@
+# Created based on v1.52.2
+# NOTE: Keep this in sync with the version in .github/workflows/lint_golangci-lint.yml
+
+run:
+ timeout: 5m
+ modules-download-mode: readonly
+ skip-dirs-use-default: false
+ skip-dirs:
+ - internal
+
+output:
+ sort-results: true
+
+linters-settings:
+ depguard:
+ include-go-root: true
+ packages:
+ - flag
+ - io/ioutil
+ - reflect
+ - unsafe
+ packages-with-error-message:
+ - flag: '`flag` package is only allowed in main.go'
+ - io/ioutil: '`io/ioutil` package is deprecated, use the `io` and `os` package instead'
+ - reflect: '`reflect` package is dangerous to use'
+ - unsafe: '`unsafe` package is dangerous to use'
+
+ errcheck:
+ check-type-assertions: true
+ check-blank: true
+ disable-default-exclusions: true
+
+ errchkjson:
+ report-no-exported: true
+
+ exhaustive:
+ check-generated: true
+ default-signifies-exhaustive: true
+
+ forbidigo:
+ forbid:
+ - ^(fmt\.Print(|f|ln)|print|println)$
+ # - 'http\.Default(Client|Transport)'
+ # - 'time\.Sleep'
+ # - 'panic'
+
+ gci:
+ sections:
+ - standard
+ - prefix(github.com/gofiber/fiber)
+ - default
+ - blank
+ - dot
+ custom-order: true
+
+ goconst:
+ numbers: true
+
+ gocritic:
+ enabled-tags:
+ - diagnostic
+ - style
+ - performance
+ - experimental
+ - opinionated
+ disabled-checks:
+ - hugeParam
+ - rangeValCopy
+
+ gofumpt:
+ module-path: github.com/gofiber/template
+ extra-rules: true
+
+ gosec:
+ config:
+ global:
+ audit: true
+
+ govet:
+ enable-all: true
+ disable:
+ - fieldalignment
+ - shadow
+
+ grouper:
+ import-require-single-import: true
+ import-require-grouping: true
+
+ misspell:
+ locale: US
+
+ nolintlint:
+ require-explanation: true
+ require-specific: true
+
+ nonamedreturns:
+ report-error-in-defer: true
+
+ predeclared:
+ q: true
+
+ promlinter:
+ strict: true
+
+ reassign:
+ patterns:
+ - '.*'
+
+ revive:
+ enable-all-rules: true
+ rules:
+ # Provided by gomnd linter
+ - name: add-constant
+ disabled: true
+ - name: argument-limit
+ disabled: true
+ # Provided by bidichk
+ - name: banned-characters
+ disabled: true
+ - name: cognitive-complexity
+ disabled: true
+ - name: comment-spacings
+ arguments:
+ - nolint
+ - msgp
+ - name: cyclomatic
+ disabled: true
+ - name: exported
+ disabled: true
+ - name: file-header
+ disabled: true
+ - name: function-result-limit
+ arguments: [3]
+ - name: function-length
+ disabled: true
+ - name: line-length-limit
+ disabled: true
+ - name: nested-structs
+ disabled: true
+ - name: max-public-structs
+ disabled: true
+ - name: modifies-parameter
+ disabled: true
+ - name: package-comments
+ disabled: true
+ - name: use-any
+ disabled: true # some tests still use go 1.17
+
+ stylecheck:
+ checks:
+ - all
+ - -ST1000
+ - -ST1020
+ - -ST1021
+ - -ST1022
+
+ tagliatelle:
+ case:
+ rules:
+ json: snake
+
+ tenv:
+ all: true
+
+ #unparam:
+ # check-exported: true
+
+ wrapcheck:
+ ignorePackageGlobs:
+ - github.com/gofiber/fiber/*
+ - github.com/valyala/fasthttp
+
+issues:
+ exclude-use-default: false
+ exclude-rules:
+ - linters:
+ - goerr113
+ text: 'do not define dynamic errors, use wrapped static errors instead*'
+
+linters:
+ enable:
+ - asasalint
+ - asciicheck
+ - bidichk
+ - bodyclose
+ - containedctx
+ - contextcheck
+ # - cyclop
+ - deadcode
+ # - decorder
+ - depguard
+ - dogsled
+ # - dupl
+ - dupword
+ - durationcheck
+ - errcheck
+ - errchkjson
+ - errname
+ - errorlint
+ - execinquery
+ - exhaustive
+ # - exhaustivestruct
+ # - exhaustruct
+ - exportloopref
+ - forbidigo
+ - forcetypeassert
+ # - funlen
+ - gci
+ - ginkgolinter
+ - gocheckcompilerdirectives
+ - gochecknoglobals # Enabled
+ - gochecknoinits # Enabled
+ # - gocognit
+ - goconst
+ - gocritic
+ # - gocyclo
+ # - godot
+ # - godox
+ - goerr113
+ - gofmt
+ - gofumpt
+ # - goheader
+ # - goimports
+ # - golint
+ - gomnd # Enabled
+ - gomoddirectives
+ # - gomodguard
+ - goprintffuncname
+ - gosec
+ - gosimple
+ - govet
+ - grouper
+ # - ifshort
+ # - importas
+ - ineffassign
+ # - interfacebloat
+ # - interfacer
+ # - ireturn
+ # - lll
+ - loggercheck
+ # - maintidx
+ # - makezero
+ # - maligned
+ - misspell
+ - musttag
+ - nakedret
+ # - nestif
+ - nilerr
+ - nilnil
+ # - nlreturn
+ - noctx
+ - nolintlint
+ - nonamedreturns
+ - nosnakecase
+ - nosprintfhostport
+ - paralleltest
+ # - prealloc
+ - predeclared
+ - promlinter
+ - reassign
+ - revive
+ - rowserrcheck
+ - scopelint
+ - sqlclosecheck
+ - staticcheck
+ - structcheck
+ - stylecheck
+ - tagliatelle
+ - tenv
+ - testableexamples
+ # - testpackage
+ - thelper
+ - tparallel
+ - typecheck
+ - unconvert
+ - unparam
+ - unused
+ - usestdlibvars
+ - varcheck
+ # - varnamelen
+ - wastedassign
+ - whitespace
+ # - wrapcheck # disabled
+ # - wsl
\ No newline at end of file
diff --git a/vendor/github.com/gofiber/template/LICENSE b/vendor/github.com/gofiber/template/LICENSE
new file mode 100644
index 0000000..ca80106
--- /dev/null
+++ b/vendor/github.com/gofiber/template/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Fiber
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/gofiber/template/README.md b/vendor/github.com/gofiber/template/README.md
new file mode 100644
index 0000000..e24fdad
--- /dev/null
+++ b/vendor/github.com/gofiber/template/README.md
@@ -0,0 +1,250 @@
+This package provides universal methods to use multiple template engines with the [Fiber web framework](https://github.com/gofiber/fiber) using the new [Views](https://godoc.org/github.com/gofiber/fiber#Views) interface that is available from `> v1.11.1`. Special thanks to @bdtomlin & @arsmn for helping!
+
+9 template engines are supported:
+- [html](https://github.com/gofiber/template/tree/master/html)
+- [ace](https://github.com/gofiber/template/tree/master/ace)
+- [amber](https://github.com/gofiber/template/tree/master/amber)
+- [django](https://github.com/gofiber/template/tree/master/django)
+- [handlebars](https://github.com/gofiber/template/tree/master/handlebars)
+- [jet](https://github.com/gofiber/template/tree/master/jet)
+- [mustache](https://github.com/gofiber/template/tree/master/mustache)
+- [pug](https://github.com/gofiber/template/tree/master/pug)
+- [slim](https://github.com/gofiber/template/tree/master/slim)
+
+### Installation
+> Go version `1.17` or higher is required.
+
+```
+go get -u github.com/gofiber/fiber/v2
+go get -u github.com/gofiber/template/any_template_engine/vX
+```
+
+### Example
+```go
+package main
+
+import (
+ "log"
+
+ "github.com/gofiber/fiber/v2"
+
+ // To use a specific template engine, import as shown below:
+ // "github.com/gofiber/template/pug"
+ // "github.com/gofiber/template/mustache"
+ // etc..
+
+ // In this example we use the html template engine
+ "github.com/gofiber/template/html/v2"
+)
+
+func main() {
+ // Create a new engine by passing the template folder
+ // and template extension using .New(dir, ext string)
+ engine := html.New("./views", ".html")
+
+ // We also support the http.FileSystem interface
+ // See examples below to load templates from embedded files
+ // engine := html.NewFileSystem(http.Dir("./views"), ".html")
+
+ // Reload the templates on each render, good for development
+ engine.Reload(true) // Optional. Default: false
+
+ // Debug will print each template that is parsed, good for debugging
+ engine.Debug(true) // Optional. Default: false
+
+ // Layout defines the variable name that is used to yield templates within layouts
+ engine.Layout("embed") // Optional. Default: "embed"
+
+ // Delims sets the action delimiters to the specified strings
+ engine.Delims("{{", "}}") // Optional. Default: engine delimiters
+
+ // AddFunc adds a function to the template's global function map.
+ engine.AddFunc("greet", func(name string) string {
+ return "Hello, " + name + "!"
+ })
+
+ // After you created your engine, you can pass it to Fiber's Views Engine
+ app := fiber.New(fiber.Config{
+ Views: engine,
+ })
+
+ // To render a template, you can call the ctx.Render function
+ // Render(tmpl string, values interface{}, layout ...string)
+ app.Get("/", func(c *fiber.Ctx) error {
+ return c.Render("index", fiber.Map{
+ "Title": "Hello, World!",
+ })
+ })
+
+ // Render with layout example
+ app.Get("/layout", func(c *fiber.Ctx) error {
+ return c.Render("index", fiber.Map{
+ "Title": "Hello, World!",
+ }, "layouts/main")
+ })
+
+ log.Fatal(app.Listen(":3000"))
+}
+
+```
+
+### More Examples
+
+To view more specific examples, visit each engine folder:
+- [html](https://github.com/gofiber/template/tree/master/html)
+- [ace](https://github.com/gofiber/template/tree/master/ace)
+- [amber](https://github.com/gofiber/template/tree/master/amber)
+- [django](https://github.com/gofiber/template/tree/master/django)
+- [handlebars](https://github.com/gofiber/template/tree/master/handlebars)
+- [jet](https://github.com/gofiber/template/tree/master/jet)
+- [mustache](https://github.com/gofiber/template/tree/master/mustache)
+- [pug](https://github.com/gofiber/template/tree/master/pug)
+- [slim](https://github.com/gofiber/template/tree/master/slim)
+
+
+### Embedded Systems
+
+We support the `http.FileSystem` interface, so you can use different libraries to load the templates from embedded binaries.
+
+#### pkger
+Read documentation: https://github.com/markbates/pkger
+
+```go
+package main
+
+import (
+ "log"
+
+ "github.com/gofiber/fiber/v2"
+ "github.com/gofiber/template/html"
+
+ "github.com/markbates/pkger"
+)
+
+func main() {
+ engine := html.NewFileSystem(pkger.Dir("/views"), ".html")
+
+ app := fiber.New(fiber.Config{
+ Views: engine,
+ })
+
+ log.Fatal(app.Listen(":3000"))
+
+ // run pkger && go build
+}
+```
+#### packr
+Read documentation: https://github.com/gobuffalo/packr
+
+```go
+package main
+
+import (
+ "log"
+
+ "github.com/gofiber/fiber/v2"
+ "github.com/gofiber/template/html"
+
+ "github.com/gobuffalo/packr/v2"
+)
+
+func main() {
+ engine := html.NewFileSystem(packr.New("Templates", "/views"), ".html")
+
+ app := fiber.New(fiber.Config{
+ Views: engine,
+ })
+
+ log.Fatal(app.Listen(":3000"))
+
+ // run packr && go build
+}
+```
+#### go.rice
+Read documentation: https://github.com/GeertJohan/go.rice
+
+```go
+package main
+
+import (
+ "log"
+
+ "github.com/gofiber/fiber/v2"
+ "github.com/gofiber/template/html"
+
+ "github.com/GeertJohan/go.rice"
+)
+
+func main() {
+ engine := html.NewFileSystem(rice.MustFindBox("views").HTTPBox(), ".html")
+
+ app := fiber.New(fiber.Config{
+ Views: engine,
+ })
+
+ log.Fatal(app.Listen(":3000"))
+
+ // run rice embed-go && go build
+}
+
+```
+#### fileb0x
+Read documentation: https://github.com/UnnoTed/fileb0x
+
+```go
+package main
+
+import (
+ "log"
+
+ "github.com/gofiber/fiber/v2"
+ "github.com/gofiber/template/html"
+ // your generated package
+ "github.com///static"
+)
+
+func main() {
+ engine := html.NewFileSystem(static.HTTP, ".html")
+
+ app := fiber.New(fiber.Config{
+ Views: engine,
+ })
+
+ log.Fatal(app.Listen(":3000"))
+
+ // Read the documentation on how to use fileb0x
+}
+```
+
+
+### Benchmarks
+
+#### Simple
+![](.github/data/Simple-TimeperOperation.png)
+
+#### Extended
+![](.github/data/Extended-TimeperOperation.png)
+
+Benchmarks were run on an Apple MacBook M1. Each engine was benchmarked 20 times and the results were averaged into a single xlsx file. Mustache was excluded from the extended benchmark.
diff --git a/vendor/github.com/gofiber/template/html/v2/LICENSE b/vendor/github.com/gofiber/template/html/v2/LICENSE
new file mode 100644
index 0000000..ca80106
--- /dev/null
+++ b/vendor/github.com/gofiber/template/html/v2/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Fiber
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/gofiber/template/html/v2/README.md b/vendor/github.com/gofiber/template/html/v2/README.md
new file mode 100644
index 0000000..e4e65d5
--- /dev/null
+++ b/vendor/github.com/gofiber/template/html/v2/README.md
@@ -0,0 +1,203 @@
+---
+id: html
+title: HTML
+---
+
+![Release](https://img.shields.io/github/v/tag/gofiber/template?filter=html*)
+[![Discord](https://img.shields.io/discord/704680098577514527?style=flat&label=%F0%9F%92%AC%20discord&color=00ACD7)](https://gofiber.io/discord)
+![Test](https://github.com/gofiber/template/workflows/Tests/badge.svg)
+![Security](https://github.com/gofiber/template/workflows/Security/badge.svg)
+![Linter](https://github.com/gofiber/template/workflows/Linter/badge.svg)
+
+HTML is the official Go template engine [html/template](https://golang.org/pkg/html/template/). For the original syntax documentation, see the [templates cheatsheet](TEMPLATES_CHEATSHEET.md).
+
+**Info:**
+
+All templates within the specified view directory are analyzed and compiled at startup to improve render performance.
+Note that no two `define` blocks may share the same name, otherwise one will overwrite the other.
+Use the `{{embed}}` tag to yield a template inside a layout.
+
+### Basic Example
+
+_**./views/index.html**_
+
+```html
+{{template "partials/header" .}}
+
+{{.Title}}
+
+{{template "partials/footer" .}}
+```
+
+_**./views/partials/header.html**_
+
+```html
+Header
+```
+
+_**./views/partials/footer.html**_
+
+```html
+Footer
+```
+
+_**./views/layouts/main.html**_
+
+```html
+
+
+
+ Main
+
+
+
+ {{embed}}
+
+
+```
+
+```go
+package main
+
+import (
+ "log"
+
+ "github.com/gofiber/fiber/v2"
+ "github.com/gofiber/template/html/v2"
+)
+
+func main() {
+ // Create a new engine
+ engine := html.New("./views", ".html")
+
+ // Or from an embedded system
+ // See github.com/gofiber/embed for examples
+ // engine := html.NewFileSystem(http.Dir("./views"), ".html")
+
+ // Pass the engine to the Views
+ app := fiber.New(fiber.Config{
+ Views: engine,
+ })
+
+ app.Get("/", func(c *fiber.Ctx) error {
+ // Render index
+ return c.Render("index", fiber.Map{
+ "Title": "Hello, World!",
+ })
+ })
+
+ app.Get("/layout", func(c *fiber.Ctx) error {
+ // Render index within layouts/main
+ return c.Render("index", fiber.Map{
+ "Title": "Hello, World!",
+ }, "layouts/main")
+ })
+
+ log.Fatal(app.Listen(":3000"))
+}
+
+```
+
+### Example with embed.FS
+
+```go
+package main
+
+import (
+ "log"
+ "net/http"
+ "embed"
+
+ "github.com/gofiber/fiber/v2"
+ "github.com/gofiber/template/html"
+)
+
+//go:embed views/*
+var viewsfs embed.FS
+
+func main() {
+ engine := html.NewFileSystem(http.FS(viewsfs), ".html")
+
+ // Pass the engine to the Views
+ app := fiber.New(fiber.Config{
+ Views: engine,
+ })
+
+
+ app.Get("/", func(c *fiber.Ctx) error {
+ // Render index - start with views directory
+ return c.Render("views/index", fiber.Map{
+ "Title": "Hello, World!",
+ })
+ })
+
+ log.Fatal(app.Listen(":3000"))
+}
+```
+
+and change the starting point to the views directory
+
+_**./views/index.html**_
+
+```html
+{{template "views/partials/header" .}}
+
+{{.Title}}
+
+{{template "views/partials/footer" .}}
+```
+
+### Example with innerHTML
+
+```go
+package main
+
+import (
+ "embed"
+ "html/template"
+ "log"
+ "net/http"
+
+ "github.com/gofiber/fiber/v2"
+ "github.com/gofiber/template/html"
+)
+
+//go:embed views/*
+var viewsfs embed.FS
+
+func main() {
+ engine := html.NewFileSystem(http.FS(viewsfs), ".html")
+ engine.AddFunc(
+ // add unescape function
+ "unescape", func(s string) template.HTML {
+ return template.HTML(s)
+ },
+ )
+
+ // Pass the engine to the Views
+ app := fiber.New(fiber.Config{Views: engine})
+
+ app.Get("/", func(c *fiber.Ctx) error {
+ // Render index
+ return c.Render("views/index", fiber.Map{
+ "Title": "Hello, World !",
+ })
+ })
+
+ log.Fatal(app.Listen(":3000"))
+}
+```
+
+and change the starting point to the views directory
+
+_**./views/index.html**_
+
+```html
+{{ unescape .Title}}
+```
+
+**html output**
+
+```html
+Hello, World !
+```
diff --git a/vendor/github.com/gofiber/template/html/v2/TEMPLATES_CHEATSHEET.md b/vendor/github.com/gofiber/template/html/v2/TEMPLATES_CHEATSHEET.md
new file mode 100644
index 0000000..f5ec54c
--- /dev/null
+++ b/vendor/github.com/gofiber/template/html/v2/TEMPLATES_CHEATSHEET.md
@@ -0,0 +1,582 @@
+# Golang Templates Cheatsheet
+
+The Go standard library provides a set of packages to generate output. The [text/template](https://golang.org/pkg/text/template/) package implements templates for generating text output, while the [html/template](https://golang.org/pkg/html/template/) package implements templates for generating HTML output that is safe against certain attacks. Both packages use the same interface, but the following examples of the core features are directed towards HTML applications.
+
+---
+
+## Table of Contents
+
+- [Parsing and Creating Templates](#parsing-and-creating-templates)
+- [Executing Templates](#executing-templates)
+- [Template Encoding and HTML](#template-encoding-and-html)
+- [Template Variables](#template-variables)
+- [Template Actions](#template-actions)
+- [Template Functions](#template-functions)
+- [Template Comparison Functions](#template-comparison-functions)
+- [Nested Templates and Layouts](#nested-templates-and-layouts)
+- [Templates Calling Functions](#templates-calling-functions)
+
+---
+
+## Parsing and Creating Templates
+
+#### Naming Templates
+
+There is no defined file extension for Go templates. One of the most popular is `.tmpl`, supported by vim-go and [referenced in the text/template godocs](https://golang.org/pkg/text/template/#example_Template_helpers). The extension `.gohtml` supports syntax highlighting in both Atom and GoSublime editors. Finally, analysis of large Go codebases finds that `.tpl` is often used by developers. While the extension is not important, it is still good to be consistent within a project for clarity.
+
+---
+
+#### Creating a Template
+
+`tpl, err := template.ParseFiles(filename)` will parse the template file at filename and store it in tpl. tpl can then be executed to render the template.
+
+---
+
+#### Parsing Multiple Templates
+
+`template.ParseFiles(filenames)` takes a list of filenames and stores all templates. `template.ParseGlob(pattern)` will find all templates matching the pattern and store the templates.
+
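+A minimal sketch (the glob pattern and file names are assumptions):
+
+```go
+    // Parse every template under views/ in one call
+    tpl, err := template.ParseGlob("views/*.gohtml")
+    if err != nil {
+        log.Fatal(err)
+    }
+```
+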
+---
+
+## Executing Templates
+
+#### Execute a Single Template
+
+Once a template has been parsed there are two options to execute them. A single template `tpl` can be executed using `tpl.Execute(io.Writer, data)`. The content of tpl will be written to the io.Writer. Data is an interface passed to the template that will be useable in the template.
+
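+A hedged sketch of both steps together (template name and data are assumptions):
+
+```go
+    tpl := template.Must(template.ParseFiles("hello.gohtml"))
+    // Render the template to stdout, passing "World" as the data
+    if err := tpl.Execute(os.Stdout, "World"); err != nil {
+        log.Fatal(err)
+    }
+```
+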
+---
+
+#### Executing a Named Template
+
+`tpl.ExecuteTemplate(io.Writer, name, data)` works the same as execute but allows for a string name of the template the user wants to execute.
+
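+For example (names are illustrative):
+
+```go
+    tpl := template.Must(template.ParseGlob("views/*.gohtml"))
+    // Execute only the template named "hello.gohtml" from the parsed set
+    if err := tpl.ExecuteTemplate(os.Stdout, "hello.gohtml", nil); err != nil {
+        log.Fatal(err)
+    }
+```
+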
+---
+
+## Template Encoding and HTML
+
+#### Contextual Encoding
+
+Go’s html/template package does encoding based on the context of the code. As a result, html/template encodes any characters that need encoding to be rendered correctly.
+
+For example the < and > in `"<h1>A header!</h1>"` will be encoded as `&lt;h1&gt;A header!&lt;/h1&gt;`.
+
+Type `template.HTML` can be used to skip encoding by telling Go the string is safe. `template.HTML("<h1>A Safe header</h1>")` will then be rendered as `<h1>A Safe header</h1>`. Using this type with user input is dangerous and leaves the application vulnerable.
+
+The go `html/template` package is aware of attributes within the template and will encode values differently based on the attribute.
+
+Go templates can also be used with javascript. Structs and maps will be expanded into JSON objects and quotes will be added to strings for use in function parameters and as variable values.
+
+```go
+ // Go
+ type Cat struct {
+ Name string
+ Age int
+ }
+
+ kitten := Cat{"Sam", 12}
+```
+
+```html
+// Template
+<script>
+    var cat = {{.}};
+</script>
+```
+
+```js
+    // Javascript
+    var cat = {"Name":"Sam","Age":12};
+```
+
+---
+
+#### Safe Strings and HTML Comments
+
+The `html/template` package will remove any comments from a template by default. This can cause issues when comments are necessary such as detecting internet explorer.
+
+```html
+<!--[if IE]>
+    <p>This content is only visible in Internet Explorer.</p>
+<![endif]-->
+```
+
+We can use the Custom Functions method (Globally) to create a function that returns html preserving comments. Define a function `htmlSafe` in the FuncMap of the template.
+
+```go
+ testTemplate, err = template.New("hello.gohtml").Funcs(template.FuncMap{
+ "htmlSafe": func(html string) template.HTML {
+ return template.HTML(html)
+ },
+ }).ParseFiles("hello.gohtml")
+```
+
+This function takes a string and produces the unaltered HTML code. It can be used in a template like so to preserve a comment such as `<!--[if IE]> ... <![endif]-->`:
+
+```go
+    {{htmlSafe "<!--[if IE]> <p>This content is only visible in Internet Explorer.</p> <![endif]-->" }}
+```
+
+---
+
+## Template Variables
+
+#### The dot character (.)
+
+A template variable can be a boolean, string, character, integer, floating-point, imaginary, or complex constant in Go syntax. Data passed to the template can be accessed using dot `{{ . }}`.
+
+If the data is a complex type then its fields can be accessed using the dot with the field name `{{ .FieldName }}`.
+
+Dots can be chained together if the data contains multiple complex structures. `{{ .Struct.StructTwo.Field }}`
+
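+A small sketch of chained access (struct names are assumed):
+
+```go
+    type StructTwo struct{ Field string }
+    type Struct struct{ StructTwo StructTwo }
+    // {{ .Struct.StructTwo.Field }} renders "value" for this data
+    data := struct{ Struct Struct }{Struct{StructTwo{"value"}}}
+```
+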
+---
+
+#### Variables in Templates
+
+Data passed to the template can be saved in a variable and used throughout the template. `{{$number := .}}` We use the `$number` to create a variable then initialize it with the value passed to the template. To use the variable we call it in the template with `{{$number}}`.
+
+```go
+ {{$number := .}}
+ It is day number {{$number}} of the month
+```
+
+```go
+ var tpl *template.Template
+
+ tpl = template.Must(template.ParseFiles("templateName"))
+
+ err := tpl.ExecuteTemplate(os.Stdout, "templateName", 23)
+```
+
+In this example we pass 23 to the template, where it is stored in the `$number` variable and can be used anywhere in the template.
+
+---
+
+## Template Actions
+
+#### If/Else Statements
+
+Go templates support if/else statements like many programming languages. We can use the if statement to check for values, if it doesn’t exist we can use an else value. The empty values are false, 0, any nil pointer or interface value, and any array, slice, map, or string of length zero.
+
+```html
+Hello, {{if .Name}} {{.Name}} {{else}} Anonymous {{end}}!
+```
+
+If .Name exists then `Hello, Name` will be printed (replaced with the name value); otherwise it will print `Hello, Anonymous`.
+
+Templates also provide the else if statement `{{else if .Name2 }}`, which can be used to evaluate other options after an if.
+
+---
+
+#### Removing Whitespace
+
+Adding different values to a template can add various amounts of whitespace. We can either change our template to better handle it, by ignoring or minimizing effects, or we can use the minus sign `-` within our template.
+
+`Hello, {{if .Name}} {{.Name}} {{- else}} Anonymous {{- end}}! `
+
+Here we are telling the template to remove all spaces between the `Name` variable and whatever comes after it. We are doing the same with the end keyword. This allows us to have whitespace within the template for easier reading but remove it in production.
+
+---
+
+#### Range Blocks
+
+Go templates have a `range` keyword to iterate over all objects in a structure. Suppose we had the Go structures:
+
+```go
+ type Item struct {
+ Name string
+ Price int
+ }
+
+ type ViewData struct {
+ Name string
+ Items []Item
+ }
+```
+
+We have an Item, with a name and price, then a ViewData which is the structure sent to the template. Consider the template containing the following:
+
+```html
+{{range .Items}}
+    <div class="item">
+        <h3>{{.Name}}</h3>
+        <span>${{.Price}}</span>
+    </div>
+{{end}}
+```
+
+For each Item in the range of Items (in the ViewData structure) get the Name and Price of that item and create html for each Item automatically. Within a range each Item becomes the `{{.}}` and the item properties therefore become `{{.Name}}` or `{{.Price}}` in this example.
+
+---
+
+## Template Functions
+
+The template package provides a list of predefined global functions. Below are some of the most used.
+
+---
+
+#### Indexing structures in Templates
+
+If the data passed to the template is a map, slice, or array it can be indexed from the template. We use `{{index x number}}` where index is the keyword, x is the data and number is a integer for the index value. If we had `{{index names 2}}` it is equivalent to `names[2]`. We can add more integers to index deeper into data. `{{index names 2 3 4}}` is equivalent to `names[2][3][4]`.
+
+```html
+    <p>{{index .FavNums 2 }}</p>
+```
+
+```go
+ type person struct {
+ Name string
+ FavNums []int
+ }
+
+ func main() {
+
+ tpl := template.Must(template.ParseGlob("*.gohtml"))
+ tpl.Execute(os.Stdout, &person{"Curtis", []int{7, 11, 94}})
+ }
+```
+
+This code example passes a person structure and gets the 3rd favourite number from the FavNums slice.
+
+---
+
+#### The `and` Function
+
+The and function returns the boolean AND of its arguments by returning the first empty argument or the last argument. `and x y` behaves logically as `if x then y else x`. Consider the following Go code:
+
+```go
+ type User struct {
+ Admin bool
+ }
+
+ type ViewData struct {
+ *User
+ }
+```
+
+Pass a ViewData with a User that has Admin set true to the following template
+
+```go
+
+ {{if and .User .User.Admin}}
+ You are an admin user!
+ {{else}}
+ Access denied!
+ {{end}}
+```
+
+The result will be `You are an admin user!`. However, if the ViewData did not include a `*User` object or Admin was set to false, the result will be `Access denied!`.
+
+---
+
+#### The `or` Function
+
+The or function operates similarly to the and function however will stop at the first true. `or x y` is equivalent to `if x then x else y` so y will never be evaluated if x is not empty.
+
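+For instance (field names are assumed):
+
+```go
+    {{/* Prints .Nickname when it is set, otherwise falls back to .Name */}}
+    Hello, {{ or .Nickname .Name }}!
+```
+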
+---
+
+#### The `not` Function
+
+The not function returns the boolean negation of the argument.
+
+```go
+ {{ if not .Authenticated}}
+ Access Denied!
+ {{ end }}
+```
+
+---
+
+## Template Comparison Functions
+
+#### Comparisons
+
+The `html/template` package provides a variety of functions to do comparisons between operators. The operators may only be basic types or named basic types such as `type Temp float32`. Remember that template functions take the form `{{ function arg1 arg2 }}`.
+
+- `eq` Returns the result of arg1 == arg2
+- `ne` Returns the result of arg1 != arg2
+- `lt` Returns the result of arg1 < arg2
+- `le` Returns the result of arg1 <= arg2
+- `gt` Returns the result of arg1 > arg2
+- `ge` Returns the result of arg1 >= arg2
+
+Of special note `eq` can be used with two or more arguments by comparing all arguments to the first. `{{ eq arg1 arg2 arg3 arg4}}` will result in the following logical expression:
+
+`arg1==arg2 || arg1==arg3 || arg1==arg4`
+
+---
+
+## Nested Templates and Layouts
+
+#### Nesting Templates
+
+Nested templates can be used for parts of code frequently used across templates, a footer or header for example. Rather than updating each template separately we can use a nested template that all other templates can use. You can define a template as follows:
+
+```go
+ {{define "footer"}}
+
+ {{end}}
+```
+
+A template named “footer” is defined which can be used in other templates like so to add the footer template content into the other template:
+
+```go
+ {{template "footer"}}
+```
+
+---
+
+#### Passing Variables between Templates
+
+The `template` action used to include nested templates also allows a second parameter to pass data to the nested template.
+
+```html
+// Define a nested template called header
+{{define "header"}}
+<h1>{{.}}</h1>
+{{end}}
+
+// Call template and pass a name parameter
+{{range .Items}}
+    <div class="item">
+        {{template "header" .Name}}
+        <span>${{.Price}}</span>
+    </div>
+{{end}}
+```
+
+We use the same range to loop through Items as before but we pass the name to the header template each time in this simple example.
+
+---
+
+#### Creating Layouts
+
+Glob patterns specify sets of filenames with wildcard characters. The `template.ParseGlob(pattern string)` function will parse all templates that match the string pattern. `template.ParseFiles(files...)` can also be used with a list of file names.
+
+The templates are named by default based on the base names of the argument files. This means `views/layouts/hello.gohtml` will have the name `hello.gohtml`. If the template has a `{{define "templateName"}}` within it then that name will be usable.
+
+A specific template can be executed using `t.ExecuteTemplate(w, "templateName", nil)`, where `t` is an object of type Template and `w` is an io.Writer such as an `http.ResponseWriter`; then comes the name of the template to execute, and finally any data to pass to the template, in this case a nil value.
+
+Example main.go file
+
+```go
+ // Omitted imports & package
+
+ var LayoutDir string = "views/layouts"
+ var bootstrap *template.Template
+
+ func main() {
+ var err error
+ bootstrap, err = template.ParseGlob(LayoutDir + "/*.gohtml")
+ if err != nil {
+ panic(err)
+ }
+
+ http.HandleFunc("/", handler)
+ http.ListenAndServe(":8080", nil)
+ }
+
+ func handler(w http.ResponseWriter, r *http.Request) {
+ bootstrap.ExecuteTemplate(w, "bootstrap", nil)
+ }
+```
+
+All `.gohtml` files are parsed in main. When route `/` is reached the template defined as `bootstrap` is executed using the handler function.
+
+Example views/layouts/bootstrap.gohtml file
+
+```html
+ {{define "bootstrap"}}
+
+
+
+ Go Templates
+
+
+
+
+
Filler header
+
Filler paragraph
+
+
+
+
+
+ {{end}}
+```
+
+## Templates Calling Functions
+
+#### Function Variables (calling struct methods)
+
+We can use templates to call the methods of objects in the template to return data. Consider the User struct with the following method.
+
+```go
+ type User struct {
+ ID int
+ Email string
+ }
+
+ func (u User) HasPermission(feature string) bool {
+ if feature == "feature-a" {
+ return true
+ } else {
+ return false
+ }
+ }
+```
+
+When a type User has been passed to the template we can then call this method from the template.
+
+```html
+{{if .User.HasPermission "feature-a"}}
+    <div class="feature">
+        <h3>Feature A</h3>
+        <p>Some other stuff here...</p>
+    </div>
+{{else}}
+    <div class="feature">
+        <h3>Feature A</h3>
+        <p>To enable Feature A please upgrade your plan</p>
+    </div>
+{{end}}
+```
+
+The template checks if the User HasPermission for the feature and renders depending on the result.
+
+---
+
+#### Function Variables (call)
+
+If the method HasPermission has to change at times, the Function Variables (Methods) implementation may not fit the design. Instead, a `HasPermission func(string) bool` field can be added to the `User` type, which can then have a function assigned to it at creation.
+
+```go
+ // Structs
+ type ViewData struct {
+ User User
+ }
+
+ type User struct {
+ ID int
+ Email string
+ HasPermission func(string) bool
+ }
+
+ // Example of creating a ViewData
+ vd := ViewData{
+ User: User{
+ ID: 1,
+ Email: "curtis.vermeeren@gmail.com",
+ // Create the HasPermission function
+ HasPermission: func(feature string) bool {
+ if feature == "feature-b" {
+ return true
+ }
+ return false
+ },
+ },
+ }
+
+ // Executing the ViewData with the template
+ err := testTemplate.Execute(w, vd)
+```
+
+We need to tell the Go template that we want to call this function so we must change the template from the Function Variables (Methods) implementation to do this. We use the `call` keyword supplied by the go `html/template` package. Changing the previous template to use `call` results in:
+
+```html
+{{if (call .User.HasPermission "feature-b")}}
+    <div class="feature">
+        <h3>Feature B</h3>
+        <p>Some other stuff here...</p>
+    </div>
+{{else}}
+    <div class="feature">
+        <h3>Feature B</h3>
+        <p>To enable Feature B please upgrade your plan</p>
+    </div>
+{{end}}
+```
+
+---
+
+#### Custom Functions
+
+Another way to call functions is to create custom functions with `template.FuncMap`. This method creates global functions that can be used throughout the entire application. FuncMap has type `map[string]interface{}`, mapping a string (the function name) to a function. The mapped functions must have either a single return value, or two return values where the second has type error.
+
+```go
+ // Creating a template with function hasPermission
+ testTemplate, err = template.New("hello.gohtml").Funcs(template.FuncMap{
+ "hasPermission": func(user User, feature string) bool {
+ if user.ID == 1 && feature == "feature-a" {
+ return true
+ }
+ return false
+ },
+ }).ParseFiles("hello.gohtml")
+```
+
+Here the function to check if a user has permission for a feature is mapped to the string `"hasPermission"` and stored in the FuncMap. Note that the custom functions must be created before calling `ParseFiles()`.
+
+The function could be executed in the template as follows:
+
+```go
+ {{ if hasPermission .User "feature-a" }}
+```
+
+The `.User` and string `"feature-a"` are both passed to `hasPermission` as arguments.
+
+---
+
+#### Custom Functions (Globally)
+
+The previous two methods of custom functions rely on `.User` being passed to the template. This works in many cases but in a large application passing too many objects to a template can become difficult to maintain across many templates. We can change the implementation of the custom function to work without the .User being passed.
+
+Using a feature example similar to the other two sections, first you would have to create a default `hasPermission` function and define it in the template's function map.
+
+```go
+ testTemplate, err = template.New("hello.gohtml").Funcs(template.FuncMap{
+ "hasPermission": func(feature string) bool {
+ return false
+ },
+ }).ParseFiles("hello.gohtml")
+```
+
+This function could be placed in `main()` or somewhere that ensures the default `hasPermission` is created in the `hello.gohtml` function map. The default function just returns false, but it defines a signature and implementation that doesn't require `User`.
+
+Next a closure could be used to redefine the `hasPermission` function. It would use the `User` data available when it is created in a handler rather than having `User` data passed to it. Within the handler for the template you can redefine any functions to use the information available.
+
+```go
+ func handler(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "text/html")
+
+ user := User{
+ ID: 1,
+ Email: "Curtis.vermeeren@gmail.com",
+ }
+ vd := ViewData{}
+ err := testTemplate.Funcs(template.FuncMap{
+ "hasPermission": func(feature string) bool {
+ if user.ID == 1 && feature == "feature-a" {
+ return true
+ }
+ return false
+ },
+ }).Execute(w, vd)
+ if err != nil {
+ http.Error(w, err.Error(), http.StatusInternalServerError)
+ }
+ }
+```
+
+In this handler a `User` is created with ID and Email, Then a `ViewData` is created without passing the user to it. The `hasPermission` function is redefined using `user.ID` which is available when the function is created. `{{if hasPermission "feature-a"}}` can be used in a template without having to pass a `User` to the template as the User object in the handler is used instead.
+
+---
diff --git a/vendor/github.com/gofiber/template/html/v2/html.go b/vendor/github.com/gofiber/template/html/v2/html.go
new file mode 100644
index 0000000..097414a
--- /dev/null
+++ b/vendor/github.com/gofiber/template/html/v2/html.go
@@ -0,0 +1,156 @@
+package html
+
+import (
+ "fmt"
+ "html/template"
+ "io"
+ "log"
+ "net/http"
+ "os"
+ "path/filepath"
+ "strings"
+
+ core "github.com/gofiber/template"
+ "github.com/gofiber/utils"
+)
+
+// Engine struct
+type Engine struct {
+ core.Engine
+ // templates
+ Templates *template.Template
+}
+
+// New returns an HTML render engine for Fiber
+func New(directory, extension string) *Engine {
+ engine := &Engine{
+ Engine: core.Engine{
+ Left: "{{",
+ Right: "}}",
+ Directory: directory,
+ Extension: extension,
+ LayoutName: "embed",
+ Funcmap: make(map[string]interface{}),
+ },
+ }
+ engine.AddFunc(engine.LayoutName, func() error {
+ return fmt.Errorf("layoutName called unexpectedly")
+ })
+ return engine
+}
+
+// NewFileSystem returns an HTML render engine for Fiber with file system
+func NewFileSystem(fs http.FileSystem, extension string) *Engine {
+ engine := &Engine{
+ Engine: core.Engine{
+ Left: "{{",
+ Right: "}}",
+ Directory: "/",
+ FileSystem: fs,
+ Extension: extension,
+ LayoutName: "embed",
+ Funcmap: make(map[string]interface{}),
+ },
+ }
+ engine.AddFunc(engine.LayoutName, func() error {
+ return fmt.Errorf("layoutName called unexpectedly")
+ })
+ return engine
+}
+
+// Load parses the templates to the engine.
+func (e *Engine) Load() error {
+ if e.Loaded {
+ return nil
+ }
+ // race safe
+ e.Mutex.Lock()
+ defer e.Mutex.Unlock()
+ e.Templates = template.New(e.Directory)
+
+ // Set template settings
+ e.Templates.Delims(e.Left, e.Right)
+ e.Templates.Funcs(e.Funcmap)
+
+ walkFn := func(path string, info os.FileInfo, err error) error {
+ // Return error if exist
+ if err != nil {
+ return err
+ }
+ // Skip file if it's a directory or has no file info
+ if info == nil || info.IsDir() {
+ return nil
+ }
+ // Skip file if it does not equal the given template Extension
+ if len(e.Extension) >= len(path) || path[len(path)-len(e.Extension):] != e.Extension {
+ return nil
+ }
+ // Get the relative file path
+ // ./views/html/index.tmpl -> index.tmpl
+ rel, err := filepath.Rel(e.Directory, path)
+ if err != nil {
+ return err
+ }
+ // Reverse slashes '\' -> '/' and
+ // partials\footer.tmpl -> partials/footer.tmpl
+ name := filepath.ToSlash(rel)
+ // Remove ext from name 'index.tmpl' -> 'index'
+ name = strings.TrimSuffix(name, e.Extension)
+ // name = strings.Replace(name, e.Extension, "", -1)
+ // Read the file
+ // #gosec G304
+ buf, err := utils.ReadFile(path, e.FileSystem)
+ if err != nil {
+ return err
+ }
+ // Create new template associated with the current one
+ // This enables us to invoke other templates via {{ template .. }}
+ _, err = e.Templates.New(name).Parse(string(buf))
+ if err != nil {
+ return err
+ }
+ // Debugging
+ if e.Verbose {
+ log.Printf("views: parsed template: %s\n", name)
+ }
+ return err
+ }
+ // notify engine that we parsed all templates
+ e.Loaded = true
+ if e.FileSystem != nil {
+ return utils.Walk(e.FileSystem, e.Directory, walkFn)
+ }
+ return filepath.Walk(e.Directory, walkFn)
+}
+
+// Render will execute the template name along with the given values.
+func (e *Engine) Render(out io.Writer, name string, binding interface{}, layout ...string) error {
+ if !e.Loaded || e.ShouldReload {
+ if e.ShouldReload {
+ e.Loaded = false
+ }
+ if err := e.Load(); err != nil {
+ return err
+ }
+ }
+
+ tmpl := e.Templates.Lookup(name)
+ if tmpl == nil {
+ return fmt.Errorf("render: template %s does not exist", name)
+ }
+ if len(layout) > 0 && layout[0] != "" {
+ lay := e.Templates.Lookup(layout[0])
+ if lay == nil {
+ return fmt.Errorf("render: LayoutName %s does not exist", layout[0])
+ }
+ e.Mutex.Lock()
+ defer e.Mutex.Unlock()
+ lay.Funcs(map[string]interface{}{
+ e.LayoutName: func() error {
+ return tmpl.Execute(out, binding)
+ },
+ })
+ return lay.Execute(out, binding)
+ }
+ return tmpl.Execute(out, binding)
+}
diff --git a/vendor/github.com/gofiber/template/template.go b/vendor/github.com/gofiber/template/template.go
new file mode 100644
index 0000000..b9865aa
--- /dev/null
+++ b/vendor/github.com/gofiber/template/template.go
@@ -0,0 +1,104 @@
+package template
+
+import (
+ "io"
+ "net/http"
+ "sync"
+)
+
+// IEngine interface, to be implemented for any templating engine added to the repository
+type IEngine interface {
+ IEngineCore
+ Load() error
+ Render(out io.Writer, template string, binding interface{}, layout ...string) error
+}
+
+// IEngineCore interface
+type IEngineCore interface {
+ AddFunc(name string, fn interface{}) IEngineCore
+ AddFuncMap(m map[string]interface{}) IEngineCore
+ Debug(enabled bool) IEngineCore
+ Delims(left, right string) IEngineCore
+ FuncMap() map[string]interface{}
+ Layout(key string) IEngineCore
+ Reload(enabled bool) IEngineCore
+}
+
+// Engine engine struct
+type Engine struct {
+ IEngineCore
+ // delimiters
+ Left string
+ Right string
+ // views folder
+ Directory string
+ // http.FileSystem supports embedded files
+ FileSystem http.FileSystem
+ // views extension
+ Extension string
+ // layout variable name that encapsulates the template
+ LayoutName string
+ // determines if the engine parsed all templates
+ Loaded bool
+ // reload on each render
+ ShouldReload bool
+ // debug prints the parsed templates
+ Verbose bool
+ // lock for funcmap and templates
+ Mutex sync.RWMutex
+ // template funcmap
+ Funcmap map[string]interface{}
+}
+
+// AddFunc adds the function to the template's function map.
+// It is legal to overwrite elements of the default actions
+func (e *Engine) AddFunc(name string, fn interface{}) *Engine {
+ e.Mutex.Lock()
+ e.Funcmap[name] = fn
+ e.Mutex.Unlock()
+ return e
+}
+
+// AddFuncMap adds the functions from a map to the template's function map.
+// It is legal to overwrite elements of the default actions
+func (e *Engine) AddFuncMap(m map[string]interface{}) *Engine {
+ e.Mutex.Lock()
+ for name, fn := range m {
+ e.Funcmap[name] = fn
+ }
+ e.Mutex.Unlock()
+ return e
+}
+
+// Debug will print the parsed templates when Load is triggered.
+func (e *Engine) Debug(enabled bool) *Engine {
+ e.Verbose = enabled
+ return e
+}
+
+// Delims sets the action delimiters to the specified strings, to be used in
+// templates. An empty delimiter stands for the
+// corresponding default: "{{" and "}}".
+func (e *Engine) Delims(left, right string) *Engine {
+ e.Left, e.Right = left, right
+ return e
+}
+
+// FuncMap returns the template's function map.
+func (e *Engine) FuncMap() map[string]interface{} {
+ return e.Funcmap
+}
+
+// Layout defines the variable name that will encapsulate the template
+func (e *Engine) Layout(key string) *Engine {
+ e.LayoutName = key
+ return e
+}
+
+// Reload, if set to true, reloads the templates on each render. Use it in
+// development so you don't have to restart the application when you edit
+// a template file.
+func (e *Engine) Reload(enabled bool) *Engine {
+ e.ShouldReload = enabled
+ return e
+}
diff --git a/vendor/github.com/gofiber/utils/LICENSE b/vendor/github.com/gofiber/utils/LICENSE
new file mode 100644
index 0000000..ca80106
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Fiber
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/gofiber/utils/README.md b/vendor/github.com/gofiber/utils/README.md
new file mode 100644
index 0000000..c4d222b
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/README.md
@@ -0,0 +1,87 @@
+A collection of common functions with better performance, fewer allocations, and no dependencies, created for [Fiber](https://github.com/gofiber/fiber).
+
+```go
+// go test -benchmem -run=^$ -bench=Benchmark_ -count=2
+
+Benchmark_ToLowerBytes/fiber-16 42847654 25.7 ns/op 0 B/op 0 allocs/op
+Benchmark_ToLowerBytes/fiber-16 46143196 25.7 ns/op 0 B/op 0 allocs/op
+Benchmark_ToLowerBytes/default-16 17387322 67.4 ns/op 48 B/op 1 allocs/op
+Benchmark_ToLowerBytes/default-16 17906491 67.4 ns/op 48 B/op 1 allocs/op
+
+Benchmark_ToUpperBytes/fiber-16 46143729 25.7 ns/op 0 B/op 0 allocs/op
+Benchmark_ToUpperBytes/fiber-16 47989250 25.6 ns/op 0 B/op 0 allocs/op
+Benchmark_ToUpperBytes/default-16 15580854 76.7 ns/op 48 B/op 1 allocs/op
+Benchmark_ToUpperBytes/default-16 15381202 76.9 ns/op 48 B/op 1 allocs/op
+
+Benchmark_TrimRightBytes/fiber-16 70572459 16.3 ns/op 8 B/op 1 allocs/op
+Benchmark_TrimRightBytes/fiber-16 74983597 16.3 ns/op 8 B/op 1 allocs/op
+Benchmark_TrimRightBytes/default-16 16212578 74.1 ns/op 40 B/op 2 allocs/op
+Benchmark_TrimRightBytes/default-16 16434686 74.1 ns/op 40 B/op 2 allocs/op
+
+Benchmark_TrimLeftBytes/fiber-16 74983128 16.3 ns/op 8 B/op 1 allocs/op
+Benchmark_TrimLeftBytes/fiber-16 74985002 16.3 ns/op 8 B/op 1 allocs/op
+Benchmark_TrimLeftBytes/default-16 21047868 56.5 ns/op 40 B/op 2 allocs/op
+Benchmark_TrimLeftBytes/default-16 21048015 56.5 ns/op 40 B/op 2 allocs/op
+
+Benchmark_TrimBytes/fiber-16 54533307 21.9 ns/op 16 B/op 1 allocs/op
+Benchmark_TrimBytes/fiber-16 54532812 21.9 ns/op 16 B/op 1 allocs/op
+Benchmark_TrimBytes/default-16 14282517 84.6 ns/op 48 B/op 2 allocs/op
+Benchmark_TrimBytes/default-16 14114508 84.7 ns/op 48 B/op 2 allocs/op
+
+Benchmark_EqualFolds/fiber-16 36355153 32.6 ns/op 0 B/op 0 allocs/op
+Benchmark_EqualFolds/fiber-16 36355593 32.6 ns/op 0 B/op 0 allocs/op
+Benchmark_EqualFolds/default-16 15186220 78.1 ns/op 0 B/op 0 allocs/op
+Benchmark_EqualFolds/default-16 15186412 78.3 ns/op 0 B/op 0 allocs/op
+
+Benchmark_UUID/fiber-16 23994625 49.8 ns/op 48 B/op 1 allocs/op
+Benchmark_UUID/fiber-16 23994768 50.1 ns/op 48 B/op 1 allocs/op
+Benchmark_UUID/default-16 3233772 371 ns/op 208 B/op 6 allocs/op
+Benchmark_UUID/default-16 3251295 370 ns/op 208 B/op 6 allocs/op
+
+Benchmark_GetString/unsafe-16 1000000000 0.709 ns/op 0 B/op 0 allocs/op
+Benchmark_GetString/unsafe-16 1000000000 0.713 ns/op 0 B/op 0 allocs/op
+Benchmark_GetString/default-16 59986202 19.0 ns/op 16 B/op 1 allocs/op
+Benchmark_GetString/default-16 63142939 19.0 ns/op 16 B/op 1 allocs/op
+
+Benchmark_GetBytes/unsafe-16 508360195 2.36 ns/op 0 B/op 0 allocs/op
+Benchmark_GetBytes/unsafe-16 508359979 2.35 ns/op 0 B/op 0 allocs/op
+Benchmark_GetBytes/default-16 46143019 25.7 ns/op 16 B/op 1 allocs/op
+Benchmark_GetBytes/default-16 44434734 25.6 ns/op 16 B/op 1 allocs/op
+
+Benchmark_GetMIME/fiber-16 21423750 56.3 ns/op 0 B/op 0 allocs/op
+Benchmark_GetMIME/fiber-16 21423559 55.4 ns/op 0 B/op 0 allocs/op
+Benchmark_GetMIME/default-16 6735282 173 ns/op 0 B/op 0 allocs/op
+Benchmark_GetMIME/default-16 6895002 172 ns/op 0 B/op 0 allocs/op
+
+Benchmark_StatusMessage/fiber-16 1000000000 0.766 ns/op 0 B/op 0 allocs/op
+Benchmark_StatusMessage/fiber-16 1000000000 0.767 ns/op 0 B/op 0 allocs/op
+Benchmark_StatusMessage/default-16 159538528 7.50 ns/op 0 B/op 0 allocs/op
+Benchmark_StatusMessage/default-16 159750830 7.51 ns/op 0 B/op 0 allocs/op
+
+Benchmark_ToUpper/fiber-16 22217408 53.3 ns/op 48 B/op 1 allocs/op
+Benchmark_ToUpper/fiber-16 22636554 53.2 ns/op 48 B/op 1 allocs/op
+Benchmark_ToUpper/default-16 11108600 108 ns/op 48 B/op 1 allocs/op
+Benchmark_ToUpper/default-16 11108580 108 ns/op 48 B/op 1 allocs/op
+
+Benchmark_ToLower/fiber-16 23994720 49.8 ns/op 48 B/op 1 allocs/op
+Benchmark_ToLower/fiber-16 23994768 50.1 ns/op 48 B/op 1 allocs/op
+Benchmark_ToLower/default-16 10808376 110 ns/op 48 B/op 1 allocs/op
+Benchmark_ToLower/default-16 10617034 110 ns/op 48 B/op 1 allocs/op
+
+Benchmark_TrimRight/fiber-16 413699521 2.94 ns/op 0 B/op 0 allocs/op
+Benchmark_TrimRight/fiber-16 415131687 2.91 ns/op 0 B/op 0 allocs/op
+Benchmark_TrimRight/default-16 23994577 49.1 ns/op 32 B/op 1 allocs/op
+Benchmark_TrimRight/default-16 24484249 49.4 ns/op 32 B/op 1 allocs/op
+
+Benchmark_TrimLeft/fiber-16 379661170 3.13 ns/op 0 B/op 0 allocs/op
+Benchmark_TrimLeft/fiber-16 382079941 3.16 ns/op 0 B/op 0 allocs/op
+Benchmark_TrimLeft/default-16 27900877 41.9 ns/op 32 B/op 1 allocs/op
+Benchmark_TrimLeft/default-16 28564898 42.0 ns/op 32 B/op 1 allocs/op
+
+Benchmark_Trim/fiber-16 236632856 4.96 ns/op 0 B/op 0 allocs/op
+Benchmark_Trim/fiber-16 237570085 4.93 ns/op 0 B/op 0 allocs/op
+Benchmark_Trim/default-16 18457221 66.0 ns/op 32 B/op 1 allocs/op
+Benchmark_Trim/default-16 18177328 65.9 ns/op 32 B/op 1 allocs/op
+Benchmark_Trim/default.trimspace-16 188933770 6.33 ns/op 0 B/op 0 allocs/op
+Benchmark_Trim/default.trimspace-16 184007649 6.42 ns/op 0 B/op 0 allocs/op
+```
diff --git a/vendor/github.com/gofiber/utils/assertions.go b/vendor/github.com/gofiber/utils/assertions.go
new file mode 100644
index 0000000..a107a46
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/assertions.go
@@ -0,0 +1,62 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+import (
+ "bytes"
+ "fmt"
+ "log"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "testing"
+ "text/tabwriter"
+)
+
+// AssertEqual checks whether the values are deeply equal and fails the test
+// (or logs fatally when t is nil) if they are not
+func AssertEqual(t testing.TB, expected interface{}, actual interface{}, description ...string) {
+ if reflect.DeepEqual(expected, actual) {
+ return
+ }
+ var aType = ""
+ var bType = ""
+ if reflect.ValueOf(expected).IsValid() {
+ aType = reflect.TypeOf(expected).Name()
+ }
+ if reflect.ValueOf(actual).IsValid() {
+ bType = reflect.TypeOf(actual).Name()
+ }
+
+ testName := "AssertEqual"
+ if t != nil {
+ testName = t.Name()
+ }
+
+ _, file, line, _ := runtime.Caller(1)
+
+ var buf bytes.Buffer
+ w := tabwriter.NewWriter(&buf, 0, 0, 5, ' ', 0)
+ fmt.Fprintf(w, "\nTest:\t%s", testName)
+ fmt.Fprintf(w, "\nTrace:\t%s:%d", filepath.Base(file), line)
+ fmt.Fprintf(w, "\nError:\tNot equal")
+ fmt.Fprintf(w, "\nExpect:\t%v\t[%s]", expected, aType)
+ fmt.Fprintf(w, "\nResult:\t%v\t[%s]", actual, bType)
+
+ if len(description) > 0 {
+ fmt.Fprintf(w, "\nDescription:\t%s", description[0])
+ }
+
+ result := ""
+ if err := w.Flush(); err != nil {
+ result = err.Error()
+ } else {
+ result = buf.String()
+ }
+ if t != nil {
+ t.Fatal(result)
+ } else {
+ log.Fatal(result)
+ }
+}
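+
+// A hedged usage sketch inside a test; sum is a hypothetical helper used
+// only for illustration:
+//
+//	func TestSum(t *testing.T) {
+//		AssertEqual(t, 4, sum(2, 2), "sum of 2 and 2")
+//	}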
diff --git a/vendor/github.com/gofiber/utils/bytes.go b/vendor/github.com/gofiber/utils/bytes.go
new file mode 100644
index 0000000..302d99a
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/bytes.go
@@ -0,0 +1,78 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+// ToLowerBytes is the equivalent of bytes.ToLower, except it converts the
+// slice in place and returns it without allocating
+func ToLowerBytes(b []byte) []byte {
+ for i := 0; i < len(b); i++ {
+ b[i] = toLowerTable[b[i]]
+ }
+ return b
+}
+
+// ToUpperBytes is the equivalent of bytes.ToUpper, except it converts the
+// slice in place and returns it without allocating
+func ToUpperBytes(b []byte) []byte {
+ for i := 0; i < len(b); i++ {
+ b[i] = toUpperTable[b[i]]
+ }
+ return b
+}
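+
+// A hedged sketch of the in-place behavior noted above: the input slice is
+// modified, so pass a copy when the original bytes must survive.
+//
+//	b := []byte("Fiber")
+//	lower := ToLowerBytes(b) // b is now "fiber"; lower aliases the same array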
+
+// TrimRightBytes is the equivalent of bytes.TrimRight
+func TrimRightBytes(b []byte, cutset byte) []byte {
+ lenStr := len(b)
+ for lenStr > 0 && b[lenStr-1] == cutset {
+ lenStr--
+ }
+ return b[:lenStr]
+}
+
+// TrimLeftBytes is the equivalent of bytes.TrimLeft
+func TrimLeftBytes(b []byte, cutset byte) []byte {
+ lenStr, start := len(b), 0
+ for start < lenStr && b[start] == cutset {
+ start++
+ }
+ return b[start:]
+}
+
+// TrimBytes is the equivalent of bytes.Trim
+func TrimBytes(b []byte, cutset byte) []byte {
+ i, j := 0, len(b)-1
+ for ; i < j; i++ {
+ if b[i] != cutset {
+ break
+ }
+ }
+ for ; i < j; j-- {
+ if b[j] != cutset {
+ break
+ }
+ }
+
+ return b[i : j+1]
+}
+
+// EqualsFold is the equivalent of bytes.EqualFold for ASCII input
+func EqualsFold(b, s []byte) (equals bool) {
+ n := len(b)
+ equals = n == len(s)
+ if equals {
+ for i := 0; i < n; i++ {
+ if equals = b[i]|0x20 == s[i]|0x20; !equals {
+ break
+ }
+ }
+ }
+ return
+}
+
+// DefaultBytes returns the provided fallback value if []byte is empty
+func DefaultBytes(value []byte, defaultValue []byte) []byte {
+ if len(value) <= 0 {
+ return defaultValue
+ }
+ return value
+}
diff --git a/vendor/github.com/gofiber/utils/common.go b/vendor/github.com/gofiber/utils/common.go
new file mode 100644
index 0000000..323cc2b
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/common.go
@@ -0,0 +1,83 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+import (
+ "crypto/rand"
+ "encoding/binary"
+ "encoding/hex"
+ "os"
+ "reflect"
+ "runtime"
+ "sync"
+ "sync/atomic"
+)
+
+const toLowerTable = "\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@abcdefghijklmnopqrstuvwxyz[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+const toUpperTable = "\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~\u007f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+
+// Copyright © 2014, Roger Peppe
+// github.com/rogpeppe/fastuuid
+// All rights reserved.
+
+var uuidSeed [24]byte
+var uuidCounter uint64
+var uuidSetup sync.Once
+
+// UUID generates a universally unique identifier (UUID)
+func UUID() string {
+ // Setup seed & counter once
+ uuidSetup.Do(func() {
+ if _, err := rand.Read(uuidSeed[:]); err != nil {
+ return
+ }
+ uuidCounter = binary.LittleEndian.Uint64(uuidSeed[:8])
+ })
+ if atomic.LoadUint64(&uuidCounter) <= 0 {
+ return "00000000-0000-0000-0000-000000000000"
+ }
+ // first 8 bytes differ, taking a slice of the first 16 bytes
+ x := atomic.AddUint64(&uuidCounter, 1)
+ uuid := uuidSeed
+ binary.LittleEndian.PutUint64(uuid[:8], x)
+ uuid[6], uuid[9] = uuid[9], uuid[6]
+
+ // RFC4122 v4
+ uuid[6] = (uuid[6] & 0x0f) | 0x40
+ uuid[8] = uuid[8]&0x3f | 0x80
+
+ // create UUID representation of the first 128 bits
+ b := make([]byte, 36)
+ hex.Encode(b[0:8], uuid[0:4])
+ b[8] = '-'
+ hex.Encode(b[9:13], uuid[4:6])
+ b[13] = '-'
+ hex.Encode(b[14:18], uuid[6:8])
+ b[18] = '-'
+ hex.Encode(b[19:23], uuid[8:10])
+ b[23] = '-'
+ hex.Encode(b[24:], uuid[10:16])
+
+ return GetString(b)
+}
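+
+// For illustration, the returned string has the standard 36-character form,
+// with the version 4 and RFC 4122 variant bits set by the code above:
+//
+//	id := UUID() // "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx"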
+
+// FunctionName returns the name of the given function
+func FunctionName(fn interface{}) string {
+ t := reflect.ValueOf(fn).Type()
+ if t.Kind() == reflect.Func {
+ return runtime.FuncForPC(reflect.ValueOf(fn).Pointer()).Name()
+ }
+ return t.String()
+}
+
+// GetArgument checks whether the given argument is present in os.Args
+func GetArgument(arg string) bool {
+ for i := range os.Args[1:] {
+ if os.Args[1:][i] == arg {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/gofiber/utils/convert.go b/vendor/github.com/gofiber/utils/convert.go
new file mode 100644
index 0000000..ed5b84d
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/convert.go
@@ -0,0 +1,104 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+import (
+ "reflect"
+ "strconv"
+ "strings"
+ "unsafe"
+)
+
+// #nosec G103
+// UnsafeString returns a string value for the byte slice without allocation
+func UnsafeString(b []byte) string {
+ return *(*string)(unsafe.Pointer(&b))
+}
+
+// #nosec G103
+// UnsafeBytes returns a byte slice for the string without allocation
+func UnsafeBytes(s string) (bs []byte) {
+ sh := (*reflect.StringHeader)(unsafe.Pointer(&s))
+ bh := (*reflect.SliceHeader)(unsafe.Pointer(&bs))
+ bh.Data = sh.Data
+ bh.Len = sh.Len
+ bh.Cap = sh.Len
+ return
+}
+
+// SafeString copies a string to make it immutable
+func SafeString(s string) string {
+ return string(UnsafeBytes(s))
+}
+
+// SafeBytes copies a slice to make it immutable
+func SafeBytes(b []byte) []byte {
+ tmp := make([]byte, len(b))
+ copy(tmp, b)
+ return tmp
+}
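+
+// A hedged sketch of how these pair up: UnsafeBytes aliases the string's
+// backing memory and must not be mutated; SafeBytes makes an independent
+// copy that is safe to modify.
+//
+//	v := UnsafeBytes("fiber") // zero-copy view, treat as read-only
+//	c := SafeBytes(v)         // detached copy, safe to mutate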
+
+const (
+ uByte = 1 << (10 * iota)
+ uKilobyte
+ uMegabyte
+ uGigabyte
+ uTerabyte
+ uPetabyte
+ uExabyte
+)
+
+// ByteSize returns a human-readable byte string of the form 10MB, 12.5KB, and so forth.
+// The unit that results in the smallest number greater than or equal to 1 is always chosen.
+func ByteSize(bytes uint64) string {
+ unit := ""
+ value := float64(bytes)
+ switch {
+ case bytes >= uExabyte:
+ unit = "EB"
+ value = value / uExabyte
+ case bytes >= uPetabyte:
+ unit = "PB"
+ value = value / uPetabyte
+ case bytes >= uTerabyte:
+ unit = "TB"
+ value = value / uTerabyte
+ case bytes >= uGigabyte:
+ unit = "GB"
+ value = value / uGigabyte
+ case bytes >= uMegabyte:
+ unit = "MB"
+ value = value / uMegabyte
+ case bytes >= uKilobyte:
+ unit = "KB"
+ value = value / uKilobyte
+ case bytes >= uByte:
+ unit = "B"
+ default:
+ return "0B"
+ }
+ result := strconv.FormatFloat(value, 'f', 1, 64)
+ result = strings.TrimSuffix(result, ".0")
+ return result + unit
+}
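+
+// For example (values chosen for illustration):
+//
+//	ByteSize(1536)             // "1.5KB"
+//	ByteSize(10 * 1024 * 1024) // "10MB"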
+
+// Deprecated functions
+
+// #nosec G103
+// GetString returns a string value for the byte slice without allocation.
+//
+// Deprecated: use UnsafeString instead.
+func GetString(b []byte) string {
+	return UnsafeString(b)
+}
+
+// #nosec G103
+// GetBytes returns a byte slice for the string without allocation.
+//
+// Deprecated: use UnsafeBytes instead.
+func GetBytes(s string) []byte {
+	return UnsafeBytes(s)
+}
+
+// ImmutableString copies a string to detach it from its original backing memory.
+//
+// Deprecated: use SafeString instead.
+func ImmutableString(s string) string {
+	return SafeString(s)
+}
diff --git a/vendor/github.com/gofiber/utils/file.go b/vendor/github.com/gofiber/utils/file.go
new file mode 100644
index 0000000..0ae8f22
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/file.go
@@ -0,0 +1,110 @@
+package utils
+
+import (
+ "io"
+ "net/http"
+ "os"
+ pathpkg "path"
+ "path/filepath"
+ "sort"
+)
+
+// Walk walks the filesystem rooted at root, calling walkFn for each file or
+// directory in the filesystem, including root. All errors that arise visiting files
+// and directories are filtered by walkFn. The files are walked in lexical
+// order.
+func Walk(fs http.FileSystem, root string, walkFn filepath.WalkFunc) error {
+ info, err := stat(fs, root)
+ if err != nil {
+ return walkFn(root, nil, err)
+ }
+ return walk(fs, root, info, walkFn)
+}
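+
+// A hedged usage sketch: walking an http.Dir and printing every path; the
+// directory name and the fmt import are illustrative only.
+//
+//	_ = Walk(http.Dir("./static"), "/", func(path string, info os.FileInfo, err error) error {
+//		if err != nil {
+//			return err
+//		}
+//		fmt.Println(path, info.IsDir())
+//		return nil
+//	})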
+
+// #nosec G304
+// ReadFile returns the raw content of a file
+func ReadFile(path string, fs http.FileSystem) ([]byte, error) {
+ if fs != nil {
+ file, err := fs.Open(path)
+ if err != nil {
+ return nil, err
+ }
+ defer file.Close()
+ return io.ReadAll(file)
+ }
+ return os.ReadFile(path)
+}
+
+// readDirNames reads the directory named by dirname and returns
+// a sorted list of directory entries.
+func readDirNames(fs http.FileSystem, dirname string) ([]string, error) {
+ fis, err := readDir(fs, dirname)
+ if err != nil {
+ return nil, err
+ }
+ names := make([]string, len(fis))
+ for i := range fis {
+ names[i] = fis[i].Name()
+ }
+ sort.Strings(names)
+ return names, nil
+}
+
+// walk recursively descends path, calling walkFn.
+func walk(fs http.FileSystem, path string, info os.FileInfo, walkFn filepath.WalkFunc) error {
+ err := walkFn(path, info, nil)
+ if err != nil {
+ if info.IsDir() && err == filepath.SkipDir {
+ return nil
+ }
+ return err
+ }
+
+ if !info.IsDir() {
+ return nil
+ }
+
+ names, err := readDirNames(fs, path)
+ if err != nil {
+ return walkFn(path, info, err)
+ }
+
+ for _, name := range names {
+ filename := pathpkg.Join(path, name)
+ fileInfo, err := stat(fs, filename)
+ if err != nil {
+ if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir {
+ return err
+ }
+ } else {
+ err = walk(fs, filename, fileInfo, walkFn)
+ if err != nil {
+ if !fileInfo.IsDir() || err != filepath.SkipDir {
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// readDir reads the contents of the directory associated with file and
+// returns a slice of FileInfo values in directory order.
+func readDir(fs http.FileSystem, name string) ([]os.FileInfo, error) {
+ f, err := fs.Open(name)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+ return f.Readdir(0)
+}
+
+// stat returns the FileInfo structure describing file.
+func stat(fs http.FileSystem, name string) (os.FileInfo, error) {
+ f, err := fs.Open(name)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+ return f.Stat()
+}
diff --git a/vendor/github.com/gofiber/utils/http.go b/vendor/github.com/gofiber/utils/http.go
new file mode 100644
index 0000000..4584f3c
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/http.go
@@ -0,0 +1,212 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+const MIMEOctetStream = "application/octet-stream"
+
+// GetMIME returns the content-type of a file extension
+func GetMIME(extension string) (mime string) {
+ if len(extension) == 0 {
+ return mime
+ }
+ if extension[0] == '.' {
+ mime = mimeExtensions[extension[1:]]
+ } else {
+ mime = mimeExtensions[extension]
+ }
+ if len(mime) == 0 {
+ return MIMEOctetStream
+ }
+ return mime
+}
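+
+// For example, GetMIME(".json") and GetMIME("json") both return
+// "application/json"; an unknown extension falls back to MIMEOctetStream.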
+
+// limits for HTTP status codes
+const (
+ statusMessageMin = 100
+ statusMessageMax = 511
+)
+
+// StatusMessage returns the correct message for the provided HTTP status code
+func StatusMessage(status int) string {
+ if status < statusMessageMin || status > statusMessageMax {
+ return ""
+ }
+ return statusMessage[status]
+}
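+
+// For example, StatusMessage(404) returns "Not Found", while out-of-range
+// codes such as 99 or 600 return "".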
+
+// HTTP status codes were copied from net/http.
+var statusMessage = []string{
+ 100: "Continue",
+ 101: "Switching Protocols",
+ 102: "Processing",
+ 103: "Early Hints",
+ 200: "OK",
+ 201: "Created",
+ 202: "Accepted",
+ 203: "Non-Authoritative Information",
+ 204: "No Content",
+ 205: "Reset Content",
+ 206: "Partial Content",
+ 207: "Multi-Status",
+ 208: "Already Reported",
+ 226: "IM Used",
+ 300: "Multiple Choices",
+ 301: "Moved Permanently",
+ 302: "Found",
+ 303: "See Other",
+ 304: "Not Modified",
+ 305: "Use Proxy",
+ 306: "Switch Proxy",
+ 307: "Temporary Redirect",
+ 308: "Permanent Redirect",
+ 400: "Bad Request",
+ 401: "Unauthorized",
+ 402: "Payment Required",
+ 403: "Forbidden",
+ 404: "Not Found",
+ 405: "Method Not Allowed",
+ 406: "Not Acceptable",
+ 407: "Proxy Authentication Required",
+ 408: "Request Timeout",
+ 409: "Conflict",
+ 410: "Gone",
+ 411: "Length Required",
+ 412: "Precondition Failed",
+ 413: "Request Entity Too Large",
+ 414: "Request URI Too Long",
+ 415: "Unsupported Media Type",
+ 416: "Requested Range Not Satisfiable",
+ 417: "Expectation Failed",
+ 418: "I'm a teapot",
+ 421: "Misdirected Request",
+ 422: "Unprocessable Entity",
+ 423: "Locked",
+ 424: "Failed Dependency",
+ 426: "Upgrade Required",
+ 428: "Precondition Required",
+ 429: "Too Many Requests",
+ 431: "Request Header Fields Too Large",
+ 451: "Unavailable For Legal Reasons",
+ 500: "Internal Server Error",
+ 501: "Not Implemented",
+ 502: "Bad Gateway",
+ 503: "Service Unavailable",
+ 504: "Gateway Timeout",
+ 505: "HTTP Version Not Supported",
+ 506: "Variant Also Negotiates",
+ 507: "Insufficient Storage",
+ 508: "Loop Detected",
+ 510: "Not Extended",
+ 511: "Network Authentication Required",
+}
+
+// MIME types were copied from https://github.com/nginx/nginx/blob/master/conf/mime.types
+var mimeExtensions = map[string]string{
+ "html": "text/html",
+ "htm": "text/html",
+ "shtml": "text/html",
+ "css": "text/css",
+ "gif": "image/gif",
+ "jpeg": "image/jpeg",
+ "jpg": "image/jpeg",
+ "xml": "application/xml",
+ "js": "application/javascript",
+ "atom": "application/atom+xml",
+ "rss": "application/rss+xml",
+ "mml": "text/mathml",
+ "txt": "text/plain",
+ "jad": "text/vnd.sun.j2me.app-descriptor",
+ "wml": "text/vnd.wap.wml",
+ "htc": "text/x-component",
+ "png": "image/png",
+ "svg": "image/svg+xml",
+ "svgz": "image/svg+xml",
+ "tif": "image/tiff",
+ "tiff": "image/tiff",
+ "wbmp": "image/vnd.wap.wbmp",
+ "webp": "image/webp",
+ "ico": "image/x-icon",
+ "jng": "image/x-jng",
+ "bmp": "image/x-ms-bmp",
+ "woff": "font/woff",
+ "woff2": "font/woff2",
+ "jar": "application/java-archive",
+ "war": "application/java-archive",
+ "ear": "application/java-archive",
+ "json": "application/json",
+ "hqx": "application/mac-binhex40",
+ "doc": "application/msword",
+ "pdf": "application/pdf",
+ "ps": "application/postscript",
+ "eps": "application/postscript",
+ "ai": "application/postscript",
+ "rtf": "application/rtf",
+ "m3u8": "application/vnd.apple.mpegurl",
+ "kml": "application/vnd.google-earth.kml+xml",
+ "kmz": "application/vnd.google-earth.kmz",
+ "xls": "application/vnd.ms-excel",
+ "eot": "application/vnd.ms-fontobject",
+ "ppt": "application/vnd.ms-powerpoint",
+ "odg": "application/vnd.oasis.opendocument.graphics",
+ "odp": "application/vnd.oasis.opendocument.presentation",
+ "ods": "application/vnd.oasis.opendocument.spreadsheet",
+ "odt": "application/vnd.oasis.opendocument.text",
+ "wmlc": "application/vnd.wap.wmlc",
+ "7z": "application/x-7z-compressed",
+ "cco": "application/x-cocoa",
+ "jardiff": "application/x-java-archive-diff",
+ "jnlp": "application/x-java-jnlp-file",
+ "run": "application/x-makeself",
+ "pl": "application/x-perl",
+ "pm": "application/x-perl",
+ "prc": "application/x-pilot",
+ "pdb": "application/x-pilot",
+ "rar": "application/x-rar-compressed",
+ "rpm": "application/x-redhat-package-manager",
+ "sea": "application/x-sea",
+ "swf": "application/x-shockwave-flash",
+ "sit": "application/x-stuffit",
+ "tcl": "application/x-tcl",
+ "tk": "application/x-tcl",
+ "der": "application/x-x509-ca-cert",
+ "pem": "application/x-x509-ca-cert",
+ "crt": "application/x-x509-ca-cert",
+ "xpi": "application/x-xpinstall",
+ "xhtml": "application/xhtml+xml",
+ "xspf": "application/xspf+xml",
+ "zip": "application/zip",
+ "bin": "application/octet-stream",
+ "exe": "application/octet-stream",
+ "dll": "application/octet-stream",
+ "deb": "application/octet-stream",
+ "dmg": "application/octet-stream",
+ "iso": "application/octet-stream",
+ "img": "application/octet-stream",
+ "msi": "application/octet-stream",
+ "msp": "application/octet-stream",
+ "msm": "application/octet-stream",
+ "mid": "audio/midi",
+ "midi": "audio/midi",
+ "kar": "audio/midi",
+ "mp3": "audio/mpeg",
+ "ogg": "audio/ogg",
+ "m4a": "audio/x-m4a",
+ "ra": "audio/x-realaudio",
+ "3gpp": "video/3gpp",
+ "3gp": "video/3gpp",
+ "ts": "video/mp2t",
+ "mp4": "video/mp4",
+ "mpeg": "video/mpeg",
+ "mpg": "video/mpeg",
+ "mov": "video/quicktime",
+ "webm": "video/webm",
+ "flv": "video/x-flv",
+ "m4v": "video/x-m4v",
+ "mng": "video/x-mng",
+ "asx": "video/x-ms-asf",
+ "asf": "video/x-ms-asf",
+ "wmv": "video/x-ms-wmv",
+ "avi": "video/x-msvideo",
+}
diff --git a/vendor/github.com/gofiber/utils/integer.go b/vendor/github.com/gofiber/utils/integer.go
new file mode 100644
index 0000000..c01923d
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/integer.go
@@ -0,0 +1,13 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+// DefaultINT returns the provided fallback value if value is 0 or lower
+func DefaultINT(value int, defaultValue int) int {
+ if value <= 0 {
+ return defaultValue
+ }
+ return value
+}
diff --git a/vendor/github.com/gofiber/utils/strings.go b/vendor/github.com/gofiber/utils/strings.go
new file mode 100644
index 0000000..0436f03
--- /dev/null
+++ b/vendor/github.com/gofiber/utils/strings.go
@@ -0,0 +1,70 @@
+// ⚡️ Fiber is an Express inspired web framework written in Go with ☕️
+// 🤖 Github Repository: https://github.com/gofiber/fiber
+// 📌 API Documentation: https://docs.gofiber.io
+
+package utils
+
+// ToLower is the equivalent of strings.ToLower
+func ToLower(b string) string {
+ var res = make([]byte, len(b))
+ copy(res, b)
+ for i := 0; i < len(res); i++ {
+ res[i] = toLowerTable[res[i]]
+ }
+
+ return GetString(res)
+}
+
+// ToUpper is the equivalent of strings.ToUpper
+func ToUpper(b string) string {
+ var res = make([]byte, len(b))
+ copy(res, b)
+ for i := 0; i < len(res); i++ {
+ res[i] = toUpperTable[res[i]]
+ }
+
+ return GetString(res)
+}
+
+// TrimLeft is the equivalent of strings.TrimLeft
+func TrimLeft(s string, cutset byte) string {
+ lenStr, start := len(s), 0
+ for start < lenStr && s[start] == cutset {
+ start++
+ }
+ return s[start:]
+}
+
+// Trim is the equivalent of strings.Trim
+func Trim(s string, cutset byte) string {
+ i, j := 0, len(s)-1
+ for ; i < j; i++ {
+ if s[i] != cutset {
+ break
+ }
+ }
+ for ; i < j; j-- {
+ if s[j] != cutset {
+ break
+ }
+ }
+
+ return s[i : j+1]
+}
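+
+// For example, Trim("/fiber/", '/') returns "fiber". Note that cutset is a
+// single byte here, unlike strings.Trim, which accepts a set of runes.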
+
+// TrimRight is the equivalent of strings.TrimRight
+func TrimRight(s string, cutset byte) string {
+ lenStr := len(s)
+ for lenStr > 0 && s[lenStr-1] == cutset {
+ lenStr--
+ }
+ return s[:lenStr]
+}
+
+// DefaultString returns the provided fallback value if string is empty
+func DefaultString(value string, defaultValue string) string {
+ if len(value) <= 0 {
+ return defaultValue
+ }
+ return value
+}
diff --git a/vendor/github.com/google/uuid/CHANGELOG.md b/vendor/github.com/google/uuid/CHANGELOG.md
new file mode 100644
index 0000000..c9fb829
--- /dev/null
+++ b/vendor/github.com/google/uuid/CHANGELOG.md
@@ -0,0 +1,28 @@
+# Changelog
+
+## [1.5.0](https://github.com/google/uuid/compare/v1.4.0...v1.5.0) (2023-12-12)
+
+
+### Features
+
+* Validate UUID without creating new UUID ([#141](https://github.com/google/uuid/issues/141)) ([9ee7366](https://github.com/google/uuid/commit/9ee7366e66c9ad96bab89139418a713dc584ae29))
+
+## [1.4.0](https://github.com/google/uuid/compare/v1.3.1...v1.4.0) (2023-10-26)
+
+
+### Features
+
+* UUIDs slice type with Strings() convenience method ([#133](https://github.com/google/uuid/issues/133)) ([cd5fbbd](https://github.com/google/uuid/commit/cd5fbbdd02f3e3467ac18940e07e062be1f864b4))
+
+### Fixes
+
+* Clarify that Parse's job is to parse but not necessarily validate strings. (Documents current behavior)
+
+## [1.3.1](https://github.com/google/uuid/compare/v1.3.0...v1.3.1) (2023-08-18)
+
+
+### Bug Fixes
+
+* Use .EqualFold() to parse urn prefixed UUIDs ([#118](https://github.com/google/uuid/issues/118)) ([574e687](https://github.com/google/uuid/commit/574e6874943741fb99d41764c705173ada5293f0))
+
+## Changelog
diff --git a/vendor/github.com/google/uuid/CONTRIBUTING.md b/vendor/github.com/google/uuid/CONTRIBUTING.md
new file mode 100644
index 0000000..a502fdc
--- /dev/null
+++ b/vendor/github.com/google/uuid/CONTRIBUTING.md
@@ -0,0 +1,26 @@
+# How to contribute
+
+We definitely welcome patches and contribution to this project!
+
+### Tips
+
+Commits must be formatted according to the [Conventional Commits Specification](https://www.conventionalcommits.org).
+
+Always try to include a test case! If it is not possible or not necessary,
+please explain why in the pull request description.
+
+### Releasing
+
+Commits that would precipitate a SemVer change, as described in the Conventional
+Commits Specification, will trigger [`release-please`](https://github.com/google-github-actions/release-please-action)
+to create a release candidate pull request. Once submitted, `release-please`
+will create a release.
+
+For tips on how to work with `release-please`, see its documentation.
+
+### Legal requirements
+
+In order to protect both you and ourselves, you will need to sign the
+[Contributor License Agreement](https://cla.developers.google.com/clas).
+
+You may have already signed it for other Google projects.
diff --git a/vendor/github.com/google/uuid/CONTRIBUTORS b/vendor/github.com/google/uuid/CONTRIBUTORS
new file mode 100644
index 0000000..b4bb97f
--- /dev/null
+++ b/vendor/github.com/google/uuid/CONTRIBUTORS
@@ -0,0 +1,9 @@
+Paul Borman
+bmatsuo
+shawnps
+theory
+jboverfelt
+dsymonds
+cd1
+wallclockbuilder
+dansouza
diff --git a/vendor/github.com/google/uuid/LICENSE b/vendor/github.com/google/uuid/LICENSE
new file mode 100644
index 0000000..5dc6826
--- /dev/null
+++ b/vendor/github.com/google/uuid/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009,2014 Google Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/google/uuid/README.md b/vendor/github.com/google/uuid/README.md
new file mode 100644
index 0000000..3e9a618
--- /dev/null
+++ b/vendor/github.com/google/uuid/README.md
@@ -0,0 +1,21 @@
+# uuid
+The uuid package generates and inspects UUIDs based on
+[RFC 4122](https://datatracker.ietf.org/doc/html/rfc4122)
+and DCE 1.1: Authentication and Security Services.
+
+This package is based on the github.com/pborman/uuid package (previously named
+code.google.com/p/go-uuid). It differs from these earlier packages in that
+a UUID is a 16 byte array rather than a byte slice. One loss due to this
+change is the ability to represent an invalid UUID (vs a NIL UUID).
+
+###### Install
+```sh
+go get github.com/google/uuid
+```
+
+###### Documentation
+[![Go Reference](https://pkg.go.dev/badge/github.com/google/uuid.svg)](https://pkg.go.dev/github.com/google/uuid)
+
+Full `go doc` style documentation for the package can be viewed online without
+installing this package by using the GoDoc site here:
+http://pkg.go.dev/github.com/google/uuid
diff --git a/vendor/github.com/google/uuid/dce.go b/vendor/github.com/google/uuid/dce.go
new file mode 100644
index 0000000..fa820b9
--- /dev/null
+++ b/vendor/github.com/google/uuid/dce.go
@@ -0,0 +1,80 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "encoding/binary"
+ "fmt"
+ "os"
+)
+
+// A Domain represents a Version 2 domain
+type Domain byte
+
+// Domain constants for DCE Security (Version 2) UUIDs.
+const (
+ Person = Domain(0)
+ Group = Domain(1)
+ Org = Domain(2)
+)
+
+// NewDCESecurity returns a DCE Security (Version 2) UUID.
+//
+// The domain should be one of Person, Group or Org.
+// On a POSIX system the id should be the user's UID for the Person
+// domain and the user's GID for the Group. The meaning of id for
+// the domain Org or on non-POSIX systems is site defined.
+//
+// For a given domain/id pair the same token may be returned for up to
+// 7 minutes and 10 seconds.
+func NewDCESecurity(domain Domain, id uint32) (UUID, error) {
+ uuid, err := NewUUID()
+ if err == nil {
+ uuid[6] = (uuid[6] & 0x0f) | 0x20 // Version 2
+ uuid[9] = byte(domain)
+ binary.BigEndian.PutUint32(uuid[0:], id)
+ }
+ return uuid, err
+}
+
+// NewDCEPerson returns a DCE Security (Version 2) UUID in the person
+// domain with the id returned by os.Getuid.
+//
+// NewDCESecurity(Person, uint32(os.Getuid()))
+func NewDCEPerson() (UUID, error) {
+ return NewDCESecurity(Person, uint32(os.Getuid()))
+}
+
+// NewDCEGroup returns a DCE Security (Version 2) UUID in the group
+// domain with the id returned by os.Getgid.
+//
+// NewDCESecurity(Group, uint32(os.Getgid()))
+func NewDCEGroup() (UUID, error) {
+ return NewDCESecurity(Group, uint32(os.Getgid()))
+}
+
+// Domain returns the domain for a Version 2 UUID. Domains are only defined
+// for Version 2 UUIDs.
+func (uuid UUID) Domain() Domain {
+ return Domain(uuid[9])
+}
+
+// ID returns the id for a Version 2 UUID. IDs are only defined for Version 2
+// UUIDs.
+func (uuid UUID) ID() uint32 {
+ return binary.BigEndian.Uint32(uuid[0:4])
+}
+
+func (d Domain) String() string {
+ switch d {
+ case Person:
+ return "Person"
+ case Group:
+ return "Group"
+ case Org:
+ return "Org"
+ }
+ return fmt.Sprintf("Domain%d", int(d))
+}
diff --git a/vendor/github.com/google/uuid/doc.go b/vendor/github.com/google/uuid/doc.go
new file mode 100644
index 0000000..5b8a4b9
--- /dev/null
+++ b/vendor/github.com/google/uuid/doc.go
@@ -0,0 +1,12 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package uuid generates and inspects UUIDs.
+//
+// UUIDs are based on RFC 4122 and DCE 1.1: Authentication and Security
+// Services.
+//
+// A UUID is a 16 byte (128 bit) array. UUIDs may be used as keys to
+// maps or compared directly.
+package uuid
diff --git a/vendor/github.com/google/uuid/hash.go b/vendor/github.com/google/uuid/hash.go
new file mode 100644
index 0000000..b404f4b
--- /dev/null
+++ b/vendor/github.com/google/uuid/hash.go
@@ -0,0 +1,53 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "crypto/md5"
+ "crypto/sha1"
+ "hash"
+)
+
+// Well known namespace IDs and UUIDs
+var (
+ NameSpaceDNS = Must(Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c8"))
+ NameSpaceURL = Must(Parse("6ba7b811-9dad-11d1-80b4-00c04fd430c8"))
+ NameSpaceOID = Must(Parse("6ba7b812-9dad-11d1-80b4-00c04fd430c8"))
+ NameSpaceX500 = Must(Parse("6ba7b814-9dad-11d1-80b4-00c04fd430c8"))
+ Nil UUID // empty UUID, all zeros
+)
+
+// NewHash returns a new UUID derived from the hash of space concatenated with
+// data generated by h. The hash should be at least 16 bytes in length. The
+// first 16 bytes of the hash are used to form the UUID. The version of the
+// UUID will be the lower 4 bits of version. NewHash is used to implement
+// NewMD5 and NewSHA1.
+func NewHash(h hash.Hash, space UUID, data []byte, version int) UUID {
+ h.Reset()
+ h.Write(space[:]) //nolint:errcheck
+ h.Write(data) //nolint:errcheck
+ s := h.Sum(nil)
+ var uuid UUID
+ copy(uuid[:], s)
+ uuid[6] = (uuid[6] & 0x0f) | uint8((version&0xf)<<4)
+ uuid[8] = (uuid[8] & 0x3f) | 0x80 // RFC 4122 variant
+ return uuid
+}
+
+// NewMD5 returns a new MD5 (Version 3) UUID based on the
+// supplied name space and data. It is the same as calling:
+//
+// NewHash(md5.New(), space, data, 3)
+func NewMD5(space UUID, data []byte) UUID {
+ return NewHash(md5.New(), space, data, 3)
+}
+
+// NewSHA1 returns a new SHA1 (Version 5) UUID based on the
+// supplied name space and data. It is the same as calling:
+//
+// NewHash(sha1.New(), space, data, 5)
+func NewSHA1(space UUID, data []byte) UUID {
+ return NewHash(sha1.New(), space, data, 5)
+}
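+
+// For example, a deterministic, name-based (Version 5) UUID for a DNS name:
+//
+//	id := NewSHA1(NameSpaceDNS, []byte("example.com"))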
diff --git a/vendor/github.com/google/uuid/marshal.go b/vendor/github.com/google/uuid/marshal.go
new file mode 100644
index 0000000..14bd340
--- /dev/null
+++ b/vendor/github.com/google/uuid/marshal.go
@@ -0,0 +1,38 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import "fmt"
+
+// MarshalText implements encoding.TextMarshaler.
+func (uuid UUID) MarshalText() ([]byte, error) {
+ var js [36]byte
+ encodeHex(js[:], uuid)
+ return js[:], nil
+}
+
+// UnmarshalText implements encoding.TextUnmarshaler.
+func (uuid *UUID) UnmarshalText(data []byte) error {
+ id, err := ParseBytes(data)
+ if err != nil {
+ return err
+ }
+ *uuid = id
+ return nil
+}
+
+// MarshalBinary implements encoding.BinaryMarshaler.
+func (uuid UUID) MarshalBinary() ([]byte, error) {
+ return uuid[:], nil
+}
+
+// UnmarshalBinary implements encoding.BinaryUnmarshaler.
+func (uuid *UUID) UnmarshalBinary(data []byte) error {
+ if len(data) != 16 {
+ return fmt.Errorf("invalid UUID (got %d bytes)", len(data))
+ }
+ copy(uuid[:], data)
+ return nil
+}
diff --git a/vendor/github.com/google/uuid/node.go b/vendor/github.com/google/uuid/node.go
new file mode 100644
index 0000000..d651a2b
--- /dev/null
+++ b/vendor/github.com/google/uuid/node.go
@@ -0,0 +1,90 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "sync"
+)
+
+var (
+ nodeMu sync.Mutex
+ ifname string // name of interface being used
+ nodeID [6]byte // hardware for version 1 UUIDs
+ zeroID [6]byte // nodeID with only 0's
+)
+
+// NodeInterface returns the name of the interface from which the NodeID was
+// derived. The interface "user" is returned if the NodeID was set by
+// SetNodeID.
+func NodeInterface() string {
+ defer nodeMu.Unlock()
+ nodeMu.Lock()
+ return ifname
+}
+
+// SetNodeInterface selects the hardware address to be used for Version 1 UUIDs.
+// If name is "" then the first usable interface found will be used or a random
+// Node ID will be generated. If a named interface cannot be found then false
+// is returned.
+//
+// SetNodeInterface never fails when name is "".
+func SetNodeInterface(name string) bool {
+ defer nodeMu.Unlock()
+ nodeMu.Lock()
+ return setNodeInterface(name)
+}
+
+func setNodeInterface(name string) bool {
+ iname, addr := getHardwareInterface(name) // null implementation for js
+ if iname != "" && addr != nil {
+ ifname = iname
+ copy(nodeID[:], addr)
+ return true
+ }
+
+ // We found no interfaces with a valid hardware address. If name
+ // does not specify a specific interface generate a random Node ID
+ // (section 4.1.6)
+ if name == "" {
+ ifname = "random"
+ randomBits(nodeID[:])
+ return true
+ }
+ return false
+}
+
+// NodeID returns a slice of a copy of the current Node ID, setting the Node ID
+// if not already set.
+func NodeID() []byte {
+ defer nodeMu.Unlock()
+ nodeMu.Lock()
+ if nodeID == zeroID {
+ setNodeInterface("")
+ }
+ nid := nodeID
+ return nid[:]
+}
+
+// SetNodeID sets the Node ID to be used for Version 1 UUIDs. The first 6 bytes
+// of id are used. If id is less than 6 bytes then false is returned and the
+// Node ID is not set.
+func SetNodeID(id []byte) bool {
+ if len(id) < 6 {
+ return false
+ }
+ defer nodeMu.Unlock()
+ nodeMu.Lock()
+ copy(nodeID[:], id)
+ ifname = "user"
+ return true
+}
+
+// NodeID returns the 6 byte node id encoded in uuid. It returns nil if uuid is
+// not valid. The NodeID is only well defined for version 1 and 2 UUIDs.
+func (uuid UUID) NodeID() []byte {
+ var node [6]byte
+ copy(node[:], uuid[10:])
+ return node[:]
+}
diff --git a/vendor/github.com/google/uuid/node_js.go b/vendor/github.com/google/uuid/node_js.go
new file mode 100644
index 0000000..b2a0bc8
--- /dev/null
+++ b/vendor/github.com/google/uuid/node_js.go
@@ -0,0 +1,12 @@
+// Copyright 2017 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build js
+
+package uuid
+
+// getHardwareInterface returns nil values for the JS version of the code.
+// This removes the "net" dependency, because it is not used in the browser.
+// Using the "net" library inflates the size of the transpiled JS code by 673k bytes.
+func getHardwareInterface(name string) (string, []byte) { return "", nil }
diff --git a/vendor/github.com/google/uuid/node_net.go b/vendor/github.com/google/uuid/node_net.go
new file mode 100644
index 0000000..0cbbcdd
--- /dev/null
+++ b/vendor/github.com/google/uuid/node_net.go
@@ -0,0 +1,33 @@
+// Copyright 2017 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !js
+
+package uuid
+
+import "net"
+
+var interfaces []net.Interface // cached list of interfaces
+
+// getHardwareInterface returns the name and hardware address of interface name.
+// If name is "" then the name and hardware address of one of the system's
+// interfaces is returned. If no interfaces are found (name does not exist or
+// there are no interfaces) then "", nil is returned.
+//
+// Only addresses of at least 6 bytes are returned.
+func getHardwareInterface(name string) (string, []byte) {
+ if interfaces == nil {
+ var err error
+ interfaces, err = net.Interfaces()
+ if err != nil {
+ return "", nil
+ }
+ }
+ for _, ifs := range interfaces {
+ if len(ifs.HardwareAddr) >= 6 && (name == "" || name == ifs.Name) {
+ return ifs.Name, ifs.HardwareAddr
+ }
+ }
+ return "", nil
+}
diff --git a/vendor/github.com/google/uuid/null.go b/vendor/github.com/google/uuid/null.go
new file mode 100644
index 0000000..d7fcbf2
--- /dev/null
+++ b/vendor/github.com/google/uuid/null.go
@@ -0,0 +1,118 @@
+// Copyright 2021 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "bytes"
+ "database/sql/driver"
+ "encoding/json"
+ "fmt"
+)
+
+var jsonNull = []byte("null")
+
+// NullUUID represents a UUID that may be null.
+// NullUUID implements the SQL driver.Scanner interface so
+// it can be used as a scan destination:
+//
+// var u uuid.NullUUID
+// err := db.QueryRow("SELECT name FROM foo WHERE id=?", id).Scan(&u)
+// ...
+// if u.Valid {
+// // use u.UUID
+// } else {
+// // NULL value
+// }
+//
+type NullUUID struct {
+ UUID UUID
+ Valid bool // Valid is true if UUID is not NULL
+}
+
+// Scan implements the SQL driver.Scanner interface.
+func (nu *NullUUID) Scan(value interface{}) error {
+ if value == nil {
+ nu.UUID, nu.Valid = Nil, false
+ return nil
+ }
+
+ err := nu.UUID.Scan(value)
+ if err != nil {
+ nu.Valid = false
+ return err
+ }
+
+ nu.Valid = true
+ return nil
+}
+
+// Value implements the driver Valuer interface.
+func (nu NullUUID) Value() (driver.Value, error) {
+ if !nu.Valid {
+ return nil, nil
+ }
+ // Delegate to UUID Value function
+ return nu.UUID.Value()
+}
+
+// MarshalBinary implements encoding.BinaryMarshaler.
+func (nu NullUUID) MarshalBinary() ([]byte, error) {
+ if nu.Valid {
+ return nu.UUID[:], nil
+ }
+
+ return []byte(nil), nil
+}
+
+// UnmarshalBinary implements encoding.BinaryUnmarshaler.
+func (nu *NullUUID) UnmarshalBinary(data []byte) error {
+ if len(data) != 16 {
+ return fmt.Errorf("invalid UUID (got %d bytes)", len(data))
+ }
+ copy(nu.UUID[:], data)
+ nu.Valid = true
+ return nil
+}
+
+// MarshalText implements encoding.TextMarshaler.
+func (nu NullUUID) MarshalText() ([]byte, error) {
+ if nu.Valid {
+ return nu.UUID.MarshalText()
+ }
+
+ return jsonNull, nil
+}
+
+// UnmarshalText implements encoding.TextUnmarshaler.
+func (nu *NullUUID) UnmarshalText(data []byte) error {
+ id, err := ParseBytes(data)
+ if err != nil {
+ nu.Valid = false
+ return err
+ }
+ nu.UUID = id
+ nu.Valid = true
+ return nil
+}
+
+// MarshalJSON implements json.Marshaler.
+func (nu NullUUID) MarshalJSON() ([]byte, error) {
+ if nu.Valid {
+ return json.Marshal(nu.UUID)
+ }
+
+ return jsonNull, nil
+}
+
+// UnmarshalJSON implements json.Unmarshaler.
+func (nu *NullUUID) UnmarshalJSON(data []byte) error {
+ if bytes.Equal(data, jsonNull) {
+ *nu = NullUUID{}
+ return nil // valid null UUID
+ }
+ err := json.Unmarshal(data, &nu.UUID)
+ nu.Valid = err == nil
+ return err
+}
diff --git a/vendor/github.com/google/uuid/sql.go b/vendor/github.com/google/uuid/sql.go
new file mode 100644
index 0000000..2e02ec0
--- /dev/null
+++ b/vendor/github.com/google/uuid/sql.go
@@ -0,0 +1,59 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "database/sql/driver"
+ "fmt"
+)
+
+// Scan implements sql.Scanner so UUIDs can be read from databases transparently.
+// Currently, database types that map to string and []byte are supported. Please
+// consult database-specific driver documentation for matching types.
+func (uuid *UUID) Scan(src interface{}) error {
+ switch src := src.(type) {
+ case nil:
+ return nil
+
+ case string:
+ // if an empty UUID comes from a table, we return a null UUID
+ if src == "" {
+ return nil
+ }
+
+ // see Parse for required string format
+ u, err := Parse(src)
+ if err != nil {
+ return fmt.Errorf("Scan: %v", err)
+ }
+
+ *uuid = u
+
+ case []byte:
+ // if an empty UUID comes from a table, we return a null UUID
+ if len(src) == 0 {
+ return nil
+ }
+
+ // assumes a simple slice of bytes if 16 bytes
+ // otherwise attempts to parse
+ if len(src) != 16 {
+ return uuid.Scan(string(src))
+ }
+ copy((*uuid)[:], src)
+
+ default:
+ return fmt.Errorf("Scan: unable to scan type %T into UUID", src)
+ }
+
+ return nil
+}
+
+// Value implements sql.Valuer so that UUIDs can be written to databases
+// transparently. Currently, UUIDs map to strings. Please consult
+// database-specific driver documentation for matching types.
+func (uuid UUID) Value() (driver.Value, error) {
+ return uuid.String(), nil
+}
diff --git a/vendor/github.com/google/uuid/time.go b/vendor/github.com/google/uuid/time.go
new file mode 100644
index 0000000..c351129
--- /dev/null
+++ b/vendor/github.com/google/uuid/time.go
@@ -0,0 +1,134 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "encoding/binary"
+ "sync"
+ "time"
+)
+
+// A Time represents a time as the number of 100's of nanoseconds since 15 Oct
+// 1582.
+type Time int64
+
+const (
+ lillian = 2299160 // Julian day of 15 Oct 1582
+ unix = 2440587 // Julian day of 1 Jan 1970
+ epoch = unix - lillian // Days between epochs
+ g1582 = epoch * 86400 // seconds between epochs
+ g1582ns100 = g1582 * 10000000 // 100s of nanoseconds between epochs
+)
+
+var (
+ timeMu sync.Mutex
+ lasttime uint64 // last time we returned
+ clockSeq uint16 // clock sequence for this run
+
+ timeNow = time.Now // for testing
+)
+
+// UnixTime converts t into the number of seconds and nanoseconds since the
+// Unix epoch of 1 Jan 1970.
+func (t Time) UnixTime() (sec, nsec int64) {
+ sec = int64(t - g1582ns100)
+ nsec = (sec % 10000000) * 100
+ sec /= 10000000
+ return sec, nsec
+}
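+
+// A hedged sketch: converting a Time back to a time.Time via the standard
+// library:
+//
+//	sec, nsec := t.UnixTime()
+//	wall := time.Unix(sec, nsec)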
+
+// GetTime returns the current Time (100s of nanoseconds since 15 Oct 1582) and
+// clock sequence as well as adjusting the clock sequence as needed. An error
+// is returned if the current time cannot be determined.
+func GetTime() (Time, uint16, error) {
+ defer timeMu.Unlock()
+ timeMu.Lock()
+ return getTime()
+}
+
+func getTime() (Time, uint16, error) {
+ t := timeNow()
+
+ // If we don't have a clock sequence already, set one.
+ if clockSeq == 0 {
+ setClockSequence(-1)
+ }
+ now := uint64(t.UnixNano()/100) + g1582ns100
+
+ // If time has gone backwards with this clock sequence then we
+ // increment the clock sequence
+ if now <= lasttime {
+ clockSeq = ((clockSeq + 1) & 0x3fff) | 0x8000
+ }
+ lasttime = now
+ return Time(now), clockSeq, nil
+}
+
+// ClockSequence returns the current clock sequence, generating one if not
+// already set. The clock sequence is only used for Version 1 UUIDs.
+//
+// The uuid package does not use global static storage for the clock sequence or
+// the last time a UUID was generated. Unless SetClockSequence is used, a new
+// random clock sequence is generated the first time a clock sequence is
+// requested by ClockSequence, GetTime, or NewUUID. (section 4.2.1.1)
+func ClockSequence() int {
+ defer timeMu.Unlock()
+ timeMu.Lock()
+ return clockSequence()
+}
+
+func clockSequence() int {
+ if clockSeq == 0 {
+ setClockSequence(-1)
+ }
+ return int(clockSeq & 0x3fff)
+}
+
+// SetClockSequence sets the clock sequence to the lower 14 bits of seq. Setting to
+// -1 causes a new sequence to be generated.
+func SetClockSequence(seq int) {
+ defer timeMu.Unlock()
+ timeMu.Lock()
+ setClockSequence(seq)
+}
+
+func setClockSequence(seq int) {
+ if seq == -1 {
+ var b [2]byte
+ randomBits(b[:]) // clock sequence
+ seq = int(b[0])<<8 | int(b[1])
+ }
+ oldSeq := clockSeq
+ clockSeq = uint16(seq&0x3fff) | 0x8000 // Set our variant
+ if oldSeq != clockSeq {
+ lasttime = 0
+ }
+}
+
+// Time returns the time in 100s of nanoseconds since 15 Oct 1582 encoded in
+// uuid. The time is only defined for version 1, 2, 6 and 7 UUIDs.
+func (uuid UUID) Time() Time {
+ var t Time
+ switch uuid.Version() {
+ case 6:
+ time := binary.BigEndian.Uint64(uuid[:8]) // Ignore uuid[6] version b0110
+ t = Time(time)
+ case 7:
+ time := binary.BigEndian.Uint64(uuid[:8])
+ t = Time((time>>16)*10000 + g1582ns100)
+ default: // forward compatible
+ time := int64(binary.BigEndian.Uint32(uuid[0:4]))
+ time |= int64(binary.BigEndian.Uint16(uuid[4:6])) << 32
+ time |= int64(binary.BigEndian.Uint16(uuid[6:8])&0xfff) << 48
+ t = Time(time)
+ }
+ return t
+}
+
+// ClockSequence returns the clock sequence encoded in uuid.
+// The clock sequence is only well defined for version 1 and 2 UUIDs.
+func (uuid UUID) ClockSequence() int {
+ return int(binary.BigEndian.Uint16(uuid[8:10])) & 0x3fff
+}
diff --git a/vendor/github.com/google/uuid/util.go b/vendor/github.com/google/uuid/util.go
new file mode 100644
index 0000000..5ea6c73
--- /dev/null
+++ b/vendor/github.com/google/uuid/util.go
@@ -0,0 +1,43 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "io"
+)
+
+// randomBits completely fills slice b with random data.
+func randomBits(b []byte) {
+ if _, err := io.ReadFull(rander, b); err != nil {
+ panic(err.Error()) // rand should never fail
+ }
+}
+
+// xvalues returns the value of a byte as a hexadecimal digit or 255.
+var xvalues = [256]byte{
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 255, 255, 255, 255, 255, 255,
+ 255, 10, 11, 12, 13, 14, 15, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 10, 11, 12, 13, 14, 15, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+}
+
+// xtob converts hex characters x1 and x2 into a byte.
+func xtob(x1, x2 byte) (byte, bool) {
+ b1 := xvalues[x1]
+ b2 := xvalues[x2]
+ return (b1 << 4) | b2, b1 != 255 && b2 != 255
+}
diff --git a/vendor/github.com/google/uuid/uuid.go b/vendor/github.com/google/uuid/uuid.go
new file mode 100644
index 0000000..5232b48
--- /dev/null
+++ b/vendor/github.com/google/uuid/uuid.go
@@ -0,0 +1,365 @@
+// Copyright 2018 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "bytes"
+ "crypto/rand"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "strings"
+ "sync"
+)
+
+// A UUID is a 128 bit (16 byte) Universal Unique IDentifier as defined in RFC
+// 4122.
+type UUID [16]byte
+
+// A Version represents a UUID's version.
+type Version byte
+
+// A Variant represents a UUID's variant.
+type Variant byte
+
+// Constants returned by Variant.
+const (
+ Invalid = Variant(iota) // Invalid UUID
+ RFC4122 // The variant specified in RFC4122
+ Reserved // Reserved, NCS backward compatibility.
+ Microsoft // Reserved, Microsoft Corporation backward compatibility.
+ Future // Reserved for future definition.
+)
+
+const randPoolSize = 16 * 16
+
+var (
+ rander = rand.Reader // random function
+ poolEnabled = false
+ poolMu sync.Mutex
+ poolPos = randPoolSize // protected with poolMu
+ pool [randPoolSize]byte // protected with poolMu
+)
+
+type invalidLengthError struct{ len int }
+
+func (err invalidLengthError) Error() string {
+ return fmt.Sprintf("invalid UUID length: %d", err.len)
+}
+
+// IsInvalidLengthError is a matcher function for the custom error invalidLengthError
+func IsInvalidLengthError(err error) bool {
+ _, ok := err.(invalidLengthError)
+ return ok
+}
+
+// Parse decodes s into a UUID or returns an error if it cannot be parsed. Both
+// the standard UUID forms defined in RFC 4122
+// (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx and
+// urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) are decoded. In addition,
+// Parse accepts non-standard strings such as the raw hex encoding
+// xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx and 38 byte "Microsoft style" encodings,
+// e.g. {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}. Only the middle 36 bytes are
+// examined in the latter case. Parse should not be used to validate strings as
+// it parses non-standard encodings as indicated above.
+func Parse(s string) (UUID, error) {
+ var uuid UUID
+ switch len(s) {
+ // xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ case 36:
+
+ // urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ case 36 + 9:
+ if !strings.EqualFold(s[:9], "urn:uuid:") {
+ return uuid, fmt.Errorf("invalid urn prefix: %q", s[:9])
+ }
+ s = s[9:]
+
+ // {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}
+ case 36 + 2:
+ s = s[1:]
+
+ // xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+ case 32:
+ var ok bool
+ for i := range uuid {
+ uuid[i], ok = xtob(s[i*2], s[i*2+1])
+ if !ok {
+ return uuid, errors.New("invalid UUID format")
+ }
+ }
+ return uuid, nil
+ default:
+ return uuid, invalidLengthError{len(s)}
+ }
+ // s is now at least 36 bytes long
+ // it must be of the form xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ if s[8] != '-' || s[13] != '-' || s[18] != '-' || s[23] != '-' {
+ return uuid, errors.New("invalid UUID format")
+ }
+ for i, x := range [16]int{
+ 0, 2, 4, 6,
+ 9, 11,
+ 14, 16,
+ 19, 21,
+ 24, 26, 28, 30, 32, 34,
+ } {
+ v, ok := xtob(s[x], s[x+1])
+ if !ok {
+ return uuid, errors.New("invalid UUID format")
+ }
+ uuid[i] = v
+ }
+ return uuid, nil
+}
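+
+// For illustration, all of the following parse to the same UUID:
+//
+//	Parse("f47ac10b-58cc-0372-8567-0e02b2c3d479")
+//	Parse("urn:uuid:f47ac10b-58cc-0372-8567-0e02b2c3d479")
+//	Parse("f47ac10b58cc037285670e02b2c3d479")
+//	Parse("{f47ac10b-58cc-0372-8567-0e02b2c3d479}")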
+
+// ParseBytes is like Parse, except it parses a byte slice instead of a string.
+func ParseBytes(b []byte) (UUID, error) {
+ var uuid UUID
+ switch len(b) {
+ case 36: // xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ case 36 + 9: // urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ if !bytes.EqualFold(b[:9], []byte("urn:uuid:")) {
+ return uuid, fmt.Errorf("invalid urn prefix: %q", b[:9])
+ }
+ b = b[9:]
+ case 36 + 2: // {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}
+ b = b[1:]
+ case 32: // xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+ var ok bool
+ for i := 0; i < 32; i += 2 {
+ uuid[i/2], ok = xtob(b[i], b[i+1])
+ if !ok {
+ return uuid, errors.New("invalid UUID format")
+ }
+ }
+ return uuid, nil
+ default:
+ return uuid, invalidLengthError{len(b)}
+ }
+ // s is now at least 36 bytes long
+ // it must be of the form xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ if b[8] != '-' || b[13] != '-' || b[18] != '-' || b[23] != '-' {
+ return uuid, errors.New("invalid UUID format")
+ }
+ for i, x := range [16]int{
+ 0, 2, 4, 6,
+ 9, 11,
+ 14, 16,
+ 19, 21,
+ 24, 26, 28, 30, 32, 34,
+ } {
+ v, ok := xtob(b[x], b[x+1])
+ if !ok {
+ return uuid, errors.New("invalid UUID format")
+ }
+ uuid[i] = v
+ }
+ return uuid, nil
+}
+
+// MustParse is like Parse but panics if the string cannot be parsed.
+// It simplifies safe initialization of global variables holding compiled UUIDs.
+func MustParse(s string) UUID {
+ uuid, err := Parse(s)
+ if err != nil {
+ panic(`uuid: Parse(` + s + `): ` + err.Error())
+ }
+ return uuid
+}
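+
+// Illustrative sketch (not upstream code): MustParse suits package-level
+// variables, where a malformed literal should fail loudly at startup. The
+// value shown is the RFC 4122 DNS namespace UUID.
+//
+//	var nsDNS = MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c8")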
+
+// FromBytes creates a new UUID from a byte slice. Returns an error if the slice
+// does not have a length of 16. The bytes are copied from the slice.
+func FromBytes(b []byte) (uuid UUID, err error) {
+ err = uuid.UnmarshalBinary(b)
+ return uuid, err
+}
+
+// Must returns uuid if err is nil and panics otherwise.
+func Must(uuid UUID, err error) UUID {
+ if err != nil {
+ panic(err)
+ }
+ return uuid
+}
+
+// Validate returns an error if s is not a properly formatted UUID in one of the following formats:
+// xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+// urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+// xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+// {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}
+// Otherwise, it returns nil.
+func Validate(s string) error {
+ switch len(s) {
+ // Standard UUID format
+ case 36:
+
+ // UUID with "urn:uuid:" prefix
+ case 36 + 9:
+ if !strings.EqualFold(s[:9], "urn:uuid:") {
+ return fmt.Errorf("invalid urn prefix: %q", s[:9])
+ }
+ s = s[9:]
+
+ // UUID enclosed in braces
+ case 36 + 2:
+ if s[0] != '{' || s[len(s)-1] != '}' {
+ return fmt.Errorf("invalid bracketed UUID format")
+ }
+ s = s[1 : len(s)-1]
+
+ // UUID without hyphens
+ case 32:
+ for i := 0; i < len(s); i += 2 {
+ _, ok := xtob(s[i], s[i+1])
+ if !ok {
+ return errors.New("invalid UUID format")
+ }
+ }
+
+ default:
+ return invalidLengthError{len(s)}
+ }
+
+ // Check for standard UUID format
+ if len(s) == 36 {
+ if s[8] != '-' || s[13] != '-' || s[18] != '-' || s[23] != '-' {
+ return errors.New("invalid UUID format")
+ }
+ for _, x := range []int{0, 2, 4, 6, 9, 11, 14, 16, 19, 21, 24, 26, 28, 30, 32, 34} {
+ if _, ok := xtob(s[x], s[x+1]); !ok {
+ return errors.New("invalid UUID format")
+ }
+ }
+ }
+
+ return nil
+}
+
+// String returns the string form of uuid,
+// xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx, or "" if uuid is invalid.
+func (uuid UUID) String() string {
+ var buf [36]byte
+ encodeHex(buf[:], uuid)
+ return string(buf[:])
+}
+
+// URN returns the RFC 2141 URN form of uuid,
+// urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx, or "" if uuid is invalid.
+func (uuid UUID) URN() string {
+ var buf [36 + 9]byte
+ copy(buf[:], "urn:uuid:")
+ encodeHex(buf[9:], uuid)
+ return string(buf[:])
+}
+
+func encodeHex(dst []byte, uuid UUID) {
+ hex.Encode(dst, uuid[:4])
+ dst[8] = '-'
+ hex.Encode(dst[9:13], uuid[4:6])
+ dst[13] = '-'
+ hex.Encode(dst[14:18], uuid[6:8])
+ dst[18] = '-'
+ hex.Encode(dst[19:23], uuid[8:10])
+ dst[23] = '-'
+ hex.Encode(dst[24:], uuid[10:])
+}
+
+// Variant returns the variant encoded in uuid.
+func (uuid UUID) Variant() Variant {
+ switch {
+ case (uuid[8] & 0xc0) == 0x80:
+ return RFC4122
+ case (uuid[8] & 0xe0) == 0xc0:
+ return Microsoft
+ case (uuid[8] & 0xe0) == 0xe0:
+ return Future
+ default:
+ return Reserved
+ }
+}
+
+// Version returns the version of uuid.
+func (uuid UUID) Version() Version {
+ return Version(uuid[6] >> 4)
+}
+
+func (v Version) String() string {
+ if v > 15 {
+ return fmt.Sprintf("BAD_VERSION_%d", v)
+ }
+ return fmt.Sprintf("VERSION_%d", v)
+}
+
+func (v Variant) String() string {
+ switch v {
+ case RFC4122:
+ return "RFC4122"
+ case Reserved:
+ return "Reserved"
+ case Microsoft:
+ return "Microsoft"
+ case Future:
+ return "Future"
+ case Invalid:
+ return "Invalid"
+ }
+ return fmt.Sprintf("BadVariant%d", int(v))
+}
+
+// SetRand sets the random number generator to r, which implements io.Reader.
+// If r.Read returns an error when the package requests random data then
+// a panic will be issued.
+//
+// Calling SetRand with nil sets the random number generator to the default
+// generator.
+func SetRand(r io.Reader) {
+ if r == nil {
+ rander = rand.Reader
+ return
+ }
+ rander = r
+}
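+
+// Illustrative sketch (not upstream code): SetRand makes generation
+// deterministic for tests by substituting a fixed byte stream.
+//
+//	SetRand(bytes.NewReader(make([]byte, 512))) // all-zero "randomness"
+//	u := New() // 00000000-0000-4000-8000-000000000000 (version/variant forced)
+//	SetRand(nil) // restore the default crypto/rand source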
+
+// EnableRandPool enables the internal randomness pool used for Random
+// (Version 4) UUID generation. The pool contains random bytes read from
+// the random number generator on demand in batches. Enabling the pool
+// may improve the UUID generation throughput significantly.
+//
+// Since the pool is stored on the Go heap, this feature may be a bad fit
+// for security sensitive applications.
+//
+// Both EnableRandPool and DisableRandPool are not thread-safe and should
+// only be called when there is no possibility that New or any other
+// UUID Version 4 generation function will be called concurrently.
+func EnableRandPool() {
+ poolEnabled = true
+}
+
+// DisableRandPool disables the randomness pool if it was previously
+// enabled with EnableRandPool.
+//
+// Both EnableRandPool and DisableRandPool are not thread-safe and should
+// only be called when there is no possibility that New or any other
+// UUID Version 4 generation function will be called concurrently.
+func DisableRandPool() {
+ poolEnabled = false
+ defer poolMu.Unlock()
+ poolMu.Lock()
+ poolPos = randPoolSize
+}
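+
+// Illustrative sketch (not upstream code): because the pool toggles are not
+// thread-safe, flip them once at startup, before any concurrent generation.
+//
+//	EnableRandPool() // before any goroutine calls New()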
+
+// UUIDs is a slice of UUID types.
+type UUIDs []UUID
+
+// Strings returns a string slice containing the string form of each UUID in uuids.
+func (uuids UUIDs) Strings() []string {
+ var uuidStrs = make([]string, len(uuids))
+ for i, uuid := range uuids {
+ uuidStrs[i] = uuid.String()
+ }
+ return uuidStrs
+}
diff --git a/vendor/github.com/google/uuid/version1.go b/vendor/github.com/google/uuid/version1.go
new file mode 100644
index 0000000..4631096
--- /dev/null
+++ b/vendor/github.com/google/uuid/version1.go
@@ -0,0 +1,44 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "encoding/binary"
+)
+
+// NewUUID returns a Version 1 UUID based on the current NodeID and clock
+// sequence, and the current time. If the NodeID has not been set by SetNodeID
+// or SetNodeInterface then it will be set automatically. If the NodeID cannot
+// be set, NewUUID returns nil. If the clock sequence has not been set by
+// SetClockSequence then it will be set automatically. If GetTime fails to
+// return the current time, NewUUID returns Nil and an error.
+//
+// In most cases, New should be used.
+func NewUUID() (UUID, error) {
+ var uuid UUID
+ now, seq, err := GetTime()
+ if err != nil {
+ return uuid, err
+ }
+
+ timeLow := uint32(now & 0xffffffff)
+ timeMid := uint16((now >> 32) & 0xffff)
+ timeHi := uint16((now >> 48) & 0x0fff)
+ timeHi |= 0x1000 // Version 1
+
+ binary.BigEndian.PutUint32(uuid[0:], timeLow)
+ binary.BigEndian.PutUint16(uuid[4:], timeMid)
+ binary.BigEndian.PutUint16(uuid[6:], timeHi)
+ binary.BigEndian.PutUint16(uuid[8:], seq)
+
+ nodeMu.Lock()
+ if nodeID == zeroID {
+ setNodeInterface("")
+ }
+ copy(uuid[10:], nodeID[:])
+ nodeMu.Unlock()
+
+ return uuid, nil
+}
diff --git a/vendor/github.com/google/uuid/version4.go b/vendor/github.com/google/uuid/version4.go
new file mode 100644
index 0000000..7697802
--- /dev/null
+++ b/vendor/github.com/google/uuid/version4.go
@@ -0,0 +1,76 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import "io"
+
+// New creates a new random UUID or panics. New is equivalent to
+// the expression
+//
+// uuid.Must(uuid.NewRandom())
+func New() UUID {
+ return Must(NewRandom())
+}
+
+// NewString creates a new random UUID and returns it as a string or panics.
+// NewString is equivalent to the expression
+//
+// uuid.New().String()
+func NewString() string {
+ return Must(NewRandom()).String()
+}
+
+// NewRandom returns a Random (Version 4) UUID.
+//
+// The strength of the UUIDs is based on the strength of the crypto/rand
+// package.
+//
+// Uses the randomness pool if it was enabled with EnableRandPool.
+//
+// A note about uniqueness derived from the UUID Wikipedia entry:
+//
+// Randomly generated UUIDs have 122 random bits. One's annual risk of being
+// hit by a meteorite is estimated to be one chance in 17 billion, which
+// means the probability is about 0.00000000006 (6 × 10^-11),
+// equivalent to the odds of creating a few tens of trillions of UUIDs in a
+// year and having one duplicate.
+func NewRandom() (UUID, error) {
+ if !poolEnabled {
+ return NewRandomFromReader(rander)
+ }
+ return newRandomFromPool()
+}
+
+// NewRandomFromReader returns a UUID based on bytes read from a given io.Reader.
+func NewRandomFromReader(r io.Reader) (UUID, error) {
+ var uuid UUID
+ _, err := io.ReadFull(r, uuid[:])
+ if err != nil {
+ return Nil, err
+ }
+ uuid[6] = (uuid[6] & 0x0f) | 0x40 // Version 4
+ uuid[8] = (uuid[8] & 0x3f) | 0x80 // Variant is 10
+ return uuid, nil
+}
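+
+// Illustrative sketch (not upstream code): the two masking lines above are
+// observable through the accessors on any UUID returned here.
+//
+//	u, _ := NewRandom()
+//	_ = u.Version() // 4 (prints as "VERSION_4")
+//	_ = u.Variant() // RFC4122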
+
+func newRandomFromPool() (UUID, error) {
+ var uuid UUID
+ poolMu.Lock()
+ if poolPos == randPoolSize {
+ _, err := io.ReadFull(rander, pool[:])
+ if err != nil {
+ poolMu.Unlock()
+ return Nil, err
+ }
+ poolPos = 0
+ }
+ copy(uuid[:], pool[poolPos:(poolPos+16)])
+ poolPos += 16
+ poolMu.Unlock()
+
+ uuid[6] = (uuid[6] & 0x0f) | 0x40 // Version 4
+ uuid[8] = (uuid[8] & 0x3f) | 0x80 // Variant is 10
+ return uuid, nil
+}
diff --git a/vendor/github.com/google/uuid/version6.go b/vendor/github.com/google/uuid/version6.go
new file mode 100644
index 0000000..339a959
--- /dev/null
+++ b/vendor/github.com/google/uuid/version6.go
@@ -0,0 +1,56 @@
+// Copyright 2023 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import "encoding/binary"
+
+// UUID version 6 is a field-compatible version of UUIDv1, reordered for improved DB locality.
+// It is expected that UUIDv6 will primarily be used in contexts where there are existing v1 UUIDs.
+// Systems that do not involve legacy UUIDv1 SHOULD consider using UUIDv7 instead.
+//
+// see https://datatracker.ietf.org/doc/html/draft-peabody-dispatch-new-uuid-format-03#uuidv6
+//
+// NewV6 returns a Version 6 UUID based on the current NodeID and clock
+// sequence, and the current time. If the NodeID has not been set by SetNodeID
+// or SetNodeInterface then it will be set automatically. If the NodeID cannot
+// be set, NewV6 sets the NodeID to random bits automatically. If the clock
+// sequence has not been set by SetClockSequence then it will be set
+// automatically. If GetTime fails to return the current time, NewV6 returns
+// Nil and an error.
+func NewV6() (UUID, error) {
+ var uuid UUID
+ now, seq, err := GetTime()
+ if err != nil {
+ return uuid, err
+ }
+
+ /*
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | time_high |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | time_mid | time_low_and_version |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ |clk_seq_hi_res | clk_seq_low | node (0-1) |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | node (2-5) |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+
+ binary.BigEndian.PutUint64(uuid[0:], uint64(now))
+ binary.BigEndian.PutUint16(uuid[8:], seq)
+
+ uuid[6] = 0x60 | (uuid[6] & 0x0F)
+ uuid[8] = 0x80 | (uuid[8] & 0x3F)
+
+ nodeMu.Lock()
+ if nodeID == zeroID {
+ setNodeInterface("")
+ }
+ copy(uuid[10:], nodeID[:])
+ nodeMu.Unlock()
+
+ return uuid, nil
+}
diff --git a/vendor/github.com/google/uuid/version7.go b/vendor/github.com/google/uuid/version7.go
new file mode 100644
index 0000000..ba9dd5e
--- /dev/null
+++ b/vendor/github.com/google/uuid/version7.go
@@ -0,0 +1,75 @@
+// Copyright 2023 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package uuid
+
+import (
+ "io"
+)
+
+// UUID version 7 features a time-ordered value field derived from the widely
+// implemented and well known Unix Epoch timestamp source,
+// the number of milliseconds since midnight 1 Jan 1970 UTC, leap seconds
+// excluded, as well as improved entropy characteristics over versions 1 or 6.
+//
+// see https://datatracker.ietf.org/doc/html/draft-peabody-dispatch-new-uuid-format-03#name-uuid-version-7
+//
+// Implementations SHOULD utilize UUID version 7 over UUID version 1 and 6 if possible.
+//
+// NewV7 returns a Version 7 UUID based on the current time (Unix Epoch).
+// Uses the randomness pool if it was enabled with EnableRandPool.
+// On error, NewV7 returns Nil and an error.
+func NewV7() (UUID, error) {
+ uuid, err := NewRandom()
+ if err != nil {
+ return uuid, err
+ }
+ makeV7(uuid[:])
+ return uuid, nil
+}
+
+// NewV7FromReader returns a Version 7 UUID based on the current time (Unix Epoch).
+// It uses NewRandomFromReader to fill the random bits.
+// On error, NewV7FromReader returns Nil and an error.
+func NewV7FromReader(r io.Reader) (UUID, error) {
+ uuid, err := NewRandomFromReader(r)
+ if err != nil {
+ return uuid, err
+ }
+
+ makeV7(uuid[:])
+ return uuid, nil
+}
+
+// makeV7 fills in the 48-bit time (uuid[0] - uuid[5]) and sets the version
+// to b0111 (uuid[6]). uuid[8] already has the right variant bits set
+// (Variant is 10). See NewV7 and NewV7FromReader.
+func makeV7(uuid []byte) {
+ /*
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | unix_ts_ms |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | unix_ts_ms | ver | rand_a |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ |var| rand_b |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | rand_b |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ _ = uuid[15] // bounds check
+
+ t := timeNow().UnixMilli()
+
+ uuid[0] = byte(t >> 40)
+ uuid[1] = byte(t >> 32)
+ uuid[2] = byte(t >> 24)
+ uuid[3] = byte(t >> 16)
+ uuid[4] = byte(t >> 8)
+ uuid[5] = byte(t)
+
+ uuid[6] = 0x70 | (uuid[6] & 0x0F)
+	// uuid[8] already has the right variant bits set (Variant is 10)
+}
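+
+// Illustrative sketch (not upstream code): the 48-bit big-endian Unix
+// millisecond timestamp written above can be read back from a v7 UUID u.
+//
+//	ms := int64(u[0])<<40 | int64(u[1])<<32 | int64(u[2])<<24 |
+//		int64(u[3])<<16 | int64(u[4])<<8 | int64(u[5])
+//	created := time.UnixMilli(ms) // creation time, millisecond precision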
diff --git a/vendor/github.com/hashicorp/hcl/.gitignore b/vendor/github.com/hashicorp/hcl/.gitignore
new file mode 100644
index 0000000..15586a2
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/.gitignore
@@ -0,0 +1,9 @@
+y.output
+
+# ignore intellij files
+.idea
+*.iml
+*.ipr
+*.iws
+
+*.test
diff --git a/vendor/github.com/hashicorp/hcl/.travis.yml b/vendor/github.com/hashicorp/hcl/.travis.yml
new file mode 100644
index 0000000..cb63a32
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/.travis.yml
@@ -0,0 +1,13 @@
+sudo: false
+
+language: go
+
+go:
+ - 1.x
+ - tip
+
+branches:
+ only:
+ - master
+
+script: make test
diff --git a/vendor/github.com/hashicorp/hcl/LICENSE b/vendor/github.com/hashicorp/hcl/LICENSE
new file mode 100644
index 0000000..c33dcc7
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/LICENSE
@@ -0,0 +1,354 @@
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. “Contributor”
+
+ means each individual or legal entity that creates, contributes to the
+ creation of, or owns Covered Software.
+
+1.2. “Contributor Version”
+
+ means the combination of the Contributions of others (if any) used by a
+ Contributor and that particular Contributor’s Contribution.
+
+1.3. “Contribution”
+
+ means Covered Software of a particular Contributor.
+
+1.4. “Covered Software”
+
+ means Source Code Form to which the initial Contributor has attached the
+ notice in Exhibit A, the Executable Form of such Source Code Form, and
+ Modifications of such Source Code Form, in each case including portions
+ thereof.
+
+1.5. “Incompatible With Secondary Licenses”
+ means
+
+ a. that the initial Contributor has attached the notice described in
+ Exhibit B to the Covered Software; or
+
+ b. that the Covered Software was made available under the terms of version
+ 1.1 or earlier of the License, but not also under the terms of a
+ Secondary License.
+
+1.6. “Executable Form”
+
+ means any form of the work other than Source Code Form.
+
+1.7. “Larger Work”
+
+ means a work that combines Covered Software with other material, in a separate
+ file or files, that is not Covered Software.
+
+1.8. “License”
+
+ means this document.
+
+1.9. “Licensable”
+
+ means having the right to grant, to the maximum extent possible, whether at the
+ time of the initial grant or subsequently, any and all of the rights conveyed by
+ this License.
+
+1.10. “Modifications”
+
+ means any of the following:
+
+ a. any file in Source Code Form that results from an addition to, deletion
+ from, or modification of the contents of Covered Software; or
+
+ b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. “Patent Claims” of a Contributor
+
+ means any patent claim(s), including without limitation, method, process,
+ and apparatus claims, in any patent Licensable by such Contributor that
+ would be infringed, but for the grant of the License, by the making,
+ using, selling, offering for sale, having made, import, or transfer of
+ either its Contributions or its Contributor Version.
+
+1.12. “Secondary License”
+
+ means either the GNU General Public License, Version 2.0, the GNU Lesser
+ General Public License, Version 2.1, the GNU Affero General Public
+ License, Version 3.0, or any later versions of those licenses.
+
+1.13. “Source Code Form”
+
+ means the form of the work preferred for making modifications.
+
+1.14. “You” (or “Your”)
+
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, “You” includes any entity that controls, is
+ controlled by, or is under common control with You. For purposes of this
+ definition, “control” means (a) the power, direct or indirect, to cause
+ the direction or management of such entity, whether by contract or
+ otherwise, or (b) ownership of more than fifty percent (50%) of the
+ outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+ Each Contributor hereby grants You a world-wide, royalty-free,
+ non-exclusive license:
+
+ a. under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or as
+ part of a Larger Work; and
+
+ b. under Patent Claims of such Contributor to make, use, sell, offer for
+ sale, have made, import, and otherwise transfer either its Contributions
+ or its Contributor Version.
+
+2.2. Effective Date
+
+ The licenses granted in Section 2.1 with respect to any Contribution become
+ effective for each Contribution on the date the Contributor first distributes
+ such Contribution.
+
+2.3. Limitations on Grant Scope
+
+ The licenses granted in this Section 2 are the only rights granted under this
+ License. No additional rights or licenses will be implied from the distribution
+ or licensing of Covered Software under this License. Notwithstanding Section
+ 2.1(b) above, no patent license is granted by a Contributor:
+
+ a. for any code that a Contributor has removed from Covered Software; or
+
+ b. for infringements caused by: (i) Your and any other third party’s
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+ c. under Patent Claims infringed by Covered Software in the absence of its
+ Contributions.
+
+ This License does not grant any rights in the trademarks, service marks, or
+ logos of any Contributor (except as may be necessary to comply with the
+ notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+ No Contributor makes additional grants as a result of Your choice to
+ distribute the Covered Software under a subsequent version of this License
+ (see Section 10.2) or under the terms of a Secondary License (if permitted
+ under the terms of Section 3.3).
+
+2.5. Representation
+
+ Each Contributor represents that the Contributor believes its Contributions
+ are its original creation(s) or it has sufficient rights to grant the
+ rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+ This License is not intended to limit any rights You have under applicable
+ copyright doctrines of fair use, fair dealing, or other equivalents.
+
+2.7. Conditions
+
+ Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+ Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+ All distribution of Covered Software in Source Code Form, including any
+ Modifications that You create or to which You contribute, must be under the
+ terms of this License. You must inform recipients that the Source Code Form
+ of the Covered Software is governed by the terms of this License, and how
+ they can obtain a copy of this License. You may not attempt to alter or
+ restrict the recipients’ rights in the Source Code Form.
+
+3.2. Distribution of Executable Form
+
+ If You distribute Covered Software in Executable Form then:
+
+ a. such Covered Software must also be made available in Source Code Form,
+ as described in Section 3.1, and You must inform recipients of the
+ Executable Form how they can obtain a copy of such Source Code Form by
+ reasonable means in a timely manner, at a charge no more than the cost
+ of distribution to the recipient; and
+
+ b. You may distribute such Executable Form under the terms of this License,
+ or sublicense it under different terms, provided that the license for
+ the Executable Form does not attempt to limit or alter the recipients’
+ rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+ You may create and distribute a Larger Work under terms of Your choice,
+ provided that You also comply with the requirements of this License for the
+ Covered Software. If the Larger Work is a combination of Covered Software
+ with a work governed by one or more Secondary Licenses, and the Covered
+ Software is not Incompatible With Secondary Licenses, this License permits
+ You to additionally distribute such Covered Software under the terms of
+ such Secondary License(s), so that the recipient of the Larger Work may, at
+ their option, further distribute the Covered Software under the terms of
+ either this License or such Secondary License(s).
+
+3.4. Notices
+
+ You may not remove or alter the substance of any license notices (including
+ copyright notices, patent notices, disclaimers of warranty, or limitations
+ of liability) contained within the Source Code Form of the Covered
+ Software, except that You may alter any license notices to the extent
+ required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+ You may choose to offer, and to charge a fee for, warranty, support,
+ indemnity or liability obligations to one or more recipients of Covered
+ Software. However, You may do so only on Your own behalf, and not on behalf
+ of any Contributor. You must make it absolutely clear that any such
+ warranty, support, indemnity, or liability obligation is offered by You
+ alone, and You hereby agree to indemnify every Contributor for any
+ liability incurred by such Contributor as a result of warranty, support,
+ indemnity or liability terms You offer. You may include additional
+ disclaimers of warranty and limitations of liability specific to any
+ jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+ If it is impossible for You to comply with any of the terms of this License
+ with respect to some or all of the Covered Software due to statute, judicial
+ order, or regulation then You must: (a) comply with the terms of this License
+ to the maximum extent possible; and (b) describe the limitations and the code
+ they affect. Such description must be placed in a text file included with all
+ distributions of the Covered Software under this License. Except to the
+ extent prohibited by statute or regulation, such description must be
+ sufficiently detailed for a recipient of ordinary skill to be able to
+ understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+ fail to comply with any of its terms. However, if You become compliant,
+ then the rights granted under this License from a particular Contributor
+ are reinstated (a) provisionally, unless and until such Contributor
+ explicitly and finally terminates Your grants, and (b) on an ongoing basis,
+ if such Contributor fails to notify You of the non-compliance by some
+ reasonable means prior to 60 days after You have come back into compliance.
+ Moreover, Your grants from a particular Contributor are reinstated on an
+ ongoing basis if such Contributor notifies You of the non-compliance by
+ some reasonable means, this is the first time You have received notice of
+ non-compliance with this License from such Contributor, and You become
+ compliant prior to 30 days after Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+ infringement claim (excluding declaratory judgment actions, counter-claims,
+ and cross-claims) alleging that a Contributor Version directly or
+ indirectly infringes any patent, then the rights granted to You by any and
+ all Contributors for the Covered Software under Section 2.1 of this License
+ shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+ license agreements (excluding distributors and resellers) which have been
+ validly granted by You or Your distributors under this License prior to
+ termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+ Covered Software is provided under this License on an “as is” basis, without
+ warranty of any kind, either expressed, implied, or statutory, including,
+ without limitation, warranties that the Covered Software is free of defects,
+ merchantable, fit for a particular purpose or non-infringing. The entire
+ risk as to the quality and performance of the Covered Software is with You.
+ Should any Covered Software prove defective in any respect, You (not any
+ Contributor) assume the cost of any necessary servicing, repair, or
+ correction. This disclaimer of warranty constitutes an essential part of this
+ License. No use of any Covered Software is authorized under this License
+ except under this disclaimer.
+
+7. Limitation of Liability
+
+ Under no circumstances and under no legal theory, whether tort (including
+ negligence), contract, or otherwise, shall any Contributor, or anyone who
+ distributes Covered Software as permitted above, be liable to You for any
+ direct, indirect, special, incidental, or consequential damages of any
+ character including, without limitation, damages for lost profits, loss of
+ goodwill, work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses, even if such party shall have been
+ informed of the possibility of such damages. This limitation of liability
+ shall not apply to liability for death or personal injury resulting from such
+ party’s negligence to the extent applicable law prohibits such limitation.
+ Some jurisdictions do not allow the exclusion or limitation of incidental or
+ consequential damages, so this exclusion and limitation may not apply to You.
+
+8. Litigation
+
+ Any litigation relating to this License may be brought only in the courts of
+ a jurisdiction where the defendant maintains its principal place of business
+ and such litigation shall be governed by laws of that jurisdiction, without
+ reference to its conflict-of-law provisions. Nothing in this Section shall
+ prevent a party’s ability to bring cross-claims or counter-claims.
+
+9. Miscellaneous
+
+ This License represents the complete agreement concerning the subject matter
+ hereof. If any provision of this License is held to be unenforceable, such
+ provision shall be reformed only to the extent necessary to make it
+ enforceable. Any law or regulation which provides that the language of a
+ contract shall be construed against the drafter shall not be used to construe
+ this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+ Mozilla Foundation is the license steward. Except as provided in Section
+ 10.3, no one other than the license steward has the right to modify or
+ publish new versions of this License. Each version will be given a
+ distinguishing version number.
+
+10.2. Effect of New Versions
+
+ You may distribute the Covered Software under the terms of the version of
+ the License under which You originally received the Covered Software, or
+ under the terms of any subsequent version published by the license
+ steward.
+
+10.3. Modified Versions
+
+ If you create software not governed by this License, and you want to
+ create a new license for such software, you may create and use a modified
+ version of this License if you rename the license and remove any
+ references to the name of the license steward (except to note that such
+ modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
+ If You choose to distribute Source Code Form that is Incompatible With
+ Secondary Licenses under the terms of this version of the License, the
+ notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+
+ This Source Code Form is subject to the
+ terms of the Mozilla Public License, v.
+ 2.0. If a copy of the MPL was not
+ distributed with this file, You can
+ obtain one at
+ http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file, then
+You may include the notice in a location (such as a LICENSE file in a relevant
+directory) where a recipient would be likely to look for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - “Incompatible With Secondary Licenses” Notice
+
+ This Source Code Form is “Incompatible
+ With Secondary Licenses”, as defined by
+ the Mozilla Public License, v. 2.0.
+
diff --git a/vendor/github.com/hashicorp/hcl/Makefile b/vendor/github.com/hashicorp/hcl/Makefile
new file mode 100644
index 0000000..84fd743
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/Makefile
@@ -0,0 +1,18 @@
+TEST?=./...
+
+default: test
+
+fmt: generate
+ go fmt ./...
+
+test: generate
+ go get -t ./...
+ go test $(TEST) $(TESTARGS)
+
+generate:
+ go generate ./...
+
+updatedeps:
+ go get -u golang.org/x/tools/cmd/stringer
+
+.PHONY: default generate test updatedeps
diff --git a/vendor/github.com/hashicorp/hcl/README.md b/vendor/github.com/hashicorp/hcl/README.md
new file mode 100644
index 0000000..c822332
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/README.md
@@ -0,0 +1,125 @@
+# HCL
+
+[![GoDoc](https://godoc.org/github.com/hashicorp/hcl?status.png)](https://godoc.org/github.com/hashicorp/hcl) [![Build Status](https://travis-ci.org/hashicorp/hcl.svg?branch=master)](https://travis-ci.org/hashicorp/hcl)
+
+HCL (HashiCorp Configuration Language) is a configuration language built
+by HashiCorp. The goal of HCL is to build a structured configuration language
+that is both human and machine friendly for use with command-line tools, but
+specifically targeted towards DevOps tools, servers, etc.
+
+HCL is also fully JSON compatible. That is, JSON can be used as completely
+valid input to a system expecting HCL. This helps make systems
+interoperable with other systems.
+
+HCL is heavily inspired by
+[libucl](https://github.com/vstakhov/libucl),
+nginx configuration, and other similar languages.
+
+## Why?
+
+A common question when first viewing HCL is: why not
+JSON, YAML, etc.?
+
+Prior to HCL, the tools we built at [HashiCorp](http://www.hashicorp.com)
+used a variety of configuration languages from full programming languages
+such as Ruby to complete data structure languages such as JSON. What we
+learned is that some people wanted human-friendly configuration languages
+and some people wanted machine-friendly languages.
+
+JSON strikes a nice balance in this, but is fairly verbose and, most
+importantly, doesn't support comments. With YAML, we found that beginners
+had a really hard time determining what the actual structure was, and
+ended up guessing more often than not whether to use a hyphen, colon, etc.
+in order to represent some configuration key.
+
+Full programming languages such as Ruby enable complex behavior
+a configuration language shouldn't usually allow, and also force
+people to learn at least some Ruby.
+
+Because of this, we decided to create our own configuration language
+that is JSON-compatible. Our configuration language (HCL) is designed
+to be written and modified by humans. The API for HCL allows JSON
+as an input so that it is also machine-friendly (machines can generate
+JSON instead of trying to generate HCL).
+
+Our goal with HCL is not to alienate other configuration languages.
+It is instead to provide HCL as a specialized language for our tools,
+and JSON as the interoperability layer.
+
+## Syntax
+
+For a complete grammar, please see the parser itself. A high-level overview
+of the syntax and grammar is listed here.
+
+ * Single line comments start with `#` or `//`
+
+ * Multi-line comments are wrapped in `/*` and `*/`. Nested block comments
+ are not allowed. A multi-line comment (also known as a block comment)
+ terminates at the first `*/` found.
+
+ * Values are assigned with the syntax `key = value` (whitespace doesn't
+ matter). The value can be any primitive: a string, number, boolean,
+ object, or list.
+
+ * Strings are double-quoted and can contain any UTF-8 characters.
+ Example: `"Hello, World"`
+
+  * Multi-line strings start with `<<EOF` at the end of a line, and end
+    with `EOF` on its own line.
diff --git a/vendor/github.com/hashicorp/hcl/appveyor.yml b/vendor/github.com/hashicorp/hcl/appveyor.yml
new file mode 100644
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/appveyor.yml
+install:
+- cmd: >-
+    echo %Path%
+
+ go version
+
+ go env
+
+ go get -t ./...
+
+build_script:
+- cmd: go test -v ./...
diff --git a/vendor/github.com/hashicorp/hcl/decoder.go b/vendor/github.com/hashicorp/hcl/decoder.go
new file mode 100644
index 0000000..bed9ebb
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/decoder.go
@@ -0,0 +1,729 @@
+package hcl
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/hashicorp/hcl/hcl/ast"
+ "github.com/hashicorp/hcl/hcl/parser"
+ "github.com/hashicorp/hcl/hcl/token"
+)
+
+// tagName is the struct tag used to configure HCL decoding of structures.
+const tagName = "hcl"
+
+var (
+ // nodeType holds a reference to the type of ast.Node
+ nodeType reflect.Type = findNodeType()
+)
+
+// Unmarshal accepts a byte slice as input and writes the
+// data to the value pointed to by v.
+func Unmarshal(bs []byte, v interface{}) error {
+ root, err := parse(bs)
+ if err != nil {
+ return err
+ }
+
+ return DecodeObject(v, root)
+}
+
+// Decode reads the given input and decodes it into the structure
+// given by `out`.
+func Decode(out interface{}, in string) error {
+ obj, err := Parse(in)
+ if err != nil {
+ return err
+ }
+
+ return DecodeObject(out, obj)
+}
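+
+// Illustrative sketch (not upstream code): decoding a small HCL document
+// into a tagged struct; the Config type and input are hypothetical.
+//
+//	type Config struct {
+//		Name    string `hcl:"name"`
+//		Workers int    `hcl:"workers"`
+//	}
+//
+//	var c Config
+//	err := Decode(&c, "name = \"demo\"\nworkers = 4")
+//	// c.Name == "demo", c.Workers == 4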
+
+// DecodeObject is a lower-level version of Decode. It decodes a
+// raw Object into the given output.
+func DecodeObject(out interface{}, n ast.Node) error {
+ val := reflect.ValueOf(out)
+ if val.Kind() != reflect.Ptr {
+ return errors.New("result must be a pointer")
+ }
+
+ // If we have the file, we really decode the root node
+ if f, ok := n.(*ast.File); ok {
+ n = f.Node
+ }
+
+ var d decoder
+ return d.decode("root", n, val.Elem())
+}
+
+type decoder struct {
+ stack []reflect.Kind
+}
+
+func (d *decoder) decode(name string, node ast.Node, result reflect.Value) error {
+ k := result
+
+ // If we have an interface with a valid value, we use that
+ // for the check.
+ if result.Kind() == reflect.Interface {
+ elem := result.Elem()
+ if elem.IsValid() {
+ k = elem
+ }
+ }
+
+ // Push current onto stack unless it is an interface.
+ if k.Kind() != reflect.Interface {
+ d.stack = append(d.stack, k.Kind())
+
+ // Schedule a pop
+ defer func() {
+ d.stack = d.stack[:len(d.stack)-1]
+ }()
+ }
+
+ switch k.Kind() {
+ case reflect.Bool:
+ return d.decodeBool(name, node, result)
+ case reflect.Float32, reflect.Float64:
+ return d.decodeFloat(name, node, result)
+ case reflect.Int, reflect.Int32, reflect.Int64:
+ return d.decodeInt(name, node, result)
+ case reflect.Interface:
+ // When we see an interface, we make our own thing
+ return d.decodeInterface(name, node, result)
+ case reflect.Map:
+ return d.decodeMap(name, node, result)
+ case reflect.Ptr:
+ return d.decodePtr(name, node, result)
+ case reflect.Slice:
+ return d.decodeSlice(name, node, result)
+ case reflect.String:
+ return d.decodeString(name, node, result)
+ case reflect.Struct:
+ return d.decodeStruct(name, node, result)
+ default:
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: unknown kind to decode into: %s", name, k.Kind()),
+ }
+ }
+}
+
+func (d *decoder) decodeBool(name string, node ast.Node, result reflect.Value) error {
+ switch n := node.(type) {
+ case *ast.LiteralType:
+ if n.Token.Type == token.BOOL {
+ v, err := strconv.ParseBool(n.Token.Text)
+ if err != nil {
+ return err
+ }
+
+ result.Set(reflect.ValueOf(v))
+ return nil
+ }
+ }
+
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: unknown type %T", name, node),
+ }
+}
+
+func (d *decoder) decodeFloat(name string, node ast.Node, result reflect.Value) error {
+ switch n := node.(type) {
+ case *ast.LiteralType:
+ if n.Token.Type == token.FLOAT || n.Token.Type == token.NUMBER {
+ v, err := strconv.ParseFloat(n.Token.Text, 64)
+ if err != nil {
+ return err
+ }
+
+ result.Set(reflect.ValueOf(v).Convert(result.Type()))
+ return nil
+ }
+ }
+
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: unknown type %T", name, node),
+ }
+}
+
+func (d *decoder) decodeInt(name string, node ast.Node, result reflect.Value) error {
+ switch n := node.(type) {
+ case *ast.LiteralType:
+ switch n.Token.Type {
+ case token.NUMBER:
+ v, err := strconv.ParseInt(n.Token.Text, 0, 0)
+ if err != nil {
+ return err
+ }
+
+ if result.Kind() == reflect.Interface {
+ result.Set(reflect.ValueOf(int(v)))
+ } else {
+ result.SetInt(v)
+ }
+ return nil
+ case token.STRING:
+ v, err := strconv.ParseInt(n.Token.Value().(string), 0, 0)
+ if err != nil {
+ return err
+ }
+
+ if result.Kind() == reflect.Interface {
+ result.Set(reflect.ValueOf(int(v)))
+ } else {
+ result.SetInt(v)
+ }
+ return nil
+ }
+ }
+
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: unknown type %T", name, node),
+ }
+}
+
+func (d *decoder) decodeInterface(name string, node ast.Node, result reflect.Value) error {
+ // When we see an ast.Node, we retain the value to enable deferred decoding.
+ // Very useful in situations where we want to preserve ast.Node information
+ // like Pos
+ if result.Type() == nodeType && result.CanSet() {
+ result.Set(reflect.ValueOf(node))
+ return nil
+ }
+
+ var set reflect.Value
+ redecode := true
+
+ // For testing types, ObjectType should just be treated as a list. We
+ // set this to a temporary var because we want to pass in the real node.
+ testNode := node
+ if ot, ok := node.(*ast.ObjectType); ok {
+ testNode = ot.List
+ }
+
+ switch n := testNode.(type) {
+ case *ast.ObjectList:
+ // If we're at the root or we're directly within a slice, then we
+ // decode objects into map[string]interface{}, otherwise we decode
+ // them into lists.
+ if len(d.stack) == 0 || d.stack[len(d.stack)-1] == reflect.Slice {
+ var temp map[string]interface{}
+ tempVal := reflect.ValueOf(temp)
+ result := reflect.MakeMap(
+ reflect.MapOf(
+ reflect.TypeOf(""),
+ tempVal.Type().Elem()))
+
+ set = result
+ } else {
+ var temp []map[string]interface{}
+ tempVal := reflect.ValueOf(temp)
+ result := reflect.MakeSlice(
+ reflect.SliceOf(tempVal.Type().Elem()), 0, len(n.Items))
+ set = result
+ }
+ case *ast.ObjectType:
+ // If we're at the root or we're directly within a slice, then we
+ // decode objects into map[string]interface{}, otherwise we decode
+ // them into lists.
+ if len(d.stack) == 0 || d.stack[len(d.stack)-1] == reflect.Slice {
+ var temp map[string]interface{}
+ tempVal := reflect.ValueOf(temp)
+ result := reflect.MakeMap(
+ reflect.MapOf(
+ reflect.TypeOf(""),
+ tempVal.Type().Elem()))
+
+ set = result
+ } else {
+ var temp []map[string]interface{}
+ tempVal := reflect.ValueOf(temp)
+ result := reflect.MakeSlice(
+ reflect.SliceOf(tempVal.Type().Elem()), 0, 1)
+ set = result
+ }
+ case *ast.ListType:
+ var temp []interface{}
+ tempVal := reflect.ValueOf(temp)
+ result := reflect.MakeSlice(
+ reflect.SliceOf(tempVal.Type().Elem()), 0, 0)
+ set = result
+ case *ast.LiteralType:
+ switch n.Token.Type {
+ case token.BOOL:
+ var result bool
+ set = reflect.Indirect(reflect.New(reflect.TypeOf(result)))
+ case token.FLOAT:
+ var result float64
+ set = reflect.Indirect(reflect.New(reflect.TypeOf(result)))
+ case token.NUMBER:
+ var result int
+ set = reflect.Indirect(reflect.New(reflect.TypeOf(result)))
+ case token.STRING, token.HEREDOC:
+ set = reflect.Indirect(reflect.New(reflect.TypeOf("")))
+ default:
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: cannot decode into interface: %T", name, node),
+ }
+ }
+ default:
+ return fmt.Errorf(
+ "%s: cannot decode into interface: %T",
+ name, node)
+ }
+
+	// Set the result to what it's supposed to be, then reset
+ // result so we don't reflect into this method anymore.
+ result.Set(set)
+
+ if redecode {
+ // Revisit the node so that we can use the newly instantiated
+ // thing and populate it.
+ if err := d.decode(name, node, result); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (d *decoder) decodeMap(name string, node ast.Node, result reflect.Value) error {
+ if item, ok := node.(*ast.ObjectItem); ok {
+ node = &ast.ObjectList{Items: []*ast.ObjectItem{item}}
+ }
+
+ if ot, ok := node.(*ast.ObjectType); ok {
+ node = ot.List
+ }
+
+ n, ok := node.(*ast.ObjectList)
+ if !ok {
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: not an object type for map (%T)", name, node),
+ }
+ }
+
+ // If we have an interface, then we can address the interface,
+	// but not the map itself, so get the element but set the interface
+ set := result
+ if result.Kind() == reflect.Interface {
+ result = result.Elem()
+ }
+
+ resultType := result.Type()
+ resultElemType := resultType.Elem()
+ resultKeyType := resultType.Key()
+ if resultKeyType.Kind() != reflect.String {
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: map must have string keys", name),
+ }
+ }
+
+ // Make a map if it is nil
+ resultMap := result
+ if result.IsNil() {
+ resultMap = reflect.MakeMap(
+ reflect.MapOf(resultKeyType, resultElemType))
+ }
+
+ // Go through each element and decode it.
+ done := make(map[string]struct{})
+ for _, item := range n.Items {
+ if item.Val == nil {
+ continue
+ }
+
+ // github.com/hashicorp/terraform/issue/5740
+ if len(item.Keys) == 0 {
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: map must have string keys", name),
+ }
+ }
+
+ // Get the key we're dealing with, which is the first item
+ keyStr := item.Keys[0].Token.Value().(string)
+
+ // If we've already processed this key, then ignore it
+ if _, ok := done[keyStr]; ok {
+ continue
+ }
+
+ // Determine the value. If we have more than one key, then we
+ // get the objectlist of only these keys.
+ itemVal := item.Val
+ if len(item.Keys) > 1 {
+ itemVal = n.Filter(keyStr)
+ done[keyStr] = struct{}{}
+ }
+
+ // Make the field name
+ fieldName := fmt.Sprintf("%s.%s", name, keyStr)
+
+ // Get the key/value as reflection values
+ key := reflect.ValueOf(keyStr)
+ val := reflect.Indirect(reflect.New(resultElemType))
+
+ // If we have a pre-existing value in the map, use that
+ oldVal := resultMap.MapIndex(key)
+ if oldVal.IsValid() {
+ val.Set(oldVal)
+ }
+
+ // Decode!
+ if err := d.decode(fieldName, itemVal, val); err != nil {
+ return err
+ }
+
+ // Set the value on the map
+ resultMap.SetMapIndex(key, val)
+ }
+
+ // Set the final map if we can
+ set.Set(resultMap)
+ return nil
+}
+
+func (d *decoder) decodePtr(name string, node ast.Node, result reflect.Value) error {
+ // Create an element of the concrete (non pointer) type and decode
+ // into that. Then set the value of the pointer to this type.
+ resultType := result.Type()
+ resultElemType := resultType.Elem()
+ val := reflect.New(resultElemType)
+ if err := d.decode(name, node, reflect.Indirect(val)); err != nil {
+ return err
+ }
+
+ result.Set(val)
+ return nil
+}
+
+func (d *decoder) decodeSlice(name string, node ast.Node, result reflect.Value) error {
+ // If we have an interface, then we can address the interface,
+ // but not the slice itself, so get the element but set the interface
+ set := result
+ if result.Kind() == reflect.Interface {
+ result = result.Elem()
+ }
+	// Create the slice if it is nil
+ resultType := result.Type()
+ resultElemType := resultType.Elem()
+ if result.IsNil() {
+ resultSliceType := reflect.SliceOf(resultElemType)
+ result = reflect.MakeSlice(
+ resultSliceType, 0, 0)
+ }
+
+ // Figure out the items we'll be copying into the slice
+ var items []ast.Node
+ switch n := node.(type) {
+ case *ast.ObjectList:
+ items = make([]ast.Node, len(n.Items))
+ for i, item := range n.Items {
+ items[i] = item
+ }
+ case *ast.ObjectType:
+ items = []ast.Node{n}
+ case *ast.ListType:
+ items = n.List
+ default:
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("unknown slice type: %T", node),
+ }
+ }
+
+ for i, item := range items {
+ fieldName := fmt.Sprintf("%s[%d]", name, i)
+
+ // Decode
+ val := reflect.Indirect(reflect.New(resultElemType))
+
+ // if item is an object that was decoded from ambiguous JSON and
+ // flattened, make sure it's expanded if it needs to decode into a
+ // defined structure.
+ item := expandObject(item, val)
+
+ if err := d.decode(fieldName, item, val); err != nil {
+ return err
+ }
+
+ // Append it onto the slice
+ result = reflect.Append(result, val)
+ }
+
+ set.Set(result)
+ return nil
+}
+
+// expandObject detects if an ambiguous JSON object was flattened to a List which
+// should be decoded into a struct, and expands the AST to decode properly.
+func expandObject(node ast.Node, result reflect.Value) ast.Node {
+ item, ok := node.(*ast.ObjectItem)
+ if !ok {
+ return node
+ }
+
+ elemType := result.Type()
+
+ // our target type must be a struct
+ switch elemType.Kind() {
+ case reflect.Ptr:
+ switch elemType.Elem().Kind() {
+ case reflect.Struct:
+ //OK
+ default:
+ return node
+ }
+ case reflect.Struct:
+ //OK
+ default:
+ return node
+ }
+
+ // A list value will have a key and field name. If it had more fields,
+ // it wouldn't have been flattened.
+ if len(item.Keys) != 2 {
+ return node
+ }
+
+ keyToken := item.Keys[0].Token
+ item.Keys = item.Keys[1:]
+
+ // we need to un-flatten the ast enough to decode
+ newNode := &ast.ObjectItem{
+ Keys: []*ast.ObjectKey{
+ &ast.ObjectKey{
+ Token: keyToken,
+ },
+ },
+ Val: &ast.ObjectType{
+ List: &ast.ObjectList{
+ Items: []*ast.ObjectItem{item},
+ },
+ },
+ }
+
+ return newNode
+}
+
+func (d *decoder) decodeString(name string, node ast.Node, result reflect.Value) error {
+ switch n := node.(type) {
+ case *ast.LiteralType:
+ switch n.Token.Type {
+ case token.NUMBER:
+ result.Set(reflect.ValueOf(n.Token.Text).Convert(result.Type()))
+ return nil
+ case token.STRING, token.HEREDOC:
+ result.Set(reflect.ValueOf(n.Token.Value()).Convert(result.Type()))
+ return nil
+ }
+ }
+
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: unknown type for string %T", name, node),
+ }
+}
+
+func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) error {
+ var item *ast.ObjectItem
+ if it, ok := node.(*ast.ObjectItem); ok {
+ item = it
+ node = it.Val
+ }
+
+ if ot, ok := node.(*ast.ObjectType); ok {
+ node = ot.List
+ }
+
+ // Handle the special case where the object itself is a literal. Previously
+ // the yacc parser would always ensure top-level elements were arrays. The new
+ // parser does not make the same guarantees, thus we need to convert any
+ // top-level literal elements into a list.
+ if _, ok := node.(*ast.LiteralType); ok && item != nil {
+ node = &ast.ObjectList{Items: []*ast.ObjectItem{item}}
+ }
+
+ list, ok := node.(*ast.ObjectList)
+ if !ok {
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: not an object type for struct (%T)", name, node),
+ }
+ }
+
+ // This slice will keep track of all the structs we'll be decoding.
+ // There can be more than one struct if there are embedded structs
+ // that are squashed.
+ structs := make([]reflect.Value, 1, 5)
+ structs[0] = result
+
+ // Compile the list of all the fields that we're going to be decoding
+ // from all the structs.
+ type field struct {
+ field reflect.StructField
+ val reflect.Value
+ }
+ fields := []field{}
+ for len(structs) > 0 {
+ structVal := structs[0]
+ structs = structs[1:]
+
+ structType := structVal.Type()
+ for i := 0; i < structType.NumField(); i++ {
+ fieldType := structType.Field(i)
+ tagParts := strings.Split(fieldType.Tag.Get(tagName), ",")
+
+ // Ignore fields with tag name "-"
+ if tagParts[0] == "-" {
+ continue
+ }
+
+ if fieldType.Anonymous {
+ fieldKind := fieldType.Type.Kind()
+ if fieldKind != reflect.Struct {
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: unsupported type to struct: %s",
+ fieldType.Name, fieldKind),
+ }
+ }
+
+ // We have an embedded field. We "squash" the fields down
+ // if specified in the tag.
+ squash := false
+ for _, tag := range tagParts[1:] {
+ if tag == "squash" {
+ squash = true
+ break
+ }
+ }
+
+ if squash {
+ structs = append(
+ structs, result.FieldByName(fieldType.Name))
+ continue
+ }
+ }
+
+ // Normal struct field, store it away
+ fields = append(fields, field{fieldType, structVal.Field(i)})
+ }
+ }
+
+ usedKeys := make(map[string]struct{})
+ decodedFields := make([]string, 0, len(fields))
+ decodedFieldsVal := make([]reflect.Value, 0)
+ unusedKeysVal := make([]reflect.Value, 0)
+ for _, f := range fields {
+ field, fieldValue := f.field, f.val
+ if !fieldValue.IsValid() {
+ // This should never happen
+ panic("field is not valid")
+ }
+
+ // If we can't set the field, then it is unexported or something,
+ // and we just continue onwards.
+ if !fieldValue.CanSet() {
+ continue
+ }
+
+ fieldName := field.Name
+
+ tagValue := field.Tag.Get(tagName)
+ tagParts := strings.SplitN(tagValue, ",", 2)
+ if len(tagParts) >= 2 {
+ switch tagParts[1] {
+ case "decodedFields":
+ decodedFieldsVal = append(decodedFieldsVal, fieldValue)
+ continue
+ case "key":
+ if item == nil {
+ return &parser.PosError{
+ Pos: node.Pos(),
+ Err: fmt.Errorf("%s: %s asked for 'key', impossible",
+ name, fieldName),
+ }
+ }
+
+ fieldValue.SetString(item.Keys[0].Token.Value().(string))
+ continue
+ case "unusedKeys":
+ unusedKeysVal = append(unusedKeysVal, fieldValue)
+ continue
+ }
+ }
+
+ if tagParts[0] != "" {
+ fieldName = tagParts[0]
+ }
+
+ // Determine the element we'll use to decode. If it is a single
+ // match (only object with the field), then we decode it exactly.
+ // If it is a prefix match, then we decode the matches.
+ filter := list.Filter(fieldName)
+
+ prefixMatches := filter.Children()
+ matches := filter.Elem()
+ if len(matches.Items) == 0 && len(prefixMatches.Items) == 0 {
+ continue
+ }
+
+ // Track the used key
+ usedKeys[fieldName] = struct{}{}
+
+ // Create the field name and decode. We range over the elements
+ // because we actually want the value.
+ fieldName = fmt.Sprintf("%s.%s", name, fieldName)
+ if len(prefixMatches.Items) > 0 {
+ if err := d.decode(fieldName, prefixMatches, fieldValue); err != nil {
+ return err
+ }
+ }
+ for _, match := range matches.Items {
+ var decodeNode ast.Node = match.Val
+ if ot, ok := decodeNode.(*ast.ObjectType); ok {
+ decodeNode = &ast.ObjectList{Items: ot.List.Items}
+ }
+
+ if err := d.decode(fieldName, decodeNode, fieldValue); err != nil {
+ return err
+ }
+ }
+
+ decodedFields = append(decodedFields, field.Name)
+ }
+
+ if len(decodedFieldsVal) > 0 {
+ // Sort it so that it is deterministic
+ sort.Strings(decodedFields)
+
+ for _, v := range decodedFieldsVal {
+ v.Set(reflect.ValueOf(decodedFields))
+ }
+ }
+
+ return nil
+}
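+
+// Illustrative sketch (not upstream code) of the special tag suffixes
+// handled above; the types and field names are hypothetical.
+//
+//	type Service struct {
+//		Name string `hcl:",key"` // receives the block key, e.g. "web"
+//		Addr string `hcl:"addr"`
+//	}
+//	type Config struct {
+//		Services []Service `hcl:"service"`
+//	}
+//
+//	// service "web" { addr = "10.0.0.1" }
+//	// decodes to Config{Services: []Service{{Name: "web", Addr: "10.0.0.1"}}}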
+
+// findNodeType returns the type of ast.Node
+func findNodeType() reflect.Type {
+ var nodeContainer struct {
+ Node ast.Node
+ }
+ value := reflect.ValueOf(nodeContainer).FieldByName("Node")
+ return value.Type()
+}
diff --git a/vendor/github.com/hashicorp/hcl/hcl.go b/vendor/github.com/hashicorp/hcl/hcl.go
new file mode 100644
index 0000000..575a20b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl.go
@@ -0,0 +1,11 @@
+// Package hcl decodes HCL into usable Go structures.
+//
+// hcl input can come in either pure HCL format or JSON format.
+// It can be parsed into an AST, and then decoded into a structure,
+// or it can be decoded directly from a string into a structure.
+//
+// If you choose to parse HCL into a raw AST, the benefit is that you
+// can write custom visitor implementations to implement custom
+// semantic checks. By default, HCL does not perform any semantic
+// checks.
+package hcl
diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go b/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go
new file mode 100644
index 0000000..6e5ef65
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go
@@ -0,0 +1,219 @@
+// Package ast declares the types used to represent syntax trees for HCL
+// (HashiCorp Configuration Language)
+package ast
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/hashicorp/hcl/hcl/token"
+)
+
+// Node is an element in the abstract syntax tree.
+type Node interface {
+ node()
+ Pos() token.Pos
+}
+
+func (File) node() {}
+func (ObjectList) node() {}
+func (ObjectKey) node() {}
+func (ObjectItem) node() {}
+func (Comment) node() {}
+func (CommentGroup) node() {}
+func (ObjectType) node() {}
+func (LiteralType) node() {}
+func (ListType) node() {}
+
+// File represents a single HCL file
+type File struct {
+ Node Node // usually a *ObjectList
+ Comments []*CommentGroup // list of all comments in the source
+}
+
+func (f *File) Pos() token.Pos {
+ return f.Node.Pos()
+}
+
+// ObjectList represents a list of ObjectItems. An HCL file itself is an
+// ObjectList.
+type ObjectList struct {
+ Items []*ObjectItem
+}
+
+func (o *ObjectList) Add(item *ObjectItem) {
+ o.Items = append(o.Items, item)
+}
+
+// Filter filters out the objects with the given key list as a prefix.
+//
+// The returned list of objects contains ObjectItems where the keys have
+// this prefix already stripped off. This might result in objects with
+// zero-length key lists if they have no children.
+//
+// If no matches are found, an empty ObjectList (non-nil) is returned.
+func (o *ObjectList) Filter(keys ...string) *ObjectList {
+ var result ObjectList
+ for _, item := range o.Items {
+ // If there aren't enough keys, then ignore this
+ if len(item.Keys) < len(keys) {
+ continue
+ }
+
+ match := true
+ for i, key := range item.Keys[:len(keys)] {
+ key := key.Token.Value().(string)
+ if key != keys[i] && !strings.EqualFold(key, keys[i]) {
+ match = false
+ break
+ }
+ }
+ if !match {
+ continue
+ }
+
+ // Strip off the prefix from the children
+ newItem := *item
+ newItem.Keys = newItem.Keys[len(keys):]
+ result.Add(&newItem)
+ }
+
+ return &result
+}
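+
+// Illustrative sketch (not upstream code): for input such as
+// `service "web" { addr = "..." }`, the top-level list can be narrowed with
+//
+//	matches := list.Filter("service", "web")
+//
+// where each returned item has the ("service", "web") key prefix already
+// stripped.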
+
+// Children returns further nested objects (key length > 0) within this
+// ObjectList. This should be used with Filter to get at child items.
+func (o *ObjectList) Children() *ObjectList {
+ var result ObjectList
+ for _, item := range o.Items {
+ if len(item.Keys) > 0 {
+ result.Add(item)
+ }
+ }
+
+ return &result
+}
+
+// Elem returns items in the list that are direct element assignments
+// (key length == 0). This should be used with Filter to get at elements.
+func (o *ObjectList) Elem() *ObjectList {
+ var result ObjectList
+ for _, item := range o.Items {
+ if len(item.Keys) == 0 {
+ result.Add(item)
+ }
+ }
+
+ return &result
+}
+
+func (o *ObjectList) Pos() token.Pos {
+	// returns the position of the first item in the list
+ return o.Items[0].Pos()
+}
+
+// ObjectItem represents an HCL Object Item. An item is represented with a key
+// (or keys). It can be an assignment or an object (both normal and nested).
+type ObjectItem struct {
+	// Keys is only one element long if the item is an assignment. If it's a
+	// nested object it can be longer than one. In that case "Assign" is
+	// invalid, as there are no assignments for a nested object.
+ Keys []*ObjectKey
+
+	// Assign contains the position of "=", if any
+ Assign token.Pos
+
+	// Val is the item itself. It can be an object, list, number, bool or a
+	// string. If key length is larger than one, Val can only be of type
+	// Object.
+ Val Node
+
+ LeadComment *CommentGroup // associated lead comment
+ LineComment *CommentGroup // associated line comment
+}
+
+func (o *ObjectItem) Pos() token.Pos {
+ // I'm not entirely sure what causes this, but removing this causes
+ // a test failure. We should investigate at some point.
+ if len(o.Keys) == 0 {
+ return token.Pos{}
+ }
+
+ return o.Keys[0].Pos()
+}
+
+// ObjectKey represents an object key, which is either an identifier or a string.
+type ObjectKey struct {
+ Token token.Token
+}
+
+func (o *ObjectKey) Pos() token.Pos {
+ return o.Token.Pos
+}
+
+// LiteralType represents a literal of basic type. Valid types are:
+// token.NUMBER, token.FLOAT, token.BOOL and token.STRING
+type LiteralType struct {
+ Token token.Token
+
+ // comment types, only used when in a list
+ LeadComment *CommentGroup
+ LineComment *CommentGroup
+}
+
+func (l *LiteralType) Pos() token.Pos {
+ return l.Token.Pos
+}
+
+// ListType represents an HCL list type.
+type ListType struct {
+ Lbrack token.Pos // position of "["
+ Rbrack token.Pos // position of "]"
+ List []Node // the elements in lexical order
+}
+
+func (l *ListType) Pos() token.Pos {
+ return l.Lbrack
+}
+
+func (l *ListType) Add(node Node) {
+ l.List = append(l.List, node)
+}
+
+// ObjectType represents an HCL object type.
+type ObjectType struct {
+ Lbrace token.Pos // position of "{"
+ Rbrace token.Pos // position of "}"
+ List *ObjectList // the nodes in lexical order
+}
+
+func (o *ObjectType) Pos() token.Pos {
+ return o.Lbrace
+}
+
+// Comment node represents a single //-style, #-style, or /*-style comment.
+type Comment struct {
+ Start token.Pos // position of / or #
+ Text string
+}
+
+func (c *Comment) Pos() token.Pos {
+ return c.Start
+}
+
+// CommentGroup node represents a sequence of comments with no other tokens and
+// no empty lines between.
+type CommentGroup struct {
+ List []*Comment // len(List) > 0
+}
+
+func (c *CommentGroup) Pos() token.Pos {
+ return c.List[0].Pos()
+}
+
+//-------------------------------------------------------------------
+// GoStringer
+//-------------------------------------------------------------------
+
+func (o *ObjectKey) GoString() string { return fmt.Sprintf("*%#v", *o) }
+func (o *ObjectList) GoString() string { return fmt.Sprintf("*%#v", *o) }
diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go b/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go
new file mode 100644
index 0000000..ba07ad4
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go
@@ -0,0 +1,52 @@
+package ast
+
+import "fmt"
+
+// WalkFunc describes a function to be called for each node during a Walk. The
+// returned node can be used to rewrite the AST. Walking stops if the returned
+// bool is false.
+type WalkFunc func(Node) (Node, bool)
+
+// Walk traverses an AST in depth-first order: It starts by calling fn(node);
+// node must not be nil. If fn returns true, Walk invokes fn recursively for
+// each of the non-nil children of node, followed by a call of fn(nil). The
+// node returned by fn can be used to rewrite the node that was passed to it.
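+//
+// A minimal sketch of a rewriting walk (client-side; names are illustrative
+// and the strings package is assumed to be imported):
+//
+//	ast.Walk(file.Node, func(n ast.Node) (ast.Node, bool) {
+//		if lit, ok := n.(*ast.LiteralType); ok {
+//			lit.Token.Text = strings.ToUpper(lit.Token.Text)
+//		}
+//		return n, true
+//	})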
+func Walk(node Node, fn WalkFunc) Node {
+ rewritten, ok := fn(node)
+ if !ok {
+ return rewritten
+ }
+
+ switch n := node.(type) {
+ case *File:
+ n.Node = Walk(n.Node, fn)
+ case *ObjectList:
+ for i, item := range n.Items {
+ n.Items[i] = Walk(item, fn).(*ObjectItem)
+ }
+ case *ObjectKey:
+ // nothing to do
+ case *ObjectItem:
+ for i, k := range n.Keys {
+ n.Keys[i] = Walk(k, fn).(*ObjectKey)
+ }
+
+ if n.Val != nil {
+ n.Val = Walk(n.Val, fn)
+ }
+ case *LiteralType:
+ // nothing to do
+ case *ListType:
+ for i, l := range n.List {
+ n.List[i] = Walk(l, fn)
+ }
+ case *ObjectType:
+ n.List = Walk(n.List, fn).(*ObjectList)
+ default:
+ // should we panic here?
+ fmt.Printf("unknown type: %T\n", n)
+ }
+
+ fn(nil)
+ return rewritten
+}
diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/error.go b/vendor/github.com/hashicorp/hcl/hcl/parser/error.go
new file mode 100644
index 0000000..5c99381
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/parser/error.go
@@ -0,0 +1,17 @@
+package parser
+
+import (
+ "fmt"
+
+ "github.com/hashicorp/hcl/hcl/token"
+)
+
+// PosError is a parse error that contains a position.
+type PosError struct {
+ Pos token.Pos
+ Err error
+}
+
+func (e *PosError) Error() string {
+ return fmt.Sprintf("At %s: %s", e.Pos, e.Err)
+}
diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go
new file mode 100644
index 0000000..64c83bc
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go
@@ -0,0 +1,532 @@
+// Package parser implements a parser for HCL (HashiCorp Configuration
+// Language)
+package parser
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "strings"
+
+ "github.com/hashicorp/hcl/hcl/ast"
+ "github.com/hashicorp/hcl/hcl/scanner"
+ "github.com/hashicorp/hcl/hcl/token"
+)
+
+type Parser struct {
+ sc *scanner.Scanner
+
+ // Last read token
+ tok token.Token
+ commaPrev token.Token
+
+ comments []*ast.CommentGroup
+ leadComment *ast.CommentGroup // last lead comment
+ lineComment *ast.CommentGroup // last line comment
+
+ enableTrace bool
+ indent int
+ n int // buffer size (max = 1)
+}
+
+func newParser(src []byte) *Parser {
+ return &Parser{
+ sc: scanner.New(src),
+ }
+}
+
+// Parse parses the given source and returns the resulting abstract syntax tree.
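+//
+// A minimal usage sketch (the input and error handling are illustrative):
+//
+//	f, err := parser.Parse([]byte(`foo = "bar"`))
+//	if err != nil {
+//		// handle the parse error
+//	}
+//	list := f.Node.(*ast.ObjectList)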
+func Parse(src []byte) (*ast.File, error) {
+ // Normalize all line endings: the scanner and output only work with "\n"
+ // line endings, so without this we may end up with dangling "\r"
+ // characters in the parsed data.
+ src = bytes.Replace(src, []byte("\r\n"), []byte("\n"), -1)
+
+ p := newParser(src)
+ return p.Parse()
+}
+
+var errEofToken = errors.New("EOF token found")
+
+// Parse parses the parser's source and returns the abstract syntax tree.
+func (p *Parser) Parse() (*ast.File, error) {
+ f := &ast.File{}
+ var err, scerr error
+ p.sc.Error = func(pos token.Pos, msg string) {
+ scerr = &PosError{Pos: pos, Err: errors.New(msg)}
+ }
+
+ f.Node, err = p.objectList(false)
+ if scerr != nil {
+ return nil, scerr
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ f.Comments = p.comments
+ return f, nil
+}
+
+// objectList parses a list of items within an object (generally k/v pairs).
+// The parameter" obj" tells this whether to we are within an object (braces:
+// '{', '}') or just at the top level. If we're within an object, we end
+// at an RBRACE.
+func (p *Parser) objectList(obj bool) (*ast.ObjectList, error) {
+ defer un(trace(p, "ParseObjectList"))
+ node := &ast.ObjectList{}
+
+ for {
+ if obj {
+ tok := p.scan()
+ p.unscan()
+ if tok.Type == token.RBRACE {
+ break
+ }
+ }
+
+ n, err := p.objectItem()
+ if err == errEofToken {
+ break // we are finished
+ }
+
+ // we don't return a nil node, because the caller might want to use the
+ // already collected items.
+ if err != nil {
+ return node, err
+ }
+
+ node.Add(n)
+
+ // object lists can be optionally comma-delimited e.g. when a list of maps
+ // is being expressed, so a comma is allowed here - it's simply consumed
+ tok := p.scan()
+ if tok.Type != token.COMMA {
+ p.unscan()
+ }
+ }
+ return node, nil
+}
+
+func (p *Parser) consumeComment() (comment *ast.Comment, endline int) {
+ endline = p.tok.Pos.Line
+
+ // advance endline across a multiline comment, i.e. one starting with /*
+ if len(p.tok.Text) > 1 && p.tok.Text[1] == '*' {
+ // don't use range here - no need to decode Unicode code points
+ for i := 0; i < len(p.tok.Text); i++ {
+ if p.tok.Text[i] == '\n' {
+ endline++
+ }
+ }
+ }
+
+ comment = &ast.Comment{Start: p.tok.Pos, Text: p.tok.Text}
+ p.tok = p.sc.Scan()
+ return
+}
+
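+// consumeCommentGroup gathers consecutive COMMENT tokens into a single group.
+// A following comment joins the group only if it starts no more than n lines
+// after the previous comment ends (n=0: same line only).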
+func (p *Parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
+ var list []*ast.Comment
+ endline = p.tok.Pos.Line
+
+ for p.tok.Type == token.COMMENT && p.tok.Pos.Line <= endline+n {
+ var comment *ast.Comment
+ comment, endline = p.consumeComment()
+ list = append(list, comment)
+ }
+
+ // add comment group to the comments list
+ comments = &ast.CommentGroup{List: list}
+ p.comments = append(p.comments, comments)
+
+ return
+}
+
+// objectItem parses a single object item
+func (p *Parser) objectItem() (*ast.ObjectItem, error) {
+ defer un(trace(p, "ParseObjectItem"))
+
+ keys, err := p.objectKey()
+ if len(keys) > 0 && err == errEofToken {
+ // We ignore eof token here since it is an error if we didn't
+ // receive a value (but we did receive a key) for the item.
+ err = nil
+ }
+ if len(keys) > 0 && err != nil && p.tok.Type == token.RBRACE {
+ // This is a strange boolean statement, but what it means is:
+ // We have keys with no value, and we're likely in an object
+ // (since RBrace ends an object). For this, we set err to nil so
+ // we continue and get the error below of having the wrong value
+ // type.
+ err = nil
+
+ // Reset the token type so we don't think it completed fine. See
+ // objectType which uses p.tok.Type to check if we're done with
+ // the object.
+ p.tok.Type = token.EOF
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ o := &ast.ObjectItem{
+ Keys: keys,
+ }
+
+ if p.leadComment != nil {
+ o.LeadComment = p.leadComment
+ p.leadComment = nil
+ }
+
+ switch p.tok.Type {
+ case token.ASSIGN:
+ o.Assign = p.tok.Pos
+ o.Val, err = p.object()
+ if err != nil {
+ return nil, err
+ }
+ case token.LBRACE:
+ o.Val, err = p.objectType()
+ if err != nil {
+ return nil, err
+ }
+ default:
+ keyStr := make([]string, 0, len(keys))
+ for _, k := range keys {
+ keyStr = append(keyStr, k.Token.Text)
+ }
+
+ return nil, &PosError{
+ Pos: p.tok.Pos,
+ Err: fmt.Errorf(
+ "key '%s' expected start of object ('{') or assignment ('=')",
+ strings.Join(keyStr, " ")),
+ }
+ }
+
+ // key=#comment
+ // val
+ if p.lineComment != nil {
+ o.LineComment, p.lineComment = p.lineComment, nil
+ }
+
+ // do a look-ahead for line comment
+ p.scan()
+ if len(keys) > 0 && o.Val.Pos().Line == keys[0].Pos().Line && p.lineComment != nil {
+ o.LineComment = p.lineComment
+ p.lineComment = nil
+ }
+ p.unscan()
+ return o, nil
+}
+
+// objectKey parses an object key and returns an ObjectKey AST.
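+// Keys may be a single identifier or string ("foo = ..."), or several in the
+// nested-object shorthand ("foo bar { ... }", roughly equivalent to bar
+// nested under foo).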
+func (p *Parser) objectKey() ([]*ast.ObjectKey, error) {
+ keyCount := 0
+ keys := make([]*ast.ObjectKey, 0)
+
+ for {
+ tok := p.scan()
+ switch tok.Type {
+ case token.EOF:
+ // It is very important to also return the keys here as well as
+ // the error. This is because we need to be able to tell if we
+ // did parse keys prior to finding the EOF, or if we just found
+ // a bare EOF.
+ return keys, errEofToken
+ case token.ASSIGN:
+ // assignment or object only, but not nested objects. This is not
+ // allowed: `foo bar = {}`
+ if keyCount > 1 {
+ return nil, &PosError{
+ Pos: p.tok.Pos,
+ Err: fmt.Errorf("nested object expected: LBRACE got: %s", p.tok.Type),
+ }
+ }
+
+ if keyCount == 0 {
+ return nil, &PosError{
+ Pos: p.tok.Pos,
+ Err: errors.New("no object keys found!"),
+ }
+ }
+
+ return keys, nil
+ case token.LBRACE:
+ var err error
+
+ // If we have no keys, then it is a syntax error, i.e. {{}} is not
+ // allowed.
+ if len(keys) == 0 {
+ err = &PosError{
+ Pos: p.tok.Pos,
+ Err: fmt.Errorf("expected: IDENT | STRING got: %s", p.tok.Type),
+ }
+ }
+
+ // object
+ return keys, err
+ case token.IDENT, token.STRING:
+ keyCount++
+ keys = append(keys, &ast.ObjectKey{Token: p.tok})
+ case token.ILLEGAL:
+ return keys, &PosError{
+ Pos: p.tok.Pos,
+ Err: fmt.Errorf("illegal character"),
+ }
+ default:
+ return keys, &PosError{
+ Pos: p.tok.Pos,
+ Err: fmt.Errorf("expected: IDENT | STRING | ASSIGN | LBRACE got: %s", p.tok.Type),
+ }
+ }
+ }
+}
+
+// object parses any type of object, such as number, bool, string, object or
+// list.
+func (p *Parser) object() (ast.Node, error) {
+ defer un(trace(p, "ParseType"))
+ tok := p.scan()
+
+ switch tok.Type {
+ case token.NUMBER, token.FLOAT, token.BOOL, token.STRING, token.HEREDOC:
+ return p.literalType()
+ case token.LBRACE:
+ return p.objectType()
+ case token.LBRACK:
+ return p.listType()
+ case token.COMMENT:
+ // implement comment
+ case token.EOF:
+ return nil, errEofToken
+ }
+
+ return nil, &PosError{
+ Pos: tok.Pos,
+ Err: fmt.Errorf("Unknown token: %+v", tok),
+ }
+}
+
+// objectType parses an object type and returns a ObjectType AST
+func (p *Parser) objectType() (*ast.ObjectType, error) {
+ defer un(trace(p, "ParseObjectType"))
+
+ // we assume that the currently scanned token is a LBRACE
+ o := &ast.ObjectType{
+ Lbrace: p.tok.Pos,
+ }
+
+ l, err := p.objectList(true)
+
+ // If we hit RBRACE, we are good to go (it means we parsed all items); if it's
+ // not an RBRACE, it's a syntax error and we just return it.
+ if err != nil && p.tok.Type != token.RBRACE {
+ return nil, err
+ }
+
+ // No error, scan and expect the ending to be a brace
+ if tok := p.scan(); tok.Type != token.RBRACE {
+ return nil, &PosError{
+ Pos: tok.Pos,
+ Err: fmt.Errorf("object expected closing RBRACE got: %s", tok.Type),
+ }
+ }
+
+ o.List = l
+ o.Rbrace = p.tok.Pos // advanced via parseObjectList
+ return o, nil
+}
+
+// listType parses a list type and returns a ListType AST
+func (p *Parser) listType() (*ast.ListType, error) {
+ defer un(trace(p, "ParseListType"))
+
+ // we assume that the currently scanned token is a LBRACK
+ l := &ast.ListType{
+ Lbrack: p.tok.Pos,
+ }
+
+ needComma := false
+ for {
+ tok := p.scan()
+ if needComma {
+ switch tok.Type {
+ case token.COMMA, token.RBRACK:
+ default:
+ return nil, &PosError{
+ Pos: tok.Pos,
+ Err: fmt.Errorf(
+ "error parsing list, expected comma or list end, got: %s",
+ tok.Type),
+ }
+ }
+ }
+ switch tok.Type {
+ case token.BOOL, token.NUMBER, token.FLOAT, token.STRING, token.HEREDOC:
+ node, err := p.literalType()
+ if err != nil {
+ return nil, err
+ }
+
+ // If there is a lead comment, apply it
+ if p.leadComment != nil {
+ node.LeadComment = p.leadComment
+ p.leadComment = nil
+ }
+
+ l.Add(node)
+ needComma = true
+ case token.COMMA:
+ // get next list item or we are at the end
+ // do a look-ahead for line comment
+ p.scan()
+ if p.lineComment != nil && len(l.List) > 0 {
+ lit, ok := l.List[len(l.List)-1].(*ast.LiteralType)
+ if ok {
+ lit.LineComment = p.lineComment
+ l.List[len(l.List)-1] = lit
+ p.lineComment = nil
+ }
+ }
+ p.unscan()
+
+ needComma = false
+ continue
+ case token.LBRACE:
+ // Looks like a nested object, so parse it out
+ node, err := p.objectType()
+ if err != nil {
+ return nil, &PosError{
+ Pos: tok.Pos,
+ Err: fmt.Errorf(
+ "error while trying to parse object within list: %s", err),
+ }
+ }
+ l.Add(node)
+ needComma = true
+ case token.LBRACK:
+ node, err := p.listType()
+ if err != nil {
+ return nil, &PosError{
+ Pos: tok.Pos,
+ Err: fmt.Errorf(
+ "error while trying to parse list within list: %s", err),
+ }
+ }
+ l.Add(node)
+ case token.RBRACK:
+ // finished
+ l.Rbrack = p.tok.Pos
+ return l, nil
+ default:
+ return nil, &PosError{
+ Pos: tok.Pos,
+ Err: fmt.Errorf("unexpected token while parsing list: %s", tok.Type),
+ }
+ }
+ }
+}
+
+// literalType parses a literal type and returns a LiteralType AST
+func (p *Parser) literalType() (*ast.LiteralType, error) {
+ defer un(trace(p, "ParseLiteral"))
+
+ return &ast.LiteralType{
+ Token: p.tok,
+ }, nil
+}
+
+// scan returns the next token from the underlying scanner. If a token has
+// been unscanned then read that instead. In the process, it collects any
+// comment groups encountered, and remembers the last lead and line comments.
+func (p *Parser) scan() token.Token {
+ // If we have a token on the buffer, then return it.
+ if p.n != 0 {
+ p.n = 0
+ return p.tok
+ }
+
+ // Otherwise read the next token from the scanner and save it to the buffer
+ // in case we unscan later.
+ prev := p.tok
+ p.tok = p.sc.Scan()
+
+ if p.tok.Type == token.COMMENT {
+ var comment *ast.CommentGroup
+ var endline int
+
+ // fmt.Printf("p.tok.Pos.Line = %+v prev: %d endline %d \n",
+ // p.tok.Pos.Line, prev.Pos.Line, endline)
+ if p.tok.Pos.Line == prev.Pos.Line {
+ // The comment is on same line as the previous token; it
+ // cannot be a lead comment but may be a line comment.
+ comment, endline = p.consumeCommentGroup(0)
+ if p.tok.Pos.Line != endline {
+ // The next token is on a different line, thus
+ // the last comment group is a line comment.
+ p.lineComment = comment
+ }
+ }
+
+ // consume successor comments, if any
+ endline = -1
+ for p.tok.Type == token.COMMENT {
+ comment, endline = p.consumeCommentGroup(1)
+ }
+
+ if endline+1 == p.tok.Pos.Line && p.tok.Type != token.RBRACE {
+ switch p.tok.Type {
+ case token.RBRACE, token.RBRACK:
+ // Do not count for these cases
+ default:
+ // The next token is following on the line immediately after the
+ // comment group, thus the last comment group is a lead comment.
+ p.leadComment = comment
+ }
+ }
+
+ }
+
+ return p.tok
+}
+
+// unscan pushes the previously read token back onto the buffer.
+func (p *Parser) unscan() {
+ p.n = 1
+}
+
+// ----------------------------------------------------------------------------
+// Parsing support
+
+func (p *Parser) printTrace(a ...interface{}) {
+ if !p.enableTrace {
+ return
+ }
+
+ const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
+ const n = len(dots)
+ fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column)
+
+ i := 2 * p.indent
+ for i > n {
+ fmt.Print(dots)
+ i -= n
+ }
+ // i <= n
+ fmt.Print(dots[0:i])
+ fmt.Println(a...)
+}
+
+func trace(p *Parser, msg string) *Parser {
+ p.printTrace(msg, "(")
+ p.indent++
+ return p
+}
+
+// Usage pattern: defer un(trace(p, "..."))
+func un(p *Parser) {
+ p.indent--
+ p.printTrace(")")
+}
diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go b/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go
new file mode 100644
index 0000000..7c038d1
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go
@@ -0,0 +1,789 @@
+package printer
+
+import (
+ "bytes"
+ "fmt"
+ "sort"
+
+ "github.com/hashicorp/hcl/hcl/ast"
+ "github.com/hashicorp/hcl/hcl/token"
+)
+
+const (
+ blank = byte(' ')
+ newline = byte('\n')
+ tab = byte('\t')
+ infinity = 1 << 30 // offset or line
+)
+
+var (
+ unindent = []byte("\uE123") // in the private use space
+)
+
+type printer struct {
+ cfg Config
+ prev token.Pos
+
+ comments []*ast.CommentGroup // may be nil, contains all comments
+ standaloneComments []*ast.CommentGroup // contains all standalone comments (not assigned to any node)
+
+ enableTrace bool
+ indentTrace int
+}
+
+type ByPosition []*ast.CommentGroup
+
+func (b ByPosition) Len() int { return len(b) }
+func (b ByPosition) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
+func (b ByPosition) Less(i, j int) bool { return b[i].Pos().Before(b[j].Pos()) }
+
+// collectComments collects all standalone comments, i.e. comments which are
+// not lead or line comments.
+func (p *printer) collectComments(node ast.Node) {
+ // first collect all comments. These are already stored in
+ // ast.File.Comments.
+ ast.Walk(node, func(nn ast.Node) (ast.Node, bool) {
+ switch t := nn.(type) {
+ case *ast.File:
+ p.comments = t.Comments
+ return nn, false
+ }
+ return nn, true
+ })
+
+ standaloneComments := make(map[token.Pos]*ast.CommentGroup, 0)
+ for _, c := range p.comments {
+ standaloneComments[c.Pos()] = c
+ }
+
+ // next remove all lead and line comments from the overall comment map.
+ // This will give us comments which are standalone, comments which are not
+ // assigned to any kind of node.
+ ast.Walk(node, func(nn ast.Node) (ast.Node, bool) {
+ switch t := nn.(type) {
+ case *ast.LiteralType:
+ if t.LeadComment != nil {
+ for _, comment := range t.LeadComment.List {
+ if _, ok := standaloneComments[comment.Pos()]; ok {
+ delete(standaloneComments, comment.Pos())
+ }
+ }
+ }
+
+ if t.LineComment != nil {
+ for _, comment := range t.LineComment.List {
+ if _, ok := standaloneComments[comment.Pos()]; ok {
+ delete(standaloneComments, comment.Pos())
+ }
+ }
+ }
+ case *ast.ObjectItem:
+ if t.LeadComment != nil {
+ for _, comment := range t.LeadComment.List {
+ if _, ok := standaloneComments[comment.Pos()]; ok {
+ delete(standaloneComments, comment.Pos())
+ }
+ }
+ }
+
+ if t.LineComment != nil {
+ for _, comment := range t.LineComment.List {
+ if _, ok := standaloneComments[comment.Pos()]; ok {
+ delete(standaloneComments, comment.Pos())
+ }
+ }
+ }
+ }
+
+ return nn, true
+ })
+
+ for _, c := range standaloneComments {
+ p.standaloneComments = append(p.standaloneComments, c)
+ }
+
+ sort.Sort(ByPosition(p.standaloneComments))
+}
+
+// output creates printable HCL output for the given node and returns it.
+func (p *printer) output(n interface{}) []byte {
+ var buf bytes.Buffer
+
+ switch t := n.(type) {
+ case *ast.File:
+ // File doesn't trace so we add the tracing here
+ defer un(trace(p, "File"))
+ return p.output(t.Node)
+ case *ast.ObjectList:
+ defer un(trace(p, "ObjectList"))
+
+ var index int
+ for {
+ // Determine the location of the next actual non-comment
+ // item. If we're at the end, the next item is at "infinity"
+ var nextItem token.Pos
+ if index != len(t.Items) {
+ nextItem = t.Items[index].Pos()
+ } else {
+ nextItem = token.Pos{Offset: infinity, Line: infinity}
+ }
+
+ // Go through the standalone comments in the file and print out
+ // the comments that belong before this object item.
+ for _, c := range p.standaloneComments {
+ // Go through all the comments in the group. The group
+ // should be printed together, not separated by double newlines.
+ printed := false
+ newlinePrinted := false
+ for _, comment := range c.List {
+ // We only care about comments after the previous item
+ // we've printed so that comments are printed in the
+ // correct locations (between two objects for example).
+ // And before the next item.
+ if comment.Pos().After(p.prev) && comment.Pos().Before(nextItem) {
+ // If we hit the end, add newlines so we can print the comment.
+ // We don't do this if prev is invalid, which means we're at the
+ // beginning of the file, since the first comment should
+ // be on the first line.
+ if !newlinePrinted && p.prev.IsValid() && index == len(t.Items) {
+ buf.Write([]byte{newline, newline})
+ newlinePrinted = true
+ }
+
+ // Write the actual comment.
+ buf.WriteString(comment.Text)
+ buf.WriteByte(newline)
+
+ // Set printed to true to note that we printed something
+ printed = true
+ }
+ }
+
+ // If we're not at the last item, write a new line so
+ // that there is a newline separating this comment from
+ // the next object.
+ if printed && index != len(t.Items) {
+ buf.WriteByte(newline)
+ }
+ }
+
+ if index == len(t.Items) {
+ break
+ }
+
+ buf.Write(p.output(t.Items[index]))
+ if index != len(t.Items)-1 {
+ // Always write a newline to separate us from the next item
+ buf.WriteByte(newline)
+
+ // Need to determine if we're going to separate the next item
+ // with a blank line. The logic here is simple, though there
+ // are a few conditions:
+ //
+ // 1. The next object is more than one line away anyways,
+ // so we need an empty line.
+ //
+ // 2. The next object is not a "single line" object, so
+ // we need an empty line.
+ //
+ // 3. This current object is not a single line object,
+ // so we need an empty line.
+ current := t.Items[index]
+ next := t.Items[index+1]
+ if next.Pos().Line != t.Items[index].Pos().Line+1 ||
+ !p.isSingleLineObject(next) ||
+ !p.isSingleLineObject(current) {
+ buf.WriteByte(newline)
+ }
+ }
+ index++
+ }
+ case *ast.ObjectKey:
+ buf.WriteString(t.Token.Text)
+ case *ast.ObjectItem:
+ p.prev = t.Pos()
+ buf.Write(p.objectItem(t))
+ case *ast.LiteralType:
+ buf.Write(p.literalType(t))
+ case *ast.ListType:
+ buf.Write(p.list(t))
+ case *ast.ObjectType:
+ buf.Write(p.objectType(t))
+ default:
+ fmt.Printf(" unknown type: %T\n", n)
+ }
+
+ return buf.Bytes()
+}
+
+func (p *printer) literalType(lit *ast.LiteralType) []byte {
+ result := []byte(lit.Token.Text)
+ switch lit.Token.Type {
+ case token.HEREDOC:
+ // Clear the trailing newline from heredocs
+ if result[len(result)-1] == '\n' {
+ result = result[:len(result)-1]
+ }
+
+ // Poison lines 2+ so that we don't indent them
+ result = p.heredocIndent(result)
+ case token.STRING:
+ // If this is a multiline string, poison lines 2+ so we don't
+ // indent them.
+ if bytes.IndexRune(result, '\n') >= 0 {
+ result = p.heredocIndent(result)
+ }
+ }
+
+ return result
+}
+
+// objectItem returns the printable HCL form of an object item. An object type
+// starts with one/multiple keys and has a value. The value might be of any
+// type.
+func (p *printer) objectItem(o *ast.ObjectItem) []byte {
+ defer un(trace(p, fmt.Sprintf("ObjectItem: %s", o.Keys[0].Token.Text)))
+ var buf bytes.Buffer
+
+ if o.LeadComment != nil {
+ for _, comment := range o.LeadComment.List {
+ buf.WriteString(comment.Text)
+ buf.WriteByte(newline)
+ }
+ }
+
+ // If key and val are on different lines, treat line comments like lead comments.
+ if o.LineComment != nil && o.Val.Pos().Line != o.Keys[0].Pos().Line {
+ for _, comment := range o.LineComment.List {
+ buf.WriteString(comment.Text)
+ buf.WriteByte(newline)
+ }
+ }
+
+ for i, k := range o.Keys {
+ buf.WriteString(k.Token.Text)
+ buf.WriteByte(blank)
+
+ // reached the end of the keys
+ if o.Assign.IsValid() && i == len(o.Keys)-1 && len(o.Keys) == 1 {
+ buf.WriteString("=")
+ buf.WriteByte(blank)
+ }
+ }
+
+ buf.Write(p.output(o.Val))
+
+ if o.LineComment != nil && o.Val.Pos().Line == o.Keys[0].Pos().Line {
+ buf.WriteByte(blank)
+ for _, comment := range o.LineComment.List {
+ buf.WriteString(comment.Text)
+ }
+ }
+
+ return buf.Bytes()
+}
+
+// objectType returns the printable HCL form of an object type. An object type
+// begins with a brace and ends with a brace.
+func (p *printer) objectType(o *ast.ObjectType) []byte {
+ defer un(trace(p, "ObjectType"))
+ var buf bytes.Buffer
+ buf.WriteString("{")
+
+ var index int
+ var nextItem token.Pos
+ var commented, newlinePrinted bool
+ for {
+ // Determine the location of the next actual non-comment
+ // item. If we're at the end, the next item is the closing brace
+ if index != len(o.List.Items) {
+ nextItem = o.List.Items[index].Pos()
+ } else {
+ nextItem = o.Rbrace
+ }
+
+ // Go through the standalone comments in the file and print out
+ // the comments that belong before this object item.
+ for _, c := range p.standaloneComments {
+ printed := false
+ var lastCommentPos token.Pos
+ for _, comment := range c.List {
+ // We only care about comments after the previous item
+ // we've printed so that comments are printed in the
+ // correct locations (between two objects for example).
+ // And before the next item.
+ if comment.Pos().After(p.prev) && comment.Pos().Before(nextItem) {
+ // If there are standalone comments and the initial newline has not
+ // been printed yet, do it now.
+ if !newlinePrinted {
+ newlinePrinted = true
+ buf.WriteByte(newline)
+ }
+
+ // add newline if it's between other printed nodes
+ if index > 0 {
+ commented = true
+ buf.WriteByte(newline)
+ }
+
+ // Store this position
+ lastCommentPos = comment.Pos()
+
+ // output the comment itself
+ buf.Write(p.indent(p.heredocIndent([]byte(comment.Text))))
+
+ // Set printed to true to note that we printed something
+ printed = true
+
+ /*
+ if index != len(o.List.Items) {
+ buf.WriteByte(newline) // do not print on the end
+ }
+ */
+ }
+ }
+
+ // Stuff to do if we had comments
+ if printed {
+ // Always write a newline
+ buf.WriteByte(newline)
+
+ // If there is another item in the object and our comment
+ // didn't hug it directly, then make sure there is a blank
+ // line separating them.
+ if nextItem != o.Rbrace && nextItem.Line != lastCommentPos.Line+1 {
+ buf.WriteByte(newline)
+ }
+ }
+ }
+
+ if index == len(o.List.Items) {
+ p.prev = o.Rbrace
+ break
+ }
+
+ // At this point we are sure that it's not a totally empty block: print
+ // the initial newline if it hasn't been printed yet by the previous
+ // block about standalone comments.
+ if !newlinePrinted {
+ buf.WriteByte(newline)
+ newlinePrinted = true
+ }
+
+ // Check if we have adjacent one-liner items. If so, we're going to align
+ // their comments.
+ var aligned []*ast.ObjectItem
+ for _, item := range o.List.Items[index:] {
+ // we don't group one line lists
+ if len(o.List.Items) == 1 {
+ break
+ }
+
+ // A line count of one means a one-liner without any lead comment;
+ // two means a one-liner with a lead comment;
+ // anything else is not a one-liner.
+ cur := lines(string(p.objectItem(item)))
+ if cur > 2 {
+ break
+ }
+
+ curPos := item.Pos()
+
+ nextPos := token.Pos{}
+ if index != len(o.List.Items)-1 {
+ nextPos = o.List.Items[index+1].Pos()
+ }
+
+ prevPos := token.Pos{}
+ if index != 0 {
+ prevPos = o.List.Items[index-1].Pos()
+ }
+
+ // fmt.Println("DEBUG ----------------")
+ // fmt.Printf("prev = %+v prevPos: %s\n", prev, prevPos)
+ // fmt.Printf("cur = %+v curPos: %s\n", cur, curPos)
+ // fmt.Printf("next = %+v nextPos: %s\n", next, nextPos)
+
+ if curPos.Line+1 == nextPos.Line {
+ aligned = append(aligned, item)
+ index++
+ continue
+ }
+
+ if curPos.Line-1 == prevPos.Line {
+ aligned = append(aligned, item)
+ index++
+
+ // finish if we have a new line or comment next. This happens
+ // if the next item is not adjacent
+ if curPos.Line+1 != nextPos.Line {
+ break
+ }
+ continue
+ }
+
+ break
+ }
+
+ // Put newlines if the items are between other non-aligned items.
+ // Newlines are also added if there is a standalone comment already, so
+ // check that too.
+ if !commented && index != len(aligned) {
+ buf.WriteByte(newline)
+ }
+
+ if len(aligned) >= 1 {
+ p.prev = aligned[len(aligned)-1].Pos()
+
+ items := p.alignedItems(aligned)
+ buf.Write(p.indent(items))
+ } else {
+ p.prev = o.List.Items[index].Pos()
+
+ buf.Write(p.indent(p.objectItem(o.List.Items[index])))
+ index++
+ }
+
+ buf.WriteByte(newline)
+ }
+
+ buf.WriteString("}")
+ return buf.Bytes()
+}
+
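+// alignedItems renders a run of adjacent one-line items with their keys
+// padded to a common width, so the "=" signs and any line comments line up,
+// e.g. (illustrative):
+//
+//	foo   = 1 # a
+//	barzz = 2 # b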
+func (p *printer) alignedItems(items []*ast.ObjectItem) []byte {
+ var buf bytes.Buffer
+
+ // find the longest key and value length, needed for alignment
+ var longestKeyLen int // longest key length
+ var longestValLen int // longest value length
+ for _, item := range items {
+ key := len(item.Keys[0].Token.Text)
+ val := len(p.output(item.Val))
+
+ if key > longestKeyLen {
+ longestKeyLen = key
+ }
+
+ if val > longestValLen {
+ longestValLen = val
+ }
+ }
+
+ for i, item := range items {
+ if item.LeadComment != nil {
+ for _, comment := range item.LeadComment.List {
+ buf.WriteString(comment.Text)
+ buf.WriteByte(newline)
+ }
+ }
+
+ for i, k := range item.Keys {
+ keyLen := len(k.Token.Text)
+ buf.WriteString(k.Token.Text)
+ for i := 0; i < longestKeyLen-keyLen+1; i++ {
+ buf.WriteByte(blank)
+ }
+
+ // reached the end of the keys
+ if i == len(item.Keys)-1 && len(item.Keys) == 1 {
+ buf.WriteString("=")
+ buf.WriteByte(blank)
+ }
+ }
+
+ val := p.output(item.Val)
+ valLen := len(val)
+ buf.Write(val)
+
+ if item.Val.Pos().Line == item.Keys[0].Pos().Line && item.LineComment != nil {
+ for i := 0; i < longestValLen-valLen+1; i++ {
+ buf.WriteByte(blank)
+ }
+
+ for _, comment := range item.LineComment.List {
+ buf.WriteString(comment.Text)
+ }
+ }
+
+ // do not print for the last item
+ if i != len(items)-1 {
+ buf.WriteByte(newline)
+ }
+ }
+
+ return buf.Bytes()
+}
+
+// list returns the printable HCL form of a list type.
+func (p *printer) list(l *ast.ListType) []byte {
+ if p.isSingleLineList(l) {
+ return p.singleLineList(l)
+ }
+
+ var buf bytes.Buffer
+ buf.WriteString("[")
+ buf.WriteByte(newline)
+
+ var longestLine int
+ for _, item := range l.List {
+ // for now we assume that the list only contains literal types
+ if lit, ok := item.(*ast.LiteralType); ok {
+ lineLen := len(lit.Token.Text)
+ if lineLen > longestLine {
+ longestLine = lineLen
+ }
+ }
+ }
+
+ haveEmptyLine := false
+ for i, item := range l.List {
+ // If we have a lead comment, then we want to write that first
+ leadComment := false
+ if lit, ok := item.(*ast.LiteralType); ok && lit.LeadComment != nil {
+ leadComment = true
+
+ // Ensure an empty line before every element with a
+ // lead comment (except the first item in a list).
+ if !haveEmptyLine && i != 0 {
+ buf.WriteByte(newline)
+ }
+
+ for _, comment := range lit.LeadComment.List {
+ buf.Write(p.indent([]byte(comment.Text)))
+ buf.WriteByte(newline)
+ }
+ }
+
+ // also indent each line
+ val := p.output(item)
+ curLen := len(val)
+ buf.Write(p.indent(val))
+
+ // if this item is a heredoc, then we output the comma on
+ // the next line. This is the only case this happens.
+ comma := []byte{','}
+ if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC {
+ buf.WriteByte(newline)
+ comma = p.indent(comma)
+ }
+
+ buf.Write(comma)
+
+ if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil {
+ // if the next item doesn't have any comments, do not align
+ buf.WriteByte(blank) // align one space
+ for i := 0; i < longestLine-curLen; i++ {
+ buf.WriteByte(blank)
+ }
+
+ for _, comment := range lit.LineComment.List {
+ buf.WriteString(comment.Text)
+ }
+ }
+
+ buf.WriteByte(newline)
+
+ // Ensure an empty line after every element with a
+ // lead comment (except the first item in a list).
+ haveEmptyLine = leadComment && i != len(l.List)-1
+ if haveEmptyLine {
+ buf.WriteByte(newline)
+ }
+ }
+
+ buf.WriteString("]")
+ return buf.Bytes()
+}
+
+// isSingleLineList returns true if the list:
+// * was previously formatted entirely on one line
+// * consists entirely of literals
+// * contains either no heredoc strings or exactly one element
+// * has no line comments
+func (printer) isSingleLineList(l *ast.ListType) bool {
+ for _, item := range l.List {
+ if item.Pos().Line != l.Lbrack.Line {
+ return false
+ }
+
+ lit, ok := item.(*ast.LiteralType)
+ if !ok {
+ return false
+ }
+
+ if lit.Token.Type == token.HEREDOC && len(l.List) != 1 {
+ return false
+ }
+
+ if lit.LineComment != nil {
+ return false
+ }
+ }
+
+ return true
+}
+
+// singleLineList prints a simple single line list.
+// For a definition of "simple", see isSingleLineList above.
+func (p *printer) singleLineList(l *ast.ListType) []byte {
+ buf := &bytes.Buffer{}
+
+ buf.WriteString("[")
+ for i, item := range l.List {
+ if i != 0 {
+ buf.WriteString(", ")
+ }
+
+ // Output the item itself
+ buf.Write(p.output(item))
+
+ // The heredoc marker needs to be at the end of line.
+ if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC {
+ buf.WriteByte(newline)
+ }
+ }
+
+ buf.WriteString("]")
+ return buf.Bytes()
+}
+
+// indent prepends the configured indentation to each non-empty line of the given buffer
+func (p *printer) indent(buf []byte) []byte {
+ var prefix []byte
+ if p.cfg.SpacesWidth != 0 {
+ for i := 0; i < p.cfg.SpacesWidth; i++ {
+ prefix = append(prefix, blank)
+ }
+ } else {
+ prefix = []byte{tab}
+ }
+
+ var res []byte
+ bol := true
+ for _, c := range buf {
+ if bol && c != '\n' {
+ res = append(res, prefix...)
+ }
+
+ res = append(res, c)
+ bol = c == '\n'
+ }
+ return res
+}
+
+// unindent removes all the indentation from the tombstoned lines
+func (p *printer) unindent(buf []byte) []byte {
+ var res []byte
+ for i := 0; i < len(buf); i++ {
+ skip := len(buf)-i <= len(unindent)
+ if !skip {
+ skip = !bytes.Equal(unindent, buf[i:i+len(unindent)])
+ }
+ if skip {
+ res = append(res, buf[i])
+ continue
+ }
+
+ // We have a marker. We have to backtrack here and clean out
+ // any whitespace ahead of our tombstone up to a \n.
+ for j := len(res) - 1; j >= 0; j-- {
+ if res[j] == '\n' {
+ break
+ }
+
+ res = res[:j]
+ }
+
+ // Skip the entire unindent marker
+ i += len(unindent) - 1
+ }
+
+ return res
+}
+
+// heredocIndent marks all the 2nd and further lines as unindentable
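+//
+// A sketch of the round trip: "a\nb" becomes "a\n\uE123b"; indent then
+// prefixes every line as usual, and unindent later strips the marker along
+// with any indentation inserted before it, so heredoc bodies keep their
+// original alignment.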
+func (p *printer) heredocIndent(buf []byte) []byte {
+ var res []byte
+ bol := false
+ for _, c := range buf {
+ if bol && c != '\n' {
+ res = append(res, unindent...)
+ }
+ res = append(res, c)
+ bol = c == '\n'
+ }
+ return res
+}
+
+// isSingleLineObject tells whether the given object item is a single
+// line object such as "obj {}".
+//
+// A single line object:
+//
+// * has no lead comments (these force a multi-line layout)
+// * has no assignment
+// * has no values in the stanza (within {})
+//
+func (p *printer) isSingleLineObject(val *ast.ObjectItem) bool {
+ // If there is a lead comment, can't be one line
+ if val.LeadComment != nil {
+ return false
+ }
+
+ // If there is assignment, we always break by line
+ if val.Assign.IsValid() {
+ return false
+ }
+
+ // If it isn't an object type, then it's not a single line object
+ ot, ok := val.Val.(*ast.ObjectType)
+ if !ok {
+ return false
+ }
+
+ // If the object has no items, it is single line!
+ return len(ot.List.Items) == 0
+}
+
+func lines(txt string) int {
+ endline := 1
+ for i := 0; i < len(txt); i++ {
+ if txt[i] == '\n' {
+ endline++
+ }
+ }
+ return endline
+}
+
+// ----------------------------------------------------------------------------
+// Tracing support
+
+func (p *printer) printTrace(a ...interface{}) {
+ if !p.enableTrace {
+ return
+ }
+
+ const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
+ const n = len(dots)
+ i := 2 * p.indentTrace
+ for i > n {
+ fmt.Print(dots)
+ i -= n
+ }
+ // i <= n
+ fmt.Print(dots[0:i])
+ fmt.Println(a...)
+}
+
+func trace(p *printer, msg string) *printer {
+ p.printTrace(msg, "(")
+ p.indentTrace++
+ return p
+}
+
+// Usage pattern: defer un(trace(p, "..."))
+func un(p *printer) {
+ p.indentTrace--
+ p.printTrace(")")
+}
diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go b/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go
new file mode 100644
index 0000000..6617ab8
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go
@@ -0,0 +1,66 @@
+// Package printer implements printing of AST nodes to HCL format.
+package printer
+
+import (
+ "bytes"
+ "io"
+ "text/tabwriter"
+
+ "github.com/hashicorp/hcl/hcl/ast"
+ "github.com/hashicorp/hcl/hcl/parser"
+)
+
+var DefaultConfig = Config{
+ SpacesWidth: 2,
+}
+
+// A Config node controls the output of Fprint.
+type Config struct {
+ SpacesWidth int // if set, it will use spaces instead of tabs for alignment
+}
+
+func (c *Config) Fprint(output io.Writer, node ast.Node) error {
+ p := &printer{
+ cfg: *c,
+ comments: make([]*ast.CommentGroup, 0),
+ standaloneComments: make([]*ast.CommentGroup, 0),
+ // enableTrace: true,
+ }
+
+ p.collectComments(node)
+
+ if _, err := output.Write(p.unindent(p.output(node))); err != nil {
+ return err
+ }
+
+ // flush tabwriter, if any
+ var err error
+ if tw, _ := output.(*tabwriter.Writer); tw != nil {
+ err = tw.Flush()
+ }
+
+ return err
+}
+
+// Fprint "pretty-prints" an HCL node to output
+// It calls Config.Fprint with default settings.
+func Fprint(output io.Writer, node ast.Node) error {
+ return DefaultConfig.Fprint(output, node)
+}
+
+// Format formats src HCL and returns the result.
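+//
+// A minimal sketch (the input is illustrative):
+//
+//	out, err := printer.Format([]byte(`foo="bar"`))
+//	// on success, out holds the normalized form: foo = "bar"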
+func Format(src []byte) ([]byte, error) {
+ node, err := parser.Parse(src)
+ if err != nil {
+ return nil, err
+ }
+
+ var buf bytes.Buffer
+ if err := DefaultConfig.Fprint(&buf, node); err != nil {
+ return nil, err
+ }
+
+ // Add trailing newline to result
+ buf.WriteString("\n")
+ return buf.Bytes(), nil
+}
diff --git a/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go
new file mode 100644
index 0000000..624a18f
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go
@@ -0,0 +1,652 @@
+// Package scanner implements a scanner for HCL (HashiCorp Configuration
+// Language) source text.
+package scanner
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "regexp"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/hashicorp/hcl/hcl/token"
+)
+
+// eof represents a marker rune for the end of the reader.
+const eof = rune(0)
+
+// Scanner defines a lexical scanner
+type Scanner struct {
+ buf *bytes.Buffer // Source buffer for advancing and scanning
+ src []byte // Source buffer for immutable access
+
+ // Source Position
+ srcPos token.Pos // current position
+ prevPos token.Pos // previous position, used for peek() method
+
+ lastCharLen int // length of last character in bytes
+ lastLineLen int // length of last line in characters (for correct column reporting)
+
+ tokStart int // token text start position
+ tokEnd int // token text end position
+
+ // Error is called for each error encountered. If no Error
+ // function is set, the error is reported to os.Stderr.
+ Error func(pos token.Pos, msg string)
+
+ // ErrorCount is incremented by one for each error encountered.
+ ErrorCount int
+
+ // tokPos is the start position of most recently scanned token; set by
+ // Scan. The Filename field is always left untouched by the Scanner. If
+ // an error is reported (via Error) and Position is invalid, the scanner is
+ // not inside a token.
+ tokPos token.Pos
+}
+
+// New creates and initializes a new instance of Scanner using src as
+// its source content.
+func New(src []byte) *Scanner {
+ // even though we accept a src, we read from an io.Reader compatible type
+ // (*bytes.Buffer). So in the future we might easily change it to streaming
+ // read.
+ b := bytes.NewBuffer(src)
+ s := &Scanner{
+ buf: b,
+ src: src,
+ }
+
+ // srcPosition always starts with 1
+ s.srcPos.Line = 1
+ return s
+}
+
+// next reads the next rune from the buffered reader. Returns rune(0) if
+// an error occurs (or io.EOF is returned).
+func (s *Scanner) next() rune {
+ ch, size, err := s.buf.ReadRune()
+ if err != nil {
+ // advance for error reporting
+ s.srcPos.Column++
+ s.srcPos.Offset += size
+ s.lastCharLen = size
+ return eof
+ }
+
+ // remember last position
+ s.prevPos = s.srcPos
+
+ s.srcPos.Column++
+ s.lastCharLen = size
+ s.srcPos.Offset += size
+
+ if ch == utf8.RuneError && size == 1 {
+ s.err("illegal UTF-8 encoding")
+ return ch
+ }
+
+ if ch == '\n' {
+ s.srcPos.Line++
+ s.lastLineLen = s.srcPos.Column
+ s.srcPos.Column = 0
+ }
+
+ if ch == '\x00' {
+ s.err("unexpected null character (0x00)")
+ return eof
+ }
+
+ if ch == '\uE123' {
+ s.err("unicode code point U+E123 reserved for internal use")
+ return utf8.RuneError
+ }
+
+ // debug
+ // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column)
+ return ch
+}
+
+// unread unreads the previously read rune and updates the source position
+func (s *Scanner) unread() {
+ if err := s.buf.UnreadRune(); err != nil {
+ panic(err) // this is user fault, we should catch it
+ }
+ s.srcPos = s.prevPos // put back last position
+}
+
+// peek returns the next rune without advancing the reader.
+func (s *Scanner) peek() rune {
+ peek, _, err := s.buf.ReadRune()
+ if err != nil {
+ return eof
+ }
+
+ s.buf.UnreadRune()
+ return peek
+}
+
+// Scan scans the next token and returns the token.
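+//
+// Callers typically drive the scanner in a loop (an illustrative sketch):
+//
+//	s := scanner.New(src)
+//	for tok := s.Scan(); tok.Type != token.EOF; tok = s.Scan() {
+//		fmt.Println(tok.Type, tok.Text)
+//	}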
+func (s *Scanner) Scan() token.Token {
+ ch := s.next()
+
+ // skip white space
+ for isWhitespace(ch) {
+ ch = s.next()
+ }
+
+ var tok token.Type
+
+ // token text markings
+ s.tokStart = s.srcPos.Offset - s.lastCharLen
+
+ // token position: the initial next() moves the offset by one rune (by its
+ // size in bytes, actually), but we are interested in the starting point
+ s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen
+ if s.srcPos.Column > 0 {
+ // common case: last character was not a '\n'
+ s.tokPos.Line = s.srcPos.Line
+ s.tokPos.Column = s.srcPos.Column
+ } else {
+ // last character was a '\n'
+ // (we cannot be at the beginning of the source
+ // since we have called next() at least once)
+ s.tokPos.Line = s.srcPos.Line - 1
+ s.tokPos.Column = s.lastLineLen
+ }
+
+ switch {
+ case isLetter(ch):
+ tok = token.IDENT
+ lit := s.scanIdentifier()
+ if lit == "true" || lit == "false" {
+ tok = token.BOOL
+ }
+ case isDecimal(ch):
+ tok = s.scanNumber(ch)
+ default:
+ switch ch {
+ case eof:
+ tok = token.EOF
+ case '"':
+ tok = token.STRING
+ s.scanString()
+ case '#', '/':
+ tok = token.COMMENT
+ s.scanComment(ch)
+ case '.':
+ tok = token.PERIOD
+ ch = s.peek()
+ if isDecimal(ch) {
+ tok = token.FLOAT
+ ch = s.scanMantissa(ch)
+ ch = s.scanExponent(ch)
+ }
+ case '<':
+ tok = token.HEREDOC
+ s.scanHeredoc()
+ case '[':
+ tok = token.LBRACK
+ case ']':
+ tok = token.RBRACK
+ case '{':
+ tok = token.LBRACE
+ case '}':
+ tok = token.RBRACE
+ case ',':
+ tok = token.COMMA
+ case '=':
+ tok = token.ASSIGN
+ case '+':
+ tok = token.ADD
+ case '-':
+ if isDecimal(s.peek()) {
+ ch := s.next()
+ tok = s.scanNumber(ch)
+ } else {
+ tok = token.SUB
+ }
+ default:
+ s.err("illegal char")
+ }
+ }
+
+ // finish token ending
+ s.tokEnd = s.srcPos.Offset
+
+ // create token literal
+ var tokenText string
+ if s.tokStart >= 0 {
+ tokenText = string(s.src[s.tokStart:s.tokEnd])
+ }
+ s.tokStart = s.tokEnd // ensure idempotency of tokenText() call
+
+ return token.Token{
+ Type: tok,
+ Pos: s.tokPos,
+ Text: tokenText,
+ }
+}
+
+func (s *Scanner) scanComment(ch rune) {
+ // single line comments
+ if ch == '#' || (ch == '/' && s.peek() != '*') {
+ if ch == '/' && s.peek() != '/' {
+ s.err("expected '/' for comment")
+ return
+ }
+
+ ch = s.next()
+ for ch != '\n' && ch >= 0 && ch != eof {
+ ch = s.next()
+ }
+ if ch != eof && ch >= 0 {
+ s.unread()
+ }
+ return
+ }
+
+ // be sure we get the character after /*. This allows us to find comments
+ // that are not terminated.
+ if ch == '/' {
+ s.next()
+ ch = s.next() // read character after "/*"
+ }
+
+ // look for /* - style comments
+ for {
+ if ch < 0 || ch == eof {
+ s.err("comment not terminated")
+ break
+ }
+
+ ch0 := ch
+ ch = s.next()
+ if ch0 == '*' && ch == '/' {
+ break
+ }
+ }
+}
+
+// scanNumber scans an HCL number definition starting with the given rune
+func (s *Scanner) scanNumber(ch rune) token.Type {
+ if ch == '0' {
+ // check for hexadecimal, octal or float
+ ch = s.next()
+ if ch == 'x' || ch == 'X' {
+ // hexadecimal
+ ch = s.next()
+ found := false
+ for isHexadecimal(ch) {
+ ch = s.next()
+ found = true
+ }
+
+ if !found {
+ s.err("illegal hexadecimal number")
+ }
+
+ if ch != eof {
+ s.unread()
+ }
+
+ return token.NUMBER
+ }
+
+ // now it's either something like: 0421(octal) or 0.1231(float)
+ illegalOctal := false
+ for isDecimal(ch) {
+ ch = s.next()
+ if ch == '8' || ch == '9' {
+ // this is just a possibility. For example 0159 is illegal, but
+ // 0159.23 is valid. So we mark a possible illegal octal. If
+ // the next character is not a period, we'll print the error.
+ illegalOctal = true
+ }
+ }
+
+ if ch == 'e' || ch == 'E' {
+ ch = s.scanExponent(ch)
+ return token.FLOAT
+ }
+
+ if ch == '.' {
+ ch = s.scanFraction(ch)
+
+ if ch == 'e' || ch == 'E' {
+ ch = s.next()
+ ch = s.scanExponent(ch)
+ }
+ return token.FLOAT
+ }
+
+ if illegalOctal {
+ s.err("illegal octal number")
+ }
+
+ if ch != eof {
+ s.unread()
+ }
+ return token.NUMBER
+ }
+
+ s.scanMantissa(ch)
+ ch = s.next() // seek forward
+ if ch == 'e' || ch == 'E' {
+ ch = s.scanExponent(ch)
+ return token.FLOAT
+ }
+
+ if ch == '.' {
+ ch = s.scanFraction(ch)
+ if ch == 'e' || ch == 'E' {
+ ch = s.next()
+ ch = s.scanExponent(ch)
+ }
+ return token.FLOAT
+ }
+
+ if ch != eof {
+ s.unread()
+ }
+ return token.NUMBER
+}
+
+// scanMantissa scans the mantissa beginning from the rune. It returns the next
+// non-decimal rune. It's used to determine whether it's a fraction or exponent.
+func (s *Scanner) scanMantissa(ch rune) rune {
+ scanned := false
+ for isDecimal(ch) {
+ ch = s.next()
+ scanned = true
+ }
+
+ if scanned && ch != eof {
+ s.unread()
+ }
+ return ch
+}
+
+// scanFraction scans the fraction after the '.' rune
+func (s *Scanner) scanFraction(ch rune) rune {
+ if ch == '.' {
+ ch = s.peek() // we peek just to see if we can move forward
+ ch = s.scanMantissa(ch)
+ }
+ return ch
+}
+
+// scanExponent scans the remaining parts of an exponent after the 'e' or 'E'
+// rune.
+func (s *Scanner) scanExponent(ch rune) rune {
+ if ch == 'e' || ch == 'E' {
+ ch = s.next()
+ if ch == '-' || ch == '+' {
+ ch = s.next()
+ }
+ ch = s.scanMantissa(ch)
+ }
+ return ch
+}
+
+// scanHeredoc scans a heredoc string
+func (s *Scanner) scanHeredoc() {
+ // Scan the second '<' in example: '<<EOF'
+ if s.next() != '<' {
+ s.err("heredoc expected second '<', didn't see it")
+ return
+ }
+
+ // Get the original offset so we can read just the heredoc ident
+ offs := s.srcPos.Offset
+
+ // Scan the identifier
+ ch := s.next()
+
+ // Indented heredoc syntax
+ if ch == '-' {
+ ch = s.next()
+ }
+
+ for isLetter(ch) || isDigit(ch) {
+ ch = s.next()
+ }
+
+ // If we reached an EOF then that is not good
+ if ch == eof {
+ s.err("heredoc not terminated")
+ return
+ }
+
+ // Ignore the '\r' in Windows line endings
+ if ch == '\r' {
+ if s.peek() == '\n' {
+ ch = s.next()
+ }
+ }
+
+ // If we didn't reach a newline then that is also not good
+ if ch != '\n' {
+ s.err("invalid characters in heredoc anchor")
+ return
+ }
+
+ // Read the identifier
+ identBytes := s.src[offs : s.srcPos.Offset-s.lastCharLen]
+ if len(identBytes) == 0 || (len(identBytes) == 1 && identBytes[0] == '-') {
+ s.err("zero-length heredoc anchor")
+ return
+ }
+
+ // The identifier regexp accepts leading whitespace so the "<<-" form can
+ // match an indented closing anchor.
+ var identRegexp *regexp.Regexp
+ if identBytes[0] == '-' {
+ identRegexp = regexp.MustCompile(fmt.Sprintf(`^[[:space:]]*%s\r*\z`, identBytes[1:]))
+ } else {
+ identRegexp = regexp.MustCompile(fmt.Sprintf(`^[[:space:]]*%s\r*\z`, identBytes))
+ }
+
+ // Read the actual string value
+ lineStart := s.srcPos.Offset
+ for {
+ ch := s.next()
+
+ // CRLF
+ if ch == '\n' {
+ // Found a newline; check whether this line is the closing anchor
+ lineBytesLen := s.srcPos.Offset - s.lastCharLen - lineStart
+ if lineBytesLen >= len(identBytes) && identRegexp.Match(s.src[lineStart:s.srcPos.Offset-s.lastCharLen]) {
+ break
+ }
+
+ // Not an anchor match, record the start of a new line
+ lineStart = s.srcPos.Offset
+ }
+
+ if ch == eof {
+ s.err("heredoc not terminated")
+ return
+ }
+ }
+
+ return
+}
+
+// scanString scans a quoted string
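+// Interpolation sequences are tracked with a brace counter, so quotes and
+// newlines inside ${ ... } (e.g. "${lookup(var.map, "key")}") do not
+// terminate the string.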
+func (s *Scanner) scanString() {
+ braces := 0
+ for {
+ // '"' opening already consumed
+ // read character after quote
+ ch := s.next()
+
+ if (ch == '\n' && braces == 0) || ch < 0 || ch == eof {
+ s.err("literal not terminated")
+ return
+ }
+
+ if ch == '"' && braces == 0 {
+ break
+ }
+
+ // If we're going into a ${} then we can ignore quotes for awhile
+ if braces == 0 && ch == '$' && s.peek() == '{' {
+ braces++
+ s.next()
+ } else if braces > 0 && ch == '{' {
+ braces++
+ }
+ if braces > 0 && ch == '}' {
+ braces--
+ }
+
+ if ch == '\\' {
+ s.scanEscape()
+ }
+ }
+
+ return
+}
+
+// scanEscape scans an escape sequence
+func (s *Scanner) scanEscape() rune {
+ // http://en.cppreference.com/w/cpp/language/escape
+ ch := s.next() // read character after '\'
+ switch ch {
+ case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"':
+ // nothing to do
+ case '0', '1', '2', '3', '4', '5', '6', '7':
+ // octal notation
+ ch = s.scanDigits(ch, 8, 3)
+ case 'x':
+ // hexadecimal notation
+ ch = s.scanDigits(s.next(), 16, 2)
+ case 'u':
+ // universal character name
+ ch = s.scanDigits(s.next(), 16, 4)
+ case 'U':
+ // universal character name
+ ch = s.scanDigits(s.next(), 16, 8)
+ default:
+ s.err("illegal char escape")
+ }
+ return ch
+}
+
+// scanDigits scans a rune with the given base up to n times. For example, the
+// octal escape \123 would result in scanDigits(ch, 8, 3).
+func (s *Scanner) scanDigits(ch rune, base, n int) rune {
+ start := n
+ for n > 0 && digitVal(ch) < base {
+ ch = s.next()
+ if ch == eof {
+ // If we see an EOF, we halt any more scanning of digits
+ // immediately.
+ break
+ }
+
+ n--
+ }
+ if n > 0 {
+ s.err("illegal char escape")
+ }
+
+ if n != start && ch != eof {
+ // put the last non-digit char back, but only if we actually
+ // read some digits
+ s.unread()
+ }
+
+ return ch
+}
+
+// scanIdentifier scans an identifier and returns the literal string
+func (s *Scanner) scanIdentifier() string {
+ offs := s.srcPos.Offset - s.lastCharLen
+ ch := s.next()
+ for isLetter(ch) || isDigit(ch) || ch == '-' || ch == '.' {
+ ch = s.next()
+ }
+
+ if ch != eof {
+ s.unread() // we got identifier, put back latest char
+ }
+
+ return string(s.src[offs:s.srcPos.Offset])
+}
+
+// recentPosition returns the position of the character immediately after the
+// character or token returned by the last call to Scan.
+func (s *Scanner) recentPosition() (pos token.Pos) {
+ pos.Offset = s.srcPos.Offset - s.lastCharLen
+ switch {
+ case s.srcPos.Column > 0:
+ // common case: last character was not a '\n'
+ pos.Line = s.srcPos.Line
+ pos.Column = s.srcPos.Column
+ case s.lastLineLen > 0:
+ // last character was a '\n'
+ // (we cannot be at the beginning of the source
+ // since we have called next() at least once)
+ pos.Line = s.srcPos.Line - 1
+ pos.Column = s.lastLineLen
+ default:
+ // at the beginning of the source
+ pos.Line = 1
+ pos.Column = 1
+ }
+ return
+}
+
+// err reports a scanning error to the s.Error function. If no such function
+// is defined, it prints the error to os.Stderr by default.
+func (s *Scanner) err(msg string) {
+ s.ErrorCount++
+ pos := s.recentPosition()
+
+ if s.Error != nil {
+ s.Error(pos, msg)
+ return
+ }
+
+ fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg)
+}
+
+// isLetter returns true if the given rune is a letter
+func isLetter(ch rune) bool {
+ return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch)
+}
+
+// isDigit returns true if the given rune is a decimal digit
+func isDigit(ch rune) bool {
+ return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch)
+}
+
+// isDecimal returns true if the given rune is a decimal digit
+func isDecimal(ch rune) bool {
+ return '0' <= ch && ch <= '9'
+}
+
+// isHexadecimal returns true if the given rune is a hexadecimal digit
+func isHexadecimal(ch rune) bool {
+ return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F'
+}
+
+// isWhitespace returns true if the rune is a space, tab, newline or carriage return
+func isWhitespace(ch rune) bool {
+ return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'
+}
+
+// digitVal returns the integer value of a given octal, decimal, or hexadecimal rune
+func digitVal(ch rune) int {
+ switch {
+ case '0' <= ch && ch <= '9':
+ return int(ch - '0')
+ case 'a' <= ch && ch <= 'f':
+ return int(ch - 'a' + 10)
+ case 'A' <= ch && ch <= 'F':
+ return int(ch - 'A' + 10)
+ }
+ return 16 // larger than any legal digit val
+}
diff --git a/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go
new file mode 100644
index 0000000..5f981ea
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go
@@ -0,0 +1,241 @@
+package strconv
+
+import (
+ "errors"
+ "unicode/utf8"
+)
+
+// ErrSyntax indicates that a value does not have the right syntax for the target type.
+var ErrSyntax = errors.New("invalid syntax")
+
+// Unquote interprets s as a double-quoted HCL string literal, returning the
+// string value that s quotes. Escape sequences are decoded, except that
+// interpolation sequences (${ ... }) are passed through verbatim.
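+//
+// For example (illustrative):
+//
+//	s, err := Unquote(`"a\n${var.b}"`)
+//	// s == "a\n${var.b}" with the \n decoded to a real newline and the
+//	// interpolation left untouched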
+func Unquote(s string) (t string, err error) {
+ n := len(s)
+ if n < 2 {
+ return "", ErrSyntax
+ }
+ quote := s[0]
+ if quote != s[n-1] {
+ return "", ErrSyntax
+ }
+ s = s[1 : n-1]
+
+ if quote != '"' {
+ return "", ErrSyntax
+ }
+ if !contains(s, '$') && !contains(s, '{') && contains(s, '\n') {
+ return "", ErrSyntax
+ }
+
+ // Is it trivial? Avoid allocation.
+ if !contains(s, '\\') && !contains(s, quote) && !contains(s, '$') {
+ switch quote {
+ case '"':
+ return s, nil
+ case '\'':
+ r, size := utf8.DecodeRuneInString(s)
+ if size == len(s) && (r != utf8.RuneError || size != 1) {
+ return s, nil
+ }
+ }
+ }
+
+ var runeTmp [utf8.UTFMax]byte
+ buf := make([]byte, 0, 3*len(s)/2) // Try to avoid more allocations.
+ for len(s) > 0 {
+ // If we're starting a '${}' then let it through un-unquoted.
+ // Specifically: we don't unquote any characters within the `${}`
+ // section.
+ if s[0] == '$' && len(s) > 1 && s[1] == '{' {
+ buf = append(buf, '$', '{')
+ s = s[2:]
+
+ // Continue reading until we find the closing brace, copying as-is
+ braces := 1
+ for len(s) > 0 && braces > 0 {
+ r, size := utf8.DecodeRuneInString(s)
+ if r == utf8.RuneError {
+ return "", ErrSyntax
+ }
+
+ s = s[size:]
+
+ n := utf8.EncodeRune(runeTmp[:], r)
+ buf = append(buf, runeTmp[:n]...)
+
+ switch r {
+ case '{':
+ braces++
+ case '}':
+ braces--
+ }
+ }
+ if braces != 0 {
+ return "", ErrSyntax
+ }
+ if len(s) == 0 {
+ // If there's no string left, we're done!
+ break
+ } else {
+ // If there's more left, we need to pop back up to the top of the loop
+ // in case there's another interpolation in this string.
+ continue
+ }
+ }
+
+ if s[0] == '\n' {
+ return "", ErrSyntax
+ }
+
+ c, multibyte, ss, err := unquoteChar(s, quote)
+ if err != nil {
+ return "", err
+ }
+ s = ss
+ if c < utf8.RuneSelf || !multibyte {
+ buf = append(buf, byte(c))
+ } else {
+ n := utf8.EncodeRune(runeTmp[:], c)
+ buf = append(buf, runeTmp[:n]...)
+ }
+ if quote == '\'' && len(s) != 0 {
+ // single-quoted must be single character
+ return "", ErrSyntax
+ }
+ }
+ return string(buf), nil
+}
+
+// contains reports whether the string contains the byte c.
+func contains(s string, c byte) bool {
+ for i := 0; i < len(s); i++ {
+ if s[i] == c {
+ return true
+ }
+ }
+ return false
+}
+
+func unhex(b byte) (v rune, ok bool) {
+ c := rune(b)
+ switch {
+ case '0' <= c && c <= '9':
+ return c - '0', true
+ case 'a' <= c && c <= 'f':
+ return c - 'a' + 10, true
+ case 'A' <= c && c <= 'F':
+ return c - 'A' + 10, true
+ }
+ return
+}
+
+func unquoteChar(s string, quote byte) (value rune, multibyte bool, tail string, err error) {
+ // easy cases
+ switch c := s[0]; {
+ case c == quote && (quote == '\'' || quote == '"'):
+ err = ErrSyntax
+ return
+ case c >= utf8.RuneSelf:
+ r, size := utf8.DecodeRuneInString(s)
+ return r, true, s[size:], nil
+ case c != '\\':
+ return rune(s[0]), false, s[1:], nil
+ }
+
+ // hard case: c is backslash
+ if len(s) <= 1 {
+ err = ErrSyntax
+ return
+ }
+ c := s[1]
+ s = s[2:]
+
+ switch c {
+ case 'a':
+ value = '\a'
+ case 'b':
+ value = '\b'
+ case 'f':
+ value = '\f'
+ case 'n':
+ value = '\n'
+ case 'r':
+ value = '\r'
+ case 't':
+ value = '\t'
+ case 'v':
+ value = '\v'
+ case 'x', 'u', 'U':
+ n := 0
+ switch c {
+ case 'x':
+ n = 2
+ case 'u':
+ n = 4
+ case 'U':
+ n = 8
+ }
+ var v rune
+ if len(s) < n {
+ err = ErrSyntax
+ return
+ }
+ for j := 0; j < n; j++ {
+ x, ok := unhex(s[j])
+ if !ok {
+ err = ErrSyntax
+ return
+ }
+ v = v<<4 | x
+ }
+ s = s[n:]
+ if c == 'x' {
+ // single-byte string, possibly not UTF-8
+ value = v
+ break
+ }
+ if v > utf8.MaxRune {
+ err = ErrSyntax
+ return
+ }
+ value = v
+ multibyte = true
+ case '0', '1', '2', '3', '4', '5', '6', '7':
+ v := rune(c) - '0'
+ if len(s) < 2 {
+ err = ErrSyntax
+ return
+ }
+ for j := 0; j < 2; j++ { // one digit already; two more
+ x := rune(s[j]) - '0'
+ if x < 0 || x > 7 {
+ err = ErrSyntax
+ return
+ }
+ v = (v << 3) | x
+ }
+ s = s[2:]
+ if v > 255 {
+ err = ErrSyntax
+ return
+ }
+ value = v
+ case '\\':
+ value = '\\'
+ case '\'', '"':
+ if c != quote {
+ err = ErrSyntax
+ return
+ }
+ value = rune(c)
+ default:
+ err = ErrSyntax
+ return
+ }
+ tail = s
+ return
+}
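+
+// unquoteExample is an illustrative sketch (a helper added for exposition,
+// not part of the upstream API). It shows the split behavior documented
+// above: escape sequences outside ${...} are decoded, while the body of an
+// interpolation is copied through verbatim.
+func unquoteExample() (string, error) {
+ // Returns "line1\n${join(",", var.list)}" with a real newline: the \n
+ // escape is decoded, the ${...} body is left untouched.
+ return Unquote(`"line1\n${join(",", var.list)}"`)
+}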
diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/position.go b/vendor/github.com/hashicorp/hcl/hcl/token/position.go
new file mode 100644
index 0000000..59c1bb7
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/token/position.go
@@ -0,0 +1,46 @@
+package token
+
+import "fmt"
+
+// Pos describes an arbitrary source position
+// including the file, line, and column location.
+// A Position is valid if the line number is > 0.
+type Pos struct {
+ Filename string // filename, if any
+ Offset int // offset, starting at 0
+ Line int // line number, starting at 1
+ Column int // column number, starting at 1 (character count)
+}
+
+// IsValid returns true if the position is valid.
+func (p *Pos) IsValid() bool { return p.Line > 0 }
+
+// String returns a string in one of several forms:
+//
+// file:line:column valid position with file name
+// line:column valid position without file name
+// file invalid position with file name
+// - invalid position without file name
+func (p Pos) String() string {
+ s := p.Filename
+ if p.IsValid() {
+ if s != "" {
+ s += ":"
+ }
+ s += fmt.Sprintf("%d:%d", p.Line, p.Column)
+ }
+ if s == "" {
+ s = "-"
+ }
+ return s
+}
+
+// Before reports whether the position p is before u.
+func (p Pos) Before(u Pos) bool {
+ return u.Offset > p.Offset || u.Line > p.Line
+}
+
+// After reports whether the position p is after u.
+func (p Pos) After(u Pos) bool {
+ return u.Offset < p.Offset || u.Line < p.Line
+}
diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/token.go b/vendor/github.com/hashicorp/hcl/hcl/token/token.go
new file mode 100644
index 0000000..e37c066
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/hcl/token/token.go
@@ -0,0 +1,219 @@
+// Package token defines constants representing the lexical tokens for HCL
+// (HashiCorp Configuration Language)
+package token
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ hclstrconv "github.com/hashicorp/hcl/hcl/strconv"
+)
+
+// Token defines a single HCL token which can be obtained via the Scanner
+type Token struct {
+ Type Type
+ Pos Pos
+ Text string
+ JSON bool
+}
+
+// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language)
+type Type int
+
+const (
+ // Special tokens
+ ILLEGAL Type = iota
+ EOF
+ COMMENT
+
+ identifier_beg
+ IDENT // literals
+ literal_beg
+ NUMBER // 12345
+ FLOAT // 123.45
+ BOOL // true,false
+ STRING // "abc"
+ HEREDOC // <<FOO\nbar\nFOO
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/flatten.go b/vendor/github.com/hashicorp/hcl/json/parser/flatten.go
new file mode 100644
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/json/parser/flatten.go
+package parser
+
+import "github.com/hashicorp/hcl/hcl/ast"
+
+// flattenObjects takes an AST node, walks it, and flattens
+func flattenObjects(node ast.Node) {
+ ast.Walk(node, func(n ast.Node) (ast.Node, bool) {
+ // We only care about lists, because this is what we modify
+ list, ok := n.(*ast.ObjectList)
+ if !ok {
+ return n, true
+ }
+
+ // Rebuild the item list
+ items := make([]*ast.ObjectItem, 0, len(list.Items))
+ frontier := make([]*ast.ObjectItem, 0, len(list.Items))
+ frontier = append(frontier, list.Items...)
+
+ // Process the frontier
+ for len(frontier) > 0 {
+ // Pop the current item
+ n := len(frontier)
+ item := frontier[n-1]
+ frontier = frontier[:n-1]
+
+ switch v := item.Val.(type) {
+ case *ast.ObjectType:
+ items, frontier = flattenObjectType(v, item, items, frontier)
+ case *ast.ListType:
+ items, frontier = flattenListType(v, item, items, frontier)
+ default:
+ items = append(items, item)
+ }
+ }
+
+ // Reverse the list since the frontier model runs things backwards
+ for i := len(items)/2 - 1; i >= 0; i-- {
+ opp := len(items) - 1 - i
+ items[i], items[opp] = items[opp], items[i]
+ }
+
+ // Done! Set the original items
+ list.Items = items
+ return n, true
+ })
+}
+
+func flattenListType(
+ ot *ast.ListType,
+ item *ast.ObjectItem,
+ items []*ast.ObjectItem,
+ frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) {
+ // If the list is empty, keep the original list
+ if len(ot.List) == 0 {
+ items = append(items, item)
+ return items, frontier
+ }
+
+ // All the elements of this list must also be objects!
+ for _, subitem := range ot.List {
+ if _, ok := subitem.(*ast.ObjectType); !ok {
+ items = append(items, item)
+ return items, frontier
+ }
+ }
+
+ // Great! We have a match. Go through all the items and flatten.
+ for _, elem := range ot.List {
+ // Add it to the frontier so that we can recurse
+ frontier = append(frontier, &ast.ObjectItem{
+ Keys: item.Keys,
+ Assign: item.Assign,
+ Val: elem,
+ LeadComment: item.LeadComment,
+ LineComment: item.LineComment,
+ })
+ }
+
+ return items, frontier
+}
+
+func flattenObjectType(
+ ot *ast.ObjectType,
+ item *ast.ObjectItem,
+ items []*ast.ObjectItem,
+ frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) {
+ // If the list has no items we do not have to flatten anything
+ if ot.List.Items == nil {
+ items = append(items, item)
+ return items, frontier
+ }
+
+ // All the elements of this object must also be objects!
+ for _, subitem := range ot.List.Items {
+ if _, ok := subitem.Val.(*ast.ObjectType); !ok {
+ items = append(items, item)
+ return items, frontier
+ }
+ }
+
+ // Great! We have a match. Go through all the items and flatten.
+ for _, subitem := range ot.List.Items {
+ // Copy the new key
+ keys := make([]*ast.ObjectKey, len(item.Keys)+len(subitem.Keys))
+ copy(keys, item.Keys)
+ copy(keys[len(item.Keys):], subitem.Keys)
+
+ // Add it to the frontier so that we can recurse
+ frontier = append(frontier, &ast.ObjectItem{
+ Keys: keys,
+ Assign: item.Assign,
+ Val: subitem.Val,
+ LeadComment: item.LeadComment,
+ LineComment: item.LineComment,
+ })
+ }
+
+ return items, frontier
+}
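+
+// As an illustrative sketch of the transformation above (not code from the
+// upstream file): the JSON document
+//
+// {"resource": {"aws_instance": {"ami": "abc"}}}
+//
+// parses into a single ObjectItem with Keys ["resource"], and flattening
+// rewrites it into an ObjectItem with Keys ["resource", "aws_instance"]
+// whose value is the inner object, mirroring the equivalent HCL block:
+//
+// resource "aws_instance" { ami = "abc" }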
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/parser.go b/vendor/github.com/hashicorp/hcl/json/parser/parser.go
new file mode 100644
index 0000000..125a5f0
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/json/parser/parser.go
@@ -0,0 +1,313 @@
+package parser
+
+import (
+ "errors"
+ "fmt"
+
+ "github.com/hashicorp/hcl/hcl/ast"
+ hcltoken "github.com/hashicorp/hcl/hcl/token"
+ "github.com/hashicorp/hcl/json/scanner"
+ "github.com/hashicorp/hcl/json/token"
+)
+
+type Parser struct {
+ sc *scanner.Scanner
+
+ // Last read token
+ tok token.Token
+ commaPrev token.Token
+
+ enableTrace bool
+ indent int
+ n int // buffer size (max = 1)
+}
+
+func newParser(src []byte) *Parser {
+ return &Parser{
+ sc: scanner.New(src),
+ }
+}
+
+// Parse parses the given source and returns the abstract syntax tree.
+func Parse(src []byte) (*ast.File, error) {
+ p := newParser(src)
+ return p.Parse()
+}
+
+var errEofToken = errors.New("EOF token found")
+
+// Parse parses the given source and returns the abstract syntax tree.
+func (p *Parser) Parse() (*ast.File, error) {
+ f := &ast.File{}
+ var err, scerr error
+ p.sc.Error = func(pos token.Pos, msg string) {
+ scerr = fmt.Errorf("%s: %s", pos, msg)
+ }
+
+ // The root must be an object in JSON
+ object, err := p.object()
+ if scerr != nil {
+ return nil, scerr
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ // We make our final node an object list so it is more HCL compatible
+ f.Node = object.List
+
+ // Flatten it, which finds patterns and turns them into more HCL-like
+ // AST trees.
+ flattenObjects(f.Node)
+
+ return f, nil
+}
+
+func (p *Parser) objectList() (*ast.ObjectList, error) {
+ defer un(trace(p, "ParseObjectList"))
+ node := &ast.ObjectList{}
+
+ for {
+ n, err := p.objectItem()
+ if err == errEofToken {
+ break // we are finished
+ }
+
+ // we don't return a nil node, because callers might want to use the
+ // already collected items.
+ if err != nil {
+ return node, err
+ }
+
+ node.Add(n)
+
+ // Check for a followup comma. If it isn't a comma, then we're done
+ if tok := p.scan(); tok.Type != token.COMMA {
+ break
+ }
+ }
+
+ return node, nil
+}
+
+// objectItem parses a single object item
+func (p *Parser) objectItem() (*ast.ObjectItem, error) {
+ defer un(trace(p, "ParseObjectItem"))
+
+ keys, err := p.objectKey()
+ if err != nil {
+ return nil, err
+ }
+
+ o := &ast.ObjectItem{
+ Keys: keys,
+ }
+
+ switch p.tok.Type {
+ case token.COLON:
+ pos := p.tok.Pos
+ o.Assign = hcltoken.Pos{
+ Filename: pos.Filename,
+ Offset: pos.Offset,
+ Line: pos.Line,
+ Column: pos.Column,
+ }
+
+ o.Val, err = p.objectValue()
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return o, nil
+}
+
+// objectKey parses an object key and returns an ObjectKey AST
+func (p *Parser) objectKey() ([]*ast.ObjectKey, error) {
+ keyCount := 0
+ keys := make([]*ast.ObjectKey, 0)
+
+ for {
+ tok := p.scan()
+ switch tok.Type {
+ case token.EOF:
+ return nil, errEofToken
+ case token.STRING:
+ keyCount++
+ keys = append(keys, &ast.ObjectKey{
+ Token: p.tok.HCLToken(),
+ })
+ case token.COLON:
+ // If we have a zero keycount it means that we never got
+ // an object key, i.e. `{ :`. This is a syntax error.
+ if keyCount == 0 {
+ return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type)
+ }
+
+ // Done
+ return keys, nil
+ case token.ILLEGAL:
+ return nil, errors.New("illegal")
+ default:
+ return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type)
+ }
+ }
+}
+
+// objectValue parses any type of object value, such as a number, bool,
+// string, object, or list.
+func (p *Parser) objectValue() (ast.Node, error) {
+ defer un(trace(p, "ParseObjectValue"))
+ tok := p.scan()
+
+ switch tok.Type {
+ case token.NUMBER, token.FLOAT, token.BOOL, token.NULL, token.STRING:
+ return p.literalType()
+ case token.LBRACE:
+ return p.objectType()
+ case token.LBRACK:
+ return p.listType()
+ case token.EOF:
+ return nil, errEofToken
+ }
+
+ return nil, fmt.Errorf("Expected object value, got unknown token: %+v", tok)
+}
+
+// object parses a JSON object, which must be the root of the document.
+func (p *Parser) object() (*ast.ObjectType, error) {
+ defer un(trace(p, "ParseType"))
+ tok := p.scan()
+
+ switch tok.Type {
+ case token.LBRACE:
+ return p.objectType()
+ case token.EOF:
+ return nil, errEofToken
+ }
+
+ return nil, fmt.Errorf("Expected object, got unknown token: %+v", tok)
+}
+
+// objectType parses an object type and returns an ObjectType AST
+func (p *Parser) objectType() (*ast.ObjectType, error) {
+ defer un(trace(p, "ParseObjectType"))
+
+ // we assume that the currently scanned token is a LBRACE
+ o := &ast.ObjectType{}
+
+ l, err := p.objectList()
+
+ // if we hit an RBRACE, we are good to go (it means we parsed all items);
+ // if it's not an RBRACE, it's a syntax error and we just return it.
+ if err != nil && p.tok.Type != token.RBRACE {
+ return nil, err
+ }
+
+ o.List = l
+ return o, nil
+}
+
+// listType parses a list type and returns a ListType AST
+func (p *Parser) listType() (*ast.ListType, error) {
+ defer un(trace(p, "ParseListType"))
+
+ // we assume that the currently scanned token is a LBRACK
+ l := &ast.ListType{}
+
+ for {
+ tok := p.scan()
+ switch tok.Type {
+ case token.NUMBER, token.FLOAT, token.STRING:
+ node, err := p.literalType()
+ if err != nil {
+ return nil, err
+ }
+
+ l.Add(node)
+ case token.COMMA:
+ continue
+ case token.LBRACE:
+ node, err := p.objectType()
+ if err != nil {
+ return nil, err
+ }
+
+ l.Add(node)
+ case token.BOOL:
+ // TODO(arslan) should we support? not supported by HCL yet
+ case token.LBRACK:
+ // TODO(arslan) should we support nested lists? Even though it's
+ // written in README of HCL, it's not a part of the grammar
+ // (not defined in parse.y)
+ case token.RBRACK:
+ // finished
+ return l, nil
+ default:
+ return nil, fmt.Errorf("unexpected token while parsing list: %s", tok.Type)
+ }
+
+ }
+}
+
+// literalType parses a literal type and returns a LiteralType AST
+func (p *Parser) literalType() (*ast.LiteralType, error) {
+ defer un(trace(p, "ParseLiteral"))
+
+ return &ast.LiteralType{
+ Token: p.tok.HCLToken(),
+ }, nil
+}
+
+// scan returns the next token from the underlying scanner. If a token has
+// been unscanned then read that instead.
+func (p *Parser) scan() token.Token {
+ // If we have a token on the buffer, then return it.
+ if p.n != 0 {
+ p.n = 0
+ return p.tok
+ }
+
+ p.tok = p.sc.Scan()
+ return p.tok
+}
+
+// unscan pushes the previously read token back onto the buffer.
+func (p *Parser) unscan() {
+ p.n = 1
+}
+
+// ----------------------------------------------------------------------------
+// Parsing support
+
+func (p *Parser) printTrace(a ...interface{}) {
+ if !p.enableTrace {
+ return
+ }
+
+ const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
+ const n = len(dots)
+ fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column)
+
+ i := 2 * p.indent
+ for i > n {
+ fmt.Print(dots)
+ i -= n
+ }
+ // i <= n
+ fmt.Print(dots[0:i])
+ fmt.Println(a...)
+}
+
+func trace(p *Parser, msg string) *Parser {
+ p.printTrace(msg, "(")
+ p.indent++
+ return p
+}
+
+// Usage pattern: defer un(trace(p, "..."))
+func un(p *Parser) {
+ p.indent--
+ p.printTrace(")")
+}
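+
+// parseExample is an illustrative sketch (a helper added for exposition,
+// not part of the upstream API) of the package entry point: Parse turns
+// raw JSON bytes into an *ast.File whose root node is an ast.ObjectList.
+func parseExample() (*ast.File, error) {
+ return Parse([]byte(`{"name": "demo", "count": 3}`))
+}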
diff --git a/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go
new file mode 100644
index 0000000..fe3f0f0
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go
@@ -0,0 +1,451 @@
+package scanner
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/hashicorp/hcl/json/token"
+)
+
+// eof represents a marker rune for the end of the reader.
+const eof = rune(0)
+
+// Scanner defines a lexical scanner
+type Scanner struct {
+ buf *bytes.Buffer // Source buffer for advancing and scanning
+ src []byte // Source buffer for immutable access
+
+ // Source Position
+ srcPos token.Pos // current position
+ prevPos token.Pos // previous position, used for peek() method
+
+ lastCharLen int // length of last character in bytes
+ lastLineLen int // length of last line in characters (for correct column reporting)
+
+ tokStart int // token text start position
+ tokEnd int // token text end position
+
+ // Error is called for each error encountered. If no Error
+ // function is set, the error is reported to os.Stderr.
+ Error func(pos token.Pos, msg string)
+
+ // ErrorCount is incremented by one for each error encountered.
+ ErrorCount int
+
+ // tokPos is the start position of the most recently scanned token; set by
+ // Scan. The Filename field is always left untouched by the Scanner. If
+ // an error is reported (via Error) and Position is invalid, the scanner is
+ // not inside a token.
+ tokPos token.Pos
+}
+
+// New creates and initializes a new instance of Scanner using src as
+// its source content.
+func New(src []byte) *Scanner {
+ // even though we accept a src, we read from an io.Reader compatible type
+ // (*bytes.Buffer), so in the future we can easily switch to a streaming
+ // read.
+ b := bytes.NewBuffer(src)
+ s := &Scanner{
+ buf: b,
+ src: src,
+ }
+
+ // srcPosition always starts with 1
+ s.srcPos.Line = 1
+ return s
+}
+
+// next reads the next rune from the buffered reader. It returns rune(0) if
+// an error occurs (or io.EOF is returned).
+func (s *Scanner) next() rune {
+ ch, size, err := s.buf.ReadRune()
+ if err != nil {
+ // advance for error reporting
+ s.srcPos.Column++
+ s.srcPos.Offset += size
+ s.lastCharLen = size
+ return eof
+ }
+
+ if ch == utf8.RuneError && size == 1 {
+ s.srcPos.Column++
+ s.srcPos.Offset += size
+ s.lastCharLen = size
+ s.err("illegal UTF-8 encoding")
+ return ch
+ }
+
+ // remember last position
+ s.prevPos = s.srcPos
+
+ s.srcPos.Column++
+ s.lastCharLen = size
+ s.srcPos.Offset += size
+
+ if ch == '\n' {
+ s.srcPos.Line++
+ s.lastLineLen = s.srcPos.Column
+ s.srcPos.Column = 0
+ }
+
+ // debug
+ // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column)
+ return ch
+}
+
+// unread unreads the previously read rune and updates the source position
+func (s *Scanner) unread() {
+ if err := s.buf.UnreadRune(); err != nil {
+ panic(err) // this is a user fault, we should catch it
+ }
+ s.srcPos = s.prevPos // put back last position
+}
+
+// peek returns the next rune without advancing the reader.
+func (s *Scanner) peek() rune {
+ peek, _, err := s.buf.ReadRune()
+ if err != nil {
+ return eof
+ }
+
+ s.buf.UnreadRune()
+ return peek
+}
+
+// Scan scans the next token and returns the token.
+func (s *Scanner) Scan() token.Token {
+ ch := s.next()
+
+ // skip white space
+ for isWhitespace(ch) {
+ ch = s.next()
+ }
+
+ var tok token.Type
+
+ // token text markings
+ s.tokStart = s.srcPos.Offset - s.lastCharLen
+
+ // token position: the initial next() moved the offset by one rune (its
+ // size in bytes, actually), but we are interested in the starting point
+ s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen
+ if s.srcPos.Column > 0 {
+ // common case: last character was not a '\n'
+ s.tokPos.Line = s.srcPos.Line
+ s.tokPos.Column = s.srcPos.Column
+ } else {
+ // last character was a '\n'
+ // (we cannot be at the beginning of the source
+ // since we have called next() at least once)
+ s.tokPos.Line = s.srcPos.Line - 1
+ s.tokPos.Column = s.lastLineLen
+ }
+
+ switch {
+ case isLetter(ch):
+ lit := s.scanIdentifier()
+ if lit == "true" || lit == "false" {
+ tok = token.BOOL
+ } else if lit == "null" {
+ tok = token.NULL
+ } else {
+ s.err("illegal char")
+ }
+ case isDecimal(ch):
+ tok = s.scanNumber(ch)
+ default:
+ switch ch {
+ case eof:
+ tok = token.EOF
+ case '"':
+ tok = token.STRING
+ s.scanString()
+ case '.':
+ tok = token.PERIOD
+ ch = s.peek()
+ if isDecimal(ch) {
+ tok = token.FLOAT
+ ch = s.scanMantissa(ch)
+ ch = s.scanExponent(ch)
+ }
+ case '[':
+ tok = token.LBRACK
+ case ']':
+ tok = token.RBRACK
+ case '{':
+ tok = token.LBRACE
+ case '}':
+ tok = token.RBRACE
+ case ',':
+ tok = token.COMMA
+ case ':':
+ tok = token.COLON
+ case '-':
+ if isDecimal(s.peek()) {
+ ch := s.next()
+ tok = s.scanNumber(ch)
+ } else {
+ s.err("illegal char")
+ }
+ default:
+ s.err("illegal char: " + string(ch))
+ }
+ }
+
+ // finish token ending
+ s.tokEnd = s.srcPos.Offset
+
+ // create token literal
+ var tokenText string
+ if s.tokStart >= 0 {
+ tokenText = string(s.src[s.tokStart:s.tokEnd])
+ }
+ s.tokStart = s.tokEnd // ensure idempotency of tokenText() call
+
+ return token.Token{
+ Type: tok,
+ Pos: s.tokPos,
+ Text: tokenText,
+ }
+}
+
+// scanNumber scans a JSON number definition starting with the given rune
+func (s *Scanner) scanNumber(ch rune) token.Type {
+ zero := ch == '0'
+ pos := s.srcPos
+
+ s.scanMantissa(ch)
+ ch = s.next() // seek forward
+ if ch == 'e' || ch == 'E' {
+ ch = s.scanExponent(ch)
+ return token.FLOAT
+ }
+
+ if ch == '.' {
+ ch = s.scanFraction(ch)
+ if ch == 'e' || ch == 'E' {
+ ch = s.next()
+ ch = s.scanExponent(ch)
+ }
+ return token.FLOAT
+ }
+
+ if ch != eof {
+ s.unread()
+ }
+
+ // If the literal started with 0 and has more than one digit, error
+ if zero && pos != s.srcPos {
+ s.err("numbers cannot start with 0")
+ }
+
+ return token.NUMBER
+}
+
+// scanMantissa scans the mantissa beginning from the given rune. It returns
+// the next non-decimal rune. It's used to determine whether it's a fraction
+// or an exponent.
+func (s *Scanner) scanMantissa(ch rune) rune {
+ scanned := false
+ for isDecimal(ch) {
+ ch = s.next()
+ scanned = true
+ }
+
+ if scanned && ch != eof {
+ s.unread()
+ }
+ return ch
+}
+
+// scanFraction scans the fraction after the '.' rune
+func (s *Scanner) scanFraction(ch rune) rune {
+ if ch == '.' {
+ ch = s.peek() // we peek just to see if we can move forward
+ ch = s.scanMantissa(ch)
+ }
+ return ch
+}
+
+// scanExponent scans the remaining parts of an exponent after the 'e' or 'E'
+// rune.
+func (s *Scanner) scanExponent(ch rune) rune {
+ if ch == 'e' || ch == 'E' {
+ ch = s.next()
+ if ch == '-' || ch == '+' {
+ ch = s.next()
+ }
+ ch = s.scanMantissa(ch)
+ }
+ return ch
+}
+
+// scanString scans a quoted string
+func (s *Scanner) scanString() {
+ braces := 0
+ for {
+ // '"' opening already consumed
+ // read character after quote
+ ch := s.next()
+
+ if ch == '\n' || ch < 0 || ch == eof {
+ s.err("literal not terminated")
+ return
+ }
+
+ if ch == '"' {
+ break
+ }
+
+ // If we're going into a ${} then we can ignore quotes for a while
+ if braces == 0 && ch == '$' && s.peek() == '{' {
+ braces++
+ s.next()
+ } else if braces > 0 && ch == '{' {
+ braces++
+ }
+ if braces > 0 && ch == '}' {
+ braces--
+ }
+
+ if ch == '\\' {
+ s.scanEscape()
+ }
+ }
+
+ return
+}
+
+// scanEscape scans an escape sequence
+func (s *Scanner) scanEscape() rune {
+ // http://en.cppreference.com/w/cpp/language/escape
+ ch := s.next() // read character after '\\'
+ switch ch {
+ case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"':
+ // nothing to do
+ case '0', '1', '2', '3', '4', '5', '6', '7':
+ // octal notation
+ ch = s.scanDigits(ch, 8, 3)
+ case 'x':
+ // hexadecimal notation
+ ch = s.scanDigits(s.next(), 16, 2)
+ case 'u':
+ // universal character name
+ ch = s.scanDigits(s.next(), 16, 4)
+ case 'U':
+ // universal character name
+ ch = s.scanDigits(s.next(), 16, 8)
+ default:
+ s.err("illegal char escape")
+ }
+ return ch
+}
+
+// scanDigits scans a rune with the given base for n times. For example, an
+// octal escape such as \123 results in a call scanDigits(ch, 8, 3)
+func (s *Scanner) scanDigits(ch rune, base, n int) rune {
+ for n > 0 && digitVal(ch) < base {
+ ch = s.next()
+ n--
+ }
+ if n > 0 {
+ s.err("illegal char escape")
+ }
+
+ // we scanned all digits, put the last non digit char back
+ s.unread()
+ return ch
+}
+
+// scanIdentifier scans an identifier and returns the literal string
+func (s *Scanner) scanIdentifier() string {
+ offs := s.srcPos.Offset - s.lastCharLen
+ ch := s.next()
+ for isLetter(ch) || isDigit(ch) || ch == '-' {
+ ch = s.next()
+ }
+
+ if ch != eof {
+ s.unread() // we got identifier, put back latest char
+ }
+
+ return string(s.src[offs:s.srcPos.Offset])
+}
+
+// recentPosition returns the position of the character immediately after the
+// character or token returned by the last call to Scan.
+func (s *Scanner) recentPosition() (pos token.Pos) {
+ pos.Offset = s.srcPos.Offset - s.lastCharLen
+ switch {
+ case s.srcPos.Column > 0:
+ // common case: last character was not a '\n'
+ pos.Line = s.srcPos.Line
+ pos.Column = s.srcPos.Column
+ case s.lastLineLen > 0:
+ // last character was a '\n'
+ // (we cannot be at the beginning of the source
+ // since we have called next() at least once)
+ pos.Line = s.srcPos.Line - 1
+ pos.Column = s.lastLineLen
+ default:
+ // at the beginning of the source
+ pos.Line = 1
+ pos.Column = 1
+ }
+ return
+}
+
+// err reports a scanning error to the s.Error function. If no Error function
+// is set, it prints the message to os.Stderr by default.
+func (s *Scanner) err(msg string) {
+ s.ErrorCount++
+ pos := s.recentPosition()
+
+ if s.Error != nil {
+ s.Error(pos, msg)
+ return
+ }
+
+ fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg)
+}
+
+// isLetter returns true if the given rune is a letter
+func isLetter(ch rune) bool {
+ return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch)
+}
+
+// isDigit returns true if the given rune is a decimal digit
+func isDigit(ch rune) bool {
+ return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch)
+}
+
+// isDecimal returns true if the given rune is a decimal digit
+func isDecimal(ch rune) bool {
+ return '0' <= ch && ch <= '9'
+}
+
+// isHexadecimal returns true if the given rune is a hexadecimal digit
+func isHexadecimal(ch rune) bool {
+ return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F'
+}
+
+// isWhitespace returns true if the rune is a space, tab, newline or carriage return
+func isWhitespace(ch rune) bool {
+ return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'
+}
+
+// digitVal returns the integer value of a given octal, decimal, or hexadecimal rune
+func digitVal(ch rune) int {
+ switch {
+ case '0' <= ch && ch <= '9':
+ return int(ch - '0')
+ case 'a' <= ch && ch <= 'f':
+ return int(ch - 'a' + 10)
+ case 'A' <= ch && ch <= 'F':
+ return int(ch - 'A' + 10)
+ }
+ return 16 // larger than any legal digit val
+}
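+
+// scanExample is an illustrative sketch (a helper added for exposition, not
+// part of the upstream API): it drains a Scanner, collecting token types
+// until EOF, e.g. LBRACE STRING COLON NUMBER RBRACE for `{"a": 1}`.
+func scanExample(src []byte) []token.Type {
+ s := New(src)
+ var types []token.Type
+ for {
+ tok := s.Scan()
+ if tok.Type == token.EOF {
+ return types
+ }
+ types = append(types, tok.Type)
+ }
+}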
diff --git a/vendor/github.com/hashicorp/hcl/json/token/position.go b/vendor/github.com/hashicorp/hcl/json/token/position.go
new file mode 100644
index 0000000..59c1bb7
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/json/token/position.go
@@ -0,0 +1,46 @@
+package token
+
+import "fmt"
+
+// Pos describes an arbitrary source position
+// including the file, line, and column location.
+// A Position is valid if the line number is > 0.
+type Pos struct {
+ Filename string // filename, if any
+ Offset int // offset, starting at 0
+ Line int // line number, starting at 1
+ Column int // column number, starting at 1 (character count)
+}
+
+// IsValid returns true if the position is valid.
+func (p *Pos) IsValid() bool { return p.Line > 0 }
+
+// String returns a string in one of several forms:
+//
+// file:line:column valid position with file name
+// line:column valid position without file name
+// file invalid position with file name
+// - invalid position without file name
+func (p Pos) String() string {
+ s := p.Filename
+ if p.IsValid() {
+ if s != "" {
+ s += ":"
+ }
+ s += fmt.Sprintf("%d:%d", p.Line, p.Column)
+ }
+ if s == "" {
+ s = "-"
+ }
+ return s
+}
+
+// Before reports whether the position p is before u.
+func (p Pos) Before(u Pos) bool {
+ return u.Offset > p.Offset || u.Line > p.Line
+}
+
+// After reports whether the position p is after u.
+func (p Pos) After(u Pos) bool {
+ return u.Offset < p.Offset || u.Line < p.Line
+}
diff --git a/vendor/github.com/hashicorp/hcl/json/token/token.go b/vendor/github.com/hashicorp/hcl/json/token/token.go
new file mode 100644
index 0000000..95a0c3e
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/json/token/token.go
@@ -0,0 +1,118 @@
+package token
+
+import (
+ "fmt"
+ "strconv"
+
+ hcltoken "github.com/hashicorp/hcl/hcl/token"
+)
+
+// Token defines a single HCL token which can be obtained via the Scanner
+type Token struct {
+ Type Type
+ Pos Pos
+ Text string
+}
+
+// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language)
+type Type int
+
+const (
+ // Special tokens
+ ILLEGAL Type = iota
+ EOF
+
+ identifier_beg
+ literal_beg
+ NUMBER // 12345
+ FLOAT // 123.45
+ BOOL // true,false
+ STRING // "abc"
+ NULL // null
+ literal_end
+ identifier_end
+
+ operator_beg
+ LBRACK // [
+ LBRACE // {
+ COMMA // ,
+ PERIOD // .
+ COLON // :
+
+ RBRACK // ]
+ RBRACE // }
+
+ operator_end
+)
+
+var tokens = [...]string{
+ ILLEGAL: "ILLEGAL",
+
+ EOF: "EOF",
+
+ NUMBER: "NUMBER",
+ FLOAT: "FLOAT",
+ BOOL: "BOOL",
+ STRING: "STRING",
+ NULL: "NULL",
+
+ LBRACK: "LBRACK",
+ LBRACE: "LBRACE",
+ COMMA: "COMMA",
+ PERIOD: "PERIOD",
+ COLON: "COLON",
+
+ RBRACK: "RBRACK",
+ RBRACE: "RBRACE",
+}
+
+// String returns the string corresponding to the token type t.
+func (t Type) String() string {
+ s := ""
+ if 0 <= t && t < Type(len(tokens)) {
+ s = tokens[t]
+ }
+ if s == "" {
+ s = "token(" + strconv.Itoa(int(t)) + ")"
+ }
+ return s
+}
+
+// IsIdentifier returns true for tokens corresponding to identifiers and basic
+// type literals; it returns false otherwise.
+func (t Type) IsIdentifier() bool { return identifier_beg < t && t < identifier_end }
+
+// IsLiteral returns true for tokens corresponding to basic type literals; it
+// returns false otherwise.
+func (t Type) IsLiteral() bool { return literal_beg < t && t < literal_end }
+
+// IsOperator returns true for tokens corresponding to operators and
+// delimiters; it returns false otherwise.
+func (t Type) IsOperator() bool { return operator_beg < t && t < operator_end }
+
+// String returns the token's literal text. Note that this is only
+// applicable for certain token types, such as token.STRING or
+// token.NUMBER.
+func (t Token) String() string {
+ return fmt.Sprintf("%s %s %s", t.Pos.String(), t.Type.String(), t.Text)
+}
+
+// HCLToken converts this token to an HCL token.
+//
+// The token type must be a literal type or this will panic.
+func (t Token) HCLToken() hcltoken.Token {
+ switch t.Type {
+ case BOOL:
+ return hcltoken.Token{Type: hcltoken.BOOL, Text: t.Text}
+ case FLOAT:
+ return hcltoken.Token{Type: hcltoken.FLOAT, Text: t.Text}
+ case NULL:
+ return hcltoken.Token{Type: hcltoken.STRING, Text: ""}
+ case NUMBER:
+ return hcltoken.Token{Type: hcltoken.NUMBER, Text: t.Text}
+ case STRING:
+ return hcltoken.Token{Type: hcltoken.STRING, Text: t.Text, JSON: true}
+ default:
+ panic(fmt.Sprintf("unimplemented HCLToken for type: %s", t.Type))
+ }
+}
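+
+// hclTokenExample is an illustrative sketch (a helper added for exposition,
+// not part of the upstream API): a JSON string carries JSON: true into the
+// HCL token so the HCL decoder applies JSON escape rules when unquoting.
+func hclTokenExample() hcltoken.Token {
+ t := Token{Type: STRING, Text: `"web"`}
+ return t.HCLToken() // hcltoken.Token{Type: hcltoken.STRING, Text: `"web"`, JSON: true}
+}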
diff --git a/vendor/github.com/hashicorp/hcl/lex.go b/vendor/github.com/hashicorp/hcl/lex.go
new file mode 100644
index 0000000..d9993c2
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/lex.go
@@ -0,0 +1,38 @@
+package hcl
+
+import (
+ "unicode"
+ "unicode/utf8"
+)
+
+type lexModeValue byte
+
+const (
+ lexModeUnknown lexModeValue = iota
+ lexModeHcl
+ lexModeJson
+)
+
+// lexMode returns whether we're going to be parsing in JSON
+// mode or HCL mode.
+func lexMode(v []byte) lexModeValue {
+ var (
+ r rune
+ w int
+ offset int
+ )
+
+ for {
+ r, w = utf8.DecodeRune(v[offset:])
+ offset += w
+ if unicode.IsSpace(r) {
+ continue
+ }
+ if r == '{' {
+ return lexModeJson
+ }
+ break
+ }
+
+ return lexModeHcl
+}
diff --git a/vendor/github.com/hashicorp/hcl/parse.go b/vendor/github.com/hashicorp/hcl/parse.go
new file mode 100644
index 0000000..1fca53c
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/parse.go
@@ -0,0 +1,39 @@
+package hcl
+
+import (
+ "fmt"
+
+ "github.com/hashicorp/hcl/hcl/ast"
+ hclParser "github.com/hashicorp/hcl/hcl/parser"
+ jsonParser "github.com/hashicorp/hcl/json/parser"
+)
+
+// ParseBytes accepts a byte slice as input and returns the AST tree.
+//
+// The input can be either JSON or HCL.
+func ParseBytes(in []byte) (*ast.File, error) {
+ return parse(in)
+}
+
+// ParseString accepts a string as input and returns the AST tree.
+func ParseString(input string) (*ast.File, error) {
+ return parse([]byte(input))
+}
+
+func parse(in []byte) (*ast.File, error) {
+ switch lexMode(in) {
+ case lexModeHcl:
+ return hclParser.Parse(in)
+ case lexModeJson:
+ return jsonParser.Parse(in)
+ }
+
+ return nil, fmt.Errorf("unknown config format")
+}
+
+// Parse parses the given input and returns the root object.
+//
+// The input format can be either HCL or JSON.
+func Parse(input string) (*ast.File, error) {
+ return parse([]byte(input))
+}
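+
+// parseExample is an illustrative sketch (a helper added for exposition,
+// not part of the upstream API): the same configuration in HCL and JSON
+// syntax goes through the same entry point, with lexMode choosing the
+// parser from the first non-space rune.
+func parseExample() error {
+ if _, err := Parse(`region = "us-east-1"`); err != nil { // HCL mode
+ return err
+ }
+ _, err := Parse(`{"region": "us-east-1"}`) // JSON mode: leading '{'
+ return err
+}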
diff --git a/vendor/github.com/inconshreveable/mousetrap/LICENSE b/vendor/github.com/inconshreveable/mousetrap/LICENSE
new file mode 100644
index 0000000..5f920e9
--- /dev/null
+++ b/vendor/github.com/inconshreveable/mousetrap/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2022 Alan Shreve (@inconshreveable)
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/inconshreveable/mousetrap/README.md b/vendor/github.com/inconshreveable/mousetrap/README.md
new file mode 100644
index 0000000..7a950d1
--- /dev/null
+++ b/vendor/github.com/inconshreveable/mousetrap/README.md
@@ -0,0 +1,23 @@
+# mousetrap
+
+mousetrap is a tiny library that answers a single question.
+
+On a Windows machine, was the process invoked by someone double-clicking on
+the executable file while browsing in explorer?
+
+### Motivation
+
+Windows developers unfamiliar with command line tools will often "double-click"
+the executable for a tool. Because most CLI tools print the help and then exit
+when invoked without arguments, this is often very frustrating for those users.
+
+mousetrap provides a way to detect these invocations so that you can provide
+more helpful behavior and instructions on how to run the CLI tool. To see what
+this looks like, both from an organizational and a technical perspective, see
+https://inconshreveable.com/09-09-2014/sweat-the-small-stuff/
+
+### The interface
+
+The library exposes a single interface:
+
+ func StartedByExplorer() (bool)
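+
+### Usage
+
+A minimal sketch of typical use (this snippet is illustrative, not from the
+upstream docs): print a hint and pause before exiting, so the console window
+spawned by the double-click doesn't vanish immediately.
+
+    if mousetrap.StartedByExplorer() {
+        fmt.Println("Don't double-click this program; run it from cmd.exe or PowerShell.")
+        time.Sleep(5 * time.Second)
+        os.Exit(1)
+    }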
diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_others.go b/vendor/github.com/inconshreveable/mousetrap/trap_others.go
new file mode 100644
index 0000000..06a91f0
--- /dev/null
+++ b/vendor/github.com/inconshreveable/mousetrap/trap_others.go
@@ -0,0 +1,16 @@
+//go:build !windows
+// +build !windows
+
+package mousetrap
+
+// StartedByExplorer returns true if the program was invoked by the user
+// double-clicking on the executable from explorer.exe
+//
+// It is conservative and returns false if any of the internal calls fail.
+// It does not guarantee that the program was run from a terminal. It can only tell you
+// whether it was launched from explorer.exe
+//
+// On non-Windows platforms, it always returns false.
+func StartedByExplorer() bool {
+ return false
+}
diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_windows.go b/vendor/github.com/inconshreveable/mousetrap/trap_windows.go
new file mode 100644
index 0000000..0c56880
--- /dev/null
+++ b/vendor/github.com/inconshreveable/mousetrap/trap_windows.go
@@ -0,0 +1,42 @@
+package mousetrap
+
+import (
+ "syscall"
+ "unsafe"
+)
+
+func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) {
+ snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0)
+ if err != nil {
+ return nil, err
+ }
+ defer syscall.CloseHandle(snapshot)
+ var procEntry syscall.ProcessEntry32
+ procEntry.Size = uint32(unsafe.Sizeof(procEntry))
+ if err = syscall.Process32First(snapshot, &procEntry); err != nil {
+ return nil, err
+ }
+ for {
+ if procEntry.ProcessID == uint32(pid) {
+ return &procEntry, nil
+ }
+ err = syscall.Process32Next(snapshot, &procEntry)
+ if err != nil {
+ return nil, err
+ }
+ }
+}
+
+// StartedByExplorer returns true if the program was invoked by the user double-clicking
+// on the executable from explorer.exe
+//
+// It is conservative and returns false if any of the internal calls fail.
+// It does not guarantee that the program was run from a terminal. It can only tell you
+// whether it was launched from explorer.exe
+func StartedByExplorer() bool {
+ pe, err := getProcessEntry(syscall.Getppid())
+ if err != nil {
+ return false
+ }
+ return "explorer.exe" == syscall.UTF16ToString(pe.ExeFile[:])
+}
diff --git a/vendor/github.com/klauspost/compress/LICENSE b/vendor/github.com/klauspost/compress/LICENSE
new file mode 100644
index 0000000..87d5574
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/LICENSE
@@ -0,0 +1,304 @@
+Copyright (c) 2012 The Go Authors. All rights reserved.
+Copyright (c) 2019 Klaus Post. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------
+
+Files: gzhttp/*
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2016-2017 The New York Times Company
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+------------------
+
+Files: s2/cmd/internal/readahead/*
+
+The MIT License (MIT)
+
+Copyright (c) 2015 Klaus Post
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+---------------------
+Files: snappy/*
+Files: internal/snapref/*
+
+Copyright (c) 2011 The Snappy-Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+-----------------
+
+Files: s2/cmd/internal/filepathx/*
+
+Copyright 2016 The filepathx Authors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/klauspost/compress/flate/deflate.go b/vendor/github.com/klauspost/compress/flate/deflate.go
new file mode 100644
index 0000000..de912e1
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/deflate.go
@@ -0,0 +1,1017 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Copyright (c) 2015 Klaus Post
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flate
+
+import (
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "io"
+ "math"
+)
+
+const (
+ NoCompression = 0
+ BestSpeed = 1
+ BestCompression = 9
+ DefaultCompression = -1
+
+ // HuffmanOnly disables Lempel-Ziv match searching and only performs Huffman
+ // entropy encoding. This mode is useful in compressing data that has
+ // already been compressed with an LZ style algorithm (e.g. Snappy or LZ4)
+ // that lacks an entropy encoder. Compression gains are achieved when
+ // certain bytes in the input stream occur more frequently than others.
+ //
+ // Note that HuffmanOnly produces a compressed output that is
+ // RFC 1951 compliant. That is, any valid DEFLATE decompressor will
+ // continue to be able to decompress this output.
+ HuffmanOnly = -2
+ ConstantCompression = HuffmanOnly // compatibility alias.
+
+ logWindowSize = 15
+ windowSize = 1 << logWindowSize
+ windowMask = windowSize - 1
+ logMaxOffsetSize = 15 // Standard DEFLATE
+ minMatchLength = 4 // The smallest match that the compressor looks for
+ maxMatchLength = 258 // The longest match for the compressor
+ minOffsetSize = 1 // The shortest offset that makes any sense
+
+	// The maximum number of tokens we will encode at a time.
+	// Smaller sizes usually create less optimal blocks.
+ // Bigger can make context switching slow.
+ // We use this for levels 7-9, so we make it big.
+ maxFlateBlockTokens = 1 << 15
+ maxStoreBlockSize = 65535
+ hashBits = 17 // After 17 performance degrades
+ hashSize = 1 << hashBits
+ hashMask = (1 << hashBits) - 1
+ hashShift = (hashBits + minMatchLength - 1) / minMatchLength
+ maxHashOffset = 1 << 28
+
+ skipNever = math.MaxInt32
+
+ debugDeflate = false
+)
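+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// file): HuffmanOnly is typically selected when the payload was already
+// LZ-compressed and only entropy coding is wanted, as the comment above
+// describes. The output is still valid RFC 1951 DEFLATE.
+func exampleHuffmanOnlyWriter(dst io.Writer, alreadyLZCompressed []byte) error {
+	zw, err := NewWriter(dst, HuffmanOnly)
+	if err != nil {
+		return err
+	}
+	if _, err := zw.Write(alreadyLZCompressed); err != nil {
+		return err
+	}
+	// Close emits the final block; any DEFLATE decompressor can read the result.
+	return zw.Close()
+}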
+
+type compressionLevel struct {
+ good, lazy, nice, chain, fastSkipHashing, level int
+}
+
+// Compression levels have been rebalanced from zlib deflate defaults
+// to give a bigger spread in speed and compression.
+// See https://blog.klauspost.com/rebalancing-deflate-compression-levels/
+var levels = []compressionLevel{
+ {}, // 0
+	// Levels 1-6 use a specialized algorithm - values not used
+ {0, 0, 0, 0, 0, 1},
+ {0, 0, 0, 0, 0, 2},
+ {0, 0, 0, 0, 0, 3},
+ {0, 0, 0, 0, 0, 4},
+ {0, 0, 0, 0, 0, 5},
+ {0, 0, 0, 0, 0, 6},
+ // Levels 7-9 use increasingly more lazy matching
+ // and increasingly stringent conditions for "good enough".
+ {8, 12, 16, 24, skipNever, 7},
+ {16, 30, 40, 64, skipNever, 8},
+ {32, 258, 258, 1024, skipNever, 9},
+}
+
+// advancedState contains state for the advanced levels, with bigger hash tables, etc.
+type advancedState struct {
+ // deflate state
+ length int
+ offset int
+ maxInsertIndex int
+ chainHead int
+ hashOffset int
+
+ ii uint16 // position of last match, intended to overflow to reset.
+
+ // input window: unprocessed data is window[index:windowEnd]
+ index int
+ hashMatch [maxMatchLength + minMatchLength]uint32
+
+ // Input hash chains
+ // hashHead[hashValue] contains the largest inputIndex with the specified hash value
+ // If hashHead[hashValue] is within the current window, then
+ // hashPrev[hashHead[hashValue] & windowMask] contains the previous index
+ // with the same hash value.
+ hashHead [hashSize]uint32
+ hashPrev [windowSize]uint32
+}
+
+type compressor struct {
+ compressionLevel
+
+ h *huffmanEncoder
+ w *huffmanBitWriter
+
+ // compression algorithm
+ fill func(*compressor, []byte) int // copy data to window
+ step func(*compressor) // process window
+
+ window []byte
+ windowEnd int
+ blockStart int // window index where current tokens start
+ err error
+
+ // queued output tokens
+ tokens tokens
+ fast fastEnc
+ state *advancedState
+
+ sync bool // requesting flush
+ byteAvailable bool // if true, still need to process window[index-1].
+}
+
+func (d *compressor) fillDeflate(b []byte) int {
+ s := d.state
+ if s.index >= 2*windowSize-(minMatchLength+maxMatchLength) {
+ // shift the window by windowSize
+ //copy(d.window[:], d.window[windowSize:2*windowSize])
+ *(*[windowSize]byte)(d.window) = *(*[windowSize]byte)(d.window[windowSize:])
+ s.index -= windowSize
+ d.windowEnd -= windowSize
+ if d.blockStart >= windowSize {
+ d.blockStart -= windowSize
+ } else {
+ d.blockStart = math.MaxInt32
+ }
+ s.hashOffset += windowSize
+ if s.hashOffset > maxHashOffset {
+ delta := s.hashOffset - 1
+ s.hashOffset -= delta
+ s.chainHead -= delta
+ // Iterate over slices instead of arrays to avoid copying
+ // the entire table onto the stack (Issue #18625).
+ for i, v := range s.hashPrev[:] {
+ if int(v) > delta {
+ s.hashPrev[i] = uint32(int(v) - delta)
+ } else {
+ s.hashPrev[i] = 0
+ }
+ }
+ for i, v := range s.hashHead[:] {
+ if int(v) > delta {
+ s.hashHead[i] = uint32(int(v) - delta)
+ } else {
+ s.hashHead[i] = 0
+ }
+ }
+ }
+ }
+ n := copy(d.window[d.windowEnd:], b)
+ d.windowEnd += n
+ return n
+}
+
+func (d *compressor) writeBlock(tok *tokens, index int, eof bool) error {
+ if index > 0 || eof {
+ var window []byte
+ if d.blockStart <= index {
+ window = d.window[d.blockStart:index]
+ }
+ d.blockStart = index
+ //d.w.writeBlock(tok, eof, window)
+ d.w.writeBlockDynamic(tok, eof, window, d.sync)
+ return d.w.err
+ }
+ return nil
+}
+
+// writeBlockSkip writes the current block and uses the number of tokens
+// to determine if the block should be stored when there are no matches,
+// or only Huffman encoded.
+func (d *compressor) writeBlockSkip(tok *tokens, index int, eof bool) error {
+ if index > 0 || eof {
+ if d.blockStart <= index {
+ window := d.window[d.blockStart:index]
+			// If we removed less than a 64th of all literals,
+			// we only Huffman compress the block.
+ if int(tok.n) > len(window)-int(tok.n>>6) {
+ d.w.writeBlockHuff(eof, window, d.sync)
+ } else {
+ // Write a dynamic huffman block.
+ d.w.writeBlockDynamic(tok, eof, window, d.sync)
+ }
+ } else {
+ d.w.writeBlock(tok, eof, nil)
+ }
+ d.blockStart = index
+ return d.w.err
+ }
+ return nil
+}
+
+// fillWindow will fill the current window with the supplied
+// dictionary and calculate all hashes.
+// This is much faster than doing a full encode.
+// Should only be used after a start/reset.
+func (d *compressor) fillWindow(b []byte) {
+ // Do not fill window if we are in store-only or huffman mode.
+ if d.level <= 0 {
+ return
+ }
+ if d.fast != nil {
+ // encode the last data, but discard the result
+ if len(b) > maxMatchOffset {
+ b = b[len(b)-maxMatchOffset:]
+ }
+ d.fast.Encode(&d.tokens, b)
+ d.tokens.Reset()
+ return
+ }
+ s := d.state
+ // If we are given too much, cut it.
+ if len(b) > windowSize {
+ b = b[len(b)-windowSize:]
+ }
+ // Add all to window.
+ n := copy(d.window[d.windowEnd:], b)
+
+	// Calculate 256 hashes at a time (more L1 cache hits)
+ loops := (n + 256 - minMatchLength) / 256
+ for j := 0; j < loops; j++ {
+ startindex := j * 256
+ end := startindex + 256 + minMatchLength - 1
+ if end > n {
+ end = n
+ }
+ tocheck := d.window[startindex:end]
+ dstSize := len(tocheck) - minMatchLength + 1
+
+ if dstSize <= 0 {
+ continue
+ }
+
+ dst := s.hashMatch[:dstSize]
+ bulkHash4(tocheck, dst)
+ var newH uint32
+ for i, val := range dst {
+ di := i + startindex
+ newH = val & hashMask
+ // Get previous value with the same hash.
+ // Our chain should point to the previous value.
+ s.hashPrev[di&windowMask] = s.hashHead[newH]
+ // Set the head of the hash chain to us.
+ s.hashHead[newH] = uint32(di + s.hashOffset)
+ }
+ }
+ // Update window information.
+ d.windowEnd += n
+ s.index = n
+}
+
+// findMatch tries to find a match starting at pos whose length is greater
+// than prevLength. We only look at d.chain possibilities before giving up.
+// Typically: pos = s.index, prevHead = s.chainHead-s.hashOffset, prevLength = minMatchLength-1.
+func (d *compressor) findMatch(pos int, prevHead int, lookahead int) (length, offset int, ok bool) {
+ minMatchLook := maxMatchLength
+ if lookahead < minMatchLook {
+ minMatchLook = lookahead
+ }
+
+ win := d.window[0 : pos+minMatchLook]
+
+ // We quit when we get a match that's at least nice long
+ nice := len(win) - pos
+ if d.nice < nice {
+ nice = d.nice
+ }
+
+	// tries limits how far down the hash chain we look before giving up.
+	tries := d.chain
+ length = minMatchLength - 1
+
+ wEnd := win[pos+length]
+ wPos := win[pos:]
+ minIndex := pos - windowSize
+ if minIndex < 0 {
+ minIndex = 0
+ }
+ offset = 0
+
+ if d.chain < 100 {
+ for i := prevHead; tries > 0; tries-- {
+ if wEnd == win[i+length] {
+ n := matchLen(win[i:i+minMatchLook], wPos)
+ if n > length {
+ length = n
+ offset = pos - i
+ ok = true
+ if n >= nice {
+ // The match is good enough that we don't try to find a better one.
+ break
+ }
+ wEnd = win[pos+n]
+ }
+ }
+ if i <= minIndex {
+ // hashPrev[i & windowMask] has already been overwritten, so stop now.
+ break
+ }
+ i = int(d.state.hashPrev[i&windowMask]) - d.state.hashOffset
+ if i < minIndex {
+ break
+ }
+ }
+ return
+ }
+
+	// Minimum gain (in estimated bits saved) to accept a match.
+	cGain := 4
+
+	// Base cost, in bits, of emitting any match; matches must be better than this.
+	// Some inputs like it higher (CSV), some lower (JSON).
+	const baseCost = 3
+
+ for i := prevHead; tries > 0; tries-- {
+ if wEnd == win[i+length] {
+ n := matchLen(win[i:i+minMatchLook], wPos)
+ if n > length {
+ // Calculate gain. Estimate
+ newGain := d.h.bitLengthRaw(wPos[:n]) - int(offsetExtraBits[offsetCode(uint32(pos-i))]) - baseCost - int(lengthExtraBits[lengthCodes[(n-3)&255]])
+
+ //fmt.Println("gain:", newGain, "prev:", cGain, "raw:", d.h.bitLengthRaw(wPos[:n]), "this-len:", n, "prev-len:", length)
+ if newGain > cGain {
+ length = n
+ offset = pos - i
+ cGain = newGain
+ ok = true
+ if n >= nice {
+ // The match is good enough that we don't try to find a better one.
+ break
+ }
+ wEnd = win[pos+n]
+ }
+ }
+ }
+ if i <= minIndex {
+ // hashPrev[i & windowMask] has already been overwritten, so stop now.
+ break
+ }
+ i = int(d.state.hashPrev[i&windowMask]) - d.state.hashOffset
+ if i < minIndex {
+ break
+ }
+ }
+ return
+}
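+
+// Illustrative sketch of the gain heuristic above (hypothetical helper, not
+// part of the library): a candidate match is only accepted if the estimated
+// bits saved by replacing the literals exceed the match's own cost plus the
+// gain of the best match found so far.
+func exampleMatchGain(litBits, offsetExtra, lengthExtra, prevGain int) bool {
+	const baseCost = 3 // assumed fixed cost of a match token, as above
+	// Bits the literals would have cost, minus the match's extra bits and base cost.
+	newGain := litBits - offsetExtra - baseCost - lengthExtra
+	return newGain > prevGain
+}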
+
+func (d *compressor) writeStoredBlock(buf []byte) error {
+ if d.w.writeStoredHeader(len(buf), false); d.w.err != nil {
+ return d.w.err
+ }
+ d.w.writeBytes(buf)
+ return d.w.err
+}
+
+// hash4 returns a hash representation of the first 4 bytes
+// of the supplied slice.
+// The caller must ensure that len(b) >= 4.
+func hash4(b []byte) uint32 {
+ return hash4u(binary.LittleEndian.Uint32(b), hashBits)
+}
+
+// hash4u returns the hash of u to fit in a hash table with h bits.
+// Preferably h should be a constant and should always be <32.
+func hash4u(u uint32, h uint8) uint32 {
+ return (u * prime4bytes) >> (32 - h)
+}
+
+// bulkHash4 will compute hashes using the same
+// algorithm as hash4
+func bulkHash4(b []byte, dst []uint32) {
+ if len(b) < 4 {
+ return
+ }
+ hb := binary.LittleEndian.Uint32(b)
+
+ dst[0] = hash4u(hb, hashBits)
+ end := len(b) - 4 + 1
+ for i := 1; i < end; i++ {
+ hb = (hb >> 8) | uint32(b[i+3])<<24
+ dst[i] = hash4u(hb, hashBits)
+ }
+}
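+
+// Illustrative sketch: hash4/hash4u are multiplicative hashes. Multiplying by
+// a large odd constant mixes all four input bytes into the high bits, and the
+// shift keeps only the top hashBits bits as the table index. The helper below
+// is hypothetical and simply mirrors hash4 step by step; the caller must
+// ensure len(b) >= 4.
+func exampleHash4Steps(b []byte) uint32 {
+	u := binary.LittleEndian.Uint32(b) // load 4 bytes, little-endian
+	mixed := u * prime4bytes           // multiply to spread entropy into the high bits
+	return mixed >> (32 - hashBits)    // keep the top hashBits bits as the index
+}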
+
+func (d *compressor) initDeflate() {
+ d.window = make([]byte, 2*windowSize)
+ d.byteAvailable = false
+ d.err = nil
+ if d.state == nil {
+ return
+ }
+ s := d.state
+ s.index = 0
+ s.hashOffset = 1
+ s.length = minMatchLength - 1
+ s.offset = 0
+ s.chainHead = -1
+}
+
+// deflateLazy is the same as deflate, but with d.fastSkipHashing == skipNever,
+// meaning it always has lazy matching on.
+func (d *compressor) deflateLazy() {
+ s := d.state
+ // Sanity enables additional runtime tests.
+ // It's intended to be used during development
+ // to supplement the currently ad-hoc unit tests.
+ const sanity = debugDeflate
+
+ if d.windowEnd-s.index < minMatchLength+maxMatchLength && !d.sync {
+ return
+ }
+ if d.windowEnd != s.index && d.chain > 100 {
+ // Get literal huffman coder.
+ if d.h == nil {
+ d.h = newHuffmanEncoder(maxFlateBlockTokens)
+ }
+ var tmp [256]uint16
+ for _, v := range d.window[s.index:d.windowEnd] {
+ tmp[v]++
+ }
+ d.h.generate(tmp[:], 15)
+ }
+
+ s.maxInsertIndex = d.windowEnd - (minMatchLength - 1)
+
+ for {
+ if sanity && s.index > d.windowEnd {
+ panic("index > windowEnd")
+ }
+ lookahead := d.windowEnd - s.index
+ if lookahead < minMatchLength+maxMatchLength {
+ if !d.sync {
+ return
+ }
+ if sanity && s.index > d.windowEnd {
+ panic("index > windowEnd")
+ }
+ if lookahead == 0 {
+ // Flush current output block if any.
+ if d.byteAvailable {
+ // There is still one pending token that needs to be flushed
+ d.tokens.AddLiteral(d.window[s.index-1])
+ d.byteAvailable = false
+ }
+ if d.tokens.n > 0 {
+ if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil {
+ return
+ }
+ d.tokens.Reset()
+ }
+ return
+ }
+ }
+ if s.index < s.maxInsertIndex {
+ // Update the hash
+ hash := hash4(d.window[s.index:])
+ ch := s.hashHead[hash]
+ s.chainHead = int(ch)
+ s.hashPrev[s.index&windowMask] = ch
+ s.hashHead[hash] = uint32(s.index + s.hashOffset)
+ }
+ prevLength := s.length
+ prevOffset := s.offset
+ s.length = minMatchLength - 1
+ s.offset = 0
+ minIndex := s.index - windowSize
+ if minIndex < 0 {
+ minIndex = 0
+ }
+
+ if s.chainHead-s.hashOffset >= minIndex && lookahead > prevLength && prevLength < d.lazy {
+ if newLength, newOffset, ok := d.findMatch(s.index, s.chainHead-s.hashOffset, lookahead); ok {
+ s.length = newLength
+ s.offset = newOffset
+ }
+ }
+
+ if prevLength >= minMatchLength && s.length <= prevLength {
+ // No better match, but check for better match at end...
+ //
+ // Skip forward a number of bytes.
+ // Offset of 2 seems to yield best results. 3 is sometimes better.
+ const checkOff = 2
+
+ // Check all, except full length
+ if prevLength < maxMatchLength-checkOff {
+ prevIndex := s.index - 1
+ if prevIndex+prevLength < s.maxInsertIndex {
+ end := lookahead
+ if lookahead > maxMatchLength+checkOff {
+ end = maxMatchLength + checkOff
+ }
+ end += prevIndex
+
+ // Hash at match end.
+ h := hash4(d.window[prevIndex+prevLength:])
+ ch2 := int(s.hashHead[h]) - s.hashOffset - prevLength
+ if prevIndex-ch2 != prevOffset && ch2 > minIndex+checkOff {
+ length := matchLen(d.window[prevIndex+checkOff:end], d.window[ch2+checkOff:])
+ // It seems like a pure length metric is best.
+ if length > prevLength {
+ prevLength = length
+ prevOffset = prevIndex - ch2
+
+ // Extend back...
+ for i := checkOff - 1; i >= 0; i-- {
+ if prevLength >= maxMatchLength || d.window[prevIndex+i] != d.window[ch2+i] {
+ // Emit tokens we "owe"
+ for j := 0; j <= i; j++ {
+ d.tokens.AddLiteral(d.window[prevIndex+j])
+ if d.tokens.n == maxFlateBlockTokens {
+ // The block includes the current character
+ if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil {
+ return
+ }
+ d.tokens.Reset()
+ }
+ s.index++
+ if s.index < s.maxInsertIndex {
+ h := hash4(d.window[s.index:])
+ ch := s.hashHead[h]
+ s.chainHead = int(ch)
+ s.hashPrev[s.index&windowMask] = ch
+ s.hashHead[h] = uint32(s.index + s.hashOffset)
+ }
+ }
+ break
+ } else {
+ prevLength++
+ }
+ }
+ } else if false {
+ // Check one further ahead.
+ // Only rarely better, disabled for now.
+ prevIndex++
+ h := hash4(d.window[prevIndex+prevLength:])
+ ch2 := int(s.hashHead[h]) - s.hashOffset - prevLength
+ if prevIndex-ch2 != prevOffset && ch2 > minIndex+checkOff {
+ length := matchLen(d.window[prevIndex+checkOff:end], d.window[ch2+checkOff:])
+ // It seems like a pure length metric is best.
+ if length > prevLength+checkOff {
+ prevLength = length
+ prevOffset = prevIndex - ch2
+ prevIndex--
+
+ // Extend back...
+ for i := checkOff; i >= 0; i-- {
+ if prevLength >= maxMatchLength || d.window[prevIndex+i] != d.window[ch2+i-1] {
+ // Emit tokens we "owe"
+ for j := 0; j <= i; j++ {
+ d.tokens.AddLiteral(d.window[prevIndex+j])
+ if d.tokens.n == maxFlateBlockTokens {
+ // The block includes the current character
+ if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil {
+ return
+ }
+ d.tokens.Reset()
+ }
+ s.index++
+ if s.index < s.maxInsertIndex {
+ h := hash4(d.window[s.index:])
+ ch := s.hashHead[h]
+ s.chainHead = int(ch)
+ s.hashPrev[s.index&windowMask] = ch
+ s.hashHead[h] = uint32(s.index + s.hashOffset)
+ }
+ }
+ break
+ } else {
+ prevLength++
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ // There was a match at the previous step, and the current match is
+ // not better. Output the previous match.
+ d.tokens.AddMatch(uint32(prevLength-3), uint32(prevOffset-minOffsetSize))
+
+ // Insert in the hash table all strings up to the end of the match.
+ // index and index-1 are already inserted. If there is not enough
+ // lookahead, the last two strings are not inserted into the hash
+ // table.
+ newIndex := s.index + prevLength - 1
+ // Calculate missing hashes
+ end := newIndex
+ if end > s.maxInsertIndex {
+ end = s.maxInsertIndex
+ }
+ end += minMatchLength - 1
+ startindex := s.index + 1
+ if startindex > s.maxInsertIndex {
+ startindex = s.maxInsertIndex
+ }
+ tocheck := d.window[startindex:end]
+ dstSize := len(tocheck) - minMatchLength + 1
+ if dstSize > 0 {
+ dst := s.hashMatch[:dstSize]
+ bulkHash4(tocheck, dst)
+ var newH uint32
+ for i, val := range dst {
+ di := i + startindex
+ newH = val & hashMask
+ // Get previous value with the same hash.
+ // Our chain should point to the previous value.
+ s.hashPrev[di&windowMask] = s.hashHead[newH]
+ // Set the head of the hash chain to us.
+ s.hashHead[newH] = uint32(di + s.hashOffset)
+ }
+ }
+
+ s.index = newIndex
+ d.byteAvailable = false
+ s.length = minMatchLength - 1
+ if d.tokens.n == maxFlateBlockTokens {
+ // The block includes the current character
+ if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil {
+ return
+ }
+ d.tokens.Reset()
+ }
+ s.ii = 0
+ } else {
+ // Reset, if we got a match this run.
+ if s.length >= minMatchLength {
+ s.ii = 0
+ }
+ // We have a byte waiting. Emit it.
+ if d.byteAvailable {
+ s.ii++
+ d.tokens.AddLiteral(d.window[s.index-1])
+ if d.tokens.n == maxFlateBlockTokens {
+ if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil {
+ return
+ }
+ d.tokens.Reset()
+ }
+ s.index++
+
+ // If we have a long run of no matches, skip additional bytes
+ // Resets when s.ii overflows after 64KB.
+ if n := int(s.ii) - d.chain; n > 0 {
+ n = 1 + int(n>>6)
+ for j := 0; j < n; j++ {
+ if s.index >= d.windowEnd-1 {
+ break
+ }
+ d.tokens.AddLiteral(d.window[s.index-1])
+ if d.tokens.n == maxFlateBlockTokens {
+ if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil {
+ return
+ }
+ d.tokens.Reset()
+ }
+ // Index...
+ if s.index < s.maxInsertIndex {
+ h := hash4(d.window[s.index:])
+ ch := s.hashHead[h]
+ s.chainHead = int(ch)
+ s.hashPrev[s.index&windowMask] = ch
+ s.hashHead[h] = uint32(s.index + s.hashOffset)
+ }
+ s.index++
+ }
+ // Flush last byte
+ d.tokens.AddLiteral(d.window[s.index-1])
+ d.byteAvailable = false
+ // s.length = minMatchLength - 1 // not needed, since s.ii is reset above, so it should never be > minMatchLength
+ if d.tokens.n == maxFlateBlockTokens {
+ if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil {
+ return
+ }
+ d.tokens.Reset()
+ }
+ }
+ } else {
+ s.index++
+ d.byteAvailable = true
+ }
+ }
+ }
+}
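+
+// A short illustration of the lazy strategy above (informal, made-up lengths):
+// if the window already contains "abcd" and "bcdefg", and the input continues
+// "abcdefg...", a greedy encoder would take the 4-byte match "abcd"
+// immediately. deflateLazy instead defers one byte: the position one byte
+// later matches "bcdefg" for 6 bytes, so it is cheaper to emit 'a' as a
+// literal followed by the longer match. The mechanism is the
+// prevLength/s.length comparison in the loop above.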
+
+func (d *compressor) store() {
+ if d.windowEnd > 0 && (d.windowEnd == maxStoreBlockSize || d.sync) {
+ d.err = d.writeStoredBlock(d.window[:d.windowEnd])
+ d.windowEnd = 0
+ }
+}
+
+// fillBlock will fill the window with data for Huffman-only compression.
+// The number of bytes copied is returned.
+func (d *compressor) fillBlock(b []byte) int {
+ n := copy(d.window[d.windowEnd:], b)
+ d.windowEnd += n
+ return n
+}
+
+// storeHuff will compress and store the currently added data,
+// if enough has been accumulated or we are at the end of the stream.
+// Any error that occurred will be in d.err
+func (d *compressor) storeHuff() {
+ if d.windowEnd < len(d.window) && !d.sync || d.windowEnd == 0 {
+ return
+ }
+ d.w.writeBlockHuff(false, d.window[:d.windowEnd], d.sync)
+ d.err = d.w.err
+ d.windowEnd = 0
+}
+
+// storeFast will compress and store the currently added data,
+// if enough has been accumulated or we are at the end of the stream.
+// Any error that occurred will be in d.err
+func (d *compressor) storeFast() {
+ // We only compress if we have maxStoreBlockSize.
+ if d.windowEnd < len(d.window) {
+ if !d.sync {
+ return
+ }
+ // Handle extremely small sizes.
+ if d.windowEnd < 128 {
+ if d.windowEnd == 0 {
+ return
+ }
+ if d.windowEnd <= 32 {
+ d.err = d.writeStoredBlock(d.window[:d.windowEnd])
+ } else {
+ d.w.writeBlockHuff(false, d.window[:d.windowEnd], true)
+ d.err = d.w.err
+ }
+ d.tokens.Reset()
+ d.windowEnd = 0
+ d.fast.Reset()
+ return
+ }
+ }
+
+ d.fast.Encode(&d.tokens, d.window[:d.windowEnd])
+ // If we made zero matches, store the block as is.
+ if d.tokens.n == 0 {
+ d.err = d.writeStoredBlock(d.window[:d.windowEnd])
+ // If we removed less than 1/16th, huffman compress the block.
+ } else if int(d.tokens.n) > d.windowEnd-(d.windowEnd>>4) {
+ d.w.writeBlockHuff(false, d.window[:d.windowEnd], d.sync)
+ d.err = d.w.err
+ } else {
+ d.w.writeBlockDynamic(&d.tokens, false, d.window[:d.windowEnd], d.sync)
+ d.err = d.w.err
+ }
+ d.tokens.Reset()
+ d.windowEnd = 0
+}
+
+// write will add input bytes to the stream.
+// Unless an error occurs, all bytes will be consumed.
+func (d *compressor) write(b []byte) (n int, err error) {
+ if d.err != nil {
+ return 0, d.err
+ }
+ n = len(b)
+ for len(b) > 0 {
+ if d.windowEnd == len(d.window) || d.sync {
+ d.step(d)
+ }
+ b = b[d.fill(d, b):]
+ if d.err != nil {
+ return 0, d.err
+ }
+ }
+ return n, d.err
+}
+
+func (d *compressor) syncFlush() error {
+ d.sync = true
+ if d.err != nil {
+ return d.err
+ }
+ d.step(d)
+ if d.err == nil {
+ d.w.writeStoredHeader(0, false)
+ d.w.flush()
+ d.err = d.w.err
+ }
+ d.sync = false
+ return d.err
+}
+
+func (d *compressor) init(w io.Writer, level int) (err error) {
+ d.w = newHuffmanBitWriter(w)
+
+ switch {
+ case level == NoCompression:
+ d.window = make([]byte, maxStoreBlockSize)
+ d.fill = (*compressor).fillBlock
+ d.step = (*compressor).store
+ case level == ConstantCompression:
+ d.w.logNewTablePenalty = 10
+ d.window = make([]byte, 32<<10)
+ d.fill = (*compressor).fillBlock
+ d.step = (*compressor).storeHuff
+ case level == DefaultCompression:
+ level = 5
+ fallthrough
+ case level >= 1 && level <= 6:
+ d.w.logNewTablePenalty = 7
+ d.fast = newFastEnc(level)
+ d.window = make([]byte, maxStoreBlockSize)
+ d.fill = (*compressor).fillBlock
+ d.step = (*compressor).storeFast
+ case 7 <= level && level <= 9:
+ d.w.logNewTablePenalty = 8
+ d.state = &advancedState{}
+ d.compressionLevel = levels[level]
+ d.initDeflate()
+ d.fill = (*compressor).fillDeflate
+ d.step = (*compressor).deflateLazy
+ case -level >= MinCustomWindowSize && -level <= MaxCustomWindowSize:
+ d.w.logNewTablePenalty = 7
+ d.fast = &fastEncL5Window{maxOffset: int32(-level), cur: maxStoreBlockSize}
+ d.window = make([]byte, maxStoreBlockSize)
+ d.fill = (*compressor).fillBlock
+ d.step = (*compressor).storeFast
+ default:
+ return fmt.Errorf("flate: invalid compression level %d: want value in range [-2, 9]", level)
+ }
+ d.level = level
+ return nil
+}
+
+// reset the state of the compressor.
+func (d *compressor) reset(w io.Writer) {
+ d.w.reset(w)
+ d.sync = false
+ d.err = nil
+ // We only need to reset a few things for Snappy.
+ if d.fast != nil {
+ d.fast.Reset()
+ d.windowEnd = 0
+ d.tokens.Reset()
+ return
+ }
+ switch d.compressionLevel.chain {
+ case 0:
+		// level was NoCompression or ConstantCompression.
+ d.windowEnd = 0
+ default:
+ s := d.state
+ s.chainHead = -1
+ for i := range s.hashHead {
+ s.hashHead[i] = 0
+ }
+ for i := range s.hashPrev {
+ s.hashPrev[i] = 0
+ }
+ s.hashOffset = 1
+ s.index, d.windowEnd = 0, 0
+ d.blockStart, d.byteAvailable = 0, false
+ d.tokens.Reset()
+ s.length = minMatchLength - 1
+ s.offset = 0
+ s.ii = 0
+ s.maxInsertIndex = 0
+ }
+}
+
+func (d *compressor) close() error {
+ if d.err != nil {
+ return d.err
+ }
+ d.sync = true
+ d.step(d)
+ if d.err != nil {
+ return d.err
+ }
+ if d.w.writeStoredHeader(0, true); d.w.err != nil {
+ return d.w.err
+ }
+ d.w.flush()
+ d.w.reset(nil)
+ return d.w.err
+}
+
+// NewWriter returns a new Writer compressing data at the given level.
+// Following zlib, levels range from 1 (BestSpeed) to 9 (BestCompression);
+// higher levels typically run slower but compress more.
+// Level 0 (NoCompression) does not attempt any compression; it only adds the
+// necessary DEFLATE framing.
+// Level -1 (DefaultCompression) uses the default compression level.
+// Level -2 (ConstantCompression) will use Huffman compression only, giving
+// a very fast compression for all types of input, but sacrificing considerable
+// compression efficiency.
+//
+// If level is in the range [-2, 9] then the error returned will be nil.
+// Otherwise the error returned will be non-nil.
+func NewWriter(w io.Writer, level int) (*Writer, error) {
+ var dw Writer
+ if err := dw.d.init(w, level); err != nil {
+ return nil, err
+ }
+ return &dw, nil
+}
+
+// NewWriterDict is like NewWriter but initializes the new
+// Writer with a preset dictionary. The returned Writer behaves
+// as if the dictionary had been written to it without producing
+// any compressed output. The compressed data written to w
+// can only be decompressed by a Reader initialized with the
+// same dictionary.
+func NewWriterDict(w io.Writer, level int, dict []byte) (*Writer, error) {
+ zw, err := NewWriter(w, level)
+ if err != nil {
+ return nil, err
+ }
+ zw.d.fillWindow(dict)
+ zw.dict = append(zw.dict, dict...) // duplicate dictionary for Reset method.
+ return zw, err
+}
+
+// MinCustomWindowSize is the minimum window size that can be sent to NewWriterWindow.
+const MinCustomWindowSize = 32
+
+// MaxCustomWindowSize is the maximum custom window that can be sent to NewWriterWindow.
+const MaxCustomWindowSize = windowSize
+
+// NewWriterWindow returns a new Writer compressing data with a custom window size.
+// windowSize must be from MinCustomWindowSize to MaxCustomWindowSize.
+func NewWriterWindow(w io.Writer, windowSize int) (*Writer, error) {
+ if windowSize < MinCustomWindowSize {
+		return nil, errors.New("flate: requested window size less than MinCustomWindowSize")
+ }
+ if windowSize > MaxCustomWindowSize {
+ return nil, errors.New("flate: requested window size bigger than MaxCustomWindowSize")
+ }
+ var dw Writer
+ if err := dw.d.init(w, -windowSize); err != nil {
+ return nil, err
+ }
+ return &dw, nil
+}
+
+// A Writer takes data written to it and writes the compressed
+// form of that data to an underlying writer (see NewWriter).
+type Writer struct {
+ d compressor
+ dict []byte
+}
+
+// Write writes data to w, which will eventually write the
+// compressed form of data to its underlying writer.
+func (w *Writer) Write(data []byte) (n int, err error) {
+ return w.d.write(data)
+}
+
+// Flush flushes any pending data to the underlying writer.
+// It is useful mainly in compressed network protocols, to ensure that
+// a remote reader has enough data to reconstruct a packet.
+// Flush does not return until the data has been written.
+// Calling Flush when there is no pending data still causes the Writer
+// to emit a sync marker of at least 4 bytes.
+// If the underlying writer returns an error, Flush returns that error.
+//
+// In the terminology of the zlib library, Flush is equivalent to Z_SYNC_FLUSH.
+func (w *Writer) Flush() error {
+ // For more about flushing:
+ // http://www.bolet.org/~pornin/deflate-flush.html
+ return w.d.syncFlush()
+}
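+
+// Illustrative usage sketch (hypothetical helper): in a request/response
+// protocol, Flush after each message so the remote side can decode it
+// without waiting for the stream to be closed.
+func exampleSendMessage(zw *Writer, msg []byte) error {
+	if _, err := zw.Write(msg); err != nil {
+		return err
+	}
+	return zw.Flush() // emits a sync marker; the reader can now consume msg
+}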
+
+// Close flushes and closes the writer.
+func (w *Writer) Close() error {
+ return w.d.close()
+}
+
+// Reset discards the writer's state and makes it equivalent to
+// the result of NewWriter or NewWriterDict called with dst
+// and w's level and dictionary.
+func (w *Writer) Reset(dst io.Writer) {
+ if len(w.dict) > 0 {
+ // w was created with NewWriterDict
+ w.d.reset(dst)
+ if dst != nil {
+ w.d.fillWindow(w.dict)
+ }
+ } else {
+ // w was created with NewWriter
+ w.d.reset(dst)
+ }
+}
+
+// ResetDict discards the writer's state and makes it equivalent to
+// the result of NewWriter or NewWriterDict called with dst
+// and w's level, but sets a specific dictionary.
+func (w *Writer) ResetDict(dst io.Writer, dict []byte) {
+ w.dict = dict
+ w.d.reset(dst)
+ w.d.fillWindow(w.dict)
+}
diff --git a/vendor/github.com/klauspost/compress/flate/dict_decoder.go b/vendor/github.com/klauspost/compress/flate/dict_decoder.go
new file mode 100644
index 0000000..bb36351
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/dict_decoder.go
@@ -0,0 +1,184 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flate
+
+// dictDecoder implements the LZ77 sliding dictionary as used in decompression.
+// LZ77 decompresses data through sequences of two forms of commands:
+//
+// - Literal insertions: Runs of one or more symbols are inserted into the data
+// stream as is. This is accomplished through the writeByte method for a
+// single symbol, or combinations of writeSlice/writeMark for multiple symbols.
+// Any valid stream must start with a literal insertion if no preset dictionary
+// is used.
+//
+// - Backward copies: Runs of one or more symbols are copied from previously
+// emitted data. Backward copies come as the tuple (dist, length) where dist
+// determines how far back in the stream to copy from and length determines how
+// many bytes to copy. Note that it is valid for the length to be greater than
+// the distance. Since LZ77 uses forward copies, that situation is used to
+// perform a form of run-length encoding on repeated runs of symbols.
+// The writeCopy and tryWriteCopy are used to implement this command.
+//
+// For performance reasons, this implementation performs few to no sanity
+// checks on the arguments. As such, the invariants documented for each
+// method call must be respected.
+type dictDecoder struct {
+ hist []byte // Sliding window history
+
+ // Invariant: 0 <= rdPos <= wrPos <= len(hist)
+ wrPos int // Current output position in buffer
+ rdPos int // Have emitted hist[:rdPos] already
+ full bool // Has a full window length been written yet?
+}
+
+// init initializes dictDecoder to have a sliding window dictionary of the given
+// size. If a preset dict is provided, it will initialize the dictionary with
+// the contents of dict.
+func (dd *dictDecoder) init(size int, dict []byte) {
+ *dd = dictDecoder{hist: dd.hist}
+
+ if cap(dd.hist) < size {
+ dd.hist = make([]byte, size)
+ }
+ dd.hist = dd.hist[:size]
+
+ if len(dict) > len(dd.hist) {
+ dict = dict[len(dict)-len(dd.hist):]
+ }
+ dd.wrPos = copy(dd.hist, dict)
+ if dd.wrPos == len(dd.hist) {
+ dd.wrPos = 0
+ dd.full = true
+ }
+ dd.rdPos = dd.wrPos
+}
+
+// histSize reports the total amount of historical data in the dictionary.
+func (dd *dictDecoder) histSize() int {
+ if dd.full {
+ return len(dd.hist)
+ }
+ return dd.wrPos
+}
+
+// availRead reports the number of bytes that can be flushed by readFlush.
+func (dd *dictDecoder) availRead() int {
+ return dd.wrPos - dd.rdPos
+}
+
+// availWrite reports the available amount of output buffer space.
+func (dd *dictDecoder) availWrite() int {
+ return len(dd.hist) - dd.wrPos
+}
+
+// writeSlice returns a slice of the available buffer to write data to.
+//
+// This invariant will be kept: len(s) <= availWrite()
+func (dd *dictDecoder) writeSlice() []byte {
+ return dd.hist[dd.wrPos:]
+}
+
+// writeMark advances the writer pointer by cnt.
+//
+// This invariant must be kept: 0 <= cnt <= availWrite()
+func (dd *dictDecoder) writeMark(cnt int) {
+ dd.wrPos += cnt
+}
+
+// writeByte writes a single byte to the dictionary.
+//
+// This invariant must be kept: 0 < availWrite()
+func (dd *dictDecoder) writeByte(c byte) {
+ dd.hist[dd.wrPos] = c
+ dd.wrPos++
+}
+
+// writeCopy copies a string at a given (dist, length) to the output.
+// This returns the number of bytes copied and may be less than the requested
+// length if the available space in the output buffer is too small.
+//
+// This invariant must be kept: 0 < dist <= histSize()
+func (dd *dictDecoder) writeCopy(dist, length int) int {
+ dstBase := dd.wrPos
+ dstPos := dstBase
+ srcPos := dstPos - dist
+ endPos := dstPos + length
+ if endPos > len(dd.hist) {
+ endPos = len(dd.hist)
+ }
+
+ // Copy non-overlapping section after destination position.
+ //
+ // This section is non-overlapping in that the copy length for this section
+ // is always less than or equal to the backwards distance. This can occur
+ // if a distance refers to data that wraps-around in the buffer.
+ // Thus, a backwards copy is performed here; that is, the exact bytes in
+	// the source prior to the copy are placed in the destination.
+ if srcPos < 0 {
+ srcPos += len(dd.hist)
+ dstPos += copy(dd.hist[dstPos:endPos], dd.hist[srcPos:])
+ srcPos = 0
+ }
+
+ // Copy possibly overlapping section before destination position.
+ //
+ // This section can overlap if the copy length for this section is larger
+ // than the backwards distance. This is allowed by LZ77 so that repeated
+ // strings can be succinctly represented using (dist, length) pairs.
+	// Thus, a forwards copy is performed here; that is, the bytes copied are
+ // possibly dependent on the resulting bytes in the destination as the copy
+ // progresses along. This is functionally equivalent to the following:
+ //
+ // for i := 0; i < endPos-dstPos; i++ {
+ // dd.hist[dstPos+i] = dd.hist[srcPos+i]
+ // }
+ // dstPos = endPos
+ //
+ for dstPos < endPos {
+ dstPos += copy(dd.hist[dstPos:endPos], dd.hist[srcPos:dstPos])
+ }
+
+ dd.wrPos = dstPos
+ return dstPos - dstBase
+}
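+
+// Illustrative sketch (hypothetical helper, not upstream API): a copy whose
+// length exceeds its distance performs run-length expansion, as described
+// above. Starting from the two literals "ab", writeCopy(2, 6) repeats them
+// forward, leaving "abababab" in the history buffer.
+func exampleOverlappingCopy() []byte {
+	var dd dictDecoder
+	dd.init(32, nil)
+	dd.writeByte('a')
+	dd.writeByte('b')
+	dd.writeCopy(2, 6)    // forward copy overlaps its own output
+	return dd.readFlush() // "abababab"
+}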
+
+// tryWriteCopy tries to copy a string at a given (distance, length) to the
+// output. This specialized version is optimized for short distances.
+//
+// This method is designed to be inlined for performance reasons.
+//
+// This invariant must be kept: 0 < dist <= histSize()
+func (dd *dictDecoder) tryWriteCopy(dist, length int) int {
+ dstPos := dd.wrPos
+ endPos := dstPos + length
+ if dstPos < dist || endPos > len(dd.hist) {
+ return 0
+ }
+ dstBase := dstPos
+ srcPos := dstPos - dist
+
+ // Copy possibly overlapping section before destination position.
+loop:
+ dstPos += copy(dd.hist[dstPos:endPos], dd.hist[srcPos:dstPos])
+ if dstPos < endPos {
+ goto loop // Avoid for-loop so that this function can be inlined
+ }
+
+ dd.wrPos = dstPos
+ return dstPos - dstBase
+}
+
+// readFlush returns a slice of the historical buffer that is ready to be
+// emitted to the user. The data returned by readFlush must be fully consumed
+// before calling any other dictDecoder methods.
+func (dd *dictDecoder) readFlush() []byte {
+ toRead := dd.hist[dd.rdPos:dd.wrPos]
+ dd.rdPos = dd.wrPos
+ if dd.wrPos == len(dd.hist) {
+ dd.wrPos, dd.rdPos = 0, 0
+ dd.full = true
+ }
+ return toRead
+}
diff --git a/vendor/github.com/klauspost/compress/flate/fast_encoder.go b/vendor/github.com/klauspost/compress/flate/fast_encoder.go
new file mode 100644
index 0000000..c8124b5
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/fast_encoder.go
@@ -0,0 +1,193 @@
+// Copyright 2011 The Snappy-Go Authors. All rights reserved.
+// Modified for deflate by Klaus Post (c) 2015.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flate
+
+import (
+ "encoding/binary"
+ "fmt"
+)
+
+type fastEnc interface {
+ Encode(dst *tokens, src []byte)
+ Reset()
+}
+
+func newFastEnc(level int) fastEnc {
+ switch level {
+ case 1:
+ return &fastEncL1{fastGen: fastGen{cur: maxStoreBlockSize}}
+ case 2:
+ return &fastEncL2{fastGen: fastGen{cur: maxStoreBlockSize}}
+ case 3:
+ return &fastEncL3{fastGen: fastGen{cur: maxStoreBlockSize}}
+ case 4:
+ return &fastEncL4{fastGen: fastGen{cur: maxStoreBlockSize}}
+ case 5:
+ return &fastEncL5{fastGen: fastGen{cur: maxStoreBlockSize}}
+ case 6:
+ return &fastEncL6{fastGen: fastGen{cur: maxStoreBlockSize}}
+ default:
+ panic("invalid level specified")
+ }
+}
+
+const (
+ tableBits = 15 // Bits used in the table
+ tableSize = 1 << tableBits // Size of the table
+ tableShift = 32 - tableBits // Right-shift to get the tableBits most significant bits of a uint32.
+ baseMatchOffset = 1 // The smallest match offset
+ baseMatchLength = 3 // The smallest match length per the RFC section 3.2.5
+ maxMatchOffset = 1 << 15 // The largest match offset
+
+ bTableBits = 17 // Bits used in the big tables
+ bTableSize = 1 << bTableBits // Size of the table
+ allocHistory = maxStoreBlockSize * 5 // Size to preallocate for history.
+ bufferReset = (1 << 31) - allocHistory - maxStoreBlockSize - 1 // Reset the buffer offset when reaching this.
+)
+
+const (
+ prime3bytes = 506832829
+ prime4bytes = 2654435761
+ prime5bytes = 889523592379
+ prime6bytes = 227718039650203
+ prime7bytes = 58295818150454627
+ prime8bytes = 0xcf1bbcdcb7a56463
+)
+
+func load3232(b []byte, i int32) uint32 {
+ return binary.LittleEndian.Uint32(b[i:])
+}
+
+func load6432(b []byte, i int32) uint64 {
+ return binary.LittleEndian.Uint64(b[i:])
+}
+
+type tableEntry struct {
+ offset int32
+}
+
+// fastGen maintains the table for matches,
+// and the previous byte block for level 2.
+// This is the generic implementation.
+type fastGen struct {
+ hist []byte
+ cur int32
+}
+
+func (e *fastGen) addBlock(src []byte) int32 {
+ // check if we have space already
+ if len(e.hist)+len(src) > cap(e.hist) {
+ if cap(e.hist) == 0 {
+ e.hist = make([]byte, 0, allocHistory)
+ } else {
+ if cap(e.hist) < maxMatchOffset*2 {
+ panic("unexpected buffer size")
+ }
+ // Move down
+ offset := int32(len(e.hist)) - maxMatchOffset
+ // copy(e.hist[0:maxMatchOffset], e.hist[offset:])
+ *(*[maxMatchOffset]byte)(e.hist) = *(*[maxMatchOffset]byte)(e.hist[offset:])
+ e.cur += offset
+ e.hist = e.hist[:maxMatchOffset]
+ }
+ }
+ s := int32(len(e.hist))
+ e.hist = append(e.hist, src...)
+ return s
+}
+
+type tableEntryPrev struct {
+ Cur tableEntry
+ Prev tableEntry
+}
+
+// hash7 returns the hash of the lowest 7 bytes of u to fit in a hash table with h bits.
+// Preferably h should be a constant and should always be <64.
+func hash7(u uint64, h uint8) uint32 {
+ return uint32(((u << (64 - 56)) * prime7bytes) >> ((64 - h) & reg8SizeMask64))
+}
+
+// hashLen returns a hash of the lowest mls bytes of u, with length output bits.
+// mls must be >=3 and <=8. Any other value will return hash for 4 bytes.
+// length should always be < 32.
+// Preferably length and mls should be a constant for inlining.
+func hashLen(u uint64, length, mls uint8) uint32 {
+ switch mls {
+ case 3:
+ return (uint32(u<<8) * prime3bytes) >> (32 - length)
+ case 5:
+ return uint32(((u << (64 - 40)) * prime5bytes) >> (64 - length))
+ case 6:
+ return uint32(((u << (64 - 48)) * prime6bytes) >> (64 - length))
+ case 7:
+ return uint32(((u << (64 - 56)) * prime7bytes) >> (64 - length))
+ case 8:
+ return uint32((u * prime8bytes) >> (64 - length))
+ default:
+ return (uint32(u) * prime4bytes) >> (32 - length)
+ }
+}
+
+// matchlen will return the match length between offsets s and t in src.
+// The maximum length returned is maxMatchLength - 4.
+// It is assumed that s > t, that t >=0 and s < len(src).
+func (e *fastGen) matchlen(s, t int32, src []byte) int32 {
+ if debugDecode {
+ if t >= s {
+ panic(fmt.Sprint("t >=s:", t, s))
+ }
+ if int(s) >= len(src) {
+ panic(fmt.Sprint("s >= len(src):", s, len(src)))
+ }
+ if t < 0 {
+ panic(fmt.Sprint("t < 0:", t))
+ }
+ if s-t > maxMatchOffset {
+ panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchLength (", maxMatchOffset, ")"))
+ }
+ }
+ s1 := int(s) + maxMatchLength - 4
+ if s1 > len(src) {
+ s1 = len(src)
+ }
+
+ // Extend the match to be as long as possible.
+ return int32(matchLen(src[s:s1], src[t:]))
+}
+
+// matchlenLong will return the match length between offsets s and t in src.
+// It is assumed that s > t, that t >=0 and s < len(src).
+func (e *fastGen) matchlenLong(s, t int32, src []byte) int32 {
+ if debugDeflate {
+ if t >= s {
+ panic(fmt.Sprint("t >=s:", t, s))
+ }
+ if int(s) >= len(src) {
+ panic(fmt.Sprint("s >= len(src):", s, len(src)))
+ }
+ if t < 0 {
+ panic(fmt.Sprint("t < 0:", t))
+ }
+ if s-t > maxMatchOffset {
+ panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchLength (", maxMatchOffset, ")"))
+ }
+ }
+ // Extend the match to be as long as possible.
+ return int32(matchLen(src[s:], src[t:]))
+}
+
+// Reset the encoding table.
+func (e *fastGen) Reset() {
+ if cap(e.hist) < allocHistory {
+ e.hist = make([]byte, 0, allocHistory)
+ }
+ // We offset current position so everything will be out of reach.
+	// If we are above bufferReset, it will be cleared anyway since len(hist) == 0.
+ if e.cur <= bufferReset {
+ e.cur += maxMatchOffset + int32(len(e.hist))
+ }
+ e.hist = e.hist[:0]
+}
diff --git a/vendor/github.com/klauspost/compress/flate/huffman_bit_writer.go b/vendor/github.com/klauspost/compress/flate/huffman_bit_writer.go
new file mode 100644
index 0000000..f70594c
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/huffman_bit_writer.go
@@ -0,0 +1,1182 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flate
+
+import (
+ "encoding/binary"
+ "fmt"
+ "io"
+ "math"
+)
+
+const (
+ // The largest offset code.
+ offsetCodeCount = 30
+
+ // The special code used to mark the end of a block.
+ endBlockMarker = 256
+
+ // The first length code.
+ lengthCodesStart = 257
+
+ // The number of codegen codes.
+ codegenCodeCount = 19
+ badCode = 255
+
+	// maxPredefinedTokens is the maximum number of tokens
+	// for which we check whether the fixed encoding is smaller.
+ maxPredefinedTokens = 250
+
+ // bufferFlushSize indicates the buffer size
+ // after which bytes are flushed to the writer.
+ // Should preferably be a multiple of 6, since
+ // we accumulate 6 bytes between writes to the buffer.
+ bufferFlushSize = 246
+)
+
+// Minimum length code that emits bits.
+const lengthExtraBitsMinCode = 8
+
+// The number of extra bits needed by length code X - LENGTH_CODES_START.
+var lengthExtraBits = [32]uint8{
+ /* 257 */ 0, 0, 0,
+ /* 260 */ 0, 0, 0, 0, 0, 1, 1, 1, 1, 2,
+ /* 270 */ 2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
+ /* 280 */ 4, 5, 5, 5, 5, 0,
+}
+
+// The length indicated by length code X - LENGTH_CODES_START.
+var lengthBase = [32]uint8{
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 10,
+ 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
+ 64, 80, 96, 112, 128, 160, 192, 224, 255,
+}
+
+// Minimum offset code that emits bits.
+const offsetExtraBitsMinCode = 4
+
+// offset code word extra bits.
+var offsetExtraBits = [32]int8{
+ 0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
+ 4, 4, 5, 5, 6, 6, 7, 7, 8, 8,
+ 9, 9, 10, 10, 11, 11, 12, 12, 13, 13,
+ /* extended window */
+ 14, 14,
+}
+
+var offsetCombined = [32]uint32{}
+
+func init() {
+ var offsetBase = [32]uint32{
+ /* normal deflate */
+ 0x000000, 0x000001, 0x000002, 0x000003, 0x000004,
+ 0x000006, 0x000008, 0x00000c, 0x000010, 0x000018,
+ 0x000020, 0x000030, 0x000040, 0x000060, 0x000080,
+ 0x0000c0, 0x000100, 0x000180, 0x000200, 0x000300,
+ 0x000400, 0x000600, 0x000800, 0x000c00, 0x001000,
+ 0x001800, 0x002000, 0x003000, 0x004000, 0x006000,
+
+ /* extended window */
+ 0x008000, 0x00c000,
+ }
+
+ for i := range offsetCombined[:] {
+ // Don't use extended window values...
+ if offsetExtraBits[i] == 0 || offsetBase[i] > 0x006000 {
+ continue
+ }
+ offsetCombined[i] = uint32(offsetExtraBits[i]) | (offsetBase[i] << 8)
+ }
+}
+
+// The odd order in which the codegen code sizes are written.
+var codegenOrder = []uint32{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}
+
+type huffmanBitWriter struct {
+ // writer is the underlying writer.
+ // Do not use it directly; use the write method, which ensures
+ // that Write errors are sticky.
+ writer io.Writer
+
+ // Data waiting to be written is bytes[0:nbytes]
+ // and then the low nbits of bits.
+ bits uint64
+ nbits uint8
+ nbytes uint8
+ lastHuffMan bool
+ literalEncoding *huffmanEncoder
+ tmpLitEncoding *huffmanEncoder
+ offsetEncoding *huffmanEncoder
+ codegenEncoding *huffmanEncoder
+ err error
+ lastHeader int
+	// logNewTablePenalty is the shift penalty added to the estimated size of a
+	// new table; at 0, a reused block may be up to 2x the optimal size before
+	// a new table is generated.
+ logNewTablePenalty uint
+ bytes [256 + 8]byte
+ literalFreq [lengthCodesStart + 32]uint16
+ offsetFreq [32]uint16
+ codegenFreq [codegenCodeCount]uint16
+
+ // codegen must have an extra space for the final symbol.
+ codegen [literalCount + offsetCodeCount + 1]uint8
+}
+
+// Huffman reuse.
+//
+// The huffmanBitWriter supports reusing huffman tables and thereby combining block sections.
+//
+// This is controlled by several variables:
+//
+// If lastHeader is non-zero the Huffman table can be reused.
+// This also indicates that a Huffman table has been generated that can output all
+// possible symbols.
+// It also indicates that an EOB has not yet been emitted, so if a new table is generated
+// an EOB with the previous table must be written.
+//
+// If lastHuffMan is set, a table for outputting literals has been generated and offsets are invalid.
+//
+// An incoming block estimates the output size of a fresh table by calculating the
+// optimal size and adding a penalty in 'logNewTablePenalty'.
+// A Huffman table is not optimal, which is why we add a penalty, and generating a new table
+// is slower both for compression and decompression.
+
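+// Illustrative sketch of the reuse decision described above (hypothetical
+// helper and names): a fresh table is only generated when its estimated
+// size, inflated by the shift penalty, still beats reusing the previous
+// table's codes.
+func exampleShouldReuse(reuseBits, freshBits int, logNewTablePenalty uint) bool {
+	penalized := freshBits + freshBits>>logNewTablePenalty
+	return reuseBits <= penalized
+}
+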
+func newHuffmanBitWriter(w io.Writer) *huffmanBitWriter {
+ return &huffmanBitWriter{
+ writer: w,
+ literalEncoding: newHuffmanEncoder(literalCount),
+ tmpLitEncoding: newHuffmanEncoder(literalCount),
+ codegenEncoding: newHuffmanEncoder(codegenCodeCount),
+ offsetEncoding: newHuffmanEncoder(offsetCodeCount),
+ }
+}
+
+func (w *huffmanBitWriter) reset(writer io.Writer) {
+ w.writer = writer
+ w.bits, w.nbits, w.nbytes, w.err = 0, 0, 0, nil
+ w.lastHeader = 0
+ w.lastHuffMan = false
+}
+
+func (w *huffmanBitWriter) canReuse(t *tokens) (ok bool) {
+ a := t.offHist[:offsetCodeCount]
+ b := w.offsetEncoding.codes
+ b = b[:len(a)]
+ for i, v := range a {
+ if v != 0 && b[i].zero() {
+ return false
+ }
+ }
+
+ a = t.extraHist[:literalCount-256]
+ b = w.literalEncoding.codes[256:literalCount]
+ b = b[:len(a)]
+ for i, v := range a {
+ if v != 0 && b[i].zero() {
+ return false
+ }
+ }
+
+ a = t.litHist[:256]
+ b = w.literalEncoding.codes[:len(a)]
+ for i, v := range a {
+ if v != 0 && b[i].zero() {
+ return false
+ }
+ }
+ return true
+}
+
+func (w *huffmanBitWriter) flush() {
+ if w.err != nil {
+ w.nbits = 0
+ return
+ }
+ if w.lastHeader > 0 {
+ // We owe an EOB
+ w.writeCode(w.literalEncoding.codes[endBlockMarker])
+ w.lastHeader = 0
+ }
+ n := w.nbytes
+ for w.nbits != 0 {
+ w.bytes[n] = byte(w.bits)
+ w.bits >>= 8
+ if w.nbits > 8 { // Avoid underflow
+ w.nbits -= 8
+ } else {
+ w.nbits = 0
+ }
+ n++
+ }
+ w.bits = 0
+ w.write(w.bytes[:n])
+ w.nbytes = 0
+}
+
+func (w *huffmanBitWriter) write(b []byte) {
+ if w.err != nil {
+ return
+ }
+ _, w.err = w.writer.Write(b)
+}
+
+func (w *huffmanBitWriter) writeBits(b int32, nb uint8) {
+ w.bits |= uint64(b) << (w.nbits & 63)
+ w.nbits += nb
+ if w.nbits >= 48 {
+ w.writeOutBits()
+ }
+}
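+
+// Illustrative note: bits accumulate LSB-first in the 64-bit register. For
+// example, writeBits(0b101, 3) followed by writeBits(0b1, 1) leaves the low
+// four bits of w.bits as 0b1101. Once 48 or more bits are pending, six whole
+// bytes are spilled to the byte buffer by writeOutBits.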
+
+func (w *huffmanBitWriter) writeBytes(bytes []byte) {
+ if w.err != nil {
+ return
+ }
+ n := w.nbytes
+ if w.nbits&7 != 0 {
+ w.err = InternalError("writeBytes with unfinished bits")
+ return
+ }
+ for w.nbits != 0 {
+ w.bytes[n] = byte(w.bits)
+ w.bits >>= 8
+ w.nbits -= 8
+ n++
+ }
+ if n != 0 {
+ w.write(w.bytes[:n])
+ }
+ w.nbytes = 0
+ w.write(bytes)
+}
+
+// RFC 1951 3.2.7 specifies a special run-length encoding for specifying
+// the literal and offset lengths arrays (which are concatenated into a single
+// array). This method generates that run-length encoding.
+//
+// The result is written into the codegen array, and the frequencies
+// of each code are written into the codegenFreq array.
+// Codes 0-15 are single byte codes. Codes 16-18 are followed by additional
+// information. Code badCode is an end marker.
+//
+// numLiterals The number of literals in literalEncoding
+// numOffsets The number of offsets in offsetEncoding
+// litEnc, offEnc The literal and offset encoder to use
+func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int, litEnc, offEnc *huffmanEncoder) {
+ for i := range w.codegenFreq {
+ w.codegenFreq[i] = 0
+ }
+ // Note that we are using codegen both as a temporary variable for holding
+ // a copy of the frequencies, and as the place where we put the result.
+ // This is fine because the output is always shorter than the input used
+ // so far.
+ codegen := w.codegen[:] // cache
+ // Copy the concatenated code sizes to codegen. Put a marker at the end.
+ cgnl := codegen[:numLiterals]
+ for i := range cgnl {
+ cgnl[i] = litEnc.codes[i].len()
+ }
+
+ cgnl = codegen[numLiterals : numLiterals+numOffsets]
+ for i := range cgnl {
+ cgnl[i] = offEnc.codes[i].len()
+ }
+ codegen[numLiterals+numOffsets] = badCode
+
+ size := codegen[0]
+ count := 1
+ outIndex := 0
+ for inIndex := 1; size != badCode; inIndex++ {
+ // INVARIANT: We have seen "count" copies of size that have not yet
+ // had output generated for them.
+ nextSize := codegen[inIndex]
+ if nextSize == size {
+ count++
+ continue
+ }
+ // We need to generate codegen indicating "count" of size.
+ if size != 0 {
+ codegen[outIndex] = size
+ outIndex++
+ w.codegenFreq[size]++
+ count--
+ for count >= 3 {
+ n := 6
+ if n > count {
+ n = count
+ }
+ codegen[outIndex] = 16
+ outIndex++
+ codegen[outIndex] = uint8(n - 3)
+ outIndex++
+ w.codegenFreq[16]++
+ count -= n
+ }
+ } else {
+ for count >= 11 {
+ n := 138
+ if n > count {
+ n = count
+ }
+ codegen[outIndex] = 18
+ outIndex++
+ codegen[outIndex] = uint8(n - 11)
+ outIndex++
+ w.codegenFreq[18]++
+ count -= n
+ }
+ if count >= 3 {
+ // count >= 3 && count <= 10
+ codegen[outIndex] = 17
+ outIndex++
+ codegen[outIndex] = uint8(count - 3)
+ outIndex++
+ w.codegenFreq[17]++
+ count = 0
+ }
+ }
+ count--
+ for ; count >= 0; count-- {
+ codegen[outIndex] = size
+ outIndex++
+ w.codegenFreq[size]++
+ }
+ // Set up invariant for next time through the loop.
+ size = nextSize
+ count = 1
+ }
+ // Marker indicating the end of the codegen.
+ codegen[outIndex] = badCode
+}
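+
+// Illustrative sketch (hypothetical helper): decoding the run-length form
+// produced above. Codes 0-15 are literal lengths; 16 repeats the previous
+// length 3-6 times, 17 emits 3-10 zeros, and 18 emits 11-138 zeros, with the
+// repeat count stored in the following byte as n-3 (or n-11 for code 18).
+// The input is assumed to exclude the trailing badCode marker.
+func exampleExpandCodegen(codegen []uint8) (lengths []uint8) {
+	for i := 0; i < len(codegen); i++ {
+		switch c := codegen[i]; {
+		case c < 16:
+			lengths = append(lengths, c)
+		case c == 16: // repeat previous length 3-6 times
+			i++
+			prev := lengths[len(lengths)-1]
+			for j := 0; j < int(codegen[i])+3; j++ {
+				lengths = append(lengths, prev)
+			}
+		case c == 17: // run of 3-10 zeros
+			i++
+			lengths = append(lengths, make([]uint8, int(codegen[i])+3)...)
+		default: // 18: run of 11-138 zeros
+			i++
+			lengths = append(lengths, make([]uint8, int(codegen[i])+11)...)
+		}
+	}
+	return lengths
+}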
+
+func (w *huffmanBitWriter) codegens() int {
+ numCodegens := len(w.codegenFreq)
+ for numCodegens > 4 && w.codegenFreq[codegenOrder[numCodegens-1]] == 0 {
+ numCodegens--
+ }
+ return numCodegens
+}
+
+func (w *huffmanBitWriter) headerSize() (size, numCodegens int) {
+ numCodegens = len(w.codegenFreq)
+ for numCodegens > 4 && w.codegenFreq[codegenOrder[numCodegens-1]] == 0 {
+ numCodegens--
+ }
+ return 3 + 5 + 5 + 4 + (3 * numCodegens) +
+ w.codegenEncoding.bitLength(w.codegenFreq[:]) +
+ int(w.codegenFreq[16])*2 +
+ int(w.codegenFreq[17])*3 +
+ int(w.codegenFreq[18])*7, numCodegens
+}
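+
+// The headerSize terms map directly onto the RFC 1951 dynamic block header:
+// 3 bits of block header (BFINAL+BTYPE), 5 bits HLIT, 5 bits HDIST, 4 bits
+// HCLEN, 3 bits per transmitted codegen code length, the Huffman-coded
+// codegen stream itself, and the extra repeat bits (2, 3 and 7) carried by
+// codes 16, 17 and 18.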
+
+// dynamicReuseSize returns the size of dynamically encoded data in bits, reusing the given encoders.
+func (w *huffmanBitWriter) dynamicReuseSize(litEnc, offEnc *huffmanEncoder) (size int) {
+ size = litEnc.bitLength(w.literalFreq[:]) +
+ offEnc.bitLength(w.offsetFreq[:])
+ return size
+}
+
+// dynamicSize returns the size of dynamically encoded data in bits.
+func (w *huffmanBitWriter) dynamicSize(litEnc, offEnc *huffmanEncoder, extraBits int) (size, numCodegens int) {
+ header, numCodegens := w.headerSize()
+ size = header +
+ litEnc.bitLength(w.literalFreq[:]) +
+ offEnc.bitLength(w.offsetFreq[:]) +
+ extraBits
+ return size, numCodegens
+}
+
+// extraBitSize will return the number of bits that will be written
+// as "extra" bits on matches.
+func (w *huffmanBitWriter) extraBitSize() int {
+ total := 0
+ for i, n := range w.literalFreq[257:literalCount] {
+ total += int(n) * int(lengthExtraBits[i&31])
+ }
+ for i, n := range w.offsetFreq[:offsetCodeCount] {
+ total += int(n) * int(offsetExtraBits[i&31])
+ }
+ return total
+}
+
+// fixedSize returns the size of fixed-Huffman encoded data in bits.
+func (w *huffmanBitWriter) fixedSize(extraBits int) int {
+ return 3 +
+ fixedLiteralEncoding.bitLength(w.literalFreq[:]) +
+ fixedOffsetEncoding.bitLength(w.offsetFreq[:]) +
+ extraBits
+}
+
+// storedSize calculates the stored size, including header.
+// The function returns the size in bits and whether the input
+// fits inside a single stored block.
+func (w *huffmanBitWriter) storedSize(in []byte) (int, bool) {
+ if in == nil {
+ return 0, false
+ }
+ if len(in) <= maxStoreBlockSize {
+ return (len(in) + 5) * 8, true
+ }
+ return 0, false
+}
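+
+// The 5-byte overhead in storedSize covers the worst case for a stored
+// block: up to one byte for the 3 header bits plus padding to a byte
+// boundary, then the 2-byte LEN and 2-byte NLEN fields from RFC 1951 3.2.4.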
+
+func (w *huffmanBitWriter) writeCode(c hcode) {
+ // The function does not get inlined if we "& 63" the shift.
+ w.bits |= c.code64() << (w.nbits & 63)
+ w.nbits += c.len()
+ if w.nbits >= 48 {
+ w.writeOutBits()
+ }
+}
+
+// writeOutBits will write bits to the buffer.
+func (w *huffmanBitWriter) writeOutBits() {
+ bits := w.bits
+ w.bits >>= 48
+ w.nbits -= 48
+ n := w.nbytes
+
+ // We over-write, but faster...
+ binary.LittleEndian.PutUint64(w.bytes[n:], bits)
+ n += 6
+
+ if n >= bufferFlushSize {
+ if w.err != nil {
+ n = 0
+ return
+ }
+ w.write(w.bytes[:n])
+ n = 0
+ }
+
+ w.nbytes = n
+}
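+
+// Editorial note: the PutUint64 above stores 8 bytes even though only 6 are
+// consumed per flush. This assumes the byte buffer keeps headroom past
+// bufferFlushSize, trading a small over-write for a branch-free store.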
+
+// Write the header of a dynamic Huffman block to the output stream.
+//
+// numLiterals The number of literals specified in codegen
+// numOffsets The number of offsets specified in codegen
+// numCodegens The number of codegens used in codegen
+func (w *huffmanBitWriter) writeDynamicHeader(numLiterals int, numOffsets int, numCodegens int, isEof bool) {
+ if w.err != nil {
+ return
+ }
+ var firstBits int32 = 4
+ if isEof {
+ firstBits = 5
+ }
+ w.writeBits(firstBits, 3)
+ w.writeBits(int32(numLiterals-257), 5)
+ w.writeBits(int32(numOffsets-1), 5)
+ w.writeBits(int32(numCodegens-4), 4)
+
+ for i := 0; i < numCodegens; i++ {
+ value := uint(w.codegenEncoding.codes[codegenOrder[i]].len())
+ w.writeBits(int32(value), 3)
+ }
+
+ i := 0
+ for {
+ var codeWord = uint32(w.codegen[i])
+ i++
+ if codeWord == badCode {
+ break
+ }
+ w.writeCode(w.codegenEncoding.codes[codeWord])
+
+ switch codeWord {
+ case 16:
+ w.writeBits(int32(w.codegen[i]), 2)
+ i++
+ case 17:
+ w.writeBits(int32(w.codegen[i]), 3)
+ i++
+ case 18:
+ w.writeBits(int32(w.codegen[i]), 7)
+ i++
+ }
+ }
+}
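+
+// For reference (editorial note): the header written above is the RFC 1951
+// section 3.2.7 layout. In order: 3 bits BFINAL+BTYPE (4 or 5 here, i.e.
+// BTYPE=10), 5 bits HLIT = numLiterals-257, 5 bits HDIST = numOffsets-1,
+// 4 bits HCLEN = numCodegens-4, then 3 bits per code-length code in
+// codegenOrder, then the codegen symbols with 2/3/7 extra bits following
+// repeat codes 16/17/18.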
+
+// writeStoredHeader will write a stored header.
+// If the stored block is only used for EOF,
+// it is replaced with a fixed huffman block.
+func (w *huffmanBitWriter) writeStoredHeader(length int, isEof bool) {
+ if w.err != nil {
+ return
+ }
+ if w.lastHeader > 0 {
+ // We owe an EOB
+ w.writeCode(w.literalEncoding.codes[endBlockMarker])
+ w.lastHeader = 0
+ }
+
+ // To write EOF, use a fixed encoding block. 10 bits instead of 5 bytes.
+ if length == 0 && isEof {
+ w.writeFixedHeader(isEof)
+ // EOB: 7 bits, value: 0
+ w.writeBits(0, 7)
+ w.flush()
+ return
+ }
+
+ var flag int32
+ if isEof {
+ flag = 1
+ }
+ w.writeBits(flag, 3)
+ w.flush()
+ w.writeBits(int32(length), 16)
+ w.writeBits(int32(^uint16(length)), 16)
+}
+
+func (w *huffmanBitWriter) writeFixedHeader(isEof bool) {
+ if w.err != nil {
+ return
+ }
+ if w.lastHeader > 0 {
+ // We owe an EOB
+ w.writeCode(w.literalEncoding.codes[endBlockMarker])
+ w.lastHeader = 0
+ }
+
+ // Indicate that we are a fixed Huffman block
+ var value int32 = 2
+ if isEof {
+ value = 3
+ }
+ w.writeBits(value, 3)
+}
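+
+// Editorial note: the 3-bit value above packs BFINAL in bit 0 and BTYPE in
+// bits 1-2, emitted LSB-first. 2 = 0b010 is a non-final fixed-Huffman block
+// (BTYPE=01) and 3 the final one; the same packing explains firstBits 4/5
+// (BTYPE=10) in writeDynamicHeader.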
+
+// writeBlock will write a block of tokens with the smallest encoding.
+// The original input can be supplied, and if the huffman encoded data
+// is larger than the original bytes, the data will be written as a
+// stored block.
+// If the input is nil, the tokens will always be Huffman encoded.
+func (w *huffmanBitWriter) writeBlock(tokens *tokens, eof bool, input []byte) {
+ if w.err != nil {
+ return
+ }
+
+ tokens.AddEOB()
+ if w.lastHeader > 0 {
+ // We owe an EOB
+ w.writeCode(w.literalEncoding.codes[endBlockMarker])
+ w.lastHeader = 0
+ }
+ numLiterals, numOffsets := w.indexTokens(tokens, false)
+ w.generate()
+ var extraBits int
+ storedSize, storable := w.storedSize(input)
+ if storable {
+ extraBits = w.extraBitSize()
+ }
+
+ // Figure out smallest code.
+ // Fixed Huffman baseline.
+ var literalEncoding = fixedLiteralEncoding
+ var offsetEncoding = fixedOffsetEncoding
+ var size = math.MaxInt32
+ if tokens.n < maxPredefinedTokens {
+ size = w.fixedSize(extraBits)
+ }
+
+ // Dynamic Huffman?
+ var numCodegens int
+
+ // Generate codegen and codegenFrequencies, which indicate how to encode
+ // the literalEncoding and the offsetEncoding.
+ w.generateCodegen(numLiterals, numOffsets, w.literalEncoding, w.offsetEncoding)
+ w.codegenEncoding.generate(w.codegenFreq[:], 7)
+ dynamicSize, numCodegens := w.dynamicSize(w.literalEncoding, w.offsetEncoding, extraBits)
+
+ if dynamicSize < size {
+ size = dynamicSize
+ literalEncoding = w.literalEncoding
+ offsetEncoding = w.offsetEncoding
+ }
+
+ // Stored bytes?
+ if storable && storedSize <= size {
+ w.writeStoredHeader(len(input), eof)
+ w.writeBytes(input)
+ return
+ }
+
+ // Huffman.
+ if literalEncoding == fixedLiteralEncoding {
+ w.writeFixedHeader(eof)
+ } else {
+ w.writeDynamicHeader(numLiterals, numOffsets, numCodegens, eof)
+ }
+
+ // Write the tokens.
+ w.writeTokens(tokens.Slice(), literalEncoding.codes, offsetEncoding.codes)
+}
+
+// writeBlockDynamic encodes a block using a dynamic Huffman table.
+// This should be used if the symbols used have a disproportionate
+// histogram distribution.
+// If input is supplied and the compression savings are below 1/16th of the
+// input size the block is stored.
+func (w *huffmanBitWriter) writeBlockDynamic(tokens *tokens, eof bool, input []byte, sync bool) {
+ if w.err != nil {
+ return
+ }
+
+ sync = sync || eof
+ if sync {
+ tokens.AddEOB()
+ }
+
+ // We cannot reuse a pure Huffman table, and must mark the block as EOF.
+ if (w.lastHuffMan || eof) && w.lastHeader > 0 {
+ // We will not try to reuse.
+ w.writeCode(w.literalEncoding.codes[endBlockMarker])
+ w.lastHeader = 0
+ w.lastHuffMan = false
+ }
+
+ // fillReuse enables filling of empty values.
+ // This will make encodings always reusable without testing.
+ // However, this does not appear to help in most cases.
+ const fillReuse = false
+
+ // Check if we can reuse...
+ if !fillReuse && w.lastHeader > 0 && !w.canReuse(tokens) {
+ w.writeCode(w.literalEncoding.codes[endBlockMarker])
+ w.lastHeader = 0
+ }
+
+ numLiterals, numOffsets := w.indexTokens(tokens, !sync)
+ extraBits := 0
+ ssize, storable := w.storedSize(input)
+
+ const usePrefs = true
+ if storable || w.lastHeader > 0 {
+ extraBits = w.extraBitSize()
+ }
+
+ var size int
+
+ // Check if we should reuse.
+ if w.lastHeader > 0 {
+ // Estimate size for using a new table.
+ // Use the previous header size as the best estimate.
+ newSize := w.lastHeader + tokens.EstimatedBits()
+ newSize += int(w.literalEncoding.codes[endBlockMarker].len()) + newSize>>w.logNewTablePenalty
+
+ // The estimated size is calculated as an optimal table.
+ // We add a penalty to make it more realistic and re-use a bit more.
+ reuseSize := w.dynamicReuseSize(w.literalEncoding, w.offsetEncoding) + extraBits
+
+ // Check if a new table is better.
+ if newSize < reuseSize {
+ // Write the EOB we owe.
+ w.writeCode(w.literalEncoding.codes[endBlockMarker])
+ size = newSize
+ w.lastHeader = 0
+ } else {
+ size = reuseSize
+ }
+
+ if tokens.n < maxPredefinedTokens {
+ if preSize := w.fixedSize(extraBits) + 7; usePrefs && preSize < size {
+ // Check if we get a reasonable size decrease.
+ if storable && ssize <= size {
+ w.writeStoredHeader(len(input), eof)
+ w.writeBytes(input)
+ return
+ }
+ w.writeFixedHeader(eof)
+ if !sync {
+ tokens.AddEOB()
+ }
+ w.writeTokens(tokens.Slice(), fixedLiteralEncoding.codes, fixedOffsetEncoding.codes)
+ return
+ }
+ }
+ // Check if we get a reasonable size decrease.
+ if storable && ssize <= size {
+ w.writeStoredHeader(len(input), eof)
+ w.writeBytes(input)
+ return
+ }
+ }
+
+ // We want a new block/table
+ if w.lastHeader == 0 {
+ if fillReuse && !sync {
+ w.fillTokens()
+ numLiterals, numOffsets = maxNumLit, maxNumDist
+ } else {
+ w.literalFreq[endBlockMarker] = 1
+ }
+
+ w.generate()
+ // Generate codegen and codegenFrequencies, which indicate how to encode
+ // the literalEncoding and the offsetEncoding.
+ w.generateCodegen(numLiterals, numOffsets, w.literalEncoding, w.offsetEncoding)
+ w.codegenEncoding.generate(w.codegenFreq[:], 7)
+
+ var numCodegens int
+ if fillReuse && !sync {
+ // Reindex for accurate size...
+ w.indexTokens(tokens, true)
+ }
+ size, numCodegens = w.dynamicSize(w.literalEncoding, w.offsetEncoding, extraBits)
+
+ // Store predefined, if we don't get a reasonable improvement.
+ if tokens.n < maxPredefinedTokens {
+ if preSize := w.fixedSize(extraBits); usePrefs && preSize <= size {
+ // Store bytes, if we don't get an improvement.
+ if storable && ssize <= preSize {
+ w.writeStoredHeader(len(input), eof)
+ w.writeBytes(input)
+ return
+ }
+ w.writeFixedHeader(eof)
+ if !sync {
+ tokens.AddEOB()
+ }
+ w.writeTokens(tokens.Slice(), fixedLiteralEncoding.codes, fixedOffsetEncoding.codes)
+ return
+ }
+ }
+
+ if storable && ssize <= size {
+ // Store bytes, if we don't get an improvement.
+ w.writeStoredHeader(len(input), eof)
+ w.writeBytes(input)
+ return
+ }
+
+ // Write Huffman table.
+ w.writeDynamicHeader(numLiterals, numOffsets, numCodegens, eof)
+ if !sync {
+ w.lastHeader, _ = w.headerSize()
+ }
+ w.lastHuffMan = false
+ }
+
+ if sync {
+ w.lastHeader = 0
+ }
+ // Write the tokens.
+ w.writeTokens(tokens.Slice(), w.literalEncoding.codes, w.offsetEncoding.codes)
+}
+
+func (w *huffmanBitWriter) fillTokens() {
+ for i, v := range w.literalFreq[:literalCount] {
+ if v == 0 {
+ w.literalFreq[i] = 1
+ }
+ }
+ for i, v := range w.offsetFreq[:offsetCodeCount] {
+ if v == 0 {
+ w.offsetFreq[i] = 1
+ }
+ }
+}
+
+// indexTokens indexes a slice of tokens and updates
+// literalFreq and offsetFreq.
+// The number of literal and offset tokens is returned.
+func (w *huffmanBitWriter) indexTokens(t *tokens, filled bool) (numLiterals, numOffsets int) {
+ //copy(w.literalFreq[:], t.litHist[:])
+ *(*[256]uint16)(w.literalFreq[:]) = t.litHist
+ //copy(w.literalFreq[256:], t.extraHist[:])
+ *(*[32]uint16)(w.literalFreq[256:]) = t.extraHist
+ w.offsetFreq = t.offHist
+
+ if t.n == 0 {
+ return
+ }
+ if filled {
+ return maxNumLit, maxNumDist
+ }
+ // get the number of literals
+ numLiterals = len(w.literalFreq)
+ for w.literalFreq[numLiterals-1] == 0 {
+ numLiterals--
+ }
+ // get the number of offsets
+ numOffsets = len(w.offsetFreq)
+ for numOffsets > 0 && w.offsetFreq[numOffsets-1] == 0 {
+ numOffsets--
+ }
+ if numOffsets == 0 {
+ // We haven't found a single match. If we want to go with the dynamic encoding,
+ // we should count at least one offset to be sure that the offset huffman tree could be encoded.
+ w.offsetFreq[0] = 1
+ numOffsets = 1
+ }
+ return
+}
+
+func (w *huffmanBitWriter) generate() {
+ w.literalEncoding.generate(w.literalFreq[:literalCount], 15)
+ w.offsetEncoding.generate(w.offsetFreq[:offsetCodeCount], 15)
+}
+
+// writeTokens writes a slice of tokens to the output.
+// codes for literal and offset encoding must be supplied.
+func (w *huffmanBitWriter) writeTokens(tokens []token, leCodes, oeCodes []hcode) {
+ if w.err != nil {
+ return
+ }
+ if len(tokens) == 0 {
+ return
+ }
+
+ // Only last token should be endBlockMarker.
+ var deferEOB bool
+ if tokens[len(tokens)-1] == endBlockMarker {
+ tokens = tokens[:len(tokens)-1]
+ deferEOB = true
+ }
+
+ // Create slices up to the next power of two to avoid bounds checks.
+ lits := leCodes[:256]
+ offs := oeCodes[:32]
+ lengths := leCodes[lengthCodesStart:]
+ lengths = lengths[:32]
+
+ // Go 1.16 LOVES having these on stack.
+ bits, nbits, nbytes := w.bits, w.nbits, w.nbytes
+
+ for _, t := range tokens {
+ if t < 256 {
+ //w.writeCode(lits[t.literal()])
+ c := lits[t]
+ bits |= c.code64() << (nbits & 63)
+ nbits += c.len()
+ if nbits >= 48 {
+ binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits)
+ //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits
+ bits >>= 48
+ nbits -= 48
+ nbytes += 6
+ if nbytes >= bufferFlushSize {
+ if w.err != nil {
+ nbytes = 0
+ return
+ }
+ _, w.err = w.writer.Write(w.bytes[:nbytes])
+ nbytes = 0
+ }
+ }
+ continue
+ }
+
+ // Write the length
+ length := t.length()
+ lengthCode := lengthCode(length) & 31
+ if false {
+ w.writeCode(lengths[lengthCode])
+ } else {
+ // inlined
+ c := lengths[lengthCode]
+ bits |= c.code64() << (nbits & 63)
+ nbits += c.len()
+ if nbits >= 48 {
+ binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits)
+ //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits
+ bits >>= 48
+ nbits -= 48
+ nbytes += 6
+ if nbytes >= bufferFlushSize {
+ if w.err != nil {
+ nbytes = 0
+ return
+ }
+ _, w.err = w.writer.Write(w.bytes[:nbytes])
+ nbytes = 0
+ }
+ }
+ }
+
+ if lengthCode >= lengthExtraBitsMinCode {
+ extraLengthBits := lengthExtraBits[lengthCode]
+ //w.writeBits(extraLength, extraLengthBits)
+ extraLength := int32(length - lengthBase[lengthCode])
+ bits |= uint64(extraLength) << (nbits & 63)
+ nbits += extraLengthBits
+ if nbits >= 48 {
+ binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits)
+ //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits
+ bits >>= 48
+ nbits -= 48
+ nbytes += 6
+ if nbytes >= bufferFlushSize {
+ if w.err != nil {
+ nbytes = 0
+ return
+ }
+ _, w.err = w.writer.Write(w.bytes[:nbytes])
+ nbytes = 0
+ }
+ }
+ }
+ // Write the offset
+ offset := t.offset()
+ offsetCode := (offset >> 16) & 31
+ if false {
+ w.writeCode(offs[offsetCode])
+ } else {
+ // inlined
+ c := offs[offsetCode]
+ bits |= c.code64() << (nbits & 63)
+ nbits += c.len()
+ if nbits >= 48 {
+ binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits)
+ //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits
+ bits >>= 48
+ nbits -= 48
+ nbytes += 6
+ if nbytes >= bufferFlushSize {
+ if w.err != nil {
+ nbytes = 0
+ return
+ }
+ _, w.err = w.writer.Write(w.bytes[:nbytes])
+ nbytes = 0
+ }
+ }
+ }
+
+ if offsetCode >= offsetExtraBitsMinCode {
+ offsetComb := offsetCombined[offsetCode]
+ //w.writeBits(extraOffset, extraOffsetBits)
+ bits |= uint64((offset-(offsetComb>>8))&matchOffsetOnlyMask) << (nbits & 63)
+ nbits += uint8(offsetComb)
+ if nbits >= 48 {
+ binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits)
+ //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits
+ bits >>= 48
+ nbits -= 48
+ nbytes += 6
+ if nbytes >= bufferFlushSize {
+ if w.err != nil {
+ nbytes = 0
+ return
+ }
+ _, w.err = w.writer.Write(w.bytes[:nbytes])
+ nbytes = 0
+ }
+ }
+ }
+ }
+ // Restore...
+ w.bits, w.nbits, w.nbytes = bits, nbits, nbytes
+
+ if deferEOB {
+ w.writeCode(leCodes[endBlockMarker])
+ }
+}
+
+// huffOffset is a static offset encoder used for huffman only encoding.
+// It can be reused since we will not be encoding offset values.
+var huffOffset *huffmanEncoder
+
+func init() {
+ w := newHuffmanBitWriter(nil)
+ w.offsetFreq[0] = 1
+ huffOffset = newHuffmanEncoder(offsetCodeCount)
+ huffOffset.generate(w.offsetFreq[:offsetCodeCount], 15)
+}
+
+// writeBlockHuff encodes a block of bytes as either
+// Huffman encoded literals or uncompressed bytes if the
+// results only gains very little from compression.
+func (w *huffmanBitWriter) writeBlockHuff(eof bool, input []byte, sync bool) {
+ if w.err != nil {
+ return
+ }
+
+ // Clear histogram
+ for i := range w.literalFreq[:] {
+ w.literalFreq[i] = 0
+ }
+ if !w.lastHuffMan {
+ for i := range w.offsetFreq[:] {
+ w.offsetFreq[i] = 0
+ }
+ }
+
+ const numLiterals = endBlockMarker + 1
+ const numOffsets = 1
+
+ // Add everything as literals
+ // We have to estimate the header size.
+ // Assume header is around 70 bytes:
+ // https://stackoverflow.com/a/25454430
+ const guessHeaderSizeBits = 70 * 8
+ histogram(input, w.literalFreq[:numLiterals])
+ ssize, storable := w.storedSize(input)
+ if storable && len(input) > 1024 {
+ // Quick check for incompressible content.
+ abs := float64(0)
+ avg := float64(len(input)) / 256
+ max := float64(len(input) * 2)
+ for _, v := range w.literalFreq[:256] {
+ diff := float64(v) - avg
+ abs += diff * diff
+ if abs > max {
+ break
+ }
+ }
+ if abs < max {
+ if debugDeflate {
+ fmt.Println("stored", abs, "<", max)
+ }
+ // No chance we can compress this...
+ w.writeStoredHeader(len(input), eof)
+ w.writeBytes(input)
+ return
+ }
+ }
+ w.literalFreq[endBlockMarker] = 1
+ w.tmpLitEncoding.generate(w.literalFreq[:numLiterals], 15)
+ estBits := w.tmpLitEncoding.canReuseBits(w.literalFreq[:numLiterals])
+ if estBits < math.MaxInt32 {
+ estBits += w.lastHeader
+ if w.lastHeader == 0 {
+ estBits += guessHeaderSizeBits
+ }
+ estBits += estBits >> w.logNewTablePenalty
+ }
+
+ // Store bytes, if we don't get a reasonable improvement.
+ if storable && ssize <= estBits {
+ if debugDeflate {
+ fmt.Println("stored,", ssize, "<=", estBits)
+ }
+ w.writeStoredHeader(len(input), eof)
+ w.writeBytes(input)
+ return
+ }
+
+ if w.lastHeader > 0 {
+ reuseSize := w.literalEncoding.canReuseBits(w.literalFreq[:256])
+
+ if estBits < reuseSize {
+ if debugDeflate {
+ fmt.Println("NOT reusing, reuse:", reuseSize/8, "> new:", estBits/8, "header est:", w.lastHeader/8, "bytes")
+ }
+ // We owe an EOB
+ w.writeCode(w.literalEncoding.codes[endBlockMarker])
+ w.lastHeader = 0
+ } else if debugDeflate {
+ fmt.Println("reusing, reuse:", reuseSize/8, "> new:", estBits/8, "- header est:", w.lastHeader/8)
+ }
+ }
+
+ count := 0
+ if w.lastHeader == 0 {
+ // Use the temp encoding, so swap.
+ w.literalEncoding, w.tmpLitEncoding = w.tmpLitEncoding, w.literalEncoding
+ // Generate codegen and codegenFrequencies, which indicate how to encode
+ // the literalEncoding and the offsetEncoding.
+ w.generateCodegen(numLiterals, numOffsets, w.literalEncoding, huffOffset)
+ w.codegenEncoding.generate(w.codegenFreq[:], 7)
+ numCodegens := w.codegens()
+
+ // Huffman.
+ w.writeDynamicHeader(numLiterals, numOffsets, numCodegens, eof)
+ w.lastHuffMan = true
+ w.lastHeader, _ = w.headerSize()
+ if debugDeflate {
+ count += w.lastHeader
+ fmt.Println("header:", count/8)
+ }
+ }
+
+ encoding := w.literalEncoding.codes[:256]
+ // Go 1.16 LOVES having these on stack. At least 1.5x the speed.
+ bits, nbits, nbytes := w.bits, w.nbits, w.nbytes
+
+ if debugDeflate {
+ count -= int(nbytes)*8 + int(nbits)
+ }
+ // Unroll, write 3 codes/loop.
+ // Fastest number of unrolls.
+ for len(input) > 3 {
+ // We must have at least 48 bits free.
+ if nbits >= 8 {
+ n := nbits >> 3
+ binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits)
+ bits >>= (n * 8) & 63
+ nbits -= n * 8
+ nbytes += n
+ }
+ if nbytes >= bufferFlushSize {
+ if w.err != nil {
+ nbytes = 0
+ return
+ }
+ if debugDeflate {
+ count += int(nbytes) * 8
+ }
+ _, w.err = w.writer.Write(w.bytes[:nbytes])
+ nbytes = 0
+ }
+ a, b := encoding[input[0]], encoding[input[1]]
+ bits |= a.code64() << (nbits & 63)
+ bits |= b.code64() << ((nbits + a.len()) & 63)
+ c := encoding[input[2]]
+ nbits += b.len() + a.len()
+ bits |= c.code64() << (nbits & 63)
+ nbits += c.len()
+ input = input[3:]
+ }
+
+ // Remaining...
+ for _, t := range input {
+ if nbits >= 48 {
+ binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits)
+ //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits
+ bits >>= 48
+ nbits -= 48
+ nbytes += 6
+ if nbytes >= bufferFlushSize {
+ if w.err != nil {
+ nbytes = 0
+ return
+ }
+ if debugDeflate {
+ count += int(nbytes) * 8
+ }
+ _, w.err = w.writer.Write(w.bytes[:nbytes])
+ nbytes = 0
+ }
+ }
+ // Bitwriting inlined, ~30% speedup
+ c := encoding[t]
+ bits |= c.code64() << (nbits & 63)
+
+ nbits += c.len()
+ if debugDeflate {
+ count += int(c.len())
+ }
+ }
+ // Restore...
+ w.bits, w.nbits, w.nbytes = bits, nbits, nbytes
+
+ if debugDeflate {
+ nb := count + int(nbytes)*8 + int(nbits)
+ fmt.Println("wrote", nb, "bits,", nb/8, "bytes.")
+ }
+ // Flush if needed to have space.
+ if w.nbits >= 48 {
+ w.writeOutBits()
+ }
+
+ if eof || sync {
+ w.writeCode(w.literalEncoding.codes[endBlockMarker])
+ w.lastHeader = 0
+ w.lastHuffMan = false
+ }
+}
diff --git a/vendor/github.com/klauspost/compress/flate/huffman_code.go b/vendor/github.com/klauspost/compress/flate/huffman_code.go
new file mode 100644
index 0000000..be7b58b
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/huffman_code.go
@@ -0,0 +1,417 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flate
+
+import (
+ "math"
+ "math/bits"
+)
+
+const (
+ maxBitsLimit = 16
+ // number of valid literals
+ literalCount = 286
+)
+
+// hcode is a huffman code with a bit code and bit length.
+type hcode uint32
+
+func (h hcode) len() uint8 {
+ return uint8(h)
+}
+
+func (h hcode) code64() uint64 {
+ return uint64(h >> 8)
+}
+
+func (h hcode) zero() bool {
+ return h == 0
+}
+
+type huffmanEncoder struct {
+ codes []hcode
+ bitCount [17]int32
+
+ // Allocate a reusable buffer with the longest possible frequency table.
+ // Possible lengths are codegenCodeCount, offsetCodeCount and literalCount.
+ // The largest of these is literalCount, so we allocate for that case.
+ freqcache [literalCount + 1]literalNode
+}
+
+type literalNode struct {
+ literal uint16
+ freq uint16
+}
+
+// A levelInfo describes the state of the constructed tree for a given depth.
+type levelInfo struct {
+ // Our level. for better printing
+ level int32
+
+ // The frequency of the last node at this level
+ lastFreq int32
+
+ // The frequency of the next character to add to this level
+ nextCharFreq int32
+
+ // The frequency of the next pair (from level below) to add to this level.
+ // Only valid if the "needed" value of the next lower level is 0.
+ nextPairFreq int32
+
+ // The number of chains remaining to generate for this level before moving
+ // up to the next level
+ needed int32
+}
+
+// set sets the code and length of an hcode.
+func (h *hcode) set(code uint16, length uint8) {
+ *h = hcode(length) | (hcode(code) << 8)
+}
+
+func newhcode(code uint16, length uint8) hcode {
+ return hcode(length) | (hcode(code) << 8)
+}
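+
+// exampleHcode is an editorial sketch (not part of the upstream source)
+// showing the packing: the low 8 bits of an hcode hold the bit length and
+// the remaining bits hold the code itself.
+func exampleHcode() (code uint64, length uint8) {
+ h := newhcode(0b101, 3)
+ return h.code64(), h.len() // returns 5, 3
+}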
+
+func reverseBits(number uint16, bitLength byte) uint16 {
+ return bits.Reverse16(number << ((16 - bitLength) & 15))
+}
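+
+// For example, reverseBits(0b011, 3) == 0b110. DEFLATE transmits Huffman
+// codes most-significant-bit first while this package emits bits LSB-first,
+// so each code is bit-reversed once when the table is built.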
+
+func maxNode() literalNode { return literalNode{math.MaxUint16, math.MaxUint16} }
+
+func newHuffmanEncoder(size int) *huffmanEncoder {
+ // Make capacity to next power of two.
+ c := uint(bits.Len32(uint32(size - 1)))
+ return &huffmanEncoder{codes: make([]hcode, size, 1<<c)}
+}
+
+// Generates a HuffmanCode corresponding to the fixed literal table.
+func generateFixedLiteralEncoding() *huffmanEncoder {
+ h := newHuffmanEncoder(literalCount)
+ codes := h.codes
+ var ch uint16
+ for ch = 0; ch < literalCount; ch++ {
+ var bits uint16
+ var size uint8
+ switch {
+ case ch < 144:
+ // size 8, 000110000 .. 10111111
+ bits = ch + 48
+ size = 8
+ case ch < 256:
+ // size 9, 110010000 .. 111111111
+ bits = ch + 400 - 144
+ size = 9
+ case ch < 280:
+ // size 7, 0000000 .. 0010111
+ bits = ch - 256
+ size = 7
+ default:
+ // size 8, 11000000 .. 11000111
+ bits = ch + 192 - 280
+ size = 8
+ }
+ codes[ch] = newhcode(reverseBits(bits, size), size)
+ }
+ return h
+}
+
+func generateFixedOffsetEncoding() *huffmanEncoder {
+ h := newHuffmanEncoder(30)
+ codes := h.codes
+ for ch := range codes {
+ codes[ch] = newhcode(reverseBits(uint16(ch), 5), 5)
+ }
+ return h
+}
+
+var fixedLiteralEncoding = generateFixedLiteralEncoding()
+var fixedOffsetEncoding = generateFixedOffsetEncoding()
+
+func (h *huffmanEncoder) bitLength(freq []uint16) int {
+ var total int
+ for i, f := range freq {
+ if f != 0 {
+ total += int(f) * int(h.codes[i].len())
+ }
+ }
+ return total
+}
+
+func (h *huffmanEncoder) canReuseBits(freq []uint16) int {
+ var total int
+ for i, f := range freq {
+ if f != 0 {
+ code := h.codes[i]
+ if code.zero() {
+ return math.MaxInt32
+ }
+ total += int(f) * int(code.len())
+ }
+ }
+ return total
+}
+
+// bitCounts computes the number of literals assigned to each bit size in the
+// Huffman encoding. It is only called when the list of literals has length >= 3.
+// The cases of 0, 1, and 2 literals are handled by special case code.
+//
+// list An array of the literals with non-zero frequencies
+//
+// and their associated frequencies. The array is in order of increasing
+// frequency, and has as its last element a special element with frequency
+// MaxInt32
+//
+// maxBits The maximum number of bits that should be used to encode any literal.
+//
+// Must be less than 16.
+//
+// return An integer array in which array[i] indicates the number of literals
+//
+// that should be encoded in i bits.
+func (h *huffmanEncoder) bitCounts(list []literalNode, maxBits int32) []int32 {
+ if maxBits >= maxBitsLimit {
+ panic("flate: maxBits too large")
+ }
+ n := int32(len(list))
+ list = list[0 : n+1]
+ list[n] = maxNode()
+
+ // The tree can't have greater depth than n - 1, no matter what. This
+ // saves a little bit of work in some small cases
+ if maxBits > n-1 {
+ maxBits = n - 1
+ }
+
+ // Create information about each of the levels.
+ // A bogus "Level 0" whose sole purpose is so that
+ // level1.prev.needed==0. This makes level1.nextPairFreq
+ // be a legitimate value that never gets chosen.
+ var levels [maxBitsLimit]levelInfo
+ // leafCounts[i] counts the number of literals at the left
+ // of ancestors of the rightmost node at level i.
+ // leafCounts[i][j] is the number of literals at the left
+ // of the level j ancestor.
+ var leafCounts [maxBitsLimit][maxBitsLimit]int32
+
+ // Descending to only have 1 bounds check.
+ l2f := int32(list[2].freq)
+ l1f := int32(list[1].freq)
+ l0f := int32(list[0].freq) + int32(list[1].freq)
+
+ for level := int32(1); level <= maxBits; level++ {
+ // For every level, the first two items are the first two characters.
+ // We initialize the levels as if we had already figured this out.
+ levels[level] = levelInfo{
+ level: level,
+ lastFreq: l1f,
+ nextCharFreq: l2f,
+ nextPairFreq: l0f,
+ }
+ leafCounts[level][level] = 2
+ if level == 1 {
+ levels[level].nextPairFreq = math.MaxInt32
+ }
+ }
+
+ // We need a total of 2*n - 2 items at top level and have already generated 2.
+ levels[maxBits].needed = 2*n - 4
+
+ level := uint32(maxBits)
+ for level < 16 {
+ l := &levels[level]
+ if l.nextPairFreq == math.MaxInt32 && l.nextCharFreq == math.MaxInt32 {
+ // We've run out of both leafs and pairs.
+ // End all calculations for this level.
+ // To make sure we never come back to this level or any lower level,
+ // set nextPairFreq impossibly large.
+ l.needed = 0
+ levels[level+1].nextPairFreq = math.MaxInt32
+ level++
+ continue
+ }
+
+ prevFreq := l.lastFreq
+ if l.nextCharFreq < l.nextPairFreq {
+ // The next item on this row is a leaf node.
+ n := leafCounts[level][level] + 1
+ l.lastFreq = l.nextCharFreq
+ // Lower leafCounts are the same as for the previous node.
+ leafCounts[level][level] = n
+ e := list[n]
+ if e.literal < math.MaxUint16 {
+ l.nextCharFreq = int32(e.freq)
+ } else {
+ l.nextCharFreq = math.MaxInt32
+ }
+ } else {
+ // The next item on this row is a pair from the previous row.
+ // nextPairFreq isn't valid until we generate two
+ // more values in the level below
+ l.lastFreq = l.nextPairFreq
+ // Take leaf counts from the lower level, except counts[level] remains the same.
+ if true {
+ save := leafCounts[level][level]
+ leafCounts[level] = leafCounts[level-1]
+ leafCounts[level][level] = save
+ } else {
+ copy(leafCounts[level][:level], leafCounts[level-1][:level])
+ }
+ levels[l.level-1].needed = 2
+ }
+
+ if l.needed--; l.needed == 0 {
+ // We've done everything we need to do for this level.
+ // Continue calculating one level up. Fill in nextPairFreq
+ // of that level with the sum of the two nodes we've just calculated on
+ // this level.
+ if l.level == maxBits {
+ // All done!
+ break
+ }
+ levels[l.level+1].nextPairFreq = prevFreq + l.lastFreq
+ level++
+ } else {
+ // If we stole from below, move down temporarily to replenish it.
+ for levels[level-1].needed > 0 {
+ level--
+ }
+ }
+ }
+
+ // Something is wrong if, at the end, the top level is null or hasn't used
+ // all of the leaves.
+ if leafCounts[maxBits][maxBits] != n {
+ panic("leafCounts[maxBits][maxBits] != n")
+ }
+
+ bitCount := h.bitCount[:maxBits+1]
+ bits := 1
+ counts := &leafCounts[maxBits]
+ for level := maxBits; level > 0; level-- {
+ // chain.leafCount gives the number of literals requiring at least "bits"
+ // bits to encode.
+ bitCount[bits] = counts[level] - counts[level-1]
+ bits++
+ }
+ return bitCount
+}
+
+// Look at the leaves and assign them a bit count and an encoding as specified
+// in RFC 1951 3.2.2
+func (h *huffmanEncoder) assignEncodingAndSize(bitCount []int32, list []literalNode) {
+ code := uint16(0)
+ for n, bits := range bitCount {
+ code <<= 1
+ if n == 0 || bits == 0 {
+ continue
+ }
+ // The literals list[len(list)-bits] .. list[len(list)-1]
+ // are encoded using "bits" bits, and get the values
+ // code, code + 1, .... The code values are
+ // assigned in literal order (not frequency order).
+ chunk := list[len(list)-int(bits):]
+
+ sortByLiteral(chunk)
+ for _, node := range chunk {
+ h.codes[node.literal] = newhcode(reverseBits(code, uint8(n)), uint8(n))
+ code++
+ }
+ list = list[0 : len(list)-int(bits)]
+ }
+}
+
+// Update this Huffman Code object to be the minimum code for the specified frequency count.
+//
+// freq An array of frequencies, in which frequency[i] gives the frequency of literal i.
+// maxBits The maximum number of bits to use for any literal.
+func (h *huffmanEncoder) generate(freq []uint16, maxBits int32) {
+ list := h.freqcache[:len(freq)+1]
+ codes := h.codes[:len(freq)]
+ // Number of non-zero literals
+ count := 0
+ // Set list to be the set of all non-zero literals and their frequencies
+ for i, f := range freq {
+ if f != 0 {
+ list[count] = literalNode{uint16(i), f}
+ count++
+ } else {
+ codes[i] = 0
+ }
+ }
+ list[count] = literalNode{}
+
+ list = list[:count]
+ if count <= 2 {
+ // Handle the small cases here, because they are awkward for the general case code. With
+ // two or fewer literals, everything has bit length 1.
+ for i, node := range list {
+ // "list" is in order of increasing literal value.
+ h.codes[node.literal].set(uint16(i), 1)
+ }
+ return
+ }
+ sortByFreq(list)
+
+ // Get the number of literals for each bit count
+ bitCount := h.bitCounts(list, maxBits)
+ // And do the assignment
+ h.assignEncodingAndSize(bitCount, list)
+}
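+
+// exampleGenerate is an editorial sketch (not part of the upstream source)
+// of typical use: build an encoder over a small hypothetical frequency
+// table and read back the assigned code lengths.
+func exampleGenerate() []uint8 {
+ freq := []uint16{45, 13, 12, 16, 9, 5}
+ enc := newHuffmanEncoder(len(freq))
+ enc.generate(freq, 15)
+ lens := make([]uint8, len(freq))
+ for i := range lens {
+ lens[i] = enc.codes[i].len()
+ }
+ return lens
+}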
+
+// atLeastOne clamps the result between 1 and 15.
+func atLeastOne(v float32) float32 {
+ if v < 1 {
+ return 1
+ }
+ if v > 15 {
+ return 15
+ }
+ return v
+}
+
+func histogram(b []byte, h []uint16) {
+ if true && len(b) >= 8<<10 {
+ // Split for bigger inputs
+ histogramSplit(b, h)
+ } else {
+ h = h[:256]
+ for _, t := range b {
+ h[t]++
+ }
+ }
+}
+
+func histogramSplit(b []byte, h []uint16) {
+ // Tested, and slightly faster than 2-way.
+ // Writing to separate arrays and combining is also slightly slower.
+ h = h[:256]
+ for len(b)&3 != 0 {
+ h[b[0]]++
+ b = b[1:]
+ }
+ n := len(b) / 4
+ x, y, z, w := b[:n], b[n:], b[n+n:], b[n+n+n:]
+ y, z, w = y[:len(x)], z[:len(x)], w[:len(x)]
+ for i, t := range x {
+ v0 := &h[t]
+ v1 := &h[y[i]]
+ v3 := &h[w[i]]
+ v2 := &h[z[i]]
+ *v0++
+ *v1++
+ *v2++
+ *v3++
+ }
+}
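+
+// exampleHistogram is an editorial sketch (not part of the upstream
+// source); the histogram slice must have at least 256 entries.
+func exampleHistogram() uint16 {
+ var h [256]uint16
+ histogram([]byte("abracadabra"), h[:])
+ return h['a'] // 5
+}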
diff --git a/vendor/github.com/klauspost/compress/flate/huffman_sortByFreq.go b/vendor/github.com/klauspost/compress/flate/huffman_sortByFreq.go
new file mode 100644
index 0000000..6c05ba8
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/huffman_sortByFreq.go
@@ -0,0 +1,159 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flate
+
+// sortByFreq sorts data by frequency, breaking ties by literal value,
+// in O(n*log(n)) time. The sort is not guaranteed to be stable.
+func sortByFreq(data []literalNode) {
+ n := len(data)
+ quickSortByFreq(data, 0, n, maxDepth(n))
+}
+
+func quickSortByFreq(data []literalNode, a, b, maxDepth int) {
+ for b-a > 12 { // Use ShellSort for slices <= 12 elements
+ if maxDepth == 0 {
+ heapSort(data, a, b)
+ return
+ }
+ maxDepth--
+ mlo, mhi := doPivotByFreq(data, a, b)
+ // Avoiding recursion on the larger subproblem guarantees
+ // a stack depth of at most lg(b-a).
+ if mlo-a < b-mhi {
+ quickSortByFreq(data, a, mlo, maxDepth)
+ a = mhi // i.e., quickSortByFreq(data, mhi, b)
+ } else {
+ quickSortByFreq(data, mhi, b, maxDepth)
+ b = mlo // i.e., quickSortByFreq(data, a, mlo)
+ }
+ }
+ if b-a > 1 {
+ // Do ShellSort pass with gap 6
+ // It can be written in this simplified form because b-a <= 12
+ for i := a + 6; i < b; i++ {
+ if data[i].freq == data[i-6].freq && data[i].literal < data[i-6].literal || data[i].freq < data[i-6].freq {
+ data[i], data[i-6] = data[i-6], data[i]
+ }
+ }
+ insertionSortByFreq(data, a, b)
+ }
+}
+
+func doPivotByFreq(data []literalNode, lo, hi int) (midlo, midhi int) {
+ m := int(uint(lo+hi) >> 1) // Written like this to avoid integer overflow.
+ if hi-lo > 40 {
+ // Tukey's ``Ninther,'' median of three medians of three.
+ s := (hi - lo) / 8
+ medianOfThreeSortByFreq(data, lo, lo+s, lo+2*s)
+ medianOfThreeSortByFreq(data, m, m-s, m+s)
+ medianOfThreeSortByFreq(data, hi-1, hi-1-s, hi-1-2*s)
+ }
+ medianOfThreeSortByFreq(data, lo, m, hi-1)
+
+ // Invariants are:
+ // data[lo] = pivot (set up by ChoosePivot)
+ // data[lo < i < a] < pivot
+ // data[a <= i < b] <= pivot
+ // data[b <= i < c] unexamined
+ // data[c <= i < hi-1] > pivot
+ // data[hi-1] >= pivot
+ pivot := lo
+ a, c := lo+1, hi-1
+
+ for ; a < c && (data[a].freq == data[pivot].freq && data[a].literal < data[pivot].literal || data[a].freq < data[pivot].freq); a++ {
+ }
+ b := a
+ for {
+ for ; b < c && (data[pivot].freq == data[b].freq && data[pivot].literal > data[b].literal || data[pivot].freq > data[b].freq); b++ { // data[b] <= pivot
+ }
+ for ; b < c && (data[pivot].freq == data[c-1].freq && data[pivot].literal < data[c-1].literal || data[pivot].freq < data[c-1].freq); c-- { // data[c-1] > pivot
+ }
+ if b >= c {
+ break
+ }
+ // data[b] > pivot; data[c-1] <= pivot
+ data[b], data[c-1] = data[c-1], data[b]
+ b++
+ c--
+ }
+ // If hi-c<3 then there are duplicates (by property of median of nine).
+ // Let's be a bit more conservative, and set border to 5.
+ protect := hi-c < 5
+ if !protect && hi-c < (hi-lo)/4 {
+ // Let's test some points for equality to pivot
+ dups := 0
+ if data[pivot].freq == data[hi-1].freq && data[pivot].literal > data[hi-1].literal || data[pivot].freq > data[hi-1].freq { // data[hi-1] = pivot
+ data[c], data[hi-1] = data[hi-1], data[c]
+ c++
+ dups++
+ }
+ if data[b-1].freq == data[pivot].freq && data[b-1].literal > data[pivot].literal || data[b-1].freq > data[pivot].freq { // data[b-1] = pivot
+ b--
+ dups++
+ }
+ // m-lo = (hi-lo)/2 > 6
+ // b-lo > (hi-lo)*3/4-1 > 8
+ // ==> m < b ==> data[m] <= pivot
+ if data[m].freq == data[pivot].freq && data[m].literal > data[pivot].literal || data[m].freq > data[pivot].freq { // data[m] = pivot
+ data[m], data[b-1] = data[b-1], data[m]
+ b--
+ dups++
+ }
+ // if at least 2 points are equal to pivot, assume skewed distribution
+ protect = dups > 1
+ }
+ if protect {
+ // Protect against a lot of duplicates
+ // Add invariant:
+ // data[a <= i < b] unexamined
+ // data[b <= i < c] = pivot
+ for {
+ for ; a < b && (data[b-1].freq == data[pivot].freq && data[b-1].literal > data[pivot].literal || data[b-1].freq > data[pivot].freq); b-- { // data[b] == pivot
+ }
+ for ; a < b && (data[a].freq == data[pivot].freq && data[a].literal < data[pivot].literal || data[a].freq < data[pivot].freq); a++ { // data[a] < pivot
+ }
+ if a >= b {
+ break
+ }
+ // data[a] == pivot; data[b-1] < pivot
+ data[a], data[b-1] = data[b-1], data[a]
+ a++
+ b--
+ }
+ }
+ // Swap pivot into middle
+ data[pivot], data[b-1] = data[b-1], data[pivot]
+ return b - 1, c
+}
+
+// Insertion sort
+func insertionSortByFreq(data []literalNode, a, b int) {
+ for i := a + 1; i < b; i++ {
+ for j := i; j > a && (data[j].freq == data[j-1].freq && data[j].literal < data[j-1].literal || data[j].freq < data[j-1].freq); j-- {
+ data[j], data[j-1] = data[j-1], data[j]
+ }
+ }
+}
+
+// quickSortByFreq, loosely following Bentley and McIlroy,
+// ``Engineering a Sort Function,'' SP&E November 1993.
+
+// medianOfThreeSortByFreq moves the median of the three values data[m0], data[m1], data[m2] into data[m1].
+func medianOfThreeSortByFreq(data []literalNode, m1, m0, m2 int) {
+ // sort 3 elements
+ if data[m1].freq == data[m0].freq && data[m1].literal < data[m0].literal || data[m1].freq < data[m0].freq {
+ data[m1], data[m0] = data[m0], data[m1]
+ }
+ // data[m0] <= data[m1]
+ if data[m2].freq == data[m1].freq && data[m2].literal < data[m1].literal || data[m2].freq < data[m1].freq {
+ data[m2], data[m1] = data[m1], data[m2]
+ // data[m0] <= data[m2] && data[m1] < data[m2]
+ if data[m1].freq == data[m0].freq && data[m1].literal < data[m0].literal || data[m1].freq < data[m0].freq {
+ data[m1], data[m0] = data[m0], data[m1]
+ }
+ }
+ // now data[m0] <= data[m1] <= data[m2]
+}
diff --git a/vendor/github.com/klauspost/compress/flate/huffman_sortByLiteral.go b/vendor/github.com/klauspost/compress/flate/huffman_sortByLiteral.go
new file mode 100644
index 0000000..93f1aea
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/huffman_sortByLiteral.go
@@ -0,0 +1,201 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flate
+
+// sortByLiteral sorts data by literal value in O(n*log(n)) time.
+// The sort is not guaranteed to be stable.
+func sortByLiteral(data []literalNode) {
+ n := len(data)
+ quickSort(data, 0, n, maxDepth(n))
+}
+
+func quickSort(data []literalNode, a, b, maxDepth int) {
+ for b-a > 12 { // Use ShellSort for slices <= 12 elements
+ if maxDepth == 0 {
+ heapSort(data, a, b)
+ return
+ }
+ maxDepth--
+ mlo, mhi := doPivot(data, a, b)
+ // Avoiding recursion on the larger subproblem guarantees
+ // a stack depth of at most lg(b-a).
+ if mlo-a < b-mhi {
+ quickSort(data, a, mlo, maxDepth)
+ a = mhi // i.e., quickSort(data, mhi, b)
+ } else {
+ quickSort(data, mhi, b, maxDepth)
+ b = mlo // i.e., quickSort(data, a, mlo)
+ }
+ }
+ if b-a > 1 {
+ // Do ShellSort pass with gap 6
+ // It can be written in this simplified form because b-a <= 12
+ for i := a + 6; i < b; i++ {
+ if data[i].literal < data[i-6].literal {
+ data[i], data[i-6] = data[i-6], data[i]
+ }
+ }
+ insertionSort(data, a, b)
+ }
+}
+func heapSort(data []literalNode, a, b int) {
+ first := a
+ lo := 0
+ hi := b - a
+
+ // Build heap with greatest element at top.
+ for i := (hi - 1) / 2; i >= 0; i-- {
+ siftDown(data, i, hi, first)
+ }
+
+ // Pop elements, largest first, into end of data.
+ for i := hi - 1; i >= 0; i-- {
+ data[first], data[first+i] = data[first+i], data[first]
+ siftDown(data, lo, i, first)
+ }
+}
+
+// siftDown implements the heap property on data[lo, hi).
+// first is an offset into the array where the root of the heap lies.
+func siftDown(data []literalNode, lo, hi, first int) {
+ root := lo
+ for {
+ child := 2*root + 1
+ if child >= hi {
+ break
+ }
+ if child+1 < hi && data[first+child].literal < data[first+child+1].literal {
+ child++
+ }
+ if data[first+root].literal > data[first+child].literal {
+ return
+ }
+ data[first+root], data[first+child] = data[first+child], data[first+root]
+ root = child
+ }
+}
+func doPivot(data []literalNode, lo, hi int) (midlo, midhi int) {
+ m := int(uint(lo+hi) >> 1) // Written like this to avoid integer overflow.
+ if hi-lo > 40 {
+ // Tukey's ``Ninther,'' median of three medians of three.
+ s := (hi - lo) / 8
+ medianOfThree(data, lo, lo+s, lo+2*s)
+ medianOfThree(data, m, m-s, m+s)
+ medianOfThree(data, hi-1, hi-1-s, hi-1-2*s)
+ }
+ medianOfThree(data, lo, m, hi-1)
+
+ // Invariants are:
+ // data[lo] = pivot (set up by ChoosePivot)
+ // data[lo < i < a] < pivot
+ // data[a <= i < b] <= pivot
+ // data[b <= i < c] unexamined
+ // data[c <= i < hi-1] > pivot
+ // data[hi-1] >= pivot
+ pivot := lo
+ a, c := lo+1, hi-1
+
+ for ; a < c && data[a].literal < data[pivot].literal; a++ {
+ }
+ b := a
+ for {
+ for ; b < c && data[pivot].literal > data[b].literal; b++ { // data[b] <= pivot
+ }
+ for ; b < c && data[pivot].literal < data[c-1].literal; c-- { // data[c-1] > pivot
+ }
+ if b >= c {
+ break
+ }
+ // data[b] > pivot; data[c-1] <= pivot
+ data[b], data[c-1] = data[c-1], data[b]
+ b++
+ c--
+ }
+ // If hi-c<3 then there are duplicates (by property of median of nine).
+ // Let's be a bit more conservative, and set border to 5.
+ protect := hi-c < 5
+ if !protect && hi-c < (hi-lo)/4 {
+ // Let's test some points for equality to pivot
+ dups := 0
+ if data[pivot].literal > data[hi-1].literal { // data[hi-1] = pivot
+ data[c], data[hi-1] = data[hi-1], data[c]
+ c++
+ dups++
+ }
+ if data[b-1].literal > data[pivot].literal { // data[b-1] = pivot
+ b--
+ dups++
+ }
+ // m-lo = (hi-lo)/2 > 6
+ // b-lo > (hi-lo)*3/4-1 > 8
+ // ==> m < b ==> data[m] <= pivot
+ if data[m].literal > data[pivot].literal { // data[m] = pivot
+ data[m], data[b-1] = data[b-1], data[m]
+ b--
+ dups++
+ }
+ // if at least 2 points are equal to pivot, assume skewed distribution
+ protect = dups > 1
+ }
+ if protect {
+ // Protect against a lot of duplicates
+ // Add invariant:
+ // data[a <= i < b] unexamined
+ // data[b <= i < c] = pivot
+ for {
+ for ; a < b && data[b-1].literal > data[pivot].literal; b-- { // data[b] == pivot
+ }
+ for ; a < b && data[a].literal < data[pivot].literal; a++ { // data[a] < pivot
+ }
+ if a >= b {
+ break
+ }
+ // data[a] == pivot; data[b-1] < pivot
+ data[a], data[b-1] = data[b-1], data[a]
+ a++
+ b--
+ }
+ }
+ // Swap pivot into middle
+ data[pivot], data[b-1] = data[b-1], data[pivot]
+ return b - 1, c
+}
+
+// Insertion sort
+func insertionSort(data []literalNode, a, b int) {
+ for i := a + 1; i < b; i++ {
+ for j := i; j > a && data[j].literal < data[j-1].literal; j-- {
+ data[j], data[j-1] = data[j-1], data[j]
+ }
+ }
+}
+
+// maxDepth returns a threshold at which quicksort should switch
+// to heapsort. It returns 2*ceil(lg(n+1)).
+func maxDepth(n int) int {
+ var depth int
+ for i := n; i > 0; i >>= 1 {
+ depth++
+ }
+ return depth * 2
+}
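+
+// For example, maxDepth(100) == 14: 100 shifts right to zero in seven
+// steps, so a partition may split at most 14 times before the quicksorts
+// above fall back to heapSort.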
+
+// medianOfThree moves the median of the three values data[m0], data[m1], data[m2] into data[m1].
+func medianOfThree(data []literalNode, m1, m0, m2 int) {
+ // sort 3 elements
+ if data[m1].literal < data[m0].literal {
+ data[m1], data[m0] = data[m0], data[m1]
+ }
+ // data[m0] <= data[m1]
+ if data[m2].literal < data[m1].literal {
+ data[m2], data[m1] = data[m1], data[m2]
+ // data[m0] <= data[m2] && data[m1] < data[m2]
+ if data[m1].literal < data[m0].literal {
+ data[m1], data[m0] = data[m0], data[m1]
+ }
+ }
+ // now data[m0] <= data[m1] <= data[m2]
+}
diff --git a/vendor/github.com/klauspost/compress/flate/inflate.go b/vendor/github.com/klauspost/compress/flate/inflate.go
new file mode 100644
index 0000000..2f410d6
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/inflate.go
@@ -0,0 +1,829 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package flate implements the DEFLATE compressed data format, described in
+// RFC 1951. The gzip and zlib packages implement access to DEFLATE-based file
+// formats.
+package flate
+
+import (
+ "bufio"
+ "compress/flate"
+ "fmt"
+ "io"
+ "math/bits"
+ "sync"
+)
+
+const (
+ maxCodeLen = 16 // max length of Huffman code
+ maxCodeLenMask = 15 // mask for max length of Huffman code
+ // The next three numbers come from the RFC section 3.2.7, with the
+ // additional proviso in section 3.2.5 which implies that distance codes
+ // 30 and 31 should never occur in compressed data.
+ maxNumLit = 286
+ maxNumDist = 30
+ numCodes = 19 // number of codes in Huffman meta-code
+
+ debugDecode = false
+)
+
+// Value of length - 3 and extra bits.
+type lengthExtra struct {
+ length, extra uint8
+}
+
+var decCodeToLen = [32]lengthExtra{{length: 0x0, extra: 0x0}, {length: 0x1, extra: 0x0}, {length: 0x2, extra: 0x0}, {length: 0x3, extra: 0x0}, {length: 0x4, extra: 0x0}, {length: 0x5, extra: 0x0}, {length: 0x6, extra: 0x0}, {length: 0x7, extra: 0x0}, {length: 0x8, extra: 0x1}, {length: 0xa, extra: 0x1}, {length: 0xc, extra: 0x1}, {length: 0xe, extra: 0x1}, {length: 0x10, extra: 0x2}, {length: 0x14, extra: 0x2}, {length: 0x18, extra: 0x2}, {length: 0x1c, extra: 0x2}, {length: 0x20, extra: 0x3}, {length: 0x28, extra: 0x3}, {length: 0x30, extra: 0x3}, {length: 0x38, extra: 0x3}, {length: 0x40, extra: 0x4}, {length: 0x50, extra: 0x4}, {length: 0x60, extra: 0x4}, {length: 0x70, extra: 0x4}, {length: 0x80, extra: 0x5}, {length: 0xa0, extra: 0x5}, {length: 0xc0, extra: 0x5}, {length: 0xe0, extra: 0x5}, {length: 0xff, extra: 0x0}, {length: 0x0, extra: 0x0}, {length: 0x0, extra: 0x0}, {length: 0x0, extra: 0x0}}
+
+var bitMask32 = [32]uint32{
+ 0, 1, 3, 7, 0xF, 0x1F, 0x3F, 0x7F, 0xFF,
+ 0x1FF, 0x3FF, 0x7FF, 0xFFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF,
+ 0x1ffff, 0x3ffff, 0x7FFFF, 0xfFFFF, 0x1fFFFF, 0x3fFFFF, 0x7fFFFF, 0xffFFFF,
+ 0x1ffFFFF, 0x3ffFFFF, 0x7ffFFFF, 0xfffFFFF, 0x1fffFFFF, 0x3fffFFFF, 0x7fffFFFF,
+} // up to 32 bits
+
+// Initialize the fixedHuffmanDecoder only once upon first use.
+var fixedOnce sync.Once
+var fixedHuffmanDecoder huffmanDecoder
+
+// A CorruptInputError reports the presence of corrupt input at a given offset.
+type CorruptInputError = flate.CorruptInputError
+
+// An InternalError reports an error in the flate code itself.
+type InternalError string
+
+func (e InternalError) Error() string { return "flate: internal error: " + string(e) }
+
+// A ReadError reports an error encountered while reading input.
+//
+// Deprecated: No longer returned.
+type ReadError = flate.ReadError
+
+// A WriteError reports an error encountered while writing output.
+//
+// Deprecated: No longer returned.
+type WriteError = flate.WriteError
+
+// Resetter resets a ReadCloser returned by NewReader or NewReaderDict
+// to switch to a new underlying Reader. This permits reusing a ReadCloser
+// instead of allocating a new one.
+type Resetter interface {
+ // Reset discards any buffered data and resets the Resetter as if it was
+ // newly initialized with the given reader.
+ Reset(r io.Reader, dict []byte) error
+}
+
+// The data structure for decoding Huffman tables is based on that of
+// zlib. There is a lookup table of a fixed bit width (huffmanChunkBits).
+// For codes smaller than the table width, there are multiple entries
+// (each combination of trailing bits has the same value). For codes
+// larger than the table width, the table contains a link to an overflow
+// table. The width of each entry in the link table is the maximum code
+// size minus the chunk width.
+//
+// Note that you can do a lookup in the table even without all bits
+// filled. Since the extra bits are zero, and the DEFLATE Huffman codes
+// have the property that shorter codes come before longer ones, the
+// bit length estimate in the result is a lower bound on the actual
+// number of bits.
+//
+// See the following:
+// http://www.gzip.org/algorithm.txt
+
+// chunk & 15 is number of bits
+// chunk >> 4 is value, including table link
+
+const (
+ huffmanChunkBits = 9
+ huffmanNumChunks = 1 << huffmanChunkBits
+ huffmanCountMask = 15
+ huffmanValueShift = 4
+)
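+
+// An editorial example of the packing described above: a symbol with value
+// 65 and code length 7 is stored as chunk = 65<<huffmanValueShift | 7;
+// chunk&huffmanCountMask recovers the length and chunk>>huffmanValueShift
+// the symbol (or, for codes longer than huffmanChunkBits, an index into the
+// link tables).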
+
+type huffmanDecoder struct {
+ maxRead int // the maximum number of bits we can read and not overread
+ chunks *[huffmanNumChunks]uint16 // chunks as described above
+ links [][]uint16 // overflow links
+ linkMask uint32 // mask the width of the link table
+}
+
+// Initialize Huffman decoding tables from array of code lengths.
+// Following this function, h is guaranteed to be initialized into a complete
+// tree (i.e., neither over-subscribed nor under-subscribed). The exception is a
+// degenerate case where the tree has only a single symbol with length 1. Empty
+// trees are permitted.
+func (h *huffmanDecoder) init(lengths []int) bool {
+ // Sanity enables additional runtime tests during Huffman
+ // table construction. It's intended to be used during
+ // development to supplement the currently ad-hoc unit tests.
+ const sanity = false
+
+ if h.chunks == nil {
+ h.chunks = new([huffmanNumChunks]uint16)
+ }
+
+ if h.maxRead != 0 {
+ *h = huffmanDecoder{chunks: h.chunks, links: h.links}
+ }
+
+ // Count number of codes of each length,
+ // compute maxRead and max length.
+ var count [maxCodeLen]int
+ var min, max int
+ for _, n := range lengths {
+ if n == 0 {
+ continue
+ }
+ if min == 0 || n < min {
+ min = n
+ }
+ if n > max {
+ max = n
+ }
+ count[n&maxCodeLenMask]++
+ }
+
+ // Empty tree. The decompressor.huffSym function will fail later if the tree
+ // is used. Technically, an empty tree is only valid for the HDIST tree and
+ // not the HCLEN and HLIT tree. However, a stream with an empty HCLEN tree
+ // is guaranteed to fail since it will attempt to use the tree to decode the
+ // codes for the HLIT and HDIST trees. Similarly, an empty HLIT tree is
+ // guaranteed to fail later since the compressed data section must be
+ // composed of at least one symbol (the end-of-block marker).
+ if max == 0 {
+ return true
+ }
+
+ code := 0
+ var nextcode [maxCodeLen]int
+ for i := min; i <= max; i++ {
+ code <<= 1
+ nextcode[i&maxCodeLenMask] = code
+ code += count[i&maxCodeLenMask]
+ }
+
+ // Check that the coding is complete (i.e., that we've
+ // assigned all 2-to-the-max possible bit sequences).
+ // Exception: To be compatible with zlib, we also need to
+ // accept degenerate single-code codings. See also
+ // TestDegenerateHuffmanCoding.
+ if code != 1<<uint(max) && !(code == 1 && max == 1) {
+ if debugDecode {
+ fmt.Println("coding failed, code, max:", code, max, code == 1<<uint(max), max == 1, code == 1)
+ }
+ return false
+ }
+
+ h.maxRead = min
+
+ chunks := h.chunks[:]
+ for i := range chunks {
+ chunks[i] = 0
+ }
+
+ if max > huffmanChunkBits {
+ numLinks := 1 << (uint(max) - huffmanChunkBits)
+ h.linkMask = uint32(numLinks - 1)
+
+ // create link tables
+ link := nextcode[huffmanChunkBits+1] >> 1
+ if cap(h.links) < huffmanNumChunks-link {
+ h.links = make([][]uint16, huffmanNumChunks-link)
+ } else {
+ h.links = h.links[:huffmanNumChunks-link]
+ }
+ for j := uint(link); j < huffmanNumChunks; j++ {
+ reverse := int(bits.Reverse16(uint16(j)))
+ reverse >>= uint(16 - huffmanChunkBits)
+ off := j - uint(link)
+ if sanity && h.chunks[reverse] != 0 {
+ panic("impossible: overwriting existing chunk")
+ }
+ h.chunks[reverse] = uint16(off<<huffmanValueShift | (huffmanChunkBits + 1))
+ h.links[off] = make([]uint16, numLinks)
+ }
+ } else {
+ h.links = h.links[:0]
+ }
+
+ for i, n := range lengths {
+ if n == 0 {
+ continue
+ }
+ code := nextcode[n]
+ nextcode[n]++
+ chunk := uint16(i<<huffmanValueShift | n)
+ reverse := int(bits.Reverse16(uint16(code)))
+ reverse >>= uint(16 - n)
+ if n <= huffmanChunkBits {
+ for off := reverse; off < len(h.chunks); off += 1 << uint(n) {
+ // We should never need to overwrite
+ // an existing chunk. Also, 0 is
+ // never a valid chunk, because the
+ // lower 4 "count" bits should be
+ // between 1 and 15.
+ if sanity && h.chunks[off] != 0 {
+ panic("impossible: overwriting existing chunk")
+ }
+ h.chunks[off] = chunk
+ }
+ } else {
+ j := reverse & (huffmanNumChunks - 1)
+ if sanity && h.chunks[j]&huffmanCountMask != huffmanChunkBits+1 {
+ // Longer codes should have been
+ // associated with a link table above.
+ panic("impossible: not an indirect chunk")
+ }
+ value := h.chunks[j] >> huffmanValueShift
+ linktab := h.links[value]
+ reverse >>= huffmanChunkBits
+ for off := reverse; off < len(linktab); off += 1 << uint(n-huffmanChunkBits) {
+ if sanity && linktab[off] != 0 {
+ panic("impossible: overwriting existing chunk")
+ }
+ linktab[off] = chunk
+ }
+ }
+ }
+
+ if sanity {
+ // Above we've sanity checked that we never overwrote
+ // an existing entry. Here we additionally check that
+ // we filled the tables completely.
+ for i, chunk := range h.chunks {
+ if chunk == 0 {
+ // As an exception, in the degenerate
+ // single-code case, we allow odd
+ // chunks to be missing.
+ if code == 1 && i%2 == 1 {
+ continue
+ }
+ panic("impossible: missing chunk")
+ }
+ }
+ for _, linktab := range h.links {
+ for _, chunk := range linktab {
+ if chunk == 0 {
+ panic("impossible: missing chunk")
+ }
+ }
+ }
+ }
+
+ return true
+}
+
+// Reader is the actual read interface needed by NewReader.
+// If the passed in io.Reader does not also have ReadByte,
+// the NewReader will introduce its own buffering.
+type Reader interface {
+ io.Reader
+ io.ByteReader
+}
+
+type step uint8
+
+const (
+ copyData step = iota + 1
+ nextBlock
+ huffmanBytesBuffer
+ huffmanBytesReader
+ huffmanBufioReader
+ huffmanStringsReader
+ huffmanGenericReader
+)
+
+// Decompress state.
+type decompressor struct {
+ // Input source.
+ r Reader
+ roffset int64
+
+ // Huffman decoders for literal/length, distance.
+ h1, h2 huffmanDecoder
+
+ // Length arrays used to define Huffman codes.
+ bits *[maxNumLit + maxNumDist]int
+ codebits *[numCodes]int
+
+ // Output history, buffer.
+ dict dictDecoder
+
+ // Next step in the decompression,
+ // and decompression state.
+ step step
+ stepState int
+ err error
+ toRead []byte
+ hl, hd *huffmanDecoder
+ copyLen int
+ copyDist int
+
+ // Temporary buffer (avoids repeated allocation).
+ buf [4]byte
+
+ // Input bits, in top of b.
+ b uint32
+
+ nb uint
+ final bool
+}
+
+func (f *decompressor) nextBlock() {
+ for f.nb < 1+2 {
+ if f.err = f.moreBits(); f.err != nil {
+ return
+ }
+ }
+ f.final = f.b&1 == 1
+ f.b >>= 1
+ typ := f.b & 3
+ f.b >>= 2
+ f.nb -= 1 + 2
+ switch typ {
+ case 0:
+ f.dataBlock()
+ if debugDecode {
+ fmt.Println("stored block")
+ }
+ case 1:
+ // compressed, fixed Huffman tables
+ f.hl = &fixedHuffmanDecoder
+ f.hd = nil
+ f.huffmanBlockDecoder()
+ if debugDecode {
+ fmt.Println("predefinied huffman block")
+ }
+ case 2:
+ // compressed, dynamic Huffman tables
+ if f.err = f.readHuffman(); f.err != nil {
+ break
+ }
+ f.hl = &f.h1
+ f.hd = &f.h2
+ f.huffmanBlockDecoder()
+ if debugDecode {
+ fmt.Println("dynamic huffman block")
+ }
+ default:
+ // 3 is reserved.
+ if debugDecode {
+ fmt.Println("reserved data block encountered")
+ }
+ f.err = CorruptInputError(f.roffset)
+ }
+}
+
+func (f *decompressor) Read(b []byte) (int, error) {
+ for {
+ if len(f.toRead) > 0 {
+ n := copy(b, f.toRead)
+ f.toRead = f.toRead[n:]
+ if len(f.toRead) == 0 {
+ return n, f.err
+ }
+ return n, nil
+ }
+ if f.err != nil {
+ return 0, f.err
+ }
+
+ f.doStep()
+
+ if f.err != nil && len(f.toRead) == 0 {
+ f.toRead = f.dict.readFlush() // Flush what's left in case of error
+ }
+ }
+}
+
+// WriteTo implements the io.WriterTo interface for io.Copy and friends.
+func (f *decompressor) WriteTo(w io.Writer) (int64, error) {
+ total := int64(0)
+ flushed := false
+ for {
+ if len(f.toRead) > 0 {
+ n, err := w.Write(f.toRead)
+ total += int64(n)
+ if err != nil {
+ f.err = err
+ return total, err
+ }
+ if n != len(f.toRead) {
+ return total, io.ErrShortWrite
+ }
+ f.toRead = f.toRead[:0]
+ }
+ if f.err != nil && flushed {
+ if f.err == io.EOF {
+ return total, nil
+ }
+ return total, f.err
+ }
+ if f.err == nil {
+ f.doStep()
+ }
+ if len(f.toRead) == 0 && f.err != nil && !flushed {
+ f.toRead = f.dict.readFlush() // Flush what's left in case of error
+ flushed = true
+ }
+ }
+}
+
+func (f *decompressor) Close() error {
+ if f.err == io.EOF {
+ return nil
+ }
+ return f.err
+}
+
+// RFC 1951 section 3.2.7.
+// Compression with dynamic Huffman codes
+
+var codeOrder = [...]int{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}
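+
+// The shuffled order comes from RFC 1951: the code-length codes least
+// likely to be used come last, so encoders can drop trailing zero lengths
+// and keep HCLEN small.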
+
+func (f *decompressor) readHuffman() error {
+ // HLIT[5], HDIST[5], HCLEN[4].
+ for f.nb < 5+5+4 {
+ if err := f.moreBits(); err != nil {
+ return err
+ }
+ }
+ nlit := int(f.b&0x1F) + 257
+ if nlit > maxNumLit {
+ if debugDecode {
+ fmt.Println("nlit > maxNumLit", nlit)
+ }
+ return CorruptInputError(f.roffset)
+ }
+ f.b >>= 5
+ ndist := int(f.b&0x1F) + 1
+ if ndist > maxNumDist {
+ if debugDecode {
+ fmt.Println("ndist > maxNumDist", ndist)
+ }
+ return CorruptInputError(f.roffset)
+ }
+ f.b >>= 5
+ nclen := int(f.b&0xF) + 4
+ // numCodes is 19, so nclen is always valid.
+ f.b >>= 4
+ f.nb -= 5 + 5 + 4
+
+ // (HCLEN+4)*3 bits: code lengths in the magic codeOrder order.
+ for i := 0; i < nclen; i++ {
+ for f.nb < 3 {
+ if err := f.moreBits(); err != nil {
+ return err
+ }
+ }
+ f.codebits[codeOrder[i]] = int(f.b & 0x7)
+ f.b >>= 3
+ f.nb -= 3
+ }
+ for i := nclen; i < len(codeOrder); i++ {
+ f.codebits[codeOrder[i]] = 0
+ }
+ if !f.h1.init(f.codebits[0:]) {
+ if debugDecode {
+ fmt.Println("init codebits failed")
+ }
+ return CorruptInputError(f.roffset)
+ }
+
+ // HLIT + 257 code lengths, HDIST + 1 code lengths,
+ // using the code length Huffman code.
+ for i, n := 0, nlit+ndist; i < n; {
+ x, err := f.huffSym(&f.h1)
+ if err != nil {
+ return err
+ }
+ if x < 16 {
+ // Actual length.
+ f.bits[i] = x
+ i++
+ continue
+ }
+ // Repeat previous length or zero.
+ var rep int
+ var nb uint
+ var b int
+ switch x {
+ default:
+ return InternalError("unexpected length code")
+ case 16:
+ rep = 3
+ nb = 2
+ if i == 0 {
+ if debugDecode {
+ fmt.Println("i==0")
+ }
+ return CorruptInputError(f.roffset)
+ }
+ b = f.bits[i-1]
+ case 17:
+ rep = 3
+ nb = 3
+ b = 0
+ case 18:
+ rep = 11
+ nb = 7
+ b = 0
+ }
+ for f.nb < nb {
+ if err := f.moreBits(); err != nil {
+ if debugDecode {
+ fmt.Println("morebits:", err)
+ }
+ return err
+ }
+ }
+ rep += int(f.b & uint32(1<<(nb&regSizeMaskUint32)-1))
+ f.b >>= nb & regSizeMaskUint32
+ f.nb -= nb
+ if i+rep > n {
+ if debugDecode {
+ fmt.Println("i+rep > n", i, rep, n)
+ }
+ return CorruptInputError(f.roffset)
+ }
+ for j := 0; j < rep; j++ {
+ f.bits[i] = b
+ i++
+ }
+ }
+
+ if !f.h1.init(f.bits[0:nlit]) || !f.h2.init(f.bits[nlit:nlit+ndist]) {
+ if debugDecode {
+ fmt.Println("init2 failed")
+ }
+ return CorruptInputError(f.roffset)
+ }
+
+ // As an optimization, we can initialize the maxRead bits to read at a time
+ // for the HLIT tree to the length of the EOB marker since we know that
+ // every block must terminate with one. This preserves the property that
+ // we never read any extra bytes after the end of the DEFLATE stream.
+ if f.h1.maxRead < f.bits[endBlockMarker] {
+ f.h1.maxRead = f.bits[endBlockMarker]
+ }
+ if !f.final {
+ // If not the final block, the smallest block possible is
+ // a predefined table, BTYPE=01, with a single EOB marker.
+ // This will take up 3 + 7 bits.
+ f.h1.maxRead += 10
+ }
+
+ return nil
+}
+
+// Copy a single uncompressed data block from input to output.
+func (f *decompressor) dataBlock() {
+ // Uncompressed.
+ // Discard current half-byte.
+ left := (f.nb) & 7
+ f.nb -= left
+ f.b >>= left
+
+ offBytes := f.nb >> 3
+ // Unfilled values will be overwritten.
+ f.buf[0] = uint8(f.b)
+ f.buf[1] = uint8(f.b >> 8)
+ f.buf[2] = uint8(f.b >> 16)
+ f.buf[3] = uint8(f.b >> 24)
+
+ f.roffset += int64(offBytes)
+ f.nb, f.b = 0, 0
+
+ // Length then ones-complement of length.
+ nr, err := io.ReadFull(f.r, f.buf[offBytes:4])
+ f.roffset += int64(nr)
+ if err != nil {
+ f.err = noEOF(err)
+ return
+ }
+ n := uint16(f.buf[0]) | uint16(f.buf[1])<<8
+ nn := uint16(f.buf[2]) | uint16(f.buf[3])<<8
+ if nn != ^n {
+ if debugDecode {
+ ncomp := ^n
+ fmt.Println("uint16(nn) != uint16(^n)", nn, ncomp)
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ if n == 0 {
+ f.toRead = f.dict.readFlush()
+ f.finishBlock()
+ return
+ }
+
+ f.copyLen = int(n)
+ f.copyData()
+}
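+
+// A stored block is framed, after byte alignment, by LEN as a little-endian
+// uint16 followed by NLEN, its ones-complement, which the nn != ^n check
+// above verifies. A sketch of producing that framing (illustrative only):
+//
+//    func storedHeader(n uint16) [4]byte {
+//        nn := ^n
+//        return [4]byte{byte(n), byte(n >> 8), byte(nn), byte(nn >> 8)}
+//    }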
+
+// copyData copies f.copyLen bytes from the underlying reader into the
+// dictionary window (f.dict). It pauses for reads when the window is full.
+func (f *decompressor) copyData() {
+ buf := f.dict.writeSlice()
+ if len(buf) > f.copyLen {
+ buf = buf[:f.copyLen]
+ }
+
+ cnt, err := io.ReadFull(f.r, buf)
+ f.roffset += int64(cnt)
+ f.copyLen -= cnt
+ f.dict.writeMark(cnt)
+ if err != nil {
+ f.err = noEOF(err)
+ return
+ }
+
+ if f.dict.availWrite() == 0 || f.copyLen > 0 {
+ f.toRead = f.dict.readFlush()
+ f.step = copyData
+ return
+ }
+ f.finishBlock()
+}
+
+func (f *decompressor) finishBlock() {
+ if f.final {
+ if f.dict.availRead() > 0 {
+ f.toRead = f.dict.readFlush()
+ }
+ f.err = io.EOF
+ }
+ f.step = nextBlock
+}
+
+func (f *decompressor) doStep() {
+ switch f.step {
+ case copyData:
+ f.copyData()
+ case nextBlock:
+ f.nextBlock()
+ case huffmanBytesBuffer:
+ f.huffmanBytesBuffer()
+ case huffmanBytesReader:
+ f.huffmanBytesReader()
+ case huffmanBufioReader:
+ f.huffmanBufioReader()
+ case huffmanStringsReader:
+ f.huffmanStringsReader()
+ case huffmanGenericReader:
+ f.huffmanGenericReader()
+ default:
+ panic("BUG: unexpected step state")
+ }
+}
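+
+// The decompressor is a resumable state machine: whenever the output window
+// fills, the active phase stores its continuation and returns, and doStep
+// re-enters that phase on the next Read. The hand-off pattern, in isolation:
+//
+//    f.toRead = f.dict.readFlush() // hand buffered output to Read
+//    f.step = copyData             // resume in this phase next time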
+
+// noEOF returns err, unless err == io.EOF, in which case it returns io.ErrUnexpectedEOF.
+func noEOF(e error) error {
+ if e == io.EOF {
+ return io.ErrUnexpectedEOF
+ }
+ return e
+}
+
+func (f *decompressor) moreBits() error {
+ c, err := f.r.ReadByte()
+ if err != nil {
+ return noEOF(err)
+ }
+ f.roffset++
+ f.b |= uint32(c) << (f.nb & regSizeMaskUint32)
+ f.nb += 8
+ return nil
+}
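+
+// DEFLATE packs bits LSB-first: moreBits appends each new byte above the
+// nb bits already held, and consumers take bits from the bottom of the
+// accumulator. The two halves of the scheme, in isolation:
+//
+//    b |= uint32(c) << nb // append byte c above the existing bits
+//    v := b & (1<<k - 1)  // consume the next k bits
+//    b >>= k
+//    nb -= k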
+
+// Read the next Huffman-encoded symbol from f according to h.
+func (f *decompressor) huffSym(h *huffmanDecoder) (int, error) {
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(h.maxRead)
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ nb, b := f.nb, f.b
+ for {
+ for nb < n {
+ c, err := f.r.ReadByte()
+ if err != nil {
+ f.b = b
+ f.nb = nb
+ return 0, noEOF(err)
+ }
+ f.roffset++
+ b |= uint32(c) << (nb & regSizeMaskUint32)
+ nb += 8
+ }
+ chunk := h.chunks[b&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = h.links[chunk>>huffmanValueShift][(b>>huffmanChunkBits)&h.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= nb {
+ if n == 0 {
+ f.b = b
+ f.nb = nb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return 0, f.err
+ }
+ f.b = b >> (n & regSizeMaskUint32)
+ f.nb = nb - n
+ return int(chunk >> huffmanValueShift), nil
+ }
+ }
+}
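+
+// The chunks/links pair forms a two-level lookup table: the low
+// huffmanChunkBits bits of the accumulator index chunks directly, and an
+// entry marking a longer code indirects into a links sub-table selected by
+// the following bits. One lookup, in isolation (b holds enough bits):
+//
+//    chunk := h.chunks[b&(huffmanNumChunks-1)]
+//    if chunk&huffmanCountMask > huffmanChunkBits {
+//        chunk = h.links[chunk>>huffmanValueShift][(b>>huffmanChunkBits)&h.linkMask]
+//    }
+//    sym, codeLen := chunk>>huffmanValueShift, chunk&huffmanCountMask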
+
+func makeReader(r io.Reader) Reader {
+ if rr, ok := r.(Reader); ok {
+ return rr
+ }
+ return bufio.NewReader(r)
+}
+
+func fixedHuffmanDecoderInit() {
+ fixedOnce.Do(func() {
+ // These come from the RFC section 3.2.6.
+ var bits [288]int
+ for i := 0; i < 144; i++ {
+ bits[i] = 8
+ }
+ for i := 144; i < 256; i++ {
+ bits[i] = 9
+ }
+ for i := 256; i < 280; i++ {
+ bits[i] = 7
+ }
+ for i := 280; i < 288; i++ {
+ bits[i] = 8
+ }
+ fixedHuffmanDecoder.init(bits[:])
+ })
+}
+
+func (f *decompressor) Reset(r io.Reader, dict []byte) error {
+ *f = decompressor{
+ r: makeReader(r),
+ bits: f.bits,
+ codebits: f.codebits,
+ h1: f.h1,
+ h2: f.h2,
+ dict: f.dict,
+ step: nextBlock,
+ }
+ f.dict.init(maxMatchOffset, dict)
+ return nil
+}
+
+// NewReader returns a new ReadCloser that can be used
+// to read the uncompressed version of r.
+// If r does not also implement io.ByteReader,
+// the decompressor may read more data than necessary from r.
+// It is the caller's responsibility to call Close on the ReadCloser
+// when finished reading.
+//
+// The ReadCloser returned by NewReader also implements Resetter.
+func NewReader(r io.Reader) io.ReadCloser {
+ fixedHuffmanDecoderInit()
+
+ var f decompressor
+ f.r = makeReader(r)
+ f.bits = new([maxNumLit + maxNumDist]int)
+ f.codebits = new([numCodes]int)
+ f.step = nextBlock
+ f.dict.init(maxMatchOffset, nil)
+ return &f
+}
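+
+// A typical round trip through this package (error handling elided):
+//
+//    var buf bytes.Buffer
+//    w, _ := NewWriter(&buf, BestSpeed)
+//    w.Write([]byte("hello, deflate"))
+//    w.Close()
+//
+//    r := NewReader(&buf)
+//    plain, _ := io.ReadAll(r)
+//    r.Close()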
+
+// NewReaderDict is like NewReader but initializes the reader
+// with a preset dictionary. The returned Reader behaves as if
+// the uncompressed data stream started with the given dictionary,
+// which has already been read. NewReaderDict is typically used
+// to read data compressed by NewWriterDict.
+//
+// The ReadCloser returned by NewReaderDict also implements Resetter.
+func NewReaderDict(r io.Reader, dict []byte) io.ReadCloser {
+ fixedHuffmanDecoderInit()
+
+ var f decompressor
+ f.r = makeReader(r)
+ f.bits = new([maxNumLit + maxNumDist]int)
+ f.codebits = new([numCodes]int)
+ f.step = nextBlock
+ f.dict.init(maxMatchOffset, dict)
+ return &f
+}
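+
+// The dictionary must be the same one passed to NewWriterDict, e.g.
+// (error handling elided):
+//
+//    var buf bytes.Buffer
+//    dict := []byte("a dictionary of likely content")
+//    w, _ := NewWriterDict(&buf, DefaultCompression, dict)
+//    w.Write([]byte("a dictionary of likely content, repeated"))
+//    w.Close()
+//    r := NewReaderDict(&buf, dict)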
diff --git a/vendor/github.com/klauspost/compress/flate/inflate_gen.go b/vendor/github.com/klauspost/compress/flate/inflate_gen.go
new file mode 100644
index 0000000..2b2f993
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/inflate_gen.go
@@ -0,0 +1,1283 @@
+// Code generated by go generate gen_inflate.go. DO NOT EDIT.
+
+package flate
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "math/bits"
+ "strings"
+)
+
+// Decode a single Huffman block from f.
+// hl and hd are the Huffman states for the lit/length values
+// and the distance values, respectively. If hd == nil, use the
+// fixed distance encoding associated with fixed Huffman blocks.
+func (f *decompressor) huffmanBytesBuffer() {
+ const (
+ stateInit = iota // Zero value must be stateInit
+ stateDict
+ )
+ fr := f.r.(*bytes.Buffer)
+
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ fnb, fb, dict := f.nb, f.b, &f.dict
+
+ switch f.stepState {
+ case stateInit:
+ goto readLiteral
+ case stateDict:
+ goto copyHistory
+ }
+
+readLiteral:
+ // Read literal and/or (length, distance) according to RFC section 3.2.3.
+ {
+ var v int
+ {
+ // Inlined v, err := f.huffSym(f.hl)
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hl.maxRead)
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hl.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ v = int(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ var length int
+ switch {
+ case v < 256:
+ dict.writeByte(byte(v))
+ if dict.availWrite() == 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanBytesBuffer
+ f.stepState = stateInit
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ case v == 256:
+ f.b, f.nb = fb, fnb
+ f.finishBlock()
+ return
+ // otherwise, reference to older data
+ case v < 265:
+ length = v - (257 - 3)
+ case v < maxNumLit:
+ val := decCodeToLen[(v - 257)]
+ length = int(val.length) + 3
+ n := uint(val.extra)
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits n>0:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ length += int(fb & bitMask32[n])
+ fb >>= n & regSizeMaskUint32
+ fnb -= n
+ default:
+ if debugDecode {
+ fmt.Println(v, ">= maxNumLit")
+ }
+ f.err = CorruptInputError(f.roffset)
+ f.b, f.nb = fb, fnb
+ return
+ }
+
+ var dist uint32
+ if f.hd == nil {
+ for fnb < 5 {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb<5:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3)))
+ fb >>= 5
+ fnb -= 5
+ } else {
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hd.maxRead)
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hd.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ dist = uint32(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ switch {
+ case dist < 4:
+ dist++
+ case dist < maxNumDist:
+ nb := uint(dist-2) >> 1
+ // have 1 bit in bottom of dist, need nb more.
+ extra := (dist & 1) << (nb & regSizeMaskUint32)
+ for fnb < nb {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb>= nb & regSizeMaskUint32
+ fnb -= nb
+ dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra
+ // slower: dist = bitMask32[nb+1] + 2 + extra
+ default:
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist too big:", dist, maxNumDist)
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ // No check on length; encoding can be prescient.
+ if dist > uint32(dict.histSize()) {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist > dict.histSize():", dist, dict.histSize())
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ f.copyLen, f.copyDist = length, int(dist)
+ goto copyHistory
+ }
+
+copyHistory:
+ // Perform a backwards copy according to RFC section 3.2.3.
+ {
+ cnt := dict.tryWriteCopy(f.copyDist, f.copyLen)
+ if cnt == 0 {
+ cnt = dict.writeCopy(f.copyDist, f.copyLen)
+ }
+ f.copyLen -= cnt
+
+ if dict.availWrite() == 0 || f.copyLen > 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanBytesBuffer // We need to continue this work
+ f.stepState = stateDict
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ }
+ // Not reached
+}
+
+// Decode a single Huffman block from f.
+// hl and hd are the Huffman states for the lit/length values
+// and the distance values, respectively. If hd == nil, use the
+// fixed distance encoding associated with fixed Huffman blocks.
+func (f *decompressor) huffmanBytesReader() {
+ const (
+ stateInit = iota // Zero value must be stateInit
+ stateDict
+ )
+ fr := f.r.(*bytes.Reader)
+
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ fnb, fb, dict := f.nb, f.b, &f.dict
+
+ switch f.stepState {
+ case stateInit:
+ goto readLiteral
+ case stateDict:
+ goto copyHistory
+ }
+
+readLiteral:
+ // Read literal and/or (length, distance) according to RFC section 3.2.3.
+ {
+ var v int
+ {
+ // Inlined v, err := f.huffSym(f.hl)
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hl.maxRead)
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hl.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ v = int(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ var length int
+ switch {
+ case v < 256:
+ dict.writeByte(byte(v))
+ if dict.availWrite() == 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanBytesReader
+ f.stepState = stateInit
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ case v == 256:
+ f.b, f.nb = fb, fnb
+ f.finishBlock()
+ return
+ // otherwise, reference to older data
+ case v < 265:
+ length = v - (257 - 3)
+ case v < maxNumLit:
+ val := decCodeToLen[(v - 257)]
+ length = int(val.length) + 3
+ n := uint(val.extra)
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits n>0:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ length += int(fb & bitMask32[n])
+ fb >>= n & regSizeMaskUint32
+ fnb -= n
+ default:
+ if debugDecode {
+ fmt.Println(v, ">= maxNumLit")
+ }
+ f.err = CorruptInputError(f.roffset)
+ f.b, f.nb = fb, fnb
+ return
+ }
+
+ var dist uint32
+ if f.hd == nil {
+ for fnb < 5 {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb<5:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3)))
+ fb >>= 5
+ fnb -= 5
+ } else {
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hd.maxRead)
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hd.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ dist = uint32(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ switch {
+ case dist < 4:
+ dist++
+ case dist < maxNumDist:
+ nb := uint(dist-2) >> 1
+ // have 1 bit in bottom of dist, need nb more.
+ extra := (dist & 1) << (nb & regSizeMaskUint32)
+ for fnb < nb {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb>= nb & regSizeMaskUint32
+ fnb -= nb
+ dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra
+ // slower: dist = bitMask32[nb+1] + 2 + extra
+ default:
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist too big:", dist, maxNumDist)
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ // No check on length; encoding can be prescient.
+ if dist > uint32(dict.histSize()) {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist > dict.histSize():", dist, dict.histSize())
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ f.copyLen, f.copyDist = length, int(dist)
+ goto copyHistory
+ }
+
+copyHistory:
+ // Perform a backwards copy according to RFC section 3.2.3.
+ {
+ cnt := dict.tryWriteCopy(f.copyDist, f.copyLen)
+ if cnt == 0 {
+ cnt = dict.writeCopy(f.copyDist, f.copyLen)
+ }
+ f.copyLen -= cnt
+
+ if dict.availWrite() == 0 || f.copyLen > 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanBytesReader // We need to continue this work
+ f.stepState = stateDict
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ }
+ // Not reached
+}
+
+// Decode a single Huffman block from f.
+// hl and hd are the Huffman states for the lit/length values
+// and the distance values, respectively. If hd == nil, use the
+// fixed distance encoding associated with fixed Huffman blocks.
+func (f *decompressor) huffmanBufioReader() {
+ const (
+ stateInit = iota // Zero value must be stateInit
+ stateDict
+ )
+ fr := f.r.(*bufio.Reader)
+
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ fnb, fb, dict := f.nb, f.b, &f.dict
+
+ switch f.stepState {
+ case stateInit:
+ goto readLiteral
+ case stateDict:
+ goto copyHistory
+ }
+
+readLiteral:
+ // Read literal and/or (length, distance) according to RFC section 3.2.3.
+ {
+ var v int
+ {
+ // Inlined v, err := f.huffSym(f.hl)
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hl.maxRead)
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hl.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ v = int(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ var length int
+ switch {
+ case v < 256:
+ dict.writeByte(byte(v))
+ if dict.availWrite() == 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanBufioReader
+ f.stepState = stateInit
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ case v == 256:
+ f.b, f.nb = fb, fnb
+ f.finishBlock()
+ return
+ // otherwise, reference to older data
+ case v < 265:
+ length = v - (257 - 3)
+ case v < maxNumLit:
+ val := decCodeToLen[(v - 257)]
+ length = int(val.length) + 3
+ n := uint(val.extra)
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits n>0:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ length += int(fb & bitMask32[n])
+ fb >>= n & regSizeMaskUint32
+ fnb -= n
+ default:
+ if debugDecode {
+ fmt.Println(v, ">= maxNumLit")
+ }
+ f.err = CorruptInputError(f.roffset)
+ f.b, f.nb = fb, fnb
+ return
+ }
+
+ var dist uint32
+ if f.hd == nil {
+ for fnb < 5 {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb<5:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3)))
+ fb >>= 5
+ fnb -= 5
+ } else {
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hd.maxRead)
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hd.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ dist = uint32(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ switch {
+ case dist < 4:
+ dist++
+ case dist < maxNumDist:
+ nb := uint(dist-2) >> 1
+ // have 1 bit in bottom of dist, need nb more.
+ extra := (dist & 1) << (nb & regSizeMaskUint32)
+ for fnb < nb {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb>= nb & regSizeMaskUint32
+ fnb -= nb
+ dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra
+ // slower: dist = bitMask32[nb+1] + 2 + extra
+ default:
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist too big:", dist, maxNumDist)
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ // No check on length; encoding can be prescient.
+ if dist > uint32(dict.histSize()) {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist > dict.histSize():", dist, dict.histSize())
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ f.copyLen, f.copyDist = length, int(dist)
+ goto copyHistory
+ }
+
+copyHistory:
+ // Perform a backwards copy according to RFC section 3.2.3.
+ {
+ cnt := dict.tryWriteCopy(f.copyDist, f.copyLen)
+ if cnt == 0 {
+ cnt = dict.writeCopy(f.copyDist, f.copyLen)
+ }
+ f.copyLen -= cnt
+
+ if dict.availWrite() == 0 || f.copyLen > 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanBufioReader // We need to continue this work
+ f.stepState = stateDict
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ }
+ // Not reached
+}
+
+// Decode a single Huffman block from f.
+// hl and hd are the Huffman states for the lit/length values
+// and the distance values, respectively. If hd == nil, use the
+// fixed distance encoding associated with fixed Huffman blocks.
+func (f *decompressor) huffmanStringsReader() {
+ const (
+ stateInit = iota // Zero value must be stateInit
+ stateDict
+ )
+ fr := f.r.(*strings.Reader)
+
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ fnb, fb, dict := f.nb, f.b, &f.dict
+
+ switch f.stepState {
+ case stateInit:
+ goto readLiteral
+ case stateDict:
+ goto copyHistory
+ }
+
+readLiteral:
+ // Read literal and/or (length, distance) according to RFC section 3.2.3.
+ {
+ var v int
+ {
+ // Inlined v, err := f.huffSym(f.hl)
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hl.maxRead)
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hl.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ v = int(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ var length int
+ switch {
+ case v < 256:
+ dict.writeByte(byte(v))
+ if dict.availWrite() == 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanStringsReader
+ f.stepState = stateInit
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ case v == 256:
+ f.b, f.nb = fb, fnb
+ f.finishBlock()
+ return
+ // otherwise, reference to older data
+ case v < 265:
+ length = v - (257 - 3)
+ case v < maxNumLit:
+ val := decCodeToLen[(v - 257)]
+ length = int(val.length) + 3
+ n := uint(val.extra)
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits n>0:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ length += int(fb & bitMask32[n])
+ fb >>= n & regSizeMaskUint32
+ fnb -= n
+ default:
+ if debugDecode {
+ fmt.Println(v, ">= maxNumLit")
+ }
+ f.err = CorruptInputError(f.roffset)
+ f.b, f.nb = fb, fnb
+ return
+ }
+
+ var dist uint32
+ if f.hd == nil {
+ for fnb < 5 {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb<5:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3)))
+ fb >>= 5
+ fnb -= 5
+ } else {
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hd.maxRead)
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hd.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ dist = uint32(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ switch {
+ case dist < 4:
+ dist++
+ case dist < maxNumDist:
+ nb := uint(dist-2) >> 1
+ // have 1 bit in bottom of dist, need nb more.
+ extra := (dist & 1) << (nb & regSizeMaskUint32)
+ for fnb < nb {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb>= nb & regSizeMaskUint32
+ fnb -= nb
+ dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra
+ // slower: dist = bitMask32[nb+1] + 2 + extra
+ default:
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist too big:", dist, maxNumDist)
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ // No check on length; encoding can be prescient.
+ if dist > uint32(dict.histSize()) {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist > dict.histSize():", dist, dict.histSize())
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ f.copyLen, f.copyDist = length, int(dist)
+ goto copyHistory
+ }
+
+copyHistory:
+ // Perform a backwards copy according to RFC section 3.2.3.
+ {
+ cnt := dict.tryWriteCopy(f.copyDist, f.copyLen)
+ if cnt == 0 {
+ cnt = dict.writeCopy(f.copyDist, f.copyLen)
+ }
+ f.copyLen -= cnt
+
+ if dict.availWrite() == 0 || f.copyLen > 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanStringsReader // We need to continue this work
+ f.stepState = stateDict
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ }
+ // Not reached
+}
+
+// Decode a single Huffman block from f.
+// hl and hd are the Huffman states for the lit/length values
+// and the distance values, respectively. If hd == nil, use the
+// fixed distance encoding associated with fixed Huffman blocks.
+func (f *decompressor) huffmanGenericReader() {
+ const (
+ stateInit = iota // Zero value must be stateInit
+ stateDict
+ )
+ fr := f.r.(Reader)
+
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ fnb, fb, dict := f.nb, f.b, &f.dict
+
+ switch f.stepState {
+ case stateInit:
+ goto readLiteral
+ case stateDict:
+ goto copyHistory
+ }
+
+readLiteral:
+ // Read literal and/or (length, distance) according to RFC section 3.2.3.
+ {
+ var v int
+ {
+ // Inlined v, err := f.huffSym(f.hl)
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hl.maxRead)
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hl.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ v = int(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ var length int
+ switch {
+ case v < 256:
+ dict.writeByte(byte(v))
+ if dict.availWrite() == 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanGenericReader
+ f.stepState = stateInit
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ case v == 256:
+ f.b, f.nb = fb, fnb
+ f.finishBlock()
+ return
+ // otherwise, reference to older data
+ case v < 265:
+ length = v - (257 - 3)
+ case v < maxNumLit:
+ val := decCodeToLen[(v - 257)]
+ length = int(val.length) + 3
+ n := uint(val.extra)
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits n>0:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ length += int(fb & bitMask32[n])
+ fb >>= n & regSizeMaskUint32
+ fnb -= n
+ default:
+ if debugDecode {
+ fmt.Println(v, ">= maxNumLit")
+ }
+ f.err = CorruptInputError(f.roffset)
+ f.b, f.nb = fb, fnb
+ return
+ }
+
+ var dist uint32
+ if f.hd == nil {
+ for fnb < 5 {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb<5:", err)
+ }
+ f.err = err
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3)))
+ fb >>= 5
+ fnb -= 5
+ } else {
+ // Since a huffmanDecoder can be empty or be composed of a degenerate tree
+ // with a single element, huffSym must error on these two edge cases. In both
+ // cases, the chunks slice will be 0 for the invalid sequence, leading it
+ // to satisfy the n == 0 check below.
+ n := uint(f.hd.maxRead)
+ // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+ // but is smart enough to keep local variables in registers, so use nb and b,
+ // inline call to moreBits and reassign b,nb back to f on return.
+ for {
+ for fnb < n {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ f.err = noEOF(err)
+ return
+ }
+ f.roffset++
+ fb |= uint32(c) << (fnb & regSizeMaskUint32)
+ fnb += 8
+ }
+ chunk := f.hd.chunks[fb&(huffmanNumChunks-1)]
+ n = uint(chunk & huffmanCountMask)
+ if n > huffmanChunkBits {
+ chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask]
+ n = uint(chunk & huffmanCountMask)
+ }
+ if n <= fnb {
+ if n == 0 {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("huffsym: n==0")
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+ fb = fb >> (n & regSizeMaskUint32)
+ fnb = fnb - n
+ dist = uint32(chunk >> huffmanValueShift)
+ break
+ }
+ }
+ }
+
+ switch {
+ case dist < 4:
+ dist++
+ case dist < maxNumDist:
+ nb := uint(dist-2) >> 1
+ // have 1 bit in bottom of dist, need nb more.
+ extra := (dist & 1) << (nb & regSizeMaskUint32)
+ for fnb < nb {
+ c, err := fr.ReadByte()
+ if err != nil {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("morebits f.nb>= nb & regSizeMaskUint32
+ fnb -= nb
+ dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra
+ // slower: dist = bitMask32[nb+1] + 2 + extra
+ default:
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist too big:", dist, maxNumDist)
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ // No check on length; encoding can be prescient.
+ if dist > uint32(dict.histSize()) {
+ f.b, f.nb = fb, fnb
+ if debugDecode {
+ fmt.Println("dist > dict.histSize():", dist, dict.histSize())
+ }
+ f.err = CorruptInputError(f.roffset)
+ return
+ }
+
+ f.copyLen, f.copyDist = length, int(dist)
+ goto copyHistory
+ }
+
+copyHistory:
+ // Perform a backwards copy according to RFC section 3.2.3.
+ {
+ cnt := dict.tryWriteCopy(f.copyDist, f.copyLen)
+ if cnt == 0 {
+ cnt = dict.writeCopy(f.copyDist, f.copyLen)
+ }
+ f.copyLen -= cnt
+
+ if dict.availWrite() == 0 || f.copyLen > 0 {
+ f.toRead = dict.readFlush()
+ f.step = huffmanGenericReader // We need to continue this work
+ f.stepState = stateDict
+ f.b, f.nb = fb, fnb
+ return
+ }
+ goto readLiteral
+ }
+ // Not reached
+}
+
+func (f *decompressor) huffmanBlockDecoder() {
+ switch f.r.(type) {
+ case *bytes.Buffer:
+ f.huffmanBytesBuffer()
+ case *bytes.Reader:
+ f.huffmanBytesReader()
+ case *bufio.Reader:
+ f.huffmanBufioReader()
+ case *strings.Reader:
+ f.huffmanStringsReader()
+ case Reader:
+ f.huffmanGenericReader()
+ default:
+ f.huffmanGenericReader()
+ }
+}
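+
+// Each case dispatches to a generated copy of the block decoder specialized
+// for the concrete reader type, so the per-byte ReadByte calls can be
+// devirtualized. Handing NewReader one of the recognized types keeps
+// decoding on a fast path, e.g.:
+//
+//    r := NewReader(bytes.NewReader(compressed)) // *bytes.Reader fast path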
diff --git a/vendor/github.com/klauspost/compress/flate/level1.go b/vendor/github.com/klauspost/compress/flate/level1.go
new file mode 100644
index 0000000..703b9a8
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/level1.go
@@ -0,0 +1,241 @@
+package flate
+
+import (
+ "encoding/binary"
+ "fmt"
+ "math/bits"
+)
+
+// fastEncL1 maintains the table for matches,
+// and the previous byte block for level 1.
+// This is the generic implementation.
+type fastEncL1 struct {
+ fastGen
+ table [tableSize]tableEntry
+}
+
+// Encode uses an algorithm similar to Snappy, with a single table of candidates.
+func (e *fastEncL1) Encode(dst *tokens, src []byte) {
+ const (
+ inputMargin = 12 - 1
+ minNonLiteralBlockSize = 1 + 1 + inputMargin
+ hashBytes = 5
+ )
+ if debugDeflate && e.cur < 0 {
+ panic(fmt.Sprint("e.cur < 0: ", e.cur))
+ }
+
+ // Protect against e.cur wraparound.
+ for e.cur >= bufferReset {
+ if len(e.hist) == 0 {
+ for i := range e.table[:] {
+ e.table[i] = tableEntry{}
+ }
+ e.cur = maxMatchOffset
+ break
+ }
+ // Shift down everything in the table that isn't already too far away.
+ minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
+ for i := range e.table[:] {
+ v := e.table[i].offset
+ if v <= minOff {
+ v = 0
+ } else {
+ v = v - e.cur + maxMatchOffset
+ }
+ e.table[i].offset = v
+ }
+ e.cur = maxMatchOffset
+ }
+
+ s := e.addBlock(src)
+
+ // This check isn't in the Snappy implementation, but there, the caller
+ // instead of the callee handles this case.
+ if len(src) < minNonLiteralBlockSize {
+ // We do not fill the token table.
+ // This will be picked up by caller.
+ dst.n = uint16(len(src))
+ return
+ }
+
+ // Override src
+ src = e.hist
+ nextEmit := s
+
+ // sLimit is when to stop looking for offset/length copies. The inputMargin
+ // lets us use a fast path for emitLiteral in the main loop, while we are
+ // looking for copies.
+ sLimit := int32(len(src) - inputMargin)
+
+ // nextEmit is where in src the next emitLiteral should start from.
+ cv := load6432(src, s)
+
+ for {
+ const skipLog = 5
+ const doEvery = 2
+
+ nextS := s
+ var candidate tableEntry
+ for {
+ nextHash := hashLen(cv, tableBits, hashBytes)
+ candidate = e.table[nextHash]
+ nextS = s + doEvery + (s-nextEmit)>>skipLog
+ if nextS > sLimit {
+ goto emitRemainder
+ }
+
+ now := load6432(src, nextS)
+ e.table[nextHash] = tableEntry{offset: s + e.cur}
+ nextHash = hashLen(now, tableBits, hashBytes)
+
+ offset := s - (candidate.offset - e.cur)
+ if offset < maxMatchOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) {
+ e.table[nextHash] = tableEntry{offset: nextS + e.cur}
+ break
+ }
+
+ // Do one right away...
+ cv = now
+ s = nextS
+ nextS++
+ candidate = e.table[nextHash]
+ now >>= 8
+ e.table[nextHash] = tableEntry{offset: s + e.cur}
+
+ offset = s - (candidate.offset - e.cur)
+ if offset < maxMatchOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) {
+ e.table[nextHash] = tableEntry{offset: nextS + e.cur}
+ break
+ }
+ cv = now
+ s = nextS
+ }
+
+ // A 4-byte match has been found. We'll later see if more than 4 bytes
+ // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
+ // them as literal bytes.
+ for {
+ // Invariant: we have a 4-byte match at s, and no need to emit any
+ // literal bytes prior to s.
+
+ // Extend the 4-byte match as long as possible.
+ t := candidate.offset - e.cur
+ var l = int32(4)
+ if false {
+ l = e.matchlenLong(s+4, t+4, src) + 4
+ } else {
+ // inlined:
+ a := src[s+4:]
+ b := src[t+4:]
+ for len(a) >= 8 {
+ if diff := binary.LittleEndian.Uint64(a) ^ binary.LittleEndian.Uint64(b); diff != 0 {
+ l += int32(bits.TrailingZeros64(diff) >> 3)
+ break
+ }
+ l += 8
+ a = a[8:]
+ b = b[8:]
+ }
+ if len(a) < 8 {
+ b = b[:len(a)]
+ for i := range a {
+ if a[i] != b[i] {
+ break
+ }
+ l++
+ }
+ }
+ }
+
+ // Extend backwards
+ for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
+ s--
+ t--
+ l++
+ }
+ if nextEmit < s {
+ if false {
+ emitLiteral(dst, src[nextEmit:s])
+ } else {
+ for _, v := range src[nextEmit:s] {
+ dst.tokens[dst.n] = token(v)
+ dst.litHist[v]++
+ dst.n++
+ }
+ }
+ }
+
+ // Save the match found
+ if false {
+ dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
+ } else {
+ // Inlined...
+ xoffset := uint32(s - t - baseMatchOffset)
+ xlength := l
+ oc := offsetCode(xoffset)
+ xoffset |= oc << 16
+ for xlength > 0 {
+ xl := xlength
+ if xl > 258 {
+ if xl > 258+baseMatchLength {
+ xl = 258
+ } else {
+ xl = 258 - baseMatchLength
+ }
+ }
+ xlength -= xl
+ xl -= baseMatchLength
+ dst.extraHist[lengthCodes1[uint8(xl)]]++
+ dst.offHist[oc]++
+ dst.tokens[dst.n] = token(matchType | uint32(xl)<<lengthShift | xoffset)
+ dst.n++
+ }
+ }
+ s += l
+ nextEmit = s
+ if nextS >= s {
+ s = nextS + 1
+ }
+ if s >= sLimit {
+ // Index first pair after match end.
+ if int(s+l+8) < len(src) {
+ cv := load6432(src, s)
+ e.table[hashLen(cv, tableBits, hashBytes)] = tableEntry{offset: s + e.cur}
+ }
+ goto emitRemainder
+ }
+
+ // We could immediately start working at s now, but to improve
+ // compression we first update the hash table at s-2 and at s. If
+ // another emitCopy is not our next move, also calculate nextHash
+ // at s+1. At least on GOARCH=amd64, these three hash calculations
+ // are faster as one load64 call (with some shifts) instead of
+ // three load32 calls.
+ x := load6432(src, s-2)
+ o := e.cur + s - 2
+ prevHash := hashLen(x, tableBits, hashBytes)
+ e.table[prevHash] = tableEntry{offset: o}
+ x >>= 16
+ currHash := hashLen(x, tableBits, hashBytes)
+ candidate = e.table[currHash]
+ e.table[currHash] = tableEntry{offset: o + 2}
+
+ offset := s - (candidate.offset - e.cur)
+ if offset > maxMatchOffset || uint32(x) != load3232(src, candidate.offset-e.cur) {
+ cv = x >> 8
+ s++
+ break
+ }
+ }
+ }
+
+emitRemainder:
+ if int(nextEmit) < len(src) {
+ // If nothing was added, don't encode literals.
+ if dst.n == 0 {
+ return
+ }
+ emitLiteral(dst, src[nextEmit:])
+ }
+}
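+
+// The search loop above accelerates over incompressible data: the step to
+// the next candidate grows with the length of the current literal run. With
+// doEvery = 2 and skipLog = 5:
+//
+//    run := s - nextEmit // bytes since the last match
+//    step := 2 + run>>5  // 3 after a 32-byte run, 12 after a 320-byte run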
diff --git a/vendor/github.com/klauspost/compress/flate/level2.go b/vendor/github.com/klauspost/compress/flate/level2.go
new file mode 100644
index 0000000..876dfbe
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/level2.go
@@ -0,0 +1,214 @@
+package flate
+
+import "fmt"
+
+// fastEncL2 maintains the table for matches,
+// and the previous byte block for level 2.
+// This is the generic implementation.
+type fastEncL2 struct {
+ fastGen
+ table [bTableSize]tableEntry
+}
+
+// Encode uses a similar algorithm to level 1, but is capable
+// of matching across blocks, giving better compression at a small slowdown.
+func (e *fastEncL2) Encode(dst *tokens, src []byte) {
+ const (
+ inputMargin = 12 - 1
+ minNonLiteralBlockSize = 1 + 1 + inputMargin
+ hashBytes = 5
+ )
+
+ if debugDeflate && e.cur < 0 {
+ panic(fmt.Sprint("e.cur < 0: ", e.cur))
+ }
+
+ // Protect against e.cur wraparound.
+ for e.cur >= bufferReset {
+ if len(e.hist) == 0 {
+ for i := range e.table[:] {
+ e.table[i] = tableEntry{}
+ }
+ e.cur = maxMatchOffset
+ break
+ }
+ // Shift down everything in the table that isn't already too far away.
+ minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
+ for i := range e.table[:] {
+ v := e.table[i].offset
+ if v <= minOff {
+ v = 0
+ } else {
+ v = v - e.cur + maxMatchOffset
+ }
+ e.table[i].offset = v
+ }
+ e.cur = maxMatchOffset
+ }
+
+ s := e.addBlock(src)
+
+ // This check isn't in the Snappy implementation, but there, the caller
+ // instead of the callee handles this case.
+ if len(src) < minNonLiteralBlockSize {
+ // We do not fill the token table.
+ // This will be picked up by caller.
+ dst.n = uint16(len(src))
+ return
+ }
+
+ // Override src
+ src = e.hist
+ nextEmit := s
+
+ // sLimit is when to stop looking for offset/length copies. The inputMargin
+ // lets us use a fast path for emitLiteral in the main loop, while we are
+ // looking for copies.
+ sLimit := int32(len(src) - inputMargin)
+
+ // nextEmit is where in src the next emitLiteral should start from.
+ cv := load6432(src, s)
+ for {
+ // When should we start skipping if we haven't found matches in a long while.
+ const skipLog = 5
+ const doEvery = 2
+
+ nextS := s
+ var candidate tableEntry
+ for {
+ nextHash := hashLen(cv, bTableBits, hashBytes)
+ s = nextS
+ nextS = s + doEvery + (s-nextEmit)>>skipLog
+ if nextS > sLimit {
+ goto emitRemainder
+ }
+ candidate = e.table[nextHash]
+ now := load6432(src, nextS)
+ e.table[nextHash] = tableEntry{offset: s + e.cur}
+ nextHash = hashLen(now, bTableBits, hashBytes)
+
+ offset := s - (candidate.offset - e.cur)
+ if offset < maxMatchOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) {
+ e.table[nextHash] = tableEntry{offset: nextS + e.cur}
+ break
+ }
+
+ // Do one right away...
+ cv = now
+ s = nextS
+ nextS++
+ candidate = e.table[nextHash]
+ now >>= 8
+ e.table[nextHash] = tableEntry{offset: s + e.cur}
+
+ offset = s - (candidate.offset - e.cur)
+ if offset < maxMatchOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) {
+ break
+ }
+ cv = now
+ }
+
+ // A 4-byte match has been found. We'll later see if more than 4 bytes
+ // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
+ // them as literal bytes.
+
+ // Call emitCopy, and then see if another emitCopy could be our next
+ // move. Repeat until we find no match for the input immediately after
+ // what was consumed by the last emitCopy call.
+ //
+ // If we exit this loop normally then we need to call emitLiteral next,
+ // though we don't yet know how big the literal will be. We handle that
+ // by proceeding to the next iteration of the main loop. We also can
+ // exit this loop via goto if we get close to exhausting the input.
+ for {
+ // Invariant: we have a 4-byte match at s, and no need to emit any
+ // literal bytes prior to s.
+
+ // Extend the 4-byte match as long as possible.
+ t := candidate.offset - e.cur
+ l := e.matchlenLong(s+4, t+4, src) + 4
+
+ // Extend backwards
+ for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
+ s--
+ t--
+ l++
+ }
+ if nextEmit < s {
+ if false {
+ emitLiteral(dst, src[nextEmit:s])
+ } else {
+ for _, v := range src[nextEmit:s] {
+ dst.tokens[dst.n] = token(v)
+ dst.litHist[v]++
+ dst.n++
+ }
+ }
+ }
+
+ dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
+ s += l
+ nextEmit = s
+ if nextS >= s {
+ s = nextS + 1
+ }
+
+ if s >= sLimit {
+ // Index first pair after match end.
+ if int(s+l+8) < len(src) {
+ cv := load6432(src, s)
+ e.table[hashLen(cv, bTableBits, hashBytes)] = tableEntry{offset: s + e.cur}
+ }
+ goto emitRemainder
+ }
+
+ // Store every second hash in-between, but offset by 1.
+ for i := s - l + 2; i < s-5; i += 7 {
+ x := load6432(src, i)
+ nextHash := hashLen(x, bTableBits, hashBytes)
+ e.table[nextHash] = tableEntry{offset: e.cur + i}
+ // Skip one
+ x >>= 16
+ nextHash = hashLen(x, bTableBits, hashBytes)
+ e.table[nextHash] = tableEntry{offset: e.cur + i + 2}
+ // Skip one
+ x >>= 16
+ nextHash = hashLen(x, bTableBits, hashBytes)
+ e.table[nextHash] = tableEntry{offset: e.cur + i + 4}
+ }
+
+ // We could immediately start working at s now, but to improve
+ // compression we first update the hash table at s-2 to s. If
+ // another emitCopy is not our next move, also calculate nextHash
+ // at s+1. At least on GOARCH=amd64, these three hash calculations
+ // are faster as one load64 call (with some shifts) instead of
+ // three load32 calls.
+ x := load6432(src, s-2)
+ o := e.cur + s - 2
+ prevHash := hashLen(x, bTableBits, hashBytes)
+ prevHash2 := hashLen(x>>8, bTableBits, hashBytes)
+ e.table[prevHash] = tableEntry{offset: o}
+ e.table[prevHash2] = tableEntry{offset: o + 1}
+ currHash := hashLen(x>>16, bTableBits, hashBytes)
+ candidate = e.table[currHash]
+ e.table[currHash] = tableEntry{offset: o + 2}
+
+ offset := s - (candidate.offset - e.cur)
+ if offset > maxMatchOffset || uint32(x>>16) != load3232(src, candidate.offset-e.cur) {
+ cv = x >> 24
+ s++
+ break
+ }
+ }
+ }
+
+emitRemainder:
+ if int(nextEmit) < len(src) {
+ // If nothing was added, don't encode literals.
+ if dst.n == 0 {
+ return
+ }
+
+ emitLiteral(dst, src[nextEmit:])
+ }
+}
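+
+// Unlike level 1, level 2 also indexes positions inside a match it has just
+// emitted (three hashes per seven bytes in the loop above), so later matches
+// can anchor inside earlier ones. One iteration, in isolation:
+//
+//    x := load6432(src, i)
+//    e.table[hashLen(x, bTableBits, hashBytes)] = tableEntry{offset: e.cur + i}
+//    e.table[hashLen(x>>16, bTableBits, hashBytes)] = tableEntry{offset: e.cur + i + 2}
+//    e.table[hashLen(x>>32, bTableBits, hashBytes)] = tableEntry{offset: e.cur + i + 4}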
diff --git a/vendor/github.com/klauspost/compress/flate/level3.go b/vendor/github.com/klauspost/compress/flate/level3.go
new file mode 100644
index 0000000..7aa2b72
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/level3.go
@@ -0,0 +1,241 @@
+package flate
+
+import "fmt"
+
+// fastEncL3 is the level 3 encoder, keeping two candidates per hash bucket.
+type fastEncL3 struct {
+ fastGen
+ table [1 << 16]tableEntryPrev
+}
+
+// Encode uses a similar algorithm to level 2, but checks up to two candidates per position.
+func (e *fastEncL3) Encode(dst *tokens, src []byte) {
+ const (
+ inputMargin = 12 - 1
+ minNonLiteralBlockSize = 1 + 1 + inputMargin
+ tableBits = 16
+ tableSize = 1 << tableBits
+ hashBytes = 5
+ )
+
+ if debugDeflate && e.cur < 0 {
+ panic(fmt.Sprint("e.cur < 0: ", e.cur))
+ }
+
+ // Protect against e.cur wraparound.
+ for e.cur >= bufferReset {
+ if len(e.hist) == 0 {
+ for i := range e.table[:] {
+ e.table[i] = tableEntryPrev{}
+ }
+ e.cur = maxMatchOffset
+ break
+ }
+ // Shift down everything in the table that isn't already too far away.
+ minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
+ for i := range e.table[:] {
+ v := e.table[i]
+ if v.Cur.offset <= minOff {
+ v.Cur.offset = 0
+ } else {
+ v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset
+ }
+ if v.Prev.offset <= minOff {
+ v.Prev.offset = 0
+ } else {
+ v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset
+ }
+ e.table[i] = v
+ }
+ e.cur = maxMatchOffset
+ }
+
+ s := e.addBlock(src)
+
+ // Skip if too small.
+ if len(src) < minNonLiteralBlockSize {
+ // We do not fill the token table.
+ // This will be picked up by caller.
+ dst.n = uint16(len(src))
+ return
+ }
+
+ // Override src
+ src = e.hist
+ nextEmit := s
+
+ // sLimit is when to stop looking for offset/length copies. The inputMargin
+ // lets us use a fast path for emitLiteral in the main loop, while we are
+ // looking for copies.
+ sLimit := int32(len(src) - inputMargin)
+
+ // nextEmit is where in src the next emitLiteral should start from.
+ cv := load6432(src, s)
+ for {
+ const skipLog = 7
+ nextS := s
+ var candidate tableEntry
+ for {
+ nextHash := hashLen(cv, tableBits, hashBytes)
+ s = nextS
+ nextS = s + 1 + (s-nextEmit)>>skipLog
+ if nextS > sLimit {
+ goto emitRemainder
+ }
+ candidates := e.table[nextHash]
+ now := load6432(src, nextS)
+
+ // Safe offset distance until s + 4...
+ minOffset := e.cur + s - (maxMatchOffset - 4)
+ e.table[nextHash] = tableEntryPrev{Prev: candidates.Cur, Cur: tableEntry{offset: s + e.cur}}
+
+ // Check both candidates
+ candidate = candidates.Cur
+ if candidate.offset < minOffset {
+ cv = now
+ // Previous will also be invalid, we have nothing.
+ continue
+ }
+
+ if uint32(cv) == load3232(src, candidate.offset-e.cur) {
+ if candidates.Prev.offset < minOffset || uint32(cv) != load3232(src, candidates.Prev.offset-e.cur) {
+ break
+ }
+ // Both match and are valid, pick longest.
+ offset := s - (candidate.offset - e.cur)
+ o2 := s - (candidates.Prev.offset - e.cur)
+ l1, l2 := matchLen(src[s+4:], src[s-offset+4:]), matchLen(src[s+4:], src[s-o2+4:])
+ if l2 > l1 {
+ candidate = candidates.Prev
+ }
+ break
+ } else {
+ // We only check if value mismatches.
+ // Offset will always be invalid in other cases.
+ candidate = candidates.Prev
+ if candidate.offset > minOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) {
+ break
+ }
+ }
+ cv = now
+ }
+
+ // Call emitCopy, and then see if another emitCopy could be our next
+ // move. Repeat until we find no match for the input immediately after
+ // what was consumed by the last emitCopy call.
+ //
+ // If we exit this loop normally then we need to call emitLiteral next,
+ // though we don't yet know how big the literal will be. We handle that
+ // by proceeding to the next iteration of the main loop. We also can
+ // exit this loop via goto if we get close to exhausting the input.
+ for {
+ // Invariant: we have a 4-byte match at s, and no need to emit any
+ // literal bytes prior to s.
+
+ // Extend the 4-byte match as long as possible.
+ //
+ t := candidate.offset - e.cur
+ l := e.matchlenLong(s+4, t+4, src) + 4
+
+ // Extend backwards
+ for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
+ s--
+ t--
+ l++
+ }
+ if nextEmit < s {
+ if false {
+ emitLiteral(dst, src[nextEmit:s])
+ } else {
+ for _, v := range src[nextEmit:s] {
+ dst.tokens[dst.n] = token(v)
+ dst.litHist[v]++
+ dst.n++
+ }
+ }
+ }
+
+ dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
+ s += l
+ nextEmit = s
+ if nextS >= s {
+ s = nextS + 1
+ }
+
+ if s >= sLimit {
+ t += l
+ // Index first pair after match end.
+ if int(t+8) < len(src) && t > 0 {
+ cv = load6432(src, t)
+ nextHash := hashLen(cv, tableBits, hashBytes)
+ e.table[nextHash] = tableEntryPrev{
+ Prev: e.table[nextHash].Cur,
+ Cur: tableEntry{offset: e.cur + t},
+ }
+ }
+ goto emitRemainder
+ }
+
+ // Store a hash at every 6th position in-between.
+ for i := s - l + 2; i < s-5; i += 6 {
+ nextHash := hashLen(load6432(src, i), tableBits, hashBytes)
+ e.table[nextHash] = tableEntryPrev{
+ Prev: e.table[nextHash].Cur,
+ Cur: tableEntry{offset: e.cur + i}}
+ }
+ // We could immediately start working at s now, but to improve
+ // compression we first update the hash table at s-2 to s.
+ x := load6432(src, s-2)
+ prevHash := hashLen(x, tableBits, hashBytes)
+
+ e.table[prevHash] = tableEntryPrev{
+ Prev: e.table[prevHash].Cur,
+ Cur: tableEntry{offset: e.cur + s - 2},
+ }
+ x >>= 8
+ prevHash = hashLen(x, tableBits, hashBytes)
+
+ e.table[prevHash] = tableEntryPrev{
+ Prev: e.table[prevHash].Cur,
+ Cur: tableEntry{offset: e.cur + s - 1},
+ }
+ x >>= 8
+ currHash := hashLen(x, tableBits, hashBytes)
+ candidates := e.table[currHash]
+ cv = x
+ e.table[currHash] = tableEntryPrev{
+ Prev: candidates.Cur,
+ Cur: tableEntry{offset: s + e.cur},
+ }
+
+ // Check both candidates
+ candidate = candidates.Cur
+ minOffset := e.cur + s - (maxMatchOffset - 4)
+
+ if candidate.offset > minOffset {
+ if uint32(cv) == load3232(src, candidate.offset-e.cur) {
+ // Found a match...
+ continue
+ }
+ candidate = candidates.Prev
+ if candidate.offset > minOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) {
+ // Match at prev...
+ continue
+ }
+ }
+ cv = x >> 8
+ s++
+ break
+ }
+ }
+
+emitRemainder:
+ if int(nextEmit) < len(src) {
+ // If nothing was added, don't encode literals.
+ if dst.n == 0 {
+ return
+ }
+
+ emitLiteral(dst, src[nextEmit:])
+ }
+}
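+
+// Level 3's table keeps a two-entry chain per bucket: every insert rotates
+// the current candidate into Prev, so a lookup can validate both and pick
+// the longer match. The rotation, in isolation:
+//
+//    e.table[h] = tableEntryPrev{Prev: e.table[h].Cur, Cur: tableEntry{offset: s + e.cur}}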
diff --git a/vendor/github.com/klauspost/compress/flate/level4.go b/vendor/github.com/klauspost/compress/flate/level4.go
new file mode 100644
index 0000000..23c08b3
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/level4.go
@@ -0,0 +1,221 @@
+package flate
+
+import "fmt"
+
+type fastEncL4 struct {
+ fastGen
+ table [tableSize]tableEntry
+ bTable [tableSize]tableEntry
+}
+
+func (e *fastEncL4) Encode(dst *tokens, src []byte) {
+ const (
+ inputMargin = 12 - 1
+ minNonLiteralBlockSize = 1 + 1 + inputMargin
+ hashShortBytes = 4
+ )
+ if debugDeflate && e.cur < 0 {
+ panic(fmt.Sprint("e.cur < 0: ", e.cur))
+ }
+ // Protect against e.cur wraparound.
+ for e.cur >= bufferReset {
+ if len(e.hist) == 0 {
+ for i := range e.table[:] {
+ e.table[i] = tableEntry{}
+ }
+ for i := range e.bTable[:] {
+ e.bTable[i] = tableEntry{}
+ }
+ e.cur = maxMatchOffset
+ break
+ }
+ // Shift down everything in the table that isn't already too far away.
+ minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
+ for i := range e.table[:] {
+ v := e.table[i].offset
+ if v <= minOff {
+ v = 0
+ } else {
+ v = v - e.cur + maxMatchOffset
+ }
+ e.table[i].offset = v
+ }
+ for i := range e.bTable[:] {
+ v := e.bTable[i].offset
+ if v <= minOff {
+ v = 0
+ } else {
+ v = v - e.cur + maxMatchOffset
+ }
+ e.bTable[i].offset = v
+ }
+ e.cur = maxMatchOffset
+ }
+
+ s := e.addBlock(src)
+
+ // This check isn't in the Snappy implementation, but there, the caller
+ // instead of the callee handles this case.
+ if len(src) < minNonLiteralBlockSize {
+ // We do not fill the token table.
+ // This will be picked up by caller.
+ dst.n = uint16(len(src))
+ return
+ }
+
+ // Override src
+ src = e.hist
+ nextEmit := s
+
+ // sLimit is when to stop looking for offset/length copies. The inputMargin
+ // lets us use a fast path for emitLiteral in the main loop, while we are
+ // looking for copies.
+ sLimit := int32(len(src) - inputMargin)
+
+ // nextEmit is where in src the next emitLiteral should start from.
+ cv := load6432(src, s)
+ for {
+ const skipLog = 6
+ const doEvery = 1
+
+ nextS := s
+ var t int32
+ for {
+ nextHashS := hashLen(cv, tableBits, hashShortBytes)
+ nextHashL := hash7(cv, tableBits)
+
+ s = nextS
+ nextS = s + doEvery + (s-nextEmit)>>skipLog
+ if nextS > sLimit {
+ goto emitRemainder
+ }
+ // Fetch a short+long candidate
+ sCandidate := e.table[nextHashS]
+ lCandidate := e.bTable[nextHashL]
+ next := load6432(src, nextS)
+ entry := tableEntry{offset: s + e.cur}
+ e.table[nextHashS] = entry
+ e.bTable[nextHashL] = entry
+
+ t = lCandidate.offset - e.cur
+ if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.offset-e.cur) {
+ // We got a long match. Use that.
+ break
+ }
+
+ t = sCandidate.offset - e.cur
+ if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) {
+ // Found a 4-byte match...
+ lCandidate = e.bTable[hash7(next, tableBits)]
+
+ // If the next long is a candidate, check if we should use that instead...
+ lOff := nextS - (lCandidate.offset - e.cur)
+ if lOff < maxMatchOffset && load3232(src, lCandidate.offset-e.cur) == uint32(next) {
+ l1, l2 := matchLen(src[s+4:], src[t+4:]), matchLen(src[nextS+4:], src[nextS-lOff+4:])
+ if l2 > l1 {
+ s = nextS
+ t = lCandidate.offset - e.cur
+ }
+ }
+ break
+ }
+ cv = next
+ }
+
+ // A 4-byte match has been found. We'll later see if more than 4 bytes
+ // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
+ // them as literal bytes.
+
+ // Extend the 4-byte match as long as possible.
+ l := e.matchlenLong(s+4, t+4, src) + 4
+
+ // Extend backwards
+ for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
+ s--
+ t--
+ l++
+ }
+ if nextEmit < s {
+ if false {
+ emitLiteral(dst, src[nextEmit:s])
+ } else {
+ for _, v := range src[nextEmit:s] {
+ dst.tokens[dst.n] = token(v)
+ dst.litHist[v]++
+ dst.n++
+ }
+ }
+ }
+ if debugDeflate {
+ if t >= s {
+ panic("s-t")
+ }
+ if (s - t) > maxMatchOffset {
+ panic(fmt.Sprintln("mmo", t))
+ }
+ if l < baseMatchLength {
+ panic("bml")
+ }
+ }
+
+ dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
+ s += l
+ nextEmit = s
+ if nextS >= s {
+ s = nextS + 1
+ }
+
+ if s >= sLimit {
+ // Index first pair after match end.
+ if int(s+8) < len(src) {
+ cv := load6432(src, s)
+ e.table[hashLen(cv, tableBits, hashShortBytes)] = tableEntry{offset: s + e.cur}
+ e.bTable[hash7(cv, tableBits)] = tableEntry{offset: s + e.cur}
+ }
+ goto emitRemainder
+ }
+
+ // Store every 3rd hash in-between
+ if true {
+ i := nextS
+ if i < s-1 {
+ cv := load6432(src, i)
+ t := tableEntry{offset: i + e.cur}
+ t2 := tableEntry{offset: t.offset + 1}
+ e.bTable[hash7(cv, tableBits)] = t
+ e.bTable[hash7(cv>>8, tableBits)] = t2
+ e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2
+
+ i += 3
+ for ; i < s-1; i += 3 {
+ cv := load6432(src, i)
+ t := tableEntry{offset: i + e.cur}
+ t2 := tableEntry{offset: t.offset + 1}
+ e.bTable[hash7(cv, tableBits)] = t
+ e.bTable[hash7(cv>>8, tableBits)] = t2
+ e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2
+ }
+ }
+ }
+
+ // We could immediately start working at s now, but to improve
+ // compression we first update the hash table at s-1 and at s.
+ x := load6432(src, s-1)
+ o := e.cur + s - 1
+ prevHashS := hashLen(x, tableBits, hashShortBytes)
+ prevHashL := hash7(x, tableBits)
+ e.table[prevHashS] = tableEntry{offset: o}
+ e.bTable[prevHashL] = tableEntry{offset: o}
+ cv = x >> 8
+ }
+
+emitRemainder:
+ if int(nextEmit) < len(src) {
+ // If nothing was added, don't encode literals.
+ if dst.n == 0 {
+ return
+ }
+
+ emitLiteral(dst, src[nextEmit:])
+ }
+}
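Level 4 above keys every position into two tables at once: a short 4-byte hash and a long 7-byte hash, with the long candidate preferred when both hit. A minimal sketch of that probe order; the hash constants, table size, and `entry` type are stand-ins for illustration, not the package's actual `hashLen`/`hash7`:

```go
package main

import (
	"encoding/binary"
	"fmt"
)

const tableBits = 15 // illustrative; the real table size is package-internal

type entry struct{ off int32 }

// hash4 and hash7 are stand-ins for the package's hashLen/hash7: they hash
// a 64-bit load down to a table index keyed on 4 and 7 bytes respectively.
func hash4(u uint64) uint32 {
	return uint32(((u << 32) * 2654435761) >> (64 - tableBits))
}

func hash7(u uint64) uint32 {
	const prime7 = 0x9E3779B185EBCA87
	return uint32(((u << 8) * prime7) >> (64 - tableBits))
}

func load64(b []byte, i int) uint64 { return binary.LittleEndian.Uint64(b[i:]) }
func load32(b []byte, i int) uint32 { return binary.LittleEndian.Uint32(b[i:]) }

func main() {
	var short, long [1 << tableBits]entry
	src := []byte("the quick brown fox jumps over the quick brown dog")
	for s := 0; s+8 <= len(src); s++ {
		cv := load64(src, s)
		hs, hl := hash4(cv), hash7(cv)
		// Probe the long candidate first, as the encoder does; a 7-byte
		// hash hit usually means a longer usable match. off==0 doubles as
		// "empty" here; the real encoder avoids that by biasing offsets
		// with e.cur.
		if c := long[hl]; c.off > 0 && load32(src, int(c.off)) == uint32(cv) {
			fmt.Printf("long  candidate: s=%2d t=%2d\n", s, c.off)
		} else if c := short[hs]; c.off > 0 && load32(src, int(c.off)) == uint32(cv) {
			fmt.Printf("short candidate: s=%2d t=%2d\n", s, c.off)
		}
		// Every probed position is stored in both tables.
		short[hs] = entry{off: int32(s)}
		long[hl] = entry{off: int32(s)}
	}
}
```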
diff --git a/vendor/github.com/klauspost/compress/flate/level5.go b/vendor/github.com/klauspost/compress/flate/level5.go
new file mode 100644
index 0000000..1f61ec1
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/level5.go
@@ -0,0 +1,708 @@
+package flate
+
+import "fmt"
+
+type fastEncL5 struct {
+ fastGen
+ table [tableSize]tableEntry
+ bTable [tableSize]tableEntryPrev
+}
+
+func (e *fastEncL5) Encode(dst *tokens, src []byte) {
+ const (
+ inputMargin = 12 - 1
+ minNonLiteralBlockSize = 1 + 1 + inputMargin
+ hashShortBytes = 4
+ )
+ if debugDeflate && e.cur < 0 {
+ panic(fmt.Sprint("e.cur < 0: ", e.cur))
+ }
+
+ // Protect against e.cur wraparound.
+ for e.cur >= bufferReset {
+ if len(e.hist) == 0 {
+ for i := range e.table[:] {
+ e.table[i] = tableEntry{}
+ }
+ for i := range e.bTable[:] {
+ e.bTable[i] = tableEntryPrev{}
+ }
+ e.cur = maxMatchOffset
+ break
+ }
+ // Shift down everything in the table that isn't already too far away.
+ minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
+ for i := range e.table[:] {
+ v := e.table[i].offset
+ if v <= minOff {
+ v = 0
+ } else {
+ v = v - e.cur + maxMatchOffset
+ }
+ e.table[i].offset = v
+ }
+ for i := range e.bTable[:] {
+ v := e.bTable[i]
+ if v.Cur.offset <= minOff {
+ v.Cur.offset = 0
+ v.Prev.offset = 0
+ } else {
+ v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset
+ if v.Prev.offset <= minOff {
+ v.Prev.offset = 0
+ } else {
+ v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset
+ }
+ }
+ e.bTable[i] = v
+ }
+ e.cur = maxMatchOffset
+ }
+
+ s := e.addBlock(src)
+
+ // This check isn't in the Snappy implementation, but there, the caller
+ // instead of the callee handles this case.
+ if len(src) < minNonLiteralBlockSize {
+ // We do not fill the token table.
+ // This will be picked up by caller.
+ dst.n = uint16(len(src))
+ return
+ }
+
+ // Override src
+ src = e.hist
+ nextEmit := s
+
+ // sLimit is when to stop looking for offset/length copies. The inputMargin
+ // lets us use a fast path for emitLiteral in the main loop, while we are
+ // looking for copies.
+ sLimit := int32(len(src) - inputMargin)
+
+ // nextEmit is where in src the next emitLiteral should start from.
+ cv := load6432(src, s)
+ for {
+ const skipLog = 6
+ const doEvery = 1
+
+ nextS := s
+ var l int32
+ var t int32
+ for {
+ nextHashS := hashLen(cv, tableBits, hashShortBytes)
+ nextHashL := hash7(cv, tableBits)
+
+ s = nextS
+ nextS = s + doEvery + (s-nextEmit)>>skipLog
+ if nextS > sLimit {
+ goto emitRemainder
+ }
+ // Fetch a short+long candidate
+ sCandidate := e.table[nextHashS]
+ lCandidate := e.bTable[nextHashL]
+ next := load6432(src, nextS)
+ entry := tableEntry{offset: s + e.cur}
+ e.table[nextHashS] = entry
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = entry, eLong.Cur
+
+ nextHashS = hashLen(next, tableBits, hashShortBytes)
+ nextHashL = hash7(next, tableBits)
+
+ t = lCandidate.Cur.offset - e.cur
+ if s-t < maxMatchOffset {
+ if uint32(cv) == load3232(src, lCandidate.Cur.offset-e.cur) {
+ // Store the next match
+ e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
+
+ t2 := lCandidate.Prev.offset - e.cur
+ if s-t2 < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
+ l = e.matchlen(s+4, t+4, src) + 4
+ ml1 := e.matchlen(s+4, t2+4, src) + 4
+ if ml1 > l {
+ t = t2
+ l = ml1
+ break
+ }
+ }
+ break
+ }
+ t = lCandidate.Prev.offset - e.cur
+ if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
+ // Store the next match
+ e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
+ break
+ }
+ }
+
+ t = sCandidate.offset - e.cur
+ if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) {
+ // Found a 4-byte match...
+ l = e.matchlen(s+4, t+4, src) + 4
+ lCandidate = e.bTable[nextHashL]
+ // Store the next match
+
+ e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
+
+ // If the next long is a candidate, use that...
+ t2 := lCandidate.Cur.offset - e.cur
+ if nextS-t2 < maxMatchOffset {
+ if load3232(src, lCandidate.Cur.offset-e.cur) == uint32(next) {
+ ml := e.matchlen(nextS+4, t2+4, src) + 4
+ if ml > l {
+ t = t2
+ s = nextS
+ l = ml
+ break
+ }
+ }
+ // If the previous long is a candidate, use that...
+ t2 = lCandidate.Prev.offset - e.cur
+ if nextS-t2 < maxMatchOffset && load3232(src, lCandidate.Prev.offset-e.cur) == uint32(next) {
+ ml := e.matchlen(nextS+4, t2+4, src) + 4
+ if ml > l {
+ t = t2
+ s = nextS
+ l = ml
+ break
+ }
+ }
+ }
+ break
+ }
+ cv = next
+ }
+
+ // A 4-byte match has been found. We'll later see if more than 4 bytes
+ // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
+ // them as literal bytes.
+
+ if l == 0 {
+ // Extend the 4-byte match as long as possible.
+ l = e.matchlenLong(s+4, t+4, src) + 4
+ } else if l == maxMatchLength {
+ l += e.matchlenLong(s+l, t+l, src)
+ }
+
+ // Try to locate a better match by checking the end of the best match...
+ if sAt := s + l; l < 30 && sAt < sLimit {
+ // Allow some bytes at the beginning to mismatch.
+ // The sweet spot is 2-3 bytes, depending on the input;
+ // 3 is only a little better when it helps, and sometimes a lot worse.
+ // The skipped bytes are retested when extending the match backwards,
+ // and are still picked up as part of the match if they agree.
+ const skipBeginning = 2
+ eLong := e.bTable[hash7(load6432(src, sAt), tableBits)].Cur.offset
+ t2 := eLong - e.cur - l + skipBeginning
+ s2 := s + skipBeginning
+ off := s2 - t2
+ if t2 >= 0 && off < maxMatchOffset && off > 0 {
+ if l2 := e.matchlenLong(s2, t2, src); l2 > l {
+ t = t2
+ l = l2
+ s = s2
+ }
+ }
+ }
+
+ // Extend backwards
+ for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
+ s--
+ t--
+ l++
+ }
+ if nextEmit < s {
+ if false {
+ emitLiteral(dst, src[nextEmit:s])
+ } else {
+ for _, v := range src[nextEmit:s] {
+ dst.tokens[dst.n] = token(v)
+ dst.litHist[v]++
+ dst.n++
+ }
+ }
+ }
+ if debugDeflate {
+ if t >= s {
+ panic(fmt.Sprintln("s-t", s, t))
+ }
+ if (s - t) > maxMatchOffset {
+ panic(fmt.Sprintln("mmo", s-t))
+ }
+ if l < baseMatchLength {
+ panic("bml")
+ }
+ }
+
+ dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
+ s += l
+ nextEmit = s
+ if nextS >= s {
+ s = nextS + 1
+ }
+
+ if s >= sLimit {
+ goto emitRemainder
+ }
+
+ // Store every 3rd hash in-between.
+ if true {
+ const hashEvery = 3
+ i := s - l + 1
+ if i < s-1 {
+ cv := load6432(src, i)
+ t := tableEntry{offset: i + e.cur}
+ e.table[hashLen(cv, tableBits, hashShortBytes)] = t
+ eLong := &e.bTable[hash7(cv, tableBits)]
+ eLong.Cur, eLong.Prev = t, eLong.Cur
+
+ // Do a long entry at i+1
+ cv >>= 8
+ t = tableEntry{offset: t.offset + 1}
+ eLong = &e.bTable[hash7(cv, tableBits)]
+ eLong.Cur, eLong.Prev = t, eLong.Cur
+
+ // We only have enough bits for a short entry at i+2
+ cv >>= 8
+ t = tableEntry{offset: t.offset + 1}
+ e.table[hashLen(cv, tableBits, hashShortBytes)] = t
+
+ // Skip one - otherwise we risk hitting 's'
+ i += 4
+ for ; i < s-1; i += hashEvery {
+ cv := load6432(src, i)
+ t := tableEntry{offset: i + e.cur}
+ t2 := tableEntry{offset: t.offset + 1}
+ eLong := &e.bTable[hash7(cv, tableBits)]
+ eLong.Cur, eLong.Prev = t, eLong.Cur
+ e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2
+ }
+ }
+ }
+
+ // We could immediately start working at s now, but to improve
+ // compression we first update the hash table at s-1 and at s.
+ x := load6432(src, s-1)
+ o := e.cur + s - 1
+ prevHashS := hashLen(x, tableBits, hashShortBytes)
+ prevHashL := hash7(x, tableBits)
+ e.table[prevHashS] = tableEntry{offset: o}
+ eLong := &e.bTable[prevHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: o}, eLong.Cur
+ cv = x >> 8
+ }
+
+emitRemainder:
+ if int(nextEmit) < len(src) {
+ // If nothing was added, don't encode literals.
+ if dst.n == 0 {
+ return
+ }
+
+ emitLiteral(dst, src[nextEmit:])
+ }
+}
+
+// fastEncL5Window is a level 5 encoder,
+// but with a custom window size.
+type fastEncL5Window struct {
+ hist []byte
+ cur int32
+ maxOffset int32
+ table [tableSize]tableEntry
+ bTable [tableSize]tableEntryPrev
+}
+
+func (e *fastEncL5Window) Encode(dst *tokens, src []byte) {
+ const (
+ inputMargin = 12 - 1
+ minNonLiteralBlockSize = 1 + 1 + inputMargin
+ hashShortBytes = 4
+ )
+ maxMatchOffset := e.maxOffset
+ if debugDeflate && e.cur < 0 {
+ panic(fmt.Sprint("e.cur < 0: ", e.cur))
+ }
+
+ // Protect against e.cur wraparound.
+ for e.cur >= bufferReset {
+ if len(e.hist) == 0 {
+ for i := range e.table[:] {
+ e.table[i] = tableEntry{}
+ }
+ for i := range e.bTable[:] {
+ e.bTable[i] = tableEntryPrev{}
+ }
+ e.cur = maxMatchOffset
+ break
+ }
+ // Shift down everything in the table that isn't already too far away.
+ minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
+ for i := range e.table[:] {
+ v := e.table[i].offset
+ if v <= minOff {
+ v = 0
+ } else {
+ v = v - e.cur + maxMatchOffset
+ }
+ e.table[i].offset = v
+ }
+ for i := range e.bTable[:] {
+ v := e.bTable[i]
+ if v.Cur.offset <= minOff {
+ v.Cur.offset = 0
+ v.Prev.offset = 0
+ } else {
+ v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset
+ if v.Prev.offset <= minOff {
+ v.Prev.offset = 0
+ } else {
+ v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset
+ }
+ }
+ e.bTable[i] = v
+ }
+ e.cur = maxMatchOffset
+ }
+
+ s := e.addBlock(src)
+
+ // This check isn't in the Snappy implementation, but there, the caller
+ // instead of the callee handles this case.
+ if len(src) < minNonLiteralBlockSize {
+ // We do not fill the token table.
+ // This will be picked up by caller.
+ dst.n = uint16(len(src))
+ return
+ }
+
+ // Override src
+ src = e.hist
+ nextEmit := s
+
+ // sLimit is when to stop looking for offset/length copies. The inputMargin
+ // lets us use a fast path for emitLiteral in the main loop, while we are
+ // looking for copies.
+ sLimit := int32(len(src) - inputMargin)
+
+ // nextEmit is where in src the next emitLiteral should start from.
+ cv := load6432(src, s)
+ for {
+ const skipLog = 6
+ const doEvery = 1
+
+ nextS := s
+ var l int32
+ var t int32
+ for {
+ nextHashS := hashLen(cv, tableBits, hashShortBytes)
+ nextHashL := hash7(cv, tableBits)
+
+ s = nextS
+ nextS = s + doEvery + (s-nextEmit)>>skipLog
+ if nextS > sLimit {
+ goto emitRemainder
+ }
+ // Fetch a short+long candidate
+ sCandidate := e.table[nextHashS]
+ lCandidate := e.bTable[nextHashL]
+ next := load6432(src, nextS)
+ entry := tableEntry{offset: s + e.cur}
+ e.table[nextHashS] = entry
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = entry, eLong.Cur
+
+ nextHashS = hashLen(next, tableBits, hashShortBytes)
+ nextHashL = hash7(next, tableBits)
+
+ t = lCandidate.Cur.offset - e.cur
+ if s-t < maxMatchOffset {
+ if uint32(cv) == load3232(src, lCandidate.Cur.offset-e.cur) {
+ // Store the next match
+ e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
+
+ t2 := lCandidate.Prev.offset - e.cur
+ if s-t2 < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
+ l = e.matchlen(s+4, t+4, src) + 4
+ ml1 := e.matchlen(s+4, t2+4, src) + 4
+ if ml1 > l {
+ t = t2
+ l = ml1
+ break
+ }
+ }
+ break
+ }
+ t = lCandidate.Prev.offset - e.cur
+ if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
+ // Store the next match
+ e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
+ break
+ }
+ }
+
+ t = sCandidate.offset - e.cur
+ if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) {
+ // Found a 4-byte match...
+ l = e.matchlen(s+4, t+4, src) + 4
+ lCandidate = e.bTable[nextHashL]
+ // Store the next match
+
+ e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
+
+ // If the next long is a candidate, use that...
+ t2 := lCandidate.Cur.offset - e.cur
+ if nextS-t2 < maxMatchOffset {
+ if load3232(src, lCandidate.Cur.offset-e.cur) == uint32(next) {
+ ml := e.matchlen(nextS+4, t2+4, src) + 4
+ if ml > l {
+ t = t2
+ s = nextS
+ l = ml
+ break
+ }
+ }
+ // If the previous long is a candidate, use that...
+ t2 = lCandidate.Prev.offset - e.cur
+ if nextS-t2 < maxMatchOffset && load3232(src, lCandidate.Prev.offset-e.cur) == uint32(next) {
+ ml := e.matchlen(nextS+4, t2+4, src) + 4
+ if ml > l {
+ t = t2
+ s = nextS
+ l = ml
+ break
+ }
+ }
+ }
+ break
+ }
+ cv = next
+ }
+
+ // A 4-byte match has been found. We'll later see if more than 4 bytes
+ // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
+ // them as literal bytes.
+
+ if l == 0 {
+ // Extend the 4-byte match as long as possible.
+ l = e.matchlenLong(s+4, t+4, src) + 4
+ } else if l == maxMatchLength {
+ l += e.matchlenLong(s+l, t+l, src)
+ }
+
+ // Try to locate a better match by checking the end of the best match...
+ if sAt := s + l; l < 30 && sAt < sLimit {
+ // Allow some bytes at the beginning to mismatch.
+ // The sweet spot is 2-3 bytes, depending on the input;
+ // 3 is only a little better when it helps, and sometimes a lot worse.
+ // The skipped bytes are retested when extending the match backwards,
+ // and are still picked up as part of the match if they agree.
+ const skipBeginning = 2
+ eLong := e.bTable[hash7(load6432(src, sAt), tableBits)].Cur.offset
+ t2 := eLong - e.cur - l + skipBeginning
+ s2 := s + skipBeginning
+ off := s2 - t2
+ if t2 >= 0 && off < maxMatchOffset && off > 0 {
+ if l2 := e.matchlenLong(s2, t2, src); l2 > l {
+ t = t2
+ l = l2
+ s = s2
+ }
+ }
+ }
+
+ // Extend backwards
+ for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
+ s--
+ t--
+ l++
+ }
+ if nextEmit < s {
+ if false {
+ emitLiteral(dst, src[nextEmit:s])
+ } else {
+ for _, v := range src[nextEmit:s] {
+ dst.tokens[dst.n] = token(v)
+ dst.litHist[v]++
+ dst.n++
+ }
+ }
+ }
+ if debugDeflate {
+ if t >= s {
+ panic(fmt.Sprintln("s-t", s, t))
+ }
+ if (s - t) > maxMatchOffset {
+ panic(fmt.Sprintln("mmo", s-t))
+ }
+ if l < baseMatchLength {
+ panic("bml")
+ }
+ }
+
+ dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
+ s += l
+ nextEmit = s
+ if nextS >= s {
+ s = nextS + 1
+ }
+
+ if s >= sLimit {
+ goto emitRemainder
+ }
+
+ // Store every 3rd hash in-between.
+ if true {
+ const hashEvery = 3
+ i := s - l + 1
+ if i < s-1 {
+ cv := load6432(src, i)
+ t := tableEntry{offset: i + e.cur}
+ e.table[hashLen(cv, tableBits, hashShortBytes)] = t
+ eLong := &e.bTable[hash7(cv, tableBits)]
+ eLong.Cur, eLong.Prev = t, eLong.Cur
+
+ // Do a long entry at i+1
+ cv >>= 8
+ t = tableEntry{offset: t.offset + 1}
+ eLong = &e.bTable[hash7(cv, tableBits)]
+ eLong.Cur, eLong.Prev = t, eLong.Cur
+
+ // We only have enough bits for a short entry at i+2
+ cv >>= 8
+ t = tableEntry{offset: t.offset + 1}
+ e.table[hashLen(cv, tableBits, hashShortBytes)] = t
+
+ // Skip one - otherwise we risk hitting 's'
+ i += 4
+ for ; i < s-1; i += hashEvery {
+ cv := load6432(src, i)
+ t := tableEntry{offset: i + e.cur}
+ t2 := tableEntry{offset: t.offset + 1}
+ eLong := &e.bTable[hash7(cv, tableBits)]
+ eLong.Cur, eLong.Prev = t, eLong.Cur
+ e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2
+ }
+ }
+ }
+
+ // We could immediately start working at s now, but to improve
+ // compression we first update the hash table at s-1 and at s.
+ x := load6432(src, s-1)
+ o := e.cur + s - 1
+ prevHashS := hashLen(x, tableBits, hashShortBytes)
+ prevHashL := hash7(x, tableBits)
+ e.table[prevHashS] = tableEntry{offset: o}
+ eLong := &e.bTable[prevHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: o}, eLong.Cur
+ cv = x >> 8
+ }
+
+emitRemainder:
+ if int(nextEmit) < len(src) {
+ // If nothing was added, don't encode literals.
+ if dst.n == 0 {
+ return
+ }
+
+ emitLiteral(dst, src[nextEmit:])
+ }
+}
+
+// Reset the encoding table.
+func (e *fastEncL5Window) Reset() {
+ // We keep the same allocs, since we are compressing the same block sizes.
+ if cap(e.hist) < allocHistory {
+ e.hist = make([]byte, 0, allocHistory)
+ }
+
+ // We offset current position so everything will be out of reach.
+ // If we are above the buffer reset it will be cleared anyway since len(hist) == 0.
+ if e.cur <= int32(bufferReset) {
+ e.cur += e.maxOffset + int32(len(e.hist))
+ }
+ e.hist = e.hist[:0]
+}
+
+func (e *fastEncL5Window) addBlock(src []byte) int32 {
+ // check if we have space already
+ maxMatchOffset := e.maxOffset
+
+ if len(e.hist)+len(src) > cap(e.hist) {
+ if cap(e.hist) == 0 {
+ e.hist = make([]byte, 0, allocHistory)
+ } else {
+ if cap(e.hist) < int(maxMatchOffset*2) {
+ panic("unexpected buffer size")
+ }
+ // Move down
+ offset := int32(len(e.hist)) - maxMatchOffset
+ copy(e.hist[0:maxMatchOffset], e.hist[offset:])
+ e.cur += offset
+ e.hist = e.hist[:maxMatchOffset]
+ }
+ }
+ s := int32(len(e.hist))
+ e.hist = append(e.hist, src...)
+ return s
+}
+
+// matchlen will return the match length between offsets s and t in src.
+// The maximum length returned is maxMatchLength - 4.
+// It is assumed that s > t, that t >= 0 and that s < len(src).
+func (e *fastEncL5Window) matchlen(s, t int32, src []byte) int32 {
+ if debugDecode {
+ if t >= s {
+ panic(fmt.Sprint("t >=s:", t, s))
+ }
+ if int(s) >= len(src) {
+ panic(fmt.Sprint("s >= len(src):", s, len(src)))
+ }
+ if t < 0 {
+ panic(fmt.Sprint("t < 0:", t))
+ }
+ if s-t > e.maxOffset {
+ panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchOffset (", maxMatchOffset, ")"))
+ }
+ }
+ s1 := int(s) + maxMatchLength - 4
+ if s1 > len(src) {
+ s1 = len(src)
+ }
+
+ // Extend the match to be as long as possible.
+ return int32(matchLen(src[s:s1], src[t:]))
+}
+
+// matchlenLong will return the match length between offsets s and t in src.
+// It is assumed that s > t, that t >= 0 and that s < len(src).
+func (e *fastEncL5Window) matchlenLong(s, t int32, src []byte) int32 {
+ if debugDeflate {
+ if t >= s {
+ panic(fmt.Sprint("t >=s:", t, s))
+ }
+ if int(s) >= len(src) {
+ panic(fmt.Sprint("s >= len(src):", s, len(src)))
+ }
+ if t < 0 {
+ panic(fmt.Sprint("t < 0:", t))
+ }
+ if s-t > e.maxOffset {
+ panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchOffset (", maxMatchOffset, ")"))
+ }
+ }
+ // Extend the match to be as long as possible.
+ return int32(matchLen(src[s:], src[t:]))
+}
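Level 5 deepens the long-hash buckets to two entries (`tableEntryPrev`), and every insert shifts the old head into `Prev` with a single tuple assignment. A toy version of just that bucket discipline, assuming nothing beyond what the diff shows:

```go
package main

import "fmt"

type tableEntry struct{ offset int32 }

// tableEntryPrev mirrors the two-deep bucket used by levels 5 and 6:
// Cur is the most recent position for this hash, Prev the one before it.
type tableEntryPrev struct {
	Cur  tableEntry
	Prev tableEntry
}

func main() {
	var bucket tableEntryPrev
	for _, off := range []int32{10, 42, 99} {
		// One tuple assignment both stores the new head and demotes the
		// old head to Prev, with no temporary needed.
		bucket.Cur, bucket.Prev = tableEntry{offset: off}, bucket.Cur
		fmt.Printf("Cur=%d Prev=%d\n", bucket.Cur.offset, bucket.Prev.offset)
	}
	// Output:
	// Cur=10 Prev=0
	// Cur=42 Prev=10
	// Cur=99 Prev=42
}
```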
diff --git a/vendor/github.com/klauspost/compress/flate/level6.go b/vendor/github.com/klauspost/compress/flate/level6.go
new file mode 100644
index 0000000..f1e9d98
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/level6.go
@@ -0,0 +1,325 @@
+package flate
+
+import "fmt"
+
+type fastEncL6 struct {
+ fastGen
+ table [tableSize]tableEntry
+ bTable [tableSize]tableEntryPrev
+}
+
+func (e *fastEncL6) Encode(dst *tokens, src []byte) {
+ const (
+ inputMargin = 12 - 1
+ minNonLiteralBlockSize = 1 + 1 + inputMargin
+ hashShortBytes = 4
+ )
+ if debugDeflate && e.cur < 0 {
+ panic(fmt.Sprint("e.cur < 0: ", e.cur))
+ }
+
+ // Protect against e.cur wraparound.
+ for e.cur >= bufferReset {
+ if len(e.hist) == 0 {
+ for i := range e.table[:] {
+ e.table[i] = tableEntry{}
+ }
+ for i := range e.bTable[:] {
+ e.bTable[i] = tableEntryPrev{}
+ }
+ e.cur = maxMatchOffset
+ break
+ }
+ // Shift down everything in the table that isn't already too far away.
+ minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
+ for i := range e.table[:] {
+ v := e.table[i].offset
+ if v <= minOff {
+ v = 0
+ } else {
+ v = v - e.cur + maxMatchOffset
+ }
+ e.table[i].offset = v
+ }
+ for i := range e.bTable[:] {
+ v := e.bTable[i]
+ if v.Cur.offset <= minOff {
+ v.Cur.offset = 0
+ v.Prev.offset = 0
+ } else {
+ v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset
+ if v.Prev.offset <= minOff {
+ v.Prev.offset = 0
+ } else {
+ v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset
+ }
+ }
+ e.bTable[i] = v
+ }
+ e.cur = maxMatchOffset
+ }
+
+ s := e.addBlock(src)
+
+ // This check isn't in the Snappy implementation, but there, the caller
+ // instead of the callee handles this case.
+ if len(src) < minNonLiteralBlockSize {
+ // We do not fill the token table.
+ // This will be picked up by caller.
+ dst.n = uint16(len(src))
+ return
+ }
+
+ // Override src
+ src = e.hist
+ nextEmit := s
+
+ // sLimit is when to stop looking for offset/length copies. The inputMargin
+ // lets us use a fast path for emitLiteral in the main loop, while we are
+ // looking for copies.
+ sLimit := int32(len(src) - inputMargin)
+
+ // nextEmit is where in src the next emitLiteral should start from.
+ cv := load6432(src, s)
+ // repeat holds the last match distance; it MUST be > 0 and within range.
+ repeat := int32(1)
+ for {
+ const skipLog = 7
+ const doEvery = 1
+
+ nextS := s
+ var l int32
+ var t int32
+ for {
+ nextHashS := hashLen(cv, tableBits, hashShortBytes)
+ nextHashL := hash7(cv, tableBits)
+ s = nextS
+ nextS = s + doEvery + (s-nextEmit)>>skipLog
+ if nextS > sLimit {
+ goto emitRemainder
+ }
+ // Fetch a short+long candidate
+ sCandidate := e.table[nextHashS]
+ lCandidate := e.bTable[nextHashL]
+ next := load6432(src, nextS)
+ entry := tableEntry{offset: s + e.cur}
+ e.table[nextHashS] = entry
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = entry, eLong.Cur
+
+ // Calculate hashes of 'next'
+ nextHashS = hashLen(next, tableBits, hashShortBytes)
+ nextHashL = hash7(next, tableBits)
+
+ t = lCandidate.Cur.offset - e.cur
+ if s-t < maxMatchOffset {
+ if uint32(cv) == load3232(src, lCandidate.Cur.offset-e.cur) {
+ // Long candidate matches at least 4 bytes.
+
+ // Store the next match
+ e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
+
+ // Check the previous long candidate as well.
+ t2 := lCandidate.Prev.offset - e.cur
+ if s-t2 < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
+ l = e.matchlen(s+4, t+4, src) + 4
+ ml1 := e.matchlen(s+4, t2+4, src) + 4
+ if ml1 > l {
+ t = t2
+ l = ml1
+ break
+ }
+ }
+ break
+ }
+ // Current value did not match, but check if previous long value does.
+ t = lCandidate.Prev.offset - e.cur
+ if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
+ // Store the next match
+ e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
+ break
+ }
+ }
+
+ t = sCandidate.offset - e.cur
+ if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) {
+ // Found a 4-byte match...
+ l = e.matchlen(s+4, t+4, src) + 4
+
+ // Look up next long candidate (at nextS)
+ lCandidate = e.bTable[nextHashL]
+
+ // Store the next match
+ e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
+ eLong := &e.bTable[nextHashL]
+ eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
+
+ // Check repeat at s + repOff
+ const repOff = 1
+ t2 := s - repeat + repOff
+ if load3232(src, t2) == uint32(cv>>(8*repOff)) {
+ ml := e.matchlen(s+4+repOff, t2+4, src) + 4
+ if ml > l {
+ t = t2
+ l = ml
+ s += repOff
+ // Not worth checking more.
+ break
+ }
+ }
+
+ // If the next long is a candidate, use that...
+ t2 = lCandidate.Cur.offset - e.cur
+ if nextS-t2 < maxMatchOffset {
+ if load3232(src, lCandidate.Cur.offset-e.cur) == uint32(next) {
+ ml := e.matchlen(nextS+4, t2+4, src) + 4
+ if ml > l {
+ t = t2
+ s = nextS
+ l = ml
+ // This is ok, but check previous as well.
+ }
+ }
+ // If the previous long is a candidate, use that...
+ t2 = lCandidate.Prev.offset - e.cur
+ if nextS-t2 < maxMatchOffset && load3232(src, lCandidate.Prev.offset-e.cur) == uint32(next) {
+ ml := e.matchlen(nextS+4, t2+4, src) + 4
+ if ml > l {
+ t = t2
+ s = nextS
+ l = ml
+ break
+ }
+ }
+ }
+ break
+ }
+ cv = next
+ }
+
+ // A 4-byte match has been found. We'll later see if more than 4 bytes
+ // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
+ // them as literal bytes.
+
+ // Extend the 4-byte match as long as possible.
+ if l == 0 {
+ l = e.matchlenLong(s+4, t+4, src) + 4
+ } else if l == maxMatchLength {
+ l += e.matchlenLong(s+l, t+l, src)
+ }
+
+ // Try to locate a better match by checking the end-of-match...
+ if sAt := s + l; sAt < sLimit {
+ // Allow some bytes at the beginning to mismatch.
+ // The sweet spot is 2-3 bytes, depending on the input;
+ // 3 is only a little better when it helps, and sometimes a lot worse.
+ // The skipped bytes are retested when extending the match backwards,
+ // and are still picked up as part of the match if they agree.
+ const skipBeginning = 2
+ eLong := &e.bTable[hash7(load6432(src, sAt), tableBits)]
+ // Test current
+ t2 := eLong.Cur.offset - e.cur - l + skipBeginning
+ s2 := s + skipBeginning
+ off := s2 - t2
+ if off < maxMatchOffset {
+ if off > 0 && t2 >= 0 {
+ if l2 := e.matchlenLong(s2, t2, src); l2 > l {
+ t = t2
+ l = l2
+ s = s2
+ }
+ }
+ // Test the previous entry as well:
+ t2 = eLong.Prev.offset - e.cur - l + skipBeginning
+ off := s2 - t2
+ if off > 0 && off < maxMatchOffset && t2 >= 0 {
+ if l2 := e.matchlenLong(s2, t2, src); l2 > l {
+ t = t2
+ l = l2
+ s = s2
+ }
+ }
+ }
+ }
+
+ // Extend backwards
+ for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
+ s--
+ t--
+ l++
+ }
+ if nextEmit < s {
+ if false {
+ emitLiteral(dst, src[nextEmit:s])
+ } else {
+ for _, v := range src[nextEmit:s] {
+ dst.tokens[dst.n] = token(v)
+ dst.litHist[v]++
+ dst.n++
+ }
+ }
+ }
+ if false {
+ if t >= s {
+ panic(fmt.Sprintln("s-t", s, t))
+ }
+ if (s - t) > maxMatchOffset {
+ panic(fmt.Sprintln("mmo", s-t))
+ }
+ if l < baseMatchLength {
+ panic("bml")
+ }
+ }
+
+ dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
+ repeat = s - t
+ s += l
+ nextEmit = s
+ if nextS >= s {
+ s = nextS + 1
+ }
+
+ if s >= sLimit {
+ // Index after match end.
+ for i := nextS + 1; i < int32(len(src))-8; i += 2 {
+ cv := load6432(src, i)
+ e.table[hashLen(cv, tableBits, hashShortBytes)] = tableEntry{offset: i + e.cur}
+ eLong := &e.bTable[hash7(cv, tableBits)]
+ eLong.Cur, eLong.Prev = tableEntry{offset: i + e.cur}, eLong.Cur
+ }
+ goto emitRemainder
+ }
+
+ // Store every long hash in-between and every second short.
+ if true {
+ for i := nextS + 1; i < s-1; i += 2 {
+ cv := load6432(src, i)
+ t := tableEntry{offset: i + e.cur}
+ t2 := tableEntry{offset: t.offset + 1}
+ eLong := &e.bTable[hash7(cv, tableBits)]
+ eLong2 := &e.bTable[hash7(cv>>8, tableBits)]
+ e.table[hashLen(cv, tableBits, hashShortBytes)] = t
+ eLong.Cur, eLong.Prev = t, eLong.Cur
+ eLong2.Cur, eLong2.Prev = t2, eLong2.Cur
+ }
+ }
+
+ // We could immediately start working at s now, but to improve
+ // compression we first update the hash table at s-1 and at s.
+ cv = load6432(src, s)
+ }
+
+emitRemainder:
+ if int(nextEmit) < len(src) {
+ // If nothing was added, don't encode literals.
+ if dst.n == 0 {
+ return
+ }
+
+ emitLiteral(dst, src[nextEmit:])
+ }
+}
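Level 6 additionally remembers the previous match distance in `repeat` and probes `s - repeat + repOff` directly after a short-hash hit, on the theory that data matching at one distance tends to keep matching at it. A small, self-contained illustration of why that check is cheap and often pays off (the byte layout is invented for the example):

```go
package main

import "fmt"

func main() {
	// Three copies of the same 8-byte record: once a match at distance 8
	// is found, the next match is very likely at distance 8 too, so
	// checking the remembered offset first can skip a hash probe.
	src := []byte("ABCDEFGHABCDEFGHABCDEFGH")
	repeat := 8 // distance of the previous match
	s := 16     // current position
	t := s - repeat
	n := 0
	for s+n < len(src) && src[t+n] == src[s+n] {
		n++
	}
	fmt.Printf("repeat candidate at distance %d matches %d bytes\n", repeat, n)
	// Output: repeat candidate at distance 8 matches 8 bytes
}
```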
diff --git a/vendor/github.com/klauspost/compress/flate/matchlen_amd64.go b/vendor/github.com/klauspost/compress/flate/matchlen_amd64.go
new file mode 100644
index 0000000..4bd3885
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/matchlen_amd64.go
@@ -0,0 +1,16 @@
+//go:build amd64 && !appengine && !noasm && gc
+// +build amd64,!appengine,!noasm,gc
+
+// Copyright 2019+ Klaus Post. All rights reserved.
+// License information can be found in the LICENSE file.
+
+package flate
+
+// matchLen returns how many bytes match in a and b
+//
+// It assumes that:
+//
+// len(a) <= len(b) and len(a) > 0
+//
+//go:noescape
+func matchLen(a []byte, b []byte) int
diff --git a/vendor/github.com/klauspost/compress/flate/matchlen_amd64.s b/vendor/github.com/klauspost/compress/flate/matchlen_amd64.s
new file mode 100644
index 0000000..9a7655c
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/matchlen_amd64.s
@@ -0,0 +1,68 @@
+// Copied from S2 implementation.
+
+//go:build !appengine && !noasm && gc
+
+#include "textflag.h"
+
+// func matchLen(a []byte, b []byte) int
+// Requires: BMI
+TEXT ·matchLen(SB), NOSPLIT, $0-56
+ MOVQ a_base+0(FP), AX
+ MOVQ b_base+24(FP), CX
+ MOVQ a_len+8(FP), DX
+
+ // matchLen
+ XORL SI, SI
+ CMPL DX, $0x08
+ JB matchlen_match4_standalone
+
+matchlen_loopback_standalone:
+ MOVQ (AX)(SI*1), BX
+ XORQ (CX)(SI*1), BX
+ TESTQ BX, BX
+ JZ matchlen_loop_standalone
+
+#ifdef GOAMD64_v3
+ TZCNTQ BX, BX
+#else
+ BSFQ BX, BX
+#endif
+ SARQ $0x03, BX
+ LEAL (SI)(BX*1), SI
+ JMP gen_match_len_end
+
+matchlen_loop_standalone:
+ LEAL -8(DX), DX
+ LEAL 8(SI), SI
+ CMPL DX, $0x08
+ JAE matchlen_loopback_standalone
+
+matchlen_match4_standalone:
+ CMPL DX, $0x04
+ JB matchlen_match2_standalone
+ MOVL (AX)(SI*1), BX
+ CMPL (CX)(SI*1), BX
+ JNE matchlen_match2_standalone
+ LEAL -4(DX), DX
+ LEAL 4(SI), SI
+
+matchlen_match2_standalone:
+ CMPL DX, $0x02
+ JB matchlen_match1_standalone
+ MOVW (AX)(SI*1), BX
+ CMPW (CX)(SI*1), BX
+ JNE matchlen_match1_standalone
+ LEAL -2(DX), DX
+ LEAL 2(SI), SI
+
+matchlen_match1_standalone:
+ CMPL DX, $0x01
+ JB gen_match_len_end
+ MOVB (AX)(SI*1), BL
+ CMPB (CX)(SI*1), BL
+ JNE gen_match_len_end
+ INCL SI
+
+gen_match_len_end:
+ MOVQ SI, ret+48(FP)
+ RET
diff --git a/vendor/github.com/klauspost/compress/flate/matchlen_generic.go b/vendor/github.com/klauspost/compress/flate/matchlen_generic.go
new file mode 100644
index 0000000..ad5cd81
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/matchlen_generic.go
@@ -0,0 +1,33 @@
+//go:build !amd64 || appengine || !gc || noasm
+// +build !amd64 appengine !gc noasm
+
+// Copyright 2019+ Klaus Post. All rights reserved.
+// License information can be found in the LICENSE file.
+
+package flate
+
+import (
+ "encoding/binary"
+ "math/bits"
+)
+
+// matchLen returns the maximum common prefix length of a and b.
+// a must be the shorter of the two.
+func matchLen(a, b []byte) (n int) {
+ for ; len(a) >= 8 && len(b) >= 8; a, b = a[8:], b[8:] {
+ diff := binary.LittleEndian.Uint64(a) ^ binary.LittleEndian.Uint64(b)
+ if diff != 0 {
+ return n + bits.TrailingZeros64(diff)>>3
+ }
+ n += 8
+ }
+
+ for i := range a {
+ if a[i] != b[i] {
+ break
+ }
+ n++
+ }
+ return n
+}
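The generic `matchLen` above compares eight bytes at a time and converts the trailing-zero count of the XOR into the index of the first differing byte. A quick standalone check of that behavior, reusing the function body verbatim:

```go
package main

import (
	"encoding/binary"
	"fmt"
	"math/bits"
)

// matchLen is the generic implementation from the diff, reproduced verbatim.
func matchLen(a, b []byte) (n int) {
	for ; len(a) >= 8 && len(b) >= 8; a, b = a[8:], b[8:] {
		diff := binary.LittleEndian.Uint64(a) ^ binary.LittleEndian.Uint64(b)
		if diff != 0 {
			// The lowest set bit marks the first differing byte.
			return n + bits.TrailingZeros64(diff)>>3
		}
		n += 8
	}
	for i := range a {
		if a[i] != b[i] {
			break
		}
		n++
	}
	return n
}

func main() {
	a := []byte("compression is fast")
	b := []byte("compression is neat")
	// The two slices share a 15-byte prefix ("compression is ").
	fmt.Println(matchLen(a, b)) // 15
}
```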
diff --git a/vendor/github.com/klauspost/compress/flate/regmask_amd64.go b/vendor/github.com/klauspost/compress/flate/regmask_amd64.go
new file mode 100644
index 0000000..6ed2806
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/regmask_amd64.go
@@ -0,0 +1,37 @@
+package flate
+
+const (
+ // Masks for shifts with register sizes of the shift value.
+ // This can be used to work around the x86 design of shifting by mod register size.
+ // It can be used when a variable shift is always smaller than the register size.
+
+ // reg8SizeMaskX - shift value is 8 bits, shifted is X
+ reg8SizeMask8 = 7
+ reg8SizeMask16 = 15
+ reg8SizeMask32 = 31
+ reg8SizeMask64 = 63
+
+ // reg16SizeMaskX - shift value is 16 bits, shifted is X
+ reg16SizeMask8 = reg8SizeMask8
+ reg16SizeMask16 = reg8SizeMask16
+ reg16SizeMask32 = reg8SizeMask32
+ reg16SizeMask64 = reg8SizeMask64
+
+ // reg32SizeMaskX - shift value is 32 bits, shifted is X
+ reg32SizeMask8 = reg8SizeMask8
+ reg32SizeMask16 = reg8SizeMask16
+ reg32SizeMask32 = reg8SizeMask32
+ reg32SizeMask64 = reg8SizeMask64
+
+ // reg64SizeMaskX - shift value is 64 bits, shifted is X
+ reg64SizeMask8 = reg8SizeMask8
+ reg64SizeMask16 = reg8SizeMask16
+ reg64SizeMask32 = reg8SizeMask32
+ reg64SizeMask64 = reg8SizeMask64
+
+ // regSizeMaskUintX - shift value is uint, shifted is X
+ regSizeMaskUint8 = reg8SizeMask8
+ regSizeMaskUint16 = reg8SizeMask16
+ regSizeMaskUint32 = reg8SizeMask32
+ regSizeMaskUint64 = reg8SizeMask64
+)
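These constants lean on the x86 rule that a variable shift implicitly reduces its count modulo the register width: masking the count with the register-size mask is then a no-op the compiler can fold away, while the all-ones masks in `regmask_other.go` below leave the shift untouched on other architectures. A hedged sketch of the calling pattern, with the amd64 value hard-coded for illustration:

```go
package main

import "fmt"

// regSizeMaskUint64 is 63 on amd64 and ^uint(0) elsewhere; either way the
// expression below is correct whenever shift < 64, and on amd64 the mask
// lets the compiler emit a bare shift with no bounds branch.
const regSizeMaskUint64 = 63 // amd64 value, for illustration only

func shiftRight(v uint64, shift uint) uint64 {
	return v >> (shift & regSizeMaskUint64)
}

func main() {
	fmt.Println(shiftRight(1024, 4)) // 64
}
```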
diff --git a/vendor/github.com/klauspost/compress/flate/regmask_other.go b/vendor/github.com/klauspost/compress/flate/regmask_other.go
new file mode 100644
index 0000000..1b7a2cb
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/regmask_other.go
@@ -0,0 +1,40 @@
+//go:build !amd64
+// +build !amd64
+
+package flate
+
+const (
+ // Masks for shifts with register sizes of the shift value.
+ // This can be used to work around the x86 design of shifting by mod register size.
+ // It can be used when a variable shift is always smaller than the register size.
+
+ // reg8SizeMaskX - shift value is 8 bits, shifted is X
+ reg8SizeMask8 = 0xff
+ reg8SizeMask16 = 0xff
+ reg8SizeMask32 = 0xff
+ reg8SizeMask64 = 0xff
+
+ // reg16SizeMaskX - shift value is 16 bits, shifted is X
+ reg16SizeMask8 = 0xffff
+ reg16SizeMask16 = 0xffff
+ reg16SizeMask32 = 0xffff
+ reg16SizeMask64 = 0xffff
+
+ // reg32SizeMaskX - shift value is 32 bits, shifted is X
+ reg32SizeMask8 = 0xffffffff
+ reg32SizeMask16 = 0xffffffff
+ reg32SizeMask32 = 0xffffffff
+ reg32SizeMask64 = 0xffffffff
+
+ // reg64SizeMaskX - shift value is 64 bits, shifted is X
+ reg64SizeMask8 = 0xffffffffffffffff
+ reg64SizeMask16 = 0xffffffffffffffff
+ reg64SizeMask32 = 0xffffffffffffffff
+ reg64SizeMask64 = 0xffffffffffffffff
+
+ // regSizeMaskUintX - shift value is uint, shifted is X
+ regSizeMaskUint8 = ^uint(0)
+ regSizeMaskUint16 = ^uint(0)
+ regSizeMaskUint32 = ^uint(0)
+ regSizeMaskUint64 = ^uint(0)
+)
diff --git a/vendor/github.com/klauspost/compress/flate/stateless.go b/vendor/github.com/klauspost/compress/flate/stateless.go
new file mode 100644
index 0000000..f3d4139
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/stateless.go
@@ -0,0 +1,318 @@
+package flate
+
+import (
+ "io"
+ "math"
+ "sync"
+)
+
+const (
+ maxStatelessBlock = math.MaxInt16
+ // The dictionary takes space from maxStatelessBlock, so limit its size.
+ maxStatelessDict = 8 << 10
+
+ slTableBits = 13
+ slTableSize = 1 << slTableBits
+ slTableShift = 32 - slTableBits
+)
+
+type statelessWriter struct {
+ dst io.Writer
+ closed bool
+}
+
+func (s *statelessWriter) Close() error {
+ if s.closed {
+ return nil
+ }
+ s.closed = true
+ // Emit EOF block
+ return StatelessDeflate(s.dst, nil, true, nil)
+}
+
+func (s *statelessWriter) Write(p []byte) (n int, err error) {
+ err = StatelessDeflate(s.dst, p, false, nil)
+ if err != nil {
+ return 0, err
+ }
+ return len(p), nil
+}
+
+func (s *statelessWriter) Reset(w io.Writer) {
+ s.dst = w
+ s.closed = false
+}
+
+// NewStatelessWriter will do compression but without maintaining any state
+// between Write calls.
+// There will be no memory kept between Write calls,
+// but compression and speed will be suboptimal.
+// Because of this, the size of actual Write calls will affect output size.
+func NewStatelessWriter(dst io.Writer) io.WriteCloser {
+ return &statelessWriter{dst: dst}
+}
+
+// bitWriterPool contains bit writers that can be reused.
+var bitWriterPool = sync.Pool{
+ New: func() interface{} {
+ return newHuffmanBitWriter(nil)
+ },
+}
+
+// StatelessDeflate allows compressing directly to a Writer without retaining state.
+// When returning everything will be flushed.
+// Up to 8KB of an optional dictionary can be given which is presumed to precede the block.
+// Longer dictionaries will be truncated and will still produce valid output.
+// Sending nil dictionary is perfectly fine.
+func StatelessDeflate(out io.Writer, in []byte, eof bool, dict []byte) error {
+ var dst tokens
+ bw := bitWriterPool.Get().(*huffmanBitWriter)
+ bw.reset(out)
+ defer func() {
+ // don't keep a reference to our output
+ bw.reset(nil)
+ bitWriterPool.Put(bw)
+ }()
+ if eof && len(in) == 0 {
+ // Just write an EOF block.
+ // Could be faster...
+ bw.writeStoredHeader(0, true)
+ bw.flush()
+ return bw.err
+ }
+
+ // Truncate dict
+ if len(dict) > maxStatelessDict {
+ dict = dict[len(dict)-maxStatelessDict:]
+ }
+
+ // For subsequent loops, keep shallow dict reference to avoid alloc+copy.
+ var inDict []byte
+
+ for len(in) > 0 {
+ todo := in
+ if len(inDict) > 0 {
+ if len(todo) > maxStatelessBlock-maxStatelessDict {
+ todo = todo[:maxStatelessBlock-maxStatelessDict]
+ }
+ } else if len(todo) > maxStatelessBlock-len(dict) {
+ todo = todo[:maxStatelessBlock-len(dict)]
+ }
+ inOrg := in
+ in = in[len(todo):]
+ uncompressed := todo
+ if len(dict) > 0 {
+ // combine dict and source
+ bufLen := len(todo) + len(dict)
+ combined := make([]byte, bufLen)
+ copy(combined, dict)
+ copy(combined[len(dict):], todo)
+ todo = combined
+ }
+ // Compress
+ if len(inDict) == 0 {
+ statelessEnc(&dst, todo, int16(len(dict)))
+ } else {
+ statelessEnc(&dst, inDict[:maxStatelessDict+len(todo)], maxStatelessDict)
+ }
+ isEof := eof && len(in) == 0
+
+ if dst.n == 0 {
+ bw.writeStoredHeader(len(uncompressed), isEof)
+ if bw.err != nil {
+ return bw.err
+ }
+ bw.writeBytes(uncompressed)
+ } else if int(dst.n) > len(uncompressed)-len(uncompressed)>>4 {
+ // If we removed less than 1/16th, huffman compress the block.
+ bw.writeBlockHuff(isEof, uncompressed, len(in) == 0)
+ } else {
+ bw.writeBlockDynamic(&dst, isEof, uncompressed, len(in) == 0)
+ }
+ if len(in) > 0 {
+ // Retain a dict if we have more
+ inDict = inOrg[len(uncompressed)-maxStatelessDict:]
+ dict = nil
+ dst.Reset()
+ }
+ if bw.err != nil {
+ return bw.err
+ }
+ }
+ if !eof {
+ // Align, only a stored block can do that.
+ bw.writeStoredHeader(0, false)
+ }
+ bw.flush()
+ return bw.err
+}
+
+func hashSL(u uint32) uint32 {
+ return (u * 0x1e35a7bd) >> slTableShift
+}
+
+func load3216(b []byte, i int16) uint32 {
+ // Help the compiler eliminate bounds checks on the read so it can be done in a single read.
+ b = b[i:]
+ b = b[:4]
+ return uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24
+}
+
+func load6416(b []byte, i int16) uint64 {
+ // Help the compiler eliminate bounds checks on the read so it can be done in a single read.
+ b = b[i:]
+ b = b[:8]
+ return uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 |
+ uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56
+}
+
+func statelessEnc(dst *tokens, src []byte, startAt int16) {
+ const (
+ inputMargin = 12 - 1
+ minNonLiteralBlockSize = 1 + 1 + inputMargin
+ )
+
+ type tableEntry struct {
+ offset int16
+ }
+
+ var table [slTableSize]tableEntry
+
+ // This check isn't in the Snappy implementation, but there, the caller
+ // instead of the callee handles this case.
+ if len(src)-int(startAt) < minNonLiteralBlockSize {
+ // We do not fill the token table.
+ // This will be picked up by caller.
+ dst.n = 0
+ return
+ }
+ // Index until startAt
+ if startAt > 0 {
+ cv := load3232(src, 0)
+ for i := int16(0); i < startAt; i++ {
+ table[hashSL(cv)] = tableEntry{offset: i}
+ cv = (cv >> 8) | (uint32(src[i+4]) << 24)
+ }
+ }
+
+ s := startAt + 1
+ nextEmit := startAt
+ // sLimit is when to stop looking for offset/length copies. The inputMargin
+ // lets us use a fast path for emitLiteral in the main loop, while we are
+ // looking for copies.
+ sLimit := int16(len(src) - inputMargin)
+
+ // nextEmit is where in src the next emitLiteral should start from.
+ cv := load3216(src, s)
+
+ for {
+ const skipLog = 5
+ const doEvery = 2
+
+ nextS := s
+ var candidate tableEntry
+ for {
+ nextHash := hashSL(cv)
+ candidate = table[nextHash]
+ nextS = s + doEvery + (s-nextEmit)>>skipLog
+ if nextS > sLimit || nextS <= 0 {
+ goto emitRemainder
+ }
+
+ now := load6416(src, nextS)
+ table[nextHash] = tableEntry{offset: s}
+ nextHash = hashSL(uint32(now))
+
+ if cv == load3216(src, candidate.offset) {
+ table[nextHash] = tableEntry{offset: nextS}
+ break
+ }
+
+ // Do one right away...
+ cv = uint32(now)
+ s = nextS
+ nextS++
+ candidate = table[nextHash]
+ now >>= 8
+ table[nextHash] = tableEntry{offset: s}
+
+ if cv == load3216(src, candidate.offset) {
+ table[nextHash] = tableEntry{offset: nextS}
+ break
+ }
+ cv = uint32(now)
+ s = nextS
+ }
+
+ // A 4-byte match has been found. We'll later see if more than 4 bytes
+ // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
+ // them as literal bytes.
+ for {
+ // Invariant: we have a 4-byte match at s, and no need to emit any
+ // literal bytes prior to s.
+
+ // Extend the 4-byte match as long as possible.
+ t := candidate.offset
+ l := int16(matchLen(src[s+4:], src[t+4:]) + 4)
+
+ // Extend backwards
+ for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
+ s--
+ t--
+ l++
+ }
+ if nextEmit < s {
+ if false {
+ emitLiteral(dst, src[nextEmit:s])
+ } else {
+ for _, v := range src[nextEmit:s] {
+ dst.tokens[dst.n] = token(v)
+ dst.litHist[v]++
+ dst.n++
+ }
+ }
+ }
+
+ // Save the match found
+ dst.AddMatchLong(int32(l), uint32(s-t-baseMatchOffset))
+ s += l
+ nextEmit = s
+ if nextS >= s {
+ s = nextS + 1
+ }
+ if s >= sLimit {
+ goto emitRemainder
+ }
+
+ // We could immediately start working at s now, but to improve
+ // compression we first update the hash table at s-2 and at s. If
+ // another emitCopy is not our next move, also calculate nextHash
+ // at s+1. At least on GOARCH=amd64, these three hash calculations
+ // are faster as one load64 call (with some shifts) instead of
+ // three load32 calls.
+ x := load6416(src, s-2)
+ o := s - 2
+ prevHash := hashSL(uint32(x))
+ table[prevHash] = tableEntry{offset: o}
+ x >>= 16
+ currHash := hashSL(uint32(x))
+ candidate = table[currHash]
+ table[currHash] = tableEntry{offset: o + 2}
+
+ if uint32(x) != load3216(src, candidate.offset) {
+ cv = uint32(x >> 8)
+ s++
+ break
+ }
+ }
+ }
+
+emitRemainder:
+ if int(nextEmit) < len(src) {
+ // If nothing was added, don't encode literals.
+ if dst.n == 0 {
+ return
+ }
+ emitLiteral(dst, src[nextEmit:])
+ }
+}
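Because `statelessWriter` discards all state between calls, every `Write` becomes at least one independent deflate block, so batching writes matters. A short usage sketch of the two exported entry points defined above:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/klauspost/compress/flate"
)

func main() {
	var buf bytes.Buffer

	// One-shot: compress a block with no retained state and an EOF marker.
	payload := bytes.Repeat([]byte("stateless deflate example "), 100)
	if err := flate.StatelessDeflate(&buf, payload, true, nil); err != nil {
		panic(err)
	}
	fmt.Printf("one-shot: %d -> %d bytes\n", len(payload), buf.Len())

	// Writer form: each Write is compressed independently, so fewer,
	// larger writes compress better.
	buf.Reset()
	w := flate.NewStatelessWriter(&buf)
	if _, err := w.Write(payload); err != nil {
		panic(err)
	}
	if err := w.Close(); err != nil {
		panic(err)
	}
	fmt.Printf("writer:   %d -> %d bytes\n", len(payload), buf.Len())
}
```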
diff --git a/vendor/github.com/klauspost/compress/flate/token.go b/vendor/github.com/klauspost/compress/flate/token.go
new file mode 100644
index 0000000..d818790
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/flate/token.go
@@ -0,0 +1,379 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flate
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "io"
+ "math"
+)
+
+const (
+ // bits 0-16 xoffset = offset - MIN_OFFSET_SIZE, or literal - 16 bits
+ // bits 16-22 offsetcode - 5 bits
+ // bits 22-30 xlength = length - MIN_MATCH_LENGTH - 8 bits
+ // bits 30-32 type 0 = literal 1=EOF 2=Match 3=Unused - 2 bits
+ lengthShift = 22
+ offsetMask = 1<maxnumlit
+ offHist [32]uint16 // offset codes
+ litHist [256]uint16 // codes 0->255
+ nFilled int
+ n uint16 // Must be able to contain maxStoreBlockSize
+ tokens [maxStoreBlockSize + 1]token
+}
+
+func (t *tokens) Reset() {
+ if t.n == 0 {
+ return
+ }
+ t.n = 0
+ t.nFilled = 0
+ for i := range t.litHist[:] {
+ t.litHist[i] = 0
+ }
+ for i := range t.extraHist[:] {
+ t.extraHist[i] = 0
+ }
+ for i := range t.offHist[:] {
+ t.offHist[i] = 0
+ }
+}
+
+func (t *tokens) Fill() {
+ if t.n == 0 {
+ return
+ }
+ for i, v := range t.litHist[:] {
+ if v == 0 {
+ t.litHist[i] = 1
+ t.nFilled++
+ }
+ }
+ for i, v := range t.extraHist[:literalCount-256] {
+ if v == 0 {
+ t.nFilled++
+ t.extraHist[i] = 1
+ }
+ }
+ for i, v := range t.offHist[:offsetCodeCount] {
+ if v == 0 {
+ t.offHist[i] = 1
+ }
+ }
+}
+
+func indexTokens(in []token) tokens {
+ var t tokens
+ t.indexTokens(in)
+ return t
+}
+
+func (t *tokens) indexTokens(in []token) {
+ t.Reset()
+ for _, tok := range in {
+ if tok < matchType {
+ t.AddLiteral(tok.literal())
+ continue
+ }
+ t.AddMatch(uint32(tok.length()), tok.offset()&matchOffsetOnlyMask)
+ }
+}
+
+// emitLiteral writes a literal chunk to the tokens.
+func emitLiteral(dst *tokens, lit []byte) {
+ for _, v := range lit {
+ dst.tokens[dst.n] = token(v)
+ dst.litHist[v]++
+ dst.n++
+ }
+}
+
+func (t *tokens) AddLiteral(lit byte) {
+ t.tokens[t.n] = token(lit)
+ t.litHist[lit]++
+ t.n++
+}
+
+// from https://stackoverflow.com/a/28730362
+func mFastLog2(val float32) float32 {
+ ux := int32(math.Float32bits(val))
+ log2 := (float32)(((ux >> 23) & 255) - 128)
+ ux &= -0x7f800001
+ ux += 127 << 23
+ uval := math.Float32frombits(uint32(ux))
+ log2 += ((-0.34484843)*uval+2.02466578)*uval - 0.67487759
+ return log2
+}
+
+// EstimatedBits will return a minimum size estimated by an *optimal*
+// compression of the block, in bits.
+func (t *tokens) EstimatedBits() int {
+ shannon := float32(0)
+ bits := int(0)
+ nMatches := 0
+ total := int(t.n) + t.nFilled
+ if total > 0 {
+ invTotal := 1.0 / float32(total)
+ for _, v := range t.litHist[:] {
+ if v > 0 {
+ n := float32(v)
+ shannon += atLeastOne(-mFastLog2(n*invTotal)) * n
+ }
+ }
+ // Just add 15 for EOB
+ shannon += 15
+ for i, v := range t.extraHist[1 : literalCount-256] {
+ if v > 0 {
+ n := float32(v)
+ shannon += atLeastOne(-mFastLog2(n*invTotal)) * n
+ bits += int(lengthExtraBits[i&31]) * int(v)
+ nMatches += int(v)
+ }
+ }
+ }
+ if nMatches > 0 {
+ invTotal := 1.0 / float32(nMatches)
+ for i, v := range t.offHist[:offsetCodeCount] {
+ if v > 0 {
+ n := float32(v)
+ shannon += atLeastOne(-mFastLog2(n*invTotal)) * n
+ bits += int(offsetExtraBits[i&31]) * int(v)
+ }
+ }
+ }
+ return int(shannon) + bits
+}
+
+// AddMatch adds a match to the tokens.
+// This function is very sensitive to inlining and right on the border.
+func (t *tokens) AddMatch(xlength uint32, xoffset uint32) {
+ if debugDeflate {
+ if xlength >= maxMatchLength+baseMatchLength {
+ panic(fmt.Errorf("invalid length: %v", xlength))
+ }
+ if xoffset >= maxMatchOffset+baseMatchOffset {
+ panic(fmt.Errorf("invalid offset: %v", xoffset))
+ }
+ }
+ oCode := offsetCode(xoffset)
+ xoffset |= oCode << 16
+
+ t.extraHist[lengthCodes1[uint8(xlength)]]++
+ t.offHist[oCode&31]++
+ t.tokens[t.n] = token(matchType | xlength<= maxMatchOffset+baseMatchOffset {
+ panic(fmt.Errorf("invalid offset: %v", xoffset))
+ }
+ }
+ oc := offsetCode(xoffset)
+ xoffset |= oc << 16
+ for xlength > 0 {
+ xl := xlength
+ if xl > 258 {
+ // We need to have at least baseMatchLength left over for next loop.
+ if xl > 258+baseMatchLength {
+ xl = 258
+ } else {
+ xl = 258 - baseMatchLength
+ }
+ }
+ xlength -= xl
+ xl -= baseMatchLength
+ t.extraHist[lengthCodes1[uint8(xl)]]++
+ t.offHist[oc&31]++
+ t.tokens[t.n] = token(matchType | uint32(xl)<> lengthShift) }
+
+// Convert length to code.
+func lengthCode(len uint8) uint8 { return lengthCodes[len] }
+
+// Returns the offset code corresponding to a specific offset
+func offsetCode(off uint32) uint32 {
+ if false {
+ if off < uint32(len(offsetCodes)) {
+ return offsetCodes[off&255]
+ } else if off>>7 < uint32(len(offsetCodes)) {
+ return offsetCodes[(off>>7)&255] + 14
+ } else {
+ return offsetCodes[(off>>14)&255] + 28
+ }
+ }
+ if off < uint32(len(offsetCodes)) {
+ return offsetCodes[uint8(off)]
+ }
+ return offsetCodes14[uint8(off>>7)]
+}
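Each token packs type, length, and offset into a single uint32: the low bits carry the offset (or the literal byte), bits 22-29 the length with the base subtracted, and the top bits the type. A standalone round trip of that layout using the shift and mask values from the const block above; note the real `AddMatch` also stashes the offset code above bit 16, which is omitted here:

```go
package main

import "fmt"

const (
	lengthShift = 22
	offsetMask  = 1<<lengthShift - 1
	typeMask    = 3 << 30
	literalType = 0 << 30
	matchType   = 1 << 30
)

func main() {
	// Pack a match: xlength and xoffset already have the DEFLATE base
	// values (3 and 1) subtracted, as in tokens.AddMatch.
	xlength, xoffset := uint32(258-3), uint32(4096-1)
	tok := matchType | xlength<<lengthShift | xoffset

	fmt.Println(tok&typeMask == matchType) // true
	fmt.Println(uint8(tok >> lengthShift)) // 255 (length 258)
	fmt.Println(tok & offsetMask)          // 4095 (offset 4096)
}
```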
+}
diff --git a/vendor/github.com/klauspost/compress/gzip/gunzip.go b/vendor/github.com/klauspost/compress/gzip/gunzip.go
new file mode 100644
index 0000000..00a0a2c
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/gzip/gunzip.go
@@ -0,0 +1,380 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gzip implements reading and writing of gzip format compressed files,
+// as specified in RFC 1952.
+package gzip
+
+import (
+ "bufio"
+ "compress/gzip"
+ "encoding/binary"
+ "hash/crc32"
+ "io"
+ "time"
+
+ "github.com/klauspost/compress/flate"
+)
+
+const (
+ gzipID1 = 0x1f
+ gzipID2 = 0x8b
+ gzipDeflate = 8
+ flagText = 1 << 0
+ flagHdrCrc = 1 << 1
+ flagExtra = 1 << 2
+ flagName = 1 << 3
+ flagComment = 1 << 4
+)
+
+var (
+ // ErrChecksum is returned when reading GZIP data that has an invalid checksum.
+ ErrChecksum = gzip.ErrChecksum
+ // ErrHeader is returned when reading GZIP data that has an invalid header.
+ ErrHeader = gzip.ErrHeader
+)
+
+var le = binary.LittleEndian
+
+// noEOF converts io.EOF to io.ErrUnexpectedEOF.
+func noEOF(err error) error {
+ if err == io.EOF {
+ return io.ErrUnexpectedEOF
+ }
+ return err
+}
+
+// The gzip file stores a header giving metadata about the compressed file.
+// That header is exposed as the fields of the Writer and Reader structs.
+//
+// Strings must be UTF-8 encoded and may only contain Unicode code points
+// U+0001 through U+00FF, due to limitations of the GZIP file format.
+type Header struct {
+ Comment string // comment
+ Extra []byte // "extra data"
+ ModTime time.Time // modification time
+ Name string // file name
+ OS byte // operating system type
+}
+
+// A Reader is an io.Reader that can be read to retrieve
+// uncompressed data from a gzip-format compressed file.
+//
+// In general, a gzip file can be a concatenation of gzip files,
+// each with its own header. Reads from the Reader
+// return the concatenation of the uncompressed data of each.
+// Only the first header is recorded in the Reader fields.
+//
+// Gzip files store a length and checksum of the uncompressed data.
+// The Reader will return an ErrChecksum when Read
+// reaches the end of the uncompressed data if it does not
+// have the expected length or checksum. Clients should treat data
+// returned by Read as tentative until they receive the io.EOF
+// marking the end of the data.
+type Reader struct {
+ Header // valid after NewReader or Reader.Reset
+ r flate.Reader
+ br *bufio.Reader
+ decompressor io.ReadCloser
+ digest uint32 // CRC-32, IEEE polynomial (section 8)
+ size uint32 // Uncompressed size (section 2.3.1)
+ buf [512]byte
+ err error
+ multistream bool
+}
+
+// NewReader creates a new Reader reading the given reader.
+// If r does not also implement io.ByteReader,
+// the decompressor may read more data than necessary from r.
+//
+// It is the caller's responsibility to call Close on the Reader when done.
+//
+// The Reader.Header fields will be valid in the Reader returned.
+func NewReader(r io.Reader) (*Reader, error) {
+ z := new(Reader)
+ if err := z.Reset(r); err != nil {
+ return nil, err
+ }
+ return z, nil
+}
+
+// Reset discards the Reader z's state and makes it equivalent to the
+// result of its original state from NewReader, but reading from r instead.
+// This permits reusing a Reader rather than allocating a new one.
+func (z *Reader) Reset(r io.Reader) error {
+ *z = Reader{
+ decompressor: z.decompressor,
+ multistream: true,
+ br: z.br,
+ }
+ if rr, ok := r.(flate.Reader); ok {
+ z.r = rr
+ } else {
+ // Reuse if we can.
+ if z.br != nil {
+ z.br.Reset(r)
+ } else {
+ z.br = bufio.NewReader(r)
+ }
+ z.r = z.br
+ }
+ z.Header, z.err = z.readHeader()
+ return z.err
+}
+
+// Multistream controls whether the reader supports multistream files.
+//
+// If enabled (the default), the Reader expects the input to be a sequence
+// of individually gzipped data streams, each with its own header and
+// trailer, ending at EOF. The effect is that the concatenation of a sequence
+// of gzipped files is treated as equivalent to the gzip of the concatenation
+// of the sequence. This is standard behavior for gzip readers.
+//
+// Calling Multistream(false) disables this behavior; disabling the behavior
+// can be useful when reading file formats that distinguish individual gzip
+// data streams or mix gzip data streams with other data streams.
+// In this mode, when the Reader reaches the end of the data stream,
+// Read returns io.EOF. If the underlying reader implements io.ByteReader,
+// it will be left positioned just after the gzip stream.
+// To start the next stream, call z.Reset(r) followed by z.Multistream(false).
+// If there is no next stream, z.Reset(r) will return io.EOF.
+func (z *Reader) Multistream(ok bool) {
+ z.multistream = ok
+}
+
+// readString reads a NUL-terminated string from z.r.
+// It treats the bytes read as being encoded as ISO 8859-1 (Latin-1) and
+// will output a string encoded using UTF-8.
+// This method always updates z.digest with the data read.
+func (z *Reader) readString() (string, error) {
+ var err error
+ needConv := false
+ for i := 0; ; i++ {
+ if i >= len(z.buf) {
+ return "", ErrHeader
+ }
+ z.buf[i], err = z.r.ReadByte()
+ if err != nil {
+ return "", err
+ }
+ if z.buf[i] > 0x7f {
+ needConv = true
+ }
+ if z.buf[i] == 0 {
+ // Digest covers the NUL terminator.
+ z.digest = crc32.Update(z.digest, crc32.IEEETable, z.buf[:i+1])
+
+ // Strings are ISO 8859-1, Latin-1 (RFC 1952, section 2.3.1).
+ if needConv {
+ s := make([]rune, 0, i)
+ for _, v := range z.buf[:i] {
+ s = append(s, rune(v))
+ }
+ return string(s), nil
+ }
+ return string(z.buf[:i]), nil
+ }
+ }
+}
+
+// readHeader reads the GZIP header according to section 2.3.1.
+// This method does not set z.err.
+func (z *Reader) readHeader() (hdr Header, err error) {
+ if _, err = io.ReadFull(z.r, z.buf[:10]); err != nil {
+ // RFC 1952, section 2.2, says the following:
+ // A gzip file consists of a series of "members" (compressed data sets).
+ //
+ // Other than this, the specification does not clarify whether a
+ // "series" is defined as "one or more" or "zero or more". To err on the
+ // side of caution, Go interprets this to mean "zero or more".
+ // Thus, it is okay to return io.EOF here.
+ return hdr, err
+ }
+ if z.buf[0] != gzipID1 || z.buf[1] != gzipID2 || z.buf[2] != gzipDeflate {
+ return hdr, ErrHeader
+ }
+ flg := z.buf[3]
+ hdr.ModTime = time.Unix(int64(le.Uint32(z.buf[4:8])), 0)
+ // z.buf[8] is XFL and is currently ignored.
+ hdr.OS = z.buf[9]
+ z.digest = crc32.ChecksumIEEE(z.buf[:10])
+
+ if flg&flagExtra != 0 {
+ if _, err = io.ReadFull(z.r, z.buf[:2]); err != nil {
+ return hdr, noEOF(err)
+ }
+ z.digest = crc32.Update(z.digest, crc32.IEEETable, z.buf[:2])
+ data := make([]byte, le.Uint16(z.buf[:2]))
+ if _, err = io.ReadFull(z.r, data); err != nil {
+ return hdr, noEOF(err)
+ }
+ z.digest = crc32.Update(z.digest, crc32.IEEETable, data)
+ hdr.Extra = data
+ }
+
+ var s string
+ if flg&flagName != 0 {
+ if s, err = z.readString(); err != nil {
+ return hdr, err
+ }
+ hdr.Name = s
+ }
+
+ if flg&flagComment != 0 {
+ if s, err = z.readString(); err != nil {
+ return hdr, err
+ }
+ hdr.Comment = s
+ }
+
+ if flg&flagHdrCrc != 0 {
+ if _, err = io.ReadFull(z.r, z.buf[:2]); err != nil {
+ return hdr, noEOF(err)
+ }
+ digest := le.Uint16(z.buf[:2])
+ if digest != uint16(z.digest) {
+ return hdr, ErrHeader
+ }
+ }
+
+ // Reserved FLG bits must be zero.
+ if flg>>5 != 0 {
+ return hdr, ErrHeader
+ }
+
+ z.digest = 0
+ if z.decompressor == nil {
+ z.decompressor = flate.NewReader(z.r)
+ } else {
+ z.decompressor.(flate.Resetter).Reset(z.r, nil)
+ }
+ return hdr, nil
+}
+
+// Read implements io.Reader, reading uncompressed bytes from its underlying Reader.
+func (z *Reader) Read(p []byte) (n int, err error) {
+ if z.err != nil {
+ return 0, z.err
+ }
+
+ for n == 0 {
+ n, z.err = z.decompressor.Read(p)
+ z.digest = crc32.Update(z.digest, crc32.IEEETable, p[:n])
+ z.size += uint32(n)
+ if z.err != io.EOF {
+ // In the normal case we return here.
+ return n, z.err
+ }
+
+ // Finished file; check checksum and size.
+ if _, err := io.ReadFull(z.r, z.buf[:8]); err != nil {
+ z.err = noEOF(err)
+ return n, z.err
+ }
+ digest := le.Uint32(z.buf[:4])
+ size := le.Uint32(z.buf[4:8])
+ if digest != z.digest || size != z.size {
+ z.err = ErrChecksum
+ return n, z.err
+ }
+ z.digest, z.size = 0, 0
+
+ // File is ok; check if there is another.
+ if !z.multistream {
+ return n, io.EOF
+ }
+ z.err = nil // Remove io.EOF
+
+ if _, z.err = z.readHeader(); z.err != nil {
+ return n, z.err
+ }
+ }
+
+ return n, nil
+}
+
+type crcer interface {
+ io.Writer
+ Sum32() uint32
+ Reset()
+}
+type crcUpdater struct {
+ z *Reader
+}
+
+func (c *crcUpdater) Write(p []byte) (int, error) {
+ c.z.digest = crc32.Update(c.z.digest, crc32.IEEETable, p)
+ return len(p), nil
+}
+
+func (c *crcUpdater) Sum32() uint32 {
+ return c.z.digest
+}
+
+func (c *crcUpdater) Reset() {
+ c.z.digest = 0
+}
+
+// WriteTo supports the io.WriterTo interface for io.Copy and friends.
+func (z *Reader) WriteTo(w io.Writer) (int64, error) {
+ total := int64(0)
+ crcWriter := crcer(crc32.NewIEEE())
+ if z.digest != 0 {
+ crcWriter = &crcUpdater{z: z}
+ }
+ for {
+ if z.err != nil {
+ if z.err == io.EOF {
+ return total, nil
+ }
+ return total, z.err
+ }
+
+ // We write both to output and digest.
+ mw := io.MultiWriter(w, crcWriter)
+ n, err := z.decompressor.(io.WriterTo).WriteTo(mw)
+ total += n
+ z.size += uint32(n)
+ if err != nil {
+ z.err = err
+ return total, z.err
+ }
+
+ // Finished file; check checksum + size.
+ if _, err := io.ReadFull(z.r, z.buf[0:8]); err != nil {
+ if err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ z.err = err
+ return total, err
+ }
+ z.digest = crcWriter.Sum32()
+ digest := le.Uint32(z.buf[:4])
+ size := le.Uint32(z.buf[4:8])
+ if digest != z.digest || size != z.size {
+ z.err = ErrChecksum
+ return total, z.err
+ }
+ z.digest, z.size = 0, 0
+
+ // File is ok; check if there is another.
+ if !z.multistream {
+ return total, nil
+ }
+ crcWriter.Reset()
+ z.err = nil // Remove io.EOF
+
+ if _, z.err = z.readHeader(); z.err != nil {
+ if z.err == io.EOF {
+ return total, nil
+ }
+ return total, z.err
+ }
+ }
+}
+
+// Close closes the Reader. It does not close the underlying io.Reader.
+// In order for the GZIP checksum to be verified, the reader must be
+// fully consumed, until io.EOF is returned.
+func (z *Reader) Close() error { return z.decompressor.Close() }
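
The multistream protocol described in the doc comments above composes as follows; a minimal sketch, assuming the vendored import path github.com/klauspost/compress/gzip and an input that really is a series of concatenated members:

```go
package main

import (
	"bytes"
	"io"
	"log"
	"os"

	"github.com/klauspost/compress/gzip"
)

func main() {
	// Build two concatenated gzip members in one buffer.
	var buf bytes.Buffer
	for _, s := range []string{"first member\n", "second member\n"} {
		zw := gzip.NewWriter(&buf)
		if _, err := zw.Write([]byte(s)); err != nil {
			log.Fatal(err)
		}
		if err := zw.Close(); err != nil {
			log.Fatal(err)
		}
	}

	zr, err := gzip.NewReader(&buf)
	if err != nil {
		log.Fatal(err)
	}
	for {
		zr.Multistream(false) // report io.EOF at the end of the current member
		if _, err := io.Copy(os.Stdout, zr); err != nil {
			log.Fatal(err)
		}
		if err := zr.Reset(&buf); err == io.EOF {
			break // no further members
		} else if err != nil {
			log.Fatal(err)
		}
	}
}
```

io.Copy takes the Reader's WriteTo path here, so each member's CRC-32 and size trailer are still verified before the loop moves on.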
diff --git a/vendor/github.com/klauspost/compress/gzip/gzip.go b/vendor/github.com/klauspost/compress/gzip/gzip.go
new file mode 100644
index 0000000..5bc7205
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/gzip/gzip.go
@@ -0,0 +1,290 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gzip
+
+import (
+ "errors"
+ "fmt"
+ "hash/crc32"
+ "io"
+
+ "github.com/klauspost/compress/flate"
+)
+
+// These constants are copied from the flate package, so that code that imports
+// "compress/gzip" does not also have to import "compress/flate".
+const (
+ NoCompression = flate.NoCompression
+ BestSpeed = flate.BestSpeed
+ BestCompression = flate.BestCompression
+ DefaultCompression = flate.DefaultCompression
+ ConstantCompression = flate.ConstantCompression
+ HuffmanOnly = flate.HuffmanOnly
+
+ // StatelessCompression will do compression but without maintaining any state
+ // between Write calls.
+ // There will be no memory kept between Write calls,
+ // but compression and speed will be suboptimal.
+ // Because of this, the size of actual Write calls will affect output size.
+ StatelessCompression = -3
+)
+
+// A Writer is an io.WriteCloser.
+// Writes to a Writer are compressed and written to w.
+type Writer struct {
+ Header // written at first call to Write, Flush, or Close
+ w io.Writer
+ level int
+ err error
+ compressor *flate.Writer
+ digest uint32 // CRC-32, IEEE polynomial (section 8)
+ size uint32 // Uncompressed size (section 2.3.1)
+ wroteHeader bool
+ closed bool
+ buf [10]byte
+}
+
+// NewWriter returns a new Writer.
+// Writes to the returned writer are compressed and written to w.
+//
+// It is the caller's responsibility to call Close on the WriteCloser when done.
+// Writes may be buffered and not flushed until Close.
+//
+// Callers that wish to set the fields in Writer.Header must do so before
+// the first call to Write, Flush, or Close.
+func NewWriter(w io.Writer) *Writer {
+ z, _ := NewWriterLevel(w, DefaultCompression)
+ return z
+}
+
+// NewWriterLevel is like NewWriter but specifies the compression level instead
+// of assuming DefaultCompression.
+//
+// The compression level can be DefaultCompression, NoCompression, or any
+// integer value between BestSpeed and BestCompression inclusive. The error
+// returned will be nil if the level is valid.
+func NewWriterLevel(w io.Writer, level int) (*Writer, error) {
+ if level < StatelessCompression || level > BestCompression {
+ return nil, fmt.Errorf("gzip: invalid compression level: %d", level)
+ }
+ z := new(Writer)
+ z.init(w, level)
+ return z, nil
+}
+
+// MinCustomWindowSize is the minimum window size that can be sent to NewWriterWindow.
+const MinCustomWindowSize = flate.MinCustomWindowSize
+
+// MaxCustomWindowSize is the maximum custom window size that can be sent to NewWriterWindow.
+const MaxCustomWindowSize = flate.MaxCustomWindowSize
+
+// NewWriterWindow returns a new Writer compressing data with a custom window size.
+// windowSize must be from MinCustomWindowSize to MaxCustomWindowSize.
+func NewWriterWindow(w io.Writer, windowSize int) (*Writer, error) {
+ if windowSize < MinCustomWindowSize {
+ return nil, errors.New("gzip: requested window size less than MinWindowSize")
+ }
+ if windowSize > MaxCustomWindowSize {
+ return nil, errors.New("gzip: requested window size bigger than MaxCustomWindowSize")
+ }
+
+ z := new(Writer)
+ z.init(w, -windowSize)
+ return z, nil
+}
+
+func (z *Writer) init(w io.Writer, level int) {
+ compressor := z.compressor
+ if level != StatelessCompression {
+ if compressor != nil {
+ compressor.Reset(w)
+ }
+ }
+
+ *z = Writer{
+ Header: Header{
+ OS: 255, // unknown
+ },
+ w: w,
+ level: level,
+ compressor: compressor,
+ }
+}
+
+// Reset discards the Writer z's state and makes it equivalent to the
+// result of its original state from NewWriter or NewWriterLevel, but
+// writing to w instead. This permits reusing a Writer rather than
+// allocating a new one.
+func (z *Writer) Reset(w io.Writer) {
+ z.init(w, z.level)
+}
+
+// writeBytes writes a length-prefixed byte slice to z.w.
+func (z *Writer) writeBytes(b []byte) error {
+ if len(b) > 0xffff {
+ return errors.New("gzip.Write: Extra data is too large")
+ }
+ le.PutUint16(z.buf[:2], uint16(len(b)))
+ _, err := z.w.Write(z.buf[:2])
+ if err != nil {
+ return err
+ }
+ _, err = z.w.Write(b)
+ return err
+}
+
+// writeString writes a UTF-8 string s in GZIP's format to z.w.
+// GZIP (RFC 1952) specifies that strings are NUL-terminated ISO 8859-1 (Latin-1).
+func (z *Writer) writeString(s string) (err error) {
+ // GZIP stores Latin-1 strings; error if non-Latin-1; convert if non-ASCII.
+ needconv := false
+ for _, v := range s {
+ if v == 0 || v > 0xff {
+ return errors.New("gzip.Write: non-Latin-1 header string")
+ }
+ if v > 0x7f {
+ needconv = true
+ }
+ }
+ if needconv {
+ b := make([]byte, 0, len(s))
+ for _, v := range s {
+ b = append(b, byte(v))
+ }
+ _, err = z.w.Write(b)
+ } else {
+ _, err = io.WriteString(z.w, s)
+ }
+ if err != nil {
+ return err
+ }
+ // GZIP strings are NUL-terminated.
+ z.buf[0] = 0
+ _, err = z.w.Write(z.buf[:1])
+ return err
+}
+
+// Write writes a compressed form of p to the underlying io.Writer. The
+// compressed bytes are not necessarily flushed until the Writer is closed.
+func (z *Writer) Write(p []byte) (int, error) {
+ if z.err != nil {
+ return 0, z.err
+ }
+ var n int
+ // Write the GZIP header lazily.
+ if !z.wroteHeader {
+ z.wroteHeader = true
+ z.buf[0] = gzipID1
+ z.buf[1] = gzipID2
+ z.buf[2] = gzipDeflate
+ z.buf[3] = 0
+ if z.Extra != nil {
+ z.buf[3] |= 0x04
+ }
+ if z.Name != "" {
+ z.buf[3] |= 0x08
+ }
+ if z.Comment != "" {
+ z.buf[3] |= 0x10
+ }
+ le.PutUint32(z.buf[4:8], uint32(z.ModTime.Unix()))
+ if z.level == BestCompression {
+ z.buf[8] = 2
+ } else if z.level == BestSpeed {
+ z.buf[8] = 4
+ } else {
+ z.buf[8] = 0
+ }
+ z.buf[9] = z.OS
+ n, z.err = z.w.Write(z.buf[:10])
+ if z.err != nil {
+ return n, z.err
+ }
+ if z.Extra != nil {
+ z.err = z.writeBytes(z.Extra)
+ if z.err != nil {
+ return n, z.err
+ }
+ }
+ if z.Name != "" {
+ z.err = z.writeString(z.Name)
+ if z.err != nil {
+ return n, z.err
+ }
+ }
+ if z.Comment != "" {
+ z.err = z.writeString(z.Comment)
+ if z.err != nil {
+ return n, z.err
+ }
+ }
+
+ if z.compressor == nil && z.level != StatelessCompression {
+ z.compressor, _ = flate.NewWriter(z.w, z.level)
+ }
+ }
+ z.size += uint32(len(p))
+ z.digest = crc32.Update(z.digest, crc32.IEEETable, p)
+ if z.level == StatelessCompression {
+ return len(p), flate.StatelessDeflate(z.w, p, false, nil)
+ }
+ n, z.err = z.compressor.Write(p)
+ return n, z.err
+}
+
+// Flush flushes any pending compressed data to the underlying writer.
+//
+// It is useful mainly in compressed network protocols, to ensure that
+// a remote reader has enough data to reconstruct a packet. Flush does
+// not return until the data has been written. If the underlying
+// writer returns an error, Flush returns that error.
+//
+// In the terminology of the zlib library, Flush is equivalent to Z_SYNC_FLUSH.
+func (z *Writer) Flush() error {
+ if z.err != nil {
+ return z.err
+ }
+ if z.closed || z.level == StatelessCompression {
+ return nil
+ }
+ if !z.wroteHeader {
+ z.Write(nil)
+ if z.err != nil {
+ return z.err
+ }
+ }
+ z.err = z.compressor.Flush()
+ return z.err
+}
+
+// Close closes the Writer, flushing any unwritten data to the underlying
+// io.Writer, but does not close the underlying io.Writer.
+func (z *Writer) Close() error {
+ if z.err != nil {
+ return z.err
+ }
+ if z.closed {
+ return nil
+ }
+ z.closed = true
+ if !z.wroteHeader {
+ z.Write(nil)
+ if z.err != nil {
+ return z.err
+ }
+ }
+ if z.level == StatelessCompression {
+ z.err = flate.StatelessDeflate(z.w, nil, true, nil)
+ } else {
+ z.err = z.compressor.Close()
+ }
+ if z.err != nil {
+ return z.err
+ }
+ le.PutUint32(z.buf[:4], z.digest)
+ le.PutUint32(z.buf[4:8], z.size)
+ _, z.err = z.w.Write(z.buf[:8])
+ return z.err
+}
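
Since the header is written lazily on the first Write, Flush, or Close, metadata must be populated first. A small usage sketch under that assumption; the file name, comment, and timestamp are invented for illustration:

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"time"

	"github.com/klauspost/compress/gzip"
)

func main() {
	var buf bytes.Buffer
	zw, err := gzip.NewWriterLevel(&buf, gzip.BestSpeed)
	if err != nil {
		log.Fatal(err)
	}

	// Header fields must be set before the first Write/Flush/Close,
	// because that first call emits the 10-byte header and flag bytes.
	zw.Name = "notes.txt"
	zw.Comment = "illustrative metadata"
	zw.ModTime = time.Unix(1600000000, 0)

	if _, err := zw.Write([]byte("hello gzip\n")); err != nil {
		log.Fatal(err)
	}
	// Close flushes the deflate stream and appends the CRC-32 and size trailer.
	if err := zw.Close(); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d compressed bytes\n", buf.Len())
}
```

The same constructor also accepts this package's StatelessCompression level, which keeps no state between Write calls at some cost in ratio, so the size of individual Write calls then influences output size.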
diff --git a/vendor/github.com/klauspost/compress/zlib/reader.go b/vendor/github.com/klauspost/compress/zlib/reader.go
new file mode 100644
index 0000000..f127d47
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/zlib/reader.go
@@ -0,0 +1,183 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package zlib implements reading and writing of zlib format compressed data,
+as specified in RFC 1950.
+
+The implementation provides filters that uncompress during reading
+and compress during writing. For example, to write compressed data
+to a buffer:
+
+ var b bytes.Buffer
+ w := zlib.NewWriter(&b)
+ w.Write([]byte("hello, world\n"))
+ w.Close()
+
+and to read that data back:
+
+ r, err := zlib.NewReader(&b)
+ io.Copy(os.Stdout, r)
+ r.Close()
+*/
+package zlib
+
+import (
+ "bufio"
+ "compress/zlib"
+ "hash"
+ "hash/adler32"
+ "io"
+
+ "github.com/klauspost/compress/flate"
+)
+
+const zlibDeflate = 8
+
+var (
+ // ErrChecksum is returned when reading ZLIB data that has an invalid checksum.
+ ErrChecksum = zlib.ErrChecksum
+ // ErrDictionary is returned when reading ZLIB data that has an invalid dictionary.
+ ErrDictionary = zlib.ErrDictionary
+ // ErrHeader is returned when reading ZLIB data that has an invalid header.
+ ErrHeader = zlib.ErrHeader
+)
+
+type reader struct {
+ r flate.Reader
+ decompressor io.ReadCloser
+ digest hash.Hash32
+ err error
+ scratch [4]byte
+}
+
+// Resetter resets a ReadCloser returned by NewReader or NewReaderDict
+// to switch to a new underlying Reader. This permits reusing a ReadCloser
+// instead of allocating a new one.
+type Resetter interface {
+ // Reset discards any buffered data and resets the Resetter as if it was
+ // newly initialized with the given reader.
+ Reset(r io.Reader, dict []byte) error
+}
+
+// NewReader creates a new ReadCloser.
+// Reads from the returned ReadCloser read and decompress data from r.
+// If r does not implement io.ByteReader, the decompressor may read more
+// data than necessary from r.
+// It is the caller's responsibility to call Close on the ReadCloser when done.
+//
+// The ReadCloser returned by NewReader also implements Resetter.
+func NewReader(r io.Reader) (io.ReadCloser, error) {
+ return NewReaderDict(r, nil)
+}
+
+// NewReaderDict is like NewReader but uses a preset dictionary.
+// NewReaderDict ignores the dictionary if the compressed data does not refer to it.
+// If the compressed data refers to a different dictionary, NewReaderDict returns ErrDictionary.
+//
+// The ReadCloser returned by NewReaderDict also implements Resetter.
+func NewReaderDict(r io.Reader, dict []byte) (io.ReadCloser, error) {
+ z := new(reader)
+ err := z.Reset(r, dict)
+ if err != nil {
+ return nil, err
+ }
+ return z, nil
+}
+
+func (z *reader) Read(p []byte) (int, error) {
+ if z.err != nil {
+ return 0, z.err
+ }
+
+ var n int
+ n, z.err = z.decompressor.Read(p)
+ z.digest.Write(p[0:n])
+ if z.err != io.EOF {
+ // In the normal case we return here.
+ return n, z.err
+ }
+
+ // Finished file; check checksum.
+ if _, err := io.ReadFull(z.r, z.scratch[0:4]); err != nil {
+ if err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ z.err = err
+ return n, z.err
+ }
+ // ZLIB (RFC 1950) is big-endian, unlike GZIP (RFC 1952).
+ checksum := uint32(z.scratch[0])<<24 | uint32(z.scratch[1])<<16 | uint32(z.scratch[2])<<8 | uint32(z.scratch[3])
+ if checksum != z.digest.Sum32() {
+ z.err = ErrChecksum
+ return n, z.err
+ }
+ return n, io.EOF
+}
+
+// Close closes the Reader. It does not close the wrapped io.Reader
+// originally passed to NewReader. In order for the ZLIB checksum to be
+// verified, the reader must be fully consumed, until io.EOF is returned.
+func (z *reader) Close() error {
+ if z.err != nil && z.err != io.EOF {
+ return z.err
+ }
+ z.err = z.decompressor.Close()
+ return z.err
+}
+
+func (z *reader) Reset(r io.Reader, dict []byte) error {
+ *z = reader{decompressor: z.decompressor, digest: z.digest}
+ if fr, ok := r.(flate.Reader); ok {
+ z.r = fr
+ } else {
+ z.r = bufio.NewReader(r)
+ }
+
+ // Read the header (RFC 1950 section 2.2.).
+ _, z.err = io.ReadFull(z.r, z.scratch[0:2])
+ if z.err != nil {
+ if z.err == io.EOF {
+ z.err = io.ErrUnexpectedEOF
+ }
+ return z.err
+ }
+ h := uint(z.scratch[0])<<8 | uint(z.scratch[1])
+ if (z.scratch[0]&0x0f != zlibDeflate) || (h%31 != 0) {
+ z.err = ErrHeader
+ return z.err
+ }
+ haveDict := z.scratch[1]&0x20 != 0
+ if haveDict {
+ _, z.err = io.ReadFull(z.r, z.scratch[0:4])
+ if z.err != nil {
+ if z.err == io.EOF {
+ z.err = io.ErrUnexpectedEOF
+ }
+ return z.err
+ }
+ checksum := uint32(z.scratch[0])<<24 | uint32(z.scratch[1])<<16 | uint32(z.scratch[2])<<8 | uint32(z.scratch[3])
+ if checksum != adler32.Checksum(dict) {
+ z.err = ErrDictionary
+ return z.err
+ }
+ }
+
+ if z.decompressor == nil {
+ if haveDict {
+ z.decompressor = flate.NewReaderDict(z.r, dict)
+ } else {
+ z.decompressor = flate.NewReader(z.r)
+ }
+ } else {
+ z.decompressor.(flate.Resetter).Reset(z.r, dict)
+ }
+
+ if z.digest != nil {
+ z.digest.Reset()
+ } else {
+ z.digest = adler32.New()
+ }
+ return nil
+}
diff --git a/vendor/github.com/klauspost/compress/zlib/writer.go b/vendor/github.com/klauspost/compress/zlib/writer.go
new file mode 100644
index 0000000..605816b
--- /dev/null
+++ b/vendor/github.com/klauspost/compress/zlib/writer.go
@@ -0,0 +1,201 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package zlib
+
+import (
+ "fmt"
+ "hash"
+ "hash/adler32"
+ "io"
+
+ "github.com/klauspost/compress/flate"
+)
+
+// These constants are copied from the flate package, so that code that imports
+// "compress/zlib" does not also have to import "compress/flate".
+const (
+ NoCompression = flate.NoCompression
+ BestSpeed = flate.BestSpeed
+ BestCompression = flate.BestCompression
+ DefaultCompression = flate.DefaultCompression
+ ConstantCompression = flate.ConstantCompression
+ HuffmanOnly = flate.HuffmanOnly
+)
+
+// A Writer takes data written to it and writes the compressed
+// form of that data to an underlying writer (see NewWriter).
+type Writer struct {
+ w io.Writer
+ level int
+ dict []byte
+ compressor *flate.Writer
+ digest hash.Hash32
+ err error
+ scratch [4]byte
+ wroteHeader bool
+}
+
+// NewWriter creates a new Writer.
+// Writes to the returned Writer are compressed and written to w.
+//
+// It is the caller's responsibility to call Close on the WriteCloser when done.
+// Writes may be buffered and not flushed until Close.
+func NewWriter(w io.Writer) *Writer {
+ z, _ := NewWriterLevelDict(w, DefaultCompression, nil)
+ return z
+}
+
+// NewWriterLevel is like NewWriter but specifies the compression level instead
+// of assuming DefaultCompression.
+//
+// The compression level can be DefaultCompression, NoCompression, HuffmanOnly
+// or any integer value between BestSpeed and BestCompression inclusive.
+// The error returned will be nil if the level is valid.
+func NewWriterLevel(w io.Writer, level int) (*Writer, error) {
+ return NewWriterLevelDict(w, level, nil)
+}
+
+// NewWriterLevelDict is like NewWriterLevel but specifies a dictionary to
+// compress with.
+//
+// The dictionary may be nil. If not, its contents should not be modified until
+// the Writer is closed.
+func NewWriterLevelDict(w io.Writer, level int, dict []byte) (*Writer, error) {
+ if level < HuffmanOnly || level > BestCompression {
+ return nil, fmt.Errorf("zlib: invalid compression level: %d", level)
+ }
+ return &Writer{
+ w: w,
+ level: level,
+ dict: dict,
+ }, nil
+}
+
+// Reset clears the state of the Writer z such that it is equivalent to its
+// initial state from NewWriterLevel or NewWriterLevelDict, but writing
+// to w instead.
+func (z *Writer) Reset(w io.Writer) {
+ z.w = w
+ // z.level and z.dict left unchanged.
+ if z.compressor != nil {
+ z.compressor.Reset(w)
+ }
+ if z.digest != nil {
+ z.digest.Reset()
+ }
+ z.err = nil
+ z.scratch = [4]byte{}
+ z.wroteHeader = false
+}
+
+// writeHeader writes the ZLIB header.
+func (z *Writer) writeHeader() (err error) {
+ z.wroteHeader = true
+ // ZLIB has a two-byte header (as documented in RFC 1950).
+ // The first four bits are the CINFO (compression info), which is 7 for the default deflate window size.
+ // The next four bits are the CM (compression method), which is 8 for deflate.
+ z.scratch[0] = 0x78
+ // The next two bits are the FLEVEL (compression level). The four values are:
+ // 0=fastest, 1=fast, 2=default, 3=best.
+ // The next bit, FDICT, is set if a dictionary is given.
+ // The final five FCHECK bits form a mod-31 checksum.
+ switch z.level {
+ case -2, 0, 1:
+ z.scratch[1] = 0 << 6
+ case 2, 3, 4, 5:
+ z.scratch[1] = 1 << 6
+ case 6, -1:
+ z.scratch[1] = 2 << 6
+ case 7, 8, 9:
+ z.scratch[1] = 3 << 6
+ default:
+ panic("unreachable")
+ }
+ if z.dict != nil {
+ z.scratch[1] |= 1 << 5
+ }
+ z.scratch[1] += uint8(31 - (uint16(z.scratch[0])<<8+uint16(z.scratch[1]))%31)
+ if _, err = z.w.Write(z.scratch[0:2]); err != nil {
+ return err
+ }
+ if z.dict != nil {
+ // The next four bytes are the Adler-32 checksum of the dictionary.
+ checksum := adler32.Checksum(z.dict)
+ z.scratch[0] = uint8(checksum >> 24)
+ z.scratch[1] = uint8(checksum >> 16)
+ z.scratch[2] = uint8(checksum >> 8)
+ z.scratch[3] = uint8(checksum >> 0)
+ if _, err = z.w.Write(z.scratch[0:4]); err != nil {
+ return err
+ }
+ }
+ if z.compressor == nil {
+ // Initialize deflater unless the Writer is being reused
+ // after a Reset call.
+ z.compressor, err = flate.NewWriterDict(z.w, z.level, z.dict)
+ if err != nil {
+ return err
+ }
+ z.digest = adler32.New()
+ }
+ return nil
+}
+
+// Write writes a compressed form of p to the underlying io.Writer. The
+// compressed bytes are not necessarily flushed until the Writer is closed or
+// explicitly flushed.
+func (z *Writer) Write(p []byte) (n int, err error) {
+ if !z.wroteHeader {
+ z.err = z.writeHeader()
+ }
+ if z.err != nil {
+ return 0, z.err
+ }
+ if len(p) == 0 {
+ return 0, nil
+ }
+ n, err = z.compressor.Write(p)
+ if err != nil {
+ z.err = err
+ return
+ }
+ z.digest.Write(p)
+ return
+}
+
+// Flush flushes the Writer to its underlying io.Writer.
+func (z *Writer) Flush() error {
+ if !z.wroteHeader {
+ z.err = z.writeHeader()
+ }
+ if z.err != nil {
+ return z.err
+ }
+ z.err = z.compressor.Flush()
+ return z.err
+}
+
+// Close closes the Writer, flushing any unwritten data to the underlying
+// io.Writer, but does not close the underlying io.Writer.
+func (z *Writer) Close() error {
+ if !z.wroteHeader {
+ z.err = z.writeHeader()
+ }
+ if z.err != nil {
+ return z.err
+ }
+ z.err = z.compressor.Close()
+ if z.err != nil {
+ return z.err
+ }
+ checksum := z.digest.Sum32()
+ // ZLIB (RFC 1950) is big-endian, unlike GZIP (RFC 1952).
+ z.scratch[0] = uint8(checksum >> 24)
+ z.scratch[1] = uint8(checksum >> 16)
+ z.scratch[2] = uint8(checksum >> 8)
+ z.scratch[3] = uint8(checksum >> 0)
+ _, z.err = z.w.Write(z.scratch[0:4])
+ return z.err
+}
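
To see the writer's FDICT bit and dictionary checksum in action, here is a round-trip sketch with a preset dictionary; the dictionary bytes are invented for illustration and must be shared out of band between the two sides:

```go
package main

import (
	"bytes"
	"io"
	"log"
	"os"

	"github.com/klauspost/compress/zlib"
)

func main() {
	dict := []byte("hello, world")

	var buf bytes.Buffer
	zw, err := zlib.NewWriterLevelDict(&buf, zlib.BestCompression, dict)
	if err != nil {
		log.Fatal(err)
	}
	if _, err := zw.Write([]byte("hello, world\nhello, world\n")); err != nil {
		log.Fatal(err)
	}
	if err := zw.Close(); err != nil {
		log.Fatal(err)
	}

	// The reader must receive the same dictionary: a mismatched Adler-32
	// in the header yields ErrDictionary.
	zr, err := zlib.NewReaderDict(&buf, dict)
	if err != nil {
		log.Fatal(err)
	}
	defer zr.Close()
	if _, err := io.Copy(os.Stdout, zr); err != nil {
		log.Fatal(err)
	}
}
```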
diff --git a/vendor/github.com/leodido/go-urn/.gitignore b/vendor/github.com/leodido/go-urn/.gitignore
new file mode 100644
index 0000000..89d4bc5
--- /dev/null
+++ b/vendor/github.com/leodido/go-urn/.gitignore
@@ -0,0 +1,12 @@
+*.exe
+*.dll
+*.so
+*.dylib
+
+*.test
+
+*.out
+*.txt
+
+vendor/
+/removecomments
\ No newline at end of file
diff --git a/vendor/github.com/leodido/go-urn/LICENSE b/vendor/github.com/leodido/go-urn/LICENSE
new file mode 100644
index 0000000..8c3504a
--- /dev/null
+++ b/vendor/github.com/leodido/go-urn/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 Leonardo Di Donato
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/leodido/go-urn/README.md b/vendor/github.com/leodido/go-urn/README.md
new file mode 100644
index 0000000..731eecb
--- /dev/null
+++ b/vendor/github.com/leodido/go-urn/README.md
@@ -0,0 +1,81 @@
+[![Build](https://img.shields.io/circleci/build/github/leodido/go-urn?style=for-the-badge)](https://app.circleci.com/pipelines/github/leodido/go-urn) [![Coverage](https://img.shields.io/codecov/c/github/leodido/go-urn.svg?style=for-the-badge)](https://codecov.io/gh/leodido/go-urn) [![Documentation](https://img.shields.io/badge/godoc-reference-blue.svg?style=for-the-badge)](https://godoc.org/github.com/leodido/go-urn)
+
+**A parser for URNs**.
+
+> As seen on [RFC 2141](https://tools.ietf.org/html/rfc2141#ref-1).
+
+[API documentation](https://godoc.org/github.com/leodido/go-urn).
+
+## Installation
+
+```
+go get github.com/leodido/go-urn
+```
+
+## Performance
+
+This implementation is very fast: usually below ½ microsecond on my machine[1](#mymachine).
+
+While parsing, it also provides:
+
+1. fine-grained, informative errors (see the sketch after the example below)
+2. normalization of the specific string
+
+```
+ok/00/urn:a:b______________________________________/-4 20000000 265 ns/op 182 B/op 6 allocs/op
+ok/01/URN:foo:a123,456_____________________________/-4 30000000 296 ns/op 200 B/op 6 allocs/op
+ok/02/urn:foo:a123%2c456___________________________/-4 20000000 331 ns/op 208 B/op 6 allocs/op
+ok/03/urn:ietf:params:scim:schemas:core:2.0:User___/-4 20000000 430 ns/op 280 B/op 6 allocs/op
+ok/04/urn:ietf:params:scim:schemas:extension:enterp/-4 20000000 411 ns/op 312 B/op 6 allocs/op
+ok/05/urn:ietf:params:scim:schemas:extension:enterp/-4 20000000 472 ns/op 344 B/op 6 allocs/op
+ok/06/urn:burnout:nss______________________________/-4 30000000 257 ns/op 192 B/op 6 allocs/op
+ok/07/urn:abcdefghilmnopqrstuvzabcdefghilm:x_______/-4 20000000 375 ns/op 213 B/op 6 allocs/op
+ok/08/urn:urnurnurn:urn____________________________/-4 30000000 265 ns/op 197 B/op 6 allocs/op
+ok/09/urn:ciao:@!=%2c(xyz)+a,b.*@g=$_'_____________/-4 20000000 307 ns/op 248 B/op 6 allocs/op
+ok/10/URN:x:abc%1dz%2f%3az_________________________/-4 30000000 259 ns/op 212 B/op 6 allocs/op
+no/11/URN:-xxx:x___________________________________/-4 20000000 445 ns/op 320 B/op 6 allocs/op
+no/12/urn::colon:nss_______________________________/-4 20000000 461 ns/op 320 B/op 6 allocs/op
+no/13/urn:abcdefghilmnopqrstuvzabcdefghilmn:specifi/-4 10000000 660 ns/op 320 B/op 6 allocs/op
+no/14/URN:a!?:x____________________________________/-4 20000000 507 ns/op 320 B/op 6 allocs/op
+no/15/urn:urn:NSS__________________________________/-4 20000000 429 ns/op 288 B/op 6 allocs/op
+no/16/urn:white_space:NSS__________________________/-4 20000000 482 ns/op 320 B/op 6 allocs/op
+no/17/urn:concat:no_spaces_________________________/-4 20000000 539 ns/op 328 B/op 7 allocs/op
+no/18/urn:a:/______________________________________/-4 20000000 470 ns/op 320 B/op 7 allocs/op
+no/19/urn:UrN:NSS__________________________________/-4 20000000 399 ns/op 288 B/op 6 allocs/op
+```
+
+---
+
+* [1] : Intel Core i7-7600U CPU @ 2.80GHz
+
+---
+
+## Example
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/leodido/go-urn"
+)
+
+func main() {
+ var uid = "URN:foo:a123,456"
+
+ u, ok := urn.Parse([]byte(uid))
+ if !ok {
+ panic("error parsing urn")
+ }
+
+ fmt.Println(u.ID)
+ fmt.Println(u.SS)
+
+ // Output:
+ // foo
+ // a123,456
+}
+```
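
For the fine-grained errors mentioned under Performance, the `Machine` interface can be used directly; a brief sketch (the malformed input is arbitrary):

```go
package main

import (
	"fmt"

	"github.com/leodido/go-urn"
)

func main() {
	m := urn.NewMachine()
	if _, err := m.Parse([]byte("urn::nss")); err != nil {
		// A column-annotated error, here about the missing identifier.
		fmt.Println(err)
	}
}
```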
+
+[![Analytics](https://ga-beacon.appspot.com/UA-49657176-1/go-urn?flat)](https://github.com/igrigorik/ga-beacon)
\ No newline at end of file
diff --git a/vendor/github.com/leodido/go-urn/machine.go b/vendor/github.com/leodido/go-urn/machine.go
new file mode 100644
index 0000000..f8d57b4
--- /dev/null
+++ b/vendor/github.com/leodido/go-urn/machine.go
@@ -0,0 +1,1688 @@
+package urn
+
+import (
+ "fmt"
+)
+
+var (
+ errPrefix = "expecting the prefix to be the \"urn\" string (whatever case) [col %d]"
+ errIdentifier = "expecting the identifier to be string (1..31 alnum chars, also containing dashes but not at its start) [col %d]"
+ errSpecificString = "expecting the specific string to be a string containing alnum, hex, or others ([()+,-.:=@;$_!*']) chars [col %d]"
+ errNoUrnWithinID = "expecting the identifier to not contain the \"urn\" reserved string [col %d]"
+ errHex = "expecting the specific string hex chars to be well-formed (%%alnum{2}) [col %d]"
+ errParse = "parsing error [col %d]"
+)
+
+const start int = 1
+const firstFinal int = 44
+
+const enFail int = 46
+const enMain int = 1
+
+// Machine is the interface representing the FSM
+type Machine interface {
+ Error() error
+ Parse(input []byte) (*URN, error)
+}
+
+type machine struct {
+ data []byte
+ cs int
+ p, pe, eof, pb int
+ err error
+ tolower []int
+}
+
+// NewMachine creates a new FSM able to parse RFC 2141 strings.
+func NewMachine() Machine {
+ m := &machine{}
+
+ return m
+}
+
+// Err returns the error that occurred on the last call to Parse.
+//
+// If the result is nil, then the line was parsed successfully.
+func (m *machine) Error() error {
+ return m.err
+}
+
+func (m *machine) text() []byte {
+ return m.data[m.pb:m.p]
+}
+
+// Parse parses the input byte array as a RFC 2141 string.
+func (m *machine) Parse(input []byte) (*URN, error) {
+ m.data = input
+ m.p = 0
+ m.pb = 0
+ m.pe = len(input)
+ m.eof = len(input)
+ m.err = nil
+ m.tolower = []int{}
+ output := &URN{}
+ {
+ m.cs = start
+ }
+ {
+ if (m.p) == (m.pe) {
+ goto _testEof
+ }
+ switch m.cs {
+ case 1:
+ goto stCase1
+ case 0:
+ goto stCase0
+ case 2:
+ goto stCase2
+ case 3:
+ goto stCase3
+ case 4:
+ goto stCase4
+ case 5:
+ goto stCase5
+ case 6:
+ goto stCase6
+ case 7:
+ goto stCase7
+ case 8:
+ goto stCase8
+ case 9:
+ goto stCase9
+ case 10:
+ goto stCase10
+ case 11:
+ goto stCase11
+ case 12:
+ goto stCase12
+ case 13:
+ goto stCase13
+ case 14:
+ goto stCase14
+ case 15:
+ goto stCase15
+ case 16:
+ goto stCase16
+ case 17:
+ goto stCase17
+ case 18:
+ goto stCase18
+ case 19:
+ goto stCase19
+ case 20:
+ goto stCase20
+ case 21:
+ goto stCase21
+ case 22:
+ goto stCase22
+ case 23:
+ goto stCase23
+ case 24:
+ goto stCase24
+ case 25:
+ goto stCase25
+ case 26:
+ goto stCase26
+ case 27:
+ goto stCase27
+ case 28:
+ goto stCase28
+ case 29:
+ goto stCase29
+ case 30:
+ goto stCase30
+ case 31:
+ goto stCase31
+ case 32:
+ goto stCase32
+ case 33:
+ goto stCase33
+ case 34:
+ goto stCase34
+ case 35:
+ goto stCase35
+ case 36:
+ goto stCase36
+ case 37:
+ goto stCase37
+ case 38:
+ goto stCase38
+ case 44:
+ goto stCase44
+ case 39:
+ goto stCase39
+ case 40:
+ goto stCase40
+ case 45:
+ goto stCase45
+ case 41:
+ goto stCase41
+ case 42:
+ goto stCase42
+ case 43:
+ goto stCase43
+ case 46:
+ goto stCase46
+ }
+ goto stOut
+ stCase1:
+ switch (m.data)[(m.p)] {
+ case 85:
+ goto tr1
+ case 117:
+ goto tr1
+ }
+ goto tr0
+ tr0:
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr3:
+
+ m.err = fmt.Errorf(errPrefix, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr6:
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr41:
+
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr44:
+
+ m.err = fmt.Errorf(errHex, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr50:
+
+ m.err = fmt.Errorf(errPrefix, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr52:
+
+ m.err = fmt.Errorf(errNoUrnWithinID, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ stCase0:
+ st0:
+ m.cs = 0
+ goto _out
+ tr1:
+
+ m.pb = m.p
+
+ goto st2
+ st2:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof2
+ }
+ stCase2:
+ switch (m.data)[(m.p)] {
+ case 82:
+ goto st3
+ case 114:
+ goto st3
+ }
+ goto tr0
+ st3:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof3
+ }
+ stCase3:
+ switch (m.data)[(m.p)] {
+ case 78:
+ goto st4
+ case 110:
+ goto st4
+ }
+ goto tr3
+ st4:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof4
+ }
+ stCase4:
+ if (m.data)[(m.p)] == 58 {
+ goto tr5
+ }
+ goto tr0
+ tr5:
+
+ output.prefix = string(m.text())
+
+ goto st5
+ st5:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof5
+ }
+ stCase5:
+ switch (m.data)[(m.p)] {
+ case 85:
+ goto tr8
+ case 117:
+ goto tr8
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto tr7
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto tr7
+ }
+ default:
+ goto tr7
+ }
+ goto tr6
+ tr7:
+
+ m.pb = m.p
+
+ goto st6
+ st6:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof6
+ }
+ stCase6:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st7
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st7
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st7
+ }
+ default:
+ goto st7
+ }
+ goto tr6
+ st7:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof7
+ }
+ stCase7:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st8
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st8
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st8
+ }
+ default:
+ goto st8
+ }
+ goto tr6
+ st8:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof8
+ }
+ stCase8:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st9
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st9
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st9
+ }
+ default:
+ goto st9
+ }
+ goto tr6
+ st9:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof9
+ }
+ stCase9:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st10
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st10
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st10
+ }
+ default:
+ goto st10
+ }
+ goto tr6
+ st10:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof10
+ }
+ stCase10:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st11
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st11
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st11
+ }
+ default:
+ goto st11
+ }
+ goto tr6
+ st11:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof11
+ }
+ stCase11:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st12
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st12
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st12
+ }
+ default:
+ goto st12
+ }
+ goto tr6
+ st12:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof12
+ }
+ stCase12:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st13
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st13
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st13
+ }
+ default:
+ goto st13
+ }
+ goto tr6
+ st13:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof13
+ }
+ stCase13:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st14
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st14
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st14
+ }
+ default:
+ goto st14
+ }
+ goto tr6
+ st14:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof14
+ }
+ stCase14:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st15
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st15
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st15
+ }
+ default:
+ goto st15
+ }
+ goto tr6
+ st15:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof15
+ }
+ stCase15:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st16
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st16
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st16
+ }
+ default:
+ goto st16
+ }
+ goto tr6
+ st16:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof16
+ }
+ stCase16:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st17
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st17
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st17
+ }
+ default:
+ goto st17
+ }
+ goto tr6
+ st17:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof17
+ }
+ stCase17:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st18
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st18
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st18
+ }
+ default:
+ goto st18
+ }
+ goto tr6
+ st18:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof18
+ }
+ stCase18:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st19
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st19
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st19
+ }
+ default:
+ goto st19
+ }
+ goto tr6
+ st19:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof19
+ }
+ stCase19:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st20
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st20
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st20
+ }
+ default:
+ goto st20
+ }
+ goto tr6
+ st20:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof20
+ }
+ stCase20:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st21
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st21
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st21
+ }
+ default:
+ goto st21
+ }
+ goto tr6
+ st21:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof21
+ }
+ stCase21:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st22
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st22
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st22
+ }
+ default:
+ goto st22
+ }
+ goto tr6
+ st22:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof22
+ }
+ stCase22:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st23
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st23
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st23
+ }
+ default:
+ goto st23
+ }
+ goto tr6
+ st23:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof23
+ }
+ stCase23:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st24
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st24
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st24
+ }
+ default:
+ goto st24
+ }
+ goto tr6
+ st24:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof24
+ }
+ stCase24:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st25
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st25
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st25
+ }
+ default:
+ goto st25
+ }
+ goto tr6
+ st25:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof25
+ }
+ stCase25:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st26
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st26
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st26
+ }
+ default:
+ goto st26
+ }
+ goto tr6
+ st26:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof26
+ }
+ stCase26:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st27
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st27
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st27
+ }
+ default:
+ goto st27
+ }
+ goto tr6
+ st27:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof27
+ }
+ stCase27:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st28
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st28
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st28
+ }
+ default:
+ goto st28
+ }
+ goto tr6
+ st28:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof28
+ }
+ stCase28:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st29
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st29
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st29
+ }
+ default:
+ goto st29
+ }
+ goto tr6
+ st29:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof29
+ }
+ stCase29:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st30
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st30
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st30
+ }
+ default:
+ goto st30
+ }
+ goto tr6
+ st30:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof30
+ }
+ stCase30:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st31
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st31
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st31
+ }
+ default:
+ goto st31
+ }
+ goto tr6
+ st31:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof31
+ }
+ stCase31:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st32
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st32
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st32
+ }
+ default:
+ goto st32
+ }
+ goto tr6
+ st32:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof32
+ }
+ stCase32:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st33
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st33
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st33
+ }
+ default:
+ goto st33
+ }
+ goto tr6
+ st33:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof33
+ }
+ stCase33:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st34
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st34
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st34
+ }
+ default:
+ goto st34
+ }
+ goto tr6
+ st34:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof34
+ }
+ stCase34:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st35
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st35
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st35
+ }
+ default:
+ goto st35
+ }
+ goto tr6
+ st35:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof35
+ }
+ stCase35:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st36
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st36
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st36
+ }
+ default:
+ goto st36
+ }
+ goto tr6
+ st36:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof36
+ }
+ stCase36:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st37
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st37
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st37
+ }
+ default:
+ goto st37
+ }
+ goto tr6
+ st37:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof37
+ }
+ stCase37:
+ if (m.data)[(m.p)] == 58 {
+ goto tr10
+ }
+ goto tr6
+ tr10:
+
+ output.ID = string(m.text())
+
+ goto st38
+ st38:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof38
+ }
+ stCase38:
+ switch (m.data)[(m.p)] {
+ case 33:
+ goto tr42
+ case 36:
+ goto tr42
+ case 37:
+ goto tr43
+ case 61:
+ goto tr42
+ case 95:
+ goto tr42
+ }
+ switch {
+ case (m.data)[(m.p)] < 48:
+ if 39 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 46 {
+ goto tr42
+ }
+ case (m.data)[(m.p)] > 59:
+ switch {
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto tr42
+ }
+ case (m.data)[(m.p)] >= 64:
+ goto tr42
+ }
+ default:
+ goto tr42
+ }
+ goto tr41
+ tr42:
+
+ m.pb = m.p
+
+ goto st44
+ st44:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof44
+ }
+ stCase44:
+ switch (m.data)[(m.p)] {
+ case 33:
+ goto st44
+ case 36:
+ goto st44
+ case 37:
+ goto st39
+ case 61:
+ goto st44
+ case 95:
+ goto st44
+ }
+ switch {
+ case (m.data)[(m.p)] < 48:
+ if 39 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 46 {
+ goto st44
+ }
+ case (m.data)[(m.p)] > 59:
+ switch {
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st44
+ }
+ case (m.data)[(m.p)] >= 64:
+ goto st44
+ }
+ default:
+ goto st44
+ }
+ goto tr41
+ tr43:
+
+ m.pb = m.p
+
+ goto st39
+ st39:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof39
+ }
+ stCase39:
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st40
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st40
+ }
+ default:
+ goto tr46
+ }
+ goto tr44
+ tr46:
+
+ m.tolower = append(m.tolower, m.p-m.pb)
+
+ goto st40
+ st40:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof40
+ }
+ stCase40:
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st45
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st45
+ }
+ default:
+ goto tr48
+ }
+ goto tr44
+ tr48:
+
+ m.tolower = append(m.tolower, m.p-m.pb)
+
+ goto st45
+ st45:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof45
+ }
+ stCase45:
+ switch (m.data)[(m.p)] {
+ case 33:
+ goto st44
+ case 36:
+ goto st44
+ case 37:
+ goto st39
+ case 61:
+ goto st44
+ case 95:
+ goto st44
+ }
+ switch {
+ case (m.data)[(m.p)] < 48:
+ if 39 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 46 {
+ goto st44
+ }
+ case (m.data)[(m.p)] > 59:
+ switch {
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st44
+ }
+ case (m.data)[(m.p)] >= 64:
+ goto st44
+ }
+ default:
+ goto st44
+ }
+ goto tr44
+ tr8:
+
+ m.pb = m.p
+
+ goto st41
+ st41:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof41
+ }
+ stCase41:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st7
+ case 58:
+ goto tr10
+ case 82:
+ goto st42
+ case 114:
+ goto st42
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st7
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st7
+ }
+ default:
+ goto st7
+ }
+ goto tr6
+ st42:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof42
+ }
+ stCase42:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st8
+ case 58:
+ goto tr10
+ case 78:
+ goto st43
+ case 110:
+ goto st43
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st8
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st8
+ }
+ default:
+ goto st8
+ }
+ goto tr50
+ st43:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof43
+ }
+ stCase43:
+ if (m.data)[(m.p)] == 45 {
+ goto st9
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st9
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st9
+ }
+ default:
+ goto st9
+ }
+ goto tr52
+ st46:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _testEof46
+ }
+ stCase46:
+ switch (m.data)[(m.p)] {
+ case 10:
+ goto st0
+ case 13:
+ goto st0
+ }
+ goto st46
+ stOut:
+ _testEof2:
+ m.cs = 2
+ goto _testEof
+ _testEof3:
+ m.cs = 3
+ goto _testEof
+ _testEof4:
+ m.cs = 4
+ goto _testEof
+ _testEof5:
+ m.cs = 5
+ goto _testEof
+ _testEof6:
+ m.cs = 6
+ goto _testEof
+ _testEof7:
+ m.cs = 7
+ goto _testEof
+ _testEof8:
+ m.cs = 8
+ goto _testEof
+ _testEof9:
+ m.cs = 9
+ goto _testEof
+ _testEof10:
+ m.cs = 10
+ goto _testEof
+ _testEof11:
+ m.cs = 11
+ goto _testEof
+ _testEof12:
+ m.cs = 12
+ goto _testEof
+ _testEof13:
+ m.cs = 13
+ goto _testEof
+ _testEof14:
+ m.cs = 14
+ goto _testEof
+ _testEof15:
+ m.cs = 15
+ goto _testEof
+ _testEof16:
+ m.cs = 16
+ goto _testEof
+ _testEof17:
+ m.cs = 17
+ goto _testEof
+ _testEof18:
+ m.cs = 18
+ goto _testEof
+ _testEof19:
+ m.cs = 19
+ goto _testEof
+ _testEof20:
+ m.cs = 20
+ goto _testEof
+ _testEof21:
+ m.cs = 21
+ goto _testEof
+ _testEof22:
+ m.cs = 22
+ goto _testEof
+ _testEof23:
+ m.cs = 23
+ goto _testEof
+ _testEof24:
+ m.cs = 24
+ goto _testEof
+ _testEof25:
+ m.cs = 25
+ goto _testEof
+ _testEof26:
+ m.cs = 26
+ goto _testEof
+ _testEof27:
+ m.cs = 27
+ goto _testEof
+ _testEof28:
+ m.cs = 28
+ goto _testEof
+ _testEof29:
+ m.cs = 29
+ goto _testEof
+ _testEof30:
+ m.cs = 30
+ goto _testEof
+ _testEof31:
+ m.cs = 31
+ goto _testEof
+ _testEof32:
+ m.cs = 32
+ goto _testEof
+ _testEof33:
+ m.cs = 33
+ goto _testEof
+ _testEof34:
+ m.cs = 34
+ goto _testEof
+ _testEof35:
+ m.cs = 35
+ goto _testEof
+ _testEof36:
+ m.cs = 36
+ goto _testEof
+ _testEof37:
+ m.cs = 37
+ goto _testEof
+ _testEof38:
+ m.cs = 38
+ goto _testEof
+ _testEof44:
+ m.cs = 44
+ goto _testEof
+ _testEof39:
+ m.cs = 39
+ goto _testEof
+ _testEof40:
+ m.cs = 40
+ goto _testEof
+ _testEof45:
+ m.cs = 45
+ goto _testEof
+ _testEof41:
+ m.cs = 41
+ goto _testEof
+ _testEof42:
+ m.cs = 42
+ goto _testEof
+ _testEof43:
+ m.cs = 43
+ goto _testEof
+ _testEof46:
+ m.cs = 46
+ goto _testEof
+
+ _testEof:
+ {
+ }
+ if (m.p) == (m.eof) {
+ switch m.cs {
+ case 44, 45:
+
+ raw := m.text()
+ output.SS = string(raw)
+ // Lowercase the uppercase letters recorded while parsing hex escapes
+ for _, i := range m.tolower {
+ raw[i] = raw[i] + 32
+ }
+ output.norm = string(raw)
+
+ case 1, 2, 4:
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 3:
+
+ m.err = fmt.Errorf(errPrefix, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 41:
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 38:
+
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 42:
+
+ m.err = fmt.Errorf(errPrefix, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 43:
+
+ m.err = fmt.Errorf(errNoUrnWithinID, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 39, 40:
+
+ m.err = fmt.Errorf(errHex, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+ }
+ }
+
+ _out:
+ {
+ }
+ }
+
+ if m.cs < firstFinal || m.cs == enFail {
+ return nil, m.err
+ }
+
+ return output, nil
+}
diff --git a/vendor/github.com/leodido/go-urn/machine.go.rl b/vendor/github.com/leodido/go-urn/machine.go.rl
new file mode 100644
index 0000000..3bc05a6
--- /dev/null
+++ b/vendor/github.com/leodido/go-urn/machine.go.rl
@@ -0,0 +1,159 @@
+package urn
+
+import (
+ "fmt"
+)
+
+var (
+ errPrefix = "expecting the prefix to be the \"urn\" string (whatever case) [col %d]"
+ errIdentifier = "expecting the identifier to be string (1..31 alnum chars, also containing dashes but not at its start) [col %d]"
+ errSpecificString = "expecting the specific string to be a string containing alnum, hex, or others ([()+,-.:=@;$_!*']) chars [col %d]"
+ errNoUrnWithinID = "expecting the identifier to not contain the \"urn\" reserved string [col %d]"
+ errHex = "expecting the specific string hex chars to be well-formed (%%alnum{2}) [col %d]"
+ errParse = "parsing error [col %d]"
+)
+
+%%{
+machine urn;
+
+# unsigned alphabet
+alphtype uint8;
+
+action mark {
+ m.pb = m.p
+}
+
+action tolower {
+ m.tolower = append(m.tolower, m.p - m.pb)
+}
+
+action set_pre {
+ output.prefix = string(m.text())
+}
+
+action set_nid {
+ output.ID = string(m.text())
+}
+
+action set_nss {
+ raw := m.text()
+ output.SS = string(raw)
+ // Lowercase the uppercase letters recorded while parsing hex escapes
+ for _, i := range m.tolower {
+ raw[i] = raw[i] + 32
+ }
+ output.norm = string(raw)
+}
+
+action err_pre {
+ m.err = fmt.Errorf(errPrefix, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_nid {
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_nss {
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_urn {
+ m.err = fmt.Errorf(errNoUrnWithinID, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_hex {
+ m.err = fmt.Errorf(errHex, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_parse {
+ m.err = fmt.Errorf(errParse, m.p)
+ fhold;
+ fgoto fail;
+}
+
+pre = ([uU][rR][nN] @err(err_pre)) >mark %set_pre;
+
+nid = (alnum >mark (alnum | '-'){0,31}) %set_nid;
+
+hex = '%' (digit | lower | upper >tolower){2} $err(err_hex);
+
+sss = (alnum | [()+,\-.:=@;$_!*']);
+
+nss = (sss | hex)+ $err(err_nss);
+
+fail := (any - [\n\r])* @err{ fgoto main; };
+
+main := (pre ':' (nid - pre %err(err_urn)) $err(err_nid) ':' nss >mark %set_nss) $err(err_parse);
+
+}%%
+
+%% write data noerror noprefix;
+
+// Machine is the interface representing the FSM
+type Machine interface {
+ Error() error
+ Parse(input []byte) (*URN, error)
+}
+
+type machine struct {
+ data []byte
+ cs int
+ p, pe, eof, pb int
+ err error
+ tolower []int
+}
+
+// NewMachine creates a new FSM able to parse RFC 2141 strings.
+func NewMachine() Machine {
+ m := &machine{}
+
+ %% access m.;
+ %% variable p m.p;
+ %% variable pe m.pe;
+ %% variable eof m.eof;
+ %% variable data m.data;
+
+ return m
+}
+
+// Err returns the error that occurred on the last call to Parse.
+//
+// If the result is nil, then the line was parsed successfully.
+func (m *machine) Error() error {
+ return m.err
+}
+
+func (m *machine) text() []byte {
+ return m.data[m.pb:m.p]
+}
+
+// Parse parses the input byte array as a RFC 2141 string.
+func (m *machine) Parse(input []byte) (*URN, error) {
+ m.data = input
+ m.p = 0
+ m.pb = 0
+ m.pe = len(input)
+ m.eof = len(input)
+ m.err = nil
+ m.tolower = []int{}
+ output := &URN{}
+
+ %% write init;
+ %% write exec;
+
+ if m.cs < first_final || m.cs == en_fail {
+ return nil, m.err
+ }
+
+ return output, nil
+}
diff --git a/vendor/github.com/leodido/go-urn/makefile b/vendor/github.com/leodido/go-urn/makefile
new file mode 100644
index 0000000..df87cdc
--- /dev/null
+++ b/vendor/github.com/leodido/go-urn/makefile
@@ -0,0 +1,53 @@
+SHELL := /bin/bash
+RAGEL := ragel
+GOFMT := go fmt
+
+export GO_TEST=env GOTRACEBACK=all go test $(GO_ARGS)
+
+.PHONY: build
+build: machine.go
+
+.PHONY: clean
+clean:
+ @rm -rf docs
+ @rm -f machine.go
+
+.PHONY: images
+images: docs/urn.png
+
+.PHONY: removecomments
+removecomments:
+ @cd ./tools/removecomments; go build -o ../../removecomments .
+
+machine.go: machine.go.rl
+
+machine.go: removecomments
+
+machine.go:
+ $(RAGEL) -Z -G2 -e -o $@ $<
+ @./removecomments $@
+ $(MAKE) -s file=$@ snake2camel
+ $(GOFMT) $@
+
+docs/urn.dot: machine.go.rl
+ @mkdir -p docs
+ $(RAGEL) -Z -e -Vp $< -o $@
+
+docs/urn.png: docs/urn.dot
+ dot $< -Tpng -o $@
+
+.PHONY: bench
+bench: *_test.go machine.go
+ go test -bench=. -benchmem -benchtime=5s ./...
+
+.PHONY: tests
+tests: *_test.go
+ $(GO_TEST) ./...
+
+.PHONY: snake2camel
+snake2camel:
+ @awk -i inplace '{ \
+ while ( match($$0, /(.*)([a-z]+[0-9]*)_([a-zA-Z0-9])(.*)/, cap) ) \
+ $$0 = cap[1] cap[2] toupper(cap[3]) cap[4]; \
+ print \
+ }' $(file)
diff --git a/vendor/github.com/leodido/go-urn/urn.go b/vendor/github.com/leodido/go-urn/urn.go
new file mode 100644
index 0000000..d51a6c9
--- /dev/null
+++ b/vendor/github.com/leodido/go-urn/urn.go
@@ -0,0 +1,86 @@
+package urn
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+)
+
+const errInvalidURN = "invalid URN: %s"
+
+// URN represents a Uniform Resource Name.
+//
+// The general form represented is:
+//
+// urn:<id>:<ss>
+//
+// Details at https://tools.ietf.org/html/rfc2141.
+type URN struct {
+ prefix string // Static prefix. Equal to "urn" when empty.
+ ID string // Namespace identifier
+ SS string // Namespace specific string
+ norm string // Normalized namespace specific string
+}
+
+// Normalize turns the receiving URN into its norm version.
+//
+// This means: lowercase prefix, lowercase namespace identifier, and the namespace specific string chars left unchanged, except hex tokens, which are lowercased.
+func (u *URN) Normalize() *URN {
+ return &URN{
+ prefix: "urn",
+ ID: strings.ToLower(u.ID),
+ SS: u.norm,
+ }
+}
+
+// Equal checks the lexical equivalence of the current URN with another one.
+func (u *URN) Equal(x *URN) bool {
+ return *u.Normalize() == *x.Normalize()
+}
+
+// String reassembles the URN into a valid URN string.
+//
+// This requires both ID and SS fields to be non-empty.
+// Otherwise it returns an empty string.
+//
+// Default URN prefix is "urn".
+func (u *URN) String() string {
+ var res string
+ if u.ID != "" && u.SS != "" {
+ if u.prefix == "" {
+ res += "urn"
+ }
+ res += u.prefix + ":" + u.ID + ":" + u.SS
+ }
+
+ return res
+}
+
+// Parse is responsible for creating a URN instance from a byte array matching the correct URN syntax.
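+//
+// A minimal usage sketch (input value hypothetical):
+//
+// v, ok := Parse([]byte("urn:oid:1.2.3.4"))
+// if ok {
+//  fmt.Println(v.ID, v.SS) // oid 1.2.3.4
+// }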
+func Parse(u []byte) (*URN, bool) {
+ urn, err := NewMachine().Parse(u)
+ if err != nil {
+ return nil, false
+ }
+
+ return urn, true
+}
+
+// MarshalJSON marshals the URN to JSON string form (e.g. `"urn:oid:1.2.3.4"`).
+func (u URN) MarshalJSON() ([]byte, error) {
+ return json.Marshal(u.String())
+}
+
+// UnmarshalJSON unmarshals a URN from JSON string form (e.g. `"urn:oid:1.2.3.4"`).
+func (u *URN) UnmarshalJSON(bytes []byte) error {
+ var str string
+ if err := json.Unmarshal(bytes, &str); err != nil {
+ return err
+ }
+ if value, ok := Parse([]byte(str)); !ok {
+ return fmt.Errorf(errInvalidURN, str)
+ } else {
+ *u = *value
+ }
+ return nil
+}
\ No newline at end of file
diff --git a/vendor/github.com/lezhnev74/go-iterators/.gitignore b/vendor/github.com/lezhnev74/go-iterators/.gitignore
new file mode 100644
index 0000000..723ef36
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/.gitignore
@@ -0,0 +1 @@
+.idea
\ No newline at end of file
diff --git a/vendor/github.com/lezhnev74/go-iterators/README.md b/vendor/github.com/lezhnev74/go-iterators/README.md
new file mode 100644
index 0000000..0a18571
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/README.md
@@ -0,0 +1,52 @@
+# Go Iterators
+
+[![Build](https://github.com/lezhnev74/go-iterators/actions/workflows/go.yml/badge.svg)](https://github.com/lezhnev74/go-iterators/actions/workflows/go.yml)
+![Code Coverage](https://raw.githubusercontent.com/lezhnev74/go-iterators/badges/.badges/main/coverage.svg)
+
+Since Go does not have a default iterator type (though there are
+discussions [here](https://bitfieldconsulting.com/golang/iterators), [here](https://github.com/golang/go/issues/61897)
+and [there](https://ewencp.org/blog/golang-iterators/)), here is a set of different iterators crafted manually.
+Notably, there is [a proposal](https://github.com/golang/go/issues/61898) for a package that defines compound
+operations on iterators, like merging/selecting. Until Go ships a standard-library iterator implementation (or at
+least an experimental standalone package), this package fills the gap.
+
+## Iterator Interface
+
+```go
+// Iterator is used for working with sequences of possibly unknown size.
+// The interface adds a performance penalty due to indirection.
+type Iterator[T any] interface {
+ // Next returns EmptyIterator when no value is available at the source.
+ // error == nil means the returned value is good.
+ Next() (T, error)
+ // Close lets the client stop the iteration before EmptyIterator is received.
+ // After the first call it must return ClosedIterator.
+ io.Closer
+}
+```
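+
+A minimal consumption sketch (the `use` callback is hypothetical; `errors` is the
+standard-library package):
+
+```go
+for {
+ v, err := it.Next()
+ if errors.Is(err, EmptyIterator) {
+  break // the source is exhausted
+ }
+ if err != nil {
+  return err
+ }
+ use(v)
+}
+_ = it.Close()
+```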
+
+## Various Iterators
+
+Single iterators
+- `CallbackIterator` calls a function to provide the next value
+- `SliceIterator` iterates over a static precalculated slice
+- `DynamicSliceIterator` behaves like `SliceIterator` but fetches a new slice when the previous one is exhausted
+
+Compound iterators
+
+Unary:
+- `ClosingIterator` adds custom closing logic on top of another iterator
+- `BatchingIterator` buffers the internal iterator and returns slices of values
+- `FilteringIterator` filters values from the internal iterator
+- `MappingIterator` maps values from the inner iterator
+
+Binary:
+- `SortedSelectingIterator` combines two sorted iterators into a single sorted iterator.
+- `UniqueSelectingIterator` is the same as `SortedSelectingIterator` but removes duplicates.
+- `DiffIterator` returns all values from the first iterator that are not present in the second.
+
+## Design notes
+
+- compound iterators proxy errors from internal iterators
+- compound iterators close internal iterators upon emptying
+- compound binary iterators make it possible to build efficient selection trees, as sketched below
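+
+For example, merging two sorted slices using this package's own constructors
+(a sketch; values hypothetical):
+
+```go
+a := NewSliceIterator([]int{1, 3, 5})
+b := NewSliceIterator([]int{2, 4})
+merged := NewSortedSelectingIterator(a, b, func(x, y int) int { return x - y })
+fmt.Println(ToSlice(merged)) // [1 2 3 4 5]
+_ = merged.Close()
+```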
diff --git a/vendor/github.com/lezhnev74/go-iterators/batching_iterator.go b/vendor/github.com/lezhnev74/go-iterators/batching_iterator.go
new file mode 100644
index 0000000..564c81f
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/batching_iterator.go
@@ -0,0 +1,49 @@
+package go_iterators
+
+import (
+ "errors"
+ "fmt"
+)
+
+// BatchingIterator buffers the internal iterator and returns slices of values
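+//
+// A minimal sketch (values hypothetical):
+//
+// it := NewBatchingIterator(NewSliceIterator([]int{1, 2, 3}), 2)
+// batch, _ := it.Next() // []int{1, 2}
+// batch, _ = it.Next()  // []int{3}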
+type BatchingIterator[T any] struct {
+ innerIterator Iterator[T]
+ batchSize int
+}
+
+func NewBatchingIterator[T any](inner Iterator[T], batchSize int) Iterator[[]T] {
+ if batchSize < 1 {
+ panic(fmt.Sprintf("batch size is too low: %d", batchSize))
+ }
+ return &BatchingIterator[T]{inner, batchSize}
+}
+
+func (b *BatchingIterator[T]) Next() (v []T, err error) {
+
+ v = make([]T, 0, b.batchSize)
+ var item T
+
+ for {
+ item, err = b.innerIterator.Next()
+
+ if err != nil {
+ break
+ }
+
+ v = append(v, item)
+
+ if len(v) == b.batchSize {
+ break
+ }
+ }
+
+ if errors.Is(err, EmptyIterator) && len(v) > 0 {
+ err = nil
+ }
+
+ return
+}
+
+func (b *BatchingIterator[T]) Close() error {
+ return b.innerIterator.Close()
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/callback_iterator.go b/vendor/github.com/lezhnev74/go-iterators/callback_iterator.go
new file mode 100644
index 0000000..27767e3
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/callback_iterator.go
@@ -0,0 +1,27 @@
+package go_iterators
+
+// CallbackIterator calls a function to provide the next value
+type CallbackIterator[T any] struct {
+ cb func() (T, error)
+ close func() error
+ isClosed bool
+}
+
+func NewCallbackIterator[T any](
+ cb func() (T, error),
+ close func() error,
+) Iterator[T] {
+ return &CallbackIterator[T]{
+ cb: cb,
+ close: close,
+ }
+}
+
+func (c *CallbackIterator[T]) Close() error {
+ if c.isClosed {
+ return ClosedIterator
+ }
+ c.isClosed = true
+ return c.close()
+}
+func (c *CallbackIterator[T]) Next() (v T, err error) { return c.cb() }
diff --git a/vendor/github.com/lezhnev74/go-iterators/closing_iterator.go b/vendor/github.com/lezhnev74/go-iterators/closing_iterator.go
new file mode 100644
index 0000000..77ed1ff
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/closing_iterator.go
@@ -0,0 +1,38 @@
+package go_iterators
+
+import "errors"
+
+// ClosingIterator adds custom closing logic on top of another iterator
+type ClosingIterator[T any] struct {
+ innerIterator Iterator[T]
+ // extra function called AFTER "innerErr := Close()" returns
+ close func(innerErr error) error
+ isClosed bool
+}
+
+func (c *ClosingIterator[T]) Next() (T, error) {
+ return c.innerIterator.Next()
+}
+
+func (c *ClosingIterator[T]) Close() error {
+ if c.isClosed {
+ return ClosedIterator
+ }
+ err := c.innerIterator.Close()
+ err = c.close(err)
+
+ // Close it if no errors happened or if the inner iterator has been closed already
+ if err == nil || errors.Is(err, ClosedIterator) {
+ c.isClosed = true
+ err = nil
+ }
+
+ return err
+}
+
+func NewClosingIterator[T any](innerIterator Iterator[T], close func(innerErr error) error) Iterator[T] {
+ return &ClosingIterator[T]{
+ innerIterator: innerIterator,
+ close: close,
+ }
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/diff_iterator.go b/vendor/github.com/lezhnev74/go-iterators/diff_iterator.go
new file mode 100644
index 0000000..b802253
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/diff_iterator.go
@@ -0,0 +1,41 @@
+package go_iterators
+
+// DiffIterator returns all values from it1 that are not present in it2
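+//
+// A minimal sketch with sorted inputs (values hypothetical):
+//
+// it := NewRemovingIterator(
+//  NewSliceIterator([]int{1, 2, 3}),
+//  NewSliceIterator([]int{2}),
+//  func(a, b int) int { return a - b },
+// )
+// // yields 1, then 3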
+type DiffIterator[T any] struct {
+ SelectingIterator[T]
+}
+
+func (si *DiffIterator[T]) Next() (v T, err error) {
+ // check if both values present and collapse, fetch more if so
+ err = si.fetch()
+ for {
+ if err != nil {
+ return
+ }
+ if si.v1Fetched && si.v2Fetched && si.cmp(si.v1, si.v2) == 0 {
+ si.v1Fetched, si.v2Fetched = false, false
+ err = si.fetch()
+ continue
+ }
+ break
+ }
+
+ if si.v1Fetched {
+ si.v1Fetched = false
+ v = si.v1
+ return
+ }
+
+ err = EmptyIterator
+ return
+}
+
+func NewRemovingIterator[T any](itMain, itRemove Iterator[T], cf CmpFunc[T]) Iterator[T] {
+ return &DiffIterator[T]{
+ SelectingIterator[T]{
+ it1: itMain,
+ it2: itRemove,
+ cmp: cf,
+ },
+ }
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/dynamic_slice_iterator.go b/vendor/github.com/lezhnev74/go-iterators/dynamic_slice_iterator.go
new file mode 100644
index 0000000..7a35026
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/dynamic_slice_iterator.go
@@ -0,0 +1,45 @@
+package go_iterators
+
+// DynamicSliceIterator implements Iterator over a dynamic slice:
+// whenever it needs data, it calls fetch() to get a new slice to iterate over
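+//
+// A minimal sketch fetching two slices (values hypothetical):
+//
+// pages := [][]int{{1, 2}, {3}}
+// it := NewDynamicSliceIterator(
+//  func() ([]int, error) {
+//   if len(pages) == 0 {
+//    return nil, nil // nil slice stops the iteration
+//   }
+//   next := pages[0]
+//   pages = pages[1:]
+//   return next, nil
+//  },
+//  func() error { return nil },
+// )
+// // yields 1, 2, 3, then EmptyIterator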
+type DynamicSliceIterator[T any] struct {
+ values []T
+ fetch func() ([]T, error) // nil or empty slice stops iteration
+ close func() error
+ isClosed bool
+}
+
+func (s *DynamicSliceIterator[T]) Close() error {
+ if s.isClosed {
+ return ClosedIterator
+ }
+ s.isClosed = true
+ return s.close()
+}
+
+// Next calls the underlying fetch func when the current slice is drained;
+// the behaviour is undefined if the fetch function returns both an error and a value.
+func (s *DynamicSliceIterator[T]) Next() (v T, err error) {
+ if len(s.values) == 0 {
+ s.values, err = s.fetch()
+ if err != nil {
+ return
+ }
+ }
+
+ if len(s.values) == 0 {
+ err = EmptyIterator
+ return
+ }
+
+ v = s.values[0]
+ s.values = s.values[1:]
+ return
+}
+
+func NewDynamicSliceIterator[T any](fetch func() ([]T, error), close func() error) Iterator[T] {
+ return &DynamicSliceIterator[T]{
+ fetch: fetch,
+ close: close,
+ }
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/filtering_iterator.go b/vendor/github.com/lezhnev74/go-iterators/filtering_iterator.go
new file mode 100644
index 0000000..cb7bf05
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/filtering_iterator.go
@@ -0,0 +1,27 @@
+package go_iterators
+
+// FilteringIterator filters values from the internal iterator
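+//
+// A minimal sketch keeping even numbers (values hypothetical):
+//
+// it := NewFilteringIterator(
+//  NewSliceIterator([]int{1, 2, 3, 4}),
+//  func(v int) bool { return v%2 == 0 },
+// )
+// // yields 2, then 4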
+type FilteringIterator[T any] struct {
+ innerIterator Iterator[T]
+ filter func(T) bool
+}
+
+func (f FilteringIterator[T]) Next() (T, error) {
+ for {
+ v, err := f.innerIterator.Next()
+ if err != nil || f.filter(v) {
+ return v, err
+ }
+ }
+}
+
+func (f FilteringIterator[T]) Close() error {
+ return f.innerIterator.Close()
+}
+
+func NewFilteringIterator[T any](inner Iterator[T], filter func(T) bool) Iterator[T] {
+ return &FilteringIterator[T]{
+ innerIterator: inner,
+ filter: filter,
+ }
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/iterator.go b/vendor/github.com/lezhnev74/go-iterators/iterator.go
new file mode 100644
index 0000000..7b07f44
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/iterator.go
@@ -0,0 +1,35 @@
+package go_iterators
+
+import (
+ "errors"
+ "io"
+)
+
+// EmptyIterator signals that no further values will follow from the iterator.
+var EmptyIterator = errors.New("iterator is empty")
+
+// ClosedIterator signals that the iterator has already been closed.
+var ClosedIterator = errors.New("iterator is closed")
+
+// Iterator is used for working with sequences of possibly unknown size.
+// The interface adds a performance penalty due to indirection.
+type Iterator[T any] interface {
+ // Next returns EmptyIterator when no value is available at the source.
+ // error == nil means the returned value is good.
+ Next() (T, error)
+ // Close lets the client stop the iteration before EmptyIterator is received.
+ // After the first call it must return ClosedIterator.
+ io.Closer
+}
+
+// CmpFunc returns -1, 0, or 1 when a < b, a == b, or a > b, respectively.
+type CmpFunc[T any] func(a, b T) int
+
+func ToSlice[T any](it Iterator[T]) (dump []T) {
+ for {
+ v, err := it.Next()
+ if err != nil {
+ break
+ }
+ dump = append(dump, v)
+ }
+ return
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/mapping_iterator.go b/vendor/github.com/lezhnev74/go-iterators/mapping_iterator.go
new file mode 100644
index 0000000..9659994
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/mapping_iterator.go
@@ -0,0 +1,23 @@
+package go_iterators
+
+// MappingIterator maps values from the inner iterator
+type MappingIterator[T any, InnerT any] struct {
+ innerIterator Iterator[InnerT]
+ mf func(InnerT) T
+}
+
+func (m *MappingIterator[T, InnerT]) Next() (T, error) {
+ v, err := m.innerIterator.Next()
+ return m.mf(v), err
+}
+
+func (m *MappingIterator[T, InnerT]) Close() error {
+ return m.innerIterator.Close()
+}
+
+func NewMappingIterator[T any, InnerT any](inner Iterator[InnerT], mf func(InnerT) T) Iterator[T] {
+ return &MappingIterator[T, InnerT]{
+ innerIterator: inner,
+ mf: mf,
+ }
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/selecting_iterator.go b/vendor/github.com/lezhnev74/go-iterators/selecting_iterator.go
new file mode 100644
index 0000000..653fd43
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/selecting_iterator.go
@@ -0,0 +1,78 @@
+package go_iterators
+
+import (
+ "errors"
+)
+
+// SelectingIterator combines two iterators together.
+// We can customize the selection logic with a custom "next" function.
+// This iterator closes an internal iterator once it returns "EmptyIterator".
+type SelectingIterator[T any] struct {
+ it1, it2 Iterator[T]
+ v1, v2 T // prefetched from internal iterators
+ v1Fetched, v2Fetched bool // is value prefetched
+ cmp CmpFunc[T]
+ next func() (v T, err error) // custom Next selector
+ isClosed bool
+}
+
+func (s *SelectingIterator[T]) Close() error {
+
+ if s.isClosed {
+ return ClosedIterator
+ }
+ s.isClosed = true
+
+ if s.it1 != nil {
+ err := s.it1.Close()
+ s.it1 = nil
+ if err != nil {
+ s.it2.Close() // close anyway
+ s.it2 = nil
+ if !errors.Is(err, ClosedIterator) {
+ return err
+ }
+ }
+ }
+
+ if s.it2 != nil {
+ err := s.it2.Close()
+ s.it2 = nil
+ if err != nil && !errors.Is(err, ClosedIterator) {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (si *SelectingIterator[T]) fetch() error {
+ var err error
+
+ if si.it1 != nil && !si.v1Fetched {
+ si.v1, err = si.it1.Next()
+ si.v1Fetched = err == nil
+
+ if errors.Is(err, EmptyIterator) {
+ err = si.it1.Close()
+ si.it1 = nil
+ }
+ }
+ if err != nil && !errors.Is(err, EmptyIterator) {
+ return err
+ }
+
+ if si.it2 != nil && !si.v2Fetched {
+ si.v2, err = si.it2.Next()
+ si.v2Fetched = err == nil
+
+ if errors.Is(err, EmptyIterator) {
+ err = si.it2.Close()
+ si.it2 = nil
+ }
+ }
+ if err != nil && !errors.Is(err, EmptyIterator) {
+ return err
+ }
+ return nil
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/slice_iterator.go b/vendor/github.com/lezhnev74/go-iterators/slice_iterator.go
new file mode 100644
index 0000000..ae50426
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/slice_iterator.go
@@ -0,0 +1,32 @@
+package go_iterators
+
+// SliceIterator implements Iterator over a static slice
+type SliceIterator[T any] struct {
+ values []T
+ pos int
+ isClosed bool
+}
+
+func (s *SliceIterator[T]) Close() error {
+ if s.isClosed {
+ return ClosedIterator
+ }
+ s.isClosed = true
+ return nil
+}
+func (s *SliceIterator[T]) Next() (v T, err error) {
+ if s.pos >= len(s.values) {
+ err = EmptyIterator
+ return
+ }
+ v = s.values[s.pos]
+ s.pos++
+ return
+}
+
+func NewSliceIterator[T any](values []T) Iterator[T] {
+ return &SliceIterator[T]{
+ values: values,
+ pos: 0,
+ }
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/sorted_selecting_iterator.go b/vendor/github.com/lezhnev74/go-iterators/sorted_selecting_iterator.go
new file mode 100644
index 0000000..b63591a
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/sorted_selecting_iterator.go
@@ -0,0 +1,57 @@
+package go_iterators
+
+// SortedSelectingIterator returns sorted values from two other iterators;
+// if the input iterators are not sorted, the behaviour is unpredictable.
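+//
+// A minimal sketch merging two sorted iterators (values hypothetical):
+//
+// it := NewSortedSelectingIterator(
+//  NewSliceIterator([]int{1, 3}),
+//  NewSliceIterator([]int{2}),
+//  func(a, b int) int { return a - b },
+// )
+// // yields 1, 2, 3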
+type SortedSelectingIterator[T any] struct {
+ SelectingIterator[T]
+}
+
+func (si *SortedSelectingIterator[T]) Next() (v T, err error) {
+ err = si.fetch()
+ if err != nil {
+ return
+ }
+
+ if !si.v1Fetched && !si.v2Fetched {
+ err = EmptyIterator
+ return
+ }
+
+ // 1. only v1
+ if si.v1Fetched && !si.v2Fetched {
+ si.v1Fetched = false
+ v = si.v1
+ return
+ }
+ // 2. only v2
+ if si.v2Fetched && !si.v1Fetched {
+ si.v2Fetched = false
+ v = si.v2
+ return
+ }
+ // 3. both present
+ r := si.cmp(si.v1, si.v2)
+ if r == 0 {
+ si.v1Fetched = false
+ v = si.v1
+ return
+ } else if r < 0 {
+ si.v1Fetched = false
+ v = si.v1
+ return
+ } else {
+ si.v2Fetched = false
+ v = si.v2
+ return
+ }
+}
+
+func NewSortedSelectingIterator[T any](it1, it2 Iterator[T], cf CmpFunc[T]) Iterator[T] {
+ return &SortedSelectingIterator[T]{
+ SelectingIterator[T]{
+ it1: it1,
+ it2: it2,
+ cmp: cf,
+ },
+ }
+}
diff --git a/vendor/github.com/lezhnev74/go-iterators/unique_selecting_iterator.go b/vendor/github.com/lezhnev74/go-iterators/unique_selecting_iterator.go
new file mode 100644
index 0000000..8643884
--- /dev/null
+++ b/vendor/github.com/lezhnev74/go-iterators/unique_selecting_iterator.go
@@ -0,0 +1,57 @@
+package go_iterators
+
+// UniqueSelectingIterator returns sorted values from two other iterators
+// Identical values are returned just once
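+//
+// A minimal sketch (values hypothetical):
+//
+// it := NewUniqueSelectingIterator(
+//  NewSliceIterator([]int{1, 2}),
+//  NewSliceIterator([]int{2, 3}),
+//  func(a, b int) int { return a - b },
+// )
+// // yields 1, 2, 3 (the duplicate 2 is returned once)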
+type UniqueSelectingIterator[T any] struct {
+ SelectingIterator[T]
+}
+
+func (si *UniqueSelectingIterator[T]) Next() (v T, err error) {
+ err = si.fetch()
+ if err != nil {
+ return
+ }
+
+ if !si.v1Fetched && !si.v2Fetched {
+ err = EmptyIterator
+ return
+ }
+
+ // 1. only v1
+ if si.v1Fetched && !si.v2Fetched {
+ si.v1Fetched = false
+ v = si.v1
+ return
+ }
+ // 2. only v2
+ if si.v2Fetched && !si.v1Fetched {
+ si.v2Fetched = false
+ v = si.v2
+ return
+ }
+ // 3. both present
+ r := si.cmp(si.v1, si.v2)
+ if r == 0 {
+ si.v1Fetched, si.v2Fetched = false, false
+ v = si.v1
+ return
+ } else if r < 0 {
+ si.v1Fetched = false
+ v = si.v1
+ return
+ } else {
+ si.v2Fetched = false
+ v = si.v2
+ return
+ }
+}
+
+func NewUniqueSelectingIterator[T any](it1, it2 Iterator[T], cf CmpFunc[T]) Iterator[T] {
+ return &UniqueSelectingIterator[T]{
+ SelectingIterator[T]{
+ it1: it1,
+ it2: it2,
+ cmp: cf,
+ },
+ }
+}
diff --git a/vendor/github.com/magiconair/properties/.gitignore b/vendor/github.com/magiconair/properties/.gitignore
new file mode 100644
index 0000000..e7081ff
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/.gitignore
@@ -0,0 +1,6 @@
+*.sublime-project
+*.sublime-workspace
+*.un~
+*.swp
+.idea/
+*.iml
diff --git a/vendor/github.com/magiconair/properties/CHANGELOG.md b/vendor/github.com/magiconair/properties/CHANGELOG.md
new file mode 100644
index 0000000..842e8e2
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/CHANGELOG.md
@@ -0,0 +1,205 @@
+## Changelog
+
+### [1.8.7](https://github.com/magiconair/properties/tree/v1.8.7) - 08 Dec 2022
+
+ * [PR #65](https://github.com/magiconair/properties/pull/65): Speedup Merge
+
+ Thanks to [@AdityaVallabh](https://github.com/AdityaVallabh) for the patch.
+
+ * [PR #66](https://github.com/magiconair/properties/pull/66): use github actions
+
+### [1.8.6](https://github.com/magiconair/properties/tree/v1.8.6) - 23 Feb 2022
+
+ * [PR #57](https://github.com/magiconair/properties/pull/57): Fix "unreachable code" lint error
+
+ Thanks to [@ellie](https://github.com/ellie) for the patch.
+
+ * [PR #63](https://github.com/magiconair/properties/pull/63): Make TestMustGetParsedDuration backwards compatible
+
+ This patch ensures that the `TestMustGetParsedDuration` still works with `go1.3` to make the
+ author happy until it affects real users.
+
+ Thanks to [@maage](https://github.com/maage) for the patch.
+
+### [1.8.5](https://github.com/magiconair/properties/tree/v1.8.5) - 24 Mar 2021
+
+ * [PR #55](https://github.com/magiconair/properties/pull/55): Fix: Encoding Bug in Comments
+
+ When reading comments, \ was loaded correctly, but when writing it was then
+ replaced by \\. This led to wrong comments when writing and reading multiple times.
+
+ Thanks to [@doxsch](https://github.com/doxsch) for the patch.
+
+### [1.8.4](https://github.com/magiconair/properties/tree/v1.8.4) - 23 Sep 2020
+
+ * [PR #50](https://github.com/magiconair/properties/pull/50): enhance error message for circular references
+
+ Thanks to [@sriv](https://github.com/sriv) for the patch.
+
+### [1.8.3](https://github.com/magiconair/properties/tree/v1.8.3) - 14 Sep 2020
+
+ * [PR #49](https://github.com/magiconair/properties/pull/49): Include the key in error message causing the circular reference
+
+ The change is to include the key that causes the circular reference in the
+ error message when parsing/loading the properties files.
+
+ Thanks to [@haroon-sheikh](https://github.com/haroon-sheikh) for the patch.
+
+### [1.8.2](https://github.com/magiconair/properties/tree/v1.8.2) - 25 Aug 2020
+
+ * [PR #36](https://github.com/magiconair/properties/pull/36): Escape backslash on write
+
+ This patch ensures that backslashes are escaped on write. Existing applications which
+ rely on the old behavior may need to be updated.
+
+ Thanks to [@apesternikov](https://github.com/apesternikov) for the patch.
+
+ * [PR #42](https://github.com/magiconair/properties/pull/42): Made Content-Type check whitespace agnostic in LoadURL()
+
+ Thanks to [@aliras1](https://github.com/aliras1) for the patch.
+
+ * [PR #41](https://github.com/magiconair/properties/pull/41): Make key/value separator configurable on Write()
+
+ Thanks to [@mkjor](https://github.com/mkjor) for the patch.
+
+ * [PR #40](https://github.com/magiconair/properties/pull/40): Add method to return a sorted list of keys
+
+ Thanks to [@mkjor](https://github.com/mkjor) for the patch.
+
+### [1.8.1](https://github.com/magiconair/properties/tree/v1.8.1) - 10 May 2019
+
+ * [PR #35](https://github.com/magiconair/properties/pull/35): Close body always after request
+
+ This patch ensures that in `LoadURL` the response body is always closed.
+
+ Thanks to [@liubog2008](https://github.com/liubog2008) for the patch.
+
+### [1.8](https://github.com/magiconair/properties/tree/v1.8) - 15 May 2018
+
+ * [PR #26](https://github.com/magiconair/properties/pull/26): Disable expansion during loading
+
+ This adds the option to disable property expansion during loading.
+
+ Thanks to [@kmala](https://github.com/kmala) for the patch.
+
+### [1.7.6](https://github.com/magiconair/properties/tree/v1.7.6) - 14 Feb 2018
+
+ * [PR #29](https://github.com/magiconair/properties/pull/29): Reworked expansion logic to handle more complex cases.
+
+ See PR for an example.
+
+ Thanks to [@yobert](https://github.com/yobert) for the fix.
+
+### [1.7.5](https://github.com/magiconair/properties/tree/v1.7.5) - 13 Feb 2018
+
+ * [PR #28](https://github.com/magiconair/properties/pull/28): Support duplicate expansions in the same value
+
+ Values which expand the same key multiple times (e.g. `key=${a} ${a}`) will no longer fail
+ with a `circular reference error`.
+
+ Thanks to [@yobert](https://github.com/yobert) for the fix.
+
+### [1.7.4](https://github.com/magiconair/properties/tree/v1.7.4) - 31 Oct 2017
+
+ * [Issue #23](https://github.com/magiconair/properties/issues/23): Ignore blank lines with whitespaces
+
+ * [PR #24](https://github.com/magiconair/properties/pull/24): Update keys when DisableExpansion is enabled
+
+ Thanks to [@mgurov](https://github.com/mgurov) for the fix.
+
+### [1.7.3](https://github.com/magiconair/properties/tree/v1.7.3) - 10 Jul 2017
+
+ * [Issue #17](https://github.com/magiconair/properties/issues/17): Add [SetValue()](http://godoc.org/github.com/magiconair/properties#Properties.SetValue) method to set values generically
+ * [Issue #22](https://github.com/magiconair/properties/issues/22): Add [LoadMap()](http://godoc.org/github.com/magiconair/properties#LoadMap) function to load properties from a string map
+
+### [1.7.2](https://github.com/magiconair/properties/tree/v1.7.2) - 20 Mar 2017
+
+ * [Issue #15](https://github.com/magiconair/properties/issues/15): Drop gocheck dependency
+ * [PR #21](https://github.com/magiconair/properties/pull/21): Add [Map()](http://godoc.org/github.com/magiconair/properties#Properties.Map) and [FilterFunc()](http://godoc.org/github.com/magiconair/properties#Properties.FilterFunc)
+
+### [1.7.1](https://github.com/magiconair/properties/tree/v1.7.1) - 13 Jan 2017
+
+ * [Issue #14](https://github.com/magiconair/properties/issues/14): Decouple TestLoadExpandedFile from `$USER`
+ * [PR #12](https://github.com/magiconair/properties/pull/12): Load from files and URLs
+ * [PR #16](https://github.com/magiconair/properties/pull/16): Keep gofmt happy
+ * [PR #18](https://github.com/magiconair/properties/pull/18): Fix Delete() function
+
+### [1.7.0](https://github.com/magiconair/properties/tree/v1.7.0) - 20 Mar 2016
+
+ * [Issue #10](https://github.com/magiconair/properties/issues/10): Add [LoadURL,LoadURLs,MustLoadURL,MustLoadURLs](http://godoc.org/github.com/magiconair/properties#LoadURL) method to load properties from a URL.
+ * [Issue #11](https://github.com/magiconair/properties/issues/11): Add [LoadString,MustLoadString](http://godoc.org/github.com/magiconair/properties#LoadString) method to load properties from an UTF8 string.
+ * [PR #8](https://github.com/magiconair/properties/pull/8): Add [MustFlag](http://godoc.org/github.com/magiconair/properties#Properties.MustFlag) method to provide overrides via command line flags. (@pascaldekloe)
+
+### [1.6.0](https://github.com/magiconair/properties/tree/v1.6.0) - 11 Dec 2015
+
+ * Add [Decode](http://godoc.org/github.com/magiconair/properties#Properties.Decode) method to populate struct from properties via tags.
+
+### [1.5.6](https://github.com/magiconair/properties/tree/v1.5.6) - 18 Oct 2015
+
+ * Vendored in gopkg.in/check.v1
+
+### [1.5.5](https://github.com/magiconair/properties/tree/v1.5.5) - 31 Jul 2015
+
+ * [PR #6](https://github.com/magiconair/properties/pull/6): Add [Delete](http://godoc.org/github.com/magiconair/properties#Properties.Delete) method to remove keys including comments. (@gerbenjacobs)
+
+### [1.5.4](https://github.com/magiconair/properties/tree/v1.5.4) - 23 Jun 2015
+
+ * [Issue #5](https://github.com/magiconair/properties/issues/5): Allow disabling of property expansion [DisableExpansion](http://godoc.org/github.com/magiconair/properties#Properties.DisableExpansion). When property expansion is disabled Properties become a simple key/value store and don't check for circular references.
+
+### [1.5.3](https://github.com/magiconair/properties/tree/v1.5.3) - 02 Jun 2015
+
+ * [Issue #4](https://github.com/magiconair/properties/issues/4): Maintain key order in [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) and [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp)
+
+### [1.5.2](https://github.com/magiconair/properties/tree/v1.5.2) - 10 Apr 2015
+
+ * [Issue #3](https://github.com/magiconair/properties/issues/3): Don't print comments in [WriteComment()](http://godoc.org/github.com/magiconair/properties#Properties.WriteComment) if they are all empty
+ * Add clickable links to README
+
+### [1.5.1](https://github.com/magiconair/properties/tree/v1.5.1) - 08 Dec 2014
+
+ * Added [GetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.GetParsedDuration) and [MustGetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.MustGetParsedDuration) for values specified compatible with
+ [time.ParseDuration()](http://golang.org/pkg/time/#ParseDuration).
+
+### [1.5.0](https://github.com/magiconair/properties/tree/v1.5.0) - 18 Nov 2014
+
+ * Added support for single and multi-line comments (reading, writing and updating)
+ * The order of keys is now preserved
+ * Calling [Set()](http://godoc.org/github.com/magiconair/properties#Properties.Set) with an empty key now silently ignores the call and does not create a new entry
+ * Added a [MustSet()](http://godoc.org/github.com/magiconair/properties#Properties.MustSet) method
+ * Migrated test library from launchpad.net/gocheck to [gopkg.in/check.v1](http://gopkg.in/check.v1)
+
+### [1.4.2](https://github.com/magiconair/properties/tree/v1.4.2) - 15 Nov 2014
+
+ * [Issue #2](https://github.com/magiconair/properties/issues/2): Fixed goroutine leak in parser which created two lexers but cleaned up only one
+
+### [1.4.1](https://github.com/magiconair/properties/tree/v1.4.1) - 13 Nov 2014
+
+ * [Issue #1](https://github.com/magiconair/properties/issues/1): Fixed bug in Keys() method which returned an empty string
+
+### [1.4.0](https://github.com/magiconair/properties/tree/v1.4.0) - 23 Sep 2014
+
+ * Added [Keys()](http://godoc.org/github.com/magiconair/properties#Properties.Keys) to get the keys
+ * Added [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp) and [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) to get a subset of the properties
+
+### [1.3.0](https://github.com/magiconair/properties/tree/v1.3.0) - 18 Mar 2014
+
+* Added support for time.Duration
+* Made MustXXX() failure behavior configurable (log.Fatal, panic, custom)
+* Changed default of MustXXX() failure from panic to log.Fatal
+
+### [1.2.0](https://github.com/magiconair/properties/tree/v1.2.0) - 05 Mar 2014
+
+* Added MustGet... functions
+* Added support for int and uint with range checks on 32 bit platforms
+
+### [1.1.0](https://github.com/magiconair/properties/tree/v1.1.0) - 20 Jan 2014
+
+* Renamed from goproperties to properties
+* Added support for expansion of environment vars in
+ filenames and value expressions
+* Fixed bug where value expressions were not at the
+ start of the string
+
+### [1.0.0](https://github.com/magiconair/properties/tree/v1.0.0) - 7 Jan 2014
+
+* Initial release
diff --git a/vendor/github.com/magiconair/properties/LICENSE.md b/vendor/github.com/magiconair/properties/LICENSE.md
new file mode 100644
index 0000000..79c87e3
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/LICENSE.md
@@ -0,0 +1,24 @@
+Copyright (c) 2013-2020, Frank Schroeder
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/magiconair/properties/README.md b/vendor/github.com/magiconair/properties/README.md
new file mode 100644
index 0000000..e2edda0
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/README.md
@@ -0,0 +1,128 @@
+[![](https://img.shields.io/github/tag/magiconair/properties.svg?style=flat-square&label=release)](https://github.com/magiconair/properties/releases)
+[![Travis CI Status](https://img.shields.io/travis/magiconair/properties.svg?branch=master&style=flat-square&label=travis)](https://travis-ci.org/magiconair/properties)
+[![License](https://img.shields.io/badge/License-BSD%202--Clause-orange.svg?style=flat-square)](https://raw.githubusercontent.com/magiconair/properties/master/LICENSE)
+[![GoDoc](http://img.shields.io/badge/godoc-reference-5272B4.svg?style=flat-square)](http://godoc.org/github.com/magiconair/properties)
+
+# Overview
+
+#### Please run `git pull --tags` to update the tags. See [below](#updated-git-tags) for why.
+
+properties is a Go library for reading and writing properties files.
+
+It supports reading from multiple files or URLs and Spring style recursive
+property expansion of expressions like `${key}` to their corresponding value.
+Value expressions can refer to other keys like in `${key}` or to environment
+variables like in `${USER}`. Filenames can also contain environment variables
+like in `/home/${USER}/myapp.properties`.
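+
+A small sketch of recursive expansion via `LoadString` (keys hypothetical):
+
+```go
+p := properties.MustLoadString("base=/opt\npath=${base}/bin")
+fmt.Println(p.MustGetString("path")) // "/opt/bin"
+```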
+
+Properties can be decoded into structs, maps, arrays and values through
+struct tags.
+
+Comments and the order of keys are preserved. Comments can be modified
+and can be written to the output.
+
+The properties library supports both ISO-8859-1 and UTF-8 encoded data.
+
+Starting from version 1.3.0 the behavior of the MustXXX() functions is
+configurable by providing a custom `ErrorHandler` function. The default has
+changed from `panic` to `log.Fatal` but this is configurable and custom
+error handling functions can be provided. See the package documentation for
+details.
+
+Read the full documentation on [![GoDoc](http://img.shields.io/badge/godoc-reference-5272B4.svg?style=flat-square)](http://godoc.org/github.com/magiconair/properties)
+
+## Getting Started
+
+```go
+import (
+ "flag"
+ "github.com/magiconair/properties"
+)
+
+func main() {
+ // init from a file
+ p := properties.MustLoadFile("${HOME}/config.properties", properties.UTF8)
+
+ // or multiple files
+ p = properties.MustLoadFiles([]string{
+ "${HOME}/config.properties",
+ "${HOME}/config-${USER}.properties",
+ }, properties.UTF8, true)
+
+ // or from a map
+ p = properties.LoadMap(map[string]string{"key": "value", "abc": "def"})
+
+ // or from a string
+ p = properties.MustLoadString("key=value\nabc=def")
+
+ // or from a URL
+ p = properties.MustLoadURL("http://host/path")
+
+ // or from multiple URLs
+ p = properties.MustLoadURLs([]string{
+ "http://host/config",
+ "http://host/config-${USER}",
+ }, true)
+
+ // or from flags
+ p.MustFlag(flag.CommandLine)
+
+ // get values through getters
+ host := p.MustGetString("host")
+ port := p.GetInt("port", 8080)
+
+ // or through Decode
+ type Config struct {
+ Host string `properties:"host"`
+ Port int `properties:"port,default=9000"`
+ Accept []string `properties:"accept,default=image/png;image;gif"`
+ Timeout time.Duration `properties:"timeout,default=5s"`
+ }
+ var cfg Config
+ if err := p.Decode(&cfg); err != nil {
+ log.Fatal(err)
+ }
+}
+
+```
+
+## Installation and Upgrade
+
+```
+$ go get -u github.com/magiconair/properties
+```
+
+## License
+
+2 clause BSD license. See [LICENSE](https://github.com/magiconair/properties/blob/master/LICENSE) file for details.
+
+## ToDo
+
+* Dump contents with passwords and secrets obscured
+
+## Updated Git tags
+
+#### 13 Feb 2018
+
+I realized that all of the git tags I had pushed before v1.7.5 were lightweight tags
+and I've only recently learned that this doesn't play well with `git describe` 😞
+
+I have replaced all lightweight tags with signed tags using this script which should
+retain the commit date, name and email address. Please run `git pull --tags` to update them.
+
+Worst case you have to reclone the repo.
+
+```shell
+#!/bin/bash
+tag=$1
+echo "Updating $tag"
+date=$(git show ${tag}^0 --format=%aD | head -1)
+email=$(git show ${tag}^0 --format=%aE | head -1)
+name=$(git show ${tag}^0 --format=%aN | head -1)
+GIT_COMMITTER_DATE="$date" GIT_COMMITTER_NAME="$name" GIT_COMMITTER_EMAIL="$email" git tag -s -f ${tag} ${tag}^0 -m ${tag}
+```
+
+I apologize for the inconvenience.
+
+Frank
+
diff --git a/vendor/github.com/magiconair/properties/decode.go b/vendor/github.com/magiconair/properties/decode.go
new file mode 100644
index 0000000..8e6aa44
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/decode.go
@@ -0,0 +1,289 @@
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package properties
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// Decode assigns property values to exported fields of a struct.
+//
+// Decode traverses v recursively and returns an error if a value cannot be
+// converted to the field type or a required value is missing for a field.
+//
+// The following type dependent decodings are used:
+//
+// String, boolean, numeric fields have the value of the property key assigned.
+// The property key name is the name of the field. A different key and a default
+// value can be set in the field's tag. Fields without default value are
+// required. If the value cannot be converted to the field type an error is
+// returned.
+//
+// time.Duration fields have the result of time.ParseDuration() assigned.
+//
+// time.Time fields have the value of time.Parse() assigned. The default layout
+// is time.RFC3339 but can be set in the field's tag.
+//
+// Arrays and slices of string, boolean, numeric, time.Duration and time.Time
+// fields have the value interpreted as a comma separated list of values. The
+// individual values are trimmed of whitespace and empty values are ignored. A
+// default value can be provided as a semicolon separated list in the field's
+// tag.
+//
+// Struct fields are decoded recursively using the field name plus "." as
+// prefix. The prefix (without dot) can be overridden in the field's tag.
+// Default values are not supported in the field's tag. Specify them on the
+// fields of the inner struct instead.
+//
+// Map fields must have a key of type string and are decoded recursively by
+// using the field's name plus "." as prefix and the next element of the key
+// name as map key. The prefix (without dot) can be overridden in the field's
+// tag. Default values are not supported.
+//
+// Examples:
+//
+// // Field is ignored.
+// Field int `properties:"-"`
+//
+// // Field is assigned value of 'Field'.
+// Field int
+//
+// // Field is assigned value of 'myName'.
+// Field int `properties:"myName"`
+//
+// // Field is assigned value of key 'myName' and has a default
+// // value 15 if the key does not exist.
+// Field int `properties:"myName,default=15"`
+//
+// // Field is assigned value of key 'Field' and has a default
+// // value 15 if the key does not exist.
+// Field int `properties:",default=15"`
+//
+// // Field is assigned value of key 'date' and the date
+// // is in format 2006-01-02
+// Field time.Time `properties:"date,layout=2006-01-02"`
+//
+// // Field is assigned the non-empty and whitespace trimmed
+// // values of key 'Field' split by commas.
+// Field []string
+//
+// // Field is assigned the non-empty and whitespace trimmed
+// // values of key 'Field' split by commas and has a default
+// // value ["a", "b", "c"] if the key does not exist.
+// Field []string `properties:",default=a;b;c"`
+//
+// // Field is decoded recursively with "Field." as key prefix.
+// Field SomeStruct
+//
+// // Field is decoded recursively with "myName." as key prefix.
+// Field SomeStruct `properties:"myName"`
+//
+// // Field is decoded recursively with "Field." as key prefix
+// // and the next dotted element of the key as map key.
+// Field map[string]string
+//
+// // Field is decoded recursively with "myName." as key prefix
+// // and the next dotted element of the key as map key.
+// Field map[string]string `properties:"myName"`
+func (p *Properties) Decode(x interface{}) error {
+ t, v := reflect.TypeOf(x), reflect.ValueOf(x)
+ if t.Kind() != reflect.Ptr || v.Elem().Type().Kind() != reflect.Struct {
+ return fmt.Errorf("not a pointer to struct: %s", t)
+ }
+ if err := dec(p, "", nil, nil, v); err != nil {
+ return err
+ }
+ return nil
+}
+
+func dec(p *Properties, key string, def *string, opts map[string]string, v reflect.Value) error {
+ t := v.Type()
+
+ // value returns the property value for key or the default if provided.
+ value := func() (string, error) {
+ if val, ok := p.Get(key); ok {
+ return val, nil
+ }
+ if def != nil {
+ return *def, nil
+ }
+ return "", fmt.Errorf("missing required key %s", key)
+ }
+
+ // conv converts a string to a value of the given type.
+ conv := func(s string, t reflect.Type) (val reflect.Value, err error) {
+ var v interface{}
+
+ switch {
+ case isDuration(t):
+ v, err = time.ParseDuration(s)
+
+ case isTime(t):
+ layout := opts["layout"]
+ if layout == "" {
+ layout = time.RFC3339
+ }
+ v, err = time.Parse(layout, s)
+
+ case isBool(t):
+ v, err = boolVal(s), nil
+
+ case isString(t):
+ v, err = s, nil
+
+ case isFloat(t):
+ v, err = strconv.ParseFloat(s, 64)
+
+ case isInt(t):
+ v, err = strconv.ParseInt(s, 10, 64)
+
+ case isUint(t):
+ v, err = strconv.ParseUint(s, 10, 64)
+
+ default:
+ return reflect.Zero(t), fmt.Errorf("unsupported type %s", t)
+ }
+ if err != nil {
+ return reflect.Zero(t), err
+ }
+ return reflect.ValueOf(v).Convert(t), nil
+ }
+
+ // keydef returns the property key and the default value based on the
+ // name of the struct field and the options in the tag.
+ keydef := func(f reflect.StructField) (string, *string, map[string]string) {
+ _key, _opts := parseTag(f.Tag.Get("properties"))
+
+ var _def *string
+ if d, ok := _opts["default"]; ok {
+ _def = &d
+ }
+ if _key != "" {
+ return _key, _def, _opts
+ }
+ return f.Name, _def, _opts
+ }
+
+ switch {
+ case isDuration(t) || isTime(t) || isBool(t) || isString(t) || isFloat(t) || isInt(t) || isUint(t):
+ s, err := value()
+ if err != nil {
+ return err
+ }
+ val, err := conv(s, t)
+ if err != nil {
+ return err
+ }
+ v.Set(val)
+
+ case isPtr(t):
+ return dec(p, key, def, opts, v.Elem())
+
+ case isStruct(t):
+ for i := 0; i < v.NumField(); i++ {
+ fv := v.Field(i)
+ fk, def, opts := keydef(t.Field(i))
+ if !fv.CanSet() {
+ return fmt.Errorf("cannot set %s", t.Field(i).Name)
+ }
+ if fk == "-" {
+ continue
+ }
+ if key != "" {
+ fk = key + "." + fk
+ }
+ if err := dec(p, fk, def, opts, fv); err != nil {
+ return err
+ }
+ }
+ return nil
+
+ case isArray(t):
+ val, err := value()
+ if err != nil {
+ return err
+ }
+ vals := split(val, ";")
+ a := reflect.MakeSlice(t, 0, len(vals))
+ for _, s := range vals {
+ val, err := conv(s, t.Elem())
+ if err != nil {
+ return err
+ }
+ a = reflect.Append(a, val)
+ }
+ v.Set(a)
+
+ case isMap(t):
+ valT := t.Elem()
+ m := reflect.MakeMap(t)
+ for postfix := range p.FilterStripPrefix(key + ".").m {
+ pp := strings.SplitN(postfix, ".", 2)
+ mk, mv := pp[0], reflect.New(valT)
+ if err := dec(p, key+"."+mk, nil, nil, mv); err != nil {
+ return err
+ }
+ m.SetMapIndex(reflect.ValueOf(mk), mv.Elem())
+ }
+ v.Set(m)
+
+ default:
+ return fmt.Errorf("unsupported type %s", t)
+ }
+ return nil
+}
+
+// split splits a string on sep, trims whitespace of elements
+// and omits empty elements
+func split(s string, sep string) []string {
+ var a []string
+ for _, v := range strings.Split(s, sep) {
+ if v = strings.TrimSpace(v); v != "" {
+ a = append(a, v)
+ }
+ }
+ return a
+}
+
+// parseTag parses a "key,k=v,k=v,..."
+func parseTag(tag string) (key string, opts map[string]string) {
+ opts = map[string]string{}
+ for i, s := range strings.Split(tag, ",") {
+ if i == 0 {
+ key = s
+ continue
+ }
+
+ pp := strings.SplitN(s, "=", 2)
+ if len(pp) == 1 {
+ opts[pp[0]] = ""
+ } else {
+ opts[pp[0]] = pp[1]
+ }
+ }
+ return key, opts
+}
+
+func isArray(t reflect.Type) bool { return t.Kind() == reflect.Array || t.Kind() == reflect.Slice }
+func isBool(t reflect.Type) bool { return t.Kind() == reflect.Bool }
+func isDuration(t reflect.Type) bool { return t == reflect.TypeOf(time.Second) }
+func isMap(t reflect.Type) bool { return t.Kind() == reflect.Map }
+func isPtr(t reflect.Type) bool { return t.Kind() == reflect.Ptr }
+func isString(t reflect.Type) bool { return t.Kind() == reflect.String }
+func isStruct(t reflect.Type) bool { return t.Kind() == reflect.Struct }
+func isTime(t reflect.Type) bool { return t == reflect.TypeOf(time.Time{}) }
+func isFloat(t reflect.Type) bool {
+ return t.Kind() == reflect.Float32 || t.Kind() == reflect.Float64
+}
+func isInt(t reflect.Type) bool {
+ return t.Kind() == reflect.Int || t.Kind() == reflect.Int8 || t.Kind() == reflect.Int16 || t.Kind() == reflect.Int32 || t.Kind() == reflect.Int64
+}
+func isUint(t reflect.Type) bool {
+ return t.Kind() == reflect.Uint || t.Kind() == reflect.Uint8 || t.Kind() == reflect.Uint16 || t.Kind() == reflect.Uint32 || t.Kind() == reflect.Uint64
+}
diff --git a/vendor/github.com/magiconair/properties/doc.go b/vendor/github.com/magiconair/properties/doc.go
new file mode 100644
index 0000000..7c79793
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/doc.go
@@ -0,0 +1,155 @@
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package properties provides functions for reading and writing
+// ISO-8859-1 and UTF-8 encoded .properties files and has
+// support for recursive property expansion.
+//
+// Java properties files are ISO-8859-1 encoded and use Unicode
+// literals for characters outside the ISO character set. Unicode
+// literals can be used in UTF-8 encoded properties files but
+// aren't necessary.
+//
+// To load a single properties file use MustLoadFile():
+//
+// p := properties.MustLoadFile(filename, properties.UTF8)
+//
+// To load multiple properties files use MustLoadFiles()
+// which loads the files in the given order and merges the
+// result. Missing properties files can be ignored if the
+// 'ignoreMissing' flag is set to true.
+//
+// Filenames can contain environment variables which are expanded
+// before loading.
+//
+// f1 := "/etc/myapp/myapp.conf"
+// f2 := "/home/${USER}/myapp.conf"
+// p := MustLoadFiles([]string{f1, f2}, properties.UTF8, true)
+//
+// All of the different key/value delimiters ' ', ':' and '=' are
+// supported as well as the comment characters '!' and '#' and
+// multi-line values.
+//
+// ! this is a comment
+// # and so is this
+//
+// # the following expressions are equal
+// key value
+// key=value
+// key:value
+// key = value
+// key : value
+// key = val\
+// ue
+//
+// Properties stores all comments preceding a key and provides
+// GetComments() and SetComments() methods to retrieve and
+// update them. The convenience functions GetComment() and
+// SetComment() allow access to the last comment. The
+// WriteComment() method writes properties files including
+// the comments and with the keys in the original order.
+// This can be used for sanitizing properties files.
+//
+// Property expansion is recursive and circular references
+// and malformed expressions are not allowed and cause an
+// error. Expansion of environment variables is supported.
+//
+// # standard property
+// key = value
+//
+// # property expansion: key2 = value
+// key2 = ${key}
+//
+// # recursive expansion: key3 = value
+// key3 = ${key2}
+//
+// # circular reference (error)
+// key = ${key}
+//
+// # malformed expression (error)
+// key = ${ke
+//
+// # refers to the users' home dir
+// home = ${HOME}
+//
+// # local key takes precedence over env var: u = foo
+// USER = foo
+// u = ${USER}
+//
+// The default property expansion format is ${key} but can be
+// changed by setting different pre- and postfix values on the
+// Properties object.
+//
+// p := properties.NewProperties()
+// p.Prefix = "#["
+// p.Postfix = "]#"
+//
+// Properties provides convenience functions for getting typed
+// values with default values if the key does not exist or the
+// type conversion failed.
+//
+// # Returns true if the value is either "1", "on", "yes" or "true"
+// # Returns false for every other value and the default value if
+// # the key does not exist.
+// v = p.GetBool("key", false)
+//
+// # Returns the value if the key exists and the format conversion
+// # was successful. Otherwise, the default value is returned.
+// v = p.GetInt64("key", 999)
+// v = p.GetUint64("key", 999)
+// v = p.GetFloat64("key", 123.0)
+// v = p.GetString("key", "def")
+// v = p.GetDuration("key", 999)
+//
+// As an alternative properties may be applied with the standard
+// library's flag implementation at any time.
+//
+// # Standard configuration
+// v = flag.Int("key", 999, "help message")
+// flag.Parse()
+//
+// # Merge p into the flag set
+// p.MustFlag(flag.CommandLine)
+//
+// Properties provides several MustXXX() convenience functions
+// which will terminate the app if an error occurs. The behavior
+// of the failure is configurable and the default is to call
+// log.Fatal(err). To have the MustXXX() functions panic instead
+// of logging the error set a different ErrorHandler before
+// you use the Properties package.
+//
+// properties.ErrorHandler = properties.PanicHandler
+//
+// # Will panic instead of logging an error
+// p := properties.MustLoadFile("config.properties")
+//
+// You can also provide your own ErrorHandler function. The only requirement
+// is that the error handler function must exit after handling the error.
+//
+// properties.ErrorHandler = func(err error) {
+// fmt.Println(err)
+// os.Exit(1)
+// }
+//
+// # Will write to stdout and then exit
+// p := properties.MustLoadFile("config.properties")
+//
+// Properties can also be loaded into a struct via the `Decode`
+// method, e.g.
+//
+// type S struct {
+// A string `properties:"a,default=foo"`
+// D time.Duration `properties:"timeout,default=5s"`
+// E time.Time `properties:"expires,layout=2006-01-02,default=2015-01-01"`
+// }
+//
+// See `Decode()` method for the full documentation.
+//
+// The following documents provide a description of the properties
+// file format.
+//
+// http://en.wikipedia.org/wiki/.properties
+//
+// http://docs.oracle.com/javase/7/docs/api/java/util/Properties.html#load%28java.io.Reader%29
+package properties
diff --git a/vendor/github.com/magiconair/properties/integrate.go b/vendor/github.com/magiconair/properties/integrate.go
new file mode 100644
index 0000000..35d0ae9
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/integrate.go
@@ -0,0 +1,35 @@
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package properties
+
+import "flag"
+
+// MustFlag sets flags that are skipped by dst.Parse when p contains
+// the respective key for flag.Flag.Name.
+//
+// Its use is recommended with command line arguments as in:
+//
+// flag.Parse()
+// p.MustFlag(flag.CommandLine)
+func (p *Properties) MustFlag(dst *flag.FlagSet) {
+ m := make(map[string]*flag.Flag)
+ dst.VisitAll(func(f *flag.Flag) {
+ m[f.Name] = f
+ })
+ dst.Visit(func(f *flag.Flag) {
+ delete(m, f.Name) // overridden
+ })
+
+ for name, f := range m {
+ v, ok := p.Get(name)
+ if !ok {
+ continue
+ }
+
+ if err := f.Value.Set(v); err != nil {
+ ErrorHandler(err)
+ }
+ }
+}
diff --git a/vendor/github.com/magiconair/properties/lex.go b/vendor/github.com/magiconair/properties/lex.go
new file mode 100644
index 0000000..3d15a1f
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/lex.go
@@ -0,0 +1,395 @@
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+// Parts of the lexer are from the template/text/parser package
+// For these parts the following applies:
+//
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file of the go 1.2
+// distribution.
+
+package properties
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+)
+
+// item represents a token or text string returned from the scanner.
+type item struct {
+ typ itemType // The type of this item.
+ pos int // The starting position, in bytes, of this item in the input string.
+ val string // The value of this item.
+}
+
+func (i item) String() string {
+ switch {
+ case i.typ == itemEOF:
+ return "EOF"
+ case i.typ == itemError:
+ return i.val
+ case len(i.val) > 10:
+ return fmt.Sprintf("%.10q...", i.val)
+ }
+ return fmt.Sprintf("%q", i.val)
+}
+
+// itemType identifies the type of lex items.
+type itemType int
+
+const (
+ itemError itemType = iota // error occurred; value is text of error
+ itemEOF
+ itemKey // a key
+ itemValue // a value
+ itemComment // a comment
+)
+
+// defines a constant for EOF
+const eof = -1
+
+// permitted whitespace characters space, FF and TAB
+const whitespace = " \f\t"
+
+// stateFn represents the state of the scanner as a function that returns the next state.
+type stateFn func(*lexer) stateFn
+
+// lexer holds the state of the scanner.
+type lexer struct {
+ input string // the string being scanned
+ state stateFn // the next lexing function to enter
+ pos int // current position in the input
+ start int // start position of this item
+ width int // width of last rune read from input
+ lastPos int // position of most recent item returned by nextItem
+ runes []rune // scanned runes for this item
+ items chan item // channel of scanned items
+}
+
+// next returns the next rune in the input.
+func (l *lexer) next() rune {
+ if l.pos >= len(l.input) {
+ l.width = 0
+ return eof
+ }
+ r, w := utf8.DecodeRuneInString(l.input[l.pos:])
+ l.width = w
+ l.pos += l.width
+ return r
+}
+
+// peek returns but does not consume the next rune in the input.
+func (l *lexer) peek() rune {
+ r := l.next()
+ l.backup()
+ return r
+}
+
+// backup steps back one rune. Can only be called once per call of next.
+func (l *lexer) backup() {
+ l.pos -= l.width
+}
+
+// emit passes an item back to the client.
+func (l *lexer) emit(t itemType) {
+ i := item{t, l.start, string(l.runes)}
+ l.items <- i
+ l.start = l.pos
+ l.runes = l.runes[:0]
+}
+
+// ignore skips over the pending input before this point.
+func (l *lexer) ignore() {
+ l.start = l.pos
+}
+
+// appends the rune to the current value
+func (l *lexer) appendRune(r rune) {
+ l.runes = append(l.runes, r)
+}
+
+// accept consumes the next rune if it's from the valid set.
+func (l *lexer) accept(valid string) bool {
+ if strings.ContainsRune(valid, l.next()) {
+ return true
+ }
+ l.backup()
+ return false
+}
+
+// acceptRun consumes a run of runes from the valid set.
+func (l *lexer) acceptRun(valid string) {
+ for strings.ContainsRune(valid, l.next()) {
+ }
+ l.backup()
+}
+
+// lineNumber reports which line we're on, based on the position of
+// the previous item returned by nextItem. Doing it this way
+// means we don't have to worry about peek double counting.
+func (l *lexer) lineNumber() int {
+ return 1 + strings.Count(l.input[:l.lastPos], "\n")
+}
+
+// errorf returns an error token and terminates the scan by passing
+// back a nil pointer that will be the next state, terminating l.nextItem.
+func (l *lexer) errorf(format string, args ...interface{}) stateFn {
+ l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)}
+ return nil
+}
+
+// nextItem returns the next item from the input.
+func (l *lexer) nextItem() item {
+ i := <-l.items
+ l.lastPos = i.pos
+ return i
+}
+
+// lex creates a new scanner for the input string.
+func lex(input string) *lexer {
+ l := &lexer{
+ input: input,
+ items: make(chan item),
+ runes: make([]rune, 0, 32),
+ }
+ go l.run()
+ return l
+}
+
+// run runs the state machine for the lexer.
+func (l *lexer) run() {
+ for l.state = lexBeforeKey(l); l.state != nil; {
+ l.state = l.state(l)
+ }
+}
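+
+// A minimal sketch of how the lexer is driven (illustrative only): the
+// consumer calls nextItem until it receives itemEOF or itemError, e.g.
+//
+//	l := lex("key = value\n")
+//	for it := l.nextItem(); it.typ != itemEOF && it.typ != itemError; it = l.nextItem() {
+//		// handle it.typ and it.val
+//	}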
+
+// state functions
+
+// lexBeforeKey scans until a key begins.
+func lexBeforeKey(l *lexer) stateFn {
+ switch r := l.next(); {
+ case isEOF(r):
+ l.emit(itemEOF)
+ return nil
+
+ case isEOL(r):
+ l.ignore()
+ return lexBeforeKey
+
+ case isComment(r):
+ return lexComment
+
+ case isWhitespace(r):
+ l.ignore()
+ return lexBeforeKey
+
+ default:
+ l.backup()
+ return lexKey
+ }
+}
+
+// lexComment scans a comment line. The comment character has already been scanned.
+func lexComment(l *lexer) stateFn {
+ l.acceptRun(whitespace)
+ l.ignore()
+ for {
+ switch r := l.next(); {
+ case isEOF(r):
+ l.ignore()
+ l.emit(itemEOF)
+ return nil
+ case isEOL(r):
+ l.emit(itemComment)
+ return lexBeforeKey
+ default:
+ l.appendRune(r)
+ }
+ }
+}
+
+// lexKey scans the key up to a delimiter.
+func lexKey(l *lexer) stateFn {
+ var r rune
+
+Loop:
+ for {
+ switch r = l.next(); {
+
+ case isEscape(r):
+ err := l.scanEscapeSequence()
+ if err != nil {
+ return l.errorf("%s", err)
+ }
+
+ case isEndOfKey(r):
+ l.backup()
+ break Loop
+
+ case isEOF(r):
+ break Loop
+
+ default:
+ l.appendRune(r)
+ }
+ }
+
+ if len(l.runes) > 0 {
+ l.emit(itemKey)
+ }
+
+ if isEOF(r) {
+ l.emit(itemEOF)
+ return nil
+ }
+
+ return lexBeforeValue
+}
+
+// lexBeforeValue scans the delimiter between key and value.
+// Leading and trailing whitespace is ignored.
+// We expect to be just after the key.
+func lexBeforeValue(l *lexer) stateFn {
+ l.acceptRun(whitespace)
+ l.accept(":=")
+ l.acceptRun(whitespace)
+ l.ignore()
+ return lexValue
+}
+
+// lexValue scans text until the end of the line. We expect to be just after the delimiter.
+func lexValue(l *lexer) stateFn {
+ for {
+ switch r := l.next(); {
+ case isEscape(r):
+ if isEOL(l.peek()) {
+ l.next()
+ l.acceptRun(whitespace)
+ } else {
+ err := l.scanEscapeSequence()
+ if err != nil {
+ return l.errorf("%s", err)
+ }
+ }
+
+ case isEOL(r):
+ l.emit(itemValue)
+ l.ignore()
+ return lexBeforeKey
+
+ case isEOF(r):
+ l.emit(itemValue)
+ l.emit(itemEOF)
+ return nil
+
+ default:
+ l.appendRune(r)
+ }
+ }
+}
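+
+// A backslash at the end of a line continues the value on the next line: the
+// two input lines "key = a\" and "    b" produce the single value "ab", since
+// the leading whitespace of the continuation line is skipped.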
+
+// scanEscapeSequence scans either one of the escaped characters
+// or a unicode literal. We expect to be after the escape character.
+func (l *lexer) scanEscapeSequence() error {
+ switch r := l.next(); {
+
+ case isEscapedCharacter(r):
+ l.appendRune(decodeEscapedCharacter(r))
+ return nil
+
+ case atUnicodeLiteral(r):
+ return l.scanUnicodeLiteral()
+
+ case isEOF(r):
+ return fmt.Errorf("premature EOF")
+
+ // silently drop the escape character and append the rune as is
+ default:
+ l.appendRune(r)
+ return nil
+ }
+}
+
+// scanUnicodeLiteral scans a unicode literal of the form \uXXXX. We expect to
+// be after the \u. For example, \u00e9 decodes to the rune 'é'.
+func (l *lexer) scanUnicodeLiteral() error {
+ // scan the digits
+ d := make([]rune, 4)
+ for i := 0; i < 4; i++ {
+ d[i] = l.next()
+ if d[i] == eof || !strings.ContainsRune("0123456789abcdefABCDEF", d[i]) {
+ return fmt.Errorf("invalid unicode literal")
+ }
+ }
+
+ // decode the digits into a rune
+ r, err := strconv.ParseInt(string(d), 16, 0)
+ if err != nil {
+ return err
+ }
+
+ l.appendRune(rune(r))
+ return nil
+}
+
+// decodeEscapedCharacter returns the unescaped rune. We expect to be after the escape character.
+func decodeEscapedCharacter(r rune) rune {
+ switch r {
+ case 'f':
+ return '\f'
+ case 'n':
+ return '\n'
+ case 'r':
+ return '\r'
+ case 't':
+ return '\t'
+ default:
+ return r
+ }
+}
+
+// atUnicodeLiteral reports whether we are at a unicode literal.
+// The escape character has already been consumed.
+func atUnicodeLiteral(r rune) bool {
+ return r == 'u'
+}
+
+// isComment reports whether we are at the start of a comment.
+func isComment(r rune) bool {
+ return r == '#' || r == '!'
+}
+
+// isEndOfKey reports whether the rune terminates the current key.
+func isEndOfKey(r rune) bool {
+ return strings.ContainsRune(" \f\t\r\n:=", r)
+}
+
+// isEOF reports whether we are at EOF.
+func isEOF(r rune) bool {
+ return r == eof
+}
+
+// isEOL reports whether we are at a new line character.
+func isEOL(r rune) bool {
+ return r == '\n' || r == '\r'
+}
+
+// isEscape reports whether the rune is the escape character which
+// prefixes unicode literals and other escaped characters.
+func isEscape(r rune) bool {
+ return r == '\\'
+}
+
+// isEscapedCharacter reports whether we are at one of the characters that need escaping.
+// The escape character has already been consumed.
+func isEscapedCharacter(r rune) bool {
+ return strings.ContainsRune(" :=fnrt", r)
+}
+
+// isWhitespace reports whether the rune is a whitespace character.
+func isWhitespace(r rune) bool {
+ return strings.ContainsRune(whitespace, r)
+}
diff --git a/vendor/github.com/magiconair/properties/load.go b/vendor/github.com/magiconair/properties/load.go
new file mode 100644
index 0000000..635368d
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/load.go
@@ -0,0 +1,293 @@
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package properties
+
+import (
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "os"
+ "strings"
+)
+
+// Encoding specifies encoding of the input data.
+type Encoding uint
+
+const (
+ // utf8Default is a private placeholder for the zero value of Encoding to
+ // ensure that it has the correct meaning. UTF8 is the default encoding but
+ // was assigned a non-zero value which cannot be changed without breaking
+ // existing code. Clients should continue to use the public constants.
+ utf8Default Encoding = iota
+
+ // UTF8 interprets the input data as UTF-8.
+ UTF8
+
+ // ISO_8859_1 interprets the input data as ISO-8859-1.
+ ISO_8859_1
+)
+
+type Loader struct {
+ // Encoding determines how the data from files and byte buffers
+ // is interpreted. For URLs the Content-Type header is used
+ // to determine the encoding of the data.
+ Encoding Encoding
+
+ // DisableExpansion configures the property expansion of the
+ // returned property object. When set to true, the property values
+ // will not be expanded and the Property object will not be checked
+ // for invalid expansion expressions.
+ DisableExpansion bool
+
+ // IgnoreMissing configures whether missing files or URLs which return
+ // 404 are reported as errors. When set to true, missing files and 404
+ // status codes are not reported as errors.
+ IgnoreMissing bool
+}
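+
+// A typical use of Loader (sketch):
+//
+//	l := &Loader{Encoding: UTF8, IgnoreMissing: true}
+//	p, err := l.LoadAll([]string{"app.properties", "https://example.com/app.properties"})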
+
+// LoadBytes reads a buffer into a Properties struct.
+func (l *Loader) LoadBytes(buf []byte) (*Properties, error) {
+ return l.loadBytes(buf, l.Encoding)
+}
+
+// LoadAll reads the content of multiple URLs or files in the given order into
+// a Properties struct. If IgnoreMissing is true then a 404 status code or
+// missing file will not be reported as error. Encoding sets the encoding for
+// files. For the URLs see LoadURL for the Content-Type header and the
+// encoding.
+func (l *Loader) LoadAll(names []string) (*Properties, error) {
+ all := NewProperties()
+ for _, name := range names {
+ n, err := expandName(name)
+ if err != nil {
+ return nil, err
+ }
+
+ var p *Properties
+ switch {
+ case strings.HasPrefix(n, "http://"):
+ p, err = l.LoadURL(n)
+ case strings.HasPrefix(n, "https://"):
+ p, err = l.LoadURL(n)
+ default:
+ p, err = l.LoadFile(n)
+ }
+ if err != nil {
+ return nil, err
+ }
+ all.Merge(p)
+ }
+
+ all.DisableExpansion = l.DisableExpansion
+ if all.DisableExpansion {
+ return all, nil
+ }
+ return all, all.check()
+}
+
+// LoadFile reads a file into a Properties struct.
+// If IgnoreMissing is true then a missing file will not be
+// reported as error.
+func (l *Loader) LoadFile(filename string) (*Properties, error) {
+ data, err := ioutil.ReadFile(filename)
+ if err != nil {
+ if l.IgnoreMissing && os.IsNotExist(err) {
+ LogPrintf("properties: %s not found. skipping", filename)
+ return NewProperties(), nil
+ }
+ return nil, err
+ }
+ return l.loadBytes(data, l.Encoding)
+}
+
+// LoadURL reads the content of the URL into a Properties struct.
+//
+// The encoding is determined via the Content-Type header which
+// should be set to 'text/plain'. If the 'charset' parameter is
+// missing, or set to 'iso-8859-1' or 'latin1', the encoding is set to
+// ISO-8859-1. If the 'charset' parameter is set to 'utf-8' the
+// encoding is set to UTF-8. A missing content type header is
+// interpreted as 'text/plain; charset=utf-8'.
+func (l *Loader) LoadURL(url string) (*Properties, error) {
+ resp, err := http.Get(url)
+ if err != nil {
+ return nil, fmt.Errorf("properties: error fetching %q. %s", url, err)
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode == 404 && l.IgnoreMissing {
+ LogPrintf("properties: %s returned %d. skipping", url, resp.StatusCode)
+ return NewProperties(), nil
+ }
+
+ if resp.StatusCode != 200 {
+ return nil, fmt.Errorf("properties: %s returned %d", url, resp.StatusCode)
+ }
+
+ body, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, fmt.Errorf("properties: %s error reading response. %s", url, err)
+ }
+
+ ct := resp.Header.Get("Content-Type")
+ ct = strings.Join(strings.Fields(ct), "")
+ var enc Encoding
+ switch strings.ToLower(ct) {
+ case "text/plain", "text/plain;charset=iso-8859-1", "text/plain;charset=latin1":
+ enc = ISO_8859_1
+ case "", "text/plain;charset=utf-8":
+ enc = UTF8
+ default:
+ return nil, fmt.Errorf("properties: invalid content type %s", ct)
+ }
+
+ return l.loadBytes(body, enc)
+}
+
+func (l *Loader) loadBytes(buf []byte, enc Encoding) (*Properties, error) {
+ p, err := parse(convert(buf, enc))
+ if err != nil {
+ return nil, err
+ }
+ p.DisableExpansion = l.DisableExpansion
+ if p.DisableExpansion {
+ return p, nil
+ }
+ return p, p.check()
+}
+
+// Load reads a buffer into a Properties struct.
+func Load(buf []byte, enc Encoding) (*Properties, error) {
+ l := &Loader{Encoding: enc}
+ return l.LoadBytes(buf)
+}
+
+// LoadString reads a UTF-8 string into a Properties struct.
+func LoadString(s string) (*Properties, error) {
+ l := &Loader{Encoding: UTF8}
+ return l.LoadBytes([]byte(s))
+}
+
+// LoadMap creates a new Properties struct from a string map.
+func LoadMap(m map[string]string) *Properties {
+ p := NewProperties()
+ for k, v := range m {
+ p.Set(k, v)
+ }
+ return p
+}
+
+// LoadFile reads a file into a Properties struct.
+func LoadFile(filename string, enc Encoding) (*Properties, error) {
+ l := &Loader{Encoding: enc}
+ return l.LoadAll([]string{filename})
+}
+
+// LoadFiles reads multiple files in the given order into
+// a Properties struct. If 'ignoreMissing' is true then
+// non-existent files will not be reported as error.
+func LoadFiles(filenames []string, enc Encoding, ignoreMissing bool) (*Properties, error) {
+ l := &Loader{Encoding: enc, IgnoreMissing: ignoreMissing}
+ return l.LoadAll(filenames)
+}
+
+// LoadURL reads the content of the URL into a Properties struct.
+// See Loader#LoadURL for details.
+func LoadURL(url string) (*Properties, error) {
+ l := &Loader{Encoding: UTF8}
+ return l.LoadAll([]string{url})
+}
+
+// LoadURLs reads the content of multiple URLs in the given order into a
+// Properties struct. If IgnoreMissing is true then a 404 status code will
+// not be reported as error. See Loader#LoadURL for the Content-Type header
+// and the encoding.
+func LoadURLs(urls []string, ignoreMissing bool) (*Properties, error) {
+ l := &Loader{Encoding: UTF8, IgnoreMissing: ignoreMissing}
+ return l.LoadAll(urls)
+}
+
+// LoadAll reads the content of multiple URLs or files in the given order into a
+// Properties struct. If 'ignoreMissing' is true then a 404 status code or missing file will
+// not be reported as error. Encoding sets the encoding for files. For the URLs please see
+// LoadURL for the Content-Type header and the encoding.
+func LoadAll(names []string, enc Encoding, ignoreMissing bool) (*Properties, error) {
+ l := &Loader{Encoding: enc, IgnoreMissing: ignoreMissing}
+ return l.LoadAll(names)
+}
+
+// MustLoadString reads a UTF-8 string into a Properties struct and
+// panics on error.
+func MustLoadString(s string) *Properties {
+ return must(LoadString(s))
+}
+
+// MustLoadFile reads a file into a Properties struct and
+// panics on error.
+func MustLoadFile(filename string, enc Encoding) *Properties {
+ return must(LoadFile(filename, enc))
+}
+
+// MustLoadFiles reads multiple files in the given order into
+// a Properties struct and panics on error. If 'ignoreMissing'
+// is true then non-existent files will not be reported as error.
+func MustLoadFiles(filenames []string, enc Encoding, ignoreMissing bool) *Properties {
+ return must(LoadFiles(filenames, enc, ignoreMissing))
+}
+
+// MustLoadURL reads the content of a URL into a Properties struct and
+// panics on error.
+func MustLoadURL(url string) *Properties {
+ return must(LoadURL(url))
+}
+
+// MustLoadURLs reads the content of multiple URLs in the given order into a
+// Properties struct and panics on error. If 'ignoreMissing' is true then a 404
+// status code will not be reported as error.
+func MustLoadURLs(urls []string, ignoreMissing bool) *Properties {
+ return must(LoadURLs(urls, ignoreMissing))
+}
+
+// MustLoadAll reads the content of multiple URLs or files in the given order into a
+// Properties struct. If 'ignoreMissing' is true then a 404 status code or missing file will
+// not be reported as error. Encoding sets the encoding for files. For the URLs please see
+// LoadURL for the Content-Type header and the encoding. It panics on error.
+func MustLoadAll(names []string, enc Encoding, ignoreMissing bool) *Properties {
+ return must(LoadAll(names, enc, ignoreMissing))
+}
+
+func must(p *Properties, err error) *Properties {
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return p
+}
+
+// expandName expands ${ENV_VAR} expressions in a name.
+// If the environment variable does not exist then it will be replaced
+// with an empty string. Malformed expressions like "${ENV_VAR" will
+// be reported as error.
+func expandName(name string) (string, error) {
+ return expand(name, []string{}, "${", "}", make(map[string]string))
+}
+
+// convert interprets a byte buffer either as an ISO-8859-1 or UTF-8 encoded string.
+// For ISO-8859-1 we can convert each byte straight into a rune since the
+// first 256 unicode code points cover ISO-8859-1.
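+// For example, the ISO-8859-1 byte 0xE9 maps directly to the rune U+00E9 ('é').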
+func convert(buf []byte, enc Encoding) string {
+ switch enc {
+ case utf8Default, UTF8:
+ return string(buf)
+ case ISO_8859_1:
+ runes := make([]rune, len(buf))
+ for i, b := range buf {
+ runes[i] = rune(b)
+ }
+ return string(runes)
+ default:
+ ErrorHandler(fmt.Errorf("unsupported encoding %v", enc))
+ }
+ panic("ErrorHandler should exit")
+}
diff --git a/vendor/github.com/magiconair/properties/parser.go b/vendor/github.com/magiconair/properties/parser.go
new file mode 100644
index 0000000..fccfd39
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/parser.go
@@ -0,0 +1,86 @@
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package properties
+
+import (
+ "fmt"
+ "runtime"
+)
+
+type parser struct {
+ lex *lexer
+}
+
+func parse(input string) (properties *Properties, err error) {
+ p := &parser{lex: lex(input)}
+ defer p.recover(&err)
+
+ properties = NewProperties()
+ key := ""
+ comments := []string{}
+
+ for {
+ token := p.expectOneOf(itemComment, itemKey, itemEOF)
+ switch token.typ {
+ case itemEOF:
+ goto done
+ case itemComment:
+ comments = append(comments, token.val)
+ continue
+ case itemKey:
+ key = token.val
+ if _, ok := properties.m[key]; !ok {
+ properties.k = append(properties.k, key)
+ }
+ }
+
+ token = p.expectOneOf(itemValue, itemEOF)
+ if len(comments) > 0 {
+ properties.c[key] = comments
+ comments = []string{}
+ }
+ switch token.typ {
+ case itemEOF:
+ properties.m[key] = ""
+ goto done
+ case itemValue:
+ properties.m[key] = token.val
+ }
+ }
+
+done:
+ return properties, nil
+}
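+
+// For example (sketch), parsing the input
+//
+//	# greeting
+//	hello = world
+//
+// yields a Properties with m["hello"] == "world" and
+// c["hello"] == []string{"greeting"}.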
+
+func (p *parser) errorf(format string, args ...interface{}) {
+ format = fmt.Sprintf("properties: Line %d: %s", p.lex.lineNumber(), format)
+ panic(fmt.Errorf(format, args...))
+}
+
+func (p *parser) expectOneOf(expected ...itemType) (token item) {
+ token = p.lex.nextItem()
+ for _, v := range expected {
+ if token.typ == v {
+ return token
+ }
+ }
+ p.unexpected(token)
+ panic("unexpected token")
+}
+
+func (p *parser) unexpected(token item) {
+ p.errorf("%s", token)
+}
+
+// recover is the handler that turns panics into returns from the top level of parse.
+func (p *parser) recover(errp *error) {
+ e := recover()
+ if e != nil {
+ if _, ok := e.(runtime.Error); ok {
+ panic(e)
+ }
+ *errp = e.(error)
+ }
+}
diff --git a/vendor/github.com/magiconair/properties/properties.go b/vendor/github.com/magiconair/properties/properties.go
new file mode 100644
index 0000000..fb2f7b4
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/properties.go
@@ -0,0 +1,848 @@
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package properties
+
+// BUG(frank): Set() does not check for invalid unicode literals since this is currently handled by the lexer.
+// BUG(frank): Write() does not allow configuring the newline character. Therefore, LF is used even on Windows.
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+ "unicode/utf8"
+)
+
+const maxExpansionDepth = 64
+
+// ErrorHandlerFunc defines the type of function which handles failures
+// of the MustXXX() functions. An error handler function must exit
+// the application after handling the error.
+type ErrorHandlerFunc func(error)
+
+// ErrorHandler is the function which handles failures of the MustXXX()
+// functions. The default is LogFatalHandler.
+var ErrorHandler ErrorHandlerFunc = LogFatalHandler
+
+// LogHandlerFunc defines the function prototype for logging errors.
+type LogHandlerFunc func(fmt string, args ...interface{})
+
+// LogPrintf defines a log handler which uses log.Printf.
+var LogPrintf LogHandlerFunc = log.Printf
+
+// LogFatalHandler handles the error by logging a fatal error and exiting.
+func LogFatalHandler(err error) {
+ log.Fatal(err)
+}
+
+// PanicHandler handles the error by panicking.
+func PanicHandler(err error) {
+ panic(err)
+}
+
+// -----------------------------------------------------------------------------
+
+// A Properties contains the key/value pairs from the properties input.
+// All values are stored in unexpanded form and are expanded at runtime.
+type Properties struct {
+ // Pre-/Postfix for property expansion.
+ Prefix string
+ Postfix string
+
+ // DisableExpansion controls the expansion of properties on Get()
+ // and the check for circular references on Set(). When set to
+ // true Properties behaves like a simple key/value store and does
+ // not check for circular references on Get() or on Set().
+ DisableExpansion bool
+
+ // Stores the key/value pairs
+ m map[string]string
+
+ // Stores the comments per key.
+ c map[string][]string
+
+ // Stores the keys in order of appearance.
+ k []string
+
+ // WriteSeparator specifies the separator of key and value while writing the properties.
+ WriteSeparator string
+}
+
+// NewProperties creates a new Properties struct with the default
+// configuration for "${key}" expressions.
+func NewProperties() *Properties {
+ return &Properties{
+ Prefix: "${",
+ Postfix: "}",
+ m: map[string]string{},
+ c: map[string][]string{},
+ k: []string{},
+ }
+}
+
+// Load reads a buffer into the given Properties struct.
+func (p *Properties) Load(buf []byte, enc Encoding) error {
+ l := &Loader{Encoding: enc, DisableExpansion: p.DisableExpansion}
+ newProperties, err := l.LoadBytes(buf)
+ if err != nil {
+ return err
+ }
+ p.Merge(newProperties)
+ return nil
+}
+
+// Get returns the expanded value for the given key if it exists.
+// Otherwise, ok is false.
+func (p *Properties) Get(key string) (value string, ok bool) {
+ v, ok := p.m[key]
+ if p.DisableExpansion {
+ return v, ok
+ }
+ if !ok {
+ return "", false
+ }
+
+ expanded, err := p.expand(key, v)
+
+ // we guarantee that the expanded value is free of
+ // circular references and malformed expressions
+ // so we panic if we still get an error here.
+ if err != nil {
+ ErrorHandler(err)
+ }
+
+ return expanded, true
+}
+
+// MustGet returns the expanded value for the given key if it exists.
+// Otherwise, it panics.
+func (p *Properties) MustGet(key string) string {
+ if v, ok := p.Get(key); ok {
+ return v
+ }
+ ErrorHandler(invalidKeyError(key))
+ panic("ErrorHandler should exit")
+}
+
+// ----------------------------------------------------------------------------
+
+// ClearComments removes the comments for all keys.
+func (p *Properties) ClearComments() {
+ p.c = map[string][]string{}
+}
+
+// ----------------------------------------------------------------------------
+
+// GetComment returns the last comment before the given key or an empty string.
+func (p *Properties) GetComment(key string) string {
+ comments, ok := p.c[key]
+ if !ok || len(comments) == 0 {
+ return ""
+ }
+ return comments[len(comments)-1]
+}
+
+// ----------------------------------------------------------------------------
+
+// GetComments returns all comments that appeared before the given key or nil.
+func (p *Properties) GetComments(key string) []string {
+ if comments, ok := p.c[key]; ok {
+ return comments
+ }
+ return nil
+}
+
+// ----------------------------------------------------------------------------
+
+// SetComment sets the comment for the key.
+func (p *Properties) SetComment(key, comment string) {
+ p.c[key] = []string{comment}
+}
+
+// ----------------------------------------------------------------------------
+
+// SetComments sets the comments for the key. If the comments are nil then
+// all comments for this key are deleted.
+func (p *Properties) SetComments(key string, comments []string) {
+ if comments == nil {
+ delete(p.c, key)
+ return
+ }
+ p.c[key] = comments
+}
+
+// ----------------------------------------------------------------------------
+
+// GetBool checks if the expanded value is one of '1', 'yes',
+// 'true' or 'on' if the key exists. The comparison is case-insensitive.
+// If the key does not exist the default value is returned.
+func (p *Properties) GetBool(key string, def bool) bool {
+ v, err := p.getBool(key)
+ if err != nil {
+ return def
+ }
+ return v
+}
+
+// MustGetBool checks if the expanded value is one of '1', 'yes',
+// 'true' or 'on' if the key exists. The comparison is case-insensitive.
+// If the key does not exist the function panics.
+func (p *Properties) MustGetBool(key string) bool {
+ v, err := p.getBool(key)
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return v
+}
+
+func (p *Properties) getBool(key string) (value bool, err error) {
+ if v, ok := p.Get(key); ok {
+ return boolVal(v), nil
+ }
+ return false, invalidKeyError(key)
+}
+
+func boolVal(v string) bool {
+ v = strings.ToLower(v)
+ return v == "1" || v == "true" || v == "yes" || v == "on"
+}
+
+// ----------------------------------------------------------------------------
+
+// GetDuration parses the expanded value as a time.Duration (in ns) if the
+// key exists. If key does not exist or the value cannot be parsed the default
+// value is returned. In almost all cases you want to use GetParsedDuration().
+func (p *Properties) GetDuration(key string, def time.Duration) time.Duration {
+ v, err := p.getInt64(key)
+ if err != nil {
+ return def
+ }
+ return time.Duration(v)
+}
+
+// MustGetDuration parses the expanded value as a time.Duration (in ns) if
+// the key exists. If key does not exist or the value cannot be parsed the
+// function panics. In almost all cases you want to use MustGetParsedDuration().
+func (p *Properties) MustGetDuration(key string) time.Duration {
+ v, err := p.getInt64(key)
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return time.Duration(v)
+}
+
+// ----------------------------------------------------------------------------
+
+// GetParsedDuration parses the expanded value with time.ParseDuration() if the key exists.
+// If key does not exist or the value cannot be parsed the default
+// value is returned.
+func (p *Properties) GetParsedDuration(key string, def time.Duration) time.Duration {
+ s, ok := p.Get(key)
+ if !ok {
+ return def
+ }
+ v, err := time.ParseDuration(s)
+ if err != nil {
+ return def
+ }
+ return v
+}
+
+// MustGetParsedDuration parses the expanded value with time.ParseDuration() if the key exists.
+// If key does not exist or the value cannot be parsed the function panics.
+func (p *Properties) MustGetParsedDuration(key string) time.Duration {
+ s, ok := p.Get(key)
+ if !ok {
+ ErrorHandler(invalidKeyError(key))
+ }
+ v, err := time.ParseDuration(s)
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return v
+}
+
+// ----------------------------------------------------------------------------
+
+// GetFloat64 parses the expanded value as a float64 if the key exists.
+// If key does not exist or the value cannot be parsed the default
+// value is returned.
+func (p *Properties) GetFloat64(key string, def float64) float64 {
+ v, err := p.getFloat64(key)
+ if err != nil {
+ return def
+ }
+ return v
+}
+
+// MustGetFloat64 parses the expanded value as a float64 if the key exists.
+// If key does not exist or the value cannot be parsed the function panics.
+func (p *Properties) MustGetFloat64(key string) float64 {
+ v, err := p.getFloat64(key)
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return v
+}
+
+func (p *Properties) getFloat64(key string) (value float64, err error) {
+ if v, ok := p.Get(key); ok {
+ value, err = strconv.ParseFloat(v, 64)
+ if err != nil {
+ return 0, err
+ }
+ return value, nil
+ }
+ return 0, invalidKeyError(key)
+}
+
+// ----------------------------------------------------------------------------
+
+// GetInt parses the expanded value as an int if the key exists.
+// If key does not exist or the value cannot be parsed the default
+// value is returned. If the value does not fit into an int the
+// function panics with an out of range error.
+func (p *Properties) GetInt(key string, def int) int {
+ v, err := p.getInt64(key)
+ if err != nil {
+ return def
+ }
+ return intRangeCheck(key, v)
+}
+
+// MustGetInt parses the expanded value as an int if the key exists.
+// If key does not exist or the value cannot be parsed the function panics.
+// If the value does not fit into an int the function panics with
+// an out of range error.
+func (p *Properties) MustGetInt(key string) int {
+ v, err := p.getInt64(key)
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return intRangeCheck(key, v)
+}
+
+// ----------------------------------------------------------------------------
+
+// GetInt64 parses the expanded value as an int64 if the key exists.
+// If key does not exist or the value cannot be parsed the default
+// value is returned.
+func (p *Properties) GetInt64(key string, def int64) int64 {
+ v, err := p.getInt64(key)
+ if err != nil {
+ return def
+ }
+ return v
+}
+
+// MustGetInt64 parses the expanded value as an int64 if the key exists.
+// If key does not exist or the value cannot be parsed the function panics.
+func (p *Properties) MustGetInt64(key string) int64 {
+ v, err := p.getInt64(key)
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return v
+}
+
+func (p *Properties) getInt64(key string) (value int64, err error) {
+ if v, ok := p.Get(key); ok {
+ value, err = strconv.ParseInt(v, 10, 64)
+ if err != nil {
+ return 0, err
+ }
+ return value, nil
+ }
+ return 0, invalidKeyError(key)
+}
+
+// ----------------------------------------------------------------------------
+
+// GetUint parses the expanded value as a uint if the key exists.
+// If key does not exist or the value cannot be parsed the default
+// value is returned. If the value does not fit into a uint the
+// function panics with an out of range error.
+func (p *Properties) GetUint(key string, def uint) uint {
+ v, err := p.getUint64(key)
+ if err != nil {
+ return def
+ }
+ return uintRangeCheck(key, v)
+}
+
+// MustGetUint parses the expanded value as a uint if the key exists.
+// If key does not exist or the value cannot be parsed the function panics.
+// If the value does not fit into a uint the function panics with
+// an out of range error.
+func (p *Properties) MustGetUint(key string) uint {
+ v, err := p.getUint64(key)
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return uintRangeCheck(key, v)
+}
+
+// ----------------------------------------------------------------------------
+
+// GetUint64 parses the expanded value as a uint64 if the key exists.
+// If key does not exist or the value cannot be parsed the default
+// value is returned.
+func (p *Properties) GetUint64(key string, def uint64) uint64 {
+ v, err := p.getUint64(key)
+ if err != nil {
+ return def
+ }
+ return v
+}
+
+// MustGetUint64 parses the expanded value as a uint64 if the key exists.
+// If key does not exist or the value cannot be parsed the function panics.
+func (p *Properties) MustGetUint64(key string) uint64 {
+ v, err := p.getUint64(key)
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return v
+}
+
+func (p *Properties) getUint64(key string) (value uint64, err error) {
+ if v, ok := p.Get(key); ok {
+ value, err = strconv.ParseUint(v, 10, 64)
+ if err != nil {
+ return 0, err
+ }
+ return value, nil
+ }
+ return 0, invalidKeyError(key)
+}
+
+// ----------------------------------------------------------------------------
+
+// GetString returns the expanded value for the given key if it exists or
+// the default value otherwise.
+func (p *Properties) GetString(key, def string) string {
+ if v, ok := p.Get(key); ok {
+ return v
+ }
+ return def
+}
+
+// MustGetString returns the expanded value for the given key if it exists or
+// panics otherwise.
+func (p *Properties) MustGetString(key string) string {
+ if v, ok := p.Get(key); ok {
+ return v
+ }
+ ErrorHandler(invalidKeyError(key))
+ panic("ErrorHandler should exit")
+}
+
+// ----------------------------------------------------------------------------
+
+// Filter returns a new properties object which contains all properties
+// for which the key matches the pattern.
+func (p *Properties) Filter(pattern string) (*Properties, error) {
+ re, err := regexp.Compile(pattern)
+ if err != nil {
+ return nil, err
+ }
+
+ return p.FilterRegexp(re), nil
+}
+
+// FilterRegexp returns a new properties object which contains all properties
+// for which the key matches the regular expression.
+func (p *Properties) FilterRegexp(re *regexp.Regexp) *Properties {
+ pp := NewProperties()
+ for _, k := range p.k {
+ if re.MatchString(k) {
+ // TODO(fs): we are ignoring the error which flags a circular reference.
+ // TODO(fs): since we are just copying a subset of keys this cannot happen (fingers crossed)
+ pp.Set(k, p.m[k])
+ }
+ }
+ return pp
+}
+
+// FilterPrefix returns a new properties object with a subset of all keys
+// with the given prefix.
+func (p *Properties) FilterPrefix(prefix string) *Properties {
+ pp := NewProperties()
+ for _, k := range p.k {
+ if strings.HasPrefix(k, prefix) {
+ // TODO(fs): we are ignoring the error which flags a circular reference.
+ // TODO(fs): since we are just copying a subset of keys this cannot happen (fingers crossed)
+ pp.Set(k, p.m[k])
+ }
+ }
+ return pp
+}
+
+// FilterStripPrefix returns a new properties object with a subset of all keys
+// with the given prefix and the prefix removed from the keys.
+func (p *Properties) FilterStripPrefix(prefix string) *Properties {
+ pp := NewProperties()
+ n := len(prefix)
+ for _, k := range p.k {
+ if len(k) > len(prefix) && strings.HasPrefix(k, prefix) {
+ // TODO(fs): we are ignoring the error which flags a circular reference.
+ // TODO(fs): since we are modifying keys I am not entirely sure whether we can create a circular reference
+ // TODO(fs): this function should probably return an error but the signature is fixed
+ pp.Set(k[n:], p.m[k])
+ }
+ }
+ return pp
+}
+
+// Len returns the number of keys.
+func (p *Properties) Len() int {
+ return len(p.m)
+}
+
+// Keys returns all keys in the same order as in the input.
+func (p *Properties) Keys() []string {
+ keys := make([]string, len(p.k))
+ copy(keys, p.k)
+ return keys
+}
+
+// Set sets the property key to the corresponding value.
+// If a value for key existed before then ok is true and prev
+// contains the previous value. If the value contains a
+// circular reference or a malformed expression then
+// an error is returned.
+// An empty key is silently ignored.
+func (p *Properties) Set(key, value string) (prev string, ok bool, err error) {
+ if key == "" {
+ return "", false, nil
+ }
+
+ // if expansion is disabled we allow circular references
+ if p.DisableExpansion {
+ prev, ok = p.Get(key)
+ p.m[key] = value
+ if !ok {
+ p.k = append(p.k, key)
+ }
+ return prev, ok, nil
+ }
+
+ // to check for a circular reference we temporarily need
+ // to set the new value. If there is an error then revert
+ // to the previous state. Only if all tests are successful
+ // then we add the key to the p.k list.
+ prev, ok = p.Get(key)
+ p.m[key] = value
+
+ // now check for a circular reference
+ _, err = p.expand(key, value)
+ if err != nil {
+
+ // revert to the previous state
+ if ok {
+ p.m[key] = prev
+ } else {
+ delete(p.m, key)
+ }
+
+ return "", false, err
+ }
+
+ if !ok {
+ p.k = append(p.k, key)
+ }
+
+ return prev, ok, nil
+}
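+
+// For example (sketch, assuming no conflicting environment variables): after
+// p.Set("a", "${b}") succeeds, a subsequent p.Set("b", "${a}") returns a
+// circular reference error and leaves "b" unset.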
+
+// SetValue sets property key to the default string value
+// as defined by fmt.Sprintf("%v").
+func (p *Properties) SetValue(key string, value interface{}) error {
+ _, _, err := p.Set(key, fmt.Sprintf("%v", value))
+ return err
+}
+
+// MustSet sets the property key to the corresponding value.
+// If a value for key existed before then ok is true and prev
+// contains the previous value. An empty key is silently ignored.
+func (p *Properties) MustSet(key, value string) (prev string, ok bool) {
+ prev, ok, err := p.Set(key, value)
+ if err != nil {
+ ErrorHandler(err)
+ }
+ return prev, ok
+}
+
+// String returns a string of all expanded 'key = value' pairs.
+func (p *Properties) String() string {
+ var s string
+ for _, key := range p.k {
+ value, _ := p.Get(key)
+ s = fmt.Sprintf("%s%s = %s\n", s, key, value)
+ }
+ return s
+}
+
+// Sort sorts the properties keys in alphabetical order.
+// This is helpful before writing the properties.
+func (p *Properties) Sort() {
+ sort.Strings(p.k)
+}
+
+// Write writes all unexpanded 'key = value' pairs to the given writer.
+// Write returns the number of bytes written and any write error encountered.
+func (p *Properties) Write(w io.Writer, enc Encoding) (n int, err error) {
+ return p.WriteComment(w, "", enc)
+}
+
+// WriteComment writes all unexpanded 'key = value' pairs to the given writer.
+// If prefix is not empty then comments are written with a blank line and the
+// given prefix. The prefix should be either "# " or "! " to be compatible with
+// the properties file format. Otherwise, the properties parser will not be
+// able to read the file back in. It returns the number of bytes written and
+// any write error encountered.
+func (p *Properties) WriteComment(w io.Writer, prefix string, enc Encoding) (n int, err error) {
+ var x int
+
+ for _, key := range p.k {
+ value := p.m[key]
+
+ if prefix != "" {
+ if comments, ok := p.c[key]; ok {
+ // don't print comments if they are all empty
+ allEmpty := true
+ for _, c := range comments {
+ if c != "" {
+ allEmpty = false
+ break
+ }
+ }
+
+ if !allEmpty {
+ // add a blank line between entries but not at the top
+ if len(comments) > 0 && n > 0 {
+ x, err = fmt.Fprintln(w)
+ if err != nil {
+ return
+ }
+ n += x
+ }
+
+ for _, c := range comments {
+ x, err = fmt.Fprintf(w, "%s%s\n", prefix, c)
+ if err != nil {
+ return
+ }
+ n += x
+ }
+ }
+ }
+ }
+ sep := " = "
+ if p.WriteSeparator != "" {
+ sep = p.WriteSeparator
+ }
+ x, err = fmt.Fprintf(w, "%s%s%s\n", encode(key, " :", enc), sep, encode(value, "", enc))
+ if err != nil {
+ return
+ }
+ n += x
+ }
+ return
+}
+
+// Map returns a copy of the properties as a map.
+func (p *Properties) Map() map[string]string {
+ m := make(map[string]string)
+ for k, v := range p.m {
+ m[k] = v
+ }
+ return m
+}
+
+// FilterFunc returns a copy of the properties which includes the values which passed all filters.
+func (p *Properties) FilterFunc(filters ...func(k, v string) bool) *Properties {
+ pp := NewProperties()
+outer:
+ for k, v := range p.m {
+ for _, f := range filters {
+ if !f(k, v) {
+ continue outer
+ }
+ }
+ // set the key only after all filters have passed
+ pp.Set(k, v)
+ }
+ return pp
+}
+
+// ----------------------------------------------------------------------------
+
+// Delete removes the key and its comments.
+func (p *Properties) Delete(key string) {
+ delete(p.m, key)
+ delete(p.c, key)
+ newKeys := []string{}
+ for _, k := range p.k {
+ if k != key {
+ newKeys = append(newKeys, k)
+ }
+ }
+ p.k = newKeys
+}
+
+// Merge merges properties, comments and keys from other *Properties into p.
+func (p *Properties) Merge(other *Properties) {
+ for _, k := range other.k {
+ if _, ok := p.m[k]; !ok {
+ p.k = append(p.k, k)
+ }
+ }
+ for k, v := range other.m {
+ p.m[k] = v
+ }
+ for k, v := range other.c {
+ p.c[k] = v
+ }
+}
+
+// ----------------------------------------------------------------------------
+
+// check expands all values and returns an error if a circular reference or
+// a malformed expression was found.
+func (p *Properties) check() error {
+ for key, value := range p.m {
+ if _, err := p.expand(key, value); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (p *Properties) expand(key, input string) (string, error) {
+ // no pre/postfix -> nothing to expand
+ if p.Prefix == "" && p.Postfix == "" {
+ return input, nil
+ }
+
+ return expand(input, []string{key}, p.Prefix, p.Postfix, p.m)
+}
+
+// expand recursively expands expressions of '(prefix)key(postfix)' to their corresponding values.
+// The function keeps track of the keys that were already expanded and stops if it
+// detects a circular reference or a malformed expression of the form '(prefix)key'.
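+// For example, with values {"base": "/opt", "dir": "${base}/data"}, expanding
+// "${dir}/x" yields "/opt/data/x".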
+func expand(s string, keys []string, prefix, postfix string, values map[string]string) (string, error) {
+ if len(keys) > maxExpansionDepth {
+ return "", fmt.Errorf("expansion too deep")
+ }
+
+ for {
+ start := strings.Index(s, prefix)
+ if start == -1 {
+ return s, nil
+ }
+
+ keyStart := start + len(prefix)
+ keyLen := strings.Index(s[keyStart:], postfix)
+ if keyLen == -1 {
+ return "", fmt.Errorf("malformed expression")
+ }
+
+ end := keyStart + keyLen + len(postfix) - 1
+ key := s[keyStart : keyStart+keyLen]
+
+ // fmt.Printf("s:%q pp:%q start:%d end:%d keyStart:%d keyLen:%d key:%q\n", s, prefix + "..." + postfix, start, end, keyStart, keyLen, key)
+
+ for _, k := range keys {
+ if key == k {
+ var b bytes.Buffer
+ b.WriteString("circular reference in:\n")
+ for _, k1 := range keys {
+ fmt.Fprintf(&b, "%s=%s\n", k1, values[k1])
+ }
+ return "", fmt.Errorf(b.String())
+ }
+ }
+
+ val, ok := values[key]
+ if !ok {
+ val = os.Getenv(key)
+ }
+ newVal, err := expand(val, append(keys, key), prefix, postfix, values)
+ if err != nil {
+ return "", err
+ }
+ s = s[:start] + newVal + s[end+1:]
+ }
+}
+
+// encode encodes a UTF-8 string to ISO-8859-1 and escapes some characters.
+func encode(s string, special string, enc Encoding) string {
+ switch enc {
+ case UTF8:
+ return encodeUtf8(s, special)
+ case ISO_8859_1:
+ return encodeIso(s, special)
+ default:
+ panic(fmt.Sprintf("unsupported encoding %v", enc))
+ }
+}
+
+func encodeUtf8(s string, special string) string {
+ v := ""
+ for pos := 0; pos < len(s); {
+ r, w := utf8.DecodeRuneInString(s[pos:])
+ pos += w
+ v += escape(r, special)
+ }
+ return v
+}
+
+func encodeIso(s string, special string) string {
+ var r rune
+ var w int
+ var v string
+ for pos := 0; pos < len(s); {
+ switch r, w = utf8.DecodeRuneInString(s[pos:]); {
+ case r < 1<<8: // single byte rune -> escape special chars only
+ v += escape(r, special)
+ case r < 1<<16: // two byte rune -> unicode literal
+ v += fmt.Sprintf("\\u%04x", r)
+ default: // more than two bytes per rune -> can't encode
+ v += "?"
+ }
+ pos += w
+ }
+ return v
+}
+
+func escape(r rune, special string) string {
+ switch r {
+ case '\f':
+ return "\\f"
+ case '\n':
+ return "\\n"
+ case '\r':
+ return "\\r"
+ case '\t':
+ return "\\t"
+ case '\\':
+ return "\\\\"
+ default:
+ if strings.ContainsRune(special, r) {
+ return "\\" + string(r)
+ }
+ return string(r)
+ }
+}
+
+func invalidKeyError(key string) error {
+ return fmt.Errorf("unknown property: %s", key)
+}
diff --git a/vendor/github.com/magiconair/properties/rangecheck.go b/vendor/github.com/magiconair/properties/rangecheck.go
new file mode 100644
index 0000000..dbd60b3
--- /dev/null
+++ b/vendor/github.com/magiconair/properties/rangecheck.go
@@ -0,0 +1,31 @@
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package properties
+
+import (
+ "fmt"
+ "math"
+)
+
+// make this a var to overwrite it in a test
+var is32Bit = ^uint(0) == math.MaxUint32
+
+// intRangeCheck checks if the value fits into the int type and
+// panics if it does not.
+func intRangeCheck(key string, v int64) int {
+ if is32Bit && (v < math.MinInt32 || v > math.MaxInt32) {
+ panic(fmt.Sprintf("Value %d for key %s out of range", v, key))
+ }
+ return int(v)
+}
+
+// uintRangeCheck checks if the value fits into the uint type and
+// panics if it does not.
+func uintRangeCheck(key string, v uint64) uint {
+ if is32Bit && v > math.MaxUint32 {
+ panic(fmt.Sprintf("Value %d for key %s out of range", v, key))
+ }
+ return uint(v)
+}
diff --git a/vendor/github.com/marcboeker/go-duckdb/.gitignore b/vendor/github.com/marcboeker/go-duckdb/.gitignore
new file mode 100644
index 0000000..4edda05
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/.gitignore
@@ -0,0 +1,5 @@
+.vscode
+.DS_Store
+run.sh
+duckdb/
+.idea
diff --git a/vendor/github.com/marcboeker/go-duckdb/CONTRIBUTING.md b/vendor/github.com/marcboeker/go-duckdb/CONTRIBUTING.md
new file mode 100644
index 0000000..0138384
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/CONTRIBUTING.md
@@ -0,0 +1,11 @@
+# Contributing
+
+## Upgrading DuckDB
+
+To upgrade to a new version of DuckDB:
+
+1. Create a new branch with the current version number in it. E.g. `v0.9.0`.
+2. Change `DUCKDB_VERSION` in `Makefile` to match the version in the branch name.
+3. Push the updated `Makefile` and create a PR.
+4. Wait for GitHub Actions to pre-compile the static libraries in `deps`. They will be committed automatically to the branch.
+5. If everything looks good, the PR will be merged.
diff --git a/vendor/github.com/marcboeker/go-duckdb/LICENSE b/vendor/github.com/marcboeker/go-duckdb/LICENSE
new file mode 100644
index 0000000..9532899
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/LICENSE
@@ -0,0 +1,7 @@
+Copyright 2019 Marc Boeker
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/marcboeker/go-duckdb/Makefile b/vendor/github.com/marcboeker/go-duckdb/Makefile
new file mode 100644
index 0000000..69fd8db
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/Makefile
@@ -0,0 +1,78 @@
+DUCKDB_VERSION=0.9.2
+
+.PHONY: install
+install:
+ go install .
+
+.PHONY: examples
+examples:
+ go run examples/simple.go
+
+.PHONY: test
+test:
+ go test -v -race -count=1 .
+
+SRC_DIR := duckdb/src/amalgamation
+FILES := $(wildcard $(SRC_DIR)/*)
+
+.PHONY: deps.header
+deps.header:
+ git clone -b v${DUCKDB_VERSION} --depth 1 https://github.com/duckdb/duckdb.git
+ cp duckdb/src/include/duckdb.h duckdb.h
+
+.PHONY: deps.darwin.amd64
+deps.darwin.amd64:
+ if [ "$(shell uname -s | tr '[:upper:]' '[:lower:]')" != "darwin" ]; then echo "Error: must run build on darwin"; false; fi
+
+ git clone -b v${DUCKDB_VERSION} --depth 1 https://github.com/duckdb/duckdb.git
+ cd duckdb && \
+ CFLAGS="-target x86_64-apple-macos11 -O3" CXXFLAGS="-target x86_64-apple-macos11 -O3" BUILD_SHELL=0 BUILD_UNITTESTS=0 make -j 2 && \
+ mkdir -p lib && \
+ for f in `find . -name '*.o'`; do cp $$f lib; done && \
+ cd lib && \
+ ar rvs ../libduckdb.a *.o && \
+ cd .. && \
+ mv libduckdb.a ../deps/darwin_amd64/libduckdb.a
+
+.PHONY: deps.darwin.arm64
+deps.darwin.arm64:
+ if [ "$(shell uname -s | tr '[:upper:]' '[:lower:]')" != "darwin" ]; then echo "Error: must run build on darwin"; false; fi
+
+ git clone -b v${DUCKDB_VERSION} --depth 1 https://github.com/duckdb/duckdb.git
+ cd duckdb && \
+ CFLAGS="-target arm64-apple-macos11 -O3" CXXFLAGS="-target arm64-apple-macos11 -O3" BUILD_SHELL=0 BUILD_UNITTESTS=0 make -j 2 && \
+ mkdir -p lib && \
+ for f in `find . -name '*.o'`; do cp $$f lib; done && \
+ cd lib && \
+ ar rvs ../libduckdb.a *.o && \
+ cd .. && \
+ mv libduckdb.a ../deps/darwin_arm64/libduckdb.a
+
+.PHONY: deps.linux.amd64
+deps.linux.amd64:
+ if [ "$(shell uname -s | tr '[:upper:]' '[:lower:]')" != "linux" ]; then echo "Error: must run build on linux"; false; fi
+
+ git clone -b v${DUCKDB_VERSION} --depth 1 https://github.com/duckdb/duckdb.git
+ cd duckdb && \
+ CFLAGS="-O3" CXXFLAGS="-O3" make -j 2 && \
+ BUILD_SHELL=0 BUILD_UNITTESTS=0 make -j 2 && \
+ mkdir -p lib && \
+ for f in `find . -name '*.o'`; do cp $$f lib; done && \
+ cd lib && \
+ ar rvs ../libduckdb.a *.o && \
+ cd .. && \
+ mv libduckdb.a ../deps/linux_amd64/libduckdb.a
+
+.PHONY: deps.linux.arm64
+deps.linux.arm64:
+ if [ "$(shell uname -s | tr '[:upper:]' '[:lower:]')" != "linux" ]; then echo "Error: must run build on linux"; false; fi
+
+ git clone -b v${DUCKDB_VERSION} --depth 1 https://github.com/duckdb/duckdb.git
+ cd duckdb && \
+ CC="aarch64-linux-gnu-gcc" CXX="aarch64-linux-gnu-g++" CFLAGS="-O3" CXXFLAGS="-O3" BUILD_SHELL=0 BUILD_UNITTESTS=0 make -j 2 && \
+ mkdir -p lib && \
+ for f in `find . -name '*.o'`; do cp $$f lib; done && \
+ cd lib && \
+ ar rvs ../libduckdb.a *.o && \
+ cd .. && \
+ mv libduckdb.a ../deps/linux_arm64/libduckdb.a
diff --git a/vendor/github.com/marcboeker/go-duckdb/README.md b/vendor/github.com/marcboeker/go-duckdb/README.md
new file mode 100644
index 0000000..5c88f03
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/README.md
@@ -0,0 +1,116 @@
+# Go SQL driver for [DuckDB](https://github.com/duckdb/duckdb)
+
+The DuckDB driver conforms to the built-in `database/sql` interface.
+
+![Tests status](https://github.com/marcboeker/go-duckdb/actions/workflows/tests.yaml/badge.svg)
+
+## Notice on v1.4.0
+
+Version `1.4.0` changed the DuckDB decimal representation from `float64` to a new [`Decimal`](https://github.com/marcboeker/go-duckdb/blob/d722d9c9d2bc9364d2f22a3afec3cbd26ac07f41/types.go#L83) type, which is much more precise. If you are upgrading to `v1.4.0` and are using DuckDB's decimals, please make sure to update your code to make use of the new `Decimal` type.
+
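+A minimal sketch of reading a decimal with the new type (assuming the
+`Float64()` accessor from the linked `types.go`; scanning into `any` and
+type-asserting keeps the example independent of the exact field set):
+
+```go
+var v any
+if err := db.QueryRow(`SELECT 12.345::DECIMAL(8, 3)`).Scan(&v); err != nil {
+ log.Fatal(err)
+}
+if dec, ok := v.(duckdb.Decimal); ok {
+ fmt.Println(dec.Float64()) // 12.345
+}
+```
+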
+## Installation
+
+```
+go get github.com/marcboeker/go-duckdb
+```
+
+`go-duckdb` uses `CGO` to make calls to DuckDB. You must build your binaries with `CGO_ENABLED=1`.
+
+## Usage
+
+`go-duckdb` hooks into the `database/sql` interface provided by the Go stdlib. To open a connection, simply specify the driver type as `duckdb`:
+
+```go
+db, err := sql.Open("duckdb", "")
+```
+
+This creates an in-memory instance of DuckDB. If you would like to store the data on the filesystem, you need to specify the path where the database should be stored:
+
+```go
+db, err := sql.Open("duckdb", "/path/to/foo.db")
+```
+
+If you want to set specific [config options for DuckDB](https://duckdb.org/docs/sql/configuration), you can add them as query style parameters in the form of `name=value` to the DSN, like:
+
+```go
+db, err := sql.Open("duckdb", "/path/to/foo.db?access_mode=read_only&threads=4")
+```
+
+Alternatively, you can use `sql.OpenDB` when you want to perform some initialization before the connection is created and returned from the connection pool on a call to `db.Conn`.
+Here's an example that installs and loads the JSON extension for each connection:
+
+```go
+connector, err := duckdb.NewConnector("/path/to/foo.db?access_mode=read_only&threads=4", func(execer driver.Execer) error {
+ bootQueries := []string{
+ "INSTALL 'json'",
+ "LOAD 'json'",
+ }
+
+ for _, qry := range bootQueries {
+ _, err = execer.Exec(qry, nil)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+})
+if err != nil {
+ return nil, err
+}
+
+db := sql.OpenDB(connector)
+db.SetMaxOpenConns(poolsize)
+...
+```
+
+Please refer to the [database/sql](https://godoc.org/database/sql) GoDoc for further usage instructions.
+
+## DuckDB Appender API
+
+If you want to use the [DuckDB Appender API](https://duckdb.org/docs/data/appender.html), you can obtain a new Appender by supplying a DuckDB connection to `NewAppenderFromConn()`.
+
+```go
+connector, err := NewConnector("test.db", nil)
+if err != nil {
+ ...
+}
+conn, err := connector.Connect(context.Background())
+if err != nil {
+ ...
+}
+defer conn.Close()
+
+// Retrieve appender from connection (note that you have to create the table 'test' beforehand).
+appender, err := NewAppenderFromConn(conn, "", "test")
+if err != nil {
+ ...
+}
+defer appender.Close()
+
+err = appender.AppendRow(...)
+if err != nil {
+ ...
+}
+
+// Optional, if you want to access the appended rows immediately.
+err = appender.Flush()
+if err != nil {
+ ...
+}
+```
+
+## Linking DuckDB
+
+By default, `go-duckdb` statically links DuckDB into your binary. Statically linking DuckDB adds around 30 MB to your binary size. On Linux (Intel and ARM) and macOS (Intel and ARM), `go-duckdb` bundles pre-compiled static libraries for fast builds.
+
+Alternatively, you can dynamically link DuckDB by passing `-tags=duckdb_use_lib` to `go build`. You must have a copy of `libduckdb` available on your system (`.so` on Linux or `.dylib` on macOS), which you can download from the DuckDB [releases page](https://github.com/duckdb/duckdb/releases). For example:
+
+```sh
+# On Linux
+CGO_ENABLED=1 CGO_LDFLAGS="-L/path/to/libs" go build -tags=duckdb_use_lib main.go
+LD_LIBRARY_PATH=/path/to/libs ./main
+
+# On macOS
+CGO_ENABLED=1 CGO_LDFLAGS="-L/path/to/libs" go build -tags=duckdb_use_lib main.go
+DYLD_LIBRARY_PATH=/path/to/libs ./main
+```
diff --git a/vendor/github.com/marcboeker/go-duckdb/abi.go b/vendor/github.com/marcboeker/go-duckdb/abi.go
new file mode 100644
index 0000000..02f8bd3
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/abi.go
@@ -0,0 +1,29 @@
+package duckdb
+
+/*
+#include <duckdb.h>
+*/
+import "C"
+
+const (
+ stringInlineLength = 12
+ stringPrefixLength = 4
+)
+
+// refer to convert_vector_list in
+// duckdb/tools/juliapkg/src/result.jl
+type duckdb_list_entry_t struct {
+ offset C.idx_t
+ length C.idx_t
+}
+
+// Refer to
+// struct string_t
+// duckdb/src/include/duckdb/common/types/string_type.hpp
+// duckdb/tools/juliapkg/src/ctypes.jl
+// duckdb/tools/juliapkg/src/result.jl
+type duckdb_string_t struct {
+ length int32
+ prefix [stringPrefixLength]byte
+ ptr *C.char
+}
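+
+// In DuckDB's string_t layout, strings of at most stringInlineLength bytes are
+// stored inline in the struct itself, while longer strings keep their first
+// stringPrefixLength bytes in prefix and the full data behind ptr.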
diff --git a/vendor/github.com/marcboeker/go-duckdb/appender.go b/vendor/github.com/marcboeker/go-duckdb/appender.go
new file mode 100644
index 0000000..3f32811
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/appender.go
@@ -0,0 +1,158 @@
+package duckdb
+
+/*
+#include <stdlib.h>
+#include <duckdb.h>
+*/
+import "C"
+
+import (
+ "database/sql/driver"
+ "errors"
+ "fmt"
+ "time"
+ "unsafe"
+)
+
+// Appender holds the duckdb appender. It allows loading bulk data into a DuckDB database.
+type Appender struct {
+ c *conn
+ schema string
+ table string
+ appender *C.duckdb_appender
+ closed bool
+}
+
+// NewAppenderFromConn returns a new Appender from a DuckDB driver connection.
+func NewAppenderFromConn(driverConn driver.Conn, schema string, table string) (*Appender, error) {
+ dbConn, ok := driverConn.(*conn)
+ if !ok {
+ return nil, fmt.Errorf("not a duckdb driver connection")
+ }
+
+ if dbConn.closed {
+ panic("database/sql/driver: misuse of duckdb driver: Appender after Close")
+ }
+
+ var schemastr *(C.char)
+ if schema != "" {
+ schemastr = C.CString(schema)
+ defer C.free(unsafe.Pointer(schemastr))
+ }
+
+ tablestr := C.CString(table)
+ defer C.free(unsafe.Pointer(tablestr))
+
+ var a C.duckdb_appender
+ if state := C.duckdb_appender_create(*dbConn.con, schemastr, tablestr, &a); state == C.DuckDBError {
+ return nil, fmt.Errorf("can't create appender")
+ }
+
+ return &Appender{c: dbConn, schema: schema, table: table, appender: &a}, nil
+}
+
+// Error returns the last duckdb appender error.
+func (a *Appender) Error() error {
+ dbErr := C.GoString(C.duckdb_appender_error(*a.appender))
+ return errors.New(dbErr)
+}
+
+// Flush the appender to the underlying table and clear the internal cache.
+func (a *Appender) Flush() error {
+ if state := C.duckdb_appender_flush(*a.appender); state == C.DuckDBError {
+ dbErr := C.GoString(C.duckdb_appender_error(*a.appender))
+ return errors.New(dbErr)
+ }
+ return nil
+}
+
+// Close closes the appender.
+func (a *Appender) Close() error {
+ if a.closed {
+ panic("database/sql/driver: misuse of duckdb driver: double Close of Appender")
+ }
+
+ a.closed = true
+
+ if state := C.duckdb_appender_destroy(a.appender); state == C.DuckDBError {
+ dbErr := C.GoString(C.duckdb_appender_error(*a.appender))
+ return errors.New(dbErr)
+ }
+ return nil
+}
+
+// AppendRow loads a row of values into the appender. The values are provided as separate arguments.
+func (a *Appender) AppendRow(args ...driver.Value) error {
+ return a.AppendRowArray(args)
+}
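+
+// For example (sketch), for a table created with
+// "CREATE TABLE test (id BIGINT, name VARCHAR, ok BOOLEAN)":
+//
+//	err := appender.AppendRow(int64(1), "alice", true)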
+
+// AppendRowArray loads a row of values into the appender. The values are provided as an array.
+func (a *Appender) AppendRowArray(args []driver.Value) error {
+ if a.closed {
+ panic("database/sql/driver: misuse of duckdb driver: use of closed Appender")
+ }
+
+ for i, v := range args {
+ if v == nil {
+ if rv := C.duckdb_append_null(*a.appender); rv == C.DuckDBError {
+ return fmt.Errorf("couldn't append parameter %d", i)
+ }
+ continue
+ }
+
+ var rv C.duckdb_state
+ switch v := v.(type) {
+ case uint8:
+ rv = C.duckdb_append_uint8(*a.appender, C.uint8_t(v))
+ case int8:
+ rv = C.duckdb_append_int8(*a.appender, C.int8_t(v))
+ case uint16:
+ rv = C.duckdb_append_uint16(*a.appender, C.uint16_t(v))
+ case int16:
+ rv = C.duckdb_append_int16(*a.appender, C.int16_t(v))
+ case uint32:
+ rv = C.duckdb_append_uint32(*a.appender, C.uint32_t(v))
+ case int32:
+ rv = C.duckdb_append_int32(*a.appender, C.int32_t(v))
+ case uint64:
+ rv = C.duckdb_append_uint64(*a.appender, C.uint64_t(v))
+ case int64:
+ rv = C.duckdb_append_int64(*a.appender, C.int64_t(v))
+ case uint:
+ rv = C.duckdb_append_uint64(*a.appender, C.uint64_t(v))
+ case int:
+ rv = C.duckdb_append_int64(*a.appender, C.int64_t(v))
+ case float32:
+ rv = C.duckdb_append_float(*a.appender, C.float(v))
+ case float64:
+ rv = C.duckdb_append_double(*a.appender, C.double(v))
+ case bool:
+ rv = C.duckdb_append_bool(*a.appender, C.bool(v))
+ case []byte:
+ var p unsafe.Pointer
+ if len(v) > 0 { // avoid taking &v[0] of an empty slice, which would panic
+ p = unsafe.Pointer(&v[0])
+ }
+ rv = C.duckdb_append_blob(*a.appender, p, C.uint64_t(len(v)))
+ case string:
+ str := C.CString(v)
+ rv = C.duckdb_append_varchar(*a.appender, str)
+ C.free(unsafe.Pointer(str))
+ case time.Time:
+ var dt C.duckdb_timestamp
+ dt.micros = C.int64_t(v.UTC().UnixMicro())
+ rv = C.duckdb_append_timestamp(*a.appender, dt)
+
+ default:
+ return fmt.Errorf("couldn't append unsupported parameter %d (type %T)", i, v)
+ }
+ if rv == C.DuckDBError {
+ dbErr := C.GoString(C.duckdb_appender_error(*a.appender))
+ return fmt.Errorf("couldn't append parameter %d (type %T): %s", i, v, dbErr)
+ }
+ }
+
+ if state := C.duckdb_appender_end_row(*a.appender); state == C.DuckDBError {
+ dbErr := C.GoString(C.duckdb_appender_error(*a.appender))
+ return errors.New(dbErr)
+ }
+
+ return nil
+}
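+
+// A minimal usage sketch (editor's illustration; the "test.db" path and the
+// "users" table are hypothetical). The Appender is reached through the raw
+// driver connection that database/sql exposes:
+//
+//	db, _ := sql.Open("duckdb", "test.db")
+//	con, _ := db.Conn(context.Background())
+//	_ = con.Raw(func(driverConn interface{}) error {
+//		a, err := NewAppenderFromConn(driverConn.(driver.Conn), "", "users")
+//		if err != nil {
+//			return err
+//		}
+//		defer a.Close()
+//		if err := a.AppendRow(int64(1), "alice"); err != nil {
+//			return err
+//		}
+//		return a.Flush()
+//	})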
+
+var errCouldNotAppend = errors.New("could not append parameter")
diff --git a/vendor/github.com/marcboeker/go-duckdb/cgo_shared.go b/vendor/github.com/marcboeker/go-duckdb/cgo_shared.go
new file mode 100644
index 0000000..2af8fb2
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/cgo_shared.go
@@ -0,0 +1,9 @@
+//go:build duckdb_use_lib
+
+package duckdb
+
+/*
+#cgo LDFLAGS: -lduckdb
+#include <duckdb.h>
+*/
+import "C"
diff --git a/vendor/github.com/marcboeker/go-duckdb/cgo_static.go b/vendor/github.com/marcboeker/go-duckdb/cgo_static.go
new file mode 100644
index 0000000..342b22a
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/cgo_static.go
@@ -0,0 +1,13 @@
+//go:build !duckdb_use_lib && (darwin || (linux && (amd64 || arm64)))
+
+package duckdb
+
+/*
+#cgo LDFLAGS: -lduckdb
+#cgo darwin,amd64 LDFLAGS: -lc++ -L${SRCDIR}/deps/darwin_amd64
+#cgo darwin,arm64 LDFLAGS: -lc++ -L${SRCDIR}/deps/darwin_arm64
+#cgo linux,amd64 LDFLAGS: -lstdc++ -lm -ldl -L${SRCDIR}/deps/linux_amd64
+#cgo linux,arm64 LDFLAGS: -lstdc++ -lm -ldl -L${SRCDIR}/deps/linux_arm64
+#include <duckdb.h>
+*/
+import "C"
diff --git a/vendor/github.com/marcboeker/go-duckdb/connection.go b/vendor/github.com/marcboeker/go-duckdb/connection.go
new file mode 100644
index 0000000..11fb421
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/connection.go
@@ -0,0 +1,194 @@
+package duckdb
+
+/*
+#include <duckdb.h>
+*/
+import "C"
+
+import (
+ "context"
+ "database/sql"
+ "database/sql/driver"
+ "errors"
+ "math/big"
+ "unsafe"
+)
+
+type conn struct {
+ con *C.duckdb_connection
+ closed bool
+ tx bool
+}
+
+func (c *conn) CheckNamedValue(nv *driver.NamedValue) error {
+ switch nv.Value.(type) {
+ case *big.Int, Interval:
+ return nil
+ }
+ return driver.ErrSkip
+}
+
+func (c *conn) ExecContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Result, error) {
+ if c.closed {
+ panic("database/sql/driver: misuse of duckdb driver: ExecContext after Close")
+ }
+
+ stmts, size, err := c.extractStmts(query)
+ if err != nil {
+ return nil, err
+ }
+ defer C.duckdb_destroy_extracted(&stmts)
+
+ // execute all statements without args, except the last one
+ for i := C.idx_t(0); i < size-1; i++ {
+ stmt, err := c.prepareExtractedStmt(stmts, i)
+ if err != nil {
+ return nil, err
+ }
+ // send nil args to execute statement and ignore result
+ _, err = stmt.ExecContext(ctx, nil)
+ stmt.Close()
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // prepare and execute last statement with args and return result
+ stmt, err := c.prepareExtractedStmt(stmts, size-1)
+ if err != nil {
+ return nil, err
+ }
+ defer stmt.Close()
+ return stmt.ExecContext(ctx, args)
+}
+
+func (c *conn) QueryContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Rows, error) {
+ if c.closed {
+ panic("database/sql/driver: misuse of duckdb driver: QueryContext after Close")
+ }
+
+ stmts, size, err := c.extractStmts(query)
+ if err != nil {
+ return nil, err
+ }
+ defer C.duckdb_destroy_extracted(&stmts)
+
+ // execute all statements without args, except the last one
+ for i := C.idx_t(0); i < size-1; i++ {
+ stmt, err := c.prepareExtractedStmt(stmts, i)
+ if err != nil {
+ return nil, err
+ }
+ // send nil args and use ExecContext, since the result of these leading statements is discarded anyway
+ _, err = stmt.ExecContext(ctx, nil)
+ stmt.Close()
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // prepare and execute last statement with args and return result
+ stmt, err := c.prepareExtractedStmt(stmts, size-1)
+ if err != nil {
+ return nil, err
+ }
+
+ rows, err := stmt.QueryContext(ctx, args)
+ if err != nil {
+ stmt.Close()
+ return nil, err
+ }
+
+ // we can't close the statement before the query result rows are closed
+ stmt.closeOnRowsClose = true
+ return rows, err
+}
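+
+// Editor's sketch of the multi-statement handling above (the table name is
+// hypothetical): every statement except the last runs without arguments, and
+// only the last receives the bound args and yields the returned rows.
+//
+//	rows, err := db.QueryContext(ctx,
+//		"CREATE TABLE t (i INTEGER); INSERT INTO t VALUES (1); SELECT i FROM t WHERE i = ?", 1)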
+
+func (c *conn) Prepare(cmd string) (driver.Stmt, error) {
+ if c.closed {
+ panic("database/sql/driver: misuse of duckdb driver: Prepare after Close")
+ }
+ return c.prepareStmt(cmd)
+}
+
+// Deprecated: Use BeginTx instead.
+func (c *conn) Begin() (driver.Tx, error) {
+ return c.BeginTx(context.Background(), driver.TxOptions{})
+}
+
+func (c *conn) BeginTx(ctx context.Context, opts driver.TxOptions) (driver.Tx, error) {
+ if c.tx {
+ panic("database/sql/driver: misuse of duckdb driver: multiple Tx")
+ }
+
+ if opts.ReadOnly {
+ return nil, errors.New("read-only transactions are not supported")
+ }
+
+ switch sql.IsolationLevel(opts.Isolation) {
+ case sql.LevelDefault:
+ default:
+ return nil, errors.New("isolation levels other than default are not supported")
+ }
+
+ if _, err := c.ExecContext(ctx, "BEGIN TRANSACTION", nil); err != nil {
+ return nil, err
+ }
+
+ c.tx = true
+ return &tx{c}, nil
+}
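+
+// Editor's sketch of a transaction through database/sql; per the checks
+// above, only read-write transactions at the default isolation level are
+// accepted (the INSERT target is hypothetical):
+//
+//	tx, err := db.BeginTx(ctx, nil)
+//	if err != nil {
+//		return err
+//	}
+//	if _, err := tx.ExecContext(ctx, "INSERT INTO t VALUES (1)"); err != nil {
+//		tx.Rollback()
+//		return err
+//	}
+//	return tx.Commit()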
+
+func (c *conn) Close() error {
+ if c.closed {
+ panic("database/sql/driver: misuse of duckdb driver: Close of already closed connection")
+ }
+ c.closed = true
+
+ C.duckdb_disconnect(c.con)
+
+ return nil
+}
+
+func (c *conn) prepareStmt(cmd string) (*stmt, error) {
+ cmdstr := C.CString(cmd)
+ defer C.free(unsafe.Pointer(cmdstr))
+
+ var s C.duckdb_prepared_statement
+ if state := C.duckdb_prepare(*c.con, cmdstr, &s); state == C.DuckDBError {
+ dbErr := C.GoString(C.duckdb_prepare_error(s))
+ C.duckdb_destroy_prepare(&s)
+ return nil, errors.New(dbErr)
+ }
+
+ return &stmt{c: c, stmt: &s}, nil
+}
+
+func (c *conn) extractStmts(query string) (C.duckdb_extracted_statements, C.idx_t, error) {
+ cquery := C.CString(query)
+ defer C.free(unsafe.Pointer(cquery))
+
+ var stmts C.duckdb_extracted_statements
+ stmtsCount := C.duckdb_extract_statements(*c.con, cquery, &stmts)
+ if stmtsCount == 0 {
+ err := C.GoString(C.duckdb_extract_statements_error(stmts))
+ C.duckdb_destroy_extracted(&stmts)
+ if err != "" {
+ return nil, 0, errors.New(err)
+ }
+ return nil, 0, errors.New("no statements found")
+ }
+
+ return stmts, stmtsCount, nil
+}
+
+func (c *conn) prepareExtractedStmt(extractedStmts C.duckdb_extracted_statements, index C.idx_t) (*stmt, error) {
+ var s C.duckdb_prepared_statement
+ if state := C.duckdb_prepare_extracted_statement(*c.con, extractedStmts, index, &s); state == C.DuckDBError {
+ dbErr := C.GoString(C.duckdb_prepare_error(s))
+ C.duckdb_destroy_prepare(&s)
+ return nil, errors.New(dbErr)
+ }
+
+ return &stmt{c: c, stmt: &s}, nil
+}
diff --git a/vendor/github.com/marcboeker/go-duckdb/duckdb.go b/vendor/github.com/marcboeker/go-duckdb/duckdb.go
new file mode 100644
index 0000000..f9b2514
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/duckdb.go
@@ -0,0 +1,144 @@
+// Use of this source code is governed by an MIT-style
+// license that can be found in the LICENSE file.
+
+// Package duckdb implements a database/sql driver for the DuckDB database.
+package duckdb
+
+/*
+#include <duckdb.h>
+*/
+import "C"
+
+import (
+ "context"
+ "database/sql"
+ "database/sql/driver"
+ "errors"
+ "fmt"
+ "net/url"
+ "strings"
+ "unsafe"
+)
+
+func init() {
+ sql.Register("duckdb", Driver{})
+}
+
+type Driver struct{}
+
+func (d Driver) Open(dataSourceName string) (driver.Conn, error) {
+ connector, err := d.OpenConnector(dataSourceName)
+ if err != nil {
+ return nil, err
+ }
+ return connector.Connect(context.Background())
+}
+
+func (Driver) OpenConnector(dataSourceName string) (driver.Connector, error) {
+ return createConnector(dataSourceName, func(execerContext driver.ExecerContext) error { return nil })
+}
+
+// NewConnector creates a new Connector for the DuckDB database.
+func NewConnector(dsn string, connInitFn func(execer driver.ExecerContext) error) (driver.Connector, error) {
+ return createConnector(dsn, connInitFn)
+}
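+
+// Editor's sketch of a connector with a per-connection init function (the
+// PRAGMA shown is only an illustrative setting):
+//
+//	connector, err := NewConnector("test.db", func(execer driver.ExecerContext) error {
+//		_, err := execer.ExecContext(context.Background(), "PRAGMA threads=4", nil)
+//		return err
+//	})
+//	db := sql.OpenDB(connector)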
+
+func createConnector(dataSourceName string, connInitFn func(execer driver.ExecerContext) error) (driver.Connector, error) {
+ var db C.duckdb_database
+
+ parsedDSN, err := url.Parse(dataSourceName)
+ if err != nil {
+ return nil, fmt.Errorf("%w: %s", errParseConfig, err.Error())
+ }
+
+ connectionString := C.CString(extractConnectionString(dataSourceName))
+ defer C.free(unsafe.Pointer(connectionString))
+
+ // Check for config options.
+ if len(parsedDSN.RawQuery) == 0 {
+ var errMsg *C.char
+ // wrap the free in a closure so errMsg is read at function exit,
+ // not captured as nil when the defer statement is evaluated
+ defer func() { C.duckdb_free(unsafe.Pointer(errMsg)) }()
+
+ if state := C.duckdb_open_ext(connectionString, &db, nil, &errMsg); state == C.DuckDBError {
+ return nil, fmt.Errorf("%w: %s", errOpen, C.GoString(errMsg))
+ }
+ } else {
+ config, err := prepareConfig(parsedDSN.Query())
+ if err != nil {
+ return nil, err
+ }
+
+ var errMsg *C.char
+ // wrap the free in a closure so errMsg is read at function exit,
+ // not captured as nil when the defer statement is evaluated
+ defer func() { C.duckdb_free(unsafe.Pointer(errMsg)) }()
+
+ if state := C.duckdb_open_ext(connectionString, &db, config, &errMsg); state == C.DuckDBError {
+ return nil, fmt.Errorf("%w: %s", errOpen, C.GoString(errMsg))
+ }
+ }
+
+ return &connector{db: &db, connInitFn: connInitFn}, nil
+}
+
+type connector struct {
+ db *C.duckdb_database
+ connInitFn func(execer driver.ExecerContext) error
+}
+
+func (c *connector) Driver() driver.Driver {
+ return Driver{}
+}
+
+func (c *connector) Connect(context.Context) (driver.Conn, error) {
+ var con C.duckdb_connection
+ if state := C.duckdb_connect(*c.db, &con); state == C.DuckDBError {
+ return nil, errOpen
+ }
+ conn := &conn{con: &con}
+
+ // Call the connection init function if defined
+ if c.connInitFn != nil {
+ if err := c.connInitFn(conn); err != nil {
+ return nil, err
+ }
+ }
+ return conn, nil
+}
+
+func (c *connector) Close() error {
+ C.duckdb_close(c.db)
+ c.db = nil
+ return nil
+}
+
+func extractConnectionString(dataSourceName string) string {
+ var queryIndex = strings.Index(dataSourceName, "?")
+ if queryIndex < 0 {
+ queryIndex = len(dataSourceName)
+ }
+ return dataSourceName[0:queryIndex]
+}
+
+func prepareConfig(options map[string][]string) (C.duckdb_config, error) {
+ var config C.duckdb_config
+ if state := C.duckdb_create_config(&config); state == C.DuckDBError {
+ return nil, errCreateConfig
+ }
+
+ for k, v := range options {
+ if len(v) > 0 {
+ ck, cv := C.CString(k), C.CString(v[0])
+ state := C.duckdb_set_config(config, ck, cv)
+ // free the C strings; duckdb_set_config copies the option internally
+ C.free(unsafe.Pointer(ck))
+ C.free(unsafe.Pointer(cv))
+ if state == C.DuckDBError {
+ return nil, fmt.Errorf("%w: affected config option %s=%s", errPrepareConfig, k, v[0])
+ }
+ }
+ }
+
+ return config, nil
+}
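+
+// DSN query parameters map one-to-one onto DuckDB config options via the
+// function above (editor's sketch; the option values are illustrative):
+//
+//	db, err := sql.Open("duckdb", "test.db?access_mode=read_only&threads=4")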
+
+var (
+ errOpen = errors.New("could not open database")
+ errParseConfig = errors.New("could not parse config for database")
+ errCreateConfig = errors.New("could not create config for database")
+ errPrepareConfig = errors.New("could not set config for database")
+)
diff --git a/vendor/github.com/marcboeker/go-duckdb/duckdb.h b/vendor/github.com/marcboeker/go-duckdb/duckdb.h
new file mode 100644
index 0000000..86b0b1b
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/duckdb.h
@@ -0,0 +1,2516 @@
+//===----------------------------------------------------------------------===//
+//
+// DuckDB
+//
+// duckdb.h
+//
+//
+//===----------------------------------------------------------------------===//
+
+#pragma once
+
+// duplicate of duckdb/main/winapi.hpp
+#ifndef DUCKDB_API
+#ifdef _WIN32
+#if defined(DUCKDB_BUILD_LIBRARY) && !defined(DUCKDB_BUILD_LOADABLE_EXTENSION)
+#define DUCKDB_API __declspec(dllexport)
+#else
+#define DUCKDB_API __declspec(dllimport)
+#endif
+#else
+#define DUCKDB_API
+#endif
+#endif
+
+// duplicate of duckdb/main/winapi.hpp
+#ifndef DUCKDB_EXTENSION_API
+#ifdef _WIN32
+#ifdef DUCKDB_BUILD_LOADABLE_EXTENSION
+#define DUCKDB_EXTENSION_API __declspec(dllexport)
+#else
+#define DUCKDB_EXTENSION_API
+#endif
+#else
+#define DUCKDB_EXTENSION_API __attribute__((visibility("default")))
+#endif
+#endif
+
+// API versions
+// if no explicit API version is defined, the latest API version is used
+// Note that using older API versions (i.e. not using DUCKDB_API_LATEST) is deprecated.
+// These will not be supported long-term, and will be removed in future versions.
+#ifndef DUCKDB_API_0_3_1
+#define DUCKDB_API_0_3_1 1
+#endif
+#ifndef DUCKDB_API_0_3_2
+#define DUCKDB_API_0_3_2 2
+#endif
+#ifndef DUCKDB_API_LATEST
+#define DUCKDB_API_LATEST DUCKDB_API_0_3_2
+#endif
+
+#ifndef DUCKDB_API_VERSION
+#define DUCKDB_API_VERSION DUCKDB_API_LATEST
+#endif
+
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdlib.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+//===--------------------------------------------------------------------===//
+// Type Information
+//===--------------------------------------------------------------------===//
+typedef uint64_t idx_t;
+
+typedef enum DUCKDB_TYPE {
+ DUCKDB_TYPE_INVALID = 0,
+ // bool
+ DUCKDB_TYPE_BOOLEAN,
+ // int8_t
+ DUCKDB_TYPE_TINYINT,
+ // int16_t
+ DUCKDB_TYPE_SMALLINT,
+ // int32_t
+ DUCKDB_TYPE_INTEGER,
+ // int64_t
+ DUCKDB_TYPE_BIGINT,
+ // uint8_t
+ DUCKDB_TYPE_UTINYINT,
+ // uint16_t
+ DUCKDB_TYPE_USMALLINT,
+ // uint32_t
+ DUCKDB_TYPE_UINTEGER,
+ // uint64_t
+ DUCKDB_TYPE_UBIGINT,
+ // float
+ DUCKDB_TYPE_FLOAT,
+ // double
+ DUCKDB_TYPE_DOUBLE,
+ // duckdb_timestamp, in microseconds
+ DUCKDB_TYPE_TIMESTAMP,
+ // duckdb_date
+ DUCKDB_TYPE_DATE,
+ // duckdb_time
+ DUCKDB_TYPE_TIME,
+ // duckdb_interval
+ DUCKDB_TYPE_INTERVAL,
+ // duckdb_hugeint
+ DUCKDB_TYPE_HUGEINT,
+ // const char*
+ DUCKDB_TYPE_VARCHAR,
+ // duckdb_blob
+ DUCKDB_TYPE_BLOB,
+ // decimal
+ DUCKDB_TYPE_DECIMAL,
+ // duckdb_timestamp, in seconds
+ DUCKDB_TYPE_TIMESTAMP_S,
+ // duckdb_timestamp, in milliseconds
+ DUCKDB_TYPE_TIMESTAMP_MS,
+ // duckdb_timestamp, in nanoseconds
+ DUCKDB_TYPE_TIMESTAMP_NS,
+ // enum type, only useful as logical type
+ DUCKDB_TYPE_ENUM,
+ // list type, only useful as logical type
+ DUCKDB_TYPE_LIST,
+ // struct type, only useful as logical type
+ DUCKDB_TYPE_STRUCT,
+ // map type, only useful as logical type
+ DUCKDB_TYPE_MAP,
+ // duckdb_hugeint
+ DUCKDB_TYPE_UUID,
+ // union type, only useful as logical type
+ DUCKDB_TYPE_UNION,
+ // duckdb_bit
+ DUCKDB_TYPE_BIT,
+} duckdb_type;
+
+//! Days are stored as days since 1970-01-01
+//! Use the duckdb_from_date/duckdb_to_date function to extract individual information
+typedef struct {
+ int32_t days;
+} duckdb_date;
+
+typedef struct {
+ int32_t year;
+ int8_t month;
+ int8_t day;
+} duckdb_date_struct;
+
+//! Time is stored as microseconds since 00:00:00
+//! Use the duckdb_from_time/duckdb_to_time function to extract individual information
+typedef struct {
+ int64_t micros;
+} duckdb_time;
+
+typedef struct {
+ int8_t hour;
+ int8_t min;
+ int8_t sec;
+ int32_t micros;
+} duckdb_time_struct;
+
+//! Timestamps are stored as microseconds since 1970-01-01
+//! Use the duckdb_from_timestamp/duckdb_to_timestamp function to extract individual information
+typedef struct {
+ int64_t micros;
+} duckdb_timestamp;
+
+typedef struct {
+ duckdb_date_struct date;
+ duckdb_time_struct time;
+} duckdb_timestamp_struct;
+
+typedef struct {
+ int32_t months;
+ int32_t days;
+ int64_t micros;
+} duckdb_interval;
+
+//! Hugeints are composed in a (lower, upper) component
+//! The value of the hugeint is upper * 2^64 + lower
+//! For easy usage, the functions duckdb_hugeint_to_double/duckdb_double_to_hugeint are recommended
+typedef struct {
+ uint64_t lower;
+ int64_t upper;
+} duckdb_hugeint;
+
+typedef struct {
+ uint8_t width;
+ uint8_t scale;
+
+ duckdb_hugeint value;
+} duckdb_decimal;
+
+typedef struct {
+ char *data;
+ idx_t size;
+} duckdb_string;
+
+/*
+ The internal data representation of a VARCHAR/BLOB column
+*/
+typedef struct {
+ union {
+ struct {
+ uint32_t length;
+ char prefix[4];
+ char *ptr;
+ } pointer;
+ struct {
+ uint32_t length;
+ char inlined[12];
+ } inlined;
+ } value;
+} duckdb_string_t;
+
+typedef struct {
+ void *data;
+ idx_t size;
+} duckdb_blob;
+
+typedef struct {
+ uint64_t offset;
+ uint64_t length;
+} duckdb_list_entry;
+
+typedef struct {
+#if DUCKDB_API_VERSION < DUCKDB_API_0_3_2
+ void *data;
+ bool *nullmask;
+ duckdb_type type;
+ char *name;
+#else
+ // deprecated, use duckdb_column_data
+ void *__deprecated_data;
+ // deprecated, use duckdb_nullmask_data
+ bool *__deprecated_nullmask;
+ // deprecated, use duckdb_column_type
+ duckdb_type __deprecated_type;
+ // deprecated, use duckdb_column_name
+ char *__deprecated_name;
+#endif
+ void *internal_data;
+} duckdb_column;
+
+typedef struct {
+#if DUCKDB_API_VERSION < DUCKDB_API_0_3_2
+ idx_t column_count;
+ idx_t row_count;
+ idx_t rows_changed;
+ duckdb_column *columns;
+ char *error_message;
+#else
+ // deprecated, use duckdb_column_count
+ idx_t __deprecated_column_count;
+ // deprecated, use duckdb_row_count
+ idx_t __deprecated_row_count;
+ // deprecated, use duckdb_rows_changed
+ idx_t __deprecated_rows_changed;
+ // deprecated, use duckdb_column_ family of functions
+ duckdb_column *__deprecated_columns;
+ // deprecated, use duckdb_result_error
+ char *__deprecated_error_message;
+#endif
+ void *internal_data;
+} duckdb_result;
+
+typedef struct _duckdb_database {
+ void *__db;
+} * duckdb_database;
+typedef struct _duckdb_connection {
+ void *__conn;
+} * duckdb_connection;
+typedef struct _duckdb_prepared_statement {
+ void *__prep;
+} * duckdb_prepared_statement;
+typedef struct _duckdb_extracted_statements {
+ void *__extrac;
+} * duckdb_extracted_statements;
+typedef struct _duckdb_pending_result {
+ void *__pend;
+} * duckdb_pending_result;
+typedef struct _duckdb_appender {
+ void *__appn;
+} * duckdb_appender;
+typedef struct _duckdb_arrow {
+ void *__arrw;
+} * duckdb_arrow;
+typedef struct _duckdb_arrow_stream {
+ void *__arrwstr;
+} * duckdb_arrow_stream;
+typedef struct _duckdb_config {
+ void *__cnfg;
+} * duckdb_config;
+typedef struct _duckdb_arrow_schema {
+ void *__arrs;
+} * duckdb_arrow_schema;
+typedef struct _duckdb_arrow_array {
+ void *__arra;
+} * duckdb_arrow_array;
+typedef struct _duckdb_logical_type {
+ void *__lglt;
+} * duckdb_logical_type;
+typedef struct _duckdb_data_chunk {
+ void *__dtck;
+} * duckdb_data_chunk;
+typedef struct _duckdb_vector {
+ void *__vctr;
+} * duckdb_vector;
+typedef struct _duckdb_value {
+ void *__val;
+} * duckdb_value;
+
+typedef enum { DuckDBSuccess = 0, DuckDBError = 1 } duckdb_state;
+typedef enum {
+ DUCKDB_PENDING_RESULT_READY = 0,
+ DUCKDB_PENDING_RESULT_NOT_READY = 1,
+ DUCKDB_PENDING_ERROR = 2,
+ DUCKDB_PENDING_NO_TASKS_AVAILABLE = 3
+} duckdb_pending_state;
+
+//===--------------------------------------------------------------------===//
+// Open/Connect
+//===--------------------------------------------------------------------===//
+
+/*!
+Creates a new database or opens an existing database file stored at the given path.
+If no path is given a new in-memory database is created instead.
+The instantiated database should be closed with 'duckdb_close'
+
+* path: Path to the database file on disk, or `nullptr` or `:memory:` to open an in-memory database.
+* out_database: The result database object.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_open(const char *path, duckdb_database *out_database);
+
+/*!
+Extended version of duckdb_open. Creates a new database or opens an existing database file stored at the given path.
+
+* path: Path to the database file on disk, or `nullptr` or `:memory:` to open an in-memory database.
+* out_database: The result database object.
+* config: (Optional) configuration used to start up the database system.
+* out_error: If set and the function returns DuckDBError, this will contain the reason why the start-up failed.
+Note that the error must be freed using `duckdb_free`.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_open_ext(const char *path, duckdb_database *out_database, duckdb_config config,
+ char **out_error);
+
+/*!
+Closes the specified database and de-allocates all memory allocated for that database.
+This should be called after you are done with any database allocated through `duckdb_open`.
+Note that failing to call `duckdb_close` (in case of e.g. a program crash) will not cause data corruption.
+Still it is recommended to always correctly close a database object after you are done with it.
+
+* database: The database object to shut down.
+*/
+DUCKDB_API void duckdb_close(duckdb_database *database);
+
+/*!
+Opens a connection to a database. Connections are required to query the database, and store transactional state
+associated with the connection.
+The instantiated connection should be closed using 'duckdb_disconnect'
+
+* database: The database file to connect to.
+* out_connection: The result connection object.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_connect(duckdb_database database, duckdb_connection *out_connection);
+
+/*!
+Interrupt running query
+
+* connection: The connection to interrupt
+*/
+DUCKDB_API void duckdb_interrupt(duckdb_connection connection);
+
+/*!
+Get progress of the running query
+
+* connection: The working connection
+* returns: -1 if no progress or a percentage of the progress
+*/
+DUCKDB_API double duckdb_query_progress(duckdb_connection connection);
+
+/*!
+Closes the specified connection and de-allocates all memory allocated for that connection.
+
+* connection: The connection to close.
+*/
+DUCKDB_API void duckdb_disconnect(duckdb_connection *connection);
+
+/*!
+Returns the version of the linked DuckDB, with a version postfix for dev versions
+
+Usually used for developing C extensions that must return this for a compatibility check.
+*/
+DUCKDB_API const char *duckdb_library_version();
+
+//===--------------------------------------------------------------------===//
+// Configuration
+//===--------------------------------------------------------------------===//
+/*!
+Initializes an empty configuration object that can be used to provide start-up options for the DuckDB instance
+through `duckdb_open_ext`.
+
+This will always succeed unless there is a malloc failure.
+
+* out_config: The result configuration object.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_create_config(duckdb_config *out_config);
+
+/*!
+This returns the total amount of configuration options available for usage with `duckdb_get_config_flag`.
+
+This should not be called in a loop as it internally loops over all the options.
+
+* returns: The amount of config options available.
+*/
+DUCKDB_API size_t duckdb_config_count();
+
+/*!
+Obtains a human-readable name and description of a specific configuration option. This can be used to e.g.
+display configuration options. This will succeed unless `index` is out of range (i.e. `>= duckdb_config_count`).
+
+The result name or description MUST NOT be freed.
+
+* index: The index of the configuration option (between 0 and `duckdb_config_count`)
+* out_name: A name of the configuration flag.
+* out_description: A description of the configuration flag.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_get_config_flag(size_t index, const char **out_name, const char **out_description);
+
+/*!
+Sets the specified option for the specified configuration. The configuration option is indicated by name.
+To obtain a list of config options, see `duckdb_get_config_flag`.
+
+In the source code, configuration options are defined in `config.cpp`.
+
+This can fail if either the name is invalid, or if the value provided for the option is invalid.
+
+* duckdb_config: The configuration object to set the option on.
+* name: The name of the configuration flag to set.
+* option: The value to set the configuration flag to.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_set_config(duckdb_config config, const char *name, const char *option);
+
+/*!
+Destroys the specified configuration option and de-allocates all memory allocated for the object.
+
+* config: The configuration object to destroy.
+*/
+DUCKDB_API void duckdb_destroy_config(duckdb_config *config);
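+
+/*
+Editor's sketch of the configuration flow declared above (option values are
+illustrative; error handling elided):
+
+    duckdb_config config;
+    duckdb_create_config(&config);
+    duckdb_set_config(config, "access_mode", "READ_ONLY");
+    duckdb_set_config(config, "threads", "4");
+    duckdb_database db;
+    char *open_error = NULL;
+    if (duckdb_open_ext("test.db", &db, config, &open_error) == DuckDBError) {
+        // inspect open_error, then duckdb_free(open_error)
+    }
+    duckdb_destroy_config(&config);
+*/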
+
+//===--------------------------------------------------------------------===//
+// Query Execution
+//===--------------------------------------------------------------------===//
+/*!
+Executes a SQL query within a connection and stores the full (materialized) result in the out_result pointer.
+If the query fails to execute, DuckDBError is returned and the error message can be retrieved by calling
+`duckdb_result_error`.
+
+Note that after running `duckdb_query`, `duckdb_destroy_result` must be called on the result object even if the
+query fails, otherwise the error stored within the result will not be freed correctly.
+
+* connection: The connection to perform the query in.
+* query: The SQL query to run.
+* out_result: The query result.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_query(duckdb_connection connection, const char *query, duckdb_result *out_result);
+
+/*!
+Closes the result and de-allocates all memory allocated for that result.
+
+* result: The result to destroy.
+*/
+DUCKDB_API void duckdb_destroy_result(duckdb_result *result);
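+
+/*
+Editor's sketch of the full query lifecycle (error handling elided):
+
+    duckdb_database db;
+    duckdb_connection con;
+    duckdb_result res;
+    duckdb_open(NULL, &db);      // NULL path -> in-memory database
+    duckdb_connect(db, &con);
+    duckdb_query(con, "SELECT 42", &res);
+    // ... read the result ...
+    duckdb_destroy_result(&res); // required even when the query failed
+    duckdb_disconnect(&con);
+    duckdb_close(&db);
+*/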
+
+/*!
+Returns the column name of the specified column. The result does not need to be freed; the column names will
+automatically be destroyed when the result is destroyed.
+
+Returns `NULL` if the column is out of range.
+
+* result: The result object to fetch the column name from.
+* col: The column index.
+* returns: The column name of the specified column.
+*/
+DUCKDB_API const char *duckdb_column_name(duckdb_result *result, idx_t col);
+
+/*!
+Returns the column type of the specified column.
+
+Returns `DUCKDB_TYPE_INVALID` if the column is out of range.
+
+* result: The result object to fetch the column type from.
+* col: The column index.
+* returns: The column type of the specified column.
+*/
+DUCKDB_API duckdb_type duckdb_column_type(duckdb_result *result, idx_t col);
+
+/*!
+Returns the logical column type of the specified column.
+
+The return type of this call should be destroyed with `duckdb_destroy_logical_type`.
+
+Returns `NULL` if the column is out of range.
+
+* result: The result object to fetch the column type from.
+* col: The column index.
+* returns: The logical column type of the specified column.
+*/
+DUCKDB_API duckdb_logical_type duckdb_column_logical_type(duckdb_result *result, idx_t col);
+
+/*!
+Returns the number of columns present in the result object.
+
+* result: The result object.
+* returns: The number of columns present in the result object.
+*/
+DUCKDB_API idx_t duckdb_column_count(duckdb_result *result);
+
+/*!
+Returns the number of rows present in the result object.
+
+* result: The result object.
+* returns: The number of rows present in the result object.
+*/
+DUCKDB_API idx_t duckdb_row_count(duckdb_result *result);
+
+/*!
+Returns the number of rows changed by the query stored in the result. This is relevant only for INSERT/UPDATE/DELETE
+queries. For other queries the rows_changed will be 0.
+
+* result: The result object.
+* returns: The number of rows changed.
+*/
+DUCKDB_API idx_t duckdb_rows_changed(duckdb_result *result);
+
+/*!
+**DEPRECATED**: Prefer using `duckdb_result_get_chunk` instead.
+
+Returns the data of a specific column of a result in columnar format.
+
+The function returns a dense array which contains the result data. The exact type stored in the array depends on the
+corresponding duckdb_type (as provided by `duckdb_column_type`). For the exact type by which the data should be
+accessed, see the comments in [the types section](types) or the `DUCKDB_TYPE` enum.
+
+For example, for a column of type `DUCKDB_TYPE_INTEGER`, rows can be accessed in the following manner:
+```c
+int32_t *data = (int32_t *) duckdb_column_data(&result, 0);
+printf("Data for row %d: %d\n", row, data[row]);
+```
+
+* result: The result object to fetch the column data from.
+* col: The column index.
+* returns: The column data of the specified column.
+*/
+DUCKDB_API void *duckdb_column_data(duckdb_result *result, idx_t col);
+
+/*!
+**DEPRECATED**: Prefer using `duckdb_result_get_chunk` instead.
+
+Returns the nullmask of a specific column of a result in columnar format. The nullmask indicates for every row
+whether or not the corresponding row is `NULL`. If a row is `NULL`, the values present in the array provided
+by `duckdb_column_data` are undefined.
+
+```c
+int32_t *data = (int32_t *) duckdb_column_data(&result, 0);
+bool *nullmask = duckdb_nullmask_data(&result, 0);
+if (nullmask[row]) {
+ printf("Data for row %d: NULL\n", row);
+} else {
+ printf("Data for row %d: %d\n", row, data[row]);
+}
+```
+
+* result: The result object to fetch the nullmask from.
+* col: The column index.
+* returns: The nullmask of the specified column.
+*/
+DUCKDB_API bool *duckdb_nullmask_data(duckdb_result *result, idx_t col);
+
+/*!
+Returns the error message contained within the result. The error is only set if `duckdb_query` returns `DuckDBError`.
+
+The result of this function must not be freed. It will be cleaned up when `duckdb_destroy_result` is called.
+
+* result: The result object to fetch the error from.
+* returns: The error of the result.
+*/
+DUCKDB_API const char *duckdb_result_error(duckdb_result *result);
+
+//===--------------------------------------------------------------------===//
+// Result Functions
+//===--------------------------------------------------------------------===//
+
+/*!
+Fetches a data chunk from the duckdb_result. This function should be called repeatedly until the result is exhausted.
+
+The result must be destroyed with `duckdb_destroy_data_chunk`.
+
+This function supersedes all `duckdb_value` functions, as well as the `duckdb_column_data` and `duckdb_nullmask_data`
+functions. It results in significantly better performance, and should be preferred in newer code-bases.
+
+If this function is used, none of the other result functions can be used and vice versa (i.e. this function cannot be
+mixed with the legacy result functions).
+
+Use `duckdb_result_chunk_count` to figure out how many chunks there are in the result.
+
+* result: The result object to fetch the data chunk from.
+* chunk_index: The chunk index to fetch from.
+* returns: The resulting data chunk. Returns `NULL` if the chunk index is out of bounds.
+*/
+DUCKDB_API duckdb_data_chunk duckdb_result_get_chunk(duckdb_result result, idx_t chunk_index);
+
+/*!
+Checks if the type of the internal result is StreamQueryResult.
+
+* result: The result object to check.
+* returns: Whether or not the result object is of the type StreamQueryResult
+*/
+DUCKDB_API bool duckdb_result_is_streaming(duckdb_result result);
+
+/*!
+Returns the number of data chunks present in the result.
+
+* result: The result object
+* returns: Number of data chunks present in the result.
+*/
+DUCKDB_API idx_t duckdb_result_chunk_count(duckdb_result result);
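+
+/*
+Editor's sketch of chunk-wise consumption; per the notes above, do not mix
+this with the legacy value/column functions on the same result:
+
+    idx_t n = duckdb_result_chunk_count(res);
+    for (idx_t i = 0; i < n; i++) {
+        duckdb_data_chunk chunk = duckdb_result_get_chunk(res, i);
+        // ... inspect the vectors in the chunk ...
+        duckdb_destroy_data_chunk(&chunk);
+    }
+*/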
+
+// Safe fetch functions
+// These functions will perform conversions if necessary.
+// On failure (e.g. if conversion cannot be performed or if the value is NULL) a default value is returned.
+// Note that these functions are slow since they perform bounds checking and conversion
+// For fast access of values prefer using `duckdb_result_get_chunk`
+
+/*!
+ * returns: The boolean value at the specified location, or false if the value cannot be converted.
+ */
+DUCKDB_API bool duckdb_value_boolean(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The int8_t value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API int8_t duckdb_value_int8(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The int16_t value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API int16_t duckdb_value_int16(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The int32_t value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API int32_t duckdb_value_int32(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The int64_t value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API int64_t duckdb_value_int64(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The duckdb_hugeint value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API duckdb_hugeint duckdb_value_hugeint(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The duckdb_decimal value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API duckdb_decimal duckdb_value_decimal(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The uint8_t value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API uint8_t duckdb_value_uint8(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The uint16_t value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API uint16_t duckdb_value_uint16(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The uint32_t value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API uint32_t duckdb_value_uint32(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The uint64_t value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API uint64_t duckdb_value_uint64(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The float value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API float duckdb_value_float(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The double value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API double duckdb_value_double(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The duckdb_date value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API duckdb_date duckdb_value_date(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The duckdb_time value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API duckdb_time duckdb_value_time(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The duckdb_timestamp value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API duckdb_timestamp duckdb_value_timestamp(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: The duckdb_interval value at the specified location, or 0 if the value cannot be converted.
+ */
+DUCKDB_API duckdb_interval duckdb_value_interval(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+* DEPRECATED: use duckdb_value_string instead. This function does not work correctly if the string contains null bytes.
+* returns: The text value at the specified location as a null-terminated string, or nullptr if the value cannot be
+converted. The result must be freed with `duckdb_free`.
+*/
+DUCKDB_API char *duckdb_value_varchar(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+* returns: The string value at the specified location.
+The result must be freed with `duckdb_free`.
+*/
+DUCKDB_API duckdb_string duckdb_value_string(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+* DEPRECATED: use duckdb_value_string_internal instead. This function does not work correctly if the string contains
+null bytes.
+* returns: The char* value at the specified location. ONLY works on VARCHAR columns and does not auto-cast.
+If the column is NOT a VARCHAR column this function will return NULL.
+
+The result must NOT be freed.
+*/
+DUCKDB_API char *duckdb_value_varchar_internal(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+* DEPRECATED: use duckdb_value_string_internal instead. This function does not work correctly if the string contains
+null bytes.
+* returns: The char* value at the specified location. ONLY works on VARCHAR columns and does not auto-cast.
+If the column is NOT a VARCHAR column this function will return NULL.
+
+The result must NOT be freed.
+*/
+DUCKDB_API duckdb_string duckdb_value_string_internal(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+* returns: The duckdb_blob value at the specified location. Returns a blob with blob.data set to nullptr if the
+value cannot be converted. The resulting "blob.data" must be freed with `duckdb_free`.
+*/
+DUCKDB_API duckdb_blob duckdb_value_blob(duckdb_result *result, idx_t col, idx_t row);
+
+/*!
+ * returns: Returns true if the value at the specified index is NULL, and false otherwise.
+ */
+DUCKDB_API bool duckdb_value_is_null(duckdb_result *result, idx_t col, idx_t row);
+
+//===--------------------------------------------------------------------===//
+// Helpers
+//===--------------------------------------------------------------------===//
+/*!
+Allocate `size` bytes of memory using the duckdb internal malloc function. Any memory allocated in this manner
+should be freed using `duckdb_free`.
+
+* size: The number of bytes to allocate.
+* returns: A pointer to the allocated memory region.
+*/
+DUCKDB_API void *duckdb_malloc(size_t size);
+
+/*!
+Free a value returned from `duckdb_malloc`, `duckdb_value_varchar` or `duckdb_value_blob`.
+
+* ptr: The memory region to de-allocate.
+*/
+DUCKDB_API void duckdb_free(void *ptr);
+
+/*!
+The internal vector size used by DuckDB.
+This is the amount of tuples that will fit into a data chunk created by `duckdb_create_data_chunk`.
+
+* returns: The vector size.
+*/
+DUCKDB_API idx_t duckdb_vector_size();
+
+/*!
+Whether or not the duckdb_string_t value is inlined.
+This means that the data of the string does not have a separate allocation.
+*/
+DUCKDB_API bool duckdb_string_is_inlined(duckdb_string_t string);
+
+//===--------------------------------------------------------------------===//
+// Date/Time/Timestamp Helpers
+//===--------------------------------------------------------------------===//
+/*!
+Decompose a `duckdb_date` object into year, month and date (stored as `duckdb_date_struct`).
+
+* date: The date object, as obtained from a `DUCKDB_TYPE_DATE` column.
+* returns: The `duckdb_date_struct` with the decomposed elements.
+*/
+DUCKDB_API duckdb_date_struct duckdb_from_date(duckdb_date date);
+
+/*!
+Re-compose a `duckdb_date` from year, month and date (`duckdb_date_struct`).
+
+* date: The year, month and date stored in a `duckdb_date_struct`.
+* returns: The `duckdb_date` element.
+*/
+DUCKDB_API duckdb_date duckdb_to_date(duckdb_date_struct date);
+
+/*!
+Decompose a `duckdb_time` object into hour, minute, second and microsecond (stored as `duckdb_time_struct`).
+
+* time: The time object, as obtained from a `DUCKDB_TYPE_TIME` column.
+* returns: The `duckdb_time_struct` with the decomposed elements.
+*/
+DUCKDB_API duckdb_time_struct duckdb_from_time(duckdb_time time);
+
+/*!
+Re-compose a `duckdb_time` from hour, minute, second and microsecond (`duckdb_time_struct`).
+
+* time: The hour, minute, second and microsecond in a `duckdb_time_struct`.
+* returns: The `duckdb_time` element.
+*/
+DUCKDB_API duckdb_time duckdb_to_time(duckdb_time_struct time);
+
+/*!
+Decompose a `duckdb_timestamp` object into a `duckdb_timestamp_struct`.
+
+* ts: The ts object, as obtained from a `DUCKDB_TYPE_TIMESTAMP` column.
+* returns: The `duckdb_timestamp_struct` with the decomposed elements.
+*/
+DUCKDB_API duckdb_timestamp_struct duckdb_from_timestamp(duckdb_timestamp ts);
+
+/*!
+Re-compose a `duckdb_timestamp` from a duckdb_timestamp_struct.
+
+* ts: The de-composed elements in a `duckdb_timestamp_struct`.
+* returns: The `duckdb_timestamp` element.
+*/
+DUCKDB_API duckdb_timestamp duckdb_to_timestamp(duckdb_timestamp_struct ts);
+
+//===--------------------------------------------------------------------===//
+// Hugeint Helpers
+//===--------------------------------------------------------------------===//
+/*!
+Converts a duckdb_hugeint object (as obtained from a `DUCKDB_TYPE_HUGEINT` column) into a double.
+
+* val: The hugeint value.
+* returns: The converted `double` element.
+*/
+DUCKDB_API double duckdb_hugeint_to_double(duckdb_hugeint val);
+
+/*!
+Converts a double value to a duckdb_hugeint object.
+
+If the conversion fails because the double value is too big the result will be 0.
+
+* val: The double value.
+* returns: The converted `duckdb_hugeint` element.
+*/
+DUCKDB_API duckdb_hugeint duckdb_double_to_hugeint(double val);
+
+/*!
+Converts a double value to a duckdb_decimal object.
+
+If the conversion fails because the double value is too big, or the width/scale are invalid the result will be 0.
+
+* val: The double value.
+* returns: The converted `duckdb_decimal` element.
+*/
+DUCKDB_API duckdb_decimal duckdb_double_to_decimal(double val, uint8_t width, uint8_t scale);
+
+//===--------------------------------------------------------------------===//
+// Decimal Helpers
+//===--------------------------------------------------------------------===//
+/*!
+Converts a duckdb_decimal object (as obtained from a `DUCKDB_TYPE_DECIMAL` column) into a double.
+
+* val: The decimal value.
+* returns: The converted `double` element.
+*/
+DUCKDB_API double duckdb_decimal_to_double(duckdb_decimal val);
+
+//===--------------------------------------------------------------------===//
+// Prepared Statements
+//===--------------------------------------------------------------------===//
+// A prepared statement is a parameterized query that allows you to bind parameters to it.
+// * This is useful to easily supply parameters to functions and avoid SQL injection attacks.
+// * This is useful to speed up queries that you will execute several times with different parameters.
+// Because the query will only be parsed, bound, optimized and planned once during the prepare stage,
+// rather than once per execution.
+// For example:
+// SELECT * FROM tbl WHERE id=?
+// Or a query with multiple parameters:
+// SELECT * FROM tbl WHERE id=$1 OR name=$2
+
+/*!
+Create a prepared statement object from a query.
+
+Note that after calling `duckdb_prepare`, the prepared statement should always be destroyed using
+`duckdb_destroy_prepare`, even if the prepare fails.
+
+If the prepare fails, `duckdb_prepare_error` can be called to obtain the reason why the prepare failed.
+
+* connection: The connection object
+* query: The SQL query to prepare
+* out_prepared_statement: The resulting prepared statement object
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_prepare(duckdb_connection connection, const char *query,
+ duckdb_prepared_statement *out_prepared_statement);
+
+/*!
+Closes the prepared statement and de-allocates all memory allocated for the statement.
+
+* prepared_statement: The prepared statement to destroy.
+*/
+DUCKDB_API void duckdb_destroy_prepare(duckdb_prepared_statement *prepared_statement);
+
+/*!
+Returns the error message associated with the given prepared statement.
+If the prepared statement has no error message, this returns `nullptr` instead.
+
+The error message should not be freed. It will be de-allocated when `duckdb_destroy_prepare` is called.
+
+* prepared_statement: The prepared statement to obtain the error from.
+* returns: The error message, or `nullptr` if there is none.
+*/
+DUCKDB_API const char *duckdb_prepare_error(duckdb_prepared_statement prepared_statement);
+
+/*!
+Returns the number of parameters that can be provided to the given prepared statement.
+
+Returns 0 if the query was not successfully prepared.
+
+* prepared_statement: The prepared statement to obtain the number of parameters for.
+*/
+DUCKDB_API idx_t duckdb_nparams(duckdb_prepared_statement prepared_statement);
+
+/*!
+Returns the name used to identify the parameter.
+The returned string should be freed using `duckdb_free`.
+
+Returns NULL if the index is out of range for the provided prepared statement.
+
+* prepared_statement: The prepared statement for which to get the parameter name from.
+*/
+DUCKDB_API const char *duckdb_parameter_name(duckdb_prepared_statement prepared_statement, idx_t index);
+
+/*!
+Returns the parameter type for the parameter at the given index.
+
+Returns `DUCKDB_TYPE_INVALID` if the parameter index is out of range or the statement was not successfully prepared.
+
+* prepared_statement: The prepared statement.
+* param_idx: The parameter index.
+* returns: The parameter type
+*/
+DUCKDB_API duckdb_type duckdb_param_type(duckdb_prepared_statement prepared_statement, idx_t param_idx);
+
+/*!
+Clears the parameters bound to the prepared statement.
+*/
+DUCKDB_API duckdb_state duckdb_clear_bindings(duckdb_prepared_statement prepared_statement);
+
+/*!
+Binds a value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_value(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ duckdb_value val);
+
+/*!
+Retrieves the index of the parameter for the prepared statement, identified by name.
+*/
+DUCKDB_API duckdb_state duckdb_bind_parameter_index(duckdb_prepared_statement prepared_statement, idx_t *param_idx_out,
+ const char *name);
+
+/*!
+Binds a bool value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_boolean(duckdb_prepared_statement prepared_statement, idx_t param_idx, bool val);
+
+/*!
+Binds an int8_t value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_int8(duckdb_prepared_statement prepared_statement, idx_t param_idx, int8_t val);
+
+/*!
+Binds an int16_t value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_int16(duckdb_prepared_statement prepared_statement, idx_t param_idx, int16_t val);
+
+/*!
+Binds an int32_t value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_int32(duckdb_prepared_statement prepared_statement, idx_t param_idx, int32_t val);
+
+/*!
+Binds an int64_t value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_int64(duckdb_prepared_statement prepared_statement, idx_t param_idx, int64_t val);
+
+/*!
+Binds a duckdb_hugeint value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_hugeint(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ duckdb_hugeint val);
+/*!
+Binds a duckdb_decimal value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_decimal(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ duckdb_decimal val);
+
+/*!
+Binds an uint8_t value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_uint8(duckdb_prepared_statement prepared_statement, idx_t param_idx, uint8_t val);
+
+/*!
+Binds an uint16_t value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_uint16(duckdb_prepared_statement prepared_statement, idx_t param_idx, uint16_t val);
+
+/*!
+Binds an uint32_t value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_uint32(duckdb_prepared_statement prepared_statement, idx_t param_idx, uint32_t val);
+
+/*!
+Binds an uint64_t value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_uint64(duckdb_prepared_statement prepared_statement, idx_t param_idx, uint64_t val);
+
+/*!
+Binds a float value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_float(duckdb_prepared_statement prepared_statement, idx_t param_idx, float val);
+
+/*!
+Binds a double value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_double(duckdb_prepared_statement prepared_statement, idx_t param_idx, double val);
+
+/*!
+Binds a duckdb_date value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_date(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ duckdb_date val);
+
+/*!
+Binds a duckdb_time value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_time(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ duckdb_time val);
+
+/*!
+Binds a duckdb_timestamp value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_timestamp(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ duckdb_timestamp val);
+
+/*!
+Binds a duckdb_interval value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_interval(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ duckdb_interval val);
+
+/*!
+Binds a null-terminated varchar value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_varchar(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ const char *val);
+
+/*!
+Binds a varchar value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_varchar_length(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ const char *val, idx_t length);
+
+/*!
+Binds a blob value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_blob(duckdb_prepared_statement prepared_statement, idx_t param_idx,
+ const void *data, idx_t length);
+
+/*!
+Binds a NULL value to the prepared statement at the specified index.
+*/
+DUCKDB_API duckdb_state duckdb_bind_null(duckdb_prepared_statement prepared_statement, idx_t param_idx);
+
+/*!
+Executes the prepared statement with the given bound parameters, and returns a materialized query result.
+
+This method can be called multiple times for each prepared statement, and the parameters can be modified
+between calls to this function.
+
+* prepared_statement: The prepared statement to execute.
+* out_result: The query result.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_execute_prepared(duckdb_prepared_statement prepared_statement,
+ duckdb_result *out_result);
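+
+/*
+Editor's sketch of the prepare/bind/execute cycle (error handling elided):
+
+    duckdb_prepared_statement stmt;
+    duckdb_result res;
+    duckdb_prepare(con, "SELECT * FROM tbl WHERE id=?", &stmt);
+    duckdb_bind_int32(stmt, 1, 42);
+    duckdb_execute_prepared(stmt, &res);
+    duckdb_destroy_result(&res);
+    // parameters may be rebound and the statement executed again; finally:
+    duckdb_destroy_prepare(&stmt);
+*/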
+
+/*!
+Executes the prepared statement with the given bound parameters, and returns an arrow query result.
+
+* prepared_statement: The prepared statement to execute.
+* out_result: The query result.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_execute_prepared_arrow(duckdb_prepared_statement prepared_statement,
+ duckdb_arrow *out_result);
+
+/*!
+Scans the Arrow stream and creates a view with the given name.
+
+* connection: The connection on which to execute the scan.
+* table_name: Name of the temporary view to create.
+* arrow: Arrow stream wrapper.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_arrow_scan(duckdb_connection connection, const char *table_name,
+ duckdb_arrow_stream arrow);
+
+/*!
+Scans the Arrow array and creates a view with the given name.
+
+* connection: The connection on which to execute the scan.
+* table_name: Name of the temporary view to create.
+* arrow_schema: Arrow schema wrapper.
+* arrow_array: Arrow array wrapper.
+* out_stream: Output array stream that wraps around the passed schema, for releasing/deleting once done.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_arrow_array_scan(duckdb_connection connection, const char *table_name,
+ duckdb_arrow_schema arrow_schema, duckdb_arrow_array arrow_array,
+ duckdb_arrow_stream *out_stream);
+
+//===--------------------------------------------------------------------===//
+// Extract Statements
+//===--------------------------------------------------------------------===//
+// A query string can be extracted into multiple SQL statements. Each statement can be prepared and executed separately.
+
+/*!
+Extract all statements from a query.
+Note that after calling `duckdb_extract_statements`, the extracted statements should always be destroyed using
+`duckdb_destroy_extracted`, even if no statements were extracted.
+If the extract fails, `duckdb_extract_statements_error` can be called to obtain the reason why the extract failed.
+* connection: The connection object
+* query: The SQL query to extract
+* out_extracted_statements: The resulting extracted statements object
+* returns: The number of extracted statements or 0 on failure.
+*/
+DUCKDB_API idx_t duckdb_extract_statements(duckdb_connection connection, const char *query,
+ duckdb_extracted_statements *out_extracted_statements);
+
+/*!
+Prepare an extracted statement.
+Note that after calling `duckdb_prepare_extracted_statement`, the prepared statement should always be destroyed using
+`duckdb_destroy_prepare`, even if the prepare fails.
+If the prepare fails, `duckdb_prepare_error` can be called to obtain the reason why the prepare failed.
+* connection: The connection object
+* extracted_statements: The extracted statements object
+* index: The index of the extracted statement to prepare
+* out_prepared_statement: The resulting prepared statement object
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_prepare_extracted_statement(duckdb_connection connection,
+ duckdb_extracted_statements extracted_statements,
+ idx_t index,
+ duckdb_prepared_statement *out_prepared_statement);
+/*!
+Returns the error message contained within the extracted statements.
+The result of this function must not be freed. It will be cleaned up when `duckdb_destroy_extracted` is called.
+* result: The extracted statements to fetch the error from.
+* returns: The error of the extracted statements.
+*/
+DUCKDB_API const char *duckdb_extract_statements_error(duckdb_extracted_statements extracted_statements);
+
+/*!
+De-allocates all memory allocated for the extracted statements.
+* extracted_statements: The extracted statements to destroy.
+*/
+DUCKDB_API void duckdb_destroy_extracted(duckdb_extracted_statements *extracted_statements);
+
+//===--------------------------------------------------------------------===//
+// Pending Result Interface
+//===--------------------------------------------------------------------===//
+/*!
+Executes the prepared statement with the given bound parameters, and returns a pending result.
+The pending result represents an intermediate structure for a query that is not yet fully executed.
+The pending result can be used to incrementally execute a query, returning control to the client between tasks.
+
+Note that after calling `duckdb_pending_prepared`, the pending result should always be destroyed using
+`duckdb_destroy_pending`, even if this function returns DuckDBError.
+
+* prepared_statement: The prepared statement to execute.
+* out_result: The pending query result.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_pending_prepared(duckdb_prepared_statement prepared_statement,
+ duckdb_pending_result *out_result);
+
+/*!
+Executes the prepared statement with the given bound parameters, and returns a pending result.
+This pending result will create a streaming duckdb_result when executed.
+The pending result represents an intermediate structure for a query that is not yet fully executed.
+
+Note that after calling `duckdb_pending_prepared_streaming`, the pending result should always be destroyed using
+`duckdb_destroy_pending`, even if this function returns DuckDBError.
+
+* prepared_statement: The prepared statement to execute.
+* out_result: The pending query result.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_pending_prepared_streaming(duckdb_prepared_statement prepared_statement,
+ duckdb_pending_result *out_result);
+
+/*!
+Closes the pending result and de-allocates all memory allocated for the result.
+
+* pending_result: The pending result to destroy.
+*/
+DUCKDB_API void duckdb_destroy_pending(duckdb_pending_result *pending_result);
+
+/*!
+Returns the error message contained within the pending result.
+
+The result of this function must not be freed. It will be cleaned up when `duckdb_destroy_pending` is called.
+
+* result: The pending result to fetch the error from.
+* returns: The error of the pending result.
+*/
+DUCKDB_API const char *duckdb_pending_error(duckdb_pending_result pending_result);
+
+/*!
+Executes a single task within the query, returning whether or not the query is ready.
+
+If this returns DUCKDB_PENDING_RESULT_READY, the duckdb_execute_pending function can be called to obtain the result.
+If this returns DUCKDB_PENDING_RESULT_NOT_READY, the duckdb_pending_execute_task function should be called again.
+If this returns DUCKDB_PENDING_ERROR, an error occurred during execution.
+
+The error message can be obtained by calling duckdb_pending_error on the pending_result.
+
+* pending_result: The pending result to execute a task within.
+* returns: The state of the pending result after the execution.
+*/
+DUCKDB_API duckdb_pending_state duckdb_pending_execute_task(duckdb_pending_result pending_result);
+
+/*!
+Fully execute a pending query result, returning the final query result.
+
+If duckdb_pending_execute_task has been called until DUCKDB_PENDING_RESULT_READY was returned, this will return fast.
+Otherwise, all remaining tasks must be executed first.
+
+* pending_result: The pending result to execute.
+* out_result: The result object.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_execute_pending(duckdb_pending_result pending_result, duckdb_result *out_result);
+
+/*!
+Returns whether a duckdb_pending_state is finished executing. For example if `pending_state` is
+DUCKDB_PENDING_RESULT_READY, this function will return true.
+
+* pending_state: The pending state on which to decide whether to finish execution.
+* returns: Boolean indicating pending execution should be considered finished.
+*/
+DUCKDB_API bool duckdb_pending_execution_is_finished(duckdb_pending_state pending_state);
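+
+/*
+Illustrative sketch (not part of the API): incrementally executing a prepared
+statement `prepared` (assumed to exist), returning control to the client
+between tasks.
+
+    duckdb_pending_result pending;
+    if (duckdb_pending_prepared(prepared, &pending) == DuckDBError) {
+        fprintf(stderr, "%s\n", duckdb_pending_error(pending));
+    }
+    duckdb_pending_state state;
+    while ((state = duckdb_pending_execute_task(pending)) == DUCKDB_PENDING_RESULT_NOT_READY) {
+        // yield to the client here between tasks
+    }
+    if (state == DUCKDB_PENDING_RESULT_READY) {
+        duckdb_result result;
+        duckdb_execute_pending(pending, &result);
+        // ... consume the result ...
+        duckdb_destroy_result(&result);
+    }
+    duckdb_destroy_pending(&pending); // always, even on error
+*/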
+
+//===--------------------------------------------------------------------===//
+// Value Interface
+//===--------------------------------------------------------------------===//
+/*!
+Destroys the value and de-allocates all memory allocated for that type.
+
+* value: The value to destroy.
+*/
+DUCKDB_API void duckdb_destroy_value(duckdb_value *value);
+
+/*!
+Creates a value from a null-terminated string.
+
+* text: The null-terminated string
+* returns: The value. This must be destroyed with `duckdb_destroy_value`.
+*/
+DUCKDB_API duckdb_value duckdb_create_varchar(const char *text);
+
+/*!
+Creates a value from a string of a given length.
+
+* text: The text
+* length: The length of the text
+* returns: The value. This must be destroyed with `duckdb_destroy_value`.
+*/
+DUCKDB_API duckdb_value duckdb_create_varchar_length(const char *text, idx_t length);
+
+/*!
+Creates a value from an int64.
+
+* val: The bigint value
+* returns: The value. This must be destroyed with `duckdb_destroy_value`.
+*/
+DUCKDB_API duckdb_value duckdb_create_int64(int64_t val);
+
+/*!
+Obtains a string representation of the given value.
+The result must be destroyed with `duckdb_free`.
+
+* value: The value
+* returns: The string value. This must be destroyed with `duckdb_free`.
+*/
+DUCKDB_API char *duckdb_get_varchar(duckdb_value value);
+
+/*!
+Obtains an int64 of the given value.
+
+* value: The value
+* returns: The int64 value, or 0 if no conversion is possible
+*/
+DUCKDB_API int64_t duckdb_get_int64(duckdb_value value);
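+
+/*
+Illustrative sketch: round-tripping a string through a duckdb_value. The
+string returned by duckdb_get_varchar is a copy owned by the caller.
+
+    duckdb_value v = duckdb_create_varchar("hello");
+    char *s = duckdb_get_varchar(v);
+    // ... use s ...
+    duckdb_free(s);
+    duckdb_destroy_value(&v);
+*/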
+
+//===--------------------------------------------------------------------===//
+// Logical Type Interface
+//===--------------------------------------------------------------------===//
+
+/*!
+Creates a `duckdb_logical_type` from a standard primitive type.
+The resulting type should be destroyed with `duckdb_destroy_logical_type`.
+
+This should not be used with `DUCKDB_TYPE_DECIMAL`.
+
+* type: The primitive type to create.
+* returns: The logical type.
+*/
+DUCKDB_API duckdb_logical_type duckdb_create_logical_type(duckdb_type type);
+
+/*!
+Creates a list type from its child type.
+The resulting type should be destroyed with `duckdb_destroy_logical_type`.
+
+* type: The child type of the list type to create.
+* returns: The logical type.
+*/
+DUCKDB_API duckdb_logical_type duckdb_create_list_type(duckdb_logical_type type);
+
+/*!
+Creates a map type from its key type and value type.
+The resulting type should be destroyed with `duckdb_destroy_logical_type`.
+
+* key_type: The key type of the map type to create.
+* value_type: The value type of the map type to create.
+* returns: The logical type.
+*/
+DUCKDB_API duckdb_logical_type duckdb_create_map_type(duckdb_logical_type key_type, duckdb_logical_type value_type);
+
+/*!
+Creates a UNION type from the passed arrays of member types and names.
+The resulting type should be destroyed with `duckdb_destroy_logical_type`.
+
+* member_types: The array of types that the union should consist of.
+* member_names: The array of names of the union members.
+* member_count: The number of members that were specified for both arrays.
+* returns: The logical type.
+*/
+DUCKDB_API duckdb_logical_type duckdb_create_union_type(duckdb_logical_type member_types, const char **member_names,
+ idx_t member_count);
+
+/*!
+Creates a STRUCT type from the passed member name and type arrays.
+The resulting type should be destroyed with `duckdb_destroy_logical_type`.
+
+* member_types: The array of types that the struct should consist of.
+* member_names: The array of names that the struct should consist of.
+* member_count: The number of members that were specified for both arrays.
+* returns: The logical type.
+*/
+DUCKDB_API duckdb_logical_type duckdb_create_struct_type(duckdb_logical_type *member_types, const char **member_names,
+ idx_t member_count);
+
+/*!
+Creates a `duckdb_logical_type` of type decimal with the specified width and scale.
+The resulting type should be destroyed with `duckdb_destroy_logical_type`.
+
+* width: The width of the decimal type
+* scale: The scale of the decimal type
+* returns: The logical type.
+*/
+DUCKDB_API duckdb_logical_type duckdb_create_decimal_type(uint8_t width, uint8_t scale);
+
+/*!
+Retrieves the type class of a `duckdb_logical_type`.
+
+* type: The logical type object
+* returns: The type id
+*/
+DUCKDB_API duckdb_type duckdb_get_type_id(duckdb_logical_type type);
+
+/*!
+Retrieves the width of a decimal type.
+
+* type: The logical type object
+* returns: The width of the decimal type
+*/
+DUCKDB_API uint8_t duckdb_decimal_width(duckdb_logical_type type);
+
+/*!
+Retrieves the scale of a decimal type.
+
+* type: The logical type object
+* returns: The scale of the decimal type
+*/
+DUCKDB_API uint8_t duckdb_decimal_scale(duckdb_logical_type type);
+
+/*!
+Retrieves the internal storage type of a decimal type.
+
+* type: The logical type object
+* returns: The internal type of the decimal type
+*/
+DUCKDB_API duckdb_type duckdb_decimal_internal_type(duckdb_logical_type type);
+
+/*!
+Retrieves the internal storage type of an enum type.
+
+* type: The logical type object
+* returns: The internal type of the enum type
+*/
+DUCKDB_API duckdb_type duckdb_enum_internal_type(duckdb_logical_type type);
+
+/*!
+Retrieves the dictionary size of the enum type
+
+* type: The logical type object
+* returns: The dictionary size of the enum type
+*/
+DUCKDB_API uint32_t duckdb_enum_dictionary_size(duckdb_logical_type type);
+
+/*!
+Retrieves the dictionary value at the specified position from the enum.
+
+The result must be freed with `duckdb_free`
+
+* type: The logical type object
+* index: The index in the dictionary
+* returns: The string value of the enum type. Must be freed with `duckdb_free`.
+*/
+DUCKDB_API char *duckdb_enum_dictionary_value(duckdb_logical_type type, idx_t index);
+
+/*!
+Retrieves the child type of the given list type.
+
+The result must be freed with `duckdb_destroy_logical_type`
+
+* type: The logical type object
+* returns: The child type of the list type. Must be destroyed with `duckdb_destroy_logical_type`.
+*/
+DUCKDB_API duckdb_logical_type duckdb_list_type_child_type(duckdb_logical_type type);
+
+/*!
+Retrieves the key type of the given map type.
+
+The result must be freed with `duckdb_destroy_logical_type`
+
+* type: The logical type object
+* returns: The key type of the map type. Must be destroyed with `duckdb_destroy_logical_type`.
+*/
+DUCKDB_API duckdb_logical_type duckdb_map_type_key_type(duckdb_logical_type type);
+
+/*!
+Retrieves the value type of the given map type.
+
+The result must be freed with `duckdb_destroy_logical_type`
+
+* type: The logical type object
+* returns: The value type of the map type. Must be destroyed with `duckdb_destroy_logical_type`.
+*/
+DUCKDB_API duckdb_logical_type duckdb_map_type_value_type(duckdb_logical_type type);
+
+/*!
+Returns the number of children of a struct type.
+
+* type: The logical type object
+* returns: The number of children of a struct type.
+*/
+DUCKDB_API idx_t duckdb_struct_type_child_count(duckdb_logical_type type);
+
+/*!
+Retrieves the name of the struct child.
+
+The result must be freed with `duckdb_free`
+
+* type: The logical type object
+* index: The child index
+* returns: The name of the struct type. Must be freed with `duckdb_free`.
+*/
+DUCKDB_API char *duckdb_struct_type_child_name(duckdb_logical_type type, idx_t index);
+
+/*!
+Retrieves the child type of the given struct type at the specified index.
+
+The result must be freed with `duckdb_destroy_logical_type`
+
+* type: The logical type object
+* index: The child index
+* returns: The child type of the struct type. Must be destroyed with `duckdb_destroy_logical_type`.
+*/
+DUCKDB_API duckdb_logical_type duckdb_struct_type_child_type(duckdb_logical_type type, idx_t index);
+
+/*!
+Returns the number of members that the union type has.
+
+* type: The logical type (union) object
+* returns: The number of members of a union type.
+*/
+DUCKDB_API idx_t duckdb_union_type_member_count(duckdb_logical_type type);
+
+/*!
+Retrieves the name of the union member.
+
+The result must be freed with `duckdb_free`
+
+* type: The logical type object
+* index: The child index
+* returns: The name of the union member. Must be freed with `duckdb_free`.
+*/
+DUCKDB_API char *duckdb_union_type_member_name(duckdb_logical_type type, idx_t index);
+
+/*!
+Retrieves the child type of the given union member at the specified index.
+
+The result must be freed with `duckdb_destroy_logical_type`
+
+* type: The logical type object
+* index: The child index
+* returns: The child type of the union member. Must be destroyed with `duckdb_destroy_logical_type`.
+*/
+DUCKDB_API duckdb_logical_type duckdb_union_type_member_type(duckdb_logical_type type, idx_t index);
+
+/*!
+Destroys the logical type and de-allocates all memory allocated for that type.
+
+* type: The logical type to destroy.
+*/
+DUCKDB_API void duckdb_destroy_logical_type(duckdb_logical_type *type);
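+
+/*
+Illustrative sketch: building an INTEGER[] (list of integers) type. This
+assumes duckdb_create_list_type takes its own reference to the child type,
+so both handles are destroyed independently.
+
+    duckdb_logical_type child = duckdb_create_logical_type(DUCKDB_TYPE_INTEGER);
+    duckdb_logical_type list = duckdb_create_list_type(child);
+    // ... use list ...
+    duckdb_destroy_logical_type(&child);
+    duckdb_destroy_logical_type(&list);
+*/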
+
+//===--------------------------------------------------------------------===//
+// Data Chunk Interface
+//===--------------------------------------------------------------------===//
+/*!
+Creates an empty DataChunk with the specified set of types.
+
+* types: An array of types of the data chunk.
+* column_count: The number of columns.
+* returns: The data chunk.
+*/
+DUCKDB_API duckdb_data_chunk duckdb_create_data_chunk(duckdb_logical_type *types, idx_t column_count);
+
+/*!
+Destroys the data chunk and de-allocates all memory allocated for that chunk.
+
+* chunk: The data chunk to destroy.
+*/
+DUCKDB_API void duckdb_destroy_data_chunk(duckdb_data_chunk *chunk);
+
+/*!
+Resets a data chunk, clearing the validity masks and setting the cardinality of the data chunk to 0.
+
+* chunk: The data chunk to reset.
+*/
+DUCKDB_API void duckdb_data_chunk_reset(duckdb_data_chunk chunk);
+
+/*!
+Retrieves the number of columns in a data chunk.
+
+* chunk: The data chunk to get the data from
+* returns: The number of columns in the data chunk
+*/
+DUCKDB_API idx_t duckdb_data_chunk_get_column_count(duckdb_data_chunk chunk);
+
+/*!
+Retrieves the vector at the specified column index in the data chunk.
+
+The pointer to the vector is valid for as long as the chunk is alive.
+It does NOT need to be destroyed.
+
+* chunk: The data chunk to get the data from
+* returns: The vector
+*/
+DUCKDB_API duckdb_vector duckdb_data_chunk_get_vector(duckdb_data_chunk chunk, idx_t col_idx);
+
+/*!
+Retrieves the current number of tuples in a data chunk.
+
+* chunk: The data chunk to get the data from
+* returns: The number of tuples in the data chunk
+*/
+DUCKDB_API idx_t duckdb_data_chunk_get_size(duckdb_data_chunk chunk);
+
+/*!
+Sets the current number of tuples in a data chunk.
+
+* chunk: The data chunk to set the size in
+* size: The number of tuples in the data chunk
+*/
+DUCKDB_API void duckdb_data_chunk_set_size(duckdb_data_chunk chunk, idx_t size);
+
+//===--------------------------------------------------------------------===//
+// Vector Interface
+//===--------------------------------------------------------------------===//
+/*!
+Retrieves the column type of the specified vector.
+
+The result must be destroyed with `duckdb_destroy_logical_type`.
+
+* vector: The vector to get the data from
+* returns: The type of the vector
+*/
+DUCKDB_API duckdb_logical_type duckdb_vector_get_column_type(duckdb_vector vector);
+
+/*!
+Retrieves the data pointer of the vector.
+
+The data pointer can be used to read or write values from the vector.
+How to read or write values depends on the type of the vector.
+
+* vector: The vector to get the data from
+* returns: The data pointer
+*/
+DUCKDB_API void *duckdb_vector_get_data(duckdb_vector vector);
+
+/*!
+Retrieves the validity mask pointer of the specified vector.
+
+If all values are valid, this function MIGHT return NULL!
+
+The validity mask is a bitset that signifies null-ness within the data chunk.
+It is a series of uint64_t values, where each uint64_t value contains validity for 64 tuples.
+The bit is set to 1 if the value is valid (i.e. not NULL) or 0 if the value is invalid (i.e. NULL).
+
+Validity of a specific value can be obtained like this:
+
+idx_t entry_idx = row_idx / 64;
+idx_t idx_in_entry = row_idx % 64;
+bool is_valid = validity_mask[entry_idx] & ((uint64_t)1 << idx_in_entry);
+
+Alternatively, the (slower) duckdb_validity_row_is_valid function can be used.
+
+* vector: The vector to get the data from
+* returns: The pointer to the validity mask, or NULL if no validity mask is present
+*/
+DUCKDB_API uint64_t *duckdb_vector_get_validity(duckdb_vector vector);
+
+/*!
+Ensures the validity mask is writable by allocating it.
+
+After this function is called, `duckdb_vector_get_validity` will ALWAYS return non-NULL.
+This allows null values to be written to the vector, regardless of whether a validity mask was present before.
+
+* vector: The vector to alter
+*/
+DUCKDB_API void duckdb_vector_ensure_validity_writable(duckdb_vector vector);
+
+/*!
+Assigns a string element in the vector at the specified location.
+
+* vector: The vector to alter
+* index: The row position in the vector to assign the string to
+* str: The null-terminated string
+*/
+DUCKDB_API void duckdb_vector_assign_string_element(duckdb_vector vector, idx_t index, const char *str);
+
+/*!
+Assigns a string element in the vector at the specified location.
+
+* vector: The vector to alter
+* index: The row position in the vector to assign the string to
+* str: The string
+* str_len: The length of the string (in bytes)
+*/
+DUCKDB_API void duckdb_vector_assign_string_element_len(duckdb_vector vector, idx_t index, const char *str,
+ idx_t str_len);
+
+/*!
+Retrieves the child vector of a list vector.
+
+The resulting vector is valid as long as the parent vector is valid.
+
+* vector: The vector
+* returns: The child vector
+*/
+DUCKDB_API duckdb_vector duckdb_list_vector_get_child(duckdb_vector vector);
+
+/*!
+Returns the size of the child vector of the list
+
+* vector: The vector
+* returns: The size of the child list
+*/
+DUCKDB_API idx_t duckdb_list_vector_get_size(duckdb_vector vector);
+
+/*!
+Sets the total size of the underlying child-vector of a list vector.
+
+* vector: The list vector.
+* size: The size of the child list.
+* returns: The duckdb state. Returns DuckDBError if the vector is nullptr.
+*/
+DUCKDB_API duckdb_state duckdb_list_vector_set_size(duckdb_vector vector, idx_t size);
+
+/*!
+Sets the total capacity of the underlying child-vector of a list.
+
+* vector: The list vector.
+* required_capacity: the total capacity to reserve.
+* returns: The duckdb state. Returns DuckDBError if the vector is nullptr.
+*/
+DUCKDB_API duckdb_state duckdb_list_vector_reserve(duckdb_vector vector, idx_t required_capacity);
+
+/*!
+Retrieves the child vector of a struct vector.
+
+The resulting vector is valid as long as the parent vector is valid.
+
+* vector: The vector
+* index: The child index
+* returns: The child vector
+*/
+DUCKDB_API duckdb_vector duckdb_struct_vector_get_child(duckdb_vector vector, idx_t index);
+
+//===--------------------------------------------------------------------===//
+// Validity Mask Functions
+//===--------------------------------------------------------------------===//
+/*!
+Returns whether or not a row is valid (i.e. not NULL) in the given validity mask.
+
+* validity: The validity mask, as obtained through `duckdb_vector_get_validity`
+* row: The row index
+* returns: true if the row is valid, false otherwise
+*/
+DUCKDB_API bool duckdb_validity_row_is_valid(uint64_t *validity, idx_t row);
+
+/*!
+In a validity mask, sets a specific row to either valid or invalid.
+
+Note that `duckdb_vector_ensure_validity_writable` should be called before calling `duckdb_vector_get_validity`,
+to ensure that there is a validity mask to write to.
+
+* validity: The validity mask, as obtained through `duckdb_vector_get_validity`.
+* row: The row index
+* valid: Whether or not to set the row to valid, or invalid
+*/
+DUCKDB_API void duckdb_validity_set_row_validity(uint64_t *validity, idx_t row, bool valid);
+
+/*!
+In a validity mask, sets a specific row to invalid.
+
+Equivalent to `duckdb_validity_set_row_validity` with valid set to false.
+
+* validity: The validity mask
+* row: The row index
+*/
+DUCKDB_API void duckdb_validity_set_row_invalid(uint64_t *validity, idx_t row);
+
+/*!
+In a validity mask, sets a specific row to valid.
+
+Equivalent to `duckdb_validity_set_row_validity` with valid set to true.
+
+* validity: The validity mask
+* row: The row index
+*/
+DUCKDB_API void duckdb_validity_set_row_valid(uint64_t *validity, idx_t row);
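+
+/*
+Illustrative sketch tying the data chunk, vector and validity functions
+together: filling a single-column BIGINT chunk with one value and one NULL.
+
+    duckdb_logical_type types[1];
+    types[0] = duckdb_create_logical_type(DUCKDB_TYPE_BIGINT);
+    duckdb_data_chunk chunk = duckdb_create_data_chunk(types, 1);
+    duckdb_destroy_logical_type(&types[0]);
+
+    duckdb_vector vec = duckdb_data_chunk_get_vector(chunk, 0);
+    int64_t *data = (int64_t *)duckdb_vector_get_data(vec);
+    data[0] = 42;
+    duckdb_vector_ensure_validity_writable(vec);
+    uint64_t *validity = duckdb_vector_get_validity(vec);
+    duckdb_validity_set_row_invalid(validity, 1); // row 1 is NULL
+    duckdb_data_chunk_set_size(chunk, 2);
+
+    // ... e.g. pass the chunk to duckdb_append_data_chunk ...
+    duckdb_destroy_data_chunk(&chunk);
+*/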
+
+//===--------------------------------------------------------------------===//
+// Table Functions
+//===--------------------------------------------------------------------===//
+typedef void *duckdb_table_function;
+typedef void *duckdb_bind_info;
+typedef void *duckdb_init_info;
+typedef void *duckdb_function_info;
+
+typedef void (*duckdb_table_function_bind_t)(duckdb_bind_info info);
+typedef void (*duckdb_table_function_init_t)(duckdb_init_info info);
+typedef void (*duckdb_table_function_t)(duckdb_function_info info, duckdb_data_chunk output);
+typedef void (*duckdb_delete_callback_t)(void *data);
+
+/*!
+Creates a new empty table function.
+
+The return value should be destroyed with `duckdb_destroy_table_function`.
+
+* returns: The table function object.
+*/
+DUCKDB_API duckdb_table_function duckdb_create_table_function();
+
+/*!
+Destroys the given table function object.
+
+* table_function: The table function to destroy
+*/
+DUCKDB_API void duckdb_destroy_table_function(duckdb_table_function *table_function);
+
+/*!
+Sets the name of the given table function.
+
+* table_function: The table function
+* name: The name of the table function
+*/
+DUCKDB_API void duckdb_table_function_set_name(duckdb_table_function table_function, const char *name);
+
+/*!
+Adds a parameter to the table function.
+
+* table_function: The table function
+* type: The type of the parameter to add.
+*/
+DUCKDB_API void duckdb_table_function_add_parameter(duckdb_table_function table_function, duckdb_logical_type type);
+
+/*!
+Adds a named parameter to the table function.
+
+* table_function: The table function
+* name: The name of the parameter
+* type: The type of the parameter to add.
+*/
+DUCKDB_API void duckdb_table_function_add_named_parameter(duckdb_table_function table_function, const char *name,
+ duckdb_logical_type type);
+
+/*!
+Assigns extra information to the table function that can be fetched during binding, etc.
+
+* table_function: The table function
+* extra_info: The extra information
+* destroy: The callback that will be called to destroy the bind data (if any)
+*/
+DUCKDB_API void duckdb_table_function_set_extra_info(duckdb_table_function table_function, void *extra_info,
+ duckdb_delete_callback_t destroy);
+
+/*!
+Sets the bind function of the table function
+
+* table_function: The table function
+* bind: The bind function
+*/
+DUCKDB_API void duckdb_table_function_set_bind(duckdb_table_function table_function, duckdb_table_function_bind_t bind);
+
+/*!
+Sets the init function of the table function
+
+* table_function: The table function
+* init: The init function
+*/
+DUCKDB_API void duckdb_table_function_set_init(duckdb_table_function table_function, duckdb_table_function_init_t init);
+
+/*!
+Sets the thread-local init function of the table function
+
+* table_function: The table function
+* init: The init function
+*/
+DUCKDB_API void duckdb_table_function_set_local_init(duckdb_table_function table_function,
+ duckdb_table_function_init_t init);
+
+/*!
+Sets the main function of the table function
+
+* table_function: The table function
+* function: The function
+*/
+DUCKDB_API void duckdb_table_function_set_function(duckdb_table_function table_function,
+ duckdb_table_function_t function);
+
+/*!
+Sets whether or not the given table function supports projection pushdown.
+
+If this is set to true, the system will provide a list of all required columns in the `init` stage through
+the `duckdb_init_get_column_count` and `duckdb_init_get_column_index` functions.
+If this is set to false (the default), the system will expect all columns to be projected.
+
+* table_function: The table function
+* pushdown: True if the table function supports projection pushdown, false otherwise.
+*/
+DUCKDB_API void duckdb_table_function_supports_projection_pushdown(duckdb_table_function table_function, bool pushdown);
+
+/*!
+Register the table function object within the given connection.
+
+The function requires at least a name, a bind function, an init function and a main function.
+
+If the function is incomplete, or a function with this name already exists, DuckDBError is returned.
+
+* con: The connection to register it in.
+* function: The function pointer
+* returns: Whether or not the registration was successful.
+*/
+DUCKDB_API duckdb_state duckdb_register_table_function(duckdb_connection con, duckdb_table_function function);
+
+//===--------------------------------------------------------------------===//
+// Table Function Bind
+//===--------------------------------------------------------------------===//
+/*!
+Retrieves the extra info of the function as set in `duckdb_table_function_set_extra_info`
+
+* info: The info object
+* returns: The extra info
+*/
+DUCKDB_API void *duckdb_bind_get_extra_info(duckdb_bind_info info);
+
+/*!
+Adds a result column to the output of the table function.
+
+* info: The info object
+* name: The name of the column
+* type: The logical type of the column
+*/
+DUCKDB_API void duckdb_bind_add_result_column(duckdb_bind_info info, const char *name, duckdb_logical_type type);
+
+/*!
+Retrieves the number of regular (non-named) parameters to the function.
+
+* info: The info object
+* returns: The number of parameters
+*/
+DUCKDB_API idx_t duckdb_bind_get_parameter_count(duckdb_bind_info info);
+
+/*!
+Retrieves the parameter at the given index.
+
+The result must be destroyed with `duckdb_destroy_value`.
+
+* info: The info object
+* index: The index of the parameter to get
+* returns: The value of the parameter. Must be destroyed with `duckdb_destroy_value`.
+*/
+DUCKDB_API duckdb_value duckdb_bind_get_parameter(duckdb_bind_info info, idx_t index);
+
+/*!
+Retrieves a named parameter with the given name.
+
+The result must be destroyed with `duckdb_destroy_value`.
+
+* info: The info object
+* name: The name of the parameter
+* returns: The value of the parameter. Must be destroyed with `duckdb_destroy_value`.
+*/
+DUCKDB_API duckdb_value duckdb_bind_get_named_parameter(duckdb_bind_info info, const char *name);
+
+/*!
+Sets the user-provided bind data in the bind object. This object can be retrieved again during execution.
+
+* info: The info object
+* bind_data: The bind data object.
+* destroy: The callback that will be called to destroy the bind data (if any)
+*/
+DUCKDB_API void duckdb_bind_set_bind_data(duckdb_bind_info info, void *bind_data, duckdb_delete_callback_t destroy);
+
+/*!
+Sets the cardinality estimate for the table function, used for optimization.
+
+* info: The bind data object.
+* cardinality: The cardinality estimate
+* is_exact: Whether or not the cardinality estimate is exact, or an approximation
+*/
+DUCKDB_API void duckdb_bind_set_cardinality(duckdb_bind_info info, idx_t cardinality, bool is_exact);
+
+/*!
+Report that an error has occurred while calling bind.
+
+* info: The info object
+* error: The error message
+*/
+DUCKDB_API void duckdb_bind_set_error(duckdb_bind_info info, const char *error);
+
+//===--------------------------------------------------------------------===//
+// Table Function Init
+//===--------------------------------------------------------------------===//
+
+/*!
+Retrieves the extra info of the function as set in `duckdb_table_function_set_extra_info`
+
+* info: The info object
+* returns: The extra info
+*/
+DUCKDB_API void *duckdb_init_get_extra_info(duckdb_init_info info);
+
+/*!
+Gets the bind data set by `duckdb_bind_set_bind_data` during the bind.
+
+Note that the bind data should be considered as read-only.
+For tracking state, use the init data instead.
+
+* info: The info object
+* returns: The bind data object
+*/
+DUCKDB_API void *duckdb_init_get_bind_data(duckdb_init_info info);
+
+/*!
+Sets the user-provided init data in the init object. This object can be retrieved again during execution.
+
+* info: The info object
+* init_data: The init data object.
+* destroy: The callback that will be called to destroy the init data (if any)
+*/
+DUCKDB_API void duckdb_init_set_init_data(duckdb_init_info info, void *init_data, duckdb_delete_callback_t destroy);
+
+/*!
+Returns the number of projected columns.
+
+This function must be used if projection pushdown is enabled to figure out which columns to emit.
+
+* info: The info object
+* returns: The number of projected columns.
+*/
+DUCKDB_API idx_t duckdb_init_get_column_count(duckdb_init_info info);
+
+/*!
+Returns the column index of the projected column at the specified position.
+
+This function must be used if projection pushdown is enabled to figure out which columns to emit.
+
+* info: The info object
+* column_index: The index at which to get the projected column index, from 0..duckdb_init_get_column_count(info)
+* returns: The column index of the projected column.
+*/
+DUCKDB_API idx_t duckdb_init_get_column_index(duckdb_init_info info, idx_t column_index);
+
+/*!
+Sets how many threads can process this table function in parallel (default: 1).
+
+* info: The info object
+* max_threads: The maximum number of threads that can process this table function
+*/
+DUCKDB_API void duckdb_init_set_max_threads(duckdb_init_info info, idx_t max_threads);
+
+/*!
+Report that an error has occurred while calling init.
+
+* info: The info object
+* error: The error message
+*/
+DUCKDB_API void duckdb_init_set_error(duckdb_init_info info, const char *error);
+
+//===--------------------------------------------------------------------===//
+// Table Function
+//===--------------------------------------------------------------------===//
+
+/*!
+Retrieves the extra info of the function as set in `duckdb_table_function_set_extra_info`
+
+* info: The info object
+* returns: The extra info
+*/
+DUCKDB_API void *duckdb_function_get_extra_info(duckdb_function_info info);
+/*!
+Gets the bind data set by `duckdb_bind_set_bind_data` during the bind.
+
+Note that the bind data should be considered as read-only.
+For tracking state, use the init data instead.
+
+* info: The info object
+* returns: The bind data object
+*/
+DUCKDB_API void *duckdb_function_get_bind_data(duckdb_function_info info);
+
+/*!
+Gets the init data set by `duckdb_init_set_init_data` during the init.
+
+* info: The info object
+* returns: The init data object
+*/
+DUCKDB_API void *duckdb_function_get_init_data(duckdb_function_info info);
+
+/*!
+Gets the thread-local init data set by `duckdb_init_set_init_data` during the local_init.
+
+* info: The info object
+* returns: The init data object
+*/
+DUCKDB_API void *duckdb_function_get_local_init_data(duckdb_function_info info);
+
+/*!
+Report that an error has occurred while executing the function.
+
+* info: The info object
+* error: The error message
+*/
+DUCKDB_API void duckdb_function_set_error(duckdb_function_info info, const char *error);
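+
+/*
+Illustrative sketch of a complete (if minimal) table function that emits
+three BIGINT rows. The names `my_bind`, `my_init`, `emit_three` and
+`three_rows`, the done-flag init data, and the open connection `con` are
+inventions for this example; malloc/free come from <stdlib.h> and bool from
+<stdbool.h>.
+
+    static void my_bind(duckdb_bind_info info) {
+        duckdb_logical_type t = duckdb_create_logical_type(DUCKDB_TYPE_BIGINT);
+        duckdb_bind_add_result_column(info, "i", t);
+        duckdb_destroy_logical_type(&t);
+    }
+
+    static void my_init(duckdb_init_info info) {
+        bool *done = malloc(sizeof(bool));
+        *done = false;
+        duckdb_init_set_init_data(info, done, free);
+    }
+
+    static void emit_three(duckdb_function_info info, duckdb_data_chunk output) {
+        bool *done = (bool *)duckdb_function_get_init_data(info);
+        if (*done) {
+            duckdb_data_chunk_set_size(output, 0); // signals exhaustion
+            return;
+        }
+        duckdb_vector vec = duckdb_data_chunk_get_vector(output, 0);
+        int64_t *data = (int64_t *)duckdb_vector_get_data(vec);
+        for (idx_t i = 0; i < 3; i++) {
+            data[i] = (int64_t)i;
+        }
+        duckdb_data_chunk_set_size(output, 3);
+        *done = true;
+    }
+
+    duckdb_table_function tf = duckdb_create_table_function();
+    duckdb_table_function_set_name(tf, "three_rows");
+    duckdb_table_function_set_bind(tf, my_bind);
+    duckdb_table_function_set_init(tf, my_init);
+    duckdb_table_function_set_function(tf, emit_three);
+    duckdb_register_table_function(con, tf);
+    duckdb_destroy_table_function(&tf);
+*/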
+
+//===--------------------------------------------------------------------===//
+// Replacement Scans
+//===--------------------------------------------------------------------===//
+typedef void *duckdb_replacement_scan_info;
+
+typedef void (*duckdb_replacement_callback_t)(duckdb_replacement_scan_info info, const char *table_name, void *data);
+
+/*!
+Add a replacement scan definition to the specified database
+
+* db: The database object to add the replacement scan to
+* replacement: The replacement scan callback
+* extra_data: Extra data that is passed back into the specified callback
+* delete_callback: The delete callback to call on the extra data, if any
+*/
+DUCKDB_API void duckdb_add_replacement_scan(duckdb_database db, duckdb_replacement_callback_t replacement,
+ void *extra_data, duckdb_delete_callback_t delete_callback);
+
+/*!
+Sets the replacement function name to use. If this function is called in the replacement callback,
+the replacement scan is performed. If it is not called, no replacement scan is performed.
+
+* info: The info object
+* function_name: The function name to substitute.
+*/
+DUCKDB_API void duckdb_replacement_scan_set_function_name(duckdb_replacement_scan_info info, const char *function_name);
+
+/*!
+Adds a parameter to the replacement scan function.
+
+* info: The info object
+* parameter: The parameter to add.
+*/
+DUCKDB_API void duckdb_replacement_scan_add_parameter(duckdb_replacement_scan_info info, duckdb_value parameter);
+
+/*!
+Report that an error has occurred while executing the replacement scan.
+
+* info: The info object
+* error: The error message
+*/
+DUCKDB_API void duckdb_replacement_scan_set_error(duckdb_replacement_scan_info info, const char *error);
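+
+/*
+Illustrative sketch of a replacement callback that routes any table name
+containing ".csv" to read_csv_auto. `csv_replacement` and the open database
+`db` are inventions for this example; strstr comes from <string.h>, and this
+assumes duckdb_replacement_scan_add_parameter copies the value.
+
+    static void csv_replacement(duckdb_replacement_scan_info info, const char *table_name, void *data) {
+        if (strstr(table_name, ".csv")) {
+            duckdb_replacement_scan_set_function_name(info, "read_csv_auto");
+            duckdb_value path = duckdb_create_varchar(table_name);
+            duckdb_replacement_scan_add_parameter(info, path);
+            duckdb_destroy_value(&path);
+        }
+        // not calling set_function_name means no replacement is made
+    }
+
+    duckdb_add_replacement_scan(db, csv_replacement, NULL, NULL);
+*/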
+
+//===--------------------------------------------------------------------===//
+// Appender
+//===--------------------------------------------------------------------===//
+
+// Appenders are the most efficient way of loading data into DuckDB from within the C interface, and are recommended for
+// fast data loading. The appender is much faster than using prepared statements or individual `INSERT INTO` statements.
+
+// Appends are made in row-wise format. For every column, a `duckdb_append_[type]` call should be made, after which
+// the row should be finished by calling `duckdb_appender_end_row`. After all rows have been appended,
+// `duckdb_appender_destroy` should be used to finalize the appender and clean up the resulting memory.
+
+// Note that `duckdb_appender_destroy` should always be called on the resulting appender, even if the function returns
+// `DuckDBError`.
+
+/*!
+Creates an appender object.
+
+* connection: The connection context to create the appender in.
+* schema: The schema of the table to append to, or `nullptr` for the default schema.
+* table: The table name to append to.
+* out_appender: The resulting appender object.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_appender_create(duckdb_connection connection, const char *schema, const char *table,
+ duckdb_appender *out_appender);
+
+/*!
+Returns the error message associated with the given appender.
+If the appender has no error message, this returns `nullptr` instead.
+
+The error message should not be freed. It will be de-allocated when `duckdb_appender_destroy` is called.
+
+* appender: The appender to get the error from.
+* returns: The error message, or `nullptr` if there is none.
+*/
+DUCKDB_API const char *duckdb_appender_error(duckdb_appender appender);
+
+/*!
+Flush the appender to the table, forcing the cache of the appender to be cleared and the data to be appended to the
+base table.
+
+This should generally not be used unless you know what you are doing. Instead, call `duckdb_appender_destroy` when you
+are done with the appender.
+
+* appender: The appender to flush.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_appender_flush(duckdb_appender appender);
+
+/*!
+Close the appender, flushing all intermediate state in the appender to the table and closing it for further appends.
+
+This is generally not necessary. Call `duckdb_appender_destroy` instead.
+
+* appender: The appender to flush and close.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_appender_close(duckdb_appender appender);
+
+/*!
+Close the appender and destroy it. This flushes all intermediate state in the appender to the table
+and de-allocates all memory associated with the appender.
+
+* appender: The appender to flush, close and destroy.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_appender_destroy(duckdb_appender *appender);
+
+/*!
+A nop function, provided for backwards compatibility reasons. Does nothing. Only `duckdb_appender_end_row` is required.
+*/
+DUCKDB_API duckdb_state duckdb_appender_begin_row(duckdb_appender appender);
+
+/*!
+Finish the current row of appends. After end_row is called, the next row can be appended.
+
+* appender: The appender.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_appender_end_row(duckdb_appender appender);
+
+/*!
+Append a bool value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_bool(duckdb_appender appender, bool value);
+
+/*!
+Append an int8_t value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_int8(duckdb_appender appender, int8_t value);
+/*!
+Append an int16_t value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_int16(duckdb_appender appender, int16_t value);
+/*!
+Append an int32_t value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_int32(duckdb_appender appender, int32_t value);
+/*!
+Append an int64_t value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_int64(duckdb_appender appender, int64_t value);
+/*!
+Append a duckdb_hugeint value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_hugeint(duckdb_appender appender, duckdb_hugeint value);
+
+/*!
+Append a uint8_t value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_uint8(duckdb_appender appender, uint8_t value);
+/*!
+Append a uint16_t value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_uint16(duckdb_appender appender, uint16_t value);
+/*!
+Append a uint32_t value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_uint32(duckdb_appender appender, uint32_t value);
+/*!
+Append a uint64_t value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_uint64(duckdb_appender appender, uint64_t value);
+
+/*!
+Append a float value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_float(duckdb_appender appender, float value);
+/*!
+Append a double value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_double(duckdb_appender appender, double value);
+
+/*!
+Append a duckdb_date value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_date(duckdb_appender appender, duckdb_date value);
+/*!
+Append a duckdb_time value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_time(duckdb_appender appender, duckdb_time value);
+/*!
+Append a duckdb_timestamp value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_timestamp(duckdb_appender appender, duckdb_timestamp value);
+/*!
+Append a duckdb_interval value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_interval(duckdb_appender appender, duckdb_interval value);
+
+/*!
+Append a varchar value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_varchar(duckdb_appender appender, const char *val);
+/*!
+Append a varchar value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_varchar_length(duckdb_appender appender, const char *val, idx_t length);
+/*!
+Append a blob value to the appender.
+*/
+DUCKDB_API duckdb_state duckdb_append_blob(duckdb_appender appender, const void *data, idx_t length);
+/*!
+Append a NULL value to the appender (of any type).
+*/
+DUCKDB_API duckdb_state duckdb_append_null(duckdb_appender appender);
+
+/*!
+Appends a pre-filled data chunk to the specified appender.
+
+The types of the data chunk must exactly match the types of the table; no casting is performed.
+If the types do not match or the appender is in an invalid state, DuckDBError is returned.
+If the append is successful, DuckDBSuccess is returned.
+
+* appender: The appender to append to.
+* chunk: The data chunk to append.
+* returns: The return state.
+*/
+DUCKDB_API duckdb_state duckdb_append_data_chunk(duckdb_appender appender, duckdb_data_chunk chunk);
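+
+/*
+Illustrative sketch: appending one row. This assumes an open connection `con`
+and a table created with CREATE TABLE people (id INTEGER, name VARCHAR);
+both names are inventions for this example.
+
+    duckdb_appender appender;
+    if (duckdb_appender_create(con, NULL, "people", &appender) == DuckDBError) {
+        fprintf(stderr, "%s\n", duckdb_appender_error(appender));
+    }
+    duckdb_append_int32(appender, 42);
+    duckdb_append_varchar(appender, "Ada");
+    duckdb_appender_end_row(appender);
+    duckdb_appender_destroy(&appender); // flushes, closes and frees, even on error
+*/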
+
+//===--------------------------------------------------------------------===//
+// Arrow Interface
+//===--------------------------------------------------------------------===//
+/*!
+Executes a SQL query within a connection and stores the full (materialized) result in an arrow structure.
+If the query fails to execute, DuckDBError is returned and the error message can be retrieved by calling
+`duckdb_query_arrow_error`.
+
+Note that after running `duckdb_query_arrow`, `duckdb_destroy_arrow` must be called on the result object even if the
+query fails, otherwise the error stored within the result will not be freed correctly.
+
+* connection: The connection to perform the query in.
+* query: The SQL query to run.
+* out_result: The query result.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_query_arrow(duckdb_connection connection, const char *query, duckdb_arrow *out_result);
+
+/*!
+Fetch the internal arrow schema from the arrow result.
+
+* result: The result to fetch the schema from.
+* out_schema: The output schema.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_query_arrow_schema(duckdb_arrow result, duckdb_arrow_schema *out_schema);
+
+/*!
+Fetch the internal arrow schema from the prepared statement.
+
+* prepared: The prepared statement to fetch the schema from.
+* out_schema: The output schema.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_prepared_arrow_schema(duckdb_prepared_statement prepared,
+ duckdb_arrow_schema *out_schema);
+
+/*!
+Fetch an internal arrow array from the arrow result.
+
+This function can be called multiple times to fetch the next chunk; each call frees the previous
+out_array, so consume the out_array before calling this function again.
+
+* result: The result to fetch the array from.
+* out_array: The output array.
+* returns: `DuckDBSuccess` on success or `DuckDBError` on failure.
+*/
+DUCKDB_API duckdb_state duckdb_query_arrow_array(duckdb_arrow result, duckdb_arrow_array *out_array);
+
+/*!
+Returns the number of columns present in the arrow result object.
+
+* result: The result object.
+* returns: The number of columns present in the result object.
+*/
+DUCKDB_API idx_t duckdb_arrow_column_count(duckdb_arrow result);
+
+/*!
+Returns the number of rows present in the arrow result object.
+
+* result: The result object.
+* returns: The number of rows present in the result object.
+*/
+DUCKDB_API idx_t duckdb_arrow_row_count(duckdb_arrow result);
+
+/*!
+Returns the number of rows changed by the query stored in the arrow result. This is relevant only for
+INSERT/UPDATE/DELETE queries. For other queries the rows_changed will be 0.
+
+* result: The result object.
+* returns: The number of rows changed.
+*/
+DUCKDB_API idx_t duckdb_arrow_rows_changed(duckdb_arrow result);
+
+/*!
+Returns the error message contained within the result. The error is only set if `duckdb_query_arrow` returns
+`DuckDBError`.
+
+The error message should not be freed. It will be de-allocated when `duckdb_destroy_arrow` is called.
+
+* result: The result object to fetch the error from.
+* returns: The error of the result.
+*/
+DUCKDB_API const char *duckdb_query_arrow_error(duckdb_arrow result);
+
+/*!
+Closes the result and de-allocates all memory allocated for the arrow result.
+
+* result: The result to destroy.
+*/
+DUCKDB_API void duckdb_destroy_arrow(duckdb_arrow *result);
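+
+/*
+Illustrative sketch: materializing a query as arrow and fetching its schema
+and first array, assuming an open connection `con`.
+
+    duckdb_arrow result;
+    if (duckdb_query_arrow(con, "SELECT 42 AS i", &result) == DuckDBError) {
+        fprintf(stderr, "%s\n", duckdb_query_arrow_error(result));
+    }
+    duckdb_arrow_schema schema = NULL;
+    duckdb_query_arrow_schema(result, &schema);
+    duckdb_arrow_array array = NULL;
+    duckdb_query_arrow_array(result, &array);
+    // ... consume the array before fetching the next one ...
+    duckdb_destroy_arrow(&result); // also frees any stored error
+*/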
+
+//===--------------------------------------------------------------------===//
+// Threading Information
+//===--------------------------------------------------------------------===//
+typedef void *duckdb_task_state;
+
+/*!
+Execute DuckDB tasks on this thread.
+
+Will return after `max_tasks` have been executed, or if there are no more tasks present.
+
+* database: The database object to execute tasks for
+* max_tasks: The maximum number of tasks to execute
+*/
+DUCKDB_API void duckdb_execute_tasks(duckdb_database database, idx_t max_tasks);
+
+/*!
+Creates a task state that can be used with duckdb_execute_tasks_state to execute tasks until
+duckdb_finish_execution is called on the state.
+
+duckdb_destroy_task_state should be called on the result in order to free memory.
+
+* database: The database object to create the task state for
+* returns: The task state that can be used with duckdb_execute_tasks_state.
+*/
+DUCKDB_API duckdb_task_state duckdb_create_task_state(duckdb_database database);
+
+/*!
+Execute DuckDB tasks on this thread.
+
+The thread will keep on executing tasks forever, until duckdb_finish_execution is called on the state.
+Multiple threads can share the same duckdb_task_state.
+
+* state: The task state of the executor
+*/
+DUCKDB_API void duckdb_execute_tasks_state(duckdb_task_state state);
+
+/*!
+Execute DuckDB tasks on this thread.
+
+The thread will keep on executing tasks until either duckdb_finish_execution is called on the state,
+max_tasks tasks have been executed or there are no more tasks to be executed.
+
+Multiple threads can share the same duckdb_task_state.
+
+* state: The task state of the executor
+* max_tasks: The maximum number of tasks to execute
+* returns: The number of tasks that have actually been executed
+*/
+DUCKDB_API idx_t duckdb_execute_n_tasks_state(duckdb_task_state state, idx_t max_tasks);
+
+/*!
+Finish execution on a specific task state.
+
+* state: The task state on which to finish execution
+*/
+DUCKDB_API void duckdb_finish_execution(duckdb_task_state state);
+
+/*!
+Check if the provided duckdb_task_state has finished execution
+
+* state: The task state to inspect
+* returns: Whether or not duckdb_finish_execution has been called on the task state
+*/
+DUCKDB_API bool duckdb_task_state_is_finished(duckdb_task_state state);
+
+/*!
+Destroys the task state returned from duckdb_create_task_state.
+
+Note that this should not be called while there is an active duckdb_execute_tasks_state running
+on the task state.
+
+* state: The task state to clean up
+*/
+DUCKDB_API void duckdb_destroy_task_state(duckdb_task_state state);
+
+/*!
+Returns true if execution of the current query is finished.
+
+* con: The connection on which to check
+*/
+DUCKDB_API bool duckdb_execution_is_finished(duckdb_connection con);
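+
+/*
+Illustrative sketch: letting worker threads help execute tasks for an open
+database `database`. Thread creation is elided; any threading API works.
+
+    duckdb_task_state state = duckdb_create_task_state(database);
+
+    // on each worker thread: blocks until duckdb_finish_execution is called
+    duckdb_execute_tasks_state(state);
+
+    // on the control thread, once the query is done:
+    duckdb_finish_execution(state);
+    // ... join the workers, then:
+    duckdb_destroy_task_state(state);
+*/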
+
+//===--------------------------------------------------------------------===//
+// Streaming Result Interface
+//===--------------------------------------------------------------------===//
+
+/*!
+Fetches a data chunk from the (streaming) duckdb_result. This function should be called repeatedly until the result is
+exhausted.
+
+The result must be destroyed with `duckdb_destroy_data_chunk`.
+
+This function can only be used on duckdb_results created with `duckdb_pending_prepared_streaming`.
+
+If this function is used, none of the other result functions can be used and vice versa (i.e. this function cannot be
+mixed with the legacy result functions or the materialized result functions).
+
+It is not known beforehand how many chunks will be returned by this result.
+
+* result: The result object to fetch the data chunk from.
+* returns: The resulting data chunk. Returns `NULL` if the result has an error.
+*/
+DUCKDB_API duckdb_data_chunk duckdb_stream_fetch_chunk(duckdb_result result);
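+
+/*
+Illustrative sketch: draining a streaming `result` (assumed to have been
+obtained via duckdb_pending_prepared_streaming and duckdb_execute_pending),
+looping until the fetch returns NULL (exhausted or error).
+
+    duckdb_data_chunk chunk;
+    while ((chunk = duckdb_stream_fetch_chunk(result)) != NULL) {
+        idx_t rows = duckdb_data_chunk_get_size(chunk);
+        // ... process the rows ...
+        duckdb_destroy_data_chunk(&chunk);
+    }
+*/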
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/vendor/github.com/marcboeker/go-duckdb/result.go b/vendor/github.com/marcboeker/go-duckdb/result.go
new file mode 100644
index 0000000..aef314c
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/result.go
@@ -0,0 +1,13 @@
+package duckdb
+
+type result struct {
+ ra int64
+}
+
+func (r result) LastInsertId() (int64, error) {
+ return 0, nil
+}
+
+func (r result) RowsAffected() (int64, error) {
+ return r.ra, nil
+}
diff --git a/vendor/github.com/marcboeker/go-duckdb/rows.go b/vendor/github.com/marcboeker/go-duckdb/rows.go
new file mode 100644
index 0000000..b4dfc32
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/rows.go
@@ -0,0 +1,569 @@
+package duckdb
+
+/*
+#include <duckdb.h>
+*/
+import "C"
+
+import (
+ "database/sql/driver"
+ "errors"
+ "fmt"
+ "io"
+ "math/big"
+ "reflect"
+ "strings"
+ "time"
+ "unsafe"
+)
+
+type rows struct {
+ res C.duckdb_result
+ stmt *stmt
+ chunk C.duckdb_data_chunk
+ columns []string
+ chunkCount C.idx_t
+ chunkRowCount C.idx_t
+ chunkIdx C.idx_t
+ chunkRowIdx C.idx_t
+}
+
+func newRows(res C.duckdb_result) *rows {
+ return newRowsWithStmt(res, nil)
+}
+
+func newRowsWithStmt(res C.duckdb_result, stmt *stmt) *rows {
+ n := C.duckdb_column_count(&res)
+ columns := make([]string, 0, n)
+ for i := C.idx_t(0); i < n; i++ {
+ columns = append(columns, C.GoString(C.duckdb_column_name(&res, i)))
+ }
+
+ return &rows{
+ res: res,
+ stmt: stmt,
+ columns: columns,
+ chunkCount: C.duckdb_result_chunk_count(res),
+ chunkRowCount: 0,
+ chunkIdx: 0,
+ chunkRowIdx: 0,
+ }
+}
+
+func (r *rows) Columns() []string {
+ return r.columns
+}
+
+func (r *rows) Next(dst []driver.Value) error {
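+	// Loop (rather than if) so that empty chunks are skipped, destroying the
+	// previous chunk before fetching the next one so it is not leaked.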
+ for r.chunkRowIdx == r.chunkRowCount {
+ C.duckdb_destroy_data_chunk(&r.chunk)
+ if r.chunkIdx == r.chunkCount {
+ return io.EOF
+ }
+ r.chunk = C.duckdb_result_get_chunk(r.res, r.chunkIdx)
+ r.chunkIdx++
+ r.chunkRowCount = C.duckdb_data_chunk_get_size(r.chunk)
+ r.chunkRowIdx = 0
+ }
+
+ colCount := len(r.columns)
+
+ for colIdx := C.idx_t(0); colIdx < C.idx_t(colCount); colIdx++ {
+ vector := C.duckdb_data_chunk_get_vector(r.chunk, colIdx)
+ value, err := scanValue(vector, r.chunkRowIdx)
+ if err != nil {
+ return err
+ }
+ dst[colIdx] = value
+ }
+
+ r.chunkRowIdx++
+
+ return nil
+}
+
+func scanValue(vector C.duckdb_vector, rowIdx C.idx_t) (any, error) {
+ v, err := scan(vector, rowIdx)
+ if err != nil {
+ return nil, err
+ }
+
+ switch value := v.(type) {
+ case map[string]any, []any, Map, driver.Value:
+ return value, nil
+ case nil:
+ return nil, nil
+ default:
+ panic(fmt.Sprintf("BUG: found unexpected type when scanning: %T", value))
+ }
+}
+
+func scan(vector C.duckdb_vector, rowIdx C.idx_t) (any, error) {
+ validity := C.duckdb_vector_get_validity(vector)
+ if !C.duckdb_validity_row_is_valid(validity, rowIdx) {
+ return nil, nil
+ }
+
+ ty := C.duckdb_vector_get_column_type(vector)
+ defer C.duckdb_destroy_logical_type(&ty)
+
+ typeId := C.duckdb_get_type_id(ty)
+ switch typeId {
+ case C.DUCKDB_TYPE_INVALID:
+ return nil, errInvalidType
+ case C.DUCKDB_TYPE_BOOLEAN:
+ return get[bool](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_TINYINT:
+ return get[int8](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_SMALLINT:
+ return get[int16](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_INTEGER:
+ return get[int32](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_BIGINT:
+ return get[int64](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_UTINYINT:
+ return get[uint8](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_USMALLINT:
+ return get[uint16](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_UINTEGER:
+ return get[uint32](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_UBIGINT:
+ return get[uint64](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_FLOAT:
+ return get[float32](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_DOUBLE:
+ return get[float64](vector, rowIdx), nil
+ case C.DUCKDB_TYPE_TIMESTAMP:
+ return time.UnixMicro(int64(get[C.duckdb_timestamp](vector, rowIdx).micros)).UTC(), nil
+ case C.DUCKDB_TYPE_DATE:
+ date := C.duckdb_from_date(get[C.duckdb_date](vector, rowIdx))
+ return time.Date(int(date.year), time.Month(date.month), int(date.day), 0, 0, 0, 0, time.UTC), nil
+ case C.DUCKDB_TYPE_TIME:
+ return time.UnixMicro(int64(get[C.duckdb_time](vector, rowIdx).micros)).UTC(), nil
+ case C.DUCKDB_TYPE_INTERVAL:
+ return scanInterval(vector, rowIdx)
+ case C.DUCKDB_TYPE_HUGEINT:
+ hi := get[C.duckdb_hugeint](vector, rowIdx)
+ return hugeIntToNative(hi), nil
+ case C.DUCKDB_TYPE_VARCHAR:
+ return scanString(vector, rowIdx), nil
+ case C.DUCKDB_TYPE_ENUM:
+ return scanENUM(ty, vector, rowIdx)
+ case C.DUCKDB_TYPE_BLOB:
+ return scanBlob(vector, rowIdx), nil
+ case C.DUCKDB_TYPE_DECIMAL:
+ return scanDecimal(ty, vector, rowIdx)
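+	// Note: for TIMESTAMP_S/_MS/_NS the C struct still exposes the value in
+	// its `micros` field; it actually holds seconds, milliseconds, or
+	// nanoseconds respectively, so each case below converts accordingly.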
+ case C.DUCKDB_TYPE_TIMESTAMP_S:
+ return time.Unix(int64(get[C.duckdb_timestamp](vector, rowIdx).micros), 0).UTC(), nil
+ case C.DUCKDB_TYPE_TIMESTAMP_MS:
+ return time.UnixMilli(int64(get[C.duckdb_timestamp](vector, rowIdx).micros)).UTC(), nil
+ case C.DUCKDB_TYPE_TIMESTAMP_NS:
+ return time.Unix(0, int64(get[C.duckdb_timestamp](vector, rowIdx).micros)).UTC(), nil
+ case C.DUCKDB_TYPE_LIST:
+ return scanList(vector, rowIdx)
+ case C.DUCKDB_TYPE_STRUCT:
+ return scanStruct(ty, vector, rowIdx)
+ case C.DUCKDB_TYPE_MAP:
+ return scanMap(ty, vector, rowIdx)
+ case C.DUCKDB_TYPE_UUID:
+ hi := get[C.duckdb_hugeint](vector, rowIdx)
+ return hugeIntToUUID(hi), nil
+ default:
+ return nil, fmt.Errorf("unsupported type %d", typeId)
+ }
+}
+
+// Implements driver.RowsColumnTypeScanType
+func (r *rows) ColumnTypeScanType(index int) reflect.Type {
+ colType := C.duckdb_column_type(&r.res, C.idx_t(index))
+ switch colType {
+ case C.DUCKDB_TYPE_INVALID:
+ return nil
+ case C.DUCKDB_TYPE_BOOLEAN:
+ return reflect.TypeOf(true)
+ case C.DUCKDB_TYPE_TINYINT:
+ return reflect.TypeOf(int8(0))
+ case C.DUCKDB_TYPE_SMALLINT:
+ return reflect.TypeOf(int16(0))
+ case C.DUCKDB_TYPE_INTEGER:
+ return reflect.TypeOf(int32(0))
+ case C.DUCKDB_TYPE_BIGINT:
+ return reflect.TypeOf(int64(0))
+ case C.DUCKDB_TYPE_UTINYINT:
+ return reflect.TypeOf(uint8(0))
+ case C.DUCKDB_TYPE_USMALLINT:
+ return reflect.TypeOf(uint16(0))
+ case C.DUCKDB_TYPE_UINTEGER:
+ return reflect.TypeOf(uint32(0))
+ case C.DUCKDB_TYPE_UBIGINT:
+ return reflect.TypeOf(uint64(0))
+ case C.DUCKDB_TYPE_FLOAT:
+ return reflect.TypeOf(float32(0))
+ case C.DUCKDB_TYPE_DOUBLE:
+ return reflect.TypeOf(float64(0))
+ case C.DUCKDB_TYPE_TIMESTAMP:
+ return reflect.TypeOf(time.Time{})
+ case C.DUCKDB_TYPE_DATE:
+ return reflect.TypeOf(time.Time{})
+ case C.DUCKDB_TYPE_TIME:
+ return reflect.TypeOf(time.Time{})
+ case C.DUCKDB_TYPE_INTERVAL:
+ return reflect.TypeOf(Interval{})
+ case C.DUCKDB_TYPE_HUGEINT:
+ return reflect.TypeOf(big.NewInt(0))
+ case C.DUCKDB_TYPE_VARCHAR:
+ return reflect.TypeOf("")
+ case C.DUCKDB_TYPE_ENUM:
+ return reflect.TypeOf("")
+ case C.DUCKDB_TYPE_BLOB:
+ return reflect.TypeOf([]byte{})
+ case C.DUCKDB_TYPE_DECIMAL:
+ return reflect.TypeOf(Decimal{})
+ case C.DUCKDB_TYPE_TIMESTAMP_S:
+ return reflect.TypeOf(time.Time{})
+ case C.DUCKDB_TYPE_TIMESTAMP_MS:
+ return reflect.TypeOf(time.Time{})
+ case C.DUCKDB_TYPE_TIMESTAMP_NS:
+ return reflect.TypeOf(time.Time{})
+ case C.DUCKDB_TYPE_LIST:
+ return reflect.TypeOf([]any{})
+ case C.DUCKDB_TYPE_STRUCT:
+ return reflect.TypeOf(map[string]any{})
+ case C.DUCKDB_TYPE_MAP:
+ return reflect.TypeOf(Map{})
+ case C.DUCKDB_TYPE_UUID:
+ return reflect.TypeOf([]byte{})
+ default:
+ return nil
+ }
+}
+
+// Implements driver.RowsColumnTypeDatabaseTypeName
+func (r *rows) ColumnTypeDatabaseTypeName(index int) string {
+ // Only allocate logical type if necessary
+ colType := C.duckdb_column_type(&r.res, C.idx_t(index))
+ switch colType {
+ case C.DUCKDB_TYPE_DECIMAL:
+ fallthrough
+ case C.DUCKDB_TYPE_ENUM:
+ fallthrough
+ case C.DUCKDB_TYPE_LIST:
+ fallthrough
+ case C.DUCKDB_TYPE_STRUCT:
+ fallthrough
+ case C.DUCKDB_TYPE_MAP:
+ logColType := C.duckdb_column_logical_type(&r.res, C.idx_t(index))
+ defer C.duckdb_destroy_logical_type(&logColType)
+ return logicalTypeName(logColType)
+ default:
+ // Handle as primitive type
+ return typeName(colType)
+ }
+}
+
+func (r *rows) Close() error {
+ C.duckdb_destroy_data_chunk(&r.chunk)
+ C.duckdb_destroy_result(&r.res)
+
+ var err error
+ if r.stmt != nil {
+ r.stmt.rows = false
+ if r.stmt.closeOnRowsClose {
+ err = r.stmt.Close()
+ }
+ r.stmt = nil
+ }
+
+ return err
+}
+
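+// get reads the value of type T at rowIdx from the vector's raw data. The
+// (*[1 << 31]T) conversion never allocates; it only gives the pointer a
+// large fixed-size array type so it can be indexed.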
+func get[T any](vector C.duckdb_vector, rowIdx C.idx_t) T {
+ ptr := C.duckdb_vector_get_data(vector)
+ xs := (*[1 << 31]T)(ptr)
+ return xs[rowIdx]
+}
+
+func scanMap(ty C.duckdb_logical_type, vector C.duckdb_vector, rowIdx C.idx_t) (Map, error) {
+ list, err := scanList(vector, rowIdx)
+ if err != nil {
+ return nil, err
+ }
+
+ // DuckDB supports more map key types than Go, which only supports comparable types.
+ // To avoid a panic, we check that the map key type is comparable.
+ // All keys in a DuckDB map have the same type, so we just do this check for the first value.
+ if len(list) > 0 {
+ mapItem := list[0].(map[string]any)
+ key, ok := mapItem["key"]
+ if !ok {
+ return nil, errMissingKeyOrValue
+ }
+ if !reflect.TypeOf(key).Comparable() {
+ return nil, errUnsupportedMapKeyType
+ }
+ }
+
+ out := Map{}
+ for i := 0; i < len(list); i++ {
+ mapItem := list[i].(map[string]any)
+ key, ok := mapItem["key"]
+ if !ok {
+ return nil, errMissingKeyOrValue
+ }
+ val, ok := mapItem["value"]
+ if !ok {
+ return nil, errMissingKeyOrValue
+ }
+ out[key] = val
+ }
+
+ return out, nil
+}
+
+func scanString(vector C.duckdb_vector, rowIdx C.idx_t) string {
+ return string(scanBlob(vector, rowIdx))
+}
+
+// See duckdb/tools/juliapkg/src/ctypes.jl:
+// `json`, `varchar`, and `blob` share the same representation.
+func scanBlob(vector C.duckdb_vector, rowIdx C.idx_t) []byte {
+ s := get[duckdb_string_t](vector, rowIdx)
+ if s.length <= stringInlineLength {
+ // inline data is stored from byte 4..16 (up to 12 bytes)
+ return C.GoBytes(unsafe.Pointer(&s.prefix), C.int(s.length))
+ } else {
+ // any longer strings are stored as a pointer in `ptr`
+ return C.GoBytes(unsafe.Pointer(s.ptr), C.int(s.length))
+ }
+}
+
+func scanList(vector C.duckdb_vector, rowIdx C.idx_t) ([]any, error) {
+ data := C.duckdb_list_vector_get_child(vector)
+ entry := get[duckdb_list_entry_t](vector, rowIdx)
+ converted := make([]any, 0, entry.length)
+
+ for i := entry.offset; i < entry.offset+entry.length; i++ {
+ value, err := scan(data, i)
+ if err != nil {
+ return nil, err
+ }
+ converted = append(converted, value)
+ }
+
+ return converted, nil
+}
+
+func scanStruct(ty C.duckdb_logical_type, vector C.duckdb_vector, rowIdx C.idx_t) (map[string]any, error) {
+ data := map[string]any{}
+
+ for j := C.idx_t(0); j < C.duckdb_struct_type_child_count(ty); j++ {
+
+ ptrToChildName := C.duckdb_struct_type_child_name(ty, j)
+ name := C.GoString(ptrToChildName)
+ C.duckdb_free(unsafe.Pointer(ptrToChildName))
+
+ child := C.duckdb_struct_vector_get_child(vector, j)
+ value, err := scan(child, rowIdx)
+ if err != nil {
+ return nil, err
+ }
+ data[name] = value
+ }
+ return data, nil
+}
+
+func scanDecimal(ty C.duckdb_logical_type, vector C.duckdb_vector, rowIdx C.idx_t) (Decimal, error) {
+ scale := C.duckdb_decimal_scale(ty)
+ width := C.duckdb_decimal_width(ty)
+ var nativeValue *big.Int
+ switch C.duckdb_decimal_internal_type(ty) {
+ case C.DUCKDB_TYPE_SMALLINT:
+ nativeValue = big.NewInt(int64(get[int16](vector, rowIdx)))
+ case C.DUCKDB_TYPE_INTEGER:
+ nativeValue = big.NewInt(int64(get[int32](vector, rowIdx)))
+ case C.DUCKDB_TYPE_BIGINT:
+ nativeValue = big.NewInt(int64(get[int64](vector, rowIdx)))
+ case C.DUCKDB_TYPE_HUGEINT:
+ i := get[C.duckdb_hugeint](vector, rowIdx)
+ nativeValue = hugeIntToNative(C.duckdb_hugeint{
+ lower: i.lower,
+ upper: i.upper,
+ })
+ default:
+ return Decimal{}, errInvalidType
+ }
+
+ if nativeValue == nil {
+ return Decimal{}, fmt.Errorf("unable to convert hugeint to native type")
+ }
+
+ return Decimal{Width: uint8(width), Scale: uint8(scale), Value: nativeValue}, nil
+}
+
+func scanInterval(vector C.duckdb_vector, rowIdx C.idx_t) (Interval, error) {
+ i := get[C.duckdb_interval](vector, rowIdx)
+ data := Interval{
+ Days: int32(i.days),
+ Months: int32(i.months),
+ Micros: int64(i.micros),
+ }
+ return data, nil
+}
+
+func scanENUM(ty C.duckdb_logical_type, vector C.duckdb_vector, rowIdx C.idx_t) (string, error) {
+ var idx uint64
+ internalType := C.duckdb_enum_internal_type(ty)
+ switch internalType {
+ case C.DUCKDB_TYPE_UTINYINT:
+ idx = uint64(get[uint8](vector, rowIdx))
+ case C.DUCKDB_TYPE_USMALLINT:
+ idx = uint64(get[uint16](vector, rowIdx))
+ case C.DUCKDB_TYPE_UINTEGER:
+ idx = uint64(get[uint32](vector, rowIdx))
+ case C.DUCKDB_TYPE_UBIGINT:
+ idx = get[uint64](vector, rowIdx)
+ default:
+ return "", errInvalidType
+ }
+
+ val := C.duckdb_enum_dictionary_value(ty, (C.idx_t)(idx))
+ defer C.duckdb_free(unsafe.Pointer(val))
+ return C.GoString(val), nil
+}
+
+var (
+ errInvalidType = errors.New("invalid data type")
+ errMissingKeyOrValue = errors.New("missing key and/or value for map item")
+ errUnsupportedMapKeyType = errors.New("map key type not supported by driver")
+)
+
+func typeName(t C.duckdb_type) string {
+ // Corresponds to enum order of duckdb_type
+ switch t {
+ case C.DUCKDB_TYPE_INVALID:
+ return "INVALID"
+ case C.DUCKDB_TYPE_BOOLEAN:
+ return "BOOLEAN"
+ case C.DUCKDB_TYPE_TINYINT:
+ return "TINYINT"
+ case C.DUCKDB_TYPE_SMALLINT:
+ return "SMALLINT"
+ case C.DUCKDB_TYPE_INTEGER:
+ return "INTEGER"
+ case C.DUCKDB_TYPE_BIGINT:
+ return "BIGINT"
+ case C.DUCKDB_TYPE_UTINYINT:
+ return "UTINYINT"
+ case C.DUCKDB_TYPE_USMALLINT:
+ return "USMALLINT"
+ case C.DUCKDB_TYPE_UINTEGER:
+ return "UINTEGER"
+ case C.DUCKDB_TYPE_UBIGINT:
+ return "UBIGINT"
+ case C.DUCKDB_TYPE_FLOAT:
+ return "FLOAT"
+ case C.DUCKDB_TYPE_DOUBLE:
+ return "DOUBLE"
+ case C.DUCKDB_TYPE_TIMESTAMP:
+ return "TIMESTAMP"
+ case C.DUCKDB_TYPE_DATE:
+ return "DATE"
+ case C.DUCKDB_TYPE_TIME:
+ return "TIME"
+ case C.DUCKDB_TYPE_INTERVAL:
+ return "INTERVAL"
+ case C.DUCKDB_TYPE_HUGEINT:
+ return "HUGEINT"
+ case C.DUCKDB_TYPE_VARCHAR:
+ return "VARCHAR"
+ case C.DUCKDB_TYPE_BLOB:
+ return "BLOB"
+ case C.DUCKDB_TYPE_DECIMAL:
+ // NOTE: should be handled as logical type
+ return "DECIMAL"
+ case C.DUCKDB_TYPE_TIMESTAMP_S:
+ return "TIMESTAMP_S"
+ case C.DUCKDB_TYPE_TIMESTAMP_MS:
+ return "TIMESTAMP_MS"
+ case C.DUCKDB_TYPE_TIMESTAMP_NS:
+ return "TIMESTAMP_NS"
+ case C.DUCKDB_TYPE_ENUM:
+ // NOTE: should be handled as logical type
+ return "ENUM"
+ case C.DUCKDB_TYPE_LIST:
+ // NOTE: should be handled as logical type
+ return "LIST"
+ case C.DUCKDB_TYPE_STRUCT:
+ // NOTE: should be handled as logical type
+ return "STRUCT"
+ case C.DUCKDB_TYPE_MAP:
+ // NOTE: should be handled as logical type
+ return "MAP"
+ case C.DUCKDB_TYPE_UUID:
+ return "UUID"
+ default:
+ // Should never happen
+ return ""
+ }
+}
+
+func logicalTypeName(lt C.duckdb_logical_type) string {
+ t := C.duckdb_get_type_id(lt)
+ switch t {
+ case C.DUCKDB_TYPE_DECIMAL:
+ width := C.duckdb_decimal_width(lt)
+ scale := C.duckdb_decimal_scale(lt)
+ return fmt.Sprintf("DECIMAL(%d,%d)", width, scale)
+ case C.DUCKDB_TYPE_ENUM:
+ // C API does not currently expose enum name
+ return "ENUM"
+ case C.DUCKDB_TYPE_LIST:
+ clt := C.duckdb_list_type_child_type(lt)
+ defer C.duckdb_destroy_logical_type(&clt)
+ return logicalTypeName(clt) + "[]"
+ case C.DUCKDB_TYPE_STRUCT:
+ return logicalTypeNameStruct(lt)
+ case C.DUCKDB_TYPE_MAP:
+ return logicalTypeNameMap(lt)
+ default:
+ return typeName(t)
+ }
+}
+
+func logicalTypeNameStruct(lt C.duckdb_logical_type) string {
+ count := int(C.duckdb_struct_type_child_count(lt))
+ name := "STRUCT("
+ for i := 0; i < count; i++ {
+
+ ptrToChildName := C.duckdb_struct_type_child_name(lt, C.idx_t(i))
+ childName := C.GoString(ptrToChildName)
+ childLogicalType := C.duckdb_struct_type_child_type(lt, C.idx_t(i))
+
+ // Add comma if not at end of list
+ name += escapeStructFieldName(childName) + " " + logicalTypeName(childLogicalType)
+ if i != count-1 {
+ name += ", "
+ }
+
+ C.duckdb_free(unsafe.Pointer(ptrToChildName))
+ C.duckdb_destroy_logical_type(&childLogicalType)
+ }
+ return name + ")"
+}
+
+func logicalTypeNameMap(lt C.duckdb_logical_type) string {
+ // Key logical type
+ klt := C.duckdb_map_type_key_type(lt)
+ defer C.duckdb_destroy_logical_type(&klt)
+
+ // Value logical type
+ vlt := C.duckdb_map_type_value_type(lt)
+ defer C.duckdb_destroy_logical_type(&vlt)
+
+ return fmt.Sprintf("MAP(%s, %s)", logicalTypeName(klt), logicalTypeName(vlt))
+}
+
+// DuckDB escapes struct field names by doubling double quotes, then wrapping in double quotes.
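+// For example, a field named my"field becomes "my""field".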
+func escapeStructFieldName(s string) string {
+ return `"` + strings.ReplaceAll(s, `"`, `""`) + `"`
+}
diff --git a/vendor/github.com/marcboeker/go-duckdb/statement.go b/vendor/github.com/marcboeker/go-duckdb/statement.go
new file mode 100644
index 0000000..ad777ec
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/statement.go
@@ -0,0 +1,244 @@
+package duckdb
+
+/*
+#include <stdlib.h>
+#include <duckdb.h>
+*/
+import "C"
+
+import (
+ "context"
+ "database/sql/driver"
+ "errors"
+ "fmt"
+ "math/big"
+ "time"
+ "unsafe"
+)
+
+type stmt struct {
+ c *conn
+ stmt *C.duckdb_prepared_statement
+ closeOnRowsClose bool
+ closed bool
+ rows bool
+}
+
+func (s *stmt) Close() error {
+ if s.rows {
+ panic("database/sql/driver: misuse of duckdb driver: Close with active Rows")
+ }
+ if s.closed {
+ panic("database/sql/driver: misuse of duckdb driver: double Close of Stmt")
+ }
+
+ s.closed = true
+ C.duckdb_destroy_prepare(s.stmt)
+ return nil
+}
+
+func (s *stmt) NumInput() int {
+ if s.closed {
+ panic("database/sql/driver: misuse of duckdb driver: NumInput after Close")
+ }
+ paramCount := C.duckdb_nparams(*s.stmt)
+ return int(paramCount)
+}
+
+func (s *stmt) start(args []driver.NamedValue) error {
+ if s.NumInput() != len(args) {
+ return fmt.Errorf("incorrect argument count for command: have %d want %d", len(args), s.NumInput())
+ }
+
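+	// DuckDB prepared-statement parameter indexes are 1-based, hence i+1 below.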
+ for i, v := range args {
+ switch v := v.Value.(type) {
+ case bool:
+ if rv := C.duckdb_bind_boolean(*s.stmt, C.idx_t(i+1), C.bool(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case int8:
+ if rv := C.duckdb_bind_int8(*s.stmt, C.idx_t(i+1), C.int8_t(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case int16:
+ if rv := C.duckdb_bind_int16(*s.stmt, C.idx_t(i+1), C.int16_t(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case int32:
+ if rv := C.duckdb_bind_int32(*s.stmt, C.idx_t(i+1), C.int32_t(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case int64:
+ if rv := C.duckdb_bind_int64(*s.stmt, C.idx_t(i+1), C.int64_t(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case int:
+ if rv := C.duckdb_bind_int64(*s.stmt, C.idx_t(i+1), C.int64_t(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case *big.Int:
+ val, err := hugeIntFromNative(v)
+ if err != nil {
+ return err
+ }
+ if rv := C.duckdb_bind_hugeint(*s.stmt, C.idx_t(i+1), val); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case uint8:
+ if rv := C.duckdb_bind_uint8(*s.stmt, C.idx_t(i+1), C.uchar(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case uint16:
+ if rv := C.duckdb_bind_uint16(*s.stmt, C.idx_t(i+1), C.uint16_t(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case uint32:
+ if rv := C.duckdb_bind_uint32(*s.stmt, C.idx_t(i+1), C.uint32_t(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case uint64:
+ if rv := C.duckdb_bind_uint64(*s.stmt, C.idx_t(i+1), C.uint64_t(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case float32:
+ if rv := C.duckdb_bind_float(*s.stmt, C.idx_t(i+1), C.float(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case float64:
+ if rv := C.duckdb_bind_double(*s.stmt, C.idx_t(i+1), C.double(v)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case string:
+ val := C.CString(v)
+ if rv := C.duckdb_bind_varchar(*s.stmt, C.idx_t(i+1), val); rv == C.DuckDBError {
+ C.free(unsafe.Pointer(val))
+ return errCouldNotBind
+ }
+ C.free(unsafe.Pointer(val))
+ case []byte:
+ val := C.CBytes(v)
+ l := len(v)
+ if rv := C.duckdb_bind_blob(*s.stmt, C.idx_t(i+1), val, C.uint64_t(l)); rv == C.DuckDBError {
+ C.free(unsafe.Pointer(val))
+ return errCouldNotBind
+ }
+ C.free(unsafe.Pointer(val))
+ case time.Time:
+ val := C.duckdb_timestamp{
+ micros: C.int64_t(v.UTC().UnixMicro()),
+ }
+ if rv := C.duckdb_bind_timestamp(*s.stmt, C.idx_t(i+1), val); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case Interval:
+ val := C.duckdb_interval{
+ months: C.int32_t(v.Months),
+ days: C.int32_t(v.Days),
+ micros: C.int64_t(v.Micros),
+ }
+ if rv := C.duckdb_bind_interval(*s.stmt, C.idx_t(i+1), val); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ case nil:
+ if rv := C.duckdb_bind_null(*s.stmt, C.idx_t(i+1)); rv == C.DuckDBError {
+ return errCouldNotBind
+ }
+ default:
+ return driver.ErrSkip
+ }
+ }
+
+ return nil
+}
+
+// Deprecated: Use ExecContext instead.
+func (s *stmt) Exec(args []driver.Value) (driver.Result, error) {
+ return s.ExecContext(context.Background(), argsToNamedArgs(args))
+}
+
+func (s *stmt) ExecContext(ctx context.Context, nargs []driver.NamedValue) (driver.Result, error) {
+ res, err := s.execute(ctx, nargs)
+ if err != nil {
+ return nil, err
+ }
+ defer C.duckdb_destroy_result(res)
+
+ ra := int64(C.duckdb_value_int64(res, 0, 0))
+ return &result{ra}, nil
+}
+
+// Deprecated: Use QueryContext instead.
+func (s *stmt) Query(args []driver.Value) (driver.Rows, error) {
+ return s.QueryContext(context.Background(), argsToNamedArgs(args))
+}
+
+func (s *stmt) QueryContext(ctx context.Context, nargs []driver.NamedValue) (driver.Rows, error) {
+ res, err := s.execute(ctx, nargs)
+ if err != nil {
+ return nil, err
+ }
+ s.rows = true
+ return newRowsWithStmt(*res, s), nil
+}
+
+// execute runs the query in steps, checking whether the context has been cancelled before each step.
+// It uses the pending-result C APIs to achieve this. Reference: https://duckdb.org/docs/api/c/api#pending-result-interface
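+//
+// A minimal usage sketch (assuming a database/sql DB backed by this driver):
+//
+//	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
+//	defer cancel()
+//	rows, err := db.QueryContext(ctx, "SELECT 42")
+//	// if the deadline expires between execution steps, err is ctx.Err()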
+func (s *stmt) execute(ctx context.Context, args []driver.NamedValue) (*C.duckdb_result, error) {
+ if s.closed {
+ panic("database/sql/driver: misuse of duckdb driver: ExecContext or QueryContext after Close")
+ }
+ if s.rows {
+ panic("database/sql/driver: misuse of duckdb driver: ExecContext or QueryContext with active Rows")
+ }
+
+ err := s.start(args)
+ if err != nil {
+ return nil, err
+ }
+
+ var pendingRes C.duckdb_pending_result
+ if state := C.duckdb_pending_prepared(*s.stmt, &pendingRes); state == C.DuckDBError {
+ dbErr := C.GoString(C.duckdb_pending_error(pendingRes))
+ C.duckdb_destroy_pending(&pendingRes)
+ return nil, errors.New(dbErr)
+ }
+ defer C.duckdb_destroy_pending(&pendingRes)
+
+ for {
+ select {
+ // if context is cancelled or deadline exceeded, don't execute further
+ case <-ctx.Done():
+ return nil, ctx.Err()
+ default:
+ // continue
+ }
+ state := C.duckdb_pending_execute_task(pendingRes)
+ if state == C.DUCKDB_PENDING_ERROR {
+ dbErr := C.GoString(C.duckdb_pending_error(pendingRes))
+ return nil, errors.New(dbErr)
+ }
+ if C.duckdb_pending_execution_is_finished(state) {
+ break
+ }
+ }
+
+ var res C.duckdb_result
+ if state := C.duckdb_execute_pending(pendingRes, &res); state == C.DuckDBError {
+ dbErr := C.GoString(C.duckdb_result_error(&res))
+ C.duckdb_destroy_result(&res)
+ return nil, errors.New(dbErr)
+ }
+ return &res, nil
+}
+
+func argsToNamedArgs(values []driver.Value) []driver.NamedValue {
+ args := make([]driver.NamedValue, len(values))
+ for n, param := range values {
+ args[n].Value = param
+ args[n].Ordinal = n + 1
+ }
+ return args
+}
+
+var (
+ errCouldNotBind = errors.New("could not bind parameter")
+)
diff --git a/vendor/github.com/marcboeker/go-duckdb/transaction.go b/vendor/github.com/marcboeker/go-duckdb/transaction.go
new file mode 100644
index 0000000..2262f8c
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/transaction.go
@@ -0,0 +1,31 @@
+package duckdb
+
+import "context"
+
+type tx struct {
+ c *conn
+}
+
+func (t *tx) Commit() error {
+ if t.c == nil || !t.c.tx {
+ panic("database/sql/driver: misuse of duckdb driver: extra Commit")
+ }
+
+ t.c.tx = false
+ _, err := t.c.ExecContext(context.Background(), "COMMIT TRANSACTION", nil)
+ t.c = nil
+
+ return err
+}
+
+func (t *tx) Rollback() error {
+ if t.c == nil || !t.c.tx {
+ panic("database/sql/driver: misuse of duckdb driver: extra Rollback")
+ }
+
+ t.c.tx = false
+ _, err := t.c.ExecContext(context.Background(), "ROLLBACK", nil)
+ t.c = nil
+
+ return err
+}
diff --git a/vendor/github.com/marcboeker/go-duckdb/types.go b/vendor/github.com/marcboeker/go-duckdb/types.go
new file mode 100644
index 0000000..b6fc79f
--- /dev/null
+++ b/vendor/github.com/marcboeker/go-duckdb/types.go
@@ -0,0 +1,96 @@
+package duckdb
+
+/*
+#include <duckdb.h>
+*/
+import "C"
+
+import (
+ "encoding/binary"
+ "fmt"
+ "math/big"
+
+ "github.com/mitchellh/mapstructure"
+)
+
+// duckdb_hugeint is composed of (lower, upper) components.
+// The value is computed as: upper * 2^64 + lower
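+// For example, {upper: 1, lower: 5} represents 2^64 + 5, and
+// {upper: -1, lower: 2^64 - 1} represents -1.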
+
+func hugeIntToUUID(hi C.duckdb_hugeint) []byte {
+ var uuid [16]byte
+ // We need to flip the sign bit of the signed hugeint to transform it to UUID bytes
+ binary.BigEndian.PutUint64(uuid[:8], uint64(hi.upper)^1<<63)
+ binary.BigEndian.PutUint64(uuid[8:], uint64(hi.lower))
+ return uuid[:]
+}
+
+func hugeIntToNative(hi C.duckdb_hugeint) *big.Int {
+ i := big.NewInt(int64(hi.upper))
+ i.Lsh(i, 64)
+ i.Add(i, new(big.Int).SetUint64(uint64(hi.lower)))
+ return i
+}
+
+func hugeIntFromNative(i *big.Int) (C.duckdb_hugeint, error) {
+ d := big.NewInt(1)
+ d.Lsh(d, 64)
+
+ q := new(big.Int)
+ r := new(big.Int)
+ q.DivMod(i, d, r)
+
+ if !q.IsInt64() {
+ return C.duckdb_hugeint{}, fmt.Errorf("big.Int(%s) is too big for HUGEINT", i.String())
+ }
+
+ return C.duckdb_hugeint{
+ lower: C.uint64_t(r.Uint64()),
+ upper: C.int64_t(q.Int64()),
+ }, nil
+}
+
+type Map map[any]any
+
+func (m *Map) Scan(v any) error {
+ data, ok := v.(Map)
+ if !ok {
+		// report the type of the value we received, not the zero-value Map
+		return fmt.Errorf("invalid type `%T` for scanning `Map`, expected `Map`", v)
+ }
+
+ *m = data
+ return nil
+}
+
+type Interval struct {
+ Days int32 `json:"days"`
+ Months int32 `json:"months"`
+ Micros int64 `json:"micros"`
+}
+
+// Use as the `Scanner` type for any composite types (maps, lists, structs)
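+//
+// A minimal sketch (assuming a LIST column scanned through database/sql):
+//
+//	var list Composite[[]any]
+//	err := row.Scan(&list)
+//	elems := list.Get()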
+type Composite[T any] struct {
+ t T
+}
+
+func (s Composite[T]) Get() T {
+ return s.t
+}
+
+func (s *Composite[T]) Scan(v any) error {
+ return mapstructure.Decode(v, &s.t)
+}
+
+type Decimal struct {
+ Width uint8
+ Scale uint8
+ Value *big.Int
+}
+
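+// Float64 returns the decimal as a float64, i.e. Value / 10^Scale; e.g. Value=12345 with Scale=2 yields 123.45.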
+func (d *Decimal) Float64() float64 {
+ scale := big.NewInt(int64(d.Scale))
+ factor := new(big.Float).SetInt(new(big.Int).Exp(big.NewInt(10), scale, nil))
+ value := new(big.Float).SetInt(d.Value)
+ value.Quo(value, factor)
+ f, _ := value.Float64()
+ return f
+}
diff --git a/vendor/github.com/mattn/go-colorable/LICENSE b/vendor/github.com/mattn/go-colorable/LICENSE
new file mode 100644
index 0000000..91b5cef
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Yasuhiro Matsumoto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/mattn/go-colorable/README.md b/vendor/github.com/mattn/go-colorable/README.md
new file mode 100644
index 0000000..ca04837
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/README.md
@@ -0,0 +1,48 @@
+# go-colorable
+
+[![Build Status](https://github.com/mattn/go-colorable/workflows/test/badge.svg)](https://github.com/mattn/go-colorable/actions?query=workflow%3Atest)
+[![Codecov](https://codecov.io/gh/mattn/go-colorable/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-colorable)
+[![GoDoc](https://godoc.org/github.com/mattn/go-colorable?status.svg)](http://godoc.org/github.com/mattn/go-colorable)
+[![Go Report Card](https://goreportcard.com/badge/mattn/go-colorable)](https://goreportcard.com/report/mattn/go-colorable)
+
+Colorable writer for Windows.
+
+For example, most logger packages don't show colors on Windows. (I know we can do it with ansicon, but I don't want to.)
+This package makes it possible to handle ANSI color escape sequences on Windows.
+
+## Too Bad!
+
+![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/bad.png)
+
+
+## So Good!
+
+![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/good.png)
+
+## Usage
+
+```go
+logrus.SetFormatter(&logrus.TextFormatter{ForceColors: true})
+logrus.SetOutput(colorable.NewColorableStdout())
+
+logrus.Info("succeeded")
+logrus.Warn("not correct")
+logrus.Error("something error")
+logrus.Fatal("panic")
+```
+
+You can compile the above code on non-Windows OSes.
+
+## Installation
+
+```
+$ go get github.com/mattn/go-colorable
+```
+
+# License
+
+MIT
+
+# Author
+
+Yasuhiro Matsumoto (a.k.a mattn)
diff --git a/vendor/github.com/mattn/go-colorable/colorable_appengine.go b/vendor/github.com/mattn/go-colorable/colorable_appengine.go
new file mode 100644
index 0000000..416d1bb
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/colorable_appengine.go
@@ -0,0 +1,38 @@
+//go:build appengine
+// +build appengine
+
+package colorable
+
+import (
+ "io"
+ "os"
+
+ _ "github.com/mattn/go-isatty"
+)
+
+// NewColorable returns a new instance of Writer which handles escape sequences.
+func NewColorable(file *os.File) io.Writer {
+ if file == nil {
+ panic("nil passed instead of *os.File to NewColorable()")
+ }
+
+ return file
+}
+
+// NewColorableStdout returns a new instance of Writer which handles escape sequences for stdout.
+func NewColorableStdout() io.Writer {
+ return os.Stdout
+}
+
+// NewColorableStderr returns a new instance of Writer which handles escape sequences for stderr.
+func NewColorableStderr() io.Writer {
+ return os.Stderr
+}
+
+// EnableColorsStdout enables colors if possible.
+func EnableColorsStdout(enabled *bool) func() {
+ if enabled != nil {
+ *enabled = true
+ }
+ return func() {}
+}
diff --git a/vendor/github.com/mattn/go-colorable/colorable_others.go b/vendor/github.com/mattn/go-colorable/colorable_others.go
new file mode 100644
index 0000000..766d946
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/colorable_others.go
@@ -0,0 +1,38 @@
+//go:build !windows && !appengine
+// +build !windows,!appengine
+
+package colorable
+
+import (
+ "io"
+ "os"
+
+ _ "github.com/mattn/go-isatty"
+)
+
+// NewColorable returns a new instance of Writer which handles escape sequences.
+func NewColorable(file *os.File) io.Writer {
+ if file == nil {
+ panic("nil passed instead of *os.File to NewColorable()")
+ }
+
+ return file
+}
+
+// NewColorableStdout returns a new instance of Writer which handles escape sequences for stdout.
+func NewColorableStdout() io.Writer {
+ return os.Stdout
+}
+
+// NewColorableStderr returns a new instance of Writer which handles escape sequences for stderr.
+func NewColorableStderr() io.Writer {
+ return os.Stderr
+}
+
+// EnableColorsStdout enables colors if possible.
+func EnableColorsStdout(enabled *bool) func() {
+ if enabled != nil {
+ *enabled = true
+ }
+ return func() {}
+}
diff --git a/vendor/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/mattn/go-colorable/colorable_windows.go
new file mode 100644
index 0000000..1846ad5
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/colorable_windows.go
@@ -0,0 +1,1047 @@
+//go:build windows && !appengine
+// +build windows,!appengine
+
+package colorable
+
+import (
+ "bytes"
+ "io"
+ "math"
+ "os"
+ "strconv"
+ "strings"
+ "sync"
+ "syscall"
+ "unsafe"
+
+ "github.com/mattn/go-isatty"
+)
+
+const (
+ foregroundBlue = 0x1
+ foregroundGreen = 0x2
+ foregroundRed = 0x4
+ foregroundIntensity = 0x8
+ foregroundMask = (foregroundRed | foregroundBlue | foregroundGreen | foregroundIntensity)
+ backgroundBlue = 0x10
+ backgroundGreen = 0x20
+ backgroundRed = 0x40
+ backgroundIntensity = 0x80
+ backgroundMask = (backgroundRed | backgroundBlue | backgroundGreen | backgroundIntensity)
+ commonLvbUnderscore = 0x8000
+
+ cENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4
+)
+
+const (
+ genericRead = 0x80000000
+ genericWrite = 0x40000000
+)
+
+const (
+ consoleTextmodeBuffer = 0x1
+)
+
+type wchar uint16
+type short int16
+type dword uint32
+type word uint16
+
+type coord struct {
+ x short
+ y short
+}
+
+type smallRect struct {
+ left short
+ top short
+ right short
+ bottom short
+}
+
+type consoleScreenBufferInfo struct {
+ size coord
+ cursorPosition coord
+ attributes word
+ window smallRect
+ maximumWindowSize coord
+}
+
+type consoleCursorInfo struct {
+ size dword
+ visible int32
+}
+
+var (
+ kernel32 = syscall.NewLazyDLL("kernel32.dll")
+ procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
+ procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
+ procSetConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition")
+ procFillConsoleOutputCharacter = kernel32.NewProc("FillConsoleOutputCharacterW")
+ procFillConsoleOutputAttribute = kernel32.NewProc("FillConsoleOutputAttribute")
+ procGetConsoleCursorInfo = kernel32.NewProc("GetConsoleCursorInfo")
+ procSetConsoleCursorInfo = kernel32.NewProc("SetConsoleCursorInfo")
+ procSetConsoleTitle = kernel32.NewProc("SetConsoleTitleW")
+ procGetConsoleMode = kernel32.NewProc("GetConsoleMode")
+ procSetConsoleMode = kernel32.NewProc("SetConsoleMode")
+ procCreateConsoleScreenBuffer = kernel32.NewProc("CreateConsoleScreenBuffer")
+)
+
+// Writer provides a colorable Writer for the console.
+type Writer struct {
+ out io.Writer
+ handle syscall.Handle
+ althandle syscall.Handle
+ oldattr word
+ oldpos coord
+ rest bytes.Buffer
+ mutex sync.Mutex
+}
+
+// NewColorable returns a new instance of Writer which handles escape sequences from the File.
+func NewColorable(file *os.File) io.Writer {
+ if file == nil {
+ panic("nil passed instead of *os.File to NewColorable()")
+ }
+
+ if isatty.IsTerminal(file.Fd()) {
+ var mode uint32
+ if r, _, _ := procGetConsoleMode.Call(file.Fd(), uintptr(unsafe.Pointer(&mode))); r != 0 && mode&cENABLE_VIRTUAL_TERMINAL_PROCESSING != 0 {
+ return file
+ }
+ var csbi consoleScreenBufferInfo
+ handle := syscall.Handle(file.Fd())
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ return &Writer{out: file, handle: handle, oldattr: csbi.attributes, oldpos: coord{0, 0}}
+ }
+ return file
+}
+
+// NewColorableStdout returns a new instance of Writer which handles escape sequences for stdout.
+func NewColorableStdout() io.Writer {
+ return NewColorable(os.Stdout)
+}
+
+// NewColorableStderr returns a new instance of Writer which handles escape sequences for stderr.
+func NewColorableStderr() io.Writer {
+ return NewColorable(os.Stderr)
+}
+
+var color256 = map[int]int{
+ 0: 0x000000,
+ 1: 0x800000,
+ 2: 0x008000,
+ 3: 0x808000,
+ 4: 0x000080,
+ 5: 0x800080,
+ 6: 0x008080,
+ 7: 0xc0c0c0,
+ 8: 0x808080,
+ 9: 0xff0000,
+ 10: 0x00ff00,
+ 11: 0xffff00,
+ 12: 0x0000ff,
+ 13: 0xff00ff,
+ 14: 0x00ffff,
+ 15: 0xffffff,
+ 16: 0x000000,
+ 17: 0x00005f,
+ 18: 0x000087,
+ 19: 0x0000af,
+ 20: 0x0000d7,
+ 21: 0x0000ff,
+ 22: 0x005f00,
+ 23: 0x005f5f,
+ 24: 0x005f87,
+ 25: 0x005faf,
+ 26: 0x005fd7,
+ 27: 0x005fff,
+ 28: 0x008700,
+ 29: 0x00875f,
+ 30: 0x008787,
+ 31: 0x0087af,
+ 32: 0x0087d7,
+ 33: 0x0087ff,
+ 34: 0x00af00,
+ 35: 0x00af5f,
+ 36: 0x00af87,
+ 37: 0x00afaf,
+ 38: 0x00afd7,
+ 39: 0x00afff,
+ 40: 0x00d700,
+ 41: 0x00d75f,
+ 42: 0x00d787,
+ 43: 0x00d7af,
+ 44: 0x00d7d7,
+ 45: 0x00d7ff,
+ 46: 0x00ff00,
+ 47: 0x00ff5f,
+ 48: 0x00ff87,
+ 49: 0x00ffaf,
+ 50: 0x00ffd7,
+ 51: 0x00ffff,
+ 52: 0x5f0000,
+ 53: 0x5f005f,
+ 54: 0x5f0087,
+ 55: 0x5f00af,
+ 56: 0x5f00d7,
+ 57: 0x5f00ff,
+ 58: 0x5f5f00,
+ 59: 0x5f5f5f,
+ 60: 0x5f5f87,
+ 61: 0x5f5faf,
+ 62: 0x5f5fd7,
+ 63: 0x5f5fff,
+ 64: 0x5f8700,
+ 65: 0x5f875f,
+ 66: 0x5f8787,
+ 67: 0x5f87af,
+ 68: 0x5f87d7,
+ 69: 0x5f87ff,
+ 70: 0x5faf00,
+ 71: 0x5faf5f,
+ 72: 0x5faf87,
+ 73: 0x5fafaf,
+ 74: 0x5fafd7,
+ 75: 0x5fafff,
+ 76: 0x5fd700,
+ 77: 0x5fd75f,
+ 78: 0x5fd787,
+ 79: 0x5fd7af,
+ 80: 0x5fd7d7,
+ 81: 0x5fd7ff,
+ 82: 0x5fff00,
+ 83: 0x5fff5f,
+ 84: 0x5fff87,
+ 85: 0x5fffaf,
+ 86: 0x5fffd7,
+ 87: 0x5fffff,
+ 88: 0x870000,
+ 89: 0x87005f,
+ 90: 0x870087,
+ 91: 0x8700af,
+ 92: 0x8700d7,
+ 93: 0x8700ff,
+ 94: 0x875f00,
+ 95: 0x875f5f,
+ 96: 0x875f87,
+ 97: 0x875faf,
+ 98: 0x875fd7,
+ 99: 0x875fff,
+ 100: 0x878700,
+ 101: 0x87875f,
+ 102: 0x878787,
+ 103: 0x8787af,
+ 104: 0x8787d7,
+ 105: 0x8787ff,
+ 106: 0x87af00,
+ 107: 0x87af5f,
+ 108: 0x87af87,
+ 109: 0x87afaf,
+ 110: 0x87afd7,
+ 111: 0x87afff,
+ 112: 0x87d700,
+ 113: 0x87d75f,
+ 114: 0x87d787,
+ 115: 0x87d7af,
+ 116: 0x87d7d7,
+ 117: 0x87d7ff,
+ 118: 0x87ff00,
+ 119: 0x87ff5f,
+ 120: 0x87ff87,
+ 121: 0x87ffaf,
+ 122: 0x87ffd7,
+ 123: 0x87ffff,
+ 124: 0xaf0000,
+ 125: 0xaf005f,
+ 126: 0xaf0087,
+ 127: 0xaf00af,
+ 128: 0xaf00d7,
+ 129: 0xaf00ff,
+ 130: 0xaf5f00,
+ 131: 0xaf5f5f,
+ 132: 0xaf5f87,
+ 133: 0xaf5faf,
+ 134: 0xaf5fd7,
+ 135: 0xaf5fff,
+ 136: 0xaf8700,
+ 137: 0xaf875f,
+ 138: 0xaf8787,
+ 139: 0xaf87af,
+ 140: 0xaf87d7,
+ 141: 0xaf87ff,
+ 142: 0xafaf00,
+ 143: 0xafaf5f,
+ 144: 0xafaf87,
+ 145: 0xafafaf,
+ 146: 0xafafd7,
+ 147: 0xafafff,
+ 148: 0xafd700,
+ 149: 0xafd75f,
+ 150: 0xafd787,
+ 151: 0xafd7af,
+ 152: 0xafd7d7,
+ 153: 0xafd7ff,
+ 154: 0xafff00,
+ 155: 0xafff5f,
+ 156: 0xafff87,
+ 157: 0xafffaf,
+ 158: 0xafffd7,
+ 159: 0xafffff,
+ 160: 0xd70000,
+ 161: 0xd7005f,
+ 162: 0xd70087,
+ 163: 0xd700af,
+ 164: 0xd700d7,
+ 165: 0xd700ff,
+ 166: 0xd75f00,
+ 167: 0xd75f5f,
+ 168: 0xd75f87,
+ 169: 0xd75faf,
+ 170: 0xd75fd7,
+ 171: 0xd75fff,
+ 172: 0xd78700,
+ 173: 0xd7875f,
+ 174: 0xd78787,
+ 175: 0xd787af,
+ 176: 0xd787d7,
+ 177: 0xd787ff,
+ 178: 0xd7af00,
+ 179: 0xd7af5f,
+ 180: 0xd7af87,
+ 181: 0xd7afaf,
+ 182: 0xd7afd7,
+ 183: 0xd7afff,
+ 184: 0xd7d700,
+ 185: 0xd7d75f,
+ 186: 0xd7d787,
+ 187: 0xd7d7af,
+ 188: 0xd7d7d7,
+ 189: 0xd7d7ff,
+ 190: 0xd7ff00,
+ 191: 0xd7ff5f,
+ 192: 0xd7ff87,
+ 193: 0xd7ffaf,
+ 194: 0xd7ffd7,
+ 195: 0xd7ffff,
+ 196: 0xff0000,
+ 197: 0xff005f,
+ 198: 0xff0087,
+ 199: 0xff00af,
+ 200: 0xff00d7,
+ 201: 0xff00ff,
+ 202: 0xff5f00,
+ 203: 0xff5f5f,
+ 204: 0xff5f87,
+ 205: 0xff5faf,
+ 206: 0xff5fd7,
+ 207: 0xff5fff,
+ 208: 0xff8700,
+ 209: 0xff875f,
+ 210: 0xff8787,
+ 211: 0xff87af,
+ 212: 0xff87d7,
+ 213: 0xff87ff,
+ 214: 0xffaf00,
+ 215: 0xffaf5f,
+ 216: 0xffaf87,
+ 217: 0xffafaf,
+ 218: 0xffafd7,
+ 219: 0xffafff,
+ 220: 0xffd700,
+ 221: 0xffd75f,
+ 222: 0xffd787,
+ 223: 0xffd7af,
+ 224: 0xffd7d7,
+ 225: 0xffd7ff,
+ 226: 0xffff00,
+ 227: 0xffff5f,
+ 228: 0xffff87,
+ 229: 0xffffaf,
+ 230: 0xffffd7,
+ 231: 0xffffff,
+ 232: 0x080808,
+ 233: 0x121212,
+ 234: 0x1c1c1c,
+ 235: 0x262626,
+ 236: 0x303030,
+ 237: 0x3a3a3a,
+ 238: 0x444444,
+ 239: 0x4e4e4e,
+ 240: 0x585858,
+ 241: 0x626262,
+ 242: 0x6c6c6c,
+ 243: 0x767676,
+ 244: 0x808080,
+ 245: 0x8a8a8a,
+ 246: 0x949494,
+ 247: 0x9e9e9e,
+ 248: 0xa8a8a8,
+ 249: 0xb2b2b2,
+ 250: 0xbcbcbc,
+ 251: 0xc6c6c6,
+ 252: 0xd0d0d0,
+ 253: 0xdadada,
+ 254: 0xe4e4e4,
+ 255: 0xeeeeee,
+}
+
+// `\033]0;TITLESTR\007`
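+// e.g. writing "\033]0;My Title\007" sets the console title to "My Title".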
+func doTitleSequence(er *bytes.Reader) error {
+ var c byte
+ var err error
+
+ c, err = er.ReadByte()
+ if err != nil {
+ return err
+ }
+ if c != '0' && c != '2' {
+ return nil
+ }
+ c, err = er.ReadByte()
+ if err != nil {
+ return err
+ }
+ if c != ';' {
+ return nil
+ }
+ title := make([]byte, 0, 80)
+ for {
+ c, err = er.ReadByte()
+ if err != nil {
+ return err
+ }
+ if c == 0x07 || c == '\n' {
+ break
+ }
+ title = append(title, c)
+ }
+ if len(title) > 0 {
+ title8, err := syscall.UTF16PtrFromString(string(title))
+ if err == nil {
+ procSetConsoleTitle.Call(uintptr(unsafe.Pointer(title8)))
+ }
+ }
+ return nil
+}
+
+// atoiWithDefault returns Atoi(s) unless s == "", in which case it returns def.
+func atoiWithDefault(s string, def int) (int, error) {
+ if s == "" {
+ return def, nil
+ }
+ return strconv.Atoi(s)
+}
+
+// Write writes data to the console.
+func (w *Writer) Write(data []byte) (n int, err error) {
+ w.mutex.Lock()
+ defer w.mutex.Unlock()
+ var csbi consoleScreenBufferInfo
+ procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
+
+ handle := w.handle
+
+ var er *bytes.Reader
+ if w.rest.Len() > 0 {
+ var rest bytes.Buffer
+ w.rest.WriteTo(&rest)
+ w.rest.Reset()
+ rest.Write(data)
+ er = bytes.NewReader(rest.Bytes())
+ } else {
+ er = bytes.NewReader(data)
+ }
+ var plaintext bytes.Buffer
+loop:
+ for {
+ c1, err := er.ReadByte()
+ if err != nil {
+ plaintext.WriteTo(w.out)
+ break loop
+ }
+ if c1 != 0x1b {
+ plaintext.WriteByte(c1)
+ continue
+ }
+ _, err = plaintext.WriteTo(w.out)
+ if err != nil {
+ break loop
+ }
+ c2, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+
+ switch c2 {
+ case '>':
+ continue
+ case ']':
+ w.rest.WriteByte(c1)
+ w.rest.WriteByte(c2)
+ er.WriteTo(&w.rest)
+ if bytes.IndexByte(w.rest.Bytes(), 0x07) == -1 {
+ break loop
+ }
+ er = bytes.NewReader(w.rest.Bytes()[2:])
+ err := doTitleSequence(er)
+ if err != nil {
+ break loop
+ }
+ w.rest.Reset()
+ continue
+ // https://github.com/mattn/go-colorable/issues/27
+ case '7':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ w.oldpos = csbi.cursorPosition
+ continue
+ case '8':
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos)))
+ continue
+ case 0x5b:
+ // execute part after switch
+ default:
+ continue
+ }
+
+ w.rest.WriteByte(c1)
+ w.rest.WriteByte(c2)
+ er.WriteTo(&w.rest)
+
+ var buf bytes.Buffer
+ var m byte
+ for i, c := range w.rest.Bytes()[2:] {
+ if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' {
+ m = c
+ er = bytes.NewReader(w.rest.Bytes()[2+i+1:])
+ w.rest.Reset()
+ break
+ }
+ buf.Write([]byte(string(c)))
+ }
+ if m == 0 {
+ break loop
+ }
+
+ switch m {
+ case 'A':
+ n, err = atoiWithDefault(buf.String(), 1)
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.y -= short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'B':
+ n, err = atoiWithDefault(buf.String(), 1)
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.y += short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'C':
+ n, err = atoiWithDefault(buf.String(), 1)
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x += short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'D':
+ n, err = atoiWithDefault(buf.String(), 1)
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x -= short(n)
+ if csbi.cursorPosition.x < 0 {
+ csbi.cursorPosition.x = 0
+ }
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'E':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x = 0
+ csbi.cursorPosition.y += short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'F':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x = 0
+ csbi.cursorPosition.y -= short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'G':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ if n < 1 {
+ n = 1
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x = short(n - 1)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'H', 'f':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ if buf.Len() > 0 {
+ token := strings.Split(buf.String(), ";")
+ switch len(token) {
+ case 1:
+ n1, err := strconv.Atoi(token[0])
+ if err != nil {
+ continue
+ }
+ csbi.cursorPosition.y = short(n1 - 1)
+ case 2:
+ n1, err := strconv.Atoi(token[0])
+ if err != nil {
+ continue
+ }
+ n2, err := strconv.Atoi(token[1])
+ if err != nil {
+ continue
+ }
+ csbi.cursorPosition.x = short(n2 - 1)
+ csbi.cursorPosition.y = short(n1 - 1)
+ }
+ } else {
+ csbi.cursorPosition.y = 0
+ }
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'J':
+ n := 0
+ if buf.Len() > 0 {
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ }
+ var count, written dword
+ var cursor coord
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ switch n {
+ case 0:
+ cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x)
+ case 1:
+ cursor = coord{x: csbi.window.left, y: csbi.window.top}
+ count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.window.top-csbi.cursorPosition.y)*dword(csbi.size.x)
+ case 2:
+ cursor = coord{x: csbi.window.left, y: csbi.window.top}
+ count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x)
+ }
+ procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ case 'K':
+ n := 0
+ if buf.Len() > 0 {
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ var cursor coord
+ var count, written dword
+ switch n {
+ case 0:
+ cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x - csbi.cursorPosition.x)
+ case 1:
+ cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x - csbi.cursorPosition.x)
+ case 2:
+ cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x)
+ }
+ procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ case 'X':
+ n := 0
+ if buf.Len() > 0 {
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ var cursor coord
+ var written dword
+ cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y}
+ procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(n), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(n), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ case 'm':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ attr := csbi.attributes
+ cs := buf.String()
+ if cs == "" {
+ procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(w.oldattr))
+ continue
+ }
+ token := strings.Split(cs, ";")
+ for i := 0; i < len(token); i++ {
+ ns := token[i]
+ if n, err = strconv.Atoi(ns); err == nil {
+ switch {
+ case n == 0 || n == 100:
+ attr = w.oldattr
+ case n == 4:
+ attr |= commonLvbUnderscore
+ case (1 <= n && n <= 3) || n == 5:
+ attr |= foregroundIntensity
+ case n == 7 || n == 27:
+ attr =
+ (attr &^ (foregroundMask | backgroundMask)) |
+ ((attr & foregroundMask) << 4) |
+ ((attr & backgroundMask) >> 4)
+ case n == 22:
+ attr &^= foregroundIntensity
+ case n == 24:
+ attr &^= commonLvbUnderscore
+ case 30 <= n && n <= 37:
+ attr &= backgroundMask
+ if (n-30)&1 != 0 {
+ attr |= foregroundRed
+ }
+ if (n-30)&2 != 0 {
+ attr |= foregroundGreen
+ }
+ if (n-30)&4 != 0 {
+ attr |= foregroundBlue
+ }
+ case n == 38: // set foreground color.
+ if i < len(token)-2 && (token[i+1] == "5" || token[i+1] == "05") {
+ if n256, err := strconv.Atoi(token[i+2]); err == nil {
+ if n256foreAttr == nil {
+ n256setup()
+ }
+ attr &= backgroundMask
+ attr |= n256foreAttr[n256%len(n256foreAttr)]
+ i += 2
+ }
+ } else if len(token) == 5 && token[i+1] == "2" {
+ var r, g, b int
+ r, _ = strconv.Atoi(token[i+2])
+ g, _ = strconv.Atoi(token[i+3])
+ b, _ = strconv.Atoi(token[i+4])
+ i += 4
+ if r > 127 {
+ attr |= foregroundRed
+ }
+ if g > 127 {
+ attr |= foregroundGreen
+ }
+ if b > 127 {
+ attr |= foregroundBlue
+ }
+ } else {
+ attr = attr & (w.oldattr & backgroundMask)
+ }
+ case n == 39: // reset foreground color.
+ attr &= backgroundMask
+ attr |= w.oldattr & foregroundMask
+ case 40 <= n && n <= 47:
+ attr &= foregroundMask
+ if (n-40)&1 != 0 {
+ attr |= backgroundRed
+ }
+ if (n-40)&2 != 0 {
+ attr |= backgroundGreen
+ }
+ if (n-40)&4 != 0 {
+ attr |= backgroundBlue
+ }
+ case n == 48: // set background color.
+ if i < len(token)-2 && token[i+1] == "5" {
+ if n256, err := strconv.Atoi(token[i+2]); err == nil {
+ if n256backAttr == nil {
+ n256setup()
+ }
+ attr &= foregroundMask
+ attr |= n256backAttr[n256%len(n256backAttr)]
+ i += 2
+ }
+ } else if len(token) == 5 && token[i+1] == "2" {
+ var r, g, b int
+ r, _ = strconv.Atoi(token[i+2])
+ g, _ = strconv.Atoi(token[i+3])
+ b, _ = strconv.Atoi(token[i+4])
+ i += 4
+ if r > 127 {
+ attr |= backgroundRed
+ }
+ if g > 127 {
+ attr |= backgroundGreen
+ }
+ if b > 127 {
+ attr |= backgroundBlue
+ }
+ } else {
+ attr = attr & (w.oldattr & foregroundMask)
+ }
+				case n == 49: // reset background color.
+ attr &= foregroundMask
+ attr |= w.oldattr & backgroundMask
+ case 90 <= n && n <= 97:
+ attr = (attr & backgroundMask)
+ attr |= foregroundIntensity
+ if (n-90)&1 != 0 {
+ attr |= foregroundRed
+ }
+ if (n-90)&2 != 0 {
+ attr |= foregroundGreen
+ }
+ if (n-90)&4 != 0 {
+ attr |= foregroundBlue
+ }
+ case 100 <= n && n <= 107:
+ attr = (attr & foregroundMask)
+ attr |= backgroundIntensity
+ if (n-100)&1 != 0 {
+ attr |= backgroundRed
+ }
+ if (n-100)&2 != 0 {
+ attr |= backgroundGreen
+ }
+ if (n-100)&4 != 0 {
+ attr |= backgroundBlue
+ }
+ }
+ procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(attr))
+ }
+ }
+ case 'h':
+ var ci consoleCursorInfo
+ cs := buf.String()
+ if cs == "5>" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 0
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?25" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 1
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?1049" {
+ if w.althandle == 0 {
+ h, _, _ := procCreateConsoleScreenBuffer.Call(uintptr(genericRead|genericWrite), 0, 0, uintptr(consoleTextmodeBuffer), 0, 0)
+ w.althandle = syscall.Handle(h)
+ if w.althandle != 0 {
+ handle = w.althandle
+ }
+ }
+ }
+ case 'l':
+ var ci consoleCursorInfo
+ cs := buf.String()
+ if cs == "5>" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 1
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?25" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 0
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?1049" {
+ if w.althandle != 0 {
+ syscall.CloseHandle(w.althandle)
+ w.althandle = 0
+ handle = w.handle
+ }
+ }
+ case 's':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ w.oldpos = csbi.cursorPosition
+ case 'u':
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos)))
+ }
+ }
+
+ return len(data), nil
+}
+
+type consoleColor struct {
+ rgb int
+ red bool
+ green bool
+ blue bool
+ intensity bool
+}
+
+func (c consoleColor) foregroundAttr() (attr word) {
+ if c.red {
+ attr |= foregroundRed
+ }
+ if c.green {
+ attr |= foregroundGreen
+ }
+ if c.blue {
+ attr |= foregroundBlue
+ }
+ if c.intensity {
+ attr |= foregroundIntensity
+ }
+ return
+}
+
+func (c consoleColor) backgroundAttr() (attr word) {
+ if c.red {
+ attr |= backgroundRed
+ }
+ if c.green {
+ attr |= backgroundGreen
+ }
+ if c.blue {
+ attr |= backgroundBlue
+ }
+ if c.intensity {
+ attr |= backgroundIntensity
+ }
+ return
+}
+
+var color16 = []consoleColor{
+ {0x000000, false, false, false, false},
+ {0x000080, false, false, true, false},
+ {0x008000, false, true, false, false},
+ {0x008080, false, true, true, false},
+ {0x800000, true, false, false, false},
+ {0x800080, true, false, true, false},
+ {0x808000, true, true, false, false},
+ {0xc0c0c0, true, true, true, false},
+ {0x808080, false, false, false, true},
+ {0x0000ff, false, false, true, true},
+ {0x00ff00, false, true, false, true},
+ {0x00ffff, false, true, true, true},
+ {0xff0000, true, false, false, true},
+ {0xff00ff, true, false, true, true},
+ {0xffff00, true, true, false, true},
+ {0xffffff, true, true, true, true},
+}
+
+type hsv struct {
+ h, s, v float32
+}
+
+func (a hsv) dist(b hsv) float32 {
+ dh := a.h - b.h
+ switch {
+ case dh > 0.5:
+ dh = 1 - dh
+ case dh < -0.5:
+ dh = -1 - dh
+ }
+ ds := a.s - b.s
+ dv := a.v - b.v
+ return float32(math.Sqrt(float64(dh*dh + ds*ds + dv*dv)))
+}
+
+func toHSV(rgb int) hsv {
+ r, g, b := float32((rgb&0xFF0000)>>16)/256.0,
+ float32((rgb&0x00FF00)>>8)/256.0,
+ float32(rgb&0x0000FF)/256.0
+ min, max := minmax3f(r, g, b)
+ h := max - min
+ if h > 0 {
+ if max == r {
+ h = (g - b) / h
+ if h < 0 {
+ h += 6
+ }
+ } else if max == g {
+ h = 2 + (b-r)/h
+ } else {
+ h = 4 + (r-g)/h
+ }
+ }
+ h /= 6.0
+ s := max - min
+ if max != 0 {
+ s /= max
+ }
+ v := max
+ return hsv{h: h, s: s, v: v}
+}
+
+type hsvTable []hsv
+
+func toHSVTable(rgbTable []consoleColor) hsvTable {
+ t := make(hsvTable, len(rgbTable))
+ for i, c := range rgbTable {
+ t[i] = toHSV(c.rgb)
+ }
+ return t
+}
+
+func (t hsvTable) find(rgb int) consoleColor {
+ hsv := toHSV(rgb)
+ n := 7
+ l := float32(5.0)
+ for i, p := range t {
+ d := hsv.dist(p)
+ if d < l {
+ l, n = d, i
+ }
+ }
+ return color16[n]
+}
+
+func minmax3f(a, b, c float32) (min, max float32) {
+ if a < b {
+ if b < c {
+ return a, c
+ } else if a < c {
+ return a, b
+ } else {
+ return c, b
+ }
+ } else {
+ if a < c {
+ return b, c
+ } else if b < c {
+ return b, a
+ } else {
+ return c, a
+ }
+ }
+}
+
+var n256foreAttr []word
+var n256backAttr []word
+
+func n256setup() {
+ n256foreAttr = make([]word, 256)
+ n256backAttr = make([]word, 256)
+ t := toHSVTable(color16)
+ for i, rgb := range color256 {
+ c := t.find(rgb)
+ n256foreAttr[i] = c.foregroundAttr()
+ n256backAttr[i] = c.backgroundAttr()
+ }
+}
+
+// EnableColorsStdout enables colors if possible.
+func EnableColorsStdout(enabled *bool) func() {
+ var mode uint32
+ h := os.Stdout.Fd()
+ if r, _, _ := procGetConsoleMode.Call(h, uintptr(unsafe.Pointer(&mode))); r != 0 {
+ if r, _, _ = procSetConsoleMode.Call(h, uintptr(mode|cENABLE_VIRTUAL_TERMINAL_PROCESSING)); r != 0 {
+ if enabled != nil {
+ *enabled = true
+ }
+ return func() {
+ procSetConsoleMode.Call(h, uintptr(mode))
+ }
+ }
+ }
+ if enabled != nil {
+ *enabled = true
+ }
+ return func() {}
+}
diff --git a/vendor/github.com/mattn/go-colorable/go.test.sh b/vendor/github.com/mattn/go-colorable/go.test.sh
new file mode 100644
index 0000000..012162b
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/go.test.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+set -e
+echo "" > coverage.txt
+
+for d in $(go list ./... | grep -v vendor); do
+ go test -race -coverprofile=profile.out -covermode=atomic "$d"
+ if [ -f profile.out ]; then
+ cat profile.out >> coverage.txt
+ rm profile.out
+ fi
+done
diff --git a/vendor/github.com/mattn/go-colorable/noncolorable.go b/vendor/github.com/mattn/go-colorable/noncolorable.go
new file mode 100644
index 0000000..05d6f74
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/noncolorable.go
@@ -0,0 +1,57 @@
+package colorable
+
+import (
+ "bytes"
+ "io"
+)
+
+// NonColorable holds a writer but removes escape sequences.
+type NonColorable struct {
+ out io.Writer
+}
+
+// NewNonColorable returns a new instance of Writer which removes escape sequences from the given Writer.
+func NewNonColorable(w io.Writer) io.Writer {
+ return &NonColorable{out: w}
+}
+
+// Write writes data to the underlying writer, stripping escape sequences.
+func (w *NonColorable) Write(data []byte) (n int, err error) {
+ er := bytes.NewReader(data)
+ var plaintext bytes.Buffer
+loop:
+ for {
+ c1, err := er.ReadByte()
+ if err != nil {
+ plaintext.WriteTo(w.out)
+ break loop
+ }
+ if c1 != 0x1b {
+ plaintext.WriteByte(c1)
+ continue
+ }
+ _, err = plaintext.WriteTo(w.out)
+ if err != nil {
+ break loop
+ }
+ c2, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if c2 != 0x5b {
+ continue
+ }
+
+ for {
+ c, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' {
+ break
+ }
+ }
+ }
+
+ return len(data), nil
+}
diff --git a/vendor/github.com/mattn/go-isatty/LICENSE b/vendor/github.com/mattn/go-isatty/LICENSE
new file mode 100644
index 0000000..65dc692
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/LICENSE
@@ -0,0 +1,9 @@
+Copyright (c) Yasuhiro MATSUMOTO
+
+MIT License (Expat)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/mattn/go-isatty/README.md b/vendor/github.com/mattn/go-isatty/README.md
new file mode 100644
index 0000000..3841835
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/README.md
@@ -0,0 +1,50 @@
+# go-isatty
+
+[![Godoc Reference](https://godoc.org/github.com/mattn/go-isatty?status.svg)](http://godoc.org/github.com/mattn/go-isatty)
+[![Codecov](https://codecov.io/gh/mattn/go-isatty/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-isatty)
+[![Coverage Status](https://coveralls.io/repos/github/mattn/go-isatty/badge.svg?branch=master)](https://coveralls.io/github/mattn/go-isatty?branch=master)
+[![Go Report Card](https://goreportcard.com/badge/mattn/go-isatty)](https://goreportcard.com/report/mattn/go-isatty)
+
+isatty for golang
+
+## Usage
+
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/mattn/go-isatty"
+ "os"
+)
+
+func main() {
+ if isatty.IsTerminal(os.Stdout.Fd()) {
+ fmt.Println("Is Terminal")
+ } else if isatty.IsCygwinTerminal(os.Stdout.Fd()) {
+ fmt.Println("Is Cygwin/MSYS2 Terminal")
+ } else {
+ fmt.Println("Is Not Terminal")
+ }
+}
+```
+
+## Installation
+
+```
+$ go get github.com/mattn/go-isatty
+```
+
+## License
+
+MIT
+
+## Author
+
+Yasuhiro Matsumoto (a.k.a mattn)
+
+## Thanks
+
+* k-takata: base idea for IsCygwinTerminal
+
+ https://github.com/k-takata/go-iscygpty
diff --git a/vendor/github.com/mattn/go-isatty/doc.go b/vendor/github.com/mattn/go-isatty/doc.go
new file mode 100644
index 0000000..17d4f90
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/doc.go
@@ -0,0 +1,2 @@
+// Package isatty implements an interface to isatty.
+package isatty
diff --git a/vendor/github.com/mattn/go-isatty/go.test.sh b/vendor/github.com/mattn/go-isatty/go.test.sh
new file mode 100644
index 0000000..012162b
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/go.test.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+set -e
+echo "" > coverage.txt
+
+for d in $(go list ./... | grep -v vendor); do
+ go test -race -coverprofile=profile.out -covermode=atomic "$d"
+ if [ -f profile.out ]; then
+ cat profile.out >> coverage.txt
+ rm profile.out
+ fi
+done
diff --git a/vendor/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/mattn/go-isatty/isatty_bsd.go
new file mode 100644
index 0000000..d0ea68f
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_bsd.go
@@ -0,0 +1,20 @@
+//go:build (darwin || freebsd || openbsd || netbsd || dragonfly || hurd) && !appengine && !tinygo
+// +build darwin freebsd openbsd netbsd dragonfly hurd
+// +build !appengine
+// +build !tinygo
+
+package isatty
+
+import "golang.org/x/sys/unix"
+
+// IsTerminal returns true if the file descriptor is a terminal.
+func IsTerminal(fd uintptr) bool {
+ _, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA)
+ return err == nil
+}
+
+// IsCygwinTerminal returns true if the file descriptor is a Cygwin or MSYS2
+// terminal. It is always false in this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_others.go b/vendor/github.com/mattn/go-isatty/isatty_others.go
new file mode 100644
index 0000000..7402e06
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_others.go
@@ -0,0 +1,17 @@
+//go:build (appengine || js || nacl || tinygo || wasm) && !windows
+// +build appengine js nacl tinygo wasm
+// +build !windows
+
+package isatty
+
+// IsTerminal returns true if the file descriptor is a terminal, which
+// is always false on js and App Engine classic, a sandboxed PaaS.
+func IsTerminal(fd uintptr) bool {
+ return false
+}
+
+// IsCygwinTerminal returns true if the file descriptor is a Cygwin or MSYS2
+// terminal. It is always false in this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_plan9.go b/vendor/github.com/mattn/go-isatty/isatty_plan9.go
new file mode 100644
index 0000000..bae7f9b
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_plan9.go
@@ -0,0 +1,23 @@
+//go:build plan9
+// +build plan9
+
+package isatty
+
+import (
+ "syscall"
+)
+
+// IsTerminal returns true if the given file descriptor is a terminal.
+func IsTerminal(fd uintptr) bool {
+ path, err := syscall.Fd2path(int(fd))
+ if err != nil {
+ return false
+ }
+ return path == "/dev/cons" || path == "/mnt/term/dev/cons"
+}
+
+// IsCygwinTerminal returns true if the file descriptor is a Cygwin or MSYS2
+// terminal. It is always false in this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_solaris.go b/vendor/github.com/mattn/go-isatty/isatty_solaris.go
new file mode 100644
index 0000000..0c3acf2
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_solaris.go
@@ -0,0 +1,21 @@
+//go:build solaris && !appengine
+// +build solaris,!appengine
+
+package isatty
+
+import (
+ "golang.org/x/sys/unix"
+)
+
+// IsTerminal returns true if the given file descriptor is a terminal.
+// see: https://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libc/port/gen/isatty.c
+func IsTerminal(fd uintptr) bool {
+ _, err := unix.IoctlGetTermio(int(fd), unix.TCGETA)
+ return err == nil
+}
+
+// IsCygwinTerminal returns true if the file descriptor is a Cygwin or MSYS2
+// terminal. It is always false in this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_tcgets.go b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go
new file mode 100644
index 0000000..0337d8c
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go
@@ -0,0 +1,20 @@
+//go:build (linux || aix || zos) && !appengine && !tinygo
+// +build linux aix zos
+// +build !appengine
+// +build !tinygo
+
+package isatty
+
+import "golang.org/x/sys/unix"
+
+// IsTerminal returns true if the file descriptor is a terminal.
+func IsTerminal(fd uintptr) bool {
+ _, err := unix.IoctlGetTermios(int(fd), unix.TCGETS)
+ return err == nil
+}
+
+// IsCygwinTerminal returns true if the file descriptor is a Cygwin or MSYS2
+// terminal. It is always false in this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_windows.go b/vendor/github.com/mattn/go-isatty/isatty_windows.go
new file mode 100644
index 0000000..8e3c991
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_windows.go
@@ -0,0 +1,125 @@
+//go:build windows && !appengine
+// +build windows,!appengine
+
+package isatty
+
+import (
+ "errors"
+ "strings"
+ "syscall"
+ "unicode/utf16"
+ "unsafe"
+)
+
+const (
+ objectNameInfo uintptr = 1
+ fileNameInfo = 2
+ fileTypePipe = 3
+)
+
+var (
+ kernel32 = syscall.NewLazyDLL("kernel32.dll")
+ ntdll = syscall.NewLazyDLL("ntdll.dll")
+ procGetConsoleMode = kernel32.NewProc("GetConsoleMode")
+ procGetFileInformationByHandleEx = kernel32.NewProc("GetFileInformationByHandleEx")
+ procGetFileType = kernel32.NewProc("GetFileType")
+ procNtQueryObject = ntdll.NewProc("NtQueryObject")
+)
+
+func init() {
+ // Check if GetFileInformationByHandleEx is available.
+ if procGetFileInformationByHandleEx.Find() != nil {
+ procGetFileInformationByHandleEx = nil
+ }
+}
+
+// IsTerminal returns true if the file descriptor is a terminal.
+func IsTerminal(fd uintptr) bool {
+ var st uint32
+ r, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, fd, uintptr(unsafe.Pointer(&st)), 0)
+ return r != 0 && e == 0
+}
+
+// isCygwinPipeName reports whether the pipe name is one used for a Cygwin/MSYS2 pty.
+// A Cygwin/MSYS2 pty has a name like:
+//	\{cygwin,msys}-XXXXXXXXXXXXXXXX-ptyN-{from,to}-master
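+// for example: \msys-dd50a72ab4668b33-pty0-to-master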
+func isCygwinPipeName(name string) bool {
+ token := strings.Split(name, "-")
+ if len(token) < 5 {
+ return false
+ }
+
+ if token[0] != `\msys` &&
+ token[0] != `\cygwin` &&
+ token[0] != `\Device\NamedPipe\msys` &&
+ token[0] != `\Device\NamedPipe\cygwin` {
+ return false
+ }
+
+ if token[1] == "" {
+ return false
+ }
+
+ if !strings.HasPrefix(token[2], "pty") {
+ return false
+ }
+
+ if token[3] != `from` && token[3] != `to` {
+ return false
+ }
+
+ if token[4] != "master" {
+ return false
+ }
+
+ return true
+}
+
+// getFileNameByHandle uses the undocumented ntdll NtQueryObject call to get the full file name
+// from a file handle. GetFileInformationByHandleEx is not available before Windows Vista, and
+// some people are still using Windows XP, so this is a workaround for them; it also works on
+// systems from Windows Vista to 10.
+// See https://stackoverflow.com/a/18792477 for details.
+func getFileNameByHandle(fd uintptr) (string, error) {
+ if procNtQueryObject == nil {
+ return "", errors.New("ntdll.dll: NtQueryObject not supported")
+ }
+
+ var buf [4 + syscall.MAX_PATH]uint16
+ var result int
+ r, _, e := syscall.Syscall6(procNtQueryObject.Addr(), 5,
+ fd, objectNameInfo, uintptr(unsafe.Pointer(&buf)), uintptr(2*len(buf)), uintptr(unsafe.Pointer(&result)), 0)
+ if r != 0 {
+ return "", e
+ }
+ return string(utf16.Decode(buf[4 : 4+buf[0]/2])), nil
+}
+
+// IsCygwinTerminal returns true if the file descriptor is a cygwin or msys2
+// terminal.
+func IsCygwinTerminal(fd uintptr) bool {
+ if procGetFileInformationByHandleEx == nil {
+ name, err := getFileNameByHandle(fd)
+ if err != nil {
+ return false
+ }
+ return isCygwinPipeName(name)
+ }
+
+ // Cygwin/msys's pty is a pipe.
+ ft, _, e := syscall.Syscall(procGetFileType.Addr(), 1, fd, 0, 0)
+ if ft != fileTypePipe || e != 0 {
+ return false
+ }
+
+ var buf [2 + syscall.MAX_PATH]uint16
+ r, _, e := syscall.Syscall6(procGetFileInformationByHandleEx.Addr(),
+ 4, fd, fileNameInfo, uintptr(unsafe.Pointer(&buf)),
+ uintptr(len(buf)*2), 0, 0)
+ if r == 0 || e != 0 {
+ return false
+ }
+
+ l := *(*uint32)(unsafe.Pointer(&buf))
+ return isCygwinPipeName(string(utf16.Decode(buf[2 : 2+l/2])))
+}
diff --git a/vendor/github.com/mattn/go-runewidth/LICENSE b/vendor/github.com/mattn/go-runewidth/LICENSE
new file mode 100644
index 0000000..91b5cef
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Yasuhiro Matsumoto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/mattn/go-runewidth/README.md b/vendor/github.com/mattn/go-runewidth/README.md
new file mode 100644
index 0000000..5e2cfd9
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/README.md
@@ -0,0 +1,27 @@
+go-runewidth
+============
+
+[![Build Status](https://github.com/mattn/go-runewidth/workflows/test/badge.svg?branch=master)](https://github.com/mattn/go-runewidth/actions?query=workflow%3Atest)
+[![Codecov](https://codecov.io/gh/mattn/go-runewidth/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-runewidth)
+[![GoDoc](https://godoc.org/github.com/mattn/go-runewidth?status.svg)](http://godoc.org/github.com/mattn/go-runewidth)
+[![Go Report Card](https://goreportcard.com/badge/github.com/mattn/go-runewidth)](https://goreportcard.com/report/github.com/mattn/go-runewidth)
+
+Provides functions to get the fixed width of a character or string.
+
+Usage
+-----
+
+```go
+runewidth.StringWidth("つのだ☆HIRO") == 12
+```
+
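+A few more helpers, as a sketch (behavior inferred from the API above; widths
+assume a non-CJK locale):
+
+```go
+runewidth.RuneWidth('世')                    // 2 cells
+runewidth.Truncate("つのだ☆HIRO", 10, "...") // truncated to fit 10 cells
+runewidth.FillLeft("abc", 5)                 // "  abc"
+```
+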
+
+Author
+------
+
+Yasuhiro Matsumoto
+
+License
+-------
+
+Released under the MIT License: http://mattn.mit-license.org/2013
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth.go b/vendor/github.com/mattn/go-runewidth/runewidth.go
new file mode 100644
index 0000000..7dfbb3b
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth.go
@@ -0,0 +1,358 @@
+package runewidth
+
+import (
+ "os"
+ "strings"
+
+ "github.com/rivo/uniseg"
+)
+
+//go:generate go run script/generate.go
+
+var (
+ // EastAsianWidth will be set to true if the current locale is CJK
+ EastAsianWidth bool
+
+ // StrictEmojiNeutral should be set to false when handling broken fonts
+ StrictEmojiNeutral bool = true
+
+ // DefaultCondition is a condition for the current locale
+ DefaultCondition = &Condition{
+ EastAsianWidth: false,
+ StrictEmojiNeutral: true,
+ }
+)
+
+func init() {
+ handleEnv()
+}
+
+func handleEnv() {
+ env := os.Getenv("RUNEWIDTH_EASTASIAN")
+ if env == "" {
+ EastAsianWidth = IsEastAsian()
+ } else {
+ EastAsianWidth = env == "1"
+ }
+ // update DefaultCondition
+ if DefaultCondition.EastAsianWidth != EastAsianWidth {
+ DefaultCondition.EastAsianWidth = EastAsianWidth
+ if len(DefaultCondition.combinedLut) > 0 {
+ DefaultCondition.combinedLut = DefaultCondition.combinedLut[:0]
+ CreateLUT()
+ }
+ }
+}
+
+type interval struct {
+ first rune
+ last rune
+}
+
+type table []interval
+
+func inTables(r rune, ts ...table) bool {
+ for _, t := range ts {
+ if inTable(r, t) {
+ return true
+ }
+ }
+ return false
+}
+
+func inTable(r rune, t table) bool {
+ if r < t[0].first {
+ return false
+ }
+
+ bot := 0
+ top := len(t) - 1
+ for top >= bot {
+ mid := (bot + top) >> 1
+
+ switch {
+ case t[mid].last < r:
+ bot = mid + 1
+ case t[mid].first > r:
+ top = mid - 1
+ default:
+ return true
+ }
+ }
+
+ return false
+}
+
+var private = table{
+ {0x00E000, 0x00F8FF}, {0x0F0000, 0x0FFFFD}, {0x100000, 0x10FFFD},
+}
+
+var nonprint = table{
+ {0x0000, 0x001F}, {0x007F, 0x009F}, {0x00AD, 0x00AD},
+ {0x070F, 0x070F}, {0x180B, 0x180E}, {0x200B, 0x200F},
+ {0x2028, 0x202E}, {0x206A, 0x206F}, {0xD800, 0xDFFF},
+ {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFB}, {0xFFFE, 0xFFFF},
+}
+
+// Condition has the flag EastAsianWidth, indicating whether the current locale is CJK or not.
+type Condition struct {
+ combinedLut []byte
+ EastAsianWidth bool
+ StrictEmojiNeutral bool
+}
+
+// NewCondition returns a new instance of Condition based on the current locale.
+func NewCondition() *Condition {
+ return &Condition{
+ EastAsianWidth: EastAsianWidth,
+ StrictEmojiNeutral: StrictEmojiNeutral,
+ }
+}
+
+// RuneWidth returns the number of cells in r.
+// See http://www.unicode.org/reports/tr11/
+func (c *Condition) RuneWidth(r rune) int {
+ if r < 0 || r > 0x10FFFF {
+ return 0
+ }
+ if len(c.combinedLut) > 0 {
+ return int(c.combinedLut[r>>1]>>(uint(r&1)*4)) & 3
+ }
+ // optimized version, verified by TestRuneWidthChecksums()
+ if !c.EastAsianWidth {
+ switch {
+ case r < 0x20:
+ return 0
+ case (r >= 0x7F && r <= 0x9F) || r == 0xAD: // nonprint
+ return 0
+ case r < 0x300:
+ return 1
+ case inTable(r, narrow):
+ return 1
+ case inTables(r, nonprint, combining):
+ return 0
+ case inTable(r, doublewidth):
+ return 2
+ default:
+ return 1
+ }
+ } else {
+ switch {
+ case inTables(r, nonprint, combining):
+ return 0
+ case inTable(r, narrow):
+ return 1
+ case inTables(r, ambiguous, doublewidth):
+ return 2
+ case !c.StrictEmojiNeutral && inTables(r, ambiguous, emoji, narrow):
+ return 2
+ default:
+ return 1
+ }
+ }
+}
+
+// CreateLUT will create an in-memory lookup table of 557056 bytes for faster operation.
+// This should not be called concurrently with other operations on c.
+// If the options in c are changed, CreateLUT should be called again.
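+//
+// A typical use, as a sketch:
+//
+//	c := NewCondition()
+//	c.CreateLUT()
+//	w := c.StringWidth("つのだ☆HIRO") // subsequent width queries hit the LUT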
+func (c *Condition) CreateLUT() {
+ const max = 0x110000
+ lut := c.combinedLut
+ if len(c.combinedLut) != 0 {
+ // Remove so we don't use it.
+ c.combinedLut = nil
+ } else {
+ lut = make([]byte, max/2)
+ }
+ for i := range lut {
+ i32 := int32(i * 2)
+ x0 := c.RuneWidth(i32)
+ x1 := c.RuneWidth(i32 + 1)
+ lut[i] = uint8(x0) | uint8(x1)<<4
+ }
+ c.combinedLut = lut
+}
+
+// StringWidth returns the display width of s in cells.
+func (c *Condition) StringWidth(s string) (width int) {
+ g := uniseg.NewGraphemes(s)
+ for g.Next() {
+ var chWidth int
+ for _, r := range g.Runes() {
+ chWidth = c.RuneWidth(r)
+ if chWidth > 0 {
+ break // Our best guess at this point is to use the width of the first non-zero-width rune.
+ }
+ }
+ width += chWidth
+ }
+ return
+}
+
+// Truncate returns s truncated to w cells, with tail appended.
+func (c *Condition) Truncate(s string, w int, tail string) string {
+ if c.StringWidth(s) <= w {
+ return s
+ }
+ w -= c.StringWidth(tail)
+ var width int
+ pos := len(s)
+ g := uniseg.NewGraphemes(s)
+ for g.Next() {
+ var chWidth int
+ for _, r := range g.Runes() {
+ chWidth = c.RuneWidth(r)
+ if chWidth > 0 {
+ break // See StringWidth() for details.
+ }
+ }
+ if width+chWidth > w {
+ pos, _ = g.Positions()
+ break
+ }
+ width += chWidth
+ }
+ return s[:pos] + tail
+}
+
+// TruncateLeft cuts w cells from the beginning of s, prepending prefix.
+func (c *Condition) TruncateLeft(s string, w int, prefix string) string {
+ if c.StringWidth(s) <= w {
+ return prefix
+ }
+
+ var width int
+ pos := len(s)
+
+ g := uniseg.NewGraphemes(s)
+ for g.Next() {
+ var chWidth int
+ for _, r := range g.Runes() {
+ chWidth = c.RuneWidth(r)
+ if chWidth > 0 {
+ break // See StringWidth() for details.
+ }
+ }
+
+ if width+chWidth > w {
+ if width < w {
+ _, pos = g.Positions()
+ prefix += strings.Repeat(" ", width+chWidth-w)
+ } else {
+ pos, _ = g.Positions()
+ }
+
+ break
+ }
+
+ width += chWidth
+ }
+
+ return prefix + s[pos:]
+}
+
+// Wrap returns s wrapped so that each line fits in w cells.
+func (c *Condition) Wrap(s string, w int) string {
+ width := 0
+ out := ""
+ for _, r := range s {
+ cw := c.RuneWidth(r)
+ if r == '\n' {
+ out += string(r)
+ width = 0
+ continue
+ } else if width+cw > w {
+ out += "\n"
+ width = 0
+ out += string(r)
+ width += cw
+ continue
+ }
+ out += string(r)
+ width += cw
+ }
+ return out
+}
+
+// FillLeft returns s left-padded with spaces to w cells.
+func (c *Condition) FillLeft(s string, w int) string {
+ width := c.StringWidth(s)
+ count := w - width
+ if count > 0 {
+ b := make([]byte, count)
+ for i := range b {
+ b[i] = ' '
+ }
+ return string(b) + s
+ }
+ return s
+}
+
+// FillRight returns s right-padded with spaces to w cells.
+func (c *Condition) FillRight(s string, w int) string {
+ width := c.StringWidth(s)
+ count := w - width
+ if count > 0 {
+ b := make([]byte, count)
+ for i := range b {
+ b[i] = ' '
+ }
+ return s + string(b)
+ }
+ return s
+}
+
+// RuneWidth returns the number of cells in r.
+// See http://www.unicode.org/reports/tr11/
+func RuneWidth(r rune) int {
+ return DefaultCondition.RuneWidth(r)
+}
+
+// IsAmbiguousWidth returns whether r is of ambiguous width or not.
+func IsAmbiguousWidth(r rune) bool {
+ return inTables(r, private, ambiguous)
+}
+
+// IsNeutralWidth returns whether r is of neutral width or not.
+func IsNeutralWidth(r rune) bool {
+ return inTable(r, neutral)
+}
+
+// StringWidth returns the display width of s in cells.
+func StringWidth(s string) (width int) {
+ return DefaultCondition.StringWidth(s)
+}
+
+// Truncate returns s truncated to w cells, with tail appended.
+func Truncate(s string, w int, tail string) string {
+ return DefaultCondition.Truncate(s, w, tail)
+}
+
+// TruncateLeft cuts w cells from the beginning of s, prepending prefix.
+func TruncateLeft(s string, w int, prefix string) string {
+ return DefaultCondition.TruncateLeft(s, w, prefix)
+}
+
+// Wrap returns s wrapped so that each line fits in w cells.
+func Wrap(s string, w int) string {
+ return DefaultCondition.Wrap(s, w)
+}
+
+// FillLeft returns s left-padded with spaces to w cells.
+func FillLeft(s string, w int) string {
+ return DefaultCondition.FillLeft(s, w)
+}
+
+// FillRight returns s right-padded with spaces to w cells.
+func FillRight(s string, w int) string {
+ return DefaultCondition.FillRight(s, w)
+}
+
+// CreateLUT will create an in-memory lookup table of 557056 bytes for faster operation.
+// This should not be called concurrently with other operations.
+func CreateLUT() {
+ if len(DefaultCondition.combinedLut) > 0 {
+ return
+ }
+ DefaultCondition.CreateLUT()
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
new file mode 100644
index 0000000..84b6528
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
@@ -0,0 +1,9 @@
+//go:build appengine
+// +build appengine
+
+package runewidth
+
+// IsEastAsian returns true if the current locale is CJK
+func IsEastAsian() bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_js.go b/vendor/github.com/mattn/go-runewidth/runewidth_js.go
new file mode 100644
index 0000000..c2abbc2
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_js.go
@@ -0,0 +1,9 @@
+//go:build js && !appengine
+// +build js,!appengine
+
+package runewidth
+
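+// IsEastAsian returns true if the current locale is CJK.
+// This is not yet implemented for js and always returns false.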
+func IsEastAsian() bool {
+ // TODO: Implement this for the web. Detect East Asian locales in a compatible way, and return true.
+ return false
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_posix.go b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
new file mode 100644
index 0000000..5a31d73
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
@@ -0,0 +1,81 @@
+//go:build !windows && !js && !appengine
+// +build !windows,!js,!appengine
+
+package runewidth
+
+import (
+ "os"
+ "regexp"
+ "strings"
+)
+
+var reLoc = regexp.MustCompile(`^[a-z][a-z][a-z]?(?:_[A-Z][A-Z])?\.(.+)`)
+
+var mblenTable = map[string]int{
+ "utf-8": 6,
+ "utf8": 6,
+ "jis": 8,
+ "eucjp": 3,
+ "euckr": 2,
+ "euccn": 2,
+ "sjis": 2,
+ "cp932": 2,
+ "cp51932": 2,
+ "cp936": 2,
+ "cp949": 2,
+ "cp950": 2,
+ "big5": 2,
+ "gbk": 2,
+ "gb2312": 2,
+}
+
+func isEastAsian(locale string) bool {
+ charset := strings.ToLower(locale)
+ r := reLoc.FindStringSubmatch(locale)
+ if len(r) == 2 {
+ charset = strings.ToLower(r[1])
+ }
+
+ if strings.HasSuffix(charset, "@cjk_narrow") {
+ return false
+ }
+
+ for pos, b := range []byte(charset) {
+ if b == '@' {
+ charset = charset[:pos]
+ break
+ }
+ }
+ max := 1
+ if m, ok := mblenTable[charset]; ok {
+ max = m
+ }
+ if max > 1 && (charset[0] != 'u' ||
+ strings.HasPrefix(locale, "ja") ||
+ strings.HasPrefix(locale, "ko") ||
+ strings.HasPrefix(locale, "zh")) {
+ return true
+ }
+ return false
+}
+
+// IsEastAsian returns true if the current locale is CJK
+func IsEastAsian() bool {
+ locale := os.Getenv("LC_ALL")
+ if locale == "" {
+ locale = os.Getenv("LC_CTYPE")
+ }
+ if locale == "" {
+ locale = os.Getenv("LANG")
+ }
+
+ // ignore C locale
+ if locale == "POSIX" || locale == "C" {
+ return false
+ }
+ if len(locale) > 1 && locale[0] == 'C' && (locale[1] == '.' || locale[1] == '-') {
+ return false
+ }
+
+ return isEastAsian(locale)
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_table.go b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
new file mode 100644
index 0000000..e5d890c
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
@@ -0,0 +1,439 @@
+// Code generated by script/generate.go. DO NOT EDIT.
+
+package runewidth
+
+var combining = table{
+ {0x0300, 0x036F}, {0x0483, 0x0489}, {0x07EB, 0x07F3},
+ {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0D00, 0x0D01},
+ {0x135D, 0x135F}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1AC0},
+ {0x1B6B, 0x1B73}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF},
+ {0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2DE0, 0x2DFF},
+ {0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D},
+ {0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA8E0, 0xA8F1},
+ {0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x10376, 0x1037A},
+ {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x11300, 0x11301},
+ {0x1133B, 0x1133C}, {0x11366, 0x1136C}, {0x11370, 0x11374},
+ {0x16AF0, 0x16AF4}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172},
+ {0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD},
+ {0x1D242, 0x1D244}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018},
+ {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, {0x1E026, 0x1E02A},
+ {0x1E8D0, 0x1E8D6},
+}
+
+var doublewidth = table{
+ {0x1100, 0x115F}, {0x231A, 0x231B}, {0x2329, 0x232A},
+ {0x23E9, 0x23EC}, {0x23F0, 0x23F0}, {0x23F3, 0x23F3},
+ {0x25FD, 0x25FE}, {0x2614, 0x2615}, {0x2648, 0x2653},
+ {0x267F, 0x267F}, {0x2693, 0x2693}, {0x26A1, 0x26A1},
+ {0x26AA, 0x26AB}, {0x26BD, 0x26BE}, {0x26C4, 0x26C5},
+ {0x26CE, 0x26CE}, {0x26D4, 0x26D4}, {0x26EA, 0x26EA},
+ {0x26F2, 0x26F3}, {0x26F5, 0x26F5}, {0x26FA, 0x26FA},
+ {0x26FD, 0x26FD}, {0x2705, 0x2705}, {0x270A, 0x270B},
+ {0x2728, 0x2728}, {0x274C, 0x274C}, {0x274E, 0x274E},
+ {0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797},
+ {0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C},
+ {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99},
+ {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB},
+ {0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF},
+ {0x3105, 0x312F}, {0x3131, 0x318E}, {0x3190, 0x31E3},
+ {0x31F0, 0x321E}, {0x3220, 0x3247}, {0x3250, 0x4DBF},
+ {0x4E00, 0xA48C}, {0xA490, 0xA4C6}, {0xA960, 0xA97C},
+ {0xAC00, 0xD7A3}, {0xF900, 0xFAFF}, {0xFE10, 0xFE19},
+ {0xFE30, 0xFE52}, {0xFE54, 0xFE66}, {0xFE68, 0xFE6B},
+ {0xFF01, 0xFF60}, {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4},
+ {0x16FF0, 0x16FF1}, {0x17000, 0x187F7}, {0x18800, 0x18CD5},
+ {0x18D00, 0x18D08}, {0x1B000, 0x1B11E}, {0x1B150, 0x1B152},
+ {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, {0x1F004, 0x1F004},
+ {0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A},
+ {0x1F200, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248},
+ {0x1F250, 0x1F251}, {0x1F260, 0x1F265}, {0x1F300, 0x1F320},
+ {0x1F32D, 0x1F335}, {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393},
+ {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0},
+ {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440},
+ {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E},
+ {0x1F550, 0x1F567}, {0x1F57A, 0x1F57A}, {0x1F595, 0x1F596},
+ {0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5},
+ {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2}, {0x1F6D5, 0x1F6D7},
+ {0x1F6EB, 0x1F6EC}, {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB},
+ {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F978},
+ {0x1F97A, 0x1F9CB}, {0x1F9CD, 0x1F9FF}, {0x1FA70, 0x1FA74},
+ {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA86}, {0x1FA90, 0x1FAA8},
+ {0x1FAB0, 0x1FAB6}, {0x1FAC0, 0x1FAC2}, {0x1FAD0, 0x1FAD6},
+ {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD},
+}
+
+var ambiguous = table{
+ {0x00A1, 0x00A1}, {0x00A4, 0x00A4}, {0x00A7, 0x00A8},
+ {0x00AA, 0x00AA}, {0x00AD, 0x00AE}, {0x00B0, 0x00B4},
+ {0x00B6, 0x00BA}, {0x00BC, 0x00BF}, {0x00C6, 0x00C6},
+ {0x00D0, 0x00D0}, {0x00D7, 0x00D8}, {0x00DE, 0x00E1},
+ {0x00E6, 0x00E6}, {0x00E8, 0x00EA}, {0x00EC, 0x00ED},
+ {0x00F0, 0x00F0}, {0x00F2, 0x00F3}, {0x00F7, 0x00FA},
+ {0x00FC, 0x00FC}, {0x00FE, 0x00FE}, {0x0101, 0x0101},
+ {0x0111, 0x0111}, {0x0113, 0x0113}, {0x011B, 0x011B},
+ {0x0126, 0x0127}, {0x012B, 0x012B}, {0x0131, 0x0133},
+ {0x0138, 0x0138}, {0x013F, 0x0142}, {0x0144, 0x0144},
+ {0x0148, 0x014B}, {0x014D, 0x014D}, {0x0152, 0x0153},
+ {0x0166, 0x0167}, {0x016B, 0x016B}, {0x01CE, 0x01CE},
+ {0x01D0, 0x01D0}, {0x01D2, 0x01D2}, {0x01D4, 0x01D4},
+ {0x01D6, 0x01D6}, {0x01D8, 0x01D8}, {0x01DA, 0x01DA},
+ {0x01DC, 0x01DC}, {0x0251, 0x0251}, {0x0261, 0x0261},
+ {0x02C4, 0x02C4}, {0x02C7, 0x02C7}, {0x02C9, 0x02CB},
+ {0x02CD, 0x02CD}, {0x02D0, 0x02D0}, {0x02D8, 0x02DB},
+ {0x02DD, 0x02DD}, {0x02DF, 0x02DF}, {0x0300, 0x036F},
+ {0x0391, 0x03A1}, {0x03A3, 0x03A9}, {0x03B1, 0x03C1},
+ {0x03C3, 0x03C9}, {0x0401, 0x0401}, {0x0410, 0x044F},
+ {0x0451, 0x0451}, {0x2010, 0x2010}, {0x2013, 0x2016},
+ {0x2018, 0x2019}, {0x201C, 0x201D}, {0x2020, 0x2022},
+ {0x2024, 0x2027}, {0x2030, 0x2030}, {0x2032, 0x2033},
+ {0x2035, 0x2035}, {0x203B, 0x203B}, {0x203E, 0x203E},
+ {0x2074, 0x2074}, {0x207F, 0x207F}, {0x2081, 0x2084},
+ {0x20AC, 0x20AC}, {0x2103, 0x2103}, {0x2105, 0x2105},
+ {0x2109, 0x2109}, {0x2113, 0x2113}, {0x2116, 0x2116},
+ {0x2121, 0x2122}, {0x2126, 0x2126}, {0x212B, 0x212B},
+ {0x2153, 0x2154}, {0x215B, 0x215E}, {0x2160, 0x216B},
+ {0x2170, 0x2179}, {0x2189, 0x2189}, {0x2190, 0x2199},
+ {0x21B8, 0x21B9}, {0x21D2, 0x21D2}, {0x21D4, 0x21D4},
+ {0x21E7, 0x21E7}, {0x2200, 0x2200}, {0x2202, 0x2203},
+ {0x2207, 0x2208}, {0x220B, 0x220B}, {0x220F, 0x220F},
+ {0x2211, 0x2211}, {0x2215, 0x2215}, {0x221A, 0x221A},
+ {0x221D, 0x2220}, {0x2223, 0x2223}, {0x2225, 0x2225},
+ {0x2227, 0x222C}, {0x222E, 0x222E}, {0x2234, 0x2237},
+ {0x223C, 0x223D}, {0x2248, 0x2248}, {0x224C, 0x224C},
+ {0x2252, 0x2252}, {0x2260, 0x2261}, {0x2264, 0x2267},
+ {0x226A, 0x226B}, {0x226E, 0x226F}, {0x2282, 0x2283},
+ {0x2286, 0x2287}, {0x2295, 0x2295}, {0x2299, 0x2299},
+ {0x22A5, 0x22A5}, {0x22BF, 0x22BF}, {0x2312, 0x2312},
+ {0x2460, 0x24E9}, {0x24EB, 0x254B}, {0x2550, 0x2573},
+ {0x2580, 0x258F}, {0x2592, 0x2595}, {0x25A0, 0x25A1},
+ {0x25A3, 0x25A9}, {0x25B2, 0x25B3}, {0x25B6, 0x25B7},
+ {0x25BC, 0x25BD}, {0x25C0, 0x25C1}, {0x25C6, 0x25C8},
+ {0x25CB, 0x25CB}, {0x25CE, 0x25D1}, {0x25E2, 0x25E5},
+ {0x25EF, 0x25EF}, {0x2605, 0x2606}, {0x2609, 0x2609},
+ {0x260E, 0x260F}, {0x261C, 0x261C}, {0x261E, 0x261E},
+ {0x2640, 0x2640}, {0x2642, 0x2642}, {0x2660, 0x2661},
+ {0x2663, 0x2665}, {0x2667, 0x266A}, {0x266C, 0x266D},
+ {0x266F, 0x266F}, {0x269E, 0x269F}, {0x26BF, 0x26BF},
+ {0x26C6, 0x26CD}, {0x26CF, 0x26D3}, {0x26D5, 0x26E1},
+ {0x26E3, 0x26E3}, {0x26E8, 0x26E9}, {0x26EB, 0x26F1},
+ {0x26F4, 0x26F4}, {0x26F6, 0x26F9}, {0x26FB, 0x26FC},
+ {0x26FE, 0x26FF}, {0x273D, 0x273D}, {0x2776, 0x277F},
+ {0x2B56, 0x2B59}, {0x3248, 0x324F}, {0xE000, 0xF8FF},
+ {0xFE00, 0xFE0F}, {0xFFFD, 0xFFFD}, {0x1F100, 0x1F10A},
+ {0x1F110, 0x1F12D}, {0x1F130, 0x1F169}, {0x1F170, 0x1F18D},
+ {0x1F18F, 0x1F190}, {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF},
+ {0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD},
+}
+var narrow = table{
+ {0x0020, 0x007E}, {0x00A2, 0x00A3}, {0x00A5, 0x00A6},
+ {0x00AC, 0x00AC}, {0x00AF, 0x00AF}, {0x27E6, 0x27ED},
+ {0x2985, 0x2986},
+}
+
+var neutral = table{
+ {0x0000, 0x001F}, {0x007F, 0x00A0}, {0x00A9, 0x00A9},
+ {0x00AB, 0x00AB}, {0x00B5, 0x00B5}, {0x00BB, 0x00BB},
+ {0x00C0, 0x00C5}, {0x00C7, 0x00CF}, {0x00D1, 0x00D6},
+ {0x00D9, 0x00DD}, {0x00E2, 0x00E5}, {0x00E7, 0x00E7},
+ {0x00EB, 0x00EB}, {0x00EE, 0x00EF}, {0x00F1, 0x00F1},
+ {0x00F4, 0x00F6}, {0x00FB, 0x00FB}, {0x00FD, 0x00FD},
+ {0x00FF, 0x0100}, {0x0102, 0x0110}, {0x0112, 0x0112},
+ {0x0114, 0x011A}, {0x011C, 0x0125}, {0x0128, 0x012A},
+ {0x012C, 0x0130}, {0x0134, 0x0137}, {0x0139, 0x013E},
+ {0x0143, 0x0143}, {0x0145, 0x0147}, {0x014C, 0x014C},
+ {0x014E, 0x0151}, {0x0154, 0x0165}, {0x0168, 0x016A},
+ {0x016C, 0x01CD}, {0x01CF, 0x01CF}, {0x01D1, 0x01D1},
+ {0x01D3, 0x01D3}, {0x01D5, 0x01D5}, {0x01D7, 0x01D7},
+ {0x01D9, 0x01D9}, {0x01DB, 0x01DB}, {0x01DD, 0x0250},
+ {0x0252, 0x0260}, {0x0262, 0x02C3}, {0x02C5, 0x02C6},
+ {0x02C8, 0x02C8}, {0x02CC, 0x02CC}, {0x02CE, 0x02CF},
+ {0x02D1, 0x02D7}, {0x02DC, 0x02DC}, {0x02DE, 0x02DE},
+ {0x02E0, 0x02FF}, {0x0370, 0x0377}, {0x037A, 0x037F},
+ {0x0384, 0x038A}, {0x038C, 0x038C}, {0x038E, 0x0390},
+ {0x03AA, 0x03B0}, {0x03C2, 0x03C2}, {0x03CA, 0x0400},
+ {0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F},
+ {0x0531, 0x0556}, {0x0559, 0x058A}, {0x058D, 0x058F},
+ {0x0591, 0x05C7}, {0x05D0, 0x05EA}, {0x05EF, 0x05F4},
+ {0x0600, 0x061C}, {0x061E, 0x070D}, {0x070F, 0x074A},
+ {0x074D, 0x07B1}, {0x07C0, 0x07FA}, {0x07FD, 0x082D},
+ {0x0830, 0x083E}, {0x0840, 0x085B}, {0x085E, 0x085E},
+ {0x0860, 0x086A}, {0x08A0, 0x08B4}, {0x08B6, 0x08C7},
+ {0x08D3, 0x0983}, {0x0985, 0x098C}, {0x098F, 0x0990},
+ {0x0993, 0x09A8}, {0x09AA, 0x09B0}, {0x09B2, 0x09B2},
+ {0x09B6, 0x09B9}, {0x09BC, 0x09C4}, {0x09C7, 0x09C8},
+ {0x09CB, 0x09CE}, {0x09D7, 0x09D7}, {0x09DC, 0x09DD},
+ {0x09DF, 0x09E3}, {0x09E6, 0x09FE}, {0x0A01, 0x0A03},
+ {0x0A05, 0x0A0A}, {0x0A0F, 0x0A10}, {0x0A13, 0x0A28},
+ {0x0A2A, 0x0A30}, {0x0A32, 0x0A33}, {0x0A35, 0x0A36},
+ {0x0A38, 0x0A39}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42},
+ {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51},
+ {0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76},
+ {0x0A81, 0x0A83}, {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91},
+ {0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3},
+ {0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9},
+ {0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3},
+ {0x0AE6, 0x0AF1}, {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03},
+ {0x0B05, 0x0B0C}, {0x0B0F, 0x0B10}, {0x0B13, 0x0B28},
+ {0x0B2A, 0x0B30}, {0x0B32, 0x0B33}, {0x0B35, 0x0B39},
+ {0x0B3C, 0x0B44}, {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D},
+ {0x0B55, 0x0B57}, {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63},
+ {0x0B66, 0x0B77}, {0x0B82, 0x0B83}, {0x0B85, 0x0B8A},
+ {0x0B8E, 0x0B90}, {0x0B92, 0x0B95}, {0x0B99, 0x0B9A},
+ {0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4},
+ {0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2},
+ {0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0},
+ {0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C},
+ {0x0C0E, 0x0C10}, {0x0C12, 0x0C28}, {0x0C2A, 0x0C39},
+ {0x0C3D, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D},
+ {0x0C55, 0x0C56}, {0x0C58, 0x0C5A}, {0x0C60, 0x0C63},
+ {0x0C66, 0x0C6F}, {0x0C77, 0x0C8C}, {0x0C8E, 0x0C90},
+ {0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9},
+ {0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD},
+ {0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3},
+ {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D00, 0x0D0C},
+ {0x0D0E, 0x0D10}, {0x0D12, 0x0D44}, {0x0D46, 0x0D48},
+ {0x0D4A, 0x0D4F}, {0x0D54, 0x0D63}, {0x0D66, 0x0D7F},
+ {0x0D81, 0x0D83}, {0x0D85, 0x0D96}, {0x0D9A, 0x0DB1},
+ {0x0DB3, 0x0DBB}, {0x0DBD, 0x0DBD}, {0x0DC0, 0x0DC6},
+ {0x0DCA, 0x0DCA}, {0x0DCF, 0x0DD4}, {0x0DD6, 0x0DD6},
+ {0x0DD8, 0x0DDF}, {0x0DE6, 0x0DEF}, {0x0DF2, 0x0DF4},
+ {0x0E01, 0x0E3A}, {0x0E3F, 0x0E5B}, {0x0E81, 0x0E82},
+ {0x0E84, 0x0E84}, {0x0E86, 0x0E8A}, {0x0E8C, 0x0EA3},
+ {0x0EA5, 0x0EA5}, {0x0EA7, 0x0EBD}, {0x0EC0, 0x0EC4},
+ {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD}, {0x0ED0, 0x0ED9},
+ {0x0EDC, 0x0EDF}, {0x0F00, 0x0F47}, {0x0F49, 0x0F6C},
+ {0x0F71, 0x0F97}, {0x0F99, 0x0FBC}, {0x0FBE, 0x0FCC},
+ {0x0FCE, 0x0FDA}, {0x1000, 0x10C5}, {0x10C7, 0x10C7},
+ {0x10CD, 0x10CD}, {0x10D0, 0x10FF}, {0x1160, 0x1248},
+ {0x124A, 0x124D}, {0x1250, 0x1256}, {0x1258, 0x1258},
+ {0x125A, 0x125D}, {0x1260, 0x1288}, {0x128A, 0x128D},
+ {0x1290, 0x12B0}, {0x12B2, 0x12B5}, {0x12B8, 0x12BE},
+ {0x12C0, 0x12C0}, {0x12C2, 0x12C5}, {0x12C8, 0x12D6},
+ {0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A},
+ {0x135D, 0x137C}, {0x1380, 0x1399}, {0x13A0, 0x13F5},
+ {0x13F8, 0x13FD}, {0x1400, 0x169C}, {0x16A0, 0x16F8},
+ {0x1700, 0x170C}, {0x170E, 0x1714}, {0x1720, 0x1736},
+ {0x1740, 0x1753}, {0x1760, 0x176C}, {0x176E, 0x1770},
+ {0x1772, 0x1773}, {0x1780, 0x17DD}, {0x17E0, 0x17E9},
+ {0x17F0, 0x17F9}, {0x1800, 0x180E}, {0x1810, 0x1819},
+ {0x1820, 0x1878}, {0x1880, 0x18AA}, {0x18B0, 0x18F5},
+ {0x1900, 0x191E}, {0x1920, 0x192B}, {0x1930, 0x193B},
+ {0x1940, 0x1940}, {0x1944, 0x196D}, {0x1970, 0x1974},
+ {0x1980, 0x19AB}, {0x19B0, 0x19C9}, {0x19D0, 0x19DA},
+ {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, {0x1A60, 0x1A7C},
+ {0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, {0x1AA0, 0x1AAD},
+ {0x1AB0, 0x1AC0}, {0x1B00, 0x1B4B}, {0x1B50, 0x1B7C},
+ {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, {0x1C3B, 0x1C49},
+ {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA}, {0x1CBD, 0x1CC7},
+ {0x1CD0, 0x1CFA}, {0x1D00, 0x1DF9}, {0x1DFB, 0x1F15},
+ {0x1F18, 0x1F1D}, {0x1F20, 0x1F45}, {0x1F48, 0x1F4D},
+ {0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B},
+ {0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4},
+ {0x1FB6, 0x1FC4}, {0x1FC6, 0x1FD3}, {0x1FD6, 0x1FDB},
+ {0x1FDD, 0x1FEF}, {0x1FF2, 0x1FF4}, {0x1FF6, 0x1FFE},
+ {0x2000, 0x200F}, {0x2011, 0x2012}, {0x2017, 0x2017},
+ {0x201A, 0x201B}, {0x201E, 0x201F}, {0x2023, 0x2023},
+ {0x2028, 0x202F}, {0x2031, 0x2031}, {0x2034, 0x2034},
+ {0x2036, 0x203A}, {0x203C, 0x203D}, {0x203F, 0x2064},
+ {0x2066, 0x2071}, {0x2075, 0x207E}, {0x2080, 0x2080},
+ {0x2085, 0x208E}, {0x2090, 0x209C}, {0x20A0, 0x20A8},
+ {0x20AA, 0x20AB}, {0x20AD, 0x20BF}, {0x20D0, 0x20F0},
+ {0x2100, 0x2102}, {0x2104, 0x2104}, {0x2106, 0x2108},
+ {0x210A, 0x2112}, {0x2114, 0x2115}, {0x2117, 0x2120},
+ {0x2123, 0x2125}, {0x2127, 0x212A}, {0x212C, 0x2152},
+ {0x2155, 0x215A}, {0x215F, 0x215F}, {0x216C, 0x216F},
+ {0x217A, 0x2188}, {0x218A, 0x218B}, {0x219A, 0x21B7},
+ {0x21BA, 0x21D1}, {0x21D3, 0x21D3}, {0x21D5, 0x21E6},
+ {0x21E8, 0x21FF}, {0x2201, 0x2201}, {0x2204, 0x2206},
+ {0x2209, 0x220A}, {0x220C, 0x220E}, {0x2210, 0x2210},
+ {0x2212, 0x2214}, {0x2216, 0x2219}, {0x221B, 0x221C},
+ {0x2221, 0x2222}, {0x2224, 0x2224}, {0x2226, 0x2226},
+ {0x222D, 0x222D}, {0x222F, 0x2233}, {0x2238, 0x223B},
+ {0x223E, 0x2247}, {0x2249, 0x224B}, {0x224D, 0x2251},
+ {0x2253, 0x225F}, {0x2262, 0x2263}, {0x2268, 0x2269},
+ {0x226C, 0x226D}, {0x2270, 0x2281}, {0x2284, 0x2285},
+ {0x2288, 0x2294}, {0x2296, 0x2298}, {0x229A, 0x22A4},
+ {0x22A6, 0x22BE}, {0x22C0, 0x2311}, {0x2313, 0x2319},
+ {0x231C, 0x2328}, {0x232B, 0x23E8}, {0x23ED, 0x23EF},
+ {0x23F1, 0x23F2}, {0x23F4, 0x2426}, {0x2440, 0x244A},
+ {0x24EA, 0x24EA}, {0x254C, 0x254F}, {0x2574, 0x257F},
+ {0x2590, 0x2591}, {0x2596, 0x259F}, {0x25A2, 0x25A2},
+ {0x25AA, 0x25B1}, {0x25B4, 0x25B5}, {0x25B8, 0x25BB},
+ {0x25BE, 0x25BF}, {0x25C2, 0x25C5}, {0x25C9, 0x25CA},
+ {0x25CC, 0x25CD}, {0x25D2, 0x25E1}, {0x25E6, 0x25EE},
+ {0x25F0, 0x25FC}, {0x25FF, 0x2604}, {0x2607, 0x2608},
+ {0x260A, 0x260D}, {0x2610, 0x2613}, {0x2616, 0x261B},
+ {0x261D, 0x261D}, {0x261F, 0x263F}, {0x2641, 0x2641},
+ {0x2643, 0x2647}, {0x2654, 0x265F}, {0x2662, 0x2662},
+ {0x2666, 0x2666}, {0x266B, 0x266B}, {0x266E, 0x266E},
+ {0x2670, 0x267E}, {0x2680, 0x2692}, {0x2694, 0x269D},
+ {0x26A0, 0x26A0}, {0x26A2, 0x26A9}, {0x26AC, 0x26BC},
+ {0x26C0, 0x26C3}, {0x26E2, 0x26E2}, {0x26E4, 0x26E7},
+ {0x2700, 0x2704}, {0x2706, 0x2709}, {0x270C, 0x2727},
+ {0x2729, 0x273C}, {0x273E, 0x274B}, {0x274D, 0x274D},
+ {0x274F, 0x2752}, {0x2756, 0x2756}, {0x2758, 0x2775},
+ {0x2780, 0x2794}, {0x2798, 0x27AF}, {0x27B1, 0x27BE},
+ {0x27C0, 0x27E5}, {0x27EE, 0x2984}, {0x2987, 0x2B1A},
+ {0x2B1D, 0x2B4F}, {0x2B51, 0x2B54}, {0x2B5A, 0x2B73},
+ {0x2B76, 0x2B95}, {0x2B97, 0x2C2E}, {0x2C30, 0x2C5E},
+ {0x2C60, 0x2CF3}, {0x2CF9, 0x2D25}, {0x2D27, 0x2D27},
+ {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, {0x2D6F, 0x2D70},
+ {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE},
+ {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6},
+ {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE},
+ {0x2DE0, 0x2E52}, {0x303F, 0x303F}, {0x4DC0, 0x4DFF},
+ {0xA4D0, 0xA62B}, {0xA640, 0xA6F7}, {0xA700, 0xA7BF},
+ {0xA7C2, 0xA7CA}, {0xA7F5, 0xA82C}, {0xA830, 0xA839},
+ {0xA840, 0xA877}, {0xA880, 0xA8C5}, {0xA8CE, 0xA8D9},
+ {0xA8E0, 0xA953}, {0xA95F, 0xA95F}, {0xA980, 0xA9CD},
+ {0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE}, {0xAA00, 0xAA36},
+ {0xAA40, 0xAA4D}, {0xAA50, 0xAA59}, {0xAA5C, 0xAAC2},
+ {0xAADB, 0xAAF6}, {0xAB01, 0xAB06}, {0xAB09, 0xAB0E},
+ {0xAB11, 0xAB16}, {0xAB20, 0xAB26}, {0xAB28, 0xAB2E},
+ {0xAB30, 0xAB6B}, {0xAB70, 0xABED}, {0xABF0, 0xABF9},
+ {0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, {0xD800, 0xDFFF},
+ {0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB36},
+ {0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41},
+ {0xFB43, 0xFB44}, {0xFB46, 0xFBC1}, {0xFBD3, 0xFD3F},
+ {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFD},
+ {0xFE20, 0xFE2F}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC},
+ {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC}, {0x10000, 0x1000B},
+ {0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D},
+ {0x1003F, 0x1004D}, {0x10050, 0x1005D}, {0x10080, 0x100FA},
+ {0x10100, 0x10102}, {0x10107, 0x10133}, {0x10137, 0x1018E},
+ {0x10190, 0x1019C}, {0x101A0, 0x101A0}, {0x101D0, 0x101FD},
+ {0x10280, 0x1029C}, {0x102A0, 0x102D0}, {0x102E0, 0x102FB},
+ {0x10300, 0x10323}, {0x1032D, 0x1034A}, {0x10350, 0x1037A},
+ {0x10380, 0x1039D}, {0x1039F, 0x103C3}, {0x103C8, 0x103D5},
+ {0x10400, 0x1049D}, {0x104A0, 0x104A9}, {0x104B0, 0x104D3},
+ {0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563},
+ {0x1056F, 0x1056F}, {0x10600, 0x10736}, {0x10740, 0x10755},
+ {0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808},
+ {0x1080A, 0x10835}, {0x10837, 0x10838}, {0x1083C, 0x1083C},
+ {0x1083F, 0x10855}, {0x10857, 0x1089E}, {0x108A7, 0x108AF},
+ {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x108FB, 0x1091B},
+ {0x1091F, 0x10939}, {0x1093F, 0x1093F}, {0x10980, 0x109B7},
+ {0x109BC, 0x109CF}, {0x109D2, 0x10A03}, {0x10A05, 0x10A06},
+ {0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A35},
+ {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48}, {0x10A50, 0x10A58},
+ {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, {0x10AEB, 0x10AF6},
+ {0x10B00, 0x10B35}, {0x10B39, 0x10B55}, {0x10B58, 0x10B72},
+ {0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, {0x10BA9, 0x10BAF},
+ {0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2},
+ {0x10CFA, 0x10D27}, {0x10D30, 0x10D39}, {0x10E60, 0x10E7E},
+ {0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD}, {0x10EB0, 0x10EB1},
+ {0x10F00, 0x10F27}, {0x10F30, 0x10F59}, {0x10FB0, 0x10FCB},
+ {0x10FE0, 0x10FF6}, {0x11000, 0x1104D}, {0x11052, 0x1106F},
+ {0x1107F, 0x110C1}, {0x110CD, 0x110CD}, {0x110D0, 0x110E8},
+ {0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11147},
+ {0x11150, 0x11176}, {0x11180, 0x111DF}, {0x111E1, 0x111F4},
+ {0x11200, 0x11211}, {0x11213, 0x1123E}, {0x11280, 0x11286},
+ {0x11288, 0x11288}, {0x1128A, 0x1128D}, {0x1128F, 0x1129D},
+ {0x1129F, 0x112A9}, {0x112B0, 0x112EA}, {0x112F0, 0x112F9},
+ {0x11300, 0x11303}, {0x11305, 0x1130C}, {0x1130F, 0x11310},
+ {0x11313, 0x11328}, {0x1132A, 0x11330}, {0x11332, 0x11333},
+ {0x11335, 0x11339}, {0x1133B, 0x11344}, {0x11347, 0x11348},
+ {0x1134B, 0x1134D}, {0x11350, 0x11350}, {0x11357, 0x11357},
+ {0x1135D, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374},
+ {0x11400, 0x1145B}, {0x1145D, 0x11461}, {0x11480, 0x114C7},
+ {0x114D0, 0x114D9}, {0x11580, 0x115B5}, {0x115B8, 0x115DD},
+ {0x11600, 0x11644}, {0x11650, 0x11659}, {0x11660, 0x1166C},
+ {0x11680, 0x116B8}, {0x116C0, 0x116C9}, {0x11700, 0x1171A},
+ {0x1171D, 0x1172B}, {0x11730, 0x1173F}, {0x11800, 0x1183B},
+ {0x118A0, 0x118F2}, {0x118FF, 0x11906}, {0x11909, 0x11909},
+ {0x1190C, 0x11913}, {0x11915, 0x11916}, {0x11918, 0x11935},
+ {0x11937, 0x11938}, {0x1193B, 0x11946}, {0x11950, 0x11959},
+ {0x119A0, 0x119A7}, {0x119AA, 0x119D7}, {0x119DA, 0x119E4},
+ {0x11A00, 0x11A47}, {0x11A50, 0x11AA2}, {0x11AC0, 0x11AF8},
+ {0x11C00, 0x11C08}, {0x11C0A, 0x11C36}, {0x11C38, 0x11C45},
+ {0x11C50, 0x11C6C}, {0x11C70, 0x11C8F}, {0x11C92, 0x11CA7},
+ {0x11CA9, 0x11CB6}, {0x11D00, 0x11D06}, {0x11D08, 0x11D09},
+ {0x11D0B, 0x11D36}, {0x11D3A, 0x11D3A}, {0x11D3C, 0x11D3D},
+ {0x11D3F, 0x11D47}, {0x11D50, 0x11D59}, {0x11D60, 0x11D65},
+ {0x11D67, 0x11D68}, {0x11D6A, 0x11D8E}, {0x11D90, 0x11D91},
+ {0x11D93, 0x11D98}, {0x11DA0, 0x11DA9}, {0x11EE0, 0x11EF8},
+ {0x11FB0, 0x11FB0}, {0x11FC0, 0x11FF1}, {0x11FFF, 0x12399},
+ {0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543},
+ {0x13000, 0x1342E}, {0x13430, 0x13438}, {0x14400, 0x14646},
+ {0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16A60, 0x16A69},
+ {0x16A6E, 0x16A6F}, {0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5},
+ {0x16B00, 0x16B45}, {0x16B50, 0x16B59}, {0x16B5B, 0x16B61},
+ {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F}, {0x16E40, 0x16E9A},
+ {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, {0x16F8F, 0x16F9F},
+ {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88},
+ {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, {0x1D000, 0x1D0F5},
+ {0x1D100, 0x1D126}, {0x1D129, 0x1D1E8}, {0x1D200, 0x1D245},
+ {0x1D2E0, 0x1D2F3}, {0x1D300, 0x1D356}, {0x1D360, 0x1D378},
+ {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F},
+ {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC},
+ {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3},
+ {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514},
+ {0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E},
+ {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550},
+ {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B},
+ {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006},
+ {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024},
+ {0x1E026, 0x1E02A}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D},
+ {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E2C0, 0x1E2F9},
+ {0x1E2FF, 0x1E2FF}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6},
+ {0x1E900, 0x1E94B}, {0x1E950, 0x1E959}, {0x1E95E, 0x1E95F},
+ {0x1EC71, 0x1ECB4}, {0x1ED01, 0x1ED3D}, {0x1EE00, 0x1EE03},
+ {0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24},
+ {0x1EE27, 0x1EE27}, {0x1EE29, 0x1EE32}, {0x1EE34, 0x1EE37},
+ {0x1EE39, 0x1EE39}, {0x1EE3B, 0x1EE3B}, {0x1EE42, 0x1EE42},
+ {0x1EE47, 0x1EE47}, {0x1EE49, 0x1EE49}, {0x1EE4B, 0x1EE4B},
+ {0x1EE4D, 0x1EE4F}, {0x1EE51, 0x1EE52}, {0x1EE54, 0x1EE54},
+ {0x1EE57, 0x1EE57}, {0x1EE59, 0x1EE59}, {0x1EE5B, 0x1EE5B},
+ {0x1EE5D, 0x1EE5D}, {0x1EE5F, 0x1EE5F}, {0x1EE61, 0x1EE62},
+ {0x1EE64, 0x1EE64}, {0x1EE67, 0x1EE6A}, {0x1EE6C, 0x1EE72},
+ {0x1EE74, 0x1EE77}, {0x1EE79, 0x1EE7C}, {0x1EE7E, 0x1EE7E},
+ {0x1EE80, 0x1EE89}, {0x1EE8B, 0x1EE9B}, {0x1EEA1, 0x1EEA3},
+ {0x1EEA5, 0x1EEA9}, {0x1EEAB, 0x1EEBB}, {0x1EEF0, 0x1EEF1},
+ {0x1F000, 0x1F003}, {0x1F005, 0x1F02B}, {0x1F030, 0x1F093},
+ {0x1F0A0, 0x1F0AE}, {0x1F0B1, 0x1F0BF}, {0x1F0C1, 0x1F0CE},
+ {0x1F0D1, 0x1F0F5}, {0x1F10B, 0x1F10F}, {0x1F12E, 0x1F12F},
+ {0x1F16A, 0x1F16F}, {0x1F1AD, 0x1F1AD}, {0x1F1E6, 0x1F1FF},
+ {0x1F321, 0x1F32C}, {0x1F336, 0x1F336}, {0x1F37D, 0x1F37D},
+ {0x1F394, 0x1F39F}, {0x1F3CB, 0x1F3CE}, {0x1F3D4, 0x1F3DF},
+ {0x1F3F1, 0x1F3F3}, {0x1F3F5, 0x1F3F7}, {0x1F43F, 0x1F43F},
+ {0x1F441, 0x1F441}, {0x1F4FD, 0x1F4FE}, {0x1F53E, 0x1F54A},
+ {0x1F54F, 0x1F54F}, {0x1F568, 0x1F579}, {0x1F57B, 0x1F594},
+ {0x1F597, 0x1F5A3}, {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F},
+ {0x1F6C6, 0x1F6CB}, {0x1F6CD, 0x1F6CF}, {0x1F6D3, 0x1F6D4},
+ {0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773},
+ {0x1F780, 0x1F7D8}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847},
+ {0x1F850, 0x1F859}, {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD},
+ {0x1F8B0, 0x1F8B1}, {0x1F900, 0x1F90B}, {0x1F93B, 0x1F93B},
+ {0x1F946, 0x1F946}, {0x1FA00, 0x1FA53}, {0x1FA60, 0x1FA6D},
+ {0x1FB00, 0x1FB92}, {0x1FB94, 0x1FBCA}, {0x1FBF0, 0x1FBF9},
+ {0xE0001, 0xE0001}, {0xE0020, 0xE007F},
+}
+
+var emoji = table{
+ {0x203C, 0x203C}, {0x2049, 0x2049}, {0x2122, 0x2122},
+ {0x2139, 0x2139}, {0x2194, 0x2199}, {0x21A9, 0x21AA},
+ {0x231A, 0x231B}, {0x2328, 0x2328}, {0x2388, 0x2388},
+ {0x23CF, 0x23CF}, {0x23E9, 0x23F3}, {0x23F8, 0x23FA},
+ {0x24C2, 0x24C2}, {0x25AA, 0x25AB}, {0x25B6, 0x25B6},
+ {0x25C0, 0x25C0}, {0x25FB, 0x25FE}, {0x2600, 0x2605},
+ {0x2607, 0x2612}, {0x2614, 0x2685}, {0x2690, 0x2705},
+ {0x2708, 0x2712}, {0x2714, 0x2714}, {0x2716, 0x2716},
+ {0x271D, 0x271D}, {0x2721, 0x2721}, {0x2728, 0x2728},
+ {0x2733, 0x2734}, {0x2744, 0x2744}, {0x2747, 0x2747},
+ {0x274C, 0x274C}, {0x274E, 0x274E}, {0x2753, 0x2755},
+ {0x2757, 0x2757}, {0x2763, 0x2767}, {0x2795, 0x2797},
+ {0x27A1, 0x27A1}, {0x27B0, 0x27B0}, {0x27BF, 0x27BF},
+ {0x2934, 0x2935}, {0x2B05, 0x2B07}, {0x2B1B, 0x2B1C},
+ {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x3030, 0x3030},
+ {0x303D, 0x303D}, {0x3297, 0x3297}, {0x3299, 0x3299},
+ {0x1F000, 0x1F0FF}, {0x1F10D, 0x1F10F}, {0x1F12F, 0x1F12F},
+ {0x1F16C, 0x1F171}, {0x1F17E, 0x1F17F}, {0x1F18E, 0x1F18E},
+ {0x1F191, 0x1F19A}, {0x1F1AD, 0x1F1E5}, {0x1F201, 0x1F20F},
+ {0x1F21A, 0x1F21A}, {0x1F22F, 0x1F22F}, {0x1F232, 0x1F23A},
+ {0x1F23C, 0x1F23F}, {0x1F249, 0x1F3FA}, {0x1F400, 0x1F53D},
+ {0x1F546, 0x1F64F}, {0x1F680, 0x1F6FF}, {0x1F774, 0x1F77F},
+ {0x1F7D5, 0x1F7FF}, {0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F},
+ {0x1F85A, 0x1F85F}, {0x1F888, 0x1F88F}, {0x1F8AE, 0x1F8FF},
+ {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1FAFF},
+ {0x1FC00, 0x1FFFD},
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_windows.go b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
new file mode 100644
index 0000000..5f987a3
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
@@ -0,0 +1,28 @@
+//go:build windows && !appengine
+// +build windows,!appengine
+
+package runewidth
+
+import (
+ "syscall"
+)
+
+var (
+ kernel32 = syscall.NewLazyDLL("kernel32")
+ procGetConsoleOutputCP = kernel32.NewProc("GetConsoleOutputCP")
+)
+
+// IsEastAsian returns true if the current locale is CJK
+func IsEastAsian() bool {
+ r1, _, _ := procGetConsoleOutputCP.Call()
+ if r1 == 0 {
+ return false
+ }
+
+ switch int(r1) {
+ case 932, 51932, 936, 949, 950:
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md
new file mode 100644
index 0000000..c758234
--- /dev/null
+++ b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md
@@ -0,0 +1,96 @@
+## 1.5.0
+
+* New option `IgnoreUntaggedFields` to ignore decoding to any fields
+ without `mapstructure` (or the configured tag name) set [GH-277]
+* New option `ErrorUnset` which makes it an error if any fields
+ in a target struct are not set by the decoding process. [GH-225]
+* New function `OrComposeDecodeHookFunc` to help compose decode hooks. [GH-240]
+* Decoding to slice from array no longer crashes [GH-265]
+* Decode nested struct pointers to map [GH-271]
+* Fix issue where `,squash` was ignored if `Squash` option was set. [GH-280]
+* Fix issue where fields with `,omitempty` would sometimes decode
+ into a map with an empty string key [GH-281]
+
+## 1.4.3
+
+* Fix cases where `json.Number` didn't decode properly [GH-261]
+
+## 1.4.2
+
+* Custom name matchers to support any sort of casing, formatting, etc. for
+ field names. [GH-250]
+* Fix possible panic in ComposeDecodeHookFunc [GH-251]
+
+## 1.4.1
+
+* Fix regression where `*time.Time` value would be set to empty and not be sent
+ to decode hooks properly [GH-232]
+
+## 1.4.0
+
+* A new decode hook type `DecodeHookFuncValue` has been added that has
+ access to the full values. [GH-183]
+* Squash is now supported with embedded fields that are struct pointers [GH-205]
+* Empty strings will convert to 0 for all numeric types when weakly decoding [GH-206]
+
+## 1.3.3
+
+* Decoding maps from maps creates a settable value for decode hooks [GH-203]
+
+## 1.3.2
+
+* Decode into interface type with a struct value is supported [GH-187]
+
+## 1.3.1
+
+* Squash should only squash embedded structs. [GH-194]
+
+## 1.3.0
+
+* Added `",omitempty"` support. This will ignore zero values in the source
+ structure when encoding. [GH-145]
+
+## 1.2.3
+
+* Fix duplicate entries in Keys list with pointer values. [GH-185]
+
+## 1.2.2
+
+* Do not add unsettable (unexported) values to the unused metadata key
+ or "remain" value. [GH-150]
+
+## 1.2.1
+
+* Go modules checksum mismatch fix
+
+## 1.2.0
+
+* Added support to capture unused values in a field using the `",remain"` value
+ in the mapstructure tag. There is an example to showcase usage.
+* Added `DecoderConfig` option to always squash embedded structs
+* `json.Number` can decode into `uint` types
+* Empty slices are preserved and not replaced with nil slices
+* Fix panic that can occur when decoding a map into a nil slice of structs
+* Improved package documentation for godoc
+
+## 1.1.2
+
+* Fix error when decode hook decodes interface implementation into interface
+ type. [GH-140]
+
+## 1.1.1
+
+* Fix panic that can happen in `decodePtr`
+
+## 1.1.0
+
+* Added `StringToIPHookFunc` to convert `string` to `net.IP` and `net.IPNet` [GH-133]
+* Support struct to struct decoding [GH-137]
+* If source map value is nil, then destination map value is nil (instead of empty)
+* If source slice value is nil, then destination slice value is nil (instead of empty)
+* If source pointer is nil, then destination pointer is set to nil (instead of
+ allocated zero value of type)
+
+## 1.0.0
+
+* Initial tagged stable release.
diff --git a/vendor/github.com/mitchellh/mapstructure/LICENSE b/vendor/github.com/mitchellh/mapstructure/LICENSE
new file mode 100644
index 0000000..f9c841a
--- /dev/null
+++ b/vendor/github.com/mitchellh/mapstructure/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 Mitchell Hashimoto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/mitchellh/mapstructure/README.md b/vendor/github.com/mitchellh/mapstructure/README.md
new file mode 100644
index 0000000..0018dc7
--- /dev/null
+++ b/vendor/github.com/mitchellh/mapstructure/README.md
@@ -0,0 +1,46 @@
+# mapstructure [![Godoc](https://godoc.org/github.com/mitchellh/mapstructure?status.svg)](https://godoc.org/github.com/mitchellh/mapstructure)
+
+mapstructure is a Go library for decoding generic map values to structures
+and vice versa, while providing helpful error handling.
+
+This library is most useful when decoding values from some data stream (JSON,
+Gob, etc.) where you don't _quite_ know the structure of the underlying data
+until you read a part of it. You can therefore read a `map[string]interface{}`
+and use this library to decode it into the proper underlying native Go
+structure.
+
+## Installation
+
+Standard `go get`:
+
+```
+$ go get github.com/mitchellh/mapstructure
+```
+
+## Usage & Example
+
+For usage and examples see the [Godoc](http://godoc.org/github.com/mitchellh/mapstructure).
+
+The `Decode` function has examples associated with it there.
+
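+As a minimal sketch (names and values here are illustrative):
+
+```go
+var person struct {
+	Name string
+	Age  int
+}
+input := map[string]interface{}{"name": "Mitchell", "age": 91}
+err := mapstructure.Decode(input, &person)
+```
+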
+## But Why?!
+
+Go offers fantastic standard libraries for decoding formats such as JSON.
+The standard method is to have a struct pre-created, and populate that struct
+from the bytes of the encoded format. This is great, but the problem is if
+you have configuration or an encoding that changes slightly depending on
+specific fields. For example, consider this JSON:
+
+```json
+{
+ "type": "person",
+ "name": "Mitchell"
+}
+```
+
+Perhaps we can't populate a specific structure without first reading
+the "type" field from the JSON. We could always do two passes over the
+decoding of the JSON (reading the "type" first, and the rest later).
+However, it is much simpler to just decode this into a `map[string]interface{}`
+structure, read the "type" key, then use something like this library
+to decode it into the proper structure.
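+
+A sketch of that flow (assuming the "type" switch selects a `Person` struct):
+
+```go
+var raw map[string]interface{}
+if err := json.Unmarshal(data, &raw); err != nil {
+	return err
+}
+if raw["type"] == "person" {
+	var person Person
+	if err := mapstructure.Decode(raw, &person); err != nil {
+		return err
+	}
+}
+```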
diff --git a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go
new file mode 100644
index 0000000..3a754ca
--- /dev/null
+++ b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go
@@ -0,0 +1,279 @@
+package mapstructure
+
+import (
+ "encoding"
+ "errors"
+ "fmt"
+ "net"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// typedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns
+// it into the proper DecodeHookFunc type, such as DecodeHookFuncType.
+func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc {
+ // Create variables here so we can reference them with the reflect pkg
+ var f1 DecodeHookFuncType
+ var f2 DecodeHookFuncKind
+ var f3 DecodeHookFuncValue
+
+ // Put the variables into this slice; the rest is done
+ // automatically using the reflect package.
+ potential := []interface{}{f1, f2, f3}
+
+ v := reflect.ValueOf(h)
+ vt := v.Type()
+ for _, raw := range potential {
+ pt := reflect.ValueOf(raw).Type()
+ if vt.ConvertibleTo(pt) {
+ return v.Convert(pt).Interface()
+ }
+ }
+
+ return nil
+}
+
+// DecodeHookExec executes the given decode hook. This should be used
+// rather than calling the hook directly, since it naturally degrades to the
+// older backwards-compatible DecodeHookFunc variants that take reflect.Kind
+// instead of reflect.Type.
+func DecodeHookExec(
+ raw DecodeHookFunc,
+ from reflect.Value, to reflect.Value) (interface{}, error) {
+
+ switch f := typedDecodeHook(raw).(type) {
+ case DecodeHookFuncType:
+ return f(from.Type(), to.Type(), from.Interface())
+ case DecodeHookFuncKind:
+ return f(from.Kind(), to.Kind(), from.Interface())
+ case DecodeHookFuncValue:
+ return f(from, to)
+ default:
+ return nil, errors.New("invalid decode hook signature")
+ }
+}
+
+// ComposeDecodeHookFunc creates a single DecodeHookFunc that
+// automatically composes multiple DecodeHookFuncs.
+//
+// The composed funcs are called in order, with the result of the
+// previous transformation.
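+//
+// For example, as a sketch (both hooks are defined in this file):
+//
+//	hook := ComposeDecodeHookFunc(
+//		StringToTimeDurationHookFunc(),
+//		StringToSliceHookFunc(","),
+//	)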
+func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc {
+ return func(f reflect.Value, t reflect.Value) (interface{}, error) {
+ var err error
+ data := f.Interface()
+
+ newFrom := f
+ for _, f1 := range fs {
+ data, err = DecodeHookExec(f1, newFrom, t)
+ if err != nil {
+ return nil, err
+ }
+ newFrom = reflect.ValueOf(data)
+ }
+
+ return data, nil
+ }
+}
+
+// OrComposeDecodeHookFunc executes the input hook functions in order until
+// one of them returns no error; that hook's value is then returned.
+// If all hooks return an error, OrComposeDecodeHookFunc returns an error
+// concatenating all the error messages.
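+//
+// For example, as a sketch (both hooks are defined in this file):
+//
+//	hook := OrComposeDecodeHookFunc(
+//		StringToTimeHookFunc(time.RFC3339),
+//		StringToTimeDurationHookFunc(),
+//	)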
+func OrComposeDecodeHookFunc(ff ...DecodeHookFunc) DecodeHookFunc {
+ return func(a, b reflect.Value) (interface{}, error) {
+ var allErrs string
+ var out interface{}
+ var err error
+
+ for _, f := range ff {
+ out, err = DecodeHookExec(f, a, b)
+ if err != nil {
+ allErrs += err.Error() + "\n"
+ continue
+ }
+
+ return out, nil
+ }
+
+ return nil, errors.New(allErrs)
+ }
+}
+
+// StringToSliceHookFunc returns a DecodeHookFunc that converts
+// string to []string by splitting on the given sep.
+func StringToSliceHookFunc(sep string) DecodeHookFunc {
+ return func(
+ f reflect.Kind,
+ t reflect.Kind,
+ data interface{}) (interface{}, error) {
+ if f != reflect.String || t != reflect.Slice {
+ return data, nil
+ }
+
+ raw := data.(string)
+ if raw == "" {
+ return []string{}, nil
+ }
+
+ return strings.Split(raw, sep), nil
+ }
+}
+
+// StringToTimeDurationHookFunc returns a DecodeHookFunc that converts
+// strings to time.Duration.
+func StringToTimeDurationHookFunc() DecodeHookFunc {
+ return func(
+ f reflect.Type,
+ t reflect.Type,
+ data interface{}) (interface{}, error) {
+ if f.Kind() != reflect.String {
+ return data, nil
+ }
+ if t != reflect.TypeOf(time.Duration(5)) {
+ return data, nil
+ }
+
+ // Convert it by parsing
+ return time.ParseDuration(data.(string))
+ }
+}
+
+// StringToIPHookFunc returns a DecodeHookFunc that converts
+// strings to net.IP
+func StringToIPHookFunc() DecodeHookFunc {
+ return func(
+ f reflect.Type,
+ t reflect.Type,
+ data interface{}) (interface{}, error) {
+ if f.Kind() != reflect.String {
+ return data, nil
+ }
+ if t != reflect.TypeOf(net.IP{}) {
+ return data, nil
+ }
+
+ // Convert it by parsing
+ ip := net.ParseIP(data.(string))
+ if ip == nil {
+ return net.IP{}, fmt.Errorf("failed parsing ip %v", data)
+ }
+
+ return ip, nil
+ }
+}
+
+// StringToIPNetHookFunc returns a DecodeHookFunc that converts
+// strings to net.IPNet
+func StringToIPNetHookFunc() DecodeHookFunc {
+ return func(
+ f reflect.Type,
+ t reflect.Type,
+ data interface{}) (interface{}, error) {
+ if f.Kind() != reflect.String {
+ return data, nil
+ }
+ if t != reflect.TypeOf(net.IPNet{}) {
+ return data, nil
+ }
+
+ // Convert it by parsing
+ _, net, err := net.ParseCIDR(data.(string))
+ return net, err
+ }
+}
+
+// StringToTimeHookFunc returns a DecodeHookFunc that converts
+// strings to time.Time.
+func StringToTimeHookFunc(layout string) DecodeHookFunc {
+ return func(
+ f reflect.Type,
+ t reflect.Type,
+ data interface{}) (interface{}, error) {
+ if f.Kind() != reflect.String {
+ return data, nil
+ }
+ if t != reflect.TypeOf(time.Time{}) {
+ return data, nil
+ }
+
+ // Convert it by parsing
+ return time.Parse(layout, data.(string))
+ }
+}
+
+// WeaklyTypedHook is a DecodeHookFunc which adds support for weak typing to
+// the decoder.
+//
+// Note that this is significantly different from the WeaklyTypedInput option
+// of the DecoderConfig.
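+//
+// A sketch of the string conversions implemented below:
+//
+//	true          -> "1"
+//	[]byte("abc") -> "abc"
+//	int(42)       -> "42"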
+func WeaklyTypedHook(
+ f reflect.Kind,
+ t reflect.Kind,
+ data interface{}) (interface{}, error) {
+ dataVal := reflect.ValueOf(data)
+ switch t {
+ case reflect.String:
+ switch f {
+ case reflect.Bool:
+ if dataVal.Bool() {
+ return "1", nil
+ }
+ return "0", nil
+ case reflect.Float32:
+ return strconv.FormatFloat(dataVal.Float(), 'f', -1, 64), nil
+ case reflect.Int:
+ return strconv.FormatInt(dataVal.Int(), 10), nil
+ case reflect.Slice:
+ dataType := dataVal.Type()
+ elemKind := dataType.Elem().Kind()
+ if elemKind == reflect.Uint8 {
+ return string(dataVal.Interface().([]uint8)), nil
+ }
+ case reflect.Uint:
+ return strconv.FormatUint(dataVal.Uint(), 10), nil
+ }
+ }
+
+ return data, nil
+}
+
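+// RecursiveStructToMapHookFunc returns a DecodeHookFunc that, when
+// decoding a struct into an interface{} value, replaces the target with an
+// empty map[string]interface{} so that the struct is decoded into a map
+// recursively.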
+func RecursiveStructToMapHookFunc() DecodeHookFunc {
+ return func(f reflect.Value, t reflect.Value) (interface{}, error) {
+ if f.Kind() != reflect.Struct {
+ return f.Interface(), nil
+ }
+
+ var i interface{} = struct{}{}
+ if t.Type() != reflect.TypeOf(&i).Elem() {
+ return f.Interface(), nil
+ }
+
+ m := make(map[string]interface{})
+ t.Set(reflect.ValueOf(m))
+
+ return f.Interface(), nil
+ }
+}
+
+// TextUnmarshallerHookFunc returns a DecodeHookFunc that decodes
+// strings via the UnmarshalText method when the target type
+// implements the encoding.TextUnmarshaler interface.
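+//
+// For example, net.IP implements encoding.TextUnmarshaler, so as a sketch
+// a hook like this lets string values decode into net.IP fields:
+//
+//	config := &DecoderConfig{
+//		DecodeHook: TextUnmarshallerHookFunc(),
+//		Result:     &result,
+//	}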
+func TextUnmarshallerHookFunc() DecodeHookFuncType {
+ return func(
+ f reflect.Type,
+ t reflect.Type,
+ data interface{}) (interface{}, error) {
+ if f.Kind() != reflect.String {
+ return data, nil
+ }
+ result := reflect.New(t).Interface()
+ unmarshaller, ok := result.(encoding.TextUnmarshaler)
+ if !ok {
+ return data, nil
+ }
+ if err := unmarshaller.UnmarshalText([]byte(data.(string))); err != nil {
+ return nil, err
+ }
+ return result, nil
+ }
+}
diff --git a/vendor/github.com/mitchellh/mapstructure/error.go b/vendor/github.com/mitchellh/mapstructure/error.go
new file mode 100644
index 0000000..47a99e5
--- /dev/null
+++ b/vendor/github.com/mitchellh/mapstructure/error.go
@@ -0,0 +1,50 @@
+package mapstructure
+
+import (
+ "errors"
+ "fmt"
+ "sort"
+ "strings"
+)
+
+// Error implements the error interface and can represent multiple
+// errors that occur in the course of a single decode.
+type Error struct {
+ Errors []string
+}
+
+func (e *Error) Error() string {
+ points := make([]string, len(e.Errors))
+ for i, err := range e.Errors {
+ points[i] = fmt.Sprintf("* %s", err)
+ }
+
+ sort.Strings(points)
+ return fmt.Sprintf(
+ "%d error(s) decoding:\n\n%s",
+ len(e.Errors), strings.Join(points, "\n"))
+}
+
+// WrappedErrors implements the errwrap.Wrapper interface to make this
+// return value more useful with the errwrap and go-multierror libraries.
+func (e *Error) WrappedErrors() []error {
+ if e == nil {
+ return nil
+ }
+
+ result := make([]error, len(e.Errors))
+ for i, e := range e.Errors {
+ result[i] = errors.New(e)
+ }
+
+ return result
+}
+
+func appendErrors(errors []string, err error) []string {
+ switch e := err.(type) {
+ case *Error:
+ return append(errors, e.Errors...)
+ default:
+ return append(errors, e.Error())
+ }
+}
diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure.go b/vendor/github.com/mitchellh/mapstructure/mapstructure.go
new file mode 100644
index 0000000..1efb22a
--- /dev/null
+++ b/vendor/github.com/mitchellh/mapstructure/mapstructure.go
@@ -0,0 +1,1540 @@
+// Package mapstructure exposes functionality to convert one arbitrary
+// Go type into another, typically to convert a map[string]interface{}
+// into a native Go structure.
+//
+// The Go structure can be arbitrarily complex, containing slices,
+// other structs, etc. and the decoder will properly decode nested
+// maps and so on into the proper structures in the native Go struct.
+// See the examples to see what the decoder is capable of.
+//
+// The simplest function to start with is Decode.
+//
+// Field Tags
+//
+// When decoding to a struct, mapstructure will use the field name by
+// default to perform the mapping. For example, if a struct has a field
+// "Username" then mapstructure will look for a key in the source value
+// of "username" (case insensitive).
+//
+// type User struct {
+// Username string
+// }
+//
+// You can change the behavior of mapstructure by using struct tags.
+// The default struct tag that mapstructure looks for is "mapstructure"
+// but you can customize it using DecoderConfig.
+//
+// Renaming Fields
+//
+// To rename the key that mapstructure looks for, use the "mapstructure"
+// tag and set a value directly. For example, to change the "username" example
+// above to "user":
+//
+// type User struct {
+// Username string `mapstructure:"user"`
+// }
+//
+// Embedded Structs and Squashing
+//
+// Embedded structs are treated as if they're another field with that name.
+// By default, the two structs below are equivalent when decoding with
+// mapstructure:
+//
+// type Person struct {
+// Name string
+// }
+//
+// type Friend struct {
+// Person
+// }
+//
+// type Friend struct {
+// Person Person
+// }
+//
+// This would require an input that looks like below:
+//
+// map[string]interface{}{
+// "person": map[string]interface{}{"name": "alice"},
+// }
+//
+// If your "person" value is NOT nested, then you can append ",squash" to
+// your tag value and mapstructure will treat it as if the embedded struct
+// were part of the struct directly. Example:
+//
+// type Friend struct {
+// Person `mapstructure:",squash"`
+// }
+//
+// Now the following input would be accepted:
+//
+// map[string]interface{}{
+// "name": "alice",
+// }
+//
+// When decoding from a struct to a map, the squash tag squashes the struct
+// fields into a single map. Using the example structs from above:
+//
+// Friend{Person: Person{Name: "alice"}}
+//
+// Will be decoded into a map:
+//
+// map[string]interface{}{
+// "name": "alice",
+// }
+//
+// DecoderConfig has a field that changes the behavior of mapstructure
+// to always squash embedded structs.
+//
+// Remainder Values
+//
+// If there are any unmapped keys in the source value, mapstructure by
+// default will silently ignore them. You can make this an error by setting
+// ErrorUnused in DecoderConfig. If you're using Metadata, you can also
+// maintain a slice of the unused keys.
+//
+// You can also use the ",remain" suffix on your tag to collect all unused
+// values in a map. The field with this tag MUST be a map type and should
+// probably be a "map[string]interface{}" or "map[interface{}]interface{}".
+// See example below:
+//
+// type Friend struct {
+// Name string
+// Other map[string]interface{} `mapstructure:",remain"`
+// }
+//
+// Given the input below, Other would be populated with the other
+// values that weren't used (everything but "name"):
+//
+// map[string]interface{}{
+// "name": "bob",
+// "address": "123 Maple St.",
+// }
+//
+// Omit Empty Values
+//
+// When decoding from a struct to any other value, you may use the
+// ",omitempty" suffix on your tag to omit that value if it equates to
+// the zero value. The zero value of all types is specified in the Go
+// specification.
+//
+// For example, the zero value of a numeric type is zero ("0"). If the struct
+// field value is zero and a numeric type, the field is empty, and it won't
+// be encoded into the destination type.
+//
+// type Source struct {
+// Age int `mapstructure:",omitempty"`
+// }
+//
+// Unexported fields
+//
+// Since unexported (private) struct fields cannot be set outside the package
+// where they are defined, the decoder will simply skip them.
+//
+// For this output type definition:
+//
+// type Exported struct {
+// private string // this unexported field will be skipped
+// Public string
+// }
+//
+// Using this map as input:
+//
+// map[string]interface{}{
+// "private": "I will be ignored",
+// "Public": "I made it through!",
+// }
+//
+// The following struct will be decoded:
+//
+// type Exported struct {
+// private: "" // field is left with an empty string (zero value)
+// Public: "I made it through!"
+// }
+//
+// Other Configuration
+//
+// mapstructure is highly configurable. See the DecoderConfig struct
+// for other features and options that are supported.
+package mapstructure
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+// DecodeHookFunc is the callback function that can be used for
+// data transformations. See "DecodeHook" in the DecoderConfig
+// struct.
+//
+// The type must be one of DecodeHookFuncType, DecodeHookFuncKind, or
+// DecodeHookFuncValue.
+// Values are a superset of Types (Values can return types), and Types are a
+// superset of Kinds (Types can return Kinds) and are generally a richer thing
+// to use, but Kinds are simpler if you only need those.
+//
+// The reason DecodeHookFunc is multi-typed is for backwards compatibility:
+// we started with Kinds and then realized Types were the better solution,
+// but have a promise to not break backwards compat so we now support
+// both.
+type DecodeHookFunc interface{}
+
+// DecodeHookFuncType is a DecodeHookFunc which has complete information about
+// the source and target types.
+type DecodeHookFuncType func(reflect.Type, reflect.Type, interface{}) (interface{}, error)
+
+// DecodeHookFuncKind is a DecodeHookFunc which knows only the Kinds of the
+// source and target types.
+type DecodeHookFuncKind func(reflect.Kind, reflect.Kind, interface{}) (interface{}, error)
+
+// DecodeHookFuncValue is a DecodeHookFunc which has complete access to both the source and target
+// values.
+type DecodeHookFuncValue func(from reflect.Value, to reflect.Value) (interface{}, error)
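+
+// As an illustrative sketch (not part of this package's API), a
+// DecodeHookFuncType that parses strings into time.Duration could look like:
+//
+//	func stringToDurationHook(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
+//		if f.Kind() != reflect.String || t != reflect.TypeOf(time.Duration(0)) {
+//			return data, nil
+//		}
+//		return time.ParseDuration(data.(string))
+//	}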
+
+// DecoderConfig is the configuration that is used to create a new decoder
+// and allows customization of various aspects of decoding.
+type DecoderConfig struct {
+ // DecodeHook, if set, will be called before any decoding and any
+ // type conversion (if WeaklyTypedInput is on). This lets you modify
+ // the values before they're set down onto the resulting struct. The
+ // DecodeHook is called for every map and value in the input. This means
+ // that if a struct has embedded fields with squash tags the decode hook
+ // is called only once with all of the input data, not once for each
+ // embedded struct.
+ //
+ // If an error is returned, the entire decode will fail with that error.
+ DecodeHook DecodeHookFunc
+
+ // If ErrorUnused is true, then it is an error for there to exist
+ // keys in the original map that were unused in the decoding process
+ // (extra keys).
+ ErrorUnused bool
+
+ // If ErrorUnset is true, then it is an error for there to exist
+ // fields in the result that were not set in the decoding process
+ // (extra fields). This only applies to decoding to a struct. This
+ // will affect all nested structs as well.
+ ErrorUnset bool
+
+ // ZeroFields, if set to true, will zero fields before writing them.
+ // For example, a map will be emptied before decoded values are put in
+ // it. If this is false, a map will be merged.
+ ZeroFields bool
+
+ // If WeaklyTypedInput is true, the decoder will make the following
+ // "weak" conversions:
+ //
+ // - bools to string (true = "1", false = "0")
+ // - numbers to string (base 10)
+ // - bools to int/uint (true = 1, false = 0)
+ // - strings to int/uint (base implied by prefix)
+ // - int to bool (true if value != 0)
+ // - string to bool (accepts: 1, t, T, TRUE, true, True, 0, f, F,
+ // FALSE, false, False. Anything else is an error)
+ // - empty array = empty map and vice versa
+ // - negative numbers to overflowed uint values (base 10)
+ // - slice of maps to a merged map
+ // - single values are converted to slices if required. Each
+ // element is weakly decoded. For example: "4" can become []int{4}
+ // if the target type is an int slice.
+ //
+ WeaklyTypedInput bool
+
+ // Squash will squash embedded structs. A squash tag may also be
+ // added to an individual struct field using a tag. For example:
+ //
+ // type Parent struct {
+ // Child `mapstructure:",squash"`
+ // }
+ Squash bool
+
+ // Metadata is the struct that will contain extra metadata about
+ // the decoding. If this is nil, then no metadata will be tracked.
+ Metadata *Metadata
+
+ // Result is a pointer to the struct that will contain the decoded
+ // value.
+ Result interface{}
+
+ // The tag name that mapstructure reads for field names. This
+ // defaults to "mapstructure"
+ TagName string
+
+ // IgnoreUntaggedFields ignores all struct fields without explicit
+ // TagName, comparable to `mapstructure:"-"` as default behaviour.
+ IgnoreUntaggedFields bool
+
+ // MatchName is the function used to match the map key to the struct
+ // field name or tag. Defaults to `strings.EqualFold`. This can be used
+ // to implement case-sensitive tag values, support snake casing, etc.
+ MatchName func(mapKey, fieldName string) bool
+}
+
+// A Decoder takes a raw interface value and turns it into structured
+// data, keeping track of rich error information along the way in case
+// anything goes wrong. Unlike the basic top-level Decode method, you can
+// more finely control how the Decoder behaves using the DecoderConfig
+// structure. The top-level Decode method is just a convenience that sets
+// up the most basic Decoder.
+type Decoder struct {
+ config *DecoderConfig
+}
+
+// Metadata contains information about decoding a structure that
+// is tedious or difficult to get otherwise.
+type Metadata struct {
+ // Keys are the keys of the structure which were successfully decoded
+ Keys []string
+
+ // Unused is a slice of keys that were found in the raw value but
+ // weren't decoded since there was no matching field in the result interface
+ Unused []string
+
+ // Unset is a slice of field names that were found in the result interface
+ // but weren't set in the decoding process since there was no matching value
+ // in the input
+ Unset []string
+}
+
+// Decode takes an input structure and uses reflection to translate it to
+// the output structure. output must be a pointer to a map or struct.
+func Decode(input interface{}, output interface{}) error {
+ config := &DecoderConfig{
+ Metadata: nil,
+ Result: output,
+ }
+
+ decoder, err := NewDecoder(config)
+ if err != nil {
+ return err
+ }
+
+ return decoder.Decode(input)
+}
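+
+// A minimal usage sketch (the Person type and input values here are
+// illustrative only):
+//
+//	type Person struct {
+//		Name string
+//		Age  int
+//	}
+//
+//	input := map[string]interface{}{"name": "alice", "age": 30}
+//	var p Person
+//	if err := Decode(input, &p); err != nil {
+//		// handle the decode error
+//	}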
+
+// WeakDecode is the same as Decode but is shorthand to enable
+// WeaklyTypedInput. See DecoderConfig for more info.
+func WeakDecode(input, output interface{}) error {
+ config := &DecoderConfig{
+ Metadata: nil,
+ Result: output,
+ WeaklyTypedInput: true,
+ }
+
+ decoder, err := NewDecoder(config)
+ if err != nil {
+ return err
+ }
+
+ return decoder.Decode(input)
+}
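+
+// For example (illustrative values), WeakDecode converts a numeric string
+// into an int field:
+//
+//	var out struct{ Port int }
+//	err := WeakDecode(map[string]interface{}{"port": "42"}, &out)
+//	// on success, out.Port == 42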
+
+// DecodeMetadata is the same as Decode, but is shorthand to
+// enable metadata collection. See DecoderConfig for more info.
+func DecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error {
+ config := &DecoderConfig{
+ Metadata: metadata,
+ Result: output,
+ }
+
+ decoder, err := NewDecoder(config)
+ if err != nil {
+ return err
+ }
+
+ return decoder.Decode(input)
+}
+
+// WeakDecodeMetadata is the same as Decode, but is shorthand to
+// enable both WeaklyTypedInput and metadata collection. See
+// DecoderConfig for more info.
+func WeakDecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error {
+ config := &DecoderConfig{
+ Metadata: metadata,
+ Result: output,
+ WeaklyTypedInput: true,
+ }
+
+ decoder, err := NewDecoder(config)
+ if err != nil {
+ return err
+ }
+
+ return decoder.Decode(input)
+}
+
+// NewDecoder returns a new decoder for the given configuration. Once
+// a decoder has been returned, the same configuration must not be used
+// again.
+func NewDecoder(config *DecoderConfig) (*Decoder, error) {
+ val := reflect.ValueOf(config.Result)
+ if val.Kind() != reflect.Ptr {
+ return nil, errors.New("result must be a pointer")
+ }
+
+ val = val.Elem()
+ if !val.CanAddr() {
+ return nil, errors.New("result must be addressable (a pointer)")
+ }
+
+ if config.Metadata != nil {
+ if config.Metadata.Keys == nil {
+ config.Metadata.Keys = make([]string, 0)
+ }
+
+ if config.Metadata.Unused == nil {
+ config.Metadata.Unused = make([]string, 0)
+ }
+
+ if config.Metadata.Unset == nil {
+ config.Metadata.Unset = make([]string, 0)
+ }
+ }
+
+ if config.TagName == "" {
+ config.TagName = "mapstructure"
+ }
+
+ if config.MatchName == nil {
+ config.MatchName = strings.EqualFold
+ }
+
+ result := &Decoder{
+ config: config,
+ }
+
+ return result, nil
+}
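+
+// A sketch of combining NewDecoder with metadata collection (the input map is
+// illustrative):
+//
+//	var md Metadata
+//	var out struct{ Name string }
+//	d, err := NewDecoder(&DecoderConfig{Result: &out, Metadata: &md})
+//	if err == nil {
+//		err = d.Decode(map[string]interface{}{"name": "alice", "extra": 1})
+//	}
+//	// md.Keys records the decoded keys; md.Unused would contain "extra".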
+
+// Decode decodes the given raw interface to the target pointer specified
+// by the configuration.
+func (d *Decoder) Decode(input interface{}) error {
+ return d.decode("", input, reflect.ValueOf(d.config.Result).Elem())
+}
+
+// decode decodes an unknown data type into a specific reflection value.
+func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error {
+ var inputVal reflect.Value
+ if input != nil {
+ inputVal = reflect.ValueOf(input)
+
+ // We need to check here if input is a typed nil. Typed nils won't
+ // match the "input == nil" below so we check that here.
+ if inputVal.Kind() == reflect.Ptr && inputVal.IsNil() {
+ input = nil
+ }
+ }
+
+ if input == nil {
+ // If the data is nil, then we don't set anything, unless ZeroFields is set
+ // to true.
+ if d.config.ZeroFields {
+ outVal.Set(reflect.Zero(outVal.Type()))
+
+ if d.config.Metadata != nil && name != "" {
+ d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+ }
+ }
+ return nil
+ }
+
+ if !inputVal.IsValid() {
+ // If the input value is invalid, then we just set the value
+ // to be the zero value.
+ outVal.Set(reflect.Zero(outVal.Type()))
+ if d.config.Metadata != nil && name != "" {
+ d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+ }
+ return nil
+ }
+
+ if d.config.DecodeHook != nil {
+ // We have a DecodeHook, so let's pre-process the input.
+ var err error
+ input, err = DecodeHookExec(d.config.DecodeHook, inputVal, outVal)
+ if err != nil {
+ return fmt.Errorf("error decoding '%s': %s", name, err)
+ }
+ }
+
+ var err error
+ outputKind := getKind(outVal)
+ addMetaKey := true
+ switch outputKind {
+ case reflect.Bool:
+ err = d.decodeBool(name, input, outVal)
+ case reflect.Interface:
+ err = d.decodeBasic(name, input, outVal)
+ case reflect.String:
+ err = d.decodeString(name, input, outVal)
+ case reflect.Int:
+ err = d.decodeInt(name, input, outVal)
+ case reflect.Uint:
+ err = d.decodeUint(name, input, outVal)
+ case reflect.Float32:
+ err = d.decodeFloat(name, input, outVal)
+ case reflect.Struct:
+ err = d.decodeStruct(name, input, outVal)
+ case reflect.Map:
+ err = d.decodeMap(name, input, outVal)
+ case reflect.Ptr:
+ addMetaKey, err = d.decodePtr(name, input, outVal)
+ case reflect.Slice:
+ err = d.decodeSlice(name, input, outVal)
+ case reflect.Array:
+ err = d.decodeArray(name, input, outVal)
+ case reflect.Func:
+ err = d.decodeFunc(name, input, outVal)
+ default:
+ // If we reached this point then we weren't able to decode it
+ return fmt.Errorf("%s: unsupported type: %s", name, outputKind)
+ }
+
+	// If we reached here, then we successfully decoded SOMETHING, so
+	// mark the key as used if we're tracking metadata.
+ if addMetaKey && d.config.Metadata != nil && name != "" {
+ d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+ }
+
+ return err
+}
+
+// decodeBasic decodes a basic type (bool, int, string, etc.) and sets the
+// value to "data" of that type.
+func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error {
+ if val.IsValid() && val.Elem().IsValid() {
+ elem := val.Elem()
+
+		// If we can't address this element, then it's not writable. Instead,
+ // we make a copy of the value (which is a pointer and therefore
+ // writable), decode into that, and replace the whole value.
+ copied := false
+ if !elem.CanAddr() {
+ copied = true
+
+ // Make *T
+ copy := reflect.New(elem.Type())
+
+ // *T = elem
+ copy.Elem().Set(elem)
+
+ // Set elem so we decode into it
+ elem = copy
+ }
+
+ // Decode. If we have an error then return. We also return right
+ // away if we're not a copy because that means we decoded directly.
+ if err := d.decode(name, data, elem); err != nil || !copied {
+ return err
+ }
+
+		// If we're a copy, we need to set the final result
+ val.Set(elem.Elem())
+ return nil
+ }
+
+ dataVal := reflect.ValueOf(data)
+
+ // If the input data is a pointer, and the assigned type is the dereference
+ // of that exact pointer, then indirect it so that we can assign it.
+ // Example: *string to string
+ if dataVal.Kind() == reflect.Ptr && dataVal.Type().Elem() == val.Type() {
+ dataVal = reflect.Indirect(dataVal)
+ }
+
+ if !dataVal.IsValid() {
+ dataVal = reflect.Zero(val.Type())
+ }
+
+ dataValType := dataVal.Type()
+ if !dataValType.AssignableTo(val.Type()) {
+ return fmt.Errorf(
+ "'%s' expected type '%s', got '%s'",
+ name, val.Type(), dataValType)
+ }
+
+ val.Set(dataVal)
+ return nil
+}
+
+func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) error {
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+ dataKind := getKind(dataVal)
+
+ converted := true
+ switch {
+ case dataKind == reflect.String:
+ val.SetString(dataVal.String())
+ case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
+ if dataVal.Bool() {
+ val.SetString("1")
+ } else {
+ val.SetString("0")
+ }
+ case dataKind == reflect.Int && d.config.WeaklyTypedInput:
+ val.SetString(strconv.FormatInt(dataVal.Int(), 10))
+ case dataKind == reflect.Uint && d.config.WeaklyTypedInput:
+ val.SetString(strconv.FormatUint(dataVal.Uint(), 10))
+ case dataKind == reflect.Float32 && d.config.WeaklyTypedInput:
+ val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64))
+ case dataKind == reflect.Slice && d.config.WeaklyTypedInput,
+ dataKind == reflect.Array && d.config.WeaklyTypedInput:
+ dataType := dataVal.Type()
+ elemKind := dataType.Elem().Kind()
+ switch elemKind {
+ case reflect.Uint8:
+ var uints []uint8
+ if dataKind == reflect.Array {
+ uints = make([]uint8, dataVal.Len(), dataVal.Len())
+ for i := range uints {
+ uints[i] = dataVal.Index(i).Interface().(uint8)
+ }
+ } else {
+ uints = dataVal.Interface().([]uint8)
+ }
+ val.SetString(string(uints))
+ default:
+ converted = false
+ }
+ default:
+ converted = false
+ }
+
+ if !converted {
+ return fmt.Errorf(
+ "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+ name, val.Type(), dataVal.Type(), data)
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error {
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+ dataKind := getKind(dataVal)
+ dataType := dataVal.Type()
+
+ switch {
+ case dataKind == reflect.Int:
+ val.SetInt(dataVal.Int())
+ case dataKind == reflect.Uint:
+ val.SetInt(int64(dataVal.Uint()))
+ case dataKind == reflect.Float32:
+ val.SetInt(int64(dataVal.Float()))
+ case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
+ if dataVal.Bool() {
+ val.SetInt(1)
+ } else {
+ val.SetInt(0)
+ }
+ case dataKind == reflect.String && d.config.WeaklyTypedInput:
+ str := dataVal.String()
+ if str == "" {
+ str = "0"
+ }
+
+ i, err := strconv.ParseInt(str, 0, val.Type().Bits())
+ if err == nil {
+ val.SetInt(i)
+ } else {
+ return fmt.Errorf("cannot parse '%s' as int: %s", name, err)
+ }
+ case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
+ jn := data.(json.Number)
+ i, err := jn.Int64()
+ if err != nil {
+ return fmt.Errorf(
+ "error decoding json.Number into %s: %s", name, err)
+ }
+ val.SetInt(i)
+ default:
+ return fmt.Errorf(
+ "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+ name, val.Type(), dataVal.Type(), data)
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) error {
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+ dataKind := getKind(dataVal)
+ dataType := dataVal.Type()
+
+ switch {
+ case dataKind == reflect.Int:
+ i := dataVal.Int()
+ if i < 0 && !d.config.WeaklyTypedInput {
+ return fmt.Errorf("cannot parse '%s', %d overflows uint",
+ name, i)
+ }
+ val.SetUint(uint64(i))
+ case dataKind == reflect.Uint:
+ val.SetUint(dataVal.Uint())
+ case dataKind == reflect.Float32:
+ f := dataVal.Float()
+ if f < 0 && !d.config.WeaklyTypedInput {
+ return fmt.Errorf("cannot parse '%s', %f overflows uint",
+ name, f)
+ }
+ val.SetUint(uint64(f))
+ case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
+ if dataVal.Bool() {
+ val.SetUint(1)
+ } else {
+ val.SetUint(0)
+ }
+ case dataKind == reflect.String && d.config.WeaklyTypedInput:
+ str := dataVal.String()
+ if str == "" {
+ str = "0"
+ }
+
+ i, err := strconv.ParseUint(str, 0, val.Type().Bits())
+ if err == nil {
+ val.SetUint(i)
+ } else {
+ return fmt.Errorf("cannot parse '%s' as uint: %s", name, err)
+ }
+ case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
+ jn := data.(json.Number)
+ i, err := strconv.ParseUint(string(jn), 0, 64)
+ if err != nil {
+ return fmt.Errorf(
+ "error decoding json.Number into %s: %s", name, err)
+ }
+ val.SetUint(i)
+ default:
+ return fmt.Errorf(
+ "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+ name, val.Type(), dataVal.Type(), data)
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error {
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+ dataKind := getKind(dataVal)
+
+ switch {
+ case dataKind == reflect.Bool:
+ val.SetBool(dataVal.Bool())
+ case dataKind == reflect.Int && d.config.WeaklyTypedInput:
+ val.SetBool(dataVal.Int() != 0)
+ case dataKind == reflect.Uint && d.config.WeaklyTypedInput:
+ val.SetBool(dataVal.Uint() != 0)
+ case dataKind == reflect.Float32 && d.config.WeaklyTypedInput:
+ val.SetBool(dataVal.Float() != 0)
+ case dataKind == reflect.String && d.config.WeaklyTypedInput:
+ b, err := strconv.ParseBool(dataVal.String())
+ if err == nil {
+ val.SetBool(b)
+ } else if dataVal.String() == "" {
+ val.SetBool(false)
+ } else {
+ return fmt.Errorf("cannot parse '%s' as bool: %s", name, err)
+ }
+ default:
+ return fmt.Errorf(
+ "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+ name, val.Type(), dataVal.Type(), data)
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value) error {
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+ dataKind := getKind(dataVal)
+ dataType := dataVal.Type()
+
+ switch {
+ case dataKind == reflect.Int:
+ val.SetFloat(float64(dataVal.Int()))
+ case dataKind == reflect.Uint:
+ val.SetFloat(float64(dataVal.Uint()))
+ case dataKind == reflect.Float32:
+ val.SetFloat(dataVal.Float())
+ case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
+ if dataVal.Bool() {
+ val.SetFloat(1)
+ } else {
+ val.SetFloat(0)
+ }
+ case dataKind == reflect.String && d.config.WeaklyTypedInput:
+ str := dataVal.String()
+ if str == "" {
+ str = "0"
+ }
+
+ f, err := strconv.ParseFloat(str, val.Type().Bits())
+ if err == nil {
+ val.SetFloat(f)
+ } else {
+ return fmt.Errorf("cannot parse '%s' as float: %s", name, err)
+ }
+ case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
+ jn := data.(json.Number)
+ i, err := jn.Float64()
+ if err != nil {
+ return fmt.Errorf(
+ "error decoding json.Number into %s: %s", name, err)
+ }
+ val.SetFloat(i)
+ default:
+ return fmt.Errorf(
+ "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+ name, val.Type(), dataVal.Type(), data)
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) error {
+ valType := val.Type()
+ valKeyType := valType.Key()
+ valElemType := valType.Elem()
+
+ // By default we overwrite keys in the current map
+ valMap := val
+
+ // If the map is nil or we're purposely zeroing fields, make a new map
+ if valMap.IsNil() || d.config.ZeroFields {
+ // Make a new map to hold our result
+ mapType := reflect.MapOf(valKeyType, valElemType)
+ valMap = reflect.MakeMap(mapType)
+ }
+
+ // Check input type and based on the input type jump to the proper func
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+ switch dataVal.Kind() {
+ case reflect.Map:
+ return d.decodeMapFromMap(name, dataVal, val, valMap)
+
+ case reflect.Struct:
+ return d.decodeMapFromStruct(name, dataVal, val, valMap)
+
+ case reflect.Array, reflect.Slice:
+ if d.config.WeaklyTypedInput {
+ return d.decodeMapFromSlice(name, dataVal, val, valMap)
+ }
+
+ fallthrough
+
+ default:
+ return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind())
+ }
+}
+
+func (d *Decoder) decodeMapFromSlice(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error {
+ // Special case for BC reasons (covered by tests)
+ if dataVal.Len() == 0 {
+ val.Set(valMap)
+ return nil
+ }
+
+ for i := 0; i < dataVal.Len(); i++ {
+ err := d.decode(
+ name+"["+strconv.Itoa(i)+"]",
+ dataVal.Index(i).Interface(), val)
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodeMapFromMap(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error {
+ valType := val.Type()
+ valKeyType := valType.Key()
+ valElemType := valType.Elem()
+
+ // Accumulate errors
+ errors := make([]string, 0)
+
+	// If the input data is empty, mirror the input: a nil map stays nil, and
+	// an empty (non-nil) map produces an empty allocated value.
+ if dataVal.Len() == 0 {
+ if dataVal.IsNil() {
+ if !val.IsNil() {
+ val.Set(dataVal)
+ }
+ } else {
+ // Set to empty allocated value
+ val.Set(valMap)
+ }
+
+ return nil
+ }
+
+ for _, k := range dataVal.MapKeys() {
+ fieldName := name + "[" + k.String() + "]"
+
+ // First decode the key into the proper type
+ currentKey := reflect.Indirect(reflect.New(valKeyType))
+ if err := d.decode(fieldName, k.Interface(), currentKey); err != nil {
+ errors = appendErrors(errors, err)
+ continue
+ }
+
+ // Next decode the data into the proper type
+ v := dataVal.MapIndex(k).Interface()
+ currentVal := reflect.Indirect(reflect.New(valElemType))
+ if err := d.decode(fieldName, v, currentVal); err != nil {
+ errors = appendErrors(errors, err)
+ continue
+ }
+
+ valMap.SetMapIndex(currentKey, currentVal)
+ }
+
+ // Set the built up map to the value
+ val.Set(valMap)
+
+ // If we had errors, return those
+ if len(errors) > 0 {
+ return &Error{errors}
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error {
+ typ := dataVal.Type()
+ for i := 0; i < typ.NumField(); i++ {
+ // Get the StructField first since this is a cheap operation. If the
+ // field is unexported, then ignore it.
+ f := typ.Field(i)
+ if f.PkgPath != "" {
+ continue
+ }
+
+ // Next get the actual value of this field and verify it is assignable
+ // to the map value.
+ v := dataVal.Field(i)
+ if !v.Type().AssignableTo(valMap.Type().Elem()) {
+ return fmt.Errorf("cannot assign type '%s' to map value field of type '%s'", v.Type(), valMap.Type().Elem())
+ }
+
+ tagValue := f.Tag.Get(d.config.TagName)
+ keyName := f.Name
+
+ if tagValue == "" && d.config.IgnoreUntaggedFields {
+ continue
+ }
+
+ // If Squash is set in the config, we squash the field down.
+ squash := d.config.Squash && v.Kind() == reflect.Struct && f.Anonymous
+
+ v = dereferencePtrToStructIfNeeded(v, d.config.TagName)
+
+ // Determine the name of the key in the map
+ if index := strings.Index(tagValue, ","); index != -1 {
+ if tagValue[:index] == "-" {
+ continue
+ }
+ // If "omitempty" is specified in the tag, it ignores empty values.
+			if strings.Contains(tagValue[index+1:], "omitempty") && isEmptyValue(v) {
+ continue
+ }
+
+ // If "squash" is specified in the tag, we squash the field down.
+			squash = squash || strings.Contains(tagValue[index+1:], "squash")
+ if squash {
+ // When squashing, the embedded type can be a pointer to a struct.
+ if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct {
+ v = v.Elem()
+ }
+
+ // The final type must be a struct
+ if v.Kind() != reflect.Struct {
+ return fmt.Errorf("cannot squash non-struct type '%s'", v.Type())
+ }
+ }
+ if keyNameTagValue := tagValue[:index]; keyNameTagValue != "" {
+ keyName = keyNameTagValue
+ }
+ } else if len(tagValue) > 0 {
+ if tagValue == "-" {
+ continue
+ }
+ keyName = tagValue
+ }
+
+ switch v.Kind() {
+ // this is an embedded struct, so handle it differently
+ case reflect.Struct:
+ x := reflect.New(v.Type())
+ x.Elem().Set(v)
+
+ vType := valMap.Type()
+ vKeyType := vType.Key()
+ vElemType := vType.Elem()
+ mType := reflect.MapOf(vKeyType, vElemType)
+ vMap := reflect.MakeMap(mType)
+
+ // Creating a pointer to a map so that other methods can completely
+ // overwrite the map if need be (looking at you decodeMapFromMap). The
+ // indirection allows the underlying map to be settable (CanSet() == true)
+			// whereas reflect.MakeMap returns an unsettable map.
+ addrVal := reflect.New(vMap.Type())
+ reflect.Indirect(addrVal).Set(vMap)
+
+ err := d.decode(keyName, x.Interface(), reflect.Indirect(addrVal))
+ if err != nil {
+ return err
+ }
+
+ // the underlying map may have been completely overwritten so pull
+ // it indirectly out of the enclosing value.
+ vMap = reflect.Indirect(addrVal)
+
+ if squash {
+ for _, k := range vMap.MapKeys() {
+ valMap.SetMapIndex(k, vMap.MapIndex(k))
+ }
+ } else {
+ valMap.SetMapIndex(reflect.ValueOf(keyName), vMap)
+ }
+
+ default:
+ valMap.SetMapIndex(reflect.ValueOf(keyName), v)
+ }
+ }
+
+ if val.CanAddr() {
+ val.Set(valMap)
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) (bool, error) {
+ // If the input data is nil, then we want to just set the output
+ // pointer to be nil as well.
+ isNil := data == nil
+ if !isNil {
+ switch v := reflect.Indirect(reflect.ValueOf(data)); v.Kind() {
+ case reflect.Chan,
+ reflect.Func,
+ reflect.Interface,
+ reflect.Map,
+ reflect.Ptr,
+ reflect.Slice:
+ isNil = v.IsNil()
+ }
+ }
+ if isNil {
+ if !val.IsNil() && val.CanSet() {
+ nilValue := reflect.New(val.Type()).Elem()
+ val.Set(nilValue)
+ }
+
+ return true, nil
+ }
+
+ // Create an element of the concrete (non pointer) type and decode
+ // into that. Then set the value of the pointer to this type.
+ valType := val.Type()
+ valElemType := valType.Elem()
+ if val.CanSet() {
+ realVal := val
+ if realVal.IsNil() || d.config.ZeroFields {
+ realVal = reflect.New(valElemType)
+ }
+
+ if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil {
+ return false, err
+ }
+
+ val.Set(realVal)
+ } else {
+ if err := d.decode(name, data, reflect.Indirect(val)); err != nil {
+ return false, err
+ }
+ }
+ return false, nil
+}
+
+func (d *Decoder) decodeFunc(name string, data interface{}, val reflect.Value) error {
+	// Functions cannot be converted between types, so we require the source
+	// value to have the exact same function type and set it directly.
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+ if val.Type() != dataVal.Type() {
+ return fmt.Errorf(
+ "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+ name, val.Type(), dataVal.Type(), data)
+ }
+ val.Set(dataVal)
+ return nil
+}
+
+func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) error {
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+ dataValKind := dataVal.Kind()
+ valType := val.Type()
+ valElemType := valType.Elem()
+ sliceType := reflect.SliceOf(valElemType)
+
+ // If we have a non array/slice type then we first attempt to convert.
+ if dataValKind != reflect.Array && dataValKind != reflect.Slice {
+ if d.config.WeaklyTypedInput {
+ switch {
+ // Slice and array we use the normal logic
+ case dataValKind == reflect.Slice, dataValKind == reflect.Array:
+ break
+
+ // Empty maps turn into empty slices
+ case dataValKind == reflect.Map:
+ if dataVal.Len() == 0 {
+ val.Set(reflect.MakeSlice(sliceType, 0, 0))
+ return nil
+ }
+ // Create slice of maps of other sizes
+ return d.decodeSlice(name, []interface{}{data}, val)
+
+ case dataValKind == reflect.String && valElemType.Kind() == reflect.Uint8:
+ return d.decodeSlice(name, []byte(dataVal.String()), val)
+
+ // All other types we try to convert to the slice type
+ // and "lift" it into it. i.e. a string becomes a string slice.
+ default:
+ // Just re-try this function with data as a slice.
+ return d.decodeSlice(name, []interface{}{data}, val)
+ }
+ }
+
+ return fmt.Errorf(
+ "'%s': source data must be an array or slice, got %s", name, dataValKind)
+ }
+
+ // If the input value is nil, then don't allocate since empty != nil
+ if dataValKind != reflect.Array && dataVal.IsNil() {
+ return nil
+ }
+
+ valSlice := val
+ if valSlice.IsNil() || d.config.ZeroFields {
+ // Make a new slice to hold our result, same size as the original data.
+ valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len())
+ }
+
+ // Accumulate any errors
+ errors := make([]string, 0)
+
+ for i := 0; i < dataVal.Len(); i++ {
+ currentData := dataVal.Index(i).Interface()
+ for valSlice.Len() <= i {
+ valSlice = reflect.Append(valSlice, reflect.Zero(valElemType))
+ }
+ currentField := valSlice.Index(i)
+
+ fieldName := name + "[" + strconv.Itoa(i) + "]"
+ if err := d.decode(fieldName, currentData, currentField); err != nil {
+ errors = appendErrors(errors, err)
+ }
+ }
+
+ // Finally, set the value to the slice we built up
+ val.Set(valSlice)
+
+ // If there were errors, we return those
+ if len(errors) > 0 {
+ return &Error{errors}
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodeArray(name string, data interface{}, val reflect.Value) error {
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+ dataValKind := dataVal.Kind()
+ valType := val.Type()
+ valElemType := valType.Elem()
+ arrayType := reflect.ArrayOf(valType.Len(), valElemType)
+
+ valArray := val
+
+ if valArray.Interface() == reflect.Zero(valArray.Type()).Interface() || d.config.ZeroFields {
+ // Check input type
+ if dataValKind != reflect.Array && dataValKind != reflect.Slice {
+ if d.config.WeaklyTypedInput {
+ switch {
+ // Empty maps turn into empty arrays
+ case dataValKind == reflect.Map:
+ if dataVal.Len() == 0 {
+ val.Set(reflect.Zero(arrayType))
+ return nil
+ }
+
+ // All other types we try to convert to the array type
+ // and "lift" it into it. i.e. a string becomes a string array.
+ default:
+ // Just re-try this function with data as a slice.
+ return d.decodeArray(name, []interface{}{data}, val)
+ }
+ }
+
+ return fmt.Errorf(
+ "'%s': source data must be an array or slice, got %s", name, dataValKind)
+
+ }
+ if dataVal.Len() > arrayType.Len() {
+ return fmt.Errorf(
+				"'%s': expected source data to have length less than or equal to %d, got %d", name, arrayType.Len(), dataVal.Len())
+
+ }
+
+ // Make a new array to hold our result, same size as the original data.
+ valArray = reflect.New(arrayType).Elem()
+ }
+
+ // Accumulate any errors
+ errors := make([]string, 0)
+
+ for i := 0; i < dataVal.Len(); i++ {
+ currentData := dataVal.Index(i).Interface()
+ currentField := valArray.Index(i)
+
+ fieldName := name + "[" + strconv.Itoa(i) + "]"
+ if err := d.decode(fieldName, currentData, currentField); err != nil {
+ errors = appendErrors(errors, err)
+ }
+ }
+
+ // Finally, set the value to the array we built up
+ val.Set(valArray)
+
+ // If there were errors, we return those
+ if len(errors) > 0 {
+ return &Error{errors}
+ }
+
+ return nil
+}
+
+func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) error {
+ dataVal := reflect.Indirect(reflect.ValueOf(data))
+
+ // If the type of the value to write to and the data match directly,
+ // then we just set it directly instead of recursing into the structure.
+ if dataVal.Type() == val.Type() {
+ val.Set(dataVal)
+ return nil
+ }
+
+ dataValKind := dataVal.Kind()
+ switch dataValKind {
+ case reflect.Map:
+ return d.decodeStructFromMap(name, dataVal, val)
+
+ case reflect.Struct:
+ // Not the most efficient way to do this but we can optimize later if
+ // we want to. To convert from struct to struct we go to map first
+ // as an intermediary.
+
+ // Make a new map to hold our result
+ mapType := reflect.TypeOf((map[string]interface{})(nil))
+ mval := reflect.MakeMap(mapType)
+
+ // Creating a pointer to a map so that other methods can completely
+ // overwrite the map if need be (looking at you decodeMapFromMap). The
+ // indirection allows the underlying map to be settable (CanSet() == true)
+		// whereas reflect.MakeMap returns an unsettable map.
+ addrVal := reflect.New(mval.Type())
+
+ reflect.Indirect(addrVal).Set(mval)
+ if err := d.decodeMapFromStruct(name, dataVal, reflect.Indirect(addrVal), mval); err != nil {
+ return err
+ }
+
+ result := d.decodeStructFromMap(name, reflect.Indirect(addrVal), val)
+ return result
+
+ default:
+ return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind())
+ }
+}
+
+func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) error {
+ dataValType := dataVal.Type()
+ if kind := dataValType.Key().Kind(); kind != reflect.String && kind != reflect.Interface {
+ return fmt.Errorf(
+ "'%s' needs a map with string keys, has '%s' keys",
+ name, dataValType.Key().Kind())
+ }
+
+ dataValKeys := make(map[reflect.Value]struct{})
+ dataValKeysUnused := make(map[interface{}]struct{})
+ for _, dataValKey := range dataVal.MapKeys() {
+ dataValKeys[dataValKey] = struct{}{}
+ dataValKeysUnused[dataValKey.Interface()] = struct{}{}
+ }
+
+ targetValKeysUnused := make(map[interface{}]struct{})
+ errors := make([]string, 0)
+
+ // This slice will keep track of all the structs we'll be decoding.
+ // There can be more than one struct if there are embedded structs
+ // that are squashed.
+ structs := make([]reflect.Value, 1, 5)
+ structs[0] = val
+
+ // Compile the list of all the fields that we're going to be decoding
+ // from all the structs.
+ type field struct {
+ field reflect.StructField
+ val reflect.Value
+ }
+
+ // remainField is set to a valid field set with the "remain" tag if
+ // we are keeping track of remaining values.
+ var remainField *field
+
+ fields := []field{}
+ for len(structs) > 0 {
+ structVal := structs[0]
+ structs = structs[1:]
+
+ structType := structVal.Type()
+
+ for i := 0; i < structType.NumField(); i++ {
+ fieldType := structType.Field(i)
+ fieldVal := structVal.Field(i)
+ if fieldVal.Kind() == reflect.Ptr && fieldVal.Elem().Kind() == reflect.Struct {
+ // Handle embedded struct pointers as embedded structs.
+ fieldVal = fieldVal.Elem()
+ }
+
+ // If "squash" is specified in the tag, we squash the field down.
+ squash := d.config.Squash && fieldVal.Kind() == reflect.Struct && fieldType.Anonymous
+ remain := false
+
+			// We always parse the tags because we're looking for other tags too
+ tagParts := strings.Split(fieldType.Tag.Get(d.config.TagName), ",")
+ for _, tag := range tagParts[1:] {
+ if tag == "squash" {
+ squash = true
+ break
+ }
+
+ if tag == "remain" {
+ remain = true
+ break
+ }
+ }
+
+ if squash {
+ if fieldVal.Kind() != reflect.Struct {
+ errors = appendErrors(errors,
+ fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldVal.Kind()))
+ } else {
+ structs = append(structs, fieldVal)
+ }
+ continue
+ }
+
+ // Build our field
+ if remain {
+ remainField = &field{fieldType, fieldVal}
+ } else {
+ // Normal struct field, store it away
+ fields = append(fields, field{fieldType, fieldVal})
+ }
+ }
+ }
+
+ for _, f := range fields {
+ field, fieldValue := f.field, f.val
+ fieldName := field.Name
+
+ tagValue := field.Tag.Get(d.config.TagName)
+ tagValue = strings.SplitN(tagValue, ",", 2)[0]
+ if tagValue != "" {
+ fieldName = tagValue
+ }
+
+ rawMapKey := reflect.ValueOf(fieldName)
+ rawMapVal := dataVal.MapIndex(rawMapKey)
+ if !rawMapVal.IsValid() {
+ // Do a slower search by iterating over each key and
+ // doing case-insensitive search.
+ for dataValKey := range dataValKeys {
+ mK, ok := dataValKey.Interface().(string)
+ if !ok {
+ // Not a string key
+ continue
+ }
+
+ if d.config.MatchName(mK, fieldName) {
+ rawMapKey = dataValKey
+ rawMapVal = dataVal.MapIndex(dataValKey)
+ break
+ }
+ }
+
+ if !rawMapVal.IsValid() {
+ // There was no matching key in the map for the value in
+ // the struct. Remember it for potential errors and metadata.
+ targetValKeysUnused[fieldName] = struct{}{}
+ continue
+ }
+ }
+
+ if !fieldValue.IsValid() {
+ // This should never happen
+ panic("field is not valid")
+ }
+
+ // If we can't set the field, then it is unexported or something,
+ // and we just continue onwards.
+ if !fieldValue.CanSet() {
+ continue
+ }
+
+ // Delete the key we're using from the unused map so we stop tracking
+ delete(dataValKeysUnused, rawMapKey.Interface())
+
+ // If the name is empty string, then we're at the root, and we
+ // don't dot-join the fields.
+ if name != "" {
+ fieldName = name + "." + fieldName
+ }
+
+ if err := d.decode(fieldName, rawMapVal.Interface(), fieldValue); err != nil {
+ errors = appendErrors(errors, err)
+ }
+ }
+
+ // If we have a "remain"-tagged field and we have unused keys then
+ // we put the unused keys directly into the remain field.
+ if remainField != nil && len(dataValKeysUnused) > 0 {
+ // Build a map of only the unused values
+ remain := map[interface{}]interface{}{}
+ for key := range dataValKeysUnused {
+ remain[key] = dataVal.MapIndex(reflect.ValueOf(key)).Interface()
+ }
+
+ // Decode it as-if we were just decoding this map onto our map.
+ if err := d.decodeMap(name, remain, remainField.val); err != nil {
+ errors = appendErrors(errors, err)
+ }
+
+		// Clear the unused-keys map so that the ErrorUnused check below
+		// does not fire for keys consumed by the remain field.
+ dataValKeysUnused = nil
+ }
+
+ if d.config.ErrorUnused && len(dataValKeysUnused) > 0 {
+ keys := make([]string, 0, len(dataValKeysUnused))
+ for rawKey := range dataValKeysUnused {
+ keys = append(keys, rawKey.(string))
+ }
+ sort.Strings(keys)
+
+ err := fmt.Errorf("'%s' has invalid keys: %s", name, strings.Join(keys, ", "))
+ errors = appendErrors(errors, err)
+ }
+
+ if d.config.ErrorUnset && len(targetValKeysUnused) > 0 {
+ keys := make([]string, 0, len(targetValKeysUnused))
+ for rawKey := range targetValKeysUnused {
+ keys = append(keys, rawKey.(string))
+ }
+ sort.Strings(keys)
+
+ err := fmt.Errorf("'%s' has unset fields: %s", name, strings.Join(keys, ", "))
+ errors = appendErrors(errors, err)
+ }
+
+ if len(errors) > 0 {
+ return &Error{errors}
+ }
+
+ // Add the unused keys to the list of unused keys if we're tracking metadata
+ if d.config.Metadata != nil {
+ for rawKey := range dataValKeysUnused {
+ key := rawKey.(string)
+ if name != "" {
+ key = name + "." + key
+ }
+
+ d.config.Metadata.Unused = append(d.config.Metadata.Unused, key)
+ }
+ for rawKey := range targetValKeysUnused {
+ key := rawKey.(string)
+ if name != "" {
+ key = name + "." + key
+ }
+
+ d.config.Metadata.Unset = append(d.config.Metadata.Unset, key)
+ }
+ }
+
+ return nil
+}
+
+func isEmptyValue(v reflect.Value) bool {
+ switch getKind(v) {
+ case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
+ return v.Len() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Interface, reflect.Ptr:
+ return v.IsNil()
+ }
+ return false
+}
+
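+// getKind normalizes sized numeric kinds (Int8 through Int64, Uint8 through
+// Uint64, Float32 and Float64) to a representative base kind (Int, Uint,
+// Float32) so callers can switch on one kind per numeric family.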
+func getKind(val reflect.Value) reflect.Kind {
+ kind := val.Kind()
+
+ switch {
+ case kind >= reflect.Int && kind <= reflect.Int64:
+ return reflect.Int
+ case kind >= reflect.Uint && kind <= reflect.Uint64:
+ return reflect.Uint
+ case kind >= reflect.Float32 && kind <= reflect.Float64:
+ return reflect.Float32
+ default:
+ return kind
+ }
+}
+
+func isStructTypeConvertibleToMap(typ reflect.Type, checkMapstructureTags bool, tagName string) bool {
+ for i := 0; i < typ.NumField(); i++ {
+ f := typ.Field(i)
+		if f.PkgPath == "" && !checkMapstructureTags { // any exported field counts when not checking tags
+ return true
+ }
+ if checkMapstructureTags && f.Tag.Get(tagName) != "" { // check for mapstructure tags inside
+ return true
+ }
+ }
+ return false
+}
+
+func dereferencePtrToStructIfNeeded(v reflect.Value, tagName string) reflect.Value {
+ if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct {
+ return v
+ }
+ deref := v.Elem()
+ derefT := deref.Type()
+ if isStructTypeConvertibleToMap(derefT, true, tagName) {
+ return deref
+ }
+ return v
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/.dockerignore b/vendor/github.com/pelletier/go-toml/v2/.dockerignore
new file mode 100644
index 0000000..7b58834
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/.dockerignore
@@ -0,0 +1,2 @@
+cmd/tomll/tomll
+cmd/tomljson/tomljson
diff --git a/vendor/github.com/pelletier/go-toml/v2/.gitattributes b/vendor/github.com/pelletier/go-toml/v2/.gitattributes
new file mode 100644
index 0000000..34a0a21
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/.gitattributes
@@ -0,0 +1,4 @@
+* text=auto
+
+benchmark/benchmark.toml text eol=lf
+testdata/** text eol=lf
diff --git a/vendor/github.com/pelletier/go-toml/v2/.gitignore b/vendor/github.com/pelletier/go-toml/v2/.gitignore
new file mode 100644
index 0000000..4b7c4ed
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/.gitignore
@@ -0,0 +1,7 @@
+test_program/test_program_bin
+fuzz/
+cmd/tomll/tomll
+cmd/tomljson/tomljson
+cmd/tomltestgen/tomltestgen
+dist
+tests/
diff --git a/vendor/github.com/pelletier/go-toml/v2/.golangci.toml b/vendor/github.com/pelletier/go-toml/v2/.golangci.toml
new file mode 100644
index 0000000..067db55
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/.golangci.toml
@@ -0,0 +1,84 @@
+[service]
+golangci-lint-version = "1.39.0"
+
+[linters-settings.wsl]
+allow-assign-and-anything = true
+
+[linters-settings.exhaustive]
+default-signifies-exhaustive = true
+
+[linters]
+disable-all = true
+enable = [
+ "asciicheck",
+ "bodyclose",
+ "cyclop",
+ "deadcode",
+ "depguard",
+ "dogsled",
+ "dupl",
+ "durationcheck",
+ "errcheck",
+ "errorlint",
+ "exhaustive",
+ # "exhaustivestruct",
+ "exportloopref",
+ "forbidigo",
+ # "forcetypeassert",
+ "funlen",
+ "gci",
+ # "gochecknoglobals",
+ "gochecknoinits",
+ "gocognit",
+ "goconst",
+ "gocritic",
+ "gocyclo",
+ "godot",
+ "godox",
+ # "goerr113",
+ "gofmt",
+ "gofumpt",
+ "goheader",
+ "goimports",
+ "golint",
+ "gomnd",
+ # "gomoddirectives",
+ "gomodguard",
+ "goprintffuncname",
+ "gosec",
+ "gosimple",
+ "govet",
+ # "ifshort",
+ "importas",
+ "ineffassign",
+ "lll",
+ "makezero",
+ "misspell",
+ "nakedret",
+ "nestif",
+ "nilerr",
+ # "nlreturn",
+ "noctx",
+ "nolintlint",
+ #"paralleltest",
+ "prealloc",
+ "predeclared",
+ "revive",
+ "rowserrcheck",
+ "sqlclosecheck",
+ "staticcheck",
+ "structcheck",
+ "stylecheck",
+ # "testpackage",
+ "thelper",
+ "tparallel",
+ "typecheck",
+ "unconvert",
+ "unparam",
+ "unused",
+ "varcheck",
+ "wastedassign",
+ "whitespace",
+ # "wrapcheck",
+ # "wsl"
+]
diff --git a/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml b/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml
new file mode 100644
index 0000000..1d8b69e
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml
@@ -0,0 +1,126 @@
+before:
+ hooks:
+ - go mod tidy
+ - go fmt ./...
+ - go test ./...
+builds:
+ - id: tomll
+ main: ./cmd/tomll
+ binary: tomll
+ env:
+ - CGO_ENABLED=0
+ flags:
+ - -trimpath
+ ldflags:
+ - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}}
+ mod_timestamp: '{{ .CommitTimestamp }}'
+ targets:
+ - linux_amd64
+ - linux_arm64
+ - linux_arm
+ - linux_riscv64
+ - windows_amd64
+ - windows_arm64
+ - windows_arm
+ - darwin_amd64
+ - darwin_arm64
+ - id: tomljson
+ main: ./cmd/tomljson
+ binary: tomljson
+ env:
+ - CGO_ENABLED=0
+ flags:
+ - -trimpath
+ ldflags:
+ - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}}
+ mod_timestamp: '{{ .CommitTimestamp }}'
+ targets:
+ - linux_amd64
+ - linux_arm64
+ - linux_arm
+ - linux_riscv64
+ - windows_amd64
+ - windows_arm64
+ - windows_arm
+ - darwin_amd64
+ - darwin_arm64
+ - id: jsontoml
+ main: ./cmd/jsontoml
+ binary: jsontoml
+ env:
+ - CGO_ENABLED=0
+ flags:
+ - -trimpath
+ ldflags:
+ - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}}
+ mod_timestamp: '{{ .CommitTimestamp }}'
+ targets:
+ - linux_amd64
+ - linux_arm64
+ - linux_riscv64
+ - linux_arm
+ - windows_amd64
+ - windows_arm64
+ - windows_arm
+ - darwin_amd64
+ - darwin_arm64
+universal_binaries:
+ - id: tomll
+ replace: true
+ name_template: tomll
+ - id: tomljson
+ replace: true
+ name_template: tomljson
+ - id: jsontoml
+ replace: true
+ name_template: jsontoml
+archives:
+- id: jsontoml
+ format: tar.xz
+ builds:
+ - jsontoml
+ files:
+ - none*
+ name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}"
+- id: tomljson
+ format: tar.xz
+ builds:
+ - tomljson
+ files:
+ - none*
+ name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}"
+- id: tomll
+ format: tar.xz
+ builds:
+ - tomll
+ files:
+ - none*
+ name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}"
+dockers:
+ - id: tools
+ goos: linux
+ goarch: amd64
+ ids:
+ - jsontoml
+ - tomljson
+ - tomll
+ image_templates:
+ - "ghcr.io/pelletier/go-toml:latest"
+ - "ghcr.io/pelletier/go-toml:{{ .Tag }}"
+ - "ghcr.io/pelletier/go-toml:v{{ .Major }}"
+ skip_push: false
+checksum:
+ name_template: 'sha256sums.txt'
+snapshot:
+ name_template: "{{ incpatch .Version }}-next"
+release:
+ github:
+ owner: pelletier
+ name: go-toml
+ draft: true
+ prerelease: auto
+ mode: replace
+changelog:
+ use: github-native
+announce:
+ skip: true
diff --git a/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md b/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md
new file mode 100644
index 0000000..04dd12b
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md
@@ -0,0 +1,196 @@
+# Contributing
+
+Thank you for your interest in go-toml! We appreciate you considering
+contributing!
+
+The main goal of the project is to provide an easy-to-use and efficient TOML
+implementation for Go that gets the job done and gets out of your way; dealing
+with TOML is probably not the central piece of your project.
+
+go-toml has a single maintainer, so time is scarce. All help, big or small, is
+more than welcome!
+
+## Ask questions
+
+Any question you may have, somebody else might have it too. Always feel free to
+ask them on the [discussion board][discussions]. We will try to answer them as
+clearly and quickly as possible, time permitting.
+
+Asking questions also helps us identify areas where the documentation needs
+improvement, or new features that weren't envisioned before. Sometimes, a
+seemingly innocent question leads to the fix of a bug. Don't hesitate and ask
+away!
+
+[discussions]: https://github.com/pelletier/go-toml/discussions
+
+## Improve the documentation
+
+The best way to share your knowledge and experience with go-toml is to improve
+the documentation. Fix a typo, clarify an interface, add an example, anything
+goes!
+
+The documentation is present in the [README][readme] and throughout the source
+code. On release, it gets updated on [pkg.go.dev][pkg.go.dev]. To make a change
+to the documentation, create a pull request with your proposed changes. For
+simple changes like that, the easiest way to go is probably the "Fork this
+project and edit the file" button on GitHub, displayed at the top right of the
+file. Unless it's a trivial change (for example a typo), provide a little bit of
+context in your pull request description or commit message.
+
+## Report a bug
+
+Found a bug! Sorry to hear that :(. Help us and others track them down and fix
+them by reporting it. [File a new bug report][bug-report] on the [issues
+tracker][issues-tracker]. The template should provide enough guidance on what to
+include. When in doubt: add more details! By reducing ambiguity and providing
+more information, it decreases back and forth and saves everyone time.
+
+## Code changes
+
+Want to contribute a patch? Very happy to hear that!
+
+First, some high-level rules:
+
+- A short proposal with some POC code is better than a lengthy piece of text
+ with no code. Code speaks louder than words. That being said, bigger changes
+ should probably start with a [discussion][discussions].
+- No backward-incompatible patch will be accepted unless discussed. Sometimes
+ it's hard, but we try not to break people's programs unless we absolutely have
+ to.
+- If you are writing a new feature or extending an existing one, make sure to
+ write some documentation.
+- Bug fixes need to be accompanied with regression tests.
+- New code needs to be tested.
+- Your commit messages need to explain why the change is needed, even if already
+ included in the PR description.
+
+It does sound like a lot, but those best practices are here to save time overall
+and continuously improve the quality of the project, which is something everyone
+benefits from.
+
+### Get started
+
+The fairly standard code contribution process looks like this:
+
+1. [Fork the project][fork].
+2. Make your changes, commit on any branch you like.
+3. [Open up a pull request][pull-request].
+4. Review, with potential requests for changes.
+5. Merge.
+
+Feel free to ask for help! You can create draft pull requests to gather
+some early feedback!
+
+### Run the tests
+
+You can run tests for go-toml using Go's test tool: `go test -race ./...`.
+
+During the pull request process, all tests will be run on Linux, Windows, and
+macOS on the last two versions of Go.
+
+However, given GitHub's new policy to _not_ run Actions on pull requests until
+a maintainer clicks a button, it is highly recommended that you run them
+locally as you make changes.
+
+### Check coverage
+
+We use `go tool cover` to compute test coverage. Most code editors have a way to
+run and display code coverage, but at the end of the day, we do this:
+
+```
+go test -covermode=atomic -coverprofile=coverage.out
+go tool cover -func=coverage.out
+```
+
+and verify that the overall percentage of tested code does not go down. This is
+a requirement. As a rule of thumb, all lines of code touched by your changes
+should be covered. On Unix you can use `./ci.sh coverage -d v2` to check if your
+code lowers the coverage.
+
+### Verify performance
+
+Go-toml aims to stay efficient. We rely on a set of scenarios executed with
+Go's built-in benchmark system. Because of their noisy nature, containers
+provided by GitHub Actions cannot be reliably used for benchmarking. As a
+result, you are responsible for checking that your changes do not incur a
+performance penalty.
+You can run the following to execute benchmarks:
+
+```
+go test ./... -bench=. -count=10
+```
+
+Benchmark results should be compared against each other with
+[benchstat][benchstat]. Typical flow looks like this:
+
+1. On the `v2` branch, run `go test ./... -bench=. -count 10` and save output to
+ a file (for example `old.txt`).
+2. Make some code changes.
+3. Run the same `go test` command again, and save the output to another file
+   (for example `new.txt`).
+4. Run `benchstat old.txt new.txt` to check that time/op does not go up in any
+ test.
+
+On Unix you can use `./ci.sh benchmark -d v2` to verify how your code impacts
+performance.
+
+It is highly encouraged to add the benchstat results to your pull request
+description. Pull requests that lower performance will receive more scrutiny.
+
+[benchstat]: https://pkg.go.dev/golang.org/x/perf/cmd/benchstat
+
+### Style
+
+Try to look around and follow the same format and structure as the rest of the
+code. We enforce using `go fmt` on the whole code base.
+
+---
+
+## Maintainers-only
+
+### Merge pull request
+
+Checklist:
+
+- Passing CI.
+- Does not introduce backward-incompatible changes (unless discussed).
+- Has relevant doc changes.
+- Benchstat does not show performance regression.
+- Pull request is [labeled appropriately][pr-labels].
+- Title will be understandable in the changelog.
+
+1. Merge using "squash and merge".
+2. Make sure to edit the commit message to keep all the useful information
+ nice and clean.
+3. Make sure the commit title is clear and contains the PR number (#123).
+
+### New release
+
+1. Decide on the next version number. Use semver.
+2. Generate release notes using [`gh`][gh]. Example:
+```
+$ gh api -X POST \
+ -F tag_name='v2.0.0-beta.5' \
+ -F target_commitish='v2' \
+ -F previous_tag_name='v2.0.0-beta.4' \
+ --jq '.body' \
+ repos/pelletier/go-toml/releases/generate-notes
+```
+3. Look for "Other changes". That would indicate a pull request not labeled
+   properly. Tweak labels and pull request titles until the changelog looks
+   good for users.
+4. [Draft new release][new-release].
+5. Fill tag and target with the same value used to generate the changelog.
+6. Set title to the new tag value.
+7. Paste the generated changelog.
+8. Check "create discussion", in the "Releases" category.
+9. Check pre-release if new version is an alpha or beta.
+
+[issues-tracker]: https://github.com/pelletier/go-toml/issues
+[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
+[pkg.go.dev]: https://pkg.go.dev/github.com/pelletier/go-toml
+[readme]: ./README.md
+[fork]: https://help.github.com/articles/fork-a-repo
+[pull-request]: https://help.github.com/en/articles/creating-a-pull-request
+[new-release]: https://github.com/pelletier/go-toml/releases/new
+[gh]: https://github.com/cli/cli
+[pr-labels]: https://github.com/pelletier/go-toml/blob/v2/.github/release.yml
diff --git a/vendor/github.com/pelletier/go-toml/v2/Dockerfile b/vendor/github.com/pelletier/go-toml/v2/Dockerfile
new file mode 100644
index 0000000..b9e9332
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/Dockerfile
@@ -0,0 +1,5 @@
+FROM scratch
+ENV PATH "$PATH:/bin"
+COPY tomll /bin/tomll
+COPY tomljson /bin/tomljson
+COPY jsontoml /bin/jsontoml
diff --git a/vendor/github.com/pelletier/go-toml/v2/LICENSE b/vendor/github.com/pelletier/go-toml/v2/LICENSE
new file mode 100644
index 0000000..991e2ae
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+go-toml v2
+Copyright (c) 2021 - 2023 Thomas Pelletier
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/pelletier/go-toml/v2/README.md b/vendor/github.com/pelletier/go-toml/v2/README.md
new file mode 100644
index 0000000..b10f97f
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/README.md
@@ -0,0 +1,575 @@
+# go-toml v2
+
+Go library for the [TOML](https://toml.io/en/) format.
+
+This library supports [TOML v1.0.0](https://toml.io/en/v1.0.0).
+
+[🐞 Bug Reports](https://github.com/pelletier/go-toml/issues)
+
+[💬 Anything else](https://github.com/pelletier/go-toml/discussions)
+
+## Documentation
+
+Full API, examples, and implementation notes are available in the Go
+documentation.
+
+[![Go Reference](https://pkg.go.dev/badge/github.com/pelletier/go-toml/v2.svg)](https://pkg.go.dev/github.com/pelletier/go-toml/v2)
+
+## Import
+
+```go
+import "github.com/pelletier/go-toml/v2"
+```
+
+See [Modules](#modules).
+
+## Features
+
+### Stdlib behavior
+
+As much as possible, this library is designed to behave similarly to the
+standard library's `encoding/json`.
+
+### Performance
+
+While go-toml favors usability, it is written with performance in mind. Most
+operations should not be shockingly slow. See [benchmarks](#benchmarks).
+
+### Strict mode
+
+`Decoder` can be set to "strict mode", which makes it error when some parts of
+the TOML document are not present in the target structure. This is a great way
+to check for typos. [See example in the documentation][strict].
+
+[strict]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#example-Decoder.DisallowUnknownFields
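+
+A minimal sketch of strict decoding (using the `Decoder` API from the linked
+example; the document and struct are illustrative):
+
+```go
+doc := `key = 1
+typo = 2`
+
+var cfg struct {
+	Key int
+}
+
+d := toml.NewDecoder(strings.NewReader(doc))
+d.DisallowUnknownFields()
+err := d.Decode(&cfg)
+
+var serr *toml.StrictMissingError
+if errors.As(err, &serr) {
+	fmt.Println(serr.String()) // describes the unknown "typo" key
+}
+```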
+
+### Contextualized errors
+
+When most decoding errors occur, go-toml returns [`DecodeError`][decode-err],
+which contains a human readable contextualized version of the error. For
+example:
+
+```
+1| [server]
+2| path = 100
+ | ~~~ cannot decode TOML integer into struct field toml_test.Server.Path of type string
+3| port = 50
+```
+
+[decode-err]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#DecodeError
+
+### Local date and time support
+
+TOML supports native [local date/times][ldt]. They represent a given date,
+time, or date-time without relation to a timezone or offset. To support this
+use-case, go-toml provides [`LocalDate`][tld], [`LocalTime`][tlt], and
+[`LocalDateTime`][tldt]. Those types can be transformed to and from `time.Time`,
+making them convenient yet unambiguous structures for their respective TOML
+representations.
+
+[ldt]: https://toml.io/en/v1.0.0#local-date-time
+[tld]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalDate
+[tlt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalTime
+[tldt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalDateTime
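+
+For example, decoding a TOML local date into `LocalDate` (a sketch; the field
+name is illustrative):
+
+```go
+var v struct {
+	Birthday toml.LocalDate
+}
+
+if err := toml.Unmarshal([]byte(`birthday = 2006-01-02`), &v); err != nil {
+	panic(err)
+}
+fmt.Println(v.Birthday) // 2006-01-02
+```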
+
+### Commented config
+
+Since TOML is often used for configuration files, go-toml can emit documents
+annotated with [comments and commented-out values][comments-example]. For
+example, it can generate the following file:
+
+```toml
+# Host IP to connect to.
+host = '127.0.0.1'
+# Port of the remote server.
+port = 4242
+
+# Encryption parameters (optional)
+# [TLS]
+# cipher = 'AEAD-AES128-GCM-SHA256'
+# version = 'TLS 1.3'
+```
+
+[comments-example]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#example-Marshal-Commented
+
+## Getting started
+
+Given the following struct, let's see how to read it and write it as TOML:
+
+```go
+type MyConfig struct {
+ Version int
+ Name string
+ Tags []string
+}
+```
+
+### Unmarshaling
+
+[`Unmarshal`][unmarshal] reads a TOML document and fills a Go structure with its
+content. For example:
+
+```go
+doc := `
+version = 2
+name = "go-toml"
+tags = ["go", "toml"]
+`
+
+var cfg MyConfig
+err := toml.Unmarshal([]byte(doc), &cfg)
+if err != nil {
+ panic(err)
+}
+fmt.Println("version:", cfg.Version)
+fmt.Println("name:", cfg.Name)
+fmt.Println("tags:", cfg.Tags)
+
+// Output:
+// version: 2
+// name: go-toml
+// tags: [go toml]
+```
+
+[unmarshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Unmarshal
+
+### Marshaling
+
+[`Marshal`][marshal] is the opposite of Unmarshal: it represents a Go structure
+as a TOML document:
+
+```go
+cfg := MyConfig{
+ Version: 2,
+ Name: "go-toml",
+ Tags: []string{"go", "toml"},
+}
+
+b, err := toml.Marshal(cfg)
+if err != nil {
+ panic(err)
+}
+fmt.Println(string(b))
+
+// Output:
+// Version = 2
+// Name = 'go-toml'
+// Tags = ['go', 'toml']
+```
+
+[marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal
+
+## Unstable API
+
+These APIs do not yet follow the backward compatibility guarantees of this
+library. They provide early access to features that may have rough edges or an
+API subject to change.
+
+### Parser
+
+Parser is the unstable API that allows iterative parsing of a TOML document at
+the AST level. See https://pkg.go.dev/github.com/pelletier/go-toml/v2/unstable.
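+
+A minimal sketch of iterating over the top-level expressions of a document
+(assuming `Parser`'s `Reset`, `NextExpression`, `Expression`, and `Error`
+methods from the unstable package):
+
+```go
+p := unstable.Parser{}
+p.Reset([]byte(`a = 1`))
+for p.NextExpression() {
+	e := p.Expression()
+	fmt.Println(e.Kind) // KeyValue
+}
+if err := p.Error(); err != nil {
+	panic(err)
+}
+```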
+
+## Benchmarks
+
+Execution time speedup compared to other Go TOML libraries:
+
+| Benchmark                        | go-toml v1 | BurntSushi/toml |
+|----------------------------------|------------|-----------------|
+| Marshal/HugoFrontMatter-2        | 1.9x       | 2.2x            |
+| Marshal/ReferenceFile/map-2      | 1.7x       | 2.1x            |
+| Marshal/ReferenceFile/struct-2   | 2.2x       | 3.0x            |
+| Unmarshal/HugoFrontMatter-2      | 2.9x       | 2.7x            |
+| Unmarshal/ReferenceFile/map-2    | 2.6x       | 2.7x            |
+| Unmarshal/ReferenceFile/struct-2 | 4.6x       | 5.1x            |
+
+<details><summary>See more</summary>
+
+The table above has the results of the most common use-cases. The table below
+contains the results of all benchmarks, including unrealistic ones. It is
+provided for completeness.
+
+| Benchmark                         | go-toml v1 | BurntSushi/toml |
+|-----------------------------------|------------|-----------------|
+| Marshal/SimpleDocument/map-2      | 1.8x       | 2.7x            |
+| Marshal/SimpleDocument/struct-2   | 2.7x       | 3.8x            |
+| Unmarshal/SimpleDocument/map-2    | 3.8x       | 3.0x            |
+| Unmarshal/SimpleDocument/struct-2 | 5.6x       | 4.1x            |
+| UnmarshalDataset/example-2        | 3.0x       | 3.2x            |
+| UnmarshalDataset/code-2           | 2.3x       | 2.9x            |
+| UnmarshalDataset/twitter-2        | 2.6x       | 2.7x            |
+| UnmarshalDataset/citm_catalog-2   | 2.2x       | 2.3x            |
+| UnmarshalDataset/canada-2         | 1.8x       | 1.5x            |
+| UnmarshalDataset/config-2         | 4.1x       | 2.9x            |
+| geomean                           | 2.7x       | 2.8x            |
+
+This table can be generated with `./ci.sh benchmark -a -html`.
+
+</details>
+
+
+## Modules
+
+go-toml uses Go's standard modules system.
+
+Installation instructions:
+
+- Go ≥ 1.16: Nothing to do. Use the import in your code. The `go` command deals
+ with it automatically.
+- Go ≥ 1.13: `GO111MODULE=on go get github.com/pelletier/go-toml/v2`.
+
+In case of trouble: [Go Modules FAQ][mod-faq].
+
+[mod-faq]: https://github.com/golang/go/wiki/Modules#why-does-installing-a-tool-via-go-get-fail-with-error-cannot-find-main-module
+
+## Tools
+
+Go-toml provides three handy command line tools:
+
+ * `tomljson`: Reads a TOML file and outputs its JSON representation.
+
+ ```
+ $ go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
+ $ tomljson --help
+ ```
+
+ * `jsontoml`: Reads a JSON file and outputs a TOML representation.
+
+ ```
+ $ go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
+ $ jsontoml --help
+ ```
+
+ * `tomll`: Lints and reformats a TOML file.
+
+ ```
+ $ go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
+ $ tomll --help
+ ```
+
+### Docker image
+
+Those tools are also available as a [Docker image][docker]. For example, to use
+`tomljson`:
+
+```
+docker run -i ghcr.io/pelletier/go-toml:v2 tomljson < example.toml
+```
+
+Multiple versions are available on [ghcr.io][docker].
+
+[docker]: https://github.com/pelletier/go-toml/pkgs/container/go-toml
+
+## Migrating from v1
+
+This section describes the differences between v1 and v2, with some pointers on
+how to get the original behavior when possible.
+
+### Decoding / Unmarshal
+
+#### Automatic field name guessing
+
+When unmarshaling to a struct, if a key in the TOML document does not exactly
+match the name of a struct field or any of its `toml`-tagged names, v1 tries
+multiple variations of the key ([code][v1-keys]).
+
+V2 instead performs case-insensitive matching, like `encoding/json`.
+
+This could impact you if you are relying on casing to differentiate two fields,
+and one of them is not using the `toml` struct tag. The recommended solution is
+to be specific about tag names for those fields using the `toml` struct tag.
+
+[v1-keys]: https://github.com/pelletier/go-toml/blob/a2e52561804c6cd9392ebf0048ca64fe4af67a43/marshal.go#L775-L781
+
+#### Ignore preexisting value in interface
+
+When decoding into a non-nil `interface{}`, go-toml v1 uses the type of the
+element in the interface to decode the object. For example:
+
+```go
+type inner struct {
+ B interface{}
+}
+type doc struct {
+ A interface{}
+}
+
+d := doc{
+ A: inner{
+ B: "Before",
+ },
+}
+
+data := `
+[A]
+B = "After"
+`
+
+toml.Unmarshal([]byte(data), &d)
+fmt.Printf("toml v1: %#v\n", d)
+
+// toml v1: main.doc{A:main.inner{B:"After"}}
+```
+
+In this case, field `A` is of type `interface{}`, containing an `inner` struct.
+V1 sees that type and uses it when decoding the object.
+
+When decoding an object into an `interface{}`, V2 instead disregards whatever
+value the `interface{}` may contain and replaces it with a
+`map[string]interface{}`. With the same data structure as above, here is what
+the result looks like:
+
+```go
+toml.Unmarshal([]byte(data), &d)
+fmt.Printf("toml v2: %#v\n", d)
+
+// toml v2: main.doc{A:map[string]interface {}{"B":"After"}}
+```
+
+This is to match `encoding/json`'s behavior. There is no way to make the v2
+decoder behave like v1.
+
+#### Values out of array bounds ignored
+
+When decoding into an array, v1 returns an error when the number of elements
+contained in the doc is greater than the length of the array. For example:
+
+```go
+type doc struct {
+ A [2]string
+}
+d := doc{}
+err := toml.Unmarshal([]byte(`A = ["one", "two", "many"]`), &d)
+fmt.Println(err)
+
+// (1, 1): unmarshal: TOML array length (3) exceeds destination array length (2)
+```
+
+In the same situation, v2 ignores the last value:
+
+```go
+err := toml.Unmarshal([]byte(`A = ["one", "two", "many"]`), &d)
+fmt.Println("err:", err, "d:", d)
+// err: <nil> d: {[one two]}
+```
+
+This is to match `encoding/json`'s behavior. There is no way to make the v2
+decoder behave like v1.
+
+#### Support for `toml.Unmarshaler` has been dropped
+
+This method was not widely used, poorly defined, and added a lot of complexity.
+A similar effect can be achieved by implementing the `encoding.TextUnmarshaler`
+interface and using strings, as shown below.
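+
+For example, a sketch with a hypothetical `Size` type:
+
+```go
+type Size int
+
+// UnmarshalText implements encoding.TextUnmarshaler, which go-toml v2 uses
+// when decoding a TOML string into a Size.
+func (s *Size) UnmarshalText(b []byte) error {
+	switch string(b) {
+	case "small":
+		*s = 1
+	case "large":
+		*s = 2
+	default:
+		return fmt.Errorf("unknown size %q", b)
+	}
+	return nil
+}
+```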
+
+#### Support for `default` struct tag has been dropped
+
+This feature adds complexity and a poorly defined API for an effect that can be
+accomplished outside of the library.
+
+It does not seem like other format parsers in Go support that feature (the
+project referenced in the original ticket #202 has not been updated since 2017).
+Given that go-toml v2 should not touch values not in the document, the same
+effect can be achieved by pre-filling the struct with defaults (libraries like
+[go-defaults][go-defaults] can help). Also, string representation is not well
+defined for all types: it creates issues like #278.
+
+The recommended replacement is pre-filling the struct before unmarshaling.
+
+[go-defaults]: https://github.com/mcuadros/go-defaults
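+
+For example, a sketch with illustrative defaults:
+
+```go
+cfg := struct {
+	Host string
+	Port int
+}{
+	Host: "localhost", // default
+	Port: 8080,        // default
+}
+
+// Only keys present in the document overwrite the pre-filled values.
+if err := toml.Unmarshal([]byte(`port = 4242`), &cfg); err != nil {
+	panic(err)
+}
+fmt.Println(cfg.Host, cfg.Port) // localhost 4242
+```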
+
+#### `toml.Tree` replacement
+
+This structure was the initial attempt at providing a document model for
+go-toml. It allows manipulating the structure of any document, encoding and
+decoding it from its TOML representation. While a more robust feature was
+initially planned in go-toml v2, it has ultimately been [removed from the
+scope][nodoc] of this library, with no plan to add it back at the moment. The
+closest equivalent at the moment would be to unmarshal into an `interface{}`
+and use type assertions and/or reflection to manipulate the arbitrary
+structure. However, this would fall short of providing all of the TOML features
+such as adding comments or controlling whitespace.
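+
+A sketch of that workaround:
+
+```go
+var v interface{}
+
+if err := toml.Unmarshal([]byte("[server]\nhost = 'localhost'"), &v); err != nil {
+	panic(err)
+}
+
+// Tables decode to map[string]interface{}.
+server := v.(map[string]interface{})["server"].(map[string]interface{})
+fmt.Println(server["host"]) // localhost
+```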
+
+
+#### `toml.Position` is not retrievable anymore
+
+The API for retrieving the position (line, column) of a specific TOML element
+does not exist anymore. This was done to minimize the number of concepts
+introduced by the library (query path), and to avoid the performance hit of
+storing positions in the absence of a document model, for a feature that seemed
+to have little use. Errors however have gained more detailed position
+information. Position retrieval seems better fitted for a document model, which
+has been [removed from the scope][nodoc] of go-toml v2 at the moment.
+
+### Encoding / Marshal
+
+#### Default struct fields order
+
+V1 emits struct fields ordered alphabetically by default. V2 emits struct
+fields in the order they are defined. For example:
+
+```go
+type S struct {
+ B string
+ A string
+}
+
+data := S{
+ B: "B",
+ A: "A",
+}
+
+b, _ := tomlv1.Marshal(data)
+fmt.Println("v1:\n" + string(b))
+
+b, _ = tomlv2.Marshal(data)
+fmt.Println("v2:\n" + string(b))
+
+// Output:
+// v1:
+// A = "A"
+// B = "B"
+
+// v2:
+// B = 'B'
+// A = 'A'
+```
+
+There is no way to make the v2 encoder behave like v1. A workaround could be to
+manually sort the fields alphabetically in the struct definition, or to
+generate struct types using `reflect.StructOf`.
+
+#### No indentation by default
+
+V1 automatically indents the content of tables by default. V2 does not.
+However, the same behavior can be obtained using
+[`Encoder.SetIndentTables`][sit]. For example:
+
+```go
+data := map[string]interface{}{
+ "table": map[string]string{
+ "key": "value",
+ },
+}
+
+b, _ := tomlv1.Marshal(data)
+fmt.Println("v1:\n" + string(b))
+
+b, _ = tomlv2.Marshal(data)
+fmt.Println("v2:\n" + string(b))
+
+buf := bytes.Buffer{}
+enc := tomlv2.NewEncoder(&buf)
+enc.SetIndentTables(true)
+enc.Encode(data)
+fmt.Println("v2 Encoder:\n" + string(buf.Bytes()))
+
+// Output:
+// v1:
+//
+// [table]
+// key = "value"
+//
+// v2:
+// [table]
+// key = 'value'
+//
+//
+// v2 Encoder:
+// [table]
+// key = 'value'
+```
+
+[sit]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Encoder.SetIndentTables
+
+#### Keys and strings are single quoted
+
+V1 always uses double quotes (`"`) around strings and keys that cannot be
+represented bare (unquoted). V2 instead uses single quotes (`'`) by default,
+falling back to double quotes when a character cannot be represented otherwise.
+As a result of this change, `Encoder.QuoteMapKeys` has been removed, as it is
+no longer useful.
+
+There is no way to make the v2 encoder behave like v1.
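+
+For example, a sketch of the default quoting (output shape per the rules
+above):
+
+```go
+b, _ := toml.Marshal(map[string]string{"key with space": "value"})
+fmt.Println(string(b)) // 'key with space' = 'value'
+```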
+
+#### `TextMarshaler` emits as a string, not TOML
+
+Types that implement [`encoding.TextMarshaler`][tm] can emit arbitrary TOML in
+v1. The encoder would append the result to the output directly. In v2 the result
+is wrapped in a string. As a result, this interface cannot be implemented by the
+root object.
+
+There is no way to make the v2 encoder behave like v1.
+
+[tm]: https://golang.org/pkg/encoding/#TextMarshaler
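+
+For example, a sketch with a hypothetical `ID` type:
+
+```go
+type ID struct{}
+
+func (ID) MarshalText() ([]byte, error) {
+	return []byte("abc"), nil
+}
+
+b, _ := toml.Marshal(map[string]ID{"id": {}})
+fmt.Println(string(b)) // id = 'abc'
+```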
+
+#### `Encoder.CompactComments` has been removed
+
+Emitting compact comments is now the default behavior of go-toml. This option
+is not necessary anymore.
+
+#### Struct tags have been merged
+
+V1 used to provide multiple struct tags: `comment`, `commented`, `multiline`,
+`toml`, and `omitempty`. To behave more like the standard library, v2 has merged
+`toml`, `multiline`, `commented`, and `omitempty`. For example:
+
+```go
+// v1
+type docV1 struct {
+	F string `toml:"field" multiline:"true" omitempty:"true" commented:"true"`
+}
+
+// v2
+type docV2 struct {
+	F string `toml:"field,multiline,omitempty,commented"`
+}
+```
+
+As a result, the `Encoder.SetTag*` methods have been removed, as there is just
+one tag now.
+
+#### `Encoder.ArraysWithOneElementPerLine` has been renamed
+
+The new name is `Encoder.SetArraysMultiline`. The behavior should be the same.
+
+#### `Encoder.Indentation` has been renamed
+
+The new name is `Encoder.SetIndentSymbol`. The behavior should be the same.
+
+
+#### Embedded structs behave like stdlib
+
+V1 defaults to merging embedded struct fields into the embedding struct. This
+behavior was unexpected because it does not follow the standard library. To
+avoid breaking backward compatibility, the `Encoder.PromoteAnonymous` method
+was added in v1 to make the encoder behave correctly. Given that backward
+compatibility is no longer a constraint, v2 does the right thing by default: it
+follows the behavior of `encoding/json`. `Encoder.PromoteAnonymous` has been
+removed.
+
+[nodoc]: https://github.com/pelletier/go-toml/discussions/506#discussioncomment-1526038
+
+### `query`
+
+go-toml v1 provided the [`go-toml/query`][query] package. It allowed running
+JSONPath-style queries on TOML files. This feature is not available in v2. For
+a replacement, check out [dasel][dasel].
+
+This package has been removed because it was essentially not supported anymore
+(last commit May 2020), increased the complexity of the code base, and more
+complete solutions exist out there.
+
+[query]: https://github.com/pelletier/go-toml/tree/f99d6bbca119636aeafcf351ee52b3d202782627/query
+[dasel]: https://github.com/TomWright/dasel
+
+## Versioning
+
+Go-toml follows [Semantic Versioning](https://semver.org). The supported version
+of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of
+this document. The last two major versions of Go are supported
+(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)).
+
+## License
+
+The MIT License (MIT). Read [LICENSE](LICENSE).
diff --git a/vendor/github.com/pelletier/go-toml/v2/SECURITY.md b/vendor/github.com/pelletier/go-toml/v2/SECURITY.md
new file mode 100644
index 0000000..d4d554f
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/SECURITY.md
@@ -0,0 +1,16 @@
+# Security Policy
+
+## Supported Versions
+
+| Version | Supported |
+| ---------- | ------------------ |
+| Latest 2.x | :white_check_mark: |
+| All 1.x | :x: |
+| All 0.x | :x: |
+
+## Reporting a Vulnerability
+
+Email a vulnerability report to `security@pelletier.codes`. Make sure to include
+as many details as possible to reproduce the vulnerability. This is a
+side-project: I will try to get back to you as quickly as possible, time
+permitting in my personal life. Providing a working patch helps very much!
diff --git a/vendor/github.com/pelletier/go-toml/v2/ci.sh b/vendor/github.com/pelletier/go-toml/v2/ci.sh
new file mode 100644
index 0000000..86217a9
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/ci.sh
@@ -0,0 +1,284 @@
+#!/usr/bin/env bash
+
+
+stderr() {
+ echo "$@" 1>&2
+}
+
+usage() {
+ b=$(basename "$0")
+ echo $b: ERROR: "$@" 1>&2
+
+    cat 1>&2 <<EOF
+usage: $b coverage [-d TARGET_BRANCH] | benchmark [-d TARGET_BRANCH | -a [-html]] [BRANCH]
+EOF
+    exit 1
+}
+
+cover() {
+    branch="${1}"
+    dir="$(mktemp -d)"
+
+    stderr "Executing coverage for ${branch} at ${dir}"
+
+    if [ "${branch}" = "HEAD" ]; then
+        cp -r . "${dir}/"
+    else
+        git worktree add "$dir" "$branch"
+    fi
+
+    pushd "$dir"
+    go test -covermode=atomic -coverpkg=./... -coverprofile=coverage.out ./...
+ go tool cover -func=coverage.out
+ echo "Coverage profile for ${branch}: ${dir}/coverage.out" >&2
+ popd
+
+ if [ "${branch}" != "HEAD" ]; then
+ git worktree remove --force "$dir"
+ fi
+}
+
+coverage() {
+ case "$1" in
+ -d)
+ shift
+ target="${1?Need to provide a target branch argument}"
+
+ output_dir="$(mktemp -d)"
+ target_out="${output_dir}/target.txt"
+ head_out="${output_dir}/head.txt"
+
+ cover "${target}" > "${target_out}"
+ cover "HEAD" > "${head_out}"
+
+ cat "${target_out}"
+ cat "${head_out}"
+
+ echo ""
+
+ target_pct="$(tail -n2 ${target_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%.*/\1/')"
+    head_pct="$(tail -n2 ${head_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%.*/\1/')"
+ echo "Results: ${target} ${target_pct}% HEAD ${head_pct}%"
+
+ delta_pct=$(echo "$head_pct - $target_pct" | bc -l)
+ echo "Delta: ${delta_pct}"
+
+ if [[ $delta_pct = \-* ]]; then
+ echo "Regression!";
+
+ target_diff="${output_dir}/target.diff.txt"
+ head_diff="${output_dir}/head.diff.txt"
+ cat "${target_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${target_diff}"
+ cat "${head_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${head_diff}"
+
+ diff --side-by-side --suppress-common-lines "${target_diff}" "${head_diff}"
+ return 1
+ fi
+ return 0
+ ;;
+ esac
+
+ cover "${1-HEAD}"
+}
+
+bench() {
+ branch="${1}"
+ out="${2}"
+ replace="${3}"
+ dir="$(mktemp -d)"
+
+ stderr "Executing benchmark for ${branch} at ${dir}"
+
+ if [ "${branch}" = "HEAD" ]; then
+ cp -r . "${dir}/"
+ else
+ git worktree add "$dir" "$branch"
+ fi
+
+ pushd "$dir"
+
+ if [ "${replace}" != "" ]; then
+ find ./benchmark/ -iname '*.go' -exec sed -i -E "s|github.com/pelletier/go-toml/v2|${replace}|g" {} \;
+ go get "${replace}"
+ fi
+
+ export GOMAXPROCS=2
+ go test '-bench=^Benchmark(Un)?[mM]arshal' -count=10 -run=Nothing ./... | tee "${out}"
+ popd
+
+ if [ "${branch}" != "HEAD" ]; then
+ git worktree remove --force "$dir"
+ fi
+}
+
+fmktemp() {
+ if mktemp --version &> /dev/null; then
+ # GNU
+ mktemp --suffix=-$1
+ else
+ # BSD
+ mktemp -t $1
+ fi
+}
+
+benchstathtml() {
+python3 - $1 <<'EOF'
+import sys
+
+lines = []
+stop = False
+
+with open(sys.argv[1]) as f:
+ for line in f.readlines():
+ line = line.strip()
+ if line == "":
+ stop = True
+ if not stop:
+ lines.append(line.split(','))
+
+results = []
+for line in reversed(lines[2:]):
+ if len(line) < 8 or line[0] == "":
+ continue
+ v2 = float(line[1])
+ results.append([
+ line[0].replace("-32", ""),
+ "%.1fx" % (float(line[3])/v2), # v1
+ "%.1fx" % (float(line[7])/v2), # bs
+ ])
+# move geomean to the end
+results.append(results[0])
+del results[0]
+
+
+def printtable(data):
+    print("""<table>
+    <thead>
+        <tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
+    </thead>
+    <tbody>""")
+
+    for r in data:
+        print("        <tr><td>{}</td><td>{}</td><td>{}</td></tr>".format(*r))
+
+    print("""    </tbody>
+</table>""")
+
+
+def match(x):
+ return "ReferenceFile" in x[0] or "HugoFrontMatter" in x[0]
+
+above = [x for x in results if match(x)]
+below = [x for x in results if not match(x)]
+
+printtable(above)
+print("See more ")
+print("""The table above has the results of the most common use-cases. The table below
+contains the results of all benchmarks, including unrealistic ones. It is
+provided for completeness.
""")
+printtable(below)
+print('This table can be generated with ./ci.sh benchmark -a -html
.
')
+print(" ")
+
+EOF
+}
+
+benchmark() {
+ case "$1" in
+ -d)
+ shift
+ target="${1?Need to provide a target branch argument}"
+
+ old=`fmktemp ${target}`
+ bench "${target}" "${old}"
+
+ new=`fmktemp HEAD`
+ bench HEAD "${new}"
+
+ benchstat "${old}" "${new}"
+ return 0
+ ;;
+ -a)
+ shift
+
+ v2stats=`fmktemp go-toml-v2`
+ bench HEAD "${v2stats}" "github.com/pelletier/go-toml/v2"
+ v1stats=`fmktemp go-toml-v1`
+ bench HEAD "${v1stats}" "github.com/pelletier/go-toml"
+ bsstats=`fmktemp bs-toml`
+ bench HEAD "${bsstats}" "github.com/BurntSushi/toml"
+
+ cp "${v2stats}" go-toml-v2.txt
+ cp "${v1stats}" go-toml-v1.txt
+ cp "${bsstats}" bs-toml.txt
+
+ if [ "$1" = "-html" ]; then
+ tmpcsv=`fmktemp csv`
+ benchstat -format csv go-toml-v2.txt go-toml-v1.txt bs-toml.txt > $tmpcsv
+ benchstathtml $tmpcsv
+ else
+ benchstat go-toml-v2.txt go-toml-v1.txt bs-toml.txt
+ fi
+
+ rm -f go-toml-v2.txt go-toml-v1.txt bs-toml.txt
+ return $?
+ esac
+
+ bench "${1-HEAD}" `mktemp`
+}
+
+case "$1" in
+ coverage) shift; coverage $@;;
+ benchmark) shift; benchmark $@;;
+ *) usage "bad argument $1";;
+esac
diff --git a/vendor/github.com/pelletier/go-toml/v2/decode.go b/vendor/github.com/pelletier/go-toml/v2/decode.go
new file mode 100644
index 0000000..f0ec3b1
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/decode.go
@@ -0,0 +1,550 @@
+package toml
+
+import (
+ "fmt"
+ "math"
+ "strconv"
+ "time"
+
+ "github.com/pelletier/go-toml/v2/unstable"
+)
+
+func parseInteger(b []byte) (int64, error) {
+ if len(b) > 2 && b[0] == '0' {
+ switch b[1] {
+ case 'x':
+ return parseIntHex(b)
+ case 'b':
+ return parseIntBin(b)
+ case 'o':
+ return parseIntOct(b)
+ default:
+ panic(fmt.Errorf("invalid base '%c', should have been checked by scanIntOrFloat", b[1]))
+ }
+ }
+
+ return parseIntDec(b)
+}
+
+func parseLocalDate(b []byte) (LocalDate, error) {
+ // full-date = date-fullyear "-" date-month "-" date-mday
+ // date-fullyear = 4DIGIT
+ // date-month = 2DIGIT ; 01-12
+ // date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year
+ var date LocalDate
+
+ if len(b) != 10 || b[4] != '-' || b[7] != '-' {
+ return date, unstable.NewParserError(b, "dates are expected to have the format YYYY-MM-DD")
+ }
+
+ var err error
+
+ date.Year, err = parseDecimalDigits(b[0:4])
+ if err != nil {
+ return LocalDate{}, err
+ }
+
+ date.Month, err = parseDecimalDigits(b[5:7])
+ if err != nil {
+ return LocalDate{}, err
+ }
+
+ date.Day, err = parseDecimalDigits(b[8:10])
+ if err != nil {
+ return LocalDate{}, err
+ }
+
+ if !isValidDate(date.Year, date.Month, date.Day) {
+ return LocalDate{}, unstable.NewParserError(b, "impossible date")
+ }
+
+ return date, nil
+}
+
+func parseDecimalDigits(b []byte) (int, error) {
+ v := 0
+
+ for i, c := range b {
+ if c < '0' || c > '9' {
+ return 0, unstable.NewParserError(b[i:i+1], "expected digit (0-9)")
+ }
+ v *= 10
+ v += int(c - '0')
+ }
+
+ return v, nil
+}
+
+func parseDateTime(b []byte) (time.Time, error) {
+ // offset-date-time = full-date time-delim full-time
+ // full-time = partial-time time-offset
+ // time-offset = "Z" / time-numoffset
+ // time-numoffset = ( "+" / "-" ) time-hour ":" time-minute
+
+ dt, b, err := parseLocalDateTime(b)
+ if err != nil {
+ return time.Time{}, err
+ }
+
+ var zone *time.Location
+
+ if len(b) == 0 {
+ // parser should have checked that when assigning the date time node
+ panic("date time should have a timezone")
+ }
+
+ if b[0] == 'Z' || b[0] == 'z' {
+ b = b[1:]
+ zone = time.UTC
+ } else {
+ const dateTimeByteLen = 6
+ if len(b) != dateTimeByteLen {
+ return time.Time{}, unstable.NewParserError(b, "invalid date-time timezone")
+ }
+ var direction int
+ switch b[0] {
+ case '-':
+ direction = -1
+ case '+':
+ direction = +1
+ default:
+ return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset character")
+ }
+
+ if b[3] != ':' {
+ return time.Time{}, unstable.NewParserError(b[3:4], "expected a : separator")
+ }
+
+ hours, err := parseDecimalDigits(b[1:3])
+ if err != nil {
+ return time.Time{}, err
+ }
+ if hours > 23 {
+ return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset hours")
+ }
+
+ minutes, err := parseDecimalDigits(b[4:6])
+ if err != nil {
+ return time.Time{}, err
+ }
+ if minutes > 59 {
+ return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset minutes")
+ }
+
+ seconds := direction * (hours*3600 + minutes*60)
+ if seconds == 0 {
+ zone = time.UTC
+ } else {
+ zone = time.FixedZone("", seconds)
+ }
+ b = b[dateTimeByteLen:]
+ }
+
+ if len(b) > 0 {
+ return time.Time{}, unstable.NewParserError(b, "extra bytes at the end of the timezone")
+ }
+
+ t := time.Date(
+ dt.Year,
+ time.Month(dt.Month),
+ dt.Day,
+ dt.Hour,
+ dt.Minute,
+ dt.Second,
+ dt.Nanosecond,
+ zone)
+
+ return t, nil
+}
+
+func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) {
+ var dt LocalDateTime
+
+ const localDateTimeByteMinLen = 11
+ if len(b) < localDateTimeByteMinLen {
+ return dt, nil, unstable.NewParserError(b, "local datetimes are expected to have the format YYYY-MM-DDTHH:MM:SS[.NNNNNNNNN]")
+ }
+
+ date, err := parseLocalDate(b[:10])
+ if err != nil {
+ return dt, nil, err
+ }
+ dt.LocalDate = date
+
+ sep := b[10]
+ if sep != 'T' && sep != ' ' && sep != 't' {
+ return dt, nil, unstable.NewParserError(b[10:11], "datetime separator is expected to be T or a space")
+ }
+
+ t, rest, err := parseLocalTime(b[11:])
+ if err != nil {
+ return dt, nil, err
+ }
+ dt.LocalTime = t
+
+ return dt, rest, nil
+}
+
+// parseLocalTime is a bit different because it also returns the remaining
+// []byte that it didn't need. This is to allow parseDateTime to parse those
+// remaining bytes as a timezone.
+func parseLocalTime(b []byte) (LocalTime, []byte, error) {
+ var (
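+ // nspow[i] scales a parsed fraction with i digits up to nanoseconds.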
+ nspow = [10]int{0, 1e8, 1e7, 1e6, 1e5, 1e4, 1e3, 1e2, 1e1, 1e0}
+ t LocalTime
+ )
+
+ // check if b matches to have expected format HH:MM:SS[.NNNNNN]
+ const localTimeByteLen = 8
+ if len(b) < localTimeByteLen {
+ return t, nil, unstable.NewParserError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]")
+ }
+
+ var err error
+
+ t.Hour, err = parseDecimalDigits(b[0:2])
+ if err != nil {
+ return t, nil, err
+ }
+
+ if t.Hour > 23 {
+ return t, nil, unstable.NewParserError(b[0:2], "hour cannot be greater than 23")
+ }
+ if b[2] != ':' {
+ return t, nil, unstable.NewParserError(b[2:3], "expecting colon between hours and minutes")
+ }
+
+ t.Minute, err = parseDecimalDigits(b[3:5])
+ if err != nil {
+ return t, nil, err
+ }
+ if t.Minute > 59 {
+ return t, nil, unstable.NewParserError(b[3:5], "minutes cannot be greater than 59")
+ }
+ if b[5] != ':' {
+ return t, nil, unstable.NewParserError(b[5:6], "expecting colon between minutes and seconds")
+ }
+
+ t.Second, err = parseDecimalDigits(b[6:8])
+ if err != nil {
+ return t, nil, err
+ }
+
+ if t.Second > 60 {
+ return t, nil, unstable.NewParserError(b[6:8], "seconds cannot be greater than 60")
+ }
+
+ b = b[8:]
+
+ if len(b) >= 1 && b[0] == '.' {
+ frac := 0
+ precision := 0
+ digits := 0
+
+ for i, c := range b[1:] {
+ if !isDigit(c) {
+ if i == 0 {
+ return t, nil, unstable.NewParserError(b[0:1], "need at least one digit after fraction point")
+ }
+ break
+ }
+ digits++
+
+ const maxFracPrecision = 9
+ if i >= maxFracPrecision {
+ // go-toml allows decoding fractional seconds
+ // beyond the supported precision of 9
+ // digits. It truncates the fractional component
+ // to the supported precision and ignores the
+ // remaining digits.
+ //
+ // https://github.com/pelletier/go-toml/discussions/707
+ continue
+ }
+
+ frac *= 10
+ frac += int(c - '0')
+ precision++
+ }
+
+ if precision == 0 {
+ return t, nil, unstable.NewParserError(b[:1], "nanoseconds need at least one digit")
+ }
+
+ t.Nanosecond = frac * nspow[precision]
+ t.Precision = precision
+
+ return t, b[1+digits:], nil
+ }
+ return t, b, nil
+}
+
+//nolint:cyclop
+func parseFloat(b []byte) (float64, error) {
+ if len(b) == 4 && (b[0] == '+' || b[0] == '-') && b[1] == 'n' && b[2] == 'a' && b[3] == 'n' {
+ return math.NaN(), nil
+ }
+
+ cleaned, err := checkAndRemoveUnderscoresFloats(b)
+ if err != nil {
+ return 0, err
+ }
+
+ if cleaned[0] == '.' {
+ return 0, unstable.NewParserError(b, "float cannot start with a dot")
+ }
+
+ if cleaned[len(cleaned)-1] == '.' {
+ return 0, unstable.NewParserError(b, "float cannot end with a dot")
+ }
+
+ dotAlreadySeen := false
+ for i, c := range cleaned {
+ if c == '.' {
+ if dotAlreadySeen {
+ return 0, unstable.NewParserError(b[i:i+1], "float can have at most one decimal point")
+ }
+ if !isDigit(cleaned[i-1]) {
+ return 0, unstable.NewParserError(b[i-1:i+1], "float decimal point must be preceded by a digit")
+ }
+ if !isDigit(cleaned[i+1]) {
+ return 0, unstable.NewParserError(b[i:i+2], "float decimal point must be followed by a digit")
+ }
+ dotAlreadySeen = true
+ }
+ }
+
+ start := 0
+ if cleaned[0] == '+' || cleaned[0] == '-' {
+ start = 1
+ }
+ if cleaned[start] == '0' && len(cleaned) > start+1 && isDigit(cleaned[start+1]) {
+ return 0, unstable.NewParserError(b, "float integer part cannot have leading zeroes")
+ }
+
+ f, err := strconv.ParseFloat(string(cleaned), 64)
+ if err != nil {
+ return 0, unstable.NewParserError(b, "unable to parse float: %w", err)
+ }
+
+ return f, nil
+}
+
+func parseIntHex(b []byte) (int64, error) {
+ cleaned, err := checkAndRemoveUnderscoresIntegers(b[2:])
+ if err != nil {
+ return 0, err
+ }
+
+ i, err := strconv.ParseInt(string(cleaned), 16, 64)
+ if err != nil {
+ return 0, unstable.NewParserError(b, "couldn't parse hexadecimal number: %w", err)
+ }
+
+ return i, nil
+}
+
+func parseIntOct(b []byte) (int64, error) {
+ cleaned, err := checkAndRemoveUnderscoresIntegers(b[2:])
+ if err != nil {
+ return 0, err
+ }
+
+ i, err := strconv.ParseInt(string(cleaned), 8, 64)
+ if err != nil {
+ return 0, unstable.NewParserError(b, "couldn't parse octal number: %w", err)
+ }
+
+ return i, nil
+}
+
+func parseIntBin(b []byte) (int64, error) {
+ cleaned, err := checkAndRemoveUnderscoresIntegers(b[2:])
+ if err != nil {
+ return 0, err
+ }
+
+ i, err := strconv.ParseInt(string(cleaned), 2, 64)
+ if err != nil {
+ return 0, unstable.NewParserError(b, "couldn't parse binary number: %w", err)
+ }
+
+ return i, nil
+}
+
+func isSign(b byte) bool {
+ return b == '+' || b == '-'
+}
+
+func parseIntDec(b []byte) (int64, error) {
+ cleaned, err := checkAndRemoveUnderscoresIntegers(b)
+ if err != nil {
+ return 0, err
+ }
+
+ startIdx := 0
+
+ if isSign(cleaned[0]) {
+ startIdx++
+ }
+
+ if len(cleaned) > startIdx+1 && cleaned[startIdx] == '0' {
+ return 0, unstable.NewParserError(b, "leading zero not allowed on decimal number")
+ }
+
+ i, err := strconv.ParseInt(string(cleaned), 10, 64)
+ if err != nil {
+ return 0, unstable.NewParserError(b, "couldn't parse decimal number: %w", err)
+ }
+
+ return i, nil
+}
+
+func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
+ start := 0
+ if b[start] == '+' || b[start] == '-' {
+ start++
+ }
+
+ if len(b) == start {
+ return b, nil
+ }
+
+ if b[start] == '_' {
+ return nil, unstable.NewParserError(b[start:start+1], "number cannot start with underscore")
+ }
+
+ if b[len(b)-1] == '_' {
+ return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore")
+ }
+
+ // fast path
+ i := 0
+ for ; i < len(b); i++ {
+ if b[i] == '_' {
+ break
+ }
+ }
+ if i == len(b) {
+ return b, nil
+ }
+
+ before := false
+ cleaned := make([]byte, i, len(b))
+ copy(cleaned, b)
+
+ for i++; i < len(b); i++ {
+ c := b[i]
+ if c == '_' {
+ if !before {
+ return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores")
+ }
+ before = false
+ } else {
+ before = true
+ cleaned = append(cleaned, c)
+ }
+ }
+
+ return cleaned, nil
+}
+
+func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
+ if b[0] == '_' {
+ return nil, unstable.NewParserError(b[0:1], "number cannot start with underscore")
+ }
+
+ if b[len(b)-1] == '_' {
+ return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore")
+ }
+
+ // fast path
+ i := 0
+ for ; i < len(b); i++ {
+ if b[i] == '_' {
+ break
+ }
+ }
+ if i == len(b) {
+ return b, nil
+ }
+
+ before := false
+ cleaned := make([]byte, 0, len(b))
+
+ for i := 0; i < len(b); i++ {
+ c := b[i]
+
+ switch c {
+ case '_':
+ if !before {
+ return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores")
+ }
+ if i < len(b)-1 && (b[i+1] == 'e' || b[i+1] == 'E') {
+ return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore before exponent")
+ }
+ before = false
+ case '+', '-':
+ // signed exponents
+ cleaned = append(cleaned, c)
+ before = false
+ case 'e', 'E':
+ if i < len(b)-1 && b[i+1] == '_' {
+ return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after exponent")
+ }
+ cleaned = append(cleaned, c)
+ case '.':
+ if i < len(b)-1 && b[i+1] == '_' {
+ return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after decimal point")
+ }
+ if i > 0 && b[i-1] == '_' {
+ return nil, unstable.NewParserError(b[i-1:i], "cannot have underscore before decimal point")
+ }
+ cleaned = append(cleaned, c)
+ default:
+ before = true
+ cleaned = append(cleaned, c)
+ }
+ }
+
+ return cleaned, nil
+}
+
+// isValidDate checks if a provided date is a date that exists.
+func isValidDate(year int, month int, day int) bool {
+ return month > 0 && month < 13 && day > 0 && day <= daysIn(month, year)
+}
+
+// daysBefore[m] counts the number of days in a non-leap year
+// before month m begins. There is an entry for m=12, counting
+// the number of days before January of next year (365).
+var daysBefore = [...]int32{
+ 0,
+ 31,
+ 31 + 28,
+ 31 + 28 + 31,
+ 31 + 28 + 31 + 30,
+ 31 + 28 + 31 + 30 + 31,
+ 31 + 28 + 31 + 30 + 31 + 30,
+ 31 + 28 + 31 + 30 + 31 + 30 + 31,
+ 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31,
+ 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30,
+ 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31,
+ 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30,
+ 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31,
+}
+
+func daysIn(m int, year int) int {
+ if m == 2 && isLeap(year) {
+ return 29
+ }
+ return int(daysBefore[m] - daysBefore[m-1])
+}
+
+func isLeap(year int) bool {
+ return year%4 == 0 && (year%100 != 0 || year%400 == 0)
+}
+
+func isDigit(r byte) bool {
+ return r >= '0' && r <= '9'
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/doc.go b/vendor/github.com/pelletier/go-toml/v2/doc.go
new file mode 100644
index 0000000..b7bc599
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/doc.go
@@ -0,0 +1,2 @@
+// Package toml is a library to read and write TOML documents.
+package toml
diff --git a/vendor/github.com/pelletier/go-toml/v2/errors.go b/vendor/github.com/pelletier/go-toml/v2/errors.go
new file mode 100644
index 0000000..309733f
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/errors.go
@@ -0,0 +1,252 @@
+package toml
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "github.com/pelletier/go-toml/v2/internal/danger"
+ "github.com/pelletier/go-toml/v2/unstable"
+)
+
+// DecodeError represents an error encountered during the parsing or decoding
+// of a TOML document.
+//
+// In addition to the error message, it contains the position in the document
+// where it happened, as well as a human-readable representation that shows
+// where the error occurred in the document.
+type DecodeError struct {
+ message string
+ line int
+ column int
+ key Key
+
+ human string
+}
+
+// StrictMissingError occurs in a TOML document that does not have a
+// corresponding field in the target value. It contains all the missing fields
+// in Errors.
+//
+// Emitted by Decoder when DisallowUnknownFields() was called.
+type StrictMissingError struct {
+ // One error per field that could not be found.
+ Errors []DecodeError
+}
+
+// Error returns the canonical string for this error.
+func (s *StrictMissingError) Error() string {
+ return "strict mode: fields in the document are missing in the target struct"
+}
+
+// String returns a human readable description of all errors.
+func (s *StrictMissingError) String() string {
+ var buf strings.Builder
+
+ for i, e := range s.Errors {
+ if i > 0 {
+ buf.WriteString("\n---\n")
+ }
+
+ buf.WriteString(e.String())
+ }
+
+ return buf.String()
+}
+
+type Key []string
+
+// Error returns the error message contained in the DecodeError.
+func (e *DecodeError) Error() string {
+ return "toml: " + e.message
+}
+
+// String returns the human-readable contextualized error. This string is multi-line.
+func (e *DecodeError) String() string {
+ return e.human
+}
+
+// Position returns the (line, column) pair indicating where the error
+// occurred in the document. Positions are 1-indexed.
+func (e *DecodeError) Position() (row int, column int) {
+ return e.line, e.column
+}
+
+// Key that was being processed when the error occurred. The key is present only
+// if this DecodeError is part of a StrictMissingError.
+func (e *DecodeError) Key() Key {
+ return e.key
+}
+
+// wrapDecodeError creates a DecodeError referencing a highlighted
+// range of bytes from document.
+//
+// highlight needs to be a sub-slice of document, or this function panics.
+//
+// The function copies all bytes used in DecodeError, so that document and
+// highlight can be freely deallocated.
+//
+//nolint:funlen
+func wrapDecodeError(document []byte, de *unstable.ParserError) *DecodeError {
+ offset := danger.SubsliceOffset(document, de.Highlight)
+
+ errMessage := de.Error()
+ errLine, errColumn := positionAtEnd(document[:offset])
+ before, after := linesOfContext(document, de.Highlight, offset, 3)
+
+ var buf strings.Builder
+
+ maxLine := errLine + len(after) - 1
+ lineColumnWidth := len(strconv.Itoa(maxLine))
+
+ // Write the lines of context strictly before the error.
+ for i := len(before) - 1; i > 0; i-- {
+ line := errLine - i
+ buf.WriteString(formatLineNumber(line, lineColumnWidth))
+ buf.WriteString("|")
+
+ if len(before[i]) > 0 {
+ buf.WriteString(" ")
+ buf.Write(before[i])
+ }
+
+ buf.WriteRune('\n')
+ }
+
+ // Write the document line that contains the error.
+
+ buf.WriteString(formatLineNumber(errLine, lineColumnWidth))
+ buf.WriteString("| ")
+
+ if len(before) > 0 {
+ buf.Write(before[0])
+ }
+
+ buf.Write(de.Highlight)
+
+ if len(after) > 0 {
+ buf.Write(after[0])
+ }
+
+ buf.WriteRune('\n')
+
+ // Write the line with the error message itself (so it does not have a line
+ // number).
+
+ buf.WriteString(strings.Repeat(" ", lineColumnWidth))
+ buf.WriteString("| ")
+
+ if len(before) > 0 {
+ buf.WriteString(strings.Repeat(" ", len(before[0])))
+ }
+
+ buf.WriteString(strings.Repeat("~", len(de.Highlight)))
+
+ if len(errMessage) > 0 {
+ buf.WriteString(" ")
+ buf.WriteString(errMessage)
+ }
+
+ // Write the lines of context strictly after the error.
+
+ for i := 1; i < len(after); i++ {
+ buf.WriteRune('\n')
+ line := errLine + i
+ buf.WriteString(formatLineNumber(line, lineColumnWidth))
+ buf.WriteString("|")
+
+ if len(after[i]) > 0 {
+ buf.WriteString(" ")
+ buf.Write(after[i])
+ }
+ }
+
+ return &DecodeError{
+ message: errMessage,
+ line: errLine,
+ column: errColumn,
+ key: de.Key,
+ human: buf.String(),
+ }
+}
+
+func formatLineNumber(line int, width int) string {
+ format := "%" + strconv.Itoa(width) + "d"
+
+ return fmt.Sprintf(format, line)
+}
+
+func linesOfContext(document []byte, highlight []byte, offset int, linesAround int) ([][]byte, [][]byte) {
+ return beforeLines(document, offset, linesAround), afterLines(document, highlight, offset, linesAround)
+}
+
+func beforeLines(document []byte, offset int, linesAround int) [][]byte {
+ var beforeLines [][]byte
+
+ // Walk the document backward from the highlight to find previous lines
+ // of context.
+ rest := document[:offset]
+backward:
+ for o := len(rest) - 1; o >= 0 && len(beforeLines) <= linesAround && len(rest) > 0; {
+ switch {
+ case rest[o] == '\n':
+ // handle individual lines
+ beforeLines = append(beforeLines, rest[o+1:])
+ rest = rest[:o]
+ o = len(rest) - 1
+ case o == 0:
+ // add the first line only if it's non-empty
+ beforeLines = append(beforeLines, rest)
+
+ break backward
+ default:
+ o--
+ }
+ }
+
+ return beforeLines
+}
+
+func afterLines(document []byte, highlight []byte, offset int, linesAround int) [][]byte {
+ var afterLines [][]byte
+
+ // Walk the document forward from the highlight to find the following
+ // lines of context.
+ rest := document[offset+len(highlight):]
+forward:
+ for o := 0; o < len(rest) && len(afterLines) <= linesAround; {
+ switch {
+ case rest[o] == '\n':
+ // handle individual lines
+ afterLines = append(afterLines, rest[:o])
+ rest = rest[o+1:]
+ o = 0
+
+ case o == len(rest)-1:
+ // add last line only if it's non-empty
+ afterLines = append(afterLines, rest)
+
+ break forward
+ default:
+ o++
+ }
+ }
+
+ return afterLines
+}
+
+func positionAtEnd(b []byte) (row int, column int) {
+ row = 1
+ column = 1
+
+ for _, c := range b {
+ if c == '\n' {
+ row++
+ column = 1
+ } else {
+ column++
+ }
+ }
+
+ return
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/characters/ascii.go b/vendor/github.com/pelletier/go-toml/v2/internal/characters/ascii.go
new file mode 100644
index 0000000..80f698d
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/internal/characters/ascii.go
@@ -0,0 +1,42 @@
+package characters
+
+var invalidAsciiTable = [256]bool{
+ 0x00: true,
+ 0x01: true,
+ 0x02: true,
+ 0x03: true,
+ 0x04: true,
+ 0x05: true,
+ 0x06: true,
+ 0x07: true,
+ 0x08: true,
+ // 0x09 TAB
+ // 0x0A LF
+ 0x0B: true,
+ 0x0C: true,
+ // 0x0D CR
+ 0x0E: true,
+ 0x0F: true,
+ 0x10: true,
+ 0x11: true,
+ 0x12: true,
+ 0x13: true,
+ 0x14: true,
+ 0x15: true,
+ 0x16: true,
+ 0x17: true,
+ 0x18: true,
+ 0x19: true,
+ 0x1A: true,
+ 0x1B: true,
+ 0x1C: true,
+ 0x1D: true,
+ 0x1E: true,
+ 0x1F: true,
+ // 0x20 - 0x7E Printable ASCII characters
+ 0x7F: true,
+}
+
+func InvalidAscii(b byte) bool {
+ return invalidAsciiTable[b]
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/characters/utf8.go b/vendor/github.com/pelletier/go-toml/v2/internal/characters/utf8.go
new file mode 100644
index 0000000..db4f45a
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/internal/characters/utf8.go
@@ -0,0 +1,199 @@
+package characters
+
+import (
+ "unicode/utf8"
+)
+
+type utf8Err struct {
+ Index int
+ Size int
+}
+
+func (u utf8Err) Zero() bool {
+ return u.Size == 0
+}
+
+// Utf8TomlValidAlreadyEscaped verifies that a given string is only made of
+// valid UTF-8 characters allowed by the TOML spec:
+//
+// Any Unicode character may be used except those that must be escaped:
+// quotation mark, backslash, and the control characters other than tab (U+0000
+// to U+0008, U+000A to U+001F, U+007F).
+//
+// It is a copy of the Go 1.17 utf8.Valid implementation, tweaked to exit early
+// when a character is not allowed.
+//
+// The returned utf8Err is Zero() if the string is valid, or contains the byte
+// index and size of the invalid character.
+//
+// quotation mark => already checked
+// backslash => already checked
+// 0-0x8 => invalid
+// 0x9 => tab, ok
+// 0xA - 0x1F => invalid
+// 0x7F => invalid
+func Utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
+ // Fast path. Check for and skip 8 bytes of ASCII characters per iteration.
+ offset := 0
+ for len(p) >= 8 {
+ // Combining two 32 bit loads allows the same code to be used
+ // for 32 and 64 bit platforms.
+ // The compiler can generate a 32bit load for first32 and second32
+ // on many platforms. See test/codegen/memcombine.go.
+ first32 := uint32(p[0]) | uint32(p[1])<<8 | uint32(p[2])<<16 | uint32(p[3])<<24
+ second32 := uint32(p[4]) | uint32(p[5])<<8 | uint32(p[6])<<16 | uint32(p[7])<<24
+ if (first32|second32)&0x80808080 != 0 {
+ // Found a non ASCII byte (>= RuneSelf).
+ break
+ }
+
+ for i, b := range p[:8] {
+ if InvalidAscii(b) {
+ err.Index = offset + i
+ err.Size = 1
+ return
+ }
+ }
+
+ p = p[8:]
+ offset += 8
+ }
+ n := len(p)
+ for i := 0; i < n; {
+ pi := p[i]
+ if pi < utf8.RuneSelf {
+ if InvalidAscii(pi) {
+ err.Index = offset + i
+ err.Size = 1
+ return
+ }
+ i++
+ continue
+ }
+ x := first[pi]
+ if x == xx {
+ // Illegal starter byte.
+ err.Index = offset + i
+ err.Size = 1
+ return
+ }
+ size := int(x & 7)
+ if i+size > n {
+ // Short or invalid.
+ err.Index = offset + i
+ err.Size = n - i
+ return
+ }
+ accept := acceptRanges[x>>4]
+ if c := p[i+1]; c < accept.lo || accept.hi < c {
+ err.Index = offset + i
+ err.Size = 2
+ return
+ } else if size == 2 {
+ } else if c := p[i+2]; c < locb || hicb < c {
+ err.Index = offset + i
+ err.Size = 3
+ return
+ } else if size == 3 {
+ } else if c := p[i+3]; c < locb || hicb < c {
+ err.Index = offset + i
+ err.Size = 4
+ return
+ }
+ i += size
+ }
+ return
+}
+
+// Return the size of the next rune if valid, 0 otherwise.
+func Utf8ValidNext(p []byte) int {
+ c := p[0]
+
+ if c < utf8.RuneSelf {
+ if InvalidAscii(c) {
+ return 0
+ }
+ return 1
+ }
+
+ x := first[c]
+ if x == xx {
+ // Illegal starter byte.
+ return 0
+ }
+ size := int(x & 7)
+ if size > len(p) {
+ // Short or invalid.
+ return 0
+ }
+ accept := acceptRanges[x>>4]
+ if c := p[1]; c < accept.lo || accept.hi < c {
+ return 0
+ } else if size == 2 {
+ } else if c := p[2]; c < locb || hicb < c {
+ return 0
+ } else if size == 3 {
+ } else if c := p[3]; c < locb || hicb < c {
+ return 0
+ }
+
+ return size
+}
+
+// acceptRange gives the range of valid values for the second byte in a UTF-8
+// sequence.
+type acceptRange struct {
+ lo uint8 // lowest value for second byte.
+ hi uint8 // highest value for second byte.
+}
+
+// acceptRanges has size 16 to avoid bounds checks in the code that uses it.
+var acceptRanges = [16]acceptRange{
+ 0: {locb, hicb},
+ 1: {0xA0, hicb},
+ 2: {locb, 0x9F},
+ 3: {0x90, hicb},
+ 4: {locb, 0x8F},
+}
+
+// first is information about the first byte in a UTF-8 sequence.
+var first = [256]uint8{
+ // 1 2 3 4 5 6 7 8 9 A B C D E F
+ as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x00-0x0F
+ as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x10-0x1F
+ as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x20-0x2F
+ as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x30-0x3F
+ as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x40-0x4F
+ as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x50-0x5F
+ as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x60-0x6F
+ as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x70-0x7F
+ // 1 2 3 4 5 6 7 8 9 A B C D E F
+ xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x80-0x8F
+ xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x90-0x9F
+ xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xA0-0xAF
+ xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xB0-0xBF
+ xx, xx, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xC0-0xCF
+ s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xD0-0xDF
+ s2, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s4, s3, s3, // 0xE0-0xEF
+ s5, s6, s6, s6, s7, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xF0-0xFF
+}
+
+const (
+ // The default lowest and highest continuation byte.
+ locb = 0b10000000
+ hicb = 0b10111111
+
+ // These names of these constants are chosen to give nice alignment in the
+ // table below. The first nibble is an index into acceptRanges or F for
+ // special one-byte cases. The second nibble is the Rune length or the
+ // Status for the special one-byte case.
+ xx = 0xF1 // invalid: size 1
+ as = 0xF0 // ASCII: size 1
+ s1 = 0x02 // accept 0, size 2
+ s2 = 0x13 // accept 1, size 3
+ s3 = 0x03 // accept 0, size 3
+ s4 = 0x23 // accept 2, size 3
+ s5 = 0x34 // accept 3, size 4
+ s6 = 0x04 // accept 0, size 4
+ s7 = 0x44 // accept 4, size 4
+)
diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go b/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go
new file mode 100644
index 0000000..e38e113
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go
@@ -0,0 +1,65 @@
+package danger
+
+import (
+ "fmt"
+ "reflect"
+ "unsafe"
+)
+
+const maxInt = uintptr(int(^uint(0) >> 1))
+
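+// SubsliceOffset returns the offset in bytes of subslice within data. It
+// panics if subslice is not contained within data.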
+func SubsliceOffset(data []byte, subslice []byte) int {
+ datap := (*reflect.SliceHeader)(unsafe.Pointer(&data))
+ hlp := (*reflect.SliceHeader)(unsafe.Pointer(&subslice))
+
+ if hlp.Data < datap.Data {
+ panic(fmt.Errorf("subslice address (%d) is before data address (%d)", hlp.Data, datap.Data))
+ }
+ offset := hlp.Data - datap.Data
+
+ if offset > maxInt {
+ panic(fmt.Errorf("slice offset larger than int (%d)", offset))
+ }
+
+ intoffset := int(offset)
+
+ if intoffset > datap.Len {
+ panic(fmt.Errorf("slice offset (%d) is farther than data length (%d)", intoffset, datap.Len))
+ }
+
+ if intoffset+hlp.Len > datap.Len {
+ panic(fmt.Errorf("slice ends (%d+%d) is farther than data length (%d)", intoffset, hlp.Len, datap.Len))
+ }
+
+ return intoffset
+}
+
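+// BytesRange returns the smallest slice that covers both start and end, which
+// must point into the same underlying array.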
+func BytesRange(start []byte, end []byte) []byte {
+ if start == nil || end == nil {
+ panic("cannot call BytesRange with nil")
+ }
+ startp := (*reflect.SliceHeader)(unsafe.Pointer(&start))
+ endp := (*reflect.SliceHeader)(unsafe.Pointer(&end))
+
+ if startp.Data > endp.Data {
+ panic(fmt.Errorf("start pointer address (%d) is after end pointer address (%d)", startp.Data, endp.Data))
+ }
+
+ l := startp.Len
+ endLen := int(endp.Data-startp.Data) + endp.Len
+ if endLen > l {
+ l = endLen
+ }
+
+ if l > startp.Cap {
+ panic(fmt.Errorf("range length is larger than capacity"))
+ }
+
+ return start[:l]
+}
+
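+// Stride returns ptr advanced by offset elements of size bytes each.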
+func Stride(ptr unsafe.Pointer, size uintptr, offset int) unsafe.Pointer {
+ // TODO: replace with unsafe.Add when Go 1.17 is released
+ // https://github.com/golang/go/issues/40481
+ return unsafe.Pointer(uintptr(ptr) + uintptr(int(size)*offset))
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go b/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go
new file mode 100644
index 0000000..9d41c28
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go
@@ -0,0 +1,23 @@
+package danger
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+// TypeID is used as key in encoder and decoder caches to enable using
+// the optimized runtime.mapaccess2_fast64 function instead of the more
+// expensive lookup we would get if we used reflect.Type as map key.
+//
+// typeID holds the pointer to the reflect.Type value, which is unique
+// in the program.
+//
+// https://github.com/segmentio/encoding/blob/master/json/codec.go#L59-L61
+type TypeID unsafe.Pointer
+
+func MakeTypeID(t reflect.Type) TypeID {
+ // reflect.Type has the fields:
+ // typ unsafe.Pointer
+ // ptr unsafe.Pointer
+ return TypeID((*[2]unsafe.Pointer)(unsafe.Pointer(&t))[1])
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go
new file mode 100644
index 0000000..149b17f
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go
@@ -0,0 +1,48 @@
+package tracker
+
+import "github.com/pelletier/go-toml/v2/unstable"
+
+// KeyTracker is a tracker that keeps track of the current Key as the AST is
+// walked.
+type KeyTracker struct {
+ k []string
+}
+
+// UpdateTable sets the state of the tracker with the AST table node.
+func (t *KeyTracker) UpdateTable(node *unstable.Node) {
+ t.reset()
+ t.Push(node)
+}
+
+// UpdateArrayTable sets the state of the tracker with the AST array table node.
+func (t *KeyTracker) UpdateArrayTable(node *unstable.Node) {
+ t.reset()
+ t.Push(node)
+}
+
+// Push the given key on the stack.
+func (t *KeyTracker) Push(node *unstable.Node) {
+ it := node.Key()
+ for it.Next() {
+ t.k = append(t.k, string(it.Node().Data))
+ }
+}
+
+// Pop key from stack.
+func (t *KeyTracker) Pop(node *unstable.Node) {
+ it := node.Key()
+ for it.Next() {
+ t.k = t.k[:len(t.k)-1]
+ }
+}
+
+// Key returns a copy of the current key.
+func (t *KeyTracker) Key() []string {
+ k := make([]string, len(t.k))
+ copy(k, t.k)
+ return k
+}
+
+func (t *KeyTracker) reset() {
+ t.k = t.k[:0]
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go
new file mode 100644
index 0000000..40e23f8
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go
@@ -0,0 +1,356 @@
+package tracker
+
+import (
+ "bytes"
+ "fmt"
+ "sync"
+
+ "github.com/pelletier/go-toml/v2/unstable"
+)
+
+type keyKind uint8
+
+const (
+ invalidKind keyKind = iota
+ valueKind
+ tableKind
+ arrayTableKind
+)
+
+func (k keyKind) String() string {
+ switch k {
+ case invalidKind:
+ return "invalid"
+ case valueKind:
+ return "value"
+ case tableKind:
+ return "table"
+ case arrayTableKind:
+ return "array table"
+ }
+ panic("missing keyKind string mapping")
+}
+
+// SeenTracker tracks which keys have been seen with which TOML type to flag
+// duplicates and mismatches according to the spec.
+//
+// Each node in the visited tree is represented by an entry. Each entry has an
+// identifier, which is provided by a counter. Entries are stored in the array
+// entries. As new nodes are discovered (referenced for the first time in the
+// TOML document), entries are created and appended to the array. An entry
+// points to its parent using its id.
+//
+// To find whether a given key (sequence of []byte) has already been visited,
+// the entries are linearly searched, looking for one with the right name and
+// parent id.
+//
+// Given that all keys appear in the document after their parent, it is
+// guaranteed that all descendants of a node are stored after the node; this
+// speeds up the search process.
+//
+// When encountering [[array tables]], the descendants of that node are removed
+// to allow that branch of the tree to be "rediscovered". To maintain the
+// invariant above, the deletion process needs to keep the order of entries.
+// This results in more copies in that case.
+type SeenTracker struct {
+ entries []entry
+ currentIdx int
+}
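+
+// Illustrative shape (not part of the upstream file): after seeing [a.b]
+// and then [a.c], the entries array could look like this (root at 0):
+//
+//    0 root  child=1
+//    1 "a"   child=3
+//    2 "b"   next=-1
+//    3 "c"   next=2
+//
+// find(1, "b") follows entries[1].child to "c", then its next link to "b".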
+
+var pool sync.Pool
+
+func (s *SeenTracker) reset() {
+ // Always contains a root element at index 0.
+ s.currentIdx = 0
+ if len(s.entries) == 0 {
+ s.entries = make([]entry, 1, 2)
+ } else {
+ s.entries = s.entries[:1]
+ }
+ s.entries[0].child = -1
+ s.entries[0].next = -1
+}
+
+type entry struct {
+ // Use -1 to indicate no child or no sibling.
+ child int
+ next int
+
+ name []byte
+ kind keyKind
+ explicit bool
+ kv bool
+}
+
+// Find the index of the child of parentIdx with key k. Returns -1 if
+// it does not exist.
+func (s *SeenTracker) find(parentIdx int, k []byte) int {
+ for i := s.entries[parentIdx].child; i >= 0; i = s.entries[i].next {
+ if bytes.Equal(s.entries[i].name, k) {
+ return i
+ }
+ }
+ return -1
+}
+
+// Remove all descendants of node at position idx.
+func (s *SeenTracker) clear(idx int) {
+ if idx >= len(s.entries) {
+ return
+ }
+
+ for i := s.entries[idx].child; i >= 0; {
+ next := s.entries[i].next
+ n := s.entries[0].next
+ s.entries[0].next = i
+ s.entries[i].next = n
+ s.entries[i].name = nil
+ s.clear(i)
+ i = next
+ }
+
+ s.entries[idx].child = -1
+}
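+
+// Note: clear threads the removed entries onto a free list rooted at
+// entries[0].next (the root entry never has a sibling, so that field is
+// otherwise unused). create, below, pops from this free list before
+// growing the entries slice.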
+
+func (s *SeenTracker) create(parentIdx int, name []byte, kind keyKind, explicit bool, kv bool) int {
+ e := entry{
+ child: -1,
+ next: s.entries[parentIdx].child,
+
+ name: name,
+ kind: kind,
+ explicit: explicit,
+ kv: kv,
+ }
+ var idx int
+ if s.entries[0].next >= 0 {
+ idx = s.entries[0].next
+ s.entries[0].next = s.entries[idx].next
+ s.entries[idx] = e
+ } else {
+ idx = len(s.entries)
+ s.entries = append(s.entries, e)
+ }
+
+ s.entries[parentIdx].child = idx
+
+ return idx
+}
+
+func (s *SeenTracker) setExplicitFlag(parentIdx int) {
+ for i := s.entries[parentIdx].child; i >= 0; i = s.entries[i].next {
+ if s.entries[i].kv {
+ s.entries[i].explicit = true
+ s.entries[i].kv = false
+ }
+ s.setExplicitFlag(i)
+ }
+}
+
+// CheckExpression takes a top-level node and checks that it does not contain
+// keys that have been seen in previous calls, and validates that types are
+// consistent.
+func (s *SeenTracker) CheckExpression(node *unstable.Node) error {
+ if s.entries == nil {
+ s.reset()
+ }
+ switch node.Kind {
+ case unstable.KeyValue:
+ return s.checkKeyValue(node)
+ case unstable.Table:
+ return s.checkTable(node)
+ case unstable.ArrayTable:
+ return s.checkArrayTable(node)
+ default:
+ panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind))
+ }
+}
+
+func (s *SeenTracker) checkTable(node *unstable.Node) error {
+ if s.currentIdx >= 0 {
+ s.setExplicitFlag(s.currentIdx)
+ }
+
+ it := node.Key()
+
+ parentIdx := 0
+
+ // This code is duplicated in checkArrayTable. This is because factoring
+ // it in a function requires to copy the iterator, or allocate it to the
+ // heap, which is not cheap.
+ for it.Next() {
+ if it.IsLast() {
+ break
+ }
+
+ k := it.Node().Data
+
+ idx := s.find(parentIdx, k)
+
+ if idx < 0 {
+ idx = s.create(parentIdx, k, tableKind, false, false)
+ } else {
+ entry := s.entries[idx]
+ if entry.kind == valueKind {
+ return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
+ }
+ }
+ parentIdx = idx
+ }
+
+ k := it.Node().Data
+ idx := s.find(parentIdx, k)
+
+ if idx >= 0 {
+ kind := s.entries[idx].kind
+ if kind != tableKind {
+ return fmt.Errorf("toml: key %s should be a table, not a %s", string(k), kind)
+ }
+ if s.entries[idx].explicit {
+ return fmt.Errorf("toml: table %s already exists", string(k))
+ }
+ s.entries[idx].explicit = true
+ } else {
+ idx = s.create(parentIdx, k, tableKind, true, false)
+ }
+
+ s.currentIdx = idx
+
+ return nil
+}
+
+func (s *SeenTracker) checkArrayTable(node *unstable.Node) error {
+ if s.currentIdx >= 0 {
+ s.setExplicitFlag(s.currentIdx)
+ }
+
+ it := node.Key()
+
+ parentIdx := 0
+
+ for it.Next() {
+ if it.IsLast() {
+ break
+ }
+
+ k := it.Node().Data
+
+ idx := s.find(parentIdx, k)
+
+ if idx < 0 {
+ idx = s.create(parentIdx, k, tableKind, false, false)
+ } else {
+ entry := s.entries[idx]
+ if entry.kind == valueKind {
+ return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
+ }
+ }
+
+ parentIdx = idx
+ }
+
+ k := it.Node().Data
+ idx := s.find(parentIdx, k)
+
+ if idx >= 0 {
+ kind := s.entries[idx].kind
+ if kind != arrayTableKind {
+ return fmt.Errorf("toml: key %s already exists as a %s, but should be an array table", kind, string(k))
+ }
+ s.clear(idx)
+ } else {
+ idx = s.create(parentIdx, k, arrayTableKind, true, false)
+ }
+
+ s.currentIdx = idx
+
+ return nil
+}
+
+func (s *SeenTracker) checkKeyValue(node *unstable.Node) error {
+ parentIdx := s.currentIdx
+ it := node.Key()
+
+ for it.Next() {
+ k := it.Node().Data
+
+ idx := s.find(parentIdx, k)
+
+ if idx < 0 {
+ idx = s.create(parentIdx, k, tableKind, false, true)
+ } else {
+ entry := s.entries[idx]
+ if it.IsLast() {
+ return fmt.Errorf("toml: key %s is already defined", string(k))
+ } else if entry.kind != tableKind {
+ return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
+ } else if entry.explicit {
+ return fmt.Errorf("toml: cannot redefine table %s that has already been explicitly defined", string(k))
+ }
+ }
+
+ parentIdx = idx
+ }
+
+ s.entries[parentIdx].kind = valueKind
+
+ value := node.Value()
+
+ switch value.Kind {
+ case unstable.InlineTable:
+ return s.checkInlineTable(value)
+ case unstable.Array:
+ return s.checkArray(value)
+ }
+
+ return nil
+}
+
+func (s *SeenTracker) checkArray(node *unstable.Node) error {
+ it := node.Children()
+ for it.Next() {
+ n := it.Node()
+ switch n.Kind {
+ case unstable.InlineTable:
+ err := s.checkInlineTable(n)
+ if err != nil {
+ return err
+ }
+ case unstable.Array:
+ err := s.checkArray(n)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func (s *SeenTracker) checkInlineTable(node *unstable.Node) error {
+ if pool.New == nil {
+ pool.New = func() interface{} {
+ return &SeenTracker{}
+ }
+ }
+
+ s = pool.Get().(*SeenTracker)
+ s.reset()
+
+ it := node.Children()
+ for it.Next() {
+ n := it.Node()
+ err := s.checkKeyValue(n)
+ if err != nil {
+ return err
+ }
+ }
+
+ // As inline tables are self-contained, the tracker does not
+ // need to retain the details of what they contain. The
+ // keyValue element that creates the inline table is kept to
+ // mark the presence of the inline table and prevent
+ // redefinition of its keys: check* functions cannot walk into
+ // a value.
+ pool.Put(s)
+ return nil
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go
new file mode 100644
index 0000000..bf03173
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go
@@ -0,0 +1 @@
+package tracker
diff --git a/vendor/github.com/pelletier/go-toml/v2/localtime.go b/vendor/github.com/pelletier/go-toml/v2/localtime.go
new file mode 100644
index 0000000..a856bfd
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/localtime.go
@@ -0,0 +1,122 @@
+package toml
+
+import (
+ "fmt"
+ "strings"
+ "time"
+
+ "github.com/pelletier/go-toml/v2/unstable"
+)
+
+// LocalDate represents a calendar day in no specific timezone.
+type LocalDate struct {
+ Year int
+ Month int
+ Day int
+}
+
+// AsTime converts d into a specific time instance at midnight in zone.
+func (d LocalDate) AsTime(zone *time.Location) time.Time {
+ return time.Date(d.Year, time.Month(d.Month), d.Day, 0, 0, 0, 0, zone)
+}
+
+// String returns the RFC 3339 representation of d.
+func (d LocalDate) String() string {
+ return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day)
+}
+
+// MarshalText returns the RFC 3339 representation of d.
+func (d LocalDate) MarshalText() ([]byte, error) {
+ return []byte(d.String()), nil
+}
+
+// UnmarshalText parses b using RFC 3339 to fill d.
+func (d *LocalDate) UnmarshalText(b []byte) error {
+ res, err := parseLocalDate(b)
+ if err != nil {
+ return err
+ }
+ *d = res
+ return nil
+}
+
+// LocalTime represents a wall-clock time of day, attached to no specific
+// day and no specific timezone.
+type LocalTime struct {
+ Hour int // Hour of the day: [0, 24).
+ Minute int // Minute of the hour: [0, 60).
+ Second int // Second of the minute: [0, 60).
+ Nanosecond int // Nanoseconds within the second: [0, 1000000000).
+ Precision int // Number of digits to display for Nanosecond.
+}
+
+// String returns the RFC 3339 representation of d.
+// If d.Nanosecond and d.Precision are zero, the time has no nanosecond
+// component. If d.Nanosecond > 0 but d.Precision = 0, the minimum number of
+// digits needed to represent d.Nanosecond is used.
+func (d LocalTime) String() string {
+ s := fmt.Sprintf("%02d:%02d:%02d", d.Hour, d.Minute, d.Second)
+
+ if d.Precision > 0 {
+ s += fmt.Sprintf(".%09d", d.Nanosecond)[:d.Precision+1]
+ } else if d.Nanosecond > 0 {
+ // Nanoseconds are specified, but precision is not provided. Use the
+ // minimum.
+ s += strings.Trim(fmt.Sprintf(".%09d", d.Nanosecond), "0")
+ }
+
+ return s
+}
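+
+// For illustration (not part of the upstream file):
+//
+//    LocalTime{Hour: 12, Nanosecond: 500000000, Precision: 3}.String() // "12:00:00.500"
+//    LocalTime{Hour: 12, Nanosecond: 500000000}.String()               // "12:00:00.5"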
+
+// MarshalText returns the RFC 3339 representation of d.
+func (d LocalTime) MarshalText() ([]byte, error) {
+ return []byte(d.String()), nil
+}
+
+// UnmarshalText parses b using RFC 3339 to fill d.
+func (d *LocalTime) UnmarshalText(b []byte) error {
+ res, left, err := parseLocalTime(b)
+ if err == nil && len(left) != 0 {
+ err = unstable.NewParserError(left, "extra characters")
+ }
+ if err != nil {
+ return err
+ }
+ *d = res
+ return nil
+}
+
+// LocalDateTime represents a time of a specific day in no specific timezone.
+type LocalDateTime struct {
+ LocalDate
+ LocalTime
+}
+
+// AsTime converts d into a specific time instance in zone.
+func (d LocalDateTime) AsTime(zone *time.Location) time.Time {
+ return time.Date(d.Year, time.Month(d.Month), d.Day, d.Hour, d.Minute, d.Second, d.Nanosecond, zone)
+}
+
+// String returns the RFC 3339 representation of d.
+func (d LocalDateTime) String() string {
+ return d.LocalDate.String() + "T" + d.LocalTime.String()
+}
+
+// MarshalText returns the RFC 3339 representation of d.
+func (d LocalDateTime) MarshalText() ([]byte, error) {
+ return []byte(d.String()), nil
+}
+
+// UnmarshalText parses data using RFC 3339 to fill d.
+func (d *LocalDateTime) UnmarshalText(data []byte) error {
+ res, left, err := parseLocalDateTime(data)
+ if err == nil && len(left) != 0 {
+ err = unstable.NewParserError(left, "extra characters")
+ }
+ if err != nil {
+ return err
+ }
+
+ *d = res
+ return nil
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/marshaler.go b/vendor/github.com/pelletier/go-toml/v2/marshaler.go
new file mode 100644
index 0000000..6fe7853
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/marshaler.go
@@ -0,0 +1,1090 @@
+package toml
+
+import (
+ "bytes"
+ "encoding"
+ "fmt"
+ "io"
+ "math"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+ "unicode"
+
+ "github.com/pelletier/go-toml/v2/internal/characters"
+)
+
+// Marshal serializes a Go value as a TOML document.
+//
+// It is a shortcut for Encoder.Encode() with the default options.
+func Marshal(v interface{}) ([]byte, error) {
+ var buf bytes.Buffer
+ enc := NewEncoder(&buf)
+
+ err := enc.Encode(v)
+ if err != nil {
+ return nil, err
+ }
+
+ return buf.Bytes(), nil
+}
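+
+// Illustrative usage sketch (not part of the upstream file; the Config
+// type is hypothetical):
+//
+//    type Config struct {
+//        Host string
+//        Port int
+//    }
+//
+//    b, err := Marshal(Config{Host: "localhost", Port: 8080})
+//    // b is roughly: Host = 'localhost'\nPort = 8080\n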
+
+// Encoder writes a TOML document to an output stream.
+type Encoder struct {
+ // output
+ w io.Writer
+
+ // global settings
+ tablesInline bool
+ arraysMultiline bool
+ indentSymbol string
+ indentTables bool
+}
+
+// NewEncoder returns a new Encoder that writes to w.
+func NewEncoder(w io.Writer) *Encoder {
+ return &Encoder{
+ w: w,
+ indentSymbol: " ",
+ }
+}
+
+// SetTablesInline forces the encoder to emit all tables inline.
+//
+// This behavior can be controlled on an individual struct field basis with the
+// inline tag:
+//
+// MyField `toml:",inline"`
+func (enc *Encoder) SetTablesInline(inline bool) *Encoder {
+ enc.tablesInline = inline
+ return enc
+}
+
+// SetArraysMultiline forces the encoder to emit all arrays with one element per
+// line.
+//
+// This behavior can be controlled on an individual struct field basis with
+// the multiline tag:
+//
+// MyField `toml:",multiline"`
+func (enc *Encoder) SetArraysMultiline(multiline bool) *Encoder {
+ enc.arraysMultiline = multiline
+ return enc
+}
+
+// SetIndentSymbol defines the string that should be used for indentation. The
+// provided string is repeated for each indentation level. Defaults to two
+// spaces.
+func (enc *Encoder) SetIndentSymbol(s string) *Encoder {
+ enc.indentSymbol = s
+ return enc
+}
+
+// SetIndentTables forces the encoder to indent tables and array tables.
+func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
+ enc.indentTables = indent
+ return enc
+}
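+
+// The setters return the Encoder, so calls can be chained. Illustrative
+// sketch (not part of the upstream file):
+//
+//    enc := NewEncoder(os.Stdout).
+//        SetIndentTables(true).
+//        SetArraysMultiline(true)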
+
+// Encode writes a TOML representation of v to the stream.
+//
+// If v cannot be represented as TOML, it returns an error.
+//
+// # Encoding rules
+//
+// A top level slice containing only maps or structs is encoded as [[table
+// array]].
+//
+// All slices not matching the previous rule are encoded as [array]. As a
+// result, any map or struct they contain is encoded as an {inline table}.
+//
+// Nil interfaces and nil pointers are not supported.
+//
+// Keys in key-values always have one part.
+//
+// Intermediate tables are always printed.
+//
+// By default, strings are encoded as literal strings, unless they contain a
+// newline, carriage return, single quote, or invalid ASCII character. In
+// that case they are emitted as quoted strings.
+//
+// Unsigned integers larger than math.MaxInt64 cannot be encoded. Doing so
+// results in an error. This rule exists because the TOML specification only
+// requires parsers to support at least the 64-bit integer range. Allowing
+// larger numbers would create non-standard TOML documents, which may not be
+// readable (at best) by other implementations. To encode such numbers, one
+// solution is a custom type that implements encoding.TextMarshaler.
+//
+// When encoding structs, fields are encoded in order of definition, with their
+// exact name.
+//
+// Tables and array tables are separated by empty lines. However, consecutive
+// subtables definitions are not. For example:
+//
+// [top1]
+//
+// [top2]
+// [top2.child1]
+//
+// [[array]]
+//
+// [[array]]
+// [array.child2]
+//
+// # Struct tags
+//
+// The encoding of each public struct field can be customized by the format
+// string in the "toml" key of the struct field's tag. This follows
+// encoding/json's convention. The format string starts with the name of the
+// field, optionally followed by a comma-separated list of options. The name may
+// be empty in order to provide options without overriding the default name.
+//
+// The "multiline" option emits strings as quoted multi-line TOML strings. It
+// has no effect on fields that would not be encoded as strings.
+//
+// The "inline" option turns fields that would be emitted as tables into inline
+// tables instead. It has no effect on other fields.
+//
+// The "omitempty" option prevents empty values or groups from being emitted.
+//
+// The "commented" option prefixes the value and all its children with a comment
+// symbol.
+//
+// In addition to the "toml" tag struct tag, a "comment" tag can be used to emit
+// a TOML comment before the value being annotated. Comments are ignored inside
+// inline tables. For array tables, the comment is only present before the first
+// element of the array.
+func (enc *Encoder) Encode(v interface{}) error {
+ var (
+ b []byte
+ ctx encoderCtx
+ )
+
+ ctx.inline = enc.tablesInline
+
+ if v == nil {
+ return fmt.Errorf("toml: cannot encode a nil interface")
+ }
+
+ b, err := enc.encode(b, ctx, reflect.ValueOf(v))
+ if err != nil {
+ return err
+ }
+
+ _, err = enc.w.Write(b)
+ if err != nil {
+ return fmt.Errorf("toml: cannot write: %w", err)
+ }
+
+ return nil
+}
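+
+// Illustrative struct using the tags documented above (not part of the
+// upstream file; the Server and Options types are hypothetical):
+//
+//    type Server struct {
+//        Name  string   `toml:"name,omitempty" comment:"display name"`
+//        Hosts []string `toml:"hosts,multiline"`
+//        Opts  Options  `toml:",inline"`
+//    }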
+
+type valueOptions struct {
+ multiline bool
+ omitempty bool
+ commented bool
+ comment string
+}
+
+type encoderCtx struct {
+ // Current top-level key.
+ parentKey []string
+
+ // Key that should be used for a KV.
+ key string
+ // Extra flag to account for the empty string
+ hasKey bool
+
+ // Set to true to indicate that the encoder is inside a KV, so that all
+ // tables need to be inlined.
+ insideKv bool
+
+ // Set to true to skip the first table header in an array table.
+ skipTableHeader bool
+
+ // Should the next table be encoded as inline
+ inline bool
+
+ // Indentation level
+ indent int
+
+ // Prefix the current value with a comment.
+ commented bool
+
+ // Options coming from struct tags
+ options valueOptions
+}
+
+func (ctx *encoderCtx) shiftKey() {
+ if ctx.hasKey {
+ ctx.parentKey = append(ctx.parentKey, ctx.key)
+ ctx.clearKey()
+ }
+}
+
+func (ctx *encoderCtx) setKey(k string) {
+ ctx.key = k
+ ctx.hasKey = true
+}
+
+func (ctx *encoderCtx) clearKey() {
+ ctx.key = ""
+ ctx.hasKey = false
+}
+
+func (ctx *encoderCtx) isRoot() bool {
+ return len(ctx.parentKey) == 0 && !ctx.hasKey
+}
+
+func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
+ i := v.Interface()
+
+ switch x := i.(type) {
+ case time.Time:
+ if x.Nanosecond() > 0 {
+ return x.AppendFormat(b, time.RFC3339Nano), nil
+ }
+ return x.AppendFormat(b, time.RFC3339), nil
+ case LocalTime:
+ return append(b, x.String()...), nil
+ case LocalDate:
+ return append(b, x.String()...), nil
+ case LocalDateTime:
+ return append(b, x.String()...), nil
+ }
+
+ hasTextMarshaler := v.Type().Implements(textMarshalerType)
+ if hasTextMarshaler || (v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) {
+ if !hasTextMarshaler {
+ v = v.Addr()
+ }
+
+ if ctx.isRoot() {
+ return nil, fmt.Errorf("toml: type %s implementing the TextMarshaler interface cannot be a root element", v.Type())
+ }
+
+ text, err := v.Interface().(encoding.TextMarshaler).MarshalText()
+ if err != nil {
+ return nil, err
+ }
+
+ b = enc.encodeString(b, string(text), ctx.options)
+
+ return b, nil
+ }
+
+ switch v.Kind() {
+ // containers
+ case reflect.Map:
+ return enc.encodeMap(b, ctx, v)
+ case reflect.Struct:
+ return enc.encodeStruct(b, ctx, v)
+ case reflect.Slice, reflect.Array:
+ return enc.encodeSlice(b, ctx, v)
+ case reflect.Interface:
+ if v.IsNil() {
+ return nil, fmt.Errorf("toml: encoding a nil interface is not supported")
+ }
+
+ return enc.encode(b, ctx, v.Elem())
+ case reflect.Ptr:
+ if v.IsNil() {
+ return enc.encode(b, ctx, reflect.Zero(v.Type().Elem()))
+ }
+
+ return enc.encode(b, ctx, v.Elem())
+
+ // values
+ case reflect.String:
+ b = enc.encodeString(b, v.String(), ctx.options)
+ case reflect.Float32:
+ f := v.Float()
+
+ if math.IsNaN(f) {
+ b = append(b, "nan"...)
+ } else if f > math.MaxFloat32 {
+ b = append(b, "inf"...)
+ } else if f < -math.MaxFloat32 {
+ b = append(b, "-inf"...)
+ } else if math.Trunc(f) == f {
+ b = strconv.AppendFloat(b, f, 'f', 1, 32)
+ } else {
+ b = strconv.AppendFloat(b, f, 'f', -1, 32)
+ }
+ case reflect.Float64:
+ f := v.Float()
+ if math.IsNaN(f) {
+ b = append(b, "nan"...)
+ } else if f > math.MaxFloat64 {
+ b = append(b, "inf"...)
+ } else if f < -math.MaxFloat64 {
+ b = append(b, "-inf"...)
+ } else if math.Trunc(f) == f {
+ b = strconv.AppendFloat(b, f, 'f', 1, 64)
+ } else {
+ b = strconv.AppendFloat(b, f, 'f', -1, 64)
+ }
+ case reflect.Bool:
+ if v.Bool() {
+ b = append(b, "true"...)
+ } else {
+ b = append(b, "false"...)
+ }
+ case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Uint:
+ x := v.Uint()
+ if x > uint64(math.MaxInt64) {
+ return nil, fmt.Errorf("toml: not encoding uint (%d) greater than max int64 (%d)", x, int64(math.MaxInt64))
+ }
+ b = strconv.AppendUint(b, x, 10)
+ case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int:
+ b = strconv.AppendInt(b, v.Int(), 10)
+ default:
+ return nil, fmt.Errorf("toml: cannot encode value of type %s", v.Kind())
+ }
+
+ return b, nil
+}
+
+func isNil(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.Ptr, reflect.Interface, reflect.Map:
+ return v.IsNil()
+ default:
+ return false
+ }
+}
+
+func shouldOmitEmpty(options valueOptions, v reflect.Value) bool {
+ return options.omitempty && isEmptyValue(v)
+}
+
+func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) {
+ var err error
+
+ if !ctx.inline {
+ b = enc.encodeComment(ctx.indent, options.comment, b)
+ b = enc.commented(ctx.commented, b)
+ b = enc.indent(ctx.indent, b)
+ }
+
+ b = enc.encodeKey(b, ctx.key)
+ b = append(b, " = "...)
+
+ // create a copy of the context because the value of a KV shouldn't
+ // modify the global context.
+ subctx := ctx
+ subctx.insideKv = true
+ subctx.shiftKey()
+ subctx.options = options
+
+ b, err = enc.encode(b, subctx, v)
+ if err != nil {
+ return nil, err
+ }
+
+ return b, nil
+}
+
+func (enc *Encoder) commented(commented bool, b []byte) []byte {
+ if commented {
+ return append(b, "# "...)
+ }
+ return b
+}
+
+func isEmptyValue(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.Struct:
+ return isEmptyStruct(v)
+ case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
+ return v.Len() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Interface, reflect.Ptr:
+ return v.IsNil()
+ }
+ return false
+}
+
+func isEmptyStruct(v reflect.Value) bool {
+ // TODO: merge with walkStruct and cache.
+ typ := v.Type()
+ for i := 0; i < typ.NumField(); i++ {
+ fieldType := typ.Field(i)
+
+ // only consider exported fields
+ if fieldType.PkgPath != "" {
+ continue
+ }
+
+ tag := fieldType.Tag.Get("toml")
+
+ // special field name to skip field
+ if tag == "-" {
+ continue
+ }
+
+ f := v.Field(i)
+
+ if !isEmptyValue(f) {
+ return false
+ }
+ }
+
+ return true
+}
+
+const literalQuote = '\''
+
+func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byte {
+ if needsQuoting(v) {
+ return enc.encodeQuotedString(options.multiline, b, v)
+ }
+
+ return enc.encodeLiteralString(b, v)
+}
+
+func needsQuoting(v string) bool {
+ // TODO: vectorize
+ for _, b := range []byte(v) {
+ if b == '\'' || b == '\r' || b == '\n' || characters.InvalidAscii(b) {
+ return true
+ }
+ }
+ return false
+}
+
+// caller should have checked that the string does not contain newlines or single quotes.
+func (enc *Encoder) encodeLiteralString(b []byte, v string) []byte {
+ b = append(b, literalQuote)
+ b = append(b, v...)
+ b = append(b, literalQuote)
+
+ return b
+}
+
+func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byte {
+ stringQuote := `"`
+
+ if multiline {
+ stringQuote = `"""`
+ }
+
+ b = append(b, stringQuote...)
+ if multiline {
+ b = append(b, '\n')
+ }
+
+ const (
+ hextable = "0123456789ABCDEF"
+ // U+0000 to U+0008, U+000A to U+001F, U+007F
+ nul = 0x0
+ bs = 0x8
+ lf = 0xa
+ us = 0x1f
+ del = 0x7f
+ )
+
+ for _, r := range []byte(v) {
+ switch r {
+ case '\\':
+ b = append(b, `\\`...)
+ case '"':
+ b = append(b, `\"`...)
+ case '\b':
+ b = append(b, `\b`...)
+ case '\f':
+ b = append(b, `\f`...)
+ case '\n':
+ if multiline {
+ b = append(b, r)
+ } else {
+ b = append(b, `\n`...)
+ }
+ case '\r':
+ b = append(b, `\r`...)
+ case '\t':
+ b = append(b, `\t`...)
+ default:
+ switch {
+ case r >= nul && r <= bs, r >= lf && r <= us, r == del:
+ b = append(b, `\u00`...)
+ b = append(b, hextable[r>>4])
+ b = append(b, hextable[r&0x0f])
+ default:
+ b = append(b, r)
+ }
+ }
+ }
+
+ b = append(b, stringQuote...)
+
+ return b
+}
+
+// caller should have checked that the string is in A-Z / a-z / 0-9 / - / _ .
+func (enc *Encoder) encodeUnquotedKey(b []byte, v string) []byte {
+ return append(b, v...)
+}
+
+func (enc *Encoder) encodeTableHeader(ctx encoderCtx, b []byte) ([]byte, error) {
+ if len(ctx.parentKey) == 0 {
+ return b, nil
+ }
+
+ b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
+
+ b = enc.commented(ctx.commented, b)
+
+ b = enc.indent(ctx.indent, b)
+
+ b = append(b, '[')
+
+ b = enc.encodeKey(b, ctx.parentKey[0])
+
+ for _, k := range ctx.parentKey[1:] {
+ b = append(b, '.')
+ b = enc.encodeKey(b, k)
+ }
+
+ b = append(b, "]\n"...)
+
+ return b, nil
+}
+
+//nolint:cyclop
+func (enc *Encoder) encodeKey(b []byte, k string) []byte {
+ needsQuotation := false
+ cannotUseLiteral := false
+
+ if len(k) == 0 {
+ return append(b, "''"...)
+ }
+
+ for _, c := range k {
+ if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '-' || c == '_' {
+ continue
+ }
+
+ if c == literalQuote {
+ cannotUseLiteral = true
+ }
+
+ needsQuotation = true
+ }
+
+ if needsQuotation && needsQuoting(k) {
+ cannotUseLiteral = true
+ }
+
+ switch {
+ case cannotUseLiteral:
+ return enc.encodeQuotedString(false, b, k)
+ case needsQuotation:
+ return enc.encodeLiteralString(b, k)
+ default:
+ return enc.encodeUnquotedKey(b, k)
+ }
+}
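+
+// For illustration (not part of the upstream file), encodeKey chooses:
+//
+//    simple-key   -> simple-key    (bare: only A-Z, a-z, 0-9, -, _)
+//    key space    -> 'key space'   (literal-quoted)
+//    it's         -> "it's"        (basic-quoted: contains ')
+//    (empty)      -> ''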
+
+func (enc *Encoder) keyToString(k reflect.Value) (string, error) {
+ keyType := k.Type()
+ switch {
+ case keyType.Kind() == reflect.String:
+ return k.String(), nil
+
+ case keyType.Implements(textMarshalerType):
+ keyB, err := k.Interface().(encoding.TextMarshaler).MarshalText()
+ if err != nil {
+ return "", fmt.Errorf("toml: error marshalling key %v from text: %w", k, err)
+ }
+ return string(keyB), nil
+ }
+ return "", fmt.Errorf("toml: type %s is not supported as a map key", keyType.Kind())
+}
+
+func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
+ var (
+ t table
+ emptyValueOptions valueOptions
+ )
+
+ iter := v.MapRange()
+ for iter.Next() {
+ v := iter.Value()
+
+ if isNil(v) {
+ continue
+ }
+
+ k, err := enc.keyToString(iter.Key())
+ if err != nil {
+ return nil, err
+ }
+
+ if willConvertToTableOrArrayTable(ctx, v) {
+ t.pushTable(k, v, emptyValueOptions)
+ } else {
+ t.pushKV(k, v, emptyValueOptions)
+ }
+ }
+
+ sortEntriesByKey(t.kvs)
+ sortEntriesByKey(t.tables)
+
+ return enc.encodeTable(b, ctx, t)
+}
+
+func sortEntriesByKey(e []entry) {
+ sort.Slice(e, func(i, j int) bool {
+ return e[i].Key < e[j].Key
+ })
+}
+
+type entry struct {
+ Key string
+ Value reflect.Value
+ Options valueOptions
+}
+
+type table struct {
+ kvs []entry
+ tables []entry
+}
+
+func (t *table) pushKV(k string, v reflect.Value, options valueOptions) {
+ for _, e := range t.kvs {
+ if e.Key == k {
+ return
+ }
+ }
+
+ t.kvs = append(t.kvs, entry{Key: k, Value: v, Options: options})
+}
+
+func (t *table) pushTable(k string, v reflect.Value, options valueOptions) {
+ for _, e := range t.tables {
+ if e.Key == k {
+ return
+ }
+ }
+ t.tables = append(t.tables, entry{Key: k, Value: v, Options: options})
+}
+
+func walkStruct(ctx encoderCtx, t *table, v reflect.Value) {
+ // TODO: cache this
+ typ := v.Type()
+ for i := 0; i < typ.NumField(); i++ {
+ fieldType := typ.Field(i)
+
+ // only consider exported fields
+ if fieldType.PkgPath != "" {
+ continue
+ }
+
+ tag := fieldType.Tag.Get("toml")
+
+ // special field name to skip field
+ if tag == "-" {
+ continue
+ }
+
+ k, opts := parseTag(tag)
+ if !isValidName(k) {
+ k = ""
+ }
+
+ f := v.Field(i)
+
+ if k == "" {
+ if fieldType.Anonymous {
+ if fieldType.Type.Kind() == reflect.Struct {
+ walkStruct(ctx, t, f)
+ }
+ continue
+ } else {
+ k = fieldType.Name
+ }
+ }
+
+ if isNil(f) {
+ continue
+ }
+
+ options := valueOptions{
+ multiline: opts.multiline,
+ omitempty: opts.omitempty,
+ commented: opts.commented,
+ comment: fieldType.Tag.Get("comment"),
+ }
+
+ if opts.inline || !willConvertToTableOrArrayTable(ctx, f) {
+ t.pushKV(k, f, options)
+ } else {
+ t.pushTable(k, f, options)
+ }
+ }
+}
+
+func (enc *Encoder) encodeStruct(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
+ var t table
+
+ walkStruct(ctx, &t, v)
+
+ return enc.encodeTable(b, ctx, t)
+}
+
+func (enc *Encoder) encodeComment(indent int, comment string, b []byte) []byte {
+ for len(comment) > 0 {
+ var line string
+ idx := strings.IndexByte(comment, '\n')
+ if idx >= 0 {
+ line = comment[:idx]
+ comment = comment[idx+1:]
+ } else {
+ line = comment
+ comment = ""
+ }
+ b = enc.indent(indent, b)
+ b = append(b, "# "...)
+ b = append(b, line...)
+ b = append(b, '\n')
+ }
+ return b
+}
+
+func isValidName(s string) bool {
+ if s == "" {
+ return false
+ }
+ for _, c := range s {
+ switch {
+ case strings.ContainsRune("!#$%&()*+-./:;<=>?@[]^_{|}~ ", c):
+ // Backslash and quote chars are reserved, but
+ // otherwise any punctuation chars are allowed
+ // in a tag name.
+ case !unicode.IsLetter(c) && !unicode.IsDigit(c):
+ return false
+ }
+ }
+ return true
+}
+
+type tagOptions struct {
+ multiline bool
+ inline bool
+ omitempty bool
+ commented bool
+}
+
+func parseTag(tag string) (string, tagOptions) {
+ opts := tagOptions{}
+
+ idx := strings.Index(tag, ",")
+ if idx == -1 {
+ return tag, opts
+ }
+
+ raw := tag[idx+1:]
+ tag = string(tag[:idx])
+ for raw != "" {
+ var o string
+ i := strings.Index(raw, ",")
+ if i >= 0 {
+ o, raw = raw[:i], raw[i+1:]
+ } else {
+ o, raw = raw, ""
+ }
+ switch o {
+ case "multiline":
+ opts.multiline = true
+ case "inline":
+ opts.inline = true
+ case "omitempty":
+ opts.omitempty = true
+ case "commented":
+ opts.commented = true
+ }
+ }
+
+ return tag, opts
+}
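+
+// For illustration (not part of the upstream file):
+//
+//    parseTag("name,omitempty,multiline")
+//    // -> "name", tagOptions{multiline: true, omitempty: true}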
+
+func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, error) {
+ var err error
+
+ ctx.shiftKey()
+
+ if ctx.insideKv || (ctx.inline && !ctx.isRoot()) {
+ return enc.encodeTableInline(b, ctx, t)
+ }
+
+ if !ctx.skipTableHeader {
+ b, err = enc.encodeTableHeader(ctx, b)
+ if err != nil {
+ return nil, err
+ }
+
+ if enc.indentTables && len(ctx.parentKey) > 0 {
+ ctx.indent++
+ }
+ }
+ ctx.skipTableHeader = false
+
+ hasNonEmptyKV := false
+ for _, kv := range t.kvs {
+ if shouldOmitEmpty(kv.Options, kv.Value) {
+ continue
+ }
+ hasNonEmptyKV = true
+
+ ctx.setKey(kv.Key)
+ ctx2 := ctx
+ ctx2.commented = kv.Options.commented || ctx2.commented
+
+ b, err = enc.encodeKv(b, ctx2, kv.Options, kv.Value)
+ if err != nil {
+ return nil, err
+ }
+
+ b = append(b, '\n')
+ }
+
+ first := true
+ for _, table := range t.tables {
+ if shouldOmitEmpty(table.Options, table.Value) {
+ continue
+ }
+ if first {
+ first = false
+ if hasNonEmptyKV {
+ b = append(b, '\n')
+ }
+ } else {
+ b = append(b, "\n"...)
+ }
+
+ ctx.setKey(table.Key)
+
+ ctx.options = table.Options
+ ctx2 := ctx
+ ctx2.commented = ctx2.commented || ctx.options.commented
+
+ b, err = enc.encode(b, ctx2, table.Value)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return b, nil
+}
+
+func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte, error) {
+ var err error
+
+ b = append(b, '{')
+
+ first := true
+ for _, kv := range t.kvs {
+ if shouldOmitEmpty(kv.Options, kv.Value) {
+ continue
+ }
+
+ if first {
+ first = false
+ } else {
+ b = append(b, `, `...)
+ }
+
+ ctx.setKey(kv.Key)
+
+ b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ if len(t.tables) > 0 {
+ panic("inline table cannot contain nested tables, only key-values")
+ }
+
+ b = append(b, "}"...)
+
+ return b, nil
+}
+
+func willConvertToTable(ctx encoderCtx, v reflect.Value) bool {
+ if !v.IsValid() {
+ return false
+ }
+ if v.Type() == timeType || v.Type().Implements(textMarshalerType) || (v.Kind() != reflect.Ptr && v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) {
+ return false
+ }
+
+ t := v.Type()
+ switch t.Kind() {
+ case reflect.Map, reflect.Struct:
+ return !ctx.inline
+ case reflect.Interface:
+ return willConvertToTable(ctx, v.Elem())
+ case reflect.Ptr:
+ if v.IsNil() {
+ return false
+ }
+
+ return willConvertToTable(ctx, v.Elem())
+ default:
+ return false
+ }
+}
+
+func willConvertToTableOrArrayTable(ctx encoderCtx, v reflect.Value) bool {
+ if ctx.insideKv {
+ return false
+ }
+ t := v.Type()
+
+ if t.Kind() == reflect.Interface {
+ return willConvertToTableOrArrayTable(ctx, v.Elem())
+ }
+
+ if t.Kind() == reflect.Slice || t.Kind() == reflect.Array {
+ if v.Len() == 0 {
+ // An empty slice should be a kv = [].
+ return false
+ }
+
+ for i := 0; i < v.Len(); i++ {
+ t := willConvertToTable(ctx, v.Index(i))
+
+ if !t {
+ return false
+ }
+ }
+
+ return true
+ }
+
+ return willConvertToTable(ctx, v)
+}
+
+func (enc *Encoder) encodeSlice(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
+ if v.Len() == 0 {
+ b = append(b, "[]"...)
+
+ return b, nil
+ }
+
+ if willConvertToTableOrArrayTable(ctx, v) {
+ return enc.encodeSliceAsArrayTable(b, ctx, v)
+ }
+
+ return enc.encodeSliceAsArray(b, ctx, v)
+}
+
+// caller should have checked that v is a slice that only contains values that
+// encode into tables.
+func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
+ ctx.shiftKey()
+
+ scratch := make([]byte, 0, 64)
+
+ scratch = enc.commented(ctx.commented, scratch)
+
+ scratch = append(scratch, "[["...)
+
+ for i, k := range ctx.parentKey {
+ if i > 0 {
+ scratch = append(scratch, '.')
+ }
+
+ scratch = enc.encodeKey(scratch, k)
+ }
+
+ scratch = append(scratch, "]]\n"...)
+ ctx.skipTableHeader = true
+
+ b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
+
+ if enc.indentTables {
+ ctx.indent++
+ }
+
+ for i := 0; i < v.Len(); i++ {
+ if i != 0 {
+ b = append(b, "\n"...)
+ }
+
+ b = append(b, scratch...)
+
+ var err error
+ b, err = enc.encode(b, ctx, v.Index(i))
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return b, nil
+}
+
+func (enc *Encoder) encodeSliceAsArray(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
+ multiline := ctx.options.multiline || enc.arraysMultiline
+ separator := ", "
+
+ b = append(b, '[')
+
+ subCtx := ctx
+ subCtx.options = valueOptions{}
+
+ if multiline {
+ separator = ",\n"
+
+ b = append(b, '\n')
+
+ subCtx.indent++
+ }
+
+ var err error
+ first := true
+
+ for i := 0; i < v.Len(); i++ {
+ if first {
+ first = false
+ } else {
+ b = append(b, separator...)
+ }
+
+ if multiline {
+ b = enc.indent(subCtx.indent, b)
+ }
+
+ b, err = enc.encode(b, subCtx, v.Index(i))
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ if multiline {
+ b = append(b, '\n')
+ b = enc.indent(ctx.indent, b)
+ }
+
+ b = append(b, ']')
+
+ return b, nil
+}
+
+func (enc *Encoder) indent(level int, b []byte) []byte {
+ for i := 0; i < level; i++ {
+ b = append(b, enc.indentSymbol...)
+ }
+
+ return b
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/strict.go b/vendor/github.com/pelletier/go-toml/v2/strict.go
new file mode 100644
index 0000000..802e7e4
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/strict.go
@@ -0,0 +1,107 @@
+package toml
+
+import (
+ "github.com/pelletier/go-toml/v2/internal/danger"
+ "github.com/pelletier/go-toml/v2/internal/tracker"
+ "github.com/pelletier/go-toml/v2/unstable"
+)
+
+type strict struct {
+ Enabled bool
+
+ // Tracks the current key being processed.
+ key tracker.KeyTracker
+
+ missing []unstable.ParserError
+}
+
+func (s *strict) EnterTable(node *unstable.Node) {
+ if !s.Enabled {
+ return
+ }
+
+ s.key.UpdateTable(node)
+}
+
+func (s *strict) EnterArrayTable(node *unstable.Node) {
+ if !s.Enabled {
+ return
+ }
+
+ s.key.UpdateArrayTable(node)
+}
+
+func (s *strict) EnterKeyValue(node *unstable.Node) {
+ if !s.Enabled {
+ return
+ }
+
+ s.key.Push(node)
+}
+
+func (s *strict) ExitKeyValue(node *unstable.Node) {
+ if !s.Enabled {
+ return
+ }
+
+ s.key.Pop(node)
+}
+
+func (s *strict) MissingTable(node *unstable.Node) {
+ if !s.Enabled {
+ return
+ }
+
+ s.missing = append(s.missing, unstable.ParserError{
+ Highlight: keyLocation(node),
+ Message: "missing table",
+ Key: s.key.Key(),
+ })
+}
+
+func (s *strict) MissingField(node *unstable.Node) {
+ if !s.Enabled {
+ return
+ }
+
+ s.missing = append(s.missing, unstable.ParserError{
+ Highlight: keyLocation(node),
+ Message: "missing field",
+ Key: s.key.Key(),
+ })
+}
+
+func (s *strict) Error(doc []byte) error {
+ if !s.Enabled || len(s.missing) == 0 {
+ return nil
+ }
+
+ err := &StrictMissingError{
+ Errors: make([]DecodeError, 0, len(s.missing)),
+ }
+
+ for _, derr := range s.missing {
+ derr := derr
+ err.Errors = append(err.Errors, *wrapDecodeError(doc, &derr))
+ }
+
+ return err
+}
+
+func keyLocation(node *unstable.Node) []byte {
+ k := node.Key()
+
+ hasOne := k.Next()
+ if !hasOne {
+ panic("should not be called with empty key")
+ }
+
+ start := k.Node().Data
+ end := k.Node().Data
+
+ for k.Next() {
+ end = k.Node().Data
+ }
+
+ return danger.BytesRange(start, end)
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/toml.abnf b/vendor/github.com/pelletier/go-toml/v2/toml.abnf
new file mode 100644
index 0000000..473f374
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/toml.abnf
@@ -0,0 +1,243 @@
+;; This document describes TOML's syntax, using the ABNF format (defined in
+;; RFC 5234 -- https://www.ietf.org/rfc/rfc5234.txt).
+;;
+;; All valid TOML documents will match this description; however, certain
+;; invalid documents would need to be rejected as per the semantics described
+;; in the supporting text description.
+
+;; It is possible to try this grammar interactively, using instaparse.
+;; http://instaparse.mojombo.com/
+;;
+;; To do so, in the lower right, click on Options and change `:input-format` to
+;; ':abnf'. Then paste this entire ABNF document into the grammar entry box
+;; (above the options). Then you can type or paste a sample TOML document into
+;; the beige box on the left. Tada!
+
+;; Overall Structure
+
+toml = expression *( newline expression )
+
+expression = ws [ comment ]
+expression =/ ws keyval ws [ comment ]
+expression =/ ws table ws [ comment ]
+
+;; Whitespace
+
+ws = *wschar
+wschar = %x20 ; Space
+wschar =/ %x09 ; Horizontal tab
+
+;; Newline
+
+newline = %x0A ; LF
+newline =/ %x0D.0A ; CRLF
+
+;; Comment
+
+comment-start-symbol = %x23 ; #
+non-ascii = %x80-D7FF / %xE000-10FFFF
+non-eol = %x09 / %x20-7F / non-ascii
+
+comment = comment-start-symbol *non-eol
+
+;; Key-Value pairs
+
+keyval = key keyval-sep val
+
+key = simple-key / dotted-key
+simple-key = quoted-key / unquoted-key
+
+unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
+quoted-key = basic-string / literal-string
+dotted-key = simple-key 1*( dot-sep simple-key )
+
+dot-sep = ws %x2E ws ; . Period
+keyval-sep = ws %x3D ws ; =
+
+val = string / boolean / array / inline-table / date-time / float / integer
+
+;; String
+
+string = ml-basic-string / basic-string / ml-literal-string / literal-string
+
+;; Basic String
+
+basic-string = quotation-mark *basic-char quotation-mark
+
+quotation-mark = %x22 ; "
+
+basic-char = basic-unescaped / escaped
+basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+escaped = escape escape-seq-char
+
+escape = %x5C ; \
+escape-seq-char = %x22 ; " quotation mark U+0022
+escape-seq-char =/ %x5C ; \ reverse solidus U+005C
+escape-seq-char =/ %x62 ; b backspace U+0008
+escape-seq-char =/ %x66 ; f form feed U+000C
+escape-seq-char =/ %x6E ; n line feed U+000A
+escape-seq-char =/ %x72 ; r carriage return U+000D
+escape-seq-char =/ %x74 ; t tab U+0009
+escape-seq-char =/ %x75 4HEXDIG ; uXXXX U+XXXX
+escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX
+
+;; Multiline Basic String
+
+ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
+ ml-basic-string-delim
+ml-basic-string-delim = 3quotation-mark
+ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ]
+
+mlb-content = mlb-char / newline / mlb-escaped-nl
+mlb-char = mlb-unescaped / escaped
+mlb-quotes = 1*2quotation-mark
+mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+mlb-escaped-nl = escape ws newline *( wschar / newline )
+
+;; Literal String
+
+literal-string = apostrophe *literal-char apostrophe
+
+apostrophe = %x27 ; ' apostrophe
+
+literal-char = %x09 / %x20-26 / %x28-7E / non-ascii
+
+;; Multiline Literal String
+
+ml-literal-string = ml-literal-string-delim [ newline ] ml-literal-body
+ ml-literal-string-delim
+ml-literal-string-delim = 3apostrophe
+ml-literal-body = *mll-content *( mll-quotes 1*mll-content ) [ mll-quotes ]
+
+mll-content = mll-char / newline
+mll-char = %x09 / %x20-26 / %x28-7E / non-ascii
+mll-quotes = 1*2apostrophe
+
+;; Integer
+
+integer = dec-int / hex-int / oct-int / bin-int
+
+minus = %x2D ; -
+plus = %x2B ; +
+underscore = %x5F ; _
+digit1-9 = %x31-39 ; 1-9
+digit0-7 = %x30-37 ; 0-7
+digit0-1 = %x30-31 ; 0-1
+
+hex-prefix = %x30.78 ; 0x
+oct-prefix = %x30.6F ; 0o
+bin-prefix = %x30.62 ; 0b
+
+dec-int = [ minus / plus ] unsigned-dec-int
+unsigned-dec-int = DIGIT / digit1-9 1*( DIGIT / underscore DIGIT )
+
+hex-int = hex-prefix HEXDIG *( HEXDIG / underscore HEXDIG )
+oct-int = oct-prefix digit0-7 *( digit0-7 / underscore digit0-7 )
+bin-int = bin-prefix digit0-1 *( digit0-1 / underscore digit0-1 )
+
+;; Float
+
+float = float-int-part ( exp / frac [ exp ] )
+float =/ special-float
+
+float-int-part = dec-int
+frac = decimal-point zero-prefixable-int
+decimal-point = %x2E ; .
+zero-prefixable-int = DIGIT *( DIGIT / underscore DIGIT )
+
+exp = "e" float-exp-part
+float-exp-part = [ minus / plus ] zero-prefixable-int
+
+special-float = [ minus / plus ] ( inf / nan )
+inf = %x69.6e.66 ; inf
+nan = %x6e.61.6e ; nan
+
+;; Boolean
+
+boolean = true / false
+
+true = %x74.72.75.65 ; true
+false = %x66.61.6C.73.65 ; false
+
+;; Date and Time (as defined in RFC 3339)
+
+date-time = offset-date-time / local-date-time / local-date / local-time
+
+date-fullyear = 4DIGIT
+date-month = 2DIGIT ; 01-12
+date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year
+time-delim = "T" / %x20 ; T, t, or space
+time-hour = 2DIGIT ; 00-23
+time-minute = 2DIGIT ; 00-59
+time-second = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second rules
+time-secfrac = "." 1*DIGIT
+time-numoffset = ( "+" / "-" ) time-hour ":" time-minute
+time-offset = "Z" / time-numoffset
+
+partial-time = time-hour ":" time-minute ":" time-second [ time-secfrac ]
+full-date = date-fullyear "-" date-month "-" date-mday
+full-time = partial-time time-offset
+
+;; Offset Date-Time
+
+offset-date-time = full-date time-delim full-time
+
+;; Local Date-Time
+
+local-date-time = full-date time-delim partial-time
+
+;; Local Date
+
+local-date = full-date
+
+;; Local Time
+
+local-time = partial-time
+
+;; Array
+
+array = array-open [ array-values ] ws-comment-newline array-close
+
+array-open = %x5B ; [
+array-close = %x5D ; ]
+
+array-values = ws-comment-newline val ws-comment-newline array-sep array-values
+array-values =/ ws-comment-newline val ws-comment-newline [ array-sep ]
+
+array-sep = %x2C ; , Comma
+
+ws-comment-newline = *( wschar / [ comment ] newline )
+
+;; Table
+
+table = std-table / array-table
+
+;; Standard Table
+
+std-table = std-table-open key std-table-close
+
+std-table-open = %x5B ws ; [ Left square bracket
+std-table-close = ws %x5D ; ] Right square bracket
+
+;; Inline Table
+
+inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close
+
+inline-table-open = %x7B ws ; {
+inline-table-close = ws %x7D ; }
+inline-table-sep = ws %x2C ws ; , Comma
+
+inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ]
+
+;; Array Table
+
+array-table = array-table-open key array-table-close
+
+array-table-open = %x5B.5B ws ; [[ Double left square bracket
+array-table-close = ws %x5D.5D ; ]] Double right square bracket
+
+;; Built-in ABNF terms, reproduced here for clarity
+
+ALPHA = %x41-5A / %x61-7A ; A-Z / a-z
+DIGIT = %x30-39 ; 0-9
+HEXDIG = DIGIT / "A" / "B" / "C" / "D" / "E" / "F"
diff --git a/vendor/github.com/pelletier/go-toml/v2/types.go b/vendor/github.com/pelletier/go-toml/v2/types.go
new file mode 100644
index 0000000..3c6b8fe
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/types.go
@@ -0,0 +1,14 @@
+package toml
+
+import (
+ "encoding"
+ "reflect"
+ "time"
+)
+
+var timeType = reflect.TypeOf((*time.Time)(nil)).Elem()
+var textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
+var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
+var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}(nil))
+var sliceInterfaceType = reflect.TypeOf([]interface{}(nil))
+var stringType = reflect.TypeOf("")
diff --git a/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go b/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go
new file mode 100644
index 0000000..c5e5f33
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go
@@ -0,0 +1,1264 @@
+package toml
+
+import (
+ "encoding"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "math"
+ "reflect"
+ "strings"
+ "sync/atomic"
+ "time"
+
+ "github.com/pelletier/go-toml/v2/internal/danger"
+ "github.com/pelletier/go-toml/v2/internal/tracker"
+ "github.com/pelletier/go-toml/v2/unstable"
+)
+
+// Unmarshal deserializes a TOML document into a Go value.
+//
+// It is a shortcut for Decoder.Decode() with the default options.
+func Unmarshal(data []byte, v interface{}) error {
+ p := unstable.Parser{}
+ p.Reset(data)
+ d := decoder{p: &p}
+
+ return d.FromParser(v)
+}
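+
+// Illustrative usage sketch (not part of the upstream file):
+//
+//    var cfg map[string]interface{}
+//    err := Unmarshal([]byte("answer = 42"), &cfg)
+//    // cfg["answer"] == int64(42); integers decode as int64 in interfaces.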
+
+// Decoder reads and decodes a TOML document from an input stream.
+type Decoder struct {
+ // input
+ r io.Reader
+
+ // global settings
+ strict bool
+}
+
+// NewDecoder creates a new Decoder that will read from r.
+func NewDecoder(r io.Reader) *Decoder {
+ return &Decoder{r: r}
+}
+
+// DisallowUnknownFields causes the Decoder to return an error when the
+// destination is a struct and the input contains a key that does not match a
+// non-ignored field.
+//
+// In that case, the Decoder returns a StrictMissingError that can be used to
+// retrieve the individual errors as well as generate a human readable
+// description of the missing fields.
+func (d *Decoder) DisallowUnknownFields() *Decoder {
+ d.strict = true
+ return d
+}
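+
+// Illustrative sketch (not part of the upstream file; Config and r are
+// hypothetical):
+//
+//    var cfg Config
+//    err := NewDecoder(r).DisallowUnknownFields().Decode(&cfg)
+//    var details *StrictMissingError
+//    if errors.As(err, &details) {
+//        for _, e := range details.Errors {
+//            fmt.Println(e.Error())
+//        }
+//    }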
+
+// Decode the whole content of r into v.
+//
+// By default, values in the document that don't exist in the target Go value
+// are ignored. See Decoder.DisallowUnknownFields() to change this behavior.
+//
+// When a TOML local date, time, or date-time is decoded into a time.Time,
+// its value is represented in the time.Local timezone. Otherwise the
+// appropriate Local* structure is used. For time values, precision up to
+// the nanosecond is supported by truncating extra digits.
+//
+// Empty tables decoded in an interface{} create an empty initialized
+// map[string]interface{}.
+//
+// Types implementing the encoding.TextUnmarshaler interface are decoded from a
+// TOML string.
+//
+// When decoding a number, go-toml will return an error if the number is out of
+// bounds for the target type (which includes negative numbers when decoding
+// into an unsigned int).
+//
+// If an error occurs while decoding the content of the document, this function
+// returns a toml.DecodeError, providing context about the issue. When using
+// strict mode and a field is missing, a `toml.StrictMissingError` is
+// returned. In any other case, this function returns a standard Go error.
+//
+// # Type mapping
+//
+// List of supported TOML types and their associated accepted Go types:
+//
+// String -> string
+// Integer -> uint*, int*, depending on size
+// Float -> float*, depending on size
+// Boolean -> bool
+// Offset Date-Time -> time.Time
+// Local Date-Time -> LocalDateTime, time.Time
+// Local Date -> LocalDate, time.Time
+// Local Time -> LocalTime, time.Time
+// Array -> slice and array, depending on elements types
+// Table -> map and struct
+// Inline Table -> same as Table
+// Array of Tables -> same as Array and Table
+func (d *Decoder) Decode(v interface{}) error {
+ b, err := ioutil.ReadAll(d.r)
+ if err != nil {
+ return fmt.Errorf("toml: %w", err)
+ }
+
+ p := unstable.Parser{}
+ p.Reset(b)
+ dec := decoder{
+ p: &p,
+ strict: strict{
+ Enabled: d.strict,
+ },
+ }
+
+ return dec.FromParser(v)
+}
+
+type decoder struct {
+ // Which parser instance is in use for this decoding session.
+ p *unstable.Parser
+
+ // Flag indicating that the current expression is stashed.
+ // If set to true, calling nextExpr will not actually pull a new expression
+ // but turn off the flag instead.
+ stashedExpr bool
+
+ // Skip expressions until a table is found. This is set to true when a
+ // table could not be created (missing field in map), so all KV expressions
+ // need to be skipped.
+ skipUntilTable bool
+
+ // Tracks position in Go arrays.
+ // This is used when decoding [[array tables]] into Go arrays. Given that
+ // array tables are separate TOML expressions, we need to keep track of
+ // where we are in the Go array, as we can't just introspect its size.
+ arrayIndexes map[reflect.Value]int
+
+ // Tracks keys that have been seen, with which type.
+ seen tracker.SeenTracker
+
+ // Strict mode
+ strict strict
+
+ // Current context for the error.
+ errorContext *errorContext
+}
+
+type errorContext struct {
+ Struct reflect.Type
+ Field []int
+}
+
+func (d *decoder) typeMismatchError(toml string, target reflect.Type) error {
+ return fmt.Errorf("toml: %s", d.typeMismatchString(toml, target))
+}
+
+func (d *decoder) typeMismatchString(toml string, target reflect.Type) string {
+ if d.errorContext != nil && d.errorContext.Struct != nil {
+ ctx := d.errorContext
+ f := ctx.Struct.FieldByIndex(ctx.Field)
+ return fmt.Sprintf("cannot decode TOML %s into struct field %s.%s of type %s", toml, ctx.Struct, f.Name, f.Type)
+ }
+ return fmt.Sprintf("cannot decode TOML %s into a Go value of type %s", toml, target)
+}
+
+func (d *decoder) expr() *unstable.Node {
+ return d.p.Expression()
+}
+
+func (d *decoder) nextExpr() bool {
+ if d.stashedExpr {
+ d.stashedExpr = false
+ return true
+ }
+ return d.p.NextExpression()
+}
+
+func (d *decoder) stashExpr() {
+ d.stashedExpr = true
+}
+
+func (d *decoder) arrayIndex(shouldAppend bool, v reflect.Value) int {
+ if d.arrayIndexes == nil {
+ d.arrayIndexes = make(map[reflect.Value]int, 1)
+ }
+
+ idx, ok := d.arrayIndexes[v]
+
+ if !ok {
+ d.arrayIndexes[v] = 0
+ } else if shouldAppend {
+ idx++
+ d.arrayIndexes[v] = idx
+ }
+
+ return idx
+}
+
+func (d *decoder) FromParser(v interface{}) error {
+ r := reflect.ValueOf(v)
+ if r.Kind() != reflect.Ptr {
+ return fmt.Errorf("toml: decoding can only be performed into a pointer, not %s", r.Kind())
+ }
+
+ if r.IsNil() {
+ return fmt.Errorf("toml: decoding pointer target cannot be nil")
+ }
+
+ r = r.Elem()
+ if r.Kind() == reflect.Interface && r.IsNil() {
+ newMap := map[string]interface{}{}
+ r.Set(reflect.ValueOf(newMap))
+ }
+
+ err := d.fromParser(r)
+ if err == nil {
+ return d.strict.Error(d.p.Data())
+ }
+
+ var e *unstable.ParserError
+ if errors.As(err, &e) {
+ return wrapDecodeError(d.p.Data(), e)
+ }
+
+ return err
+}
+
+func (d *decoder) fromParser(root reflect.Value) error {
+ for d.nextExpr() {
+ err := d.handleRootExpression(d.expr(), root)
+ if err != nil {
+ return err
+ }
+ }
+
+ return d.p.Error()
+}
+
+/*
+Rules for the unmarshal code:
+
+- The stack is used to keep track of which values need to be set where.
+- handle* functions <=> switch on a given unstable.Kind.
+- unmarshalX* functions need to unmarshal a node of kind X.
+- An "object" is either a struct or a map.
+*/
+
+func (d *decoder) handleRootExpression(expr *unstable.Node, v reflect.Value) error {
+ var x reflect.Value
+ var err error
+
+ if !(d.skipUntilTable && expr.Kind == unstable.KeyValue) {
+ err = d.seen.CheckExpression(expr)
+ if err != nil {
+ return err
+ }
+ }
+
+ switch expr.Kind {
+ case unstable.KeyValue:
+ if d.skipUntilTable {
+ return nil
+ }
+ x, err = d.handleKeyValue(expr, v)
+ case unstable.Table:
+ d.skipUntilTable = false
+ d.strict.EnterTable(expr)
+ x, err = d.handleTable(expr.Key(), v)
+ case unstable.ArrayTable:
+ d.skipUntilTable = false
+ d.strict.EnterArrayTable(expr)
+ x, err = d.handleArrayTable(expr.Key(), v)
+ default:
+ panic(fmt.Errorf("parser should not permit expression of kind %s at document root", expr.Kind))
+ }
+
+ if d.skipUntilTable {
+ if expr.Kind == unstable.Table || expr.Kind == unstable.ArrayTable {
+ d.strict.MissingTable(expr)
+ }
+ } else if err == nil && x.IsValid() {
+ v.Set(x)
+ }
+
+ return err
+}
+
+func (d *decoder) handleArrayTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
+ if key.Next() {
+ return d.handleArrayTablePart(key, v)
+ }
+ return d.handleKeyValues(v)
+}
+
+func (d *decoder) handleArrayTableCollectionLast(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
+ switch v.Kind() {
+ case reflect.Interface:
+ elem := v.Elem()
+ if !elem.IsValid() {
+ elem = reflect.New(sliceInterfaceType).Elem()
+ elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16))
+ } else if elem.Kind() == reflect.Slice {
+ if elem.Type() != sliceInterfaceType {
+ elem = reflect.New(sliceInterfaceType).Elem()
+ elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16))
+ } else if !elem.CanSet() {
+ nelem := reflect.New(sliceInterfaceType).Elem()
+ nelem.Set(reflect.MakeSlice(sliceInterfaceType, elem.Len(), elem.Cap()))
+ reflect.Copy(nelem, elem)
+ elem = nelem
+ }
+ }
+ return d.handleArrayTableCollectionLast(key, elem)
+ case reflect.Ptr:
+ elem := v.Elem()
+ if !elem.IsValid() {
+ ptr := reflect.New(v.Type().Elem())
+ v.Set(ptr)
+ elem = ptr.Elem()
+ }
+
+ elem, err := d.handleArrayTableCollectionLast(key, elem)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ v.Elem().Set(elem)
+
+ return v, nil
+ case reflect.Slice:
+ elemType := v.Type().Elem()
+ var elem reflect.Value
+ if elemType.Kind() == reflect.Interface {
+ elem = makeMapStringInterface()
+ } else {
+ elem = reflect.New(elemType).Elem()
+ }
+ elem2, err := d.handleArrayTable(key, elem)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ if elem2.IsValid() {
+ elem = elem2
+ }
+ return reflect.Append(v, elem), nil
+ case reflect.Array:
+ idx := d.arrayIndex(true, v)
+ if idx >= v.Len() {
+ return v, fmt.Errorf("%s at position %d", d.typeMismatchError("array table", v.Type()), idx)
+ }
+ elem := v.Index(idx)
+ _, err := d.handleArrayTable(key, elem)
+ return v, err
+ default:
+ return reflect.Value{}, d.typeMismatchError("array table", v.Type())
+ }
+}
+
+// When parsing an array table expression, each part of the key needs to be
+// evaluated like a normal key, but if it returns a collection, it also
+// needs to point to the last element of the collection. If it is the last
+// part of the key, it instead creates a new element at the end.
+func (d *decoder) handleArrayTableCollection(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
+ if key.IsLast() {
+ return d.handleArrayTableCollectionLast(key, v)
+ }
+
+ switch v.Kind() {
+ case reflect.Ptr:
+ elem := v.Elem()
+ if !elem.IsValid() {
+ ptr := reflect.New(v.Type().Elem())
+ v.Set(ptr)
+ elem = ptr.Elem()
+ }
+
+ elem, err := d.handleArrayTableCollection(key, elem)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ if elem.IsValid() {
+ v.Elem().Set(elem)
+ }
+
+ return v, nil
+ case reflect.Slice:
+ elem := v.Index(v.Len() - 1)
+ x, err := d.handleArrayTable(key, elem)
+ if err != nil || d.skipUntilTable {
+ return reflect.Value{}, err
+ }
+ if x.IsValid() {
+ elem.Set(x)
+ }
+
+ return v, err
+ case reflect.Array:
+ idx := d.arrayIndex(false, v)
+ if idx >= v.Len() {
+ return v, fmt.Errorf("%s at position %d", d.typeMismatchError("array table", v.Type()), idx)
+ }
+ elem := v.Index(idx)
+ _, err := d.handleArrayTable(key, elem)
+ return v, err
+ }
+
+ return d.handleArrayTable(key, v)
+}
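+
+// For illustration (not part of the upstream file): decoding
+//
+//    [[a.b]]
+//    [[a.b]]
+//
+// evaluates "a" as a prefix part (following any collection found there to
+// its last element) and, on the last part "b", appends a new element via
+// handleArrayTableCollectionLast.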
+
+func (d *decoder) handleKeyPart(key unstable.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) {
+ var rv reflect.Value
+
+ // First, dispatch over v to make sure it is a valid object.
+ // There is no guarantee over what it could be.
+ switch v.Kind() {
+ case reflect.Ptr:
+ elem := v.Elem()
+ if !elem.IsValid() {
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ elem = v.Elem()
+ return d.handleKeyPart(key, elem, nextFn, makeFn)
+ case reflect.Map:
+ vt := v.Type()
+
+ // Create the key for the map element. Convert to key type.
+ mk, err := d.keyFromData(vt.Key(), key.Node().Data)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+
+ // If the map does not exist, create it.
+ if v.IsNil() {
+ vt := v.Type()
+ v = reflect.MakeMap(vt)
+ rv = v
+ }
+
+ mv := v.MapIndex(mk)
+ set := false
+ if !mv.IsValid() {
+ // If there is no value in the map, create a new one according to
+ // the map type. If the element type is interface, create either a
+ // map[string]interface{} or a []interface{} depending on whether
+ // this is the last part of the array table key.
+
+ t := vt.Elem()
+ if t.Kind() == reflect.Interface {
+ mv = makeFn()
+ } else {
+ mv = reflect.New(t).Elem()
+ }
+ set = true
+ } else if mv.Kind() == reflect.Interface {
+ mv = mv.Elem()
+ if !mv.IsValid() {
+ mv = makeFn()
+ }
+ set = true
+ } else if !mv.CanAddr() {
+ vt := v.Type()
+ t := vt.Elem()
+ oldmv := mv
+ mv = reflect.New(t).Elem()
+ mv.Set(oldmv)
+ set = true
+ }
+
+ x, err := nextFn(key, mv)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+
+ if x.IsValid() {
+ mv = x
+ set = true
+ }
+
+ if set {
+ v.SetMapIndex(mk, mv)
+ }
+ case reflect.Struct:
+ path, found := structFieldPath(v, string(key.Node().Data))
+ if !found {
+ d.skipUntilTable = true
+ return reflect.Value{}, nil
+ }
+
+ if d.errorContext == nil {
+ d.errorContext = new(errorContext)
+ }
+ t := v.Type()
+ d.errorContext.Struct = t
+ d.errorContext.Field = path
+
+ f := fieldByIndex(v, path)
+ x, err := nextFn(key, f)
+ if err != nil || d.skipUntilTable {
+ return reflect.Value{}, err
+ }
+ if x.IsValid() {
+ f.Set(x)
+ }
+ d.errorContext.Field = nil
+ d.errorContext.Struct = nil
+ case reflect.Interface:
+ if v.Elem().IsValid() {
+ v = v.Elem()
+ } else {
+ v = makeMapStringInterface()
+ }
+
+ x, err := d.handleKeyPart(key, v, nextFn, makeFn)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ if x.IsValid() {
+ v = x
+ }
+ rv = v
+ default:
+ panic(fmt.Errorf("unhandled part: %s", v.Kind()))
+ }
+
+ return rv, nil
+}
+
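+// exampleAddressableCopy is a hedged illustrative sketch (not used by the
+// decoder): values read with MapIndex are never addressable, so before
+// mutating one in place, the !mv.CanAddr() branch above copies it into a
+// fresh addressable value and later writes it back with SetMapIndex.
+func exampleAddressableCopy(mv reflect.Value) reflect.Value {
+	nv := reflect.New(mv.Type()).Elem() // addressable zero value
+	nv.Set(mv)                          // copy the original contents
+	return nv
+}
+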
+// handleArrayTablePart navigates the Go structure v using the given key. It
+// is only used for the prefix (non-last) parts of an array-table key. When
+// encountering a collection, it should go to the last element.
+func (d *decoder) handleArrayTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
+ var makeFn valueMakerFn
+ if key.IsLast() {
+ makeFn = makeSliceInterface
+ } else {
+ makeFn = makeMapStringInterface
+ }
+ return d.handleKeyPart(key, v, d.handleArrayTableCollection, makeFn)
+}
+
+// handleTable scopes the remaining parts of the key into v, then handles the
+// key-value expressions belonging to this table, stopping at the next
+// expression it cannot handle.
+func (d *decoder) handleTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
+ if v.Kind() == reflect.Slice {
+ if v.Len() == 0 {
+ return reflect.Value{}, unstable.NewParserError(key.Node().Data, "cannot store a table in a slice")
+ }
+ elem := v.Index(v.Len() - 1)
+ x, err := d.handleTable(key, elem)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ if x.IsValid() {
+ elem.Set(x)
+ }
+ return reflect.Value{}, nil
+ }
+ if key.Next() {
+ // Still scoping the key
+ return d.handleTablePart(key, v)
+ }
+ // Done scoping the key.
+ // Now handle all the key-value expressions in this table.
+ return d.handleKeyValues(v)
+}
+
+// Handle root expressions until the end of the document or the next
+// non-key-value.
+func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
+ var rv reflect.Value
+ for d.nextExpr() {
+ expr := d.expr()
+ if expr.Kind != unstable.KeyValue {
+ // Stash the expression so that fromParser can just loop and use
+ // the right handler.
+ // We could just recurse ourselves here, but at least this gives a
+ // chance to pop the stack a bit.
+ d.stashExpr()
+ break
+ }
+
+ err := d.seen.CheckExpression(expr)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+
+ x, err := d.handleKeyValue(expr, v)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ if x.IsValid() {
+ v = x
+ rv = x
+ }
+ }
+ return rv, nil
+}
+
+type (
+ handlerFn func(key unstable.Iterator, v reflect.Value) (reflect.Value, error)
+ valueMakerFn func() reflect.Value
+)
+
+func makeMapStringInterface() reflect.Value {
+ return reflect.MakeMap(mapStringInterfaceType)
+}
+
+func makeSliceInterface() reflect.Value {
+ return reflect.MakeSlice(sliceInterfaceType, 0, 16)
+}
+
+func (d *decoder) handleTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
+ return d.handleKeyPart(key, v, d.handleTable, makeMapStringInterface)
+}
+
+func (d *decoder) tryTextUnmarshaler(node *unstable.Node, v reflect.Value) (bool, error) {
+	// Special case for time.Time, because we allow unmarshaling to it from
+	// several kinds of AST nodes.
+ if v.Type() == timeType {
+ return false, nil
+ }
+
+ if v.CanAddr() && v.Addr().Type().Implements(textUnmarshalerType) {
+ err := v.Addr().Interface().(encoding.TextUnmarshaler).UnmarshalText(node.Data)
+ if err != nil {
+ return false, unstable.NewParserError(d.p.Raw(node.Raw), "%w", err)
+ }
+
+ return true, nil
+ }
+
+ return false, nil
+}
+
+func (d *decoder) handleValue(value *unstable.Node, v reflect.Value) error {
+ for v.Kind() == reflect.Ptr {
+ v = initAndDereferencePointer(v)
+ }
+
+ ok, err := d.tryTextUnmarshaler(value, v)
+ if ok || err != nil {
+ return err
+ }
+
+ switch value.Kind {
+ case unstable.String:
+ return d.unmarshalString(value, v)
+ case unstable.Integer:
+ return d.unmarshalInteger(value, v)
+ case unstable.Float:
+ return d.unmarshalFloat(value, v)
+ case unstable.Bool:
+ return d.unmarshalBool(value, v)
+ case unstable.DateTime:
+ return d.unmarshalDateTime(value, v)
+ case unstable.LocalDate:
+ return d.unmarshalLocalDate(value, v)
+ case unstable.LocalTime:
+ return d.unmarshalLocalTime(value, v)
+ case unstable.LocalDateTime:
+ return d.unmarshalLocalDateTime(value, v)
+ case unstable.InlineTable:
+ return d.unmarshalInlineTable(value, v)
+ case unstable.Array:
+ return d.unmarshalArray(value, v)
+ default:
+ panic(fmt.Errorf("handleValue not implemented for %s", value.Kind))
+ }
+}
+
+func (d *decoder) unmarshalArray(array *unstable.Node, v reflect.Value) error {
+ switch v.Kind() {
+ case reflect.Slice:
+ if v.IsNil() {
+ v.Set(reflect.MakeSlice(v.Type(), 0, 16))
+ } else {
+ v.SetLen(0)
+ }
+ case reflect.Array:
+ // arrays are always initialized
+ case reflect.Interface:
+ elem := v.Elem()
+ if !elem.IsValid() {
+ elem = reflect.New(sliceInterfaceType).Elem()
+ elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16))
+ } else if elem.Kind() == reflect.Slice {
+ if elem.Type() != sliceInterfaceType {
+ elem = reflect.New(sliceInterfaceType).Elem()
+ elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16))
+ } else if !elem.CanSet() {
+ nelem := reflect.New(sliceInterfaceType).Elem()
+ nelem.Set(reflect.MakeSlice(sliceInterfaceType, elem.Len(), elem.Cap()))
+ reflect.Copy(nelem, elem)
+ elem = nelem
+ }
+ }
+ err := d.unmarshalArray(array, elem)
+ if err != nil {
+ return err
+ }
+ v.Set(elem)
+ return nil
+ default:
+ // TODO: use newDecodeError, but first the parser needs to fill
+ // array.Data.
+ return d.typeMismatchError("array", v.Type())
+ }
+
+ elemType := v.Type().Elem()
+
+ it := array.Children()
+ idx := 0
+ for it.Next() {
+ n := it.Node()
+
+ // TODO: optimize
+ if v.Kind() == reflect.Slice {
+ elem := reflect.New(elemType).Elem()
+
+ err := d.handleValue(n, elem)
+ if err != nil {
+ return err
+ }
+
+ v.Set(reflect.Append(v, elem))
+ } else { // array
+ if idx >= v.Len() {
+ return nil
+ }
+ elem := v.Index(idx)
+ err := d.handleValue(n, elem)
+ if err != nil {
+ return err
+ }
+ idx++
+ }
+ }
+
+ return nil
+}
+
+func (d *decoder) unmarshalInlineTable(itable *unstable.Node, v reflect.Value) error {
+ // Make sure v is an initialized object.
+ switch v.Kind() {
+ case reflect.Map:
+ if v.IsNil() {
+ v.Set(reflect.MakeMap(v.Type()))
+ }
+ case reflect.Struct:
+ // structs are always initialized.
+ case reflect.Interface:
+ elem := v.Elem()
+ if !elem.IsValid() {
+ elem = makeMapStringInterface()
+ v.Set(elem)
+ }
+ return d.unmarshalInlineTable(itable, elem)
+ default:
+ return unstable.NewParserError(d.p.Raw(itable.Raw), "cannot store inline table in Go type %s", v.Kind())
+ }
+
+ it := itable.Children()
+ for it.Next() {
+ n := it.Node()
+
+ x, err := d.handleKeyValue(n, v)
+ if err != nil {
+ return err
+ }
+ if x.IsValid() {
+ v = x
+ }
+ }
+
+ return nil
+}
+
+func (d *decoder) unmarshalDateTime(value *unstable.Node, v reflect.Value) error {
+ dt, err := parseDateTime(value.Data)
+ if err != nil {
+ return err
+ }
+
+ v.Set(reflect.ValueOf(dt))
+ return nil
+}
+
+func (d *decoder) unmarshalLocalDate(value *unstable.Node, v reflect.Value) error {
+ ld, err := parseLocalDate(value.Data)
+ if err != nil {
+ return err
+ }
+
+ if v.Type() == timeType {
+ cast := ld.AsTime(time.Local)
+ v.Set(reflect.ValueOf(cast))
+ return nil
+ }
+
+ v.Set(reflect.ValueOf(ld))
+
+ return nil
+}
+
+func (d *decoder) unmarshalLocalTime(value *unstable.Node, v reflect.Value) error {
+ lt, rest, err := parseLocalTime(value.Data)
+ if err != nil {
+ return err
+ }
+
+ if len(rest) > 0 {
+ return unstable.NewParserError(rest, "extra characters at the end of a local time")
+ }
+
+ v.Set(reflect.ValueOf(lt))
+ return nil
+}
+
+func (d *decoder) unmarshalLocalDateTime(value *unstable.Node, v reflect.Value) error {
+ ldt, rest, err := parseLocalDateTime(value.Data)
+ if err != nil {
+ return err
+ }
+
+ if len(rest) > 0 {
+ return unstable.NewParserError(rest, "extra characters at the end of a local date time")
+ }
+
+ if v.Type() == timeType {
+ cast := ldt.AsTime(time.Local)
+
+ v.Set(reflect.ValueOf(cast))
+ return nil
+ }
+
+ v.Set(reflect.ValueOf(ldt))
+
+ return nil
+}
+
+func (d *decoder) unmarshalBool(value *unstable.Node, v reflect.Value) error {
+ b := value.Data[0] == 't'
+
+ switch v.Kind() {
+ case reflect.Bool:
+ v.SetBool(b)
+ case reflect.Interface:
+ v.Set(reflect.ValueOf(b))
+ default:
+ return unstable.NewParserError(value.Data, "cannot assign boolean to a %t", b)
+ }
+
+ return nil
+}
+
+func (d *decoder) unmarshalFloat(value *unstable.Node, v reflect.Value) error {
+ f, err := parseFloat(value.Data)
+ if err != nil {
+ return err
+ }
+
+ switch v.Kind() {
+ case reflect.Float64:
+ v.SetFloat(f)
+ case reflect.Float32:
+		if f > math.MaxFloat32 || f < -math.MaxFloat32 {
+ return unstable.NewParserError(value.Data, "number %f does not fit in a float32", f)
+ }
+ v.SetFloat(f)
+ case reflect.Interface:
+ v.Set(reflect.ValueOf(f))
+ default:
+ return unstable.NewParserError(value.Data, "float cannot be assigned to %s", v.Kind())
+ }
+
+ return nil
+}
+
+const (
+ maxInt = int64(^uint(0) >> 1)
+ minInt = -maxInt - 1
+)
+
+// Maximum value of uint for decoding. Currently the decoder parses the integer
+// into an int64. As a result, on architectures where uint is 64 bits, the
+// effective maximum uint we can decode is the maximum of int64. On
+// architectures where uint is 32 bits, the maximum value we can decode is
+// lower: the maximum of uint32. Expressing this as a single compile-time
+// constant is awkward, so the value is computed during initialization.
+var maxUint int64 = math.MaxInt64
+
+func init() {
+ m := uint64(^uint(0))
+ if m < uint64(maxUint) {
+ maxUint = int64(m)
+ }
+}
+
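+// exampleFitsUint is an illustrative helper (hypothetical, not used below):
+// it shows how the computed maxUint bounds an int64 before converting it to
+// the platform-dependent uint, mirroring the reflect.Uint case in
+// unmarshalInteger.
+func exampleFitsUint(i int64) bool {
+	return i >= 0 && i <= maxUint
+}
+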
+func (d *decoder) unmarshalInteger(value *unstable.Node, v reflect.Value) error {
+ kind := v.Kind()
+ if kind == reflect.Float32 || kind == reflect.Float64 {
+ return d.unmarshalFloat(value, v)
+ }
+
+ i, err := parseInteger(value.Data)
+ if err != nil {
+ return err
+ }
+
+ var r reflect.Value
+
+ switch kind {
+ case reflect.Int64:
+ v.SetInt(i)
+ return nil
+ case reflect.Int32:
+ if i < math.MinInt32 || i > math.MaxInt32 {
+ return fmt.Errorf("toml: number %d does not fit in an int32", i)
+ }
+
+ r = reflect.ValueOf(int32(i))
+ case reflect.Int16:
+ if i < math.MinInt16 || i > math.MaxInt16 {
+ return fmt.Errorf("toml: number %d does not fit in an int16", i)
+ }
+
+ r = reflect.ValueOf(int16(i))
+ case reflect.Int8:
+ if i < math.MinInt8 || i > math.MaxInt8 {
+ return fmt.Errorf("toml: number %d does not fit in an int8", i)
+ }
+
+ r = reflect.ValueOf(int8(i))
+ case reflect.Int:
+ if i < minInt || i > maxInt {
+ return fmt.Errorf("toml: number %d does not fit in an int", i)
+ }
+
+ r = reflect.ValueOf(int(i))
+ case reflect.Uint64:
+ if i < 0 {
+ return fmt.Errorf("toml: negative number %d does not fit in an uint64", i)
+ }
+
+ r = reflect.ValueOf(uint64(i))
+ case reflect.Uint32:
+ if i < 0 || i > math.MaxUint32 {
+ return fmt.Errorf("toml: negative number %d does not fit in an uint32", i)
+ }
+
+ r = reflect.ValueOf(uint32(i))
+ case reflect.Uint16:
+ if i < 0 || i > math.MaxUint16 {
+ return fmt.Errorf("toml: negative number %d does not fit in an uint16", i)
+ }
+
+ r = reflect.ValueOf(uint16(i))
+ case reflect.Uint8:
+ if i < 0 || i > math.MaxUint8 {
+ return fmt.Errorf("toml: negative number %d does not fit in an uint8", i)
+ }
+
+ r = reflect.ValueOf(uint8(i))
+ case reflect.Uint:
+ if i < 0 || i > maxUint {
+ return fmt.Errorf("toml: negative number %d does not fit in an uint", i)
+ }
+
+ r = reflect.ValueOf(uint(i))
+ case reflect.Interface:
+ r = reflect.ValueOf(i)
+ default:
+ return unstable.NewParserError(d.p.Raw(value.Raw), d.typeMismatchString("integer", v.Type()))
+ }
+
+ if !r.Type().AssignableTo(v.Type()) {
+ r = r.Convert(v.Type())
+ }
+
+ v.Set(r)
+
+ return nil
+}
+
+func (d *decoder) unmarshalString(value *unstable.Node, v reflect.Value) error {
+ switch v.Kind() {
+ case reflect.String:
+ v.SetString(string(value.Data))
+ case reflect.Interface:
+ v.Set(reflect.ValueOf(string(value.Data)))
+ default:
+ return unstable.NewParserError(d.p.Raw(value.Raw), d.typeMismatchString("string", v.Type()))
+ }
+
+ return nil
+}
+
+func (d *decoder) handleKeyValue(expr *unstable.Node, v reflect.Value) (reflect.Value, error) {
+ d.strict.EnterKeyValue(expr)
+
+ v, err := d.handleKeyValueInner(expr.Key(), expr.Value(), v)
+ if d.skipUntilTable {
+ d.strict.MissingField(expr)
+ d.skipUntilTable = false
+ }
+
+ d.strict.ExitKeyValue(expr)
+
+ return v, err
+}
+
+func (d *decoder) handleKeyValueInner(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) {
+ if key.Next() {
+ // Still scoping the key
+ return d.handleKeyValuePart(key, value, v)
+ }
+ // Done scoping the key.
+ // v is whatever Go value we need to fill.
+ return reflect.Value{}, d.handleValue(value, v)
+}
+
+func (d *decoder) keyFromData(keyType reflect.Type, data []byte) (reflect.Value, error) {
+ switch {
+ case stringType.AssignableTo(keyType):
+ return reflect.ValueOf(string(data)), nil
+
+ case stringType.ConvertibleTo(keyType):
+ return reflect.ValueOf(string(data)).Convert(keyType), nil
+
+ case keyType.Implements(textUnmarshalerType):
+ mk := reflect.New(keyType.Elem())
+ if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
+ return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err)
+ }
+ return mk, nil
+
+ case reflect.PtrTo(keyType).Implements(textUnmarshalerType):
+ mk := reflect.New(keyType)
+ if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
+ return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err)
+ }
+ return mk.Elem(), nil
+ }
+ return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", stringType, keyType)
+}
+
+func (d *decoder) handleKeyValuePart(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) {
+ // contains the replacement for v
+ var rv reflect.Value
+
+ // First, dispatch over v to make sure it is a valid object.
+ // There is no guarantee over what it could be.
+ switch v.Kind() {
+ case reflect.Map:
+ vt := v.Type()
+
+ mk, err := d.keyFromData(vt.Key(), key.Node().Data)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+
+ // If the map does not exist, create it.
+ if v.IsNil() {
+ v = reflect.MakeMap(vt)
+ rv = v
+ }
+
+ mv := v.MapIndex(mk)
+ set := false
+ if !mv.IsValid() || key.IsLast() {
+ set = true
+ mv = reflect.New(v.Type().Elem()).Elem()
+ }
+
+ nv, err := d.handleKeyValueInner(key, value, mv)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ if nv.IsValid() {
+ mv = nv
+ set = true
+ }
+
+ if set {
+ v.SetMapIndex(mk, mv)
+ }
+ case reflect.Struct:
+ path, found := structFieldPath(v, string(key.Node().Data))
+ if !found {
+ d.skipUntilTable = true
+ break
+ }
+
+ if d.errorContext == nil {
+ d.errorContext = new(errorContext)
+ }
+ t := v.Type()
+ d.errorContext.Struct = t
+ d.errorContext.Field = path
+
+ f := fieldByIndex(v, path)
+
+ if !f.CanAddr() {
+			// If the field is not addressable, we need to take a slower path
+			// and copy the whole struct to a new, addressable location.
+ nvp := reflect.New(v.Type())
+ nvp.Elem().Set(v)
+ v = nvp.Elem()
+ _, err := d.handleKeyValuePart(key, value, v)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ return nvp.Elem(), nil
+ }
+ x, err := d.handleKeyValueInner(key, value, f)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+
+ if x.IsValid() {
+ f.Set(x)
+ }
+ d.errorContext.Struct = nil
+ d.errorContext.Field = nil
+ case reflect.Interface:
+ v = v.Elem()
+
+		// Following encoding/json: when decoding an object into an
+		// interface{}, it always needs to hold a map[string]interface{},
+		// so that the resulting type is consistent whether or not a
+		// previous value was set.
+ if !v.IsValid() || v.Type() != mapStringInterfaceType {
+ v = makeMapStringInterface()
+ }
+
+ x, err := d.handleKeyValuePart(key, value, v)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ if x.IsValid() {
+ v = x
+ }
+ rv = v
+ case reflect.Ptr:
+ elem := v.Elem()
+ if !elem.IsValid() {
+ ptr := reflect.New(v.Type().Elem())
+ v.Set(ptr)
+ rv = v
+ elem = ptr.Elem()
+ }
+
+ elem2, err := d.handleKeyValuePart(key, value, elem)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ if elem2.IsValid() {
+ elem = elem2
+ }
+ v.Elem().Set(elem)
+ default:
+ return reflect.Value{}, fmt.Errorf("unhandled kv part: %s", v.Kind())
+ }
+
+ return rv, nil
+}
+
+func initAndDereferencePointer(v reflect.Value) reflect.Value {
+ var elem reflect.Value
+ if v.IsNil() {
+ ptr := reflect.New(v.Type().Elem())
+ v.Set(ptr)
+ }
+ elem = v.Elem()
+ return elem
+}
+
+// Same as reflect.Value.FieldByIndex, but creates pointers if needed.
+func fieldByIndex(v reflect.Value, path []int) reflect.Value {
+ for _, x := range path {
+ v = v.Field(x)
+
+ if v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ v = v.Elem()
+ }
+ }
+ return v
+}
+
+type fieldPathsMap = map[string][]int
+
+var globalFieldPathsCache atomic.Value // map[danger.TypeID]fieldPathsMap
+
+func structFieldPath(v reflect.Value, name string) ([]int, bool) {
+ t := v.Type()
+
+ cache, _ := globalFieldPathsCache.Load().(map[danger.TypeID]fieldPathsMap)
+ fieldPaths, ok := cache[danger.MakeTypeID(t)]
+
+ if !ok {
+ fieldPaths = map[string][]int{}
+
+ forEachField(t, nil, func(name string, path []int) {
+ fieldPaths[name] = path
+ // extra copy for the case-insensitive match
+ fieldPaths[strings.ToLower(name)] = path
+ })
+
+ newCache := make(map[danger.TypeID]fieldPathsMap, len(cache)+1)
+ newCache[danger.MakeTypeID(t)] = fieldPaths
+ for k, v := range cache {
+ newCache[k] = v
+ }
+ globalFieldPathsCache.Store(newCache)
+ }
+
+ path, ok := fieldPaths[name]
+ if !ok {
+ path, ok = fieldPaths[strings.ToLower(name)]
+ }
+ return path, ok
+}
+
+func forEachField(t reflect.Type, path []int, do func(name string, path []int)) {
+ n := t.NumField()
+ for i := 0; i < n; i++ {
+ f := t.Field(i)
+
+ if !f.Anonymous && f.PkgPath != "" {
+ // only consider exported fields.
+ continue
+ }
+
+ fieldPath := append(path, i)
+ fieldPath = fieldPath[:len(fieldPath):len(fieldPath)]
+
+ name := f.Tag.Get("toml")
+ if name == "-" {
+ continue
+ }
+
+ if i := strings.IndexByte(name, ','); i >= 0 {
+ name = name[:i]
+ }
+
+ if f.Anonymous && name == "" {
+ t2 := f.Type
+ if t2.Kind() == reflect.Ptr {
+ t2 = t2.Elem()
+ }
+
+ if t2.Kind() == reflect.Struct {
+ forEachField(t2, fieldPath, do)
+ }
+ continue
+ }
+
+ if name == "" {
+ name = f.Name
+ }
+
+ do(name, fieldPath)
+ }
+}
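+
+// exampleStructFieldPath is an illustrative sketch, not part of the upstream
+// code: it shows how structFieldPath resolves a TOML key to a field index
+// path, honoring a `toml:"..."` tag with a case-insensitive fallback. The
+// server type is hypothetical.
+func exampleStructFieldPath() ([]int, bool) {
+	type server struct {
+		Addr string `toml:"address"`
+	}
+	// Returns []int{0}, true: "address" matches the tag on Addr.
+	return structFieldPath(reflect.ValueOf(server{}), "address")
+}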
diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/ast.go b/vendor/github.com/pelletier/go-toml/v2/unstable/ast.go
new file mode 100644
index 0000000..f526bf2
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/unstable/ast.go
@@ -0,0 +1,136 @@
+package unstable
+
+import (
+ "fmt"
+ "unsafe"
+
+ "github.com/pelletier/go-toml/v2/internal/danger"
+)
+
+// Iterator over a sequence of nodes.
+//
+// Starts uninitialized; call Next() first.
+//
+// For example:
+//
+// it := n.Children()
+// for it.Next() {
+// n := it.Node()
+// // do something with n
+// }
+type Iterator struct {
+ started bool
+ node *Node
+}
+
+// Next moves the iterator forward and returns true if it points to a
+// node, false otherwise.
+func (c *Iterator) Next() bool {
+ if !c.started {
+ c.started = true
+ } else if c.node.Valid() {
+ c.node = c.node.Next()
+ }
+ return c.node.Valid()
+}
+
+// IsLast returns true if the current node of the iterator is the last
+// one. Subsequent calls to Next() will return false.
+func (c *Iterator) IsLast() bool {
+ return c.node.next == 0
+}
+
+// Node returns a pointer to the node pointed at by the iterator.
+func (c *Iterator) Node() *Node {
+ return c.node
+}
+
+// Node in a TOML expression AST.
+//
+// Depending on Kind, its sequence of children should be interpreted
+// differently.
+//
+// - Array nodes have one child per element in the array.
+// - InlineTable nodes have one child per key-value pair in the table (each of
+// kind KeyValue).
+// - KeyValue nodes have at least two children. The first one is the value. The
+// rest make a potentially dotted key.
+// - Table and ArrayTable nodes' children represent a dotted key (same as
+// KeyValue, but without the first node being the value).
+//
+// When relevant, Raw describes the range of bytes this node is referring to in
+// the input document. Use Parser.Raw() to retrieve the actual bytes.
+type Node struct {
+ Kind Kind
+ Raw Range // Raw bytes from the input.
+ Data []byte // Node value (either allocated or referencing the input).
+
+ // References to other nodes, as offsets in the backing array
+ // from this node. References can go backward, so those can be
+ // negative.
+ next int // 0 if last element
+ child int // 0 if no child
+}
+
+// Range of bytes in the document.
+type Range struct {
+ Offset uint32
+ Length uint32
+}
+
+// Next returns a pointer to the next node, or nil if there is no next node.
+func (n *Node) Next() *Node {
+ if n.next == 0 {
+ return nil
+ }
+ ptr := unsafe.Pointer(n)
+ size := unsafe.Sizeof(Node{})
+ return (*Node)(danger.Stride(ptr, size, n.next))
+}
+
+// Child returns a pointer to the first child node of this node. Other children
+// can be accessed by calling Next on the first child. Returns nil if this Node
+// has no child.
+func (n *Node) Child() *Node {
+ if n.child == 0 {
+ return nil
+ }
+ ptr := unsafe.Pointer(n)
+ size := unsafe.Sizeof(Node{})
+ return (*Node)(danger.Stride(ptr, size, n.child))
+}
+
+// Valid returns true if the node is not nil.
+func (n *Node) Valid() bool {
+ return n != nil
+}
+
+// Key returns the child nodes making up the Key of a supported node. Panics
+// otherwise. They are guaranteed to all be of Kind Key. A simple key
+// yields just one element.
+func (n *Node) Key() Iterator {
+ switch n.Kind {
+ case KeyValue:
+ value := n.Child()
+ if !value.Valid() {
+ panic(fmt.Errorf("KeyValue should have at least two children"))
+ }
+ return Iterator{node: value.Next()}
+ case Table, ArrayTable:
+ return Iterator{node: n.Child()}
+ default:
+ panic(fmt.Errorf("Key() is not supported on a %s", n.Kind))
+ }
+}
+
+// Value returns a pointer to the value node of a KeyValue node, which the
+// parser guarantees to be non-nil. The result is only meaningful when called
+// on a well-formed KeyValue node.
+func (n *Node) Value() *Node {
+ return n.Child()
+}
+
+// Children returns an iterator over a node's children.
+func (n *Node) Children() Iterator {
+ return Iterator{node: n.Child()}
+}
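+
+// exampleWalk is an illustrative sketch (not part of the public API): it
+// shows the intended traversal pattern, visiting a node and then recursing
+// through its children with the Iterator above.
+func exampleWalk(n *Node, visit func(*Node)) {
+	visit(n)
+	it := n.Children()
+	for it.Next() {
+		exampleWalk(it.Node(), visit)
+	}
+}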
diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/builder.go b/vendor/github.com/pelletier/go-toml/v2/unstable/builder.go
new file mode 100644
index 0000000..9538e30
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/unstable/builder.go
@@ -0,0 +1,71 @@
+package unstable
+
+// root contains a full AST.
+//
+// It is immutable once constructed with the builder.
+type root struct {
+ nodes []Node
+}
+
+// Iterator over the top level nodes.
+func (r *root) Iterator() Iterator {
+ it := Iterator{}
+ if len(r.nodes) > 0 {
+ it.node = &r.nodes[0]
+ }
+ return it
+}
+
+func (r *root) at(idx reference) *Node {
+ return &r.nodes[idx]
+}
+
+type reference int
+
+const invalidReference reference = -1
+
+func (r reference) Valid() bool {
+ return r != invalidReference
+}
+
+type builder struct {
+ tree root
+ lastIdx int
+}
+
+func (b *builder) Tree() *root {
+ return &b.tree
+}
+
+func (b *builder) NodeAt(ref reference) *Node {
+ return b.tree.at(ref)
+}
+
+func (b *builder) Reset() {
+ b.tree.nodes = b.tree.nodes[:0]
+ b.lastIdx = 0
+}
+
+func (b *builder) Push(n Node) reference {
+ b.lastIdx = len(b.tree.nodes)
+ b.tree.nodes = append(b.tree.nodes, n)
+ return reference(b.lastIdx)
+}
+
+func (b *builder) PushAndChain(n Node) reference {
+ newIdx := len(b.tree.nodes)
+ b.tree.nodes = append(b.tree.nodes, n)
+ if b.lastIdx >= 0 {
+ b.tree.nodes[b.lastIdx].next = newIdx - b.lastIdx
+ }
+ b.lastIdx = newIdx
+ return reference(b.lastIdx)
+}
+
+func (b *builder) AttachChild(parent reference, child reference) {
+ b.tree.nodes[parent].child = int(child) - int(parent)
+}
+
+func (b *builder) Chain(from reference, to reference) {
+ b.tree.nodes[from].next = int(to) - int(from)
+}
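+
+// exampleBuild is a hedged usage sketch, not part of the upstream code: it
+// assembles a minimal one-key Table node by hand to show how Push and
+// AttachChild cooperate, storing links as relative offsets in the backing
+// array.
+func exampleBuild(b *builder) reference {
+	table := b.Push(Node{Kind: Table})
+	key := b.Push(Node{Kind: Key, Data: []byte("name")})
+	b.AttachChild(table, key) // table.child = key - table
+	return table
+}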
diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/doc.go b/vendor/github.com/pelletier/go-toml/v2/unstable/doc.go
new file mode 100644
index 0000000..7ff26c5
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/unstable/doc.go
@@ -0,0 +1,3 @@
+// Package unstable provides APIs that do not meet the backward compatibility
+// guarantees yet.
+package unstable
diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/kind.go b/vendor/github.com/pelletier/go-toml/v2/unstable/kind.go
new file mode 100644
index 0000000..ff9df1b
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/unstable/kind.go
@@ -0,0 +1,71 @@
+package unstable
+
+import "fmt"
+
+// Kind represents the type of TOML structure contained in a given Node.
+type Kind int
+
+const (
+ // Meta
+ Invalid Kind = iota
+ Comment
+ Key
+
+	// Top-level structures
+ Table
+ ArrayTable
+ KeyValue
+
+	// Container values
+ Array
+ InlineTable
+
+ // Values
+ String
+ Bool
+ Float
+ Integer
+ LocalDate
+ LocalTime
+ LocalDateTime
+ DateTime
+)
+
+// String implementation of fmt.Stringer.
+func (k Kind) String() string {
+ switch k {
+ case Invalid:
+ return "Invalid"
+ case Comment:
+ return "Comment"
+ case Key:
+ return "Key"
+ case Table:
+ return "Table"
+ case ArrayTable:
+ return "ArrayTable"
+ case KeyValue:
+ return "KeyValue"
+ case Array:
+ return "Array"
+ case InlineTable:
+ return "InlineTable"
+ case String:
+ return "String"
+ case Bool:
+ return "Bool"
+ case Float:
+ return "Float"
+ case Integer:
+ return "Integer"
+ case LocalDate:
+ return "LocalDate"
+ case LocalTime:
+ return "LocalTime"
+ case LocalDateTime:
+ return "LocalDateTime"
+ case DateTime:
+ return "DateTime"
+ }
+ panic(fmt.Errorf("Kind.String() not implemented for '%d'", k))
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/parser.go b/vendor/github.com/pelletier/go-toml/v2/unstable/parser.go
new file mode 100644
index 0000000..50358a4
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/unstable/parser.go
@@ -0,0 +1,1245 @@
+package unstable
+
+import (
+ "bytes"
+ "fmt"
+ "unicode"
+
+ "github.com/pelletier/go-toml/v2/internal/characters"
+ "github.com/pelletier/go-toml/v2/internal/danger"
+)
+
+// ParserError describes an error relative to the content of the document.
+//
+// It cannot outlive the instance of Parser it refers to, and may cause panics
+// if the parser is reset.
+type ParserError struct {
+ Highlight []byte
+ Message string
+ Key []string // optional
+}
+
+// Error is the implementation of the error interface.
+func (e *ParserError) Error() string {
+ return e.Message
+}
+
+// NewParserError is a convenience function to create a ParserError.
+//
+// Warning: Highlight needs to be a subslice of Parser.data, so only slices
+// returned by Parser.Raw are valid candidates.
+func NewParserError(highlight []byte, format string, args ...interface{}) error {
+ return &ParserError{
+ Highlight: highlight,
+ Message: fmt.Errorf(format, args...).Error(),
+ }
+}
+
+// Parser scans over a TOML-encoded document and generates an iterative AST.
+//
+// To prime the Parser, first reset it with the contents of a TOML document.
+// Then, process all top-level expressions sequentially. See Example.
+//
+// Don't forget to check Error() after you're done parsing.
+//
+// Each top-level expression needs to be fully processed before calling
+// NextExpression() again. Otherwise, calls to various Node methods may panic
+// if the parser has moved on to the next expression.
+//
+// For performance reasons, go-toml doesn't make a copy of the input bytes to
+// the parser. Make sure to copy all the bytes you need to outlive the slice
+// given to the parser.
+type Parser struct {
+ data []byte
+ builder builder
+ ref reference
+ left []byte
+ err error
+ first bool
+
+ KeepComments bool
+}
+
+// Data returns the slice provided to the last call to Reset.
+func (p *Parser) Data() []byte {
+ return p.data
+}
+
+// Range returns a range description that corresponds to a given slice of the
+// input. If the argument is not a subslice of the parser input, this function
+// panics.
+func (p *Parser) Range(b []byte) Range {
+ return Range{
+ Offset: uint32(danger.SubsliceOffset(p.data, b)),
+ Length: uint32(len(b)),
+ }
+}
+
+// Raw returns the slice corresponding to the bytes in the given range.
+func (p *Parser) Raw(raw Range) []byte {
+ return p.data[raw.Offset : raw.Offset+raw.Length]
+}
+
+// Reset brings the parser to its initial state for a given input. It wipes
+// and reuses internal storage to reduce allocations.
+func (p *Parser) Reset(b []byte) {
+ p.builder.Reset()
+ p.ref = invalidReference
+ p.data = b
+ p.left = b
+ p.err = nil
+ p.first = true
+}
+
+// NextExpression parses the next top-level expression. If an expression was
+// successfully parsed, it returns true. If the parser is at the end of the
+// document or an error occurred, it returns false.
+//
+// Retrieve the parsed expression with Expression().
+func (p *Parser) NextExpression() bool {
+ if len(p.left) == 0 || p.err != nil {
+ return false
+ }
+
+ p.builder.Reset()
+ p.ref = invalidReference
+
+ for {
+ if len(p.left) == 0 || p.err != nil {
+ return false
+ }
+
+ if !p.first {
+ p.left, p.err = p.parseNewline(p.left)
+ }
+
+ if len(p.left) == 0 || p.err != nil {
+ return false
+ }
+
+ p.ref, p.left, p.err = p.parseExpression(p.left)
+
+ if p.err != nil {
+ return false
+ }
+
+ p.first = false
+
+ if p.ref.Valid() {
+ return true
+ }
+ }
+}
+
+// Expression returns a pointer to the node representing the last successfully
+// parsed expression.
+func (p *Parser) Expression() *Node {
+ return p.builder.NodeAt(p.ref)
+}
+
+// Error returns any error that has occurred during parsing.
+func (p *Parser) Error() error {
+ return p.err
+}
+
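+// exampleParse is an illustrative sketch of the documented usage above (not
+// part of the upstream API): prime the parser with Reset, loop over
+// NextExpression, then check Error once at the end.
+func exampleParse(doc []byte, visit func(*Node)) error {
+	var p Parser
+	p.Reset(doc)
+	for p.NextExpression() {
+		visit(p.Expression())
+	}
+	return p.Error()
+}
+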
+// Position describes a position in the input.
+type Position struct {
+ // Number of bytes from the beginning of the input.
+ Offset int
+ // Line number, starting at 1.
+ Line int
+ // Column number, starting at 1.
+ Column int
+}
+
+// Shape describes the position of a range in the input.
+type Shape struct {
+ Start Position
+ End Position
+}
+
+func (p *Parser) position(b []byte) Position {
+ offset := danger.SubsliceOffset(p.data, b)
+
+ lead := p.data[:offset]
+
+ return Position{
+ Offset: offset,
+ Line: bytes.Count(lead, []byte{'\n'}) + 1,
+ Column: len(lead) - bytes.LastIndex(lead, []byte{'\n'}),
+ }
+}
+
+// Shape returns the shape of the given range in the input. Will
+// panic if the range is not a subslice of the input.
+func (p *Parser) Shape(r Range) Shape {
+ raw := p.Raw(r)
+ return Shape{
+ Start: p.position(raw),
+ End: p.position(raw[r.Length:]),
+ }
+}
+
+func (p *Parser) parseNewline(b []byte) ([]byte, error) {
+ if b[0] == '\n' {
+ return b[1:], nil
+ }
+
+ if b[0] == '\r' {
+ _, rest, err := scanWindowsNewline(b)
+ return rest, err
+ }
+
+ return nil, NewParserError(b[0:1], "expected newline but got %#U", b[0])
+}
+
+func (p *Parser) parseComment(b []byte) (reference, []byte, error) {
+ ref := invalidReference
+ data, rest, err := scanComment(b)
+ if p.KeepComments && err == nil {
+ ref = p.builder.Push(Node{
+ Kind: Comment,
+ Raw: p.Range(data),
+ Data: data,
+ })
+ }
+ return ref, rest, err
+}
+
+func (p *Parser) parseExpression(b []byte) (reference, []byte, error) {
+ // expression = ws [ comment ]
+ // expression =/ ws keyval ws [ comment ]
+ // expression =/ ws table ws [ comment ]
+ ref := invalidReference
+
+ b = p.parseWhitespace(b)
+
+ if len(b) == 0 {
+ return ref, b, nil
+ }
+
+ if b[0] == '#' {
+ ref, rest, err := p.parseComment(b)
+ return ref, rest, err
+ }
+
+ if b[0] == '\n' || b[0] == '\r' {
+ return ref, b, nil
+ }
+
+ var err error
+ if b[0] == '[' {
+ ref, b, err = p.parseTable(b)
+ } else {
+ ref, b, err = p.parseKeyval(b)
+ }
+
+ if err != nil {
+ return ref, nil, err
+ }
+
+ b = p.parseWhitespace(b)
+
+ if len(b) > 0 && b[0] == '#' {
+ cref, rest, err := p.parseComment(b)
+ if cref != invalidReference {
+ p.builder.Chain(ref, cref)
+ }
+ return ref, rest, err
+ }
+
+ return ref, b, nil
+}
+
+func (p *Parser) parseTable(b []byte) (reference, []byte, error) {
+ // table = std-table / array-table
+ if len(b) > 1 && b[1] == '[' {
+ return p.parseArrayTable(b)
+ }
+
+ return p.parseStdTable(b)
+}
+
+func (p *Parser) parseArrayTable(b []byte) (reference, []byte, error) {
+ // array-table = array-table-open key array-table-close
+ // array-table-open = %x5B.5B ws ; [[ Double left square bracket
+ // array-table-close = ws %x5D.5D ; ]] Double right square bracket
+ ref := p.builder.Push(Node{
+ Kind: ArrayTable,
+ })
+
+ b = b[2:]
+ b = p.parseWhitespace(b)
+
+ k, b, err := p.parseKey(b)
+ if err != nil {
+ return ref, nil, err
+ }
+
+ p.builder.AttachChild(ref, k)
+ b = p.parseWhitespace(b)
+
+ b, err = expect(']', b)
+ if err != nil {
+ return ref, nil, err
+ }
+
+ b, err = expect(']', b)
+
+ return ref, b, err
+}
+
+func (p *Parser) parseStdTable(b []byte) (reference, []byte, error) {
+ // std-table = std-table-open key std-table-close
+ // std-table-open = %x5B ws ; [ Left square bracket
+ // std-table-close = ws %x5D ; ] Right square bracket
+ ref := p.builder.Push(Node{
+ Kind: Table,
+ })
+
+ b = b[1:]
+ b = p.parseWhitespace(b)
+
+ key, b, err := p.parseKey(b)
+ if err != nil {
+ return ref, nil, err
+ }
+
+ p.builder.AttachChild(ref, key)
+
+ b = p.parseWhitespace(b)
+
+ b, err = expect(']', b)
+
+ return ref, b, err
+}
+
+func (p *Parser) parseKeyval(b []byte) (reference, []byte, error) {
+ // keyval = key keyval-sep val
+ ref := p.builder.Push(Node{
+ Kind: KeyValue,
+ })
+
+ key, b, err := p.parseKey(b)
+ if err != nil {
+ return invalidReference, nil, err
+ }
+
+ // keyval-sep = ws %x3D ws ; =
+
+ b = p.parseWhitespace(b)
+
+ if len(b) == 0 {
+ return invalidReference, nil, NewParserError(b, "expected = after a key, but the document ends there")
+ }
+
+ b, err = expect('=', b)
+ if err != nil {
+ return invalidReference, nil, err
+ }
+
+ b = p.parseWhitespace(b)
+
+ valRef, b, err := p.parseVal(b)
+ if err != nil {
+ return ref, b, err
+ }
+
+ p.builder.Chain(valRef, key)
+ p.builder.AttachChild(ref, valRef)
+
+ return ref, b, err
+}
+
+//nolint:cyclop,funlen
+func (p *Parser) parseVal(b []byte) (reference, []byte, error) {
+ // val = string / boolean / array / inline-table / date-time / float / integer
+ ref := invalidReference
+
+ if len(b) == 0 {
+ return ref, nil, NewParserError(b, "expected value, not eof")
+ }
+
+ var err error
+ c := b[0]
+
+ switch c {
+ case '"':
+ var raw []byte
+ var v []byte
+ if scanFollowsMultilineBasicStringDelimiter(b) {
+ raw, v, b, err = p.parseMultilineBasicString(b)
+ } else {
+ raw, v, b, err = p.parseBasicString(b)
+ }
+
+ if err == nil {
+ ref = p.builder.Push(Node{
+ Kind: String,
+ Raw: p.Range(raw),
+ Data: v,
+ })
+ }
+
+ return ref, b, err
+ case '\'':
+ var raw []byte
+ var v []byte
+ if scanFollowsMultilineLiteralStringDelimiter(b) {
+ raw, v, b, err = p.parseMultilineLiteralString(b)
+ } else {
+ raw, v, b, err = p.parseLiteralString(b)
+ }
+
+ if err == nil {
+ ref = p.builder.Push(Node{
+ Kind: String,
+ Raw: p.Range(raw),
+ Data: v,
+ })
+ }
+
+ return ref, b, err
+ case 't':
+ if !scanFollowsTrue(b) {
+ return ref, nil, NewParserError(atmost(b, 4), "expected 'true'")
+ }
+
+ ref = p.builder.Push(Node{
+ Kind: Bool,
+ Data: b[:4],
+ })
+
+ return ref, b[4:], nil
+ case 'f':
+ if !scanFollowsFalse(b) {
+ return ref, nil, NewParserError(atmost(b, 5), "expected 'false'")
+ }
+
+ ref = p.builder.Push(Node{
+ Kind: Bool,
+ Data: b[:5],
+ })
+
+ return ref, b[5:], nil
+ case '[':
+ return p.parseValArray(b)
+ case '{':
+ return p.parseInlineTable(b)
+ default:
+ return p.parseIntOrFloatOrDateTime(b)
+ }
+}
+
+func atmost(b []byte, n int) []byte {
+ if n >= len(b) {
+ return b
+ }
+
+ return b[:n]
+}
+
+func (p *Parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
+ v, rest, err := scanLiteralString(b)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ return v, v[1 : len(v)-1], rest, nil
+}
+
+func (p *Parser) parseInlineTable(b []byte) (reference, []byte, error) {
+ // inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close
+ // inline-table-open = %x7B ws ; {
+ // inline-table-close = ws %x7D ; }
+ // inline-table-sep = ws %x2C ws ; , Comma
+ // inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ]
+ parent := p.builder.Push(Node{
+ Kind: InlineTable,
+ Raw: p.Range(b[:1]),
+ })
+
+ first := true
+
+ var child reference
+
+ b = b[1:]
+
+ var err error
+
+ for len(b) > 0 {
+ previousB := b
+ b = p.parseWhitespace(b)
+
+ if len(b) == 0 {
+ return parent, nil, NewParserError(previousB[:1], "inline table is incomplete")
+ }
+
+ if b[0] == '}' {
+ break
+ }
+
+ if !first {
+ b, err = expect(',', b)
+ if err != nil {
+ return parent, nil, err
+ }
+ b = p.parseWhitespace(b)
+ }
+
+ var kv reference
+
+ kv, b, err = p.parseKeyval(b)
+ if err != nil {
+ return parent, nil, err
+ }
+
+ if first {
+ p.builder.AttachChild(parent, kv)
+ } else {
+ p.builder.Chain(child, kv)
+ }
+ child = kv
+
+ first = false
+ }
+
+ rest, err := expect('}', b)
+
+ return parent, rest, err
+}
+
+//nolint:funlen,cyclop
+func (p *Parser) parseValArray(b []byte) (reference, []byte, error) {
+ // array = array-open [ array-values ] ws-comment-newline array-close
+ // array-open = %x5B ; [
+ // array-close = %x5D ; ]
+ // array-values = ws-comment-newline val ws-comment-newline array-sep array-values
+ // array-values =/ ws-comment-newline val ws-comment-newline [ array-sep ]
+ // array-sep = %x2C ; , Comma
+ // ws-comment-newline = *( wschar / [ comment ] newline )
+ arrayStart := b
+ b = b[1:]
+
+ parent := p.builder.Push(Node{
+ Kind: Array,
+ })
+
+ // First indicates whether the parser is looking for the first element
+ // (non-comment) of the array.
+ first := true
+
+ lastChild := invalidReference
+
+ addChild := func(valueRef reference) {
+ if lastChild == invalidReference {
+ p.builder.AttachChild(parent, valueRef)
+ } else {
+ p.builder.Chain(lastChild, valueRef)
+ }
+ lastChild = valueRef
+ }
+
+ var err error
+ for len(b) > 0 {
+ cref := invalidReference
+ cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
+ if err != nil {
+ return parent, nil, err
+ }
+
+ if cref != invalidReference {
+ addChild(cref)
+ }
+
+ if len(b) == 0 {
+ return parent, nil, NewParserError(arrayStart[:1], "array is incomplete")
+ }
+
+ if b[0] == ']' {
+ break
+ }
+
+ if b[0] == ',' {
+ if first {
+ return parent, nil, NewParserError(b[0:1], "array cannot start with comma")
+ }
+ b = b[1:]
+
+ cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
+ if err != nil {
+ return parent, nil, err
+ }
+ if cref != invalidReference {
+ addChild(cref)
+ }
+ } else if !first {
+ return parent, nil, NewParserError(b[0:1], "array elements must be separated by commas")
+ }
+
+ // TOML allows trailing commas in arrays.
+ if len(b) > 0 && b[0] == ']' {
+ break
+ }
+
+ var valueRef reference
+ valueRef, b, err = p.parseVal(b)
+ if err != nil {
+ return parent, nil, err
+ }
+
+ addChild(valueRef)
+
+ cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
+ if err != nil {
+ return parent, nil, err
+ }
+ if cref != invalidReference {
+ addChild(cref)
+ }
+
+ first = false
+ }
+
+ rest, err := expect(']', b)
+
+ return parent, rest, err
+}
+
+func (p *Parser) parseOptionalWhitespaceCommentNewline(b []byte) (reference, []byte, error) {
+ rootCommentRef := invalidReference
+ latestCommentRef := invalidReference
+
+ addComment := func(ref reference) {
+ if rootCommentRef == invalidReference {
+ rootCommentRef = ref
+ } else if latestCommentRef == invalidReference {
+ p.builder.AttachChild(rootCommentRef, ref)
+ latestCommentRef = ref
+ } else {
+ p.builder.Chain(latestCommentRef, ref)
+ latestCommentRef = ref
+ }
+ }
+
+ for len(b) > 0 {
+ var err error
+ b = p.parseWhitespace(b)
+
+ if len(b) > 0 && b[0] == '#' {
+ var ref reference
+ ref, b, err = p.parseComment(b)
+ if err != nil {
+ return invalidReference, nil, err
+ }
+ if ref != invalidReference {
+ addComment(ref)
+ }
+ }
+
+ if len(b) == 0 {
+ break
+ }
+
+ if b[0] == '\n' || b[0] == '\r' {
+ b, err = p.parseNewline(b)
+ if err != nil {
+ return invalidReference, nil, err
+ }
+ } else {
+ break
+ }
+ }
+
+ return rootCommentRef, b, nil
+}
+
+func (p *Parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
+ token, rest, err := scanMultilineLiteralString(b)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ i := 3
+
+ // skip the immediate new line
+ if token[i] == '\n' {
+ i++
+ } else if token[i] == '\r' && token[i+1] == '\n' {
+ i += 2
+ }
+
+ return token, token[i : len(token)-3], rest, err
+}
+
+//nolint:funlen,gocognit,cyclop
+func (p *Parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) {
+ // ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
+ // ml-basic-string-delim
+ // ml-basic-string-delim = 3quotation-mark
+ // ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ]
+ //
+ // mlb-content = mlb-char / newline / mlb-escaped-nl
+ // mlb-char = mlb-unescaped / escaped
+ // mlb-quotes = 1*2quotation-mark
+ // mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+ // mlb-escaped-nl = escape ws newline *( wschar / newline )
+ token, escaped, rest, err := scanMultilineBasicString(b)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ i := 3
+
+ // skip the immediate new line
+ if token[i] == '\n' {
+ i++
+ } else if token[i] == '\r' && token[i+1] == '\n' {
+ i += 2
+ }
+
+ // fast path
+ startIdx := i
+ endIdx := len(token) - len(`"""`)
+
+ if !escaped {
+ str := token[startIdx:endIdx]
+ verr := characters.Utf8TomlValidAlreadyEscaped(str)
+ if verr.Zero() {
+ return token, str, rest, nil
+ }
+ return nil, nil, nil, NewParserError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
+ }
+
+ var builder bytes.Buffer
+
+ // The scanner ensures that the token starts and ends with quotes and that
+ // escapes are balanced.
+ for i < len(token)-3 {
+ c := token[i]
+
+ //nolint:nestif
+ if c == '\\' {
+ // When the last non-whitespace character on a line is an unescaped \,
+ // it will be trimmed along with all whitespace (including newlines) up
+ // to the next non-whitespace character or closing delimiter.
+
+ isLastNonWhitespaceOnLine := false
+ j := 1
+ findEOLLoop:
+ for ; j < len(token)-3-i; j++ {
+ switch token[i+j] {
+ case ' ', '\t':
+ continue
+ case '\r':
+ if token[i+j+1] == '\n' {
+ continue
+ }
+ case '\n':
+ isLastNonWhitespaceOnLine = true
+ }
+ break findEOLLoop
+ }
+ if isLastNonWhitespaceOnLine {
+ i += j
+ for ; i < len(token)-3; i++ {
+ c := token[i]
+ if !(c == '\n' || c == '\r' || c == ' ' || c == '\t') {
+ i--
+ break
+ }
+ }
+ i++
+ continue
+ }
+
+ // handle escaping
+ i++
+ c = token[i]
+
+ switch c {
+ case '"', '\\':
+ builder.WriteByte(c)
+ case 'b':
+ builder.WriteByte('\b')
+ case 'f':
+ builder.WriteByte('\f')
+ case 'n':
+ builder.WriteByte('\n')
+ case 'r':
+ builder.WriteByte('\r')
+ case 't':
+ builder.WriteByte('\t')
+ case 'e':
+ builder.WriteByte(0x1B)
+ case 'u':
+ x, err := hexToRune(atmost(token[i+1:], 4), 4)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+ builder.WriteRune(x)
+ i += 4
+ case 'U':
+ x, err := hexToRune(atmost(token[i+1:], 8), 8)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ builder.WriteRune(x)
+ i += 8
+ default:
+ return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c)
+ }
+ i++
+ } else {
+ size := characters.Utf8ValidNext(token[i:])
+ if size == 0 {
+ return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c)
+ }
+ builder.Write(token[i : i+size])
+ i += size
+ }
+ }
+
+ return token, builder.Bytes(), rest, nil
+}
+
+func (p *Parser) parseKey(b []byte) (reference, []byte, error) {
+ // key = simple-key / dotted-key
+ // simple-key = quoted-key / unquoted-key
+ //
+ // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
+ // quoted-key = basic-string / literal-string
+ // dotted-key = simple-key 1*( dot-sep simple-key )
+ //
+ // dot-sep = ws %x2E ws ; . Period
+ raw, key, b, err := p.parseSimpleKey(b)
+ if err != nil {
+ return invalidReference, nil, err
+ }
+
+ ref := p.builder.Push(Node{
+ Kind: Key,
+ Raw: p.Range(raw),
+ Data: key,
+ })
+
+ for {
+ b = p.parseWhitespace(b)
+ if len(b) > 0 && b[0] == '.' {
+ b = p.parseWhitespace(b[1:])
+
+ raw, key, b, err = p.parseSimpleKey(b)
+ if err != nil {
+ return ref, nil, err
+ }
+
+ p.builder.PushAndChain(Node{
+ Kind: Key,
+ Raw: p.Range(raw),
+ Data: key,
+ })
+ } else {
+ break
+ }
+ }
+
+ return ref, b, nil
+}
+
+func (p *Parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
+ if len(b) == 0 {
+ return nil, nil, nil, NewParserError(b, "expected key but found none")
+ }
+
+ // simple-key = quoted-key / unquoted-key
+ // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
+ // quoted-key = basic-string / literal-string
+ switch {
+ case b[0] == '\'':
+ return p.parseLiteralString(b)
+ case b[0] == '"':
+ return p.parseBasicString(b)
+ case isUnquotedKeyChar(b[0]):
+ key, rest = scanUnquotedKey(b)
+ return key, key, rest, nil
+ default:
+ return nil, nil, nil, NewParserError(b[0:1], "invalid character at start of key: %c", b[0])
+ }
+}
+
+//nolint:funlen,cyclop
+func (p *Parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
+ // basic-string = quotation-mark *basic-char quotation-mark
+ // quotation-mark = %x22 ; "
+ // basic-char = basic-unescaped / escaped
+ // basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+ // escaped = escape escape-seq-char
+ // escape-seq-char = %x22 ; " quotation mark U+0022
+ // escape-seq-char =/ %x5C ; \ reverse solidus U+005C
+ // escape-seq-char =/ %x62 ; b backspace U+0008
+ // escape-seq-char =/ %x66 ; f form feed U+000C
+ // escape-seq-char =/ %x6E ; n line feed U+000A
+ // escape-seq-char =/ %x72 ; r carriage return U+000D
+ // escape-seq-char =/ %x74 ; t tab U+0009
+ // escape-seq-char =/ %x75 4HEXDIG ; uXXXX U+XXXX
+ // escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX
+ token, escaped, rest, err := scanBasicString(b)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ startIdx := len(`"`)
+ endIdx := len(token) - len(`"`)
+
+ // Fast path. If there is no escape sequence, the string should just be
+	// a UTF-8 encoded string, which is the same as Go. In that case,
+ // validate the string and return a direct reference to the buffer.
+ if !escaped {
+ str := token[startIdx:endIdx]
+ verr := characters.Utf8TomlValidAlreadyEscaped(str)
+ if verr.Zero() {
+ return token, str, rest, nil
+ }
+ return nil, nil, nil, NewParserError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
+ }
+
+ i := startIdx
+
+ var builder bytes.Buffer
+
+ // The scanner ensures that the token starts and ends with quotes and that
+ // escapes are balanced.
+ for i < len(token)-1 {
+ c := token[i]
+ if c == '\\' {
+ i++
+ c = token[i]
+
+ switch c {
+ case '"', '\\':
+ builder.WriteByte(c)
+ case 'b':
+ builder.WriteByte('\b')
+ case 'f':
+ builder.WriteByte('\f')
+ case 'n':
+ builder.WriteByte('\n')
+ case 'r':
+ builder.WriteByte('\r')
+ case 't':
+ builder.WriteByte('\t')
+ case 'e':
+ builder.WriteByte(0x1B)
+ case 'u':
+ x, err := hexToRune(token[i+1:len(token)-1], 4)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ builder.WriteRune(x)
+ i += 4
+ case 'U':
+ x, err := hexToRune(token[i+1:len(token)-1], 8)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ builder.WriteRune(x)
+ i += 8
+ default:
+ return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c)
+ }
+ i++
+ } else {
+ size := characters.Utf8ValidNext(token[i:])
+ if size == 0 {
+ return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c)
+ }
+ builder.Write(token[i : i+size])
+ i += size
+ }
+ }
+
+ return token, builder.Bytes(), rest, nil
+}
+
+func hexToRune(b []byte, length int) (rune, error) {
+ if len(b) < length {
+ return -1, NewParserError(b, "unicode point needs %d character, not %d", length, len(b))
+ }
+ b = b[:length]
+
+ var r uint32
+ for i, c := range b {
+ d := uint32(0)
+ switch {
+ case '0' <= c && c <= '9':
+ d = uint32(c - '0')
+ case 'a' <= c && c <= 'f':
+ d = uint32(c - 'a' + 10)
+ case 'A' <= c && c <= 'F':
+ d = uint32(c - 'A' + 10)
+ default:
+ return -1, NewParserError(b[i:i+1], "non-hex character")
+ }
+ r = r*16 + d
+ }
+
+ if r > unicode.MaxRune || 0xD800 <= r && r < 0xE000 {
+ return -1, NewParserError(b, "escape sequence is invalid Unicode code point")
+ }
+
+ return rune(r), nil
+}
+
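+// exampleHexToRune is an illustrative check (hypothetical, not used by the
+// parser): decoding the 4-digit escape payload "00E9" yields U+00E9 'é'.
+// hexToRune rejects surrogates and out-of-range code points as shown above.
+func exampleHexToRune() (rune, error) {
+	return hexToRune([]byte("00E9"), 4)
+}
+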
+func (p *Parser) parseWhitespace(b []byte) []byte {
+ // ws = *wschar
+ // wschar = %x20 ; Space
+ // wschar =/ %x09 ; Horizontal tab
+ _, rest := scanWhitespace(b)
+
+ return rest
+}
+
+//nolint:cyclop
+func (p *Parser) parseIntOrFloatOrDateTime(b []byte) (reference, []byte, error) {
+ switch b[0] {
+ case 'i':
+ if !scanFollowsInf(b) {
+ return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'inf'")
+ }
+
+ return p.builder.Push(Node{
+ Kind: Float,
+ Data: b[:3],
+ Raw: p.Range(b[:3]),
+ }), b[3:], nil
+ case 'n':
+ if !scanFollowsNan(b) {
+ return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'nan'")
+ }
+
+ return p.builder.Push(Node{
+ Kind: Float,
+ Data: b[:3],
+ Raw: p.Range(b[:3]),
+ }), b[3:], nil
+ case '+', '-':
+ return p.scanIntOrFloat(b)
+ }
+
+ if len(b) < 3 {
+ return p.scanIntOrFloat(b)
+ }
+
+ s := 5
+ if len(b) < s {
+ s = len(b)
+ }
+
+ for idx, c := range b[:s] {
+ if isDigit(c) {
+ continue
+ }
+
+ if idx == 2 && c == ':' || (idx == 4 && c == '-') {
+ return p.scanDateTime(b)
+ }
+
+ break
+ }
+
+ return p.scanIntOrFloat(b)
+}
+
+func (p *Parser) scanDateTime(b []byte) (reference, []byte, error) {
+ // scans for contiguous characters in [0-9T:Z.+-], and up to one space if
+ // followed by a digit.
+ hasDate := false
+ hasTime := false
+ hasTz := false
+ seenSpace := false
+
+ i := 0
+byteLoop:
+ for ; i < len(b); i++ {
+ c := b[i]
+
+ switch {
+ case isDigit(c):
+ case c == '-':
+ hasDate = true
+ const minOffsetOfTz = 8
+ if i >= minOffsetOfTz {
+ hasTz = true
+ }
+ case c == 'T' || c == 't' || c == ':' || c == '.':
+ hasTime = true
+ case c == '+' || c == '-' || c == 'Z' || c == 'z':
+ hasTz = true
+ case c == ' ':
+ if !seenSpace && i+1 < len(b) && isDigit(b[i+1]) {
+ i += 2
+ // Avoid reaching past the end of the document in case the time
+ // is malformed. See TestIssue585.
+ if i >= len(b) {
+ i--
+ }
+ seenSpace = true
+ hasTime = true
+ } else {
+ break byteLoop
+ }
+ default:
+ break byteLoop
+ }
+ }
+
+ var kind Kind
+
+ if hasTime {
+ if hasDate {
+ if hasTz {
+ kind = DateTime
+ } else {
+ kind = LocalDateTime
+ }
+ } else {
+ kind = LocalTime
+ }
+ } else {
+ kind = LocalDate
+ }
+
+ return p.builder.Push(Node{
+ Kind: kind,
+ Data: b[:i],
+ }), b[i:], nil
+}
+
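+// Hedged examples of the classification above (inputs are illustrative):
+//
+//	1979-05-27            -> LocalDate     (digits and '-' only)
+//	07:32:00              -> LocalTime     (':' sets hasTime, no date)
+//	1979-05-27T07:32:00   -> LocalDateTime (date and time, no offset)
+//	1979-05-27T07:32:00Z  -> DateTime      ('Z' sets hasTz)
+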
+//nolint:funlen,gocognit,cyclop
+func (p *Parser) scanIntOrFloat(b []byte) (reference, []byte, error) {
+ i := 0
+
+ if len(b) > 2 && b[0] == '0' && b[1] != '.' && b[1] != 'e' && b[1] != 'E' {
+ var isValidRune validRuneFn
+
+ switch b[1] {
+ case 'x':
+ isValidRune = isValidHexRune
+ case 'o':
+ isValidRune = isValidOctalRune
+ case 'b':
+ isValidRune = isValidBinaryRune
+ default:
+ i++
+ }
+
+ if isValidRune != nil {
+ i += 2
+ for ; i < len(b); i++ {
+ if !isValidRune(b[i]) {
+ break
+ }
+ }
+ }
+
+ return p.builder.Push(Node{
+ Kind: Integer,
+ Data: b[:i],
+ Raw: p.Range(b[:i]),
+ }), b[i:], nil
+ }
+
+ isFloat := false
+
+ for ; i < len(b); i++ {
+ c := b[i]
+
+ if c >= '0' && c <= '9' || c == '+' || c == '-' || c == '_' {
+ continue
+ }
+
+ if c == '.' || c == 'e' || c == 'E' {
+ isFloat = true
+
+ continue
+ }
+
+ if c == 'i' {
+ if scanFollowsInf(b[i:]) {
+ return p.builder.Push(Node{
+ Kind: Float,
+ Data: b[:i+3],
+ Raw: p.Range(b[:i+3]),
+ }), b[i+3:], nil
+ }
+
+ return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'i' while scanning for a number")
+ }
+
+ if c == 'n' {
+ if scanFollowsNan(b[i:]) {
+ return p.builder.Push(Node{
+ Kind: Float,
+ Data: b[:i+3],
+ Raw: p.Range(b[:i+3]),
+ }), b[i+3:], nil
+ }
+
+ return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'n' while scanning for a number")
+ }
+
+ break
+ }
+
+ if i == 0 {
+ return invalidReference, b, NewParserError(b, "incomplete number")
+ }
+
+ kind := Integer
+
+ if isFloat {
+ kind = Float
+ }
+
+ return p.builder.Push(Node{
+ Kind: kind,
+ Data: b[:i],
+ Raw: p.Range(b[:i]),
+ }), b[i:], nil
+}
+
+func isDigit(r byte) bool {
+ return r >= '0' && r <= '9'
+}
+
+type validRuneFn func(r byte) bool
+
+func isValidHexRune(r byte) bool {
+ return r >= 'a' && r <= 'f' ||
+ r >= 'A' && r <= 'F' ||
+ r >= '0' && r <= '9' ||
+ r == '_'
+}
+
+func isValidOctalRune(r byte) bool {
+ return r >= '0' && r <= '7' || r == '_'
+}
+
+func isValidBinaryRune(r byte) bool {
+ return r == '0' || r == '1' || r == '_'
+}
+
+func expect(x byte, b []byte) ([]byte, error) {
+ if len(b) == 0 {
+ return nil, NewParserError(b, "expected character %c but the document ended here", x)
+ }
+
+ if b[0] != x {
+ return nil, NewParserError(b[0:1], "expected character %c", x)
+ }
+
+ return b[1:], nil
+}
diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/scanner.go b/vendor/github.com/pelletier/go-toml/v2/unstable/scanner.go
new file mode 100644
index 0000000..0512181
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/unstable/scanner.go
@@ -0,0 +1,270 @@
+package unstable
+
+import "github.com/pelletier/go-toml/v2/internal/characters"
+
+func scanFollows(b []byte, pattern string) bool {
+ n := len(pattern)
+
+ return len(b) >= n && string(b[:n]) == pattern
+}
+
+func scanFollowsMultilineBasicStringDelimiter(b []byte) bool {
+ return scanFollows(b, `"""`)
+}
+
+func scanFollowsMultilineLiteralStringDelimiter(b []byte) bool {
+ return scanFollows(b, `'''`)
+}
+
+func scanFollowsTrue(b []byte) bool {
+ return scanFollows(b, `true`)
+}
+
+func scanFollowsFalse(b []byte) bool {
+ return scanFollows(b, `false`)
+}
+
+func scanFollowsInf(b []byte) bool {
+ return scanFollows(b, `inf`)
+}
+
+func scanFollowsNan(b []byte) bool {
+ return scanFollows(b, `nan`)
+}
+
+func scanUnquotedKey(b []byte) ([]byte, []byte) {
+ // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
+ for i := 0; i < len(b); i++ {
+ if !isUnquotedKeyChar(b[i]) {
+ return b[:i], b[i:]
+ }
+ }
+
+ return b, b[len(b):]
+}
+
+func isUnquotedKeyChar(r byte) bool {
+ return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '-' || r == '_'
+}
+
+func scanLiteralString(b []byte) ([]byte, []byte, error) {
+ // literal-string = apostrophe *literal-char apostrophe
+ // apostrophe = %x27 ; ' apostrophe
+ // literal-char = %x09 / %x20-26 / %x28-7E / non-ascii
+ for i := 1; i < len(b); {
+ switch b[i] {
+ case '\'':
+ return b[:i+1], b[i+1:], nil
+ case '\n', '\r':
+ return nil, nil, NewParserError(b[i:i+1], "literal strings cannot have new lines")
+ }
+ size := characters.Utf8ValidNext(b[i:])
+ if size == 0 {
+ return nil, nil, NewParserError(b[i:i+1], "invalid character")
+ }
+ i += size
+ }
+
+ return nil, nil, NewParserError(b[len(b):], "unterminated literal string")
+}
+
+func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
+ // ml-literal-string = ml-literal-string-delim [ newline ] ml-literal-body
+ // ml-literal-string-delim
+ // ml-literal-string-delim = 3apostrophe
+ // ml-literal-body = *mll-content *( mll-quotes 1*mll-content ) [ mll-quotes ]
+ //
+ // mll-content = mll-char / newline
+ // mll-char = %x09 / %x20-26 / %x28-7E / non-ascii
+ // mll-quotes = 1*2apostrophe
+ for i := 3; i < len(b); {
+ switch b[i] {
+ case '\'':
+ if scanFollowsMultilineLiteralStringDelimiter(b[i:]) {
+ i += 3
+
+ // At this point we have found 3 apostrophes, and i is
+ // the index of the byte after the third one. The
+ // scanner needs to be eager, because up to 2 extra
+ // apostrophes can still be accepted at the end of the
+ // string.
+
+ if i >= len(b) || b[i] != '\'' {
+ return b[:i], b[i:], nil
+ }
+ i++
+
+ if i >= len(b) || b[i] != '\'' {
+ return b[:i], b[i:], nil
+ }
+ i++
+
+ if i < len(b) && b[i] == '\'' {
+ return nil, nil, NewParserError(b[i-3:i+1], "''' not allowed in multiline literal string")
+ }
+
+ return b[:i], b[i:], nil
+ }
+ case '\r':
+ if len(b) < i+2 {
+ return nil, nil, NewParserError(b[len(b):], `need a \n after \r`)
+ }
+ if b[i+1] != '\n' {
+ return nil, nil, NewParserError(b[i:i+2], `need a \n after \r`)
+ }
+ i += 2 // skip the \n
+ continue
+ }
+ size := characters.Utf8ValidNext(b[i:])
+ if size == 0 {
+ return nil, nil, NewParserError(b[i:i+1], "invalid character")
+ }
+ i += size
+ }
+
+ return nil, nil, NewParserError(b[len(b):], `multiline literal string not terminated by '''`)
+}
+
+func scanWindowsNewline(b []byte) ([]byte, []byte, error) {
+ const lenCRLF = 2
+ if len(b) < lenCRLF {
+ return nil, nil, NewParserError(b, "windows new line expected")
+ }
+
+ if b[1] != '\n' {
+ return nil, nil, NewParserError(b, `windows new line should be \r\n`)
+ }
+
+ return b[:lenCRLF], b[lenCRLF:], nil
+}
+
+func scanWhitespace(b []byte) ([]byte, []byte) {
+ for i := 0; i < len(b); i++ {
+ switch b[i] {
+ case ' ', '\t':
+ continue
+ default:
+ return b[:i], b[i:]
+ }
+ }
+
+ return b, b[len(b):]
+}
+
+func scanComment(b []byte) ([]byte, []byte, error) {
+ // comment-start-symbol = %x23 ; #
+ // non-ascii = %x80-D7FF / %xE000-10FFFF
+ // non-eol = %x09 / %x20-7F / non-ascii
+ //
+ // comment = comment-start-symbol *non-eol
+
+ for i := 1; i < len(b); {
+ if b[i] == '\n' {
+ return b[:i], b[i:], nil
+ }
+ if b[i] == '\r' {
+ if i+1 < len(b) && b[i+1] == '\n' {
+ return b[:i+1], b[i+1:], nil
+ }
+ return nil, nil, NewParserError(b[i:i+1], "invalid character in comment")
+ }
+ size := characters.Utf8ValidNext(b[i:])
+ if size == 0 {
+ return nil, nil, NewParserError(b[i:i+1], "invalid character in comment")
+ }
+
+ i += size
+ }
+
+ return b, b[len(b):], nil
+}
+
+func scanBasicString(b []byte) ([]byte, bool, []byte, error) {
+ // basic-string = quotation-mark *basic-char quotation-mark
+ // quotation-mark = %x22 ; "
+ // basic-char = basic-unescaped / escaped
+ // basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+ // escaped = escape escape-seq-char
+ escaped := false
+ i := 1
+
+ for ; i < len(b); i++ {
+ switch b[i] {
+ case '"':
+ return b[:i+1], escaped, b[i+1:], nil
+ case '\n', '\r':
+ return nil, escaped, nil, NewParserError(b[i:i+1], "basic strings cannot have new lines")
+ case '\\':
+ if len(b) < i+2 {
+ return nil, escaped, nil, NewParserError(b[i:i+1], "need a character after \\")
+ }
+ escaped = true
+ i++ // skip the next character
+ }
+ }
+
+ return nil, escaped, nil, NewParserError(b[len(b):], `basic string not terminated by "`)
+}
+
+func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
+ // ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
+ // ml-basic-string-delim
+ // ml-basic-string-delim = 3quotation-mark
+ // ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ]
+ //
+ // mlb-content = mlb-char / newline / mlb-escaped-nl
+ // mlb-char = mlb-unescaped / escaped
+ // mlb-quotes = 1*2quotation-mark
+ // mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
+ // mlb-escaped-nl = escape ws newline *( wschar / newline )
+
+ escaped := false
+ i := 3
+
+ for ; i < len(b); i++ {
+ switch b[i] {
+ case '"':
+ if scanFollowsMultilineBasicStringDelimiter(b[i:]) {
+ i += 3
+
+ // At this point we have found 3 quotation marks, and
+ // i is the index of the byte after the third one. The
+ // scanner needs to be eager, because up to 2 extra
+ // quotation marks can still be accepted at the end of
+ // the string.
+
+ if i >= len(b) || b[i] != '"' {
+ return b[:i], escaped, b[i:], nil
+ }
+ i++
+
+ if i >= len(b) || b[i] != '"' {
+ return b[:i], escaped, b[i:], nil
+ }
+ i++
+
+ if i < len(b) && b[i] == '"' {
+ return nil, escaped, nil, NewParserError(b[i-3:i+1], `""" not allowed in multiline basic string`)
+ }
+
+ return b[:i], escaped, b[i:], nil
+ }
+ case '\\':
+ if len(b) < i+2 {
+ return nil, escaped, nil, NewParserError(b[len(b):], "need a character after \\")
+ }
+ escaped = true
+ i++ // skip the next character
+ case '\r':
+ if len(b) < i+2 {
+ return nil, escaped, nil, NewParserError(b[len(b):], `need a \n after \r`)
+ }
+ if b[i+1] != '\n' {
+ return nil, escaped, nil, NewParserError(b[i:i+2], `need a \n after \r`)
+ }
+ i++ // skip the \n
+ }
+ }
+
+ return nil, escaped, nil, NewParserError(b[len(b):], `multiline basic string not terminated by """`)
+}
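
The eager delimiter scan in scanner.go above is the subtle part of this file: after the closing triple quote, up to two more quote characters still belong to the string body. A minimal standalone sketch of that idea (a hypothetical helper, not part of the go-toml API) might look like:

```go
package main

import "fmt"

// scanCloser reports how many bytes of b form a complete triple-quoted
// string, scanning eagerly: up to two extra delimiter bytes after the
// closing triple still belong to the body (so `'''a''''` has body "a'").
// Illustration only; error handling is omitted.
func scanCloser(b []byte, delim byte) (end int, ok bool) {
	for i := 3; i < len(b); i++ {
		if b[i] != delim {
			continue
		}
		if i+2 < len(b) && b[i+1] == delim && b[i+2] == delim {
			i += 3 // move past the closing triple
			// Eagerly absorb up to two trailing delimiters.
			for n := 0; n < 2 && i < len(b) && b[i] == delim; n++ {
				i++
			}
			return i, true
		}
	}
	return 0, false
}

func main() {
	end, ok := scanCloser([]byte(`'''a''''`), '\'')
	fmt.Println(end, ok) // 8 true
}
```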
diff --git a/vendor/github.com/pkg/errors/.gitignore b/vendor/github.com/pkg/errors/.gitignore
new file mode 100644
index 0000000..daf913b
--- /dev/null
+++ b/vendor/github.com/pkg/errors/.gitignore
@@ -0,0 +1,24 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
diff --git a/vendor/github.com/pkg/errors/.travis.yml b/vendor/github.com/pkg/errors/.travis.yml
new file mode 100644
index 0000000..9159de0
--- /dev/null
+++ b/vendor/github.com/pkg/errors/.travis.yml
@@ -0,0 +1,10 @@
+language: go
+go_import_path: github.com/pkg/errors
+go:
+ - 1.11.x
+ - 1.12.x
+ - 1.13.x
+ - tip
+
+script:
+ - make check
diff --git a/vendor/github.com/pkg/errors/LICENSE b/vendor/github.com/pkg/errors/LICENSE
new file mode 100644
index 0000000..835ba3e
--- /dev/null
+++ b/vendor/github.com/pkg/errors/LICENSE
@@ -0,0 +1,23 @@
+Copyright (c) 2015, Dave Cheney
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/pkg/errors/Makefile b/vendor/github.com/pkg/errors/Makefile
new file mode 100644
index 0000000..ce9d7cd
--- /dev/null
+++ b/vendor/github.com/pkg/errors/Makefile
@@ -0,0 +1,44 @@
+PKGS := github.com/pkg/errors
+SRCDIRS := $(shell go list -f '{{.Dir}}' $(PKGS))
+GO := go
+
+check: test vet gofmt misspell unconvert staticcheck ineffassign unparam
+
+test:
+ $(GO) test $(PKGS)
+
+vet: | test
+ $(GO) vet $(PKGS)
+
+staticcheck:
+ $(GO) get honnef.co/go/tools/cmd/staticcheck
+ staticcheck -checks all $(PKGS)
+
+misspell:
+ $(GO) get github.com/client9/misspell/cmd/misspell
+ misspell \
+ -locale GB \
+ -error \
+ *.md *.go
+
+unconvert:
+ $(GO) get github.com/mdempsky/unconvert
+ unconvert -v $(PKGS)
+
+ineffassign:
+ $(GO) get github.com/gordonklaus/ineffassign
+ find $(SRCDIRS) -name '*.go' | xargs ineffassign
+
+pedantic: check errcheck
+
+unparam:
+ $(GO) get mvdan.cc/unparam
+ unparam ./...
+
+errcheck:
+ $(GO) get github.com/kisielk/errcheck
+ errcheck $(PKGS)
+
+gofmt:
+ @echo Checking code is gofmted
+ @test -z "$(shell gofmt -s -l -d -e $(SRCDIRS) | tee /dev/stderr)"
diff --git a/vendor/github.com/pkg/errors/README.md b/vendor/github.com/pkg/errors/README.md
new file mode 100644
index 0000000..54dfdcb
--- /dev/null
+++ b/vendor/github.com/pkg/errors/README.md
@@ -0,0 +1,59 @@
+# errors [![Travis-CI](https://travis-ci.org/pkg/errors.svg)](https://travis-ci.org/pkg/errors) [![AppVeyor](https://ci.appveyor.com/api/projects/status/b98mptawhudj53ep/branch/master?svg=true)](https://ci.appveyor.com/project/davecheney/errors/branch/master) [![GoDoc](https://godoc.org/github.com/pkg/errors?status.svg)](http://godoc.org/github.com/pkg/errors) [![Report card](https://goreportcard.com/badge/github.com/pkg/errors)](https://goreportcard.com/report/github.com/pkg/errors) [![Sourcegraph](https://sourcegraph.com/github.com/pkg/errors/-/badge.svg)](https://sourcegraph.com/github.com/pkg/errors?badge)
+
+Package errors provides simple error handling primitives.
+
+`go get github.com/pkg/errors`
+
+The traditional error handling idiom in Go is roughly akin to
+```go
+if err != nil {
+ return err
+}
+```
+which applied recursively up the call stack results in error reports without context or debugging information. The errors package allows programmers to add context to the failure path in their code in a way that does not destroy the original value of the error.
+
+## Adding context to an error
+
+The errors.Wrap function returns a new error that adds context to the original error. For example
+```go
+_, err := ioutil.ReadAll(r)
+if err != nil {
+ return errors.Wrap(err, "read failed")
+}
+```
+## Retrieving the cause of an error
+
+Using `errors.Wrap` constructs a stack of errors, adding context to the preceding error. Depending on the nature of the error it may be necessary to reverse the operation of errors.Wrap to retrieve the original error for inspection. Any error value which implements this interface can be inspected by `errors.Cause`.
+```go
+type causer interface {
+ Cause() error
+}
+```
+`errors.Cause` will recursively retrieve the topmost error which does not implement `causer`, which is assumed to be the original cause. For example:
+```go
+switch err := errors.Cause(err).(type) {
+case *MyError:
+ // handle specifically
+default:
+ // unknown error
+}
+```
+
+[Read the package documentation for more information](https://godoc.org/github.com/pkg/errors).
+
+## Roadmap
+
+With the upcoming [Go2 error proposals](https://go.googlesource.com/proposal/+/master/design/go2draft.md) this package is moving into maintenance mode. The roadmap for a 1.0 release is as follows:
+
+- 0.9. Remove pre Go 1.9 and Go 1.10 support, address outstanding pull requests (if possible)
+- 1.0. Final release.
+
+## Contributing
+
+Because of the Go2 errors changes, this package is not accepting proposals for new functionality. With that said, we welcome pull requests, bug fixes and issue reports.
+
+Before sending a PR, please discuss your change by raising an issue.
+
+## License
+
+BSD-2-Clause
diff --git a/vendor/github.com/pkg/errors/appveyor.yml b/vendor/github.com/pkg/errors/appveyor.yml
new file mode 100644
index 0000000..a932ead
--- /dev/null
+++ b/vendor/github.com/pkg/errors/appveyor.yml
@@ -0,0 +1,32 @@
+version: build-{build}.{branch}
+
+clone_folder: C:\gopath\src\github.com\pkg\errors
+shallow_clone: true # for startup speed
+
+environment:
+ GOPATH: C:\gopath
+
+platform:
+ - x64
+
+# http://www.appveyor.com/docs/installed-software
+install:
+ # some helpful output for debugging builds
+ - go version
+ - go env
+ # pre-installed MinGW at C:\MinGW is 32bit only
+ # but MSYS2 at C:\msys64 has mingw64
+ - set PATH=C:\msys64\mingw64\bin;%PATH%
+ - gcc --version
+ - g++ --version
+
+build_script:
+ - go install -v ./...
+
+test_script:
+ - set PATH=C:\gopath\bin;%PATH%
+ - go test -v ./...
+
+#artifacts:
+# - path: '%GOPATH%\bin\*.exe'
+deploy: off
diff --git a/vendor/github.com/pkg/errors/errors.go b/vendor/github.com/pkg/errors/errors.go
new file mode 100644
index 0000000..161aea2
--- /dev/null
+++ b/vendor/github.com/pkg/errors/errors.go
@@ -0,0 +1,288 @@
+// Package errors provides simple error handling primitives.
+//
+// The traditional error handling idiom in Go is roughly akin to
+//
+// if err != nil {
+// return err
+// }
+//
+// which when applied recursively up the call stack results in error reports
+// without context or debugging information. The errors package allows
+// programmers to add context to the failure path in their code in a way
+// that does not destroy the original value of the error.
+//
+// Adding context to an error
+//
+// The errors.Wrap function returns a new error that adds context to the
+// original error by recording a stack trace at the point Wrap is called,
+// together with the supplied message. For example
+//
+// _, err := ioutil.ReadAll(r)
+// if err != nil {
+// return errors.Wrap(err, "read failed")
+// }
+//
+// If additional control is required, the errors.WithStack and
+// errors.WithMessage functions destructure errors.Wrap into its component
+// operations: annotating an error with a stack trace and with a message,
+// respectively.
+//
+// Retrieving the cause of an error
+//
+// Using errors.Wrap constructs a stack of errors, adding context to the
+// preceding error. Depending on the nature of the error it may be necessary
+// to reverse the operation of errors.Wrap to retrieve the original error
+// for inspection. Any error value which implements this interface
+//
+// type causer interface {
+// Cause() error
+// }
+//
+// can be inspected by errors.Cause. errors.Cause will recursively retrieve
+// the topmost error that does not implement causer, which is assumed to be
+// the original cause. For example:
+//
+// switch err := errors.Cause(err).(type) {
+// case *MyError:
+// // handle specifically
+// default:
+// // unknown error
+// }
+//
+// Although the causer interface is not exported by this package, it is
+// considered a part of its stable public interface.
+//
+// Formatted printing of errors
+//
+// All error values returned from this package implement fmt.Formatter and can
+// be formatted by the fmt package. The following verbs are supported:
+//
+// %s print the error. If the error has a Cause it will be
+// printed recursively.
+// %v see %s
+// %+v extended format. Each Frame of the error's StackTrace will
+// be printed in detail.
+//
+// Retrieving the stack trace of an error or wrapper
+//
+// New, Errorf, Wrap, and Wrapf record a stack trace at the point they are
+// invoked. This information can be retrieved with the following interface:
+//
+// type stackTracer interface {
+// StackTrace() errors.StackTrace
+// }
+//
+// The returned errors.StackTrace type is defined as
+//
+// type StackTrace []Frame
+//
+// The Frame type represents a call site in the stack trace. Frame supports
+// the fmt.Formatter interface that can be used for printing information about
+// the stack trace of this error. For example:
+//
+// if err, ok := err.(stackTracer); ok {
+// for _, f := range err.StackTrace() {
+// fmt.Printf("%+s:%d\n", f, f)
+// }
+// }
+//
+// Although the stackTracer interface is not exported by this package, it is
+// considered a part of its stable public interface.
+//
+// See the documentation for Frame.Format for more details.
+package errors
+
+import (
+ "fmt"
+ "io"
+)
+
+// New returns an error with the supplied message.
+// New also records the stack trace at the point it was called.
+func New(message string) error {
+ return &fundamental{
+ msg: message,
+ stack: callers(),
+ }
+}
+
+// Errorf formats according to a format specifier and returns the string
+// as a value that satisfies error.
+// Errorf also records the stack trace at the point it was called.
+func Errorf(format string, args ...interface{}) error {
+ return &fundamental{
+ msg: fmt.Sprintf(format, args...),
+ stack: callers(),
+ }
+}
+
+// fundamental is an error that has a message and a stack, but no caller.
+type fundamental struct {
+ msg string
+ *stack
+}
+
+func (f *fundamental) Error() string { return f.msg }
+
+func (f *fundamental) Format(s fmt.State, verb rune) {
+ switch verb {
+ case 'v':
+ if s.Flag('+') {
+ io.WriteString(s, f.msg)
+ f.stack.Format(s, verb)
+ return
+ }
+ fallthrough
+ case 's':
+ io.WriteString(s, f.msg)
+ case 'q':
+ fmt.Fprintf(s, "%q", f.msg)
+ }
+}
+
+// WithStack annotates err with a stack trace at the point WithStack was called.
+// If err is nil, WithStack returns nil.
+func WithStack(err error) error {
+ if err == nil {
+ return nil
+ }
+ return &withStack{
+ err,
+ callers(),
+ }
+}
+
+type withStack struct {
+ error
+ *stack
+}
+
+func (w *withStack) Cause() error { return w.error }
+
+// Unwrap provides compatibility for Go 1.13 error chains.
+func (w *withStack) Unwrap() error { return w.error }
+
+func (w *withStack) Format(s fmt.State, verb rune) {
+ switch verb {
+ case 'v':
+ if s.Flag('+') {
+ fmt.Fprintf(s, "%+v", w.Cause())
+ w.stack.Format(s, verb)
+ return
+ }
+ fallthrough
+ case 's':
+ io.WriteString(s, w.Error())
+ case 'q':
+ fmt.Fprintf(s, "%q", w.Error())
+ }
+}
+
+// Wrap returns an error annotating err with a stack trace
+// at the point Wrap is called, and the supplied message.
+// If err is nil, Wrap returns nil.
+func Wrap(err error, message string) error {
+ if err == nil {
+ return nil
+ }
+ err = &withMessage{
+ cause: err,
+ msg: message,
+ }
+ return &withStack{
+ err,
+ callers(),
+ }
+}
+
+// Wrapf returns an error annotating err with a stack trace
+// at the point Wrapf is called, and the format specifier.
+// If err is nil, Wrapf returns nil.
+func Wrapf(err error, format string, args ...interface{}) error {
+ if err == nil {
+ return nil
+ }
+ err = &withMessage{
+ cause: err,
+ msg: fmt.Sprintf(format, args...),
+ }
+ return &withStack{
+ err,
+ callers(),
+ }
+}
+
+// WithMessage annotates err with a new message.
+// If err is nil, WithMessage returns nil.
+func WithMessage(err error, message string) error {
+ if err == nil {
+ return nil
+ }
+ return &withMessage{
+ cause: err,
+ msg: message,
+ }
+}
+
+// WithMessagef annotates err with the format specifier.
+// If err is nil, WithMessagef returns nil.
+func WithMessagef(err error, format string, args ...interface{}) error {
+ if err == nil {
+ return nil
+ }
+ return &withMessage{
+ cause: err,
+ msg: fmt.Sprintf(format, args...),
+ }
+}
+
+type withMessage struct {
+ cause error
+ msg string
+}
+
+func (w *withMessage) Error() string { return w.msg + ": " + w.cause.Error() }
+func (w *withMessage) Cause() error { return w.cause }
+
+// Unwrap provides compatibility for Go 1.13 error chains.
+func (w *withMessage) Unwrap() error { return w.cause }
+
+func (w *withMessage) Format(s fmt.State, verb rune) {
+ switch verb {
+ case 'v':
+ if s.Flag('+') {
+ fmt.Fprintf(s, "%+v\n", w.Cause())
+ io.WriteString(s, w.msg)
+ return
+ }
+ fallthrough
+ case 's', 'q':
+ io.WriteString(s, w.Error())
+ }
+}
+
+// Cause returns the underlying cause of the error, if possible.
+// An error value has a cause if it implements the following
+// interface:
+//
+// type causer interface {
+// Cause() error
+// }
+//
+// If the error does not implement Cause, the original error will
+// be returned. If the error is nil, nil will be returned without further
+// investigation.
+func Cause(err error) error {
+ type causer interface {
+ Cause() error
+ }
+
+ for err != nil {
+ cause, ok := err.(causer)
+ if !ok {
+ break
+ }
+ err = cause.Cause()
+ }
+ return err
+}
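
Taken together, the pieces above compose as follows. A small sketch (the path and messages are illustrative) exercising Wrap, Cause, and the %+v verb:

```go
package main

import (
	"fmt"
	"os"

	"github.com/pkg/errors"
)

// readConfig is a hypothetical caller used only for illustration.
func readConfig(path string) error {
	if _, err := os.ReadFile(path); err != nil {
		// Wrap records the stack trace here and prefixes the message.
		return errors.Wrap(err, "read config")
	}
	return nil
}

func main() {
	err := readConfig("/no/such/file")
	fmt.Println(err)               // read config: open /no/such/file: no such file or directory
	fmt.Println(errors.Cause(err)) // open /no/such/file: no such file or directory
	fmt.Printf("%+v\n", err)       // cause, message, then the recorded stack trace
}
```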
diff --git a/vendor/github.com/pkg/errors/go113.go b/vendor/github.com/pkg/errors/go113.go
new file mode 100644
index 0000000..be0d10d
--- /dev/null
+++ b/vendor/github.com/pkg/errors/go113.go
@@ -0,0 +1,38 @@
+// +build go1.13
+
+package errors
+
+import (
+ stderrors "errors"
+)
+
+// Is reports whether any error in err's chain matches target.
+//
+// The chain consists of err itself followed by the sequence of errors obtained by
+// repeatedly calling Unwrap.
+//
+// An error is considered to match a target if it is equal to that target or if
+// it implements a method Is(error) bool such that Is(target) returns true.
+func Is(err, target error) bool { return stderrors.Is(err, target) }
+
+// As finds the first error in err's chain that matches target, and if so, sets
+// target to that error value and returns true.
+//
+// The chain consists of err itself followed by the sequence of errors obtained by
+// repeatedly calling Unwrap.
+//
+// An error matches target if the error's concrete value is assignable to the value
+// pointed to by target, or if the error has a method As(interface{}) bool such that
+// As(target) returns true. In the latter case, the As method is responsible for
+// setting target.
+//
+// As will panic if target is not a non-nil pointer to either a type that implements
+// error, or to any interface type. As returns false if err is nil.
+func As(err error, target interface{}) bool { return stderrors.As(err, target) }
+
+// Unwrap returns the result of calling the Unwrap method on err, if err's
+// type contains an Unwrap method returning error.
+// Otherwise, Unwrap returns nil.
+func Unwrap(err error) error {
+ return stderrors.Unwrap(err)
+}
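
Because these helpers simply delegate to the standard library, and withStack/withMessage implement Unwrap, errors wrapped by this package stay transparent to Is and As. A quick sketch (the path is illustrative):

```go
package main

import (
	"fmt"
	"io/fs"
	"os"

	"github.com/pkg/errors"
)

func main() {
	_, err := os.Open("/no/such/file")
	wrapped := errors.Wrap(err, "open settings")

	// The Go 1.13 helpers see through the wrapping layers
	// to the root *fs.PathError and its underlying errno.
	fmt.Println(errors.Is(wrapped, fs.ErrNotExist)) // true

	var pathErr *fs.PathError
	if errors.As(wrapped, &pathErr) {
		fmt.Println(pathErr.Path) // /no/such/file
	}
}
```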
diff --git a/vendor/github.com/pkg/errors/stack.go b/vendor/github.com/pkg/errors/stack.go
new file mode 100644
index 0000000..779a834
--- /dev/null
+++ b/vendor/github.com/pkg/errors/stack.go
@@ -0,0 +1,177 @@
+package errors
+
+import (
+ "fmt"
+ "io"
+ "path"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
+// Frame represents a program counter inside a stack frame.
+// For historical reasons if Frame is interpreted as a uintptr
+// its value represents the program counter + 1.
+type Frame uintptr
+
+// pc returns the program counter for this frame;
+// multiple frames may have the same PC value.
+func (f Frame) pc() uintptr { return uintptr(f) - 1 }
+
+// file returns the full path to the file that contains the
+// function for this Frame's pc.
+func (f Frame) file() string {
+ fn := runtime.FuncForPC(f.pc())
+ if fn == nil {
+ return "unknown"
+ }
+ file, _ := fn.FileLine(f.pc())
+ return file
+}
+
+// line returns the line number of source code of the
+// function for this Frame's pc.
+func (f Frame) line() int {
+ fn := runtime.FuncForPC(f.pc())
+ if fn == nil {
+ return 0
+ }
+ _, line := fn.FileLine(f.pc())
+ return line
+}
+
+// name returns the name of this function, if known.
+func (f Frame) name() string {
+ fn := runtime.FuncForPC(f.pc())
+ if fn == nil {
+ return "unknown"
+ }
+ return fn.Name()
+}
+
+// Format formats the frame according to the fmt.Formatter interface.
+//
+// %s source file
+// %d source line
+// %n function name
+// %v equivalent to %s:%d
+//
+// Format accepts flags that alter the printing of some verbs, as follows:
+//
+// %+s function name and path of source file relative to the compile time
+// GOPATH separated by \n\t (<funcname>\n\t<path>)
+// %+v equivalent to %+s:%d
+func (f Frame) Format(s fmt.State, verb rune) {
+ switch verb {
+ case 's':
+ switch {
+ case s.Flag('+'):
+ io.WriteString(s, f.name())
+ io.WriteString(s, "\n\t")
+ io.WriteString(s, f.file())
+ default:
+ io.WriteString(s, path.Base(f.file()))
+ }
+ case 'd':
+ io.WriteString(s, strconv.Itoa(f.line()))
+ case 'n':
+ io.WriteString(s, funcname(f.name()))
+ case 'v':
+ f.Format(s, 's')
+ io.WriteString(s, ":")
+ f.Format(s, 'd')
+ }
+}
+
+// MarshalText formats a stacktrace Frame as a text string. The output is the
+// same as that of fmt.Sprintf("%+v", f), but without newlines or tabs.
+func (f Frame) MarshalText() ([]byte, error) {
+ name := f.name()
+ if name == "unknown" {
+ return []byte(name), nil
+ }
+ return []byte(fmt.Sprintf("%s %s:%d", name, f.file(), f.line())), nil
+}
+
+// StackTrace is a stack of Frames from innermost (newest) to outermost (oldest).
+type StackTrace []Frame
+
+// Format formats the stack of Frames according to the fmt.Formatter interface.
+//
+// %s lists source files for each Frame in the stack
+// %v lists the source file and line number for each Frame in the stack
+//
+// Format accepts flags that alter the printing of some verbs, as follows:
+//
+// %+v Prints filename, function, and line number for each Frame in the stack.
+func (st StackTrace) Format(s fmt.State, verb rune) {
+ switch verb {
+ case 'v':
+ switch {
+ case s.Flag('+'):
+ for _, f := range st {
+ io.WriteString(s, "\n")
+ f.Format(s, verb)
+ }
+ case s.Flag('#'):
+ fmt.Fprintf(s, "%#v", []Frame(st))
+ default:
+ st.formatSlice(s, verb)
+ }
+ case 's':
+ st.formatSlice(s, verb)
+ }
+}
+
+// formatSlice will format this StackTrace into the given buffer as a slice of
+// Frame, only valid when called with '%s' or '%v'.
+func (st StackTrace) formatSlice(s fmt.State, verb rune) {
+ io.WriteString(s, "[")
+ for i, f := range st {
+ if i > 0 {
+ io.WriteString(s, " ")
+ }
+ f.Format(s, verb)
+ }
+ io.WriteString(s, "]")
+}
+
+// stack represents a stack of program counters.
+type stack []uintptr
+
+func (s *stack) Format(st fmt.State, verb rune) {
+ switch verb {
+ case 'v':
+ switch {
+ case st.Flag('+'):
+ for _, pc := range *s {
+ f := Frame(pc)
+ fmt.Fprintf(st, "\n%+v", f)
+ }
+ }
+ }
+}
+
+func (s *stack) StackTrace() StackTrace {
+ f := make([]Frame, len(*s))
+ for i := 0; i < len(f); i++ {
+ f[i] = Frame((*s)[i])
+ }
+ return f
+}
+
+func callers() *stack {
+ const depth = 32
+ var pcs [depth]uintptr
+ n := runtime.Callers(3, pcs[:])
+ var st stack = pcs[0:n]
+ return &st
+}
+
+// funcname removes the path prefix component of a function's name reported by func.Name().
+func funcname(name string) string {
+ i := strings.LastIndex(name, "/")
+ name = name[i+1:]
+ i = strings.Index(name, ".")
+ return name[i+1:]
+}
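
The stackTracer interface mentioned in the package documentation is deliberately unexported, so callers who want the raw frames declare a local copy. A minimal sketch:

```go
package main

import (
	"fmt"

	"github.com/pkg/errors"
)

// stackTracer mirrors the interface documented by pkg/errors;
// the package keeps it unexported, so we declare it ourselves.
type stackTracer interface {
	StackTrace() errors.StackTrace
}

func main() {
	err := errors.New("boom")
	if st, ok := err.(stackTracer); ok {
		// Print each frame as "path/to/file.go:line".
		for _, f := range st.StackTrace() {
			fmt.Printf("%+s:%d\n", f, f)
		}
	}
}
```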
diff --git a/vendor/github.com/pmezard/go-difflib/LICENSE b/vendor/github.com/pmezard/go-difflib/LICENSE
new file mode 100644
index 0000000..c67dad6
--- /dev/null
+++ b/vendor/github.com/pmezard/go-difflib/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2013, Patrick Mezard
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+ The names of its contributors may not be used to endorse or promote
+products derived from this software without specific prior written
+permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/pmezard/go-difflib/difflib/difflib.go b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go
new file mode 100644
index 0000000..003e99f
--- /dev/null
+++ b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go
@@ -0,0 +1,772 @@
+// Package difflib is a partial port of the Python difflib module.
+//
+// It provides tools to compare sequences of strings and generate textual diffs.
+//
+// The following class and functions have been ported:
+//
+// - SequenceMatcher
+//
+// - unified_diff
+//
+// - context_diff
+//
+// Getting unified diffs was the main goal of the port. Keep in mind that this
+// code is mostly suitable for presenting text differences in a human-friendly
+// way; there is no guarantee that the generated diffs are consumable by patch(1).
+package difflib
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ "strings"
+)
+
+func min(a, b int) int {
+ if a < b {
+ return a
+ }
+ return b
+}
+
+func max(a, b int) int {
+ if a > b {
+ return a
+ }
+ return b
+}
+
+func calculateRatio(matches, length int) float64 {
+ if length > 0 {
+ return 2.0 * float64(matches) / float64(length)
+ }
+ return 1.0
+}
+
+type Match struct {
+ A int
+ B int
+ Size int
+}
+
+type OpCode struct {
+ Tag byte
+ I1 int
+ I2 int
+ J1 int
+ J2 int
+}
+
+// SequenceMatcher compares sequences of strings. The basic
+// algorithm predates, and is a little fancier than, an algorithm
+// published in the late 1980's by Ratcliff and Obershelp under the
+// hyperbolic name "gestalt pattern matching". The basic idea is to find
+// the longest contiguous matching subsequence that contains no "junk"
+// elements (R-O doesn't address junk). The same idea is then applied
+// recursively to the pieces of the sequences to the left and to the right
+// of the matching subsequence. This does not yield minimal edit
+// sequences, but does tend to yield matches that "look right" to people.
+//
+// SequenceMatcher tries to compute a "human-friendly diff" between two
+// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the
+// longest *contiguous* & junk-free matching subsequence. That's what
+// catches people's eyes. The Windows(tm) windiff has another interesting
+// notion, pairing up elements that appear uniquely in each sequence.
+// That, and the method here, appear to yield more intuitive difference
+// reports than does diff. This method appears to be the least vulnerable
+// to synching up on blocks of "junk lines", though (like blank lines in
+// ordinary text files, or maybe "<P>" lines in HTML files). That may be
+// because this is the only method of the 3 that has a *concept* of
+// "junk".
+//
+// Timing: Basic R-O is cubic time worst case and quadratic time expected
+// case. SequenceMatcher is quadratic time for the worst case and has
+// expected-case behavior dependent in a complicated way on how many
+// elements the sequences have in common; best case time is linear.
+type SequenceMatcher struct {
+ a []string
+ b []string
+ b2j map[string][]int
+ IsJunk func(string) bool
+ autoJunk bool
+ bJunk map[string]struct{}
+ matchingBlocks []Match
+ fullBCount map[string]int
+ bPopular map[string]struct{}
+ opCodes []OpCode
+}
+
+func NewMatcher(a, b []string) *SequenceMatcher {
+ m := SequenceMatcher{autoJunk: true}
+ m.SetSeqs(a, b)
+ return &m
+}
+
+func NewMatcherWithJunk(a, b []string, autoJunk bool,
+ isJunk func(string) bool) *SequenceMatcher {
+
+ m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk}
+ m.SetSeqs(a, b)
+ return &m
+}
+
+// Set two sequences to be compared.
+func (m *SequenceMatcher) SetSeqs(a, b []string) {
+ m.SetSeq1(a)
+ m.SetSeq2(b)
+}
+
+// Set the first sequence to be compared. The second sequence to be compared is
+// not changed.
+//
+// SequenceMatcher computes and caches detailed information about the second
+// sequence, so if you want to compare one sequence S against many sequences,
+// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other
+// sequences.
+//
+// See also SetSeqs() and SetSeq2().
+func (m *SequenceMatcher) SetSeq1(a []string) {
+ if &a == &m.a {
+ return
+ }
+ m.a = a
+ m.matchingBlocks = nil
+ m.opCodes = nil
+}
+
+// Set the second sequence to be compared. The first sequence to be compared is
+// not changed.
+func (m *SequenceMatcher) SetSeq2(b []string) {
+ if &b == &m.b {
+ return
+ }
+ m.b = b
+ m.matchingBlocks = nil
+ m.opCodes = nil
+ m.fullBCount = nil
+ m.chainB()
+}
+
+func (m *SequenceMatcher) chainB() {
+ // Populate line -> index mapping
+ b2j := map[string][]int{}
+ for i, s := range m.b {
+ indices := b2j[s]
+ indices = append(indices, i)
+ b2j[s] = indices
+ }
+
+ // Purge junk elements
+ m.bJunk = map[string]struct{}{}
+ if m.IsJunk != nil {
+ junk := m.bJunk
+ for s := range b2j {
+ if m.IsJunk(s) {
+ junk[s] = struct{}{}
+ }
+ }
+ for s := range junk {
+ delete(b2j, s)
+ }
+ }
+
+ // Purge remaining popular elements
+ popular := map[string]struct{}{}
+ n := len(m.b)
+ if m.autoJunk && n >= 200 {
+ ntest := n/100 + 1
+ for s, indices := range b2j {
+ if len(indices) > ntest {
+ popular[s] = struct{}{}
+ }
+ }
+ for s := range popular {
+ delete(b2j, s)
+ }
+ }
+ m.bPopular = popular
+ m.b2j = b2j
+}
+
+func (m *SequenceMatcher) isBJunk(s string) bool {
+ _, ok := m.bJunk[s]
+ return ok
+}
+
+// Find longest matching block in a[alo:ahi] and b[blo:bhi].
+//
+// If IsJunk is not defined:
+//
+// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
+// alo <= i <= i+k <= ahi
+// blo <= j <= j+k <= bhi
+// and for all (i',j',k') meeting those conditions,
+// k >= k'
+// i <= i'
+// and if i == i', j <= j'
+//
+// In other words, of all maximal matching blocks, return one that
+// starts earliest in a, and of all those maximal matching blocks that
+// start earliest in a, return the one that starts earliest in b.
+//
+// If IsJunk is defined, first the longest matching block is
+// determined as above, but with the additional restriction that no
+// junk element appears in the block. Then that block is extended as
+// far as possible by matching (only) junk elements on both sides. So
+// the resulting block never matches on junk except as identical junk
+// happens to be adjacent to an "interesting" match.
+//
+// If no blocks match, return (alo, blo, 0).
+func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match {
+ // CAUTION: stripping common prefix or suffix would be incorrect.
+ // E.g.,
+ // ab
+ // acab
+ // Longest matching block is "ab", but if common prefix is
+ // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so
+ // strip, so ends up claiming that ab is changed to acab by
+ // inserting "ca" in the middle. That's minimal but unintuitive:
+ // "it's obvious" that someone inserted "ac" at the front.
+ // Windiff ends up at the same place as diff, but by pairing up
+ // the unique 'b's and then matching the first two 'a's.
+ besti, bestj, bestsize := alo, blo, 0
+
+ // find longest junk-free match
+ // during an iteration of the loop, j2len[j] = length of longest
+ // junk-free match ending with a[i-1] and b[j]
+ j2len := map[int]int{}
+ for i := alo; i != ahi; i++ {
+ // look at all instances of a[i] in b; note that because
+ // b2j has no junk keys, the loop is skipped if a[i] is junk
+ newj2len := map[int]int{}
+ for _, j := range m.b2j[m.a[i]] {
+ // a[i] matches b[j]
+ if j < blo {
+ continue
+ }
+ if j >= bhi {
+ break
+ }
+ k := j2len[j-1] + 1
+ newj2len[j] = k
+ if k > bestsize {
+ besti, bestj, bestsize = i-k+1, j-k+1, k
+ }
+ }
+ j2len = newj2len
+ }
+
+ // Extend the best by non-junk elements on each end. In particular,
+ // "popular" non-junk elements aren't in b2j, which greatly speeds
+ // the inner loop above, but also means "the best" match so far
+ // doesn't contain any junk *or* popular non-junk elements.
+ for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) &&
+ m.a[besti-1] == m.b[bestj-1] {
+ besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+ }
+ for besti+bestsize < ahi && bestj+bestsize < bhi &&
+ !m.isBJunk(m.b[bestj+bestsize]) &&
+ m.a[besti+bestsize] == m.b[bestj+bestsize] {
+ bestsize += 1
+ }
+
+ // Now that we have a wholly interesting match (albeit possibly
+ // empty!), we may as well suck up the matching junk on each
+ // side of it too. Can't think of a good reason not to, and it
+ // saves post-processing the (possibly considerable) expense of
+ // figuring out what to do with it. In the case of an empty
+ // interesting match, this is clearly the right thing to do,
+ // because no other kind of match is possible in the regions.
+ for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) &&
+ m.a[besti-1] == m.b[bestj-1] {
+ besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+ }
+ for besti+bestsize < ahi && bestj+bestsize < bhi &&
+ m.isBJunk(m.b[bestj+bestsize]) &&
+ m.a[besti+bestsize] == m.b[bestj+bestsize] {
+ bestsize += 1
+ }
+
+ return Match{A: besti, B: bestj, Size: bestsize}
+}
+
+// Return list of triples describing matching subsequences.
+//
+// Each triple is of the form (i, j, n), and means that
+// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in
+// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are
+// adjacent triples in the list, and the second is not the last triple in the
+// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe
+// adjacent equal blocks.
+//
+// The last triple is a dummy, (len(a), len(b), 0), and is the only
+// triple with n==0.
+func (m *SequenceMatcher) GetMatchingBlocks() []Match {
+ if m.matchingBlocks != nil {
+ return m.matchingBlocks
+ }
+
+ var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match
+ matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match {
+ match := m.findLongestMatch(alo, ahi, blo, bhi)
+ i, j, k := match.A, match.B, match.Size
+ if match.Size > 0 {
+ if alo < i && blo < j {
+ matched = matchBlocks(alo, i, blo, j, matched)
+ }
+ matched = append(matched, match)
+ if i+k < ahi && j+k < bhi {
+ matched = matchBlocks(i+k, ahi, j+k, bhi, matched)
+ }
+ }
+ return matched
+ }
+ matched := matchBlocks(0, len(m.a), 0, len(m.b), nil)
+
+ // It's possible that we have adjacent equal blocks in the
+ // matching_blocks list now.
+ nonAdjacent := []Match{}
+ i1, j1, k1 := 0, 0, 0
+ for _, b := range matched {
+ // Is this block adjacent to i1, j1, k1?
+ i2, j2, k2 := b.A, b.B, b.Size
+ if i1+k1 == i2 && j1+k1 == j2 {
+ // Yes, so collapse them -- this just increases the length of
+ // the first block by the length of the second, and the first
+ // block so lengthened remains the block to compare against.
+ k1 += k2
+ } else {
+ // Not adjacent. Remember the first block (k1==0 means it's
+ // the dummy we started with), and make the second block the
+ // new block to compare against.
+ if k1 > 0 {
+ nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
+ }
+ i1, j1, k1 = i2, j2, k2
+ }
+ }
+ if k1 > 0 {
+ nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
+ }
+
+ nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0})
+ m.matchingBlocks = nonAdjacent
+ return m.matchingBlocks
+}
+
+// Return list of 5-tuples describing how to turn a into b.
+//
+// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple
+// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
+// tuple preceding it, and likewise for j1 == the previous j2.
+//
+// The tags are characters, with these meanings:
+//
+// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2]
+//
+// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case.
+//
+// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case.
+//
+// 'e' (equal): a[i1:i2] == b[j1:j2]
+func (m *SequenceMatcher) GetOpCodes() []OpCode {
+ if m.opCodes != nil {
+ return m.opCodes
+ }
+ i, j := 0, 0
+ matching := m.GetMatchingBlocks()
+ opCodes := make([]OpCode, 0, len(matching))
+ for _, m := range matching {
+ // invariant: we've pumped out correct diffs to change
+ // a[:i] into b[:j], and the next matching block is
+ // a[ai:ai+size] == b[bj:bj+size]. So we need to pump
+ // out a diff to change a[i:ai] into b[j:bj], pump out
+ // the matching block, and move (i,j) beyond the match
+ ai, bj, size := m.A, m.B, m.Size
+ tag := byte(0)
+ if i < ai && j < bj {
+ tag = 'r'
+ } else if i < ai {
+ tag = 'd'
+ } else if j < bj {
+ tag = 'i'
+ }
+ if tag > 0 {
+ opCodes = append(opCodes, OpCode{tag, i, ai, j, bj})
+ }
+ i, j = ai+size, bj+size
+ // the list of matching blocks is terminated by a
+ // sentinel with size 0
+ if size > 0 {
+ opCodes = append(opCodes, OpCode{'e', ai, i, bj, j})
+ }
+ }
+ m.opCodes = opCodes
+ return m.opCodes
+}
+
+// Isolate change clusters by eliminating ranges with no changes.
+//
+// Return a generator of groups with up to n lines of context.
+// Each group is in the same format as returned by GetOpCodes().
+func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode {
+ if n < 0 {
+ n = 3
+ }
+ codes := m.GetOpCodes()
+ if len(codes) == 0 {
+ codes = []OpCode{{'e', 0, 1, 0, 1}}
+ }
+ // Fixup leading and trailing groups if they show no changes.
+ if codes[0].Tag == 'e' {
+ c := codes[0]
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2}
+ }
+ if codes[len(codes)-1].Tag == 'e' {
+ c := codes[len(codes)-1]
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)}
+ }
+ nn := n + n
+ groups := [][]OpCode{}
+ group := []OpCode{}
+ for _, c := range codes {
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ // End the current group and start a new one whenever
+ // there is a large range with no changes.
+ if c.Tag == 'e' && i2-i1 > nn {
+ group = append(group, OpCode{c.Tag, i1, min(i2, i1+n),
+ j1, min(j2, j1+n)})
+ groups = append(groups, group)
+ group = []OpCode{}
+ i1, j1 = max(i1, i2-n), max(j1, j2-n)
+ }
+ group = append(group, OpCode{c.Tag, i1, i2, j1, j2})
+ }
+ if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') {
+ groups = append(groups, group)
+ }
+ return groups
+}
+
+// Return a measure of the sequences' similarity (float in [0,1]).
+//
+// Where T is the total number of elements in both sequences, and
+// M is the number of matches, this is 2.0*M / T.
+// Note that this is 1 if the sequences are identical, and 0 if
+// they have nothing in common.
+//
+// .Ratio() is expensive to compute if you haven't already computed
+// .GetMatchingBlocks() or .GetOpCodes(), in which case you may
+// want to try .QuickRatio() or .RealQuickRatio() first to get an
+// upper bound.
+func (m *SequenceMatcher) Ratio() float64 {
+ matches := 0
+ for _, m := range m.GetMatchingBlocks() {
+ matches += m.Size
+ }
+ return calculateRatio(matches, len(m.a)+len(m.b))
+}
+
+// Return an upper bound on ratio() relatively quickly.
+//
+// This isn't defined beyond that it is an upper bound on .Ratio(), and
+// is faster to compute.
+func (m *SequenceMatcher) QuickRatio() float64 {
+ // viewing a and b as multisets, set matches to the cardinality
+ // of their intersection; this counts the number of matches
+ // without regard to order, so is clearly an upper bound
+ if m.fullBCount == nil {
+ m.fullBCount = map[string]int{}
+ for _, s := range m.b {
+ m.fullBCount[s] = m.fullBCount[s] + 1
+ }
+ }
+
+ // avail[x] is the number of times x appears in 'b' less the
+ // number of times we've seen it in 'a' so far ... kinda
+ avail := map[string]int{}
+ matches := 0
+ for _, s := range m.a {
+ n, ok := avail[s]
+ if !ok {
+ n = m.fullBCount[s]
+ }
+ avail[s] = n - 1
+ if n > 0 {
+ matches += 1
+ }
+ }
+ return calculateRatio(matches, len(m.a)+len(m.b))
+}
+
+// Return an upper bound on ratio() very quickly.
+//
+// This isn't defined beyond that it is an upper bound on .Ratio(), and
+// is faster to compute than either .Ratio() or .QuickRatio().
+func (m *SequenceMatcher) RealQuickRatio() float64 {
+ la, lb := len(m.a), len(m.b)
+ return calculateRatio(min(la, lb), la+lb)
+}
+
+// Convert range to the "ed" format
+func formatRangeUnified(start, stop int) string {
+ // Per the diff spec at http://www.unix.org/single_unix_specification/
+ beginning := start + 1 // lines start numbering with one
+ length := stop - start
+ if length == 1 {
+ return fmt.Sprintf("%d", beginning)
+ }
+ if length == 0 {
+ beginning -= 1 // empty ranges begin at line just before the range
+ }
+ return fmt.Sprintf("%d,%d", beginning, length)
+}
+
+// Unified diff parameters
+type UnifiedDiff struct {
+ A []string // First sequence lines
+ FromFile string // First file name
+ FromDate string // First file time
+ B []string // Second sequence lines
+ ToFile string // Second file name
+ ToDate string // Second file time
+ Eol string // Headers end of line, defaults to LF
+ Context int // Number of context lines
+}
+
+// Compare two sequences of lines; generate the delta as a unified diff.
+//
+// Unified diffs are a compact way of showing line changes and a few
+// lines of context. The number of context lines is set by 'n' which
+// defaults to three.
+//
+// By default, the diff control lines (those with ---, +++, or @@) are
+// created with a trailing newline. This is helpful so that inputs
+// created from file.readlines() result in diffs that are suitable for
+// file.writelines() since both the inputs and outputs have trailing
+// newlines.
+//
+// For inputs that do not have trailing newlines, set the lineterm
+// argument to "" so that the output will be uniformly newline free.
+//
+// The unidiff format normally has a header for filenames and modification
+// times. Any or all of these may be specified using strings for
+// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.
+// The modification times are normally expressed in the ISO 8601 format.
+func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error {
+ buf := bufio.NewWriter(writer)
+ defer buf.Flush()
+ wf := func(format string, args ...interface{}) error {
+ _, err := buf.WriteString(fmt.Sprintf(format, args...))
+ return err
+ }
+ ws := func(s string) error {
+ _, err := buf.WriteString(s)
+ return err
+ }
+
+ if len(diff.Eol) == 0 {
+ diff.Eol = "\n"
+ }
+
+ started := false
+ m := NewMatcher(diff.A, diff.B)
+ for _, g := range m.GetGroupedOpCodes(diff.Context) {
+ if !started {
+ started = true
+ fromDate := ""
+ if len(diff.FromDate) > 0 {
+ fromDate = "\t" + diff.FromDate
+ }
+ toDate := ""
+ if len(diff.ToDate) > 0 {
+ toDate = "\t" + diff.ToDate
+ }
+ if diff.FromFile != "" || diff.ToFile != "" {
+ err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol)
+ if err != nil {
+ return err
+ }
+ err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ first, last := g[0], g[len(g)-1]
+ range1 := formatRangeUnified(first.I1, last.I2)
+ range2 := formatRangeUnified(first.J1, last.J2)
+ if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil {
+ return err
+ }
+ for _, c := range g {
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ if c.Tag == 'e' {
+ for _, line := range diff.A[i1:i2] {
+ if err := ws(" " + line); err != nil {
+ return err
+ }
+ }
+ continue
+ }
+ if c.Tag == 'r' || c.Tag == 'd' {
+ for _, line := range diff.A[i1:i2] {
+ if err := ws("-" + line); err != nil {
+ return err
+ }
+ }
+ }
+ if c.Tag == 'r' || c.Tag == 'i' {
+ for _, line := range diff.B[j1:j2] {
+ if err := ws("+" + line); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// Like WriteUnifiedDiff but returns the diff as a string.
+func GetUnifiedDiffString(diff UnifiedDiff) (string, error) {
+ w := &bytes.Buffer{}
+ err := WriteUnifiedDiff(w, diff)
+ return string(w.Bytes()), err
+}
+
+// Convert range to the "ed" format.
+func formatRangeContext(start, stop int) string {
+ // Per the diff spec at http://www.unix.org/single_unix_specification/
+ beginning := start + 1 // lines start numbering with one
+ length := stop - start
+ if length == 0 {
+ beginning -= 1 // empty ranges begin at line just before the range
+ }
+ if length <= 1 {
+ return fmt.Sprintf("%d", beginning)
+ }
+ return fmt.Sprintf("%d,%d", beginning, beginning+length-1)
+}
+
+type ContextDiff UnifiedDiff
+
+// Compare two sequences of lines; generate the delta as a context diff.
+//
+// Context diffs are a compact way of showing line changes and a few
+// lines of context. The number of context lines is set by diff.Context
+// which defaults to three.
+//
+// By default, the diff control lines (those with *** or ---) are
+// created with a trailing newline.
+//
+// For inputs that do not have trailing newlines, set the diff.Eol
+// argument to "" so that the output will be uniformly newline free.
+//
+// The context diff format normally has a header for filenames and
+// modification times. Any or all of these may be specified using
+// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate.
+// The modification times are normally expressed in the ISO 8601 format.
+// If not specified, the strings default to blanks.
+func WriteContextDiff(writer io.Writer, diff ContextDiff) error {
+ buf := bufio.NewWriter(writer)
+ defer buf.Flush()
+ var diffErr error
+ wf := func(format string, args ...interface{}) {
+ _, err := buf.WriteString(fmt.Sprintf(format, args...))
+ if diffErr == nil && err != nil {
+ diffErr = err
+ }
+ }
+ ws := func(s string) {
+ _, err := buf.WriteString(s)
+ if diffErr == nil && err != nil {
+ diffErr = err
+ }
+ }
+
+ if len(diff.Eol) == 0 {
+ diff.Eol = "\n"
+ }
+
+ prefix := map[byte]string{
+ 'i': "+ ",
+ 'd': "- ",
+ 'r': "! ",
+ 'e': " ",
+ }
+
+ started := false
+ m := NewMatcher(diff.A, diff.B)
+ for _, g := range m.GetGroupedOpCodes(diff.Context) {
+ if !started {
+ started = true
+ fromDate := ""
+ if len(diff.FromDate) > 0 {
+ fromDate = "\t" + diff.FromDate
+ }
+ toDate := ""
+ if len(diff.ToDate) > 0 {
+ toDate = "\t" + diff.ToDate
+ }
+ if diff.FromFile != "" || diff.ToFile != "" {
+ wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol)
+ wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol)
+ }
+ }
+
+ first, last := g[0], g[len(g)-1]
+ ws("***************" + diff.Eol)
+
+ range1 := formatRangeContext(first.I1, last.I2)
+ wf("*** %s ****%s", range1, diff.Eol)
+ for _, c := range g {
+ if c.Tag == 'r' || c.Tag == 'd' {
+ for _, cc := range g {
+ if cc.Tag == 'i' {
+ continue
+ }
+ for _, line := range diff.A[cc.I1:cc.I2] {
+ ws(prefix[cc.Tag] + line)
+ }
+ }
+ break
+ }
+ }
+
+ range2 := formatRangeContext(first.J1, last.J2)
+ wf("--- %s ----%s", range2, diff.Eol)
+ for _, c := range g {
+ if c.Tag == 'r' || c.Tag == 'i' {
+ for _, cc := range g {
+ if cc.Tag == 'd' {
+ continue
+ }
+ for _, line := range diff.B[cc.J1:cc.J2] {
+ ws(prefix[cc.Tag] + line)
+ }
+ }
+ break
+ }
+ }
+ }
+ return diffErr
+}
+
+// Like WriteContextDiff but returns the diff as a string.
+func GetContextDiffString(diff ContextDiff) (string, error) {
+ w := &bytes.Buffer{}
+ err := WriteContextDiff(w, diff)
+ return string(w.Bytes()), err
+}
+
+// Split a string on "\n" while preserving the newlines. The output can be used
+// as input for UnifiedDiff and ContextDiff structures.
+func SplitLines(s string) []string {
+ lines := strings.SplitAfter(s, "\n")
+ lines[len(lines)-1] += "\n"
+ return lines
+}
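
A short usage sketch of the unified-diff API above (the file names and contents are illustrative):

```go
package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	diff := difflib.UnifiedDiff{
		A:        difflib.SplitLines("one\ntwo\nthree\n"),
		B:        difflib.SplitLines("one\n2\nthree\n"),
		FromFile: "a.txt",
		ToFile:   "b.txt",
		Context:  3,
	}
	text, err := difflib.GetUnifiedDiffString(diff)
	if err != nil {
		panic(err)
	}
	// Prints a standard unified diff: the ---/+++ header,
	// an @@ hunk header, and a -two/+2 change with context.
	fmt.Print(text)
}
```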
diff --git a/vendor/github.com/rivo/uniseg/LICENSE.txt b/vendor/github.com/rivo/uniseg/LICENSE.txt
new file mode 100644
index 0000000..5040f1e
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 Oliver Kuederle
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/rivo/uniseg/README.md b/vendor/github.com/rivo/uniseg/README.md
new file mode 100644
index 0000000..25e9346
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/README.md
@@ -0,0 +1,157 @@
+# Unicode Text Segmentation for Go
+
+[![Go Reference](https://pkg.go.dev/badge/github.com/rivo/uniseg.svg)](https://pkg.go.dev/github.com/rivo/uniseg)
+[![Go Report](https://img.shields.io/badge/go%20report-A%2B-brightgreen.svg)](https://goreportcard.com/report/github.com/rivo/uniseg)
+
+This Go package implements Unicode Text Segmentation according to [Unicode Standard Annex #29](https://unicode.org/reports/tr29/), Unicode Line Breaking according to [Unicode Standard Annex #14](https://unicode.org/reports/tr14/) (Unicode version 14.0.0), and monospace font string width calculation similar to [wcwidth](https://man7.org/linux/man-pages/man3/wcwidth.3.html).
+
+## Background
+
+### Grapheme Clusters
+
+In Go, [strings are read-only slices of bytes](https://go.dev/blog/strings). They can be turned into Unicode code points using the `for` loop or by casting: `[]rune(str)`. However, multiple code points may be combined into one user-perceived character or what the Unicode specification calls "grapheme cluster". Here are some examples:
+
+|String|Bytes (UTF-8)|Code points (runes)|Grapheme clusters|
+|-|-|-|-|
+|Käse|6 bytes: `4b 61 cc 88 73 65`|5 code points: `4b 61 308 73 65`|4 clusters: `[4b],[61 308],[73],[65]`|
+|🏳️🌈|14 bytes: `f0 9f 8f b3 ef b8 8f e2 80 8d f0 9f 8c 88`|4 code points: `1f3f3 fe0f 200d 1f308`|1 cluster: `[1f3f3 fe0f 200d 1f308]`|
+|🇩🇪|8 bytes: `f0 9f 87 a9 f0 9f 87 aa`|2 code points: `1f1e9 1f1ea`|1 cluster: `[1f1e9 1f1ea]`|
+
+This package provides tools to iterate over these grapheme clusters. This may be used to determine the number of user-perceived characters, to split strings in their intended places, or to extract individual characters which form a unit.
+
+### Word Boundaries
+
+Word boundaries are used in a number of different contexts. The most familiar ones are selection (double-click mouse selection), cursor movement ("move to next word" control-arrow keys), and the dialog option "Whole Word Search" for search and replace. They are also used in database queries, to determine whether elements are within a certain number of words of one another. Searching may also use word boundaries in determining matching items. This package provides tools to determine word boundaries within strings.
+
+### Sentence Boundaries
+
+Sentence boundaries are often used for triple-click or some other method of selecting or iterating through blocks of text that are larger than single words. They are also used to determine whether words occur within the same sentence in database queries. This package provides tools to determine sentence boundaries within strings.
+
+### Line Breaking
+
+Line breaking, also known as word wrapping, is the process of breaking a section of text into lines such that it will fit in the available width of a page, window or other display area. This package provides tools to determine where a string may or may not be broken and where it must be broken (for example after newline characters).
+
+### Monospace Width
+
+Most terminals or text displays / text editors using a monospace font (for example source code editors) use a fixed width for each character. Some characters such as emojis or characters found in Asian and other languages may take up more than one character cell. This package provides tools to determine the number of cells a string will take up when displayed in a monospace font. See [here](https://pkg.go.dev/github.com/rivo/uniseg#hdr-Monospace_Width) for more information.
+
+## Installation
+
+```bash
+go get github.com/rivo/uniseg
+```
+
+## Examples
+
+### Counting Characters in a String
+
+```go
+n := uniseg.GraphemeClusterCount("🇩🇪🏳️🌈")
+fmt.Println(n)
+// 2
+```
+
+### Calculating the Monospace String Width
+
+```go
+width := uniseg.StringWidth("🇩🇪🏳️🌈!")
+fmt.Println(width)
+// 5
+```
+
+### Using the [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) Class
+
+This is the most convenient method of iterating over grapheme clusters:
+
+```go
+gr := uniseg.NewGraphemes("👍🏼!")
+for gr.Next() {
+ fmt.Printf("%x ", gr.Runes())
+}
+// [1f44d 1f3fc] [21]
+```
+
+### Using the [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) Function
+
+This is orders of magnitude faster than the `Graphemes` class, but it requires the handling of states and boundaries:
+
+```go
+str := "🇩🇪🏳️🌈"
+state := -1
+var c string
+for len(str) > 0 {
+ c, str, _, state = uniseg.StepString(str, state)
+ fmt.Printf("%x ", []rune(c))
+}
+// [1f1e9 1f1ea] [1f3f3 fe0f 200d 1f308]
+```
+
+### Advanced Examples
+
+Breaking into grapheme clusters and evaluating line breaks:
+
+```go
+str := "First line.\nSecond line."
+state := -1
+var (
+ c string
+ boundaries int
+)
+for len(str) > 0 {
+ c, str, boundaries, state = uniseg.StepString(str, state)
+ fmt.Print(c)
+ if boundaries&uniseg.MaskLine == uniseg.LineCanBreak {
+ fmt.Print("|")
+ } else if boundaries&uniseg.MaskLine == uniseg.LineMustBreak {
+ fmt.Print("‖")
+ }
+}
+// First |line.
+// ‖Second |line.‖
+```
+
+If you're only interested in word segmentation, use [`FirstWord`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWord) or [`FirstWordInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWordInString):
+
+```go
+str := "Hello, world!"
+state := -1
+var c string
+for len(str) > 0 {
+ c, str, state = uniseg.FirstWordInString(str, state)
+ fmt.Printf("(%s)\n", c)
+}
+// (Hello)
+// (,)
+// ( )
+// (world)
+// (!)
+```
+
+Similarly, use
+
+- [`FirstGraphemeCluster`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeCluster) or [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString) for grapheme cluster determination only,
+- [`FirstSentence`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentence) or [`FirstSentenceInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentenceInString) for sentence segmentation only (see the sketch after this list), and
+- [`FirstLineSegment`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegment) or [`FirstLineSegmentInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegmentInString) for line breaking / word wrapping (although using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) is preferred as it will observe grapheme cluster boundaries).
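+
+For instance, a sentence segmentation sketch using [`FirstSentenceInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentenceInString) (note that trailing whitespace remains part of the preceding sentence):
+
+```go
+str := "This is sentence 1.0. And this is sentence two."
+state := -1
+var sentence string
+for len(str) > 0 {
+	sentence, str, state = uniseg.FirstSentenceInString(str, state)
+	fmt.Printf("(%s)\n", sentence)
+}
+// (This is sentence 1.0. )
+// (And this is sentence two.)
+```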
+
+Finally, if you need to reverse a string while preserving grapheme clusters, use [`ReverseString`](https://pkg.go.dev/github.com/rivo/uniseg#ReverseString):
+
+```go
+fmt.Println(uniseg.ReverseString("🇩🇪🏳️🌈"))
+// 🏳️🌈🇩🇪
+```
+
+## Documentation
+
+Refer to https://pkg.go.dev/github.com/rivo/uniseg for the package's documentation.
+
+## Dependencies
+
+This package does not depend on any packages outside the standard library.
+
+## Sponsor this Project
+
+[Become a Sponsor on GitHub](https://github.com/sponsors/rivo?metadata_source=uniseg_readme) to support this project!
+
+## Your Feedback
+
+Add your issue here on GitHub, preferably before submitting any PRs. Feel free to get in touch if you have any questions.
\ No newline at end of file
diff --git a/vendor/github.com/rivo/uniseg/doc.go b/vendor/github.com/rivo/uniseg/doc.go
new file mode 100644
index 0000000..11224ae
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/doc.go
@@ -0,0 +1,108 @@
+/*
+Package uniseg implements Unicode Text Segmentation, Unicode Line Breaking, and
+string width calculation for monospace fonts. Unicode Text Segmentation conforms
+to Unicode Standard Annex #29 (https://unicode.org/reports/tr29/) and Unicode
+Line Breaking conforms to Unicode Standard Annex #14
+(https://unicode.org/reports/tr14/).
+
+In short, using this package, you can split a string into grapheme clusters
+(what people would usually refer to as a "character"), into words, and into
+sentences. Or, in its simplest case, this package allows you to count the number
+of characters in a string, especially when it contains complex characters such
+as emojis, combining characters, or characters from Asian, Arabic, Hebrew, or
+other languages. Additionally, you can use it to implement line breaking (or
+"word wrapping"), that is, to determine where text can be broken over to the
+next line when the width of the line is not big enough to fit the entire text.
+Finally, you can use it to calculate the display width of a string for monospace
+fonts.
+
+# Getting Started
+
+If you just want to count the number of characters in a string, you can use
+[GraphemeClusterCount]. If you want to determine the display width of a string,
+you can use [StringWidth]. If you want to iterate over a string, you can use
+[Step], [StepString], or the [Graphemes] class (more convenient but less
+performant). This will provide you with all information: grapheme clusters,
+word boundaries, sentence boundaries, line breaks, and monospace character
+widths. The specialized functions [FirstGraphemeCluster],
+[FirstGraphemeClusterInString], [FirstWord], [FirstWordInString],
+[FirstSentence], and [FirstSentenceInString] can be used if only one type of
+information is needed.
+
+# Grapheme Clusters
+
+Consider the rainbow flag emoji: 🏳️🌈. On most modern systems, it appears as one
+character. But its string representation actually has 14 bytes, so counting
+bytes (or using len("🏳️🌈")) will not work as expected. Counting runes won't
+work either: the flag consists of 4 Unicode code points, thus 4 runes. Both the
+standard library function utf8.RuneCountInString("🏳️🌈") and
+len([]rune("🏳️🌈")) return 4.
+
+The [GraphemeClusterCount] function will return 1 for the rainbow flag emoji.
+The Graphemes class and a variety of functions in this package will allow you to
+split strings into their grapheme clusters.
+
+# Word Boundaries
+
+Word boundaries are used in a number of different contexts. The most familiar
+ones are selection (double-click mouse selection), cursor movement ("move to
+next word" control-arrow keys), and the dialog option "Whole Word Search" for
+search and replace. This package provides methods for determining word
+boundaries.
+
+# Sentence Boundaries
+
+Sentence boundaries are often used for triple-click or some other method of
+selecting or iterating through blocks of text that are larger than single words.
+They are also used to determine whether words occur within the same sentence in
+database queries. This package provides methods for determining sentence
+boundaries.
+
+# Line Breaking
+
+Line breaking, also known as word wrapping, is the process of breaking a section
+of text into lines such that it will fit in the available width of a page,
+window or other display area. This package provides methods to determine the
+positions in a string where a line must be broken, may be broken, or must not be
+broken.
+
+# Monospace Width
+
+Monospace width, as referred to in this package, is the width of a string in a
+monospace font. This is commonly used in terminal user interfaces or text
+displays or editors that don't support proportional fonts. A width of 1
+corresponds to a single character cell. The C function [wcswidth()] and its
+implementations in other programming languages are in widespread use for the same
+purpose. However, there is no standard for the calculation of such widths, and
+this package differs from wcswidth() in a number of ways, presumably to generate
+more visually pleasing results.
+
+To start, we assume that every code point has a width of 1, with the following
+exceptions:
+
+ - Code points with grapheme cluster break properties Control, CR, LF, Extend,
+ and ZWJ have a width of 0.
+ - U+2E3A, Two-Em Dash, has a width of 3.
+ - U+2E3B, Three-Em Dash, has a width of 4.
+ - Characters with the East-Asian Width properties "Fullwidth" (F) and "Wide"
+ (W) have a width of 2. (Properties "Ambiguous" (A) and "Neutral" (N) both
+ have a width of 1.)
+ - Code points with grapheme cluster break property Regional Indicator have a
+ width of 2.
+ - Code points with grapheme cluster break property Extended Pictographic have
+ a width of 2, unless their Emoji Presentation flag is "No", in which case
+ the width is 1.
+
+For Hangul grapheme clusters composed of conjoining Jamo and for Regional
+Indicators (flags), all code points except the first one have a width of 0. For
+grapheme clusters starting with an Extended Pictographic, any additional code
+point will force a total width of 2, except if the Variation Selector-15
+(U+FE0E) is included, in which case the total width is always 1. Grapheme
+clusters ending with Variation Selector-16 (U+FE0F) have a width of 2.
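+
+For example, based on the rules above (an illustrative sketch, not an
+exhaustive specification):
+
+	StringWidth("A")    // 1: default width
+	StringWidth("世")   // 2: East-Asian Wide (W)
+	StringWidth("🇩🇪")  // 2: two Regional Indicators forming one flag
+	StringWidth("🏳️🌈") // 2: an emoji (ZWJ) sequence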
+
+Note that whether these widths appear correct depends on your application's
+rendering engine, the extent to which it conforms to the Unicode Standard, and
+its choice of font.
+
+[wcswidth()]: https://man7.org/linux/man-pages/man3/wcswidth.3.html
+*/
+package uniseg
diff --git a/vendor/github.com/rivo/uniseg/eastasianwidth.go b/vendor/github.com/rivo/uniseg/eastasianwidth.go
new file mode 100644
index 0000000..661934a
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/eastasianwidth.go
@@ -0,0 +1,2556 @@
+package uniseg
+
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+// eastAsianWidth are taken from
+// https://www.unicode.org/Public/14.0.0/ucd/EastAsianWidth.txt
+// and
+// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var eastAsianWidth = [][3]int{
+	{0x0000, 0x001F, prN}, // Cc [32] <control-0000>..<control-001F>
+ {0x0020, 0x0020, prNa}, // Zs SPACE
+ {0x0021, 0x0023, prNa}, // Po [3] EXCLAMATION MARK..NUMBER SIGN
+ {0x0024, 0x0024, prNa}, // Sc DOLLAR SIGN
+ {0x0025, 0x0027, prNa}, // Po [3] PERCENT SIGN..APOSTROPHE
+ {0x0028, 0x0028, prNa}, // Ps LEFT PARENTHESIS
+ {0x0029, 0x0029, prNa}, // Pe RIGHT PARENTHESIS
+ {0x002A, 0x002A, prNa}, // Po ASTERISK
+ {0x002B, 0x002B, prNa}, // Sm PLUS SIGN
+ {0x002C, 0x002C, prNa}, // Po COMMA
+ {0x002D, 0x002D, prNa}, // Pd HYPHEN-MINUS
+ {0x002E, 0x002F, prNa}, // Po [2] FULL STOP..SOLIDUS
+ {0x0030, 0x0039, prNa}, // Nd [10] DIGIT ZERO..DIGIT NINE
+ {0x003A, 0x003B, prNa}, // Po [2] COLON..SEMICOLON
+ {0x003C, 0x003E, prNa}, // Sm [3] LESS-THAN SIGN..GREATER-THAN SIGN
+ {0x003F, 0x0040, prNa}, // Po [2] QUESTION MARK..COMMERCIAL AT
+ {0x0041, 0x005A, prNa}, // Lu [26] LATIN CAPITAL LETTER A..LATIN CAPITAL LETTER Z
+ {0x005B, 0x005B, prNa}, // Ps LEFT SQUARE BRACKET
+ {0x005C, 0x005C, prNa}, // Po REVERSE SOLIDUS
+ {0x005D, 0x005D, prNa}, // Pe RIGHT SQUARE BRACKET
+ {0x005E, 0x005E, prNa}, // Sk CIRCUMFLEX ACCENT
+ {0x005F, 0x005F, prNa}, // Pc LOW LINE
+ {0x0060, 0x0060, prNa}, // Sk GRAVE ACCENT
+ {0x0061, 0x007A, prNa}, // Ll [26] LATIN SMALL LETTER A..LATIN SMALL LETTER Z
+ {0x007B, 0x007B, prNa}, // Ps LEFT CURLY BRACKET
+ {0x007C, 0x007C, prNa}, // Sm VERTICAL LINE
+ {0x007D, 0x007D, prNa}, // Pe RIGHT CURLY BRACKET
+ {0x007E, 0x007E, prNa}, // Sm TILDE
+	{0x007F, 0x007F, prN}, // Cc <control-007F>
+	{0x0080, 0x009F, prN}, // Cc [32] <control-0080>..<control-009F>
+ {0x00A0, 0x00A0, prN}, // Zs NO-BREAK SPACE
+ {0x00A1, 0x00A1, prA}, // Po INVERTED EXCLAMATION MARK
+ {0x00A2, 0x00A3, prNa}, // Sc [2] CENT SIGN..POUND SIGN
+ {0x00A4, 0x00A4, prA}, // Sc CURRENCY SIGN
+ {0x00A5, 0x00A5, prNa}, // Sc YEN SIGN
+ {0x00A6, 0x00A6, prNa}, // So BROKEN BAR
+ {0x00A7, 0x00A7, prA}, // Po SECTION SIGN
+ {0x00A8, 0x00A8, prA}, // Sk DIAERESIS
+ {0x00A9, 0x00A9, prN}, // So COPYRIGHT SIGN
+ {0x00AA, 0x00AA, prA}, // Lo FEMININE ORDINAL INDICATOR
+ {0x00AB, 0x00AB, prN}, // Pi LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00AC, 0x00AC, prNa}, // Sm NOT SIGN
+ {0x00AD, 0x00AD, prA}, // Cf SOFT HYPHEN
+ {0x00AE, 0x00AE, prA}, // So REGISTERED SIGN
+ {0x00AF, 0x00AF, prNa}, // Sk MACRON
+ {0x00B0, 0x00B0, prA}, // So DEGREE SIGN
+ {0x00B1, 0x00B1, prA}, // Sm PLUS-MINUS SIGN
+ {0x00B2, 0x00B3, prA}, // No [2] SUPERSCRIPT TWO..SUPERSCRIPT THREE
+ {0x00B4, 0x00B4, prA}, // Sk ACUTE ACCENT
+ {0x00B5, 0x00B5, prN}, // Ll MICRO SIGN
+ {0x00B6, 0x00B7, prA}, // Po [2] PILCROW SIGN..MIDDLE DOT
+ {0x00B8, 0x00B8, prA}, // Sk CEDILLA
+ {0x00B9, 0x00B9, prA}, // No SUPERSCRIPT ONE
+ {0x00BA, 0x00BA, prA}, // Lo MASCULINE ORDINAL INDICATOR
+ {0x00BB, 0x00BB, prN}, // Pf RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00BC, 0x00BE, prA}, // No [3] VULGAR FRACTION ONE QUARTER..VULGAR FRACTION THREE QUARTERS
+ {0x00BF, 0x00BF, prA}, // Po INVERTED QUESTION MARK
+ {0x00C0, 0x00C5, prN}, // Lu [6] LATIN CAPITAL LETTER A WITH GRAVE..LATIN CAPITAL LETTER A WITH RING ABOVE
+ {0x00C6, 0x00C6, prA}, // Lu LATIN CAPITAL LETTER AE
+ {0x00C7, 0x00CF, prN}, // Lu [9] LATIN CAPITAL LETTER C WITH CEDILLA..LATIN CAPITAL LETTER I WITH DIAERESIS
+ {0x00D0, 0x00D0, prA}, // Lu LATIN CAPITAL LETTER ETH
+ {0x00D1, 0x00D6, prN}, // Lu [6] LATIN CAPITAL LETTER N WITH TILDE..LATIN CAPITAL LETTER O WITH DIAERESIS
+ {0x00D7, 0x00D7, prA}, // Sm MULTIPLICATION SIGN
+ {0x00D8, 0x00D8, prA}, // Lu LATIN CAPITAL LETTER O WITH STROKE
+ {0x00D9, 0x00DD, prN}, // Lu [5] LATIN CAPITAL LETTER U WITH GRAVE..LATIN CAPITAL LETTER Y WITH ACUTE
+ {0x00DE, 0x00E1, prA}, // L& [4] LATIN CAPITAL LETTER THORN..LATIN SMALL LETTER A WITH ACUTE
+ {0x00E2, 0x00E5, prN}, // Ll [4] LATIN SMALL LETTER A WITH CIRCUMFLEX..LATIN SMALL LETTER A WITH RING ABOVE
+ {0x00E6, 0x00E6, prA}, // Ll LATIN SMALL LETTER AE
+ {0x00E7, 0x00E7, prN}, // Ll LATIN SMALL LETTER C WITH CEDILLA
+ {0x00E8, 0x00EA, prA}, // Ll [3] LATIN SMALL LETTER E WITH GRAVE..LATIN SMALL LETTER E WITH CIRCUMFLEX
+ {0x00EB, 0x00EB, prN}, // Ll LATIN SMALL LETTER E WITH DIAERESIS
+ {0x00EC, 0x00ED, prA}, // Ll [2] LATIN SMALL LETTER I WITH GRAVE..LATIN SMALL LETTER I WITH ACUTE
+ {0x00EE, 0x00EF, prN}, // Ll [2] LATIN SMALL LETTER I WITH CIRCUMFLEX..LATIN SMALL LETTER I WITH DIAERESIS
+ {0x00F0, 0x00F0, prA}, // Ll LATIN SMALL LETTER ETH
+ {0x00F1, 0x00F1, prN}, // Ll LATIN SMALL LETTER N WITH TILDE
+ {0x00F2, 0x00F3, prA}, // Ll [2] LATIN SMALL LETTER O WITH GRAVE..LATIN SMALL LETTER O WITH ACUTE
+ {0x00F4, 0x00F6, prN}, // Ll [3] LATIN SMALL LETTER O WITH CIRCUMFLEX..LATIN SMALL LETTER O WITH DIAERESIS
+ {0x00F7, 0x00F7, prA}, // Sm DIVISION SIGN
+ {0x00F8, 0x00FA, prA}, // Ll [3] LATIN SMALL LETTER O WITH STROKE..LATIN SMALL LETTER U WITH ACUTE
+ {0x00FB, 0x00FB, prN}, // Ll LATIN SMALL LETTER U WITH CIRCUMFLEX
+ {0x00FC, 0x00FC, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS
+ {0x00FD, 0x00FD, prN}, // Ll LATIN SMALL LETTER Y WITH ACUTE
+ {0x00FE, 0x00FE, prA}, // Ll LATIN SMALL LETTER THORN
+ {0x00FF, 0x00FF, prN}, // Ll LATIN SMALL LETTER Y WITH DIAERESIS
+ {0x0100, 0x0100, prN}, // Lu LATIN CAPITAL LETTER A WITH MACRON
+ {0x0101, 0x0101, prA}, // Ll LATIN SMALL LETTER A WITH MACRON
+ {0x0102, 0x0110, prN}, // L& [15] LATIN CAPITAL LETTER A WITH BREVE..LATIN CAPITAL LETTER D WITH STROKE
+ {0x0111, 0x0111, prA}, // Ll LATIN SMALL LETTER D WITH STROKE
+ {0x0112, 0x0112, prN}, // Lu LATIN CAPITAL LETTER E WITH MACRON
+ {0x0113, 0x0113, prA}, // Ll LATIN SMALL LETTER E WITH MACRON
+ {0x0114, 0x011A, prN}, // L& [7] LATIN CAPITAL LETTER E WITH BREVE..LATIN CAPITAL LETTER E WITH CARON
+ {0x011B, 0x011B, prA}, // Ll LATIN SMALL LETTER E WITH CARON
+ {0x011C, 0x0125, prN}, // L& [10] LATIN CAPITAL LETTER G WITH CIRCUMFLEX..LATIN SMALL LETTER H WITH CIRCUMFLEX
+ {0x0126, 0x0127, prA}, // L& [2] LATIN CAPITAL LETTER H WITH STROKE..LATIN SMALL LETTER H WITH STROKE
+ {0x0128, 0x012A, prN}, // L& [3] LATIN CAPITAL LETTER I WITH TILDE..LATIN CAPITAL LETTER I WITH MACRON
+ {0x012B, 0x012B, prA}, // Ll LATIN SMALL LETTER I WITH MACRON
+ {0x012C, 0x0130, prN}, // L& [5] LATIN CAPITAL LETTER I WITH BREVE..LATIN CAPITAL LETTER I WITH DOT ABOVE
+ {0x0131, 0x0133, prA}, // L& [3] LATIN SMALL LETTER DOTLESS I..LATIN SMALL LIGATURE IJ
+ {0x0134, 0x0137, prN}, // L& [4] LATIN CAPITAL LETTER J WITH CIRCUMFLEX..LATIN SMALL LETTER K WITH CEDILLA
+ {0x0138, 0x0138, prA}, // Ll LATIN SMALL LETTER KRA
+ {0x0139, 0x013E, prN}, // L& [6] LATIN CAPITAL LETTER L WITH ACUTE..LATIN SMALL LETTER L WITH CARON
+ {0x013F, 0x0142, prA}, // L& [4] LATIN CAPITAL LETTER L WITH MIDDLE DOT..LATIN SMALL LETTER L WITH STROKE
+ {0x0143, 0x0143, prN}, // Lu LATIN CAPITAL LETTER N WITH ACUTE
+ {0x0144, 0x0144, prA}, // Ll LATIN SMALL LETTER N WITH ACUTE
+ {0x0145, 0x0147, prN}, // L& [3] LATIN CAPITAL LETTER N WITH CEDILLA..LATIN CAPITAL LETTER N WITH CARON
+ {0x0148, 0x014B, prA}, // L& [4] LATIN SMALL LETTER N WITH CARON..LATIN SMALL LETTER ENG
+ {0x014C, 0x014C, prN}, // Lu LATIN CAPITAL LETTER O WITH MACRON
+ {0x014D, 0x014D, prA}, // Ll LATIN SMALL LETTER O WITH MACRON
+ {0x014E, 0x0151, prN}, // L& [4] LATIN CAPITAL LETTER O WITH BREVE..LATIN SMALL LETTER O WITH DOUBLE ACUTE
+ {0x0152, 0x0153, prA}, // L& [2] LATIN CAPITAL LIGATURE OE..LATIN SMALL LIGATURE OE
+ {0x0154, 0x0165, prN}, // L& [18] LATIN CAPITAL LETTER R WITH ACUTE..LATIN SMALL LETTER T WITH CARON
+ {0x0166, 0x0167, prA}, // L& [2] LATIN CAPITAL LETTER T WITH STROKE..LATIN SMALL LETTER T WITH STROKE
+ {0x0168, 0x016A, prN}, // L& [3] LATIN CAPITAL LETTER U WITH TILDE..LATIN CAPITAL LETTER U WITH MACRON
+ {0x016B, 0x016B, prA}, // Ll LATIN SMALL LETTER U WITH MACRON
+ {0x016C, 0x017F, prN}, // L& [20] LATIN CAPITAL LETTER U WITH BREVE..LATIN SMALL LETTER LONG S
+ {0x0180, 0x01BA, prN}, // L& [59] LATIN SMALL LETTER B WITH STROKE..LATIN SMALL LETTER EZH WITH TAIL
+ {0x01BB, 0x01BB, prN}, // Lo LATIN LETTER TWO WITH STROKE
+ {0x01BC, 0x01BF, prN}, // L& [4] LATIN CAPITAL LETTER TONE FIVE..LATIN LETTER WYNN
+ {0x01C0, 0x01C3, prN}, // Lo [4] LATIN LETTER DENTAL CLICK..LATIN LETTER RETROFLEX CLICK
+ {0x01C4, 0x01CD, prN}, // L& [10] LATIN CAPITAL LETTER DZ WITH CARON..LATIN CAPITAL LETTER A WITH CARON
+ {0x01CE, 0x01CE, prA}, // Ll LATIN SMALL LETTER A WITH CARON
+ {0x01CF, 0x01CF, prN}, // Lu LATIN CAPITAL LETTER I WITH CARON
+ {0x01D0, 0x01D0, prA}, // Ll LATIN SMALL LETTER I WITH CARON
+ {0x01D1, 0x01D1, prN}, // Lu LATIN CAPITAL LETTER O WITH CARON
+ {0x01D2, 0x01D2, prA}, // Ll LATIN SMALL LETTER O WITH CARON
+ {0x01D3, 0x01D3, prN}, // Lu LATIN CAPITAL LETTER U WITH CARON
+ {0x01D4, 0x01D4, prA}, // Ll LATIN SMALL LETTER U WITH CARON
+ {0x01D5, 0x01D5, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON
+ {0x01D6, 0x01D6, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND MACRON
+ {0x01D7, 0x01D7, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE
+ {0x01D8, 0x01D8, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE
+ {0x01D9, 0x01D9, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON
+ {0x01DA, 0x01DA, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND CARON
+ {0x01DB, 0x01DB, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE
+ {0x01DC, 0x01DC, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE
+ {0x01DD, 0x024F, prN}, // L& [115] LATIN SMALL LETTER TURNED E..LATIN SMALL LETTER Y WITH STROKE
+ {0x0250, 0x0250, prN}, // Ll LATIN SMALL LETTER TURNED A
+ {0x0251, 0x0251, prA}, // Ll LATIN SMALL LETTER ALPHA
+ {0x0252, 0x0260, prN}, // Ll [15] LATIN SMALL LETTER TURNED ALPHA..LATIN SMALL LETTER G WITH HOOK
+ {0x0261, 0x0261, prA}, // Ll LATIN SMALL LETTER SCRIPT G
+ {0x0262, 0x0293, prN}, // Ll [50] LATIN LETTER SMALL CAPITAL G..LATIN SMALL LETTER EZH WITH CURL
+ {0x0294, 0x0294, prN}, // Lo LATIN LETTER GLOTTAL STOP
+ {0x0295, 0x02AF, prN}, // Ll [27] LATIN LETTER PHARYNGEAL VOICED FRICATIVE..LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL
+ {0x02B0, 0x02C1, prN}, // Lm [18] MODIFIER LETTER SMALL H..MODIFIER LETTER REVERSED GLOTTAL STOP
+ {0x02C2, 0x02C3, prN}, // Sk [2] MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER RIGHT ARROWHEAD
+ {0x02C4, 0x02C4, prA}, // Sk MODIFIER LETTER UP ARROWHEAD
+ {0x02C5, 0x02C5, prN}, // Sk MODIFIER LETTER DOWN ARROWHEAD
+ {0x02C6, 0x02C6, prN}, // Lm MODIFIER LETTER CIRCUMFLEX ACCENT
+ {0x02C7, 0x02C7, prA}, // Lm CARON
+ {0x02C8, 0x02C8, prN}, // Lm MODIFIER LETTER VERTICAL LINE
+ {0x02C9, 0x02CB, prA}, // Lm [3] MODIFIER LETTER MACRON..MODIFIER LETTER GRAVE ACCENT
+ {0x02CC, 0x02CC, prN}, // Lm MODIFIER LETTER LOW VERTICAL LINE
+ {0x02CD, 0x02CD, prA}, // Lm MODIFIER LETTER LOW MACRON
+ {0x02CE, 0x02CF, prN}, // Lm [2] MODIFIER LETTER LOW GRAVE ACCENT..MODIFIER LETTER LOW ACUTE ACCENT
+ {0x02D0, 0x02D0, prA}, // Lm MODIFIER LETTER TRIANGULAR COLON
+ {0x02D1, 0x02D1, prN}, // Lm MODIFIER LETTER HALF TRIANGULAR COLON
+ {0x02D2, 0x02D7, prN}, // Sk [6] MODIFIER LETTER CENTRED RIGHT HALF RING..MODIFIER LETTER MINUS SIGN
+ {0x02D8, 0x02DB, prA}, // Sk [4] BREVE..OGONEK
+ {0x02DC, 0x02DC, prN}, // Sk SMALL TILDE
+ {0x02DD, 0x02DD, prA}, // Sk DOUBLE ACUTE ACCENT
+ {0x02DE, 0x02DE, prN}, // Sk MODIFIER LETTER RHOTIC HOOK
+ {0x02DF, 0x02DF, prA}, // Sk MODIFIER LETTER CROSS ACCENT
+ {0x02E0, 0x02E4, prN}, // Lm [5] MODIFIER LETTER SMALL GAMMA..MODIFIER LETTER SMALL REVERSED GLOTTAL STOP
+ {0x02E5, 0x02EB, prN}, // Sk [7] MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER YANG DEPARTING TONE MARK
+ {0x02EC, 0x02EC, prN}, // Lm MODIFIER LETTER VOICING
+ {0x02ED, 0x02ED, prN}, // Sk MODIFIER LETTER UNASPIRATED
+ {0x02EE, 0x02EE, prN}, // Lm MODIFIER LETTER DOUBLE APOSTROPHE
+ {0x02EF, 0x02FF, prN}, // Sk [17] MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW
+ {0x0300, 0x036F, prA}, // Mn [112] COMBINING GRAVE ACCENT..COMBINING LATIN SMALL LETTER X
+ {0x0370, 0x0373, prN}, // L& [4] GREEK CAPITAL LETTER HETA..GREEK SMALL LETTER ARCHAIC SAMPI
+ {0x0374, 0x0374, prN}, // Lm GREEK NUMERAL SIGN
+ {0x0375, 0x0375, prN}, // Sk GREEK LOWER NUMERAL SIGN
+ {0x0376, 0x0377, prN}, // L& [2] GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA..GREEK SMALL LETTER PAMPHYLIAN DIGAMMA
+ {0x037A, 0x037A, prN}, // Lm GREEK YPOGEGRAMMENI
+ {0x037B, 0x037D, prN}, // Ll [3] GREEK SMALL REVERSED LUNATE SIGMA SYMBOL..GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x037E, 0x037E, prN}, // Po GREEK QUESTION MARK
+ {0x037F, 0x037F, prN}, // Lu GREEK CAPITAL LETTER YOT
+ {0x0384, 0x0385, prN}, // Sk [2] GREEK TONOS..GREEK DIALYTIKA TONOS
+ {0x0386, 0x0386, prN}, // Lu GREEK CAPITAL LETTER ALPHA WITH TONOS
+ {0x0387, 0x0387, prN}, // Po GREEK ANO TELEIA
+ {0x0388, 0x038A, prN}, // Lu [3] GREEK CAPITAL LETTER EPSILON WITH TONOS..GREEK CAPITAL LETTER IOTA WITH TONOS
+ {0x038C, 0x038C, prN}, // Lu GREEK CAPITAL LETTER OMICRON WITH TONOS
+ {0x038E, 0x0390, prN}, // L& [3] GREEK CAPITAL LETTER UPSILON WITH TONOS..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
+ {0x0391, 0x03A1, prA}, // Lu [17] GREEK CAPITAL LETTER ALPHA..GREEK CAPITAL LETTER RHO
+ {0x03A3, 0x03A9, prA}, // Lu [7] GREEK CAPITAL LETTER SIGMA..GREEK CAPITAL LETTER OMEGA
+ {0x03AA, 0x03B0, prN}, // L& [7] GREEK CAPITAL LETTER IOTA WITH DIALYTIKA..GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
+ {0x03B1, 0x03C1, prA}, // Ll [17] GREEK SMALL LETTER ALPHA..GREEK SMALL LETTER RHO
+ {0x03C2, 0x03C2, prN}, // Ll GREEK SMALL LETTER FINAL SIGMA
+ {0x03C3, 0x03C9, prA}, // Ll [7] GREEK SMALL LETTER SIGMA..GREEK SMALL LETTER OMEGA
+ {0x03CA, 0x03F5, prN}, // L& [44] GREEK SMALL LETTER IOTA WITH DIALYTIKA..GREEK LUNATE EPSILON SYMBOL
+ {0x03F6, 0x03F6, prN}, // Sm GREEK REVERSED LUNATE EPSILON SYMBOL
+ {0x03F7, 0x03FF, prN}, // L& [9] GREEK CAPITAL LETTER SHO..GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x0400, 0x0400, prN}, // Lu CYRILLIC CAPITAL LETTER IE WITH GRAVE
+ {0x0401, 0x0401, prA}, // Lu CYRILLIC CAPITAL LETTER IO
+ {0x0402, 0x040F, prN}, // Lu [14] CYRILLIC CAPITAL LETTER DJE..CYRILLIC CAPITAL LETTER DZHE
+ {0x0410, 0x044F, prA}, // L& [64] CYRILLIC CAPITAL LETTER A..CYRILLIC SMALL LETTER YA
+ {0x0450, 0x0450, prN}, // Ll CYRILLIC SMALL LETTER IE WITH GRAVE
+ {0x0451, 0x0451, prA}, // Ll CYRILLIC SMALL LETTER IO
+ {0x0452, 0x0481, prN}, // L& [48] CYRILLIC SMALL LETTER DJE..CYRILLIC SMALL LETTER KOPPA
+ {0x0482, 0x0482, prN}, // So CYRILLIC THOUSANDS SIGN
+ {0x0483, 0x0487, prN}, // Mn [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE
+ {0x0488, 0x0489, prN}, // Me [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN
+ {0x048A, 0x04FF, prN}, // L& [118] CYRILLIC CAPITAL LETTER SHORT I WITH TAIL..CYRILLIC SMALL LETTER HA WITH STROKE
+ {0x0500, 0x052F, prN}, // L& [48] CYRILLIC CAPITAL LETTER KOMI DE..CYRILLIC SMALL LETTER EL WITH DESCENDER
+ {0x0531, 0x0556, prN}, // Lu [38] ARMENIAN CAPITAL LETTER AYB..ARMENIAN CAPITAL LETTER FEH
+ {0x0559, 0x0559, prN}, // Lm ARMENIAN MODIFIER LETTER LEFT HALF RING
+ {0x055A, 0x055F, prN}, // Po [6] ARMENIAN APOSTROPHE..ARMENIAN ABBREVIATION MARK
+ {0x0560, 0x0588, prN}, // Ll [41] ARMENIAN SMALL LETTER TURNED AYB..ARMENIAN SMALL LETTER YI WITH STROKE
+ {0x0589, 0x0589, prN}, // Po ARMENIAN FULL STOP
+ {0x058A, 0x058A, prN}, // Pd ARMENIAN HYPHEN
+ {0x058D, 0x058E, prN}, // So [2] RIGHT-FACING ARMENIAN ETERNITY SIGN..LEFT-FACING ARMENIAN ETERNITY SIGN
+ {0x058F, 0x058F, prN}, // Sc ARMENIAN DRAM SIGN
+ {0x0591, 0x05BD, prN}, // Mn [45] HEBREW ACCENT ETNAHTA..HEBREW POINT METEG
+ {0x05BE, 0x05BE, prN}, // Pd HEBREW PUNCTUATION MAQAF
+ {0x05BF, 0x05BF, prN}, // Mn HEBREW POINT RAFE
+ {0x05C0, 0x05C0, prN}, // Po HEBREW PUNCTUATION PASEQ
+ {0x05C1, 0x05C2, prN}, // Mn [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT
+ {0x05C3, 0x05C3, prN}, // Po HEBREW PUNCTUATION SOF PASUQ
+ {0x05C4, 0x05C5, prN}, // Mn [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT
+ {0x05C6, 0x05C6, prN}, // Po HEBREW PUNCTUATION NUN HAFUKHA
+ {0x05C7, 0x05C7, prN}, // Mn HEBREW POINT QAMATS QATAN
+ {0x05D0, 0x05EA, prN}, // Lo [27] HEBREW LETTER ALEF..HEBREW LETTER TAV
+ {0x05EF, 0x05F2, prN}, // Lo [4] HEBREW YOD TRIANGLE..HEBREW LIGATURE YIDDISH DOUBLE YOD
+ {0x05F3, 0x05F4, prN}, // Po [2] HEBREW PUNCTUATION GERESH..HEBREW PUNCTUATION GERSHAYIM
+ {0x0600, 0x0605, prN}, // Cf [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+ {0x0606, 0x0608, prN}, // Sm [3] ARABIC-INDIC CUBE ROOT..ARABIC RAY
+ {0x0609, 0x060A, prN}, // Po [2] ARABIC-INDIC PER MILLE SIGN..ARABIC-INDIC PER TEN THOUSAND SIGN
+ {0x060B, 0x060B, prN}, // Sc AFGHANI SIGN
+ {0x060C, 0x060D, prN}, // Po [2] ARABIC COMMA..ARABIC DATE SEPARATOR
+ {0x060E, 0x060F, prN}, // So [2] ARABIC POETIC VERSE SIGN..ARABIC SIGN MISRA
+ {0x0610, 0x061A, prN}, // Mn [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA
+ {0x061B, 0x061B, prN}, // Po ARABIC SEMICOLON
+ {0x061C, 0x061C, prN}, // Cf ARABIC LETTER MARK
+ {0x061D, 0x061F, prN}, // Po [3] ARABIC END OF TEXT MARK..ARABIC QUESTION MARK
+ {0x0620, 0x063F, prN}, // Lo [32] ARABIC LETTER KASHMIRI YEH..ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE
+ {0x0640, 0x0640, prN}, // Lm ARABIC TATWEEL
+ {0x0641, 0x064A, prN}, // Lo [10] ARABIC LETTER FEH..ARABIC LETTER YEH
+ {0x064B, 0x065F, prN}, // Mn [21] ARABIC FATHATAN..ARABIC WAVY HAMZA BELOW
+ {0x0660, 0x0669, prN}, // Nd [10] ARABIC-INDIC DIGIT ZERO..ARABIC-INDIC DIGIT NINE
+ {0x066A, 0x066D, prN}, // Po [4] ARABIC PERCENT SIGN..ARABIC FIVE POINTED STAR
+ {0x066E, 0x066F, prN}, // Lo [2] ARABIC LETTER DOTLESS BEH..ARABIC LETTER DOTLESS QAF
+ {0x0670, 0x0670, prN}, // Mn ARABIC LETTER SUPERSCRIPT ALEF
+ {0x0671, 0x06D3, prN}, // Lo [99] ARABIC LETTER ALEF WASLA..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE
+ {0x06D4, 0x06D4, prN}, // Po ARABIC FULL STOP
+ {0x06D5, 0x06D5, prN}, // Lo ARABIC LETTER AE
+ {0x06D6, 0x06DC, prN}, // Mn [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN
+ {0x06DD, 0x06DD, prN}, // Cf ARABIC END OF AYAH
+ {0x06DE, 0x06DE, prN}, // So ARABIC START OF RUB EL HIZB
+ {0x06DF, 0x06E4, prN}, // Mn [6] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH MADDA
+ {0x06E5, 0x06E6, prN}, // Lm [2] ARABIC SMALL WAW..ARABIC SMALL YEH
+ {0x06E7, 0x06E8, prN}, // Mn [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON
+ {0x06E9, 0x06E9, prN}, // So ARABIC PLACE OF SAJDAH
+ {0x06EA, 0x06ED, prN}, // Mn [4] ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+ {0x06EE, 0x06EF, prN}, // Lo [2] ARABIC LETTER DAL WITH INVERTED V..ARABIC LETTER REH WITH INVERTED V
+ {0x06F0, 0x06F9, prN}, // Nd [10] EXTENDED ARABIC-INDIC DIGIT ZERO..EXTENDED ARABIC-INDIC DIGIT NINE
+ {0x06FA, 0x06FC, prN}, // Lo [3] ARABIC LETTER SHEEN WITH DOT BELOW..ARABIC LETTER GHAIN WITH DOT BELOW
+ {0x06FD, 0x06FE, prN}, // So [2] ARABIC SIGN SINDHI AMPERSAND..ARABIC SIGN SINDHI POSTPOSITION MEN
+ {0x06FF, 0x06FF, prN}, // Lo ARABIC LETTER HEH WITH INVERTED V
+ {0x0700, 0x070D, prN}, // Po [14] SYRIAC END OF PARAGRAPH..SYRIAC HARKLEAN ASTERISCUS
+ {0x070F, 0x070F, prN}, // Cf SYRIAC ABBREVIATION MARK
+ {0x0710, 0x0710, prN}, // Lo SYRIAC LETTER ALAPH
+ {0x0711, 0x0711, prN}, // Mn SYRIAC LETTER SUPERSCRIPT ALAPH
+ {0x0712, 0x072F, prN}, // Lo [30] SYRIAC LETTER BETH..SYRIAC LETTER PERSIAN DHALATH
+ {0x0730, 0x074A, prN}, // Mn [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+ {0x074D, 0x074F, prN}, // Lo [3] SYRIAC LETTER SOGDIAN ZHAIN..SYRIAC LETTER SOGDIAN FE
+ {0x0750, 0x077F, prN}, // Lo [48] ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW..ARABIC LETTER KAF WITH TWO DOTS ABOVE
+ {0x0780, 0x07A5, prN}, // Lo [38] THAANA LETTER HAA..THAANA LETTER WAAVU
+ {0x07A6, 0x07B0, prN}, // Mn [11] THAANA ABAFILI..THAANA SUKUN
+ {0x07B1, 0x07B1, prN}, // Lo THAANA LETTER NAA
+ {0x07C0, 0x07C9, prN}, // Nd [10] NKO DIGIT ZERO..NKO DIGIT NINE
+ {0x07CA, 0x07EA, prN}, // Lo [33] NKO LETTER A..NKO LETTER JONA RA
+ {0x07EB, 0x07F3, prN}, // Mn [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE
+ {0x07F4, 0x07F5, prN}, // Lm [2] NKO HIGH TONE APOSTROPHE..NKO LOW TONE APOSTROPHE
+ {0x07F6, 0x07F6, prN}, // So NKO SYMBOL OO DENNEN
+ {0x07F7, 0x07F9, prN}, // Po [3] NKO SYMBOL GBAKURUNEN..NKO EXCLAMATION MARK
+ {0x07FA, 0x07FA, prN}, // Lm NKO LAJANYALAN
+ {0x07FD, 0x07FD, prN}, // Mn NKO DANTAYALAN
+ {0x07FE, 0x07FF, prN}, // Sc [2] NKO DOROME SIGN..NKO TAMAN SIGN
+ {0x0800, 0x0815, prN}, // Lo [22] SAMARITAN LETTER ALAF..SAMARITAN LETTER TAAF
+ {0x0816, 0x0819, prN}, // Mn [4] SAMARITAN MARK IN..SAMARITAN MARK DAGESH
+ {0x081A, 0x081A, prN}, // Lm SAMARITAN MODIFIER LETTER EPENTHETIC YUT
+ {0x081B, 0x0823, prN}, // Mn [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A
+ {0x0824, 0x0824, prN}, // Lm SAMARITAN MODIFIER LETTER SHORT A
+ {0x0825, 0x0827, prN}, // Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U
+ {0x0828, 0x0828, prN}, // Lm SAMARITAN MODIFIER LETTER I
+ {0x0829, 0x082D, prN}, // Mn [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA
+ {0x0830, 0x083E, prN}, // Po [15] SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU
+ {0x0840, 0x0858, prN}, // Lo [25] MANDAIC LETTER HALQA..MANDAIC LETTER AIN
+ {0x0859, 0x085B, prN}, // Mn [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK
+ {0x085E, 0x085E, prN}, // Po MANDAIC PUNCTUATION
+ {0x0860, 0x086A, prN}, // Lo [11] SYRIAC LETTER MALAYALAM NGA..SYRIAC LETTER MALAYALAM SSA
+ {0x0870, 0x0887, prN}, // Lo [24] ARABIC LETTER ALEF WITH ATTACHED FATHA..ARABIC BASELINE ROUND DOT
+ {0x0888, 0x0888, prN}, // Sk ARABIC RAISED ROUND DOT
+ {0x0889, 0x088E, prN}, // Lo [6] ARABIC LETTER NOON WITH INVERTED SMALL V..ARABIC VERTICAL TAIL
+ {0x0890, 0x0891, prN}, // Cf [2] ARABIC POUND MARK ABOVE..ARABIC PIASTRE MARK ABOVE
+ {0x0898, 0x089F, prN}, // Mn [8] ARABIC SMALL HIGH WORD AL-JUZ..ARABIC HALF MADDA OVER MADDA
+ {0x08A0, 0x08C8, prN}, // Lo [41] ARABIC LETTER BEH WITH SMALL V BELOW..ARABIC LETTER GRAF
+ {0x08C9, 0x08C9, prN}, // Lm ARABIC SMALL FARSI YEH
+ {0x08CA, 0x08E1, prN}, // Mn [24] ARABIC SMALL HIGH FARSI YEH..ARABIC SMALL HIGH SIGN SAFHA
+ {0x08E2, 0x08E2, prN}, // Cf ARABIC DISPUTED END OF AYAH
+ {0x08E3, 0x08FF, prN}, // Mn [29] ARABIC TURNED DAMMA BELOW..ARABIC MARK SIDEWAYS NOON GHUNNA
+ {0x0900, 0x0902, prN}, // Mn [3] DEVANAGARI SIGN INVERTED CANDRABINDU..DEVANAGARI SIGN ANUSVARA
+ {0x0903, 0x0903, prN}, // Mc DEVANAGARI SIGN VISARGA
+ {0x0904, 0x0939, prN}, // Lo [54] DEVANAGARI LETTER SHORT A..DEVANAGARI LETTER HA
+ {0x093A, 0x093A, prN}, // Mn DEVANAGARI VOWEL SIGN OE
+ {0x093B, 0x093B, prN}, // Mc DEVANAGARI VOWEL SIGN OOE
+ {0x093C, 0x093C, prN}, // Mn DEVANAGARI SIGN NUKTA
+ {0x093D, 0x093D, prN}, // Lo DEVANAGARI SIGN AVAGRAHA
+ {0x093E, 0x0940, prN}, // Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II
+ {0x0941, 0x0948, prN}, // Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI
+ {0x0949, 0x094C, prN}, // Mc [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU
+ {0x094D, 0x094D, prN}, // Mn DEVANAGARI SIGN VIRAMA
+ {0x094E, 0x094F, prN}, // Mc [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW
+ {0x0950, 0x0950, prN}, // Lo DEVANAGARI OM
+ {0x0951, 0x0957, prN}, // Mn [7] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI VOWEL SIGN UUE
+ {0x0958, 0x0961, prN}, // Lo [10] DEVANAGARI LETTER QA..DEVANAGARI LETTER VOCALIC LL
+ {0x0962, 0x0963, prN}, // Mn [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL
+ {0x0964, 0x0965, prN}, // Po [2] DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA
+ {0x0966, 0x096F, prN}, // Nd [10] DEVANAGARI DIGIT ZERO..DEVANAGARI DIGIT NINE
+ {0x0970, 0x0970, prN}, // Po DEVANAGARI ABBREVIATION SIGN
+ {0x0971, 0x0971, prN}, // Lm DEVANAGARI SIGN HIGH SPACING DOT
+ {0x0972, 0x097F, prN}, // Lo [14] DEVANAGARI LETTER CANDRA A..DEVANAGARI LETTER BBA
+ {0x0980, 0x0980, prN}, // Lo BENGALI ANJI
+ {0x0981, 0x0981, prN}, // Mn BENGALI SIGN CANDRABINDU
+ {0x0982, 0x0983, prN}, // Mc [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA
+ {0x0985, 0x098C, prN}, // Lo [8] BENGALI LETTER A..BENGALI LETTER VOCALIC L
+ {0x098F, 0x0990, prN}, // Lo [2] BENGALI LETTER E..BENGALI LETTER AI
+ {0x0993, 0x09A8, prN}, // Lo [22] BENGALI LETTER O..BENGALI LETTER NA
+ {0x09AA, 0x09B0, prN}, // Lo [7] BENGALI LETTER PA..BENGALI LETTER RA
+ {0x09B2, 0x09B2, prN}, // Lo BENGALI LETTER LA
+ {0x09B6, 0x09B9, prN}, // Lo [4] BENGALI LETTER SHA..BENGALI LETTER HA
+ {0x09BC, 0x09BC, prN}, // Mn BENGALI SIGN NUKTA
+ {0x09BD, 0x09BD, prN}, // Lo BENGALI SIGN AVAGRAHA
+ {0x09BE, 0x09C0, prN}, // Mc [3] BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN II
+ {0x09C1, 0x09C4, prN}, // Mn [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR
+ {0x09C7, 0x09C8, prN}, // Mc [2] BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+ {0x09CB, 0x09CC, prN}, // Mc [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU
+ {0x09CD, 0x09CD, prN}, // Mn BENGALI SIGN VIRAMA
+ {0x09CE, 0x09CE, prN}, // Lo BENGALI LETTER KHANDA TA
+ {0x09D7, 0x09D7, prN}, // Mc BENGALI AU LENGTH MARK
+ {0x09DC, 0x09DD, prN}, // Lo [2] BENGALI LETTER RRA..BENGALI LETTER RHA
+ {0x09DF, 0x09E1, prN}, // Lo [3] BENGALI LETTER YYA..BENGALI LETTER VOCALIC LL
+ {0x09E2, 0x09E3, prN}, // Mn [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL
+ {0x09E6, 0x09EF, prN}, // Nd [10] BENGALI DIGIT ZERO..BENGALI DIGIT NINE
+ {0x09F0, 0x09F1, prN}, // Lo [2] BENGALI LETTER RA WITH MIDDLE DIAGONAL..BENGALI LETTER RA WITH LOWER DIAGONAL
+ {0x09F2, 0x09F3, prN}, // Sc [2] BENGALI RUPEE MARK..BENGALI RUPEE SIGN
+ {0x09F4, 0x09F9, prN}, // No [6] BENGALI CURRENCY NUMERATOR ONE..BENGALI CURRENCY DENOMINATOR SIXTEEN
+ {0x09FA, 0x09FA, prN}, // So BENGALI ISSHAR
+ {0x09FB, 0x09FB, prN}, // Sc BENGALI GANDA MARK
+ {0x09FC, 0x09FC, prN}, // Lo BENGALI LETTER VEDIC ANUSVARA
+ {0x09FD, 0x09FD, prN}, // Po BENGALI ABBREVIATION SIGN
+ {0x09FE, 0x09FE, prN}, // Mn BENGALI SANDHI MARK
+ {0x0A01, 0x0A02, prN}, // Mn [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI
+ {0x0A03, 0x0A03, prN}, // Mc GURMUKHI SIGN VISARGA
+ {0x0A05, 0x0A0A, prN}, // Lo [6] GURMUKHI LETTER A..GURMUKHI LETTER UU
+ {0x0A0F, 0x0A10, prN}, // Lo [2] GURMUKHI LETTER EE..GURMUKHI LETTER AI
+ {0x0A13, 0x0A28, prN}, // Lo [22] GURMUKHI LETTER OO..GURMUKHI LETTER NA
+ {0x0A2A, 0x0A30, prN}, // Lo [7] GURMUKHI LETTER PA..GURMUKHI LETTER RA
+ {0x0A32, 0x0A33, prN}, // Lo [2] GURMUKHI LETTER LA..GURMUKHI LETTER LLA
+ {0x0A35, 0x0A36, prN}, // Lo [2] GURMUKHI LETTER VA..GURMUKHI LETTER SHA
+ {0x0A38, 0x0A39, prN}, // Lo [2] GURMUKHI LETTER SA..GURMUKHI LETTER HA
+ {0x0A3C, 0x0A3C, prN}, // Mn GURMUKHI SIGN NUKTA
+ {0x0A3E, 0x0A40, prN}, // Mc [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II
+ {0x0A41, 0x0A42, prN}, // Mn [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU
+ {0x0A47, 0x0A48, prN}, // Mn [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+ {0x0A4B, 0x0A4D, prN}, // Mn [3] GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+ {0x0A51, 0x0A51, prN}, // Mn GURMUKHI SIGN UDAAT
+ {0x0A59, 0x0A5C, prN}, // Lo [4] GURMUKHI LETTER KHHA..GURMUKHI LETTER RRA
+ {0x0A5E, 0x0A5E, prN}, // Lo GURMUKHI LETTER FA
+ {0x0A66, 0x0A6F, prN}, // Nd [10] GURMUKHI DIGIT ZERO..GURMUKHI DIGIT NINE
+ {0x0A70, 0x0A71, prN}, // Mn [2] GURMUKHI TIPPI..GURMUKHI ADDAK
+ {0x0A72, 0x0A74, prN}, // Lo [3] GURMUKHI IRI..GURMUKHI EK ONKAR
+ {0x0A75, 0x0A75, prN}, // Mn GURMUKHI SIGN YAKASH
+ {0x0A76, 0x0A76, prN}, // Po GURMUKHI ABBREVIATION SIGN
+ {0x0A81, 0x0A82, prN}, // Mn [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA
+ {0x0A83, 0x0A83, prN}, // Mc GUJARATI SIGN VISARGA
+ {0x0A85, 0x0A8D, prN}, // Lo [9] GUJARATI LETTER A..GUJARATI VOWEL CANDRA E
+ {0x0A8F, 0x0A91, prN}, // Lo [3] GUJARATI LETTER E..GUJARATI VOWEL CANDRA O
+ {0x0A93, 0x0AA8, prN}, // Lo [22] GUJARATI LETTER O..GUJARATI LETTER NA
+ {0x0AAA, 0x0AB0, prN}, // Lo [7] GUJARATI LETTER PA..GUJARATI LETTER RA
+ {0x0AB2, 0x0AB3, prN}, // Lo [2] GUJARATI LETTER LA..GUJARATI LETTER LLA
+ {0x0AB5, 0x0AB9, prN}, // Lo [5] GUJARATI LETTER VA..GUJARATI LETTER HA
+ {0x0ABC, 0x0ABC, prN}, // Mn GUJARATI SIGN NUKTA
+ {0x0ABD, 0x0ABD, prN}, // Lo GUJARATI SIGN AVAGRAHA
+ {0x0ABE, 0x0AC0, prN}, // Mc [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II
+ {0x0AC1, 0x0AC5, prN}, // Mn [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E
+ {0x0AC7, 0x0AC8, prN}, // Mn [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI
+ {0x0AC9, 0x0AC9, prN}, // Mc GUJARATI VOWEL SIGN CANDRA O
+ {0x0ACB, 0x0ACC, prN}, // Mc [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU
+ {0x0ACD, 0x0ACD, prN}, // Mn GUJARATI SIGN VIRAMA
+ {0x0AD0, 0x0AD0, prN}, // Lo GUJARATI OM
+ {0x0AE0, 0x0AE1, prN}, // Lo [2] GUJARATI LETTER VOCALIC RR..GUJARATI LETTER VOCALIC LL
+ {0x0AE2, 0x0AE3, prN}, // Mn [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL
+ {0x0AE6, 0x0AEF, prN}, // Nd [10] GUJARATI DIGIT ZERO..GUJARATI DIGIT NINE
+ {0x0AF0, 0x0AF0, prN}, // Po GUJARATI ABBREVIATION SIGN
+ {0x0AF1, 0x0AF1, prN}, // Sc GUJARATI RUPEE SIGN
+ {0x0AF9, 0x0AF9, prN}, // Lo GUJARATI LETTER ZHA
+ {0x0AFA, 0x0AFF, prN}, // Mn [6] GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
+ {0x0B01, 0x0B01, prN}, // Mn ORIYA SIGN CANDRABINDU
+ {0x0B02, 0x0B03, prN}, // Mc [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA
+ {0x0B05, 0x0B0C, prN}, // Lo [8] ORIYA LETTER A..ORIYA LETTER VOCALIC L
+ {0x0B0F, 0x0B10, prN}, // Lo [2] ORIYA LETTER E..ORIYA LETTER AI
+ {0x0B13, 0x0B28, prN}, // Lo [22] ORIYA LETTER O..ORIYA LETTER NA
+ {0x0B2A, 0x0B30, prN}, // Lo [7] ORIYA LETTER PA..ORIYA LETTER RA
+ {0x0B32, 0x0B33, prN}, // Lo [2] ORIYA LETTER LA..ORIYA LETTER LLA
+ {0x0B35, 0x0B39, prN}, // Lo [5] ORIYA LETTER VA..ORIYA LETTER HA
+ {0x0B3C, 0x0B3C, prN}, // Mn ORIYA SIGN NUKTA
+ {0x0B3D, 0x0B3D, prN}, // Lo ORIYA SIGN AVAGRAHA
+ {0x0B3E, 0x0B3E, prN}, // Mc ORIYA VOWEL SIGN AA
+ {0x0B3F, 0x0B3F, prN}, // Mn ORIYA VOWEL SIGN I
+ {0x0B40, 0x0B40, prN}, // Mc ORIYA VOWEL SIGN II
+ {0x0B41, 0x0B44, prN}, // Mn [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR
+ {0x0B47, 0x0B48, prN}, // Mc [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI
+ {0x0B4B, 0x0B4C, prN}, // Mc [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU
+ {0x0B4D, 0x0B4D, prN}, // Mn ORIYA SIGN VIRAMA
+ {0x0B55, 0x0B56, prN}, // Mn [2] ORIYA SIGN OVERLINE..ORIYA AI LENGTH MARK
+ {0x0B57, 0x0B57, prN}, // Mc ORIYA AU LENGTH MARK
+ {0x0B5C, 0x0B5D, prN}, // Lo [2] ORIYA LETTER RRA..ORIYA LETTER RHA
+ {0x0B5F, 0x0B61, prN}, // Lo [3] ORIYA LETTER YYA..ORIYA LETTER VOCALIC LL
+ {0x0B62, 0x0B63, prN}, // Mn [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL
+ {0x0B66, 0x0B6F, prN}, // Nd [10] ORIYA DIGIT ZERO..ORIYA DIGIT NINE
+ {0x0B70, 0x0B70, prN}, // So ORIYA ISSHAR
+ {0x0B71, 0x0B71, prN}, // Lo ORIYA LETTER WA
+ {0x0B72, 0x0B77, prN}, // No [6] ORIYA FRACTION ONE QUARTER..ORIYA FRACTION THREE SIXTEENTHS
+ {0x0B82, 0x0B82, prN}, // Mn TAMIL SIGN ANUSVARA
+ {0x0B83, 0x0B83, prN}, // Lo TAMIL SIGN VISARGA
+ {0x0B85, 0x0B8A, prN}, // Lo [6] TAMIL LETTER A..TAMIL LETTER UU
+ {0x0B8E, 0x0B90, prN}, // Lo [3] TAMIL LETTER E..TAMIL LETTER AI
+ {0x0B92, 0x0B95, prN}, // Lo [4] TAMIL LETTER O..TAMIL LETTER KA
+ {0x0B99, 0x0B9A, prN}, // Lo [2] TAMIL LETTER NGA..TAMIL LETTER CA
+ {0x0B9C, 0x0B9C, prN}, // Lo TAMIL LETTER JA
+ {0x0B9E, 0x0B9F, prN}, // Lo [2] TAMIL LETTER NYA..TAMIL LETTER TTA
+ {0x0BA3, 0x0BA4, prN}, // Lo [2] TAMIL LETTER NNA..TAMIL LETTER TA
+ {0x0BA8, 0x0BAA, prN}, // Lo [3] TAMIL LETTER NA..TAMIL LETTER PA
+ {0x0BAE, 0x0BB9, prN}, // Lo [12] TAMIL LETTER MA..TAMIL LETTER HA
+ {0x0BBE, 0x0BBF, prN}, // Mc [2] TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN I
+ {0x0BC0, 0x0BC0, prN}, // Mn TAMIL VOWEL SIGN II
+ {0x0BC1, 0x0BC2, prN}, // Mc [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU
+ {0x0BC6, 0x0BC8, prN}, // Mc [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI
+ {0x0BCA, 0x0BCC, prN}, // Mc [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU
+ {0x0BCD, 0x0BCD, prN}, // Mn TAMIL SIGN VIRAMA
+ {0x0BD0, 0x0BD0, prN}, // Lo TAMIL OM
+ {0x0BD7, 0x0BD7, prN}, // Mc TAMIL AU LENGTH MARK
+ {0x0BE6, 0x0BEF, prN}, // Nd [10] TAMIL DIGIT ZERO..TAMIL DIGIT NINE
+ {0x0BF0, 0x0BF2, prN}, // No [3] TAMIL NUMBER TEN..TAMIL NUMBER ONE THOUSAND
+ {0x0BF3, 0x0BF8, prN}, // So [6] TAMIL DAY SIGN..TAMIL AS ABOVE SIGN
+ {0x0BF9, 0x0BF9, prN}, // Sc TAMIL RUPEE SIGN
+ {0x0BFA, 0x0BFA, prN}, // So TAMIL NUMBER SIGN
+ {0x0C00, 0x0C00, prN}, // Mn TELUGU SIGN COMBINING CANDRABINDU ABOVE
+ {0x0C01, 0x0C03, prN}, // Mc [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA
+ {0x0C04, 0x0C04, prN}, // Mn TELUGU SIGN COMBINING ANUSVARA ABOVE
+ {0x0C05, 0x0C0C, prN}, // Lo [8] TELUGU LETTER A..TELUGU LETTER VOCALIC L
+ {0x0C0E, 0x0C10, prN}, // Lo [3] TELUGU LETTER E..TELUGU LETTER AI
+ {0x0C12, 0x0C28, prN}, // Lo [23] TELUGU LETTER O..TELUGU LETTER NA
+ {0x0C2A, 0x0C39, prN}, // Lo [16] TELUGU LETTER PA..TELUGU LETTER HA
+ {0x0C3C, 0x0C3C, prN}, // Mn TELUGU SIGN NUKTA
+ {0x0C3D, 0x0C3D, prN}, // Lo TELUGU SIGN AVAGRAHA
+ {0x0C3E, 0x0C40, prN}, // Mn [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II
+ {0x0C41, 0x0C44, prN}, // Mc [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR
+ {0x0C46, 0x0C48, prN}, // Mn [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI
+ {0x0C4A, 0x0C4D, prN}, // Mn [4] TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA
+ {0x0C55, 0x0C56, prN}, // Mn [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK
+ {0x0C58, 0x0C5A, prN}, // Lo [3] TELUGU LETTER TSA..TELUGU LETTER RRRA
+ {0x0C5D, 0x0C5D, prN}, // Lo TELUGU LETTER NAKAARA POLLU
+ {0x0C60, 0x0C61, prN}, // Lo [2] TELUGU LETTER VOCALIC RR..TELUGU LETTER VOCALIC LL
+ {0x0C62, 0x0C63, prN}, // Mn [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL
+ {0x0C66, 0x0C6F, prN}, // Nd [10] TELUGU DIGIT ZERO..TELUGU DIGIT NINE
+ {0x0C77, 0x0C77, prN}, // Po TELUGU SIGN SIDDHAM
+ {0x0C78, 0x0C7E, prN}, // No [7] TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR..TELUGU FRACTION DIGIT THREE FOR EVEN POWERS OF FOUR
+ {0x0C7F, 0x0C7F, prN}, // So TELUGU SIGN TUUMU
+ {0x0C80, 0x0C80, prN}, // Lo KANNADA SIGN SPACING CANDRABINDU
+ {0x0C81, 0x0C81, prN}, // Mn KANNADA SIGN CANDRABINDU
+ {0x0C82, 0x0C83, prN}, // Mc [2] KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA
+ {0x0C84, 0x0C84, prN}, // Po KANNADA SIGN SIDDHAM
+ {0x0C85, 0x0C8C, prN}, // Lo [8] KANNADA LETTER A..KANNADA LETTER VOCALIC L
+ {0x0C8E, 0x0C90, prN}, // Lo [3] KANNADA LETTER E..KANNADA LETTER AI
+ {0x0C92, 0x0CA8, prN}, // Lo [23] KANNADA LETTER O..KANNADA LETTER NA
+ {0x0CAA, 0x0CB3, prN}, // Lo [10] KANNADA LETTER PA..KANNADA LETTER LLA
+ {0x0CB5, 0x0CB9, prN}, // Lo [5] KANNADA LETTER VA..KANNADA LETTER HA
+ {0x0CBC, 0x0CBC, prN}, // Mn KANNADA SIGN NUKTA
+ {0x0CBD, 0x0CBD, prN}, // Lo KANNADA SIGN AVAGRAHA
+ {0x0CBE, 0x0CBE, prN}, // Mc KANNADA VOWEL SIGN AA
+ {0x0CBF, 0x0CBF, prN}, // Mn KANNADA VOWEL SIGN I
+ {0x0CC0, 0x0CC4, prN}, // Mc [5] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN VOCALIC RR
+ {0x0CC6, 0x0CC6, prN}, // Mn KANNADA VOWEL SIGN E
+ {0x0CC7, 0x0CC8, prN}, // Mc [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI
+ {0x0CCA, 0x0CCB, prN}, // Mc [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO
+ {0x0CCC, 0x0CCD, prN}, // Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
+ {0x0CD5, 0x0CD6, prN}, // Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
+ {0x0CDD, 0x0CDE, prN}, // Lo [2] KANNADA LETTER NAKAARA POLLU..KANNADA LETTER FA
+ {0x0CE0, 0x0CE1, prN}, // Lo [2] KANNADA LETTER VOCALIC RR..KANNADA LETTER VOCALIC LL
+ {0x0CE2, 0x0CE3, prN}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0CE6, 0x0CEF, prN}, // Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
+ {0x0CF1, 0x0CF2, prN}, // Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0D00, 0x0D01, prN}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
+ {0x0D02, 0x0D03, prN}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
+ {0x0D04, 0x0D0C, prN}, // Lo [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
+ {0x0D0E, 0x0D10, prN}, // Lo [3] MALAYALAM LETTER E..MALAYALAM LETTER AI
+ {0x0D12, 0x0D3A, prN}, // Lo [41] MALAYALAM LETTER O..MALAYALAM LETTER TTTA
+ {0x0D3B, 0x0D3C, prN}, // Mn [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
+ {0x0D3D, 0x0D3D, prN}, // Lo MALAYALAM SIGN AVAGRAHA
+ {0x0D3E, 0x0D40, prN}, // Mc [3] MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN II
+ {0x0D41, 0x0D44, prN}, // Mn [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR
+ {0x0D46, 0x0D48, prN}, // Mc [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI
+ {0x0D4A, 0x0D4C, prN}, // Mc [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU
+ {0x0D4D, 0x0D4D, prN}, // Mn MALAYALAM SIGN VIRAMA
+ {0x0D4E, 0x0D4E, prN}, // Lo MALAYALAM LETTER DOT REPH
+ {0x0D4F, 0x0D4F, prN}, // So MALAYALAM SIGN PARA
+ {0x0D54, 0x0D56, prN}, // Lo [3] MALAYALAM LETTER CHILLU M..MALAYALAM LETTER CHILLU LLL
+ {0x0D57, 0x0D57, prN}, // Mc MALAYALAM AU LENGTH MARK
+ {0x0D58, 0x0D5E, prN}, // No [7] MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH..MALAYALAM FRACTION ONE FIFTH
+ {0x0D5F, 0x0D61, prN}, // Lo [3] MALAYALAM LETTER ARCHAIC II..MALAYALAM LETTER VOCALIC LL
+ {0x0D62, 0x0D63, prN}, // Mn [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL
+ {0x0D66, 0x0D6F, prN}, // Nd [10] MALAYALAM DIGIT ZERO..MALAYALAM DIGIT NINE
+ {0x0D70, 0x0D78, prN}, // No [9] MALAYALAM NUMBER TEN..MALAYALAM FRACTION THREE SIXTEENTHS
+ {0x0D79, 0x0D79, prN}, // So MALAYALAM DATE MARK
+ {0x0D7A, 0x0D7F, prN}, // Lo [6] MALAYALAM LETTER CHILLU NN..MALAYALAM LETTER CHILLU K
+ {0x0D81, 0x0D81, prN}, // Mn SINHALA SIGN CANDRABINDU
+ {0x0D82, 0x0D83, prN}, // Mc [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA
+ {0x0D85, 0x0D96, prN}, // Lo [18] SINHALA LETTER AYANNA..SINHALA LETTER AUYANNA
+ {0x0D9A, 0x0DB1, prN}, // Lo [24] SINHALA LETTER ALPAPRAANA KAYANNA..SINHALA LETTER DANTAJA NAYANNA
+ {0x0DB3, 0x0DBB, prN}, // Lo [9] SINHALA LETTER SANYAKA DAYANNA..SINHALA LETTER RAYANNA
+ {0x0DBD, 0x0DBD, prN}, // Lo SINHALA LETTER DANTAJA LAYANNA
+ {0x0DC0, 0x0DC6, prN}, // Lo [7] SINHALA LETTER VAYANNA..SINHALA LETTER FAYANNA
+ {0x0DCA, 0x0DCA, prN}, // Mn SINHALA SIGN AL-LAKUNA
+ {0x0DCF, 0x0DD1, prN}, // Mc [3] SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA
+ {0x0DD2, 0x0DD4, prN}, // Mn [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA
+ {0x0DD6, 0x0DD6, prN}, // Mn SINHALA VOWEL SIGN DIGA PAA-PILLA
+ {0x0DD8, 0x0DDF, prN}, // Mc [8] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA
+ {0x0DE6, 0x0DEF, prN}, // Nd [10] SINHALA LITH DIGIT ZERO..SINHALA LITH DIGIT NINE
+ {0x0DF2, 0x0DF3, prN}, // Mc [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA
+ {0x0DF4, 0x0DF4, prN}, // Po SINHALA PUNCTUATION KUNDDALIYA
+ {0x0E01, 0x0E30, prN}, // Lo [48] THAI CHARACTER KO KAI..THAI CHARACTER SARA A
+ {0x0E31, 0x0E31, prN}, // Mn THAI CHARACTER MAI HAN-AKAT
+ {0x0E32, 0x0E33, prN}, // Lo [2] THAI CHARACTER SARA AA..THAI CHARACTER SARA AM
+ {0x0E34, 0x0E3A, prN}, // Mn [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU
+ {0x0E3F, 0x0E3F, prN}, // Sc THAI CURRENCY SYMBOL BAHT
+ {0x0E40, 0x0E45, prN}, // Lo [6] THAI CHARACTER SARA E..THAI CHARACTER LAKKHANGYAO
+ {0x0E46, 0x0E46, prN}, // Lm THAI CHARACTER MAIYAMOK
+ {0x0E47, 0x0E4E, prN}, // Mn [8] THAI CHARACTER MAITAIKHU..THAI CHARACTER YAMAKKAN
+ {0x0E4F, 0x0E4F, prN}, // Po THAI CHARACTER FONGMAN
+ {0x0E50, 0x0E59, prN}, // Nd [10] THAI DIGIT ZERO..THAI DIGIT NINE
+ {0x0E5A, 0x0E5B, prN}, // Po [2] THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT
+ {0x0E81, 0x0E82, prN}, // Lo [2] LAO LETTER KO..LAO LETTER KHO SUNG
+ {0x0E84, 0x0E84, prN}, // Lo LAO LETTER KHO TAM
+ {0x0E86, 0x0E8A, prN}, // Lo [5] LAO LETTER PALI GHA..LAO LETTER SO TAM
+ {0x0E8C, 0x0EA3, prN}, // Lo [24] LAO LETTER PALI JHA..LAO LETTER LO LING
+ {0x0EA5, 0x0EA5, prN}, // Lo LAO LETTER LO LOOT
+ {0x0EA7, 0x0EB0, prN}, // Lo [10] LAO LETTER WO..LAO VOWEL SIGN A
+ {0x0EB1, 0x0EB1, prN}, // Mn LAO VOWEL SIGN MAI KAN
+ {0x0EB2, 0x0EB3, prN}, // Lo [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM
+ {0x0EB4, 0x0EBC, prN}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
+ {0x0EBD, 0x0EBD, prN}, // Lo LAO SEMIVOWEL SIGN NYO
+ {0x0EC0, 0x0EC4, prN}, // Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
+ {0x0EC6, 0x0EC6, prN}, // Lm LAO KO LA
+ {0x0EC8, 0x0ECD, prN}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA
+ {0x0ED0, 0x0ED9, prN}, // Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE
+ {0x0EDC, 0x0EDF, prN}, // Lo [4] LAO HO NO..LAO LETTER KHMU NYO
+ {0x0F00, 0x0F00, prN}, // Lo TIBETAN SYLLABLE OM
+ {0x0F01, 0x0F03, prN}, // So [3] TIBETAN MARK GTER YIG MGO TRUNCATED A..TIBETAN MARK GTER YIG MGO -UM GTER TSHEG MA
+ {0x0F04, 0x0F12, prN}, // Po [15] TIBETAN MARK INITIAL YIG MGO MDUN MA..TIBETAN MARK RGYA GRAM SHAD
+ {0x0F13, 0x0F13, prN}, // So TIBETAN MARK CARET -DZUD RTAGS ME LONG CAN
+ {0x0F14, 0x0F14, prN}, // Po TIBETAN MARK GTER TSHEG
+ {0x0F15, 0x0F17, prN}, // So [3] TIBETAN LOGOTYPE SIGN CHAD RTAGS..TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS
+ {0x0F18, 0x0F19, prN}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
+ {0x0F1A, 0x0F1F, prN}, // So [6] TIBETAN SIGN RDEL DKAR GCIG..TIBETAN SIGN RDEL DKAR RDEL NAG
+ {0x0F20, 0x0F29, prN}, // Nd [10] TIBETAN DIGIT ZERO..TIBETAN DIGIT NINE
+ {0x0F2A, 0x0F33, prN}, // No [10] TIBETAN DIGIT HALF ONE..TIBETAN DIGIT HALF ZERO
+ {0x0F34, 0x0F34, prN}, // So TIBETAN MARK BSDUS RTAGS
+ {0x0F35, 0x0F35, prN}, // Mn TIBETAN MARK NGAS BZUNG NYI ZLA
+ {0x0F36, 0x0F36, prN}, // So TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN
+ {0x0F37, 0x0F37, prN}, // Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS
+ {0x0F38, 0x0F38, prN}, // So TIBETAN MARK CHE MGO
+ {0x0F39, 0x0F39, prN}, // Mn TIBETAN MARK TSA -PHRU
+ {0x0F3A, 0x0F3A, prN}, // Ps TIBETAN MARK GUG RTAGS GYON
+ {0x0F3B, 0x0F3B, prN}, // Pe TIBETAN MARK GUG RTAGS GYAS
+ {0x0F3C, 0x0F3C, prN}, // Ps TIBETAN MARK ANG KHANG GYON
+ {0x0F3D, 0x0F3D, prN}, // Pe TIBETAN MARK ANG KHANG GYAS
+ {0x0F3E, 0x0F3F, prN}, // Mc [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES
+ {0x0F40, 0x0F47, prN}, // Lo [8] TIBETAN LETTER KA..TIBETAN LETTER JA
+ {0x0F49, 0x0F6C, prN}, // Lo [36] TIBETAN LETTER NYA..TIBETAN LETTER RRA
+ {0x0F71, 0x0F7E, prN}, // Mn [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO
+ {0x0F7F, 0x0F7F, prN}, // Mc TIBETAN SIGN RNAM BCAD
+ {0x0F80, 0x0F84, prN}, // Mn [5] TIBETAN VOWEL SIGN REVERSED I..TIBETAN MARK HALANTA
+ {0x0F85, 0x0F85, prN}, // Po TIBETAN MARK PALUTA
+ {0x0F86, 0x0F87, prN}, // Mn [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS
+ {0x0F88, 0x0F8C, prN}, // Lo [5] TIBETAN SIGN LCE TSA CAN..TIBETAN SIGN INVERTED MCHU CAN
+ {0x0F8D, 0x0F97, prN}, // Mn [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA
+ {0x0F99, 0x0FBC, prN}, // Mn [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA
+ {0x0FBE, 0x0FC5, prN}, // So [8] TIBETAN KU RU KHA..TIBETAN SYMBOL RDO RJE
+ {0x0FC6, 0x0FC6, prN}, // Mn TIBETAN SYMBOL PADMA GDAN
+ {0x0FC7, 0x0FCC, prN}, // So [6] TIBETAN SYMBOL RDO RJE RGYA GRAM..TIBETAN SYMBOL NOR BU BZHI -KHYIL
+ {0x0FCE, 0x0FCF, prN}, // So [2] TIBETAN SIGN RDEL NAG RDEL DKAR..TIBETAN SIGN RDEL NAG GSUM
+ {0x0FD0, 0x0FD4, prN}, // Po [5] TIBETAN MARK BSKA- SHOG GI MGO RGYAN..TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA
+ {0x0FD5, 0x0FD8, prN}, // So [4] RIGHT-FACING SVASTI SIGN..LEFT-FACING SVASTI SIGN WITH DOTS
+ {0x0FD9, 0x0FDA, prN}, // Po [2] TIBETAN MARK LEADING MCHAN RTAGS..TIBETAN MARK TRAILING MCHAN RTAGS
+ {0x1000, 0x102A, prN}, // Lo [43] MYANMAR LETTER KA..MYANMAR LETTER AU
+ {0x102B, 0x102C, prN}, // Mc [2] MYANMAR VOWEL SIGN TALL AA..MYANMAR VOWEL SIGN AA
+ {0x102D, 0x1030, prN}, // Mn [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU
+ {0x1031, 0x1031, prN}, // Mc MYANMAR VOWEL SIGN E
+ {0x1032, 0x1037, prN}, // Mn [6] MYANMAR VOWEL SIGN AI..MYANMAR SIGN DOT BELOW
+ {0x1038, 0x1038, prN}, // Mc MYANMAR SIGN VISARGA
+ {0x1039, 0x103A, prN}, // Mn [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT
+ {0x103B, 0x103C, prN}, // Mc [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA
+ {0x103D, 0x103E, prN}, // Mn [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA
+ {0x103F, 0x103F, prN}, // Lo MYANMAR LETTER GREAT SA
+ {0x1040, 0x1049, prN}, // Nd [10] MYANMAR DIGIT ZERO..MYANMAR DIGIT NINE
+ {0x104A, 0x104F, prN}, // Po [6] MYANMAR SIGN LITTLE SECTION..MYANMAR SYMBOL GENITIVE
+ {0x1050, 0x1055, prN}, // Lo [6] MYANMAR LETTER SHA..MYANMAR LETTER VOCALIC LL
+ {0x1056, 0x1057, prN}, // Mc [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR
+ {0x1058, 0x1059, prN}, // Mn [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL
+ {0x105A, 0x105D, prN}, // Lo [4] MYANMAR LETTER MON NGA..MYANMAR LETTER MON BBE
+ {0x105E, 0x1060, prN}, // Mn [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA
+ {0x1061, 0x1061, prN}, // Lo MYANMAR LETTER SGAW KAREN SHA
+ {0x1062, 0x1064, prN}, // Mc [3] MYANMAR VOWEL SIGN SGAW KAREN EU..MYANMAR TONE MARK SGAW KAREN KE PHO
+ {0x1065, 0x1066, prN}, // Lo [2] MYANMAR LETTER WESTERN PWO KAREN THA..MYANMAR LETTER WESTERN PWO KAREN PWA
+ {0x1067, 0x106D, prN}, // Mc [7] MYANMAR VOWEL SIGN WESTERN PWO KAREN EU..MYANMAR SIGN WESTERN PWO KAREN TONE-5
+ {0x106E, 0x1070, prN}, // Lo [3] MYANMAR LETTER EASTERN PWO KAREN NNA..MYANMAR LETTER EASTERN PWO KAREN GHWA
+ {0x1071, 0x1074, prN}, // Mn [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE
+ {0x1075, 0x1081, prN}, // Lo [13] MYANMAR LETTER SHAN KA..MYANMAR LETTER SHAN HA
+ {0x1082, 0x1082, prN}, // Mn MYANMAR CONSONANT SIGN SHAN MEDIAL WA
+ {0x1083, 0x1084, prN}, // Mc [2] MYANMAR VOWEL SIGN SHAN AA..MYANMAR VOWEL SIGN SHAN E
+ {0x1085, 0x1086, prN}, // Mn [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y
+ {0x1087, 0x108C, prN}, // Mc [6] MYANMAR SIGN SHAN TONE-2..MYANMAR SIGN SHAN COUNCIL TONE-3
+ {0x108D, 0x108D, prN}, // Mn MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE
+ {0x108E, 0x108E, prN}, // Lo MYANMAR LETTER RUMAI PALAUNG FA
+ {0x108F, 0x108F, prN}, // Mc MYANMAR SIGN RUMAI PALAUNG TONE-5
+ {0x1090, 0x1099, prN}, // Nd [10] MYANMAR SHAN DIGIT ZERO..MYANMAR SHAN DIGIT NINE
+ {0x109A, 0x109C, prN}, // Mc [3] MYANMAR SIGN KHAMTI TONE-1..MYANMAR VOWEL SIGN AITON A
+ {0x109D, 0x109D, prN}, // Mn MYANMAR VOWEL SIGN AITON AI
+ {0x109E, 0x109F, prN}, // So [2] MYANMAR SYMBOL SHAN ONE..MYANMAR SYMBOL SHAN EXCLAMATION
+ {0x10A0, 0x10C5, prN}, // Lu [38] GEORGIAN CAPITAL LETTER AN..GEORGIAN CAPITAL LETTER HOE
+ {0x10C7, 0x10C7, prN}, // Lu GEORGIAN CAPITAL LETTER YN
+ {0x10CD, 0x10CD, prN}, // Lu GEORGIAN CAPITAL LETTER AEN
+ {0x10D0, 0x10FA, prN}, // Ll [43] GEORGIAN LETTER AN..GEORGIAN LETTER AIN
+ {0x10FB, 0x10FB, prN}, // Po GEORGIAN PARAGRAPH SEPARATOR
+ {0x10FC, 0x10FC, prN}, // Lm MODIFIER LETTER GEORGIAN NAR
+ {0x10FD, 0x10FF, prN}, // Ll [3] GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN
+ {0x1100, 0x115F, prW}, // Lo [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER
+ {0x1160, 0x11FF, prN}, // Lo [160] HANGUL JUNGSEONG FILLER..HANGUL JONGSEONG SSANGNIEUN
+ {0x1200, 0x1248, prN}, // Lo [73] ETHIOPIC SYLLABLE HA..ETHIOPIC SYLLABLE QWA
+ {0x124A, 0x124D, prN}, // Lo [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE
+ {0x1250, 0x1256, prN}, // Lo [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SYLLABLE QHO
+ {0x1258, 0x1258, prN}, // Lo ETHIOPIC SYLLABLE QHWA
+ {0x125A, 0x125D, prN}, // Lo [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC SYLLABLE QHWE
+ {0x1260, 0x1288, prN}, // Lo [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYLLABLE XWA
+ {0x128A, 0x128D, prN}, // Lo [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SYLLABLE XWE
+ {0x1290, 0x12B0, prN}, // Lo [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYLLABLE KWA
+ {0x12B2, 0x12B5, prN}, // Lo [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SYLLABLE KWE
+ {0x12B8, 0x12BE, prN}, // Lo [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SYLLABLE KXO
+ {0x12C0, 0x12C0, prN}, // Lo ETHIOPIC SYLLABLE KXWA
+ {0x12C2, 0x12C5, prN}, // Lo [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC SYLLABLE KXWE
+ {0x12C8, 0x12D6, prN}, // Lo [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYLLABLE PHARYNGEAL O
+ {0x12D8, 0x1310, prN}, // Lo [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYLLABLE GWA
+ {0x1312, 0x1315, prN}, // Lo [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SYLLABLE GWE
+ {0x1318, 0x135A, prN}, // Lo [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SYLLABLE FYA
+ {0x135D, 0x135F, prN}, // Mn [3] ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING GEMINATION MARK
+ {0x1360, 0x1368, prN}, // Po [9] ETHIOPIC SECTION MARK..ETHIOPIC PARAGRAPH SEPARATOR
+ {0x1369, 0x137C, prN}, // No [20] ETHIOPIC DIGIT ONE..ETHIOPIC NUMBER TEN THOUSAND
+ {0x1380, 0x138F, prN}, // Lo [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..ETHIOPIC SYLLABLE PWE
+ {0x1390, 0x1399, prN}, // So [10] ETHIOPIC TONAL MARK YIZET..ETHIOPIC TONAL MARK KURT
+ {0x13A0, 0x13F5, prN}, // Lu [86] CHEROKEE LETTER A..CHEROKEE LETTER MV
+ {0x13F8, 0x13FD, prN}, // Ll [6] CHEROKEE SMALL LETTER YE..CHEROKEE SMALL LETTER MV
+ {0x1400, 0x1400, prN}, // Pd CANADIAN SYLLABICS HYPHEN
+ {0x1401, 0x166C, prN}, // Lo [620] CANADIAN SYLLABICS E..CANADIAN SYLLABICS CARRIER TTSA
+ {0x166D, 0x166D, prN}, // So CANADIAN SYLLABICS CHI SIGN
+ {0x166E, 0x166E, prN}, // Po CANADIAN SYLLABICS FULL STOP
+ {0x166F, 0x167F, prN}, // Lo [17] CANADIAN SYLLABICS QAI..CANADIAN SYLLABICS BLACKFOOT W
+ {0x1680, 0x1680, prN}, // Zs OGHAM SPACE MARK
+ {0x1681, 0x169A, prN}, // Lo [26] OGHAM LETTER BEITH..OGHAM LETTER PEITH
+ {0x169B, 0x169B, prN}, // Ps OGHAM FEATHER MARK
+ {0x169C, 0x169C, prN}, // Pe OGHAM REVERSED FEATHER MARK
+ {0x16A0, 0x16EA, prN}, // Lo [75] RUNIC LETTER FEHU FEOH FE F..RUNIC LETTER X
+ {0x16EB, 0x16ED, prN}, // Po [3] RUNIC SINGLE PUNCTUATION..RUNIC CROSS PUNCTUATION
+ {0x16EE, 0x16F0, prN}, // Nl [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHOR SYMBOL
+ {0x16F1, 0x16F8, prN}, // Lo [8] RUNIC LETTER K..RUNIC LETTER FRANKS CASKET AESC
+ {0x1700, 0x1711, prN}, // Lo [18] TAGALOG LETTER A..TAGALOG LETTER HA
+ {0x1712, 0x1714, prN}, // Mn [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN VIRAMA
+ {0x1715, 0x1715, prN}, // Mc TAGALOG SIGN PAMUDPOD
+ {0x171F, 0x171F, prN}, // Lo TAGALOG LETTER ARCHAIC RA
+ {0x1720, 0x1731, prN}, // Lo [18] HANUNOO LETTER A..HANUNOO LETTER HA
+ {0x1732, 0x1733, prN}, // Mn [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U
+ {0x1734, 0x1734, prN}, // Mc HANUNOO SIGN PAMUDPOD
+ {0x1735, 0x1736, prN}, // Po [2] PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION
+ {0x1740, 0x1751, prN}, // Lo [18] BUHID LETTER A..BUHID LETTER HA
+ {0x1752, 0x1753, prN}, // Mn [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U
+ {0x1760, 0x176C, prN}, // Lo [13] TAGBANWA LETTER A..TAGBANWA LETTER YA
+ {0x176E, 0x1770, prN}, // Lo [3] TAGBANWA LETTER LA..TAGBANWA LETTER SA
+ {0x1772, 0x1773, prN}, // Mn [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U
+ {0x1780, 0x17B3, prN}, // Lo [52] KHMER LETTER KA..KHMER INDEPENDENT VOWEL QAU
+ {0x17B4, 0x17B5, prN}, // Mn [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA
+ {0x17B6, 0x17B6, prN}, // Mc KHMER VOWEL SIGN AA
+ {0x17B7, 0x17BD, prN}, // Mn [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA
+ {0x17BE, 0x17C5, prN}, // Mc [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU
+ {0x17C6, 0x17C6, prN}, // Mn KHMER SIGN NIKAHIT
+ {0x17C7, 0x17C8, prN}, // Mc [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU
+ {0x17C9, 0x17D3, prN}, // Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT
+ {0x17D4, 0x17D6, prN}, // Po [3] KHMER SIGN KHAN..KHMER SIGN CAMNUC PII KUUH
+ {0x17D7, 0x17D7, prN}, // Lm KHMER SIGN LEK TOO
+ {0x17D8, 0x17DA, prN}, // Po [3] KHMER SIGN BEYYAL..KHMER SIGN KOOMUUT
+ {0x17DB, 0x17DB, prN}, // Sc KHMER CURRENCY SYMBOL RIEL
+ {0x17DC, 0x17DC, prN}, // Lo KHMER SIGN AVAKRAHASANYA
+ {0x17DD, 0x17DD, prN}, // Mn KHMER SIGN ATTHACAN
+ {0x17E0, 0x17E9, prN}, // Nd [10] KHMER DIGIT ZERO..KHMER DIGIT NINE
+ {0x17F0, 0x17F9, prN}, // No [10] KHMER SYMBOL LEK ATTAK SON..KHMER SYMBOL LEK ATTAK PRAM-BUON
+ {0x1800, 0x1805, prN}, // Po [6] MONGOLIAN BIRGA..MONGOLIAN FOUR DOTS
+ {0x1806, 0x1806, prN}, // Pd MONGOLIAN TODO SOFT HYPHEN
+ {0x1807, 0x180A, prN}, // Po [4] MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER..MONGOLIAN NIRUGU
+ {0x180B, 0x180D, prN}, // Mn [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE
+ {0x180E, 0x180E, prN}, // Cf MONGOLIAN VOWEL SEPARATOR
+ {0x180F, 0x180F, prN}, // Mn MONGOLIAN FREE VARIATION SELECTOR FOUR
+ {0x1810, 0x1819, prN}, // Nd [10] MONGOLIAN DIGIT ZERO..MONGOLIAN DIGIT NINE
+ {0x1820, 0x1842, prN}, // Lo [35] MONGOLIAN LETTER A..MONGOLIAN LETTER CHI
+ {0x1843, 0x1843, prN}, // Lm MONGOLIAN LETTER TODO LONG VOWEL SIGN
+ {0x1844, 0x1878, prN}, // Lo [53] MONGOLIAN LETTER TODO E..MONGOLIAN LETTER CHA WITH TWO DOTS
+ {0x1880, 0x1884, prN}, // Lo [5] MONGOLIAN LETTER ALI GALI ANUSVARA ONE..MONGOLIAN LETTER ALI GALI INVERTED UBADAMA
+ {0x1885, 0x1886, prN}, // Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA
+ {0x1887, 0x18A8, prN}, // Lo [34] MONGOLIAN LETTER ALI GALI A..MONGOLIAN LETTER MANCHU ALI GALI BHA
+ {0x18A9, 0x18A9, prN}, // Mn MONGOLIAN LETTER ALI GALI DAGALGA
+ {0x18AA, 0x18AA, prN}, // Lo MONGOLIAN LETTER MANCHU ALI GALI LHA
+ {0x18B0, 0x18F5, prN}, // Lo [70] CANADIAN SYLLABICS OY..CANADIAN SYLLABICS CARRIER DENTAL S
+ {0x1900, 0x191E, prN}, // Lo [31] LIMBU VOWEL-CARRIER LETTER..LIMBU LETTER TRA
+ {0x1920, 0x1922, prN}, // Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U
+ {0x1923, 0x1926, prN}, // Mc [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU
+ {0x1927, 0x1928, prN}, // Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O
+ {0x1929, 0x192B, prN}, // Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA
+ {0x1930, 0x1931, prN}, // Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA
+ {0x1932, 0x1932, prN}, // Mn LIMBU SMALL LETTER ANUSVARA
+ {0x1933, 0x1938, prN}, // Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA
+ {0x1939, 0x193B, prN}, // Mn [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
+ {0x1940, 0x1940, prN}, // So LIMBU SIGN LOO
+ {0x1944, 0x1945, prN}, // Po [2] LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK
+ {0x1946, 0x194F, prN}, // Nd [10] LIMBU DIGIT ZERO..LIMBU DIGIT NINE
+ {0x1950, 0x196D, prN}, // Lo [30] TAI LE LETTER KA..TAI LE LETTER AI
+ {0x1970, 0x1974, prN}, // Lo [5] TAI LE LETTER TONE-2..TAI LE LETTER TONE-6
+ {0x1980, 0x19AB, prN}, // Lo [44] NEW TAI LUE LETTER HIGH QA..NEW TAI LUE LETTER LOW SUA
+ {0x19B0, 0x19C9, prN}, // Lo [26] NEW TAI LUE VOWEL SIGN VOWEL SHORTENER..NEW TAI LUE TONE MARK-2
+ {0x19D0, 0x19D9, prN}, // Nd [10] NEW TAI LUE DIGIT ZERO..NEW TAI LUE DIGIT NINE
+ {0x19DA, 0x19DA, prN}, // No NEW TAI LUE THAM DIGIT ONE
+ {0x19DE, 0x19DF, prN}, // So [2] NEW TAI LUE SIGN LAE..NEW TAI LUE SIGN LAEV
+ {0x19E0, 0x19FF, prN}, // So [32] KHMER SYMBOL PATHAMASAT..KHMER SYMBOL DAP-PRAM ROC
+ {0x1A00, 0x1A16, prN}, // Lo [23] BUGINESE LETTER KA..BUGINESE LETTER HA
+ {0x1A17, 0x1A18, prN}, // Mn [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U
+ {0x1A19, 0x1A1A, prN}, // Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O
+ {0x1A1B, 0x1A1B, prN}, // Mn BUGINESE VOWEL SIGN AE
+ {0x1A1E, 0x1A1F, prN}, // Po [2] BUGINESE PALLAWA..BUGINESE END OF SECTION
+ {0x1A20, 0x1A54, prN}, // Lo [53] TAI THAM LETTER HIGH KA..TAI THAM LETTER GREAT SA
+ {0x1A55, 0x1A55, prN}, // Mc TAI THAM CONSONANT SIGN MEDIAL RA
+ {0x1A56, 0x1A56, prN}, // Mn TAI THAM CONSONANT SIGN MEDIAL LA
+ {0x1A57, 0x1A57, prN}, // Mc TAI THAM CONSONANT SIGN LA TANG LAI
+ {0x1A58, 0x1A5E, prN}, // Mn [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA
+ {0x1A60, 0x1A60, prN}, // Mn TAI THAM SIGN SAKOT
+ {0x1A61, 0x1A61, prN}, // Mc TAI THAM VOWEL SIGN A
+ {0x1A62, 0x1A62, prN}, // Mn TAI THAM VOWEL SIGN MAI SAT
+ {0x1A63, 0x1A64, prN}, // Mc [2] TAI THAM VOWEL SIGN AA..TAI THAM VOWEL SIGN TALL AA
+ {0x1A65, 0x1A6C, prN}, // Mn [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW
+ {0x1A6D, 0x1A72, prN}, // Mc [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI
+ {0x1A73, 0x1A7C, prN}, // Mn [10] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN KHUEN-LUE KARAN
+ {0x1A7F, 0x1A7F, prN}, // Mn TAI THAM COMBINING CRYPTOGRAMMIC DOT
+ {0x1A80, 0x1A89, prN}, // Nd [10] TAI THAM HORA DIGIT ZERO..TAI THAM HORA DIGIT NINE
+ {0x1A90, 0x1A99, prN}, // Nd [10] TAI THAM THAM DIGIT ZERO..TAI THAM THAM DIGIT NINE
+ {0x1AA0, 0x1AA6, prN}, // Po [7] TAI THAM SIGN WIANG..TAI THAM SIGN REVERSED ROTATED RANA
+ {0x1AA7, 0x1AA7, prN}, // Lm TAI THAM SIGN MAI YAMOK
+ {0x1AA8, 0x1AAD, prN}, // Po [6] TAI THAM SIGN KAAN..TAI THAM SIGN CAANG
+ {0x1AB0, 0x1ABD, prN}, // Mn [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW
+ {0x1ABE, 0x1ABE, prN}, // Me COMBINING PARENTHESES OVERLAY
+ {0x1ABF, 0x1ACE, prN}, // Mn [16] COMBINING LATIN SMALL LETTER W BELOW..COMBINING LATIN SMALL LETTER INSULAR T
+ {0x1B00, 0x1B03, prN}, // Mn [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG
+ {0x1B04, 0x1B04, prN}, // Mc BALINESE SIGN BISAH
+ {0x1B05, 0x1B33, prN}, // Lo [47] BALINESE LETTER AKARA..BALINESE LETTER HA
+ {0x1B34, 0x1B34, prN}, // Mn BALINESE SIGN REREKAN
+ {0x1B35, 0x1B35, prN}, // Mc BALINESE VOWEL SIGN TEDUNG
+ {0x1B36, 0x1B3A, prN}, // Mn [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA
+ {0x1B3B, 0x1B3B, prN}, // Mc BALINESE VOWEL SIGN RA REPA TEDUNG
+ {0x1B3C, 0x1B3C, prN}, // Mn BALINESE VOWEL SIGN LA LENGA
+ {0x1B3D, 0x1B41, prN}, // Mc [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG
+ {0x1B42, 0x1B42, prN}, // Mn BALINESE VOWEL SIGN PEPET
+ {0x1B43, 0x1B44, prN}, // Mc [2] BALINESE VOWEL SIGN PEPET TEDUNG..BALINESE ADEG ADEG
+ {0x1B45, 0x1B4C, prN}, // Lo [8] BALINESE LETTER KAF SASAK..BALINESE LETTER ARCHAIC JNYA
+ {0x1B50, 0x1B59, prN}, // Nd [10] BALINESE DIGIT ZERO..BALINESE DIGIT NINE
+ {0x1B5A, 0x1B60, prN}, // Po [7] BALINESE PANTI..BALINESE PAMENENG
+ {0x1B61, 0x1B6A, prN}, // So [10] BALINESE MUSICAL SYMBOL DONG..BALINESE MUSICAL SYMBOL DANG GEDE
+ {0x1B6B, 0x1B73, prN}, // Mn [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG
+ {0x1B74, 0x1B7C, prN}, // So [9] BALINESE MUSICAL SYMBOL RIGHT-HAND OPEN DUG..BALINESE MUSICAL SYMBOL LEFT-HAND OPEN PING
+ {0x1B7D, 0x1B7E, prN}, // Po [2] BALINESE PANTI LANTANG..BALINESE PAMADA LANTANG
+ {0x1B80, 0x1B81, prN}, // Mn [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR
+ {0x1B82, 0x1B82, prN}, // Mc SUNDANESE SIGN PANGWISAD
+ {0x1B83, 0x1BA0, prN}, // Lo [30] SUNDANESE LETTER A..SUNDANESE LETTER HA
+ {0x1BA1, 0x1BA1, prN}, // Mc SUNDANESE CONSONANT SIGN PAMINGKAL
+ {0x1BA2, 0x1BA5, prN}, // Mn [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU
+ {0x1BA6, 0x1BA7, prN}, // Mc [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG
+ {0x1BA8, 0x1BA9, prN}, // Mn [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG
+ {0x1BAA, 0x1BAA, prN}, // Mc SUNDANESE SIGN PAMAAEH
+ {0x1BAB, 0x1BAD, prN}, // Mn [3] SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA
+ {0x1BAE, 0x1BAF, prN}, // Lo [2] SUNDANESE LETTER KHA..SUNDANESE LETTER SYA
+ {0x1BB0, 0x1BB9, prN}, // Nd [10] SUNDANESE DIGIT ZERO..SUNDANESE DIGIT NINE
+ {0x1BBA, 0x1BBF, prN}, // Lo [6] SUNDANESE AVAGRAHA..SUNDANESE LETTER FINAL M
+ {0x1BC0, 0x1BE5, prN}, // Lo [38] BATAK LETTER A..BATAK LETTER U
+ {0x1BE6, 0x1BE6, prN}, // Mn BATAK SIGN TOMPI
+ {0x1BE7, 0x1BE7, prN}, // Mc BATAK VOWEL SIGN E
+ {0x1BE8, 0x1BE9, prN}, // Mn [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE
+ {0x1BEA, 0x1BEC, prN}, // Mc [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O
+ {0x1BED, 0x1BED, prN}, // Mn BATAK VOWEL SIGN KARO O
+ {0x1BEE, 0x1BEE, prN}, // Mc BATAK VOWEL SIGN U
+ {0x1BEF, 0x1BF1, prN}, // Mn [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H
+ {0x1BF2, 0x1BF3, prN}, // Mc [2] BATAK PANGOLAT..BATAK PANONGONAN
+ {0x1BFC, 0x1BFF, prN}, // Po [4] BATAK SYMBOL BINDU NA METEK..BATAK SYMBOL BINDU PANGOLAT
+ {0x1C00, 0x1C23, prN}, // Lo [36] LEPCHA LETTER KA..LEPCHA LETTER A
+ {0x1C24, 0x1C2B, prN}, // Mc [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU
+ {0x1C2C, 0x1C33, prN}, // Mn [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T
+ {0x1C34, 0x1C35, prN}, // Mc [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG
+ {0x1C36, 0x1C37, prN}, // Mn [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
+ {0x1C3B, 0x1C3F, prN}, // Po [5] LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK
+ {0x1C40, 0x1C49, prN}, // Nd [10] LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE
+ {0x1C4D, 0x1C4F, prN}, // Lo [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA
+ {0x1C50, 0x1C59, prN}, // Nd [10] OL CHIKI DIGIT ZERO..OL CHIKI DIGIT NINE
+ {0x1C5A, 0x1C77, prN}, // Lo [30] OL CHIKI LETTER LA..OL CHIKI LETTER OH
+ {0x1C78, 0x1C7D, prN}, // Lm [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD
+ {0x1C7E, 0x1C7F, prN}, // Po [2] OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD
+ {0x1C80, 0x1C88, prN}, // Ll [9] CYRILLIC SMALL LETTER ROUNDED VE..CYRILLIC SMALL LETTER UNBLENDED UK
+ {0x1C90, 0x1CBA, prN}, // Lu [43] GEORGIAN MTAVRULI CAPITAL LETTER AN..GEORGIAN MTAVRULI CAPITAL LETTER AIN
+ {0x1CBD, 0x1CBF, prN}, // Lu [3] GEORGIAN MTAVRULI CAPITAL LETTER AEN..GEORGIAN MTAVRULI CAPITAL LETTER LABIAL SIGN
+ {0x1CC0, 0x1CC7, prN}, // Po [8] SUNDANESE PUNCTUATION BINDU SURYA..SUNDANESE PUNCTUATION BINDU BA SATANGA
+ {0x1CD0, 0x1CD2, prN}, // Mn [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+ {0x1CD3, 0x1CD3, prN}, // Po VEDIC SIGN NIHSHVASA
+ {0x1CD4, 0x1CE0, prN}, // Mn [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
+ {0x1CE1, 0x1CE1, prN}, // Mc VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
+ {0x1CE2, 0x1CE8, prN}, // Mn [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL
+ {0x1CE9, 0x1CEC, prN}, // Lo [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA..VEDIC SIGN ANUSVARA VAMAGOMUKHA WITH TAIL
+ {0x1CED, 0x1CED, prN}, // Mn VEDIC SIGN TIRYAK
+ {0x1CEE, 0x1CF3, prN}, // Lo [6] VEDIC SIGN HEXIFORM LONG ANUSVARA..VEDIC SIGN ROTATED ARDHAVISARGA
+ {0x1CF4, 0x1CF4, prN}, // Mn VEDIC TONE CANDRA ABOVE
+ {0x1CF5, 0x1CF6, prN}, // Lo [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN UPADHMANIYA
+ {0x1CF7, 0x1CF7, prN}, // Mc VEDIC SIGN ATIKRAMA
+ {0x1CF8, 0x1CF9, prN}, // Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+ {0x1CFA, 0x1CFA, prN}, // Lo VEDIC SIGN DOUBLE ANUSVARA ANTARGOMUKHA
+ {0x1D00, 0x1D2B, prN}, // Ll [44] LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL
+ {0x1D2C, 0x1D6A, prN}, // Lm [63] MODIFIER LETTER CAPITAL A..GREEK SUBSCRIPT SMALL LETTER CHI
+ {0x1D6B, 0x1D77, prN}, // Ll [13] LATIN SMALL LETTER UE..LATIN SMALL LETTER TURNED G
+ {0x1D78, 0x1D78, prN}, // Lm MODIFIER LETTER CYRILLIC EN
+ {0x1D79, 0x1D7F, prN}, // Ll [7] LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER UPSILON WITH STROKE
+ {0x1D80, 0x1D9A, prN}, // Ll [27] LATIN SMALL LETTER B WITH PALATAL HOOK..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK
+ {0x1D9B, 0x1DBF, prN}, // Lm [37] MODIFIER LETTER SMALL TURNED ALPHA..MODIFIER LETTER SMALL THETA
+ {0x1DC0, 0x1DFF, prN}, // Mn [64] COMBINING DOTTED GRAVE ACCENT..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x1E00, 0x1EFF, prN}, // L& [256] LATIN CAPITAL LETTER A WITH RING BELOW..LATIN SMALL LETTER Y WITH LOOP
+ {0x1F00, 0x1F15, prN}, // L& [22] GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F18, 0x1F1D, prN}, // Lu [6] GREEK CAPITAL LETTER EPSILON WITH PSILI..GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F20, 0x1F45, prN}, // L& [38] GREEK SMALL LETTER ETA WITH PSILI..GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F48, 0x1F4D, prN}, // Lu [6] GREEK CAPITAL LETTER OMICRON WITH PSILI..GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F50, 0x1F57, prN}, // Ll [8] GREEK SMALL LETTER UPSILON WITH PSILI..GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI
+ {0x1F59, 0x1F59, prN}, // Lu GREEK CAPITAL LETTER UPSILON WITH DASIA
+ {0x1F5B, 0x1F5B, prN}, // Lu GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA
+ {0x1F5D, 0x1F5D, prN}, // Lu GREEK CAPITAL LETTER UPSILON WITH DASIA AND OXIA
+ {0x1F5F, 0x1F7D, prN}, // L& [31] GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI..GREEK SMALL LETTER OMEGA WITH OXIA
+ {0x1F80, 0x1FB4, prN}, // L& [53] GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI..GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FB6, 0x1FBC, prN}, // L& [7] GREEK SMALL LETTER ALPHA WITH PERISPOMENI..GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
+ {0x1FBD, 0x1FBD, prN}, // Sk GREEK KORONIS
+ {0x1FBE, 0x1FBE, prN}, // Ll GREEK PROSGEGRAMMENI
+ {0x1FBF, 0x1FC1, prN}, // Sk [3] GREEK PSILI..GREEK DIALYTIKA AND PERISPOMENI
+ {0x1FC2, 0x1FC4, prN}, // Ll [3] GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FC6, 0x1FCC, prN}, // L& [7] GREEK SMALL LETTER ETA WITH PERISPOMENI..GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
+ {0x1FCD, 0x1FCF, prN}, // Sk [3] GREEK PSILI AND VARIA..GREEK PSILI AND PERISPOMENI
+ {0x1FD0, 0x1FD3, prN}, // Ll [4] GREEK SMALL LETTER IOTA WITH VRACHY..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
+ {0x1FD6, 0x1FDB, prN}, // L& [6] GREEK SMALL LETTER IOTA WITH PERISPOMENI..GREEK CAPITAL LETTER IOTA WITH OXIA
+ {0x1FDD, 0x1FDF, prN}, // Sk [3] GREEK DASIA AND VARIA..GREEK DASIA AND PERISPOMENI
+ {0x1FE0, 0x1FEC, prN}, // L& [13] GREEK SMALL LETTER UPSILON WITH VRACHY..GREEK CAPITAL LETTER RHO WITH DASIA
+ {0x1FED, 0x1FEF, prN}, // Sk [3] GREEK DIALYTIKA AND VARIA..GREEK VARIA
+ {0x1FF2, 0x1FF4, prN}, // Ll [3] GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FF6, 0x1FFC, prN}, // L& [7] GREEK SMALL LETTER OMEGA WITH PERISPOMENI..GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
+ {0x1FFD, 0x1FFE, prN}, // Sk [2] GREEK OXIA..GREEK DASIA
+ {0x2000, 0x200A, prN}, // Zs [11] EN QUAD..HAIR SPACE
+ {0x200B, 0x200F, prN}, // Cf [5] ZERO WIDTH SPACE..RIGHT-TO-LEFT MARK
+ {0x2010, 0x2010, prA}, // Pd HYPHEN
+ {0x2011, 0x2012, prN}, // Pd [2] NON-BREAKING HYPHEN..FIGURE DASH
+ {0x2013, 0x2015, prA}, // Pd [3] EN DASH..HORIZONTAL BAR
+ {0x2016, 0x2016, prA}, // Po DOUBLE VERTICAL LINE
+ {0x2017, 0x2017, prN}, // Po DOUBLE LOW LINE
+ {0x2018, 0x2018, prA}, // Pi LEFT SINGLE QUOTATION MARK
+ {0x2019, 0x2019, prA}, // Pf RIGHT SINGLE QUOTATION MARK
+ {0x201A, 0x201A, prN}, // Ps SINGLE LOW-9 QUOTATION MARK
+ {0x201B, 0x201B, prN}, // Pi SINGLE HIGH-REVERSED-9 QUOTATION MARK
+ {0x201C, 0x201C, prA}, // Pi LEFT DOUBLE QUOTATION MARK
+ {0x201D, 0x201D, prA}, // Pf RIGHT DOUBLE QUOTATION MARK
+ {0x201E, 0x201E, prN}, // Ps DOUBLE LOW-9 QUOTATION MARK
+ {0x201F, 0x201F, prN}, // Pi DOUBLE HIGH-REVERSED-9 QUOTATION MARK
+ {0x2020, 0x2022, prA}, // Po [3] DAGGER..BULLET
+ {0x2023, 0x2023, prN}, // Po TRIANGULAR BULLET
+ {0x2024, 0x2027, prA}, // Po [4] ONE DOT LEADER..HYPHENATION POINT
+ {0x2028, 0x2028, prN}, // Zl LINE SEPARATOR
+ {0x2029, 0x2029, prN}, // Zp PARAGRAPH SEPARATOR
+ {0x202A, 0x202E, prN}, // Cf [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE
+ {0x202F, 0x202F, prN}, // Zs NARROW NO-BREAK SPACE
+ {0x2030, 0x2030, prA}, // Po PER MILLE SIGN
+ {0x2031, 0x2031, prN}, // Po PER TEN THOUSAND SIGN
+ {0x2032, 0x2033, prA}, // Po [2] PRIME..DOUBLE PRIME
+ {0x2034, 0x2034, prN}, // Po TRIPLE PRIME
+ {0x2035, 0x2035, prA}, // Po REVERSED PRIME
+ {0x2036, 0x2038, prN}, // Po [3] REVERSED DOUBLE PRIME..CARET
+ {0x2039, 0x2039, prN}, // Pi SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+ {0x203A, 0x203A, prN}, // Pf SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+ {0x203B, 0x203B, prA}, // Po REFERENCE MARK
+ {0x203C, 0x203D, prN}, // Po [2] DOUBLE EXCLAMATION MARK..INTERROBANG
+ {0x203E, 0x203E, prA}, // Po OVERLINE
+ {0x203F, 0x2040, prN}, // Pc [2] UNDERTIE..CHARACTER TIE
+ {0x2041, 0x2043, prN}, // Po [3] CARET INSERTION POINT..HYPHEN BULLET
+ {0x2044, 0x2044, prN}, // Sm FRACTION SLASH
+ {0x2045, 0x2045, prN}, // Ps LEFT SQUARE BRACKET WITH QUILL
+ {0x2046, 0x2046, prN}, // Pe RIGHT SQUARE BRACKET WITH QUILL
+ {0x2047, 0x2051, prN}, // Po [11] DOUBLE QUESTION MARK..TWO ASTERISKS ALIGNED VERTICALLY
+ {0x2052, 0x2052, prN}, // Sm COMMERCIAL MINUS SIGN
+ {0x2053, 0x2053, prN}, // Po SWUNG DASH
+ {0x2054, 0x2054, prN}, // Pc INVERTED UNDERTIE
+ {0x2055, 0x205E, prN}, // Po [10] FLOWER PUNCTUATION MARK..VERTICAL FOUR DOTS
+ {0x205F, 0x205F, prN}, // Zs MEDIUM MATHEMATICAL SPACE
+ {0x2060, 0x2064, prN}, // Cf [5] WORD JOINER..INVISIBLE PLUS
+ {0x2066, 0x206F, prN}, // Cf [10] LEFT-TO-RIGHT ISOLATE..NOMINAL DIGIT SHAPES
+ {0x2070, 0x2070, prN}, // No SUPERSCRIPT ZERO
+ {0x2071, 0x2071, prN}, // Lm SUPERSCRIPT LATIN SMALL LETTER I
+ {0x2074, 0x2074, prA}, // No SUPERSCRIPT FOUR
+ {0x2075, 0x2079, prN}, // No [5] SUPERSCRIPT FIVE..SUPERSCRIPT NINE
+ {0x207A, 0x207C, prN}, // Sm [3] SUPERSCRIPT PLUS SIGN..SUPERSCRIPT EQUALS SIGN
+ {0x207D, 0x207D, prN}, // Ps SUPERSCRIPT LEFT PARENTHESIS
+ {0x207E, 0x207E, prN}, // Pe SUPERSCRIPT RIGHT PARENTHESIS
+ {0x207F, 0x207F, prA}, // Lm SUPERSCRIPT LATIN SMALL LETTER N
+ {0x2080, 0x2080, prN}, // No SUBSCRIPT ZERO
+ {0x2081, 0x2084, prA}, // No [4] SUBSCRIPT ONE..SUBSCRIPT FOUR
+ {0x2085, 0x2089, prN}, // No [5] SUBSCRIPT FIVE..SUBSCRIPT NINE
+ {0x208A, 0x208C, prN}, // Sm [3] SUBSCRIPT PLUS SIGN..SUBSCRIPT EQUALS SIGN
+ {0x208D, 0x208D, prN}, // Ps SUBSCRIPT LEFT PARENTHESIS
+ {0x208E, 0x208E, prN}, // Pe SUBSCRIPT RIGHT PARENTHESIS
+ {0x2090, 0x209C, prN}, // Lm [13] LATIN SUBSCRIPT SMALL LETTER A..LATIN SUBSCRIPT SMALL LETTER T
+ {0x20A0, 0x20A8, prN}, // Sc [9] EURO-CURRENCY SIGN..RUPEE SIGN
+ {0x20A9, 0x20A9, prH}, // Sc WON SIGN
+ {0x20AA, 0x20AB, prN}, // Sc [2] NEW SHEQEL SIGN..DONG SIGN
+ {0x20AC, 0x20AC, prA}, // Sc EURO SIGN
+ {0x20AD, 0x20C0, prN}, // Sc [20] KIP SIGN..SOM SIGN
+ {0x20D0, 0x20DC, prN}, // Mn [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE
+ {0x20DD, 0x20E0, prN}, // Me [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH
+ {0x20E1, 0x20E1, prN}, // Mn COMBINING LEFT RIGHT ARROW ABOVE
+ {0x20E2, 0x20E4, prN}, // Me [3] COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING UPWARD POINTING TRIANGLE
+ {0x20E5, 0x20F0, prN}, // Mn [12] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING ASTERISK ABOVE
+ {0x2100, 0x2101, prN}, // So [2] ACCOUNT OF..ADDRESSED TO THE SUBJECT
+ {0x2102, 0x2102, prN}, // Lu DOUBLE-STRUCK CAPITAL C
+ {0x2103, 0x2103, prA}, // So DEGREE CELSIUS
+ {0x2104, 0x2104, prN}, // So CENTRE LINE SYMBOL
+ {0x2105, 0x2105, prA}, // So CARE OF
+ {0x2106, 0x2106, prN}, // So CADA UNA
+ {0x2107, 0x2107, prN}, // Lu EULER CONSTANT
+ {0x2108, 0x2108, prN}, // So SCRUPLE
+ {0x2109, 0x2109, prA}, // So DEGREE FAHRENHEIT
+ {0x210A, 0x2112, prN}, // L& [9] SCRIPT SMALL G..SCRIPT CAPITAL L
+ {0x2113, 0x2113, prA}, // Ll SCRIPT SMALL L
+ {0x2114, 0x2114, prN}, // So L B BAR SYMBOL
+ {0x2115, 0x2115, prN}, // Lu DOUBLE-STRUCK CAPITAL N
+ {0x2116, 0x2116, prA}, // So NUMERO SIGN
+ {0x2117, 0x2117, prN}, // So SOUND RECORDING COPYRIGHT
+ {0x2118, 0x2118, prN}, // Sm SCRIPT CAPITAL P
+ {0x2119, 0x211D, prN}, // Lu [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-STRUCK CAPITAL R
+ {0x211E, 0x2120, prN}, // So [3] PRESCRIPTION TAKE..SERVICE MARK
+ {0x2121, 0x2122, prA}, // So [2] TELEPHONE SIGN..TRADE MARK SIGN
+ {0x2123, 0x2123, prN}, // So VERSICLE
+ {0x2124, 0x2124, prN}, // Lu DOUBLE-STRUCK CAPITAL Z
+ {0x2125, 0x2125, prN}, // So OUNCE SIGN
+ {0x2126, 0x2126, prA}, // Lu OHM SIGN
+ {0x2127, 0x2127, prN}, // So INVERTED OHM SIGN
+ {0x2128, 0x2128, prN}, // Lu BLACK-LETTER CAPITAL Z
+ {0x2129, 0x2129, prN}, // So TURNED GREEK SMALL LETTER IOTA
+ {0x212A, 0x212A, prN}, // Lu KELVIN SIGN
+ {0x212B, 0x212B, prA}, // Lu ANGSTROM SIGN
+ {0x212C, 0x212D, prN}, // Lu [2] SCRIPT CAPITAL B..BLACK-LETTER CAPITAL C
+ {0x212E, 0x212E, prN}, // So ESTIMATED SYMBOL
+ {0x212F, 0x2134, prN}, // L& [6] SCRIPT SMALL E..SCRIPT SMALL O
+ {0x2135, 0x2138, prN}, // Lo [4] ALEF SYMBOL..DALET SYMBOL
+ {0x2139, 0x2139, prN}, // Ll INFORMATION SOURCE
+ {0x213A, 0x213B, prN}, // So [2] ROTATED CAPITAL Q..FACSIMILE SIGN
+ {0x213C, 0x213F, prN}, // L& [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STRUCK CAPITAL PI
+ {0x2140, 0x2144, prN}, // Sm [5] DOUBLE-STRUCK N-ARY SUMMATION..TURNED SANS-SERIF CAPITAL Y
+ {0x2145, 0x2149, prN}, // L& [5] DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL J
+ {0x214A, 0x214A, prN}, // So PROPERTY LINE
+ {0x214B, 0x214B, prN}, // Sm TURNED AMPERSAND
+ {0x214C, 0x214D, prN}, // So [2] PER SIGN..AKTIESELSKAB
+ {0x214E, 0x214E, prN}, // Ll TURNED SMALL F
+ {0x214F, 0x214F, prN}, // So SYMBOL FOR SAMARITAN SOURCE
+ {0x2150, 0x2152, prN}, // No [3] VULGAR FRACTION ONE SEVENTH..VULGAR FRACTION ONE TENTH
+ {0x2153, 0x2154, prA}, // No [2] VULGAR FRACTION ONE THIRD..VULGAR FRACTION TWO THIRDS
+ {0x2155, 0x215A, prN}, // No [6] VULGAR FRACTION ONE FIFTH..VULGAR FRACTION FIVE SIXTHS
+ {0x215B, 0x215E, prA}, // No [4] VULGAR FRACTION ONE EIGHTH..VULGAR FRACTION SEVEN EIGHTHS
+ {0x215F, 0x215F, prN}, // No FRACTION NUMERATOR ONE
+ {0x2160, 0x216B, prA}, // Nl [12] ROMAN NUMERAL ONE..ROMAN NUMERAL TWELVE
+ {0x216C, 0x216F, prN}, // Nl [4] ROMAN NUMERAL FIFTY..ROMAN NUMERAL ONE THOUSAND
+ {0x2170, 0x2179, prA}, // Nl [10] SMALL ROMAN NUMERAL ONE..SMALL ROMAN NUMERAL TEN
+ {0x217A, 0x2182, prN}, // Nl [9] SMALL ROMAN NUMERAL ELEVEN..ROMAN NUMERAL TEN THOUSAND
+ {0x2183, 0x2184, prN}, // L& [2] ROMAN NUMERAL REVERSED ONE HUNDRED..LATIN SMALL LETTER REVERSED C
+ {0x2185, 0x2188, prN}, // Nl [4] ROMAN NUMERAL SIX LATE FORM..ROMAN NUMERAL ONE HUNDRED THOUSAND
+ {0x2189, 0x2189, prA}, // No VULGAR FRACTION ZERO THIRDS
+ {0x218A, 0x218B, prN}, // So [2] TURNED DIGIT TWO..TURNED DIGIT THREE
+ {0x2190, 0x2194, prA}, // Sm [5] LEFTWARDS ARROW..LEFT RIGHT ARROW
+ {0x2195, 0x2199, prA}, // So [5] UP DOWN ARROW..SOUTH WEST ARROW
+ {0x219A, 0x219B, prN}, // Sm [2] LEFTWARDS ARROW WITH STROKE..RIGHTWARDS ARROW WITH STROKE
+ {0x219C, 0x219F, prN}, // So [4] LEFTWARDS WAVE ARROW..UPWARDS TWO HEADED ARROW
+ {0x21A0, 0x21A0, prN}, // Sm RIGHTWARDS TWO HEADED ARROW
+ {0x21A1, 0x21A2, prN}, // So [2] DOWNWARDS TWO HEADED ARROW..LEFTWARDS ARROW WITH TAIL
+ {0x21A3, 0x21A3, prN}, // Sm RIGHTWARDS ARROW WITH TAIL
+ {0x21A4, 0x21A5, prN}, // So [2] LEFTWARDS ARROW FROM BAR..UPWARDS ARROW FROM BAR
+ {0x21A6, 0x21A6, prN}, // Sm RIGHTWARDS ARROW FROM BAR
+ {0x21A7, 0x21AD, prN}, // So [7] DOWNWARDS ARROW FROM BAR..LEFT RIGHT WAVE ARROW
+ {0x21AE, 0x21AE, prN}, // Sm LEFT RIGHT ARROW WITH STROKE
+ {0x21AF, 0x21B7, prN}, // So [9] DOWNWARDS ZIGZAG ARROW..CLOCKWISE TOP SEMICIRCLE ARROW
+ {0x21B8, 0x21B9, prA}, // So [2] NORTH WEST ARROW TO LONG BAR..LEFTWARDS ARROW TO BAR OVER RIGHTWARDS ARROW TO BAR
+ {0x21BA, 0x21CD, prN}, // So [20] ANTICLOCKWISE OPEN CIRCLE ARROW..LEFTWARDS DOUBLE ARROW WITH STROKE
+ {0x21CE, 0x21CF, prN}, // Sm [2] LEFT RIGHT DOUBLE ARROW WITH STROKE..RIGHTWARDS DOUBLE ARROW WITH STROKE
+ {0x21D0, 0x21D1, prN}, // So [2] LEFTWARDS DOUBLE ARROW..UPWARDS DOUBLE ARROW
+ {0x21D2, 0x21D2, prA}, // Sm RIGHTWARDS DOUBLE ARROW
+ {0x21D3, 0x21D3, prN}, // So DOWNWARDS DOUBLE ARROW
+ {0x21D4, 0x21D4, prA}, // Sm LEFT RIGHT DOUBLE ARROW
+ {0x21D5, 0x21E6, prN}, // So [18] UP DOWN DOUBLE ARROW..LEFTWARDS WHITE ARROW
+ {0x21E7, 0x21E7, prA}, // So UPWARDS WHITE ARROW
+ {0x21E8, 0x21F3, prN}, // So [12] RIGHTWARDS WHITE ARROW..UP DOWN WHITE ARROW
+ {0x21F4, 0x21FF, prN}, // Sm [12] RIGHT ARROW WITH SMALL CIRCLE..LEFT RIGHT OPEN-HEADED ARROW
+ {0x2200, 0x2200, prA}, // Sm FOR ALL
+ {0x2201, 0x2201, prN}, // Sm COMPLEMENT
+ {0x2202, 0x2203, prA}, // Sm [2] PARTIAL DIFFERENTIAL..THERE EXISTS
+ {0x2204, 0x2206, prN}, // Sm [3] THERE DOES NOT EXIST..INCREMENT
+ {0x2207, 0x2208, prA}, // Sm [2] NABLA..ELEMENT OF
+ {0x2209, 0x220A, prN}, // Sm [2] NOT AN ELEMENT OF..SMALL ELEMENT OF
+ {0x220B, 0x220B, prA}, // Sm CONTAINS AS MEMBER
+ {0x220C, 0x220E, prN}, // Sm [3] DOES NOT CONTAIN AS MEMBER..END OF PROOF
+ {0x220F, 0x220F, prA}, // Sm N-ARY PRODUCT
+ {0x2210, 0x2210, prN}, // Sm N-ARY COPRODUCT
+ {0x2211, 0x2211, prA}, // Sm N-ARY SUMMATION
+ {0x2212, 0x2214, prN}, // Sm [3] MINUS SIGN..DOT PLUS
+ {0x2215, 0x2215, prA}, // Sm DIVISION SLASH
+ {0x2216, 0x2219, prN}, // Sm [4] SET MINUS..BULLET OPERATOR
+ {0x221A, 0x221A, prA}, // Sm SQUARE ROOT
+ {0x221B, 0x221C, prN}, // Sm [2] CUBE ROOT..FOURTH ROOT
+ {0x221D, 0x2220, prA}, // Sm [4] PROPORTIONAL TO..ANGLE
+ {0x2221, 0x2222, prN}, // Sm [2] MEASURED ANGLE..SPHERICAL ANGLE
+ {0x2223, 0x2223, prA}, // Sm DIVIDES
+ {0x2224, 0x2224, prN}, // Sm DOES NOT DIVIDE
+ {0x2225, 0x2225, prA}, // Sm PARALLEL TO
+ {0x2226, 0x2226, prN}, // Sm NOT PARALLEL TO
+ {0x2227, 0x222C, prA}, // Sm [6] LOGICAL AND..DOUBLE INTEGRAL
+ {0x222D, 0x222D, prN}, // Sm TRIPLE INTEGRAL
+ {0x222E, 0x222E, prA}, // Sm CONTOUR INTEGRAL
+ {0x222F, 0x2233, prN}, // Sm [5] SURFACE INTEGRAL..ANTICLOCKWISE CONTOUR INTEGRAL
+ {0x2234, 0x2237, prA}, // Sm [4] THEREFORE..PROPORTION
+ {0x2238, 0x223B, prN}, // Sm [4] DOT MINUS..HOMOTHETIC
+ {0x223C, 0x223D, prA}, // Sm [2] TILDE OPERATOR..REVERSED TILDE
+ {0x223E, 0x2247, prN}, // Sm [10] INVERTED LAZY S..NEITHER APPROXIMATELY NOR ACTUALLY EQUAL TO
+ {0x2248, 0x2248, prA}, // Sm ALMOST EQUAL TO
+ {0x2249, 0x224B, prN}, // Sm [3] NOT ALMOST EQUAL TO..TRIPLE TILDE
+ {0x224C, 0x224C, prA}, // Sm ALL EQUAL TO
+ {0x224D, 0x2251, prN}, // Sm [5] EQUIVALENT TO..GEOMETRICALLY EQUAL TO
+ {0x2252, 0x2252, prA}, // Sm APPROXIMATELY EQUAL TO OR THE IMAGE OF
+ {0x2253, 0x225F, prN}, // Sm [13] IMAGE OF OR APPROXIMATELY EQUAL TO..QUESTIONED EQUAL TO
+ {0x2260, 0x2261, prA}, // Sm [2] NOT EQUAL TO..IDENTICAL TO
+ {0x2262, 0x2263, prN}, // Sm [2] NOT IDENTICAL TO..STRICTLY EQUIVALENT TO
+ {0x2264, 0x2267, prA}, // Sm [4] LESS-THAN OR EQUAL TO..GREATER-THAN OVER EQUAL TO
+ {0x2268, 0x2269, prN}, // Sm [2] LESS-THAN BUT NOT EQUAL TO..GREATER-THAN BUT NOT EQUAL TO
+ {0x226A, 0x226B, prA}, // Sm [2] MUCH LESS-THAN..MUCH GREATER-THAN
+ {0x226C, 0x226D, prN}, // Sm [2] BETWEEN..NOT EQUIVALENT TO
+ {0x226E, 0x226F, prA}, // Sm [2] NOT LESS-THAN..NOT GREATER-THAN
+ {0x2270, 0x2281, prN}, // Sm [18] NEITHER LESS-THAN NOR EQUAL TO..DOES NOT SUCCEED
+ {0x2282, 0x2283, prA}, // Sm [2] SUBSET OF..SUPERSET OF
+ {0x2284, 0x2285, prN}, // Sm [2] NOT A SUBSET OF..NOT A SUPERSET OF
+ {0x2286, 0x2287, prA}, // Sm [2] SUBSET OF OR EQUAL TO..SUPERSET OF OR EQUAL TO
+ {0x2288, 0x2294, prN}, // Sm [13] NEITHER A SUBSET OF NOR EQUAL TO..SQUARE CUP
+ {0x2295, 0x2295, prA}, // Sm CIRCLED PLUS
+ {0x2296, 0x2298, prN}, // Sm [3] CIRCLED MINUS..CIRCLED DIVISION SLASH
+ {0x2299, 0x2299, prA}, // Sm CIRCLED DOT OPERATOR
+ {0x229A, 0x22A4, prN}, // Sm [11] CIRCLED RING OPERATOR..DOWN TACK
+ {0x22A5, 0x22A5, prA}, // Sm UP TACK
+ {0x22A6, 0x22BE, prN}, // Sm [25] ASSERTION..RIGHT ANGLE WITH ARC
+ {0x22BF, 0x22BF, prA}, // Sm RIGHT TRIANGLE
+ {0x22C0, 0x22FF, prN}, // Sm [64] N-ARY LOGICAL AND..Z NOTATION BAG MEMBERSHIP
+ {0x2300, 0x2307, prN}, // So [8] DIAMETER SIGN..WAVY LINE
+ {0x2308, 0x2308, prN}, // Ps LEFT CEILING
+ {0x2309, 0x2309, prN}, // Pe RIGHT CEILING
+ {0x230A, 0x230A, prN}, // Ps LEFT FLOOR
+ {0x230B, 0x230B, prN}, // Pe RIGHT FLOOR
+ {0x230C, 0x2311, prN}, // So [6] BOTTOM RIGHT CROP..SQUARE LOZENGE
+ {0x2312, 0x2312, prA}, // So ARC
+ {0x2313, 0x2319, prN}, // So [7] SEGMENT..TURNED NOT SIGN
+ {0x231A, 0x231B, prW}, // So [2] WATCH..HOURGLASS
+ {0x231C, 0x231F, prN}, // So [4] TOP LEFT CORNER..BOTTOM RIGHT CORNER
+ {0x2320, 0x2321, prN}, // Sm [2] TOP HALF INTEGRAL..BOTTOM HALF INTEGRAL
+ {0x2322, 0x2328, prN}, // So [7] FROWN..KEYBOARD
+ {0x2329, 0x2329, prW}, // Ps LEFT-POINTING ANGLE BRACKET
+ {0x232A, 0x232A, prW}, // Pe RIGHT-POINTING ANGLE BRACKET
+ {0x232B, 0x237B, prN}, // So [81] ERASE TO THE LEFT..NOT CHECK MARK
+ {0x237C, 0x237C, prN}, // Sm RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW
+ {0x237D, 0x239A, prN}, // So [30] SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL
+ {0x239B, 0x23B3, prN}, // Sm [25] LEFT PARENTHESIS UPPER HOOK..SUMMATION BOTTOM
+ {0x23B4, 0x23DB, prN}, // So [40] TOP SQUARE BRACKET..FUSE
+ {0x23DC, 0x23E1, prN}, // Sm [6] TOP PARENTHESIS..BOTTOM TORTOISE SHELL BRACKET
+ {0x23E2, 0x23E8, prN}, // So [7] WHITE TRAPEZIUM..DECIMAL EXPONENT SYMBOL
+ {0x23E9, 0x23EC, prW}, // So [4] BLACK RIGHT-POINTING DOUBLE TRIANGLE..BLACK DOWN-POINTING DOUBLE TRIANGLE
+ {0x23ED, 0x23EF, prN}, // So [3] BLACK RIGHT-POINTING DOUBLE TRIANGLE WITH VERTICAL BAR..BLACK RIGHT-POINTING TRIANGLE WITH DOUBLE VERTICAL BAR
+ {0x23F0, 0x23F0, prW}, // So ALARM CLOCK
+ {0x23F1, 0x23F2, prN}, // So [2] STOPWATCH..TIMER CLOCK
+ {0x23F3, 0x23F3, prW}, // So HOURGLASS WITH FLOWING SAND
+ {0x23F4, 0x23FF, prN}, // So [12] BLACK MEDIUM LEFT-POINTING TRIANGLE..OBSERVER EYE SYMBOL
+ {0x2400, 0x2426, prN}, // So [39] SYMBOL FOR NULL..SYMBOL FOR SUBSTITUTE FORM TWO
+ {0x2440, 0x244A, prN}, // So [11] OCR HOOK..OCR DOUBLE BACKSLASH
+ {0x2460, 0x249B, prA}, // No [60] CIRCLED DIGIT ONE..NUMBER TWENTY FULL STOP
+ {0x249C, 0x24E9, prA}, // So [78] PARENTHESIZED LATIN SMALL LETTER A..CIRCLED LATIN SMALL LETTER Z
+ {0x24EA, 0x24EA, prN}, // No CIRCLED DIGIT ZERO
+ {0x24EB, 0x24FF, prA}, // No [21] NEGATIVE CIRCLED NUMBER ELEVEN..NEGATIVE CIRCLED DIGIT ZERO
+ {0x2500, 0x254B, prA}, // So [76] BOX DRAWINGS LIGHT HORIZONTAL..BOX DRAWINGS HEAVY VERTICAL AND HORIZONTAL
+ {0x254C, 0x254F, prN}, // So [4] BOX DRAWINGS LIGHT DOUBLE DASH HORIZONTAL..BOX DRAWINGS HEAVY DOUBLE DASH VERTICAL
+ {0x2550, 0x2573, prA}, // So [36] BOX DRAWINGS DOUBLE HORIZONTAL..BOX DRAWINGS LIGHT DIAGONAL CROSS
+ {0x2574, 0x257F, prN}, // So [12] BOX DRAWINGS LIGHT LEFT..BOX DRAWINGS HEAVY UP AND LIGHT DOWN
+ {0x2580, 0x258F, prA}, // So [16] UPPER HALF BLOCK..LEFT ONE EIGHTH BLOCK
+ {0x2590, 0x2591, prN}, // So [2] RIGHT HALF BLOCK..LIGHT SHADE
+ {0x2592, 0x2595, prA}, // So [4] MEDIUM SHADE..RIGHT ONE EIGHTH BLOCK
+ {0x2596, 0x259F, prN}, // So [10] QUADRANT LOWER LEFT..QUADRANT UPPER RIGHT AND LOWER LEFT AND LOWER RIGHT
+ {0x25A0, 0x25A1, prA}, // So [2] BLACK SQUARE..WHITE SQUARE
+ {0x25A2, 0x25A2, prN}, // So WHITE SQUARE WITH ROUNDED CORNERS
+ {0x25A3, 0x25A9, prA}, // So [7] WHITE SQUARE CONTAINING BLACK SMALL SQUARE..SQUARE WITH DIAGONAL CROSSHATCH FILL
+ {0x25AA, 0x25B1, prN}, // So [8] BLACK SMALL SQUARE..WHITE PARALLELOGRAM
+ {0x25B2, 0x25B3, prA}, // So [2] BLACK UP-POINTING TRIANGLE..WHITE UP-POINTING TRIANGLE
+ {0x25B4, 0x25B5, prN}, // So [2] BLACK UP-POINTING SMALL TRIANGLE..WHITE UP-POINTING SMALL TRIANGLE
+ {0x25B6, 0x25B6, prA}, // So BLACK RIGHT-POINTING TRIANGLE
+ {0x25B7, 0x25B7, prA}, // Sm WHITE RIGHT-POINTING TRIANGLE
+ {0x25B8, 0x25BB, prN}, // So [4] BLACK RIGHT-POINTING SMALL TRIANGLE..WHITE RIGHT-POINTING POINTER
+ {0x25BC, 0x25BD, prA}, // So [2] BLACK DOWN-POINTING TRIANGLE..WHITE DOWN-POINTING TRIANGLE
+ {0x25BE, 0x25BF, prN}, // So [2] BLACK DOWN-POINTING SMALL TRIANGLE..WHITE DOWN-POINTING SMALL TRIANGLE
+ {0x25C0, 0x25C0, prA}, // So BLACK LEFT-POINTING TRIANGLE
+ {0x25C1, 0x25C1, prA}, // Sm WHITE LEFT-POINTING TRIANGLE
+ {0x25C2, 0x25C5, prN}, // So [4] BLACK LEFT-POINTING SMALL TRIANGLE..WHITE LEFT-POINTING POINTER
+ {0x25C6, 0x25C8, prA}, // So [3] BLACK DIAMOND..WHITE DIAMOND CONTAINING BLACK SMALL DIAMOND
+ {0x25C9, 0x25CA, prN}, // So [2] FISHEYE..LOZENGE
+ {0x25CB, 0x25CB, prA}, // So WHITE CIRCLE
+ {0x25CC, 0x25CD, prN}, // So [2] DOTTED CIRCLE..CIRCLE WITH VERTICAL FILL
+ {0x25CE, 0x25D1, prA}, // So [4] BULLSEYE..CIRCLE WITH RIGHT HALF BLACK
+ {0x25D2, 0x25E1, prN}, // So [16] CIRCLE WITH LOWER HALF BLACK..LOWER HALF CIRCLE
+ {0x25E2, 0x25E5, prA}, // So [4] BLACK LOWER RIGHT TRIANGLE..BLACK UPPER RIGHT TRIANGLE
+ {0x25E6, 0x25EE, prN}, // So [9] WHITE BULLET..UP-POINTING TRIANGLE WITH RIGHT HALF BLACK
+ {0x25EF, 0x25EF, prA}, // So LARGE CIRCLE
+ {0x25F0, 0x25F7, prN}, // So [8] WHITE SQUARE WITH UPPER LEFT QUADRANT..WHITE CIRCLE WITH UPPER RIGHT QUADRANT
+ {0x25F8, 0x25FC, prN}, // Sm [5] UPPER LEFT TRIANGLE..BLACK MEDIUM SQUARE
+ {0x25FD, 0x25FE, prW}, // Sm [2] WHITE MEDIUM SMALL SQUARE..BLACK MEDIUM SMALL SQUARE
+ {0x25FF, 0x25FF, prN}, // Sm LOWER RIGHT TRIANGLE
+ {0x2600, 0x2604, prN}, // So [5] BLACK SUN WITH RAYS..COMET
+ {0x2605, 0x2606, prA}, // So [2] BLACK STAR..WHITE STAR
+ {0x2607, 0x2608, prN}, // So [2] LIGHTNING..THUNDERSTORM
+ {0x2609, 0x2609, prA}, // So SUN
+ {0x260A, 0x260D, prN}, // So [4] ASCENDING NODE..OPPOSITION
+ {0x260E, 0x260F, prA}, // So [2] BLACK TELEPHONE..WHITE TELEPHONE
+ {0x2610, 0x2613, prN}, // So [4] BALLOT BOX..SALTIRE
+ {0x2614, 0x2615, prW}, // So [2] UMBRELLA WITH RAIN DROPS..HOT BEVERAGE
+ {0x2616, 0x261B, prN}, // So [6] WHITE SHOGI PIECE..BLACK RIGHT POINTING INDEX
+ {0x261C, 0x261C, prA}, // So WHITE LEFT POINTING INDEX
+ {0x261D, 0x261D, prN}, // So WHITE UP POINTING INDEX
+ {0x261E, 0x261E, prA}, // So WHITE RIGHT POINTING INDEX
+ {0x261F, 0x263F, prN}, // So [33] WHITE DOWN POINTING INDEX..MERCURY
+ {0x2640, 0x2640, prA}, // So FEMALE SIGN
+ {0x2641, 0x2641, prN}, // So EARTH
+ {0x2642, 0x2642, prA}, // So MALE SIGN
+ {0x2643, 0x2647, prN}, // So [5] JUPITER..PLUTO
+ {0x2648, 0x2653, prW}, // So [12] ARIES..PISCES
+ {0x2654, 0x265F, prN}, // So [12] WHITE CHESS KING..BLACK CHESS PAWN
+ {0x2660, 0x2661, prA}, // So [2] BLACK SPADE SUIT..WHITE HEART SUIT
+ {0x2662, 0x2662, prN}, // So WHITE DIAMOND SUIT
+ {0x2663, 0x2665, prA}, // So [3] BLACK CLUB SUIT..BLACK HEART SUIT
+ {0x2666, 0x2666, prN}, // So BLACK DIAMOND SUIT
+ {0x2667, 0x266A, prA}, // So [4] WHITE CLUB SUIT..EIGHTH NOTE
+ {0x266B, 0x266B, prN}, // So BEAMED EIGHTH NOTES
+ {0x266C, 0x266D, prA}, // So [2] BEAMED SIXTEENTH NOTES..MUSIC FLAT SIGN
+ {0x266E, 0x266E, prN}, // So MUSIC NATURAL SIGN
+ {0x266F, 0x266F, prA}, // Sm MUSIC SHARP SIGN
+ {0x2670, 0x267E, prN}, // So [15] WEST SYRIAC CROSS..PERMANENT PAPER SIGN
+ {0x267F, 0x267F, prW}, // So WHEELCHAIR SYMBOL
+ {0x2680, 0x2692, prN}, // So [19] DIE FACE-1..HAMMER AND PICK
+ {0x2693, 0x2693, prW}, // So ANCHOR
+ {0x2694, 0x269D, prN}, // So [10] CROSSED SWORDS..OUTLINED WHITE STAR
+ {0x269E, 0x269F, prA}, // So [2] THREE LINES CONVERGING RIGHT..THREE LINES CONVERGING LEFT
+ {0x26A0, 0x26A0, prN}, // So WARNING SIGN
+ {0x26A1, 0x26A1, prW}, // So HIGH VOLTAGE SIGN
+ {0x26A2, 0x26A9, prN}, // So [8] DOUBLED FEMALE SIGN..HORIZONTAL MALE WITH STROKE SIGN
+ {0x26AA, 0x26AB, prW}, // So [2] MEDIUM WHITE CIRCLE..MEDIUM BLACK CIRCLE
+ {0x26AC, 0x26BC, prN}, // So [17] MEDIUM SMALL WHITE CIRCLE..SESQUIQUADRATE
+ {0x26BD, 0x26BE, prW}, // So [2] SOCCER BALL..BASEBALL
+ {0x26BF, 0x26BF, prA}, // So SQUARED KEY
+ {0x26C0, 0x26C3, prN}, // So [4] WHITE DRAUGHTS MAN..BLACK DRAUGHTS KING
+ {0x26C4, 0x26C5, prW}, // So [2] SNOWMAN WITHOUT SNOW..SUN BEHIND CLOUD
+ {0x26C6, 0x26CD, prA}, // So [8] RAIN..DISABLED CAR
+ {0x26CE, 0x26CE, prW}, // So OPHIUCHUS
+ {0x26CF, 0x26D3, prA}, // So [5] PICK..CHAINS
+ {0x26D4, 0x26D4, prW}, // So NO ENTRY
+ {0x26D5, 0x26E1, prA}, // So [13] ALTERNATE ONE-WAY LEFT WAY TRAFFIC..RESTRICTED LEFT ENTRY-2
+ {0x26E2, 0x26E2, prN}, // So ASTRONOMICAL SYMBOL FOR URANUS
+ {0x26E3, 0x26E3, prA}, // So HEAVY CIRCLE WITH STROKE AND TWO DOTS ABOVE
+ {0x26E4, 0x26E7, prN}, // So [4] PENTAGRAM..INVERTED PENTAGRAM
+ {0x26E8, 0x26E9, prA}, // So [2] BLACK CROSS ON SHIELD..SHINTO SHRINE
+ {0x26EA, 0x26EA, prW}, // So CHURCH
+ {0x26EB, 0x26F1, prA}, // So [7] CASTLE..UMBRELLA ON GROUND
+ {0x26F2, 0x26F3, prW}, // So [2] FOUNTAIN..FLAG IN HOLE
+ {0x26F4, 0x26F4, prA}, // So FERRY
+ {0x26F5, 0x26F5, prW}, // So SAILBOAT
+ {0x26F6, 0x26F9, prA}, // So [4] SQUARE FOUR CORNERS..PERSON WITH BALL
+ {0x26FA, 0x26FA, prW}, // So TENT
+ {0x26FB, 0x26FC, prA}, // So [2] JAPANESE BANK SYMBOL..HEADSTONE GRAVEYARD SYMBOL
+ {0x26FD, 0x26FD, prW}, // So FUEL PUMP
+ {0x26FE, 0x26FF, prA}, // So [2] CUP ON BLACK SQUARE..WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE
+ {0x2700, 0x2704, prN}, // So [5] BLACK SAFETY SCISSORS..WHITE SCISSORS
+ {0x2705, 0x2705, prW}, // So WHITE HEAVY CHECK MARK
+ {0x2706, 0x2709, prN}, // So [4] TELEPHONE LOCATION SIGN..ENVELOPE
+ {0x270A, 0x270B, prW}, // So [2] RAISED FIST..RAISED HAND
+ {0x270C, 0x2727, prN}, // So [28] VICTORY HAND..WHITE FOUR POINTED STAR
+ {0x2728, 0x2728, prW}, // So SPARKLES
+ {0x2729, 0x273C, prN}, // So [20] STRESS OUTLINED WHITE STAR..OPEN CENTRE TEARDROP-SPOKED ASTERISK
+ {0x273D, 0x273D, prA}, // So HEAVY TEARDROP-SPOKED ASTERISK
+ {0x273E, 0x274B, prN}, // So [14] SIX PETALLED BLACK AND WHITE FLORETTE..HEAVY EIGHT TEARDROP-SPOKED PROPELLER ASTERISK
+ {0x274C, 0x274C, prW}, // So CROSS MARK
+ {0x274D, 0x274D, prN}, // So SHADOWED WHITE CIRCLE
+ {0x274E, 0x274E, prW}, // So NEGATIVE SQUARED CROSS MARK
+ {0x274F, 0x2752, prN}, // So [4] LOWER RIGHT DROP-SHADOWED WHITE SQUARE..UPPER RIGHT SHADOWED WHITE SQUARE
+ {0x2753, 0x2755, prW}, // So [3] BLACK QUESTION MARK ORNAMENT..WHITE EXCLAMATION MARK ORNAMENT
+ {0x2756, 0x2756, prN}, // So BLACK DIAMOND MINUS WHITE X
+ {0x2757, 0x2757, prW}, // So HEAVY EXCLAMATION MARK SYMBOL
+ {0x2758, 0x2767, prN}, // So [16] LIGHT VERTICAL BAR..ROTATED FLORAL HEART BULLET
+ {0x2768, 0x2768, prN}, // Ps MEDIUM LEFT PARENTHESIS ORNAMENT
+ {0x2769, 0x2769, prN}, // Pe MEDIUM RIGHT PARENTHESIS ORNAMENT
+ {0x276A, 0x276A, prN}, // Ps MEDIUM FLATTENED LEFT PARENTHESIS ORNAMENT
+ {0x276B, 0x276B, prN}, // Pe MEDIUM FLATTENED RIGHT PARENTHESIS ORNAMENT
+ {0x276C, 0x276C, prN}, // Ps MEDIUM LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276D, 0x276D, prN}, // Pe MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276E, 0x276E, prN}, // Ps HEAVY LEFT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x276F, 0x276F, prN}, // Pe HEAVY RIGHT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x2770, 0x2770, prN}, // Ps HEAVY LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2771, 0x2771, prN}, // Pe HEAVY RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2772, 0x2772, prN}, // Ps LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2773, 0x2773, prN}, // Pe LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2774, 0x2774, prN}, // Ps MEDIUM LEFT CURLY BRACKET ORNAMENT
+ {0x2775, 0x2775, prN}, // Pe MEDIUM RIGHT CURLY BRACKET ORNAMENT
+ {0x2776, 0x277F, prA}, // No [10] DINGBAT NEGATIVE CIRCLED DIGIT ONE..DINGBAT NEGATIVE CIRCLED NUMBER TEN
+ {0x2780, 0x2793, prN}, // No [20] DINGBAT CIRCLED SANS-SERIF DIGIT ONE..DINGBAT NEGATIVE CIRCLED SANS-SERIF NUMBER TEN
+ {0x2794, 0x2794, prN}, // So HEAVY WIDE-HEADED RIGHTWARDS ARROW
+ {0x2795, 0x2797, prW}, // So [3] HEAVY PLUS SIGN..HEAVY DIVISION SIGN
+ {0x2798, 0x27AF, prN}, // So [24] HEAVY SOUTH EAST ARROW..NOTCHED LOWER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW
+ {0x27B0, 0x27B0, prW}, // So CURLY LOOP
+ {0x27B1, 0x27BE, prN}, // So [14] NOTCHED UPPER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW..OPEN-OUTLINED RIGHTWARDS ARROW
+ {0x27BF, 0x27BF, prW}, // So DOUBLE CURLY LOOP
+ {0x27C0, 0x27C4, prN}, // Sm [5] THREE DIMENSIONAL ANGLE..OPEN SUPERSET
+ {0x27C5, 0x27C5, prN}, // Ps LEFT S-SHAPED BAG DELIMITER
+ {0x27C6, 0x27C6, prN}, // Pe RIGHT S-SHAPED BAG DELIMITER
+ {0x27C7, 0x27E5, prN}, // Sm [31] OR WITH DOT INSIDE..WHITE SQUARE WITH RIGHTWARDS TICK
+ {0x27E6, 0x27E6, prNa}, // Ps MATHEMATICAL LEFT WHITE SQUARE BRACKET
+ {0x27E7, 0x27E7, prNa}, // Pe MATHEMATICAL RIGHT WHITE SQUARE BRACKET
+ {0x27E8, 0x27E8, prNa}, // Ps MATHEMATICAL LEFT ANGLE BRACKET
+ {0x27E9, 0x27E9, prNa}, // Pe MATHEMATICAL RIGHT ANGLE BRACKET
+ {0x27EA, 0x27EA, prNa}, // Ps MATHEMATICAL LEFT DOUBLE ANGLE BRACKET
+ {0x27EB, 0x27EB, prNa}, // Pe MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET
+ {0x27EC, 0x27EC, prNa}, // Ps MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET
+ {0x27ED, 0x27ED, prNa}, // Pe MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x27EE, 0x27EE, prN}, // Ps MATHEMATICAL LEFT FLATTENED PARENTHESIS
+ {0x27EF, 0x27EF, prN}, // Pe MATHEMATICAL RIGHT FLATTENED PARENTHESIS
+ {0x27F0, 0x27FF, prN}, // Sm [16] UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW
+ {0x2800, 0x28FF, prN}, // So [256] BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678
+ {0x2900, 0x297F, prN}, // Sm [128] RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..DOWN FISH TAIL
+ {0x2980, 0x2982, prN}, // Sm [3] TRIPLE VERTICAL BAR DELIMITER..Z NOTATION TYPE COLON
+ {0x2983, 0x2983, prN}, // Ps LEFT WHITE CURLY BRACKET
+ {0x2984, 0x2984, prN}, // Pe RIGHT WHITE CURLY BRACKET
+ {0x2985, 0x2985, prNa}, // Ps LEFT WHITE PARENTHESIS
+ {0x2986, 0x2986, prNa}, // Pe RIGHT WHITE PARENTHESIS
+ {0x2987, 0x2987, prN}, // Ps Z NOTATION LEFT IMAGE BRACKET
+ {0x2988, 0x2988, prN}, // Pe Z NOTATION RIGHT IMAGE BRACKET
+ {0x2989, 0x2989, prN}, // Ps Z NOTATION LEFT BINDING BRACKET
+ {0x298A, 0x298A, prN}, // Pe Z NOTATION RIGHT BINDING BRACKET
+ {0x298B, 0x298B, prN}, // Ps LEFT SQUARE BRACKET WITH UNDERBAR
+ {0x298C, 0x298C, prN}, // Pe RIGHT SQUARE BRACKET WITH UNDERBAR
+ {0x298D, 0x298D, prN}, // Ps LEFT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x298E, 0x298E, prN}, // Pe RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x298F, 0x298F, prN}, // Ps LEFT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x2990, 0x2990, prN}, // Pe RIGHT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x2991, 0x2991, prN}, // Ps LEFT ANGLE BRACKET WITH DOT
+ {0x2992, 0x2992, prN}, // Pe RIGHT ANGLE BRACKET WITH DOT
+ {0x2993, 0x2993, prN}, // Ps LEFT ARC LESS-THAN BRACKET
+ {0x2994, 0x2994, prN}, // Pe RIGHT ARC GREATER-THAN BRACKET
+ {0x2995, 0x2995, prN}, // Ps DOUBLE LEFT ARC GREATER-THAN BRACKET
+ {0x2996, 0x2996, prN}, // Pe DOUBLE RIGHT ARC LESS-THAN BRACKET
+ {0x2997, 0x2997, prN}, // Ps LEFT BLACK TORTOISE SHELL BRACKET
+ {0x2998, 0x2998, prN}, // Pe RIGHT BLACK TORTOISE SHELL BRACKET
+ {0x2999, 0x29D7, prN}, // Sm [63] DOTTED FENCE..BLACK HOURGLASS
+ {0x29D8, 0x29D8, prN}, // Ps LEFT WIGGLY FENCE
+ {0x29D9, 0x29D9, prN}, // Pe RIGHT WIGGLY FENCE
+ {0x29DA, 0x29DA, prN}, // Ps LEFT DOUBLE WIGGLY FENCE
+ {0x29DB, 0x29DB, prN}, // Pe RIGHT DOUBLE WIGGLY FENCE
+ {0x29DC, 0x29FB, prN}, // Sm [32] INCOMPLETE INFINITY..TRIPLE PLUS
+ {0x29FC, 0x29FC, prN}, // Ps LEFT-POINTING CURVED ANGLE BRACKET
+ {0x29FD, 0x29FD, prN}, // Pe RIGHT-POINTING CURVED ANGLE BRACKET
+ {0x29FE, 0x29FF, prN}, // Sm [2] TINY..MINY
+ {0x2A00, 0x2AFF, prN}, // Sm [256] N-ARY CIRCLED DOT OPERATOR..N-ARY WHITE VERTICAL BAR
+ {0x2B00, 0x2B1A, prN}, // So [27] NORTH EAST WHITE ARROW..DOTTED SQUARE
+ {0x2B1B, 0x2B1C, prW}, // So [2] BLACK LARGE SQUARE..WHITE LARGE SQUARE
+ {0x2B1D, 0x2B2F, prN}, // So [19] BLACK VERY SMALL SQUARE..WHITE VERTICAL ELLIPSE
+ {0x2B30, 0x2B44, prN}, // Sm [21] LEFT ARROW WITH SMALL CIRCLE..RIGHTWARDS ARROW THROUGH SUPERSET
+ {0x2B45, 0x2B46, prN}, // So [2] LEFTWARDS QUADRUPLE ARROW..RIGHTWARDS QUADRUPLE ARROW
+ {0x2B47, 0x2B4C, prN}, // Sm [6] REVERSE TILDE OPERATOR ABOVE RIGHTWARDS ARROW..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR
+ {0x2B4D, 0x2B4F, prN}, // So [3] DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..SHORT BACKSLANTED SOUTH ARROW
+ {0x2B50, 0x2B50, prW}, // So WHITE MEDIUM STAR
+ {0x2B51, 0x2B54, prN}, // So [4] BLACK SMALL STAR..WHITE RIGHT-POINTING PENTAGON
+ {0x2B55, 0x2B55, prW}, // So HEAVY LARGE CIRCLE
+ {0x2B56, 0x2B59, prA}, // So [4] HEAVY OVAL WITH OVAL INSIDE..HEAVY CIRCLED SALTIRE
+ {0x2B5A, 0x2B73, prN}, // So [26] SLANTED NORTH ARROW WITH HOOKED HEAD..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR
+ {0x2B76, 0x2B95, prN}, // So [32] NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW
+ {0x2B97, 0x2BFF, prN}, // So [105] SYMBOL FOR TYPE A ELECTRONICS..HELLSCHREIBER PAUSE SYMBOL
+ {0x2C00, 0x2C5F, prN}, // L& [96] GLAGOLITIC CAPITAL LETTER AZU..GLAGOLITIC SMALL LETTER CAUDATE CHRIVI
+ {0x2C60, 0x2C7B, prN}, // L& [28] LATIN CAPITAL LETTER L WITH DOUBLE BAR..LATIN LETTER SMALL CAPITAL TURNED E
+ {0x2C7C, 0x2C7D, prN}, // Lm [2] LATIN SUBSCRIPT SMALL LETTER J..MODIFIER LETTER CAPITAL V
+ {0x2C7E, 0x2C7F, prN}, // Lu [2] LATIN CAPITAL LETTER S WITH SWASH TAIL..LATIN CAPITAL LETTER Z WITH SWASH TAIL
+ {0x2C80, 0x2CE4, prN}, // L& [101] COPTIC CAPITAL LETTER ALFA..COPTIC SYMBOL KAI
+ {0x2CE5, 0x2CEA, prN}, // So [6] COPTIC SYMBOL MI RO..COPTIC SYMBOL SHIMA SIMA
+ {0x2CEB, 0x2CEE, prN}, // L& [4] COPTIC CAPITAL LETTER CRYPTOGRAMMIC SHEI..COPTIC SMALL LETTER CRYPTOGRAMMIC GANGIA
+ {0x2CEF, 0x2CF1, prN}, // Mn [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS
+ {0x2CF2, 0x2CF3, prN}, // L& [2] COPTIC CAPITAL LETTER BOHAIRIC KHEI..COPTIC SMALL LETTER BOHAIRIC KHEI
+ {0x2CF9, 0x2CFC, prN}, // Po [4] COPTIC OLD NUBIAN FULL STOP..COPTIC OLD NUBIAN VERSE DIVIDER
+ {0x2CFD, 0x2CFD, prN}, // No COPTIC FRACTION ONE HALF
+ {0x2CFE, 0x2CFF, prN}, // Po [2] COPTIC FULL STOP..COPTIC MORPHOLOGICAL DIVIDER
+ {0x2D00, 0x2D25, prN}, // Ll [38] GEORGIAN SMALL LETTER AN..GEORGIAN SMALL LETTER HOE
+ {0x2D27, 0x2D27, prN}, // Ll GEORGIAN SMALL LETTER YN
+ {0x2D2D, 0x2D2D, prN}, // Ll GEORGIAN SMALL LETTER AEN
+ {0x2D30, 0x2D67, prN}, // Lo [56] TIFINAGH LETTER YA..TIFINAGH LETTER YO
+ {0x2D6F, 0x2D6F, prN}, // Lm TIFINAGH MODIFIER LETTER LABIALIZATION MARK
+ {0x2D70, 0x2D70, prN}, // Po TIFINAGH SEPARATOR MARK
+ {0x2D7F, 0x2D7F, prN}, // Mn TIFINAGH CONSONANT JOINER
+ {0x2D80, 0x2D96, prN}, // Lo [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SYLLABLE GGWE
+ {0x2DA0, 0x2DA6, prN}, // Lo [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SYLLABLE SSO
+ {0x2DA8, 0x2DAE, prN}, // Lo [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SYLLABLE CCO
+ {0x2DB0, 0x2DB6, prN}, // Lo [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SYLLABLE ZZO
+ {0x2DB8, 0x2DBE, prN}, // Lo [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC SYLLABLE CCHO
+ {0x2DC0, 0x2DC6, prN}, // Lo [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SYLLABLE QYO
+ {0x2DC8, 0x2DCE, prN}, // Lo [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SYLLABLE KYO
+ {0x2DD0, 0x2DD6, prN}, // Lo [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SYLLABLE XYO
+ {0x2DD8, 0x2DDE, prN}, // Lo [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SYLLABLE GYO
+ {0x2DE0, 0x2DFF, prN}, // Mn [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS
+ {0x2E00, 0x2E01, prN}, // Po [2] RIGHT ANGLE SUBSTITUTION MARKER..RIGHT ANGLE DOTTED SUBSTITUTION MARKER
+ {0x2E02, 0x2E02, prN}, // Pi LEFT SUBSTITUTION BRACKET
+ {0x2E03, 0x2E03, prN}, // Pf RIGHT SUBSTITUTION BRACKET
+ {0x2E04, 0x2E04, prN}, // Pi LEFT DOTTED SUBSTITUTION BRACKET
+ {0x2E05, 0x2E05, prN}, // Pf RIGHT DOTTED SUBSTITUTION BRACKET
+ {0x2E06, 0x2E08, prN}, // Po [3] RAISED INTERPOLATION MARKER..DOTTED TRANSPOSITION MARKER
+ {0x2E09, 0x2E09, prN}, // Pi LEFT TRANSPOSITION BRACKET
+ {0x2E0A, 0x2E0A, prN}, // Pf RIGHT TRANSPOSITION BRACKET
+ {0x2E0B, 0x2E0B, prN}, // Po RAISED SQUARE
+ {0x2E0C, 0x2E0C, prN}, // Pi LEFT RAISED OMISSION BRACKET
+ {0x2E0D, 0x2E0D, prN}, // Pf RIGHT RAISED OMISSION BRACKET
+ {0x2E0E, 0x2E16, prN}, // Po [9] EDITORIAL CORONIS..DOTTED RIGHT-POINTING ANGLE
+ {0x2E17, 0x2E17, prN}, // Pd DOUBLE OBLIQUE HYPHEN
+ {0x2E18, 0x2E19, prN}, // Po [2] INVERTED INTERROBANG..PALM BRANCH
+ {0x2E1A, 0x2E1A, prN}, // Pd HYPHEN WITH DIAERESIS
+ {0x2E1B, 0x2E1B, prN}, // Po TILDE WITH RING ABOVE
+ {0x2E1C, 0x2E1C, prN}, // Pi LEFT LOW PARAPHRASE BRACKET
+ {0x2E1D, 0x2E1D, prN}, // Pf RIGHT LOW PARAPHRASE BRACKET
+ {0x2E1E, 0x2E1F, prN}, // Po [2] TILDE WITH DOT ABOVE..TILDE WITH DOT BELOW
+ {0x2E20, 0x2E20, prN}, // Pi LEFT VERTICAL BAR WITH QUILL
+ {0x2E21, 0x2E21, prN}, // Pf RIGHT VERTICAL BAR WITH QUILL
+ {0x2E22, 0x2E22, prN}, // Ps TOP LEFT HALF BRACKET
+ {0x2E23, 0x2E23, prN}, // Pe TOP RIGHT HALF BRACKET
+ {0x2E24, 0x2E24, prN}, // Ps BOTTOM LEFT HALF BRACKET
+ {0x2E25, 0x2E25, prN}, // Pe BOTTOM RIGHT HALF BRACKET
+ {0x2E26, 0x2E26, prN}, // Ps LEFT SIDEWAYS U BRACKET
+ {0x2E27, 0x2E27, prN}, // Pe RIGHT SIDEWAYS U BRACKET
+ {0x2E28, 0x2E28, prN}, // Ps LEFT DOUBLE PARENTHESIS
+ {0x2E29, 0x2E29, prN}, // Pe RIGHT DOUBLE PARENTHESIS
+ {0x2E2A, 0x2E2E, prN}, // Po [5] TWO DOTS OVER ONE DOT PUNCTUATION..REVERSED QUESTION MARK
+ {0x2E2F, 0x2E2F, prN}, // Lm VERTICAL TILDE
+ {0x2E30, 0x2E39, prN}, // Po [10] RING POINT..TOP HALF SECTION SIGN
+ {0x2E3A, 0x2E3B, prN}, // Pd [2] TWO-EM DASH..THREE-EM DASH
+ {0x2E3C, 0x2E3F, prN}, // Po [4] STENOGRAPHIC FULL STOP..CAPITULUM
+ {0x2E40, 0x2E40, prN}, // Pd DOUBLE HYPHEN
+ {0x2E41, 0x2E41, prN}, // Po REVERSED COMMA
+ {0x2E42, 0x2E42, prN}, // Ps DOUBLE LOW-REVERSED-9 QUOTATION MARK
+ {0x2E43, 0x2E4F, prN}, // Po [13] DASH WITH LEFT UPTURN..CORNISH VERSE DIVIDER
+ {0x2E50, 0x2E51, prN}, // So [2] CROSS PATTY WITH RIGHT CROSSBAR..CROSS PATTY WITH LEFT CROSSBAR
+ {0x2E52, 0x2E54, prN}, // Po [3] TIRONIAN SIGN CAPITAL ET..MEDIEVAL QUESTION MARK
+ {0x2E55, 0x2E55, prN}, // Ps LEFT SQUARE BRACKET WITH STROKE
+ {0x2E56, 0x2E56, prN}, // Pe RIGHT SQUARE BRACKET WITH STROKE
+ {0x2E57, 0x2E57, prN}, // Ps LEFT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E58, 0x2E58, prN}, // Pe RIGHT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E59, 0x2E59, prN}, // Ps TOP HALF LEFT PARENTHESIS
+ {0x2E5A, 0x2E5A, prN}, // Pe TOP HALF RIGHT PARENTHESIS
+ {0x2E5B, 0x2E5B, prN}, // Ps BOTTOM HALF LEFT PARENTHESIS
+ {0x2E5C, 0x2E5C, prN}, // Pe BOTTOM HALF RIGHT PARENTHESIS
+ {0x2E5D, 0x2E5D, prN}, // Pd OBLIQUE HYPHEN
+ {0x2E80, 0x2E99, prW}, // So [26] CJK RADICAL REPEAT..CJK RADICAL RAP
+ {0x2E9B, 0x2EF3, prW}, // So [89] CJK RADICAL CHOKE..CJK RADICAL C-SIMPLIFIED TURTLE
+ {0x2F00, 0x2FD5, prW}, // So [214] KANGXI RADICAL ONE..KANGXI RADICAL FLUTE
+ {0x2FF0, 0x2FFB, prW}, // So [12] IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID
+ {0x3000, 0x3000, prF}, // Zs IDEOGRAPHIC SPACE
+ {0x3001, 0x3003, prW}, // Po [3] IDEOGRAPHIC COMMA..DITTO MARK
+ {0x3004, 0x3004, prW}, // So JAPANESE INDUSTRIAL STANDARD SYMBOL
+ {0x3005, 0x3005, prW}, // Lm IDEOGRAPHIC ITERATION MARK
+ {0x3006, 0x3006, prW}, // Lo IDEOGRAPHIC CLOSING MARK
+ {0x3007, 0x3007, prW}, // Nl IDEOGRAPHIC NUMBER ZERO
+ {0x3008, 0x3008, prW}, // Ps LEFT ANGLE BRACKET
+ {0x3009, 0x3009, prW}, // Pe RIGHT ANGLE BRACKET
+ {0x300A, 0x300A, prW}, // Ps LEFT DOUBLE ANGLE BRACKET
+ {0x300B, 0x300B, prW}, // Pe RIGHT DOUBLE ANGLE BRACKET
+ {0x300C, 0x300C, prW}, // Ps LEFT CORNER BRACKET
+ {0x300D, 0x300D, prW}, // Pe RIGHT CORNER BRACKET
+ {0x300E, 0x300E, prW}, // Ps LEFT WHITE CORNER BRACKET
+ {0x300F, 0x300F, prW}, // Pe RIGHT WHITE CORNER BRACKET
+ {0x3010, 0x3010, prW}, // Ps LEFT BLACK LENTICULAR BRACKET
+ {0x3011, 0x3011, prW}, // Pe RIGHT BLACK LENTICULAR BRACKET
+ {0x3012, 0x3013, prW}, // So [2] POSTAL MARK..GETA MARK
+ {0x3014, 0x3014, prW}, // Ps LEFT TORTOISE SHELL BRACKET
+ {0x3015, 0x3015, prW}, // Pe RIGHT TORTOISE SHELL BRACKET
+ {0x3016, 0x3016, prW}, // Ps LEFT WHITE LENTICULAR BRACKET
+ {0x3017, 0x3017, prW}, // Pe RIGHT WHITE LENTICULAR BRACKET
+ {0x3018, 0x3018, prW}, // Ps LEFT WHITE TORTOISE SHELL BRACKET
+ {0x3019, 0x3019, prW}, // Pe RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x301A, 0x301A, prW}, // Ps LEFT WHITE SQUARE BRACKET
+ {0x301B, 0x301B, prW}, // Pe RIGHT WHITE SQUARE BRACKET
+ {0x301C, 0x301C, prW}, // Pd WAVE DASH
+ {0x301D, 0x301D, prW}, // Ps REVERSED DOUBLE PRIME QUOTATION MARK
+ {0x301E, 0x301F, prW}, // Pe [2] DOUBLE PRIME QUOTATION MARK..LOW DOUBLE PRIME QUOTATION MARK
+ {0x3020, 0x3020, prW}, // So POSTAL MARK FACE
+ {0x3021, 0x3029, prW}, // Nl [9] HANGZHOU NUMERAL ONE..HANGZHOU NUMERAL NINE
+ {0x302A, 0x302D, prW}, // Mn [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK
+ {0x302E, 0x302F, prW}, // Mc [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK
+ {0x3030, 0x3030, prW}, // Pd WAVY DASH
+ {0x3031, 0x3035, prW}, // Lm [5] VERTICAL KANA REPEAT MARK..VERTICAL KANA REPEAT MARK LOWER HALF
+ {0x3036, 0x3037, prW}, // So [2] CIRCLED POSTAL MARK..IDEOGRAPHIC TELEGRAPH LINE FEED SEPARATOR SYMBOL
+ {0x3038, 0x303A, prW}, // Nl [3] HANGZHOU NUMERAL TEN..HANGZHOU NUMERAL THIRTY
+ {0x303B, 0x303B, prW}, // Lm VERTICAL IDEOGRAPHIC ITERATION MARK
+ {0x303C, 0x303C, prW}, // Lo MASU MARK
+ {0x303D, 0x303D, prW}, // Po PART ALTERNATION MARK
+ {0x303E, 0x303E, prW}, // So IDEOGRAPHIC VARIATION INDICATOR
+ {0x303F, 0x303F, prN}, // So IDEOGRAPHIC HALF FILL SPACE
+ {0x3041, 0x3096, prW}, // Lo [86] HIRAGANA LETTER SMALL A..HIRAGANA LETTER SMALL KE
+ {0x3099, 0x309A, prW}, // Mn [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309B, 0x309C, prW}, // Sk [2] KATAKANA-HIRAGANA VOICED SOUND MARK..KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309D, 0x309E, prW}, // Lm [2] HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK
+ {0x309F, 0x309F, prW}, // Lo HIRAGANA DIGRAPH YORI
+ {0x30A0, 0x30A0, prW}, // Pd KATAKANA-HIRAGANA DOUBLE HYPHEN
+ {0x30A1, 0x30FA, prW}, // Lo [90] KATAKANA LETTER SMALL A..KATAKANA LETTER VO
+ {0x30FB, 0x30FB, prW}, // Po KATAKANA MIDDLE DOT
+ {0x30FC, 0x30FE, prW}, // Lm [3] KATAKANA-HIRAGANA PROLONGED SOUND MARK..KATAKANA VOICED ITERATION MARK
+ {0x30FF, 0x30FF, prW}, // Lo KATAKANA DIGRAPH KOTO
+ {0x3105, 0x312F, prW}, // Lo [43] BOPOMOFO LETTER B..BOPOMOFO LETTER NN
+ {0x3131, 0x318E, prW}, // Lo [94] HANGUL LETTER KIYEOK..HANGUL LETTER ARAEAE
+ {0x3190, 0x3191, prW}, // So [2] IDEOGRAPHIC ANNOTATION LINKING MARK..IDEOGRAPHIC ANNOTATION REVERSE MARK
+ {0x3192, 0x3195, prW}, // No [4] IDEOGRAPHIC ANNOTATION ONE MARK..IDEOGRAPHIC ANNOTATION FOUR MARK
+ {0x3196, 0x319F, prW}, // So [10] IDEOGRAPHIC ANNOTATION TOP MARK..IDEOGRAPHIC ANNOTATION MAN MARK
+ {0x31A0, 0x31BF, prW}, // Lo [32] BOPOMOFO LETTER BU..BOPOMOFO LETTER AH
+ {0x31C0, 0x31E3, prW}, // So [36] CJK STROKE T..CJK STROKE Q
+ {0x31F0, 0x31FF, prW}, // Lo [16] KATAKANA LETTER SMALL KU..KATAKANA LETTER SMALL RO
+ {0x3200, 0x321E, prW}, // So [31] PARENTHESIZED HANGUL KIYEOK..PARENTHESIZED KOREAN CHARACTER O HU
+ {0x3220, 0x3229, prW}, // No [10] PARENTHESIZED IDEOGRAPH ONE..PARENTHESIZED IDEOGRAPH TEN
+ {0x322A, 0x3247, prW}, // So [30] PARENTHESIZED IDEOGRAPH MOON..CIRCLED IDEOGRAPH KOTO
+ {0x3248, 0x324F, prA}, // No [8] CIRCLED NUMBER TEN ON BLACK SQUARE..CIRCLED NUMBER EIGHTY ON BLACK SQUARE
+ {0x3250, 0x3250, prW}, // So PARTNERSHIP SIGN
+ {0x3251, 0x325F, prW}, // No [15] CIRCLED NUMBER TWENTY ONE..CIRCLED NUMBER THIRTY FIVE
+ {0x3260, 0x327F, prW}, // So [32] CIRCLED HANGUL KIYEOK..KOREAN STANDARD SYMBOL
+ {0x3280, 0x3289, prW}, // No [10] CIRCLED IDEOGRAPH ONE..CIRCLED IDEOGRAPH TEN
+ {0x328A, 0x32B0, prW}, // So [39] CIRCLED IDEOGRAPH MOON..CIRCLED IDEOGRAPH NIGHT
+ {0x32B1, 0x32BF, prW}, // No [15] CIRCLED NUMBER THIRTY SIX..CIRCLED NUMBER FIFTY
+ {0x32C0, 0x32FF, prW}, // So [64] IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY..SQUARE ERA NAME REIWA
+ {0x3300, 0x33FF, prW}, // So [256] SQUARE APAATO..SQUARE GAL
+ {0x3400, 0x4DBF, prW}, // Lo [6592] CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DBF
+ {0x4DC0, 0x4DFF, prN}, // So [64] HEXAGRAM FOR THE CREATIVE HEAVEN..HEXAGRAM FOR BEFORE COMPLETION
+ {0x4E00, 0x9FFF, prW}, // Lo [20992] CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FFF
+ {0xA000, 0xA014, prW}, // Lo [21] YI SYLLABLE IT..YI SYLLABLE E
+ {0xA015, 0xA015, prW}, // Lm YI SYLLABLE WU
+ {0xA016, 0xA48C, prW}, // Lo [1143] YI SYLLABLE BIT..YI SYLLABLE YYR
+ {0xA490, 0xA4C6, prW}, // So [55] YI RADICAL QOT..YI RADICAL KE
+ {0xA4D0, 0xA4F7, prN}, // Lo [40] LISU LETTER BA..LISU LETTER OE
+ {0xA4F8, 0xA4FD, prN}, // Lm [6] LISU LETTER TONE MYA TI..LISU LETTER TONE MYA JEU
+ {0xA4FE, 0xA4FF, prN}, // Po [2] LISU PUNCTUATION COMMA..LISU PUNCTUATION FULL STOP
+ {0xA500, 0xA60B, prN}, // Lo [268] VAI SYLLABLE EE..VAI SYLLABLE NG
+ {0xA60C, 0xA60C, prN}, // Lm VAI SYLLABLE LENGTHENER
+ {0xA60D, 0xA60F, prN}, // Po [3] VAI COMMA..VAI QUESTION MARK
+ {0xA610, 0xA61F, prN}, // Lo [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL JONG
+ {0xA620, 0xA629, prN}, // Nd [10] VAI DIGIT ZERO..VAI DIGIT NINE
+ {0xA62A, 0xA62B, prN}, // Lo [2] VAI SYLLABLE NDOLE MA..VAI SYLLABLE NDOLE DO
+ {0xA640, 0xA66D, prN}, // L& [46] CYRILLIC CAPITAL LETTER ZEMLYA..CYRILLIC SMALL LETTER DOUBLE MONOCULAR O
+ {0xA66E, 0xA66E, prN}, // Lo CYRILLIC LETTER MULTIOCULAR O
+ {0xA66F, 0xA66F, prN}, // Mn COMBINING CYRILLIC VZMET
+ {0xA670, 0xA672, prN}, // Me [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN
+ {0xA673, 0xA673, prN}, // Po SLAVONIC ASTERISK
+ {0xA674, 0xA67D, prN}, // Mn [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK
+ {0xA67E, 0xA67E, prN}, // Po CYRILLIC KAVYKA
+ {0xA67F, 0xA67F, prN}, // Lm CYRILLIC PAYEROK
+ {0xA680, 0xA69B, prN}, // L& [28] CYRILLIC CAPITAL LETTER DWE..CYRILLIC SMALL LETTER CROSSED O
+ {0xA69C, 0xA69D, prN}, // Lm [2] MODIFIER LETTER CYRILLIC HARD SIGN..MODIFIER LETTER CYRILLIC SOFT SIGN
+ {0xA69E, 0xA69F, prN}, // Mn [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E
+ {0xA6A0, 0xA6E5, prN}, // Lo [70] BAMUM LETTER A..BAMUM LETTER KI
+ {0xA6E6, 0xA6EF, prN}, // Nl [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM
+ {0xA6F0, 0xA6F1, prN}, // Mn [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS
+ {0xA6F2, 0xA6F7, prN}, // Po [6] BAMUM NJAEMLI..BAMUM QUESTION MARK
+ {0xA700, 0xA716, prN}, // Sk [23] MODIFIER LETTER CHINESE TONE YIN PING..MODIFIER LETTER EXTRA-LOW LEFT-STEM TONE BAR
+ {0xA717, 0xA71F, prN}, // Lm [9] MODIFIER LETTER DOT VERTICAL BAR..MODIFIER LETTER LOW INVERTED EXCLAMATION MARK
+ {0xA720, 0xA721, prN}, // Sk [2] MODIFIER LETTER STRESS AND HIGH TONE..MODIFIER LETTER STRESS AND LOW TONE
+ {0xA722, 0xA76F, prN}, // L& [78] LATIN CAPITAL LETTER EGYPTOLOGICAL ALEF..LATIN SMALL LETTER CON
+ {0xA770, 0xA770, prN}, // Lm MODIFIER LETTER US
+ {0xA771, 0xA787, prN}, // L& [23] LATIN SMALL LETTER DUM..LATIN SMALL LETTER INSULAR T
+ {0xA788, 0xA788, prN}, // Lm MODIFIER LETTER LOW CIRCUMFLEX ACCENT
+ {0xA789, 0xA78A, prN}, // Sk [2] MODIFIER LETTER COLON..MODIFIER LETTER SHORT EQUALS SIGN
+ {0xA78B, 0xA78E, prN}, // L& [4] LATIN CAPITAL LETTER SALTILLO..LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT
+ {0xA78F, 0xA78F, prN}, // Lo LATIN LETTER SINOLOGICAL DOT
+ {0xA790, 0xA7CA, prN}, // L& [59] LATIN CAPITAL LETTER N WITH DESCENDER..LATIN SMALL LETTER S WITH SHORT STROKE OVERLAY
+ {0xA7D0, 0xA7D1, prN}, // L& [2] LATIN CAPITAL LETTER CLOSED INSULAR G..LATIN SMALL LETTER CLOSED INSULAR G
+ {0xA7D3, 0xA7D3, prN}, // Ll LATIN SMALL LETTER DOUBLE THORN
+ {0xA7D5, 0xA7D9, prN}, // L& [5] LATIN SMALL LETTER DOUBLE WYNN..LATIN SMALL LETTER SIGMOID S
+ {0xA7F2, 0xA7F4, prN}, // Lm [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q
+ {0xA7F5, 0xA7F6, prN}, // L& [2] LATIN CAPITAL LETTER REVERSED HALF H..LATIN SMALL LETTER REVERSED HALF H
+ {0xA7F7, 0xA7F7, prN}, // Lo LATIN EPIGRAPHIC LETTER SIDEWAYS I
+ {0xA7F8, 0xA7F9, prN}, // Lm [2] MODIFIER LETTER CAPITAL H WITH STROKE..MODIFIER LETTER SMALL LIGATURE OE
+ {0xA7FA, 0xA7FA, prN}, // Ll LATIN LETTER SMALL CAPITAL TURNED M
+ {0xA7FB, 0xA7FF, prN}, // Lo [5] LATIN EPIGRAPHIC LETTER REVERSED F..LATIN EPIGRAPHIC LETTER ARCHAIC M
+ {0xA800, 0xA801, prN}, // Lo [2] SYLOTI NAGRI LETTER A..SYLOTI NAGRI LETTER I
+ {0xA802, 0xA802, prN}, // Mn SYLOTI NAGRI SIGN DVISVARA
+ {0xA803, 0xA805, prN}, // Lo [3] SYLOTI NAGRI LETTER U..SYLOTI NAGRI LETTER O
+ {0xA806, 0xA806, prN}, // Mn SYLOTI NAGRI SIGN HASANTA
+ {0xA807, 0xA80A, prN}, // Lo [4] SYLOTI NAGRI LETTER KO..SYLOTI NAGRI LETTER GHO
+ {0xA80B, 0xA80B, prN}, // Mn SYLOTI NAGRI SIGN ANUSVARA
+ {0xA80C, 0xA822, prN}, // Lo [23] SYLOTI NAGRI LETTER CO..SYLOTI NAGRI LETTER HO
+ {0xA823, 0xA824, prN}, // Mc [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I
+ {0xA825, 0xA826, prN}, // Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E
+ {0xA827, 0xA827, prN}, // Mc SYLOTI NAGRI VOWEL SIGN OO
+ {0xA828, 0xA82B, prN}, // So [4] SYLOTI NAGRI POETRY MARK-1..SYLOTI NAGRI POETRY MARK-4
+ {0xA82C, 0xA82C, prN}, // Mn SYLOTI NAGRI SIGN ALTERNATE HASANTA
+ {0xA830, 0xA835, prN}, // No [6] NORTH INDIC FRACTION ONE QUARTER..NORTH INDIC FRACTION THREE SIXTEENTHS
+ {0xA836, 0xA837, prN}, // So [2] NORTH INDIC QUARTER MARK..NORTH INDIC PLACEHOLDER MARK
+ {0xA838, 0xA838, prN}, // Sc NORTH INDIC RUPEE MARK
+ {0xA839, 0xA839, prN}, // So NORTH INDIC QUANTITY MARK
+ {0xA840, 0xA873, prN}, // Lo [52] PHAGS-PA LETTER KA..PHAGS-PA LETTER CANDRABINDU
+ {0xA874, 0xA877, prN}, // Po [4] PHAGS-PA SINGLE HEAD MARK..PHAGS-PA MARK DOUBLE SHAD
+ {0xA880, 0xA881, prN}, // Mc [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA
+ {0xA882, 0xA8B3, prN}, // Lo [50] SAURASHTRA LETTER A..SAURASHTRA LETTER LLA
+ {0xA8B4, 0xA8C3, prN}, // Mc [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU
+ {0xA8C4, 0xA8C5, prN}, // Mn [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU
+ {0xA8CE, 0xA8CF, prN}, // Po [2] SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA
+ {0xA8D0, 0xA8D9, prN}, // Nd [10] SAURASHTRA DIGIT ZERO..SAURASHTRA DIGIT NINE
+ {0xA8E0, 0xA8F1, prN}, // Mn [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA
+ {0xA8F2, 0xA8F7, prN}, // Lo [6] DEVANAGARI SIGN SPACING CANDRABINDU..DEVANAGARI SIGN CANDRABINDU AVAGRAHA
+ {0xA8F8, 0xA8FA, prN}, // Po [3] DEVANAGARI SIGN PUSHPIKA..DEVANAGARI CARET
+ {0xA8FB, 0xA8FB, prN}, // Lo DEVANAGARI HEADSTROKE
+ {0xA8FC, 0xA8FC, prN}, // Po DEVANAGARI SIGN SIDDHAM
+ {0xA8FD, 0xA8FE, prN}, // Lo [2] DEVANAGARI JAIN OM..DEVANAGARI LETTER AY
+ {0xA8FF, 0xA8FF, prN}, // Mn DEVANAGARI VOWEL SIGN AY
+ {0xA900, 0xA909, prN}, // Nd [10] KAYAH LI DIGIT ZERO..KAYAH LI DIGIT NINE
+ {0xA90A, 0xA925, prN}, // Lo [28] KAYAH LI LETTER KA..KAYAH LI LETTER OO
+ {0xA926, 0xA92D, prN}, // Mn [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU
+ {0xA92E, 0xA92F, prN}, // Po [2] KAYAH LI SIGN CWI..KAYAH LI SIGN SHYA
+ {0xA930, 0xA946, prN}, // Lo [23] REJANG LETTER KA..REJANG LETTER A
+ {0xA947, 0xA951, prN}, // Mn [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R
+ {0xA952, 0xA953, prN}, // Mc [2] REJANG CONSONANT SIGN H..REJANG VIRAMA
+ {0xA95F, 0xA95F, prN}, // Po REJANG SECTION MARK
+ {0xA960, 0xA97C, prW}, // Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH
+ {0xA980, 0xA982, prN}, // Mn [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR
+ {0xA983, 0xA983, prN}, // Mc JAVANESE SIGN WIGNYAN
+ {0xA984, 0xA9B2, prN}, // Lo [47] JAVANESE LETTER A..JAVANESE LETTER HA
+ {0xA9B3, 0xA9B3, prN}, // Mn JAVANESE SIGN CECAK TELU
+ {0xA9B4, 0xA9B5, prN}, // Mc [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG
+ {0xA9B6, 0xA9B9, prN}, // Mn [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT
+ {0xA9BA, 0xA9BB, prN}, // Mc [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE
+ {0xA9BC, 0xA9BD, prN}, // Mn [2] JAVANESE VOWEL SIGN PEPET..JAVANESE CONSONANT SIGN KERET
+ {0xA9BE, 0xA9C0, prN}, // Mc [3] JAVANESE CONSONANT SIGN PENGKAL..JAVANESE PANGKON
+ {0xA9C1, 0xA9CD, prN}, // Po [13] JAVANESE LEFT RERENGGAN..JAVANESE TURNED PADA PISELEH
+ {0xA9CF, 0xA9CF, prN}, // Lm JAVANESE PANGRANGKEP
+ {0xA9D0, 0xA9D9, prN}, // Nd [10] JAVANESE DIGIT ZERO..JAVANESE DIGIT NINE
+ {0xA9DE, 0xA9DF, prN}, // Po [2] JAVANESE PADA TIRTA TUMETES..JAVANESE PADA ISEN-ISEN
+ {0xA9E0, 0xA9E4, prN}, // Lo [5] MYANMAR LETTER SHAN GHA..MYANMAR LETTER SHAN BHA
+ {0xA9E5, 0xA9E5, prN}, // Mn MYANMAR SIGN SHAN SAW
+ {0xA9E6, 0xA9E6, prN}, // Lm MYANMAR MODIFIER LETTER SHAN REDUPLICATION
+ {0xA9E7, 0xA9EF, prN}, // Lo [9] MYANMAR LETTER TAI LAING NYA..MYANMAR LETTER TAI LAING NNA
+ {0xA9F0, 0xA9F9, prN}, // Nd [10] MYANMAR TAI LAING DIGIT ZERO..MYANMAR TAI LAING DIGIT NINE
+ {0xA9FA, 0xA9FE, prN}, // Lo [5] MYANMAR LETTER TAI LAING LLA..MYANMAR LETTER TAI LAING BHA
+ {0xAA00, 0xAA28, prN}, // Lo [41] CHAM LETTER A..CHAM LETTER HA
+ {0xAA29, 0xAA2E, prN}, // Mn [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE
+ {0xAA2F, 0xAA30, prN}, // Mc [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
+ {0xAA31, 0xAA32, prN}, // Mn [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE
+ {0xAA33, 0xAA34, prN}, // Mc [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA
+ {0xAA35, 0xAA36, prN}, // Mn [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA
+ {0xAA40, 0xAA42, prN}, // Lo [3] CHAM LETTER FINAL K..CHAM LETTER FINAL NG
+ {0xAA43, 0xAA43, prN}, // Mn CHAM CONSONANT SIGN FINAL NG
+ {0xAA44, 0xAA4B, prN}, // Lo [8] CHAM LETTER FINAL CH..CHAM LETTER FINAL SS
+ {0xAA4C, 0xAA4C, prN}, // Mn CHAM CONSONANT SIGN FINAL M
+ {0xAA4D, 0xAA4D, prN}, // Mc CHAM CONSONANT SIGN FINAL H
+ {0xAA50, 0xAA59, prN}, // Nd [10] CHAM DIGIT ZERO..CHAM DIGIT NINE
+ {0xAA5C, 0xAA5F, prN}, // Po [4] CHAM PUNCTUATION SPIRAL..CHAM PUNCTUATION TRIPLE DANDA
+ {0xAA60, 0xAA6F, prN}, // Lo [16] MYANMAR LETTER KHAMTI GA..MYANMAR LETTER KHAMTI FA
+ {0xAA70, 0xAA70, prN}, // Lm MYANMAR MODIFIER LETTER KHAMTI REDUPLICATION
+ {0xAA71, 0xAA76, prN}, // Lo [6] MYANMAR LETTER KHAMTI XA..MYANMAR LOGOGRAM KHAMTI HM
+ {0xAA77, 0xAA79, prN}, // So [3] MYANMAR SYMBOL AITON EXCLAMATION..MYANMAR SYMBOL AITON TWO
+ {0xAA7A, 0xAA7A, prN}, // Lo MYANMAR LETTER AITON RA
+ {0xAA7B, 0xAA7B, prN}, // Mc MYANMAR SIGN PAO KAREN TONE
+ {0xAA7C, 0xAA7C, prN}, // Mn MYANMAR SIGN TAI LAING TONE-2
+ {0xAA7D, 0xAA7D, prN}, // Mc MYANMAR SIGN TAI LAING TONE-5
+ {0xAA7E, 0xAA7F, prN}, // Lo [2] MYANMAR LETTER SHWE PALAUNG CHA..MYANMAR LETTER SHWE PALAUNG SHA
+ {0xAA80, 0xAAAF, prN}, // Lo [48] TAI VIET LETTER LOW KO..TAI VIET LETTER HIGH O
+ {0xAAB0, 0xAAB0, prN}, // Mn TAI VIET MAI KANG
+ {0xAAB1, 0xAAB1, prN}, // Lo TAI VIET VOWEL AA
+ {0xAAB2, 0xAAB4, prN}, // Mn [3] TAI VIET VOWEL I..TAI VIET VOWEL U
+ {0xAAB5, 0xAAB6, prN}, // Lo [2] TAI VIET VOWEL E..TAI VIET VOWEL O
+ {0xAAB7, 0xAAB8, prN}, // Mn [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
+ {0xAAB9, 0xAABD, prN}, // Lo [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN
+ {0xAABE, 0xAABF, prN}, // Mn [2] TAI VIET VOWEL AM..TAI VIET TONE MAI EK
+ {0xAAC0, 0xAAC0, prN}, // Lo TAI VIET TONE MAI NUENG
+ {0xAAC1, 0xAAC1, prN}, // Mn TAI VIET TONE MAI THO
+ {0xAAC2, 0xAAC2, prN}, // Lo TAI VIET TONE MAI SONG
+ {0xAADB, 0xAADC, prN}, // Lo [2] TAI VIET SYMBOL KON..TAI VIET SYMBOL NUENG
+ {0xAADD, 0xAADD, prN}, // Lm TAI VIET SYMBOL SAM
+ {0xAADE, 0xAADF, prN}, // Po [2] TAI VIET SYMBOL HO HOI..TAI VIET SYMBOL KOI KOI
+ {0xAAE0, 0xAAEA, prN}, // Lo [11] MEETEI MAYEK LETTER E..MEETEI MAYEK LETTER SSA
+ {0xAAEB, 0xAAEB, prN}, // Mc MEETEI MAYEK VOWEL SIGN II
+ {0xAAEC, 0xAAED, prN}, // Mn [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI
+ {0xAAEE, 0xAAEF, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU
+ {0xAAF0, 0xAAF1, prN}, // Po [2] MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM
+ {0xAAF2, 0xAAF2, prN}, // Lo MEETEI MAYEK ANJI
+ {0xAAF3, 0xAAF4, prN}, // Lm [2] MEETEI MAYEK SYLLABLE REPETITION MARK..MEETEI MAYEK WORD REPETITION MARK
+ {0xAAF5, 0xAAF5, prN}, // Mc MEETEI MAYEK VOWEL SIGN VISARGA
+ {0xAAF6, 0xAAF6, prN}, // Mn MEETEI MAYEK VIRAMA
+ {0xAB01, 0xAB06, prN}, // Lo [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC SYLLABLE TTHO
+ {0xAB09, 0xAB0E, prN}, // Lo [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC SYLLABLE DDHO
+ {0xAB11, 0xAB16, prN}, // Lo [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SYLLABLE DZO
+ {0xAB20, 0xAB26, prN}, // Lo [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC SYLLABLE CCHHO
+ {0xAB28, 0xAB2E, prN}, // Lo [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SYLLABLE BBO
+ {0xAB30, 0xAB5A, prN}, // Ll [43] LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG
+ {0xAB5B, 0xAB5B, prN}, // Sk MODIFIER BREVE WITH INVERTED BREVE
+ {0xAB5C, 0xAB5F, prN}, // Lm [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK
+ {0xAB60, 0xAB68, prN}, // Ll [9] LATIN SMALL LETTER SAKHA YAT..LATIN SMALL LETTER TURNED R WITH MIDDLE TILDE
+ {0xAB69, 0xAB69, prN}, // Lm MODIFIER LETTER SMALL TURNED W
+ {0xAB6A, 0xAB6B, prN}, // Sk [2] MODIFIER LETTER LEFT TACK..MODIFIER LETTER RIGHT TACK
+ {0xAB70, 0xABBF, prN}, // Ll [80] CHEROKEE SMALL LETTER A..CHEROKEE SMALL LETTER YA
+ {0xABC0, 0xABE2, prN}, // Lo [35] MEETEI MAYEK LETTER KOK..MEETEI MAYEK LETTER I LONSUM
+ {0xABE3, 0xABE4, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
+ {0xABE5, 0xABE5, prN}, // Mn MEETEI MAYEK VOWEL SIGN ANAP
+ {0xABE6, 0xABE7, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP
+ {0xABE8, 0xABE8, prN}, // Mn MEETEI MAYEK VOWEL SIGN UNAP
+ {0xABE9, 0xABEA, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG
+ {0xABEB, 0xABEB, prN}, // Po MEETEI MAYEK CHEIKHEI
+ {0xABEC, 0xABEC, prN}, // Mc MEETEI MAYEK LUM IYEK
+ {0xABED, 0xABED, prN}, // Mn MEETEI MAYEK APUN IYEK
+ {0xABF0, 0xABF9, prN}, // Nd [10] MEETEI MAYEK DIGIT ZERO..MEETEI MAYEK DIGIT NINE
+ {0xAC00, 0xD7A3, prW}, // Lo [11172] HANGUL SYLLABLE GA..HANGUL SYLLABLE HIH
+ {0xD7B0, 0xD7C6, prN}, // Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E
+ {0xD7CB, 0xD7FB, prN}, // Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH
+ {0xD800, 0xDB7F, prN}, // Cs [896] ..
+ {0xDB80, 0xDBFF, prN}, // Cs [128] ..
+ {0xDC00, 0xDFFF, prN}, // Cs [1024] ..
+ {0xE000, 0xF8FF, prA}, // Co [6400] ..
+ {0xF900, 0xFA6D, prW}, // Lo [366] CJK COMPATIBILITY IDEOGRAPH-F900..CJK COMPATIBILITY IDEOGRAPH-FA6D
+ {0xFA6E, 0xFA6F, prW}, // Cn [2] ..
+ {0xFA70, 0xFAD9, prW}, // Lo [106] CJK COMPATIBILITY IDEOGRAPH-FA70..CJK COMPATIBILITY IDEOGRAPH-FAD9
+ {0xFADA, 0xFAFF, prW}, // Cn [38] ..
+ {0xFB00, 0xFB06, prN}, // Ll [7] LATIN SMALL LIGATURE FF..LATIN SMALL LIGATURE ST
+ {0xFB13, 0xFB17, prN}, // Ll [5] ARMENIAN SMALL LIGATURE MEN NOW..ARMENIAN SMALL LIGATURE MEN XEH
+ {0xFB1D, 0xFB1D, prN}, // Lo HEBREW LETTER YOD WITH HIRIQ
+ {0xFB1E, 0xFB1E, prN}, // Mn HEBREW POINT JUDEO-SPANISH VARIKA
+ {0xFB1F, 0xFB28, prN}, // Lo [10] HEBREW LIGATURE YIDDISH YOD YOD PATAH..HEBREW LETTER WIDE TAV
+ {0xFB29, 0xFB29, prN}, // Sm HEBREW LETTER ALTERNATIVE PLUS SIGN
+ {0xFB2A, 0xFB36, prN}, // Lo [13] HEBREW LETTER SHIN WITH SHIN DOT..HEBREW LETTER ZAYIN WITH DAGESH
+ {0xFB38, 0xFB3C, prN}, // Lo [5] HEBREW LETTER TET WITH DAGESH..HEBREW LETTER LAMED WITH DAGESH
+ {0xFB3E, 0xFB3E, prN}, // Lo HEBREW LETTER MEM WITH DAGESH
+ {0xFB40, 0xFB41, prN}, // Lo [2] HEBREW LETTER NUN WITH DAGESH..HEBREW LETTER SAMEKH WITH DAGESH
+ {0xFB43, 0xFB44, prN}, // Lo [2] HEBREW LETTER FINAL PE WITH DAGESH..HEBREW LETTER PE WITH DAGESH
+ {0xFB46, 0xFB4F, prN}, // Lo [10] HEBREW LETTER TSADI WITH DAGESH..HEBREW LIGATURE ALEF LAMED
+ {0xFB50, 0xFBB1, prN}, // Lo [98] ARABIC LETTER ALEF WASLA ISOLATED FORM..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE FINAL FORM
+ {0xFBB2, 0xFBC2, prN}, // Sk [17] ARABIC SYMBOL DOT ABOVE..ARABIC SYMBOL WASLA ABOVE
+ {0xFBD3, 0xFD3D, prN}, // Lo [363] ARABIC LETTER NG ISOLATED FORM..ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM
+ {0xFD3E, 0xFD3E, prN}, // Pe ORNATE LEFT PARENTHESIS
+ {0xFD3F, 0xFD3F, prN}, // Ps ORNATE RIGHT PARENTHESIS
+ {0xFD40, 0xFD4F, prN}, // So [16] ARABIC LIGATURE RAHIMAHU ALLAAH..ARABIC LIGATURE RAHIMAHUM ALLAAH
+ {0xFD50, 0xFD8F, prN}, // Lo [64] ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM..ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM
+ {0xFD92, 0xFDC7, prN}, // Lo [54] ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM..ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM
+ {0xFDCF, 0xFDCF, prN}, // So ARABIC LIGATURE SALAAMUHU ALAYNAA
+ {0xFDF0, 0xFDFB, prN}, // Lo [12] ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM..ARABIC LIGATURE JALLAJALALOUHOU
+ {0xFDFC, 0xFDFC, prN}, // Sc RIAL SIGN
+ {0xFDFD, 0xFDFF, prN}, // So [3] ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM..ARABIC LIGATURE AZZA WA JALL
+ {0xFE00, 0xFE0F, prA}, // Mn [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16
+ {0xFE10, 0xFE16, prW}, // Po [7] PRESENTATION FORM FOR VERTICAL COMMA..PRESENTATION FORM FOR VERTICAL QUESTION MARK
+ {0xFE17, 0xFE17, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT WHITE LENTICULAR BRACKET
+ {0xFE18, 0xFE18, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRACKET
+ {0xFE19, 0xFE19, prW}, // Po PRESENTATION FORM FOR VERTICAL HORIZONTAL ELLIPSIS
+ {0xFE20, 0xFE2F, prN}, // Mn [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF
+ {0xFE30, 0xFE30, prW}, // Po PRESENTATION FORM FOR VERTICAL TWO DOT LEADER
+ {0xFE31, 0xFE32, prW}, // Pd [2] PRESENTATION FORM FOR VERTICAL EM DASH..PRESENTATION FORM FOR VERTICAL EN DASH
+ {0xFE33, 0xFE34, prW}, // Pc [2] PRESENTATION FORM FOR VERTICAL LOW LINE..PRESENTATION FORM FOR VERTICAL WAVY LOW LINE
+ {0xFE35, 0xFE35, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS
+ {0xFE36, 0xFE36, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS
+ {0xFE37, 0xFE37, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET
+ {0xFE38, 0xFE38, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET
+ {0xFE39, 0xFE39, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT TORTOISE SHELL BRACKET
+ {0xFE3A, 0xFE3A, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT TORTOISE SHELL BRACKET
+ {0xFE3B, 0xFE3B, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT BLACK LENTICULAR BRACKET
+ {0xFE3C, 0xFE3C, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT BLACK LENTICULAR BRACKET
+ {0xFE3D, 0xFE3D, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT DOUBLE ANGLE BRACKET
+ {0xFE3E, 0xFE3E, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT DOUBLE ANGLE BRACKET
+ {0xFE3F, 0xFE3F, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT ANGLE BRACKET
+ {0xFE40, 0xFE40, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT ANGLE BRACKET
+ {0xFE41, 0xFE41, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET
+ {0xFE42, 0xFE42, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET
+ {0xFE43, 0xFE43, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET
+ {0xFE44, 0xFE44, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET
+ {0xFE45, 0xFE46, prW}, // Po [2] SESAME DOT..WHITE SESAME DOT
+ {0xFE47, 0xFE47, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET
+ {0xFE48, 0xFE48, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET
+ {0xFE49, 0xFE4C, prW}, // Po [4] DASHED OVERLINE..DOUBLE WAVY OVERLINE
+ {0xFE4D, 0xFE4F, prW}, // Pc [3] DASHED LOW LINE..WAVY LOW LINE
+ {0xFE50, 0xFE52, prW}, // Po [3] SMALL COMMA..SMALL FULL STOP
+ {0xFE54, 0xFE57, prW}, // Po [4] SMALL SEMICOLON..SMALL EXCLAMATION MARK
+ {0xFE58, 0xFE58, prW}, // Pd SMALL EM DASH
+ {0xFE59, 0xFE59, prW}, // Ps SMALL LEFT PARENTHESIS
+ {0xFE5A, 0xFE5A, prW}, // Pe SMALL RIGHT PARENTHESIS
+ {0xFE5B, 0xFE5B, prW}, // Ps SMALL LEFT CURLY BRACKET
+ {0xFE5C, 0xFE5C, prW}, // Pe SMALL RIGHT CURLY BRACKET
+ {0xFE5D, 0xFE5D, prW}, // Ps SMALL LEFT TORTOISE SHELL BRACKET
+ {0xFE5E, 0xFE5E, prW}, // Pe SMALL RIGHT TORTOISE SHELL BRACKET
+ {0xFE5F, 0xFE61, prW}, // Po [3] SMALL NUMBER SIGN..SMALL ASTERISK
+ {0xFE62, 0xFE62, prW}, // Sm SMALL PLUS SIGN
+ {0xFE63, 0xFE63, prW}, // Pd SMALL HYPHEN-MINUS
+ {0xFE64, 0xFE66, prW}, // Sm [3] SMALL LESS-THAN SIGN..SMALL EQUALS SIGN
+ {0xFE68, 0xFE68, prW}, // Po SMALL REVERSE SOLIDUS
+ {0xFE69, 0xFE69, prW}, // Sc SMALL DOLLAR SIGN
+ {0xFE6A, 0xFE6B, prW}, // Po [2] SMALL PERCENT SIGN..SMALL COMMERCIAL AT
+ {0xFE70, 0xFE74, prN}, // Lo [5] ARABIC FATHATAN ISOLATED FORM..ARABIC KASRATAN ISOLATED FORM
+ {0xFE76, 0xFEFC, prN}, // Lo [135] ARABIC FATHA ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF FINAL FORM
+ {0xFEFF, 0xFEFF, prN}, // Cf ZERO WIDTH NO-BREAK SPACE
+ {0xFF01, 0xFF03, prF}, // Po [3] FULLWIDTH EXCLAMATION MARK..FULLWIDTH NUMBER SIGN
+ {0xFF04, 0xFF04, prF}, // Sc FULLWIDTH DOLLAR SIGN
+ {0xFF05, 0xFF07, prF}, // Po [3] FULLWIDTH PERCENT SIGN..FULLWIDTH APOSTROPHE
+ {0xFF08, 0xFF08, prF}, // Ps FULLWIDTH LEFT PARENTHESIS
+ {0xFF09, 0xFF09, prF}, // Pe FULLWIDTH RIGHT PARENTHESIS
+ {0xFF0A, 0xFF0A, prF}, // Po FULLWIDTH ASTERISK
+ {0xFF0B, 0xFF0B, prF}, // Sm FULLWIDTH PLUS SIGN
+ {0xFF0C, 0xFF0C, prF}, // Po FULLWIDTH COMMA
+ {0xFF0D, 0xFF0D, prF}, // Pd FULLWIDTH HYPHEN-MINUS
+ {0xFF0E, 0xFF0F, prF}, // Po [2] FULLWIDTH FULL STOP..FULLWIDTH SOLIDUS
+ {0xFF10, 0xFF19, prF}, // Nd [10] FULLWIDTH DIGIT ZERO..FULLWIDTH DIGIT NINE
+ {0xFF1A, 0xFF1B, prF}, // Po [2] FULLWIDTH COLON..FULLWIDTH SEMICOLON
+ {0xFF1C, 0xFF1E, prF}, // Sm [3] FULLWIDTH LESS-THAN SIGN..FULLWIDTH GREATER-THAN SIGN
+ {0xFF1F, 0xFF20, prF}, // Po [2] FULLWIDTH QUESTION MARK..FULLWIDTH COMMERCIAL AT
+ {0xFF21, 0xFF3A, prF}, // Lu [26] FULLWIDTH LATIN CAPITAL LETTER A..FULLWIDTH LATIN CAPITAL LETTER Z
+ {0xFF3B, 0xFF3B, prF}, // Ps FULLWIDTH LEFT SQUARE BRACKET
+ {0xFF3C, 0xFF3C, prF}, // Po FULLWIDTH REVERSE SOLIDUS
+ {0xFF3D, 0xFF3D, prF}, // Pe FULLWIDTH RIGHT SQUARE BRACKET
+ {0xFF3E, 0xFF3E, prF}, // Sk FULLWIDTH CIRCUMFLEX ACCENT
+ {0xFF3F, 0xFF3F, prF}, // Pc FULLWIDTH LOW LINE
+ {0xFF40, 0xFF40, prF}, // Sk FULLWIDTH GRAVE ACCENT
+ {0xFF41, 0xFF5A, prF}, // Ll [26] FULLWIDTH LATIN SMALL LETTER A..FULLWIDTH LATIN SMALL LETTER Z
+ {0xFF5B, 0xFF5B, prF}, // Ps FULLWIDTH LEFT CURLY BRACKET
+ {0xFF5C, 0xFF5C, prF}, // Sm FULLWIDTH VERTICAL LINE
+ {0xFF5D, 0xFF5D, prF}, // Pe FULLWIDTH RIGHT CURLY BRACKET
+ {0xFF5E, 0xFF5E, prF}, // Sm FULLWIDTH TILDE
+ {0xFF5F, 0xFF5F, prF}, // Ps FULLWIDTH LEFT WHITE PARENTHESIS
+ {0xFF60, 0xFF60, prF}, // Pe FULLWIDTH RIGHT WHITE PARENTHESIS
+ {0xFF61, 0xFF61, prH}, // Po HALFWIDTH IDEOGRAPHIC FULL STOP
+ {0xFF62, 0xFF62, prH}, // Ps HALFWIDTH LEFT CORNER BRACKET
+ {0xFF63, 0xFF63, prH}, // Pe HALFWIDTH RIGHT CORNER BRACKET
+ {0xFF64, 0xFF65, prH}, // Po [2] HALFWIDTH IDEOGRAPHIC COMMA..HALFWIDTH KATAKANA MIDDLE DOT
+ {0xFF66, 0xFF6F, prH}, // Lo [10] HALFWIDTH KATAKANA LETTER WO..HALFWIDTH KATAKANA LETTER SMALL TU
+ {0xFF70, 0xFF70, prH}, // Lm HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK
+ {0xFF71, 0xFF9D, prH}, // Lo [45] HALFWIDTH KATAKANA LETTER A..HALFWIDTH KATAKANA LETTER N
+ {0xFF9E, 0xFF9F, prH}, // Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
+ {0xFFA0, 0xFFBE, prH}, // Lo [31] HALFWIDTH HANGUL FILLER..HALFWIDTH HANGUL LETTER HIEUH
+ {0xFFC2, 0xFFC7, prH}, // Lo [6] HALFWIDTH HANGUL LETTER A..HALFWIDTH HANGUL LETTER E
+ {0xFFCA, 0xFFCF, prH}, // Lo [6] HALFWIDTH HANGUL LETTER YEO..HALFWIDTH HANGUL LETTER OE
+ {0xFFD2, 0xFFD7, prH}, // Lo [6] HALFWIDTH HANGUL LETTER YO..HALFWIDTH HANGUL LETTER YU
+ {0xFFDA, 0xFFDC, prH}, // Lo [3] HALFWIDTH HANGUL LETTER EU..HALFWIDTH HANGUL LETTER I
+ {0xFFE0, 0xFFE1, prF}, // Sc [2] FULLWIDTH CENT SIGN..FULLWIDTH POUND SIGN
+ {0xFFE2, 0xFFE2, prF}, // Sm FULLWIDTH NOT SIGN
+ {0xFFE3, 0xFFE3, prF}, // Sk FULLWIDTH MACRON
+ {0xFFE4, 0xFFE4, prF}, // So FULLWIDTH BROKEN BAR
+ {0xFFE5, 0xFFE6, prF}, // Sc [2] FULLWIDTH YEN SIGN..FULLWIDTH WON SIGN
+ {0xFFE8, 0xFFE8, prH}, // So HALFWIDTH FORMS LIGHT VERTICAL
+ {0xFFE9, 0xFFEC, prH}, // Sm [4] HALFWIDTH LEFTWARDS ARROW..HALFWIDTH DOWNWARDS ARROW
+ {0xFFED, 0xFFEE, prH}, // So [2] HALFWIDTH BLACK SQUARE..HALFWIDTH WHITE CIRCLE
+ {0xFFF9, 0xFFFB, prN}, // Cf [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR
+ {0xFFFC, 0xFFFC, prN}, // So OBJECT REPLACEMENT CHARACTER
+ {0xFFFD, 0xFFFD, prA}, // So REPLACEMENT CHARACTER
+ {0x10000, 0x1000B, prN}, // Lo [12] LINEAR B SYLLABLE B008 A..LINEAR B SYLLABLE B046 JE
+ {0x1000D, 0x10026, prN}, // Lo [26] LINEAR B SYLLABLE B036 JO..LINEAR B SYLLABLE B032 QO
+ {0x10028, 0x1003A, prN}, // Lo [19] LINEAR B SYLLABLE B060 RA..LINEAR B SYLLABLE B042 WO
+ {0x1003C, 0x1003D, prN}, // Lo [2] LINEAR B SYLLABLE B017 ZA..LINEAR B SYLLABLE B074 ZE
+ {0x1003F, 0x1004D, prN}, // Lo [15] LINEAR B SYLLABLE B020 ZO..LINEAR B SYLLABLE B091 TWO
+ {0x10050, 0x1005D, prN}, // Lo [14] LINEAR B SYMBOL B018..LINEAR B SYMBOL B089
+ {0x10080, 0x100FA, prN}, // Lo [123] LINEAR B IDEOGRAM B100 MAN..LINEAR B IDEOGRAM VESSEL B305
+ {0x10100, 0x10102, prN}, // Po [3] AEGEAN WORD SEPARATOR LINE..AEGEAN CHECK MARK
+ {0x10107, 0x10133, prN}, // No [45] AEGEAN NUMBER ONE..AEGEAN NUMBER NINETY THOUSAND
+ {0x10137, 0x1013F, prN}, // So [9] AEGEAN WEIGHT BASE UNIT..AEGEAN MEASURE THIRD SUBUNIT
+ {0x10140, 0x10174, prN}, // Nl [53] GREEK ACROPHONIC ATTIC ONE QUARTER..GREEK ACROPHONIC STRATIAN FIFTY MNAS
+ {0x10175, 0x10178, prN}, // No [4] GREEK ONE HALF SIGN..GREEK THREE QUARTERS SIGN
+ {0x10179, 0x10189, prN}, // So [17] GREEK YEAR SIGN..GREEK TRYBLION BASE SIGN
+ {0x1018A, 0x1018B, prN}, // No [2] GREEK ZERO SIGN..GREEK ONE QUARTER SIGN
+ {0x1018C, 0x1018E, prN}, // So [3] GREEK SINUSOID SIGN..NOMISMA SIGN
+ {0x10190, 0x1019C, prN}, // So [13] ROMAN SEXTANS SIGN..ASCIA SYMBOL
+ {0x101A0, 0x101A0, prN}, // So GREEK SYMBOL TAU RHO
+ {0x101D0, 0x101FC, prN}, // So [45] PHAISTOS DISC SIGN PEDESTRIAN..PHAISTOS DISC SIGN WAVY BAND
+ {0x101FD, 0x101FD, prN}, // Mn PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE
+ {0x10280, 0x1029C, prN}, // Lo [29] LYCIAN LETTER A..LYCIAN LETTER X
+ {0x102A0, 0x102D0, prN}, // Lo [49] CARIAN LETTER A..CARIAN LETTER UUU3
+ {0x102E0, 0x102E0, prN}, // Mn COPTIC EPACT THOUSANDS MARK
+ {0x102E1, 0x102FB, prN}, // No [27] COPTIC EPACT DIGIT ONE..COPTIC EPACT NUMBER NINE HUNDRED
+ {0x10300, 0x1031F, prN}, // Lo [32] OLD ITALIC LETTER A..OLD ITALIC LETTER ESS
+ {0x10320, 0x10323, prN}, // No [4] OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY
+ {0x1032D, 0x1032F, prN}, // Lo [3] OLD ITALIC LETTER YE..OLD ITALIC LETTER SOUTHERN TSE
+ {0x10330, 0x10340, prN}, // Lo [17] GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA
+ {0x10341, 0x10341, prN}, // Nl GOTHIC LETTER NINETY
+ {0x10342, 0x10349, prN}, // Lo [8] GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL
+ {0x1034A, 0x1034A, prN}, // Nl GOTHIC LETTER NINE HUNDRED
+ {0x10350, 0x10375, prN}, // Lo [38] OLD PERMIC LETTER AN..OLD PERMIC LETTER IA
+ {0x10376, 0x1037A, prN}, // Mn [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII
+ {0x10380, 0x1039D, prN}, // Lo [30] UGARITIC LETTER ALPA..UGARITIC LETTER SSU
+ {0x1039F, 0x1039F, prN}, // Po UGARITIC WORD DIVIDER
+ {0x103A0, 0x103C3, prN}, // Lo [36] OLD PERSIAN SIGN A..OLD PERSIAN SIGN HA
+ {0x103C8, 0x103CF, prN}, // Lo [8] OLD PERSIAN SIGN AURAMAZDAA..OLD PERSIAN SIGN BUUMISH
+ {0x103D0, 0x103D0, prN}, // Po OLD PERSIAN WORD DIVIDER
+ {0x103D1, 0x103D5, prN}, // Nl [5] OLD PERSIAN NUMBER ONE..OLD PERSIAN NUMBER HUNDRED
+ {0x10400, 0x1044F, prN}, // L& [80] DESERET CAPITAL LETTER LONG I..DESERET SMALL LETTER EW
+ {0x10450, 0x1047F, prN}, // Lo [48] SHAVIAN LETTER PEEP..SHAVIAN LETTER YEW
+ {0x10480, 0x1049D, prN}, // Lo [30] OSMANYA LETTER ALEF..OSMANYA LETTER OO
+ {0x104A0, 0x104A9, prN}, // Nd [10] OSMANYA DIGIT ZERO..OSMANYA DIGIT NINE
+ {0x104B0, 0x104D3, prN}, // Lu [36] OSAGE CAPITAL LETTER A..OSAGE CAPITAL LETTER ZHA
+ {0x104D8, 0x104FB, prN}, // Ll [36] OSAGE SMALL LETTER A..OSAGE SMALL LETTER ZHA
+ {0x10500, 0x10527, prN}, // Lo [40] ELBASAN LETTER A..ELBASAN LETTER KHE
+ {0x10530, 0x10563, prN}, // Lo [52] CAUCASIAN ALBANIAN LETTER ALT..CAUCASIAN ALBANIAN LETTER KIW
+ {0x1056F, 0x1056F, prN}, // Po CAUCASIAN ALBANIAN CITATION MARK
+ {0x10570, 0x1057A, prN}, // Lu [11] VITHKUQI CAPITAL LETTER A..VITHKUQI CAPITAL LETTER GA
+ {0x1057C, 0x1058A, prN}, // Lu [15] VITHKUQI CAPITAL LETTER HA..VITHKUQI CAPITAL LETTER RE
+ {0x1058C, 0x10592, prN}, // Lu [7] VITHKUQI CAPITAL LETTER SE..VITHKUQI CAPITAL LETTER XE
+ {0x10594, 0x10595, prN}, // Lu [2] VITHKUQI CAPITAL LETTER Y..VITHKUQI CAPITAL LETTER ZE
+ {0x10597, 0x105A1, prN}, // Ll [11] VITHKUQI SMALL LETTER A..VITHKUQI SMALL LETTER GA
+ {0x105A3, 0x105B1, prN}, // Ll [15] VITHKUQI SMALL LETTER HA..VITHKUQI SMALL LETTER RE
+ {0x105B3, 0x105B9, prN}, // Ll [7] VITHKUQI SMALL LETTER SE..VITHKUQI SMALL LETTER XE
+ {0x105BB, 0x105BC, prN}, // Ll [2] VITHKUQI SMALL LETTER Y..VITHKUQI SMALL LETTER ZE
+ {0x10600, 0x10736, prN}, // Lo [311] LINEAR A SIGN AB001..LINEAR A SIGN A664
+ {0x10740, 0x10755, prN}, // Lo [22] LINEAR A SIGN A701 A..LINEAR A SIGN A732 JE
+ {0x10760, 0x10767, prN}, // Lo [8] LINEAR A SIGN A800..LINEAR A SIGN A807
+ {0x10780, 0x10785, prN}, // Lm [6] MODIFIER LETTER SMALL CAPITAL AA..MODIFIER LETTER SMALL B WITH HOOK
+ {0x10787, 0x107B0, prN}, // Lm [42] MODIFIER LETTER SMALL DZ DIGRAPH..MODIFIER LETTER SMALL V WITH RIGHT HOOK
+ {0x107B2, 0x107BA, prN}, // Lm [9] MODIFIER LETTER SMALL CAPITAL Y..MODIFIER LETTER SMALL S WITH CURL
+ {0x10800, 0x10805, prN}, // Lo [6] CYPRIOT SYLLABLE A..CYPRIOT SYLLABLE JA
+ {0x10808, 0x10808, prN}, // Lo CYPRIOT SYLLABLE JO
+ {0x1080A, 0x10835, prN}, // Lo [44] CYPRIOT SYLLABLE KA..CYPRIOT SYLLABLE WO
+ {0x10837, 0x10838, prN}, // Lo [2] CYPRIOT SYLLABLE XA..CYPRIOT SYLLABLE XE
+ {0x1083C, 0x1083C, prN}, // Lo CYPRIOT SYLLABLE ZA
+ {0x1083F, 0x1083F, prN}, // Lo CYPRIOT SYLLABLE ZO
+ {0x10840, 0x10855, prN}, // Lo [22] IMPERIAL ARAMAIC LETTER ALEPH..IMPERIAL ARAMAIC LETTER TAW
+ {0x10857, 0x10857, prN}, // Po IMPERIAL ARAMAIC SECTION SIGN
+ {0x10858, 0x1085F, prN}, // No [8] IMPERIAL ARAMAIC NUMBER ONE..IMPERIAL ARAMAIC NUMBER TEN THOUSAND
+ {0x10860, 0x10876, prN}, // Lo [23] PALMYRENE LETTER ALEPH..PALMYRENE LETTER TAW
+ {0x10877, 0x10878, prN}, // So [2] PALMYRENE LEFT-POINTING FLEURON..PALMYRENE RIGHT-POINTING FLEURON
+ {0x10879, 0x1087F, prN}, // No [7] PALMYRENE NUMBER ONE..PALMYRENE NUMBER TWENTY
+ {0x10880, 0x1089E, prN}, // Lo [31] NABATAEAN LETTER FINAL ALEPH..NABATAEAN LETTER TAW
+ {0x108A7, 0x108AF, prN}, // No [9] NABATAEAN NUMBER ONE..NABATAEAN NUMBER ONE HUNDRED
+ {0x108E0, 0x108F2, prN}, // Lo [19] HATRAN LETTER ALEPH..HATRAN LETTER QOPH
+ {0x108F4, 0x108F5, prN}, // Lo [2] HATRAN LETTER SHIN..HATRAN LETTER TAW
+ {0x108FB, 0x108FF, prN}, // No [5] HATRAN NUMBER ONE..HATRAN NUMBER ONE HUNDRED
+ {0x10900, 0x10915, prN}, // Lo [22] PHOENICIAN LETTER ALF..PHOENICIAN LETTER TAU
+ {0x10916, 0x1091B, prN}, // No [6] PHOENICIAN NUMBER ONE..PHOENICIAN NUMBER THREE
+ {0x1091F, 0x1091F, prN}, // Po PHOENICIAN WORD SEPARATOR
+ {0x10920, 0x10939, prN}, // Lo [26] LYDIAN LETTER A..LYDIAN LETTER C
+ {0x1093F, 0x1093F, prN}, // Po LYDIAN TRIANGULAR MARK
+ {0x10980, 0x1099F, prN}, // Lo [32] MEROITIC HIEROGLYPHIC LETTER A..MEROITIC HIEROGLYPHIC SYMBOL VIDJ-2
+ {0x109A0, 0x109B7, prN}, // Lo [24] MEROITIC CURSIVE LETTER A..MEROITIC CURSIVE LETTER DA
+ {0x109BC, 0x109BD, prN}, // No [2] MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS..MEROITIC CURSIVE FRACTION ONE HALF
+ {0x109BE, 0x109BF, prN}, // Lo [2] MEROITIC CURSIVE LOGOGRAM RMT..MEROITIC CURSIVE LOGOGRAM IMN
+ {0x109C0, 0x109CF, prN}, // No [16] MEROITIC CURSIVE NUMBER ONE..MEROITIC CURSIVE NUMBER SEVENTY
+ {0x109D2, 0x109FF, prN}, // No [46] MEROITIC CURSIVE NUMBER ONE HUNDRED..MEROITIC CURSIVE FRACTION TEN TWELFTHS
+ {0x10A00, 0x10A00, prN}, // Lo KHAROSHTHI LETTER A
+ {0x10A01, 0x10A03, prN}, // Mn [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R
+ {0x10A05, 0x10A06, prN}, // Mn [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O
+ {0x10A0C, 0x10A0F, prN}, // Mn [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA
+ {0x10A10, 0x10A13, prN}, // Lo [4] KHAROSHTHI LETTER KA..KHAROSHTHI LETTER GHA
+ {0x10A15, 0x10A17, prN}, // Lo [3] KHAROSHTHI LETTER CA..KHAROSHTHI LETTER JA
+ {0x10A19, 0x10A35, prN}, // Lo [29] KHAROSHTHI LETTER NYA..KHAROSHTHI LETTER VHA
+ {0x10A38, 0x10A3A, prN}, // Mn [3] KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW
+ {0x10A3F, 0x10A3F, prN}, // Mn KHAROSHTHI VIRAMA
+ {0x10A40, 0x10A48, prN}, // No [9] KHAROSHTHI DIGIT ONE..KHAROSHTHI FRACTION ONE HALF
+ {0x10A50, 0x10A58, prN}, // Po [9] KHAROSHTHI PUNCTUATION DOT..KHAROSHTHI PUNCTUATION LINES
+ {0x10A60, 0x10A7C, prN}, // Lo [29] OLD SOUTH ARABIAN LETTER HE..OLD SOUTH ARABIAN LETTER THETH
+ {0x10A7D, 0x10A7E, prN}, // No [2] OLD SOUTH ARABIAN NUMBER ONE..OLD SOUTH ARABIAN NUMBER FIFTY
+ {0x10A7F, 0x10A7F, prN}, // Po OLD SOUTH ARABIAN NUMERIC INDICATOR
+ {0x10A80, 0x10A9C, prN}, // Lo [29] OLD NORTH ARABIAN LETTER HEH..OLD NORTH ARABIAN LETTER ZAH
+ {0x10A9D, 0x10A9F, prN}, // No [3] OLD NORTH ARABIAN NUMBER ONE..OLD NORTH ARABIAN NUMBER TWENTY
+ {0x10AC0, 0x10AC7, prN}, // Lo [8] MANICHAEAN LETTER ALEPH..MANICHAEAN LETTER WAW
+ {0x10AC8, 0x10AC8, prN}, // So MANICHAEAN SIGN UD
+ {0x10AC9, 0x10AE4, prN}, // Lo [28] MANICHAEAN LETTER ZAYIN..MANICHAEAN LETTER TAW
+ {0x10AE5, 0x10AE6, prN}, // Mn [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
+ {0x10AEB, 0x10AEF, prN}, // No [5] MANICHAEAN NUMBER ONE..MANICHAEAN NUMBER ONE HUNDRED
+ {0x10AF0, 0x10AF6, prN}, // Po [7] MANICHAEAN PUNCTUATION STAR..MANICHAEAN PUNCTUATION LINE FILLER
+ {0x10B00, 0x10B35, prN}, // Lo [54] AVESTAN LETTER A..AVESTAN LETTER HE
+ {0x10B39, 0x10B3F, prN}, // Po [7] AVESTAN ABBREVIATION MARK..LARGE ONE RING OVER TWO RINGS PUNCTUATION
+ {0x10B40, 0x10B55, prN}, // Lo [22] INSCRIPTIONAL PARTHIAN LETTER ALEPH..INSCRIPTIONAL PARTHIAN LETTER TAW
+ {0x10B58, 0x10B5F, prN}, // No [8] INSCRIPTIONAL PARTHIAN NUMBER ONE..INSCRIPTIONAL PARTHIAN NUMBER ONE THOUSAND
+ {0x10B60, 0x10B72, prN}, // Lo [19] INSCRIPTIONAL PAHLAVI LETTER ALEPH..INSCRIPTIONAL PAHLAVI LETTER TAW
+ {0x10B78, 0x10B7F, prN}, // No [8] INSCRIPTIONAL PAHLAVI NUMBER ONE..INSCRIPTIONAL PAHLAVI NUMBER ONE THOUSAND
+ {0x10B80, 0x10B91, prN}, // Lo [18] PSALTER PAHLAVI LETTER ALEPH..PSALTER PAHLAVI LETTER TAW
+ {0x10B99, 0x10B9C, prN}, // Po [4] PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT
+ {0x10BA9, 0x10BAF, prN}, // No [7] PSALTER PAHLAVI NUMBER ONE..PSALTER PAHLAVI NUMBER ONE HUNDRED
+ {0x10C00, 0x10C48, prN}, // Lo [73] OLD TURKIC LETTER ORKHON A..OLD TURKIC LETTER ORKHON BASH
+ {0x10C80, 0x10CB2, prN}, // Lu [51] OLD HUNGARIAN CAPITAL LETTER A..OLD HUNGARIAN CAPITAL LETTER US
+ {0x10CC0, 0x10CF2, prN}, // Ll [51] OLD HUNGARIAN SMALL LETTER A..OLD HUNGARIAN SMALL LETTER US
+ {0x10CFA, 0x10CFF, prN}, // No [6] OLD HUNGARIAN NUMBER ONE..OLD HUNGARIAN NUMBER ONE THOUSAND
+ {0x10D00, 0x10D23, prN}, // Lo [36] HANIFI ROHINGYA LETTER A..HANIFI ROHINGYA MARK NA KHONNA
+ {0x10D24, 0x10D27, prN}, // Mn [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
+ {0x10D30, 0x10D39, prN}, // Nd [10] HANIFI ROHINGYA DIGIT ZERO..HANIFI ROHINGYA DIGIT NINE
+ {0x10E60, 0x10E7E, prN}, // No [31] RUMI DIGIT ONE..RUMI FRACTION TWO THIRDS
+ {0x10E80, 0x10EA9, prN}, // Lo [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET
+ {0x10EAB, 0x10EAC, prN}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10EAD, 0x10EAD, prN}, // Pd YEZIDI HYPHENATION MARK
+ {0x10EB0, 0x10EB1, prN}, // Lo [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10F00, 0x10F1C, prN}, // Lo [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
+ {0x10F1D, 0x10F26, prN}, // No [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF
+ {0x10F27, 0x10F27, prN}, // Lo OLD SOGDIAN LIGATURE AYIN-DALETH
+ {0x10F30, 0x10F45, prN}, // Lo [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN
+ {0x10F46, 0x10F50, prN}, // Mn [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
+ {0x10F51, 0x10F54, prN}, // No [4] SOGDIAN NUMBER ONE..SOGDIAN NUMBER ONE HUNDRED
+ {0x10F55, 0x10F59, prN}, // Po [5] SOGDIAN PUNCTUATION TWO VERTICAL BARS..SOGDIAN PUNCTUATION HALF CIRCLE WITH DOT
+ {0x10F70, 0x10F81, prN}, // Lo [18] OLD UYGHUR LETTER ALEPH..OLD UYGHUR LETTER LESH
+ {0x10F82, 0x10F85, prN}, // Mn [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
+ {0x10F86, 0x10F89, prN}, // Po [4] OLD UYGHUR PUNCTUATION BAR..OLD UYGHUR PUNCTUATION FOUR DOTS
+ {0x10FB0, 0x10FC4, prN}, // Lo [21] CHORASMIAN LETTER ALEPH..CHORASMIAN LETTER TAW
+ {0x10FC5, 0x10FCB, prN}, // No [7] CHORASMIAN NUMBER ONE..CHORASMIAN NUMBER ONE HUNDRED
+ {0x10FE0, 0x10FF6, prN}, // Lo [23] ELYMAIC LETTER ALEPH..ELYMAIC LIGATURE ZAYIN-YODH
+ {0x11000, 0x11000, prN}, // Mc BRAHMI SIGN CANDRABINDU
+ {0x11001, 0x11001, prN}, // Mn BRAHMI SIGN ANUSVARA
+ {0x11002, 0x11002, prN}, // Mc BRAHMI SIGN VISARGA
+ {0x11003, 0x11037, prN}, // Lo [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI LETTER OLD TAMIL NNNA
+ {0x11038, 0x11046, prN}, // Mn [15] BRAHMI VOWEL SIGN AA..BRAHMI VIRAMA
+ {0x11047, 0x1104D, prN}, // Po [7] BRAHMI DANDA..BRAHMI PUNCTUATION LOTUS
+ {0x11052, 0x11065, prN}, // No [20] BRAHMI NUMBER ONE..BRAHMI NUMBER ONE THOUSAND
+ {0x11066, 0x1106F, prN}, // Nd [10] BRAHMI DIGIT ZERO..BRAHMI DIGIT NINE
+ {0x11070, 0x11070, prN}, // Mn BRAHMI SIGN OLD TAMIL VIRAMA
+ {0x11071, 0x11072, prN}, // Lo [2] BRAHMI LETTER OLD TAMIL SHORT E..BRAHMI LETTER OLD TAMIL SHORT O
+ {0x11073, 0x11074, prN}, // Mn [2] BRAHMI VOWEL SIGN OLD TAMIL SHORT E..BRAHMI VOWEL SIGN OLD TAMIL SHORT O
+ {0x11075, 0x11075, prN}, // Lo BRAHMI LETTER OLD TAMIL LLA
+ {0x1107F, 0x1107F, prN}, // Mn BRAHMI NUMBER JOINER
+ {0x11080, 0x11081, prN}, // Mn [2] KAITHI SIGN CANDRABINDU..KAITHI SIGN ANUSVARA
+ {0x11082, 0x11082, prN}, // Mc KAITHI SIGN VISARGA
+ {0x11083, 0x110AF, prN}, // Lo [45] KAITHI LETTER A..KAITHI LETTER HA
+ {0x110B0, 0x110B2, prN}, // Mc [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II
+ {0x110B3, 0x110B6, prN}, // Mn [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI
+ {0x110B7, 0x110B8, prN}, // Mc [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU
+ {0x110B9, 0x110BA, prN}, // Mn [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA
+ {0x110BB, 0x110BC, prN}, // Po [2] KAITHI ABBREVIATION SIGN..KAITHI ENUMERATION SIGN
+ {0x110BD, 0x110BD, prN}, // Cf KAITHI NUMBER SIGN
+ {0x110BE, 0x110C1, prN}, // Po [4] KAITHI SECTION MARK..KAITHI DOUBLE DANDA
+ {0x110C2, 0x110C2, prN}, // Mn KAITHI VOWEL SIGN VOCALIC R
+ {0x110CD, 0x110CD, prN}, // Cf KAITHI NUMBER SIGN ABOVE
+ {0x110D0, 0x110E8, prN}, // Lo [25] SORA SOMPENG LETTER SAH..SORA SOMPENG LETTER MAE
+ {0x110F0, 0x110F9, prN}, // Nd [10] SORA SOMPENG DIGIT ZERO..SORA SOMPENG DIGIT NINE
+ {0x11100, 0x11102, prN}, // Mn [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA
+ {0x11103, 0x11126, prN}, // Lo [36] CHAKMA LETTER AA..CHAKMA LETTER HAA
+ {0x11127, 0x1112B, prN}, // Mn [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU
+ {0x1112C, 0x1112C, prN}, // Mc CHAKMA VOWEL SIGN E
+ {0x1112D, 0x11134, prN}, // Mn [8] CHAKMA VOWEL SIGN AI..CHAKMA MAAYYAA
+ {0x11136, 0x1113F, prN}, // Nd [10] CHAKMA DIGIT ZERO..CHAKMA DIGIT NINE
+ {0x11140, 0x11143, prN}, // Po [4] CHAKMA SECTION MARK..CHAKMA QUESTION MARK
+ {0x11144, 0x11144, prN}, // Lo CHAKMA LETTER LHAA
+ {0x11145, 0x11146, prN}, // Mc [2] CHAKMA VOWEL SIGN AA..CHAKMA VOWEL SIGN EI
+ {0x11147, 0x11147, prN}, // Lo CHAKMA LETTER VAA
+ {0x11150, 0x11172, prN}, // Lo [35] MAHAJANI LETTER A..MAHAJANI LETTER RRA
+ {0x11173, 0x11173, prN}, // Mn MAHAJANI SIGN NUKTA
+ {0x11174, 0x11175, prN}, // Po [2] MAHAJANI ABBREVIATION SIGN..MAHAJANI SECTION MARK
+ {0x11176, 0x11176, prN}, // Lo MAHAJANI LIGATURE SHRI
+ {0x11180, 0x11181, prN}, // Mn [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA
+ {0x11182, 0x11182, prN}, // Mc SHARADA SIGN VISARGA
+ {0x11183, 0x111B2, prN}, // Lo [48] SHARADA LETTER A..SHARADA LETTER HA
+ {0x111B3, 0x111B5, prN}, // Mc [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II
+ {0x111B6, 0x111BE, prN}, // Mn [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O
+ {0x111BF, 0x111C0, prN}, // Mc [2] SHARADA VOWEL SIGN AU..SHARADA SIGN VIRAMA
+ {0x111C1, 0x111C4, prN}, // Lo [4] SHARADA SIGN AVAGRAHA..SHARADA OM
+ {0x111C5, 0x111C8, prN}, // Po [4] SHARADA DANDA..SHARADA SEPARATOR
+ {0x111C9, 0x111CC, prN}, // Mn [4] SHARADA SANDHI MARK..SHARADA EXTRA SHORT VOWEL MARK
+ {0x111CD, 0x111CD, prN}, // Po SHARADA SUTRA MARK
+ {0x111CE, 0x111CE, prN}, // Mc SHARADA VOWEL SIGN PRISHTHAMATRA E
+ {0x111CF, 0x111CF, prN}, // Mn SHARADA SIGN INVERTED CANDRABINDU
+ {0x111D0, 0x111D9, prN}, // Nd [10] SHARADA DIGIT ZERO..SHARADA DIGIT NINE
+ {0x111DA, 0x111DA, prN}, // Lo SHARADA EKAM
+ {0x111DB, 0x111DB, prN}, // Po SHARADA SIGN SIDDHAM
+ {0x111DC, 0x111DC, prN}, // Lo SHARADA HEADSTROKE
+ {0x111DD, 0x111DF, prN}, // Po [3] SHARADA CONTINUATION SIGN..SHARADA SECTION MARK-2
+ {0x111E1, 0x111F4, prN}, // No [20] SINHALA ARCHAIC DIGIT ONE..SINHALA ARCHAIC NUMBER ONE THOUSAND
+ {0x11200, 0x11211, prN}, // Lo [18] KHOJKI LETTER A..KHOJKI LETTER JJA
+ {0x11213, 0x1122B, prN}, // Lo [25] KHOJKI LETTER NYA..KHOJKI LETTER LLA
+ {0x1122C, 0x1122E, prN}, // Mc [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II
+ {0x1122F, 0x11231, prN}, // Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI
+ {0x11232, 0x11233, prN}, // Mc [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU
+ {0x11234, 0x11234, prN}, // Mn KHOJKI SIGN ANUSVARA
+ {0x11235, 0x11235, prN}, // Mc KHOJKI SIGN VIRAMA
+ {0x11236, 0x11237, prN}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
+ {0x11238, 0x1123D, prN}, // Po [6] KHOJKI DANDA..KHOJKI ABBREVIATION SIGN
+ {0x1123E, 0x1123E, prN}, // Mn KHOJKI SIGN SUKUN
+ {0x11280, 0x11286, prN}, // Lo [7] MULTANI LETTER A..MULTANI LETTER GA
+ {0x11288, 0x11288, prN}, // Lo MULTANI LETTER GHA
+ {0x1128A, 0x1128D, prN}, // Lo [4] MULTANI LETTER CA..MULTANI LETTER JJA
+ {0x1128F, 0x1129D, prN}, // Lo [15] MULTANI LETTER NYA..MULTANI LETTER BA
+ {0x1129F, 0x112A8, prN}, // Lo [10] MULTANI LETTER BHA..MULTANI LETTER RHA
+ {0x112A9, 0x112A9, prN}, // Po MULTANI SECTION MARK
+ {0x112B0, 0x112DE, prN}, // Lo [47] KHUDAWADI LETTER A..KHUDAWADI LETTER HA
+ {0x112DF, 0x112DF, prN}, // Mn KHUDAWADI SIGN ANUSVARA
+ {0x112E0, 0x112E2, prN}, // Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
+ {0x112E3, 0x112EA, prN}, // Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
+ {0x112F0, 0x112F9, prN}, // Nd [10] KHUDAWADI DIGIT ZERO..KHUDAWADI DIGIT NINE
+ {0x11300, 0x11301, prN}, // Mn [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU
+ {0x11302, 0x11303, prN}, // Mc [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA
+ {0x11305, 0x1130C, prN}, // Lo [8] GRANTHA LETTER A..GRANTHA LETTER VOCALIC L
+ {0x1130F, 0x11310, prN}, // Lo [2] GRANTHA LETTER EE..GRANTHA LETTER AI
+ {0x11313, 0x11328, prN}, // Lo [22] GRANTHA LETTER OO..GRANTHA LETTER NA
+ {0x1132A, 0x11330, prN}, // Lo [7] GRANTHA LETTER PA..GRANTHA LETTER RA
+ {0x11332, 0x11333, prN}, // Lo [2] GRANTHA LETTER LA..GRANTHA LETTER LLA
+ {0x11335, 0x11339, prN}, // Lo [5] GRANTHA LETTER VA..GRANTHA LETTER HA
+ {0x1133B, 0x1133C, prN}, // Mn [2] COMBINING BINDU BELOW..GRANTHA SIGN NUKTA
+ {0x1133D, 0x1133D, prN}, // Lo GRANTHA SIGN AVAGRAHA
+ {0x1133E, 0x1133F, prN}, // Mc [2] GRANTHA VOWEL SIGN AA..GRANTHA VOWEL SIGN I
+ {0x11340, 0x11340, prN}, // Mn GRANTHA VOWEL SIGN II
+ {0x11341, 0x11344, prN}, // Mc [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR
+ {0x11347, 0x11348, prN}, // Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI
+ {0x1134B, 0x1134D, prN}, // Mc [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA
+ {0x11350, 0x11350, prN}, // Lo GRANTHA OM
+ {0x11357, 0x11357, prN}, // Mc GRANTHA AU LENGTH MARK
+ {0x1135D, 0x11361, prN}, // Lo [5] GRANTHA SIGN PLUTA..GRANTHA LETTER VOCALIC LL
+ {0x11362, 0x11363, prN}, // Mc [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL
+ {0x11366, 0x1136C, prN}, // Mn [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX
+ {0x11370, 0x11374, prN}, // Mn [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA
+ {0x11400, 0x11434, prN}, // Lo [53] NEWA LETTER A..NEWA LETTER HA
+ {0x11435, 0x11437, prN}, // Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II
+ {0x11438, 0x1143F, prN}, // Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI
+ {0x11440, 0x11441, prN}, // Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU
+ {0x11442, 0x11444, prN}, // Mn [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA
+ {0x11445, 0x11445, prN}, // Mc NEWA SIGN VISARGA
+ {0x11446, 0x11446, prN}, // Mn NEWA SIGN NUKTA
+ {0x11447, 0x1144A, prN}, // Lo [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI
+ {0x1144B, 0x1144F, prN}, // Po [5] NEWA DANDA..NEWA ABBREVIATION SIGN
+ {0x11450, 0x11459, prN}, // Nd [10] NEWA DIGIT ZERO..NEWA DIGIT NINE
+ {0x1145A, 0x1145B, prN}, // Po [2] NEWA DOUBLE COMMA..NEWA PLACEHOLDER MARK
+ {0x1145D, 0x1145D, prN}, // Po NEWA INSERTION SIGN
+ {0x1145E, 0x1145E, prN}, // Mn NEWA SANDHI MARK
+ {0x1145F, 0x11461, prN}, // Lo [3] NEWA LETTER VEDIC ANUSVARA..NEWA SIGN UPADHMANIYA
+ {0x11480, 0x114AF, prN}, // Lo [48] TIRHUTA ANJI..TIRHUTA LETTER HA
+ {0x114B0, 0x114B2, prN}, // Mc [3] TIRHUTA VOWEL SIGN AA..TIRHUTA VOWEL SIGN II
+ {0x114B3, 0x114B8, prN}, // Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL
+ {0x114B9, 0x114B9, prN}, // Mc TIRHUTA VOWEL SIGN E
+ {0x114BA, 0x114BA, prN}, // Mn TIRHUTA VOWEL SIGN SHORT E
+ {0x114BB, 0x114BE, prN}, // Mc [4] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN AU
+ {0x114BF, 0x114C0, prN}, // Mn [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA
+ {0x114C1, 0x114C1, prN}, // Mc TIRHUTA SIGN VISARGA
+ {0x114C2, 0x114C3, prN}, // Mn [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA
+ {0x114C4, 0x114C5, prN}, // Lo [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA GVANG
+ {0x114C6, 0x114C6, prN}, // Po TIRHUTA ABBREVIATION SIGN
+ {0x114C7, 0x114C7, prN}, // Lo TIRHUTA OM
+ {0x114D0, 0x114D9, prN}, // Nd [10] TIRHUTA DIGIT ZERO..TIRHUTA DIGIT NINE
+ {0x11580, 0x115AE, prN}, // Lo [47] SIDDHAM LETTER A..SIDDHAM LETTER HA
+ {0x115AF, 0x115B1, prN}, // Mc [3] SIDDHAM VOWEL SIGN AA..SIDDHAM VOWEL SIGN II
+ {0x115B2, 0x115B5, prN}, // Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR
+ {0x115B8, 0x115BB, prN}, // Mc [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU
+ {0x115BC, 0x115BD, prN}, // Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA
+ {0x115BE, 0x115BE, prN}, // Mc SIDDHAM SIGN VISARGA
+ {0x115BF, 0x115C0, prN}, // Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA
+ {0x115C1, 0x115D7, prN}, // Po [23] SIDDHAM SIGN SIDDHAM..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES
+ {0x115D8, 0x115DB, prN}, // Lo [4] SIDDHAM LETTER THREE-CIRCLE ALTERNATE I..SIDDHAM LETTER ALTERNATE U
+ {0x115DC, 0x115DD, prN}, // Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU
+ {0x11600, 0x1162F, prN}, // Lo [48] MODI LETTER A..MODI LETTER LLA
+ {0x11630, 0x11632, prN}, // Mc [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II
+ {0x11633, 0x1163A, prN}, // Mn [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI
+ {0x1163B, 0x1163C, prN}, // Mc [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU
+ {0x1163D, 0x1163D, prN}, // Mn MODI SIGN ANUSVARA
+ {0x1163E, 0x1163E, prN}, // Mc MODI SIGN VISARGA
+ {0x1163F, 0x11640, prN}, // Mn [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA
+ {0x11641, 0x11643, prN}, // Po [3] MODI DANDA..MODI ABBREVIATION SIGN
+ {0x11644, 0x11644, prN}, // Lo MODI SIGN HUVA
+ {0x11650, 0x11659, prN}, // Nd [10] MODI DIGIT ZERO..MODI DIGIT NINE
+ {0x11660, 0x1166C, prN}, // Po [13] MONGOLIAN BIRGA WITH ORNAMENT..MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT
+ {0x11680, 0x116AA, prN}, // Lo [43] TAKRI LETTER A..TAKRI LETTER RRA
+ {0x116AB, 0x116AB, prN}, // Mn TAKRI SIGN ANUSVARA
+ {0x116AC, 0x116AC, prN}, // Mc TAKRI SIGN VISARGA
+ {0x116AD, 0x116AD, prN}, // Mn TAKRI VOWEL SIGN AA
+ {0x116AE, 0x116AF, prN}, // Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II
+ {0x116B0, 0x116B5, prN}, // Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU
+ {0x116B6, 0x116B6, prN}, // Mc TAKRI SIGN VIRAMA
+ {0x116B7, 0x116B7, prN}, // Mn TAKRI SIGN NUKTA
+ {0x116B8, 0x116B8, prN}, // Lo TAKRI LETTER ARCHAIC KHA
+ {0x116B9, 0x116B9, prN}, // Po TAKRI ABBREVIATION SIGN
+ {0x116C0, 0x116C9, prN}, // Nd [10] TAKRI DIGIT ZERO..TAKRI DIGIT NINE
+ {0x11700, 0x1171A, prN}, // Lo [27] AHOM LETTER KA..AHOM LETTER ALTERNATE BA
+ {0x1171D, 0x1171F, prN}, // Mn [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA
+ {0x11720, 0x11721, prN}, // Mc [2] AHOM VOWEL SIGN A..AHOM VOWEL SIGN AA
+ {0x11722, 0x11725, prN}, // Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU
+ {0x11726, 0x11726, prN}, // Mc AHOM VOWEL SIGN E
+ {0x11727, 0x1172B, prN}, // Mn [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER
+ {0x11730, 0x11739, prN}, // Nd [10] AHOM DIGIT ZERO..AHOM DIGIT NINE
+ {0x1173A, 0x1173B, prN}, // No [2] AHOM NUMBER TEN..AHOM NUMBER TWENTY
+ {0x1173C, 0x1173E, prN}, // Po [3] AHOM SIGN SMALL SECTION..AHOM SIGN RULAI
+ {0x1173F, 0x1173F, prN}, // So AHOM SYMBOL VI
+ {0x11740, 0x11746, prN}, // Lo [7] AHOM LETTER CA..AHOM LETTER LLA
+ {0x11800, 0x1182B, prN}, // Lo [44] DOGRA LETTER A..DOGRA LETTER RRA
+ {0x1182C, 0x1182E, prN}, // Mc [3] DOGRA VOWEL SIGN AA..DOGRA VOWEL SIGN II
+ {0x1182F, 0x11837, prN}, // Mn [9] DOGRA VOWEL SIGN U..DOGRA SIGN ANUSVARA
+ {0x11838, 0x11838, prN}, // Mc DOGRA SIGN VISARGA
+ {0x11839, 0x1183A, prN}, // Mn [2] DOGRA SIGN VIRAMA..DOGRA SIGN NUKTA
+ {0x1183B, 0x1183B, prN}, // Po DOGRA ABBREVIATION SIGN
+ {0x118A0, 0x118DF, prN}, // L& [64] WARANG CITI CAPITAL LETTER NGAA..WARANG CITI SMALL LETTER VIYO
+ {0x118E0, 0x118E9, prN}, // Nd [10] WARANG CITI DIGIT ZERO..WARANG CITI DIGIT NINE
+ {0x118EA, 0x118F2, prN}, // No [9] WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY
+ {0x118FF, 0x118FF, prN}, // Lo WARANG CITI OM
+ {0x11900, 0x11906, prN}, // Lo [7] DIVES AKURU LETTER A..DIVES AKURU LETTER E
+ {0x11909, 0x11909, prN}, // Lo DIVES AKURU LETTER O
+ {0x1190C, 0x11913, prN}, // Lo [8] DIVES AKURU LETTER KA..DIVES AKURU LETTER JA
+ {0x11915, 0x11916, prN}, // Lo [2] DIVES AKURU LETTER NYA..DIVES AKURU LETTER TTA
+ {0x11918, 0x1192F, prN}, // Lo [24] DIVES AKURU LETTER DDA..DIVES AKURU LETTER ZA
+ {0x11930, 0x11935, prN}, // Mc [6] DIVES AKURU VOWEL SIGN AA..DIVES AKURU VOWEL SIGN E
+ {0x11937, 0x11938, prN}, // Mc [2] DIVES AKURU VOWEL SIGN AI..DIVES AKURU VOWEL SIGN O
+ {0x1193B, 0x1193C, prN}, // Mn [2] DIVES AKURU SIGN ANUSVARA..DIVES AKURU SIGN CANDRABINDU
+ {0x1193D, 0x1193D, prN}, // Mc DIVES AKURU SIGN HALANTA
+ {0x1193E, 0x1193E, prN}, // Mn DIVES AKURU VIRAMA
+ {0x1193F, 0x1193F, prN}, // Lo DIVES AKURU PREFIXED NASAL SIGN
+ {0x11940, 0x11940, prN}, // Mc DIVES AKURU MEDIAL YA
+ {0x11941, 0x11941, prN}, // Lo DIVES AKURU INITIAL RA
+ {0x11942, 0x11942, prN}, // Mc DIVES AKURU MEDIAL RA
+ {0x11943, 0x11943, prN}, // Mn DIVES AKURU SIGN NUKTA
+ {0x11944, 0x11946, prN}, // Po [3] DIVES AKURU DOUBLE DANDA..DIVES AKURU END OF TEXT MARK
+ {0x11950, 0x11959, prN}, // Nd [10] DIVES AKURU DIGIT ZERO..DIVES AKURU DIGIT NINE
+ {0x119A0, 0x119A7, prN}, // Lo [8] NANDINAGARI LETTER A..NANDINAGARI LETTER VOCALIC RR
+ {0x119AA, 0x119D0, prN}, // Lo [39] NANDINAGARI LETTER E..NANDINAGARI LETTER RRA
+ {0x119D1, 0x119D3, prN}, // Mc [3] NANDINAGARI VOWEL SIGN AA..NANDINAGARI VOWEL SIGN II
+ {0x119D4, 0x119D7, prN}, // Mn [4] NANDINAGARI VOWEL SIGN U..NANDINAGARI VOWEL SIGN VOCALIC RR
+ {0x119DA, 0x119DB, prN}, // Mn [2] NANDINAGARI VOWEL SIGN E..NANDINAGARI VOWEL SIGN AI
+ {0x119DC, 0x119DF, prN}, // Mc [4] NANDINAGARI VOWEL SIGN O..NANDINAGARI SIGN VISARGA
+ {0x119E0, 0x119E0, prN}, // Mn NANDINAGARI SIGN VIRAMA
+ {0x119E1, 0x119E1, prN}, // Lo NANDINAGARI SIGN AVAGRAHA
+ {0x119E2, 0x119E2, prN}, // Po NANDINAGARI SIGN SIDDHAM
+ {0x119E3, 0x119E3, prN}, // Lo NANDINAGARI HEADSTROKE
+ {0x119E4, 0x119E4, prN}, // Mc NANDINAGARI VOWEL SIGN PRISHTHAMATRA E
+ {0x11A00, 0x11A00, prN}, // Lo ZANABAZAR SQUARE LETTER A
+ {0x11A01, 0x11A0A, prN}, // Mn [10] ZANABAZAR SQUARE VOWEL SIGN I..ZANABAZAR SQUARE VOWEL LENGTH MARK
+ {0x11A0B, 0x11A32, prN}, // Lo [40] ZANABAZAR SQUARE LETTER KA..ZANABAZAR SQUARE LETTER KSSA
+ {0x11A33, 0x11A38, prN}, // Mn [6] ZANABAZAR SQUARE FINAL CONSONANT MARK..ZANABAZAR SQUARE SIGN ANUSVARA
+ {0x11A39, 0x11A39, prN}, // Mc ZANABAZAR SQUARE SIGN VISARGA
+ {0x11A3A, 0x11A3A, prN}, // Lo ZANABAZAR SQUARE CLUSTER-INITIAL LETTER RA
+ {0x11A3B, 0x11A3E, prN}, // Mn [4] ZANABAZAR SQUARE CLUSTER-FINAL LETTER YA..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
+ {0x11A3F, 0x11A46, prN}, // Po [8] ZANABAZAR SQUARE INITIAL HEAD MARK..ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK
+ {0x11A47, 0x11A47, prN}, // Mn ZANABAZAR SQUARE SUBJOINER
+ {0x11A50, 0x11A50, prN}, // Lo SOYOMBO LETTER A
+ {0x11A51, 0x11A56, prN}, // Mn [6] SOYOMBO VOWEL SIGN I..SOYOMBO VOWEL SIGN OE
+ {0x11A57, 0x11A58, prN}, // Mc [2] SOYOMBO VOWEL SIGN AI..SOYOMBO VOWEL SIGN AU
+ {0x11A59, 0x11A5B, prN}, // Mn [3] SOYOMBO VOWEL SIGN VOCALIC R..SOYOMBO VOWEL LENGTH MARK
+ {0x11A5C, 0x11A89, prN}, // Lo [46] SOYOMBO LETTER KA..SOYOMBO CLUSTER-INITIAL LETTER SA
+ {0x11A8A, 0x11A96, prN}, // Mn [13] SOYOMBO FINAL CONSONANT SIGN G..SOYOMBO SIGN ANUSVARA
+ {0x11A97, 0x11A97, prN}, // Mc SOYOMBO SIGN VISARGA
+ {0x11A98, 0x11A99, prN}, // Mn [2] SOYOMBO GEMINATION MARK..SOYOMBO SUBJOINER
+ {0x11A9A, 0x11A9C, prN}, // Po [3] SOYOMBO MARK TSHEG..SOYOMBO MARK DOUBLE SHAD
+ {0x11A9D, 0x11A9D, prN}, // Lo SOYOMBO MARK PLUTA
+ {0x11A9E, 0x11AA2, prN}, // Po [5] SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO TERMINAL MARK-2
+ {0x11AB0, 0x11ABF, prN}, // Lo [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA
+ {0x11AC0, 0x11AF8, prN}, // Lo [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL
+ {0x11C00, 0x11C08, prN}, // Lo [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L
+ {0x11C0A, 0x11C2E, prN}, // Lo [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA
+ {0x11C2F, 0x11C2F, prN}, // Mc BHAIKSUKI VOWEL SIGN AA
+ {0x11C30, 0x11C36, prN}, // Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L
+ {0x11C38, 0x11C3D, prN}, // Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA
+ {0x11C3E, 0x11C3E, prN}, // Mc BHAIKSUKI SIGN VISARGA
+ {0x11C3F, 0x11C3F, prN}, // Mn BHAIKSUKI SIGN VIRAMA
+ {0x11C40, 0x11C40, prN}, // Lo BHAIKSUKI SIGN AVAGRAHA
+ {0x11C41, 0x11C45, prN}, // Po [5] BHAIKSUKI DANDA..BHAIKSUKI GAP FILLER-2
+ {0x11C50, 0x11C59, prN}, // Nd [10] BHAIKSUKI DIGIT ZERO..BHAIKSUKI DIGIT NINE
+ {0x11C5A, 0x11C6C, prN}, // No [19] BHAIKSUKI NUMBER ONE..BHAIKSUKI HUNDREDS UNIT MARK
+ {0x11C70, 0x11C71, prN}, // Po [2] MARCHEN HEAD MARK..MARCHEN MARK SHAD
+ {0x11C72, 0x11C8F, prN}, // Lo [30] MARCHEN LETTER KA..MARCHEN LETTER A
+ {0x11C92, 0x11CA7, prN}, // Mn [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
+ {0x11CA9, 0x11CA9, prN}, // Mc MARCHEN SUBJOINED LETTER YA
+ {0x11CAA, 0x11CB0, prN}, // Mn [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA
+ {0x11CB1, 0x11CB1, prN}, // Mc MARCHEN VOWEL SIGN I
+ {0x11CB2, 0x11CB3, prN}, // Mn [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E
+ {0x11CB4, 0x11CB4, prN}, // Mc MARCHEN VOWEL SIGN O
+ {0x11CB5, 0x11CB6, prN}, // Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU
+ {0x11D00, 0x11D06, prN}, // Lo [7] MASARAM GONDI LETTER A..MASARAM GONDI LETTER E
+ {0x11D08, 0x11D09, prN}, // Lo [2] MASARAM GONDI LETTER AI..MASARAM GONDI LETTER O
+ {0x11D0B, 0x11D30, prN}, // Lo [38] MASARAM GONDI LETTER AU..MASARAM GONDI LETTER TRA
+ {0x11D31, 0x11D36, prN}, // Mn [6] MASARAM GONDI VOWEL SIGN AA..MASARAM GONDI VOWEL SIGN VOCALIC R
+ {0x11D3A, 0x11D3A, prN}, // Mn MASARAM GONDI VOWEL SIGN E
+ {0x11D3C, 0x11D3D, prN}, // Mn [2] MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
+ {0x11D3F, 0x11D45, prN}, // Mn [7] MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI VIRAMA
+ {0x11D46, 0x11D46, prN}, // Lo MASARAM GONDI REPHA
+ {0x11D47, 0x11D47, prN}, // Mn MASARAM GONDI RA-KARA
+ {0x11D50, 0x11D59, prN}, // Nd [10] MASARAM GONDI DIGIT ZERO..MASARAM GONDI DIGIT NINE
+ {0x11D60, 0x11D65, prN}, // Lo [6] GUNJALA GONDI LETTER A..GUNJALA GONDI LETTER UU
+ {0x11D67, 0x11D68, prN}, // Lo [2] GUNJALA GONDI LETTER EE..GUNJALA GONDI LETTER AI
+ {0x11D6A, 0x11D89, prN}, // Lo [32] GUNJALA GONDI LETTER OO..GUNJALA GONDI LETTER SA
+ {0x11D8A, 0x11D8E, prN}, // Mc [5] GUNJALA GONDI VOWEL SIGN AA..GUNJALA GONDI VOWEL SIGN UU
+ {0x11D90, 0x11D91, prN}, // Mn [2] GUNJALA GONDI VOWEL SIGN EE..GUNJALA GONDI VOWEL SIGN AI
+ {0x11D93, 0x11D94, prN}, // Mc [2] GUNJALA GONDI VOWEL SIGN OO..GUNJALA GONDI VOWEL SIGN AU
+ {0x11D95, 0x11D95, prN}, // Mn GUNJALA GONDI SIGN ANUSVARA
+ {0x11D96, 0x11D96, prN}, // Mc GUNJALA GONDI SIGN VISARGA
+ {0x11D97, 0x11D97, prN}, // Mn GUNJALA GONDI VIRAMA
+ {0x11D98, 0x11D98, prN}, // Lo GUNJALA GONDI OM
+ {0x11DA0, 0x11DA9, prN}, // Nd [10] GUNJALA GONDI DIGIT ZERO..GUNJALA GONDI DIGIT NINE
+ {0x11EE0, 0x11EF2, prN}, // Lo [19] MAKASAR LETTER KA..MAKASAR ANGKA
+ {0x11EF3, 0x11EF4, prN}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
+ {0x11EF5, 0x11EF6, prN}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x11EF7, 0x11EF8, prN}, // Po [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION
+ {0x11FB0, 0x11FB0, prN}, // Lo LISU LETTER YHA
+ {0x11FC0, 0x11FD4, prN}, // No [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH
+ {0x11FD5, 0x11FDC, prN}, // So [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI
+ {0x11FDD, 0x11FE0, prN}, // Sc [4] TAMIL SIGN KAACU..TAMIL SIGN VARAAKAN
+ {0x11FE1, 0x11FF1, prN}, // So [17] TAMIL SIGN PAARAM..TAMIL SIGN VAKAIYARAA
+ {0x11FFF, 0x11FFF, prN}, // Po TAMIL PUNCTUATION END OF TEXT
+ {0x12000, 0x12399, prN}, // Lo [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U
+ {0x12400, 0x1246E, prN}, // Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM
+ {0x12470, 0x12474, prN}, // Po [5] CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON
+ {0x12480, 0x12543, prN}, // Lo [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU
+ {0x12F90, 0x12FF0, prN}, // Lo [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114
+ {0x12FF1, 0x12FF2, prN}, // Po [2] CYPRO-MINOAN SIGN CM301..CYPRO-MINOAN SIGN CM302
+ {0x13000, 0x1342E, prN}, // Lo [1071] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032
+ {0x13430, 0x13438, prN}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x14400, 0x14646, prN}, // Lo [583] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530
+ {0x16800, 0x16A38, prN}, // Lo [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ
+ {0x16A40, 0x16A5E, prN}, // Lo [31] MRO LETTER TA..MRO LETTER TEK
+ {0x16A60, 0x16A69, prN}, // Nd [10] MRO DIGIT ZERO..MRO DIGIT NINE
+ {0x16A6E, 0x16A6F, prN}, // Po [2] MRO DANDA..MRO DOUBLE DANDA
+ {0x16A70, 0x16ABE, prN}, // Lo [79] TANGSA LETTER OZ..TANGSA LETTER ZA
+ {0x16AC0, 0x16AC9, prN}, // Nd [10] TANGSA DIGIT ZERO..TANGSA DIGIT NINE
+ {0x16AD0, 0x16AED, prN}, // Lo [30] BASSA VAH LETTER ENNI..BASSA VAH LETTER I
+ {0x16AF0, 0x16AF4, prN}, // Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
+ {0x16AF5, 0x16AF5, prN}, // Po BASSA VAH FULL STOP
+ {0x16B00, 0x16B2F, prN}, // Lo [48] PAHAWH HMONG VOWEL KEEB..PAHAWH HMONG CONSONANT CAU
+ {0x16B30, 0x16B36, prN}, // Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
+ {0x16B37, 0x16B3B, prN}, // Po [5] PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN VOS FEEM
+ {0x16B3C, 0x16B3F, prN}, // So [4] PAHAWH HMONG SIGN XYEEM NTXIV..PAHAWH HMONG SIGN XYEEM FAIB
+ {0x16B40, 0x16B43, prN}, // Lm [4] PAHAWH HMONG SIGN VOS SEEV..PAHAWH HMONG SIGN IB YAM
+ {0x16B44, 0x16B44, prN}, // Po PAHAWH HMONG SIGN XAUS
+ {0x16B45, 0x16B45, prN}, // So PAHAWH HMONG SIGN CIM TSOV ROG
+ {0x16B50, 0x16B59, prN}, // Nd [10] PAHAWH HMONG DIGIT ZERO..PAHAWH HMONG DIGIT NINE
+ {0x16B5B, 0x16B61, prN}, // No [7] PAHAWH HMONG NUMBER TENS..PAHAWH HMONG NUMBER TRILLIONS
+ {0x16B63, 0x16B77, prN}, // Lo [21] PAHAWH HMONG SIGN VOS LUB..PAHAWH HMONG SIGN CIM NRES TOS
+ {0x16B7D, 0x16B8F, prN}, // Lo [19] PAHAWH HMONG CLAN SIGN TSHEEJ..PAHAWH HMONG CLAN SIGN VWJ
+ {0x16E40, 0x16E7F, prN}, // L& [64] MEDEFAIDRIN CAPITAL LETTER M..MEDEFAIDRIN SMALL LETTER Y
+ {0x16E80, 0x16E96, prN}, // No [23] MEDEFAIDRIN DIGIT ZERO..MEDEFAIDRIN DIGIT THREE ALTERNATE FORM
+ {0x16E97, 0x16E9A, prN}, // Po [4] MEDEFAIDRIN COMMA..MEDEFAIDRIN EXCLAMATION OH
+ {0x16F00, 0x16F4A, prN}, // Lo [75] MIAO LETTER PA..MIAO LETTER RTE
+ {0x16F4F, 0x16F4F, prN}, // Mn MIAO SIGN CONSONANT MODIFIER BAR
+ {0x16F50, 0x16F50, prN}, // Lo MIAO LETTER NASALIZATION
+ {0x16F51, 0x16F87, prN}, // Mc [55] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN UI
+ {0x16F8F, 0x16F92, prN}, // Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW
+ {0x16F93, 0x16F9F, prN}, // Lm [13] MIAO LETTER TONE-2..MIAO LETTER REFORMED TONE-8
+ {0x16FE0, 0x16FE1, prW}, // Lm [2] TANGUT ITERATION MARK..NUSHU ITERATION MARK
+ {0x16FE2, 0x16FE2, prW}, // Po OLD CHINESE HOOK MARK
+ {0x16FE3, 0x16FE3, prW}, // Lm OLD CHINESE ITERATION MARK
+ {0x16FE4, 0x16FE4, prW}, // Mn KHITAN SMALL SCRIPT FILLER
+ {0x16FF0, 0x16FF1, prW}, // Mc [2] VIETNAMESE ALTERNATE READING MARK CA..VIETNAMESE ALTERNATE READING MARK NHAY
+ {0x17000, 0x187F7, prW}, // Lo [6136] TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187F7
+ {0x18800, 0x18AFF, prW}, // Lo [768] TANGUT COMPONENT-001..TANGUT COMPONENT-768
+ {0x18B00, 0x18CD5, prW}, // Lo [470] KHITAN SMALL SCRIPT CHARACTER-18B00..KHITAN SMALL SCRIPT CHARACTER-18CD5
+ {0x18D00, 0x18D08, prW}, // Lo [9] TANGUT IDEOGRAPH-18D00..TANGUT IDEOGRAPH-18D08
+ {0x1AFF0, 0x1AFF3, prW}, // Lm [4] KATAKANA LETTER MINNAN TONE-2..KATAKANA LETTER MINNAN TONE-5
+ {0x1AFF5, 0x1AFFB, prW}, // Lm [7] KATAKANA LETTER MINNAN TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-5
+ {0x1AFFD, 0x1AFFE, prW}, // Lm [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
+ {0x1B000, 0x1B0FF, prW}, // Lo [256] KATAKANA LETTER ARCHAIC E..HENTAIGANA LETTER RE-2
+ {0x1B100, 0x1B122, prW}, // Lo [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU
+ {0x1B150, 0x1B152, prW}, // Lo [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO
+ {0x1B164, 0x1B167, prW}, // Lo [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
+ {0x1B170, 0x1B2FB, prW}, // Lo [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
+ {0x1BC00, 0x1BC6A, prN}, // Lo [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
+ {0x1BC70, 0x1BC7C, prN}, // Lo [13] DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK
+ {0x1BC80, 0x1BC88, prN}, // Lo [9] DUPLOYAN AFFIX HIGH ACUTE..DUPLOYAN AFFIX HIGH VERTICAL
+ {0x1BC90, 0x1BC99, prN}, // Lo [10] DUPLOYAN AFFIX LOW ACUTE..DUPLOYAN AFFIX LOW ARROW
+ {0x1BC9C, 0x1BC9C, prN}, // So DUPLOYAN SIGN O WITH CROSS
+ {0x1BC9D, 0x1BC9E, prN}, // Mn [2] DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK
+ {0x1BC9F, 0x1BC9F, prN}, // Po DUPLOYAN PUNCTUATION CHINOOK FULL STOP
+ {0x1BCA0, 0x1BCA3, prN}, // Cf [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP
+ {0x1CF00, 0x1CF2D, prN}, // Mn [46] ZNAMENNY COMBINING MARK GORAZDO NIZKO S KRYZHEM ON LEFT..ZNAMENNY COMBINING MARK KRYZH ON LEFT
+ {0x1CF30, 0x1CF46, prN}, // Mn [23] ZNAMENNY COMBINING TONAL RANGE MARK MRACHNO..ZNAMENNY PRIZNAK MODIFIER ROG
+ {0x1CF50, 0x1CFC3, prN}, // So [116] ZNAMENNY NEUME KRYUK..ZNAMENNY NEUME PAUK
+ {0x1D000, 0x1D0F5, prN}, // So [246] BYZANTINE MUSICAL SYMBOL PSILI..BYZANTINE MUSICAL SYMBOL GORGON NEO KATO
+ {0x1D100, 0x1D126, prN}, // So [39] MUSICAL SYMBOL SINGLE BARLINE..MUSICAL SYMBOL DRUM CLEF-2
+ {0x1D129, 0x1D164, prN}, // So [60] MUSICAL SYMBOL MULTIPLE MEASURE REST..MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE
+ {0x1D165, 0x1D166, prN}, // Mc [2] MUSICAL SYMBOL COMBINING STEM..MUSICAL SYMBOL COMBINING SPRECHGESANG STEM
+ {0x1D167, 0x1D169, prN}, // Mn [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3
+ {0x1D16A, 0x1D16C, prN}, // So [3] MUSICAL SYMBOL FINGERED TREMOLO-1..MUSICAL SYMBOL FINGERED TREMOLO-3
+ {0x1D16D, 0x1D172, prN}, // Mc [6] MUSICAL SYMBOL COMBINING AUGMENTATION DOT..MUSICAL SYMBOL COMBINING FLAG-5
+ {0x1D173, 0x1D17A, prN}, // Cf [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE
+ {0x1D17B, 0x1D182, prN}, // Mn [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE
+ {0x1D183, 0x1D184, prN}, // So [2] MUSICAL SYMBOL ARPEGGIATO UP..MUSICAL SYMBOL ARPEGGIATO DOWN
+ {0x1D185, 0x1D18B, prN}, // Mn [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE
+ {0x1D18C, 0x1D1A9, prN}, // So [30] MUSICAL SYMBOL RINFORZANDO..MUSICAL SYMBOL DEGREE SLASH
+ {0x1D1AA, 0x1D1AD, prN}, // Mn [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO
+ {0x1D1AE, 0x1D1EA, prN}, // So [61] MUSICAL SYMBOL PEDAL MARK..MUSICAL SYMBOL KORON
+ {0x1D200, 0x1D241, prN}, // So [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54
+ {0x1D242, 0x1D244, prN}, // Mn [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
+ {0x1D245, 0x1D245, prN}, // So GREEK MUSICAL LEIMMA
+ {0x1D2E0, 0x1D2F3, prN}, // No [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN
+ {0x1D300, 0x1D356, prN}, // So [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING
+ {0x1D360, 0x1D378, prN}, // No [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE
+ {0x1D400, 0x1D454, prN}, // L& [85] MATHEMATICAL BOLD CAPITAL A..MATHEMATICAL ITALIC SMALL G
+ {0x1D456, 0x1D49C, prN}, // L& [71] MATHEMATICAL ITALIC SMALL I..MATHEMATICAL SCRIPT CAPITAL A
+ {0x1D49E, 0x1D49F, prN}, // Lu [2] MATHEMATICAL SCRIPT CAPITAL C..MATHEMATICAL SCRIPT CAPITAL D
+ {0x1D4A2, 0x1D4A2, prN}, // Lu MATHEMATICAL SCRIPT CAPITAL G
+ {0x1D4A5, 0x1D4A6, prN}, // Lu [2] MATHEMATICAL SCRIPT CAPITAL J..MATHEMATICAL SCRIPT CAPITAL K
+ {0x1D4A9, 0x1D4AC, prN}, // Lu [4] MATHEMATICAL SCRIPT CAPITAL N..MATHEMATICAL SCRIPT CAPITAL Q
+ {0x1D4AE, 0x1D4B9, prN}, // L& [12] MATHEMATICAL SCRIPT CAPITAL S..MATHEMATICAL SCRIPT SMALL D
+ {0x1D4BB, 0x1D4BB, prN}, // Ll MATHEMATICAL SCRIPT SMALL F
+ {0x1D4BD, 0x1D4C3, prN}, // Ll [7] MATHEMATICAL SCRIPT SMALL H..MATHEMATICAL SCRIPT SMALL N
+ {0x1D4C5, 0x1D505, prN}, // L& [65] MATHEMATICAL SCRIPT SMALL P..MATHEMATICAL FRAKTUR CAPITAL B
+ {0x1D507, 0x1D50A, prN}, // Lu [4] MATHEMATICAL FRAKTUR CAPITAL D..MATHEMATICAL FRAKTUR CAPITAL G
+ {0x1D50D, 0x1D514, prN}, // Lu [8] MATHEMATICAL FRAKTUR CAPITAL J..MATHEMATICAL FRAKTUR CAPITAL Q
+ {0x1D516, 0x1D51C, prN}, // Lu [7] MATHEMATICAL FRAKTUR CAPITAL S..MATHEMATICAL FRAKTUR CAPITAL Y
+ {0x1D51E, 0x1D539, prN}, // L& [28] MATHEMATICAL FRAKTUR SMALL A..MATHEMATICAL DOUBLE-STRUCK CAPITAL B
+ {0x1D53B, 0x1D53E, prN}, // Lu [4] MATHEMATICAL DOUBLE-STRUCK CAPITAL D..MATHEMATICAL DOUBLE-STRUCK CAPITAL G
+ {0x1D540, 0x1D544, prN}, // Lu [5] MATHEMATICAL DOUBLE-STRUCK CAPITAL I..MATHEMATICAL DOUBLE-STRUCK CAPITAL M
+ {0x1D546, 0x1D546, prN}, // Lu MATHEMATICAL DOUBLE-STRUCK CAPITAL O
+ {0x1D54A, 0x1D550, prN}, // Lu [7] MATHEMATICAL DOUBLE-STRUCK CAPITAL S..MATHEMATICAL DOUBLE-STRUCK CAPITAL Y
+ {0x1D552, 0x1D6A5, prN}, // L& [340] MATHEMATICAL DOUBLE-STRUCK SMALL A..MATHEMATICAL ITALIC SMALL DOTLESS J
+ {0x1D6A8, 0x1D6C0, prN}, // Lu [25] MATHEMATICAL BOLD CAPITAL ALPHA..MATHEMATICAL BOLD CAPITAL OMEGA
+ {0x1D6C1, 0x1D6C1, prN}, // Sm MATHEMATICAL BOLD NABLA
+ {0x1D6C2, 0x1D6DA, prN}, // Ll [25] MATHEMATICAL BOLD SMALL ALPHA..MATHEMATICAL BOLD SMALL OMEGA
+ {0x1D6DB, 0x1D6DB, prN}, // Sm MATHEMATICAL BOLD PARTIAL DIFFERENTIAL
+ {0x1D6DC, 0x1D6FA, prN}, // L& [31] MATHEMATICAL BOLD EPSILON SYMBOL..MATHEMATICAL ITALIC CAPITAL OMEGA
+ {0x1D6FB, 0x1D6FB, prN}, // Sm MATHEMATICAL ITALIC NABLA
+ {0x1D6FC, 0x1D714, prN}, // Ll [25] MATHEMATICAL ITALIC SMALL ALPHA..MATHEMATICAL ITALIC SMALL OMEGA
+ {0x1D715, 0x1D715, prN}, // Sm MATHEMATICAL ITALIC PARTIAL DIFFERENTIAL
+ {0x1D716, 0x1D734, prN}, // L& [31] MATHEMATICAL ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD ITALIC CAPITAL OMEGA
+ {0x1D735, 0x1D735, prN}, // Sm MATHEMATICAL BOLD ITALIC NABLA
+ {0x1D736, 0x1D74E, prN}, // Ll [25] MATHEMATICAL BOLD ITALIC SMALL ALPHA..MATHEMATICAL BOLD ITALIC SMALL OMEGA
+ {0x1D74F, 0x1D74F, prN}, // Sm MATHEMATICAL BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D750, 0x1D76E, prN}, // L& [31] MATHEMATICAL BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA
+ {0x1D76F, 0x1D76F, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD NABLA
+ {0x1D770, 0x1D788, prN}, // Ll [25] MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA
+ {0x1D789, 0x1D789, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD PARTIAL DIFFERENTIAL
+ {0x1D78A, 0x1D7A8, prN}, // L& [31] MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA
+ {0x1D7A9, 0x1D7A9, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD ITALIC NABLA
+ {0x1D7AA, 0x1D7C2, prN}, // Ll [25] MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA
+ {0x1D7C3, 0x1D7C3, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D7C4, 0x1D7CB, prN}, // L& [8] MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD SMALL DIGAMMA
+ {0x1D7CE, 0x1D7FF, prN}, // Nd [50] MATHEMATICAL BOLD DIGIT ZERO..MATHEMATICAL MONOSPACE DIGIT NINE
+ {0x1D800, 0x1D9FF, prN}, // So [512] SIGNWRITING HAND-FIST INDEX..SIGNWRITING HEAD
+ {0x1DA00, 0x1DA36, prN}, // Mn [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+ {0x1DA37, 0x1DA3A, prN}, // So [4] SIGNWRITING AIR BLOW SMALL ROTATIONS..SIGNWRITING BREATH EXHALE
+ {0x1DA3B, 0x1DA6C, prN}, // Mn [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+ {0x1DA6D, 0x1DA74, prN}, // So [8] SIGNWRITING SHOULDER HIP SPINE..SIGNWRITING TORSO-FLOORPLANE TWISTING
+ {0x1DA75, 0x1DA75, prN}, // Mn SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+ {0x1DA76, 0x1DA83, prN}, // So [14] SIGNWRITING LIMB COMBINATION..SIGNWRITING LOCATION DEPTH
+ {0x1DA84, 0x1DA84, prN}, // Mn SIGNWRITING LOCATION HEAD NECK
+ {0x1DA85, 0x1DA86, prN}, // So [2] SIGNWRITING LOCATION TORSO..SIGNWRITING LOCATION LIMBS DIGITS
+ {0x1DA87, 0x1DA8B, prN}, // Po [5] SIGNWRITING COMMA..SIGNWRITING PARENTHESIS
+ {0x1DA9B, 0x1DA9F, prN}, // Mn [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+ {0x1DAA1, 0x1DAAF, prN}, // Mn [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+ {0x1DF00, 0x1DF09, prN}, // Ll [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
+ {0x1DF0A, 0x1DF0A, prN}, // Lo LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
+ {0x1DF0B, 0x1DF1E, prN}, // Ll [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1E000, 0x1E006, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+ {0x1E008, 0x1E018, prN}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+ {0x1E01B, 0x1E021, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+ {0x1E023, 0x1E024, prN}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+ {0x1E026, 0x1E02A, prN}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E100, 0x1E12C, prN}, // Lo [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
+ {0x1E130, 0x1E136, prN}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
+ {0x1E137, 0x1E13D, prN}, // Lm [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
+ {0x1E140, 0x1E149, prN}, // Nd [10] NYIAKENG PUACHUE HMONG DIGIT ZERO..NYIAKENG PUACHUE HMONG DIGIT NINE
+ {0x1E14E, 0x1E14E, prN}, // Lo NYIAKENG PUACHUE HMONG LOGOGRAM NYAJ
+ {0x1E14F, 0x1E14F, prN}, // So NYIAKENG PUACHUE HMONG CIRCLED CA
+ {0x1E290, 0x1E2AD, prN}, // Lo [30] TOTO LETTER PA..TOTO LETTER A
+ {0x1E2AE, 0x1E2AE, prN}, // Mn TOTO SIGN RISING TONE
+ {0x1E2C0, 0x1E2EB, prN}, // Lo [44] WANCHO LETTER AA..WANCHO LETTER YIH
+ {0x1E2EC, 0x1E2EF, prN}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E2F0, 0x1E2F9, prN}, // Nd [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
+ {0x1E2FF, 0x1E2FF, prN}, // Sc WANCHO NGUN SIGN
+ {0x1E7E0, 0x1E7E6, prN}, // Lo [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
+ {0x1E7E8, 0x1E7EB, prN}, // Lo [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
+ {0x1E7ED, 0x1E7EE, prN}, // Lo [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
+ {0x1E7F0, 0x1E7FE, prN}, // Lo [15] ETHIOPIC SYLLABLE GURAGE QWI..ETHIOPIC SYLLABLE GURAGE PWEE
+ {0x1E800, 0x1E8C4, prN}, // Lo [197] MENDE KIKAKUI SYLLABLE M001 KI..MENDE KIKAKUI SYLLABLE M060 NYON
+ {0x1E8C7, 0x1E8CF, prN}, // No [9] MENDE KIKAKUI DIGIT ONE..MENDE KIKAKUI DIGIT NINE
+ {0x1E8D0, 0x1E8D6, prN}, // Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+ {0x1E900, 0x1E943, prN}, // L& [68] ADLAM CAPITAL LETTER ALIF..ADLAM SMALL LETTER SHA
+ {0x1E944, 0x1E94A, prN}, // Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
+ {0x1E94B, 0x1E94B, prN}, // Lm ADLAM NASALIZATION MARK
+ {0x1E950, 0x1E959, prN}, // Nd [10] ADLAM DIGIT ZERO..ADLAM DIGIT NINE
+ {0x1E95E, 0x1E95F, prN}, // Po [2] ADLAM INITIAL EXCLAMATION MARK..ADLAM INITIAL QUESTION MARK
+ {0x1EC71, 0x1ECAB, prN}, // No [59] INDIC SIYAQ NUMBER ONE..INDIC SIYAQ NUMBER PREFIXED NINE
+ {0x1ECAC, 0x1ECAC, prN}, // So INDIC SIYAQ PLACEHOLDER
+ {0x1ECAD, 0x1ECAF, prN}, // No [3] INDIC SIYAQ FRACTION ONE QUARTER..INDIC SIYAQ FRACTION THREE QUARTERS
+ {0x1ECB0, 0x1ECB0, prN}, // Sc INDIC SIYAQ RUPEE MARK
+ {0x1ECB1, 0x1ECB4, prN}, // No [4] INDIC SIYAQ NUMBER ALTERNATE ONE..INDIC SIYAQ ALTERNATE LAKH MARK
+ {0x1ED01, 0x1ED2D, prN}, // No [45] OTTOMAN SIYAQ NUMBER ONE..OTTOMAN SIYAQ NUMBER NINETY THOUSAND
+ {0x1ED2E, 0x1ED2E, prN}, // So OTTOMAN SIYAQ MARRATAN
+ {0x1ED2F, 0x1ED3D, prN}, // No [15] OTTOMAN SIYAQ ALTERNATE NUMBER TWO..OTTOMAN SIYAQ FRACTION ONE SIXTH
+ {0x1EE00, 0x1EE03, prN}, // Lo [4] ARABIC MATHEMATICAL ALEF..ARABIC MATHEMATICAL DAL
+ {0x1EE05, 0x1EE1F, prN}, // Lo [27] ARABIC MATHEMATICAL WAW..ARABIC MATHEMATICAL DOTLESS QAF
+ {0x1EE21, 0x1EE22, prN}, // Lo [2] ARABIC MATHEMATICAL INITIAL BEH..ARABIC MATHEMATICAL INITIAL JEEM
+ {0x1EE24, 0x1EE24, prN}, // Lo ARABIC MATHEMATICAL INITIAL HEH
+ {0x1EE27, 0x1EE27, prN}, // Lo ARABIC MATHEMATICAL INITIAL HAH
+ {0x1EE29, 0x1EE32, prN}, // Lo [10] ARABIC MATHEMATICAL INITIAL YEH..ARABIC MATHEMATICAL INITIAL QAF
+ {0x1EE34, 0x1EE37, prN}, // Lo [4] ARABIC MATHEMATICAL INITIAL SHEEN..ARABIC MATHEMATICAL INITIAL KHAH
+ {0x1EE39, 0x1EE39, prN}, // Lo ARABIC MATHEMATICAL INITIAL DAD
+ {0x1EE3B, 0x1EE3B, prN}, // Lo ARABIC MATHEMATICAL INITIAL GHAIN
+ {0x1EE42, 0x1EE42, prN}, // Lo ARABIC MATHEMATICAL TAILED JEEM
+ {0x1EE47, 0x1EE47, prN}, // Lo ARABIC MATHEMATICAL TAILED HAH
+ {0x1EE49, 0x1EE49, prN}, // Lo ARABIC MATHEMATICAL TAILED YEH
+ {0x1EE4B, 0x1EE4B, prN}, // Lo ARABIC MATHEMATICAL TAILED LAM
+ {0x1EE4D, 0x1EE4F, prN}, // Lo [3] ARABIC MATHEMATICAL TAILED NOON..ARABIC MATHEMATICAL TAILED AIN
+ {0x1EE51, 0x1EE52, prN}, // Lo [2] ARABIC MATHEMATICAL TAILED SAD..ARABIC MATHEMATICAL TAILED QAF
+ {0x1EE54, 0x1EE54, prN}, // Lo ARABIC MATHEMATICAL TAILED SHEEN
+ {0x1EE57, 0x1EE57, prN}, // Lo ARABIC MATHEMATICAL TAILED KHAH
+ {0x1EE59, 0x1EE59, prN}, // Lo ARABIC MATHEMATICAL TAILED DAD
+ {0x1EE5B, 0x1EE5B, prN}, // Lo ARABIC MATHEMATICAL TAILED GHAIN
+ {0x1EE5D, 0x1EE5D, prN}, // Lo ARABIC MATHEMATICAL TAILED DOTLESS NOON
+ {0x1EE5F, 0x1EE5F, prN}, // Lo ARABIC MATHEMATICAL TAILED DOTLESS QAF
+ {0x1EE61, 0x1EE62, prN}, // Lo [2] ARABIC MATHEMATICAL STRETCHED BEH..ARABIC MATHEMATICAL STRETCHED JEEM
+ {0x1EE64, 0x1EE64, prN}, // Lo ARABIC MATHEMATICAL STRETCHED HEH
+ {0x1EE67, 0x1EE6A, prN}, // Lo [4] ARABIC MATHEMATICAL STRETCHED HAH..ARABIC MATHEMATICAL STRETCHED KAF
+ {0x1EE6C, 0x1EE72, prN}, // Lo [7] ARABIC MATHEMATICAL STRETCHED MEEM..ARABIC MATHEMATICAL STRETCHED QAF
+ {0x1EE74, 0x1EE77, prN}, // Lo [4] ARABIC MATHEMATICAL STRETCHED SHEEN..ARABIC MATHEMATICAL STRETCHED KHAH
+ {0x1EE79, 0x1EE7C, prN}, // Lo [4] ARABIC MATHEMATICAL STRETCHED DAD..ARABIC MATHEMATICAL STRETCHED DOTLESS BEH
+ {0x1EE7E, 0x1EE7E, prN}, // Lo ARABIC MATHEMATICAL STRETCHED DOTLESS FEH
+ {0x1EE80, 0x1EE89, prN}, // Lo [10] ARABIC MATHEMATICAL LOOPED ALEF..ARABIC MATHEMATICAL LOOPED YEH
+ {0x1EE8B, 0x1EE9B, prN}, // Lo [17] ARABIC MATHEMATICAL LOOPED LAM..ARABIC MATHEMATICAL LOOPED GHAIN
+ {0x1EEA1, 0x1EEA3, prN}, // Lo [3] ARABIC MATHEMATICAL DOUBLE-STRUCK BEH..ARABIC MATHEMATICAL DOUBLE-STRUCK DAL
+ {0x1EEA5, 0x1EEA9, prN}, // Lo [5] ARABIC MATHEMATICAL DOUBLE-STRUCK WAW..ARABIC MATHEMATICAL DOUBLE-STRUCK YEH
+ {0x1EEAB, 0x1EEBB, prN}, // Lo [17] ARABIC MATHEMATICAL DOUBLE-STRUCK LAM..ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN
+ {0x1EEF0, 0x1EEF1, prN}, // Sm [2] ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL..ARABIC MATHEMATICAL OPERATOR HAH WITH DAL
+ {0x1F000, 0x1F003, prN}, // So [4] MAHJONG TILE EAST WIND..MAHJONG TILE NORTH WIND
+ {0x1F004, 0x1F004, prW}, // So MAHJONG TILE RED DRAGON
+ {0x1F005, 0x1F02B, prN}, // So [39] MAHJONG TILE GREEN DRAGON..MAHJONG TILE BACK
+ {0x1F030, 0x1F093, prN}, // So [100] DOMINO TILE HORIZONTAL BACK..DOMINO TILE VERTICAL-06-06
+ {0x1F0A0, 0x1F0AE, prN}, // So [15] PLAYING CARD BACK..PLAYING CARD KING OF SPADES
+ {0x1F0B1, 0x1F0BF, prN}, // So [15] PLAYING CARD ACE OF HEARTS..PLAYING CARD RED JOKER
+ {0x1F0C1, 0x1F0CE, prN}, // So [14] PLAYING CARD ACE OF DIAMONDS..PLAYING CARD KING OF DIAMONDS
+ {0x1F0CF, 0x1F0CF, prW}, // So PLAYING CARD BLACK JOKER
+ {0x1F0D1, 0x1F0F5, prN}, // So [37] PLAYING CARD ACE OF CLUBS..PLAYING CARD TRUMP-21
+ {0x1F100, 0x1F10A, prA}, // No [11] DIGIT ZERO FULL STOP..DIGIT NINE COMMA
+ {0x1F10B, 0x1F10C, prN}, // No [2] DINGBAT CIRCLED SANS-SERIF DIGIT ZERO..DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ZERO
+ {0x1F10D, 0x1F10F, prN}, // So [3] CIRCLED ZERO WITH SLASH..CIRCLED DOLLAR SIGN WITH OVERLAID BACKSLASH
+ {0x1F110, 0x1F12D, prA}, // So [30] PARENTHESIZED LATIN CAPITAL LETTER A..CIRCLED CD
+ {0x1F12E, 0x1F12F, prN}, // So [2] CIRCLED WZ..COPYLEFT SYMBOL
+ {0x1F130, 0x1F169, prA}, // So [58] SQUARED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z
+ {0x1F16A, 0x1F16F, prN}, // So [6] RAISED MC SIGN..CIRCLED HUMAN FIGURE
+ {0x1F170, 0x1F18D, prA}, // So [30] NEGATIVE SQUARED LATIN CAPITAL LETTER A..NEGATIVE SQUARED SA
+ {0x1F18E, 0x1F18E, prW}, // So NEGATIVE SQUARED AB
+ {0x1F18F, 0x1F190, prA}, // So [2] NEGATIVE SQUARED WC..SQUARE DJ
+ {0x1F191, 0x1F19A, prW}, // So [10] SQUARED CL..SQUARED VS
+ {0x1F19B, 0x1F1AC, prA}, // So [18] SQUARED THREE D..SQUARED VOD
+ {0x1F1AD, 0x1F1AD, prN}, // So MASK WORK SYMBOL
+ {0x1F1E6, 0x1F1FF, prN}, // So [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
+ {0x1F200, 0x1F202, prW}, // So [3] SQUARE HIRAGANA HOKA..SQUARED KATAKANA SA
+ {0x1F210, 0x1F23B, prW}, // So [44] SQUARED CJK UNIFIED IDEOGRAPH-624B..SQUARED CJK UNIFIED IDEOGRAPH-914D
+ {0x1F240, 0x1F248, prW}, // So [9] TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-672C..TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6557
+ {0x1F250, 0x1F251, prW}, // So [2] CIRCLED IDEOGRAPH ADVANTAGE..CIRCLED IDEOGRAPH ACCEPT
+ {0x1F260, 0x1F265, prW}, // So [6] ROUNDED SYMBOL FOR FU..ROUNDED SYMBOL FOR CAI
+ {0x1F300, 0x1F320, prW}, // So [33] CYCLONE..SHOOTING STAR
+ {0x1F321, 0x1F32C, prN}, // So [12] THERMOMETER..WIND BLOWING FACE
+ {0x1F32D, 0x1F335, prW}, // So [9] HOT DOG..CACTUS
+ {0x1F336, 0x1F336, prN}, // So HOT PEPPER
+ {0x1F337, 0x1F37C, prW}, // So [70] TULIP..BABY BOTTLE
+ {0x1F37D, 0x1F37D, prN}, // So FORK AND KNIFE WITH PLATE
+ {0x1F37E, 0x1F393, prW}, // So [22] BOTTLE WITH POPPING CORK..GRADUATION CAP
+ {0x1F394, 0x1F39F, prN}, // So [12] HEART WITH TIP ON THE LEFT..ADMISSION TICKETS
+ {0x1F3A0, 0x1F3CA, prW}, // So [43] CAROUSEL HORSE..SWIMMER
+ {0x1F3CB, 0x1F3CE, prN}, // So [4] WEIGHT LIFTER..RACING CAR
+ {0x1F3CF, 0x1F3D3, prW}, // So [5] CRICKET BAT AND BALL..TABLE TENNIS PADDLE AND BALL
+ {0x1F3D4, 0x1F3DF, prN}, // So [12] SNOW CAPPED MOUNTAIN..STADIUM
+ {0x1F3E0, 0x1F3F0, prW}, // So [17] HOUSE BUILDING..EUROPEAN CASTLE
+ {0x1F3F1, 0x1F3F3, prN}, // So [3] WHITE PENNANT..WAVING WHITE FLAG
+ {0x1F3F4, 0x1F3F4, prW}, // So WAVING BLACK FLAG
+ {0x1F3F5, 0x1F3F7, prN}, // So [3] ROSETTE..LABEL
+ {0x1F3F8, 0x1F3FA, prW}, // So [3] BADMINTON RACQUET AND SHUTTLECOCK..AMPHORA
+ {0x1F3FB, 0x1F3FF, prW}, // Sk [5] EMOJI MODIFIER FITZPATRICK TYPE-1-2..EMOJI MODIFIER FITZPATRICK TYPE-6
+ {0x1F400, 0x1F43E, prW}, // So [63] RAT..PAW PRINTS
+ {0x1F43F, 0x1F43F, prN}, // So CHIPMUNK
+ {0x1F440, 0x1F440, prW}, // So EYES
+ {0x1F441, 0x1F441, prN}, // So EYE
+ {0x1F442, 0x1F4FC, prW}, // So [187] EAR..VIDEOCASSETTE
+ {0x1F4FD, 0x1F4FE, prN}, // So [2] FILM PROJECTOR..PORTABLE STEREO
+ {0x1F4FF, 0x1F53D, prW}, // So [63] PRAYER BEADS..DOWN-POINTING SMALL RED TRIANGLE
+ {0x1F53E, 0x1F54A, prN}, // So [13] LOWER RIGHT SHADOWED WHITE CIRCLE..DOVE OF PEACE
+ {0x1F54B, 0x1F54E, prW}, // So [4] KAABA..MENORAH WITH NINE BRANCHES
+ {0x1F54F, 0x1F54F, prN}, // So BOWL OF HYGIEIA
+ {0x1F550, 0x1F567, prW}, // So [24] CLOCK FACE ONE OCLOCK..CLOCK FACE TWELVE-THIRTY
+ {0x1F568, 0x1F579, prN}, // So [18] RIGHT SPEAKER..JOYSTICK
+ {0x1F57A, 0x1F57A, prW}, // So MAN DANCING
+ {0x1F57B, 0x1F594, prN}, // So [26] LEFT HAND TELEPHONE RECEIVER..REVERSED VICTORY HAND
+ {0x1F595, 0x1F596, prW}, // So [2] REVERSED HAND WITH MIDDLE FINGER EXTENDED..RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS
+ {0x1F597, 0x1F5A3, prN}, // So [13] WHITE DOWN POINTING LEFT HAND INDEX..BLACK DOWN POINTING BACKHAND INDEX
+ {0x1F5A4, 0x1F5A4, prW}, // So BLACK HEART
+ {0x1F5A5, 0x1F5FA, prN}, // So [86] DESKTOP COMPUTER..WORLD MAP
+ {0x1F5FB, 0x1F5FF, prW}, // So [5] MOUNT FUJI..MOYAI
+ {0x1F600, 0x1F64F, prW}, // So [80] GRINNING FACE..PERSON WITH FOLDED HANDS
+ {0x1F650, 0x1F67F, prN}, // So [48] NORTH WEST POINTING LEAF..REVERSE CHECKER BOARD
+ {0x1F680, 0x1F6C5, prW}, // So [70] ROCKET..LEFT LUGGAGE
+ {0x1F6C6, 0x1F6CB, prN}, // So [6] TRIANGLE WITH ROUNDED CORNERS..COUCH AND LAMP
+ {0x1F6CC, 0x1F6CC, prW}, // So SLEEPING ACCOMMODATION
+ {0x1F6CD, 0x1F6CF, prN}, // So [3] SHOPPING BAGS..BED
+ {0x1F6D0, 0x1F6D2, prW}, // So [3] PLACE OF WORSHIP..SHOPPING TROLLEY
+ {0x1F6D3, 0x1F6D4, prN}, // So [2] STUPA..PAGODA
+ {0x1F6D5, 0x1F6D7, prW}, // So [3] HINDU TEMPLE..ELEVATOR
+ {0x1F6DD, 0x1F6DF, prW}, // So [3] PLAYGROUND SLIDE..RING BUOY
+ {0x1F6E0, 0x1F6EA, prN}, // So [11] HAMMER AND WRENCH..NORTHEAST-POINTING AIRPLANE
+ {0x1F6EB, 0x1F6EC, prW}, // So [2] AIRPLANE DEPARTURE..AIRPLANE ARRIVING
+ {0x1F6F0, 0x1F6F3, prN}, // So [4] SATELLITE..PASSENGER SHIP
+ {0x1F6F4, 0x1F6FC, prW}, // So [9] SCOOTER..ROLLER SKATE
+ {0x1F700, 0x1F773, prN}, // So [116] ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE
+ {0x1F780, 0x1F7D8, prN}, // So [89] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..NEGATIVE CIRCLED SQUARE
+ {0x1F7E0, 0x1F7EB, prW}, // So [12] LARGE ORANGE CIRCLE..LARGE BROWN SQUARE
+ {0x1F7F0, 0x1F7F0, prW}, // So HEAVY EQUALS SIGN
+ {0x1F800, 0x1F80B, prN}, // So [12] LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD
+ {0x1F810, 0x1F847, prN}, // So [56] LEFTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD..DOWNWARDS HEAVY ARROW
+ {0x1F850, 0x1F859, prN}, // So [10] LEFTWARDS SANS-SERIF ARROW..UP DOWN SANS-SERIF ARROW
+ {0x1F860, 0x1F887, prN}, // So [40] WIDE-HEADED LEFTWARDS LIGHT BARB ARROW..WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW
+ {0x1F890, 0x1F8AD, prN}, // So [30] LEFTWARDS TRIANGLE ARROWHEAD..WHITE ARROW SHAFT WIDTH TWO THIRDS
+ {0x1F8B0, 0x1F8B1, prN}, // So [2] ARROW POINTING UPWARDS THEN NORTH WEST..ARROW POINTING RIGHTWARDS THEN CURVING SOUTH WEST
+ {0x1F900, 0x1F90B, prN}, // So [12] CIRCLED CROSS FORMEE WITH FOUR DOTS..DOWNWARD FACING NOTCHED HOOK WITH DOT
+ {0x1F90C, 0x1F93A, prW}, // So [47] PINCHED FINGERS..FENCER
+ {0x1F93B, 0x1F93B, prN}, // So MODERN PENTATHLON
+ {0x1F93C, 0x1F945, prW}, // So [10] WRESTLERS..GOAL NET
+ {0x1F946, 0x1F946, prN}, // So RIFLE
+ {0x1F947, 0x1F9FF, prW}, // So [185] FIRST PLACE MEDAL..NAZAR AMULET
+ {0x1FA00, 0x1FA53, prN}, // So [84] NEUTRAL CHESS KING..BLACK CHESS KNIGHT-BISHOP
+ {0x1FA60, 0x1FA6D, prN}, // So [14] XIANGQI RED GENERAL..XIANGQI BLACK SOLDIER
+ {0x1FA70, 0x1FA74, prW}, // So [5] BALLET SHOES..THONG SANDAL
+ {0x1FA78, 0x1FA7C, prW}, // So [5] DROP OF BLOOD..CRUTCH
+ {0x1FA80, 0x1FA86, prW}, // So [7] YO-YO..NESTING DOLLS
+ {0x1FA90, 0x1FAAC, prW}, // So [29] RINGED PLANET..HAMSA
+ {0x1FAB0, 0x1FABA, prW}, // So [11] FLY..NEST WITH EGGS
+ {0x1FAC0, 0x1FAC5, prW}, // So [6] ANATOMICAL HEART..PERSON WITH CROWN
+ {0x1FAD0, 0x1FAD9, prW}, // So [10] BLUEBERRIES..JAR
+ {0x1FAE0, 0x1FAE7, prW}, // So [8] MELTING FACE..BUBBLES
+ {0x1FAF0, 0x1FAF6, prW}, // So [7] HAND WITH INDEX FINGER AND THUMB CROSSED..HEART HANDS
+ {0x1FB00, 0x1FB92, prN}, // So [147] BLOCK SEXTANT-1..UPPER HALF INVERSE MEDIUM SHADE AND LOWER HALF BLOCK
+ {0x1FB94, 0x1FBCA, prN}, // So [55] LEFT HALF INVERSE MEDIUM SHADE AND RIGHT HALF BLOCK..WHITE UP-POINTING CHEVRON
+ {0x1FBF0, 0x1FBF9, prN}, // Nd [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE
+ {0x20000, 0x2A6DF, prW}, // Lo [42720] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6DF
+ {0x2A6E0, 0x2A6FF, prW}, // Cn [32] ..
+ {0x2A700, 0x2B738, prW}, // Lo [4153] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B738
+ {0x2B739, 0x2B73F, prW}, // Cn [7] ..
+ {0x2B740, 0x2B81D, prW}, // Lo [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D
+ {0x2B81E, 0x2B81F, prW}, // Cn [2] ..
+ {0x2B820, 0x2CEA1, prW}, // Lo [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1
+ {0x2CEA2, 0x2CEAF, prW}, // Cn [14] ..
+ {0x2CEB0, 0x2EBE0, prW}, // Lo [7473] CJK UNIFIED IDEOGRAPH-2CEB0..CJK UNIFIED IDEOGRAPH-2EBE0
+ {0x2EBE1, 0x2F7FF, prW}, // Cn [3103] ..
+ {0x2F800, 0x2FA1D, prW}, // Lo [542] CJK COMPATIBILITY IDEOGRAPH-2F800..CJK COMPATIBILITY IDEOGRAPH-2FA1D
+ {0x2FA1E, 0x2FA1F, prW}, // Cn [2] ..
+ {0x2FA20, 0x2FFFD, prW}, // Cn [1502] ..
+ {0x30000, 0x3134A, prW}, // Lo [4939] CJK UNIFIED IDEOGRAPH-30000..CJK UNIFIED IDEOGRAPH-3134A
+ {0x3134B, 0x3FFFD, prW}, // Cn [60595] ..
+ {0xE0001, 0xE0001, prN}, // Cf LANGUAGE TAG
+ {0xE0020, 0xE007F, prN}, // Cf [96] TAG SPACE..CANCEL TAG
+ {0xE0100, 0xE01EF, prA}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256
+ {0xF0000, 0xFFFFD, prA}, // Co [65534] ..
+ {0x100000, 0x10FFFD, prA}, // Co [65534] ..
+}
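The generated table above is a sorted list of non-overlapping [first, last, property] triples, so a caller can resolve a rune's width class with a binary search rather than a linear scan. Below is a minimal sketch of such a lookup, assuming hypothetical names (propertyOf, eastAsianWidth) and a two-entry sample of the data; the package's actual lookup code may differ.

package main

import (
	"fmt"
	"sort"
)

// Width properties, mirroring the pr* constants used in the table above.
const (
	prN = iota // neutral / narrow
	prW        // wide
	prA        // ambiguous
)

// eastAsianWidth is a two-entry sample standing in for the generated table:
// sorted, non-overlapping [first, last, property] ranges.
var eastAsianWidth = [][3]int{
	{0x1F300, 0x1F320, prW}, // CYCLONE..SHOOTING STAR
	{0x1F321, 0x1F32C, prN}, // THERMOMETER..WIND BLOWING FACE
}

// propertyOf binary-searches the ranges for r, defaulting to prN when no
// entry covers r.
func propertyOf(r rune) int {
	i := sort.Search(len(eastAsianWidth), func(i int) bool {
		return eastAsianWidth[i][1] >= int(r)
	})
	if i < len(eastAsianWidth) && eastAsianWidth[i][0] <= int(r) {
		return eastAsianWidth[i][2]
	}
	return prN
}

func main() {
	fmt.Println(propertyOf(0x1F300) == prW) // true: CYCLONE renders two cells wide
	fmt.Println(propertyOf(0x1FFFF) == prN) // true: unlisted runes default to neutral
}

Keeping the ranges sorted holds the lookup to O(log n) across the several thousand entries of the full table.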
diff --git a/vendor/github.com/rivo/uniseg/emojipresentation.go b/vendor/github.com/rivo/uniseg/emojipresentation.go
new file mode 100644
index 0000000..fd0f745
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/emojipresentation.go
@@ -0,0 +1,285 @@
+package uniseg
+
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+// emojiPresentation are taken from
+// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// ("Emoji_Presentation" only)
+// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var emojiPresentation = [][3]int{
+ {0x231A, 0x231B, prEmojiPresentation}, // E0.6 [2] (⌚..⌛) watch..hourglass done
+ {0x23E9, 0x23EC, prEmojiPresentation}, // E0.6 [4] (⏩..⏬) fast-forward button..fast down button
+ {0x23F0, 0x23F0, prEmojiPresentation}, // E0.6 [1] (⏰) alarm clock
+ {0x23F3, 0x23F3, prEmojiPresentation}, // E0.6 [1] (⏳) hourglass not done
+ {0x25FD, 0x25FE, prEmojiPresentation}, // E0.6 [2] (◽..◾) white medium-small square..black medium-small square
+ {0x2614, 0x2615, prEmojiPresentation}, // E0.6 [2] (☔..☕) umbrella with rain drops..hot beverage
+ {0x2648, 0x2653, prEmojiPresentation}, // E0.6 [12] (♈..♓) Aries..Pisces
+ {0x267F, 0x267F, prEmojiPresentation}, // E0.6 [1] (♿) wheelchair symbol
+ {0x2693, 0x2693, prEmojiPresentation}, // E0.6 [1] (⚓) anchor
+ {0x26A1, 0x26A1, prEmojiPresentation}, // E0.6 [1] (⚡) high voltage
+ {0x26AA, 0x26AB, prEmojiPresentation}, // E0.6 [2] (⚪..⚫) white circle..black circle
+ {0x26BD, 0x26BE, prEmojiPresentation}, // E0.6 [2] (⚽..⚾) soccer ball..baseball
+ {0x26C4, 0x26C5, prEmojiPresentation}, // E0.6 [2] (⛄..⛅) snowman without snow..sun behind cloud
+ {0x26CE, 0x26CE, prEmojiPresentation}, // E0.6 [1] (⛎) Ophiuchus
+ {0x26D4, 0x26D4, prEmojiPresentation}, // E0.6 [1] (⛔) no entry
+ {0x26EA, 0x26EA, prEmojiPresentation}, // E0.6 [1] (⛪) church
+ {0x26F2, 0x26F3, prEmojiPresentation}, // E0.6 [2] (⛲..⛳) fountain..flag in hole
+ {0x26F5, 0x26F5, prEmojiPresentation}, // E0.6 [1] (⛵) sailboat
+ {0x26FA, 0x26FA, prEmojiPresentation}, // E0.6 [1] (⛺) tent
+ {0x26FD, 0x26FD, prEmojiPresentation}, // E0.6 [1] (⛽) fuel pump
+ {0x2705, 0x2705, prEmojiPresentation}, // E0.6 [1] (✅) check mark button
+ {0x270A, 0x270B, prEmojiPresentation}, // E0.6 [2] (✊..✋) raised fist..raised hand
+ {0x2728, 0x2728, prEmojiPresentation}, // E0.6 [1] (✨) sparkles
+ {0x274C, 0x274C, prEmojiPresentation}, // E0.6 [1] (❌) cross mark
+ {0x274E, 0x274E, prEmojiPresentation}, // E0.6 [1] (❎) cross mark button
+ {0x2753, 0x2755, prEmojiPresentation}, // E0.6 [3] (❓..❕) red question mark..white exclamation mark
+ {0x2757, 0x2757, prEmojiPresentation}, // E0.6 [1] (❗) red exclamation mark
+ {0x2795, 0x2797, prEmojiPresentation}, // E0.6 [3] (➕..➗) plus..divide
+ {0x27B0, 0x27B0, prEmojiPresentation}, // E0.6 [1] (➰) curly loop
+ {0x27BF, 0x27BF, prEmojiPresentation}, // E1.0 [1] (➿) double curly loop
+ {0x2B1B, 0x2B1C, prEmojiPresentation}, // E0.6 [2] (⬛..⬜) black large square..white large square
+ {0x2B50, 0x2B50, prEmojiPresentation}, // E0.6 [1] (⭐) star
+ {0x2B55, 0x2B55, prEmojiPresentation}, // E0.6 [1] (⭕) hollow red circle
+ {0x1F004, 0x1F004, prEmojiPresentation}, // E0.6 [1] (🀄) mahjong red dragon
+ {0x1F0CF, 0x1F0CF, prEmojiPresentation}, // E0.6 [1] (🃏) joker
+ {0x1F18E, 0x1F18E, prEmojiPresentation}, // E0.6 [1] (🆎) AB button (blood type)
+ {0x1F191, 0x1F19A, prEmojiPresentation}, // E0.6 [10] (🆑..🆚) CL button..VS button
+ {0x1F1E6, 0x1F1FF, prEmojiPresentation}, // E0.0 [26] (🇦..🇿) regional indicator symbol letter a..regional indicator symbol letter z
+ {0x1F201, 0x1F201, prEmojiPresentation}, // E0.6 [1] (🈁) Japanese “here” button
+ {0x1F21A, 0x1F21A, prEmojiPresentation}, // E0.6 [1] (🈚) Japanese “free of charge” button
+ {0x1F22F, 0x1F22F, prEmojiPresentation}, // E0.6 [1] (🈯) Japanese “reserved” button
+ {0x1F232, 0x1F236, prEmojiPresentation}, // E0.6 [5] (🈲..🈶) Japanese “prohibited” button..Japanese “not free of charge” button
+ {0x1F238, 0x1F23A, prEmojiPresentation}, // E0.6 [3] (🈸..🈺) Japanese “application” button..Japanese “open for business” button
+ {0x1F250, 0x1F251, prEmojiPresentation}, // E0.6 [2] (🉐..🉑) Japanese “bargain” button..Japanese “acceptable” button
+ {0x1F300, 0x1F30C, prEmojiPresentation}, // E0.6 [13] (🌀..🌌) cyclone..milky way
+ {0x1F30D, 0x1F30E, prEmojiPresentation}, // E0.7 [2] (🌍..🌎) globe showing Europe-Africa..globe showing Americas
+ {0x1F30F, 0x1F30F, prEmojiPresentation}, // E0.6 [1] (🌏) globe showing Asia-Australia
+ {0x1F310, 0x1F310, prEmojiPresentation}, // E1.0 [1] (🌐) globe with meridians
+ {0x1F311, 0x1F311, prEmojiPresentation}, // E0.6 [1] (🌑) new moon
+ {0x1F312, 0x1F312, prEmojiPresentation}, // E1.0 [1] (🌒) waxing crescent moon
+ {0x1F313, 0x1F315, prEmojiPresentation}, // E0.6 [3] (🌓..🌕) first quarter moon..full moon
+ {0x1F316, 0x1F318, prEmojiPresentation}, // E1.0 [3] (🌖..🌘) waning gibbous moon..waning crescent moon
+ {0x1F319, 0x1F319, prEmojiPresentation}, // E0.6 [1] (🌙) crescent moon
+ {0x1F31A, 0x1F31A, prEmojiPresentation}, // E1.0 [1] (🌚) new moon face
+ {0x1F31B, 0x1F31B, prEmojiPresentation}, // E0.6 [1] (🌛) first quarter moon face
+ {0x1F31C, 0x1F31C, prEmojiPresentation}, // E0.7 [1] (🌜) last quarter moon face
+ {0x1F31D, 0x1F31E, prEmojiPresentation}, // E1.0 [2] (🌝..🌞) full moon face..sun with face
+ {0x1F31F, 0x1F320, prEmojiPresentation}, // E0.6 [2] (🌟..🌠) glowing star..shooting star
+ {0x1F32D, 0x1F32F, prEmojiPresentation}, // E1.0 [3] (🌭..🌯) hot dog..burrito
+ {0x1F330, 0x1F331, prEmojiPresentation}, // E0.6 [2] (🌰..🌱) chestnut..seedling
+ {0x1F332, 0x1F333, prEmojiPresentation}, // E1.0 [2] (🌲..🌳) evergreen tree..deciduous tree
+ {0x1F334, 0x1F335, prEmojiPresentation}, // E0.6 [2] (🌴..🌵) palm tree..cactus
+ {0x1F337, 0x1F34A, prEmojiPresentation}, // E0.6 [20] (🌷..🍊) tulip..tangerine
+ {0x1F34B, 0x1F34B, prEmojiPresentation}, // E1.0 [1] (🍋) lemon
+ {0x1F34C, 0x1F34F, prEmojiPresentation}, // E0.6 [4] (🍌..🍏) banana..green apple
+ {0x1F350, 0x1F350, prEmojiPresentation}, // E1.0 [1] (🍐) pear
+ {0x1F351, 0x1F37B, prEmojiPresentation}, // E0.6 [43] (🍑..🍻) peach..clinking beer mugs
+ {0x1F37C, 0x1F37C, prEmojiPresentation}, // E1.0 [1] (🍼) baby bottle
+ {0x1F37E, 0x1F37F, prEmojiPresentation}, // E1.0 [2] (🍾..🍿) bottle with popping cork..popcorn
+ {0x1F380, 0x1F393, prEmojiPresentation}, // E0.6 [20] (🎀..🎓) ribbon..graduation cap
+ {0x1F3A0, 0x1F3C4, prEmojiPresentation}, // E0.6 [37] (🎠..🏄) carousel horse..person surfing
+ {0x1F3C5, 0x1F3C5, prEmojiPresentation}, // E1.0 [1] (🏅) sports medal
+ {0x1F3C6, 0x1F3C6, prEmojiPresentation}, // E0.6 [1] (🏆) trophy
+ {0x1F3C7, 0x1F3C7, prEmojiPresentation}, // E1.0 [1] (🏇) horse racing
+ {0x1F3C8, 0x1F3C8, prEmojiPresentation}, // E0.6 [1] (🏈) american football
+ {0x1F3C9, 0x1F3C9, prEmojiPresentation}, // E1.0 [1] (🏉) rugby football
+ {0x1F3CA, 0x1F3CA, prEmojiPresentation}, // E0.6 [1] (🏊) person swimming
+ {0x1F3CF, 0x1F3D3, prEmojiPresentation}, // E1.0 [5] (🏏..🏓) cricket game..ping pong
+ {0x1F3E0, 0x1F3E3, prEmojiPresentation}, // E0.6 [4] (🏠..🏣) house..Japanese post office
+ {0x1F3E4, 0x1F3E4, prEmojiPresentation}, // E1.0 [1] (🏤) post office
+ {0x1F3E5, 0x1F3F0, prEmojiPresentation}, // E0.6 [12] (🏥..🏰) hospital..castle
+ {0x1F3F4, 0x1F3F4, prEmojiPresentation}, // E1.0 [1] (🏴) black flag
+ {0x1F3F8, 0x1F407, prEmojiPresentation}, // E1.0 [16] (🏸..🐇) badminton..rabbit
+ {0x1F408, 0x1F408, prEmojiPresentation}, // E0.7 [1] (🐈) cat
+ {0x1F409, 0x1F40B, prEmojiPresentation}, // E1.0 [3] (🐉..🐋) dragon..whale
+ {0x1F40C, 0x1F40E, prEmojiPresentation}, // E0.6 [3] (🐌..🐎) snail..horse
+ {0x1F40F, 0x1F410, prEmojiPresentation}, // E1.0 [2] (🐏..🐐) ram..goat
+ {0x1F411, 0x1F412, prEmojiPresentation}, // E0.6 [2] (🐑..🐒) ewe..monkey
+ {0x1F413, 0x1F413, prEmojiPresentation}, // E1.0 [1] (🐓) rooster
+ {0x1F414, 0x1F414, prEmojiPresentation}, // E0.6 [1] (🐔) chicken
+ {0x1F415, 0x1F415, prEmojiPresentation}, // E0.7 [1] (🐕) dog
+ {0x1F416, 0x1F416, prEmojiPresentation}, // E1.0 [1] (🐖) pig
+ {0x1F417, 0x1F429, prEmojiPresentation}, // E0.6 [19] (🐗..🐩) boar..poodle
+ {0x1F42A, 0x1F42A, prEmojiPresentation}, // E1.0 [1] (🐪) camel
+ {0x1F42B, 0x1F43E, prEmojiPresentation}, // E0.6 [20] (🐫..🐾) two-hump camel..paw prints
+ {0x1F440, 0x1F440, prEmojiPresentation}, // E0.6 [1] (👀) eyes
+ {0x1F442, 0x1F464, prEmojiPresentation}, // E0.6 [35] (👂..👤) ear..bust in silhouette
+ {0x1F465, 0x1F465, prEmojiPresentation}, // E1.0 [1] (👥) busts in silhouette
+ {0x1F466, 0x1F46B, prEmojiPresentation}, // E0.6 [6] (👦..👫) boy..woman and man holding hands
+ {0x1F46C, 0x1F46D, prEmojiPresentation}, // E1.0 [2] (👬..👭) men holding hands..women holding hands
+ {0x1F46E, 0x1F4AC, prEmojiPresentation}, // E0.6 [63] (👮..💬) police officer..speech balloon
+ {0x1F4AD, 0x1F4AD, prEmojiPresentation}, // E1.0 [1] (💭) thought balloon
+ {0x1F4AE, 0x1F4B5, prEmojiPresentation}, // E0.6 [8] (💮..💵) white flower..dollar banknote
+ {0x1F4B6, 0x1F4B7, prEmojiPresentation}, // E1.0 [2] (💶..💷) euro banknote..pound banknote
+ {0x1F4B8, 0x1F4EB, prEmojiPresentation}, // E0.6 [52] (💸..📫) money with wings..closed mailbox with raised flag
+ {0x1F4EC, 0x1F4ED, prEmojiPresentation}, // E0.7 [2] (📬..📭) open mailbox with raised flag..open mailbox with lowered flag
+ {0x1F4EE, 0x1F4EE, prEmojiPresentation}, // E0.6 [1] (📮) postbox
+ {0x1F4EF, 0x1F4EF, prEmojiPresentation}, // E1.0 [1] (📯) postal horn
+ {0x1F4F0, 0x1F4F4, prEmojiPresentation}, // E0.6 [5] (📰..📴) newspaper..mobile phone off
+ {0x1F4F5, 0x1F4F5, prEmojiPresentation}, // E1.0 [1] (📵) no mobile phones
+ {0x1F4F6, 0x1F4F7, prEmojiPresentation}, // E0.6 [2] (📶..📷) antenna bars..camera
+ {0x1F4F8, 0x1F4F8, prEmojiPresentation}, // E1.0 [1] (📸) camera with flash
+ {0x1F4F9, 0x1F4FC, prEmojiPresentation}, // E0.6 [4] (📹..📼) video camera..videocassette
+ {0x1F4FF, 0x1F502, prEmojiPresentation}, // E1.0 [4] (📿..🔂) prayer beads..repeat single button
+ {0x1F503, 0x1F503, prEmojiPresentation}, // E0.6 [1] (🔃) clockwise vertical arrows
+ {0x1F504, 0x1F507, prEmojiPresentation}, // E1.0 [4] (🔄..🔇) counterclockwise arrows button..muted speaker
+ {0x1F508, 0x1F508, prEmojiPresentation}, // E0.7 [1] (🔈) speaker low volume
+ {0x1F509, 0x1F509, prEmojiPresentation}, // E1.0 [1] (🔉) speaker medium volume
+ {0x1F50A, 0x1F514, prEmojiPresentation}, // E0.6 [11] (🔊..🔔) speaker high volume..bell
+ {0x1F515, 0x1F515, prEmojiPresentation}, // E1.0 [1] (🔕) bell with slash
+ {0x1F516, 0x1F52B, prEmojiPresentation}, // E0.6 [22] (🔖..🔫) bookmark..water pistol
+ {0x1F52C, 0x1F52D, prEmojiPresentation}, // E1.0 [2] (🔬..🔭) microscope..telescope
+ {0x1F52E, 0x1F53D, prEmojiPresentation}, // E0.6 [16] (🔮..🔽) crystal ball..downwards button
+ {0x1F54B, 0x1F54E, prEmojiPresentation}, // E1.0 [4] (🕋..🕎) kaaba..menorah
+ {0x1F550, 0x1F55B, prEmojiPresentation}, // E0.6 [12] (🕐..🕛) one o’clock..twelve o’clock
+ {0x1F55C, 0x1F567, prEmojiPresentation}, // E0.7 [12] (🕜..🕧) one-thirty..twelve-thirty
+ {0x1F57A, 0x1F57A, prEmojiPresentation}, // E3.0 [1] (🕺) man dancing
+ {0x1F595, 0x1F596, prEmojiPresentation}, // E1.0 [2] (🖕..🖖) middle finger..vulcan salute
+ {0x1F5A4, 0x1F5A4, prEmojiPresentation}, // E3.0 [1] (🖤) black heart
+ {0x1F5FB, 0x1F5FF, prEmojiPresentation}, // E0.6 [5] (🗻..🗿) mount fuji..moai
+ {0x1F600, 0x1F600, prEmojiPresentation}, // E1.0 [1] (😀) grinning face
+ {0x1F601, 0x1F606, prEmojiPresentation}, // E0.6 [6] (😁..😆) beaming face with smiling eyes..grinning squinting face
+ {0x1F607, 0x1F608, prEmojiPresentation}, // E1.0 [2] (😇..😈) smiling face with halo..smiling face with horns
+ {0x1F609, 0x1F60D, prEmojiPresentation}, // E0.6 [5] (😉..😍) winking face..smiling face with heart-eyes
+ {0x1F60E, 0x1F60E, prEmojiPresentation}, // E1.0 [1] (😎) smiling face with sunglasses
+ {0x1F60F, 0x1F60F, prEmojiPresentation}, // E0.6 [1] (😏) smirking face
+ {0x1F610, 0x1F610, prEmojiPresentation}, // E0.7 [1] (😐) neutral face
+ {0x1F611, 0x1F611, prEmojiPresentation}, // E1.0 [1] (😑) expressionless face
+ {0x1F612, 0x1F614, prEmojiPresentation}, // E0.6 [3] (😒..😔) unamused face..pensive face
+ {0x1F615, 0x1F615, prEmojiPresentation}, // E1.0 [1] (😕) confused face
+ {0x1F616, 0x1F616, prEmojiPresentation}, // E0.6 [1] (😖) confounded face
+ {0x1F617, 0x1F617, prEmojiPresentation}, // E1.0 [1] (😗) kissing face
+ {0x1F618, 0x1F618, prEmojiPresentation}, // E0.6 [1] (😘) face blowing a kiss
+ {0x1F619, 0x1F619, prEmojiPresentation}, // E1.0 [1] (😙) kissing face with smiling eyes
+ {0x1F61A, 0x1F61A, prEmojiPresentation}, // E0.6 [1] (😚) kissing face with closed eyes
+ {0x1F61B, 0x1F61B, prEmojiPresentation}, // E1.0 [1] (😛) face with tongue
+ {0x1F61C, 0x1F61E, prEmojiPresentation}, // E0.6 [3] (😜..😞) winking face with tongue..disappointed face
+ {0x1F61F, 0x1F61F, prEmojiPresentation}, // E1.0 [1] (😟) worried face
+ {0x1F620, 0x1F625, prEmojiPresentation}, // E0.6 [6] (😠..😥) angry face..sad but relieved face
+ {0x1F626, 0x1F627, prEmojiPresentation}, // E1.0 [2] (😦..😧) frowning face with open mouth..anguished face
+ {0x1F628, 0x1F62B, prEmojiPresentation}, // E0.6 [4] (😨..😫) fearful face..tired face
+ {0x1F62C, 0x1F62C, prEmojiPresentation}, // E1.0 [1] (😬) grimacing face
+ {0x1F62D, 0x1F62D, prEmojiPresentation}, // E0.6 [1] (😭) loudly crying face
+ {0x1F62E, 0x1F62F, prEmojiPresentation}, // E1.0 [2] (😮..😯) face with open mouth..hushed face
+ {0x1F630, 0x1F633, prEmojiPresentation}, // E0.6 [4] (😰..😳) anxious face with sweat..flushed face
+ {0x1F634, 0x1F634, prEmojiPresentation}, // E1.0 [1] (😴) sleeping face
+ {0x1F635, 0x1F635, prEmojiPresentation}, // E0.6 [1] (😵) face with crossed-out eyes
+ {0x1F636, 0x1F636, prEmojiPresentation}, // E1.0 [1] (😶) face without mouth
+ {0x1F637, 0x1F640, prEmojiPresentation}, // E0.6 [10] (😷..🙀) face with medical mask..weary cat
+ {0x1F641, 0x1F644, prEmojiPresentation}, // E1.0 [4] (🙁..🙄) slightly frowning face..face with rolling eyes
+ {0x1F645, 0x1F64F, prEmojiPresentation}, // E0.6 [11] (🙅..🙏) person gesturing NO..folded hands
+ {0x1F680, 0x1F680, prEmojiPresentation}, // E0.6 [1] (🚀) rocket
+ {0x1F681, 0x1F682, prEmojiPresentation}, // E1.0 [2] (🚁..🚂) helicopter..locomotive
+ {0x1F683, 0x1F685, prEmojiPresentation}, // E0.6 [3] (🚃..🚅) railway car..bullet train
+ {0x1F686, 0x1F686, prEmojiPresentation}, // E1.0 [1] (🚆) train
+ {0x1F687, 0x1F687, prEmojiPresentation}, // E0.6 [1] (🚇) metro
+ {0x1F688, 0x1F688, prEmojiPresentation}, // E1.0 [1] (🚈) light rail
+ {0x1F689, 0x1F689, prEmojiPresentation}, // E0.6 [1] (🚉) station
+ {0x1F68A, 0x1F68B, prEmojiPresentation}, // E1.0 [2] (🚊..🚋) tram..tram car
+ {0x1F68C, 0x1F68C, prEmojiPresentation}, // E0.6 [1] (🚌) bus
+ {0x1F68D, 0x1F68D, prEmojiPresentation}, // E0.7 [1] (🚍) oncoming bus
+ {0x1F68E, 0x1F68E, prEmojiPresentation}, // E1.0 [1] (🚎) trolleybus
+ {0x1F68F, 0x1F68F, prEmojiPresentation}, // E0.6 [1] (🚏) bus stop
+ {0x1F690, 0x1F690, prEmojiPresentation}, // E1.0 [1] (🚐) minibus
+ {0x1F691, 0x1F693, prEmojiPresentation}, // E0.6 [3] (🚑..🚓) ambulance..police car
+ {0x1F694, 0x1F694, prEmojiPresentation}, // E0.7 [1] (🚔) oncoming police car
+ {0x1F695, 0x1F695, prEmojiPresentation}, // E0.6 [1] (🚕) taxi
+ {0x1F696, 0x1F696, prEmojiPresentation}, // E1.0 [1] (🚖) oncoming taxi
+ {0x1F697, 0x1F697, prEmojiPresentation}, // E0.6 [1] (🚗) automobile
+ {0x1F698, 0x1F698, prEmojiPresentation}, // E0.7 [1] (🚘) oncoming automobile
+ {0x1F699, 0x1F69A, prEmojiPresentation}, // E0.6 [2] (🚙..🚚) sport utility vehicle..delivery truck
+ {0x1F69B, 0x1F6A1, prEmojiPresentation}, // E1.0 [7] (🚛..🚡) articulated lorry..aerial tramway
+ {0x1F6A2, 0x1F6A2, prEmojiPresentation}, // E0.6 [1] (🚢) ship
+ {0x1F6A3, 0x1F6A3, prEmojiPresentation}, // E1.0 [1] (🚣) person rowing boat
+ {0x1F6A4, 0x1F6A5, prEmojiPresentation}, // E0.6 [2] (🚤..🚥) speedboat..horizontal traffic light
+ {0x1F6A6, 0x1F6A6, prEmojiPresentation}, // E1.0 [1] (🚦) vertical traffic light
+ {0x1F6A7, 0x1F6AD, prEmojiPresentation}, // E0.6 [7] (🚧..🚭) construction..no smoking
+ {0x1F6AE, 0x1F6B1, prEmojiPresentation}, // E1.0 [4] (🚮..🚱) litter in bin sign..non-potable water
+ {0x1F6B2, 0x1F6B2, prEmojiPresentation}, // E0.6 [1] (🚲) bicycle
+ {0x1F6B3, 0x1F6B5, prEmojiPresentation}, // E1.0 [3] (🚳..🚵) no bicycles..person mountain biking
+ {0x1F6B6, 0x1F6B6, prEmojiPresentation}, // E0.6 [1] (🚶) person walking
+ {0x1F6B7, 0x1F6B8, prEmojiPresentation}, // E1.0 [2] (🚷..🚸) no pedestrians..children crossing
+ {0x1F6B9, 0x1F6BE, prEmojiPresentation}, // E0.6 [6] (🚹..🚾) men’s room..water closet
+ {0x1F6BF, 0x1F6BF, prEmojiPresentation}, // E1.0 [1] (🚿) shower
+ {0x1F6C0, 0x1F6C0, prEmojiPresentation}, // E0.6 [1] (🛀) person taking bath
+ {0x1F6C1, 0x1F6C5, prEmojiPresentation}, // E1.0 [5] (🛁..🛅) bathtub..left luggage
+ {0x1F6CC, 0x1F6CC, prEmojiPresentation}, // E1.0 [1] (🛌) person in bed
+ {0x1F6D0, 0x1F6D0, prEmojiPresentation}, // E1.0 [1] (🛐) place of worship
+ {0x1F6D1, 0x1F6D2, prEmojiPresentation}, // E3.0 [2] (🛑..🛒) stop sign..shopping cart
+ {0x1F6D5, 0x1F6D5, prEmojiPresentation}, // E12.0 [1] (🛕) hindu temple
+ {0x1F6D6, 0x1F6D7, prEmojiPresentation}, // E13.0 [2] (🛖..🛗) hut..elevator
+ {0x1F6DD, 0x1F6DF, prEmojiPresentation}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
+ {0x1F6EB, 0x1F6EC, prEmojiPresentation}, // E1.0 [2] (🛫..🛬) airplane departure..airplane arrival
+ {0x1F6F4, 0x1F6F6, prEmojiPresentation}, // E3.0 [3] (🛴..🛶) kick scooter..canoe
+ {0x1F6F7, 0x1F6F8, prEmojiPresentation}, // E5.0 [2] (🛷..🛸) sled..flying saucer
+ {0x1F6F9, 0x1F6F9, prEmojiPresentation}, // E11.0 [1] (🛹) skateboard
+ {0x1F6FA, 0x1F6FA, prEmojiPresentation}, // E12.0 [1] (🛺) auto rickshaw
+ {0x1F6FB, 0x1F6FC, prEmojiPresentation}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate
+ {0x1F7E0, 0x1F7EB, prEmojiPresentation}, // E12.0 [12] (🟠..🟫) orange circle..brown square
+ {0x1F7F0, 0x1F7F0, prEmojiPresentation}, // E14.0 [1] (🟰) heavy equals sign
+ {0x1F90C, 0x1F90C, prEmojiPresentation}, // E13.0 [1] (🤌) pinched fingers
+ {0x1F90D, 0x1F90F, prEmojiPresentation}, // E12.0 [3] (🤍..🤏) white heart..pinching hand
+ {0x1F910, 0x1F918, prEmojiPresentation}, // E1.0 [9] (🤐..🤘) zipper-mouth face..sign of the horns
+ {0x1F919, 0x1F91E, prEmojiPresentation}, // E3.0 [6] (🤙..🤞) call me hand..crossed fingers
+ {0x1F91F, 0x1F91F, prEmojiPresentation}, // E5.0 [1] (🤟) love-you gesture
+ {0x1F920, 0x1F927, prEmojiPresentation}, // E3.0 [8] (🤠..🤧) cowboy hat face..sneezing face
+ {0x1F928, 0x1F92F, prEmojiPresentation}, // E5.0 [8] (🤨..🤯) face with raised eyebrow..exploding head
+ {0x1F930, 0x1F930, prEmojiPresentation}, // E3.0 [1] (🤰) pregnant woman
+ {0x1F931, 0x1F932, prEmojiPresentation}, // E5.0 [2] (🤱..🤲) breast-feeding..palms up together
+ {0x1F933, 0x1F93A, prEmojiPresentation}, // E3.0 [8] (🤳..🤺) selfie..person fencing
+ {0x1F93C, 0x1F93E, prEmojiPresentation}, // E3.0 [3] (🤼..🤾) people wrestling..person playing handball
+ {0x1F93F, 0x1F93F, prEmojiPresentation}, // E12.0 [1] (🤿) diving mask
+ {0x1F940, 0x1F945, prEmojiPresentation}, // E3.0 [6] (🥀..🥅) wilted flower..goal net
+ {0x1F947, 0x1F94B, prEmojiPresentation}, // E3.0 [5] (🥇..🥋) 1st place medal..martial arts uniform
+ {0x1F94C, 0x1F94C, prEmojiPresentation}, // E5.0 [1] (🥌) curling stone
+ {0x1F94D, 0x1F94F, prEmojiPresentation}, // E11.0 [3] (🥍..🥏) lacrosse..flying disc
+ {0x1F950, 0x1F95E, prEmojiPresentation}, // E3.0 [15] (🥐..🥞) croissant..pancakes
+ {0x1F95F, 0x1F96B, prEmojiPresentation}, // E5.0 [13] (🥟..🥫) dumpling..canned food
+ {0x1F96C, 0x1F970, prEmojiPresentation}, // E11.0 [5] (🥬..🥰) leafy green..smiling face with hearts
+ {0x1F971, 0x1F971, prEmojiPresentation}, // E12.0 [1] (🥱) yawning face
+ {0x1F972, 0x1F972, prEmojiPresentation}, // E13.0 [1] (🥲) smiling face with tear
+ {0x1F973, 0x1F976, prEmojiPresentation}, // E11.0 [4] (🥳..🥶) partying face..cold face
+ {0x1F977, 0x1F978, prEmojiPresentation}, // E13.0 [2] (🥷..🥸) ninja..disguised face
+ {0x1F979, 0x1F979, prEmojiPresentation}, // E14.0 [1] (🥹) face holding back tears
+ {0x1F97A, 0x1F97A, prEmojiPresentation}, // E11.0 [1] (🥺) pleading face
+ {0x1F97B, 0x1F97B, prEmojiPresentation}, // E12.0 [1] (🥻) sari
+ {0x1F97C, 0x1F97F, prEmojiPresentation}, // E11.0 [4] (🥼..🥿) lab coat..flat shoe
+ {0x1F980, 0x1F984, prEmojiPresentation}, // E1.0 [5] (🦀..🦄) crab..unicorn
+ {0x1F985, 0x1F991, prEmojiPresentation}, // E3.0 [13] (🦅..🦑) eagle..squid
+ {0x1F992, 0x1F997, prEmojiPresentation}, // E5.0 [6] (🦒..🦗) giraffe..cricket
+ {0x1F998, 0x1F9A2, prEmojiPresentation}, // E11.0 [11] (🦘..🦢) kangaroo..swan
+ {0x1F9A3, 0x1F9A4, prEmojiPresentation}, // E13.0 [2] (🦣..🦤) mammoth..dodo
+ {0x1F9A5, 0x1F9AA, prEmojiPresentation}, // E12.0 [6] (🦥..🦪) sloth..oyster
+ {0x1F9AB, 0x1F9AD, prEmojiPresentation}, // E13.0 [3] (🦫..🦭) beaver..seal
+ {0x1F9AE, 0x1F9AF, prEmojiPresentation}, // E12.0 [2] (🦮..🦯) guide dog..white cane
+ {0x1F9B0, 0x1F9B9, prEmojiPresentation}, // E11.0 [10] (🦰..🦹) red hair..supervillain
+ {0x1F9BA, 0x1F9BF, prEmojiPresentation}, // E12.0 [6] (🦺..🦿) safety vest..mechanical leg
+ {0x1F9C0, 0x1F9C0, prEmojiPresentation}, // E1.0 [1] (🧀) cheese wedge
+ {0x1F9C1, 0x1F9C2, prEmojiPresentation}, // E11.0 [2] (🧁..🧂) cupcake..salt
+ {0x1F9C3, 0x1F9CA, prEmojiPresentation}, // E12.0 [8] (🧃..🧊) beverage box..ice
+ {0x1F9CB, 0x1F9CB, prEmojiPresentation}, // E13.0 [1] (🧋) bubble tea
+ {0x1F9CC, 0x1F9CC, prEmojiPresentation}, // E14.0 [1] (🧌) troll
+ {0x1F9CD, 0x1F9CF, prEmojiPresentation}, // E12.0 [3] (🧍..🧏) person standing..deaf person
+ {0x1F9D0, 0x1F9E6, prEmojiPresentation}, // E5.0 [23] (🧐..🧦) face with monocle..socks
+ {0x1F9E7, 0x1F9FF, prEmojiPresentation}, // E11.0 [25] (🧧..🧿) red envelope..nazar amulet
+ {0x1FA70, 0x1FA73, prEmojiPresentation}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
+ {0x1FA74, 0x1FA74, prEmojiPresentation}, // E13.0 [1] (🩴) thong sandal
+ {0x1FA78, 0x1FA7A, prEmojiPresentation}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
+ {0x1FA7B, 0x1FA7C, prEmojiPresentation}, // E14.0 [2] (🩻..🩼) x-ray..crutch
+ {0x1FA80, 0x1FA82, prEmojiPresentation}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
+ {0x1FA83, 0x1FA86, prEmojiPresentation}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
+ {0x1FA90, 0x1FA95, prEmojiPresentation}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
+ {0x1FA96, 0x1FAA8, prEmojiPresentation}, // E13.0 [19] (🪖..🪨) military helmet..rock
+ {0x1FAA9, 0x1FAAC, prEmojiPresentation}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
+ {0x1FAB0, 0x1FAB6, prEmojiPresentation}, // E13.0 [7] (🪰..🪶) fly..feather
+ {0x1FAB7, 0x1FABA, prEmojiPresentation}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
+ {0x1FAC0, 0x1FAC2, prEmojiPresentation}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
+ {0x1FAC3, 0x1FAC5, prEmojiPresentation}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
+ {0x1FAD0, 0x1FAD6, prEmojiPresentation}, // E13.0 [7] (🫐..🫖) blueberries..teapot
+ {0x1FAD7, 0x1FAD9, prEmojiPresentation}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
+ {0x1FAE0, 0x1FAE7, prEmojiPresentation}, // E14.0 [8] (🫠..🫧) melting face..bubbles
+ {0x1FAF0, 0x1FAF6, prEmojiPresentation}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
+}
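The ranges above only record which runes default to emoji presentation; an explicit variation selector still overrides that default. The sketch below shows the idea under stated assumptions: presentsAsEmoji and hasEmojiPresentation are hypothetical helpers, and emojiDefault is a two-entry sample copied from the table (the full table would be binary-searched as sketched earlier).

package main

import "fmt"

// VARIATION SELECTOR-15 forces text presentation; VARIATION SELECTOR-16
// forces emoji presentation.
const (
	vs15 = '\uFE0E'
	vs16 = '\uFE0F'
)

// emojiDefault is a two-entry sample of the emojiPresentation ranges.
var emojiDefault = [][2]rune{
	{0x231A, 0x231B}, // watch..hourglass done
	{0x2614, 0x2615}, // umbrella with rain drops..hot beverage
}

// hasEmojiPresentation linearly scans the sample ranges.
func hasEmojiPresentation(r rune) bool {
	for _, rng := range emojiDefault {
		if rng[0] <= r && r <= rng[1] {
			return true
		}
	}
	return false
}

// presentsAsEmoji lets an explicit selector win before falling back to the
// table default.
func presentsAsEmoji(r, next rune) bool {
	switch next {
	case vs16:
		return true
	case vs15:
		return false
	}
	return hasEmojiPresentation(r)
}

func main() {
	fmt.Println(presentsAsEmoji('\u231A', 0))    // true: watch defaults to emoji
	fmt.Println(presentsAsEmoji('\u231A', vs15)) // false: VS15 forces text
}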
diff --git a/vendor/github.com/rivo/uniseg/gen_breaktest.go b/vendor/github.com/rivo/uniseg/gen_breaktest.go
new file mode 100644
index 0000000..e613c4c
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/gen_breaktest.go
@@ -0,0 +1,213 @@
+//go:build generate
+
+// This program generates a Go file containing a slice of test cases based on the
+// Unicode Character Database auxiliary data files. The command line arguments
+// are as follows:
+//
+// 1. The name of the Unicode data file (just the filename, without extension).
+// 2. The name of the locally generated Go file.
+// 3. The name of the slice containing the test cases.
+// 4. The name of the generator, for logging purposes.
+//
+//go:generate go run gen_breaktest.go GraphemeBreakTest graphemebreak_test.go graphemeBreakTestCases graphemes
+//go:generate go run gen_breaktest.go WordBreakTest wordbreak_test.go wordBreakTestCases words
+//go:generate go run gen_breaktest.go SentenceBreakTest sentencebreak_test.go sentenceBreakTestCases sentences
+//go:generate go run gen_breaktest.go LineBreakTest linebreak_test.go lineBreakTestCases lines
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "go/format"
+ "io/ioutil"
+ "log"
+ "net/http"
+ "os"
+ "time"
+)
+
+// We want to test against a specific version rather than the latest. When the
+// package is upgraded to a new version, change these to generate new tests.
+const (
+ testCaseURL = `https://www.unicode.org/Public/14.0.0/ucd/auxiliary/%s.txt`
+)
+
+func main() {
+ if len(os.Args) < 5 {
+ fmt.Println("Not enough arguments, see code for details")
+ os.Exit(1)
+ }
+
+ log.SetPrefix("gen_breaktest (" + os.Args[4] + "): ")
+ log.SetFlags(0)
+
+ // Read text of testcases and parse into Go source code.
+ src, err := parse(fmt.Sprintf(testCaseURL, os.Args[1]))
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Format the Go code.
+ formatted, err := format.Source(src)
+ if err != nil {
+ log.Fatalln("gofmt:", err)
+ }
+
+ // Write it out.
+ log.Print("Writing to ", os.Args[2])
+ if err := ioutil.WriteFile(os.Args[2], formatted, 0644); err != nil {
+ log.Fatal(err)
+ }
+}
+
+// parse fetches a break test file from the given URL and parses its contents
+// into Go source code representing the test cases.
+func parse(url string) ([]byte, error) {
+ log.Printf("Parsing %s", url)
+ res, err := http.Get(url)
+ if err != nil {
+ return nil, err
+ }
+ body := res.Body
+ defer body.Close()
+
+ buf := new(bytes.Buffer)
+ buf.Grow(120 << 10)
+ buf.WriteString(`package uniseg
+
+// Code generated via go generate from gen_breaktest.go. DO NOT EDIT.
+
+// ` + os.Args[3] + ` are test cases taken from
+// ` + url + `
+// on ` + time.Now().Format("January 2, 2006") + `. See
+// https://www.unicode.org/license.html for the Unicode license agreement.
+var ` + os.Args[3] + ` = []testCase {
+`)
+
+ sc := bufio.NewScanner(body)
+ num := 0
+ var line []byte
+ original := make([]byte, 0, 64)
+ expected := make([]byte, 0, 64)
+ for sc.Scan() {
+ num++
+ line = sc.Bytes()
+ if len(line) == 0 || line[0] == '#' {
+ continue
+ }
+ var comment []byte
+ if i := bytes.IndexByte(line, '#'); i >= 0 {
+ comment = bytes.TrimSpace(line[i+1:])
+ line = bytes.TrimSpace(line[:i])
+ }
+ original, expected, err := parseRuneSequence(line, original[:0], expected[:0])
+ if err != nil {
+ return nil, fmt.Errorf(`line %d: %v: %q`, num, err, line)
+ }
+ fmt.Fprintf(buf, "\t{original: \"%s\", expected: %s}, // %s\n", original, expected, comment)
+ }
+ if err := sc.Err(); err != nil {
+ return nil, err
+ }
+
+ // Check for the final "# EOF" line, a useful sanity check when streaming
+ // via HTTP.
+ if !bytes.Equal(line, []byte("# EOF")) {
+ return nil, fmt.Errorf(`line %d: expected "# EOF" as final line, got %q`, num, line)
+ }
+ buf.WriteString("}\n")
+ return buf.Bytes(), nil
+}
+
+// Used by parseRuneSequence to match input via bytes.HasPrefix.
+var (
+ prefixBreak = []byte("÷ ")
+ prefixDontBreak = []byte("× ")
+ breakOk = []byte("÷")
+ breakNo = []byte("×")
+)
+
+// parseRuneSequence parses a rune + breaking opportunity sequence from b
+// and appends the Go code for testcase.original to orig
+// and appends the Go code for testcase.expected to exp.
+// It returns the new orig and exp slices.
+//
+// E.g. for the input b="÷ 0020 × 0308 ÷ 1F1E6 ÷"
+// it will append
+// "\u0020\u0308\U0001F1E6"
+// and "[][]rune{{0x0020,0x0308},{0x1F1E6},}"
+// to orig and exp respectively.
+//
+// The formatting of exp is expected to be cleaned up by gofmt or format.Source.
+// Note we explicitly require the sequence to start with ÷ and we implicitly
+// require it to end with ÷.
+func parseRuneSequence(b, orig, exp []byte) ([]byte, []byte, error) {
+ // Check for and remove first ÷ or ×.
+ if !bytes.HasPrefix(b, prefixBreak) && !bytes.HasPrefix(b, prefixDontBreak) {
+ return nil, nil, errors.New("expected ÷ or × as first character")
+ }
+ if bytes.HasPrefix(b, prefixBreak) {
+ b = b[len(prefixBreak):]
+ } else {
+ b = b[len(prefixDontBreak):]
+ }
+
+ boundary := true
+ exp = append(exp, "[][]rune{"...)
+ for len(b) > 0 {
+ if boundary {
+ exp = append(exp, '{')
+ }
+ exp = append(exp, "0x"...)
+ // Find end of hex digits.
+ var i int
+ for i = 0; i < len(b) && b[i] != ' '; i++ {
+ if d := b[i]; ('0' <= d && d <= '9') ||
+ ('A' <= d && d <= 'F') ||
+ ('a' <= d && d <= 'f') {
+ continue
+ }
+ return nil, nil, errors.New("bad hex digit")
+ }
+ switch i {
+ case 4:
+ orig = append(orig, "\\u"...)
+ case 5:
+ orig = append(orig, "\\U000"...)
+ default:
+ return nil, nil, errors.New("unsupported code point hex length")
+ }
+ orig = append(orig, b[:i]...)
+ exp = append(exp, b[:i]...)
+ b = b[i:]
+
+ // Check for space between hex and ÷ or ×.
+ if len(b) < 1 || b[0] != ' ' {
+ return nil, nil, errors.New("bad input")
+ }
+ b = b[1:]
+
+ // Check for next boundary.
+ switch {
+ case bytes.HasPrefix(b, breakOk):
+ boundary = true
+ b = b[len(breakOk):]
+ case bytes.HasPrefix(b, breakNo):
+ boundary = false
+ b = b[len(breakNo):]
+ default:
+ return nil, nil, errors.New("missing ÷ or ×")
+ }
+ if boundary {
+ exp = append(exp, '}')
+ }
+ exp = append(exp, ',')
+ if len(b) > 0 && b[0] == ' ' {
+ b = b[1:]
+ }
+ }
+ exp = append(exp, '}')
+ return orig, exp, nil
+}
diff --git a/vendor/github.com/rivo/uniseg/gen_properties.go b/vendor/github.com/rivo/uniseg/gen_properties.go
new file mode 100644
index 0000000..999d5ef
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/gen_properties.go
@@ -0,0 +1,256 @@
+//go:build generate
+
+// This program generates a property file in Go from the Unicode Character
+// Database auxiliary data files. The command line arguments are as follows:
+//
+// 1. The name of the Unicode data file (just the filename, without extension).
+// Can be "-" (to skip) if the emoji flag is included.
+// 2. The name of the locally generated Go file.
+// 3. The name of the slice mapping code points to properties.
+// 4. The name of the generator, for logging purposes.
+// 5. (Optional) Flags, comma-separated. The following flags are available:
+// - "emojis=": include the specified emoji properties (e.g.
+// "Extended_Pictographic").
+// - "gencat": include general category properties.
+//
+//go:generate go run gen_properties.go auxiliary/GraphemeBreakProperty graphemeproperties.go graphemeCodePoints graphemes emojis=Extended_Pictographic
+//go:generate go run gen_properties.go auxiliary/WordBreakProperty wordproperties.go workBreakCodePoints words emojis=Extended_Pictographic
+//go:generate go run gen_properties.go auxiliary/SentenceBreakProperty sentenceproperties.go sentenceBreakCodePoints sentences
+//go:generate go run gen_properties.go LineBreak lineproperties.go lineBreakCodePoints lines gencat
+//go:generate go run gen_properties.go EastAsianWidth eastasianwidth.go eastAsianWidth eastasianwidth
+//go:generate go run gen_properties.go - emojipresentation.go emojiPresentation emojipresentation emojis=Emoji_Presentation
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "go/format"
+ "io/ioutil"
+ "log"
+ "net/http"
+ "os"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// We want to use a specific version rather than the latest. When the package
+// is upgraded to a new version, change these to generate new properties.
+const (
+ propertyURL = `https://www.unicode.org/Public/14.0.0/ucd/%s.txt`
+ emojiURL = `https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt`
+)
+
+// The regular expression for a line containing a code point range property.
+var propertyPattern = regexp.MustCompile(`^([0-9A-F]{4,6})(\.\.([0-9A-F]{4,6}))?\s*;\s*([A-Za-z0-9_]+)\s*#\s(.+)$`)
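+
+// As an illustration (this comment is not part of the upstream source), a
+// typical data file line such as
+//
+//	0600..0605    ; Prepend # Cf   [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+//
+// matches with submatch [1]="0600", [3]="0605", [4]="Prepend", and [5] holding
+// the trailing comment. Single code points omit the ".." part, leaving [3]
+// empty.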
+
+func main() {
+ if len(os.Args) < 5 {
+ fmt.Println("Not enough arguments, see code for details")
+ os.Exit(1)
+ }
+
+ log.SetPrefix("gen_properties (" + os.Args[4] + "): ")
+ log.SetFlags(0)
+
+ // Parse flags.
+ flags := make(map[string]string)
+ if len(os.Args) >= 6 {
+ for _, flag := range strings.Split(os.Args[5], ",") {
+ flagFields := strings.Split(flag, "=")
+ if len(flagFields) == 1 {
+ flags[flagFields[0]] = "yes"
+ } else {
+ flags[flagFields[0]] = flagFields[1]
+ }
+ }
+ }
+
+ // Parse the text file and generate Go source code from it.
+ _, includeGeneralCategory := flags["gencat"]
+ var mainURL string
+ if os.Args[1] != "-" {
+ mainURL = fmt.Sprintf(propertyURL, os.Args[1])
+ }
+ src, err := parse(mainURL, flags["emojis"], includeGeneralCategory)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Format the Go code.
+ formatted, err := format.Source([]byte(src))
+ if err != nil {
+ log.Fatal("gofmt:", err)
+ }
+
+ // Save it to the (local) target file.
+ log.Print("Writing to ", os.Args[2])
+ if err := ioutil.WriteFile(os.Args[2], formatted, 0644); err != nil {
+ log.Fatal(err)
+ }
+}
+
+// parse parses the Unicode Properties text files located at the given URLs and
+// returns their equivalent Go source code to be used in the uniseg package. If
+// "emojiProperty" is not an empty string, emoji code points for that emoji
+// property (e.g. "Extended_Pictographic") will be included. In those cases, you
+// may pass an empty "propertyURL" to skip parsing the main properties file. If
+// "includeGeneralCategory" is true, the Unicode General Category property will
+// be extracted from the comments and included in the output.
+func parse(propertyURL, emojiProperty string, includeGeneralCategory bool) (string, error) {
+ if propertyURL == "" && emojiProperty == "" {
+ return "", errors.New("no properties to parse")
+ }
+
+ // Temporary buffer to hold properties.
+ var properties [][4]string
+
+ // Open the first URL.
+ if propertyURL != "" {
+ log.Printf("Parsing %s", propertyURL)
+ res, err := http.Get(propertyURL)
+ if err != nil {
+ return "", err
+ }
+ in1 := res.Body
+ defer in1.Close()
+
+ // Parse it.
+ scanner := bufio.NewScanner(in1)
+ num := 0
+ for scanner.Scan() {
+ num++
+ line := strings.TrimSpace(scanner.Text())
+
+ // Skip comments and empty lines.
+ if strings.HasPrefix(line, "#") || line == "" {
+ continue
+ }
+
+ // Everything else must be a code point range, a property and a comment.
+ from, to, property, comment, err := parseProperty(line)
+ if err != nil {
+ return "", fmt.Errorf("%s line %d: %v", os.Args[4], num, err)
+ }
+ properties = append(properties, [4]string{from, to, property, comment})
+ }
+ if err := scanner.Err(); err != nil {
+ return "", err
+ }
+ }
+
+ // Open the second URL.
+ if emojiProperty != "" {
+ log.Printf("Parsing %s", emojiURL)
+ res, err := http.Get(emojiURL)
+ if err != nil {
+ return "", err
+ }
+ in2 := res.Body
+ defer in2.Close()
+
+ // Parse it.
+ scanner := bufio.NewScanner(in2)
+ num := 0
+ for scanner.Scan() {
+ num++
+ line := scanner.Text()
+
+ // Skip comments, empty lines, and everything not containing the
+ // requested emoji property.
+ if strings.HasPrefix(line, "#") || line == "" || !strings.Contains(line, emojiProperty) {
+ continue
+ }
+
+ // Everything else must be a code point range, a property and a comment.
+ from, to, property, comment, err := parseProperty(line)
+ if err != nil {
+ return "", fmt.Errorf("emojis line %d: %v", num, err)
+ }
+ properties = append(properties, [4]string{from, to, property, comment})
+ }
+ if err := scanner.Err(); err != nil {
+ return "", err
+ }
+ }
+
+ // Sort properties.
+ sort.Slice(properties, func(i, j int) bool {
+ left, _ := strconv.ParseUint(properties[i][0], 16, 64)
+ right, _ := strconv.ParseUint(properties[j][0], 16, 64)
+ return left < right
+ })
+
+ // Header.
+ var (
+ buf bytes.Buffer
+ emojiComment string
+ )
+ columns := 3
+ if includeGeneralCategory {
+ columns = 4
+ }
+ if emojiProperty != "" {
+ emojiComment = `
+// and
+// ` + emojiURL + `
+// ("` + emojiProperty + `" only)`
+ }
+ buf.WriteString(`package uniseg
+
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+// ` + os.Args[3] + ` are taken from
+// ` + propertyURL + emojiComment + `
+// on ` + time.Now().Format("January 2, 2006") + `. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var ` + os.Args[3] + ` = [][` + strconv.Itoa(columns) + `]int{
+ `)
+
+ // Properties.
+ for _, prop := range properties {
+ if includeGeneralCategory {
+ generalCategory := "gc" + prop[3][:2]
+ if generalCategory == "gcL&" {
+ generalCategory = "gcLC"
+ }
+ prop[3] = prop[3][3:]
+ fmt.Fprintf(&buf, "{0x%s,0x%s,%s,%s}, // %s\n", prop[0], prop[1], translateProperty("pr", prop[2]), generalCategory, prop[3])
+ } else {
+ fmt.Fprintf(&buf, "{0x%s,0x%s,%s}, // %s\n", prop[0], prop[1], translateProperty("pr", prop[2]), prop[3])
+ }
+ }
+
+ // Tail.
+ buf.WriteString("}")
+
+ return buf.String(), nil
+}
+
+// parseProperty parses a line of the Unicode properties text file containing a
+// property for a code point range and returns it along with its comment.
+func parseProperty(line string) (from, to, property, comment string, err error) {
+ fields := propertyPattern.FindStringSubmatch(line)
+ if fields == nil {
+ err = errors.New("no property found")
+ return
+ }
+ from = fields[1]
+ to = fields[3]
+ if to == "" {
+ to = from
+ }
+ property = fields[4]
+ comment = fields[5]
+ return
+}
+
+// translateProperty translates a property name as used in the Unicode data file
+// to a variable used in the Go code.
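+// For example, translateProperty("pr", "Extended_Pictographic") yields
+// "prExtendedPictographic" (an illustrative note added to this vendored copy,
+// not present upstream).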
+func translateProperty(prefix, property string) string {
+ return prefix + strings.ReplaceAll(property, "_", "")
+}
diff --git a/vendor/github.com/rivo/uniseg/grapheme.go b/vendor/github.com/rivo/uniseg/grapheme.go
new file mode 100644
index 0000000..0086fc1
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/grapheme.go
@@ -0,0 +1,334 @@
+package uniseg
+
+import "unicode/utf8"
+
+// Graphemes implements an iterator over Unicode grapheme clusters, or
+// user-perceived characters. While iterating, it also provides information
+// about word boundaries, sentence boundaries, line breaks, and monospace
+// character widths.
+//
+// After constructing the class via [NewGraphemes] for a given string "str",
+// [Graphemes.Next] is called for every grapheme cluster in a loop until it
+// returns false. Inside the loop, information about the grapheme cluster as
+// well as boundary information and character width is available via the various
+// methods (see examples below).
+//
+// Using this class to iterate over a string is convenient but it is much slower
+// than using this package's [Step] or [StepString] functions or any of the
+// other specialized functions starting with "First".
+type Graphemes struct {
+ // The original string.
+ original string
+
+ // The remaining string to be parsed.
+ remaining string
+
+ // The current grapheme cluster.
+ cluster string
+
+ // The byte offset of the current grapheme cluster relative to the original
+ // string.
+ offset int
+
+ // The current boundary information of the [Step] parser.
+ boundaries int
+
+ // The current state of the [Step] parser.
+ state int
+}
+
+// NewGraphemes returns a new grapheme cluster iterator.
+func NewGraphemes(str string) *Graphemes {
+ return &Graphemes{
+ original: str,
+ remaining: str,
+ state: -1,
+ }
+}
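+
+// A minimal usage sketch (this comment was added to this vendored copy and is
+// not part of the upstream source; it mirrors the loop described in the type
+// documentation above):
+//
+//	g := NewGraphemes("👍🏼!")
+//	for g.Next() {
+//		fmt.Println(g.Str(), g.Width())
+//	}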
+
+// Next advances the iterator by one grapheme cluster and returns false if no
+// clusters are left. This function must be called before the first cluster is
+// accessed.
+func (g *Graphemes) Next() bool {
+ if len(g.remaining) == 0 {
+ // We're already past the end.
+ g.state = -2
+ g.cluster = ""
+ return false
+ }
+ g.offset += len(g.cluster)
+ g.cluster, g.remaining, g.boundaries, g.state = StepString(g.remaining, g.state)
+ return true
+}
+
+// Runes returns a slice of runes (code points) which corresponds to the current
+// grapheme cluster. If the iterator is already past the end or [Graphemes.Next]
+// has not yet been called, nil is returned.
+func (g *Graphemes) Runes() []rune {
+ if g.state < 0 {
+ return nil
+ }
+ return []rune(g.cluster)
+}
+
+// Str returns a substring of the original string which corresponds to the
+// current grapheme cluster. If the iterator is already past the end or
+// [Graphemes.Next] has not yet been called, an empty string is returned.
+func (g *Graphemes) Str() string {
+ return g.cluster
+}
+
+// Bytes returns a byte slice which corresponds to the current grapheme cluster.
+// If the iterator is already past the end or [Graphemes.Next] has not yet been
+// called, nil is returned.
+func (g *Graphemes) Bytes() []byte {
+ if g.state < 0 {
+ return nil
+ }
+ return []byte(g.cluster)
+}
+
+// Positions returns the interval of the current grapheme cluster as byte
+// positions into the original string. The first returned value "from" indexes
+// the first byte and the second returned value "to" indexes the first byte that
+// is not included anymore, i.e. str[from:to] is the current grapheme cluster of
+// the original string "str". If [Graphemes.Next] has not yet been called, both
+// values are 0. If the iterator is already past the end, both values are 1.
+func (g *Graphemes) Positions() (int, int) {
+ if g.state == -1 {
+ return 0, 0
+ } else if g.state == -2 {
+ return 1, 1
+ }
+ return g.offset, g.offset + len(g.cluster)
+}
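+
+// For example (an illustrative note, not part of the upstream source): when
+// iterating over "a🇩🇪", Positions returns (0, 1) after the first call to
+// [Graphemes.Next] and (1, 9) after the second, since the flag's two regional
+// indicator runes occupy eight bytes of UTF-8.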
+
+// IsWordBoundary returns true if a word ends after the current grapheme
+// cluster.
+func (g *Graphemes) IsWordBoundary() bool {
+ if g.state < 0 {
+ return true
+ }
+ return g.boundaries&MaskWord != 0
+}
+
+// IsSentenceBoundary returns true if a sentence ends after the current
+// grapheme cluster.
+func (g *Graphemes) IsSentenceBoundary() bool {
+ if g.state < 0 {
+ return true
+ }
+ return g.boundaries&MaskSentence != 0
+}
+
+// LineBreak returns whether the line can be broken after the current grapheme
+// cluster. A value of [LineDontBreak] means the line may not be broken, a value
+// of [LineMustBreak] means the line must be broken, and a value of
+// [LineCanBreak] means the line may or may not be broken.
+func (g *Graphemes) LineBreak() int {
+ if g.state == -1 {
+ return LineDontBreak
+ }
+ if g.state == -2 {
+ return LineMustBreak
+ }
+ return g.boundaries & MaskLine
+}
+
+// Width returns the monospace width of the current grapheme cluster.
+func (g *Graphemes) Width() int {
+ if g.state < 0 {
+ return 0
+ }
+ return g.boundaries >> ShiftWidth
+}
+
+// Reset puts the iterator into its initial state such that the next call to
+// [Graphemes.Next] sets it to the first grapheme cluster again.
+func (g *Graphemes) Reset() {
+ g.state = -1
+ g.offset = 0
+ g.cluster = ""
+ g.remaining = g.original
+}
+
+// GraphemeClusterCount returns the number of user-perceived characters
+// (grapheme clusters) for the given string.
+func GraphemeClusterCount(s string) (n int) {
+ state := -1
+ for len(s) > 0 {
+ _, s, _, state = FirstGraphemeClusterInString(s, state)
+ n++
+ }
+ return
+}
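+
+// For example (an illustrative note, not part of the upstream source):
+//
+//	GraphemeClusterCount("🇩🇪🏳️‍🌈") // 2: one cluster per flag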
+
+// ReverseString reverses the given string while observing grapheme cluster
+// boundaries.
+func ReverseString(s string) string {
+ str := []byte(s)
+ reversed := make([]byte, len(str))
+ state := -1
+ index := len(str)
+ for len(str) > 0 {
+ var cluster []byte
+ cluster, str, _, state = FirstGraphemeCluster(str, state)
+ index -= len(cluster)
+ copy(reversed[index:], cluster)
+ if index <= len(str)/2 {
+ break
+ }
+ }
+ return string(reversed)
+}
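+
+// For example (an illustrative note, not part of the upstream source):
+//
+//	ReverseString("The quick brown 🦊") // "🦊 nworb kciuq ehT"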
+
+// The number of bits the grapheme property must be shifted to make room for
+// grapheme states.
+const shiftGraphemePropState = 4
+
+// FirstGraphemeCluster returns the first grapheme cluster found in the given
+// byte slice according to the rules of [Unicode Standard Annex #29, Grapheme
+// Cluster Boundaries]. This function can be called continuously to extract all
+// grapheme clusters from a byte slice, as illustrated in the example below.
+//
+// If you don't know the current state, for example when calling the function
+// for the first time, you must pass -1. For consecutive calls, pass the state
+// and rest slice returned by the previous call.
+//
+// The "rest" slice is the sub-slice of the original byte slice "b" starting
+// after the last byte of the identified grapheme cluster. If the length of the
+// "rest" slice is 0, the entire byte slice "b" has been processed. The
+// "cluster" byte slice is the sub-slice of the input slice containing the
+// identified grapheme cluster.
+//
+// The returned width is the width of the grapheme cluster for most monospace
+// fonts where a value of 1 represents one character cell.
+//
+// Given an empty byte slice "b", the function returns nil values.
+//
+// While slightly less convenient than using the Graphemes class, this function
+// has much better performance and makes no allocations. It lends itself well to
+// large byte slices.
+//
+// [Unicode Standard Annex #29, Grapheme Cluster Boundaries]: http://unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries
+func FirstGraphemeCluster(b []byte, state int) (cluster, rest []byte, width, newState int) {
+ // An empty byte slice returns nothing.
+ if len(b) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRune(b)
+ if len(b) <= length { // This rune is the rest of the slice; there is nothing else to parse.
+ var prop int
+ if state < 0 {
+ prop = property(graphemeCodePoints, r)
+ } else {
+ prop = state >> shiftGraphemePropState
+ }
+ return b, nil, runeWidth(r, prop), grAny | (prop << shiftGraphemePropState)
+ }
+
+ // If we don't know the state, determine it now.
+ var firstProp int
+ if state < 0 {
+ state, firstProp, _ = transitionGraphemeState(state, r)
+ } else {
+ firstProp = state >> shiftGraphemePropState
+ }
+ width += runeWidth(r, firstProp)
+
+ // Transition until we find a boundary.
+ for {
+ var (
+ prop int
+ boundary bool
+ )
+
+ r, l := utf8.DecodeRune(b[length:])
+ state, prop, boundary = transitionGraphemeState(state&maskGraphemeState, r)
+
+ if boundary {
+ return b[:length], b[length:], width, state | (prop << shiftGraphemePropState)
+ }
+
+ if r == vs16 {
+ width = 2
+ } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
+ } else if firstProp == prExtendedPictographic {
+ if r == vs15 {
+ width = 1
+ } else {
+ width = 2
+ }
+ }
+
+ length += l
+ if len(b) <= length {
+ return b, nil, width, grAny | (prop << shiftGraphemePropState)
+ }
+ }
+}
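+
+// A minimal sketch of the calling pattern (this comment was added to this
+// vendored copy and is not part of the upstream source):
+//
+//	b := []byte("🇩🇪🏳️‍🌈!")
+//	state := -1
+//	for len(b) > 0 {
+//		var cluster []byte
+//		cluster, b, _, state = FirstGraphemeCluster(b, state)
+//		fmt.Println(string(cluster)) // "🇩🇪", then "🏳️‍🌈", then "!"
+//	}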
+
+// FirstGraphemeClusterInString is like [FirstGraphemeCluster] but its input and
+// outputs are strings.
+func FirstGraphemeClusterInString(str string, state int) (cluster, rest string, width, newState int) {
+ // An empty string returns nothing.
+ if len(str) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRuneInString(str)
+ if len(str) <= length { // This rune is the rest of the string; there is nothing else to parse.
+ var prop int
+ if state < 0 {
+ prop = property(graphemeCodePoints, r)
+ } else {
+ prop = state >> shiftGraphemePropState
+ }
+ return str, "", runeWidth(r, prop), grAny | (prop << shiftGraphemePropState)
+ }
+
+ // If we don't know the state, determine it now.
+ var firstProp int
+ if state < 0 {
+ state, firstProp, _ = transitionGraphemeState(state, r)
+ } else {
+ firstProp = state >> shiftGraphemePropState
+ }
+ width += runeWidth(r, firstProp)
+
+ // Transition until we find a boundary.
+ for {
+ var (
+ prop int
+ boundary bool
+ )
+
+ r, l := utf8.DecodeRuneInString(str[length:])
+ state, prop, boundary = transitionGraphemeState(state&maskGraphemeState, r)
+
+ if boundary {
+ return str[:length], str[length:], width, state | (prop << shiftGraphemePropState)
+ }
+
+ if r == vs16 {
+ width = 2
+ } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
+ } else if firstProp == prExtendedPictographic {
+ if r == vs15 {
+ width = 1
+ } else {
+ width = 2
+ }
+ }
+
+ length += l
+ if len(str) <= length {
+ return str, "", width, grAny | (prop << shiftGraphemePropState)
+ }
+ }
+}
diff --git a/vendor/github.com/rivo/uniseg/graphemeproperties.go b/vendor/github.com/rivo/uniseg/graphemeproperties.go
new file mode 100644
index 0000000..a87d140
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/graphemeproperties.go
@@ -0,0 +1,1891 @@
+package uniseg
+
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+// graphemeCodePoints are taken from
+// https://www.unicode.org/Public/14.0.0/ucd/auxiliary/GraphemeBreakProperty.txt
+// and
+// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var graphemeCodePoints = [][3]int{
+ {0x0000, 0x0009, prControl}, // Cc [10] <control-0000>..<control-0009>
+ {0x000A, 0x000A, prLF}, // Cc <control-000A>
+ {0x000B, 0x000C, prControl}, // Cc [2] <control-000B>..<control-000C>
+ {0x000D, 0x000D, prCR}, // Cc <control-000D>
+ {0x000E, 0x001F, prControl}, // Cc [18] <control-000E>..<control-001F>
+ {0x007F, 0x009F, prControl}, // Cc [33] <control-007F>..<control-009F>
+ {0x00A9, 0x00A9, prExtendedPictographic}, // E0.6 [1] (©️) copyright
+ {0x00AD, 0x00AD, prControl}, // Cf SOFT HYPHEN
+ {0x00AE, 0x00AE, prExtendedPictographic}, // E0.6 [1] (®️) registered
+ {0x0300, 0x036F, prExtend}, // Mn [112] COMBINING GRAVE ACCENT..COMBINING LATIN SMALL LETTER X
+ {0x0483, 0x0487, prExtend}, // Mn [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE
+ {0x0488, 0x0489, prExtend}, // Me [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN
+ {0x0591, 0x05BD, prExtend}, // Mn [45] HEBREW ACCENT ETNAHTA..HEBREW POINT METEG
+ {0x05BF, 0x05BF, prExtend}, // Mn HEBREW POINT RAFE
+ {0x05C1, 0x05C2, prExtend}, // Mn [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT
+ {0x05C4, 0x05C5, prExtend}, // Mn [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT
+ {0x05C7, 0x05C7, prExtend}, // Mn HEBREW POINT QAMATS QATAN
+ {0x0600, 0x0605, prPrepend}, // Cf [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+ {0x0610, 0x061A, prExtend}, // Mn [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA
+ {0x061C, 0x061C, prControl}, // Cf ARABIC LETTER MARK
+ {0x064B, 0x065F, prExtend}, // Mn [21] ARABIC FATHATAN..ARABIC WAVY HAMZA BELOW
+ {0x0670, 0x0670, prExtend}, // Mn ARABIC LETTER SUPERSCRIPT ALEF
+ {0x06D6, 0x06DC, prExtend}, // Mn [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN
+ {0x06DD, 0x06DD, prPrepend}, // Cf ARABIC END OF AYAH
+ {0x06DF, 0x06E4, prExtend}, // Mn [6] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH MADDA
+ {0x06E7, 0x06E8, prExtend}, // Mn [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON
+ {0x06EA, 0x06ED, prExtend}, // Mn [4] ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+ {0x070F, 0x070F, prPrepend}, // Cf SYRIAC ABBREVIATION MARK
+ {0x0711, 0x0711, prExtend}, // Mn SYRIAC LETTER SUPERSCRIPT ALAPH
+ {0x0730, 0x074A, prExtend}, // Mn [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+ {0x07A6, 0x07B0, prExtend}, // Mn [11] THAANA ABAFILI..THAANA SUKUN
+ {0x07EB, 0x07F3, prExtend}, // Mn [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE
+ {0x07FD, 0x07FD, prExtend}, // Mn NKO DANTAYALAN
+ {0x0816, 0x0819, prExtend}, // Mn [4] SAMARITAN MARK IN..SAMARITAN MARK DAGESH
+ {0x081B, 0x0823, prExtend}, // Mn [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A
+ {0x0825, 0x0827, prExtend}, // Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U
+ {0x0829, 0x082D, prExtend}, // Mn [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA
+ {0x0859, 0x085B, prExtend}, // Mn [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK
+ {0x0890, 0x0891, prPrepend}, // Cf [2] ARABIC POUND MARK ABOVE..ARABIC PIASTRE MARK ABOVE
+ {0x0898, 0x089F, prExtend}, // Mn [8] ARABIC SMALL HIGH WORD AL-JUZ..ARABIC HALF MADDA OVER MADDA
+ {0x08CA, 0x08E1, prExtend}, // Mn [24] ARABIC SMALL HIGH FARSI YEH..ARABIC SMALL HIGH SIGN SAFHA
+ {0x08E2, 0x08E2, prPrepend}, // Cf ARABIC DISPUTED END OF AYAH
+ {0x08E3, 0x0902, prExtend}, // Mn [32] ARABIC TURNED DAMMA BELOW..DEVANAGARI SIGN ANUSVARA
+ {0x0903, 0x0903, prSpacingMark}, // Mc DEVANAGARI SIGN VISARGA
+ {0x093A, 0x093A, prExtend}, // Mn DEVANAGARI VOWEL SIGN OE
+ {0x093B, 0x093B, prSpacingMark}, // Mc DEVANAGARI VOWEL SIGN OOE
+ {0x093C, 0x093C, prExtend}, // Mn DEVANAGARI SIGN NUKTA
+ {0x093E, 0x0940, prSpacingMark}, // Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II
+ {0x0941, 0x0948, prExtend}, // Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI
+ {0x0949, 0x094C, prSpacingMark}, // Mc [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU
+ {0x094D, 0x094D, prExtend}, // Mn DEVANAGARI SIGN VIRAMA
+ {0x094E, 0x094F, prSpacingMark}, // Mc [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW
+ {0x0951, 0x0957, prExtend}, // Mn [7] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI VOWEL SIGN UUE
+ {0x0962, 0x0963, prExtend}, // Mn [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL
+ {0x0981, 0x0981, prExtend}, // Mn BENGALI SIGN CANDRABINDU
+ {0x0982, 0x0983, prSpacingMark}, // Mc [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA
+ {0x09BC, 0x09BC, prExtend}, // Mn BENGALI SIGN NUKTA
+ {0x09BE, 0x09BE, prExtend}, // Mc BENGALI VOWEL SIGN AA
+ {0x09BF, 0x09C0, prSpacingMark}, // Mc [2] BENGALI VOWEL SIGN I..BENGALI VOWEL SIGN II
+ {0x09C1, 0x09C4, prExtend}, // Mn [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR
+ {0x09C7, 0x09C8, prSpacingMark}, // Mc [2] BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+ {0x09CB, 0x09CC, prSpacingMark}, // Mc [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU
+ {0x09CD, 0x09CD, prExtend}, // Mn BENGALI SIGN VIRAMA
+ {0x09D7, 0x09D7, prExtend}, // Mc BENGALI AU LENGTH MARK
+ {0x09E2, 0x09E3, prExtend}, // Mn [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL
+ {0x09FE, 0x09FE, prExtend}, // Mn BENGALI SANDHI MARK
+ {0x0A01, 0x0A02, prExtend}, // Mn [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI
+ {0x0A03, 0x0A03, prSpacingMark}, // Mc GURMUKHI SIGN VISARGA
+ {0x0A3C, 0x0A3C, prExtend}, // Mn GURMUKHI SIGN NUKTA
+ {0x0A3E, 0x0A40, prSpacingMark}, // Mc [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II
+ {0x0A41, 0x0A42, prExtend}, // Mn [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU
+ {0x0A47, 0x0A48, prExtend}, // Mn [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+ {0x0A4B, 0x0A4D, prExtend}, // Mn [3] GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+ {0x0A51, 0x0A51, prExtend}, // Mn GURMUKHI SIGN UDAAT
+ {0x0A70, 0x0A71, prExtend}, // Mn [2] GURMUKHI TIPPI..GURMUKHI ADDAK
+ {0x0A75, 0x0A75, prExtend}, // Mn GURMUKHI SIGN YAKASH
+ {0x0A81, 0x0A82, prExtend}, // Mn [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA
+ {0x0A83, 0x0A83, prSpacingMark}, // Mc GUJARATI SIGN VISARGA
+ {0x0ABC, 0x0ABC, prExtend}, // Mn GUJARATI SIGN NUKTA
+ {0x0ABE, 0x0AC0, prSpacingMark}, // Mc [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II
+ {0x0AC1, 0x0AC5, prExtend}, // Mn [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E
+ {0x0AC7, 0x0AC8, prExtend}, // Mn [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI
+ {0x0AC9, 0x0AC9, prSpacingMark}, // Mc GUJARATI VOWEL SIGN CANDRA O
+ {0x0ACB, 0x0ACC, prSpacingMark}, // Mc [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU
+ {0x0ACD, 0x0ACD, prExtend}, // Mn GUJARATI SIGN VIRAMA
+ {0x0AE2, 0x0AE3, prExtend}, // Mn [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL
+ {0x0AFA, 0x0AFF, prExtend}, // Mn [6] GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
+ {0x0B01, 0x0B01, prExtend}, // Mn ORIYA SIGN CANDRABINDU
+ {0x0B02, 0x0B03, prSpacingMark}, // Mc [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA
+ {0x0B3C, 0x0B3C, prExtend}, // Mn ORIYA SIGN NUKTA
+ {0x0B3E, 0x0B3E, prExtend}, // Mc ORIYA VOWEL SIGN AA
+ {0x0B3F, 0x0B3F, prExtend}, // Mn ORIYA VOWEL SIGN I
+ {0x0B40, 0x0B40, prSpacingMark}, // Mc ORIYA VOWEL SIGN II
+ {0x0B41, 0x0B44, prExtend}, // Mn [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR
+ {0x0B47, 0x0B48, prSpacingMark}, // Mc [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI
+ {0x0B4B, 0x0B4C, prSpacingMark}, // Mc [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU
+ {0x0B4D, 0x0B4D, prExtend}, // Mn ORIYA SIGN VIRAMA
+ {0x0B55, 0x0B56, prExtend}, // Mn [2] ORIYA SIGN OVERLINE..ORIYA AI LENGTH MARK
+ {0x0B57, 0x0B57, prExtend}, // Mc ORIYA AU LENGTH MARK
+ {0x0B62, 0x0B63, prExtend}, // Mn [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL
+ {0x0B82, 0x0B82, prExtend}, // Mn TAMIL SIGN ANUSVARA
+ {0x0BBE, 0x0BBE, prExtend}, // Mc TAMIL VOWEL SIGN AA
+ {0x0BBF, 0x0BBF, prSpacingMark}, // Mc TAMIL VOWEL SIGN I
+ {0x0BC0, 0x0BC0, prExtend}, // Mn TAMIL VOWEL SIGN II
+ {0x0BC1, 0x0BC2, prSpacingMark}, // Mc [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU
+ {0x0BC6, 0x0BC8, prSpacingMark}, // Mc [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI
+ {0x0BCA, 0x0BCC, prSpacingMark}, // Mc [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU
+ {0x0BCD, 0x0BCD, prExtend}, // Mn TAMIL SIGN VIRAMA
+ {0x0BD7, 0x0BD7, prExtend}, // Mc TAMIL AU LENGTH MARK
+ {0x0C00, 0x0C00, prExtend}, // Mn TELUGU SIGN COMBINING CANDRABINDU ABOVE
+ {0x0C01, 0x0C03, prSpacingMark}, // Mc [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA
+ {0x0C04, 0x0C04, prExtend}, // Mn TELUGU SIGN COMBINING ANUSVARA ABOVE
+ {0x0C3C, 0x0C3C, prExtend}, // Mn TELUGU SIGN NUKTA
+ {0x0C3E, 0x0C40, prExtend}, // Mn [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II
+ {0x0C41, 0x0C44, prSpacingMark}, // Mc [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR
+ {0x0C46, 0x0C48, prExtend}, // Mn [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI
+ {0x0C4A, 0x0C4D, prExtend}, // Mn [4] TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA
+ {0x0C55, 0x0C56, prExtend}, // Mn [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK
+ {0x0C62, 0x0C63, prExtend}, // Mn [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL
+ {0x0C81, 0x0C81, prExtend}, // Mn KANNADA SIGN CANDRABINDU
+ {0x0C82, 0x0C83, prSpacingMark}, // Mc [2] KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA
+ {0x0CBC, 0x0CBC, prExtend}, // Mn KANNADA SIGN NUKTA
+ {0x0CBE, 0x0CBE, prSpacingMark}, // Mc KANNADA VOWEL SIGN AA
+ {0x0CBF, 0x0CBF, prExtend}, // Mn KANNADA VOWEL SIGN I
+ {0x0CC0, 0x0CC1, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN U
+ {0x0CC2, 0x0CC2, prExtend}, // Mc KANNADA VOWEL SIGN UU
+ {0x0CC3, 0x0CC4, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN VOCALIC R..KANNADA VOWEL SIGN VOCALIC RR
+ {0x0CC6, 0x0CC6, prExtend}, // Mn KANNADA VOWEL SIGN E
+ {0x0CC7, 0x0CC8, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI
+ {0x0CCA, 0x0CCB, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO
+ {0x0CCC, 0x0CCD, prExtend}, // Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
+ {0x0CD5, 0x0CD6, prExtend}, // Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
+ {0x0CE2, 0x0CE3, prExtend}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0D00, 0x0D01, prExtend}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
+ {0x0D02, 0x0D03, prSpacingMark}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
+ {0x0D3B, 0x0D3C, prExtend}, // Mn [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
+ {0x0D3E, 0x0D3E, prExtend}, // Mc MALAYALAM VOWEL SIGN AA
+ {0x0D3F, 0x0D40, prSpacingMark}, // Mc [2] MALAYALAM VOWEL SIGN I..MALAYALAM VOWEL SIGN II
+ {0x0D41, 0x0D44, prExtend}, // Mn [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR
+ {0x0D46, 0x0D48, prSpacingMark}, // Mc [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI
+ {0x0D4A, 0x0D4C, prSpacingMark}, // Mc [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU
+ {0x0D4D, 0x0D4D, prExtend}, // Mn MALAYALAM SIGN VIRAMA
+ {0x0D4E, 0x0D4E, prPrepend}, // Lo MALAYALAM LETTER DOT REPH
+ {0x0D57, 0x0D57, prExtend}, // Mc MALAYALAM AU LENGTH MARK
+ {0x0D62, 0x0D63, prExtend}, // Mn [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL
+ {0x0D81, 0x0D81, prExtend}, // Mn SINHALA SIGN CANDRABINDU
+ {0x0D82, 0x0D83, prSpacingMark}, // Mc [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA
+ {0x0DCA, 0x0DCA, prExtend}, // Mn SINHALA SIGN AL-LAKUNA
+ {0x0DCF, 0x0DCF, prExtend}, // Mc SINHALA VOWEL SIGN AELA-PILLA
+ {0x0DD0, 0x0DD1, prSpacingMark}, // Mc [2] SINHALA VOWEL SIGN KETTI AEDA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA
+ {0x0DD2, 0x0DD4, prExtend}, // Mn [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA
+ {0x0DD6, 0x0DD6, prExtend}, // Mn SINHALA VOWEL SIGN DIGA PAA-PILLA
+ {0x0DD8, 0x0DDE, prSpacingMark}, // Mc [7] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN KOMBUVA HAA GAYANUKITTA
+ {0x0DDF, 0x0DDF, prExtend}, // Mc SINHALA VOWEL SIGN GAYANUKITTA
+ {0x0DF2, 0x0DF3, prSpacingMark}, // Mc [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA
+ {0x0E31, 0x0E31, prExtend}, // Mn THAI CHARACTER MAI HAN-AKAT
+ {0x0E33, 0x0E33, prSpacingMark}, // Lo THAI CHARACTER SARA AM
+ {0x0E34, 0x0E3A, prExtend}, // Mn [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU
+ {0x0E47, 0x0E4E, prExtend}, // Mn [8] THAI CHARACTER MAITAIKHU..THAI CHARACTER YAMAKKAN
+ {0x0EB1, 0x0EB1, prExtend}, // Mn LAO VOWEL SIGN MAI KAN
+ {0x0EB3, 0x0EB3, prSpacingMark}, // Lo LAO VOWEL SIGN AM
+ {0x0EB4, 0x0EBC, prExtend}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
+ {0x0EC8, 0x0ECD, prExtend}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA
+ {0x0F18, 0x0F19, prExtend}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
+ {0x0F35, 0x0F35, prExtend}, // Mn TIBETAN MARK NGAS BZUNG NYI ZLA
+ {0x0F37, 0x0F37, prExtend}, // Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS
+ {0x0F39, 0x0F39, prExtend}, // Mn TIBETAN MARK TSA -PHRU
+ {0x0F3E, 0x0F3F, prSpacingMark}, // Mc [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES
+ {0x0F71, 0x0F7E, prExtend}, // Mn [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO
+ {0x0F7F, 0x0F7F, prSpacingMark}, // Mc TIBETAN SIGN RNAM BCAD
+ {0x0F80, 0x0F84, prExtend}, // Mn [5] TIBETAN VOWEL SIGN REVERSED I..TIBETAN MARK HALANTA
+ {0x0F86, 0x0F87, prExtend}, // Mn [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS
+ {0x0F8D, 0x0F97, prExtend}, // Mn [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA
+ {0x0F99, 0x0FBC, prExtend}, // Mn [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA
+ {0x0FC6, 0x0FC6, prExtend}, // Mn TIBETAN SYMBOL PADMA GDAN
+ {0x102D, 0x1030, prExtend}, // Mn [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU
+ {0x1031, 0x1031, prSpacingMark}, // Mc MYANMAR VOWEL SIGN E
+ {0x1032, 0x1037, prExtend}, // Mn [6] MYANMAR VOWEL SIGN AI..MYANMAR SIGN DOT BELOW
+ {0x1039, 0x103A, prExtend}, // Mn [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT
+ {0x103B, 0x103C, prSpacingMark}, // Mc [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA
+ {0x103D, 0x103E, prExtend}, // Mn [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA
+ {0x1056, 0x1057, prSpacingMark}, // Mc [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR
+ {0x1058, 0x1059, prExtend}, // Mn [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL
+ {0x105E, 0x1060, prExtend}, // Mn [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA
+ {0x1071, 0x1074, prExtend}, // Mn [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE
+ {0x1082, 0x1082, prExtend}, // Mn MYANMAR CONSONANT SIGN SHAN MEDIAL WA
+ {0x1084, 0x1084, prSpacingMark}, // Mc MYANMAR VOWEL SIGN SHAN E
+ {0x1085, 0x1086, prExtend}, // Mn [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y
+ {0x108D, 0x108D, prExtend}, // Mn MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE
+ {0x109D, 0x109D, prExtend}, // Mn MYANMAR VOWEL SIGN AITON AI
+ {0x1100, 0x115F, prL}, // Lo [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER
+ {0x1160, 0x11A7, prV}, // Lo [72] HANGUL JUNGSEONG FILLER..HANGUL JUNGSEONG O-YAE
+ {0x11A8, 0x11FF, prT}, // Lo [88] HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG SSANGNIEUN
+ {0x135D, 0x135F, prExtend}, // Mn [3] ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING GEMINATION MARK
+ {0x1712, 0x1714, prExtend}, // Mn [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN VIRAMA
+ {0x1715, 0x1715, prSpacingMark}, // Mc TAGALOG SIGN PAMUDPOD
+ {0x1732, 0x1733, prExtend}, // Mn [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U
+ {0x1734, 0x1734, prSpacingMark}, // Mc HANUNOO SIGN PAMUDPOD
+ {0x1752, 0x1753, prExtend}, // Mn [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U
+ {0x1772, 0x1773, prExtend}, // Mn [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U
+ {0x17B4, 0x17B5, prExtend}, // Mn [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA
+ {0x17B6, 0x17B6, prSpacingMark}, // Mc KHMER VOWEL SIGN AA
+ {0x17B7, 0x17BD, prExtend}, // Mn [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA
+ {0x17BE, 0x17C5, prSpacingMark}, // Mc [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU
+ {0x17C6, 0x17C6, prExtend}, // Mn KHMER SIGN NIKAHIT
+ {0x17C7, 0x17C8, prSpacingMark}, // Mc [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU
+ {0x17C9, 0x17D3, prExtend}, // Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT
+ {0x17DD, 0x17DD, prExtend}, // Mn KHMER SIGN ATTHACAN
+ {0x180B, 0x180D, prExtend}, // Mn [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE
+ {0x180E, 0x180E, prControl}, // Cf MONGOLIAN VOWEL SEPARATOR
+ {0x180F, 0x180F, prExtend}, // Mn MONGOLIAN FREE VARIATION SELECTOR FOUR
+ {0x1885, 0x1886, prExtend}, // Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA
+ {0x18A9, 0x18A9, prExtend}, // Mn MONGOLIAN LETTER ALI GALI DAGALGA
+ {0x1920, 0x1922, prExtend}, // Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U
+ {0x1923, 0x1926, prSpacingMark}, // Mc [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU
+ {0x1927, 0x1928, prExtend}, // Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O
+ {0x1929, 0x192B, prSpacingMark}, // Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA
+ {0x1930, 0x1931, prSpacingMark}, // Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA
+ {0x1932, 0x1932, prExtend}, // Mn LIMBU SMALL LETTER ANUSVARA
+ {0x1933, 0x1938, prSpacingMark}, // Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA
+ {0x1939, 0x193B, prExtend}, // Mn [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
+ {0x1A17, 0x1A18, prExtend}, // Mn [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U
+ {0x1A19, 0x1A1A, prSpacingMark}, // Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O
+ {0x1A1B, 0x1A1B, prExtend}, // Mn BUGINESE VOWEL SIGN AE
+ {0x1A55, 0x1A55, prSpacingMark}, // Mc TAI THAM CONSONANT SIGN MEDIAL RA
+ {0x1A56, 0x1A56, prExtend}, // Mn TAI THAM CONSONANT SIGN MEDIAL LA
+ {0x1A57, 0x1A57, prSpacingMark}, // Mc TAI THAM CONSONANT SIGN LA TANG LAI
+ {0x1A58, 0x1A5E, prExtend}, // Mn [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA
+ {0x1A60, 0x1A60, prExtend}, // Mn TAI THAM SIGN SAKOT
+ {0x1A62, 0x1A62, prExtend}, // Mn TAI THAM VOWEL SIGN MAI SAT
+ {0x1A65, 0x1A6C, prExtend}, // Mn [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW
+ {0x1A6D, 0x1A72, prSpacingMark}, // Mc [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI
+ {0x1A73, 0x1A7C, prExtend}, // Mn [10] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN KHUEN-LUE KARAN
+ {0x1A7F, 0x1A7F, prExtend}, // Mn TAI THAM COMBINING CRYPTOGRAMMIC DOT
+ {0x1AB0, 0x1ABD, prExtend}, // Mn [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW
+ {0x1ABE, 0x1ABE, prExtend}, // Me COMBINING PARENTHESES OVERLAY
+ {0x1ABF, 0x1ACE, prExtend}, // Mn [16] COMBINING LATIN SMALL LETTER W BELOW..COMBINING LATIN SMALL LETTER INSULAR T
+ {0x1B00, 0x1B03, prExtend}, // Mn [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG
+ {0x1B04, 0x1B04, prSpacingMark}, // Mc BALINESE SIGN BISAH
+ {0x1B34, 0x1B34, prExtend}, // Mn BALINESE SIGN REREKAN
+ {0x1B35, 0x1B35, prExtend}, // Mc BALINESE VOWEL SIGN TEDUNG
+ {0x1B36, 0x1B3A, prExtend}, // Mn [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA
+ {0x1B3B, 0x1B3B, prSpacingMark}, // Mc BALINESE VOWEL SIGN RA REPA TEDUNG
+ {0x1B3C, 0x1B3C, prExtend}, // Mn BALINESE VOWEL SIGN LA LENGA
+ {0x1B3D, 0x1B41, prSpacingMark}, // Mc [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG
+ {0x1B42, 0x1B42, prExtend}, // Mn BALINESE VOWEL SIGN PEPET
+ {0x1B43, 0x1B44, prSpacingMark}, // Mc [2] BALINESE VOWEL SIGN PEPET TEDUNG..BALINESE ADEG ADEG
+ {0x1B6B, 0x1B73, prExtend}, // Mn [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG
+ {0x1B80, 0x1B81, prExtend}, // Mn [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR
+ {0x1B82, 0x1B82, prSpacingMark}, // Mc SUNDANESE SIGN PANGWISAD
+ {0x1BA1, 0x1BA1, prSpacingMark}, // Mc SUNDANESE CONSONANT SIGN PAMINGKAL
+ {0x1BA2, 0x1BA5, prExtend}, // Mn [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU
+ {0x1BA6, 0x1BA7, prSpacingMark}, // Mc [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG
+ {0x1BA8, 0x1BA9, prExtend}, // Mn [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG
+ {0x1BAA, 0x1BAA, prSpacingMark}, // Mc SUNDANESE SIGN PAMAAEH
+ {0x1BAB, 0x1BAD, prExtend}, // Mn [3] SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA
+ {0x1BE6, 0x1BE6, prExtend}, // Mn BATAK SIGN TOMPI
+ {0x1BE7, 0x1BE7, prSpacingMark}, // Mc BATAK VOWEL SIGN E
+ {0x1BE8, 0x1BE9, prExtend}, // Mn [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE
+ {0x1BEA, 0x1BEC, prSpacingMark}, // Mc [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O
+ {0x1BED, 0x1BED, prExtend}, // Mn BATAK VOWEL SIGN KARO O
+ {0x1BEE, 0x1BEE, prSpacingMark}, // Mc BATAK VOWEL SIGN U
+ {0x1BEF, 0x1BF1, prExtend}, // Mn [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H
+ {0x1BF2, 0x1BF3, prSpacingMark}, // Mc [2] BATAK PANGOLAT..BATAK PANONGONAN
+ {0x1C24, 0x1C2B, prSpacingMark}, // Mc [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU
+ {0x1C2C, 0x1C33, prExtend}, // Mn [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T
+ {0x1C34, 0x1C35, prSpacingMark}, // Mc [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG
+ {0x1C36, 0x1C37, prExtend}, // Mn [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
+ {0x1CD0, 0x1CD2, prExtend}, // Mn [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+ {0x1CD4, 0x1CE0, prExtend}, // Mn [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
+ {0x1CE1, 0x1CE1, prSpacingMark}, // Mc VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
+ {0x1CE2, 0x1CE8, prExtend}, // Mn [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL
+ {0x1CED, 0x1CED, prExtend}, // Mn VEDIC SIGN TIRYAK
+ {0x1CF4, 0x1CF4, prExtend}, // Mn VEDIC TONE CANDRA ABOVE
+ {0x1CF7, 0x1CF7, prSpacingMark}, // Mc VEDIC SIGN ATIKRAMA
+ {0x1CF8, 0x1CF9, prExtend}, // Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+ {0x1DC0, 0x1DFF, prExtend}, // Mn [64] COMBINING DOTTED GRAVE ACCENT..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x200B, 0x200B, prControl}, // Cf ZERO WIDTH SPACE
+ {0x200C, 0x200C, prExtend}, // Cf ZERO WIDTH NON-JOINER
+ {0x200D, 0x200D, prZWJ}, // Cf ZERO WIDTH JOINER
+ {0x200E, 0x200F, prControl}, // Cf [2] LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK
+ {0x2028, 0x2028, prControl}, // Zl LINE SEPARATOR
+ {0x2029, 0x2029, prControl}, // Zp PARAGRAPH SEPARATOR
+ {0x202A, 0x202E, prControl}, // Cf [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE
+ {0x203C, 0x203C, prExtendedPictographic}, // E0.6 [1] (‼️) double exclamation mark
+ {0x2049, 0x2049, prExtendedPictographic}, // E0.6 [1] (⁉️) exclamation question mark
+ {0x2060, 0x2064, prControl}, // Cf [5] WORD JOINER..INVISIBLE PLUS
+ {0x2065, 0x2065, prControl}, // Cn <reserved-2065>
+ {0x2066, 0x206F, prControl}, // Cf [10] LEFT-TO-RIGHT ISOLATE..NOMINAL DIGIT SHAPES
+ {0x20D0, 0x20DC, prExtend}, // Mn [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE
+ {0x20DD, 0x20E0, prExtend}, // Me [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH
+ {0x20E1, 0x20E1, prExtend}, // Mn COMBINING LEFT RIGHT ARROW ABOVE
+ {0x20E2, 0x20E4, prExtend}, // Me [3] COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING UPWARD POINTING TRIANGLE
+ {0x20E5, 0x20F0, prExtend}, // Mn [12] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING ASTERISK ABOVE
+ {0x2122, 0x2122, prExtendedPictographic}, // E0.6 [1] (™️) trade mark
+ {0x2139, 0x2139, prExtendedPictographic}, // E0.6 [1] (ℹ️) information
+ {0x2194, 0x2199, prExtendedPictographic}, // E0.6 [6] (↔️..↙️) left-right arrow..down-left arrow
+ {0x21A9, 0x21AA, prExtendedPictographic}, // E0.6 [2] (↩️..↪️) right arrow curving left..left arrow curving right
+ {0x231A, 0x231B, prExtendedPictographic}, // E0.6 [2] (⌚..⌛) watch..hourglass done
+ {0x2328, 0x2328, prExtendedPictographic}, // E1.0 [1] (⌨️) keyboard
+ {0x2388, 0x2388, prExtendedPictographic}, // E0.0 [1] (⎈) HELM SYMBOL
+ {0x23CF, 0x23CF, prExtendedPictographic}, // E1.0 [1] (⏏️) eject button
+ {0x23E9, 0x23EC, prExtendedPictographic}, // E0.6 [4] (⏩..⏬) fast-forward button..fast down button
+ {0x23ED, 0x23EE, prExtendedPictographic}, // E0.7 [2] (⏭️..⏮️) next track button..last track button
+ {0x23EF, 0x23EF, prExtendedPictographic}, // E1.0 [1] (⏯️) play or pause button
+ {0x23F0, 0x23F0, prExtendedPictographic}, // E0.6 [1] (⏰) alarm clock
+ {0x23F1, 0x23F2, prExtendedPictographic}, // E1.0 [2] (⏱️..⏲️) stopwatch..timer clock
+ {0x23F3, 0x23F3, prExtendedPictographic}, // E0.6 [1] (⏳) hourglass not done
+ {0x23F8, 0x23FA, prExtendedPictographic}, // E0.7 [3] (⏸️..⏺️) pause button..record button
+ {0x24C2, 0x24C2, prExtendedPictographic}, // E0.6 [1] (Ⓜ️) circled M
+ {0x25AA, 0x25AB, prExtendedPictographic}, // E0.6 [2] (▪️..▫️) black small square..white small square
+ {0x25B6, 0x25B6, prExtendedPictographic}, // E0.6 [1] (▶️) play button
+ {0x25C0, 0x25C0, prExtendedPictographic}, // E0.6 [1] (◀️) reverse button
+ {0x25FB, 0x25FE, prExtendedPictographic}, // E0.6 [4] (◻️..◾) white medium square..black medium-small square
+ {0x2600, 0x2601, prExtendedPictographic}, // E0.6 [2] (☀️..☁️) sun..cloud
+ {0x2602, 0x2603, prExtendedPictographic}, // E0.7 [2] (☂️..☃️) umbrella..snowman
+ {0x2604, 0x2604, prExtendedPictographic}, // E1.0 [1] (☄️) comet
+ {0x2605, 0x2605, prExtendedPictographic}, // E0.0 [1] (★) BLACK STAR
+ {0x2607, 0x260D, prExtendedPictographic}, // E0.0 [7] (☇..☍) LIGHTNING..OPPOSITION
+ {0x260E, 0x260E, prExtendedPictographic}, // E0.6 [1] (☎️) telephone
+ {0x260F, 0x2610, prExtendedPictographic}, // E0.0 [2] (☏..☐) WHITE TELEPHONE..BALLOT BOX
+ {0x2611, 0x2611, prExtendedPictographic}, // E0.6 [1] (☑️) check box with check
+ {0x2612, 0x2612, prExtendedPictographic}, // E0.0 [1] (☒) BALLOT BOX WITH X
+ {0x2614, 0x2615, prExtendedPictographic}, // E0.6 [2] (☔..☕) umbrella with rain drops..hot beverage
+ {0x2616, 0x2617, prExtendedPictographic}, // E0.0 [2] (☖..☗) WHITE SHOGI PIECE..BLACK SHOGI PIECE
+ {0x2618, 0x2618, prExtendedPictographic}, // E1.0 [1] (☘️) shamrock
+ {0x2619, 0x261C, prExtendedPictographic}, // E0.0 [4] (☙..☜) REVERSED ROTATED FLORAL HEART BULLET..WHITE LEFT POINTING INDEX
+ {0x261D, 0x261D, prExtendedPictographic}, // E0.6 [1] (☝️) index pointing up
+ {0x261E, 0x261F, prExtendedPictographic}, // E0.0 [2] (☞..☟) WHITE RIGHT POINTING INDEX..WHITE DOWN POINTING INDEX
+ {0x2620, 0x2620, prExtendedPictographic}, // E1.0 [1] (☠️) skull and crossbones
+ {0x2621, 0x2621, prExtendedPictographic}, // E0.0 [1] (☡) CAUTION SIGN
+ {0x2622, 0x2623, prExtendedPictographic}, // E1.0 [2] (☢️..☣️) radioactive..biohazard
+ {0x2624, 0x2625, prExtendedPictographic}, // E0.0 [2] (☤..☥) CADUCEUS..ANKH
+ {0x2626, 0x2626, prExtendedPictographic}, // E1.0 [1] (☦️) orthodox cross
+ {0x2627, 0x2629, prExtendedPictographic}, // E0.0 [3] (☧..☩) CHI RHO..CROSS OF JERUSALEM
+ {0x262A, 0x262A, prExtendedPictographic}, // E0.7 [1] (☪️) star and crescent
+ {0x262B, 0x262D, prExtendedPictographic}, // E0.0 [3] (☫..☭) FARSI SYMBOL..HAMMER AND SICKLE
+ {0x262E, 0x262E, prExtendedPictographic}, // E1.0 [1] (☮️) peace symbol
+ {0x262F, 0x262F, prExtendedPictographic}, // E0.7 [1] (☯️) yin yang
+ {0x2630, 0x2637, prExtendedPictographic}, // E0.0 [8] (☰..☷) TRIGRAM FOR HEAVEN..TRIGRAM FOR EARTH
+ {0x2638, 0x2639, prExtendedPictographic}, // E0.7 [2] (☸️..☹️) wheel of dharma..frowning face
+ {0x263A, 0x263A, prExtendedPictographic}, // E0.6 [1] (☺️) smiling face
+ {0x263B, 0x263F, prExtendedPictographic}, // E0.0 [5] (☻..☿) BLACK SMILING FACE..MERCURY
+ {0x2640, 0x2640, prExtendedPictographic}, // E4.0 [1] (♀️) female sign
+ {0x2641, 0x2641, prExtendedPictographic}, // E0.0 [1] (♁) EARTH
+ {0x2642, 0x2642, prExtendedPictographic}, // E4.0 [1] (♂️) male sign
+ {0x2643, 0x2647, prExtendedPictographic}, // E0.0 [5] (♃..♇) JUPITER..PLUTO
+ {0x2648, 0x2653, prExtendedPictographic}, // E0.6 [12] (♈..♓) Aries..Pisces
+ {0x2654, 0x265E, prExtendedPictographic}, // E0.0 [11] (♔..♞) WHITE CHESS KING..BLACK CHESS KNIGHT
+ {0x265F, 0x265F, prExtendedPictographic}, // E11.0 [1] (♟️) chess pawn
+ {0x2660, 0x2660, prExtendedPictographic}, // E0.6 [1] (♠️) spade suit
+ {0x2661, 0x2662, prExtendedPictographic}, // E0.0 [2] (♡..♢) WHITE HEART SUIT..WHITE DIAMOND SUIT
+ {0x2663, 0x2663, prExtendedPictographic}, // E0.6 [1] (♣️) club suit
+ {0x2664, 0x2664, prExtendedPictographic}, // E0.0 [1] (♤) WHITE SPADE SUIT
+ {0x2665, 0x2666, prExtendedPictographic}, // E0.6 [2] (♥️..♦️) heart suit..diamond suit
+ {0x2667, 0x2667, prExtendedPictographic}, // E0.0 [1] (♧) WHITE CLUB SUIT
+ {0x2668, 0x2668, prExtendedPictographic}, // E0.6 [1] (♨️) hot springs
+ {0x2669, 0x267A, prExtendedPictographic}, // E0.0 [18] (♩..♺) QUARTER NOTE..RECYCLING SYMBOL FOR GENERIC MATERIALS
+ {0x267B, 0x267B, prExtendedPictographic}, // E0.6 [1] (♻️) recycling symbol
+ {0x267C, 0x267D, prExtendedPictographic}, // E0.0 [2] (♼..♽) RECYCLED PAPER SYMBOL..PARTIALLY-RECYCLED PAPER SYMBOL
+ {0x267E, 0x267E, prExtendedPictographic}, // E11.0 [1] (♾️) infinity
+ {0x267F, 0x267F, prExtendedPictographic}, // E0.6 [1] (♿) wheelchair symbol
+ {0x2680, 0x2685, prExtendedPictographic}, // E0.0 [6] (⚀..⚅) DIE FACE-1..DIE FACE-6
+ {0x2690, 0x2691, prExtendedPictographic}, // E0.0 [2] (⚐..⚑) WHITE FLAG..BLACK FLAG
+ {0x2692, 0x2692, prExtendedPictographic}, // E1.0 [1] (⚒️) hammer and pick
+ {0x2693, 0x2693, prExtendedPictographic}, // E0.6 [1] (⚓) anchor
+ {0x2694, 0x2694, prExtendedPictographic}, // E1.0 [1] (⚔️) crossed swords
+ {0x2695, 0x2695, prExtendedPictographic}, // E4.0 [1] (⚕️) medical symbol
+ {0x2696, 0x2697, prExtendedPictographic}, // E1.0 [2] (⚖️..⚗️) balance scale..alembic
+ {0x2698, 0x2698, prExtendedPictographic}, // E0.0 [1] (⚘) FLOWER
+ {0x2699, 0x2699, prExtendedPictographic}, // E1.0 [1] (⚙️) gear
+ {0x269A, 0x269A, prExtendedPictographic}, // E0.0 [1] (⚚) STAFF OF HERMES
+ {0x269B, 0x269C, prExtendedPictographic}, // E1.0 [2] (⚛️..⚜️) atom symbol..fleur-de-lis
+ {0x269D, 0x269F, prExtendedPictographic}, // E0.0 [3] (⚝..⚟) OUTLINED WHITE STAR..THREE LINES CONVERGING LEFT
+ {0x26A0, 0x26A1, prExtendedPictographic}, // E0.6 [2] (⚠️..⚡) warning..high voltage
+ {0x26A2, 0x26A6, prExtendedPictographic}, // E0.0 [5] (⚢..⚦) DOUBLED FEMALE SIGN..MALE WITH STROKE SIGN
+ {0x26A7, 0x26A7, prExtendedPictographic}, // E13.0 [1] (⚧️) transgender symbol
+ {0x26A8, 0x26A9, prExtendedPictographic}, // E0.0 [2] (⚨..⚩) VERTICAL MALE WITH STROKE SIGN..HORIZONTAL MALE WITH STROKE SIGN
+ {0x26AA, 0x26AB, prExtendedPictographic}, // E0.6 [2] (⚪..⚫) white circle..black circle
+ {0x26AC, 0x26AF, prExtendedPictographic}, // E0.0 [4] (⚬..⚯) MEDIUM SMALL WHITE CIRCLE..UNMARRIED PARTNERSHIP SYMBOL
+ {0x26B0, 0x26B1, prExtendedPictographic}, // E1.0 [2] (⚰️..⚱️) coffin..funeral urn
+ {0x26B2, 0x26BC, prExtendedPictographic}, // E0.0 [11] (⚲..⚼) NEUTER..SESQUIQUADRATE
+ {0x26BD, 0x26BE, prExtendedPictographic}, // E0.6 [2] (⚽..⚾) soccer ball..baseball
+ {0x26BF, 0x26C3, prExtendedPictographic}, // E0.0 [5] (⚿..⛃) SQUARED KEY..BLACK DRAUGHTS KING
+ {0x26C4, 0x26C5, prExtendedPictographic}, // E0.6 [2] (⛄..⛅) snowman without snow..sun behind cloud
+ {0x26C6, 0x26C7, prExtendedPictographic}, // E0.0 [2] (⛆..⛇) RAIN..BLACK SNOWMAN
+ {0x26C8, 0x26C8, prExtendedPictographic}, // E0.7 [1] (⛈️) cloud with lightning and rain
+ {0x26C9, 0x26CD, prExtendedPictographic}, // E0.0 [5] (⛉..⛍) TURNED WHITE SHOGI PIECE..DISABLED CAR
+ {0x26CE, 0x26CE, prExtendedPictographic}, // E0.6 [1] (⛎) Ophiuchus
+ {0x26CF, 0x26CF, prExtendedPictographic}, // E0.7 [1] (⛏️) pick
+ {0x26D0, 0x26D0, prExtendedPictographic}, // E0.0 [1] (⛐) CAR SLIDING
+ {0x26D1, 0x26D1, prExtendedPictographic}, // E0.7 [1] (⛑️) rescue worker’s helmet
+ {0x26D2, 0x26D2, prExtendedPictographic}, // E0.0 [1] (⛒) CIRCLED CROSSING LANES
+ {0x26D3, 0x26D3, prExtendedPictographic}, // E0.7 [1] (⛓️) chains
+ {0x26D4, 0x26D4, prExtendedPictographic}, // E0.6 [1] (⛔) no entry
+ {0x26D5, 0x26E8, prExtendedPictographic}, // E0.0 [20] (⛕..⛨) ALTERNATE ONE-WAY LEFT WAY TRAFFIC..BLACK CROSS ON SHIELD
+ {0x26E9, 0x26E9, prExtendedPictographic}, // E0.7 [1] (⛩️) shinto shrine
+ {0x26EA, 0x26EA, prExtendedPictographic}, // E0.6 [1] (⛪) church
+ {0x26EB, 0x26EF, prExtendedPictographic}, // E0.0 [5] (⛫..⛯) CASTLE..MAP SYMBOL FOR LIGHTHOUSE
+ {0x26F0, 0x26F1, prExtendedPictographic}, // E0.7 [2] (⛰️..⛱️) mountain..umbrella on ground
+ {0x26F2, 0x26F3, prExtendedPictographic}, // E0.6 [2] (⛲..⛳) fountain..flag in hole
+ {0x26F4, 0x26F4, prExtendedPictographic}, // E0.7 [1] (⛴️) ferry
+ {0x26F5, 0x26F5, prExtendedPictographic}, // E0.6 [1] (⛵) sailboat
+ {0x26F6, 0x26F6, prExtendedPictographic}, // E0.0 [1] (⛶) SQUARE FOUR CORNERS
+ {0x26F7, 0x26F9, prExtendedPictographic}, // E0.7 [3] (⛷️..⛹️) skier..person bouncing ball
+ {0x26FA, 0x26FA, prExtendedPictographic}, // E0.6 [1] (⛺) tent
+ {0x26FB, 0x26FC, prExtendedPictographic}, // E0.0 [2] (⛻..⛼) JAPANESE BANK SYMBOL..HEADSTONE GRAVEYARD SYMBOL
+ {0x26FD, 0x26FD, prExtendedPictographic}, // E0.6 [1] (⛽) fuel pump
+ {0x26FE, 0x2701, prExtendedPictographic}, // E0.0 [4] (⛾..✁) CUP ON BLACK SQUARE..UPPER BLADE SCISSORS
+ {0x2702, 0x2702, prExtendedPictographic}, // E0.6 [1] (✂️) scissors
+ {0x2703, 0x2704, prExtendedPictographic}, // E0.0 [2] (✃..✄) LOWER BLADE SCISSORS..WHITE SCISSORS
+ {0x2705, 0x2705, prExtendedPictographic}, // E0.6 [1] (✅) check mark button
+ {0x2708, 0x270C, prExtendedPictographic}, // E0.6 [5] (✈️..✌️) airplane..victory hand
+ {0x270D, 0x270D, prExtendedPictographic}, // E0.7 [1] (✍️) writing hand
+ {0x270E, 0x270E, prExtendedPictographic}, // E0.0 [1] (✎) LOWER RIGHT PENCIL
+ {0x270F, 0x270F, prExtendedPictographic}, // E0.6 [1] (✏️) pencil
+ {0x2710, 0x2711, prExtendedPictographic}, // E0.0 [2] (✐..✑) UPPER RIGHT PENCIL..WHITE NIB
+ {0x2712, 0x2712, prExtendedPictographic}, // E0.6 [1] (✒️) black nib
+ {0x2714, 0x2714, prExtendedPictographic}, // E0.6 [1] (✔️) check mark
+ {0x2716, 0x2716, prExtendedPictographic}, // E0.6 [1] (✖️) multiply
+ {0x271D, 0x271D, prExtendedPictographic}, // E0.7 [1] (✝️) latin cross
+ {0x2721, 0x2721, prExtendedPictographic}, // E0.7 [1] (✡️) star of David
+ {0x2728, 0x2728, prExtendedPictographic}, // E0.6 [1] (✨) sparkles
+ {0x2733, 0x2734, prExtendedPictographic}, // E0.6 [2] (✳️..✴️) eight-spoked asterisk..eight-pointed star
+ {0x2744, 0x2744, prExtendedPictographic}, // E0.6 [1] (❄️) snowflake
+ {0x2747, 0x2747, prExtendedPictographic}, // E0.6 [1] (❇️) sparkle
+ {0x274C, 0x274C, prExtendedPictographic}, // E0.6 [1] (❌) cross mark
+ {0x274E, 0x274E, prExtendedPictographic}, // E0.6 [1] (❎) cross mark button
+ {0x2753, 0x2755, prExtendedPictographic}, // E0.6 [3] (❓..❕) red question mark..white exclamation mark
+ {0x2757, 0x2757, prExtendedPictographic}, // E0.6 [1] (❗) red exclamation mark
+ {0x2763, 0x2763, prExtendedPictographic}, // E1.0 [1] (❣️) heart exclamation
+ {0x2764, 0x2764, prExtendedPictographic}, // E0.6 [1] (❤️) red heart
+ {0x2765, 0x2767, prExtendedPictographic}, // E0.0 [3] (❥..❧) ROTATED HEAVY BLACK HEART BULLET..ROTATED FLORAL HEART BULLET
+ {0x2795, 0x2797, prExtendedPictographic}, // E0.6 [3] (➕..➗) plus..divide
+ {0x27A1, 0x27A1, prExtendedPictographic}, // E0.6 [1] (➡️) right arrow
+ {0x27B0, 0x27B0, prExtendedPictographic}, // E0.6 [1] (➰) curly loop
+ {0x27BF, 0x27BF, prExtendedPictographic}, // E1.0 [1] (➿) double curly loop
+ {0x2934, 0x2935, prExtendedPictographic}, // E0.6 [2] (⤴️..⤵️) right arrow curving up..right arrow curving down
+ {0x2B05, 0x2B07, prExtendedPictographic}, // E0.6 [3] (⬅️..⬇️) left arrow..down arrow
+ {0x2B1B, 0x2B1C, prExtendedPictographic}, // E0.6 [2] (⬛..⬜) black large square..white large square
+ {0x2B50, 0x2B50, prExtendedPictographic}, // E0.6 [1] (⭐) star
+ {0x2B55, 0x2B55, prExtendedPictographic}, // E0.6 [1] (⭕) hollow red circle
+ {0x2CEF, 0x2CF1, prExtend}, // Mn [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS
+ {0x2D7F, 0x2D7F, prExtend}, // Mn TIFINAGH CONSONANT JOINER
+ {0x2DE0, 0x2DFF, prExtend}, // Mn [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS
+ {0x302A, 0x302D, prExtend}, // Mn [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK
+ {0x302E, 0x302F, prExtend}, // Mc [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK
+ {0x3030, 0x3030, prExtendedPictographic}, // E0.6 [1] (〰️) wavy dash
+ {0x303D, 0x303D, prExtendedPictographic}, // E0.6 [1] (〽️) part alternation mark
+ {0x3099, 0x309A, prExtend}, // Mn [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x3297, 0x3297, prExtendedPictographic}, // E0.6 [1] (㊗️) Japanese “congratulations” button
+ {0x3299, 0x3299, prExtendedPictographic}, // E0.6 [1] (㊙️) Japanese “secret” button
+ {0xA66F, 0xA66F, prExtend}, // Mn COMBINING CYRILLIC VZMET
+ {0xA670, 0xA672, prExtend}, // Me [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN
+ {0xA674, 0xA67D, prExtend}, // Mn [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK
+ {0xA69E, 0xA69F, prExtend}, // Mn [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E
+ {0xA6F0, 0xA6F1, prExtend}, // Mn [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS
+ {0xA802, 0xA802, prExtend}, // Mn SYLOTI NAGRI SIGN DVISVARA
+ {0xA806, 0xA806, prExtend}, // Mn SYLOTI NAGRI SIGN HASANTA
+ {0xA80B, 0xA80B, prExtend}, // Mn SYLOTI NAGRI SIGN ANUSVARA
+ {0xA823, 0xA824, prSpacingMark}, // Mc [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I
+ {0xA825, 0xA826, prExtend}, // Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E
+ {0xA827, 0xA827, prSpacingMark}, // Mc SYLOTI NAGRI VOWEL SIGN OO
+ {0xA82C, 0xA82C, prExtend}, // Mn SYLOTI NAGRI SIGN ALTERNATE HASANTA
+ {0xA880, 0xA881, prSpacingMark}, // Mc [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA
+ {0xA8B4, 0xA8C3, prSpacingMark}, // Mc [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU
+ {0xA8C4, 0xA8C5, prExtend}, // Mn [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU
+ {0xA8E0, 0xA8F1, prExtend}, // Mn [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA
+ {0xA8FF, 0xA8FF, prExtend}, // Mn DEVANAGARI VOWEL SIGN AY
+ {0xA926, 0xA92D, prExtend}, // Mn [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU
+ {0xA947, 0xA951, prExtend}, // Mn [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R
+ {0xA952, 0xA953, prSpacingMark}, // Mc [2] REJANG CONSONANT SIGN H..REJANG VIRAMA
+ {0xA960, 0xA97C, prL}, // Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH
+ {0xA980, 0xA982, prExtend}, // Mn [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR
+ {0xA983, 0xA983, prSpacingMark}, // Mc JAVANESE SIGN WIGNYAN
+ {0xA9B3, 0xA9B3, prExtend}, // Mn JAVANESE SIGN CECAK TELU
+ {0xA9B4, 0xA9B5, prSpacingMark}, // Mc [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG
+ {0xA9B6, 0xA9B9, prExtend}, // Mn [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT
+ {0xA9BA, 0xA9BB, prSpacingMark}, // Mc [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE
+ {0xA9BC, 0xA9BD, prExtend}, // Mn [2] JAVANESE VOWEL SIGN PEPET..JAVANESE CONSONANT SIGN KERET
+ {0xA9BE, 0xA9C0, prSpacingMark}, // Mc [3] JAVANESE CONSONANT SIGN PENGKAL..JAVANESE PANGKON
+ {0xA9E5, 0xA9E5, prExtend}, // Mn MYANMAR SIGN SHAN SAW
+ {0xAA29, 0xAA2E, prExtend}, // Mn [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE
+ {0xAA2F, 0xAA30, prSpacingMark}, // Mc [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
+ {0xAA31, 0xAA32, prExtend}, // Mn [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE
+ {0xAA33, 0xAA34, prSpacingMark}, // Mc [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA
+ {0xAA35, 0xAA36, prExtend}, // Mn [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA
+ {0xAA43, 0xAA43, prExtend}, // Mn CHAM CONSONANT SIGN FINAL NG
+ {0xAA4C, 0xAA4C, prExtend}, // Mn CHAM CONSONANT SIGN FINAL M
+ {0xAA4D, 0xAA4D, prSpacingMark}, // Mc CHAM CONSONANT SIGN FINAL H
+ {0xAA7C, 0xAA7C, prExtend}, // Mn MYANMAR SIGN TAI LAING TONE-2
+ {0xAAB0, 0xAAB0, prExtend}, // Mn TAI VIET MAI KANG
+ {0xAAB2, 0xAAB4, prExtend}, // Mn [3] TAI VIET VOWEL I..TAI VIET VOWEL U
+ {0xAAB7, 0xAAB8, prExtend}, // Mn [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
+ {0xAABE, 0xAABF, prExtend}, // Mn [2] TAI VIET VOWEL AM..TAI VIET TONE MAI EK
+ {0xAAC1, 0xAAC1, prExtend}, // Mn TAI VIET TONE MAI THO
+ {0xAAEB, 0xAAEB, prSpacingMark}, // Mc MEETEI MAYEK VOWEL SIGN II
+ {0xAAEC, 0xAAED, prExtend}, // Mn [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI
+ {0xAAEE, 0xAAEF, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU
+ {0xAAF5, 0xAAF5, prSpacingMark}, // Mc MEETEI MAYEK VOWEL SIGN VISARGA
+ {0xAAF6, 0xAAF6, prExtend}, // Mn MEETEI MAYEK VIRAMA
+ {0xABE3, 0xABE4, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
+ {0xABE5, 0xABE5, prExtend}, // Mn MEETEI MAYEK VOWEL SIGN ANAP
+ {0xABE6, 0xABE7, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP
+ {0xABE8, 0xABE8, prExtend}, // Mn MEETEI MAYEK VOWEL SIGN UNAP
+ {0xABE9, 0xABEA, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG
+ {0xABEC, 0xABEC, prSpacingMark}, // Mc MEETEI MAYEK LUM IYEK
+ {0xABED, 0xABED, prExtend}, // Mn MEETEI MAYEK APUN IYEK
+ {0xAC00, 0xAC00, prLV}, // Lo HANGUL SYLLABLE GA
+ {0xAC01, 0xAC1B, prLVT}, // Lo [27] HANGUL SYLLABLE GAG..HANGUL SYLLABLE GAH
+ {0xAC1C, 0xAC1C, prLV}, // Lo HANGUL SYLLABLE GAE
+ {0xAC1D, 0xAC37, prLVT}, // Lo [27] HANGUL SYLLABLE GAEG..HANGUL SYLLABLE GAEH
+ {0xAC38, 0xAC38, prLV}, // Lo HANGUL SYLLABLE GYA
+ {0xAC39, 0xAC53, prLVT}, // Lo [27] HANGUL SYLLABLE GYAG..HANGUL SYLLABLE GYAH
+ {0xAC54, 0xAC54, prLV}, // Lo HANGUL SYLLABLE GYAE
+ {0xAC55, 0xAC6F, prLVT}, // Lo [27] HANGUL SYLLABLE GYAEG..HANGUL SYLLABLE GYAEH
+ {0xAC70, 0xAC70, prLV}, // Lo HANGUL SYLLABLE GEO
+ {0xAC71, 0xAC8B, prLVT}, // Lo [27] HANGUL SYLLABLE GEOG..HANGUL SYLLABLE GEOH
+ {0xAC8C, 0xAC8C, prLV}, // Lo HANGUL SYLLABLE GE
+ {0xAC8D, 0xACA7, prLVT}, // Lo [27] HANGUL SYLLABLE GEG..HANGUL SYLLABLE GEH
+ {0xACA8, 0xACA8, prLV}, // Lo HANGUL SYLLABLE GYEO
+ {0xACA9, 0xACC3, prLVT}, // Lo [27] HANGUL SYLLABLE GYEOG..HANGUL SYLLABLE GYEOH
+ {0xACC4, 0xACC4, prLV}, // Lo HANGUL SYLLABLE GYE
+ {0xACC5, 0xACDF, prLVT}, // Lo [27] HANGUL SYLLABLE GYEG..HANGUL SYLLABLE GYEH
+ {0xACE0, 0xACE0, prLV}, // Lo HANGUL SYLLABLE GO
+ {0xACE1, 0xACFB, prLVT}, // Lo [27] HANGUL SYLLABLE GOG..HANGUL SYLLABLE GOH
+ {0xACFC, 0xACFC, prLV}, // Lo HANGUL SYLLABLE GWA
+ {0xACFD, 0xAD17, prLVT}, // Lo [27] HANGUL SYLLABLE GWAG..HANGUL SYLLABLE GWAH
+ {0xAD18, 0xAD18, prLV}, // Lo HANGUL SYLLABLE GWAE
+ {0xAD19, 0xAD33, prLVT}, // Lo [27] HANGUL SYLLABLE GWAEG..HANGUL SYLLABLE GWAEH
+ {0xAD34, 0xAD34, prLV}, // Lo HANGUL SYLLABLE GOE
+ {0xAD35, 0xAD4F, prLVT}, // Lo [27] HANGUL SYLLABLE GOEG..HANGUL SYLLABLE GOEH
+ {0xAD50, 0xAD50, prLV}, // Lo HANGUL SYLLABLE GYO
+ {0xAD51, 0xAD6B, prLVT}, // Lo [27] HANGUL SYLLABLE GYOG..HANGUL SYLLABLE GYOH
+ {0xAD6C, 0xAD6C, prLV}, // Lo HANGUL SYLLABLE GU
+ {0xAD6D, 0xAD87, prLVT}, // Lo [27] HANGUL SYLLABLE GUG..HANGUL SYLLABLE GUH
+ {0xAD88, 0xAD88, prLV}, // Lo HANGUL SYLLABLE GWEO
+ {0xAD89, 0xADA3, prLVT}, // Lo [27] HANGUL SYLLABLE GWEOG..HANGUL SYLLABLE GWEOH
+ {0xADA4, 0xADA4, prLV}, // Lo HANGUL SYLLABLE GWE
+ {0xADA5, 0xADBF, prLVT}, // Lo [27] HANGUL SYLLABLE GWEG..HANGUL SYLLABLE GWEH
+ {0xADC0, 0xADC0, prLV}, // Lo HANGUL SYLLABLE GWI
+ {0xADC1, 0xADDB, prLVT}, // Lo [27] HANGUL SYLLABLE GWIG..HANGUL SYLLABLE GWIH
+ {0xADDC, 0xADDC, prLV}, // Lo HANGUL SYLLABLE GYU
+ {0xADDD, 0xADF7, prLVT}, // Lo [27] HANGUL SYLLABLE GYUG..HANGUL SYLLABLE GYUH
+ {0xADF8, 0xADF8, prLV}, // Lo HANGUL SYLLABLE GEU
+ {0xADF9, 0xAE13, prLVT}, // Lo [27] HANGUL SYLLABLE GEUG..HANGUL SYLLABLE GEUH
+ {0xAE14, 0xAE14, prLV}, // Lo HANGUL SYLLABLE GYI
+ {0xAE15, 0xAE2F, prLVT}, // Lo [27] HANGUL SYLLABLE GYIG..HANGUL SYLLABLE GYIH
+ {0xAE30, 0xAE30, prLV}, // Lo HANGUL SYLLABLE GI
+ {0xAE31, 0xAE4B, prLVT}, // Lo [27] HANGUL SYLLABLE GIG..HANGUL SYLLABLE GIH
+ {0xAE4C, 0xAE4C, prLV}, // Lo HANGUL SYLLABLE GGA
+ {0xAE4D, 0xAE67, prLVT}, // Lo [27] HANGUL SYLLABLE GGAG..HANGUL SYLLABLE GGAH
+ {0xAE68, 0xAE68, prLV}, // Lo HANGUL SYLLABLE GGAE
+ {0xAE69, 0xAE83, prLVT}, // Lo [27] HANGUL SYLLABLE GGAEG..HANGUL SYLLABLE GGAEH
+ {0xAE84, 0xAE84, prLV}, // Lo HANGUL SYLLABLE GGYA
+ {0xAE85, 0xAE9F, prLVT}, // Lo [27] HANGUL SYLLABLE GGYAG..HANGUL SYLLABLE GGYAH
+ {0xAEA0, 0xAEA0, prLV}, // Lo HANGUL SYLLABLE GGYAE
+ {0xAEA1, 0xAEBB, prLVT}, // Lo [27] HANGUL SYLLABLE GGYAEG..HANGUL SYLLABLE GGYAEH
+ {0xAEBC, 0xAEBC, prLV}, // Lo HANGUL SYLLABLE GGEO
+ {0xAEBD, 0xAED7, prLVT}, // Lo [27] HANGUL SYLLABLE GGEOG..HANGUL SYLLABLE GGEOH
+ {0xAED8, 0xAED8, prLV}, // Lo HANGUL SYLLABLE GGE
+ {0xAED9, 0xAEF3, prLVT}, // Lo [27] HANGUL SYLLABLE GGEG..HANGUL SYLLABLE GGEH
+ {0xAEF4, 0xAEF4, prLV}, // Lo HANGUL SYLLABLE GGYEO
+ {0xAEF5, 0xAF0F, prLVT}, // Lo [27] HANGUL SYLLABLE GGYEOG..HANGUL SYLLABLE GGYEOH
+ {0xAF10, 0xAF10, prLV}, // Lo HANGUL SYLLABLE GGYE
+ {0xAF11, 0xAF2B, prLVT}, // Lo [27] HANGUL SYLLABLE GGYEG..HANGUL SYLLABLE GGYEH
+ {0xAF2C, 0xAF2C, prLV}, // Lo HANGUL SYLLABLE GGO
+ {0xAF2D, 0xAF47, prLVT}, // Lo [27] HANGUL SYLLABLE GGOG..HANGUL SYLLABLE GGOH
+ {0xAF48, 0xAF48, prLV}, // Lo HANGUL SYLLABLE GGWA
+ {0xAF49, 0xAF63, prLVT}, // Lo [27] HANGUL SYLLABLE GGWAG..HANGUL SYLLABLE GGWAH
+ {0xAF64, 0xAF64, prLV}, // Lo HANGUL SYLLABLE GGWAE
+ {0xAF65, 0xAF7F, prLVT}, // Lo [27] HANGUL SYLLABLE GGWAEG..HANGUL SYLLABLE GGWAEH
+ {0xAF80, 0xAF80, prLV}, // Lo HANGUL SYLLABLE GGOE
+ {0xAF81, 0xAF9B, prLVT}, // Lo [27] HANGUL SYLLABLE GGOEG..HANGUL SYLLABLE GGOEH
+ {0xAF9C, 0xAF9C, prLV}, // Lo HANGUL SYLLABLE GGYO
+ {0xAF9D, 0xAFB7, prLVT}, // Lo [27] HANGUL SYLLABLE GGYOG..HANGUL SYLLABLE GGYOH
+ {0xAFB8, 0xAFB8, prLV}, // Lo HANGUL SYLLABLE GGU
+ {0xAFB9, 0xAFD3, prLVT}, // Lo [27] HANGUL SYLLABLE GGUG..HANGUL SYLLABLE GGUH
+ {0xAFD4, 0xAFD4, prLV}, // Lo HANGUL SYLLABLE GGWEO
+ {0xAFD5, 0xAFEF, prLVT}, // Lo [27] HANGUL SYLLABLE GGWEOG..HANGUL SYLLABLE GGWEOH
+ {0xAFF0, 0xAFF0, prLV}, // Lo HANGUL SYLLABLE GGWE
+ {0xAFF1, 0xB00B, prLVT}, // Lo [27] HANGUL SYLLABLE GGWEG..HANGUL SYLLABLE GGWEH
+ {0xB00C, 0xB00C, prLV}, // Lo HANGUL SYLLABLE GGWI
+ {0xB00D, 0xB027, prLVT}, // Lo [27] HANGUL SYLLABLE GGWIG..HANGUL SYLLABLE GGWIH
+ {0xB028, 0xB028, prLV}, // Lo HANGUL SYLLABLE GGYU
+ {0xB029, 0xB043, prLVT}, // Lo [27] HANGUL SYLLABLE GGYUG..HANGUL SYLLABLE GGYUH
+ {0xB044, 0xB044, prLV}, // Lo HANGUL SYLLABLE GGEU
+ {0xB045, 0xB05F, prLVT}, // Lo [27] HANGUL SYLLABLE GGEUG..HANGUL SYLLABLE GGEUH
+ {0xB060, 0xB060, prLV}, // Lo HANGUL SYLLABLE GGYI
+ {0xB061, 0xB07B, prLVT}, // Lo [27] HANGUL SYLLABLE GGYIG..HANGUL SYLLABLE GGYIH
+ {0xB07C, 0xB07C, prLV}, // Lo HANGUL SYLLABLE GGI
+ {0xB07D, 0xB097, prLVT}, // Lo [27] HANGUL SYLLABLE GGIG..HANGUL SYLLABLE GGIH
+ {0xB098, 0xB098, prLV}, // Lo HANGUL SYLLABLE NA
+ {0xB099, 0xB0B3, prLVT}, // Lo [27] HANGUL SYLLABLE NAG..HANGUL SYLLABLE NAH
+ {0xB0B4, 0xB0B4, prLV}, // Lo HANGUL SYLLABLE NAE
+ {0xB0B5, 0xB0CF, prLVT}, // Lo [27] HANGUL SYLLABLE NAEG..HANGUL SYLLABLE NAEH
+ {0xB0D0, 0xB0D0, prLV}, // Lo HANGUL SYLLABLE NYA
+ {0xB0D1, 0xB0EB, prLVT}, // Lo [27] HANGUL SYLLABLE NYAG..HANGUL SYLLABLE NYAH
+ {0xB0EC, 0xB0EC, prLV}, // Lo HANGUL SYLLABLE NYAE
+ {0xB0ED, 0xB107, prLVT}, // Lo [27] HANGUL SYLLABLE NYAEG..HANGUL SYLLABLE NYAEH
+ {0xB108, 0xB108, prLV}, // Lo HANGUL SYLLABLE NEO
+ {0xB109, 0xB123, prLVT}, // Lo [27] HANGUL SYLLABLE NEOG..HANGUL SYLLABLE NEOH
+ {0xB124, 0xB124, prLV}, // Lo HANGUL SYLLABLE NE
+ {0xB125, 0xB13F, prLVT}, // Lo [27] HANGUL SYLLABLE NEG..HANGUL SYLLABLE NEH
+ {0xB140, 0xB140, prLV}, // Lo HANGUL SYLLABLE NYEO
+ {0xB141, 0xB15B, prLVT}, // Lo [27] HANGUL SYLLABLE NYEOG..HANGUL SYLLABLE NYEOH
+ {0xB15C, 0xB15C, prLV}, // Lo HANGUL SYLLABLE NYE
+ {0xB15D, 0xB177, prLVT}, // Lo [27] HANGUL SYLLABLE NYEG..HANGUL SYLLABLE NYEH
+ {0xB178, 0xB178, prLV}, // Lo HANGUL SYLLABLE NO
+ {0xB179, 0xB193, prLVT}, // Lo [27] HANGUL SYLLABLE NOG..HANGUL SYLLABLE NOH
+ {0xB194, 0xB194, prLV}, // Lo HANGUL SYLLABLE NWA
+ {0xB195, 0xB1AF, prLVT}, // Lo [27] HANGUL SYLLABLE NWAG..HANGUL SYLLABLE NWAH
+ {0xB1B0, 0xB1B0, prLV}, // Lo HANGUL SYLLABLE NWAE
+ {0xB1B1, 0xB1CB, prLVT}, // Lo [27] HANGUL SYLLABLE NWAEG..HANGUL SYLLABLE NWAEH
+ {0xB1CC, 0xB1CC, prLV}, // Lo HANGUL SYLLABLE NOE
+ {0xB1CD, 0xB1E7, prLVT}, // Lo [27] HANGUL SYLLABLE NOEG..HANGUL SYLLABLE NOEH
+ {0xB1E8, 0xB1E8, prLV}, // Lo HANGUL SYLLABLE NYO
+ {0xB1E9, 0xB203, prLVT}, // Lo [27] HANGUL SYLLABLE NYOG..HANGUL SYLLABLE NYOH
+ {0xB204, 0xB204, prLV}, // Lo HANGUL SYLLABLE NU
+ {0xB205, 0xB21F, prLVT}, // Lo [27] HANGUL SYLLABLE NUG..HANGUL SYLLABLE NUH
+ {0xB220, 0xB220, prLV}, // Lo HANGUL SYLLABLE NWEO
+ {0xB221, 0xB23B, prLVT}, // Lo [27] HANGUL SYLLABLE NWEOG..HANGUL SYLLABLE NWEOH
+ {0xB23C, 0xB23C, prLV}, // Lo HANGUL SYLLABLE NWE
+ {0xB23D, 0xB257, prLVT}, // Lo [27] HANGUL SYLLABLE NWEG..HANGUL SYLLABLE NWEH
+ {0xB258, 0xB258, prLV}, // Lo HANGUL SYLLABLE NWI
+ {0xB259, 0xB273, prLVT}, // Lo [27] HANGUL SYLLABLE NWIG..HANGUL SYLLABLE NWIH
+ {0xB274, 0xB274, prLV}, // Lo HANGUL SYLLABLE NYU
+ {0xB275, 0xB28F, prLVT}, // Lo [27] HANGUL SYLLABLE NYUG..HANGUL SYLLABLE NYUH
+ {0xB290, 0xB290, prLV}, // Lo HANGUL SYLLABLE NEU
+ {0xB291, 0xB2AB, prLVT}, // Lo [27] HANGUL SYLLABLE NEUG..HANGUL SYLLABLE NEUH
+ {0xB2AC, 0xB2AC, prLV}, // Lo HANGUL SYLLABLE NYI
+ {0xB2AD, 0xB2C7, prLVT}, // Lo [27] HANGUL SYLLABLE NYIG..HANGUL SYLLABLE NYIH
+ {0xB2C8, 0xB2C8, prLV}, // Lo HANGUL SYLLABLE NI
+ {0xB2C9, 0xB2E3, prLVT}, // Lo [27] HANGUL SYLLABLE NIG..HANGUL SYLLABLE NIH
+ {0xB2E4, 0xB2E4, prLV}, // Lo HANGUL SYLLABLE DA
+ {0xB2E5, 0xB2FF, prLVT}, // Lo [27] HANGUL SYLLABLE DAG..HANGUL SYLLABLE DAH
+ {0xB300, 0xB300, prLV}, // Lo HANGUL SYLLABLE DAE
+ {0xB301, 0xB31B, prLVT}, // Lo [27] HANGUL SYLLABLE DAEG..HANGUL SYLLABLE DAEH
+ {0xB31C, 0xB31C, prLV}, // Lo HANGUL SYLLABLE DYA
+ {0xB31D, 0xB337, prLVT}, // Lo [27] HANGUL SYLLABLE DYAG..HANGUL SYLLABLE DYAH
+ {0xB338, 0xB338, prLV}, // Lo HANGUL SYLLABLE DYAE
+ {0xB339, 0xB353, prLVT}, // Lo [27] HANGUL SYLLABLE DYAEG..HANGUL SYLLABLE DYAEH
+ {0xB354, 0xB354, prLV}, // Lo HANGUL SYLLABLE DEO
+ {0xB355, 0xB36F, prLVT}, // Lo [27] HANGUL SYLLABLE DEOG..HANGUL SYLLABLE DEOH
+ {0xB370, 0xB370, prLV}, // Lo HANGUL SYLLABLE DE
+ {0xB371, 0xB38B, prLVT}, // Lo [27] HANGUL SYLLABLE DEG..HANGUL SYLLABLE DEH
+ {0xB38C, 0xB38C, prLV}, // Lo HANGUL SYLLABLE DYEO
+ {0xB38D, 0xB3A7, prLVT}, // Lo [27] HANGUL SYLLABLE DYEOG..HANGUL SYLLABLE DYEOH
+ {0xB3A8, 0xB3A8, prLV}, // Lo HANGUL SYLLABLE DYE
+ {0xB3A9, 0xB3C3, prLVT}, // Lo [27] HANGUL SYLLABLE DYEG..HANGUL SYLLABLE DYEH
+ {0xB3C4, 0xB3C4, prLV}, // Lo HANGUL SYLLABLE DO
+ {0xB3C5, 0xB3DF, prLVT}, // Lo [27] HANGUL SYLLABLE DOG..HANGUL SYLLABLE DOH
+ {0xB3E0, 0xB3E0, prLV}, // Lo HANGUL SYLLABLE DWA
+ {0xB3E1, 0xB3FB, prLVT}, // Lo [27] HANGUL SYLLABLE DWAG..HANGUL SYLLABLE DWAH
+ {0xB3FC, 0xB3FC, prLV}, // Lo HANGUL SYLLABLE DWAE
+ {0xB3FD, 0xB417, prLVT}, // Lo [27] HANGUL SYLLABLE DWAEG..HANGUL SYLLABLE DWAEH
+ {0xB418, 0xB418, prLV}, // Lo HANGUL SYLLABLE DOE
+ {0xB419, 0xB433, prLVT}, // Lo [27] HANGUL SYLLABLE DOEG..HANGUL SYLLABLE DOEH
+ {0xB434, 0xB434, prLV}, // Lo HANGUL SYLLABLE DYO
+ {0xB435, 0xB44F, prLVT}, // Lo [27] HANGUL SYLLABLE DYOG..HANGUL SYLLABLE DYOH
+ {0xB450, 0xB450, prLV}, // Lo HANGUL SYLLABLE DU
+ {0xB451, 0xB46B, prLVT}, // Lo [27] HANGUL SYLLABLE DUG..HANGUL SYLLABLE DUH
+ {0xB46C, 0xB46C, prLV}, // Lo HANGUL SYLLABLE DWEO
+ {0xB46D, 0xB487, prLVT}, // Lo [27] HANGUL SYLLABLE DWEOG..HANGUL SYLLABLE DWEOH
+ {0xB488, 0xB488, prLV}, // Lo HANGUL SYLLABLE DWE
+ {0xB489, 0xB4A3, prLVT}, // Lo [27] HANGUL SYLLABLE DWEG..HANGUL SYLLABLE DWEH
+ {0xB4A4, 0xB4A4, prLV}, // Lo HANGUL SYLLABLE DWI
+ {0xB4A5, 0xB4BF, prLVT}, // Lo [27] HANGUL SYLLABLE DWIG..HANGUL SYLLABLE DWIH
+ {0xB4C0, 0xB4C0, prLV}, // Lo HANGUL SYLLABLE DYU
+ {0xB4C1, 0xB4DB, prLVT}, // Lo [27] HANGUL SYLLABLE DYUG..HANGUL SYLLABLE DYUH
+ {0xB4DC, 0xB4DC, prLV}, // Lo HANGUL SYLLABLE DEU
+ {0xB4DD, 0xB4F7, prLVT}, // Lo [27] HANGUL SYLLABLE DEUG..HANGUL SYLLABLE DEUH
+ {0xB4F8, 0xB4F8, prLV}, // Lo HANGUL SYLLABLE DYI
+ {0xB4F9, 0xB513, prLVT}, // Lo [27] HANGUL SYLLABLE DYIG..HANGUL SYLLABLE DYIH
+ {0xB514, 0xB514, prLV}, // Lo HANGUL SYLLABLE DI
+ {0xB515, 0xB52F, prLVT}, // Lo [27] HANGUL SYLLABLE DIG..HANGUL SYLLABLE DIH
+ {0xB530, 0xB530, prLV}, // Lo HANGUL SYLLABLE DDA
+ {0xB531, 0xB54B, prLVT}, // Lo [27] HANGUL SYLLABLE DDAG..HANGUL SYLLABLE DDAH
+ {0xB54C, 0xB54C, prLV}, // Lo HANGUL SYLLABLE DDAE
+ {0xB54D, 0xB567, prLVT}, // Lo [27] HANGUL SYLLABLE DDAEG..HANGUL SYLLABLE DDAEH
+ {0xB568, 0xB568, prLV}, // Lo HANGUL SYLLABLE DDYA
+ {0xB569, 0xB583, prLVT}, // Lo [27] HANGUL SYLLABLE DDYAG..HANGUL SYLLABLE DDYAH
+ {0xB584, 0xB584, prLV}, // Lo HANGUL SYLLABLE DDYAE
+ {0xB585, 0xB59F, prLVT}, // Lo [27] HANGUL SYLLABLE DDYAEG..HANGUL SYLLABLE DDYAEH
+ {0xB5A0, 0xB5A0, prLV}, // Lo HANGUL SYLLABLE DDEO
+ {0xB5A1, 0xB5BB, prLVT}, // Lo [27] HANGUL SYLLABLE DDEOG..HANGUL SYLLABLE DDEOH
+ {0xB5BC, 0xB5BC, prLV}, // Lo HANGUL SYLLABLE DDE
+ {0xB5BD, 0xB5D7, prLVT}, // Lo [27] HANGUL SYLLABLE DDEG..HANGUL SYLLABLE DDEH
+ {0xB5D8, 0xB5D8, prLV}, // Lo HANGUL SYLLABLE DDYEO
+ {0xB5D9, 0xB5F3, prLVT}, // Lo [27] HANGUL SYLLABLE DDYEOG..HANGUL SYLLABLE DDYEOH
+ {0xB5F4, 0xB5F4, prLV}, // Lo HANGUL SYLLABLE DDYE
+ {0xB5F5, 0xB60F, prLVT}, // Lo [27] HANGUL SYLLABLE DDYEG..HANGUL SYLLABLE DDYEH
+ {0xB610, 0xB610, prLV}, // Lo HANGUL SYLLABLE DDO
+ {0xB611, 0xB62B, prLVT}, // Lo [27] HANGUL SYLLABLE DDOG..HANGUL SYLLABLE DDOH
+ {0xB62C, 0xB62C, prLV}, // Lo HANGUL SYLLABLE DDWA
+ {0xB62D, 0xB647, prLVT}, // Lo [27] HANGUL SYLLABLE DDWAG..HANGUL SYLLABLE DDWAH
+ {0xB648, 0xB648, prLV}, // Lo HANGUL SYLLABLE DDWAE
+ {0xB649, 0xB663, prLVT}, // Lo [27] HANGUL SYLLABLE DDWAEG..HANGUL SYLLABLE DDWAEH
+ {0xB664, 0xB664, prLV}, // Lo HANGUL SYLLABLE DDOE
+ {0xB665, 0xB67F, prLVT}, // Lo [27] HANGUL SYLLABLE DDOEG..HANGUL SYLLABLE DDOEH
+ {0xB680, 0xB680, prLV}, // Lo HANGUL SYLLABLE DDYO
+ {0xB681, 0xB69B, prLVT}, // Lo [27] HANGUL SYLLABLE DDYOG..HANGUL SYLLABLE DDYOH
+ {0xB69C, 0xB69C, prLV}, // Lo HANGUL SYLLABLE DDU
+ {0xB69D, 0xB6B7, prLVT}, // Lo [27] HANGUL SYLLABLE DDUG..HANGUL SYLLABLE DDUH
+ {0xB6B8, 0xB6B8, prLV}, // Lo HANGUL SYLLABLE DDWEO
+ {0xB6B9, 0xB6D3, prLVT}, // Lo [27] HANGUL SYLLABLE DDWEOG..HANGUL SYLLABLE DDWEOH
+ {0xB6D4, 0xB6D4, prLV}, // Lo HANGUL SYLLABLE DDWE
+ {0xB6D5, 0xB6EF, prLVT}, // Lo [27] HANGUL SYLLABLE DDWEG..HANGUL SYLLABLE DDWEH
+ {0xB6F0, 0xB6F0, prLV}, // Lo HANGUL SYLLABLE DDWI
+ {0xB6F1, 0xB70B, prLVT}, // Lo [27] HANGUL SYLLABLE DDWIG..HANGUL SYLLABLE DDWIH
+ {0xB70C, 0xB70C, prLV}, // Lo HANGUL SYLLABLE DDYU
+ {0xB70D, 0xB727, prLVT}, // Lo [27] HANGUL SYLLABLE DDYUG..HANGUL SYLLABLE DDYUH
+ {0xB728, 0xB728, prLV}, // Lo HANGUL SYLLABLE DDEU
+ {0xB729, 0xB743, prLVT}, // Lo [27] HANGUL SYLLABLE DDEUG..HANGUL SYLLABLE DDEUH
+ {0xB744, 0xB744, prLV}, // Lo HANGUL SYLLABLE DDYI
+ {0xB745, 0xB75F, prLVT}, // Lo [27] HANGUL SYLLABLE DDYIG..HANGUL SYLLABLE DDYIH
+ {0xB760, 0xB760, prLV}, // Lo HANGUL SYLLABLE DDI
+ {0xB761, 0xB77B, prLVT}, // Lo [27] HANGUL SYLLABLE DDIG..HANGUL SYLLABLE DDIH
+ {0xB77C, 0xB77C, prLV}, // Lo HANGUL SYLLABLE RA
+ {0xB77D, 0xB797, prLVT}, // Lo [27] HANGUL SYLLABLE RAG..HANGUL SYLLABLE RAH
+ {0xB798, 0xB798, prLV}, // Lo HANGUL SYLLABLE RAE
+ {0xB799, 0xB7B3, prLVT}, // Lo [27] HANGUL SYLLABLE RAEG..HANGUL SYLLABLE RAEH
+ {0xB7B4, 0xB7B4, prLV}, // Lo HANGUL SYLLABLE RYA
+ {0xB7B5, 0xB7CF, prLVT}, // Lo [27] HANGUL SYLLABLE RYAG..HANGUL SYLLABLE RYAH
+ {0xB7D0, 0xB7D0, prLV}, // Lo HANGUL SYLLABLE RYAE
+ {0xB7D1, 0xB7EB, prLVT}, // Lo [27] HANGUL SYLLABLE RYAEG..HANGUL SYLLABLE RYAEH
+ {0xB7EC, 0xB7EC, prLV}, // Lo HANGUL SYLLABLE REO
+ {0xB7ED, 0xB807, prLVT}, // Lo [27] HANGUL SYLLABLE REOG..HANGUL SYLLABLE REOH
+ {0xB808, 0xB808, prLV}, // Lo HANGUL SYLLABLE RE
+ {0xB809, 0xB823, prLVT}, // Lo [27] HANGUL SYLLABLE REG..HANGUL SYLLABLE REH
+ {0xB824, 0xB824, prLV}, // Lo HANGUL SYLLABLE RYEO
+ {0xB825, 0xB83F, prLVT}, // Lo [27] HANGUL SYLLABLE RYEOG..HANGUL SYLLABLE RYEOH
+ {0xB840, 0xB840, prLV}, // Lo HANGUL SYLLABLE RYE
+ {0xB841, 0xB85B, prLVT}, // Lo [27] HANGUL SYLLABLE RYEG..HANGUL SYLLABLE RYEH
+ {0xB85C, 0xB85C, prLV}, // Lo HANGUL SYLLABLE RO
+ {0xB85D, 0xB877, prLVT}, // Lo [27] HANGUL SYLLABLE ROG..HANGUL SYLLABLE ROH
+ {0xB878, 0xB878, prLV}, // Lo HANGUL SYLLABLE RWA
+ {0xB879, 0xB893, prLVT}, // Lo [27] HANGUL SYLLABLE RWAG..HANGUL SYLLABLE RWAH
+ {0xB894, 0xB894, prLV}, // Lo HANGUL SYLLABLE RWAE
+ {0xB895, 0xB8AF, prLVT}, // Lo [27] HANGUL SYLLABLE RWAEG..HANGUL SYLLABLE RWAEH
+ {0xB8B0, 0xB8B0, prLV}, // Lo HANGUL SYLLABLE ROE
+ {0xB8B1, 0xB8CB, prLVT}, // Lo [27] HANGUL SYLLABLE ROEG..HANGUL SYLLABLE ROEH
+ {0xB8CC, 0xB8CC, prLV}, // Lo HANGUL SYLLABLE RYO
+ {0xB8CD, 0xB8E7, prLVT}, // Lo [27] HANGUL SYLLABLE RYOG..HANGUL SYLLABLE RYOH
+ {0xB8E8, 0xB8E8, prLV}, // Lo HANGUL SYLLABLE RU
+ {0xB8E9, 0xB903, prLVT}, // Lo [27] HANGUL SYLLABLE RUG..HANGUL SYLLABLE RUH
+ {0xB904, 0xB904, prLV}, // Lo HANGUL SYLLABLE RWEO
+ {0xB905, 0xB91F, prLVT}, // Lo [27] HANGUL SYLLABLE RWEOG..HANGUL SYLLABLE RWEOH
+ {0xB920, 0xB920, prLV}, // Lo HANGUL SYLLABLE RWE
+ {0xB921, 0xB93B, prLVT}, // Lo [27] HANGUL SYLLABLE RWEG..HANGUL SYLLABLE RWEH
+ {0xB93C, 0xB93C, prLV}, // Lo HANGUL SYLLABLE RWI
+ {0xB93D, 0xB957, prLVT}, // Lo [27] HANGUL SYLLABLE RWIG..HANGUL SYLLABLE RWIH
+ {0xB958, 0xB958, prLV}, // Lo HANGUL SYLLABLE RYU
+ {0xB959, 0xB973, prLVT}, // Lo [27] HANGUL SYLLABLE RYUG..HANGUL SYLLABLE RYUH
+ {0xB974, 0xB974, prLV}, // Lo HANGUL SYLLABLE REU
+ {0xB975, 0xB98F, prLVT}, // Lo [27] HANGUL SYLLABLE REUG..HANGUL SYLLABLE REUH
+ {0xB990, 0xB990, prLV}, // Lo HANGUL SYLLABLE RYI
+ {0xB991, 0xB9AB, prLVT}, // Lo [27] HANGUL SYLLABLE RYIG..HANGUL SYLLABLE RYIH
+ {0xB9AC, 0xB9AC, prLV}, // Lo HANGUL SYLLABLE RI
+ {0xB9AD, 0xB9C7, prLVT}, // Lo [27] HANGUL SYLLABLE RIG..HANGUL SYLLABLE RIH
+ {0xB9C8, 0xB9C8, prLV}, // Lo HANGUL SYLLABLE MA
+ {0xB9C9, 0xB9E3, prLVT}, // Lo [27] HANGUL SYLLABLE MAG..HANGUL SYLLABLE MAH
+ {0xB9E4, 0xB9E4, prLV}, // Lo HANGUL SYLLABLE MAE
+ {0xB9E5, 0xB9FF, prLVT}, // Lo [27] HANGUL SYLLABLE MAEG..HANGUL SYLLABLE MAEH
+ {0xBA00, 0xBA00, prLV}, // Lo HANGUL SYLLABLE MYA
+ {0xBA01, 0xBA1B, prLVT}, // Lo [27] HANGUL SYLLABLE MYAG..HANGUL SYLLABLE MYAH
+ {0xBA1C, 0xBA1C, prLV}, // Lo HANGUL SYLLABLE MYAE
+ {0xBA1D, 0xBA37, prLVT}, // Lo [27] HANGUL SYLLABLE MYAEG..HANGUL SYLLABLE MYAEH
+ {0xBA38, 0xBA38, prLV}, // Lo HANGUL SYLLABLE MEO
+ {0xBA39, 0xBA53, prLVT}, // Lo [27] HANGUL SYLLABLE MEOG..HANGUL SYLLABLE MEOH
+ {0xBA54, 0xBA54, prLV}, // Lo HANGUL SYLLABLE ME
+ {0xBA55, 0xBA6F, prLVT}, // Lo [27] HANGUL SYLLABLE MEG..HANGUL SYLLABLE MEH
+ {0xBA70, 0xBA70, prLV}, // Lo HANGUL SYLLABLE MYEO
+ {0xBA71, 0xBA8B, prLVT}, // Lo [27] HANGUL SYLLABLE MYEOG..HANGUL SYLLABLE MYEOH
+ {0xBA8C, 0xBA8C, prLV}, // Lo HANGUL SYLLABLE MYE
+ {0xBA8D, 0xBAA7, prLVT}, // Lo [27] HANGUL SYLLABLE MYEG..HANGUL SYLLABLE MYEH
+ {0xBAA8, 0xBAA8, prLV}, // Lo HANGUL SYLLABLE MO
+ {0xBAA9, 0xBAC3, prLVT}, // Lo [27] HANGUL SYLLABLE MOG..HANGUL SYLLABLE MOH
+ {0xBAC4, 0xBAC4, prLV}, // Lo HANGUL SYLLABLE MWA
+ {0xBAC5, 0xBADF, prLVT}, // Lo [27] HANGUL SYLLABLE MWAG..HANGUL SYLLABLE MWAH
+ {0xBAE0, 0xBAE0, prLV}, // Lo HANGUL SYLLABLE MWAE
+ {0xBAE1, 0xBAFB, prLVT}, // Lo [27] HANGUL SYLLABLE MWAEG..HANGUL SYLLABLE MWAEH
+ {0xBAFC, 0xBAFC, prLV}, // Lo HANGUL SYLLABLE MOE
+ {0xBAFD, 0xBB17, prLVT}, // Lo [27] HANGUL SYLLABLE MOEG..HANGUL SYLLABLE MOEH
+ {0xBB18, 0xBB18, prLV}, // Lo HANGUL SYLLABLE MYO
+ {0xBB19, 0xBB33, prLVT}, // Lo [27] HANGUL SYLLABLE MYOG..HANGUL SYLLABLE MYOH
+ {0xBB34, 0xBB34, prLV}, // Lo HANGUL SYLLABLE MU
+ {0xBB35, 0xBB4F, prLVT}, // Lo [27] HANGUL SYLLABLE MUG..HANGUL SYLLABLE MUH
+ {0xBB50, 0xBB50, prLV}, // Lo HANGUL SYLLABLE MWEO
+ {0xBB51, 0xBB6B, prLVT}, // Lo [27] HANGUL SYLLABLE MWEOG..HANGUL SYLLABLE MWEOH
+ {0xBB6C, 0xBB6C, prLV}, // Lo HANGUL SYLLABLE MWE
+ {0xBB6D, 0xBB87, prLVT}, // Lo [27] HANGUL SYLLABLE MWEG..HANGUL SYLLABLE MWEH
+ {0xBB88, 0xBB88, prLV}, // Lo HANGUL SYLLABLE MWI
+ {0xBB89, 0xBBA3, prLVT}, // Lo [27] HANGUL SYLLABLE MWIG..HANGUL SYLLABLE MWIH
+ {0xBBA4, 0xBBA4, prLV}, // Lo HANGUL SYLLABLE MYU
+ {0xBBA5, 0xBBBF, prLVT}, // Lo [27] HANGUL SYLLABLE MYUG..HANGUL SYLLABLE MYUH
+ {0xBBC0, 0xBBC0, prLV}, // Lo HANGUL SYLLABLE MEU
+ {0xBBC1, 0xBBDB, prLVT}, // Lo [27] HANGUL SYLLABLE MEUG..HANGUL SYLLABLE MEUH
+ {0xBBDC, 0xBBDC, prLV}, // Lo HANGUL SYLLABLE MYI
+ {0xBBDD, 0xBBF7, prLVT}, // Lo [27] HANGUL SYLLABLE MYIG..HANGUL SYLLABLE MYIH
+ {0xBBF8, 0xBBF8, prLV}, // Lo HANGUL SYLLABLE MI
+ {0xBBF9, 0xBC13, prLVT}, // Lo [27] HANGUL SYLLABLE MIG..HANGUL SYLLABLE MIH
+ {0xBC14, 0xBC14, prLV}, // Lo HANGUL SYLLABLE BA
+ {0xBC15, 0xBC2F, prLVT}, // Lo [27] HANGUL SYLLABLE BAG..HANGUL SYLLABLE BAH
+ {0xBC30, 0xBC30, prLV}, // Lo HANGUL SYLLABLE BAE
+ {0xBC31, 0xBC4B, prLVT}, // Lo [27] HANGUL SYLLABLE BAEG..HANGUL SYLLABLE BAEH
+ {0xBC4C, 0xBC4C, prLV}, // Lo HANGUL SYLLABLE BYA
+ {0xBC4D, 0xBC67, prLVT}, // Lo [27] HANGUL SYLLABLE BYAG..HANGUL SYLLABLE BYAH
+ {0xBC68, 0xBC68, prLV}, // Lo HANGUL SYLLABLE BYAE
+ {0xBC69, 0xBC83, prLVT}, // Lo [27] HANGUL SYLLABLE BYAEG..HANGUL SYLLABLE BYAEH
+ {0xBC84, 0xBC84, prLV}, // Lo HANGUL SYLLABLE BEO
+ {0xBC85, 0xBC9F, prLVT}, // Lo [27] HANGUL SYLLABLE BEOG..HANGUL SYLLABLE BEOH
+ {0xBCA0, 0xBCA0, prLV}, // Lo HANGUL SYLLABLE BE
+ {0xBCA1, 0xBCBB, prLVT}, // Lo [27] HANGUL SYLLABLE BEG..HANGUL SYLLABLE BEH
+ {0xBCBC, 0xBCBC, prLV}, // Lo HANGUL SYLLABLE BYEO
+ {0xBCBD, 0xBCD7, prLVT}, // Lo [27] HANGUL SYLLABLE BYEOG..HANGUL SYLLABLE BYEOH
+ {0xBCD8, 0xBCD8, prLV}, // Lo HANGUL SYLLABLE BYE
+ {0xBCD9, 0xBCF3, prLVT}, // Lo [27] HANGUL SYLLABLE BYEG..HANGUL SYLLABLE BYEH
+ {0xBCF4, 0xBCF4, prLV}, // Lo HANGUL SYLLABLE BO
+ {0xBCF5, 0xBD0F, prLVT}, // Lo [27] HANGUL SYLLABLE BOG..HANGUL SYLLABLE BOH
+ {0xBD10, 0xBD10, prLV}, // Lo HANGUL SYLLABLE BWA
+ {0xBD11, 0xBD2B, prLVT}, // Lo [27] HANGUL SYLLABLE BWAG..HANGUL SYLLABLE BWAH
+ {0xBD2C, 0xBD2C, prLV}, // Lo HANGUL SYLLABLE BWAE
+ {0xBD2D, 0xBD47, prLVT}, // Lo [27] HANGUL SYLLABLE BWAEG..HANGUL SYLLABLE BWAEH
+ {0xBD48, 0xBD48, prLV}, // Lo HANGUL SYLLABLE BOE
+ {0xBD49, 0xBD63, prLVT}, // Lo [27] HANGUL SYLLABLE BOEG..HANGUL SYLLABLE BOEH
+ {0xBD64, 0xBD64, prLV}, // Lo HANGUL SYLLABLE BYO
+ {0xBD65, 0xBD7F, prLVT}, // Lo [27] HANGUL SYLLABLE BYOG..HANGUL SYLLABLE BYOH
+ {0xBD80, 0xBD80, prLV}, // Lo HANGUL SYLLABLE BU
+ {0xBD81, 0xBD9B, prLVT}, // Lo [27] HANGUL SYLLABLE BUG..HANGUL SYLLABLE BUH
+ {0xBD9C, 0xBD9C, prLV}, // Lo HANGUL SYLLABLE BWEO
+ {0xBD9D, 0xBDB7, prLVT}, // Lo [27] HANGUL SYLLABLE BWEOG..HANGUL SYLLABLE BWEOH
+ {0xBDB8, 0xBDB8, prLV}, // Lo HANGUL SYLLABLE BWE
+ {0xBDB9, 0xBDD3, prLVT}, // Lo [27] HANGUL SYLLABLE BWEG..HANGUL SYLLABLE BWEH
+ {0xBDD4, 0xBDD4, prLV}, // Lo HANGUL SYLLABLE BWI
+ {0xBDD5, 0xBDEF, prLVT}, // Lo [27] HANGUL SYLLABLE BWIG..HANGUL SYLLABLE BWIH
+ {0xBDF0, 0xBDF0, prLV}, // Lo HANGUL SYLLABLE BYU
+ {0xBDF1, 0xBE0B, prLVT}, // Lo [27] HANGUL SYLLABLE BYUG..HANGUL SYLLABLE BYUH
+ {0xBE0C, 0xBE0C, prLV}, // Lo HANGUL SYLLABLE BEU
+ {0xBE0D, 0xBE27, prLVT}, // Lo [27] HANGUL SYLLABLE BEUG..HANGUL SYLLABLE BEUH
+ {0xBE28, 0xBE28, prLV}, // Lo HANGUL SYLLABLE BYI
+ {0xBE29, 0xBE43, prLVT}, // Lo [27] HANGUL SYLLABLE BYIG..HANGUL SYLLABLE BYIH
+ {0xBE44, 0xBE44, prLV}, // Lo HANGUL SYLLABLE BI
+ {0xBE45, 0xBE5F, prLVT}, // Lo [27] HANGUL SYLLABLE BIG..HANGUL SYLLABLE BIH
+ {0xBE60, 0xBE60, prLV}, // Lo HANGUL SYLLABLE BBA
+ {0xBE61, 0xBE7B, prLVT}, // Lo [27] HANGUL SYLLABLE BBAG..HANGUL SYLLABLE BBAH
+ {0xBE7C, 0xBE7C, prLV}, // Lo HANGUL SYLLABLE BBAE
+ {0xBE7D, 0xBE97, prLVT}, // Lo [27] HANGUL SYLLABLE BBAEG..HANGUL SYLLABLE BBAEH
+ {0xBE98, 0xBE98, prLV}, // Lo HANGUL SYLLABLE BBYA
+ {0xBE99, 0xBEB3, prLVT}, // Lo [27] HANGUL SYLLABLE BBYAG..HANGUL SYLLABLE BBYAH
+ {0xBEB4, 0xBEB4, prLV}, // Lo HANGUL SYLLABLE BBYAE
+ {0xBEB5, 0xBECF, prLVT}, // Lo [27] HANGUL SYLLABLE BBYAEG..HANGUL SYLLABLE BBYAEH
+ {0xBED0, 0xBED0, prLV}, // Lo HANGUL SYLLABLE BBEO
+ {0xBED1, 0xBEEB, prLVT}, // Lo [27] HANGUL SYLLABLE BBEOG..HANGUL SYLLABLE BBEOH
+ {0xBEEC, 0xBEEC, prLV}, // Lo HANGUL SYLLABLE BBE
+ {0xBEED, 0xBF07, prLVT}, // Lo [27] HANGUL SYLLABLE BBEG..HANGUL SYLLABLE BBEH
+ {0xBF08, 0xBF08, prLV}, // Lo HANGUL SYLLABLE BBYEO
+ {0xBF09, 0xBF23, prLVT}, // Lo [27] HANGUL SYLLABLE BBYEOG..HANGUL SYLLABLE BBYEOH
+ {0xBF24, 0xBF24, prLV}, // Lo HANGUL SYLLABLE BBYE
+ {0xBF25, 0xBF3F, prLVT}, // Lo [27] HANGUL SYLLABLE BBYEG..HANGUL SYLLABLE BBYEH
+ {0xBF40, 0xBF40, prLV}, // Lo HANGUL SYLLABLE BBO
+ {0xBF41, 0xBF5B, prLVT}, // Lo [27] HANGUL SYLLABLE BBOG..HANGUL SYLLABLE BBOH
+ {0xBF5C, 0xBF5C, prLV}, // Lo HANGUL SYLLABLE BBWA
+ {0xBF5D, 0xBF77, prLVT}, // Lo [27] HANGUL SYLLABLE BBWAG..HANGUL SYLLABLE BBWAH
+ {0xBF78, 0xBF78, prLV}, // Lo HANGUL SYLLABLE BBWAE
+ {0xBF79, 0xBF93, prLVT}, // Lo [27] HANGUL SYLLABLE BBWAEG..HANGUL SYLLABLE BBWAEH
+ {0xBF94, 0xBF94, prLV}, // Lo HANGUL SYLLABLE BBOE
+ {0xBF95, 0xBFAF, prLVT}, // Lo [27] HANGUL SYLLABLE BBOEG..HANGUL SYLLABLE BBOEH
+ {0xBFB0, 0xBFB0, prLV}, // Lo HANGUL SYLLABLE BBYO
+ {0xBFB1, 0xBFCB, prLVT}, // Lo [27] HANGUL SYLLABLE BBYOG..HANGUL SYLLABLE BBYOH
+ {0xBFCC, 0xBFCC, prLV}, // Lo HANGUL SYLLABLE BBU
+ {0xBFCD, 0xBFE7, prLVT}, // Lo [27] HANGUL SYLLABLE BBUG..HANGUL SYLLABLE BBUH
+ {0xBFE8, 0xBFE8, prLV}, // Lo HANGUL SYLLABLE BBWEO
+ {0xBFE9, 0xC003, prLVT}, // Lo [27] HANGUL SYLLABLE BBWEOG..HANGUL SYLLABLE BBWEOH
+ {0xC004, 0xC004, prLV}, // Lo HANGUL SYLLABLE BBWE
+ {0xC005, 0xC01F, prLVT}, // Lo [27] HANGUL SYLLABLE BBWEG..HANGUL SYLLABLE BBWEH
+ {0xC020, 0xC020, prLV}, // Lo HANGUL SYLLABLE BBWI
+ {0xC021, 0xC03B, prLVT}, // Lo [27] HANGUL SYLLABLE BBWIG..HANGUL SYLLABLE BBWIH
+ {0xC03C, 0xC03C, prLV}, // Lo HANGUL SYLLABLE BBYU
+ {0xC03D, 0xC057, prLVT}, // Lo [27] HANGUL SYLLABLE BBYUG..HANGUL SYLLABLE BBYUH
+ {0xC058, 0xC058, prLV}, // Lo HANGUL SYLLABLE BBEU
+ {0xC059, 0xC073, prLVT}, // Lo [27] HANGUL SYLLABLE BBEUG..HANGUL SYLLABLE BBEUH
+ {0xC074, 0xC074, prLV}, // Lo HANGUL SYLLABLE BBYI
+ {0xC075, 0xC08F, prLVT}, // Lo [27] HANGUL SYLLABLE BBYIG..HANGUL SYLLABLE BBYIH
+ {0xC090, 0xC090, prLV}, // Lo HANGUL SYLLABLE BBI
+ {0xC091, 0xC0AB, prLVT}, // Lo [27] HANGUL SYLLABLE BBIG..HANGUL SYLLABLE BBIH
+ {0xC0AC, 0xC0AC, prLV}, // Lo HANGUL SYLLABLE SA
+ {0xC0AD, 0xC0C7, prLVT}, // Lo [27] HANGUL SYLLABLE SAG..HANGUL SYLLABLE SAH
+ {0xC0C8, 0xC0C8, prLV}, // Lo HANGUL SYLLABLE SAE
+ {0xC0C9, 0xC0E3, prLVT}, // Lo [27] HANGUL SYLLABLE SAEG..HANGUL SYLLABLE SAEH
+ {0xC0E4, 0xC0E4, prLV}, // Lo HANGUL SYLLABLE SYA
+ {0xC0E5, 0xC0FF, prLVT}, // Lo [27] HANGUL SYLLABLE SYAG..HANGUL SYLLABLE SYAH
+ {0xC100, 0xC100, prLV}, // Lo HANGUL SYLLABLE SYAE
+ {0xC101, 0xC11B, prLVT}, // Lo [27] HANGUL SYLLABLE SYAEG..HANGUL SYLLABLE SYAEH
+ {0xC11C, 0xC11C, prLV}, // Lo HANGUL SYLLABLE SEO
+ {0xC11D, 0xC137, prLVT}, // Lo [27] HANGUL SYLLABLE SEOG..HANGUL SYLLABLE SEOH
+ {0xC138, 0xC138, prLV}, // Lo HANGUL SYLLABLE SE
+ {0xC139, 0xC153, prLVT}, // Lo [27] HANGUL SYLLABLE SEG..HANGUL SYLLABLE SEH
+ {0xC154, 0xC154, prLV}, // Lo HANGUL SYLLABLE SYEO
+ {0xC155, 0xC16F, prLVT}, // Lo [27] HANGUL SYLLABLE SYEOG..HANGUL SYLLABLE SYEOH
+ {0xC170, 0xC170, prLV}, // Lo HANGUL SYLLABLE SYE
+ {0xC171, 0xC18B, prLVT}, // Lo [27] HANGUL SYLLABLE SYEG..HANGUL SYLLABLE SYEH
+ {0xC18C, 0xC18C, prLV}, // Lo HANGUL SYLLABLE SO
+ {0xC18D, 0xC1A7, prLVT}, // Lo [27] HANGUL SYLLABLE SOG..HANGUL SYLLABLE SOH
+ {0xC1A8, 0xC1A8, prLV}, // Lo HANGUL SYLLABLE SWA
+ {0xC1A9, 0xC1C3, prLVT}, // Lo [27] HANGUL SYLLABLE SWAG..HANGUL SYLLABLE SWAH
+ {0xC1C4, 0xC1C4, prLV}, // Lo HANGUL SYLLABLE SWAE
+ {0xC1C5, 0xC1DF, prLVT}, // Lo [27] HANGUL SYLLABLE SWAEG..HANGUL SYLLABLE SWAEH
+ {0xC1E0, 0xC1E0, prLV}, // Lo HANGUL SYLLABLE SOE
+ {0xC1E1, 0xC1FB, prLVT}, // Lo [27] HANGUL SYLLABLE SOEG..HANGUL SYLLABLE SOEH
+ {0xC1FC, 0xC1FC, prLV}, // Lo HANGUL SYLLABLE SYO
+ {0xC1FD, 0xC217, prLVT}, // Lo [27] HANGUL SYLLABLE SYOG..HANGUL SYLLABLE SYOH
+ {0xC218, 0xC218, prLV}, // Lo HANGUL SYLLABLE SU
+ {0xC219, 0xC233, prLVT}, // Lo [27] HANGUL SYLLABLE SUG..HANGUL SYLLABLE SUH
+ {0xC234, 0xC234, prLV}, // Lo HANGUL SYLLABLE SWEO
+ {0xC235, 0xC24F, prLVT}, // Lo [27] HANGUL SYLLABLE SWEOG..HANGUL SYLLABLE SWEOH
+ {0xC250, 0xC250, prLV}, // Lo HANGUL SYLLABLE SWE
+ {0xC251, 0xC26B, prLVT}, // Lo [27] HANGUL SYLLABLE SWEG..HANGUL SYLLABLE SWEH
+ {0xC26C, 0xC26C, prLV}, // Lo HANGUL SYLLABLE SWI
+ {0xC26D, 0xC287, prLVT}, // Lo [27] HANGUL SYLLABLE SWIG..HANGUL SYLLABLE SWIH
+ {0xC288, 0xC288, prLV}, // Lo HANGUL SYLLABLE SYU
+ {0xC289, 0xC2A3, prLVT}, // Lo [27] HANGUL SYLLABLE SYUG..HANGUL SYLLABLE SYUH
+ {0xC2A4, 0xC2A4, prLV}, // Lo HANGUL SYLLABLE SEU
+ {0xC2A5, 0xC2BF, prLVT}, // Lo [27] HANGUL SYLLABLE SEUG..HANGUL SYLLABLE SEUH
+ {0xC2C0, 0xC2C0, prLV}, // Lo HANGUL SYLLABLE SYI
+ {0xC2C1, 0xC2DB, prLVT}, // Lo [27] HANGUL SYLLABLE SYIG..HANGUL SYLLABLE SYIH
+ {0xC2DC, 0xC2DC, prLV}, // Lo HANGUL SYLLABLE SI
+ {0xC2DD, 0xC2F7, prLVT}, // Lo [27] HANGUL SYLLABLE SIG..HANGUL SYLLABLE SIH
+ {0xC2F8, 0xC2F8, prLV}, // Lo HANGUL SYLLABLE SSA
+ {0xC2F9, 0xC313, prLVT}, // Lo [27] HANGUL SYLLABLE SSAG..HANGUL SYLLABLE SSAH
+ {0xC314, 0xC314, prLV}, // Lo HANGUL SYLLABLE SSAE
+ {0xC315, 0xC32F, prLVT}, // Lo [27] HANGUL SYLLABLE SSAEG..HANGUL SYLLABLE SSAEH
+ {0xC330, 0xC330, prLV}, // Lo HANGUL SYLLABLE SSYA
+ {0xC331, 0xC34B, prLVT}, // Lo [27] HANGUL SYLLABLE SSYAG..HANGUL SYLLABLE SSYAH
+ {0xC34C, 0xC34C, prLV}, // Lo HANGUL SYLLABLE SSYAE
+ {0xC34D, 0xC367, prLVT}, // Lo [27] HANGUL SYLLABLE SSYAEG..HANGUL SYLLABLE SSYAEH
+ {0xC368, 0xC368, prLV}, // Lo HANGUL SYLLABLE SSEO
+ {0xC369, 0xC383, prLVT}, // Lo [27] HANGUL SYLLABLE SSEOG..HANGUL SYLLABLE SSEOH
+ {0xC384, 0xC384, prLV}, // Lo HANGUL SYLLABLE SSE
+ {0xC385, 0xC39F, prLVT}, // Lo [27] HANGUL SYLLABLE SSEG..HANGUL SYLLABLE SSEH
+ {0xC3A0, 0xC3A0, prLV}, // Lo HANGUL SYLLABLE SSYEO
+ {0xC3A1, 0xC3BB, prLVT}, // Lo [27] HANGUL SYLLABLE SSYEOG..HANGUL SYLLABLE SSYEOH
+ {0xC3BC, 0xC3BC, prLV}, // Lo HANGUL SYLLABLE SSYE
+ {0xC3BD, 0xC3D7, prLVT}, // Lo [27] HANGUL SYLLABLE SSYEG..HANGUL SYLLABLE SSYEH
+ {0xC3D8, 0xC3D8, prLV}, // Lo HANGUL SYLLABLE SSO
+ {0xC3D9, 0xC3F3, prLVT}, // Lo [27] HANGUL SYLLABLE SSOG..HANGUL SYLLABLE SSOH
+ {0xC3F4, 0xC3F4, prLV}, // Lo HANGUL SYLLABLE SSWA
+ {0xC3F5, 0xC40F, prLVT}, // Lo [27] HANGUL SYLLABLE SSWAG..HANGUL SYLLABLE SSWAH
+ {0xC410, 0xC410, prLV}, // Lo HANGUL SYLLABLE SSWAE
+ {0xC411, 0xC42B, prLVT}, // Lo [27] HANGUL SYLLABLE SSWAEG..HANGUL SYLLABLE SSWAEH
+ {0xC42C, 0xC42C, prLV}, // Lo HANGUL SYLLABLE SSOE
+ {0xC42D, 0xC447, prLVT}, // Lo [27] HANGUL SYLLABLE SSOEG..HANGUL SYLLABLE SSOEH
+ {0xC448, 0xC448, prLV}, // Lo HANGUL SYLLABLE SSYO
+ {0xC449, 0xC463, prLVT}, // Lo [27] HANGUL SYLLABLE SSYOG..HANGUL SYLLABLE SSYOH
+ {0xC464, 0xC464, prLV}, // Lo HANGUL SYLLABLE SSU
+ {0xC465, 0xC47F, prLVT}, // Lo [27] HANGUL SYLLABLE SSUG..HANGUL SYLLABLE SSUH
+ {0xC480, 0xC480, prLV}, // Lo HANGUL SYLLABLE SSWEO
+ {0xC481, 0xC49B, prLVT}, // Lo [27] HANGUL SYLLABLE SSWEOG..HANGUL SYLLABLE SSWEOH
+ {0xC49C, 0xC49C, prLV}, // Lo HANGUL SYLLABLE SSWE
+ {0xC49D, 0xC4B7, prLVT}, // Lo [27] HANGUL SYLLABLE SSWEG..HANGUL SYLLABLE SSWEH
+ {0xC4B8, 0xC4B8, prLV}, // Lo HANGUL SYLLABLE SSWI
+ {0xC4B9, 0xC4D3, prLVT}, // Lo [27] HANGUL SYLLABLE SSWIG..HANGUL SYLLABLE SSWIH
+ {0xC4D4, 0xC4D4, prLV}, // Lo HANGUL SYLLABLE SSYU
+ {0xC4D5, 0xC4EF, prLVT}, // Lo [27] HANGUL SYLLABLE SSYUG..HANGUL SYLLABLE SSYUH
+ {0xC4F0, 0xC4F0, prLV}, // Lo HANGUL SYLLABLE SSEU
+ {0xC4F1, 0xC50B, prLVT}, // Lo [27] HANGUL SYLLABLE SSEUG..HANGUL SYLLABLE SSEUH
+ {0xC50C, 0xC50C, prLV}, // Lo HANGUL SYLLABLE SSYI
+ {0xC50D, 0xC527, prLVT}, // Lo [27] HANGUL SYLLABLE SSYIG..HANGUL SYLLABLE SSYIH
+ {0xC528, 0xC528, prLV}, // Lo HANGUL SYLLABLE SSI
+ {0xC529, 0xC543, prLVT}, // Lo [27] HANGUL SYLLABLE SSIG..HANGUL SYLLABLE SSIH
+ {0xC544, 0xC544, prLV}, // Lo HANGUL SYLLABLE A
+ {0xC545, 0xC55F, prLVT}, // Lo [27] HANGUL SYLLABLE AG..HANGUL SYLLABLE AH
+ {0xC560, 0xC560, prLV}, // Lo HANGUL SYLLABLE AE
+ {0xC561, 0xC57B, prLVT}, // Lo [27] HANGUL SYLLABLE AEG..HANGUL SYLLABLE AEH
+ {0xC57C, 0xC57C, prLV}, // Lo HANGUL SYLLABLE YA
+ {0xC57D, 0xC597, prLVT}, // Lo [27] HANGUL SYLLABLE YAG..HANGUL SYLLABLE YAH
+ {0xC598, 0xC598, prLV}, // Lo HANGUL SYLLABLE YAE
+ {0xC599, 0xC5B3, prLVT}, // Lo [27] HANGUL SYLLABLE YAEG..HANGUL SYLLABLE YAEH
+ {0xC5B4, 0xC5B4, prLV}, // Lo HANGUL SYLLABLE EO
+ {0xC5B5, 0xC5CF, prLVT}, // Lo [27] HANGUL SYLLABLE EOG..HANGUL SYLLABLE EOH
+ {0xC5D0, 0xC5D0, prLV}, // Lo HANGUL SYLLABLE E
+ {0xC5D1, 0xC5EB, prLVT}, // Lo [27] HANGUL SYLLABLE EG..HANGUL SYLLABLE EH
+ {0xC5EC, 0xC5EC, prLV}, // Lo HANGUL SYLLABLE YEO
+ {0xC5ED, 0xC607, prLVT}, // Lo [27] HANGUL SYLLABLE YEOG..HANGUL SYLLABLE YEOH
+ {0xC608, 0xC608, prLV}, // Lo HANGUL SYLLABLE YE
+ {0xC609, 0xC623, prLVT}, // Lo [27] HANGUL SYLLABLE YEG..HANGUL SYLLABLE YEH
+ {0xC624, 0xC624, prLV}, // Lo HANGUL SYLLABLE O
+ {0xC625, 0xC63F, prLVT}, // Lo [27] HANGUL SYLLABLE OG..HANGUL SYLLABLE OH
+ {0xC640, 0xC640, prLV}, // Lo HANGUL SYLLABLE WA
+ {0xC641, 0xC65B, prLVT}, // Lo [27] HANGUL SYLLABLE WAG..HANGUL SYLLABLE WAH
+ {0xC65C, 0xC65C, prLV}, // Lo HANGUL SYLLABLE WAE
+ {0xC65D, 0xC677, prLVT}, // Lo [27] HANGUL SYLLABLE WAEG..HANGUL SYLLABLE WAEH
+ {0xC678, 0xC678, prLV}, // Lo HANGUL SYLLABLE OE
+ {0xC679, 0xC693, prLVT}, // Lo [27] HANGUL SYLLABLE OEG..HANGUL SYLLABLE OEH
+ {0xC694, 0xC694, prLV}, // Lo HANGUL SYLLABLE YO
+ {0xC695, 0xC6AF, prLVT}, // Lo [27] HANGUL SYLLABLE YOG..HANGUL SYLLABLE YOH
+ {0xC6B0, 0xC6B0, prLV}, // Lo HANGUL SYLLABLE U
+ {0xC6B1, 0xC6CB, prLVT}, // Lo [27] HANGUL SYLLABLE UG..HANGUL SYLLABLE UH
+ {0xC6CC, 0xC6CC, prLV}, // Lo HANGUL SYLLABLE WEO
+ {0xC6CD, 0xC6E7, prLVT}, // Lo [27] HANGUL SYLLABLE WEOG..HANGUL SYLLABLE WEOH
+ {0xC6E8, 0xC6E8, prLV}, // Lo HANGUL SYLLABLE WE
+ {0xC6E9, 0xC703, prLVT}, // Lo [27] HANGUL SYLLABLE WEG..HANGUL SYLLABLE WEH
+ {0xC704, 0xC704, prLV}, // Lo HANGUL SYLLABLE WI
+ {0xC705, 0xC71F, prLVT}, // Lo [27] HANGUL SYLLABLE WIG..HANGUL SYLLABLE WIH
+ {0xC720, 0xC720, prLV}, // Lo HANGUL SYLLABLE YU
+ {0xC721, 0xC73B, prLVT}, // Lo [27] HANGUL SYLLABLE YUG..HANGUL SYLLABLE YUH
+ {0xC73C, 0xC73C, prLV}, // Lo HANGUL SYLLABLE EU
+ {0xC73D, 0xC757, prLVT}, // Lo [27] HANGUL SYLLABLE EUG..HANGUL SYLLABLE EUH
+ {0xC758, 0xC758, prLV}, // Lo HANGUL SYLLABLE YI
+ {0xC759, 0xC773, prLVT}, // Lo [27] HANGUL SYLLABLE YIG..HANGUL SYLLABLE YIH
+ {0xC774, 0xC774, prLV}, // Lo HANGUL SYLLABLE I
+ {0xC775, 0xC78F, prLVT}, // Lo [27] HANGUL SYLLABLE IG..HANGUL SYLLABLE IH
+ {0xC790, 0xC790, prLV}, // Lo HANGUL SYLLABLE JA
+ {0xC791, 0xC7AB, prLVT}, // Lo [27] HANGUL SYLLABLE JAG..HANGUL SYLLABLE JAH
+ {0xC7AC, 0xC7AC, prLV}, // Lo HANGUL SYLLABLE JAE
+ {0xC7AD, 0xC7C7, prLVT}, // Lo [27] HANGUL SYLLABLE JAEG..HANGUL SYLLABLE JAEH
+ {0xC7C8, 0xC7C8, prLV}, // Lo HANGUL SYLLABLE JYA
+ {0xC7C9, 0xC7E3, prLVT}, // Lo [27] HANGUL SYLLABLE JYAG..HANGUL SYLLABLE JYAH
+ {0xC7E4, 0xC7E4, prLV}, // Lo HANGUL SYLLABLE JYAE
+ {0xC7E5, 0xC7FF, prLVT}, // Lo [27] HANGUL SYLLABLE JYAEG..HANGUL SYLLABLE JYAEH
+ {0xC800, 0xC800, prLV}, // Lo HANGUL SYLLABLE JEO
+ {0xC801, 0xC81B, prLVT}, // Lo [27] HANGUL SYLLABLE JEOG..HANGUL SYLLABLE JEOH
+ {0xC81C, 0xC81C, prLV}, // Lo HANGUL SYLLABLE JE
+ {0xC81D, 0xC837, prLVT}, // Lo [27] HANGUL SYLLABLE JEG..HANGUL SYLLABLE JEH
+ {0xC838, 0xC838, prLV}, // Lo HANGUL SYLLABLE JYEO
+ {0xC839, 0xC853, prLVT}, // Lo [27] HANGUL SYLLABLE JYEOG..HANGUL SYLLABLE JYEOH
+ {0xC854, 0xC854, prLV}, // Lo HANGUL SYLLABLE JYE
+ {0xC855, 0xC86F, prLVT}, // Lo [27] HANGUL SYLLABLE JYEG..HANGUL SYLLABLE JYEH
+ {0xC870, 0xC870, prLV}, // Lo HANGUL SYLLABLE JO
+ {0xC871, 0xC88B, prLVT}, // Lo [27] HANGUL SYLLABLE JOG..HANGUL SYLLABLE JOH
+ {0xC88C, 0xC88C, prLV}, // Lo HANGUL SYLLABLE JWA
+ {0xC88D, 0xC8A7, prLVT}, // Lo [27] HANGUL SYLLABLE JWAG..HANGUL SYLLABLE JWAH
+ {0xC8A8, 0xC8A8, prLV}, // Lo HANGUL SYLLABLE JWAE
+ {0xC8A9, 0xC8C3, prLVT}, // Lo [27] HANGUL SYLLABLE JWAEG..HANGUL SYLLABLE JWAEH
+ {0xC8C4, 0xC8C4, prLV}, // Lo HANGUL SYLLABLE JOE
+ {0xC8C5, 0xC8DF, prLVT}, // Lo [27] HANGUL SYLLABLE JOEG..HANGUL SYLLABLE JOEH
+ {0xC8E0, 0xC8E0, prLV}, // Lo HANGUL SYLLABLE JYO
+ {0xC8E1, 0xC8FB, prLVT}, // Lo [27] HANGUL SYLLABLE JYOG..HANGUL SYLLABLE JYOH
+ {0xC8FC, 0xC8FC, prLV}, // Lo HANGUL SYLLABLE JU
+ {0xC8FD, 0xC917, prLVT}, // Lo [27] HANGUL SYLLABLE JUG..HANGUL SYLLABLE JUH
+ {0xC918, 0xC918, prLV}, // Lo HANGUL SYLLABLE JWEO
+ {0xC919, 0xC933, prLVT}, // Lo [27] HANGUL SYLLABLE JWEOG..HANGUL SYLLABLE JWEOH
+ {0xC934, 0xC934, prLV}, // Lo HANGUL SYLLABLE JWE
+ {0xC935, 0xC94F, prLVT}, // Lo [27] HANGUL SYLLABLE JWEG..HANGUL SYLLABLE JWEH
+ {0xC950, 0xC950, prLV}, // Lo HANGUL SYLLABLE JWI
+ {0xC951, 0xC96B, prLVT}, // Lo [27] HANGUL SYLLABLE JWIG..HANGUL SYLLABLE JWIH
+ {0xC96C, 0xC96C, prLV}, // Lo HANGUL SYLLABLE JYU
+ {0xC96D, 0xC987, prLVT}, // Lo [27] HANGUL SYLLABLE JYUG..HANGUL SYLLABLE JYUH
+ {0xC988, 0xC988, prLV}, // Lo HANGUL SYLLABLE JEU
+ {0xC989, 0xC9A3, prLVT}, // Lo [27] HANGUL SYLLABLE JEUG..HANGUL SYLLABLE JEUH
+ {0xC9A4, 0xC9A4, prLV}, // Lo HANGUL SYLLABLE JYI
+ {0xC9A5, 0xC9BF, prLVT}, // Lo [27] HANGUL SYLLABLE JYIG..HANGUL SYLLABLE JYIH
+ {0xC9C0, 0xC9C0, prLV}, // Lo HANGUL SYLLABLE JI
+ {0xC9C1, 0xC9DB, prLVT}, // Lo [27] HANGUL SYLLABLE JIG..HANGUL SYLLABLE JIH
+ {0xC9DC, 0xC9DC, prLV}, // Lo HANGUL SYLLABLE JJA
+ {0xC9DD, 0xC9F7, prLVT}, // Lo [27] HANGUL SYLLABLE JJAG..HANGUL SYLLABLE JJAH
+ {0xC9F8, 0xC9F8, prLV}, // Lo HANGUL SYLLABLE JJAE
+ {0xC9F9, 0xCA13, prLVT}, // Lo [27] HANGUL SYLLABLE JJAEG..HANGUL SYLLABLE JJAEH
+ {0xCA14, 0xCA14, prLV}, // Lo HANGUL SYLLABLE JJYA
+ {0xCA15, 0xCA2F, prLVT}, // Lo [27] HANGUL SYLLABLE JJYAG..HANGUL SYLLABLE JJYAH
+ {0xCA30, 0xCA30, prLV}, // Lo HANGUL SYLLABLE JJYAE
+ {0xCA31, 0xCA4B, prLVT}, // Lo [27] HANGUL SYLLABLE JJYAEG..HANGUL SYLLABLE JJYAEH
+ {0xCA4C, 0xCA4C, prLV}, // Lo HANGUL SYLLABLE JJEO
+ {0xCA4D, 0xCA67, prLVT}, // Lo [27] HANGUL SYLLABLE JJEOG..HANGUL SYLLABLE JJEOH
+ {0xCA68, 0xCA68, prLV}, // Lo HANGUL SYLLABLE JJE
+ {0xCA69, 0xCA83, prLVT}, // Lo [27] HANGUL SYLLABLE JJEG..HANGUL SYLLABLE JJEH
+ {0xCA84, 0xCA84, prLV}, // Lo HANGUL SYLLABLE JJYEO
+ {0xCA85, 0xCA9F, prLVT}, // Lo [27] HANGUL SYLLABLE JJYEOG..HANGUL SYLLABLE JJYEOH
+ {0xCAA0, 0xCAA0, prLV}, // Lo HANGUL SYLLABLE JJYE
+ {0xCAA1, 0xCABB, prLVT}, // Lo [27] HANGUL SYLLABLE JJYEG..HANGUL SYLLABLE JJYEH
+ {0xCABC, 0xCABC, prLV}, // Lo HANGUL SYLLABLE JJO
+ {0xCABD, 0xCAD7, prLVT}, // Lo [27] HANGUL SYLLABLE JJOG..HANGUL SYLLABLE JJOH
+ {0xCAD8, 0xCAD8, prLV}, // Lo HANGUL SYLLABLE JJWA
+ {0xCAD9, 0xCAF3, prLVT}, // Lo [27] HANGUL SYLLABLE JJWAG..HANGUL SYLLABLE JJWAH
+ {0xCAF4, 0xCAF4, prLV}, // Lo HANGUL SYLLABLE JJWAE
+ {0xCAF5, 0xCB0F, prLVT}, // Lo [27] HANGUL SYLLABLE JJWAEG..HANGUL SYLLABLE JJWAEH
+ {0xCB10, 0xCB10, prLV}, // Lo HANGUL SYLLABLE JJOE
+ {0xCB11, 0xCB2B, prLVT}, // Lo [27] HANGUL SYLLABLE JJOEG..HANGUL SYLLABLE JJOEH
+ {0xCB2C, 0xCB2C, prLV}, // Lo HANGUL SYLLABLE JJYO
+ {0xCB2D, 0xCB47, prLVT}, // Lo [27] HANGUL SYLLABLE JJYOG..HANGUL SYLLABLE JJYOH
+ {0xCB48, 0xCB48, prLV}, // Lo HANGUL SYLLABLE JJU
+ {0xCB49, 0xCB63, prLVT}, // Lo [27] HANGUL SYLLABLE JJUG..HANGUL SYLLABLE JJUH
+ {0xCB64, 0xCB64, prLV}, // Lo HANGUL SYLLABLE JJWEO
+ {0xCB65, 0xCB7F, prLVT}, // Lo [27] HANGUL SYLLABLE JJWEOG..HANGUL SYLLABLE JJWEOH
+ {0xCB80, 0xCB80, prLV}, // Lo HANGUL SYLLABLE JJWE
+ {0xCB81, 0xCB9B, prLVT}, // Lo [27] HANGUL SYLLABLE JJWEG..HANGUL SYLLABLE JJWEH
+ {0xCB9C, 0xCB9C, prLV}, // Lo HANGUL SYLLABLE JJWI
+ {0xCB9D, 0xCBB7, prLVT}, // Lo [27] HANGUL SYLLABLE JJWIG..HANGUL SYLLABLE JJWIH
+ {0xCBB8, 0xCBB8, prLV}, // Lo HANGUL SYLLABLE JJYU
+ {0xCBB9, 0xCBD3, prLVT}, // Lo [27] HANGUL SYLLABLE JJYUG..HANGUL SYLLABLE JJYUH
+ {0xCBD4, 0xCBD4, prLV}, // Lo HANGUL SYLLABLE JJEU
+ {0xCBD5, 0xCBEF, prLVT}, // Lo [27] HANGUL SYLLABLE JJEUG..HANGUL SYLLABLE JJEUH
+ {0xCBF0, 0xCBF0, prLV}, // Lo HANGUL SYLLABLE JJYI
+ {0xCBF1, 0xCC0B, prLVT}, // Lo [27] HANGUL SYLLABLE JJYIG..HANGUL SYLLABLE JJYIH
+ {0xCC0C, 0xCC0C, prLV}, // Lo HANGUL SYLLABLE JJI
+ {0xCC0D, 0xCC27, prLVT}, // Lo [27] HANGUL SYLLABLE JJIG..HANGUL SYLLABLE JJIH
+ {0xCC28, 0xCC28, prLV}, // Lo HANGUL SYLLABLE CA
+ {0xCC29, 0xCC43, prLVT}, // Lo [27] HANGUL SYLLABLE CAG..HANGUL SYLLABLE CAH
+ {0xCC44, 0xCC44, prLV}, // Lo HANGUL SYLLABLE CAE
+ {0xCC45, 0xCC5F, prLVT}, // Lo [27] HANGUL SYLLABLE CAEG..HANGUL SYLLABLE CAEH
+ {0xCC60, 0xCC60, prLV}, // Lo HANGUL SYLLABLE CYA
+ {0xCC61, 0xCC7B, prLVT}, // Lo [27] HANGUL SYLLABLE CYAG..HANGUL SYLLABLE CYAH
+ {0xCC7C, 0xCC7C, prLV}, // Lo HANGUL SYLLABLE CYAE
+ {0xCC7D, 0xCC97, prLVT}, // Lo [27] HANGUL SYLLABLE CYAEG..HANGUL SYLLABLE CYAEH
+ {0xCC98, 0xCC98, prLV}, // Lo HANGUL SYLLABLE CEO
+ {0xCC99, 0xCCB3, prLVT}, // Lo [27] HANGUL SYLLABLE CEOG..HANGUL SYLLABLE CEOH
+ {0xCCB4, 0xCCB4, prLV}, // Lo HANGUL SYLLABLE CE
+ {0xCCB5, 0xCCCF, prLVT}, // Lo [27] HANGUL SYLLABLE CEG..HANGUL SYLLABLE CEH
+ {0xCCD0, 0xCCD0, prLV}, // Lo HANGUL SYLLABLE CYEO
+ {0xCCD1, 0xCCEB, prLVT}, // Lo [27] HANGUL SYLLABLE CYEOG..HANGUL SYLLABLE CYEOH
+ {0xCCEC, 0xCCEC, prLV}, // Lo HANGUL SYLLABLE CYE
+ {0xCCED, 0xCD07, prLVT}, // Lo [27] HANGUL SYLLABLE CYEG..HANGUL SYLLABLE CYEH
+ {0xCD08, 0xCD08, prLV}, // Lo HANGUL SYLLABLE CO
+ {0xCD09, 0xCD23, prLVT}, // Lo [27] HANGUL SYLLABLE COG..HANGUL SYLLABLE COH
+ {0xCD24, 0xCD24, prLV}, // Lo HANGUL SYLLABLE CWA
+ {0xCD25, 0xCD3F, prLVT}, // Lo [27] HANGUL SYLLABLE CWAG..HANGUL SYLLABLE CWAH
+ {0xCD40, 0xCD40, prLV}, // Lo HANGUL SYLLABLE CWAE
+ {0xCD41, 0xCD5B, prLVT}, // Lo [27] HANGUL SYLLABLE CWAEG..HANGUL SYLLABLE CWAEH
+ {0xCD5C, 0xCD5C, prLV}, // Lo HANGUL SYLLABLE COE
+ {0xCD5D, 0xCD77, prLVT}, // Lo [27] HANGUL SYLLABLE COEG..HANGUL SYLLABLE COEH
+ {0xCD78, 0xCD78, prLV}, // Lo HANGUL SYLLABLE CYO
+ {0xCD79, 0xCD93, prLVT}, // Lo [27] HANGUL SYLLABLE CYOG..HANGUL SYLLABLE CYOH
+ {0xCD94, 0xCD94, prLV}, // Lo HANGUL SYLLABLE CU
+ {0xCD95, 0xCDAF, prLVT}, // Lo [27] HANGUL SYLLABLE CUG..HANGUL SYLLABLE CUH
+ {0xCDB0, 0xCDB0, prLV}, // Lo HANGUL SYLLABLE CWEO
+ {0xCDB1, 0xCDCB, prLVT}, // Lo [27] HANGUL SYLLABLE CWEOG..HANGUL SYLLABLE CWEOH
+ {0xCDCC, 0xCDCC, prLV}, // Lo HANGUL SYLLABLE CWE
+ {0xCDCD, 0xCDE7, prLVT}, // Lo [27] HANGUL SYLLABLE CWEG..HANGUL SYLLABLE CWEH
+ {0xCDE8, 0xCDE8, prLV}, // Lo HANGUL SYLLABLE CWI
+ {0xCDE9, 0xCE03, prLVT}, // Lo [27] HANGUL SYLLABLE CWIG..HANGUL SYLLABLE CWIH
+ {0xCE04, 0xCE04, prLV}, // Lo HANGUL SYLLABLE CYU
+ {0xCE05, 0xCE1F, prLVT}, // Lo [27] HANGUL SYLLABLE CYUG..HANGUL SYLLABLE CYUH
+ {0xCE20, 0xCE20, prLV}, // Lo HANGUL SYLLABLE CEU
+ {0xCE21, 0xCE3B, prLVT}, // Lo [27] HANGUL SYLLABLE CEUG..HANGUL SYLLABLE CEUH
+ {0xCE3C, 0xCE3C, prLV}, // Lo HANGUL SYLLABLE CYI
+ {0xCE3D, 0xCE57, prLVT}, // Lo [27] HANGUL SYLLABLE CYIG..HANGUL SYLLABLE CYIH
+ {0xCE58, 0xCE58, prLV}, // Lo HANGUL SYLLABLE CI
+ {0xCE59, 0xCE73, prLVT}, // Lo [27] HANGUL SYLLABLE CIG..HANGUL SYLLABLE CIH
+ {0xCE74, 0xCE74, prLV}, // Lo HANGUL SYLLABLE KA
+ {0xCE75, 0xCE8F, prLVT}, // Lo [27] HANGUL SYLLABLE KAG..HANGUL SYLLABLE KAH
+ {0xCE90, 0xCE90, prLV}, // Lo HANGUL SYLLABLE KAE
+ {0xCE91, 0xCEAB, prLVT}, // Lo [27] HANGUL SYLLABLE KAEG..HANGUL SYLLABLE KAEH
+ {0xCEAC, 0xCEAC, prLV}, // Lo HANGUL SYLLABLE KYA
+ {0xCEAD, 0xCEC7, prLVT}, // Lo [27] HANGUL SYLLABLE KYAG..HANGUL SYLLABLE KYAH
+ {0xCEC8, 0xCEC8, prLV}, // Lo HANGUL SYLLABLE KYAE
+ {0xCEC9, 0xCEE3, prLVT}, // Lo [27] HANGUL SYLLABLE KYAEG..HANGUL SYLLABLE KYAEH
+ {0xCEE4, 0xCEE4, prLV}, // Lo HANGUL SYLLABLE KEO
+ {0xCEE5, 0xCEFF, prLVT}, // Lo [27] HANGUL SYLLABLE KEOG..HANGUL SYLLABLE KEOH
+ {0xCF00, 0xCF00, prLV}, // Lo HANGUL SYLLABLE KE
+ {0xCF01, 0xCF1B, prLVT}, // Lo [27] HANGUL SYLLABLE KEG..HANGUL SYLLABLE KEH
+ {0xCF1C, 0xCF1C, prLV}, // Lo HANGUL SYLLABLE KYEO
+ {0xCF1D, 0xCF37, prLVT}, // Lo [27] HANGUL SYLLABLE KYEOG..HANGUL SYLLABLE KYEOH
+ {0xCF38, 0xCF38, prLV}, // Lo HANGUL SYLLABLE KYE
+ {0xCF39, 0xCF53, prLVT}, // Lo [27] HANGUL SYLLABLE KYEG..HANGUL SYLLABLE KYEH
+ {0xCF54, 0xCF54, prLV}, // Lo HANGUL SYLLABLE KO
+ {0xCF55, 0xCF6F, prLVT}, // Lo [27] HANGUL SYLLABLE KOG..HANGUL SYLLABLE KOH
+ {0xCF70, 0xCF70, prLV}, // Lo HANGUL SYLLABLE KWA
+ {0xCF71, 0xCF8B, prLVT}, // Lo [27] HANGUL SYLLABLE KWAG..HANGUL SYLLABLE KWAH
+ {0xCF8C, 0xCF8C, prLV}, // Lo HANGUL SYLLABLE KWAE
+ {0xCF8D, 0xCFA7, prLVT}, // Lo [27] HANGUL SYLLABLE KWAEG..HANGUL SYLLABLE KWAEH
+ {0xCFA8, 0xCFA8, prLV}, // Lo HANGUL SYLLABLE KOE
+ {0xCFA9, 0xCFC3, prLVT}, // Lo [27] HANGUL SYLLABLE KOEG..HANGUL SYLLABLE KOEH
+ {0xCFC4, 0xCFC4, prLV}, // Lo HANGUL SYLLABLE KYO
+ {0xCFC5, 0xCFDF, prLVT}, // Lo [27] HANGUL SYLLABLE KYOG..HANGUL SYLLABLE KYOH
+ {0xCFE0, 0xCFE0, prLV}, // Lo HANGUL SYLLABLE KU
+ {0xCFE1, 0xCFFB, prLVT}, // Lo [27] HANGUL SYLLABLE KUG..HANGUL SYLLABLE KUH
+ {0xCFFC, 0xCFFC, prLV}, // Lo HANGUL SYLLABLE KWEO
+ {0xCFFD, 0xD017, prLVT}, // Lo [27] HANGUL SYLLABLE KWEOG..HANGUL SYLLABLE KWEOH
+ {0xD018, 0xD018, prLV}, // Lo HANGUL SYLLABLE KWE
+ {0xD019, 0xD033, prLVT}, // Lo [27] HANGUL SYLLABLE KWEG..HANGUL SYLLABLE KWEH
+ {0xD034, 0xD034, prLV}, // Lo HANGUL SYLLABLE KWI
+ {0xD035, 0xD04F, prLVT}, // Lo [27] HANGUL SYLLABLE KWIG..HANGUL SYLLABLE KWIH
+ {0xD050, 0xD050, prLV}, // Lo HANGUL SYLLABLE KYU
+ {0xD051, 0xD06B, prLVT}, // Lo [27] HANGUL SYLLABLE KYUG..HANGUL SYLLABLE KYUH
+ {0xD06C, 0xD06C, prLV}, // Lo HANGUL SYLLABLE KEU
+ {0xD06D, 0xD087, prLVT}, // Lo [27] HANGUL SYLLABLE KEUG..HANGUL SYLLABLE KEUH
+ {0xD088, 0xD088, prLV}, // Lo HANGUL SYLLABLE KYI
+ {0xD089, 0xD0A3, prLVT}, // Lo [27] HANGUL SYLLABLE KYIG..HANGUL SYLLABLE KYIH
+ {0xD0A4, 0xD0A4, prLV}, // Lo HANGUL SYLLABLE KI
+ {0xD0A5, 0xD0BF, prLVT}, // Lo [27] HANGUL SYLLABLE KIG..HANGUL SYLLABLE KIH
+ {0xD0C0, 0xD0C0, prLV}, // Lo HANGUL SYLLABLE TA
+ {0xD0C1, 0xD0DB, prLVT}, // Lo [27] HANGUL SYLLABLE TAG..HANGUL SYLLABLE TAH
+ {0xD0DC, 0xD0DC, prLV}, // Lo HANGUL SYLLABLE TAE
+ {0xD0DD, 0xD0F7, prLVT}, // Lo [27] HANGUL SYLLABLE TAEG..HANGUL SYLLABLE TAEH
+ {0xD0F8, 0xD0F8, prLV}, // Lo HANGUL SYLLABLE TYA
+ {0xD0F9, 0xD113, prLVT}, // Lo [27] HANGUL SYLLABLE TYAG..HANGUL SYLLABLE TYAH
+ {0xD114, 0xD114, prLV}, // Lo HANGUL SYLLABLE TYAE
+ {0xD115, 0xD12F, prLVT}, // Lo [27] HANGUL SYLLABLE TYAEG..HANGUL SYLLABLE TYAEH
+ {0xD130, 0xD130, prLV}, // Lo HANGUL SYLLABLE TEO
+ {0xD131, 0xD14B, prLVT}, // Lo [27] HANGUL SYLLABLE TEOG..HANGUL SYLLABLE TEOH
+ {0xD14C, 0xD14C, prLV}, // Lo HANGUL SYLLABLE TE
+ {0xD14D, 0xD167, prLVT}, // Lo [27] HANGUL SYLLABLE TEG..HANGUL SYLLABLE TEH
+ {0xD168, 0xD168, prLV}, // Lo HANGUL SYLLABLE TYEO
+ {0xD169, 0xD183, prLVT}, // Lo [27] HANGUL SYLLABLE TYEOG..HANGUL SYLLABLE TYEOH
+ {0xD184, 0xD184, prLV}, // Lo HANGUL SYLLABLE TYE
+ {0xD185, 0xD19F, prLVT}, // Lo [27] HANGUL SYLLABLE TYEG..HANGUL SYLLABLE TYEH
+ {0xD1A0, 0xD1A0, prLV}, // Lo HANGUL SYLLABLE TO
+ {0xD1A1, 0xD1BB, prLVT}, // Lo [27] HANGUL SYLLABLE TOG..HANGUL SYLLABLE TOH
+ {0xD1BC, 0xD1BC, prLV}, // Lo HANGUL SYLLABLE TWA
+ {0xD1BD, 0xD1D7, prLVT}, // Lo [27] HANGUL SYLLABLE TWAG..HANGUL SYLLABLE TWAH
+ {0xD1D8, 0xD1D8, prLV}, // Lo HANGUL SYLLABLE TWAE
+ {0xD1D9, 0xD1F3, prLVT}, // Lo [27] HANGUL SYLLABLE TWAEG..HANGUL SYLLABLE TWAEH
+ {0xD1F4, 0xD1F4, prLV}, // Lo HANGUL SYLLABLE TOE
+ {0xD1F5, 0xD20F, prLVT}, // Lo [27] HANGUL SYLLABLE TOEG..HANGUL SYLLABLE TOEH
+ {0xD210, 0xD210, prLV}, // Lo HANGUL SYLLABLE TYO
+ {0xD211, 0xD22B, prLVT}, // Lo [27] HANGUL SYLLABLE TYOG..HANGUL SYLLABLE TYOH
+ {0xD22C, 0xD22C, prLV}, // Lo HANGUL SYLLABLE TU
+ {0xD22D, 0xD247, prLVT}, // Lo [27] HANGUL SYLLABLE TUG..HANGUL SYLLABLE TUH
+ {0xD248, 0xD248, prLV}, // Lo HANGUL SYLLABLE TWEO
+ {0xD249, 0xD263, prLVT}, // Lo [27] HANGUL SYLLABLE TWEOG..HANGUL SYLLABLE TWEOH
+ {0xD264, 0xD264, prLV}, // Lo HANGUL SYLLABLE TWE
+ {0xD265, 0xD27F, prLVT}, // Lo [27] HANGUL SYLLABLE TWEG..HANGUL SYLLABLE TWEH
+ {0xD280, 0xD280, prLV}, // Lo HANGUL SYLLABLE TWI
+ {0xD281, 0xD29B, prLVT}, // Lo [27] HANGUL SYLLABLE TWIG..HANGUL SYLLABLE TWIH
+ {0xD29C, 0xD29C, prLV}, // Lo HANGUL SYLLABLE TYU
+ {0xD29D, 0xD2B7, prLVT}, // Lo [27] HANGUL SYLLABLE TYUG..HANGUL SYLLABLE TYUH
+ {0xD2B8, 0xD2B8, prLV}, // Lo HANGUL SYLLABLE TEU
+ {0xD2B9, 0xD2D3, prLVT}, // Lo [27] HANGUL SYLLABLE TEUG..HANGUL SYLLABLE TEUH
+ {0xD2D4, 0xD2D4, prLV}, // Lo HANGUL SYLLABLE TYI
+ {0xD2D5, 0xD2EF, prLVT}, // Lo [27] HANGUL SYLLABLE TYIG..HANGUL SYLLABLE TYIH
+ {0xD2F0, 0xD2F0, prLV}, // Lo HANGUL SYLLABLE TI
+ {0xD2F1, 0xD30B, prLVT}, // Lo [27] HANGUL SYLLABLE TIG..HANGUL SYLLABLE TIH
+ {0xD30C, 0xD30C, prLV}, // Lo HANGUL SYLLABLE PA
+ {0xD30D, 0xD327, prLVT}, // Lo [27] HANGUL SYLLABLE PAG..HANGUL SYLLABLE PAH
+ {0xD328, 0xD328, prLV}, // Lo HANGUL SYLLABLE PAE
+ {0xD329, 0xD343, prLVT}, // Lo [27] HANGUL SYLLABLE PAEG..HANGUL SYLLABLE PAEH
+ {0xD344, 0xD344, prLV}, // Lo HANGUL SYLLABLE PYA
+ {0xD345, 0xD35F, prLVT}, // Lo [27] HANGUL SYLLABLE PYAG..HANGUL SYLLABLE PYAH
+ {0xD360, 0xD360, prLV}, // Lo HANGUL SYLLABLE PYAE
+ {0xD361, 0xD37B, prLVT}, // Lo [27] HANGUL SYLLABLE PYAEG..HANGUL SYLLABLE PYAEH
+ {0xD37C, 0xD37C, prLV}, // Lo HANGUL SYLLABLE PEO
+ {0xD37D, 0xD397, prLVT}, // Lo [27] HANGUL SYLLABLE PEOG..HANGUL SYLLABLE PEOH
+ {0xD398, 0xD398, prLV}, // Lo HANGUL SYLLABLE PE
+ {0xD399, 0xD3B3, prLVT}, // Lo [27] HANGUL SYLLABLE PEG..HANGUL SYLLABLE PEH
+ {0xD3B4, 0xD3B4, prLV}, // Lo HANGUL SYLLABLE PYEO
+ {0xD3B5, 0xD3CF, prLVT}, // Lo [27] HANGUL SYLLABLE PYEOG..HANGUL SYLLABLE PYEOH
+ {0xD3D0, 0xD3D0, prLV}, // Lo HANGUL SYLLABLE PYE
+ {0xD3D1, 0xD3EB, prLVT}, // Lo [27] HANGUL SYLLABLE PYEG..HANGUL SYLLABLE PYEH
+ {0xD3EC, 0xD3EC, prLV}, // Lo HANGUL SYLLABLE PO
+ {0xD3ED, 0xD407, prLVT}, // Lo [27] HANGUL SYLLABLE POG..HANGUL SYLLABLE POH
+ {0xD408, 0xD408, prLV}, // Lo HANGUL SYLLABLE PWA
+ {0xD409, 0xD423, prLVT}, // Lo [27] HANGUL SYLLABLE PWAG..HANGUL SYLLABLE PWAH
+ {0xD424, 0xD424, prLV}, // Lo HANGUL SYLLABLE PWAE
+ {0xD425, 0xD43F, prLVT}, // Lo [27] HANGUL SYLLABLE PWAEG..HANGUL SYLLABLE PWAEH
+ {0xD440, 0xD440, prLV}, // Lo HANGUL SYLLABLE POE
+ {0xD441, 0xD45B, prLVT}, // Lo [27] HANGUL SYLLABLE POEG..HANGUL SYLLABLE POEH
+ {0xD45C, 0xD45C, prLV}, // Lo HANGUL SYLLABLE PYO
+ {0xD45D, 0xD477, prLVT}, // Lo [27] HANGUL SYLLABLE PYOG..HANGUL SYLLABLE PYOH
+ {0xD478, 0xD478, prLV}, // Lo HANGUL SYLLABLE PU
+ {0xD479, 0xD493, prLVT}, // Lo [27] HANGUL SYLLABLE PUG..HANGUL SYLLABLE PUH
+ {0xD494, 0xD494, prLV}, // Lo HANGUL SYLLABLE PWEO
+ {0xD495, 0xD4AF, prLVT}, // Lo [27] HANGUL SYLLABLE PWEOG..HANGUL SYLLABLE PWEOH
+ {0xD4B0, 0xD4B0, prLV}, // Lo HANGUL SYLLABLE PWE
+ {0xD4B1, 0xD4CB, prLVT}, // Lo [27] HANGUL SYLLABLE PWEG..HANGUL SYLLABLE PWEH
+ {0xD4CC, 0xD4CC, prLV}, // Lo HANGUL SYLLABLE PWI
+ {0xD4CD, 0xD4E7, prLVT}, // Lo [27] HANGUL SYLLABLE PWIG..HANGUL SYLLABLE PWIH
+ {0xD4E8, 0xD4E8, prLV}, // Lo HANGUL SYLLABLE PYU
+ {0xD4E9, 0xD503, prLVT}, // Lo [27] HANGUL SYLLABLE PYUG..HANGUL SYLLABLE PYUH
+ {0xD504, 0xD504, prLV}, // Lo HANGUL SYLLABLE PEU
+ {0xD505, 0xD51F, prLVT}, // Lo [27] HANGUL SYLLABLE PEUG..HANGUL SYLLABLE PEUH
+ {0xD520, 0xD520, prLV}, // Lo HANGUL SYLLABLE PYI
+ {0xD521, 0xD53B, prLVT}, // Lo [27] HANGUL SYLLABLE PYIG..HANGUL SYLLABLE PYIH
+ {0xD53C, 0xD53C, prLV}, // Lo HANGUL SYLLABLE PI
+ {0xD53D, 0xD557, prLVT}, // Lo [27] HANGUL SYLLABLE PIG..HANGUL SYLLABLE PIH
+ {0xD558, 0xD558, prLV}, // Lo HANGUL SYLLABLE HA
+ {0xD559, 0xD573, prLVT}, // Lo [27] HANGUL SYLLABLE HAG..HANGUL SYLLABLE HAH
+ {0xD574, 0xD574, prLV}, // Lo HANGUL SYLLABLE HAE
+ {0xD575, 0xD58F, prLVT}, // Lo [27] HANGUL SYLLABLE HAEG..HANGUL SYLLABLE HAEH
+ {0xD590, 0xD590, prLV}, // Lo HANGUL SYLLABLE HYA
+ {0xD591, 0xD5AB, prLVT}, // Lo [27] HANGUL SYLLABLE HYAG..HANGUL SYLLABLE HYAH
+ {0xD5AC, 0xD5AC, prLV}, // Lo HANGUL SYLLABLE HYAE
+ {0xD5AD, 0xD5C7, prLVT}, // Lo [27] HANGUL SYLLABLE HYAEG..HANGUL SYLLABLE HYAEH
+ {0xD5C8, 0xD5C8, prLV}, // Lo HANGUL SYLLABLE HEO
+ {0xD5C9, 0xD5E3, prLVT}, // Lo [27] HANGUL SYLLABLE HEOG..HANGUL SYLLABLE HEOH
+ {0xD5E4, 0xD5E4, prLV}, // Lo HANGUL SYLLABLE HE
+ {0xD5E5, 0xD5FF, prLVT}, // Lo [27] HANGUL SYLLABLE HEG..HANGUL SYLLABLE HEH
+ {0xD600, 0xD600, prLV}, // Lo HANGUL SYLLABLE HYEO
+ {0xD601, 0xD61B, prLVT}, // Lo [27] HANGUL SYLLABLE HYEOG..HANGUL SYLLABLE HYEOH
+ {0xD61C, 0xD61C, prLV}, // Lo HANGUL SYLLABLE HYE
+ {0xD61D, 0xD637, prLVT}, // Lo [27] HANGUL SYLLABLE HYEG..HANGUL SYLLABLE HYEH
+ {0xD638, 0xD638, prLV}, // Lo HANGUL SYLLABLE HO
+ {0xD639, 0xD653, prLVT}, // Lo [27] HANGUL SYLLABLE HOG..HANGUL SYLLABLE HOH
+ {0xD654, 0xD654, prLV}, // Lo HANGUL SYLLABLE HWA
+ {0xD655, 0xD66F, prLVT}, // Lo [27] HANGUL SYLLABLE HWAG..HANGUL SYLLABLE HWAH
+ {0xD670, 0xD670, prLV}, // Lo HANGUL SYLLABLE HWAE
+ {0xD671, 0xD68B, prLVT}, // Lo [27] HANGUL SYLLABLE HWAEG..HANGUL SYLLABLE HWAEH
+ {0xD68C, 0xD68C, prLV}, // Lo HANGUL SYLLABLE HOE
+ {0xD68D, 0xD6A7, prLVT}, // Lo [27] HANGUL SYLLABLE HOEG..HANGUL SYLLABLE HOEH
+ {0xD6A8, 0xD6A8, prLV}, // Lo HANGUL SYLLABLE HYO
+ {0xD6A9, 0xD6C3, prLVT}, // Lo [27] HANGUL SYLLABLE HYOG..HANGUL SYLLABLE HYOH
+ {0xD6C4, 0xD6C4, prLV}, // Lo HANGUL SYLLABLE HU
+ {0xD6C5, 0xD6DF, prLVT}, // Lo [27] HANGUL SYLLABLE HUG..HANGUL SYLLABLE HUH
+ {0xD6E0, 0xD6E0, prLV}, // Lo HANGUL SYLLABLE HWEO
+ {0xD6E1, 0xD6FB, prLVT}, // Lo [27] HANGUL SYLLABLE HWEOG..HANGUL SYLLABLE HWEOH
+ {0xD6FC, 0xD6FC, prLV}, // Lo HANGUL SYLLABLE HWE
+ {0xD6FD, 0xD717, prLVT}, // Lo [27] HANGUL SYLLABLE HWEG..HANGUL SYLLABLE HWEH
+ {0xD718, 0xD718, prLV}, // Lo HANGUL SYLLABLE HWI
+ {0xD719, 0xD733, prLVT}, // Lo [27] HANGUL SYLLABLE HWIG..HANGUL SYLLABLE HWIH
+ {0xD734, 0xD734, prLV}, // Lo HANGUL SYLLABLE HYU
+ {0xD735, 0xD74F, prLVT}, // Lo [27] HANGUL SYLLABLE HYUG..HANGUL SYLLABLE HYUH
+ {0xD750, 0xD750, prLV}, // Lo HANGUL SYLLABLE HEU
+ {0xD751, 0xD76B, prLVT}, // Lo [27] HANGUL SYLLABLE HEUG..HANGUL SYLLABLE HEUH
+ {0xD76C, 0xD76C, prLV}, // Lo HANGUL SYLLABLE HYI
+ {0xD76D, 0xD787, prLVT}, // Lo [27] HANGUL SYLLABLE HYIG..HANGUL SYLLABLE HYIH
+ {0xD788, 0xD788, prLV}, // Lo HANGUL SYLLABLE HI
+ {0xD789, 0xD7A3, prLVT}, // Lo [27] HANGUL SYLLABLE HIG..HANGUL SYLLABLE HIH
+ {0xD7B0, 0xD7C6, prV}, // Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E
+ {0xD7CB, 0xD7FB, prT}, // Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH
+ {0xFB1E, 0xFB1E, prExtend}, // Mn HEBREW POINT JUDEO-SPANISH VARIKA
+ {0xFE00, 0xFE0F, prExtend}, // Mn [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16
+ {0xFE20, 0xFE2F, prExtend}, // Mn [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF
+ {0xFEFF, 0xFEFF, prControl}, // Cf ZERO WIDTH NO-BREAK SPACE
+ {0xFF9E, 0xFF9F, prExtend}, // Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
+ {0xFFF0, 0xFFF8, prControl}, // Cn [9] <reserved-FFF0>..<reserved-FFF8>
+ {0xFFF9, 0xFFFB, prControl}, // Cf [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR
+ {0x101FD, 0x101FD, prExtend}, // Mn PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE
+ {0x102E0, 0x102E0, prExtend}, // Mn COPTIC EPACT THOUSANDS MARK
+ {0x10376, 0x1037A, prExtend}, // Mn [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII
+ {0x10A01, 0x10A03, prExtend}, // Mn [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R
+ {0x10A05, 0x10A06, prExtend}, // Mn [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O
+ {0x10A0C, 0x10A0F, prExtend}, // Mn [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA
+ {0x10A38, 0x10A3A, prExtend}, // Mn [3] KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW
+ {0x10A3F, 0x10A3F, prExtend}, // Mn KHAROSHTHI VIRAMA
+ {0x10AE5, 0x10AE6, prExtend}, // Mn [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
+ {0x10D24, 0x10D27, prExtend}, // Mn [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
+ {0x10EAB, 0x10EAC, prExtend}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10F46, 0x10F50, prExtend}, // Mn [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
+ {0x10F82, 0x10F85, prExtend}, // Mn [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
+ {0x11000, 0x11000, prSpacingMark}, // Mc BRAHMI SIGN CANDRABINDU
+ {0x11001, 0x11001, prExtend}, // Mn BRAHMI SIGN ANUSVARA
+ {0x11002, 0x11002, prSpacingMark}, // Mc BRAHMI SIGN VISARGA
+ {0x11038, 0x11046, prExtend}, // Mn [15] BRAHMI VOWEL SIGN AA..BRAHMI VIRAMA
+ {0x11070, 0x11070, prExtend}, // Mn BRAHMI SIGN OLD TAMIL VIRAMA
+ {0x11073, 0x11074, prExtend}, // Mn [2] BRAHMI VOWEL SIGN OLD TAMIL SHORT E..BRAHMI VOWEL SIGN OLD TAMIL SHORT O
+ {0x1107F, 0x11081, prExtend}, // Mn [3] BRAHMI NUMBER JOINER..KAITHI SIGN ANUSVARA
+ {0x11082, 0x11082, prSpacingMark}, // Mc KAITHI SIGN VISARGA
+ {0x110B0, 0x110B2, prSpacingMark}, // Mc [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II
+ {0x110B3, 0x110B6, prExtend}, // Mn [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI
+ {0x110B7, 0x110B8, prSpacingMark}, // Mc [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU
+ {0x110B9, 0x110BA, prExtend}, // Mn [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA
+ {0x110BD, 0x110BD, prPrepend}, // Cf KAITHI NUMBER SIGN
+ {0x110C2, 0x110C2, prExtend}, // Mn KAITHI VOWEL SIGN VOCALIC R
+ {0x110CD, 0x110CD, prPrepend}, // Cf KAITHI NUMBER SIGN ABOVE
+ {0x11100, 0x11102, prExtend}, // Mn [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA
+ {0x11127, 0x1112B, prExtend}, // Mn [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU
+ {0x1112C, 0x1112C, prSpacingMark}, // Mc CHAKMA VOWEL SIGN E
+ {0x1112D, 0x11134, prExtend}, // Mn [8] CHAKMA VOWEL SIGN AI..CHAKMA MAAYYAA
+ {0x11145, 0x11146, prSpacingMark}, // Mc [2] CHAKMA VOWEL SIGN AA..CHAKMA VOWEL SIGN EI
+ {0x11173, 0x11173, prExtend}, // Mn MAHAJANI SIGN NUKTA
+ {0x11180, 0x11181, prExtend}, // Mn [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA
+ {0x11182, 0x11182, prSpacingMark}, // Mc SHARADA SIGN VISARGA
+ {0x111B3, 0x111B5, prSpacingMark}, // Mc [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II
+ {0x111B6, 0x111BE, prExtend}, // Mn [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O
+ {0x111BF, 0x111C0, prSpacingMark}, // Mc [2] SHARADA VOWEL SIGN AU..SHARADA SIGN VIRAMA
+ {0x111C2, 0x111C3, prPrepend}, // Lo [2] SHARADA SIGN JIHVAMULIYA..SHARADA SIGN UPADHMANIYA
+ {0x111C9, 0x111CC, prExtend}, // Mn [4] SHARADA SANDHI MARK..SHARADA EXTRA SHORT VOWEL MARK
+ {0x111CE, 0x111CE, prSpacingMark}, // Mc SHARADA VOWEL SIGN PRISHTHAMATRA E
+ {0x111CF, 0x111CF, prExtend}, // Mn SHARADA SIGN INVERTED CANDRABINDU
+ {0x1122C, 0x1122E, prSpacingMark}, // Mc [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II
+ {0x1122F, 0x11231, prExtend}, // Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI
+ {0x11232, 0x11233, prSpacingMark}, // Mc [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU
+ {0x11234, 0x11234, prExtend}, // Mn KHOJKI SIGN ANUSVARA
+ {0x11235, 0x11235, prSpacingMark}, // Mc KHOJKI SIGN VIRAMA
+ {0x11236, 0x11237, prExtend}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
+ {0x1123E, 0x1123E, prExtend}, // Mn KHOJKI SIGN SUKUN
+ {0x112DF, 0x112DF, prExtend}, // Mn KHUDAWADI SIGN ANUSVARA
+ {0x112E0, 0x112E2, prSpacingMark}, // Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
+ {0x112E3, 0x112EA, prExtend}, // Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
+ {0x11300, 0x11301, prExtend}, // Mn [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU
+ {0x11302, 0x11303, prSpacingMark}, // Mc [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA
+ {0x1133B, 0x1133C, prExtend}, // Mn [2] COMBINING BINDU BELOW..GRANTHA SIGN NUKTA
+ {0x1133E, 0x1133E, prExtend}, // Mc GRANTHA VOWEL SIGN AA
+ {0x1133F, 0x1133F, prSpacingMark}, // Mc GRANTHA VOWEL SIGN I
+ {0x11340, 0x11340, prExtend}, // Mn GRANTHA VOWEL SIGN II
+ {0x11341, 0x11344, prSpacingMark}, // Mc [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR
+ {0x11347, 0x11348, prSpacingMark}, // Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI
+ {0x1134B, 0x1134D, prSpacingMark}, // Mc [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA
+ {0x11357, 0x11357, prExtend}, // Mc GRANTHA AU LENGTH MARK
+ {0x11362, 0x11363, prSpacingMark}, // Mc [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL
+ {0x11366, 0x1136C, prExtend}, // Mn [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX
+ {0x11370, 0x11374, prExtend}, // Mn [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA
+ {0x11435, 0x11437, prSpacingMark}, // Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II
+ {0x11438, 0x1143F, prExtend}, // Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI
+ {0x11440, 0x11441, prSpacingMark}, // Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU
+ {0x11442, 0x11444, prExtend}, // Mn [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA
+ {0x11445, 0x11445, prSpacingMark}, // Mc NEWA SIGN VISARGA
+ {0x11446, 0x11446, prExtend}, // Mn NEWA SIGN NUKTA
+ {0x1145E, 0x1145E, prExtend}, // Mn NEWA SANDHI MARK
+ {0x114B0, 0x114B0, prExtend}, // Mc TIRHUTA VOWEL SIGN AA
+ {0x114B1, 0x114B2, prSpacingMark}, // Mc [2] TIRHUTA VOWEL SIGN I..TIRHUTA VOWEL SIGN II
+ {0x114B3, 0x114B8, prExtend}, // Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL
+ {0x114B9, 0x114B9, prSpacingMark}, // Mc TIRHUTA VOWEL SIGN E
+ {0x114BA, 0x114BA, prExtend}, // Mn TIRHUTA VOWEL SIGN SHORT E
+ {0x114BB, 0x114BC, prSpacingMark}, // Mc [2] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN O
+ {0x114BD, 0x114BD, prExtend}, // Mc TIRHUTA VOWEL SIGN SHORT O
+ {0x114BE, 0x114BE, prSpacingMark}, // Mc TIRHUTA VOWEL SIGN AU
+ {0x114BF, 0x114C0, prExtend}, // Mn [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA
+ {0x114C1, 0x114C1, prSpacingMark}, // Mc TIRHUTA SIGN VISARGA
+ {0x114C2, 0x114C3, prExtend}, // Mn [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA
+ {0x115AF, 0x115AF, prExtend}, // Mc SIDDHAM VOWEL SIGN AA
+ {0x115B0, 0x115B1, prSpacingMark}, // Mc [2] SIDDHAM VOWEL SIGN I..SIDDHAM VOWEL SIGN II
+ {0x115B2, 0x115B5, prExtend}, // Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR
+ {0x115B8, 0x115BB, prSpacingMark}, // Mc [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU
+ {0x115BC, 0x115BD, prExtend}, // Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA
+ {0x115BE, 0x115BE, prSpacingMark}, // Mc SIDDHAM SIGN VISARGA
+ {0x115BF, 0x115C0, prExtend}, // Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA
+ {0x115DC, 0x115DD, prExtend}, // Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU
+ {0x11630, 0x11632, prSpacingMark}, // Mc [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II
+ {0x11633, 0x1163A, prExtend}, // Mn [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI
+ {0x1163B, 0x1163C, prSpacingMark}, // Mc [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU
+ {0x1163D, 0x1163D, prExtend}, // Mn MODI SIGN ANUSVARA
+ {0x1163E, 0x1163E, prSpacingMark}, // Mc MODI SIGN VISARGA
+ {0x1163F, 0x11640, prExtend}, // Mn [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA
+ {0x116AB, 0x116AB, prExtend}, // Mn TAKRI SIGN ANUSVARA
+ {0x116AC, 0x116AC, prSpacingMark}, // Mc TAKRI SIGN VISARGA
+ {0x116AD, 0x116AD, prExtend}, // Mn TAKRI VOWEL SIGN AA
+ {0x116AE, 0x116AF, prSpacingMark}, // Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II
+ {0x116B0, 0x116B5, prExtend}, // Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU
+ {0x116B6, 0x116B6, prSpacingMark}, // Mc TAKRI SIGN VIRAMA
+ {0x116B7, 0x116B7, prExtend}, // Mn TAKRI SIGN NUKTA
+ {0x1171D, 0x1171F, prExtend}, // Mn [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA
+ {0x11722, 0x11725, prExtend}, // Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU
+ {0x11726, 0x11726, prSpacingMark}, // Mc AHOM VOWEL SIGN E
+ {0x11727, 0x1172B, prExtend}, // Mn [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER
+ {0x1182C, 0x1182E, prSpacingMark}, // Mc [3] DOGRA VOWEL SIGN AA..DOGRA VOWEL SIGN II
+ {0x1182F, 0x11837, prExtend}, // Mn [9] DOGRA VOWEL SIGN U..DOGRA SIGN ANUSVARA
+ {0x11838, 0x11838, prSpacingMark}, // Mc DOGRA SIGN VISARGA
+ {0x11839, 0x1183A, prExtend}, // Mn [2] DOGRA SIGN VIRAMA..DOGRA SIGN NUKTA
+ {0x11930, 0x11930, prExtend}, // Mc DIVES AKURU VOWEL SIGN AA
+ {0x11931, 0x11935, prSpacingMark}, // Mc [5] DIVES AKURU VOWEL SIGN I..DIVES AKURU VOWEL SIGN E
+ {0x11937, 0x11938, prSpacingMark}, // Mc [2] DIVES AKURU VOWEL SIGN AI..DIVES AKURU VOWEL SIGN O
+ {0x1193B, 0x1193C, prExtend}, // Mn [2] DIVES AKURU SIGN ANUSVARA..DIVES AKURU SIGN CANDRABINDU
+ {0x1193D, 0x1193D, prSpacingMark}, // Mc DIVES AKURU SIGN HALANTA
+ {0x1193E, 0x1193E, prExtend}, // Mn DIVES AKURU VIRAMA
+ {0x1193F, 0x1193F, prPrepend}, // Lo DIVES AKURU PREFIXED NASAL SIGN
+ {0x11940, 0x11940, prSpacingMark}, // Mc DIVES AKURU MEDIAL YA
+ {0x11941, 0x11941, prPrepend}, // Lo DIVES AKURU INITIAL RA
+ {0x11942, 0x11942, prSpacingMark}, // Mc DIVES AKURU MEDIAL RA
+ {0x11943, 0x11943, prExtend}, // Mn DIVES AKURU SIGN NUKTA
+ {0x119D1, 0x119D3, prSpacingMark}, // Mc [3] NANDINAGARI VOWEL SIGN AA..NANDINAGARI VOWEL SIGN II
+ {0x119D4, 0x119D7, prExtend}, // Mn [4] NANDINAGARI VOWEL SIGN U..NANDINAGARI VOWEL SIGN VOCALIC RR
+ {0x119DA, 0x119DB, prExtend}, // Mn [2] NANDINAGARI VOWEL SIGN E..NANDINAGARI VOWEL SIGN AI
+ {0x119DC, 0x119DF, prSpacingMark}, // Mc [4] NANDINAGARI VOWEL SIGN O..NANDINAGARI SIGN VISARGA
+ {0x119E0, 0x119E0, prExtend}, // Mn NANDINAGARI SIGN VIRAMA
+ {0x119E4, 0x119E4, prSpacingMark}, // Mc NANDINAGARI VOWEL SIGN PRISHTHAMATRA E
+ {0x11A01, 0x11A0A, prExtend}, // Mn [10] ZANABAZAR SQUARE VOWEL SIGN I..ZANABAZAR SQUARE VOWEL LENGTH MARK
+ {0x11A33, 0x11A38, prExtend}, // Mn [6] ZANABAZAR SQUARE FINAL CONSONANT MARK..ZANABAZAR SQUARE SIGN ANUSVARA
+ {0x11A39, 0x11A39, prSpacingMark}, // Mc ZANABAZAR SQUARE SIGN VISARGA
+ {0x11A3A, 0x11A3A, prPrepend}, // Lo ZANABAZAR SQUARE CLUSTER-INITIAL LETTER RA
+ {0x11A3B, 0x11A3E, prExtend}, // Mn [4] ZANABAZAR SQUARE CLUSTER-FINAL LETTER YA..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
+ {0x11A47, 0x11A47, prExtend}, // Mn ZANABAZAR SQUARE SUBJOINER
+ {0x11A51, 0x11A56, prExtend}, // Mn [6] SOYOMBO VOWEL SIGN I..SOYOMBO VOWEL SIGN OE
+ {0x11A57, 0x11A58, prSpacingMark}, // Mc [2] SOYOMBO VOWEL SIGN AI..SOYOMBO VOWEL SIGN AU
+ {0x11A59, 0x11A5B, prExtend}, // Mn [3] SOYOMBO VOWEL SIGN VOCALIC R..SOYOMBO VOWEL LENGTH MARK
+ {0x11A84, 0x11A89, prPrepend}, // Lo [6] SOYOMBO SIGN JIHVAMULIYA..SOYOMBO CLUSTER-INITIAL LETTER SA
+ {0x11A8A, 0x11A96, prExtend}, // Mn [13] SOYOMBO FINAL CONSONANT SIGN G..SOYOMBO SIGN ANUSVARA
+ {0x11A97, 0x11A97, prSpacingMark}, // Mc SOYOMBO SIGN VISARGA
+ {0x11A98, 0x11A99, prExtend}, // Mn [2] SOYOMBO GEMINATION MARK..SOYOMBO SUBJOINER
+ {0x11C2F, 0x11C2F, prSpacingMark}, // Mc BHAIKSUKI VOWEL SIGN AA
+ {0x11C30, 0x11C36, prExtend}, // Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L
+ {0x11C38, 0x11C3D, prExtend}, // Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA
+ {0x11C3E, 0x11C3E, prSpacingMark}, // Mc BHAIKSUKI SIGN VISARGA
+ {0x11C3F, 0x11C3F, prExtend}, // Mn BHAIKSUKI SIGN VIRAMA
+ {0x11C92, 0x11CA7, prExtend}, // Mn [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
+ {0x11CA9, 0x11CA9, prSpacingMark}, // Mc MARCHEN SUBJOINED LETTER YA
+ {0x11CAA, 0x11CB0, prExtend}, // Mn [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA
+ {0x11CB1, 0x11CB1, prSpacingMark}, // Mc MARCHEN VOWEL SIGN I
+ {0x11CB2, 0x11CB3, prExtend}, // Mn [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E
+ {0x11CB4, 0x11CB4, prSpacingMark}, // Mc MARCHEN VOWEL SIGN O
+ {0x11CB5, 0x11CB6, prExtend}, // Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU
+ {0x11D31, 0x11D36, prExtend}, // Mn [6] MASARAM GONDI VOWEL SIGN AA..MASARAM GONDI VOWEL SIGN VOCALIC R
+ {0x11D3A, 0x11D3A, prExtend}, // Mn MASARAM GONDI VOWEL SIGN E
+ {0x11D3C, 0x11D3D, prExtend}, // Mn [2] MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
+ {0x11D3F, 0x11D45, prExtend}, // Mn [7] MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI VIRAMA
+ {0x11D46, 0x11D46, prPrepend}, // Lo MASARAM GONDI REPHA
+ {0x11D47, 0x11D47, prExtend}, // Mn MASARAM GONDI RA-KARA
+ {0x11D8A, 0x11D8E, prSpacingMark}, // Mc [5] GUNJALA GONDI VOWEL SIGN AA..GUNJALA GONDI VOWEL SIGN UU
+ {0x11D90, 0x11D91, prExtend}, // Mn [2] GUNJALA GONDI VOWEL SIGN EE..GUNJALA GONDI VOWEL SIGN AI
+ {0x11D93, 0x11D94, prSpacingMark}, // Mc [2] GUNJALA GONDI VOWEL SIGN OO..GUNJALA GONDI VOWEL SIGN AU
+ {0x11D95, 0x11D95, prExtend}, // Mn GUNJALA GONDI SIGN ANUSVARA
+ {0x11D96, 0x11D96, prSpacingMark}, // Mc GUNJALA GONDI SIGN VISARGA
+ {0x11D97, 0x11D97, prExtend}, // Mn GUNJALA GONDI VIRAMA
+ {0x11EF3, 0x11EF4, prExtend}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
+ {0x11EF5, 0x11EF6, prSpacingMark}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x13430, 0x13438, prControl}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x16AF0, 0x16AF4, prExtend}, // Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
+ {0x16B30, 0x16B36, prExtend}, // Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
+ {0x16F4F, 0x16F4F, prExtend}, // Mn MIAO SIGN CONSONANT MODIFIER BAR
+ {0x16F51, 0x16F87, prSpacingMark}, // Mc [55] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN UI
+ {0x16F8F, 0x16F92, prExtend}, // Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW
+ {0x16FE4, 0x16FE4, prExtend}, // Mn KHITAN SMALL SCRIPT FILLER
+ {0x16FF0, 0x16FF1, prSpacingMark}, // Mc [2] VIETNAMESE ALTERNATE READING MARK CA..VIETNAMESE ALTERNATE READING MARK NHAY
+ {0x1BC9D, 0x1BC9E, prExtend}, // Mn [2] DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK
+ {0x1BCA0, 0x1BCA3, prControl}, // Cf [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP
+ {0x1CF00, 0x1CF2D, prExtend}, // Mn [46] ZNAMENNY COMBINING MARK GORAZDO NIZKO S KRYZHEM ON LEFT..ZNAMENNY COMBINING MARK KRYZH ON LEFT
+ {0x1CF30, 0x1CF46, prExtend}, // Mn [23] ZNAMENNY COMBINING TONAL RANGE MARK MRACHNO..ZNAMENNY PRIZNAK MODIFIER ROG
+ {0x1D165, 0x1D165, prExtend}, // Mc MUSICAL SYMBOL COMBINING STEM
+ {0x1D166, 0x1D166, prSpacingMark}, // Mc MUSICAL SYMBOL COMBINING SPRECHGESANG STEM
+ {0x1D167, 0x1D169, prExtend}, // Mn [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3
+ {0x1D16D, 0x1D16D, prSpacingMark}, // Mc MUSICAL SYMBOL COMBINING AUGMENTATION DOT
+ {0x1D16E, 0x1D172, prExtend}, // Mc [5] MUSICAL SYMBOL COMBINING FLAG-1..MUSICAL SYMBOL COMBINING FLAG-5
+ {0x1D173, 0x1D17A, prControl}, // Cf [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE
+ {0x1D17B, 0x1D182, prExtend}, // Mn [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE
+ {0x1D185, 0x1D18B, prExtend}, // Mn [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE
+ {0x1D1AA, 0x1D1AD, prExtend}, // Mn [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO
+ {0x1D242, 0x1D244, prExtend}, // Mn [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
+ {0x1DA00, 0x1DA36, prExtend}, // Mn [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+ {0x1DA3B, 0x1DA6C, prExtend}, // Mn [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+ {0x1DA75, 0x1DA75, prExtend}, // Mn SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+ {0x1DA84, 0x1DA84, prExtend}, // Mn SIGNWRITING LOCATION HEAD NECK
+ {0x1DA9B, 0x1DA9F, prExtend}, // Mn [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+ {0x1DAA1, 0x1DAAF, prExtend}, // Mn [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+ {0x1E000, 0x1E006, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+ {0x1E008, 0x1E018, prExtend}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+ {0x1E01B, 0x1E021, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+ {0x1E023, 0x1E024, prExtend}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+ {0x1E026, 0x1E02A, prExtend}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E130, 0x1E136, prExtend}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
+ {0x1E2AE, 0x1E2AE, prExtend}, // Mn TOTO SIGN RISING TONE
+ {0x1E2EC, 0x1E2EF, prExtend}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E8D0, 0x1E8D6, prExtend}, // Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+ {0x1E944, 0x1E94A, prExtend}, // Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
+ {0x1F000, 0x1F003, prExtendedPictographic}, // E0.0 [4] (🀀..🀃) MAHJONG TILE EAST WIND..MAHJONG TILE NORTH WIND
+ {0x1F004, 0x1F004, prExtendedPictographic}, // E0.6 [1] (🀄) mahjong red dragon
+ {0x1F005, 0x1F0CE, prExtendedPictographic}, // E0.0 [202] (🀅..🃎) MAHJONG TILE GREEN DRAGON..PLAYING CARD KING OF DIAMONDS
+ {0x1F0CF, 0x1F0CF, prExtendedPictographic}, // E0.6 [1] (🃏) joker
+ {0x1F0D0, 0x1F0FF, prExtendedPictographic}, // E0.0 [48] (..) <reserved-1F0D0>..<reserved-1F0FF>
+ {0x1F10D, 0x1F10F, prExtendedPictographic}, // E0.0 [3] (🄍..🄏) CIRCLED ZERO WITH SLASH..CIRCLED DOLLAR SIGN WITH OVERLAID BACKSLASH
+ {0x1F12F, 0x1F12F, prExtendedPictographic}, // E0.0 [1] (🄯) COPYLEFT SYMBOL
+ {0x1F16C, 0x1F16F, prExtendedPictographic}, // E0.0 [4] (🅬..🅯) RAISED MR SIGN..CIRCLED HUMAN FIGURE
+ {0x1F170, 0x1F171, prExtendedPictographic}, // E0.6 [2] (🅰️..🅱️) A button (blood type)..B button (blood type)
+ {0x1F17E, 0x1F17F, prExtendedPictographic}, // E0.6 [2] (🅾️..🅿️) O button (blood type)..P button
+ {0x1F18E, 0x1F18E, prExtendedPictographic}, // E0.6 [1] (🆎) AB button (blood type)
+ {0x1F191, 0x1F19A, prExtendedPictographic}, // E0.6 [10] (🆑..🆚) CL button..VS button
+ {0x1F1AD, 0x1F1E5, prExtendedPictographic}, // E0.0 [57] (🆭..) MASK WORK SYMBOL..<reserved-1F1E5>
+ {0x1F1E6, 0x1F1FF, prRegionalIndicator}, // So [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
+ {0x1F201, 0x1F202, prExtendedPictographic}, // E0.6 [2] (🈁..🈂️) Japanese “here” button..Japanese “service charge” button
+ {0x1F203, 0x1F20F, prExtendedPictographic}, // E0.0 [13] (..) <reserved-1F203>..<reserved-1F20F>
+ {0x1F21A, 0x1F21A, prExtendedPictographic}, // E0.6 [1] (🈚) Japanese “free of charge” button
+ {0x1F22F, 0x1F22F, prExtendedPictographic}, // E0.6 [1] (🈯) Japanese “reserved” button
+ {0x1F232, 0x1F23A, prExtendedPictographic}, // E0.6 [9] (🈲..🈺) Japanese “prohibited” button..Japanese “open for business” button
+ {0x1F23C, 0x1F23F, prExtendedPictographic}, // E0.0 [4] (..) <reserved-1F23C>..<reserved-1F23F>
+ {0x1F249, 0x1F24F, prExtendedPictographic}, // E0.0 [7] (..) <reserved-1F249>..<reserved-1F24F>
+ {0x1F250, 0x1F251, prExtendedPictographic}, // E0.6 [2] (🉐..🉑) Japanese “bargain” button..Japanese “acceptable” button
+ {0x1F252, 0x1F2FF, prExtendedPictographic}, // E0.0 [174] (..) <reserved-1F252>..<reserved-1F2FF>
+ {0x1F300, 0x1F30C, prExtendedPictographic}, // E0.6 [13] (🌀..🌌) cyclone..milky way
+ {0x1F30D, 0x1F30E, prExtendedPictographic}, // E0.7 [2] (🌍..🌎) globe showing Europe-Africa..globe showing Americas
+ {0x1F30F, 0x1F30F, prExtendedPictographic}, // E0.6 [1] (🌏) globe showing Asia-Australia
+ {0x1F310, 0x1F310, prExtendedPictographic}, // E1.0 [1] (🌐) globe with meridians
+ {0x1F311, 0x1F311, prExtendedPictographic}, // E0.6 [1] (🌑) new moon
+ {0x1F312, 0x1F312, prExtendedPictographic}, // E1.0 [1] (🌒) waxing crescent moon
+ {0x1F313, 0x1F315, prExtendedPictographic}, // E0.6 [3] (🌓..🌕) first quarter moon..full moon
+ {0x1F316, 0x1F318, prExtendedPictographic}, // E1.0 [3] (🌖..🌘) waning gibbous moon..waning crescent moon
+ {0x1F319, 0x1F319, prExtendedPictographic}, // E0.6 [1] (🌙) crescent moon
+ {0x1F31A, 0x1F31A, prExtendedPictographic}, // E1.0 [1] (🌚) new moon face
+ {0x1F31B, 0x1F31B, prExtendedPictographic}, // E0.6 [1] (🌛) first quarter moon face
+ {0x1F31C, 0x1F31C, prExtendedPictographic}, // E0.7 [1] (🌜) last quarter moon face
+ {0x1F31D, 0x1F31E, prExtendedPictographic}, // E1.0 [2] (🌝..🌞) full moon face..sun with face
+ {0x1F31F, 0x1F320, prExtendedPictographic}, // E0.6 [2] (🌟..🌠) glowing star..shooting star
+ {0x1F321, 0x1F321, prExtendedPictographic}, // E0.7 [1] (🌡️) thermometer
+ {0x1F322, 0x1F323, prExtendedPictographic}, // E0.0 [2] (🌢..🌣) BLACK DROPLET..WHITE SUN
+ {0x1F324, 0x1F32C, prExtendedPictographic}, // E0.7 [9] (🌤️..🌬️) sun behind small cloud..wind face
+ {0x1F32D, 0x1F32F, prExtendedPictographic}, // E1.0 [3] (🌭..🌯) hot dog..burrito
+ {0x1F330, 0x1F331, prExtendedPictographic}, // E0.6 [2] (🌰..🌱) chestnut..seedling
+ {0x1F332, 0x1F333, prExtendedPictographic}, // E1.0 [2] (🌲..🌳) evergreen tree..deciduous tree
+ {0x1F334, 0x1F335, prExtendedPictographic}, // E0.6 [2] (🌴..🌵) palm tree..cactus
+ {0x1F336, 0x1F336, prExtendedPictographic}, // E0.7 [1] (🌶️) hot pepper
+ {0x1F337, 0x1F34A, prExtendedPictographic}, // E0.6 [20] (🌷..🍊) tulip..tangerine
+ {0x1F34B, 0x1F34B, prExtendedPictographic}, // E1.0 [1] (🍋) lemon
+ {0x1F34C, 0x1F34F, prExtendedPictographic}, // E0.6 [4] (🍌..🍏) banana..green apple
+ {0x1F350, 0x1F350, prExtendedPictographic}, // E1.0 [1] (🍐) pear
+ {0x1F351, 0x1F37B, prExtendedPictographic}, // E0.6 [43] (🍑..🍻) peach..clinking beer mugs
+ {0x1F37C, 0x1F37C, prExtendedPictographic}, // E1.0 [1] (🍼) baby bottle
+ {0x1F37D, 0x1F37D, prExtendedPictographic}, // E0.7 [1] (🍽️) fork and knife with plate
+ {0x1F37E, 0x1F37F, prExtendedPictographic}, // E1.0 [2] (🍾..🍿) bottle with popping cork..popcorn
+ {0x1F380, 0x1F393, prExtendedPictographic}, // E0.6 [20] (🎀..🎓) ribbon..graduation cap
+ {0x1F394, 0x1F395, prExtendedPictographic}, // E0.0 [2] (🎔..🎕) HEART WITH TIP ON THE LEFT..BOUQUET OF FLOWERS
+ {0x1F396, 0x1F397, prExtendedPictographic}, // E0.7 [2] (🎖️..🎗️) military medal..reminder ribbon
+ {0x1F398, 0x1F398, prExtendedPictographic}, // E0.0 [1] (🎘) MUSICAL KEYBOARD WITH JACKS
+ {0x1F399, 0x1F39B, prExtendedPictographic}, // E0.7 [3] (🎙️..🎛️) studio microphone..control knobs
+ {0x1F39C, 0x1F39D, prExtendedPictographic}, // E0.0 [2] (🎜..🎝) BEAMED ASCENDING MUSICAL NOTES..BEAMED DESCENDING MUSICAL NOTES
+ {0x1F39E, 0x1F39F, prExtendedPictographic}, // E0.7 [2] (🎞️..🎟️) film frames..admission tickets
+ {0x1F3A0, 0x1F3C4, prExtendedPictographic}, // E0.6 [37] (🎠..🏄) carousel horse..person surfing
+ {0x1F3C5, 0x1F3C5, prExtendedPictographic}, // E1.0 [1] (🏅) sports medal
+ {0x1F3C6, 0x1F3C6, prExtendedPictographic}, // E0.6 [1] (🏆) trophy
+ {0x1F3C7, 0x1F3C7, prExtendedPictographic}, // E1.0 [1] (🏇) horse racing
+ {0x1F3C8, 0x1F3C8, prExtendedPictographic}, // E0.6 [1] (🏈) american football
+ {0x1F3C9, 0x1F3C9, prExtendedPictographic}, // E1.0 [1] (🏉) rugby football
+ {0x1F3CA, 0x1F3CA, prExtendedPictographic}, // E0.6 [1] (🏊) person swimming
+ {0x1F3CB, 0x1F3CE, prExtendedPictographic}, // E0.7 [4] (🏋️..🏎️) person lifting weights..racing car
+ {0x1F3CF, 0x1F3D3, prExtendedPictographic}, // E1.0 [5] (🏏..🏓) cricket game..ping pong
+ {0x1F3D4, 0x1F3DF, prExtendedPictographic}, // E0.7 [12] (🏔️..🏟️) snow-capped mountain..stadium
+ {0x1F3E0, 0x1F3E3, prExtendedPictographic}, // E0.6 [4] (🏠..🏣) house..Japanese post office
+ {0x1F3E4, 0x1F3E4, prExtendedPictographic}, // E1.0 [1] (🏤) post office
+ {0x1F3E5, 0x1F3F0, prExtendedPictographic}, // E0.6 [12] (🏥..🏰) hospital..castle
+ {0x1F3F1, 0x1F3F2, prExtendedPictographic}, // E0.0 [2] (🏱..🏲) WHITE PENNANT..BLACK PENNANT
+ {0x1F3F3, 0x1F3F3, prExtendedPictographic}, // E0.7 [1] (🏳️) white flag
+ {0x1F3F4, 0x1F3F4, prExtendedPictographic}, // E1.0 [1] (🏴) black flag
+ {0x1F3F5, 0x1F3F5, prExtendedPictographic}, // E0.7 [1] (🏵️) rosette
+ {0x1F3F6, 0x1F3F6, prExtendedPictographic}, // E0.0 [1] (🏶) BLACK ROSETTE
+ {0x1F3F7, 0x1F3F7, prExtendedPictographic}, // E0.7 [1] (🏷️) label
+ {0x1F3F8, 0x1F3FA, prExtendedPictographic}, // E1.0 [3] (🏸..🏺) badminton..amphora
+ {0x1F3FB, 0x1F3FF, prExtend}, // Sk [5] EMOJI MODIFIER FITZPATRICK TYPE-1-2..EMOJI MODIFIER FITZPATRICK TYPE-6
+ {0x1F400, 0x1F407, prExtendedPictographic}, // E1.0 [8] (🐀..🐇) rat..rabbit
+ {0x1F408, 0x1F408, prExtendedPictographic}, // E0.7 [1] (🐈) cat
+ {0x1F409, 0x1F40B, prExtendedPictographic}, // E1.0 [3] (🐉..🐋) dragon..whale
+ {0x1F40C, 0x1F40E, prExtendedPictographic}, // E0.6 [3] (🐌..🐎) snail..horse
+ {0x1F40F, 0x1F410, prExtendedPictographic}, // E1.0 [2] (🐏..🐐) ram..goat
+ {0x1F411, 0x1F412, prExtendedPictographic}, // E0.6 [2] (🐑..🐒) ewe..monkey
+ {0x1F413, 0x1F413, prExtendedPictographic}, // E1.0 [1] (🐓) rooster
+ {0x1F414, 0x1F414, prExtendedPictographic}, // E0.6 [1] (🐔) chicken
+ {0x1F415, 0x1F415, prExtendedPictographic}, // E0.7 [1] (🐕) dog
+ {0x1F416, 0x1F416, prExtendedPictographic}, // E1.0 [1] (🐖) pig
+ {0x1F417, 0x1F429, prExtendedPictographic}, // E0.6 [19] (🐗..🐩) boar..poodle
+ {0x1F42A, 0x1F42A, prExtendedPictographic}, // E1.0 [1] (🐪) camel
+ {0x1F42B, 0x1F43E, prExtendedPictographic}, // E0.6 [20] (🐫..🐾) two-hump camel..paw prints
+ {0x1F43F, 0x1F43F, prExtendedPictographic}, // E0.7 [1] (🐿️) chipmunk
+ {0x1F440, 0x1F440, prExtendedPictographic}, // E0.6 [1] (👀) eyes
+ {0x1F441, 0x1F441, prExtendedPictographic}, // E0.7 [1] (👁️) eye
+ {0x1F442, 0x1F464, prExtendedPictographic}, // E0.6 [35] (👂..👤) ear..bust in silhouette
+ {0x1F465, 0x1F465, prExtendedPictographic}, // E1.0 [1] (👥) busts in silhouette
+ {0x1F466, 0x1F46B, prExtendedPictographic}, // E0.6 [6] (👦..👫) boy..woman and man holding hands
+ {0x1F46C, 0x1F46D, prExtendedPictographic}, // E1.0 [2] (👬..👭) men holding hands..women holding hands
+ {0x1F46E, 0x1F4AC, prExtendedPictographic}, // E0.6 [63] (👮..💬) police officer..speech balloon
+ {0x1F4AD, 0x1F4AD, prExtendedPictographic}, // E1.0 [1] (💭) thought balloon
+ {0x1F4AE, 0x1F4B5, prExtendedPictographic}, // E0.6 [8] (💮..💵) white flower..dollar banknote
+ {0x1F4B6, 0x1F4B7, prExtendedPictographic}, // E1.0 [2] (💶..💷) euro banknote..pound banknote
+ {0x1F4B8, 0x1F4EB, prExtendedPictographic}, // E0.6 [52] (💸..📫) money with wings..closed mailbox with raised flag
+ {0x1F4EC, 0x1F4ED, prExtendedPictographic}, // E0.7 [2] (📬..📭) open mailbox with raised flag..open mailbox with lowered flag
+ {0x1F4EE, 0x1F4EE, prExtendedPictographic}, // E0.6 [1] (📮) postbox
+ {0x1F4EF, 0x1F4EF, prExtendedPictographic}, // E1.0 [1] (📯) postal horn
+ {0x1F4F0, 0x1F4F4, prExtendedPictographic}, // E0.6 [5] (📰..📴) newspaper..mobile phone off
+ {0x1F4F5, 0x1F4F5, prExtendedPictographic}, // E1.0 [1] (📵) no mobile phones
+ {0x1F4F6, 0x1F4F7, prExtendedPictographic}, // E0.6 [2] (📶..📷) antenna bars..camera
+ {0x1F4F8, 0x1F4F8, prExtendedPictographic}, // E1.0 [1] (📸) camera with flash
+ {0x1F4F9, 0x1F4FC, prExtendedPictographic}, // E0.6 [4] (📹..📼) video camera..videocassette
+ {0x1F4FD, 0x1F4FD, prExtendedPictographic}, // E0.7 [1] (📽️) film projector
+ {0x1F4FE, 0x1F4FE, prExtendedPictographic}, // E0.0 [1] (📾) PORTABLE STEREO
+ {0x1F4FF, 0x1F502, prExtendedPictographic}, // E1.0 [4] (📿..🔂) prayer beads..repeat single button
+ {0x1F503, 0x1F503, prExtendedPictographic}, // E0.6 [1] (🔃) clockwise vertical arrows
+ {0x1F504, 0x1F507, prExtendedPictographic}, // E1.0 [4] (🔄..🔇) counterclockwise arrows button..muted speaker
+ {0x1F508, 0x1F508, prExtendedPictographic}, // E0.7 [1] (🔈) speaker low volume
+ {0x1F509, 0x1F509, prExtendedPictographic}, // E1.0 [1] (🔉) speaker medium volume
+ {0x1F50A, 0x1F514, prExtendedPictographic}, // E0.6 [11] (🔊..🔔) speaker high volume..bell
+ {0x1F515, 0x1F515, prExtendedPictographic}, // E1.0 [1] (🔕) bell with slash
+ {0x1F516, 0x1F52B, prExtendedPictographic}, // E0.6 [22] (🔖..🔫) bookmark..water pistol
+ {0x1F52C, 0x1F52D, prExtendedPictographic}, // E1.0 [2] (🔬..🔭) microscope..telescope
+ {0x1F52E, 0x1F53D, prExtendedPictographic}, // E0.6 [16] (🔮..🔽) crystal ball..downwards button
+ {0x1F546, 0x1F548, prExtendedPictographic}, // E0.0 [3] (🕆..🕈) WHITE LATIN CROSS..CELTIC CROSS
+ {0x1F549, 0x1F54A, prExtendedPictographic}, // E0.7 [2] (🕉️..🕊️) om..dove
+ {0x1F54B, 0x1F54E, prExtendedPictographic}, // E1.0 [4] (🕋..🕎) kaaba..menorah
+ {0x1F54F, 0x1F54F, prExtendedPictographic}, // E0.0 [1] (🕏) BOWL OF HYGIEIA
+ {0x1F550, 0x1F55B, prExtendedPictographic}, // E0.6 [12] (🕐..🕛) one o’clock..twelve o’clock
+ {0x1F55C, 0x1F567, prExtendedPictographic}, // E0.7 [12] (🕜..🕧) one-thirty..twelve-thirty
+ {0x1F568, 0x1F56E, prExtendedPictographic}, // E0.0 [7] (🕨..🕮) RIGHT SPEAKER..BOOK
+ {0x1F56F, 0x1F570, prExtendedPictographic}, // E0.7 [2] (🕯️..🕰️) candle..mantelpiece clock
+ {0x1F571, 0x1F572, prExtendedPictographic}, // E0.0 [2] (🕱..🕲) BLACK SKULL AND CROSSBONES..NO PIRACY
+ {0x1F573, 0x1F579, prExtendedPictographic}, // E0.7 [7] (🕳️..🕹️) hole..joystick
+ {0x1F57A, 0x1F57A, prExtendedPictographic}, // E3.0 [1] (🕺) man dancing
+ {0x1F57B, 0x1F586, prExtendedPictographic}, // E0.0 [12] (🕻..🖆) LEFT HAND TELEPHONE RECEIVER..PEN OVER STAMPED ENVELOPE
+ {0x1F587, 0x1F587, prExtendedPictographic}, // E0.7 [1] (🖇️) linked paperclips
+ {0x1F588, 0x1F589, prExtendedPictographic}, // E0.0 [2] (🖈..🖉) BLACK PUSHPIN..LOWER LEFT PENCIL
+ {0x1F58A, 0x1F58D, prExtendedPictographic}, // E0.7 [4] (🖊️..🖍️) pen..crayon
+ {0x1F58E, 0x1F58F, prExtendedPictographic}, // E0.0 [2] (🖎..🖏) LEFT WRITING HAND..TURNED OK HAND SIGN
+ {0x1F590, 0x1F590, prExtendedPictographic}, // E0.7 [1] (🖐️) hand with fingers splayed
+ {0x1F591, 0x1F594, prExtendedPictographic}, // E0.0 [4] (🖑..🖔) REVERSED RAISED HAND WITH FINGERS SPLAYED..REVERSED VICTORY HAND
+ {0x1F595, 0x1F596, prExtendedPictographic}, // E1.0 [2] (🖕..🖖) middle finger..vulcan salute
+ {0x1F597, 0x1F5A3, prExtendedPictographic}, // E0.0 [13] (🖗..🖣) WHITE DOWN POINTING LEFT HAND INDEX..BLACK DOWN POINTING BACKHAND INDEX
+ {0x1F5A4, 0x1F5A4, prExtendedPictographic}, // E3.0 [1] (🖤) black heart
+ {0x1F5A5, 0x1F5A5, prExtendedPictographic}, // E0.7 [1] (🖥️) desktop computer
+ {0x1F5A6, 0x1F5A7, prExtendedPictographic}, // E0.0 [2] (🖦..🖧) KEYBOARD AND MOUSE..THREE NETWORKED COMPUTERS
+ {0x1F5A8, 0x1F5A8, prExtendedPictographic}, // E0.7 [1] (🖨️) printer
+ {0x1F5A9, 0x1F5B0, prExtendedPictographic}, // E0.0 [8] (🖩..🖰) POCKET CALCULATOR..TWO BUTTON MOUSE
+ {0x1F5B1, 0x1F5B2, prExtendedPictographic}, // E0.7 [2] (🖱️..🖲️) computer mouse..trackball
+ {0x1F5B3, 0x1F5BB, prExtendedPictographic}, // E0.0 [9] (🖳..🖻) OLD PERSONAL COMPUTER..DOCUMENT WITH PICTURE
+ {0x1F5BC, 0x1F5BC, prExtendedPictographic}, // E0.7 [1] (🖼️) framed picture
+ {0x1F5BD, 0x1F5C1, prExtendedPictographic}, // E0.0 [5] (🖽..🗁) FRAME WITH TILES..OPEN FOLDER
+ {0x1F5C2, 0x1F5C4, prExtendedPictographic}, // E0.7 [3] (🗂️..🗄️) card index dividers..file cabinet
+ {0x1F5C5, 0x1F5D0, prExtendedPictographic}, // E0.0 [12] (🗅..🗐) EMPTY NOTE..PAGES
+ {0x1F5D1, 0x1F5D3, prExtendedPictographic}, // E0.7 [3] (🗑️..🗓️) wastebasket..spiral calendar
+ {0x1F5D4, 0x1F5DB, prExtendedPictographic}, // E0.0 [8] (🗔..🗛) DESKTOP WINDOW..DECREASE FONT SIZE SYMBOL
+ {0x1F5DC, 0x1F5DE, prExtendedPictographic}, // E0.7 [3] (🗜️..🗞️) clamp..rolled-up newspaper
+ {0x1F5DF, 0x1F5E0, prExtendedPictographic}, // E0.0 [2] (🗟..🗠) PAGE WITH CIRCLED TEXT..STOCK CHART
+ {0x1F5E1, 0x1F5E1, prExtendedPictographic}, // E0.7 [1] (🗡️) dagger
+ {0x1F5E2, 0x1F5E2, prExtendedPictographic}, // E0.0 [1] (🗢) LIPS
+ {0x1F5E3, 0x1F5E3, prExtendedPictographic}, // E0.7 [1] (🗣️) speaking head
+ {0x1F5E4, 0x1F5E7, prExtendedPictographic}, // E0.0 [4] (🗤..🗧) THREE RAYS ABOVE..THREE RAYS RIGHT
+ {0x1F5E8, 0x1F5E8, prExtendedPictographic}, // E2.0 [1] (🗨️) left speech bubble
+ {0x1F5E9, 0x1F5EE, prExtendedPictographic}, // E0.0 [6] (🗩..🗮) RIGHT SPEECH BUBBLE..LEFT ANGER BUBBLE
+ {0x1F5EF, 0x1F5EF, prExtendedPictographic}, // E0.7 [1] (🗯️) right anger bubble
+ {0x1F5F0, 0x1F5F2, prExtendedPictographic}, // E0.0 [3] (🗰..🗲) MOOD BUBBLE..LIGHTNING MOOD
+ {0x1F5F3, 0x1F5F3, prExtendedPictographic}, // E0.7 [1] (🗳️) ballot box with ballot
+ {0x1F5F4, 0x1F5F9, prExtendedPictographic}, // E0.0 [6] (🗴..🗹) BALLOT SCRIPT X..BALLOT BOX WITH BOLD CHECK
+ {0x1F5FA, 0x1F5FA, prExtendedPictographic}, // E0.7 [1] (🗺️) world map
+ {0x1F5FB, 0x1F5FF, prExtendedPictographic}, // E0.6 [5] (🗻..🗿) mount fuji..moai
+ {0x1F600, 0x1F600, prExtendedPictographic}, // E1.0 [1] (😀) grinning face
+ {0x1F601, 0x1F606, prExtendedPictographic}, // E0.6 [6] (😁..😆) beaming face with smiling eyes..grinning squinting face
+ {0x1F607, 0x1F608, prExtendedPictographic}, // E1.0 [2] (😇..😈) smiling face with halo..smiling face with horns
+ {0x1F609, 0x1F60D, prExtendedPictographic}, // E0.6 [5] (😉..😍) winking face..smiling face with heart-eyes
+ {0x1F60E, 0x1F60E, prExtendedPictographic}, // E1.0 [1] (😎) smiling face with sunglasses
+ {0x1F60F, 0x1F60F, prExtendedPictographic}, // E0.6 [1] (😏) smirking face
+ {0x1F610, 0x1F610, prExtendedPictographic}, // E0.7 [1] (😐) neutral face
+ {0x1F611, 0x1F611, prExtendedPictographic}, // E1.0 [1] (😑) expressionless face
+ {0x1F612, 0x1F614, prExtendedPictographic}, // E0.6 [3] (😒..😔) unamused face..pensive face
+ {0x1F615, 0x1F615, prExtendedPictographic}, // E1.0 [1] (😕) confused face
+ {0x1F616, 0x1F616, prExtendedPictographic}, // E0.6 [1] (😖) confounded face
+ {0x1F617, 0x1F617, prExtendedPictographic}, // E1.0 [1] (😗) kissing face
+ {0x1F618, 0x1F618, prExtendedPictographic}, // E0.6 [1] (😘) face blowing a kiss
+ {0x1F619, 0x1F619, prExtendedPictographic}, // E1.0 [1] (😙) kissing face with smiling eyes
+ {0x1F61A, 0x1F61A, prExtendedPictographic}, // E0.6 [1] (😚) kissing face with closed eyes
+ {0x1F61B, 0x1F61B, prExtendedPictographic}, // E1.0 [1] (😛) face with tongue
+ {0x1F61C, 0x1F61E, prExtendedPictographic}, // E0.6 [3] (😜..😞) winking face with tongue..disappointed face
+ {0x1F61F, 0x1F61F, prExtendedPictographic}, // E1.0 [1] (😟) worried face
+ {0x1F620, 0x1F625, prExtendedPictographic}, // E0.6 [6] (😠..😥) angry face..sad but relieved face
+ {0x1F626, 0x1F627, prExtendedPictographic}, // E1.0 [2] (😦..😧) frowning face with open mouth..anguished face
+ {0x1F628, 0x1F62B, prExtendedPictographic}, // E0.6 [4] (😨..😫) fearful face..tired face
+ {0x1F62C, 0x1F62C, prExtendedPictographic}, // E1.0 [1] (😬) grimacing face
+ {0x1F62D, 0x1F62D, prExtendedPictographic}, // E0.6 [1] (😭) loudly crying face
+ {0x1F62E, 0x1F62F, prExtendedPictographic}, // E1.0 [2] (😮..😯) face with open mouth..hushed face
+ {0x1F630, 0x1F633, prExtendedPictographic}, // E0.6 [4] (😰..😳) anxious face with sweat..flushed face
+ {0x1F634, 0x1F634, prExtendedPictographic}, // E1.0 [1] (😴) sleeping face
+ {0x1F635, 0x1F635, prExtendedPictographic}, // E0.6 [1] (😵) face with crossed-out eyes
+ {0x1F636, 0x1F636, prExtendedPictographic}, // E1.0 [1] (😶) face without mouth
+ {0x1F637, 0x1F640, prExtendedPictographic}, // E0.6 [10] (😷..🙀) face with medical mask..weary cat
+ {0x1F641, 0x1F644, prExtendedPictographic}, // E1.0 [4] (🙁..🙄) slightly frowning face..face with rolling eyes
+ {0x1F645, 0x1F64F, prExtendedPictographic}, // E0.6 [11] (🙅..🙏) person gesturing NO..folded hands
+ {0x1F680, 0x1F680, prExtendedPictographic}, // E0.6 [1] (🚀) rocket
+ {0x1F681, 0x1F682, prExtendedPictographic}, // E1.0 [2] (🚁..🚂) helicopter..locomotive
+ {0x1F683, 0x1F685, prExtendedPictographic}, // E0.6 [3] (🚃..🚅) railway car..bullet train
+ {0x1F686, 0x1F686, prExtendedPictographic}, // E1.0 [1] (🚆) train
+ {0x1F687, 0x1F687, prExtendedPictographic}, // E0.6 [1] (🚇) metro
+ {0x1F688, 0x1F688, prExtendedPictographic}, // E1.0 [1] (🚈) light rail
+ {0x1F689, 0x1F689, prExtendedPictographic}, // E0.6 [1] (🚉) station
+ {0x1F68A, 0x1F68B, prExtendedPictographic}, // E1.0 [2] (🚊..🚋) tram..tram car
+ {0x1F68C, 0x1F68C, prExtendedPictographic}, // E0.6 [1] (🚌) bus
+ {0x1F68D, 0x1F68D, prExtendedPictographic}, // E0.7 [1] (🚍) oncoming bus
+ {0x1F68E, 0x1F68E, prExtendedPictographic}, // E1.0 [1] (🚎) trolleybus
+ {0x1F68F, 0x1F68F, prExtendedPictographic}, // E0.6 [1] (🚏) bus stop
+ {0x1F690, 0x1F690, prExtendedPictographic}, // E1.0 [1] (🚐) minibus
+ {0x1F691, 0x1F693, prExtendedPictographic}, // E0.6 [3] (🚑..🚓) ambulance..police car
+ {0x1F694, 0x1F694, prExtendedPictographic}, // E0.7 [1] (🚔) oncoming police car
+ {0x1F695, 0x1F695, prExtendedPictographic}, // E0.6 [1] (🚕) taxi
+ {0x1F696, 0x1F696, prExtendedPictographic}, // E1.0 [1] (🚖) oncoming taxi
+ {0x1F697, 0x1F697, prExtendedPictographic}, // E0.6 [1] (🚗) automobile
+ {0x1F698, 0x1F698, prExtendedPictographic}, // E0.7 [1] (🚘) oncoming automobile
+ {0x1F699, 0x1F69A, prExtendedPictographic}, // E0.6 [2] (🚙..🚚) sport utility vehicle..delivery truck
+ {0x1F69B, 0x1F6A1, prExtendedPictographic}, // E1.0 [7] (🚛..🚡) articulated lorry..aerial tramway
+ {0x1F6A2, 0x1F6A2, prExtendedPictographic}, // E0.6 [1] (🚢) ship
+ {0x1F6A3, 0x1F6A3, prExtendedPictographic}, // E1.0 [1] (🚣) person rowing boat
+ {0x1F6A4, 0x1F6A5, prExtendedPictographic}, // E0.6 [2] (🚤..🚥) speedboat..horizontal traffic light
+ {0x1F6A6, 0x1F6A6, prExtendedPictographic}, // E1.0 [1] (🚦) vertical traffic light
+ {0x1F6A7, 0x1F6AD, prExtendedPictographic}, // E0.6 [7] (🚧..🚭) construction..no smoking
+ {0x1F6AE, 0x1F6B1, prExtendedPictographic}, // E1.0 [4] (🚮..🚱) litter in bin sign..non-potable water
+ {0x1F6B2, 0x1F6B2, prExtendedPictographic}, // E0.6 [1] (🚲) bicycle
+ {0x1F6B3, 0x1F6B5, prExtendedPictographic}, // E1.0 [3] (🚳..🚵) no bicycles..person mountain biking
+ {0x1F6B6, 0x1F6B6, prExtendedPictographic}, // E0.6 [1] (🚶) person walking
+ {0x1F6B7, 0x1F6B8, prExtendedPictographic}, // E1.0 [2] (🚷..🚸) no pedestrians..children crossing
+ {0x1F6B9, 0x1F6BE, prExtendedPictographic}, // E0.6 [6] (🚹..🚾) men’s room..water closet
+ {0x1F6BF, 0x1F6BF, prExtendedPictographic}, // E1.0 [1] (🚿) shower
+ {0x1F6C0, 0x1F6C0, prExtendedPictographic}, // E0.6 [1] (🛀) person taking bath
+ {0x1F6C1, 0x1F6C5, prExtendedPictographic}, // E1.0 [5] (🛁..🛅) bathtub..left luggage
+ {0x1F6C6, 0x1F6CA, prExtendedPictographic}, // E0.0 [5] (🛆..🛊) TRIANGLE WITH ROUNDED CORNERS..GIRLS SYMBOL
+ {0x1F6CB, 0x1F6CB, prExtendedPictographic}, // E0.7 [1] (🛋️) couch and lamp
+ {0x1F6CC, 0x1F6CC, prExtendedPictographic}, // E1.0 [1] (🛌) person in bed
+ {0x1F6CD, 0x1F6CF, prExtendedPictographic}, // E0.7 [3] (🛍️..🛏️) shopping bags..bed
+ {0x1F6D0, 0x1F6D0, prExtendedPictographic}, // E1.0 [1] (🛐) place of worship
+ {0x1F6D1, 0x1F6D2, prExtendedPictographic}, // E3.0 [2] (🛑..🛒) stop sign..shopping cart
+ {0x1F6D3, 0x1F6D4, prExtendedPictographic}, // E0.0 [2] (🛓..🛔) STUPA..PAGODA
+ {0x1F6D5, 0x1F6D5, prExtendedPictographic}, // E12.0 [1] (🛕) hindu temple
+ {0x1F6D6, 0x1F6D7, prExtendedPictographic}, // E13.0 [2] (🛖..🛗) hut..elevator
+ {0x1F6D8, 0x1F6DC, prExtendedPictographic}, // E0.0 [5] (..🛜) <reserved-1F6D8>..<reserved-1F6DC>
+ {0x1F6DD, 0x1F6DF, prExtendedPictographic}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
+ {0x1F6E0, 0x1F6E5, prExtendedPictographic}, // E0.7 [6] (🛠️..🛥️) hammer and wrench..motor boat
+ {0x1F6E6, 0x1F6E8, prExtendedPictographic}, // E0.0 [3] (🛦..🛨) UP-POINTING MILITARY AIRPLANE..UP-POINTING SMALL AIRPLANE
+ {0x1F6E9, 0x1F6E9, prExtendedPictographic}, // E0.7 [1] (🛩️) small airplane
+ {0x1F6EA, 0x1F6EA, prExtendedPictographic}, // E0.0 [1] (🛪) NORTHEAST-POINTING AIRPLANE
+ {0x1F6EB, 0x1F6EC, prExtendedPictographic}, // E1.0 [2] (🛫..🛬) airplane departure..airplane arrival
+ {0x1F6ED, 0x1F6EF, prExtendedPictographic}, // E0.0 [3] (..) <reserved-1F6ED>..<reserved-1F6EF>
+ {0x1F6F0, 0x1F6F0, prExtendedPictographic}, // E0.7 [1] (🛰️) satellite
+ {0x1F6F1, 0x1F6F2, prExtendedPictographic}, // E0.0 [2] (🛱..🛲) ONCOMING FIRE ENGINE..DIESEL LOCOMOTIVE
+ {0x1F6F3, 0x1F6F3, prExtendedPictographic}, // E0.7 [1] (🛳️) passenger ship
+ {0x1F6F4, 0x1F6F6, prExtendedPictographic}, // E3.0 [3] (🛴..🛶) kick scooter..canoe
+ {0x1F6F7, 0x1F6F8, prExtendedPictographic}, // E5.0 [2] (🛷..🛸) sled..flying saucer
+ {0x1F6F9, 0x1F6F9, prExtendedPictographic}, // E11.0 [1] (🛹) skateboard
+ {0x1F6FA, 0x1F6FA, prExtendedPictographic}, // E12.0 [1] (🛺) auto rickshaw
+ {0x1F6FB, 0x1F6FC, prExtendedPictographic}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate
+ {0x1F6FD, 0x1F6FF, prExtendedPictographic}, // E0.0 [3] (..) <reserved-1F6FD>..<reserved-1F6FF>
+ {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) <reserved-1F774>..<reserved-1F77F>
+ {0x1F7D5, 0x1F7DF, prExtendedPictographic}, // E0.0 [11] (🟕..) CIRCLED TRIANGLE..<reserved-1F7DF>
+ {0x1F7E0, 0x1F7EB, prExtendedPictographic}, // E12.0 [12] (🟠..🟫) orange circle..brown square
+ {0x1F7EC, 0x1F7EF, prExtendedPictographic}, // E0.0 [4] (..) <reserved-1F7EC>..<reserved-1F7EF>
+ {0x1F7F0, 0x1F7F0, prExtendedPictographic}, // E14.0 [1] (🟰) heavy equals sign
+ {0x1F7F1, 0x1F7FF, prExtendedPictographic}, // E0.0 [15] (..) <reserved-1F7F1>..<reserved-1F7FF>
+ {0x1F80C, 0x1F80F, prExtendedPictographic}, // E0.0 [4] (..) <reserved-1F80C>..<reserved-1F80F>
+ {0x1F848, 0x1F84F, prExtendedPictographic}, // E0.0 [8] (..) <reserved-1F848>..<reserved-1F84F>
+ {0x1F85A, 0x1F85F, prExtendedPictographic}, // E0.0 [6] (..) <reserved-1F85A>..<reserved-1F85F>
+ {0x1F888, 0x1F88F, prExtendedPictographic}, // E0.0 [8] (..) <reserved-1F888>..<reserved-1F88F>
+ {0x1F8AE, 0x1F8FF, prExtendedPictographic}, // E0.0 [82] (..) <reserved-1F8AE>..<reserved-1F8FF>
+ {0x1F90C, 0x1F90C, prExtendedPictographic}, // E13.0 [1] (🤌) pinched fingers
+ {0x1F90D, 0x1F90F, prExtendedPictographic}, // E12.0 [3] (🤍..🤏) white heart..pinching hand
+ {0x1F910, 0x1F918, prExtendedPictographic}, // E1.0 [9] (🤐..🤘) zipper-mouth face..sign of the horns
+ {0x1F919, 0x1F91E, prExtendedPictographic}, // E3.0 [6] (🤙..🤞) call me hand..crossed fingers
+ {0x1F91F, 0x1F91F, prExtendedPictographic}, // E5.0 [1] (🤟) love-you gesture
+ {0x1F920, 0x1F927, prExtendedPictographic}, // E3.0 [8] (🤠..🤧) cowboy hat face..sneezing face
+ {0x1F928, 0x1F92F, prExtendedPictographic}, // E5.0 [8] (🤨..🤯) face with raised eyebrow..exploding head
+ {0x1F930, 0x1F930, prExtendedPictographic}, // E3.0 [1] (🤰) pregnant woman
+ {0x1F931, 0x1F932, prExtendedPictographic}, // E5.0 [2] (🤱..🤲) breast-feeding..palms up together
+ {0x1F933, 0x1F93A, prExtendedPictographic}, // E3.0 [8] (🤳..🤺) selfie..person fencing
+ {0x1F93C, 0x1F93E, prExtendedPictographic}, // E3.0 [3] (🤼..🤾) people wrestling..person playing handball
+ {0x1F93F, 0x1F93F, prExtendedPictographic}, // E12.0 [1] (🤿) diving mask
+ {0x1F940, 0x1F945, prExtendedPictographic}, // E3.0 [6] (🥀..🥅) wilted flower..goal net
+ {0x1F947, 0x1F94B, prExtendedPictographic}, // E3.0 [5] (🥇..🥋) 1st place medal..martial arts uniform
+ {0x1F94C, 0x1F94C, prExtendedPictographic}, // E5.0 [1] (🥌) curling stone
+ {0x1F94D, 0x1F94F, prExtendedPictographic}, // E11.0 [3] (🥍..🥏) lacrosse..flying disc
+ {0x1F950, 0x1F95E, prExtendedPictographic}, // E3.0 [15] (🥐..🥞) croissant..pancakes
+ {0x1F95F, 0x1F96B, prExtendedPictographic}, // E5.0 [13] (🥟..🥫) dumpling..canned food
+ {0x1F96C, 0x1F970, prExtendedPictographic}, // E11.0 [5] (🥬..🥰) leafy green..smiling face with hearts
+ {0x1F971, 0x1F971, prExtendedPictographic}, // E12.0 [1] (🥱) yawning face
+ {0x1F972, 0x1F972, prExtendedPictographic}, // E13.0 [1] (🥲) smiling face with tear
+ {0x1F973, 0x1F976, prExtendedPictographic}, // E11.0 [4] (🥳..🥶) partying face..cold face
+ {0x1F977, 0x1F978, prExtendedPictographic}, // E13.0 [2] (🥷..🥸) ninja..disguised face
+ {0x1F979, 0x1F979, prExtendedPictographic}, // E14.0 [1] (🥹) face holding back tears
+ {0x1F97A, 0x1F97A, prExtendedPictographic}, // E11.0 [1] (🥺) pleading face
+ {0x1F97B, 0x1F97B, prExtendedPictographic}, // E12.0 [1] (🥻) sari
+ {0x1F97C, 0x1F97F, prExtendedPictographic}, // E11.0 [4] (🥼..🥿) lab coat..flat shoe
+ {0x1F980, 0x1F984, prExtendedPictographic}, // E1.0 [5] (🦀..🦄) crab..unicorn
+ {0x1F985, 0x1F991, prExtendedPictographic}, // E3.0 [13] (🦅..🦑) eagle..squid
+ {0x1F992, 0x1F997, prExtendedPictographic}, // E5.0 [6] (🦒..🦗) giraffe..cricket
+ {0x1F998, 0x1F9A2, prExtendedPictographic}, // E11.0 [11] (🦘..🦢) kangaroo..swan
+ {0x1F9A3, 0x1F9A4, prExtendedPictographic}, // E13.0 [2] (🦣..🦤) mammoth..dodo
+ {0x1F9A5, 0x1F9AA, prExtendedPictographic}, // E12.0 [6] (🦥..🦪) sloth..oyster
+ {0x1F9AB, 0x1F9AD, prExtendedPictographic}, // E13.0 [3] (🦫..🦭) beaver..seal
+ {0x1F9AE, 0x1F9AF, prExtendedPictographic}, // E12.0 [2] (🦮..🦯) guide dog..white cane
+ {0x1F9B0, 0x1F9B9, prExtendedPictographic}, // E11.0 [10] (🦰..🦹) red hair..supervillain
+ {0x1F9BA, 0x1F9BF, prExtendedPictographic}, // E12.0 [6] (🦺..🦿) safety vest..mechanical leg
+ {0x1F9C0, 0x1F9C0, prExtendedPictographic}, // E1.0 [1] (🧀) cheese wedge
+ {0x1F9C1, 0x1F9C2, prExtendedPictographic}, // E11.0 [2] (🧁..🧂) cupcake..salt
+ {0x1F9C3, 0x1F9CA, prExtendedPictographic}, // E12.0 [8] (🧃..🧊) beverage box..ice
+ {0x1F9CB, 0x1F9CB, prExtendedPictographic}, // E13.0 [1] (🧋) bubble tea
+ {0x1F9CC, 0x1F9CC, prExtendedPictographic}, // E14.0 [1] (🧌) troll
+ {0x1F9CD, 0x1F9CF, prExtendedPictographic}, // E12.0 [3] (🧍..🧏) person standing..deaf person
+ {0x1F9D0, 0x1F9E6, prExtendedPictographic}, // E5.0 [23] (🧐..🧦) face with monocle..socks
+ {0x1F9E7, 0x1F9FF, prExtendedPictographic}, // E11.0 [25] (🧧..🧿) red envelope..nazar amulet
+ {0x1FA00, 0x1FA6F, prExtendedPictographic}, // E0.0 [112] (🨀..) NEUTRAL CHESS KING..<reserved-1FA6F>
+ {0x1FA70, 0x1FA73, prExtendedPictographic}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
+ {0x1FA74, 0x1FA74, prExtendedPictographic}, // E13.0 [1] (🩴) thong sandal
+ {0x1FA75, 0x1FA77, prExtendedPictographic}, // E0.0 [3] (🩵..🩷) <reserved-1FA75>..<reserved-1FA77>
+ {0x1FA78, 0x1FA7A, prExtendedPictographic}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
+ {0x1FA7B, 0x1FA7C, prExtendedPictographic}, // E14.0 [2] (🩻..🩼) x-ray..crutch
+ {0x1FA7D, 0x1FA7F, prExtendedPictographic}, // E0.0 [3] (..) <reserved-1FA7D>..<reserved-1FA7F>
+ {0x1FA80, 0x1FA82, prExtendedPictographic}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
+ {0x1FA83, 0x1FA86, prExtendedPictographic}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
+ {0x1FA87, 0x1FA8F, prExtendedPictographic}, // E0.0 [9] (🪇..) <reserved-1FA87>..<reserved-1FA8F>
+ {0x1FA90, 0x1FA95, prExtendedPictographic}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
+ {0x1FA96, 0x1FAA8, prExtendedPictographic}, // E13.0 [19] (🪖..🪨) military helmet..rock
+ {0x1FAA9, 0x1FAAC, prExtendedPictographic}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
+ {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E0.0 [3] (🪭..🪯) <reserved-1FAAD>..<reserved-1FAAF>
+ {0x1FAB0, 0x1FAB6, prExtendedPictographic}, // E13.0 [7] (🪰..🪶) fly..feather
+ {0x1FAB7, 0x1FABA, prExtendedPictographic}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
+ {0x1FABB, 0x1FABF, prExtendedPictographic}, // E0.0 [5] (🪻..🪿) <reserved-1FABB>..<reserved-1FABF>
+ {0x1FAC0, 0x1FAC2, prExtendedPictographic}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
+ {0x1FAC3, 0x1FAC5, prExtendedPictographic}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
+ {0x1FAC6, 0x1FACF, prExtendedPictographic}, // E0.0 [10] (..🫏) <reserved-1FAC6>..<reserved-1FACF>
+ {0x1FAD0, 0x1FAD6, prExtendedPictographic}, // E13.0 [7] (🫐..🫖) blueberries..teapot
+ {0x1FAD7, 0x1FAD9, prExtendedPictographic}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
+ {0x1FADA, 0x1FADF, prExtendedPictographic}, // E0.0 [6] (🫚..) <reserved-1FADA>..<reserved-1FADF>
+ {0x1FAE0, 0x1FAE7, prExtendedPictographic}, // E14.0 [8] (🫠..🫧) melting face..bubbles
+ {0x1FAE8, 0x1FAEF, prExtendedPictographic}, // E0.0 [8] (🫨..) <reserved-1FAE8>..<reserved-1FAEF>
+ {0x1FAF0, 0x1FAF6, prExtendedPictographic}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
+ {0x1FAF7, 0x1FAFF, prExtendedPictographic}, // E0.0 [9] (🫷..) <reserved-1FAF7>..<reserved-1FAFF>
+ {0x1FC00, 0x1FFFD, prExtendedPictographic}, // E0.0[1022] (..) <reserved-1FC00>..<reserved-1FFFD>
+ {0xE0000, 0xE0000, prControl}, // Cn <reserved-E0000>
+ {0xE0001, 0xE0001, prControl}, // Cf LANGUAGE TAG
+ {0xE0002, 0xE001F, prControl}, // Cn [30] <reserved-E0002>..<reserved-E001F>
+ {0xE0020, 0xE007F, prExtend}, // Cf [96] TAG SPACE..CANCEL TAG
+ {0xE0080, 0xE00FF, prControl}, // Cn [128] <reserved-E0080>..<reserved-E00FF>
+ {0xE0100, 0xE01EF, prExtend}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256
+ {0xE01F0, 0xE0FFF, prControl}, // Cn [3600] <reserved-E01F0>..<reserved-E0FFF>
+}
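+
+// The following is an editor's sketch (not part of the upstream file) of how
+// a property such as prLV or prExtend can be looked up in the sorted range
+// table above via binary search. The package's own property function plays
+// this role; the name lookupProperty and the [][3]int shape are assumptions
+// made for illustration.
+func lookupProperty(dictionary [][3]int, r rune) int {
+ from, to := 0, len(dictionary)
+ for to > from {
+ middle := (from + to) / 2
+ entry := dictionary[middle]
+ if int(r) < entry[0] { // r sorts before this range.
+ to = middle
+ continue
+ }
+ if int(r) > entry[1] { // r sorts after this range.
+ from = middle + 1
+ continue
+ }
+ return entry[2] // r falls within [entry[0], entry[1]].
+ }
+ return prAny // Code points not listed in the table default to "any".
+}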
diff --git a/vendor/github.com/rivo/uniseg/graphemerules.go b/vendor/github.com/rivo/uniseg/graphemerules.go
new file mode 100644
index 0000000..9f46b57
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/graphemerules.go
@@ -0,0 +1,138 @@
+package uniseg
+
+// The states of the grapheme cluster parser.
+const (
+ grAny = iota
+ grCR
+ grControlLF
+ grL
+ grLVV
+ grLVTT
+ grPrepend
+ grExtendedPictographic
+ grExtendedPictographicZWJ
+ grRIOdd
+ grRIEven
+)
+
+// The grapheme cluster parser's breaking instructions.
+const (
+ grNoBoundary = iota
+ grBoundary
+)
+
+// The grapheme cluster parser's state transitions. Maps (state, property) to
+// (new state, breaking instruction, rule number). The breaking instruction
+// always refers to the boundary between the last and next code point.
+//
+// This map is queried as follows:
+//
+// 1. Find specific state + specific property. Stop if found.
+// 2. Find specific state + any property.
+// 3. Find any state + specific property.
+// 4. If only (2) or (3) (but not both) was found, stop.
+// 5. If both (2) and (3) were found, use state from (3) and breaking instruction
+// from the transition with the lower rule number, prefer (3) if rule numbers
+// are equal. Stop.
+// 6. Assume grAny and grBoundary.
+//
+// Unicode version 14.0.0.
+var grTransitions = map[[2]int][3]int{
+ // GB5
+ {grAny, prCR}: {grCR, grBoundary, 50},
+ {grAny, prLF}: {grControlLF, grBoundary, 50},
+ {grAny, prControl}: {grControlLF, grBoundary, 50},
+
+ // GB4
+ {grCR, prAny}: {grAny, grBoundary, 40},
+ {grControlLF, prAny}: {grAny, grBoundary, 40},
+
+ // GB3.
+ {grCR, prLF}: {grControlLF, grNoBoundary, 30},
+
+ // GB6.
+ {grAny, prL}: {grL, grBoundary, 9990},
+ {grL, prL}: {grL, grNoBoundary, 60},
+ {grL, prV}: {grLVV, grNoBoundary, 60},
+ {grL, prLV}: {grLVV, grNoBoundary, 60},
+ {grL, prLVT}: {grLVTT, grNoBoundary, 60},
+
+ // GB7.
+ {grAny, prLV}: {grLVV, grBoundary, 9990},
+ {grAny, prV}: {grLVV, grBoundary, 9990},
+ {grLVV, prV}: {grLVV, grNoBoundary, 70},
+ {grLVV, prT}: {grLVTT, grNoBoundary, 70},
+
+ // GB8.
+ {grAny, prLVT}: {grLVTT, grBoundary, 9990},
+ {grAny, prT}: {grLVTT, grBoundary, 9990},
+ {grLVTT, prT}: {grLVTT, grNoBoundary, 80},
+
+ // GB9.
+ {grAny, prExtend}: {grAny, grNoBoundary, 90},
+ {grAny, prZWJ}: {grAny, grNoBoundary, 90},
+
+ // GB9a.
+ {grAny, prSpacingMark}: {grAny, grNoBoundary, 91},
+
+ // GB9b.
+ {grAny, prPrepend}: {grPrepend, grBoundary, 9990},
+ {grPrepend, prAny}: {grAny, grNoBoundary, 92},
+
+ // GB11.
+ {grAny, prExtendedPictographic}: {grExtendedPictographic, grBoundary, 9990},
+ {grExtendedPictographic, prExtend}: {grExtendedPictographic, grNoBoundary, 110},
+ {grExtendedPictographic, prZWJ}: {grExtendedPictographicZWJ, grNoBoundary, 110},
+ {grExtendedPictographicZWJ, prExtendedPictographic}: {grExtendedPictographic, grNoBoundary, 110},
+
+ // GB12 / GB13.
+ {grAny, prRegionalIndicator}: {grRIOdd, grBoundary, 9990},
+ {grRIOdd, prRegionalIndicator}: {grRIEven, grNoBoundary, 120},
+ {grRIEven, prRegionalIndicator}: {grRIOdd, grBoundary, 120},
+}
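+
+// Editor's note, working through the query steps described above
+// grTransitions: for state grCR and property prExtend there is no specific
+// entry, but both {grCR, prAny} (GB4, rule 40) and {grAny, prExtend} (GB9,
+// rule 90) match. The new state is taken from the latter (grAny), while the
+// breaking instruction comes from the transition with the lower rule number,
+// GB4's grBoundary, so a break is placed after the CR even though an Extend
+// code point follows.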
+
+// transitionGraphemeState determines the new state of the grapheme cluster
+// parser given the current state and the next code point. It also returns the
+// code point's grapheme property (the value mapped by the [graphemeCodePoints]
+// table) and whether a cluster boundary was detected.
+func transitionGraphemeState(state int, r rune) (newState, prop int, boundary bool) {
+ // Determine the property of the next character.
+ prop = property(graphemeCodePoints, r)
+
+ // Find the applicable transition.
+ transition, ok := grTransitions[[2]int{state, prop}]
+ if ok {
+ // We have a specific transition. We'll use it.
+ return transition[0], prop, transition[1] == grBoundary
+ }
+
+ // No specific transition found. Try the less specific ones.
+ transAnyProp, okAnyProp := grTransitions[[2]int{state, prAny}]
+ transAnyState, okAnyState := grTransitions[[2]int{grAny, prop}]
+ if okAnyProp && okAnyState {
+ // Both apply. We'll use a mix (see comments for grTransitions).
+ newState = transAnyState[0]
+ boundary = transAnyState[1] == grBoundary
+ if transAnyProp[2] < transAnyState[2] {
+ boundary = transAnyProp[1] == grBoundary
+ }
+ return
+ }
+
+ if okAnyProp {
+ // We only have a specific state. This branch will probably never be
+ // reached because okAnyState will always be true given the current
+ // transition map. But we keep it here for future modifications to the
+ // transition map where this may not be true anymore.
+ return transAnyProp[0], prop, transAnyProp[1] == grBoundary
+ }
+
+ if okAnyState {
+ // We only have a specific property.
+ return transAnyState[0], prop, transAnyState[1] == grBoundary
+ }
+
+ // No known transition. GB999: Any ÷ Any.
+ return grAny, prop, true
+}
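The resolution order described above (specific state + specific property, then specific state only, then specific property only, then GB999) is easiest to see on a concrete input. The following walk-through is a sketch only, not part of the vendored file: it would have to live inside package uniseg, since the identifiers are unexported, and assumes an added "fmt" import; the function name exampleFlagCluster is hypothetical.

	func exampleFlagCluster() {
		// Feed the regional-indicator pair U+1F1E9 U+1F1EA (the "DE" flag)
		// through the state machine. The first rune hits the specific
		// transition {grAny, prRegionalIndicator} (boundary, rule 9990); the
		// second hits {grRIOdd, prRegionalIndicator} (GB12, no boundary), so
		// both runes join into a single grapheme cluster.
		state := grAny
		for _, r := range "\U0001F1E9\U0001F1EA" {
			var boundary bool
			state, _, boundary = transitionGraphemeState(state, r)
			fmt.Printf("%U boundary=%v\n", r, boundary)
		}
		// Prints:
		// U+1F1E9 boundary=true
		// U+1F1EA boundary=false
	}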
diff --git a/vendor/github.com/rivo/uniseg/line.go b/vendor/github.com/rivo/uniseg/line.go
new file mode 100644
index 0000000..87f2850
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/line.go
@@ -0,0 +1,134 @@
+package uniseg
+
+import "unicode/utf8"
+
+// FirstLineSegment returns the prefix of the given byte slice after which a
+// decision to break the string over to the next line can or must be made,
+// according to the rules of [Unicode Standard Annex #14]. This is used to
+// implement line breaking.
+//
+// Line breaking, also known as word wrapping, is the process of breaking a
+// section of text into lines such that it will fit in the available width of a
+// page, window or other display area.
+//
+// The returned "segment" may not be broken into smaller parts, unless no other
+// breaking opportunities present themselves, in which case you may break by
+// grapheme clusters (using the [FirstGraphemeCluster] function to determine the
+// grapheme clusters).
+//
+// The "mustBreak" flag indicates whether you MUST break the line after the
+// given segment (true), for example after newline characters, or you MAY break
+// the line after the given segment (false).
+//
+// This function can be called continuously to extract all non-breaking sub-sets
+// from a byte slice, as illustrated in the example below.
+//
+// If you don't know the current state, for example when calling the function
+// for the first time, you must pass -1. For consecutive calls, pass the state
+// and rest slice returned by the previous call.
+//
+// The "rest" slice is the sub-slice of the original byte slice "b" starting
+// after the last byte of the identified line segment. If the length of the
+// "rest" slice is 0, the entire byte slice "b" has been processed. The
+// "segment" byte slice is the sub-slice of the input slice containing the
+// identified line segment.
+//
+// Given an empty byte slice "b", the function returns nil values.
+//
+// Note that in accordance with [UAX #14 LB3], the final segment will end with
+// "mustBreak" set to true. You can choose to ignore this by checking if the
+// length of the "rest" slice is 0 and calling [HasTrailingLineBreak] or
+// [HasTrailingLineBreakInString] on the last rune.
+//
+// Note also that this algorithm may break within grapheme clusters. This is
+// addressed in Section 8.2 Example 6 of UAX #14. To avoid this, you can use
+// the [Step] function instead.
+//
+// [Unicode Standard Annex #14]: https://www.unicode.org/reports/tr14/
+// [UAX #14 LB3]: https://www.unicode.org/reports/tr14/#Algorithm
+func FirstLineSegment(b []byte, state int) (segment, rest []byte, mustBreak bool, newState int) {
+ // An empty byte slice returns nothing.
+ if len(b) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRune(b)
+ if len(b) <= length { // If the first rune takes up the entire input, there is nothing else to parse.
+ return b, nil, true, lbAny // LB3.
+ }
+
+ // If we don't know the state, determine it now.
+ if state < 0 {
+ state, _ = transitionLineBreakState(state, r, b[length:], "")
+ }
+
+ // Transition until we find a boundary.
+ var boundary int
+ for {
+ r, l := utf8.DecodeRune(b[length:])
+ state, boundary = transitionLineBreakState(state, r, b[length+l:], "")
+
+ if boundary != LineDontBreak {
+ return b[:length], b[length:], boundary == LineMustBreak, state
+ }
+
+ length += l
+ if len(b) <= length {
+ return b, nil, true, lbAny // LB3
+ }
+ }
+}
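The doc comment above refers to an example of continuous calls, but the vendored tree does not carry the upstream example file. Here is a minimal sketch of that calling pattern, assuming the package is imported under its usual path github.com/rivo/uniseg:

	package main

	import (
		"fmt"

		"github.com/rivo/uniseg"
	)

	func main() {
		b := []byte("First line.\nSecond line, long enough to wrap.")
		state := -1 // Unknown state on the first call, as documented above.
		for len(b) > 0 {
			var segment []byte
			var mustBreak bool
			// Each call hands back the remaining input and the new state.
			segment, b, mustBreak, state = uniseg.FirstLineSegment(b, state)
			fmt.Printf("%q mustBreak=%v\n", segment, mustBreak)
		}
	}

mustBreak is true for the segment ending in "\n" (a hard break under LB5) and, per LB3, for the final segment.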
+
+// FirstLineSegmentInString is like [FirstLineSegment] but its inputs and
+// outputs are strings.
+func FirstLineSegmentInString(str string, state int) (segment, rest string, mustBreak bool, newState int) {
+ // An empty string returns nothing.
+ if len(str) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRuneInString(str)
+ if len(str) <= length { // If the first rune takes up the entire input, there is nothing else to parse.
+ return str, "", true, lbAny // LB3.
+ }
+
+ // If we don't know the state, determine it now.
+ if state < 0 {
+ state, _ = transitionLineBreakState(state, r, nil, str[length:])
+ }
+
+ // Transition until we find a boundary.
+ var boundary int
+ for {
+ r, l := utf8.DecodeRuneInString(str[length:])
+ state, boundary = transitionLineBreakState(state, r, nil, str[length+l:])
+
+ if boundary != LineDontBreak {
+ return str[:length], str[length:], boundary == LineMustBreak, state
+ }
+
+ length += l
+ if len(str) <= length {
+ return str, "", true, lbAny // LB3.
+ }
+ }
+}
+
+// HasTrailingLineBreak returns true if the last rune in the given byte slice is
+// one of the hard line break code points defined in LB4 and LB5 of [UAX #14].
+//
+// [UAX #14]: https://www.unicode.org/reports/tr14/#Algorithm
+func HasTrailingLineBreak(b []byte) bool {
+ r, _ := utf8.DecodeLastRune(b)
+ property, _ := propertyWithGenCat(lineBreakCodePoints, r)
+ return property == lbBK || property == lbCR || property == lbLF || property == lbNL
+}
+
+// HasTrailingLineBreakInString is like [HasTrailingLineBreak] but for a string.
+func HasTrailingLineBreakInString(str string) bool {
+ r, _ := utf8.DecodeLastRuneInString(str)
+ property, _ := propertyWithGenCat(lineBreakCodePoints, r)
+ return property == lbBK || property == lbCR || property == lbLF || property == lbNL
+}
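A short usage sketch for these two helpers, under the same import-path assumption as above: only inputs ending in one of the BK, CR, LF, or NL code points report true.

	fmt.Println(uniseg.HasTrailingLineBreak([]byte("done\n"))) // true: LF is a hard break (LB5).
	fmt.Println(uniseg.HasTrailingLineBreakInString("pending")) // false: 'g' is not BK, CR, LF, or NL.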
diff --git a/vendor/github.com/rivo/uniseg/lineproperties.go b/vendor/github.com/rivo/uniseg/lineproperties.go
new file mode 100644
index 0000000..3216930
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/lineproperties.go
@@ -0,0 +1,3513 @@
+package uniseg
+
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+// lineBreakCodePoints are taken from
+// https://www.unicode.org/Public/14.0.0/ucd/LineBreak.txt
+// and
+// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var lineBreakCodePoints = [][4]int{
+ {0x0000, 0x0008, prCM, gcCc}, // [9] <control-0000>..<control-0008>
+ {0x0009, 0x0009, prBA, gcCc}, // <control-0009>
+ {0x000A, 0x000A, prLF, gcCc}, // <control-000A>
+ {0x000B, 0x000C, prBK, gcCc}, // [2] <control-000B>..<control-000C>
+ {0x000D, 0x000D, prCR, gcCc}, // <control-000D>
+ {0x000E, 0x001F, prCM, gcCc}, // [18] <control-000E>..<control-001F>
+ {0x0020, 0x0020, prSP, gcZs}, // SPACE
+ {0x0021, 0x0021, prEX, gcPo}, // EXCLAMATION MARK
+ {0x0022, 0x0022, prQU, gcPo}, // QUOTATION MARK
+ {0x0023, 0x0023, prAL, gcPo}, // NUMBER SIGN
+ {0x0024, 0x0024, prPR, gcSc}, // DOLLAR SIGN
+ {0x0025, 0x0025, prPO, gcPo}, // PERCENT SIGN
+ {0x0026, 0x0026, prAL, gcPo}, // AMPERSAND
+ {0x0027, 0x0027, prQU, gcPo}, // APOSTROPHE
+ {0x0028, 0x0028, prOP, gcPs}, // LEFT PARENTHESIS
+ {0x0029, 0x0029, prCP, gcPe}, // RIGHT PARENTHESIS
+ {0x002A, 0x002A, prAL, gcPo}, // ASTERISK
+ {0x002B, 0x002B, prPR, gcSm}, // PLUS SIGN
+ {0x002C, 0x002C, prIS, gcPo}, // COMMA
+ {0x002D, 0x002D, prHY, gcPd}, // HYPHEN-MINUS
+ {0x002E, 0x002E, prIS, gcPo}, // FULL STOP
+ {0x002F, 0x002F, prSY, gcPo}, // SOLIDUS
+ {0x0030, 0x0039, prNU, gcNd}, // [10] DIGIT ZERO..DIGIT NINE
+ {0x003A, 0x003B, prIS, gcPo}, // [2] COLON..SEMICOLON
+ {0x003C, 0x003E, prAL, gcSm}, // [3] LESS-THAN SIGN..GREATER-THAN SIGN
+ {0x003F, 0x003F, prEX, gcPo}, // QUESTION MARK
+ {0x0040, 0x0040, prAL, gcPo}, // COMMERCIAL AT
+ {0x0041, 0x005A, prAL, gcLu}, // [26] LATIN CAPITAL LETTER A..LATIN CAPITAL LETTER Z
+ {0x005B, 0x005B, prOP, gcPs}, // LEFT SQUARE BRACKET
+ {0x005C, 0x005C, prPR, gcPo}, // REVERSE SOLIDUS
+ {0x005D, 0x005D, prCP, gcPe}, // RIGHT SQUARE BRACKET
+ {0x005E, 0x005E, prAL, gcSk}, // CIRCUMFLEX ACCENT
+ {0x005F, 0x005F, prAL, gcPc}, // LOW LINE
+ {0x0060, 0x0060, prAL, gcSk}, // GRAVE ACCENT
+ {0x0061, 0x007A, prAL, gcLl}, // [26] LATIN SMALL LETTER A..LATIN SMALL LETTER Z
+ {0x007B, 0x007B, prOP, gcPs}, // LEFT CURLY BRACKET
+ {0x007C, 0x007C, prBA, gcSm}, // VERTICAL LINE
+ {0x007D, 0x007D, prCL, gcPe}, // RIGHT CURLY BRACKET
+ {0x007E, 0x007E, prAL, gcSm}, // TILDE
+ {0x007F, 0x007F, prCM, gcCc}, // <control-007F>
+ {0x0080, 0x0084, prCM, gcCc}, // [5] <control-0080>..<control-0084>
+ {0x0085, 0x0085, prNL, gcCc}, // <control-0085>
+ {0x0086, 0x009F, prCM, gcCc}, // [26] <control-0086>..<control-009F>
+ {0x00A0, 0x00A0, prGL, gcZs}, // NO-BREAK SPACE
+ {0x00A1, 0x00A1, prOP, gcPo}, // INVERTED EXCLAMATION MARK
+ {0x00A2, 0x00A2, prPO, gcSc}, // CENT SIGN
+ {0x00A3, 0x00A5, prPR, gcSc}, // [3] POUND SIGN..YEN SIGN
+ {0x00A6, 0x00A6, prAL, gcSo}, // BROKEN BAR
+ {0x00A7, 0x00A7, prAI, gcPo}, // SECTION SIGN
+ {0x00A8, 0x00A8, prAI, gcSk}, // DIAERESIS
+ {0x00A9, 0x00A9, prAL, gcSo}, // COPYRIGHT SIGN
+ {0x00AA, 0x00AA, prAI, gcLo}, // FEMININE ORDINAL INDICATOR
+ {0x00AB, 0x00AB, prQU, gcPi}, // LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00AC, 0x00AC, prAL, gcSm}, // NOT SIGN
+ {0x00AD, 0x00AD, prBA, gcCf}, // SOFT HYPHEN
+ {0x00AE, 0x00AE, prAL, gcSo}, // REGISTERED SIGN
+ {0x00AF, 0x00AF, prAL, gcSk}, // MACRON
+ {0x00B0, 0x00B0, prPO, gcSo}, // DEGREE SIGN
+ {0x00B1, 0x00B1, prPR, gcSm}, // PLUS-MINUS SIGN
+ {0x00B2, 0x00B3, prAI, gcNo}, // [2] SUPERSCRIPT TWO..SUPERSCRIPT THREE
+ {0x00B4, 0x00B4, prBB, gcSk}, // ACUTE ACCENT
+ {0x00B5, 0x00B5, prAL, gcLl}, // MICRO SIGN
+ {0x00B6, 0x00B7, prAI, gcPo}, // [2] PILCROW SIGN..MIDDLE DOT
+ {0x00B8, 0x00B8, prAI, gcSk}, // CEDILLA
+ {0x00B9, 0x00B9, prAI, gcNo}, // SUPERSCRIPT ONE
+ {0x00BA, 0x00BA, prAI, gcLo}, // MASCULINE ORDINAL INDICATOR
+ {0x00BB, 0x00BB, prQU, gcPf}, // RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00BC, 0x00BE, prAI, gcNo}, // [3] VULGAR FRACTION ONE QUARTER..VULGAR FRACTION THREE QUARTERS
+ {0x00BF, 0x00BF, prOP, gcPo}, // INVERTED QUESTION MARK
+ {0x00C0, 0x00D6, prAL, gcLu}, // [23] LATIN CAPITAL LETTER A WITH GRAVE..LATIN CAPITAL LETTER O WITH DIAERESIS
+ {0x00D7, 0x00D7, prAI, gcSm}, // MULTIPLICATION SIGN
+ {0x00D8, 0x00F6, prAL, gcLC}, // [31] LATIN CAPITAL LETTER O WITH STROKE..LATIN SMALL LETTER O WITH DIAERESIS
+ {0x00F7, 0x00F7, prAI, gcSm}, // DIVISION SIGN
+ {0x00F8, 0x00FF, prAL, gcLl}, // [8] LATIN SMALL LETTER O WITH STROKE..LATIN SMALL LETTER Y WITH DIAERESIS
+ {0x0100, 0x017F, prAL, gcLC}, // [128] LATIN CAPITAL LETTER A WITH MACRON..LATIN SMALL LETTER LONG S
+ {0x0180, 0x01BA, prAL, gcLC}, // [59] LATIN SMALL LETTER B WITH STROKE..LATIN SMALL LETTER EZH WITH TAIL
+ {0x01BB, 0x01BB, prAL, gcLo}, // LATIN LETTER TWO WITH STROKE
+ {0x01BC, 0x01BF, prAL, gcLC}, // [4] LATIN CAPITAL LETTER TONE FIVE..LATIN LETTER WYNN
+ {0x01C0, 0x01C3, prAL, gcLo}, // [4] LATIN LETTER DENTAL CLICK..LATIN LETTER RETROFLEX CLICK
+ {0x01C4, 0x024F, prAL, gcLC}, // [140] LATIN CAPITAL LETTER DZ WITH CARON..LATIN SMALL LETTER Y WITH STROKE
+ {0x0250, 0x0293, prAL, gcLl}, // [68] LATIN SMALL LETTER TURNED A..LATIN SMALL LETTER EZH WITH CURL
+ {0x0294, 0x0294, prAL, gcLo}, // LATIN LETTER GLOTTAL STOP
+ {0x0295, 0x02AF, prAL, gcLl}, // [27] LATIN LETTER PHARYNGEAL VOICED FRICATIVE..LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL
+ {0x02B0, 0x02C1, prAL, gcLm}, // [18] MODIFIER LETTER SMALL H..MODIFIER LETTER REVERSED GLOTTAL STOP
+ {0x02C2, 0x02C5, prAL, gcSk}, // [4] MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER DOWN ARROWHEAD
+ {0x02C6, 0x02C6, prAL, gcLm}, // MODIFIER LETTER CIRCUMFLEX ACCENT
+ {0x02C7, 0x02C7, prAI, gcLm}, // CARON
+ {0x02C8, 0x02C8, prBB, gcLm}, // MODIFIER LETTER VERTICAL LINE
+ {0x02C9, 0x02CB, prAI, gcLm}, // [3] MODIFIER LETTER MACRON..MODIFIER LETTER GRAVE ACCENT
+ {0x02CC, 0x02CC, prBB, gcLm}, // MODIFIER LETTER LOW VERTICAL LINE
+ {0x02CD, 0x02CD, prAI, gcLm}, // MODIFIER LETTER LOW MACRON
+ {0x02CE, 0x02CF, prAL, gcLm}, // [2] MODIFIER LETTER LOW GRAVE ACCENT..MODIFIER LETTER LOW ACUTE ACCENT
+ {0x02D0, 0x02D0, prAI, gcLm}, // MODIFIER LETTER TRIANGULAR COLON
+ {0x02D1, 0x02D1, prAL, gcLm}, // MODIFIER LETTER HALF TRIANGULAR COLON
+ {0x02D2, 0x02D7, prAL, gcSk}, // [6] MODIFIER LETTER CENTRED RIGHT HALF RING..MODIFIER LETTER MINUS SIGN
+ {0x02D8, 0x02DB, prAI, gcSk}, // [4] BREVE..OGONEK
+ {0x02DC, 0x02DC, prAL, gcSk}, // SMALL TILDE
+ {0x02DD, 0x02DD, prAI, gcSk}, // DOUBLE ACUTE ACCENT
+ {0x02DE, 0x02DE, prAL, gcSk}, // MODIFIER LETTER RHOTIC HOOK
+ {0x02DF, 0x02DF, prBB, gcSk}, // MODIFIER LETTER CROSS ACCENT
+ {0x02E0, 0x02E4, prAL, gcLm}, // [5] MODIFIER LETTER SMALL GAMMA..MODIFIER LETTER SMALL REVERSED GLOTTAL STOP
+ {0x02E5, 0x02EB, prAL, gcSk}, // [7] MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER YANG DEPARTING TONE MARK
+ {0x02EC, 0x02EC, prAL, gcLm}, // MODIFIER LETTER VOICING
+ {0x02ED, 0x02ED, prAL, gcSk}, // MODIFIER LETTER UNASPIRATED
+ {0x02EE, 0x02EE, prAL, gcLm}, // MODIFIER LETTER DOUBLE APOSTROPHE
+ {0x02EF, 0x02FF, prAL, gcSk}, // [17] MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW
+ {0x0300, 0x034E, prCM, gcMn}, // [79] COMBINING GRAVE ACCENT..COMBINING UPWARDS ARROW BELOW
+ {0x034F, 0x034F, prGL, gcMn}, // COMBINING GRAPHEME JOINER
+ {0x0350, 0x035B, prCM, gcMn}, // [12] COMBINING RIGHT ARROWHEAD ABOVE..COMBINING ZIGZAG ABOVE
+ {0x035C, 0x0362, prGL, gcMn}, // [7] COMBINING DOUBLE BREVE BELOW..COMBINING DOUBLE RIGHTWARDS ARROW BELOW
+ {0x0363, 0x036F, prCM, gcMn}, // [13] COMBINING LATIN SMALL LETTER A..COMBINING LATIN SMALL LETTER X
+ {0x0370, 0x0373, prAL, gcLC}, // [4] GREEK CAPITAL LETTER HETA..GREEK SMALL LETTER ARCHAIC SAMPI
+ {0x0374, 0x0374, prAL, gcLm}, // GREEK NUMERAL SIGN
+ {0x0375, 0x0375, prAL, gcSk}, // GREEK LOWER NUMERAL SIGN
+ {0x0376, 0x0377, prAL, gcLC}, // [2] GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA..GREEK SMALL LETTER PAMPHYLIAN DIGAMMA
+ {0x037A, 0x037A, prAL, gcLm}, // GREEK YPOGEGRAMMENI
+ {0x037B, 0x037D, prAL, gcLl}, // [3] GREEK SMALL REVERSED LUNATE SIGMA SYMBOL..GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x037E, 0x037E, prIS, gcPo}, // GREEK QUESTION MARK
+ {0x037F, 0x037F, prAL, gcLu}, // GREEK CAPITAL LETTER YOT
+ {0x0384, 0x0385, prAL, gcSk}, // [2] GREEK TONOS..GREEK DIALYTIKA TONOS
+ {0x0386, 0x0386, prAL, gcLu}, // GREEK CAPITAL LETTER ALPHA WITH TONOS
+ {0x0387, 0x0387, prAL, gcPo}, // GREEK ANO TELEIA
+ {0x0388, 0x038A, prAL, gcLu}, // [3] GREEK CAPITAL LETTER EPSILON WITH TONOS..GREEK CAPITAL LETTER IOTA WITH TONOS
+ {0x038C, 0x038C, prAL, gcLu}, // GREEK CAPITAL LETTER OMICRON WITH TONOS
+ {0x038E, 0x03A1, prAL, gcLC}, // [20] GREEK CAPITAL LETTER UPSILON WITH TONOS..GREEK CAPITAL LETTER RHO
+ {0x03A3, 0x03F5, prAL, gcLC}, // [83] GREEK CAPITAL LETTER SIGMA..GREEK LUNATE EPSILON SYMBOL
+ {0x03F6, 0x03F6, prAL, gcSm}, // GREEK REVERSED LUNATE EPSILON SYMBOL
+ {0x03F7, 0x03FF, prAL, gcLC}, // [9] GREEK CAPITAL LETTER SHO..GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x0400, 0x0481, prAL, gcLC}, // [130] CYRILLIC CAPITAL LETTER IE WITH GRAVE..CYRILLIC SMALL LETTER KOPPA
+ {0x0482, 0x0482, prAL, gcSo}, // CYRILLIC THOUSANDS SIGN
+ {0x0483, 0x0487, prCM, gcMn}, // [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE
+ {0x0488, 0x0489, prCM, gcMe}, // [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN
+ {0x048A, 0x04FF, prAL, gcLC}, // [118] CYRILLIC CAPITAL LETTER SHORT I WITH TAIL..CYRILLIC SMALL LETTER HA WITH STROKE
+ {0x0500, 0x052F, prAL, gcLC}, // [48] CYRILLIC CAPITAL LETTER KOMI DE..CYRILLIC SMALL LETTER EL WITH DESCENDER
+ {0x0531, 0x0556, prAL, gcLu}, // [38] ARMENIAN CAPITAL LETTER AYB..ARMENIAN CAPITAL LETTER FEH
+ {0x0559, 0x0559, prAL, gcLm}, // ARMENIAN MODIFIER LETTER LEFT HALF RING
+ {0x055A, 0x055F, prAL, gcPo}, // [6] ARMENIAN APOSTROPHE..ARMENIAN ABBREVIATION MARK
+ {0x0560, 0x0588, prAL, gcLl}, // [41] ARMENIAN SMALL LETTER TURNED AYB..ARMENIAN SMALL LETTER YI WITH STROKE
+ {0x0589, 0x0589, prIS, gcPo}, // ARMENIAN FULL STOP
+ {0x058A, 0x058A, prBA, gcPd}, // ARMENIAN HYPHEN
+ {0x058D, 0x058E, prAL, gcSo}, // [2] RIGHT-FACING ARMENIAN ETERNITY SIGN..LEFT-FACING ARMENIAN ETERNITY SIGN
+ {0x058F, 0x058F, prPR, gcSc}, // ARMENIAN DRAM SIGN
+ {0x0591, 0x05BD, prCM, gcMn}, // [45] HEBREW ACCENT ETNAHTA..HEBREW POINT METEG
+ {0x05BE, 0x05BE, prBA, gcPd}, // HEBREW PUNCTUATION MAQAF
+ {0x05BF, 0x05BF, prCM, gcMn}, // HEBREW POINT RAFE
+ {0x05C0, 0x05C0, prAL, gcPo}, // HEBREW PUNCTUATION PASEQ
+ {0x05C1, 0x05C2, prCM, gcMn}, // [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT
+ {0x05C3, 0x05C3, prAL, gcPo}, // HEBREW PUNCTUATION SOF PASUQ
+ {0x05C4, 0x05C5, prCM, gcMn}, // [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT
+ {0x05C6, 0x05C6, prEX, gcPo}, // HEBREW PUNCTUATION NUN HAFUKHA
+ {0x05C7, 0x05C7, prCM, gcMn}, // HEBREW POINT QAMATS QATAN
+ {0x05D0, 0x05EA, prHL, gcLo}, // [27] HEBREW LETTER ALEF..HEBREW LETTER TAV
+ {0x05EF, 0x05F2, prHL, gcLo}, // [4] HEBREW YOD TRIANGLE..HEBREW LIGATURE YIDDISH DOUBLE YOD
+ {0x05F3, 0x05F4, prAL, gcPo}, // [2] HEBREW PUNCTUATION GERESH..HEBREW PUNCTUATION GERSHAYIM
+ {0x0600, 0x0605, prAL, gcCf}, // [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+ {0x0606, 0x0608, prAL, gcSm}, // [3] ARABIC-INDIC CUBE ROOT..ARABIC RAY
+ {0x0609, 0x060A, prPO, gcPo}, // [2] ARABIC-INDIC PER MILLE SIGN..ARABIC-INDIC PER TEN THOUSAND SIGN
+ {0x060B, 0x060B, prPO, gcSc}, // AFGHANI SIGN
+ {0x060C, 0x060D, prIS, gcPo}, // [2] ARABIC COMMA..ARABIC DATE SEPARATOR
+ {0x060E, 0x060F, prAL, gcSo}, // [2] ARABIC POETIC VERSE SIGN..ARABIC SIGN MISRA
+ {0x0610, 0x061A, prCM, gcMn}, // [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA
+ {0x061B, 0x061B, prEX, gcPo}, // ARABIC SEMICOLON
+ {0x061C, 0x061C, prCM, gcCf}, // ARABIC LETTER MARK
+ {0x061D, 0x061F, prEX, gcPo}, // [3] ARABIC END OF TEXT MARK..ARABIC QUESTION MARK
+ {0x0620, 0x063F, prAL, gcLo}, // [32] ARABIC LETTER KASHMIRI YEH..ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE
+ {0x0640, 0x0640, prAL, gcLm}, // ARABIC TATWEEL
+ {0x0641, 0x064A, prAL, gcLo}, // [10] ARABIC LETTER FEH..ARABIC LETTER YEH
+ {0x064B, 0x065F, prCM, gcMn}, // [21] ARABIC FATHATAN..ARABIC WAVY HAMZA BELOW
+ {0x0660, 0x0669, prNU, gcNd}, // [10] ARABIC-INDIC DIGIT ZERO..ARABIC-INDIC DIGIT NINE
+ {0x066A, 0x066A, prPO, gcPo}, // ARABIC PERCENT SIGN
+ {0x066B, 0x066C, prNU, gcPo}, // [2] ARABIC DECIMAL SEPARATOR..ARABIC THOUSANDS SEPARATOR
+ {0x066D, 0x066D, prAL, gcPo}, // ARABIC FIVE POINTED STAR
+ {0x066E, 0x066F, prAL, gcLo}, // [2] ARABIC LETTER DOTLESS BEH..ARABIC LETTER DOTLESS QAF
+ {0x0670, 0x0670, prCM, gcMn}, // ARABIC LETTER SUPERSCRIPT ALEF
+ {0x0671, 0x06D3, prAL, gcLo}, // [99] ARABIC LETTER ALEF WASLA..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE
+ {0x06D4, 0x06D4, prEX, gcPo}, // ARABIC FULL STOP
+ {0x06D5, 0x06D5, prAL, gcLo}, // ARABIC LETTER AE
+ {0x06D6, 0x06DC, prCM, gcMn}, // [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN
+ {0x06DD, 0x06DD, prAL, gcCf}, // ARABIC END OF AYAH
+ {0x06DE, 0x06DE, prAL, gcSo}, // ARABIC START OF RUB EL HIZB
+ {0x06DF, 0x06E4, prCM, gcMn}, // [6] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH MADDA
+ {0x06E5, 0x06E6, prAL, gcLm}, // [2] ARABIC SMALL WAW..ARABIC SMALL YEH
+ {0x06E7, 0x06E8, prCM, gcMn}, // [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON
+ {0x06E9, 0x06E9, prAL, gcSo}, // ARABIC PLACE OF SAJDAH
+ {0x06EA, 0x06ED, prCM, gcMn}, // [4] ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+ {0x06EE, 0x06EF, prAL, gcLo}, // [2] ARABIC LETTER DAL WITH INVERTED V..ARABIC LETTER REH WITH INVERTED V
+ {0x06F0, 0x06F9, prNU, gcNd}, // [10] EXTENDED ARABIC-INDIC DIGIT ZERO..EXTENDED ARABIC-INDIC DIGIT NINE
+ {0x06FA, 0x06FC, prAL, gcLo}, // [3] ARABIC LETTER SHEEN WITH DOT BELOW..ARABIC LETTER GHAIN WITH DOT BELOW
+ {0x06FD, 0x06FE, prAL, gcSo}, // [2] ARABIC SIGN SINDHI AMPERSAND..ARABIC SIGN SINDHI POSTPOSITION MEN
+ {0x06FF, 0x06FF, prAL, gcLo}, // ARABIC LETTER HEH WITH INVERTED V
+ {0x0700, 0x070D, prAL, gcPo}, // [14] SYRIAC END OF PARAGRAPH..SYRIAC HARKLEAN ASTERISCUS
+ {0x070F, 0x070F, prAL, gcCf}, // SYRIAC ABBREVIATION MARK
+ {0x0710, 0x0710, prAL, gcLo}, // SYRIAC LETTER ALAPH
+ {0x0711, 0x0711, prCM, gcMn}, // SYRIAC LETTER SUPERSCRIPT ALAPH
+ {0x0712, 0x072F, prAL, gcLo}, // [30] SYRIAC LETTER BETH..SYRIAC LETTER PERSIAN DHALATH
+ {0x0730, 0x074A, prCM, gcMn}, // [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+ {0x074D, 0x074F, prAL, gcLo}, // [3] SYRIAC LETTER SOGDIAN ZHAIN..SYRIAC LETTER SOGDIAN FE
+ {0x0750, 0x077F, prAL, gcLo}, // [48] ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW..ARABIC LETTER KAF WITH TWO DOTS ABOVE
+ {0x0780, 0x07A5, prAL, gcLo}, // [38] THAANA LETTER HAA..THAANA LETTER WAAVU
+ {0x07A6, 0x07B0, prCM, gcMn}, // [11] THAANA ABAFILI..THAANA SUKUN
+ {0x07B1, 0x07B1, prAL, gcLo}, // THAANA LETTER NAA
+ {0x07C0, 0x07C9, prNU, gcNd}, // [10] NKO DIGIT ZERO..NKO DIGIT NINE
+ {0x07CA, 0x07EA, prAL, gcLo}, // [33] NKO LETTER A..NKO LETTER JONA RA
+ {0x07EB, 0x07F3, prCM, gcMn}, // [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE
+ {0x07F4, 0x07F5, prAL, gcLm}, // [2] NKO HIGH TONE APOSTROPHE..NKO LOW TONE APOSTROPHE
+ {0x07F6, 0x07F6, prAL, gcSo}, // NKO SYMBOL OO DENNEN
+ {0x07F7, 0x07F7, prAL, gcPo}, // NKO SYMBOL GBAKURUNEN
+ {0x07F8, 0x07F8, prIS, gcPo}, // NKO COMMA
+ {0x07F9, 0x07F9, prEX, gcPo}, // NKO EXCLAMATION MARK
+ {0x07FA, 0x07FA, prAL, gcLm}, // NKO LAJANYALAN
+ {0x07FD, 0x07FD, prCM, gcMn}, // NKO DANTAYALAN
+ {0x07FE, 0x07FF, prPR, gcSc}, // [2] NKO DOROME SIGN..NKO TAMAN SIGN
+ {0x0800, 0x0815, prAL, gcLo}, // [22] SAMARITAN LETTER ALAF..SAMARITAN LETTER TAAF
+ {0x0816, 0x0819, prCM, gcMn}, // [4] SAMARITAN MARK IN..SAMARITAN MARK DAGESH
+ {0x081A, 0x081A, prAL, gcLm}, // SAMARITAN MODIFIER LETTER EPENTHETIC YUT
+ {0x081B, 0x0823, prCM, gcMn}, // [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A
+ {0x0824, 0x0824, prAL, gcLm}, // SAMARITAN MODIFIER LETTER SHORT A
+ {0x0825, 0x0827, prCM, gcMn}, // [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U
+ {0x0828, 0x0828, prAL, gcLm}, // SAMARITAN MODIFIER LETTER I
+ {0x0829, 0x082D, prCM, gcMn}, // [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA
+ {0x0830, 0x083E, prAL, gcPo}, // [15] SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU
+ {0x0840, 0x0858, prAL, gcLo}, // [25] MANDAIC LETTER HALQA..MANDAIC LETTER AIN
+ {0x0859, 0x085B, prCM, gcMn}, // [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK
+ {0x085E, 0x085E, prAL, gcPo}, // MANDAIC PUNCTUATION
+ {0x0860, 0x086A, prAL, gcLo}, // [11] SYRIAC LETTER MALAYALAM NGA..SYRIAC LETTER MALAYALAM SSA
+ {0x0870, 0x0887, prAL, gcLo}, // [24] ARABIC LETTER ALEF WITH ATTACHED FATHA..ARABIC BASELINE ROUND DOT
+ {0x0888, 0x0888, prAL, gcSk}, // ARABIC RAISED ROUND DOT
+ {0x0889, 0x088E, prAL, gcLo}, // [6] ARABIC LETTER NOON WITH INVERTED SMALL V..ARABIC VERTICAL TAIL
+ {0x0890, 0x0891, prAL, gcCf}, // [2] ARABIC POUND MARK ABOVE..ARABIC PIASTRE MARK ABOVE
+ {0x0898, 0x089F, prCM, gcMn}, // [8] ARABIC SMALL HIGH WORD AL-JUZ..ARABIC HALF MADDA OVER MADDA
+ {0x08A0, 0x08C8, prAL, gcLo}, // [41] ARABIC LETTER BEH WITH SMALL V BELOW..ARABIC LETTER GRAF
+ {0x08C9, 0x08C9, prAL, gcLm}, // ARABIC SMALL FARSI YEH
+ {0x08CA, 0x08E1, prCM, gcMn}, // [24] ARABIC SMALL HIGH FARSI YEH..ARABIC SMALL HIGH SIGN SAFHA
+ {0x08E2, 0x08E2, prAL, gcCf}, // ARABIC DISPUTED END OF AYAH
+ {0x08E3, 0x08FF, prCM, gcMn}, // [29] ARABIC TURNED DAMMA BELOW..ARABIC MARK SIDEWAYS NOON GHUNNA
+ {0x0900, 0x0902, prCM, gcMn}, // [3] DEVANAGARI SIGN INVERTED CANDRABINDU..DEVANAGARI SIGN ANUSVARA
+ {0x0903, 0x0903, prCM, gcMc}, // DEVANAGARI SIGN VISARGA
+ {0x0904, 0x0939, prAL, gcLo}, // [54] DEVANAGARI LETTER SHORT A..DEVANAGARI LETTER HA
+ {0x093A, 0x093A, prCM, gcMn}, // DEVANAGARI VOWEL SIGN OE
+ {0x093B, 0x093B, prCM, gcMc}, // DEVANAGARI VOWEL SIGN OOE
+ {0x093C, 0x093C, prCM, gcMn}, // DEVANAGARI SIGN NUKTA
+ {0x093D, 0x093D, prAL, gcLo}, // DEVANAGARI SIGN AVAGRAHA
+ {0x093E, 0x0940, prCM, gcMc}, // [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II
+ {0x0941, 0x0948, prCM, gcMn}, // [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI
+ {0x0949, 0x094C, prCM, gcMc}, // [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU
+ {0x094D, 0x094D, prCM, gcMn}, // DEVANAGARI SIGN VIRAMA
+ {0x094E, 0x094F, prCM, gcMc}, // [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW
+ {0x0950, 0x0950, prAL, gcLo}, // DEVANAGARI OM
+ {0x0951, 0x0957, prCM, gcMn}, // [7] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI VOWEL SIGN UUE
+ {0x0958, 0x0961, prAL, gcLo}, // [10] DEVANAGARI LETTER QA..DEVANAGARI LETTER VOCALIC LL
+ {0x0962, 0x0963, prCM, gcMn}, // [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL
+ {0x0964, 0x0965, prBA, gcPo}, // [2] DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA
+ {0x0966, 0x096F, prNU, gcNd}, // [10] DEVANAGARI DIGIT ZERO..DEVANAGARI DIGIT NINE
+ {0x0970, 0x0970, prAL, gcPo}, // DEVANAGARI ABBREVIATION SIGN
+ {0x0971, 0x0971, prAL, gcLm}, // DEVANAGARI SIGN HIGH SPACING DOT
+ {0x0972, 0x097F, prAL, gcLo}, // [14] DEVANAGARI LETTER CANDRA A..DEVANAGARI LETTER BBA
+ {0x0980, 0x0980, prAL, gcLo}, // BENGALI ANJI
+ {0x0981, 0x0981, prCM, gcMn}, // BENGALI SIGN CANDRABINDU
+ {0x0982, 0x0983, prCM, gcMc}, // [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA
+ {0x0985, 0x098C, prAL, gcLo}, // [8] BENGALI LETTER A..BENGALI LETTER VOCALIC L
+ {0x098F, 0x0990, prAL, gcLo}, // [2] BENGALI LETTER E..BENGALI LETTER AI
+ {0x0993, 0x09A8, prAL, gcLo}, // [22] BENGALI LETTER O..BENGALI LETTER NA
+ {0x09AA, 0x09B0, prAL, gcLo}, // [7] BENGALI LETTER PA..BENGALI LETTER RA
+ {0x09B2, 0x09B2, prAL, gcLo}, // BENGALI LETTER LA
+ {0x09B6, 0x09B9, prAL, gcLo}, // [4] BENGALI LETTER SHA..BENGALI LETTER HA
+ {0x09BC, 0x09BC, prCM, gcMn}, // BENGALI SIGN NUKTA
+ {0x09BD, 0x09BD, prAL, gcLo}, // BENGALI SIGN AVAGRAHA
+ {0x09BE, 0x09C0, prCM, gcMc}, // [3] BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN II
+ {0x09C1, 0x09C4, prCM, gcMn}, // [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR
+ {0x09C7, 0x09C8, prCM, gcMc}, // [2] BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+ {0x09CB, 0x09CC, prCM, gcMc}, // [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU
+ {0x09CD, 0x09CD, prCM, gcMn}, // BENGALI SIGN VIRAMA
+ {0x09CE, 0x09CE, prAL, gcLo}, // BENGALI LETTER KHANDA TA
+ {0x09D7, 0x09D7, prCM, gcMc}, // BENGALI AU LENGTH MARK
+ {0x09DC, 0x09DD, prAL, gcLo}, // [2] BENGALI LETTER RRA..BENGALI LETTER RHA
+ {0x09DF, 0x09E1, prAL, gcLo}, // [3] BENGALI LETTER YYA..BENGALI LETTER VOCALIC LL
+ {0x09E2, 0x09E3, prCM, gcMn}, // [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL
+ {0x09E6, 0x09EF, prNU, gcNd}, // [10] BENGALI DIGIT ZERO..BENGALI DIGIT NINE
+ {0x09F0, 0x09F1, prAL, gcLo}, // [2] BENGALI LETTER RA WITH MIDDLE DIAGONAL..BENGALI LETTER RA WITH LOWER DIAGONAL
+ {0x09F2, 0x09F3, prPO, gcSc}, // [2] BENGALI RUPEE MARK..BENGALI RUPEE SIGN
+ {0x09F4, 0x09F8, prAL, gcNo}, // [5] BENGALI CURRENCY NUMERATOR ONE..BENGALI CURRENCY NUMERATOR ONE LESS THAN THE DENOMINATOR
+ {0x09F9, 0x09F9, prPO, gcNo}, // BENGALI CURRENCY DENOMINATOR SIXTEEN
+ {0x09FA, 0x09FA, prAL, gcSo}, // BENGALI ISSHAR
+ {0x09FB, 0x09FB, prPR, gcSc}, // BENGALI GANDA MARK
+ {0x09FC, 0x09FC, prAL, gcLo}, // BENGALI LETTER VEDIC ANUSVARA
+ {0x09FD, 0x09FD, prAL, gcPo}, // BENGALI ABBREVIATION SIGN
+ {0x09FE, 0x09FE, prCM, gcMn}, // BENGALI SANDHI MARK
+ {0x0A01, 0x0A02, prCM, gcMn}, // [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI
+ {0x0A03, 0x0A03, prCM, gcMc}, // GURMUKHI SIGN VISARGA
+ {0x0A05, 0x0A0A, prAL, gcLo}, // [6] GURMUKHI LETTER A..GURMUKHI LETTER UU
+ {0x0A0F, 0x0A10, prAL, gcLo}, // [2] GURMUKHI LETTER EE..GURMUKHI LETTER AI
+ {0x0A13, 0x0A28, prAL, gcLo}, // [22] GURMUKHI LETTER OO..GURMUKHI LETTER NA
+ {0x0A2A, 0x0A30, prAL, gcLo}, // [7] GURMUKHI LETTER PA..GURMUKHI LETTER RA
+ {0x0A32, 0x0A33, prAL, gcLo}, // [2] GURMUKHI LETTER LA..GURMUKHI LETTER LLA
+ {0x0A35, 0x0A36, prAL, gcLo}, // [2] GURMUKHI LETTER VA..GURMUKHI LETTER SHA
+ {0x0A38, 0x0A39, prAL, gcLo}, // [2] GURMUKHI LETTER SA..GURMUKHI LETTER HA
+ {0x0A3C, 0x0A3C, prCM, gcMn}, // GURMUKHI SIGN NUKTA
+ {0x0A3E, 0x0A40, prCM, gcMc}, // [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II
+ {0x0A41, 0x0A42, prCM, gcMn}, // [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU
+ {0x0A47, 0x0A48, prCM, gcMn}, // [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+ {0x0A4B, 0x0A4D, prCM, gcMn}, // [3] GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+ {0x0A51, 0x0A51, prCM, gcMn}, // GURMUKHI SIGN UDAAT
+ {0x0A59, 0x0A5C, prAL, gcLo}, // [4] GURMUKHI LETTER KHHA..GURMUKHI LETTER RRA
+ {0x0A5E, 0x0A5E, prAL, gcLo}, // GURMUKHI LETTER FA
+ {0x0A66, 0x0A6F, prNU, gcNd}, // [10] GURMUKHI DIGIT ZERO..GURMUKHI DIGIT NINE
+ {0x0A70, 0x0A71, prCM, gcMn}, // [2] GURMUKHI TIPPI..GURMUKHI ADDAK
+ {0x0A72, 0x0A74, prAL, gcLo}, // [3] GURMUKHI IRI..GURMUKHI EK ONKAR
+ {0x0A75, 0x0A75, prCM, gcMn}, // GURMUKHI SIGN YAKASH
+ {0x0A76, 0x0A76, prAL, gcPo}, // GURMUKHI ABBREVIATION SIGN
+ {0x0A81, 0x0A82, prCM, gcMn}, // [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA
+ {0x0A83, 0x0A83, prCM, gcMc}, // GUJARATI SIGN VISARGA
+ {0x0A85, 0x0A8D, prAL, gcLo}, // [9] GUJARATI LETTER A..GUJARATI VOWEL CANDRA E
+ {0x0A8F, 0x0A91, prAL, gcLo}, // [3] GUJARATI LETTER E..GUJARATI VOWEL CANDRA O
+ {0x0A93, 0x0AA8, prAL, gcLo}, // [22] GUJARATI LETTER O..GUJARATI LETTER NA
+ {0x0AAA, 0x0AB0, prAL, gcLo}, // [7] GUJARATI LETTER PA..GUJARATI LETTER RA
+ {0x0AB2, 0x0AB3, prAL, gcLo}, // [2] GUJARATI LETTER LA..GUJARATI LETTER LLA
+ {0x0AB5, 0x0AB9, prAL, gcLo}, // [5] GUJARATI LETTER VA..GUJARATI LETTER HA
+ {0x0ABC, 0x0ABC, prCM, gcMn}, // GUJARATI SIGN NUKTA
+ {0x0ABD, 0x0ABD, prAL, gcLo}, // GUJARATI SIGN AVAGRAHA
+ {0x0ABE, 0x0AC0, prCM, gcMc}, // [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II
+ {0x0AC1, 0x0AC5, prCM, gcMn}, // [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E
+ {0x0AC7, 0x0AC8, prCM, gcMn}, // [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI
+ {0x0AC9, 0x0AC9, prCM, gcMc}, // GUJARATI VOWEL SIGN CANDRA O
+ {0x0ACB, 0x0ACC, prCM, gcMc}, // [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU
+ {0x0ACD, 0x0ACD, prCM, gcMn}, // GUJARATI SIGN VIRAMA
+ {0x0AD0, 0x0AD0, prAL, gcLo}, // GUJARATI OM
+ {0x0AE0, 0x0AE1, prAL, gcLo}, // [2] GUJARATI LETTER VOCALIC RR..GUJARATI LETTER VOCALIC LL
+ {0x0AE2, 0x0AE3, prCM, gcMn}, // [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL
+ {0x0AE6, 0x0AEF, prNU, gcNd}, // [10] GUJARATI DIGIT ZERO..GUJARATI DIGIT NINE
+ {0x0AF0, 0x0AF0, prAL, gcPo}, // GUJARATI ABBREVIATION SIGN
+ {0x0AF1, 0x0AF1, prPR, gcSc}, // GUJARATI RUPEE SIGN
+ {0x0AF9, 0x0AF9, prAL, gcLo}, // GUJARATI LETTER ZHA
+ {0x0AFA, 0x0AFF, prCM, gcMn}, // [6] GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
+ {0x0B01, 0x0B01, prCM, gcMn}, // ORIYA SIGN CANDRABINDU
+ {0x0B02, 0x0B03, prCM, gcMc}, // [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA
+ {0x0B05, 0x0B0C, prAL, gcLo}, // [8] ORIYA LETTER A..ORIYA LETTER VOCALIC L
+ {0x0B0F, 0x0B10, prAL, gcLo}, // [2] ORIYA LETTER E..ORIYA LETTER AI
+ {0x0B13, 0x0B28, prAL, gcLo}, // [22] ORIYA LETTER O..ORIYA LETTER NA
+ {0x0B2A, 0x0B30, prAL, gcLo}, // [7] ORIYA LETTER PA..ORIYA LETTER RA
+ {0x0B32, 0x0B33, prAL, gcLo}, // [2] ORIYA LETTER LA..ORIYA LETTER LLA
+ {0x0B35, 0x0B39, prAL, gcLo}, // [5] ORIYA LETTER VA..ORIYA LETTER HA
+ {0x0B3C, 0x0B3C, prCM, gcMn}, // ORIYA SIGN NUKTA
+ {0x0B3D, 0x0B3D, prAL, gcLo}, // ORIYA SIGN AVAGRAHA
+ {0x0B3E, 0x0B3E, prCM, gcMc}, // ORIYA VOWEL SIGN AA
+ {0x0B3F, 0x0B3F, prCM, gcMn}, // ORIYA VOWEL SIGN I
+ {0x0B40, 0x0B40, prCM, gcMc}, // ORIYA VOWEL SIGN II
+ {0x0B41, 0x0B44, prCM, gcMn}, // [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR
+ {0x0B47, 0x0B48, prCM, gcMc}, // [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI
+ {0x0B4B, 0x0B4C, prCM, gcMc}, // [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU
+ {0x0B4D, 0x0B4D, prCM, gcMn}, // ORIYA SIGN VIRAMA
+ {0x0B55, 0x0B56, prCM, gcMn}, // [2] ORIYA SIGN OVERLINE..ORIYA AI LENGTH MARK
+ {0x0B57, 0x0B57, prCM, gcMc}, // ORIYA AU LENGTH MARK
+ {0x0B5C, 0x0B5D, prAL, gcLo}, // [2] ORIYA LETTER RRA..ORIYA LETTER RHA
+ {0x0B5F, 0x0B61, prAL, gcLo}, // [3] ORIYA LETTER YYA..ORIYA LETTER VOCALIC LL
+ {0x0B62, 0x0B63, prCM, gcMn}, // [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL
+ {0x0B66, 0x0B6F, prNU, gcNd}, // [10] ORIYA DIGIT ZERO..ORIYA DIGIT NINE
+ {0x0B70, 0x0B70, prAL, gcSo}, // ORIYA ISSHAR
+ {0x0B71, 0x0B71, prAL, gcLo}, // ORIYA LETTER WA
+ {0x0B72, 0x0B77, prAL, gcNo}, // [6] ORIYA FRACTION ONE QUARTER..ORIYA FRACTION THREE SIXTEENTHS
+ {0x0B82, 0x0B82, prCM, gcMn}, // TAMIL SIGN ANUSVARA
+ {0x0B83, 0x0B83, prAL, gcLo}, // TAMIL SIGN VISARGA
+ {0x0B85, 0x0B8A, prAL, gcLo}, // [6] TAMIL LETTER A..TAMIL LETTER UU
+ {0x0B8E, 0x0B90, prAL, gcLo}, // [3] TAMIL LETTER E..TAMIL LETTER AI
+ {0x0B92, 0x0B95, prAL, gcLo}, // [4] TAMIL LETTER O..TAMIL LETTER KA
+ {0x0B99, 0x0B9A, prAL, gcLo}, // [2] TAMIL LETTER NGA..TAMIL LETTER CA
+ {0x0B9C, 0x0B9C, prAL, gcLo}, // TAMIL LETTER JA
+ {0x0B9E, 0x0B9F, prAL, gcLo}, // [2] TAMIL LETTER NYA..TAMIL LETTER TTA
+ {0x0BA3, 0x0BA4, prAL, gcLo}, // [2] TAMIL LETTER NNA..TAMIL LETTER TA
+ {0x0BA8, 0x0BAA, prAL, gcLo}, // [3] TAMIL LETTER NA..TAMIL LETTER PA
+ {0x0BAE, 0x0BB9, prAL, gcLo}, // [12] TAMIL LETTER MA..TAMIL LETTER HA
+ {0x0BBE, 0x0BBF, prCM, gcMc}, // [2] TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN I
+ {0x0BC0, 0x0BC0, prCM, gcMn}, // TAMIL VOWEL SIGN II
+ {0x0BC1, 0x0BC2, prCM, gcMc}, // [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU
+ {0x0BC6, 0x0BC8, prCM, gcMc}, // [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI
+ {0x0BCA, 0x0BCC, prCM, gcMc}, // [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU
+ {0x0BCD, 0x0BCD, prCM, gcMn}, // TAMIL SIGN VIRAMA
+ {0x0BD0, 0x0BD0, prAL, gcLo}, // TAMIL OM
+ {0x0BD7, 0x0BD7, prCM, gcMc}, // TAMIL AU LENGTH MARK
+ {0x0BE6, 0x0BEF, prNU, gcNd}, // [10] TAMIL DIGIT ZERO..TAMIL DIGIT NINE
+ {0x0BF0, 0x0BF2, prAL, gcNo}, // [3] TAMIL NUMBER TEN..TAMIL NUMBER ONE THOUSAND
+ {0x0BF3, 0x0BF8, prAL, gcSo}, // [6] TAMIL DAY SIGN..TAMIL AS ABOVE SIGN
+ {0x0BF9, 0x0BF9, prPR, gcSc}, // TAMIL RUPEE SIGN
+ {0x0BFA, 0x0BFA, prAL, gcSo}, // TAMIL NUMBER SIGN
+ {0x0C00, 0x0C00, prCM, gcMn}, // TELUGU SIGN COMBINING CANDRABINDU ABOVE
+ {0x0C01, 0x0C03, prCM, gcMc}, // [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA
+ {0x0C04, 0x0C04, prCM, gcMn}, // TELUGU SIGN COMBINING ANUSVARA ABOVE
+ {0x0C05, 0x0C0C, prAL, gcLo}, // [8] TELUGU LETTER A..TELUGU LETTER VOCALIC L
+ {0x0C0E, 0x0C10, prAL, gcLo}, // [3] TELUGU LETTER E..TELUGU LETTER AI
+ {0x0C12, 0x0C28, prAL, gcLo}, // [23] TELUGU LETTER O..TELUGU LETTER NA
+ {0x0C2A, 0x0C39, prAL, gcLo}, // [16] TELUGU LETTER PA..TELUGU LETTER HA
+ {0x0C3C, 0x0C3C, prCM, gcMn}, // TELUGU SIGN NUKTA
+ {0x0C3D, 0x0C3D, prAL, gcLo}, // TELUGU SIGN AVAGRAHA
+ {0x0C3E, 0x0C40, prCM, gcMn}, // [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II
+ {0x0C41, 0x0C44, prCM, gcMc}, // [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR
+ {0x0C46, 0x0C48, prCM, gcMn}, // [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI
+ {0x0C4A, 0x0C4D, prCM, gcMn}, // [4] TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA
+ {0x0C55, 0x0C56, prCM, gcMn}, // [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK
+ {0x0C58, 0x0C5A, prAL, gcLo}, // [3] TELUGU LETTER TSA..TELUGU LETTER RRRA
+ {0x0C5D, 0x0C5D, prAL, gcLo}, // TELUGU LETTER NAKAARA POLLU
+ {0x0C60, 0x0C61, prAL, gcLo}, // [2] TELUGU LETTER VOCALIC RR..TELUGU LETTER VOCALIC LL
+ {0x0C62, 0x0C63, prCM, gcMn}, // [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL
+ {0x0C66, 0x0C6F, prNU, gcNd}, // [10] TELUGU DIGIT ZERO..TELUGU DIGIT NINE
+ {0x0C77, 0x0C77, prBB, gcPo}, // TELUGU SIGN SIDDHAM
+ {0x0C78, 0x0C7E, prAL, gcNo}, // [7] TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR..TELUGU FRACTION DIGIT THREE FOR EVEN POWERS OF FOUR
+ {0x0C7F, 0x0C7F, prAL, gcSo}, // TELUGU SIGN TUUMU
+ {0x0C80, 0x0C80, prAL, gcLo}, // KANNADA SIGN SPACING CANDRABINDU
+ {0x0C81, 0x0C81, prCM, gcMn}, // KANNADA SIGN CANDRABINDU
+ {0x0C82, 0x0C83, prCM, gcMc}, // [2] KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA
+ {0x0C84, 0x0C84, prBB, gcPo}, // KANNADA SIGN SIDDHAM
+ {0x0C85, 0x0C8C, prAL, gcLo}, // [8] KANNADA LETTER A..KANNADA LETTER VOCALIC L
+ {0x0C8E, 0x0C90, prAL, gcLo}, // [3] KANNADA LETTER E..KANNADA LETTER AI
+ {0x0C92, 0x0CA8, prAL, gcLo}, // [23] KANNADA LETTER O..KANNADA LETTER NA
+ {0x0CAA, 0x0CB3, prAL, gcLo}, // [10] KANNADA LETTER PA..KANNADA LETTER LLA
+ {0x0CB5, 0x0CB9, prAL, gcLo}, // [5] KANNADA LETTER VA..KANNADA LETTER HA
+ {0x0CBC, 0x0CBC, prCM, gcMn}, // KANNADA SIGN NUKTA
+ {0x0CBD, 0x0CBD, prAL, gcLo}, // KANNADA SIGN AVAGRAHA
+ {0x0CBE, 0x0CBE, prCM, gcMc}, // KANNADA VOWEL SIGN AA
+ {0x0CBF, 0x0CBF, prCM, gcMn}, // KANNADA VOWEL SIGN I
+ {0x0CC0, 0x0CC4, prCM, gcMc}, // [5] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN VOCALIC RR
+ {0x0CC6, 0x0CC6, prCM, gcMn}, // KANNADA VOWEL SIGN E
+ {0x0CC7, 0x0CC8, prCM, gcMc}, // [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI
+ {0x0CCA, 0x0CCB, prCM, gcMc}, // [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO
+ {0x0CCC, 0x0CCD, prCM, gcMn}, // [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
+ {0x0CD5, 0x0CD6, prCM, gcMc}, // [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
+ {0x0CDD, 0x0CDE, prAL, gcLo}, // [2] KANNADA LETTER NAKAARA POLLU..KANNADA LETTER FA
+ {0x0CE0, 0x0CE1, prAL, gcLo}, // [2] KANNADA LETTER VOCALIC RR..KANNADA LETTER VOCALIC LL
+ {0x0CE2, 0x0CE3, prCM, gcMn}, // [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0CE6, 0x0CEF, prNU, gcNd}, // [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
+ {0x0CF1, 0x0CF2, prAL, gcLo}, // [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0D00, 0x0D01, prCM, gcMn}, // [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
+ {0x0D02, 0x0D03, prCM, gcMc}, // [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
+ {0x0D04, 0x0D0C, prAL, gcLo}, // [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
+ {0x0D0E, 0x0D10, prAL, gcLo}, // [3] MALAYALAM LETTER E..MALAYALAM LETTER AI
+ {0x0D12, 0x0D3A, prAL, gcLo}, // [41] MALAYALAM LETTER O..MALAYALAM LETTER TTTA
+ {0x0D3B, 0x0D3C, prCM, gcMn}, // [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
+ {0x0D3D, 0x0D3D, prAL, gcLo}, // MALAYALAM SIGN AVAGRAHA
+ {0x0D3E, 0x0D40, prCM, gcMc}, // [3] MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN II
+ {0x0D41, 0x0D44, prCM, gcMn}, // [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR
+ {0x0D46, 0x0D48, prCM, gcMc}, // [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI
+ {0x0D4A, 0x0D4C, prCM, gcMc}, // [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU
+ {0x0D4D, 0x0D4D, prCM, gcMn}, // MALAYALAM SIGN VIRAMA
+ {0x0D4E, 0x0D4E, prAL, gcLo}, // MALAYALAM LETTER DOT REPH
+ {0x0D4F, 0x0D4F, prAL, gcSo}, // MALAYALAM SIGN PARA
+ {0x0D54, 0x0D56, prAL, gcLo}, // [3] MALAYALAM LETTER CHILLU M..MALAYALAM LETTER CHILLU LLL
+ {0x0D57, 0x0D57, prCM, gcMc}, // MALAYALAM AU LENGTH MARK
+ {0x0D58, 0x0D5E, prAL, gcNo}, // [7] MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH..MALAYALAM FRACTION ONE FIFTH
+ {0x0D5F, 0x0D61, prAL, gcLo}, // [3] MALAYALAM LETTER ARCHAIC II..MALAYALAM LETTER VOCALIC LL
+ {0x0D62, 0x0D63, prCM, gcMn}, // [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL
+ {0x0D66, 0x0D6F, prNU, gcNd}, // [10] MALAYALAM DIGIT ZERO..MALAYALAM DIGIT NINE
+ {0x0D70, 0x0D78, prAL, gcNo}, // [9] MALAYALAM NUMBER TEN..MALAYALAM FRACTION THREE SIXTEENTHS
+ {0x0D79, 0x0D79, prPO, gcSo}, // MALAYALAM DATE MARK
+ {0x0D7A, 0x0D7F, prAL, gcLo}, // [6] MALAYALAM LETTER CHILLU NN..MALAYALAM LETTER CHILLU K
+ {0x0D81, 0x0D81, prCM, gcMn}, // SINHALA SIGN CANDRABINDU
+ {0x0D82, 0x0D83, prCM, gcMc}, // [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA
+ {0x0D85, 0x0D96, prAL, gcLo}, // [18] SINHALA LETTER AYANNA..SINHALA LETTER AUYANNA
+ {0x0D9A, 0x0DB1, prAL, gcLo}, // [24] SINHALA LETTER ALPAPRAANA KAYANNA..SINHALA LETTER DANTAJA NAYANNA
+ {0x0DB3, 0x0DBB, prAL, gcLo}, // [9] SINHALA LETTER SANYAKA DAYANNA..SINHALA LETTER RAYANNA
+ {0x0DBD, 0x0DBD, prAL, gcLo}, // SINHALA LETTER DANTAJA LAYANNA
+ {0x0DC0, 0x0DC6, prAL, gcLo}, // [7] SINHALA LETTER VAYANNA..SINHALA LETTER FAYANNA
+ {0x0DCA, 0x0DCA, prCM, gcMn}, // SINHALA SIGN AL-LAKUNA
+ {0x0DCF, 0x0DD1, prCM, gcMc}, // [3] SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA
+ {0x0DD2, 0x0DD4, prCM, gcMn}, // [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA
+ {0x0DD6, 0x0DD6, prCM, gcMn}, // SINHALA VOWEL SIGN DIGA PAA-PILLA
+ {0x0DD8, 0x0DDF, prCM, gcMc}, // [8] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA
+ {0x0DE6, 0x0DEF, prNU, gcNd}, // [10] SINHALA LITH DIGIT ZERO..SINHALA LITH DIGIT NINE
+ {0x0DF2, 0x0DF3, prCM, gcMc}, // [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA
+ {0x0DF4, 0x0DF4, prAL, gcPo}, // SINHALA PUNCTUATION KUNDDALIYA
+ {0x0E01, 0x0E30, prSA, gcLo}, // [48] THAI CHARACTER KO KAI..THAI CHARACTER SARA A
+ {0x0E31, 0x0E31, prSA, gcMn}, // THAI CHARACTER MAI HAN-AKAT
+ {0x0E32, 0x0E33, prSA, gcLo}, // [2] THAI CHARACTER SARA AA..THAI CHARACTER SARA AM
+ {0x0E34, 0x0E3A, prSA, gcMn}, // [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU
+ {0x0E3F, 0x0E3F, prPR, gcSc}, // THAI CURRENCY SYMBOL BAHT
+ {0x0E40, 0x0E45, prSA, gcLo}, // [6] THAI CHARACTER SARA E..THAI CHARACTER LAKKHANGYAO
+ {0x0E46, 0x0E46, prSA, gcLm}, // THAI CHARACTER MAIYAMOK
+ {0x0E47, 0x0E4E, prSA, gcMn}, // [8] THAI CHARACTER MAITAIKHU..THAI CHARACTER YAMAKKAN
+ {0x0E4F, 0x0E4F, prAL, gcPo}, // THAI CHARACTER FONGMAN
+ {0x0E50, 0x0E59, prNU, gcNd}, // [10] THAI DIGIT ZERO..THAI DIGIT NINE
+ {0x0E5A, 0x0E5B, prBA, gcPo}, // [2] THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT
+ {0x0E81, 0x0E82, prSA, gcLo}, // [2] LAO LETTER KO..LAO LETTER KHO SUNG
+ {0x0E84, 0x0E84, prSA, gcLo}, // LAO LETTER KHO TAM
+ {0x0E86, 0x0E8A, prSA, gcLo}, // [5] LAO LETTER PALI GHA..LAO LETTER SO TAM
+ {0x0E8C, 0x0EA3, prSA, gcLo}, // [24] LAO LETTER PALI JHA..LAO LETTER LO LING
+ {0x0EA5, 0x0EA5, prSA, gcLo}, // LAO LETTER LO LOOT
+ {0x0EA7, 0x0EB0, prSA, gcLo}, // [10] LAO LETTER WO..LAO VOWEL SIGN A
+ {0x0EB1, 0x0EB1, prSA, gcMn}, // LAO VOWEL SIGN MAI KAN
+ {0x0EB2, 0x0EB3, prSA, gcLo}, // [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM
+ {0x0EB4, 0x0EBC, prSA, gcMn}, // [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
+ {0x0EBD, 0x0EBD, prSA, gcLo}, // LAO SEMIVOWEL SIGN NYO
+ {0x0EC0, 0x0EC4, prSA, gcLo}, // [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
+ {0x0EC6, 0x0EC6, prSA, gcLm}, // LAO KO LA
+ {0x0EC8, 0x0ECD, prSA, gcMn}, // [6] LAO TONE MAI EK..LAO NIGGAHITA
+ {0x0ED0, 0x0ED9, prNU, gcNd}, // [10] LAO DIGIT ZERO..LAO DIGIT NINE
+ {0x0EDC, 0x0EDF, prSA, gcLo}, // [4] LAO HO NO..LAO LETTER KHMU NYO
+ {0x0F00, 0x0F00, prAL, gcLo}, // TIBETAN SYLLABLE OM
+ {0x0F01, 0x0F03, prBB, gcSo}, // [3] TIBETAN MARK GTER YIG MGO TRUNCATED A..TIBETAN MARK GTER YIG MGO -UM GTER TSHEG MA
+ {0x0F04, 0x0F04, prBB, gcPo}, // TIBETAN MARK INITIAL YIG MGO MDUN MA
+ {0x0F05, 0x0F05, prAL, gcPo}, // TIBETAN MARK CLOSING YIG MGO SGAB MA
+ {0x0F06, 0x0F07, prBB, gcPo}, // [2] TIBETAN MARK CARET YIG MGO PHUR SHAD MA..TIBETAN MARK YIG MGO TSHEG SHAD MA
+ {0x0F08, 0x0F08, prGL, gcPo}, // TIBETAN MARK SBRUL SHAD
+ {0x0F09, 0x0F0A, prBB, gcPo}, // [2] TIBETAN MARK BSKUR YIG MGO..TIBETAN MARK BKA- SHOG YIG MGO
+ {0x0F0B, 0x0F0B, prBA, gcPo}, // TIBETAN MARK INTERSYLLABIC TSHEG
+ {0x0F0C, 0x0F0C, prGL, gcPo}, // TIBETAN MARK DELIMITER TSHEG BSTAR
+ {0x0F0D, 0x0F11, prEX, gcPo}, // [5] TIBETAN MARK SHAD..TIBETAN MARK RIN CHEN SPUNGS SHAD
+ {0x0F12, 0x0F12, prGL, gcPo}, // TIBETAN MARK RGYA GRAM SHAD
+ {0x0F13, 0x0F13, prAL, gcSo}, // TIBETAN MARK CARET -DZUD RTAGS ME LONG CAN
+ {0x0F14, 0x0F14, prEX, gcPo}, // TIBETAN MARK GTER TSHEG
+ {0x0F15, 0x0F17, prAL, gcSo}, // [3] TIBETAN LOGOTYPE SIGN CHAD RTAGS..TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS
+ {0x0F18, 0x0F19, prCM, gcMn}, // [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
+ {0x0F1A, 0x0F1F, prAL, gcSo}, // [6] TIBETAN SIGN RDEL DKAR GCIG..TIBETAN SIGN RDEL DKAR RDEL NAG
+ {0x0F20, 0x0F29, prNU, gcNd}, // [10] TIBETAN DIGIT ZERO..TIBETAN DIGIT NINE
+ {0x0F2A, 0x0F33, prAL, gcNo}, // [10] TIBETAN DIGIT HALF ONE..TIBETAN DIGIT HALF ZERO
+ {0x0F34, 0x0F34, prBA, gcSo}, // TIBETAN MARK BSDUS RTAGS
+ {0x0F35, 0x0F35, prCM, gcMn}, // TIBETAN MARK NGAS BZUNG NYI ZLA
+ {0x0F36, 0x0F36, prAL, gcSo}, // TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN
+ {0x0F37, 0x0F37, prCM, gcMn}, // TIBETAN MARK NGAS BZUNG SGOR RTAGS
+ {0x0F38, 0x0F38, prAL, gcSo}, // TIBETAN MARK CHE MGO
+ {0x0F39, 0x0F39, prCM, gcMn}, // TIBETAN MARK TSA -PHRU
+ {0x0F3A, 0x0F3A, prOP, gcPs}, // TIBETAN MARK GUG RTAGS GYON
+ {0x0F3B, 0x0F3B, prCL, gcPe}, // TIBETAN MARK GUG RTAGS GYAS
+ {0x0F3C, 0x0F3C, prOP, gcPs}, // TIBETAN MARK ANG KHANG GYON
+ {0x0F3D, 0x0F3D, prCL, gcPe}, // TIBETAN MARK ANG KHANG GYAS
+ {0x0F3E, 0x0F3F, prCM, gcMc}, // [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES
+ {0x0F40, 0x0F47, prAL, gcLo}, // [8] TIBETAN LETTER KA..TIBETAN LETTER JA
+ {0x0F49, 0x0F6C, prAL, gcLo}, // [36] TIBETAN LETTER NYA..TIBETAN LETTER RRA
+ {0x0F71, 0x0F7E, prCM, gcMn}, // [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO
+ {0x0F7F, 0x0F7F, prBA, gcMc}, // TIBETAN SIGN RNAM BCAD
+ {0x0F80, 0x0F84, prCM, gcMn}, // [5] TIBETAN VOWEL SIGN REVERSED I..TIBETAN MARK HALANTA
+ {0x0F85, 0x0F85, prBA, gcPo}, // TIBETAN MARK PALUTA
+ {0x0F86, 0x0F87, prCM, gcMn}, // [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS
+ {0x0F88, 0x0F8C, prAL, gcLo}, // [5] TIBETAN SIGN LCE TSA CAN..TIBETAN SIGN INVERTED MCHU CAN
+ {0x0F8D, 0x0F97, prCM, gcMn}, // [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA
+ {0x0F99, 0x0FBC, prCM, gcMn}, // [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA
+ {0x0FBE, 0x0FBF, prBA, gcSo}, // [2] TIBETAN KU RU KHA..TIBETAN KU RU KHA BZHI MIG CAN
+ {0x0FC0, 0x0FC5, prAL, gcSo}, // [6] TIBETAN CANTILLATION SIGN HEAVY BEAT..TIBETAN SYMBOL RDO RJE
+ {0x0FC6, 0x0FC6, prCM, gcMn}, // TIBETAN SYMBOL PADMA GDAN
+ {0x0FC7, 0x0FCC, prAL, gcSo}, // [6] TIBETAN SYMBOL RDO RJE RGYA GRAM..TIBETAN SYMBOL NOR BU BZHI -KHYIL
+ {0x0FCE, 0x0FCF, prAL, gcSo}, // [2] TIBETAN SIGN RDEL NAG RDEL DKAR..TIBETAN SIGN RDEL NAG GSUM
+ {0x0FD0, 0x0FD1, prBB, gcPo}, // [2] TIBETAN MARK BSKA- SHOG GI MGO RGYAN..TIBETAN MARK MNYAM YIG GI MGO RGYAN
+ {0x0FD2, 0x0FD2, prBA, gcPo}, // TIBETAN MARK NYIS TSHEG
+ {0x0FD3, 0x0FD3, prBB, gcPo}, // TIBETAN MARK INITIAL BRDA RNYING YIG MGO MDUN MA
+ {0x0FD4, 0x0FD4, prAL, gcPo}, // TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA
+ {0x0FD5, 0x0FD8, prAL, gcSo}, // [4] RIGHT-FACING SVASTI SIGN..LEFT-FACING SVASTI SIGN WITH DOTS
+ {0x0FD9, 0x0FDA, prGL, gcPo}, // [2] TIBETAN MARK LEADING MCHAN RTAGS..TIBETAN MARK TRAILING MCHAN RTAGS
+ {0x1000, 0x102A, prSA, gcLo}, // [43] MYANMAR LETTER KA..MYANMAR LETTER AU
+ {0x102B, 0x102C, prSA, gcMc}, // [2] MYANMAR VOWEL SIGN TALL AA..MYANMAR VOWEL SIGN AA
+ {0x102D, 0x1030, prSA, gcMn}, // [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU
+ {0x1031, 0x1031, prSA, gcMc}, // MYANMAR VOWEL SIGN E
+ {0x1032, 0x1037, prSA, gcMn}, // [6] MYANMAR VOWEL SIGN AI..MYANMAR SIGN DOT BELOW
+ {0x1038, 0x1038, prSA, gcMc}, // MYANMAR SIGN VISARGA
+ {0x1039, 0x103A, prSA, gcMn}, // [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT
+ {0x103B, 0x103C, prSA, gcMc}, // [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA
+ {0x103D, 0x103E, prSA, gcMn}, // [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA
+ {0x103F, 0x103F, prSA, gcLo}, // MYANMAR LETTER GREAT SA
+ {0x1040, 0x1049, prNU, gcNd}, // [10] MYANMAR DIGIT ZERO..MYANMAR DIGIT NINE
+ {0x104A, 0x104B, prBA, gcPo}, // [2] MYANMAR SIGN LITTLE SECTION..MYANMAR SIGN SECTION
+ {0x104C, 0x104F, prAL, gcPo}, // [4] MYANMAR SYMBOL LOCATIVE..MYANMAR SYMBOL GENITIVE
+ {0x1050, 0x1055, prSA, gcLo}, // [6] MYANMAR LETTER SHA..MYANMAR LETTER VOCALIC LL
+ {0x1056, 0x1057, prSA, gcMc}, // [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR
+ {0x1058, 0x1059, prSA, gcMn}, // [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL
+ {0x105A, 0x105D, prSA, gcLo}, // [4] MYANMAR LETTER MON NGA..MYANMAR LETTER MON BBE
+ {0x105E, 0x1060, prSA, gcMn}, // [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA
+ {0x1061, 0x1061, prSA, gcLo}, // MYANMAR LETTER SGAW KAREN SHA
+ {0x1062, 0x1064, prSA, gcMc}, // [3] MYANMAR VOWEL SIGN SGAW KAREN EU..MYANMAR TONE MARK SGAW KAREN KE PHO
+ {0x1065, 0x1066, prSA, gcLo}, // [2] MYANMAR LETTER WESTERN PWO KAREN THA..MYANMAR LETTER WESTERN PWO KAREN PWA
+ {0x1067, 0x106D, prSA, gcMc}, // [7] MYANMAR VOWEL SIGN WESTERN PWO KAREN EU..MYANMAR SIGN WESTERN PWO KAREN TONE-5
+ {0x106E, 0x1070, prSA, gcLo}, // [3] MYANMAR LETTER EASTERN PWO KAREN NNA..MYANMAR LETTER EASTERN PWO KAREN GHWA
+ {0x1071, 0x1074, prSA, gcMn}, // [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE
+ {0x1075, 0x1081, prSA, gcLo}, // [13] MYANMAR LETTER SHAN KA..MYANMAR LETTER SHAN HA
+ {0x1082, 0x1082, prSA, gcMn}, // MYANMAR CONSONANT SIGN SHAN MEDIAL WA
+ {0x1083, 0x1084, prSA, gcMc}, // [2] MYANMAR VOWEL SIGN SHAN AA..MYANMAR VOWEL SIGN SHAN E
+ {0x1085, 0x1086, prSA, gcMn}, // [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y
+ {0x1087, 0x108C, prSA, gcMc}, // [6] MYANMAR SIGN SHAN TONE-2..MYANMAR SIGN SHAN COUNCIL TONE-3
+ {0x108D, 0x108D, prSA, gcMn}, // MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE
+ {0x108E, 0x108E, prSA, gcLo}, // MYANMAR LETTER RUMAI PALAUNG FA
+ {0x108F, 0x108F, prSA, gcMc}, // MYANMAR SIGN RUMAI PALAUNG TONE-5
+ {0x1090, 0x1099, prNU, gcNd}, // [10] MYANMAR SHAN DIGIT ZERO..MYANMAR SHAN DIGIT NINE
+ {0x109A, 0x109C, prSA, gcMc}, // [3] MYANMAR SIGN KHAMTI TONE-1..MYANMAR VOWEL SIGN AITON A
+ {0x109D, 0x109D, prSA, gcMn}, // MYANMAR VOWEL SIGN AITON AI
+ {0x109E, 0x109F, prSA, gcSo}, // [2] MYANMAR SYMBOL SHAN ONE..MYANMAR SYMBOL SHAN EXCLAMATION
+ {0x10A0, 0x10C5, prAL, gcLu}, // [38] GEORGIAN CAPITAL LETTER AN..GEORGIAN CAPITAL LETTER HOE
+ {0x10C7, 0x10C7, prAL, gcLu}, // GEORGIAN CAPITAL LETTER YN
+ {0x10CD, 0x10CD, prAL, gcLu}, // GEORGIAN CAPITAL LETTER AEN
+ {0x10D0, 0x10FA, prAL, gcLl}, // [43] GEORGIAN LETTER AN..GEORGIAN LETTER AIN
+ {0x10FB, 0x10FB, prAL, gcPo}, // GEORGIAN PARAGRAPH SEPARATOR
+ {0x10FC, 0x10FC, prAL, gcLm}, // MODIFIER LETTER GEORGIAN NAR
+ {0x10FD, 0x10FF, prAL, gcLl}, // [3] GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN
+ {0x1100, 0x115F, prJL, gcLo}, // [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER
+ {0x1160, 0x11A7, prJV, gcLo}, // [72] HANGUL JUNGSEONG FILLER..HANGUL JUNGSEONG O-YAE
+ {0x11A8, 0x11FF, prJT, gcLo}, // [88] HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG SSANGNIEUN
+ {0x1200, 0x1248, prAL, gcLo}, // [73] ETHIOPIC SYLLABLE HA..ETHIOPIC SYLLABLE QWA
+ {0x124A, 0x124D, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE
+ {0x1250, 0x1256, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SYLLABLE QHO
+ {0x1258, 0x1258, prAL, gcLo}, // ETHIOPIC SYLLABLE QHWA
+ {0x125A, 0x125D, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC SYLLABLE QHWE
+ {0x1260, 0x1288, prAL, gcLo}, // [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYLLABLE XWA
+ {0x128A, 0x128D, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SYLLABLE XWE
+ {0x1290, 0x12B0, prAL, gcLo}, // [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYLLABLE KWA
+ {0x12B2, 0x12B5, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SYLLABLE KWE
+ {0x12B8, 0x12BE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SYLLABLE KXO
+ {0x12C0, 0x12C0, prAL, gcLo}, // ETHIOPIC SYLLABLE KXWA
+ {0x12C2, 0x12C5, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC SYLLABLE KXWE
+ {0x12C8, 0x12D6, prAL, gcLo}, // [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYLLABLE PHARYNGEAL O
+ {0x12D8, 0x1310, prAL, gcLo}, // [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYLLABLE GWA
+ {0x1312, 0x1315, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SYLLABLE GWE
+ {0x1318, 0x135A, prAL, gcLo}, // [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SYLLABLE FYA
+ {0x135D, 0x135F, prCM, gcMn}, // [3] ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING GEMINATION MARK
+ {0x1360, 0x1360, prAL, gcPo}, // ETHIOPIC SECTION MARK
+ {0x1361, 0x1361, prBA, gcPo}, // ETHIOPIC WORDSPACE
+ {0x1362, 0x1368, prAL, gcPo}, // [7] ETHIOPIC FULL STOP..ETHIOPIC PARAGRAPH SEPARATOR
+ {0x1369, 0x137C, prAL, gcNo}, // [20] ETHIOPIC DIGIT ONE..ETHIOPIC NUMBER TEN THOUSAND
+ {0x1380, 0x138F, prAL, gcLo}, // [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..ETHIOPIC SYLLABLE PWE
+ {0x1390, 0x1399, prAL, gcSo}, // [10] ETHIOPIC TONAL MARK YIZET..ETHIOPIC TONAL MARK KURT
+ {0x13A0, 0x13F5, prAL, gcLu}, // [86] CHEROKEE LETTER A..CHEROKEE LETTER MV
+ {0x13F8, 0x13FD, prAL, gcLl}, // [6] CHEROKEE SMALL LETTER YE..CHEROKEE SMALL LETTER MV
+ {0x1400, 0x1400, prBA, gcPd}, // CANADIAN SYLLABICS HYPHEN
+ {0x1401, 0x166C, prAL, gcLo}, // [620] CANADIAN SYLLABICS E..CANADIAN SYLLABICS CARRIER TTSA
+ {0x166D, 0x166D, prAL, gcSo}, // CANADIAN SYLLABICS CHI SIGN
+ {0x166E, 0x166E, prAL, gcPo}, // CANADIAN SYLLABICS FULL STOP
+ {0x166F, 0x167F, prAL, gcLo}, // [17] CANADIAN SYLLABICS QAI..CANADIAN SYLLABICS BLACKFOOT W
+ {0x1680, 0x1680, prBA, gcZs}, // OGHAM SPACE MARK
+ {0x1681, 0x169A, prAL, gcLo}, // [26] OGHAM LETTER BEITH..OGHAM LETTER PEITH
+ {0x169B, 0x169B, prOP, gcPs}, // OGHAM FEATHER MARK
+ {0x169C, 0x169C, prCL, gcPe}, // OGHAM REVERSED FEATHER MARK
+ {0x16A0, 0x16EA, prAL, gcLo}, // [75] RUNIC LETTER FEHU FEOH FE F..RUNIC LETTER X
+ {0x16EB, 0x16ED, prBA, gcPo}, // [3] RUNIC SINGLE PUNCTUATION..RUNIC CROSS PUNCTUATION
+ {0x16EE, 0x16F0, prAL, gcNl}, // [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHOR SYMBOL
+ {0x16F1, 0x16F8, prAL, gcLo}, // [8] RUNIC LETTER K..RUNIC LETTER FRANKS CASKET AESC
+ {0x1700, 0x1711, prAL, gcLo}, // [18] TAGALOG LETTER A..TAGALOG LETTER HA
+ {0x1712, 0x1714, prCM, gcMn}, // [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN VIRAMA
+ {0x1715, 0x1715, prCM, gcMc}, // TAGALOG SIGN PAMUDPOD
+ {0x171F, 0x171F, prAL, gcLo}, // TAGALOG LETTER ARCHAIC RA
+ {0x1720, 0x1731, prAL, gcLo}, // [18] HANUNOO LETTER A..HANUNOO LETTER HA
+ {0x1732, 0x1733, prCM, gcMn}, // [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U
+ {0x1734, 0x1734, prCM, gcMc}, // HANUNOO SIGN PAMUDPOD
+ {0x1735, 0x1736, prBA, gcPo}, // [2] PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION
+ {0x1740, 0x1751, prAL, gcLo}, // [18] BUHID LETTER A..BUHID LETTER HA
+ {0x1752, 0x1753, prCM, gcMn}, // [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U
+ {0x1760, 0x176C, prAL, gcLo}, // [13] TAGBANWA LETTER A..TAGBANWA LETTER YA
+ {0x176E, 0x1770, prAL, gcLo}, // [3] TAGBANWA LETTER LA..TAGBANWA LETTER SA
+ {0x1772, 0x1773, prCM, gcMn}, // [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U
+ {0x1780, 0x17B3, prSA, gcLo}, // [52] KHMER LETTER KA..KHMER INDEPENDENT VOWEL QAU
+ {0x17B4, 0x17B5, prSA, gcMn}, // [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA
+ {0x17B6, 0x17B6, prSA, gcMc}, // KHMER VOWEL SIGN AA
+ {0x17B7, 0x17BD, prSA, gcMn}, // [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA
+ {0x17BE, 0x17C5, prSA, gcMc}, // [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU
+ {0x17C6, 0x17C6, prSA, gcMn}, // KHMER SIGN NIKAHIT
+ {0x17C7, 0x17C8, prSA, gcMc}, // [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU
+ {0x17C9, 0x17D3, prSA, gcMn}, // [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT
+ {0x17D4, 0x17D5, prBA, gcPo}, // [2] KHMER SIGN KHAN..KHMER SIGN BARIYOOSAN
+ {0x17D6, 0x17D6, prNS, gcPo}, // KHMER SIGN CAMNUC PII KUUH
+ {0x17D7, 0x17D7, prSA, gcLm}, // KHMER SIGN LEK TOO
+ {0x17D8, 0x17D8, prBA, gcPo}, // KHMER SIGN BEYYAL
+ {0x17D9, 0x17D9, prAL, gcPo}, // KHMER SIGN PHNAEK MUAN
+ {0x17DA, 0x17DA, prBA, gcPo}, // KHMER SIGN KOOMUUT
+ {0x17DB, 0x17DB, prPR, gcSc}, // KHMER CURRENCY SYMBOL RIEL
+ {0x17DC, 0x17DC, prSA, gcLo}, // KHMER SIGN AVAKRAHASANYA
+ {0x17DD, 0x17DD, prSA, gcMn}, // KHMER SIGN ATTHACAN
+ {0x17E0, 0x17E9, prNU, gcNd}, // [10] KHMER DIGIT ZERO..KHMER DIGIT NINE
+ {0x17F0, 0x17F9, prAL, gcNo}, // [10] KHMER SYMBOL LEK ATTAK SON..KHMER SYMBOL LEK ATTAK PRAM-BUON
+ {0x1800, 0x1801, prAL, gcPo}, // [2] MONGOLIAN BIRGA..MONGOLIAN ELLIPSIS
+ {0x1802, 0x1803, prEX, gcPo}, // [2] MONGOLIAN COMMA..MONGOLIAN FULL STOP
+ {0x1804, 0x1805, prBA, gcPo}, // [2] MONGOLIAN COLON..MONGOLIAN FOUR DOTS
+ {0x1806, 0x1806, prBB, gcPd}, // MONGOLIAN TODO SOFT HYPHEN
+ {0x1807, 0x1807, prAL, gcPo}, // MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER
+ {0x1808, 0x1809, prEX, gcPo}, // [2] MONGOLIAN MANCHU COMMA..MONGOLIAN MANCHU FULL STOP
+ {0x180A, 0x180A, prAL, gcPo}, // MONGOLIAN NIRUGU
+ {0x180B, 0x180D, prCM, gcMn}, // [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE
+ {0x180E, 0x180E, prGL, gcCf}, // MONGOLIAN VOWEL SEPARATOR
+ {0x180F, 0x180F, prCM, gcMn}, // MONGOLIAN FREE VARIATION SELECTOR FOUR
+ {0x1810, 0x1819, prNU, gcNd}, // [10] MONGOLIAN DIGIT ZERO..MONGOLIAN DIGIT NINE
+ {0x1820, 0x1842, prAL, gcLo}, // [35] MONGOLIAN LETTER A..MONGOLIAN LETTER CHI
+ {0x1843, 0x1843, prAL, gcLm}, // MONGOLIAN LETTER TODO LONG VOWEL SIGN
+ {0x1844, 0x1878, prAL, gcLo}, // [53] MONGOLIAN LETTER TODO E..MONGOLIAN LETTER CHA WITH TWO DOTS
+ {0x1880, 0x1884, prAL, gcLo}, // [5] MONGOLIAN LETTER ALI GALI ANUSVARA ONE..MONGOLIAN LETTER ALI GALI INVERTED UBADAMA
+ {0x1885, 0x1886, prCM, gcMn}, // [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA
+ {0x1887, 0x18A8, prAL, gcLo}, // [34] MONGOLIAN LETTER ALI GALI A..MONGOLIAN LETTER MANCHU ALI GALI BHA
+ {0x18A9, 0x18A9, prCM, gcMn}, // MONGOLIAN LETTER ALI GALI DAGALGA
+ {0x18AA, 0x18AA, prAL, gcLo}, // MONGOLIAN LETTER MANCHU ALI GALI LHA
+ {0x18B0, 0x18F5, prAL, gcLo}, // [70] CANADIAN SYLLABICS OY..CANADIAN SYLLABICS CARRIER DENTAL S
+ {0x1900, 0x191E, prAL, gcLo}, // [31] LIMBU VOWEL-CARRIER LETTER..LIMBU LETTER TRA
+ {0x1920, 0x1922, prCM, gcMn}, // [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U
+ {0x1923, 0x1926, prCM, gcMc}, // [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU
+ {0x1927, 0x1928, prCM, gcMn}, // [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O
+ {0x1929, 0x192B, prCM, gcMc}, // [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA
+ {0x1930, 0x1931, prCM, gcMc}, // [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA
+ {0x1932, 0x1932, prCM, gcMn}, // LIMBU SMALL LETTER ANUSVARA
+ {0x1933, 0x1938, prCM, gcMc}, // [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA
+ {0x1939, 0x193B, prCM, gcMn}, // [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
+ {0x1940, 0x1940, prAL, gcSo}, // LIMBU SIGN LOO
+ {0x1944, 0x1945, prEX, gcPo}, // [2] LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK
+ {0x1946, 0x194F, prNU, gcNd}, // [10] LIMBU DIGIT ZERO..LIMBU DIGIT NINE
+ {0x1950, 0x196D, prSA, gcLo}, // [30] TAI LE LETTER KA..TAI LE LETTER AI
+ {0x1970, 0x1974, prSA, gcLo}, // [5] TAI LE LETTER TONE-2..TAI LE LETTER TONE-6
+ {0x1980, 0x19AB, prSA, gcLo}, // [44] NEW TAI LUE LETTER HIGH QA..NEW TAI LUE LETTER LOW SUA
+ {0x19B0, 0x19C9, prSA, gcLo}, // [26] NEW TAI LUE VOWEL SIGN VOWEL SHORTENER..NEW TAI LUE TONE MARK-2
+ {0x19D0, 0x19D9, prNU, gcNd}, // [10] NEW TAI LUE DIGIT ZERO..NEW TAI LUE DIGIT NINE
+ {0x19DA, 0x19DA, prSA, gcNo}, // NEW TAI LUE THAM DIGIT ONE
+ {0x19DE, 0x19DF, prSA, gcSo}, // [2] NEW TAI LUE SIGN LAE..NEW TAI LUE SIGN LAEV
+ {0x19E0, 0x19FF, prAL, gcSo}, // [32] KHMER SYMBOL PATHAMASAT..KHMER SYMBOL DAP-PRAM ROC
+ {0x1A00, 0x1A16, prAL, gcLo}, // [23] BUGINESE LETTER KA..BUGINESE LETTER HA
+ {0x1A17, 0x1A18, prCM, gcMn}, // [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U
+ {0x1A19, 0x1A1A, prCM, gcMc}, // [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O
+ {0x1A1B, 0x1A1B, prCM, gcMn}, // BUGINESE VOWEL SIGN AE
+ {0x1A1E, 0x1A1F, prAL, gcPo}, // [2] BUGINESE PALLAWA..BUGINESE END OF SECTION
+ {0x1A20, 0x1A54, prSA, gcLo}, // [53] TAI THAM LETTER HIGH KA..TAI THAM LETTER GREAT SA
+ {0x1A55, 0x1A55, prSA, gcMc}, // TAI THAM CONSONANT SIGN MEDIAL RA
+ {0x1A56, 0x1A56, prSA, gcMn}, // TAI THAM CONSONANT SIGN MEDIAL LA
+ {0x1A57, 0x1A57, prSA, gcMc}, // TAI THAM CONSONANT SIGN LA TANG LAI
+ {0x1A58, 0x1A5E, prSA, gcMn}, // [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA
+ {0x1A60, 0x1A60, prSA, gcMn}, // TAI THAM SIGN SAKOT
+ {0x1A61, 0x1A61, prSA, gcMc}, // TAI THAM VOWEL SIGN A
+ {0x1A62, 0x1A62, prSA, gcMn}, // TAI THAM VOWEL SIGN MAI SAT
+ {0x1A63, 0x1A64, prSA, gcMc}, // [2] TAI THAM VOWEL SIGN AA..TAI THAM VOWEL SIGN TALL AA
+ {0x1A65, 0x1A6C, prSA, gcMn}, // [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW
+ {0x1A6D, 0x1A72, prSA, gcMc}, // [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI
+ {0x1A73, 0x1A7C, prSA, gcMn}, // [10] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN KHUEN-LUE KARAN
+ {0x1A7F, 0x1A7F, prCM, gcMn}, // TAI THAM COMBINING CRYPTOGRAMMIC DOT
+ {0x1A80, 0x1A89, prNU, gcNd}, // [10] TAI THAM HORA DIGIT ZERO..TAI THAM HORA DIGIT NINE
+ {0x1A90, 0x1A99, prNU, gcNd}, // [10] TAI THAM THAM DIGIT ZERO..TAI THAM THAM DIGIT NINE
+ {0x1AA0, 0x1AA6, prSA, gcPo}, // [7] TAI THAM SIGN WIANG..TAI THAM SIGN REVERSED ROTATED RANA
+ {0x1AA7, 0x1AA7, prSA, gcLm}, // TAI THAM SIGN MAI YAMOK
+ {0x1AA8, 0x1AAD, prSA, gcPo}, // [6] TAI THAM SIGN KAAN..TAI THAM SIGN CAANG
+ {0x1AB0, 0x1ABD, prCM, gcMn}, // [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW
+ {0x1ABE, 0x1ABE, prCM, gcMe}, // COMBINING PARENTHESES OVERLAY
+ {0x1ABF, 0x1ACE, prCM, gcMn}, // [16] COMBINING LATIN SMALL LETTER W BELOW..COMBINING LATIN SMALL LETTER INSULAR T
+ {0x1B00, 0x1B03, prCM, gcMn}, // [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG
+ {0x1B04, 0x1B04, prCM, gcMc}, // BALINESE SIGN BISAH
+ {0x1B05, 0x1B33, prAL, gcLo}, // [47] BALINESE LETTER AKARA..BALINESE LETTER HA
+ {0x1B34, 0x1B34, prCM, gcMn}, // BALINESE SIGN REREKAN
+ {0x1B35, 0x1B35, prCM, gcMc}, // BALINESE VOWEL SIGN TEDUNG
+ {0x1B36, 0x1B3A, prCM, gcMn}, // [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA
+ {0x1B3B, 0x1B3B, prCM, gcMc}, // BALINESE VOWEL SIGN RA REPA TEDUNG
+ {0x1B3C, 0x1B3C, prCM, gcMn}, // BALINESE VOWEL SIGN LA LENGA
+ {0x1B3D, 0x1B41, prCM, gcMc}, // [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG
+ {0x1B42, 0x1B42, prCM, gcMn}, // BALINESE VOWEL SIGN PEPET
+ {0x1B43, 0x1B44, prCM, gcMc}, // [2] BALINESE VOWEL SIGN PEPET TEDUNG..BALINESE ADEG ADEG
+ {0x1B45, 0x1B4C, prAL, gcLo}, // [8] BALINESE LETTER KAF SASAK..BALINESE LETTER ARCHAIC JNYA
+ {0x1B50, 0x1B59, prNU, gcNd}, // [10] BALINESE DIGIT ZERO..BALINESE DIGIT NINE
+ {0x1B5A, 0x1B5B, prBA, gcPo}, // [2] BALINESE PANTI..BALINESE PAMADA
+ {0x1B5C, 0x1B5C, prAL, gcPo}, // BALINESE WINDU
+ {0x1B5D, 0x1B60, prBA, gcPo}, // [4] BALINESE CARIK PAMUNGKAH..BALINESE PAMENENG
+ {0x1B61, 0x1B6A, prAL, gcSo}, // [10] BALINESE MUSICAL SYMBOL DONG..BALINESE MUSICAL SYMBOL DANG GEDE
+ {0x1B6B, 0x1B73, prCM, gcMn}, // [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG
+ {0x1B74, 0x1B7C, prAL, gcSo}, // [9] BALINESE MUSICAL SYMBOL RIGHT-HAND OPEN DUG..BALINESE MUSICAL SYMBOL LEFT-HAND OPEN PING
+ {0x1B7D, 0x1B7E, prBA, gcPo}, // [2] BALINESE PANTI LANTANG..BALINESE PAMADA LANTANG
+ {0x1B80, 0x1B81, prCM, gcMn}, // [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR
+ {0x1B82, 0x1B82, prCM, gcMc}, // SUNDANESE SIGN PANGWISAD
+ {0x1B83, 0x1BA0, prAL, gcLo}, // [30] SUNDANESE LETTER A..SUNDANESE LETTER HA
+ {0x1BA1, 0x1BA1, prCM, gcMc}, // SUNDANESE CONSONANT SIGN PAMINGKAL
+ {0x1BA2, 0x1BA5, prCM, gcMn}, // [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU
+ {0x1BA6, 0x1BA7, prCM, gcMc}, // [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG
+ {0x1BA8, 0x1BA9, prCM, gcMn}, // [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG
+ {0x1BAA, 0x1BAA, prCM, gcMc}, // SUNDANESE SIGN PAMAAEH
+ {0x1BAB, 0x1BAD, prCM, gcMn}, // [3] SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA
+ {0x1BAE, 0x1BAF, prAL, gcLo}, // [2] SUNDANESE LETTER KHA..SUNDANESE LETTER SYA
+ {0x1BB0, 0x1BB9, prNU, gcNd}, // [10] SUNDANESE DIGIT ZERO..SUNDANESE DIGIT NINE
+ {0x1BBA, 0x1BBF, prAL, gcLo}, // [6] SUNDANESE AVAGRAHA..SUNDANESE LETTER FINAL M
+ {0x1BC0, 0x1BE5, prAL, gcLo}, // [38] BATAK LETTER A..BATAK LETTER U
+ {0x1BE6, 0x1BE6, prCM, gcMn}, // BATAK SIGN TOMPI
+ {0x1BE7, 0x1BE7, prCM, gcMc}, // BATAK VOWEL SIGN E
+ {0x1BE8, 0x1BE9, prCM, gcMn}, // [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE
+ {0x1BEA, 0x1BEC, prCM, gcMc}, // [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O
+ {0x1BED, 0x1BED, prCM, gcMn}, // BATAK VOWEL SIGN KARO O
+ {0x1BEE, 0x1BEE, prCM, gcMc}, // BATAK VOWEL SIGN U
+ {0x1BEF, 0x1BF1, prCM, gcMn}, // [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H
+ {0x1BF2, 0x1BF3, prCM, gcMc}, // [2] BATAK PANGOLAT..BATAK PANONGONAN
+ {0x1BFC, 0x1BFF, prAL, gcPo}, // [4] BATAK SYMBOL BINDU NA METEK..BATAK SYMBOL BINDU PANGOLAT
+ {0x1C00, 0x1C23, prAL, gcLo}, // [36] LEPCHA LETTER KA..LEPCHA LETTER A
+ {0x1C24, 0x1C2B, prCM, gcMc}, // [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU
+ {0x1C2C, 0x1C33, prCM, gcMn}, // [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T
+ {0x1C34, 0x1C35, prCM, gcMc}, // [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG
+ {0x1C36, 0x1C37, prCM, gcMn}, // [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
+ {0x1C3B, 0x1C3F, prBA, gcPo}, // [5] LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK
+ {0x1C40, 0x1C49, prNU, gcNd}, // [10] LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE
+ {0x1C4D, 0x1C4F, prAL, gcLo}, // [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA
+ {0x1C50, 0x1C59, prNU, gcNd}, // [10] OL CHIKI DIGIT ZERO..OL CHIKI DIGIT NINE
+ {0x1C5A, 0x1C77, prAL, gcLo}, // [30] OL CHIKI LETTER LA..OL CHIKI LETTER OH
+ {0x1C78, 0x1C7D, prAL, gcLm}, // [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD
+ {0x1C7E, 0x1C7F, prBA, gcPo}, // [2] OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD
+ {0x1C80, 0x1C88, prAL, gcLl}, // [9] CYRILLIC SMALL LETTER ROUNDED VE..CYRILLIC SMALL LETTER UNBLENDED UK
+ {0x1C90, 0x1CBA, prAL, gcLu}, // [43] GEORGIAN MTAVRULI CAPITAL LETTER AN..GEORGIAN MTAVRULI CAPITAL LETTER AIN
+ {0x1CBD, 0x1CBF, prAL, gcLu}, // [3] GEORGIAN MTAVRULI CAPITAL LETTER AEN..GEORGIAN MTAVRULI CAPITAL LETTER LABIAL SIGN
+ {0x1CC0, 0x1CC7, prAL, gcPo}, // [8] SUNDANESE PUNCTUATION BINDU SURYA..SUNDANESE PUNCTUATION BINDU BA SATANGA
+ {0x1CD0, 0x1CD2, prCM, gcMn}, // [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+ {0x1CD3, 0x1CD3, prAL, gcPo}, // VEDIC SIGN NIHSHVASA
+ {0x1CD4, 0x1CE0, prCM, gcMn}, // [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
+ {0x1CE1, 0x1CE1, prCM, gcMc}, // VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
+ {0x1CE2, 0x1CE8, prCM, gcMn}, // [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL
+ {0x1CE9, 0x1CEC, prAL, gcLo}, // [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA..VEDIC SIGN ANUSVARA VAMAGOMUKHA WITH TAIL
+ {0x1CED, 0x1CED, prCM, gcMn}, // VEDIC SIGN TIRYAK
+ {0x1CEE, 0x1CF3, prAL, gcLo}, // [6] VEDIC SIGN HEXIFORM LONG ANUSVARA..VEDIC SIGN ROTATED ARDHAVISARGA
+ {0x1CF4, 0x1CF4, prCM, gcMn}, // VEDIC TONE CANDRA ABOVE
+ {0x1CF5, 0x1CF6, prAL, gcLo}, // [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN UPADHMANIYA
+ {0x1CF7, 0x1CF7, prCM, gcMc}, // VEDIC SIGN ATIKRAMA
+ {0x1CF8, 0x1CF9, prCM, gcMn}, // [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+ {0x1CFA, 0x1CFA, prAL, gcLo}, // VEDIC SIGN DOUBLE ANUSVARA ANTARGOMUKHA
+ {0x1D00, 0x1D2B, prAL, gcLl}, // [44] LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL
+ {0x1D2C, 0x1D6A, prAL, gcLm}, // [63] MODIFIER LETTER CAPITAL A..GREEK SUBSCRIPT SMALL LETTER CHI
+ {0x1D6B, 0x1D77, prAL, gcLl}, // [13] LATIN SMALL LETTER UE..LATIN SMALL LETTER TURNED G
+ {0x1D78, 0x1D78, prAL, gcLm}, // MODIFIER LETTER CYRILLIC EN
+ {0x1D79, 0x1D7F, prAL, gcLl}, // [7] LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER UPSILON WITH STROKE
+ {0x1D80, 0x1D9A, prAL, gcLl}, // [27] LATIN SMALL LETTER B WITH PALATAL HOOK..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK
+ {0x1D9B, 0x1DBF, prAL, gcLm}, // [37] MODIFIER LETTER SMALL TURNED ALPHA..MODIFIER LETTER SMALL THETA
+ {0x1DC0, 0x1DFF, prCM, gcMn}, // [64] COMBINING DOTTED GRAVE ACCENT..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x1E00, 0x1EFF, prAL, gcLC}, // [256] LATIN CAPITAL LETTER A WITH RING BELOW..LATIN SMALL LETTER Y WITH LOOP
+ {0x1F00, 0x1F15, prAL, gcLC}, // [22] GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F18, 0x1F1D, prAL, gcLu}, // [6] GREEK CAPITAL LETTER EPSILON WITH PSILI..GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F20, 0x1F45, prAL, gcLC}, // [38] GREEK SMALL LETTER ETA WITH PSILI..GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F48, 0x1F4D, prAL, gcLu}, // [6] GREEK CAPITAL LETTER OMICRON WITH PSILI..GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F50, 0x1F57, prAL, gcLl}, // [8] GREEK SMALL LETTER UPSILON WITH PSILI..GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI
+ {0x1F59, 0x1F59, prAL, gcLu}, // GREEK CAPITAL LETTER UPSILON WITH DASIA
+ {0x1F5B, 0x1F5B, prAL, gcLu}, // GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA
+ {0x1F5D, 0x1F5D, prAL, gcLu}, // GREEK CAPITAL LETTER UPSILON WITH DASIA AND OXIA
+ {0x1F5F, 0x1F7D, prAL, gcLC}, // [31] GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI..GREEK SMALL LETTER OMEGA WITH OXIA
+ {0x1F80, 0x1FB4, prAL, gcLC}, // [53] GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI..GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FB6, 0x1FBC, prAL, gcLC}, // [7] GREEK SMALL LETTER ALPHA WITH PERISPOMENI..GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
+ {0x1FBD, 0x1FBD, prAL, gcSk}, // GREEK KORONIS
+ {0x1FBE, 0x1FBE, prAL, gcLl}, // GREEK PROSGEGRAMMENI
+ {0x1FBF, 0x1FC1, prAL, gcSk}, // [3] GREEK PSILI..GREEK DIALYTIKA AND PERISPOMENI
+ {0x1FC2, 0x1FC4, prAL, gcLl}, // [3] GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FC6, 0x1FCC, prAL, gcLC}, // [7] GREEK SMALL LETTER ETA WITH PERISPOMENI..GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
+ {0x1FCD, 0x1FCF, prAL, gcSk}, // [3] GREEK PSILI AND VARIA..GREEK PSILI AND PERISPOMENI
+ {0x1FD0, 0x1FD3, prAL, gcLl}, // [4] GREEK SMALL LETTER IOTA WITH VRACHY..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
+ {0x1FD6, 0x1FDB, prAL, gcLC}, // [6] GREEK SMALL LETTER IOTA WITH PERISPOMENI..GREEK CAPITAL LETTER IOTA WITH OXIA
+ {0x1FDD, 0x1FDF, prAL, gcSk}, // [3] GREEK DASIA AND VARIA..GREEK DASIA AND PERISPOMENI
+ {0x1FE0, 0x1FEC, prAL, gcLC}, // [13] GREEK SMALL LETTER UPSILON WITH VRACHY..GREEK CAPITAL LETTER RHO WITH DASIA
+ {0x1FED, 0x1FEF, prAL, gcSk}, // [3] GREEK DIALYTIKA AND VARIA..GREEK VARIA
+ {0x1FF2, 0x1FF4, prAL, gcLl}, // [3] GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FF6, 0x1FFC, prAL, gcLC}, // [7] GREEK SMALL LETTER OMEGA WITH PERISPOMENI..GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
+ {0x1FFD, 0x1FFD, prBB, gcSk}, // GREEK OXIA
+ {0x1FFE, 0x1FFE, prAL, gcSk}, // GREEK DASIA
+ {0x2000, 0x2006, prBA, gcZs}, // [7] EN QUAD..SIX-PER-EM SPACE
+ {0x2007, 0x2007, prGL, gcZs}, // FIGURE SPACE
+ {0x2008, 0x200A, prBA, gcZs}, // [3] PUNCTUATION SPACE..HAIR SPACE
+ {0x200B, 0x200B, prZW, gcCf}, // ZERO WIDTH SPACE
+ {0x200C, 0x200C, prCM, gcCf}, // ZERO WIDTH NON-JOINER
+ {0x200D, 0x200D, prZWJ, gcCf}, // ZERO WIDTH JOINER
+ {0x200E, 0x200F, prCM, gcCf}, // [2] LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK
+ {0x2010, 0x2010, prBA, gcPd}, // HYPHEN
+ {0x2011, 0x2011, prGL, gcPd}, // NON-BREAKING HYPHEN
+ {0x2012, 0x2013, prBA, gcPd}, // [2] FIGURE DASH..EN DASH
+ {0x2014, 0x2014, prB2, gcPd}, // EM DASH
+ {0x2015, 0x2015, prAI, gcPd}, // HORIZONTAL BAR
+ {0x2016, 0x2016, prAI, gcPo}, // DOUBLE VERTICAL LINE
+ {0x2017, 0x2017, prAL, gcPo}, // DOUBLE LOW LINE
+ {0x2018, 0x2018, prQU, gcPi}, // LEFT SINGLE QUOTATION MARK
+ {0x2019, 0x2019, prQU, gcPf}, // RIGHT SINGLE QUOTATION MARK
+ {0x201A, 0x201A, prOP, gcPs}, // SINGLE LOW-9 QUOTATION MARK
+ {0x201B, 0x201C, prQU, gcPi}, // [2] SINGLE HIGH-REVERSED-9 QUOTATION MARK..LEFT DOUBLE QUOTATION MARK
+ {0x201D, 0x201D, prQU, gcPf}, // RIGHT DOUBLE QUOTATION MARK
+ {0x201E, 0x201E, prOP, gcPs}, // DOUBLE LOW-9 QUOTATION MARK
+ {0x201F, 0x201F, prQU, gcPi}, // DOUBLE HIGH-REVERSED-9 QUOTATION MARK
+ {0x2020, 0x2021, prAI, gcPo}, // [2] DAGGER..DOUBLE DAGGER
+ {0x2022, 0x2023, prAL, gcPo}, // [2] BULLET..TRIANGULAR BULLET
+ {0x2024, 0x2026, prIN, gcPo}, // [3] ONE DOT LEADER..HORIZONTAL ELLIPSIS
+ {0x2027, 0x2027, prBA, gcPo}, // HYPHENATION POINT
+ {0x2028, 0x2028, prBK, gcZl}, // LINE SEPARATOR
+ {0x2029, 0x2029, prBK, gcZp}, // PARAGRAPH SEPARATOR
+ {0x202A, 0x202E, prCM, gcCf}, // [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE
+ {0x202F, 0x202F, prGL, gcZs}, // NARROW NO-BREAK SPACE
+ {0x2030, 0x2037, prPO, gcPo}, // [8] PER MILLE SIGN..REVERSED TRIPLE PRIME
+ {0x2038, 0x2038, prAL, gcPo}, // CARET
+ {0x2039, 0x2039, prQU, gcPi}, // SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+ {0x203A, 0x203A, prQU, gcPf}, // SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+ {0x203B, 0x203B, prAI, gcPo}, // REFERENCE MARK
+ {0x203C, 0x203D, prNS, gcPo}, // [2] DOUBLE EXCLAMATION MARK..INTERROBANG
+ {0x203E, 0x203E, prAL, gcPo}, // OVERLINE
+ {0x203F, 0x2040, prAL, gcPc}, // [2] UNDERTIE..CHARACTER TIE
+ {0x2041, 0x2043, prAL, gcPo}, // [3] CARET INSERTION POINT..HYPHEN BULLET
+ {0x2044, 0x2044, prIS, gcSm}, // FRACTION SLASH
+ {0x2045, 0x2045, prOP, gcPs}, // LEFT SQUARE BRACKET WITH QUILL
+ {0x2046, 0x2046, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH QUILL
+ {0x2047, 0x2049, prNS, gcPo}, // [3] DOUBLE QUESTION MARK..EXCLAMATION QUESTION MARK
+ {0x204A, 0x2051, prAL, gcPo}, // [8] TIRONIAN SIGN ET..TWO ASTERISKS ALIGNED VERTICALLY
+ {0x2052, 0x2052, prAL, gcSm}, // COMMERCIAL MINUS SIGN
+ {0x2053, 0x2053, prAL, gcPo}, // SWUNG DASH
+ {0x2054, 0x2054, prAL, gcPc}, // INVERTED UNDERTIE
+ {0x2055, 0x2055, prAL, gcPo}, // FLOWER PUNCTUATION MARK
+ {0x2056, 0x2056, prBA, gcPo}, // THREE DOT PUNCTUATION
+ {0x2057, 0x2057, prAL, gcPo}, // QUADRUPLE PRIME
+ {0x2058, 0x205B, prBA, gcPo}, // [4] FOUR DOT PUNCTUATION..FOUR DOT MARK
+ {0x205C, 0x205C, prAL, gcPo}, // DOTTED CROSS
+ {0x205D, 0x205E, prBA, gcPo}, // [2] TRICOLON..VERTICAL FOUR DOTS
+ {0x205F, 0x205F, prBA, gcZs}, // MEDIUM MATHEMATICAL SPACE
+ {0x2060, 0x2060, prWJ, gcCf}, // WORD JOINER
+ {0x2061, 0x2064, prAL, gcCf}, // [4] FUNCTION APPLICATION..INVISIBLE PLUS
+ {0x2066, 0x206F, prCM, gcCf}, // [10] LEFT-TO-RIGHT ISOLATE..NOMINAL DIGIT SHAPES
+ {0x2070, 0x2070, prAL, gcNo}, // SUPERSCRIPT ZERO
+ {0x2071, 0x2071, prAL, gcLm}, // SUPERSCRIPT LATIN SMALL LETTER I
+ {0x2074, 0x2074, prAI, gcNo}, // SUPERSCRIPT FOUR
+ {0x2075, 0x2079, prAL, gcNo}, // [5] SUPERSCRIPT FIVE..SUPERSCRIPT NINE
+ {0x207A, 0x207C, prAL, gcSm}, // [3] SUPERSCRIPT PLUS SIGN..SUPERSCRIPT EQUALS SIGN
+ {0x207D, 0x207D, prOP, gcPs}, // SUPERSCRIPT LEFT PARENTHESIS
+ {0x207E, 0x207E, prCL, gcPe}, // SUPERSCRIPT RIGHT PARENTHESIS
+ {0x207F, 0x207F, prAI, gcLm}, // SUPERSCRIPT LATIN SMALL LETTER N
+ {0x2080, 0x2080, prAL, gcNo}, // SUBSCRIPT ZERO
+ {0x2081, 0x2084, prAI, gcNo}, // [4] SUBSCRIPT ONE..SUBSCRIPT FOUR
+ {0x2085, 0x2089, prAL, gcNo}, // [5] SUBSCRIPT FIVE..SUBSCRIPT NINE
+ {0x208A, 0x208C, prAL, gcSm}, // [3] SUBSCRIPT PLUS SIGN..SUBSCRIPT EQUALS SIGN
+ {0x208D, 0x208D, prOP, gcPs}, // SUBSCRIPT LEFT PARENTHESIS
+ {0x208E, 0x208E, prCL, gcPe}, // SUBSCRIPT RIGHT PARENTHESIS
+ {0x2090, 0x209C, prAL, gcLm}, // [13] LATIN SUBSCRIPT SMALL LETTER A..LATIN SUBSCRIPT SMALL LETTER T
+ {0x20A0, 0x20A6, prPR, gcSc}, // [7] EURO-CURRENCY SIGN..NAIRA SIGN
+ {0x20A7, 0x20A7, prPO, gcSc}, // PESETA SIGN
+ {0x20A8, 0x20B5, prPR, gcSc}, // [14] RUPEE SIGN..CEDI SIGN
+ {0x20B6, 0x20B6, prPO, gcSc}, // LIVRE TOURNOIS SIGN
+ {0x20B7, 0x20BA, prPR, gcSc}, // [4] SPESMILO SIGN..TURKISH LIRA SIGN
+ {0x20BB, 0x20BB, prPO, gcSc}, // NORDIC MARK SIGN
+ {0x20BC, 0x20BD, prPR, gcSc}, // [2] MANAT SIGN..RUBLE SIGN
+ {0x20BE, 0x20BE, prPO, gcSc}, // LARI SIGN
+ {0x20BF, 0x20BF, prPR, gcSc}, // BITCOIN SIGN
+ {0x20C0, 0x20C0, prPO, gcSc}, // SOM SIGN
+ {0x20C1, 0x20CF, prPR, gcCn}, // [15] ..
+ {0x20D0, 0x20DC, prCM, gcMn}, // [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE
+ {0x20DD, 0x20E0, prCM, gcMe}, // [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH
+ {0x20E1, 0x20E1, prCM, gcMn}, // COMBINING LEFT RIGHT ARROW ABOVE
+ {0x20E2, 0x20E4, prCM, gcMe}, // [3] COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING UPWARD POINTING TRIANGLE
+ {0x20E5, 0x20F0, prCM, gcMn}, // [12] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING ASTERISK ABOVE
+ {0x2100, 0x2101, prAL, gcSo}, // [2] ACCOUNT OF..ADDRESSED TO THE SUBJECT
+ {0x2102, 0x2102, prAL, gcLu}, // DOUBLE-STRUCK CAPITAL C
+ {0x2103, 0x2103, prPO, gcSo}, // DEGREE CELSIUS
+ {0x2104, 0x2104, prAL, gcSo}, // CENTRE LINE SYMBOL
+ {0x2105, 0x2105, prAI, gcSo}, // CARE OF
+ {0x2106, 0x2106, prAL, gcSo}, // CADA UNA
+ {0x2107, 0x2107, prAL, gcLu}, // EULER CONSTANT
+ {0x2108, 0x2108, prAL, gcSo}, // SCRUPLE
+ {0x2109, 0x2109, prPO, gcSo}, // DEGREE FAHRENHEIT
+ {0x210A, 0x2112, prAL, gcLC}, // [9] SCRIPT SMALL G..SCRIPT CAPITAL L
+ {0x2113, 0x2113, prAI, gcLl}, // SCRIPT SMALL L
+ {0x2114, 0x2114, prAL, gcSo}, // L B BAR SYMBOL
+ {0x2115, 0x2115, prAL, gcLu}, // DOUBLE-STRUCK CAPITAL N
+ {0x2116, 0x2116, prPR, gcSo}, // NUMERO SIGN
+ {0x2117, 0x2117, prAL, gcSo}, // SOUND RECORDING COPYRIGHT
+ {0x2118, 0x2118, prAL, gcSm}, // SCRIPT CAPITAL P
+ {0x2119, 0x211D, prAL, gcLu}, // [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-STRUCK CAPITAL R
+ {0x211E, 0x2120, prAL, gcSo}, // [3] PRESCRIPTION TAKE..SERVICE MARK
+ {0x2121, 0x2122, prAI, gcSo}, // [2] TELEPHONE SIGN..TRADE MARK SIGN
+ {0x2123, 0x2123, prAL, gcSo}, // VERSICLE
+ {0x2124, 0x2124, prAL, gcLu}, // DOUBLE-STRUCK CAPITAL Z
+ {0x2125, 0x2125, prAL, gcSo}, // OUNCE SIGN
+ {0x2126, 0x2126, prAL, gcLu}, // OHM SIGN
+ {0x2127, 0x2127, prAL, gcSo}, // INVERTED OHM SIGN
+ {0x2128, 0x2128, prAL, gcLu}, // BLACK-LETTER CAPITAL Z
+ {0x2129, 0x2129, prAL, gcSo}, // TURNED GREEK SMALL LETTER IOTA
+ {0x212A, 0x212A, prAL, gcLu}, // KELVIN SIGN
+ {0x212B, 0x212B, prAI, gcLu}, // ANGSTROM SIGN
+ {0x212C, 0x212D, prAL, gcLu}, // [2] SCRIPT CAPITAL B..BLACK-LETTER CAPITAL C
+ {0x212E, 0x212E, prAL, gcSo}, // ESTIMATED SYMBOL
+ {0x212F, 0x2134, prAL, gcLC}, // [6] SCRIPT SMALL E..SCRIPT SMALL O
+ {0x2135, 0x2138, prAL, gcLo}, // [4] ALEF SYMBOL..DALET SYMBOL
+ {0x2139, 0x2139, prAL, gcLl}, // INFORMATION SOURCE
+ {0x213A, 0x213B, prAL, gcSo}, // [2] ROTATED CAPITAL Q..FACSIMILE SIGN
+ {0x213C, 0x213F, prAL, gcLC}, // [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STRUCK CAPITAL PI
+ {0x2140, 0x2144, prAL, gcSm}, // [5] DOUBLE-STRUCK N-ARY SUMMATION..TURNED SANS-SERIF CAPITAL Y
+ {0x2145, 0x2149, prAL, gcLC}, // [5] DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL J
+ {0x214A, 0x214A, prAL, gcSo}, // PROPERTY LINE
+ {0x214B, 0x214B, prAL, gcSm}, // TURNED AMPERSAND
+ {0x214C, 0x214D, prAL, gcSo}, // [2] PER SIGN..AKTIESELSKAB
+ {0x214E, 0x214E, prAL, gcLl}, // TURNED SMALL F
+ {0x214F, 0x214F, prAL, gcSo}, // SYMBOL FOR SAMARITAN SOURCE
+ {0x2150, 0x2153, prAL, gcNo}, // [4] VULGAR FRACTION ONE SEVENTH..VULGAR FRACTION ONE THIRD
+ {0x2154, 0x2155, prAI, gcNo}, // [2] VULGAR FRACTION TWO THIRDS..VULGAR FRACTION ONE FIFTH
+ {0x2156, 0x215A, prAL, gcNo}, // [5] VULGAR FRACTION TWO FIFTHS..VULGAR FRACTION FIVE SIXTHS
+ {0x215B, 0x215B, prAI, gcNo}, // VULGAR FRACTION ONE EIGHTH
+ {0x215C, 0x215D, prAL, gcNo}, // [2] VULGAR FRACTION THREE EIGHTHS..VULGAR FRACTION FIVE EIGHTHS
+ {0x215E, 0x215E, prAI, gcNo}, // VULGAR FRACTION SEVEN EIGHTHS
+ {0x215F, 0x215F, prAL, gcNo}, // FRACTION NUMERATOR ONE
+ {0x2160, 0x216B, prAI, gcNl}, // [12] ROMAN NUMERAL ONE..ROMAN NUMERAL TWELVE
+ {0x216C, 0x216F, prAL, gcNl}, // [4] ROMAN NUMERAL FIFTY..ROMAN NUMERAL ONE THOUSAND
+ {0x2170, 0x2179, prAI, gcNl}, // [10] SMALL ROMAN NUMERAL ONE..SMALL ROMAN NUMERAL TEN
+ {0x217A, 0x2182, prAL, gcNl}, // [9] SMALL ROMAN NUMERAL ELEVEN..ROMAN NUMERAL TEN THOUSAND
+ {0x2183, 0x2184, prAL, gcLC}, // [2] ROMAN NUMERAL REVERSED ONE HUNDRED..LATIN SMALL LETTER REVERSED C
+ {0x2185, 0x2188, prAL, gcNl}, // [4] ROMAN NUMERAL SIX LATE FORM..ROMAN NUMERAL ONE HUNDRED THOUSAND
+ {0x2189, 0x2189, prAI, gcNo}, // VULGAR FRACTION ZERO THIRDS
+ {0x218A, 0x218B, prAL, gcSo}, // [2] TURNED DIGIT TWO..TURNED DIGIT THREE
+ {0x2190, 0x2194, prAI, gcSm}, // [5] LEFTWARDS ARROW..LEFT RIGHT ARROW
+ {0x2195, 0x2199, prAI, gcSo}, // [5] UP DOWN ARROW..SOUTH WEST ARROW
+ {0x219A, 0x219B, prAL, gcSm}, // [2] LEFTWARDS ARROW WITH STROKE..RIGHTWARDS ARROW WITH STROKE
+ {0x219C, 0x219F, prAL, gcSo}, // [4] LEFTWARDS WAVE ARROW..UPWARDS TWO HEADED ARROW
+ {0x21A0, 0x21A0, prAL, gcSm}, // RIGHTWARDS TWO HEADED ARROW
+ {0x21A1, 0x21A2, prAL, gcSo}, // [2] DOWNWARDS TWO HEADED ARROW..LEFTWARDS ARROW WITH TAIL
+ {0x21A3, 0x21A3, prAL, gcSm}, // RIGHTWARDS ARROW WITH TAIL
+ {0x21A4, 0x21A5, prAL, gcSo}, // [2] LEFTWARDS ARROW FROM BAR..UPWARDS ARROW FROM BAR
+ {0x21A6, 0x21A6, prAL, gcSm}, // RIGHTWARDS ARROW FROM BAR
+ {0x21A7, 0x21AD, prAL, gcSo}, // [7] DOWNWARDS ARROW FROM BAR..LEFT RIGHT WAVE ARROW
+ {0x21AE, 0x21AE, prAL, gcSm}, // LEFT RIGHT ARROW WITH STROKE
+ {0x21AF, 0x21CD, prAL, gcSo}, // [31] DOWNWARDS ZIGZAG ARROW..LEFTWARDS DOUBLE ARROW WITH STROKE
+ {0x21CE, 0x21CF, prAL, gcSm}, // [2] LEFT RIGHT DOUBLE ARROW WITH STROKE..RIGHTWARDS DOUBLE ARROW WITH STROKE
+ {0x21D0, 0x21D1, prAL, gcSo}, // [2] LEFTWARDS DOUBLE ARROW..UPWARDS DOUBLE ARROW
+ {0x21D2, 0x21D2, prAI, gcSm}, // RIGHTWARDS DOUBLE ARROW
+ {0x21D3, 0x21D3, prAL, gcSo}, // DOWNWARDS DOUBLE ARROW
+ {0x21D4, 0x21D4, prAI, gcSm}, // LEFT RIGHT DOUBLE ARROW
+ {0x21D5, 0x21F3, prAL, gcSo}, // [31] UP DOWN DOUBLE ARROW..UP DOWN WHITE ARROW
+ {0x21F4, 0x21FF, prAL, gcSm}, // [12] RIGHT ARROW WITH SMALL CIRCLE..LEFT RIGHT OPEN-HEADED ARROW
+ {0x2200, 0x2200, prAI, gcSm}, // FOR ALL
+ {0x2201, 0x2201, prAL, gcSm}, // COMPLEMENT
+ {0x2202, 0x2203, prAI, gcSm}, // [2] PARTIAL DIFFERENTIAL..THERE EXISTS
+ {0x2204, 0x2206, prAL, gcSm}, // [3] THERE DOES NOT EXIST..INCREMENT
+ {0x2207, 0x2208, prAI, gcSm}, // [2] NABLA..ELEMENT OF
+ {0x2209, 0x220A, prAL, gcSm}, // [2] NOT AN ELEMENT OF..SMALL ELEMENT OF
+ {0x220B, 0x220B, prAI, gcSm}, // CONTAINS AS MEMBER
+ {0x220C, 0x220E, prAL, gcSm}, // [3] DOES NOT CONTAIN AS MEMBER..END OF PROOF
+ {0x220F, 0x220F, prAI, gcSm}, // N-ARY PRODUCT
+ {0x2210, 0x2210, prAL, gcSm}, // N-ARY COPRODUCT
+ {0x2211, 0x2211, prAI, gcSm}, // N-ARY SUMMATION
+ {0x2212, 0x2213, prPR, gcSm}, // [2] MINUS SIGN..MINUS-OR-PLUS SIGN
+ {0x2214, 0x2214, prAL, gcSm}, // DOT PLUS
+ {0x2215, 0x2215, prAI, gcSm}, // DIVISION SLASH
+ {0x2216, 0x2219, prAL, gcSm}, // [4] SET MINUS..BULLET OPERATOR
+ {0x221A, 0x221A, prAI, gcSm}, // SQUARE ROOT
+ {0x221B, 0x221C, prAL, gcSm}, // [2] CUBE ROOT..FOURTH ROOT
+ {0x221D, 0x2220, prAI, gcSm}, // [4] PROPORTIONAL TO..ANGLE
+ {0x2221, 0x2222, prAL, gcSm}, // [2] MEASURED ANGLE..SPHERICAL ANGLE
+ {0x2223, 0x2223, prAI, gcSm}, // DIVIDES
+ {0x2224, 0x2224, prAL, gcSm}, // DOES NOT DIVIDE
+ {0x2225, 0x2225, prAI, gcSm}, // PARALLEL TO
+ {0x2226, 0x2226, prAL, gcSm}, // NOT PARALLEL TO
+ {0x2227, 0x222C, prAI, gcSm}, // [6] LOGICAL AND..DOUBLE INTEGRAL
+ {0x222D, 0x222D, prAL, gcSm}, // TRIPLE INTEGRAL
+ {0x222E, 0x222E, prAI, gcSm}, // CONTOUR INTEGRAL
+ {0x222F, 0x2233, prAL, gcSm}, // [5] SURFACE INTEGRAL..ANTICLOCKWISE CONTOUR INTEGRAL
+ {0x2234, 0x2237, prAI, gcSm}, // [4] THEREFORE..PROPORTION
+ {0x2238, 0x223B, prAL, gcSm}, // [4] DOT MINUS..HOMOTHETIC
+ {0x223C, 0x223D, prAI, gcSm}, // [2] TILDE OPERATOR..REVERSED TILDE
+ {0x223E, 0x2247, prAL, gcSm}, // [10] INVERTED LAZY S..NEITHER APPROXIMATELY NOR ACTUALLY EQUAL TO
+ {0x2248, 0x2248, prAI, gcSm}, // ALMOST EQUAL TO
+ {0x2249, 0x224B, prAL, gcSm}, // [3] NOT ALMOST EQUAL TO..TRIPLE TILDE
+ {0x224C, 0x224C, prAI, gcSm}, // ALL EQUAL TO
+ {0x224D, 0x2251, prAL, gcSm}, // [5] EQUIVALENT TO..GEOMETRICALLY EQUAL TO
+ {0x2252, 0x2252, prAI, gcSm}, // APPROXIMATELY EQUAL TO OR THE IMAGE OF
+ {0x2253, 0x225F, prAL, gcSm}, // [13] IMAGE OF OR APPROXIMATELY EQUAL TO..QUESTIONED EQUAL TO
+ {0x2260, 0x2261, prAI, gcSm}, // [2] NOT EQUAL TO..IDENTICAL TO
+ {0x2262, 0x2263, prAL, gcSm}, // [2] NOT IDENTICAL TO..STRICTLY EQUIVALENT TO
+ {0x2264, 0x2267, prAI, gcSm}, // [4] LESS-THAN OR EQUAL TO..GREATER-THAN OVER EQUAL TO
+ {0x2268, 0x2269, prAL, gcSm}, // [2] LESS-THAN BUT NOT EQUAL TO..GREATER-THAN BUT NOT EQUAL TO
+ {0x226A, 0x226B, prAI, gcSm}, // [2] MUCH LESS-THAN..MUCH GREATER-THAN
+ {0x226C, 0x226D, prAL, gcSm}, // [2] BETWEEN..NOT EQUIVALENT TO
+ {0x226E, 0x226F, prAI, gcSm}, // [2] NOT LESS-THAN..NOT GREATER-THAN
+ {0x2270, 0x2281, prAL, gcSm}, // [18] NEITHER LESS-THAN NOR EQUAL TO..DOES NOT SUCCEED
+ {0x2282, 0x2283, prAI, gcSm}, // [2] SUBSET OF..SUPERSET OF
+ {0x2284, 0x2285, prAL, gcSm}, // [2] NOT A SUBSET OF..NOT A SUPERSET OF
+ {0x2286, 0x2287, prAI, gcSm}, // [2] SUBSET OF OR EQUAL TO..SUPERSET OF OR EQUAL TO
+ {0x2288, 0x2294, prAL, gcSm}, // [13] NEITHER A SUBSET OF NOR EQUAL TO..SQUARE CUP
+ {0x2295, 0x2295, prAI, gcSm}, // CIRCLED PLUS
+ {0x2296, 0x2298, prAL, gcSm}, // [3] CIRCLED MINUS..CIRCLED DIVISION SLASH
+ {0x2299, 0x2299, prAI, gcSm}, // CIRCLED DOT OPERATOR
+ {0x229A, 0x22A4, prAL, gcSm}, // [11] CIRCLED RING OPERATOR..DOWN TACK
+ {0x22A5, 0x22A5, prAI, gcSm}, // UP TACK
+ {0x22A6, 0x22BE, prAL, gcSm}, // [25] ASSERTION..RIGHT ANGLE WITH ARC
+ {0x22BF, 0x22BF, prAI, gcSm}, // RIGHT TRIANGLE
+ {0x22C0, 0x22EE, prAL, gcSm}, // [47] N-ARY LOGICAL AND..VERTICAL ELLIPSIS
+ {0x22EF, 0x22EF, prIN, gcSm}, // MIDLINE HORIZONTAL ELLIPSIS
+ {0x22F0, 0x22FF, prAL, gcSm}, // [16] UP RIGHT DIAGONAL ELLIPSIS..Z NOTATION BAG MEMBERSHIP
+ {0x2300, 0x2307, prAL, gcSo}, // [8] DIAMETER SIGN..WAVY LINE
+ {0x2308, 0x2308, prOP, gcPs}, // LEFT CEILING
+ {0x2309, 0x2309, prCL, gcPe}, // RIGHT CEILING
+ {0x230A, 0x230A, prOP, gcPs}, // LEFT FLOOR
+ {0x230B, 0x230B, prCL, gcPe}, // RIGHT FLOOR
+ {0x230C, 0x2311, prAL, gcSo}, // [6] BOTTOM RIGHT CROP..SQUARE LOZENGE
+ {0x2312, 0x2312, prAI, gcSo}, // ARC
+ {0x2313, 0x2319, prAL, gcSo}, // [7] SEGMENT..TURNED NOT SIGN
+ {0x231A, 0x231B, prID, gcSo}, // [2] WATCH..HOURGLASS
+ {0x231C, 0x231F, prAL, gcSo}, // [4] TOP LEFT CORNER..BOTTOM RIGHT CORNER
+ {0x2320, 0x2321, prAL, gcSm}, // [2] TOP HALF INTEGRAL..BOTTOM HALF INTEGRAL
+ {0x2322, 0x2328, prAL, gcSo}, // [7] FROWN..KEYBOARD
+ {0x2329, 0x2329, prOP, gcPs}, // LEFT-POINTING ANGLE BRACKET
+ {0x232A, 0x232A, prCL, gcPe}, // RIGHT-POINTING ANGLE BRACKET
+ {0x232B, 0x237B, prAL, gcSo}, // [81] ERASE TO THE LEFT..NOT CHECK MARK
+ {0x237C, 0x237C, prAL, gcSm}, // RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW
+ {0x237D, 0x239A, prAL, gcSo}, // [30] SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL
+ {0x239B, 0x23B3, prAL, gcSm}, // [25] LEFT PARENTHESIS UPPER HOOK..SUMMATION BOTTOM
+ {0x23B4, 0x23DB, prAL, gcSo}, // [40] TOP SQUARE BRACKET..FUSE
+ {0x23DC, 0x23E1, prAL, gcSm}, // [6] TOP PARENTHESIS..BOTTOM TORTOISE SHELL BRACKET
+ {0x23E2, 0x23EF, prAL, gcSo}, // [14] WHITE TRAPEZIUM..BLACK RIGHT-POINTING TRIANGLE WITH DOUBLE VERTICAL BAR
+ {0x23F0, 0x23F3, prID, gcSo}, // [4] ALARM CLOCK..HOURGLASS WITH FLOWING SAND
+ {0x23F4, 0x23FF, prAL, gcSo}, // [12] BLACK MEDIUM LEFT-POINTING TRIANGLE..OBSERVER EYE SYMBOL
+ {0x2400, 0x2426, prAL, gcSo}, // [39] SYMBOL FOR NULL..SYMBOL FOR SUBSTITUTE FORM TWO
+ {0x2440, 0x244A, prAL, gcSo}, // [11] OCR HOOK..OCR DOUBLE BACKSLASH
+ {0x2460, 0x249B, prAI, gcNo}, // [60] CIRCLED DIGIT ONE..NUMBER TWENTY FULL STOP
+ {0x249C, 0x24E9, prAI, gcSo}, // [78] PARENTHESIZED LATIN SMALL LETTER A..CIRCLED LATIN SMALL LETTER Z
+ {0x24EA, 0x24FE, prAI, gcNo}, // [21] CIRCLED DIGIT ZERO..DOUBLE CIRCLED NUMBER TEN
+ {0x24FF, 0x24FF, prAL, gcNo}, // NEGATIVE CIRCLED DIGIT ZERO
+ {0x2500, 0x254B, prAI, gcSo}, // [76] BOX DRAWINGS LIGHT HORIZONTAL..BOX DRAWINGS HEAVY VERTICAL AND HORIZONTAL
+ {0x254C, 0x254F, prAL, gcSo}, // [4] BOX DRAWINGS LIGHT DOUBLE DASH HORIZONTAL..BOX DRAWINGS HEAVY DOUBLE DASH VERTICAL
+ {0x2550, 0x2574, prAI, gcSo}, // [37] BOX DRAWINGS DOUBLE HORIZONTAL..BOX DRAWINGS LIGHT LEFT
+ {0x2575, 0x257F, prAL, gcSo}, // [11] BOX DRAWINGS LIGHT UP..BOX DRAWINGS HEAVY UP AND LIGHT DOWN
+ {0x2580, 0x258F, prAI, gcSo}, // [16] UPPER HALF BLOCK..LEFT ONE EIGHTH BLOCK
+ {0x2590, 0x2591, prAL, gcSo}, // [2] RIGHT HALF BLOCK..LIGHT SHADE
+ {0x2592, 0x2595, prAI, gcSo}, // [4] MEDIUM SHADE..RIGHT ONE EIGHTH BLOCK
+ {0x2596, 0x259F, prAL, gcSo}, // [10] QUADRANT LOWER LEFT..QUADRANT UPPER RIGHT AND LOWER LEFT AND LOWER RIGHT
+ {0x25A0, 0x25A1, prAI, gcSo}, // [2] BLACK SQUARE..WHITE SQUARE
+ {0x25A2, 0x25A2, prAL, gcSo}, // WHITE SQUARE WITH ROUNDED CORNERS
+ {0x25A3, 0x25A9, prAI, gcSo}, // [7] WHITE SQUARE CONTAINING BLACK SMALL SQUARE..SQUARE WITH DIAGONAL CROSSHATCH FILL
+ {0x25AA, 0x25B1, prAL, gcSo}, // [8] BLACK SMALL SQUARE..WHITE PARALLELOGRAM
+ {0x25B2, 0x25B3, prAI, gcSo}, // [2] BLACK UP-POINTING TRIANGLE..WHITE UP-POINTING TRIANGLE
+ {0x25B4, 0x25B5, prAL, gcSo}, // [2] BLACK UP-POINTING SMALL TRIANGLE..WHITE UP-POINTING SMALL TRIANGLE
+ {0x25B6, 0x25B6, prAI, gcSo}, // BLACK RIGHT-POINTING TRIANGLE
+ {0x25B7, 0x25B7, prAI, gcSm}, // WHITE RIGHT-POINTING TRIANGLE
+ {0x25B8, 0x25BB, prAL, gcSo}, // [4] BLACK RIGHT-POINTING SMALL TRIANGLE..WHITE RIGHT-POINTING POINTER
+ {0x25BC, 0x25BD, prAI, gcSo}, // [2] BLACK DOWN-POINTING TRIANGLE..WHITE DOWN-POINTING TRIANGLE
+ {0x25BE, 0x25BF, prAL, gcSo}, // [2] BLACK DOWN-POINTING SMALL TRIANGLE..WHITE DOWN-POINTING SMALL TRIANGLE
+ {0x25C0, 0x25C0, prAI, gcSo}, // BLACK LEFT-POINTING TRIANGLE
+ {0x25C1, 0x25C1, prAI, gcSm}, // WHITE LEFT-POINTING TRIANGLE
+ {0x25C2, 0x25C5, prAL, gcSo}, // [4] BLACK LEFT-POINTING SMALL TRIANGLE..WHITE LEFT-POINTING POINTER
+ {0x25C6, 0x25C8, prAI, gcSo}, // [3] BLACK DIAMOND..WHITE DIAMOND CONTAINING BLACK SMALL DIAMOND
+ {0x25C9, 0x25CA, prAL, gcSo}, // [2] FISHEYE..LOZENGE
+ {0x25CB, 0x25CB, prAI, gcSo}, // WHITE CIRCLE
+ {0x25CC, 0x25CD, prAL, gcSo}, // [2] DOTTED CIRCLE..CIRCLE WITH VERTICAL FILL
+ {0x25CE, 0x25D1, prAI, gcSo}, // [4] BULLSEYE..CIRCLE WITH RIGHT HALF BLACK
+ {0x25D2, 0x25E1, prAL, gcSo}, // [16] CIRCLE WITH LOWER HALF BLACK..LOWER HALF CIRCLE
+ {0x25E2, 0x25E5, prAI, gcSo}, // [4] BLACK LOWER RIGHT TRIANGLE..BLACK UPPER RIGHT TRIANGLE
+ {0x25E6, 0x25EE, prAL, gcSo}, // [9] WHITE BULLET..UP-POINTING TRIANGLE WITH RIGHT HALF BLACK
+ {0x25EF, 0x25EF, prAI, gcSo}, // LARGE CIRCLE
+ {0x25F0, 0x25F7, prAL, gcSo}, // [8] WHITE SQUARE WITH UPPER LEFT QUADRANT..WHITE CIRCLE WITH UPPER RIGHT QUADRANT
+ {0x25F8, 0x25FF, prAL, gcSm}, // [8] UPPER LEFT TRIANGLE..LOWER RIGHT TRIANGLE
+ {0x2600, 0x2603, prID, gcSo}, // [4] BLACK SUN WITH RAYS..SNOWMAN
+ {0x2604, 0x2604, prAL, gcSo}, // COMET
+ {0x2605, 0x2606, prAI, gcSo}, // [2] BLACK STAR..WHITE STAR
+ {0x2607, 0x2608, prAL, gcSo}, // [2] LIGHTNING..THUNDERSTORM
+ {0x2609, 0x2609, prAI, gcSo}, // SUN
+ {0x260A, 0x260D, prAL, gcSo}, // [4] ASCENDING NODE..OPPOSITION
+ {0x260E, 0x260F, prAI, gcSo}, // [2] BLACK TELEPHONE..WHITE TELEPHONE
+ {0x2610, 0x2613, prAL, gcSo}, // [4] BALLOT BOX..SALTIRE
+ {0x2614, 0x2615, prID, gcSo}, // [2] UMBRELLA WITH RAIN DROPS..HOT BEVERAGE
+ {0x2616, 0x2617, prAI, gcSo}, // [2] WHITE SHOGI PIECE..BLACK SHOGI PIECE
+ {0x2618, 0x2618, prID, gcSo}, // SHAMROCK
+ {0x2619, 0x2619, prAL, gcSo}, // REVERSED ROTATED FLORAL HEART BULLET
+ {0x261A, 0x261C, prID, gcSo}, // [3] BLACK LEFT POINTING INDEX..WHITE LEFT POINTING INDEX
+ {0x261D, 0x261D, prEB, gcSo}, // WHITE UP POINTING INDEX
+ {0x261E, 0x261F, prID, gcSo}, // [2] WHITE RIGHT POINTING INDEX..WHITE DOWN POINTING INDEX
+ {0x2620, 0x2638, prAL, gcSo}, // [25] SKULL AND CROSSBONES..WHEEL OF DHARMA
+ {0x2639, 0x263B, prID, gcSo}, // [3] WHITE FROWNING FACE..BLACK SMILING FACE
+ {0x263C, 0x263F, prAL, gcSo}, // [4] WHITE SUN WITH RAYS..MERCURY
+ {0x2640, 0x2640, prAI, gcSo}, // FEMALE SIGN
+ {0x2641, 0x2641, prAL, gcSo}, // EARTH
+ {0x2642, 0x2642, prAI, gcSo}, // MALE SIGN
+ {0x2643, 0x265F, prAL, gcSo}, // [29] JUPITER..BLACK CHESS PAWN
+ {0x2660, 0x2661, prAI, gcSo}, // [2] BLACK SPADE SUIT..WHITE HEART SUIT
+ {0x2662, 0x2662, prAL, gcSo}, // WHITE DIAMOND SUIT
+ {0x2663, 0x2665, prAI, gcSo}, // [3] BLACK CLUB SUIT..BLACK HEART SUIT
+ {0x2666, 0x2666, prAL, gcSo}, // BLACK DIAMOND SUIT
+ {0x2667, 0x2667, prAI, gcSo}, // WHITE CLUB SUIT
+ {0x2668, 0x2668, prID, gcSo}, // HOT SPRINGS
+ {0x2669, 0x266A, prAI, gcSo}, // [2] QUARTER NOTE..EIGHTH NOTE
+ {0x266B, 0x266B, prAL, gcSo}, // BEAMED EIGHTH NOTES
+ {0x266C, 0x266D, prAI, gcSo}, // [2] BEAMED SIXTEENTH NOTES..MUSIC FLAT SIGN
+ {0x266E, 0x266E, prAL, gcSo}, // MUSIC NATURAL SIGN
+ {0x266F, 0x266F, prAI, gcSm}, // MUSIC SHARP SIGN
+ {0x2670, 0x267E, prAL, gcSo}, // [15] WEST SYRIAC CROSS..PERMANENT PAPER SIGN
+ {0x267F, 0x267F, prID, gcSo}, // WHEELCHAIR SYMBOL
+ {0x2680, 0x269D, prAL, gcSo}, // [30] DIE FACE-1..OUTLINED WHITE STAR
+ {0x269E, 0x269F, prAI, gcSo}, // [2] THREE LINES CONVERGING RIGHT..THREE LINES CONVERGING LEFT
+ {0x26A0, 0x26BC, prAL, gcSo}, // [29] WARNING SIGN..SESQUIQUADRATE
+ {0x26BD, 0x26C8, prID, gcSo}, // [12] SOCCER BALL..THUNDER CLOUD AND RAIN
+ {0x26C9, 0x26CC, prAI, gcSo}, // [4] TURNED WHITE SHOGI PIECE..CROSSING LANES
+ {0x26CD, 0x26CD, prID, gcSo}, // DISABLED CAR
+ {0x26CE, 0x26CE, prAL, gcSo}, // OPHIUCHUS
+ {0x26CF, 0x26D1, prID, gcSo}, // [3] PICK..HELMET WITH WHITE CROSS
+ {0x26D2, 0x26D2, prAI, gcSo}, // CIRCLED CROSSING LANES
+ {0x26D3, 0x26D4, prID, gcSo}, // [2] CHAINS..NO ENTRY
+ {0x26D5, 0x26D7, prAI, gcSo}, // [3] ALTERNATE ONE-WAY LEFT WAY TRAFFIC..WHITE TWO-WAY LEFT WAY TRAFFIC
+ {0x26D8, 0x26D9, prID, gcSo}, // [2] BLACK LEFT LANE MERGE..WHITE LEFT LANE MERGE
+ {0x26DA, 0x26DB, prAI, gcSo}, // [2] DRIVE SLOW SIGN..HEAVY WHITE DOWN-POINTING TRIANGLE
+ {0x26DC, 0x26DC, prID, gcSo}, // LEFT CLOSED ENTRY
+ {0x26DD, 0x26DE, prAI, gcSo}, // [2] SQUARED SALTIRE..FALLING DIAGONAL IN WHITE CIRCLE IN BLACK SQUARE
+ {0x26DF, 0x26E1, prID, gcSo}, // [3] BLACK TRUCK..RESTRICTED LEFT ENTRY-2
+ {0x26E2, 0x26E2, prAL, gcSo}, // ASTRONOMICAL SYMBOL FOR URANUS
+ {0x26E3, 0x26E3, prAI, gcSo}, // HEAVY CIRCLE WITH STROKE AND TWO DOTS ABOVE
+ {0x26E4, 0x26E7, prAL, gcSo}, // [4] PENTAGRAM..INVERTED PENTAGRAM
+ {0x26E8, 0x26E9, prAI, gcSo}, // [2] BLACK CROSS ON SHIELD..SHINTO SHRINE
+ {0x26EA, 0x26EA, prID, gcSo}, // CHURCH
+ {0x26EB, 0x26F0, prAI, gcSo}, // [6] CASTLE..MOUNTAIN
+ {0x26F1, 0x26F5, prID, gcSo}, // [5] UMBRELLA ON GROUND..SAILBOAT
+ {0x26F6, 0x26F6, prAI, gcSo}, // SQUARE FOUR CORNERS
+ {0x26F7, 0x26F8, prID, gcSo}, // [2] SKIER..ICE SKATE
+ {0x26F9, 0x26F9, prEB, gcSo}, // PERSON WITH BALL
+ {0x26FA, 0x26FA, prID, gcSo}, // TENT
+ {0x26FB, 0x26FC, prAI, gcSo}, // [2] JAPANESE BANK SYMBOL..HEADSTONE GRAVEYARD SYMBOL
+ {0x26FD, 0x26FF, prID, gcSo}, // [3] FUEL PUMP..WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE
+ {0x2700, 0x2704, prID, gcSo}, // [5] BLACK SAFETY SCISSORS..WHITE SCISSORS
+ {0x2705, 0x2707, prAL, gcSo}, // [3] WHITE HEAVY CHECK MARK..TAPE DRIVE
+ {0x2708, 0x2709, prID, gcSo}, // [2] AIRPLANE..ENVELOPE
+ {0x270A, 0x270D, prEB, gcSo}, // [4] RAISED FIST..WRITING HAND
+ {0x270E, 0x2756, prAL, gcSo}, // [73] LOWER RIGHT PENCIL..BLACK DIAMOND MINUS WHITE X
+ {0x2757, 0x2757, prAI, gcSo}, // HEAVY EXCLAMATION MARK SYMBOL
+ {0x2758, 0x275A, prAL, gcSo}, // [3] LIGHT VERTICAL BAR..HEAVY VERTICAL BAR
+ {0x275B, 0x2760, prQU, gcSo}, // [6] HEAVY SINGLE TURNED COMMA QUOTATION MARK ORNAMENT..HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT
+ {0x2761, 0x2761, prAL, gcSo}, // CURVED STEM PARAGRAPH SIGN ORNAMENT
+ {0x2762, 0x2763, prEX, gcSo}, // [2] HEAVY EXCLAMATION MARK ORNAMENT..HEAVY HEART EXCLAMATION MARK ORNAMENT
+ {0x2764, 0x2764, prID, gcSo}, // HEAVY BLACK HEART
+ {0x2765, 0x2767, prAL, gcSo}, // [3] ROTATED HEAVY BLACK HEART BULLET..ROTATED FLORAL HEART BULLET
+ {0x2768, 0x2768, prOP, gcPs}, // MEDIUM LEFT PARENTHESIS ORNAMENT
+ {0x2769, 0x2769, prCL, gcPe}, // MEDIUM RIGHT PARENTHESIS ORNAMENT
+ {0x276A, 0x276A, prOP, gcPs}, // MEDIUM FLATTENED LEFT PARENTHESIS ORNAMENT
+ {0x276B, 0x276B, prCL, gcPe}, // MEDIUM FLATTENED RIGHT PARENTHESIS ORNAMENT
+ {0x276C, 0x276C, prOP, gcPs}, // MEDIUM LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276D, 0x276D, prCL, gcPe}, // MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276E, 0x276E, prOP, gcPs}, // HEAVY LEFT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x276F, 0x276F, prCL, gcPe}, // HEAVY RIGHT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x2770, 0x2770, prOP, gcPs}, // HEAVY LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2771, 0x2771, prCL, gcPe}, // HEAVY RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2772, 0x2772, prOP, gcPs}, // LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2773, 0x2773, prCL, gcPe}, // LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2774, 0x2774, prOP, gcPs}, // MEDIUM LEFT CURLY BRACKET ORNAMENT
+ {0x2775, 0x2775, prCL, gcPe}, // MEDIUM RIGHT CURLY BRACKET ORNAMENT
+ {0x2776, 0x2793, prAI, gcNo}, // [30] DINGBAT NEGATIVE CIRCLED DIGIT ONE..DINGBAT NEGATIVE CIRCLED SANS-SERIF NUMBER TEN
+ {0x2794, 0x27BF, prAL, gcSo}, // [44] HEAVY WIDE-HEADED RIGHTWARDS ARROW..DOUBLE CURLY LOOP
+ {0x27C0, 0x27C4, prAL, gcSm}, // [5] THREE DIMENSIONAL ANGLE..OPEN SUPERSET
+ {0x27C5, 0x27C5, prOP, gcPs}, // LEFT S-SHAPED BAG DELIMITER
+ {0x27C6, 0x27C6, prCL, gcPe}, // RIGHT S-SHAPED BAG DELIMITER
+ {0x27C7, 0x27E5, prAL, gcSm}, // [31] OR WITH DOT INSIDE..WHITE SQUARE WITH RIGHTWARDS TICK
+ {0x27E6, 0x27E6, prOP, gcPs}, // MATHEMATICAL LEFT WHITE SQUARE BRACKET
+ {0x27E7, 0x27E7, prCL, gcPe}, // MATHEMATICAL RIGHT WHITE SQUARE BRACKET
+ {0x27E8, 0x27E8, prOP, gcPs}, // MATHEMATICAL LEFT ANGLE BRACKET
+ {0x27E9, 0x27E9, prCL, gcPe}, // MATHEMATICAL RIGHT ANGLE BRACKET
+ {0x27EA, 0x27EA, prOP, gcPs}, // MATHEMATICAL LEFT DOUBLE ANGLE BRACKET
+ {0x27EB, 0x27EB, prCL, gcPe}, // MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET
+ {0x27EC, 0x27EC, prOP, gcPs}, // MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET
+ {0x27ED, 0x27ED, prCL, gcPe}, // MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x27EE, 0x27EE, prOP, gcPs}, // MATHEMATICAL LEFT FLATTENED PARENTHESIS
+ {0x27EF, 0x27EF, prCL, gcPe}, // MATHEMATICAL RIGHT FLATTENED PARENTHESIS
+ {0x27F0, 0x27FF, prAL, gcSm}, // [16] UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW
+ {0x2800, 0x28FF, prAL, gcSo}, // [256] BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678
+ {0x2900, 0x297F, prAL, gcSm}, // [128] RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..DOWN FISH TAIL
+ {0x2980, 0x2982, prAL, gcSm}, // [3] TRIPLE VERTICAL BAR DELIMITER..Z NOTATION TYPE COLON
+ {0x2983, 0x2983, prOP, gcPs}, // LEFT WHITE CURLY BRACKET
+ {0x2984, 0x2984, prCL, gcPe}, // RIGHT WHITE CURLY BRACKET
+ {0x2985, 0x2985, prOP, gcPs}, // LEFT WHITE PARENTHESIS
+ {0x2986, 0x2986, prCL, gcPe}, // RIGHT WHITE PARENTHESIS
+ {0x2987, 0x2987, prOP, gcPs}, // Z NOTATION LEFT IMAGE BRACKET
+ {0x2988, 0x2988, prCL, gcPe}, // Z NOTATION RIGHT IMAGE BRACKET
+ {0x2989, 0x2989, prOP, gcPs}, // Z NOTATION LEFT BINDING BRACKET
+ {0x298A, 0x298A, prCL, gcPe}, // Z NOTATION RIGHT BINDING BRACKET
+ {0x298B, 0x298B, prOP, gcPs}, // LEFT SQUARE BRACKET WITH UNDERBAR
+ {0x298C, 0x298C, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH UNDERBAR
+ {0x298D, 0x298D, prOP, gcPs}, // LEFT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x298E, 0x298E, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x298F, 0x298F, prOP, gcPs}, // LEFT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x2990, 0x2990, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x2991, 0x2991, prOP, gcPs}, // LEFT ANGLE BRACKET WITH DOT
+ {0x2992, 0x2992, prCL, gcPe}, // RIGHT ANGLE BRACKET WITH DOT
+ {0x2993, 0x2993, prOP, gcPs}, // LEFT ARC LESS-THAN BRACKET
+ {0x2994, 0x2994, prCL, gcPe}, // RIGHT ARC GREATER-THAN BRACKET
+ {0x2995, 0x2995, prOP, gcPs}, // DOUBLE LEFT ARC GREATER-THAN BRACKET
+ {0x2996, 0x2996, prCL, gcPe}, // DOUBLE RIGHT ARC LESS-THAN BRACKET
+ {0x2997, 0x2997, prOP, gcPs}, // LEFT BLACK TORTOISE SHELL BRACKET
+ {0x2998, 0x2998, prCL, gcPe}, // RIGHT BLACK TORTOISE SHELL BRACKET
+ {0x2999, 0x29D7, prAL, gcSm}, // [63] DOTTED FENCE..BLACK HOURGLASS
+ {0x29D8, 0x29D8, prOP, gcPs}, // LEFT WIGGLY FENCE
+ {0x29D9, 0x29D9, prCL, gcPe}, // RIGHT WIGGLY FENCE
+ {0x29DA, 0x29DA, prOP, gcPs}, // LEFT DOUBLE WIGGLY FENCE
+ {0x29DB, 0x29DB, prCL, gcPe}, // RIGHT DOUBLE WIGGLY FENCE
+ {0x29DC, 0x29FB, prAL, gcSm}, // [32] INCOMPLETE INFINITY..TRIPLE PLUS
+ {0x29FC, 0x29FC, prOP, gcPs}, // LEFT-POINTING CURVED ANGLE BRACKET
+ {0x29FD, 0x29FD, prCL, gcPe}, // RIGHT-POINTING CURVED ANGLE BRACKET
+ {0x29FE, 0x29FF, prAL, gcSm}, // [2] TINY..MINY
+ {0x2A00, 0x2AFF, prAL, gcSm}, // [256] N-ARY CIRCLED DOT OPERATOR..N-ARY WHITE VERTICAL BAR
+ {0x2B00, 0x2B2F, prAL, gcSo}, // [48] NORTH EAST WHITE ARROW..WHITE VERTICAL ELLIPSE
+ {0x2B30, 0x2B44, prAL, gcSm}, // [21] LEFT ARROW WITH SMALL CIRCLE..RIGHTWARDS ARROW THROUGH SUPERSET
+ {0x2B45, 0x2B46, prAL, gcSo}, // [2] LEFTWARDS QUADRUPLE ARROW..RIGHTWARDS QUADRUPLE ARROW
+ {0x2B47, 0x2B4C, prAL, gcSm}, // [6] REVERSE TILDE OPERATOR ABOVE RIGHTWARDS ARROW..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR
+ {0x2B4D, 0x2B54, prAL, gcSo}, // [8] DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..WHITE RIGHT-POINTING PENTAGON
+ {0x2B55, 0x2B59, prAI, gcSo}, // [5] HEAVY LARGE CIRCLE..HEAVY CIRCLED SALTIRE
+ {0x2B5A, 0x2B73, prAL, gcSo}, // [26] SLANTED NORTH ARROW WITH HOOKED HEAD..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR
+ {0x2B76, 0x2B95, prAL, gcSo}, // [32] NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW
+ {0x2B97, 0x2BFF, prAL, gcSo}, // [105] SYMBOL FOR TYPE A ELECTRONICS..HELLSCHREIBER PAUSE SYMBOL
+ {0x2C00, 0x2C5F, prAL, gcLC}, // [96] GLAGOLITIC CAPITAL LETTER AZU..GLAGOLITIC SMALL LETTER CAUDATE CHRIVI
+ {0x2C60, 0x2C7B, prAL, gcLC}, // [28] LATIN CAPITAL LETTER L WITH DOUBLE BAR..LATIN LETTER SMALL CAPITAL TURNED E
+ {0x2C7C, 0x2C7D, prAL, gcLm}, // [2] LATIN SUBSCRIPT SMALL LETTER J..MODIFIER LETTER CAPITAL V
+ {0x2C7E, 0x2C7F, prAL, gcLu}, // [2] LATIN CAPITAL LETTER S WITH SWASH TAIL..LATIN CAPITAL LETTER Z WITH SWASH TAIL
+ {0x2C80, 0x2CE4, prAL, gcLC}, // [101] COPTIC CAPITAL LETTER ALFA..COPTIC SYMBOL KAI
+ {0x2CE5, 0x2CEA, prAL, gcSo}, // [6] COPTIC SYMBOL MI RO..COPTIC SYMBOL SHIMA SIMA
+ {0x2CEB, 0x2CEE, prAL, gcLC}, // [4] COPTIC CAPITAL LETTER CRYPTOGRAMMIC SHEI..COPTIC SMALL LETTER CRYPTOGRAMMIC GANGIA
+ {0x2CEF, 0x2CF1, prCM, gcMn}, // [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS
+ {0x2CF2, 0x2CF3, prAL, gcLC}, // [2] COPTIC CAPITAL LETTER BOHAIRIC KHEI..COPTIC SMALL LETTER BOHAIRIC KHEI
+ {0x2CF9, 0x2CF9, prEX, gcPo}, // COPTIC OLD NUBIAN FULL STOP
+ {0x2CFA, 0x2CFC, prBA, gcPo}, // [3] COPTIC OLD NUBIAN DIRECT QUESTION MARK..COPTIC OLD NUBIAN VERSE DIVIDER
+ {0x2CFD, 0x2CFD, prAL, gcNo}, // COPTIC FRACTION ONE HALF
+ {0x2CFE, 0x2CFE, prEX, gcPo}, // COPTIC FULL STOP
+ {0x2CFF, 0x2CFF, prBA, gcPo}, // COPTIC MORPHOLOGICAL DIVIDER
+ {0x2D00, 0x2D25, prAL, gcLl}, // [38] GEORGIAN SMALL LETTER AN..GEORGIAN SMALL LETTER HOE
+ {0x2D27, 0x2D27, prAL, gcLl}, // GEORGIAN SMALL LETTER YN
+ {0x2D2D, 0x2D2D, prAL, gcLl}, // GEORGIAN SMALL LETTER AEN
+ {0x2D30, 0x2D67, prAL, gcLo}, // [56] TIFINAGH LETTER YA..TIFINAGH LETTER YO
+ {0x2D6F, 0x2D6F, prAL, gcLm}, // TIFINAGH MODIFIER LETTER LABIALIZATION MARK
+ {0x2D70, 0x2D70, prBA, gcPo}, // TIFINAGH SEPARATOR MARK
+ {0x2D7F, 0x2D7F, prCM, gcMn}, // TIFINAGH CONSONANT JOINER
+ {0x2D80, 0x2D96, prAL, gcLo}, // [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SYLLABLE GGWE
+ {0x2DA0, 0x2DA6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SYLLABLE SSO
+ {0x2DA8, 0x2DAE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SYLLABLE CCO
+ {0x2DB0, 0x2DB6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SYLLABLE ZZO
+ {0x2DB8, 0x2DBE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC SYLLABLE CCHO
+ {0x2DC0, 0x2DC6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SYLLABLE QYO
+ {0x2DC8, 0x2DCE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SYLLABLE KYO
+ {0x2DD0, 0x2DD6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SYLLABLE XYO
+ {0x2DD8, 0x2DDE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SYLLABLE GYO
+ {0x2DE0, 0x2DFF, prCM, gcMn}, // [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS
+ {0x2E00, 0x2E01, prQU, gcPo}, // [2] RIGHT ANGLE SUBSTITUTION MARKER..RIGHT ANGLE DOTTED SUBSTITUTION MARKER
+ {0x2E02, 0x2E02, prQU, gcPi}, // LEFT SUBSTITUTION BRACKET
+ {0x2E03, 0x2E03, prQU, gcPf}, // RIGHT SUBSTITUTION BRACKET
+ {0x2E04, 0x2E04, prQU, gcPi}, // LEFT DOTTED SUBSTITUTION BRACKET
+ {0x2E05, 0x2E05, prQU, gcPf}, // RIGHT DOTTED SUBSTITUTION BRACKET
+ {0x2E06, 0x2E08, prQU, gcPo}, // [3] RAISED INTERPOLATION MARKER..DOTTED TRANSPOSITION MARKER
+ {0x2E09, 0x2E09, prQU, gcPi}, // LEFT TRANSPOSITION BRACKET
+ {0x2E0A, 0x2E0A, prQU, gcPf}, // RIGHT TRANSPOSITION BRACKET
+ {0x2E0B, 0x2E0B, prQU, gcPo}, // RAISED SQUARE
+ {0x2E0C, 0x2E0C, prQU, gcPi}, // LEFT RAISED OMISSION BRACKET
+ {0x2E0D, 0x2E0D, prQU, gcPf}, // RIGHT RAISED OMISSION BRACKET
+ {0x2E0E, 0x2E15, prBA, gcPo}, // [8] EDITORIAL CORONIS..UPWARDS ANCORA
+ {0x2E16, 0x2E16, prAL, gcPo}, // DOTTED RIGHT-POINTING ANGLE
+ {0x2E17, 0x2E17, prBA, gcPd}, // DOUBLE OBLIQUE HYPHEN
+ {0x2E18, 0x2E18, prOP, gcPo}, // INVERTED INTERROBANG
+ {0x2E19, 0x2E19, prBA, gcPo}, // PALM BRANCH
+ {0x2E1A, 0x2E1A, prAL, gcPd}, // HYPHEN WITH DIAERESIS
+ {0x2E1B, 0x2E1B, prAL, gcPo}, // TILDE WITH RING ABOVE
+ {0x2E1C, 0x2E1C, prQU, gcPi}, // LEFT LOW PARAPHRASE BRACKET
+ {0x2E1D, 0x2E1D, prQU, gcPf}, // RIGHT LOW PARAPHRASE BRACKET
+ {0x2E1E, 0x2E1F, prAL, gcPo}, // [2] TILDE WITH DOT ABOVE..TILDE WITH DOT BELOW
+ {0x2E20, 0x2E20, prQU, gcPi}, // LEFT VERTICAL BAR WITH QUILL
+ {0x2E21, 0x2E21, prQU, gcPf}, // RIGHT VERTICAL BAR WITH QUILL
+ {0x2E22, 0x2E22, prOP, gcPs}, // TOP LEFT HALF BRACKET
+ {0x2E23, 0x2E23, prCL, gcPe}, // TOP RIGHT HALF BRACKET
+ {0x2E24, 0x2E24, prOP, gcPs}, // BOTTOM LEFT HALF BRACKET
+ {0x2E25, 0x2E25, prCL, gcPe}, // BOTTOM RIGHT HALF BRACKET
+ {0x2E26, 0x2E26, prOP, gcPs}, // LEFT SIDEWAYS U BRACKET
+ {0x2E27, 0x2E27, prCL, gcPe}, // RIGHT SIDEWAYS U BRACKET
+ {0x2E28, 0x2E28, prOP, gcPs}, // LEFT DOUBLE PARENTHESIS
+ {0x2E29, 0x2E29, prCL, gcPe}, // RIGHT DOUBLE PARENTHESIS
+ {0x2E2A, 0x2E2D, prBA, gcPo}, // [4] TWO DOTS OVER ONE DOT PUNCTUATION..FIVE DOT MARK
+ {0x2E2E, 0x2E2E, prEX, gcPo}, // REVERSED QUESTION MARK
+ {0x2E2F, 0x2E2F, prAL, gcLm}, // VERTICAL TILDE
+ {0x2E30, 0x2E31, prBA, gcPo}, // [2] RING POINT..WORD SEPARATOR MIDDLE DOT
+ {0x2E32, 0x2E32, prAL, gcPo}, // TURNED COMMA
+ {0x2E33, 0x2E34, prBA, gcPo}, // [2] RAISED DOT..RAISED COMMA
+ {0x2E35, 0x2E39, prAL, gcPo}, // [5] TURNED SEMICOLON..TOP HALF SECTION SIGN
+ {0x2E3A, 0x2E3B, prB2, gcPd}, // [2] TWO-EM DASH..THREE-EM DASH
+ {0x2E3C, 0x2E3E, prBA, gcPo}, // [3] STENOGRAPHIC FULL STOP..WIGGLY VERTICAL LINE
+ {0x2E3F, 0x2E3F, prAL, gcPo}, // CAPITULUM
+ {0x2E40, 0x2E40, prBA, gcPd}, // DOUBLE HYPHEN
+ {0x2E41, 0x2E41, prBA, gcPo}, // REVERSED COMMA
+ {0x2E42, 0x2E42, prOP, gcPs}, // DOUBLE LOW-REVERSED-9 QUOTATION MARK
+ {0x2E43, 0x2E4A, prBA, gcPo}, // [8] DASH WITH LEFT UPTURN..DOTTED SOLIDUS
+ {0x2E4B, 0x2E4B, prAL, gcPo}, // TRIPLE DAGGER
+ {0x2E4C, 0x2E4C, prBA, gcPo}, // MEDIEVAL COMMA
+ {0x2E4D, 0x2E4D, prAL, gcPo}, // PARAGRAPHUS MARK
+ {0x2E4E, 0x2E4F, prBA, gcPo}, // [2] PUNCTUS ELEVATUS MARK..CORNISH VERSE DIVIDER
+ {0x2E50, 0x2E51, prAL, gcSo}, // [2] CROSS PATTY WITH RIGHT CROSSBAR..CROSS PATTY WITH LEFT CROSSBAR
+ {0x2E52, 0x2E52, prAL, gcPo}, // TIRONIAN SIGN CAPITAL ET
+ {0x2E53, 0x2E54, prEX, gcPo}, // [2] MEDIEVAL EXCLAMATION MARK..MEDIEVAL QUESTION MARK
+ {0x2E55, 0x2E55, prOP, gcPs}, // LEFT SQUARE BRACKET WITH STROKE
+ {0x2E56, 0x2E56, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH STROKE
+ {0x2E57, 0x2E57, prOP, gcPs}, // LEFT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E58, 0x2E58, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E59, 0x2E59, prOP, gcPs}, // TOP HALF LEFT PARENTHESIS
+ {0x2E5A, 0x2E5A, prCL, gcPe}, // TOP HALF RIGHT PARENTHESIS
+ {0x2E5B, 0x2E5B, prOP, gcPs}, // BOTTOM HALF LEFT PARENTHESIS
+ {0x2E5C, 0x2E5C, prCL, gcPe}, // BOTTOM HALF RIGHT PARENTHESIS
+ {0x2E5D, 0x2E5D, prBA, gcPd}, // OBLIQUE HYPHEN
+ {0x2E80, 0x2E99, prID, gcSo}, // [26] CJK RADICAL REPEAT..CJK RADICAL RAP
+ {0x2E9B, 0x2EF3, prID, gcSo}, // [89] CJK RADICAL CHOKE..CJK RADICAL C-SIMPLIFIED TURTLE
+ {0x2F00, 0x2FD5, prID, gcSo}, // [214] KANGXI RADICAL ONE..KANGXI RADICAL FLUTE
+ {0x2FF0, 0x2FFB, prID, gcSo}, // [12] IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID
+ {0x3000, 0x3000, prBA, gcZs}, // IDEOGRAPHIC SPACE
+ {0x3001, 0x3002, prCL, gcPo}, // [2] IDEOGRAPHIC COMMA..IDEOGRAPHIC FULL STOP
+ {0x3003, 0x3003, prID, gcPo}, // DITTO MARK
+ {0x3004, 0x3004, prID, gcSo}, // JAPANESE INDUSTRIAL STANDARD SYMBOL
+ {0x3005, 0x3005, prNS, gcLm}, // IDEOGRAPHIC ITERATION MARK
+ {0x3006, 0x3006, prID, gcLo}, // IDEOGRAPHIC CLOSING MARK
+ {0x3007, 0x3007, prID, gcNl}, // IDEOGRAPHIC NUMBER ZERO
+ {0x3008, 0x3008, prOP, gcPs}, // LEFT ANGLE BRACKET
+ {0x3009, 0x3009, prCL, gcPe}, // RIGHT ANGLE BRACKET
+ {0x300A, 0x300A, prOP, gcPs}, // LEFT DOUBLE ANGLE BRACKET
+ {0x300B, 0x300B, prCL, gcPe}, // RIGHT DOUBLE ANGLE BRACKET
+ {0x300C, 0x300C, prOP, gcPs}, // LEFT CORNER BRACKET
+ {0x300D, 0x300D, prCL, gcPe}, // RIGHT CORNER BRACKET
+ {0x300E, 0x300E, prOP, gcPs}, // LEFT WHITE CORNER BRACKET
+ {0x300F, 0x300F, prCL, gcPe}, // RIGHT WHITE CORNER BRACKET
+ {0x3010, 0x3010, prOP, gcPs}, // LEFT BLACK LENTICULAR BRACKET
+ {0x3011, 0x3011, prCL, gcPe}, // RIGHT BLACK LENTICULAR BRACKET
+ {0x3012, 0x3013, prID, gcSo}, // [2] POSTAL MARK..GETA MARK
+ {0x3014, 0x3014, prOP, gcPs}, // LEFT TORTOISE SHELL BRACKET
+ {0x3015, 0x3015, prCL, gcPe}, // RIGHT TORTOISE SHELL BRACKET
+ {0x3016, 0x3016, prOP, gcPs}, // LEFT WHITE LENTICULAR BRACKET
+ {0x3017, 0x3017, prCL, gcPe}, // RIGHT WHITE LENTICULAR BRACKET
+ {0x3018, 0x3018, prOP, gcPs}, // LEFT WHITE TORTOISE SHELL BRACKET
+ {0x3019, 0x3019, prCL, gcPe}, // RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x301A, 0x301A, prOP, gcPs}, // LEFT WHITE SQUARE BRACKET
+ {0x301B, 0x301B, prCL, gcPe}, // RIGHT WHITE SQUARE BRACKET
+ {0x301C, 0x301C, prNS, gcPd}, // WAVE DASH
+ {0x301D, 0x301D, prOP, gcPs}, // REVERSED DOUBLE PRIME QUOTATION MARK
+ {0x301E, 0x301F, prCL, gcPe}, // [2] DOUBLE PRIME QUOTATION MARK..LOW DOUBLE PRIME QUOTATION MARK
+ {0x3020, 0x3020, prID, gcSo}, // POSTAL MARK FACE
+ {0x3021, 0x3029, prID, gcNl}, // [9] HANGZHOU NUMERAL ONE..HANGZHOU NUMERAL NINE
+ {0x302A, 0x302D, prCM, gcMn}, // [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK
+ {0x302E, 0x302F, prCM, gcMc}, // [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK
+ {0x3030, 0x3030, prID, gcPd}, // WAVY DASH
+ {0x3031, 0x3034, prID, gcLm}, // [4] VERTICAL KANA REPEAT MARK..VERTICAL KANA REPEAT WITH VOICED SOUND MARK UPPER HALF
+ {0x3035, 0x3035, prCM, gcLm}, // VERTICAL KANA REPEAT MARK LOWER HALF
+ {0x3036, 0x3037, prID, gcSo}, // [2] CIRCLED POSTAL MARK..IDEOGRAPHIC TELEGRAPH LINE FEED SEPARATOR SYMBOL
+ {0x3038, 0x303A, prID, gcNl}, // [3] HANGZHOU NUMERAL TEN..HANGZHOU NUMERAL THIRTY
+ {0x303B, 0x303B, prNS, gcLm}, // VERTICAL IDEOGRAPHIC ITERATION MARK
+ {0x303C, 0x303C, prNS, gcLo}, // MASU MARK
+ {0x303D, 0x303D, prID, gcPo}, // PART ALTERNATION MARK
+ {0x303E, 0x303F, prID, gcSo}, // [2] IDEOGRAPHIC VARIATION INDICATOR..IDEOGRAPHIC HALF FILL SPACE
+ {0x3041, 0x3041, prCJ, gcLo}, // HIRAGANA LETTER SMALL A
+ {0x3042, 0x3042, prID, gcLo}, // HIRAGANA LETTER A
+ {0x3043, 0x3043, prCJ, gcLo}, // HIRAGANA LETTER SMALL I
+ {0x3044, 0x3044, prID, gcLo}, // HIRAGANA LETTER I
+ {0x3045, 0x3045, prCJ, gcLo}, // HIRAGANA LETTER SMALL U
+ {0x3046, 0x3046, prID, gcLo}, // HIRAGANA LETTER U
+ {0x3047, 0x3047, prCJ, gcLo}, // HIRAGANA LETTER SMALL E
+ {0x3048, 0x3048, prID, gcLo}, // HIRAGANA LETTER E
+ {0x3049, 0x3049, prCJ, gcLo}, // HIRAGANA LETTER SMALL O
+ {0x304A, 0x3062, prID, gcLo}, // [25] HIRAGANA LETTER O..HIRAGANA LETTER DI
+ {0x3063, 0x3063, prCJ, gcLo}, // HIRAGANA LETTER SMALL TU
+ {0x3064, 0x3082, prID, gcLo}, // [31] HIRAGANA LETTER TU..HIRAGANA LETTER MO
+ {0x3083, 0x3083, prCJ, gcLo}, // HIRAGANA LETTER SMALL YA
+ {0x3084, 0x3084, prID, gcLo}, // HIRAGANA LETTER YA
+ {0x3085, 0x3085, prCJ, gcLo}, // HIRAGANA LETTER SMALL YU
+ {0x3086, 0x3086, prID, gcLo}, // HIRAGANA LETTER YU
+ {0x3087, 0x3087, prCJ, gcLo}, // HIRAGANA LETTER SMALL YO
+ {0x3088, 0x308D, prID, gcLo}, // [6] HIRAGANA LETTER YO..HIRAGANA LETTER RO
+ {0x308E, 0x308E, prCJ, gcLo}, // HIRAGANA LETTER SMALL WA
+ {0x308F, 0x3094, prID, gcLo}, // [6] HIRAGANA LETTER WA..HIRAGANA LETTER VU
+ {0x3095, 0x3096, prCJ, gcLo}, // [2] HIRAGANA LETTER SMALL KA..HIRAGANA LETTER SMALL KE
+ {0x3099, 0x309A, prCM, gcMn}, // [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309B, 0x309C, prNS, gcSk}, // [2] KATAKANA-HIRAGANA VOICED SOUND MARK..KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309D, 0x309E, prNS, gcLm}, // [2] HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK
+ {0x309F, 0x309F, prID, gcLo}, // HIRAGANA DIGRAPH YORI
+ {0x30A0, 0x30A0, prNS, gcPd}, // KATAKANA-HIRAGANA DOUBLE HYPHEN
+ {0x30A1, 0x30A1, prCJ, gcLo}, // KATAKANA LETTER SMALL A
+ {0x30A2, 0x30A2, prID, gcLo}, // KATAKANA LETTER A
+ {0x30A3, 0x30A3, prCJ, gcLo}, // KATAKANA LETTER SMALL I
+ {0x30A4, 0x30A4, prID, gcLo}, // KATAKANA LETTER I
+ {0x30A5, 0x30A5, prCJ, gcLo}, // KATAKANA LETTER SMALL U
+ {0x30A6, 0x30A6, prID, gcLo}, // KATAKANA LETTER U
+ {0x30A7, 0x30A7, prCJ, gcLo}, // KATAKANA LETTER SMALL E
+ {0x30A8, 0x30A8, prID, gcLo}, // KATAKANA LETTER E
+ {0x30A9, 0x30A9, prCJ, gcLo}, // KATAKANA LETTER SMALL O
+ {0x30AA, 0x30C2, prID, gcLo}, // [25] KATAKANA LETTER O..KATAKANA LETTER DI
+ {0x30C3, 0x30C3, prCJ, gcLo}, // KATAKANA LETTER SMALL TU
+ {0x30C4, 0x30E2, prID, gcLo}, // [31] KATAKANA LETTER TU..KATAKANA LETTER MO
+ {0x30E3, 0x30E3, prCJ, gcLo}, // KATAKANA LETTER SMALL YA
+ {0x30E4, 0x30E4, prID, gcLo}, // KATAKANA LETTER YA
+ {0x30E5, 0x30E5, prCJ, gcLo}, // KATAKANA LETTER SMALL YU
+ {0x30E6, 0x30E6, prID, gcLo}, // KATAKANA LETTER YU
+ {0x30E7, 0x30E7, prCJ, gcLo}, // KATAKANA LETTER SMALL YO
+ {0x30E8, 0x30ED, prID, gcLo}, // [6] KATAKANA LETTER YO..KATAKANA LETTER RO
+ {0x30EE, 0x30EE, prCJ, gcLo}, // KATAKANA LETTER SMALL WA
+ {0x30EF, 0x30F4, prID, gcLo}, // [6] KATAKANA LETTER WA..KATAKANA LETTER VU
+ {0x30F5, 0x30F6, prCJ, gcLo}, // [2] KATAKANA LETTER SMALL KA..KATAKANA LETTER SMALL KE
+ {0x30F7, 0x30FA, prID, gcLo}, // [4] KATAKANA LETTER VA..KATAKANA LETTER VO
+ {0x30FB, 0x30FB, prNS, gcPo}, // KATAKANA MIDDLE DOT
+ {0x30FC, 0x30FC, prCJ, gcLm}, // KATAKANA-HIRAGANA PROLONGED SOUND MARK
+ {0x30FD, 0x30FE, prNS, gcLm}, // [2] KATAKANA ITERATION MARK..KATAKANA VOICED ITERATION MARK
+ {0x30FF, 0x30FF, prID, gcLo}, // KATAKANA DIGRAPH KOTO
+ {0x3105, 0x312F, prID, gcLo}, // [43] BOPOMOFO LETTER B..BOPOMOFO LETTER NN
+ {0x3131, 0x318E, prID, gcLo}, // [94] HANGUL LETTER KIYEOK..HANGUL LETTER ARAEAE
+ {0x3190, 0x3191, prID, gcSo}, // [2] IDEOGRAPHIC ANNOTATION LINKING MARK..IDEOGRAPHIC ANNOTATION REVERSE MARK
+ {0x3192, 0x3195, prID, gcNo}, // [4] IDEOGRAPHIC ANNOTATION ONE MARK..IDEOGRAPHIC ANNOTATION FOUR MARK
+ {0x3196, 0x319F, prID, gcSo}, // [10] IDEOGRAPHIC ANNOTATION TOP MARK..IDEOGRAPHIC ANNOTATION MAN MARK
+ {0x31A0, 0x31BF, prID, gcLo}, // [32] BOPOMOFO LETTER BU..BOPOMOFO LETTER AH
+ {0x31C0, 0x31E3, prID, gcSo}, // [36] CJK STROKE T..CJK STROKE Q
+ {0x31F0, 0x31FF, prCJ, gcLo}, // [16] KATAKANA LETTER SMALL KU..KATAKANA LETTER SMALL RO
+ {0x3200, 0x321E, prID, gcSo}, // [31] PARENTHESIZED HANGUL KIYEOK..PARENTHESIZED KOREAN CHARACTER O HU
+ {0x3220, 0x3229, prID, gcNo}, // [10] PARENTHESIZED IDEOGRAPH ONE..PARENTHESIZED IDEOGRAPH TEN
+ {0x322A, 0x3247, prID, gcSo}, // [30] PARENTHESIZED IDEOGRAPH MOON..CIRCLED IDEOGRAPH KOTO
+ {0x3248, 0x324F, prAI, gcNo}, // [8] CIRCLED NUMBER TEN ON BLACK SQUARE..CIRCLED NUMBER EIGHTY ON BLACK SQUARE
+ {0x3250, 0x3250, prID, gcSo}, // PARTNERSHIP SIGN
+ {0x3251, 0x325F, prID, gcNo}, // [15] CIRCLED NUMBER TWENTY ONE..CIRCLED NUMBER THIRTY FIVE
+ {0x3260, 0x327F, prID, gcSo}, // [32] CIRCLED HANGUL KIYEOK..KOREAN STANDARD SYMBOL
+ {0x3280, 0x3289, prID, gcNo}, // [10] CIRCLED IDEOGRAPH ONE..CIRCLED IDEOGRAPH TEN
+ {0x328A, 0x32B0, prID, gcSo}, // [39] CIRCLED IDEOGRAPH MOON..CIRCLED IDEOGRAPH NIGHT
+ {0x32B1, 0x32BF, prID, gcNo}, // [15] CIRCLED NUMBER THIRTY SIX..CIRCLED NUMBER FIFTY
+ {0x32C0, 0x32FF, prID, gcSo}, // [64] IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY..SQUARE ERA NAME REIWA
+ {0x3300, 0x33FF, prID, gcSo}, // [256] SQUARE APAATO..SQUARE GAL
+ {0x3400, 0x4DBF, prID, gcLo}, // [6592] CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DBF
+ {0x4DC0, 0x4DFF, prAL, gcSo}, // [64] HEXAGRAM FOR THE CREATIVE HEAVEN..HEXAGRAM FOR BEFORE COMPLETION
+ {0x4E00, 0x9FFF, prID, gcLo}, // [20992] CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FFF
+ {0xA000, 0xA014, prID, gcLo}, // [21] YI SYLLABLE IT..YI SYLLABLE E
+ {0xA015, 0xA015, prNS, gcLm}, // YI SYLLABLE WU
+ {0xA016, 0xA48C, prID, gcLo}, // [1143] YI SYLLABLE BIT..YI SYLLABLE YYR
+ {0xA490, 0xA4C6, prID, gcSo}, // [55] YI RADICAL QOT..YI RADICAL KE
+ {0xA4D0, 0xA4F7, prAL, gcLo}, // [40] LISU LETTER BA..LISU LETTER OE
+ {0xA4F8, 0xA4FD, prAL, gcLm}, // [6] LISU LETTER TONE MYA TI..LISU LETTER TONE MYA JEU
+ {0xA4FE, 0xA4FF, prBA, gcPo}, // [2] LISU PUNCTUATION COMMA..LISU PUNCTUATION FULL STOP
+ {0xA500, 0xA60B, prAL, gcLo}, // [268] VAI SYLLABLE EE..VAI SYLLABLE NG
+ {0xA60C, 0xA60C, prAL, gcLm}, // VAI SYLLABLE LENGTHENER
+ {0xA60D, 0xA60D, prBA, gcPo}, // VAI COMMA
+ {0xA60E, 0xA60E, prEX, gcPo}, // VAI FULL STOP
+ {0xA60F, 0xA60F, prBA, gcPo}, // VAI QUESTION MARK
+ {0xA610, 0xA61F, prAL, gcLo}, // [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL JONG
+ {0xA620, 0xA629, prNU, gcNd}, // [10] VAI DIGIT ZERO..VAI DIGIT NINE
+ {0xA62A, 0xA62B, prAL, gcLo}, // [2] VAI SYLLABLE NDOLE MA..VAI SYLLABLE NDOLE DO
+ {0xA640, 0xA66D, prAL, gcLC}, // [46] CYRILLIC CAPITAL LETTER ZEMLYA..CYRILLIC SMALL LETTER DOUBLE MONOCULAR O
+ {0xA66E, 0xA66E, prAL, gcLo}, // CYRILLIC LETTER MULTIOCULAR O
+ {0xA66F, 0xA66F, prCM, gcMn}, // COMBINING CYRILLIC VZMET
+ {0xA670, 0xA672, prCM, gcMe}, // [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN
+ {0xA673, 0xA673, prAL, gcPo}, // SLAVONIC ASTERISK
+ {0xA674, 0xA67D, prCM, gcMn}, // [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK
+ {0xA67E, 0xA67E, prAL, gcPo}, // CYRILLIC KAVYKA
+ {0xA67F, 0xA67F, prAL, gcLm}, // CYRILLIC PAYEROK
+ {0xA680, 0xA69B, prAL, gcLC}, // [28] CYRILLIC CAPITAL LETTER DWE..CYRILLIC SMALL LETTER CROSSED O
+ {0xA69C, 0xA69D, prAL, gcLm}, // [2] MODIFIER LETTER CYRILLIC HARD SIGN..MODIFIER LETTER CYRILLIC SOFT SIGN
+ {0xA69E, 0xA69F, prCM, gcMn}, // [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E
+ {0xA6A0, 0xA6E5, prAL, gcLo}, // [70] BAMUM LETTER A..BAMUM LETTER KI
+ {0xA6E6, 0xA6EF, prAL, gcNl}, // [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM
+ {0xA6F0, 0xA6F1, prCM, gcMn}, // [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS
+ {0xA6F2, 0xA6F2, prAL, gcPo}, // BAMUM NJAEMLI
+ {0xA6F3, 0xA6F7, prBA, gcPo}, // [5] BAMUM FULL STOP..BAMUM QUESTION MARK
+ {0xA700, 0xA716, prAL, gcSk}, // [23] MODIFIER LETTER CHINESE TONE YIN PING..MODIFIER LETTER EXTRA-LOW LEFT-STEM TONE BAR
+ {0xA717, 0xA71F, prAL, gcLm}, // [9] MODIFIER LETTER DOT VERTICAL BAR..MODIFIER LETTER LOW INVERTED EXCLAMATION MARK
+ {0xA720, 0xA721, prAL, gcSk}, // [2] MODIFIER LETTER STRESS AND HIGH TONE..MODIFIER LETTER STRESS AND LOW TONE
+ {0xA722, 0xA76F, prAL, gcLC}, // [78] LATIN CAPITAL LETTER EGYPTOLOGICAL ALEF..LATIN SMALL LETTER CON
+ {0xA770, 0xA770, prAL, gcLm}, // MODIFIER LETTER US
+ {0xA771, 0xA787, prAL, gcLC}, // [23] LATIN SMALL LETTER DUM..LATIN SMALL LETTER INSULAR T
+ {0xA788, 0xA788, prAL, gcLm}, // MODIFIER LETTER LOW CIRCUMFLEX ACCENT
+ {0xA789, 0xA78A, prAL, gcSk}, // [2] MODIFIER LETTER COLON..MODIFIER LETTER SHORT EQUALS SIGN
+ {0xA78B, 0xA78E, prAL, gcLC}, // [4] LATIN CAPITAL LETTER SALTILLO..LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT
+ {0xA78F, 0xA78F, prAL, gcLo}, // LATIN LETTER SINOLOGICAL DOT
+ {0xA790, 0xA7CA, prAL, gcLC}, // [59] LATIN CAPITAL LETTER N WITH DESCENDER..LATIN SMALL LETTER S WITH SHORT STROKE OVERLAY
+ {0xA7D0, 0xA7D1, prAL, gcLC}, // [2] LATIN CAPITAL LETTER CLOSED INSULAR G..LATIN SMALL LETTER CLOSED INSULAR G
+ {0xA7D3, 0xA7D3, prAL, gcLl}, // LATIN SMALL LETTER DOUBLE THORN
+ {0xA7D5, 0xA7D9, prAL, gcLC}, // [5] LATIN SMALL LETTER DOUBLE WYNN..LATIN SMALL LETTER SIGMOID S
+ {0xA7F2, 0xA7F4, prAL, gcLm}, // [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q
+ {0xA7F5, 0xA7F6, prAL, gcLC}, // [2] LATIN CAPITAL LETTER REVERSED HALF H..LATIN SMALL LETTER REVERSED HALF H
+ {0xA7F7, 0xA7F7, prAL, gcLo}, // LATIN EPIGRAPHIC LETTER SIDEWAYS I
+ {0xA7F8, 0xA7F9, prAL, gcLm}, // [2] MODIFIER LETTER CAPITAL H WITH STROKE..MODIFIER LETTER SMALL LIGATURE OE
+ {0xA7FA, 0xA7FA, prAL, gcLl}, // LATIN LETTER SMALL CAPITAL TURNED M
+ {0xA7FB, 0xA7FF, prAL, gcLo}, // [5] LATIN EPIGRAPHIC LETTER REVERSED F..LATIN EPIGRAPHIC LETTER ARCHAIC M
+ {0xA800, 0xA801, prAL, gcLo}, // [2] SYLOTI NAGRI LETTER A..SYLOTI NAGRI LETTER I
+ {0xA802, 0xA802, prCM, gcMn}, // SYLOTI NAGRI SIGN DVISVARA
+ {0xA803, 0xA805, prAL, gcLo}, // [3] SYLOTI NAGRI LETTER U..SYLOTI NAGRI LETTER O
+ {0xA806, 0xA806, prCM, gcMn}, // SYLOTI NAGRI SIGN HASANTA
+ {0xA807, 0xA80A, prAL, gcLo}, // [4] SYLOTI NAGRI LETTER KO..SYLOTI NAGRI LETTER GHO
+ {0xA80B, 0xA80B, prCM, gcMn}, // SYLOTI NAGRI SIGN ANUSVARA
+ {0xA80C, 0xA822, prAL, gcLo}, // [23] SYLOTI NAGRI LETTER CO..SYLOTI NAGRI LETTER HO
+ {0xA823, 0xA824, prCM, gcMc}, // [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I
+ {0xA825, 0xA826, prCM, gcMn}, // [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E
+ {0xA827, 0xA827, prCM, gcMc}, // SYLOTI NAGRI VOWEL SIGN OO
+ {0xA828, 0xA82B, prAL, gcSo}, // [4] SYLOTI NAGRI POETRY MARK-1..SYLOTI NAGRI POETRY MARK-4
+ {0xA82C, 0xA82C, prCM, gcMn}, // SYLOTI NAGRI SIGN ALTERNATE HASANTA
+ {0xA830, 0xA835, prAL, gcNo}, // [6] NORTH INDIC FRACTION ONE QUARTER..NORTH INDIC FRACTION THREE SIXTEENTHS
+ {0xA836, 0xA837, prAL, gcSo}, // [2] NORTH INDIC QUARTER MARK..NORTH INDIC PLACEHOLDER MARK
+ {0xA838, 0xA838, prPO, gcSc}, // NORTH INDIC RUPEE MARK
+ {0xA839, 0xA839, prAL, gcSo}, // NORTH INDIC QUANTITY MARK
+ {0xA840, 0xA873, prAL, gcLo}, // [52] PHAGS-PA LETTER KA..PHAGS-PA LETTER CANDRABINDU
+ {0xA874, 0xA875, prBB, gcPo}, // [2] PHAGS-PA SINGLE HEAD MARK..PHAGS-PA DOUBLE HEAD MARK
+ {0xA876, 0xA877, prEX, gcPo}, // [2] PHAGS-PA MARK SHAD..PHAGS-PA MARK DOUBLE SHAD
+ {0xA880, 0xA881, prCM, gcMc}, // [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA
+ {0xA882, 0xA8B3, prAL, gcLo}, // [50] SAURASHTRA LETTER A..SAURASHTRA LETTER LLA
+ {0xA8B4, 0xA8C3, prCM, gcMc}, // [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU
+ {0xA8C4, 0xA8C5, prCM, gcMn}, // [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU
+ {0xA8CE, 0xA8CF, prBA, gcPo}, // [2] SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA
+ {0xA8D0, 0xA8D9, prNU, gcNd}, // [10] SAURASHTRA DIGIT ZERO..SAURASHTRA DIGIT NINE
+ {0xA8E0, 0xA8F1, prCM, gcMn}, // [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA
+ {0xA8F2, 0xA8F7, prAL, gcLo}, // [6] DEVANAGARI SIGN SPACING CANDRABINDU..DEVANAGARI SIGN CANDRABINDU AVAGRAHA
+ {0xA8F8, 0xA8FA, prAL, gcPo}, // [3] DEVANAGARI SIGN PUSHPIKA..DEVANAGARI CARET
+ {0xA8FB, 0xA8FB, prAL, gcLo}, // DEVANAGARI HEADSTROKE
+ {0xA8FC, 0xA8FC, prBB, gcPo}, // DEVANAGARI SIGN SIDDHAM
+ {0xA8FD, 0xA8FE, prAL, gcLo}, // [2] DEVANAGARI JAIN OM..DEVANAGARI LETTER AY
+ {0xA8FF, 0xA8FF, prCM, gcMn}, // DEVANAGARI VOWEL SIGN AY
+ {0xA900, 0xA909, prNU, gcNd}, // [10] KAYAH LI DIGIT ZERO..KAYAH LI DIGIT NINE
+ {0xA90A, 0xA925, prAL, gcLo}, // [28] KAYAH LI LETTER KA..KAYAH LI LETTER OO
+ {0xA926, 0xA92D, prCM, gcMn}, // [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU
+ {0xA92E, 0xA92F, prBA, gcPo}, // [2] KAYAH LI SIGN CWI..KAYAH LI SIGN SHYA
+ {0xA930, 0xA946, prAL, gcLo}, // [23] REJANG LETTER KA..REJANG LETTER A
+ {0xA947, 0xA951, prCM, gcMn}, // [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R
+ {0xA952, 0xA953, prCM, gcMc}, // [2] REJANG CONSONANT SIGN H..REJANG VIRAMA
+ {0xA95F, 0xA95F, prAL, gcPo}, // REJANG SECTION MARK
+ {0xA960, 0xA97C, prJL, gcLo}, // [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH
+ {0xA980, 0xA982, prCM, gcMn}, // [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR
+ {0xA983, 0xA983, prCM, gcMc}, // JAVANESE SIGN WIGNYAN
+ {0xA984, 0xA9B2, prAL, gcLo}, // [47] JAVANESE LETTER A..JAVANESE LETTER HA
+ {0xA9B3, 0xA9B3, prCM, gcMn}, // JAVANESE SIGN CECAK TELU
+ {0xA9B4, 0xA9B5, prCM, gcMc}, // [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG
+ {0xA9B6, 0xA9B9, prCM, gcMn}, // [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT
+ {0xA9BA, 0xA9BB, prCM, gcMc}, // [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE
+ {0xA9BC, 0xA9BD, prCM, gcMn}, // [2] JAVANESE VOWEL SIGN PEPET..JAVANESE CONSONANT SIGN KERET
+ {0xA9BE, 0xA9C0, prCM, gcMc}, // [3] JAVANESE CONSONANT SIGN PENGKAL..JAVANESE PANGKON
+ {0xA9C1, 0xA9C6, prAL, gcPo}, // [6] JAVANESE LEFT RERENGGAN..JAVANESE PADA WINDU
+ {0xA9C7, 0xA9C9, prBA, gcPo}, // [3] JAVANESE PADA PANGKAT..JAVANESE PADA LUNGSI
+ {0xA9CA, 0xA9CD, prAL, gcPo}, // [4] JAVANESE PADA ADEG..JAVANESE TURNED PADA PISELEH
+ {0xA9CF, 0xA9CF, prAL, gcLm}, // JAVANESE PANGRANGKEP
+ {0xA9D0, 0xA9D9, prNU, gcNd}, // [10] JAVANESE DIGIT ZERO..JAVANESE DIGIT NINE
+ {0xA9DE, 0xA9DF, prAL, gcPo}, // [2] JAVANESE PADA TIRTA TUMETES..JAVANESE PADA ISEN-ISEN
+ {0xA9E0, 0xA9E4, prSA, gcLo}, // [5] MYANMAR LETTER SHAN GHA..MYANMAR LETTER SHAN BHA
+ {0xA9E5, 0xA9E5, prSA, gcMn}, // MYANMAR SIGN SHAN SAW
+ {0xA9E6, 0xA9E6, prSA, gcLm}, // MYANMAR MODIFIER LETTER SHAN REDUPLICATION
+ {0xA9E7, 0xA9EF, prSA, gcLo}, // [9] MYANMAR LETTER TAI LAING NYA..MYANMAR LETTER TAI LAING NNA
+ {0xA9F0, 0xA9F9, prNU, gcNd}, // [10] MYANMAR TAI LAING DIGIT ZERO..MYANMAR TAI LAING DIGIT NINE
+ {0xA9FA, 0xA9FE, prSA, gcLo}, // [5] MYANMAR LETTER TAI LAING LLA..MYANMAR LETTER TAI LAING BHA
+ {0xAA00, 0xAA28, prAL, gcLo}, // [41] CHAM LETTER A..CHAM LETTER HA
+ {0xAA29, 0xAA2E, prCM, gcMn}, // [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE
+ {0xAA2F, 0xAA30, prCM, gcMc}, // [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
+ {0xAA31, 0xAA32, prCM, gcMn}, // [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE
+ {0xAA33, 0xAA34, prCM, gcMc}, // [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA
+ {0xAA35, 0xAA36, prCM, gcMn}, // [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA
+ {0xAA40, 0xAA42, prAL, gcLo}, // [3] CHAM LETTER FINAL K..CHAM LETTER FINAL NG
+ {0xAA43, 0xAA43, prCM, gcMn}, // CHAM CONSONANT SIGN FINAL NG
+ {0xAA44, 0xAA4B, prAL, gcLo}, // [8] CHAM LETTER FINAL CH..CHAM LETTER FINAL SS
+ {0xAA4C, 0xAA4C, prCM, gcMn}, // CHAM CONSONANT SIGN FINAL M
+ {0xAA4D, 0xAA4D, prCM, gcMc}, // CHAM CONSONANT SIGN FINAL H
+ {0xAA50, 0xAA59, prNU, gcNd}, // [10] CHAM DIGIT ZERO..CHAM DIGIT NINE
+ {0xAA5C, 0xAA5C, prAL, gcPo}, // CHAM PUNCTUATION SPIRAL
+ {0xAA5D, 0xAA5F, prBA, gcPo}, // [3] CHAM PUNCTUATION DANDA..CHAM PUNCTUATION TRIPLE DANDA
+ {0xAA60, 0xAA6F, prSA, gcLo}, // [16] MYANMAR LETTER KHAMTI GA..MYANMAR LETTER KHAMTI FA
+ {0xAA70, 0xAA70, prSA, gcLm}, // MYANMAR MODIFIER LETTER KHAMTI REDUPLICATION
+ {0xAA71, 0xAA76, prSA, gcLo}, // [6] MYANMAR LETTER KHAMTI XA..MYANMAR LOGOGRAM KHAMTI HM
+ {0xAA77, 0xAA79, prSA, gcSo}, // [3] MYANMAR SYMBOL AITON EXCLAMATION..MYANMAR SYMBOL AITON TWO
+ {0xAA7A, 0xAA7A, prSA, gcLo}, // MYANMAR LETTER AITON RA
+ {0xAA7B, 0xAA7B, prSA, gcMc}, // MYANMAR SIGN PAO KAREN TONE
+ {0xAA7C, 0xAA7C, prSA, gcMn}, // MYANMAR SIGN TAI LAING TONE-2
+ {0xAA7D, 0xAA7D, prSA, gcMc}, // MYANMAR SIGN TAI LAING TONE-5
+ {0xAA7E, 0xAA7F, prSA, gcLo}, // [2] MYANMAR LETTER SHWE PALAUNG CHA..MYANMAR LETTER SHWE PALAUNG SHA
+ {0xAA80, 0xAAAF, prSA, gcLo}, // [48] TAI VIET LETTER LOW KO..TAI VIET LETTER HIGH O
+ {0xAAB0, 0xAAB0, prSA, gcMn}, // TAI VIET MAI KANG
+ {0xAAB1, 0xAAB1, prSA, gcLo}, // TAI VIET VOWEL AA
+ {0xAAB2, 0xAAB4, prSA, gcMn}, // [3] TAI VIET VOWEL I..TAI VIET VOWEL U
+ {0xAAB5, 0xAAB6, prSA, gcLo}, // [2] TAI VIET VOWEL E..TAI VIET VOWEL O
+ {0xAAB7, 0xAAB8, prSA, gcMn}, // [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
+ {0xAAB9, 0xAABD, prSA, gcLo}, // [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN
+ {0xAABE, 0xAABF, prSA, gcMn}, // [2] TAI VIET VOWEL AM..TAI VIET TONE MAI EK
+ {0xAAC0, 0xAAC0, prSA, gcLo}, // TAI VIET TONE MAI NUENG
+ {0xAAC1, 0xAAC1, prSA, gcMn}, // TAI VIET TONE MAI THO
+ {0xAAC2, 0xAAC2, prSA, gcLo}, // TAI VIET TONE MAI SONG
+ {0xAADB, 0xAADC, prSA, gcLo}, // [2] TAI VIET SYMBOL KON..TAI VIET SYMBOL NUENG
+ {0xAADD, 0xAADD, prSA, gcLm}, // TAI VIET SYMBOL SAM
+ {0xAADE, 0xAADF, prSA, gcPo}, // [2] TAI VIET SYMBOL HO HOI..TAI VIET SYMBOL KOI KOI
+ {0xAAE0, 0xAAEA, prAL, gcLo}, // [11] MEETEI MAYEK LETTER E..MEETEI MAYEK LETTER SSA
+ {0xAAEB, 0xAAEB, prCM, gcMc}, // MEETEI MAYEK VOWEL SIGN II
+ {0xAAEC, 0xAAED, prCM, gcMn}, // [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI
+ {0xAAEE, 0xAAEF, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU
+ {0xAAF0, 0xAAF1, prBA, gcPo}, // [2] MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM
+ {0xAAF2, 0xAAF2, prAL, gcLo}, // MEETEI MAYEK ANJI
+ {0xAAF3, 0xAAF4, prAL, gcLm}, // [2] MEETEI MAYEK SYLLABLE REPETITION MARK..MEETEI MAYEK WORD REPETITION MARK
+ {0xAAF5, 0xAAF5, prCM, gcMc}, // MEETEI MAYEK VOWEL SIGN VISARGA
+ {0xAAF6, 0xAAF6, prCM, gcMn}, // MEETEI MAYEK VIRAMA
+ {0xAB01, 0xAB06, prAL, gcLo}, // [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC SYLLABLE TTHO
+ {0xAB09, 0xAB0E, prAL, gcLo}, // [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC SYLLABLE DDHO
+ {0xAB11, 0xAB16, prAL, gcLo}, // [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SYLLABLE DZO
+ {0xAB20, 0xAB26, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC SYLLABLE CCHHO
+ {0xAB28, 0xAB2E, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SYLLABLE BBO
+ {0xAB30, 0xAB5A, prAL, gcLl}, // [43] LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG
+ {0xAB5B, 0xAB5B, prAL, gcSk}, // MODIFIER BREVE WITH INVERTED BREVE
+ {0xAB5C, 0xAB5F, prAL, gcLm}, // [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK
+ {0xAB60, 0xAB68, prAL, gcLl}, // [9] LATIN SMALL LETTER SAKHA YAT..LATIN SMALL LETTER TURNED R WITH MIDDLE TILDE
+ {0xAB69, 0xAB69, prAL, gcLm}, // MODIFIER LETTER SMALL TURNED W
+ {0xAB6A, 0xAB6B, prAL, gcSk}, // [2] MODIFIER LETTER LEFT TACK..MODIFIER LETTER RIGHT TACK
+ {0xAB70, 0xABBF, prAL, gcLl}, // [80] CHEROKEE SMALL LETTER A..CHEROKEE SMALL LETTER YA
+ {0xABC0, 0xABE2, prAL, gcLo}, // [35] MEETEI MAYEK LETTER KOK..MEETEI MAYEK LETTER I LONSUM
+ {0xABE3, 0xABE4, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
+ {0xABE5, 0xABE5, prCM, gcMn}, // MEETEI MAYEK VOWEL SIGN ANAP
+ {0xABE6, 0xABE7, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP
+ {0xABE8, 0xABE8, prCM, gcMn}, // MEETEI MAYEK VOWEL SIGN UNAP
+ {0xABE9, 0xABEA, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG
+ {0xABEB, 0xABEB, prBA, gcPo}, // MEETEI MAYEK CHEIKHEI
+ {0xABEC, 0xABEC, prCM, gcMc}, // MEETEI MAYEK LUM IYEK
+ {0xABED, 0xABED, prCM, gcMn}, // MEETEI MAYEK APUN IYEK
+ {0xABF0, 0xABF9, prNU, gcNd}, // [10] MEETEI MAYEK DIGIT ZERO..MEETEI MAYEK DIGIT NINE
+ {0xAC00, 0xAC00, prH2, gcLo}, // HANGUL SYLLABLE GA
+ {0xAC01, 0xAC1B, prH3, gcLo}, // [27] HANGUL SYLLABLE GAG..HANGUL SYLLABLE GAH
+ {0xAC1C, 0xAC1C, prH2, gcLo}, // HANGUL SYLLABLE GAE
+ {0xAC1D, 0xAC37, prH3, gcLo}, // [27] HANGUL SYLLABLE GAEG..HANGUL SYLLABLE GAEH
+ {0xAC38, 0xAC38, prH2, gcLo}, // HANGUL SYLLABLE GYA
+ {0xAC39, 0xAC53, prH3, gcLo}, // [27] HANGUL SYLLABLE GYAG..HANGUL SYLLABLE GYAH
+ {0xAC54, 0xAC54, prH2, gcLo}, // HANGUL SYLLABLE GYAE
+ {0xAC55, 0xAC6F, prH3, gcLo}, // [27] HANGUL SYLLABLE GYAEG..HANGUL SYLLABLE GYAEH
+ {0xAC70, 0xAC70, prH2, gcLo}, // HANGUL SYLLABLE GEO
+ {0xAC71, 0xAC8B, prH3, gcLo}, // [27] HANGUL SYLLABLE GEOG..HANGUL SYLLABLE GEOH
+ {0xAC8C, 0xAC8C, prH2, gcLo}, // HANGUL SYLLABLE GE
+ {0xAC8D, 0xACA7, prH3, gcLo}, // [27] HANGUL SYLLABLE GEG..HANGUL SYLLABLE GEH
+ {0xACA8, 0xACA8, prH2, gcLo}, // HANGUL SYLLABLE GYEO
+ {0xACA9, 0xACC3, prH3, gcLo}, // [27] HANGUL SYLLABLE GYEOG..HANGUL SYLLABLE GYEOH
+ {0xACC4, 0xACC4, prH2, gcLo}, // HANGUL SYLLABLE GYE
+ {0xACC5, 0xACDF, prH3, gcLo}, // [27] HANGUL SYLLABLE GYEG..HANGUL SYLLABLE GYEH
+ {0xACE0, 0xACE0, prH2, gcLo}, // HANGUL SYLLABLE GO
+ {0xACE1, 0xACFB, prH3, gcLo}, // [27] HANGUL SYLLABLE GOG..HANGUL SYLLABLE GOH
+ {0xACFC, 0xACFC, prH2, gcLo}, // HANGUL SYLLABLE GWA
+ {0xACFD, 0xAD17, prH3, gcLo}, // [27] HANGUL SYLLABLE GWAG..HANGUL SYLLABLE GWAH
+ {0xAD18, 0xAD18, prH2, gcLo}, // HANGUL SYLLABLE GWAE
+ {0xAD19, 0xAD33, prH3, gcLo}, // [27] HANGUL SYLLABLE GWAEG..HANGUL SYLLABLE GWAEH
+ {0xAD34, 0xAD34, prH2, gcLo}, // HANGUL SYLLABLE GOE
+ {0xAD35, 0xAD4F, prH3, gcLo}, // [27] HANGUL SYLLABLE GOEG..HANGUL SYLLABLE GOEH
+ {0xAD50, 0xAD50, prH2, gcLo}, // HANGUL SYLLABLE GYO
+ {0xAD51, 0xAD6B, prH3, gcLo}, // [27] HANGUL SYLLABLE GYOG..HANGUL SYLLABLE GYOH
+ {0xAD6C, 0xAD6C, prH2, gcLo}, // HANGUL SYLLABLE GU
+ {0xAD6D, 0xAD87, prH3, gcLo}, // [27] HANGUL SYLLABLE GUG..HANGUL SYLLABLE GUH
+ {0xAD88, 0xAD88, prH2, gcLo}, // HANGUL SYLLABLE GWEO
+ {0xAD89, 0xADA3, prH3, gcLo}, // [27] HANGUL SYLLABLE GWEOG..HANGUL SYLLABLE GWEOH
+ {0xADA4, 0xADA4, prH2, gcLo}, // HANGUL SYLLABLE GWE
+ {0xADA5, 0xADBF, prH3, gcLo}, // [27] HANGUL SYLLABLE GWEG..HANGUL SYLLABLE GWEH
+ {0xADC0, 0xADC0, prH2, gcLo}, // HANGUL SYLLABLE GWI
+ {0xADC1, 0xADDB, prH3, gcLo}, // [27] HANGUL SYLLABLE GWIG..HANGUL SYLLABLE GWIH
+ {0xADDC, 0xADDC, prH2, gcLo}, // HANGUL SYLLABLE GYU
+ {0xADDD, 0xADF7, prH3, gcLo}, // [27] HANGUL SYLLABLE GYUG..HANGUL SYLLABLE GYUH
+ {0xADF8, 0xADF8, prH2, gcLo}, // HANGUL SYLLABLE GEU
+ {0xADF9, 0xAE13, prH3, gcLo}, // [27] HANGUL SYLLABLE GEUG..HANGUL SYLLABLE GEUH
+ {0xAE14, 0xAE14, prH2, gcLo}, // HANGUL SYLLABLE GYI
+ {0xAE15, 0xAE2F, prH3, gcLo}, // [27] HANGUL SYLLABLE GYIG..HANGUL SYLLABLE GYIH
+ {0xAE30, 0xAE30, prH2, gcLo}, // HANGUL SYLLABLE GI
+ {0xAE31, 0xAE4B, prH3, gcLo}, // [27] HANGUL SYLLABLE GIG..HANGUL SYLLABLE GIH
+ {0xAE4C, 0xAE4C, prH2, gcLo}, // HANGUL SYLLABLE GGA
+ {0xAE4D, 0xAE67, prH3, gcLo}, // [27] HANGUL SYLLABLE GGAG..HANGUL SYLLABLE GGAH
+ {0xAE68, 0xAE68, prH2, gcLo}, // HANGUL SYLLABLE GGAE
+ {0xAE69, 0xAE83, prH3, gcLo}, // [27] HANGUL SYLLABLE GGAEG..HANGUL SYLLABLE GGAEH
+ {0xAE84, 0xAE84, prH2, gcLo}, // HANGUL SYLLABLE GGYA
+ {0xAE85, 0xAE9F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYAG..HANGUL SYLLABLE GGYAH
+ {0xAEA0, 0xAEA0, prH2, gcLo}, // HANGUL SYLLABLE GGYAE
+ {0xAEA1, 0xAEBB, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYAEG..HANGUL SYLLABLE GGYAEH
+ {0xAEBC, 0xAEBC, prH2, gcLo}, // HANGUL SYLLABLE GGEO
+ {0xAEBD, 0xAED7, prH3, gcLo}, // [27] HANGUL SYLLABLE GGEOG..HANGUL SYLLABLE GGEOH
+ {0xAED8, 0xAED8, prH2, gcLo}, // HANGUL SYLLABLE GGE
+ {0xAED9, 0xAEF3, prH3, gcLo}, // [27] HANGUL SYLLABLE GGEG..HANGUL SYLLABLE GGEH
+ {0xAEF4, 0xAEF4, prH2, gcLo}, // HANGUL SYLLABLE GGYEO
+ {0xAEF5, 0xAF0F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYEOG..HANGUL SYLLABLE GGYEOH
+ {0xAF10, 0xAF10, prH2, gcLo}, // HANGUL SYLLABLE GGYE
+ {0xAF11, 0xAF2B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYEG..HANGUL SYLLABLE GGYEH
+ {0xAF2C, 0xAF2C, prH2, gcLo}, // HANGUL SYLLABLE GGO
+ {0xAF2D, 0xAF47, prH3, gcLo}, // [27] HANGUL SYLLABLE GGOG..HANGUL SYLLABLE GGOH
+ {0xAF48, 0xAF48, prH2, gcLo}, // HANGUL SYLLABLE GGWA
+ {0xAF49, 0xAF63, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWAG..HANGUL SYLLABLE GGWAH
+ {0xAF64, 0xAF64, prH2, gcLo}, // HANGUL SYLLABLE GGWAE
+ {0xAF65, 0xAF7F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWAEG..HANGUL SYLLABLE GGWAEH
+ {0xAF80, 0xAF80, prH2, gcLo}, // HANGUL SYLLABLE GGOE
+ {0xAF81, 0xAF9B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGOEG..HANGUL SYLLABLE GGOEH
+ {0xAF9C, 0xAF9C, prH2, gcLo}, // HANGUL SYLLABLE GGYO
+ {0xAF9D, 0xAFB7, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYOG..HANGUL SYLLABLE GGYOH
+ {0xAFB8, 0xAFB8, prH2, gcLo}, // HANGUL SYLLABLE GGU
+ {0xAFB9, 0xAFD3, prH3, gcLo}, // [27] HANGUL SYLLABLE GGUG..HANGUL SYLLABLE GGUH
+ {0xAFD4, 0xAFD4, prH2, gcLo}, // HANGUL SYLLABLE GGWEO
+ {0xAFD5, 0xAFEF, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWEOG..HANGUL SYLLABLE GGWEOH
+ {0xAFF0, 0xAFF0, prH2, gcLo}, // HANGUL SYLLABLE GGWE
+ {0xAFF1, 0xB00B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWEG..HANGUL SYLLABLE GGWEH
+ {0xB00C, 0xB00C, prH2, gcLo}, // HANGUL SYLLABLE GGWI
+ {0xB00D, 0xB027, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWIG..HANGUL SYLLABLE GGWIH
+ {0xB028, 0xB028, prH2, gcLo}, // HANGUL SYLLABLE GGYU
+ {0xB029, 0xB043, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYUG..HANGUL SYLLABLE GGYUH
+ {0xB044, 0xB044, prH2, gcLo}, // HANGUL SYLLABLE GGEU
+ {0xB045, 0xB05F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGEUG..HANGUL SYLLABLE GGEUH
+ {0xB060, 0xB060, prH2, gcLo}, // HANGUL SYLLABLE GGYI
+ {0xB061, 0xB07B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYIG..HANGUL SYLLABLE GGYIH
+ {0xB07C, 0xB07C, prH2, gcLo}, // HANGUL SYLLABLE GGI
+ {0xB07D, 0xB097, prH3, gcLo}, // [27] HANGUL SYLLABLE GGIG..HANGUL SYLLABLE GGIH
+ {0xB098, 0xB098, prH2, gcLo}, // HANGUL SYLLABLE NA
+ {0xB099, 0xB0B3, prH3, gcLo}, // [27] HANGUL SYLLABLE NAG..HANGUL SYLLABLE NAH
+ {0xB0B4, 0xB0B4, prH2, gcLo}, // HANGUL SYLLABLE NAE
+ {0xB0B5, 0xB0CF, prH3, gcLo}, // [27] HANGUL SYLLABLE NAEG..HANGUL SYLLABLE NAEH
+ {0xB0D0, 0xB0D0, prH2, gcLo}, // HANGUL SYLLABLE NYA
+ {0xB0D1, 0xB0EB, prH3, gcLo}, // [27] HANGUL SYLLABLE NYAG..HANGUL SYLLABLE NYAH
+ {0xB0EC, 0xB0EC, prH2, gcLo}, // HANGUL SYLLABLE NYAE
+ {0xB0ED, 0xB107, prH3, gcLo}, // [27] HANGUL SYLLABLE NYAEG..HANGUL SYLLABLE NYAEH
+ {0xB108, 0xB108, prH2, gcLo}, // HANGUL SYLLABLE NEO
+ {0xB109, 0xB123, prH3, gcLo}, // [27] HANGUL SYLLABLE NEOG..HANGUL SYLLABLE NEOH
+ {0xB124, 0xB124, prH2, gcLo}, // HANGUL SYLLABLE NE
+ {0xB125, 0xB13F, prH3, gcLo}, // [27] HANGUL SYLLABLE NEG..HANGUL SYLLABLE NEH
+ {0xB140, 0xB140, prH2, gcLo}, // HANGUL SYLLABLE NYEO
+ {0xB141, 0xB15B, prH3, gcLo}, // [27] HANGUL SYLLABLE NYEOG..HANGUL SYLLABLE NYEOH
+ {0xB15C, 0xB15C, prH2, gcLo}, // HANGUL SYLLABLE NYE
+ {0xB15D, 0xB177, prH3, gcLo}, // [27] HANGUL SYLLABLE NYEG..HANGUL SYLLABLE NYEH
+ {0xB178, 0xB178, prH2, gcLo}, // HANGUL SYLLABLE NO
+ {0xB179, 0xB193, prH3, gcLo}, // [27] HANGUL SYLLABLE NOG..HANGUL SYLLABLE NOH
+ {0xB194, 0xB194, prH2, gcLo}, // HANGUL SYLLABLE NWA
+ {0xB195, 0xB1AF, prH3, gcLo}, // [27] HANGUL SYLLABLE NWAG..HANGUL SYLLABLE NWAH
+ {0xB1B0, 0xB1B0, prH2, gcLo}, // HANGUL SYLLABLE NWAE
+ {0xB1B1, 0xB1CB, prH3, gcLo}, // [27] HANGUL SYLLABLE NWAEG..HANGUL SYLLABLE NWAEH
+ {0xB1CC, 0xB1CC, prH2, gcLo}, // HANGUL SYLLABLE NOE
+ {0xB1CD, 0xB1E7, prH3, gcLo}, // [27] HANGUL SYLLABLE NOEG..HANGUL SYLLABLE NOEH
+ {0xB1E8, 0xB1E8, prH2, gcLo}, // HANGUL SYLLABLE NYO
+ {0xB1E9, 0xB203, prH3, gcLo}, // [27] HANGUL SYLLABLE NYOG..HANGUL SYLLABLE NYOH
+ {0xB204, 0xB204, prH2, gcLo}, // HANGUL SYLLABLE NU
+ {0xB205, 0xB21F, prH3, gcLo}, // [27] HANGUL SYLLABLE NUG..HANGUL SYLLABLE NUH
+ {0xB220, 0xB220, prH2, gcLo}, // HANGUL SYLLABLE NWEO
+ {0xB221, 0xB23B, prH3, gcLo}, // [27] HANGUL SYLLABLE NWEOG..HANGUL SYLLABLE NWEOH
+ {0xB23C, 0xB23C, prH2, gcLo}, // HANGUL SYLLABLE NWE
+ {0xB23D, 0xB257, prH3, gcLo}, // [27] HANGUL SYLLABLE NWEG..HANGUL SYLLABLE NWEH
+ {0xB258, 0xB258, prH2, gcLo}, // HANGUL SYLLABLE NWI
+ {0xB259, 0xB273, prH3, gcLo}, // [27] HANGUL SYLLABLE NWIG..HANGUL SYLLABLE NWIH
+ {0xB274, 0xB274, prH2, gcLo}, // HANGUL SYLLABLE NYU
+ {0xB275, 0xB28F, prH3, gcLo}, // [27] HANGUL SYLLABLE NYUG..HANGUL SYLLABLE NYUH
+ {0xB290, 0xB290, prH2, gcLo}, // HANGUL SYLLABLE NEU
+ {0xB291, 0xB2AB, prH3, gcLo}, // [27] HANGUL SYLLABLE NEUG..HANGUL SYLLABLE NEUH
+ {0xB2AC, 0xB2AC, prH2, gcLo}, // HANGUL SYLLABLE NYI
+ {0xB2AD, 0xB2C7, prH3, gcLo}, // [27] HANGUL SYLLABLE NYIG..HANGUL SYLLABLE NYIH
+ {0xB2C8, 0xB2C8, prH2, gcLo}, // HANGUL SYLLABLE NI
+ {0xB2C9, 0xB2E3, prH3, gcLo}, // [27] HANGUL SYLLABLE NIG..HANGUL SYLLABLE NIH
+ {0xB2E4, 0xB2E4, prH2, gcLo}, // HANGUL SYLLABLE DA
+ {0xB2E5, 0xB2FF, prH3, gcLo}, // [27] HANGUL SYLLABLE DAG..HANGUL SYLLABLE DAH
+ {0xB300, 0xB300, prH2, gcLo}, // HANGUL SYLLABLE DAE
+ {0xB301, 0xB31B, prH3, gcLo}, // [27] HANGUL SYLLABLE DAEG..HANGUL SYLLABLE DAEH
+ {0xB31C, 0xB31C, prH2, gcLo}, // HANGUL SYLLABLE DYA
+ {0xB31D, 0xB337, prH3, gcLo}, // [27] HANGUL SYLLABLE DYAG..HANGUL SYLLABLE DYAH
+ {0xB338, 0xB338, prH2, gcLo}, // HANGUL SYLLABLE DYAE
+ {0xB339, 0xB353, prH3, gcLo}, // [27] HANGUL SYLLABLE DYAEG..HANGUL SYLLABLE DYAEH
+ {0xB354, 0xB354, prH2, gcLo}, // HANGUL SYLLABLE DEO
+ {0xB355, 0xB36F, prH3, gcLo}, // [27] HANGUL SYLLABLE DEOG..HANGUL SYLLABLE DEOH
+ {0xB370, 0xB370, prH2, gcLo}, // HANGUL SYLLABLE DE
+ {0xB371, 0xB38B, prH3, gcLo}, // [27] HANGUL SYLLABLE DEG..HANGUL SYLLABLE DEH
+ {0xB38C, 0xB38C, prH2, gcLo}, // HANGUL SYLLABLE DYEO
+ {0xB38D, 0xB3A7, prH3, gcLo}, // [27] HANGUL SYLLABLE DYEOG..HANGUL SYLLABLE DYEOH
+ {0xB3A8, 0xB3A8, prH2, gcLo}, // HANGUL SYLLABLE DYE
+ {0xB3A9, 0xB3C3, prH3, gcLo}, // [27] HANGUL SYLLABLE DYEG..HANGUL SYLLABLE DYEH
+ {0xB3C4, 0xB3C4, prH2, gcLo}, // HANGUL SYLLABLE DO
+ {0xB3C5, 0xB3DF, prH3, gcLo}, // [27] HANGUL SYLLABLE DOG..HANGUL SYLLABLE DOH
+ {0xB3E0, 0xB3E0, prH2, gcLo}, // HANGUL SYLLABLE DWA
+ {0xB3E1, 0xB3FB, prH3, gcLo}, // [27] HANGUL SYLLABLE DWAG..HANGUL SYLLABLE DWAH
+ {0xB3FC, 0xB3FC, prH2, gcLo}, // HANGUL SYLLABLE DWAE
+ {0xB3FD, 0xB417, prH3, gcLo}, // [27] HANGUL SYLLABLE DWAEG..HANGUL SYLLABLE DWAEH
+ {0xB418, 0xB418, prH2, gcLo}, // HANGUL SYLLABLE DOE
+ {0xB419, 0xB433, prH3, gcLo}, // [27] HANGUL SYLLABLE DOEG..HANGUL SYLLABLE DOEH
+ {0xB434, 0xB434, prH2, gcLo}, // HANGUL SYLLABLE DYO
+ {0xB435, 0xB44F, prH3, gcLo}, // [27] HANGUL SYLLABLE DYOG..HANGUL SYLLABLE DYOH
+ {0xB450, 0xB450, prH2, gcLo}, // HANGUL SYLLABLE DU
+ {0xB451, 0xB46B, prH3, gcLo}, // [27] HANGUL SYLLABLE DUG..HANGUL SYLLABLE DUH
+ {0xB46C, 0xB46C, prH2, gcLo}, // HANGUL SYLLABLE DWEO
+ {0xB46D, 0xB487, prH3, gcLo}, // [27] HANGUL SYLLABLE DWEOG..HANGUL SYLLABLE DWEOH
+ {0xB488, 0xB488, prH2, gcLo}, // HANGUL SYLLABLE DWE
+ {0xB489, 0xB4A3, prH3, gcLo}, // [27] HANGUL SYLLABLE DWEG..HANGUL SYLLABLE DWEH
+ {0xB4A4, 0xB4A4, prH2, gcLo}, // HANGUL SYLLABLE DWI
+ {0xB4A5, 0xB4BF, prH3, gcLo}, // [27] HANGUL SYLLABLE DWIG..HANGUL SYLLABLE DWIH
+ {0xB4C0, 0xB4C0, prH2, gcLo}, // HANGUL SYLLABLE DYU
+ {0xB4C1, 0xB4DB, prH3, gcLo}, // [27] HANGUL SYLLABLE DYUG..HANGUL SYLLABLE DYUH
+ {0xB4DC, 0xB4DC, prH2, gcLo}, // HANGUL SYLLABLE DEU
+ {0xB4DD, 0xB4F7, prH3, gcLo}, // [27] HANGUL SYLLABLE DEUG..HANGUL SYLLABLE DEUH
+ {0xB4F8, 0xB4F8, prH2, gcLo}, // HANGUL SYLLABLE DYI
+ {0xB4F9, 0xB513, prH3, gcLo}, // [27] HANGUL SYLLABLE DYIG..HANGUL SYLLABLE DYIH
+ {0xB514, 0xB514, prH2, gcLo}, // HANGUL SYLLABLE DI
+ {0xB515, 0xB52F, prH3, gcLo}, // [27] HANGUL SYLLABLE DIG..HANGUL SYLLABLE DIH
+ {0xB530, 0xB530, prH2, gcLo}, // HANGUL SYLLABLE DDA
+ {0xB531, 0xB54B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDAG..HANGUL SYLLABLE DDAH
+ {0xB54C, 0xB54C, prH2, gcLo}, // HANGUL SYLLABLE DDAE
+ {0xB54D, 0xB567, prH3, gcLo}, // [27] HANGUL SYLLABLE DDAEG..HANGUL SYLLABLE DDAEH
+ {0xB568, 0xB568, prH2, gcLo}, // HANGUL SYLLABLE DDYA
+ {0xB569, 0xB583, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYAG..HANGUL SYLLABLE DDYAH
+ {0xB584, 0xB584, prH2, gcLo}, // HANGUL SYLLABLE DDYAE
+ {0xB585, 0xB59F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYAEG..HANGUL SYLLABLE DDYAEH
+ {0xB5A0, 0xB5A0, prH2, gcLo}, // HANGUL SYLLABLE DDEO
+ {0xB5A1, 0xB5BB, prH3, gcLo}, // [27] HANGUL SYLLABLE DDEOG..HANGUL SYLLABLE DDEOH
+ {0xB5BC, 0xB5BC, prH2, gcLo}, // HANGUL SYLLABLE DDE
+ {0xB5BD, 0xB5D7, prH3, gcLo}, // [27] HANGUL SYLLABLE DDEG..HANGUL SYLLABLE DDEH
+ {0xB5D8, 0xB5D8, prH2, gcLo}, // HANGUL SYLLABLE DDYEO
+ {0xB5D9, 0xB5F3, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYEOG..HANGUL SYLLABLE DDYEOH
+ {0xB5F4, 0xB5F4, prH2, gcLo}, // HANGUL SYLLABLE DDYE
+ {0xB5F5, 0xB60F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYEG..HANGUL SYLLABLE DDYEH
+ {0xB610, 0xB610, prH2, gcLo}, // HANGUL SYLLABLE DDO
+ {0xB611, 0xB62B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDOG..HANGUL SYLLABLE DDOH
+ {0xB62C, 0xB62C, prH2, gcLo}, // HANGUL SYLLABLE DDWA
+ {0xB62D, 0xB647, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWAG..HANGUL SYLLABLE DDWAH
+ {0xB648, 0xB648, prH2, gcLo}, // HANGUL SYLLABLE DDWAE
+ {0xB649, 0xB663, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWAEG..HANGUL SYLLABLE DDWAEH
+ {0xB664, 0xB664, prH2, gcLo}, // HANGUL SYLLABLE DDOE
+ {0xB665, 0xB67F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDOEG..HANGUL SYLLABLE DDOEH
+ {0xB680, 0xB680, prH2, gcLo}, // HANGUL SYLLABLE DDYO
+ {0xB681, 0xB69B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYOG..HANGUL SYLLABLE DDYOH
+ {0xB69C, 0xB69C, prH2, gcLo}, // HANGUL SYLLABLE DDU
+ {0xB69D, 0xB6B7, prH3, gcLo}, // [27] HANGUL SYLLABLE DDUG..HANGUL SYLLABLE DDUH
+ {0xB6B8, 0xB6B8, prH2, gcLo}, // HANGUL SYLLABLE DDWEO
+ {0xB6B9, 0xB6D3, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWEOG..HANGUL SYLLABLE DDWEOH
+ {0xB6D4, 0xB6D4, prH2, gcLo}, // HANGUL SYLLABLE DDWE
+ {0xB6D5, 0xB6EF, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWEG..HANGUL SYLLABLE DDWEH
+ {0xB6F0, 0xB6F0, prH2, gcLo}, // HANGUL SYLLABLE DDWI
+ {0xB6F1, 0xB70B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWIG..HANGUL SYLLABLE DDWIH
+ {0xB70C, 0xB70C, prH2, gcLo}, // HANGUL SYLLABLE DDYU
+ {0xB70D, 0xB727, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYUG..HANGUL SYLLABLE DDYUH
+ {0xB728, 0xB728, prH2, gcLo}, // HANGUL SYLLABLE DDEU
+ {0xB729, 0xB743, prH3, gcLo}, // [27] HANGUL SYLLABLE DDEUG..HANGUL SYLLABLE DDEUH
+ {0xB744, 0xB744, prH2, gcLo}, // HANGUL SYLLABLE DDYI
+ {0xB745, 0xB75F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYIG..HANGUL SYLLABLE DDYIH
+ {0xB760, 0xB760, prH2, gcLo}, // HANGUL SYLLABLE DDI
+ {0xB761, 0xB77B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDIG..HANGUL SYLLABLE DDIH
+ {0xB77C, 0xB77C, prH2, gcLo}, // HANGUL SYLLABLE RA
+ {0xB77D, 0xB797, prH3, gcLo}, // [27] HANGUL SYLLABLE RAG..HANGUL SYLLABLE RAH
+ {0xB798, 0xB798, prH2, gcLo}, // HANGUL SYLLABLE RAE
+ {0xB799, 0xB7B3, prH3, gcLo}, // [27] HANGUL SYLLABLE RAEG..HANGUL SYLLABLE RAEH
+ {0xB7B4, 0xB7B4, prH2, gcLo}, // HANGUL SYLLABLE RYA
+ {0xB7B5, 0xB7CF, prH3, gcLo}, // [27] HANGUL SYLLABLE RYAG..HANGUL SYLLABLE RYAH
+ {0xB7D0, 0xB7D0, prH2, gcLo}, // HANGUL SYLLABLE RYAE
+ {0xB7D1, 0xB7EB, prH3, gcLo}, // [27] HANGUL SYLLABLE RYAEG..HANGUL SYLLABLE RYAEH
+ {0xB7EC, 0xB7EC, prH2, gcLo}, // HANGUL SYLLABLE REO
+ {0xB7ED, 0xB807, prH3, gcLo}, // [27] HANGUL SYLLABLE REOG..HANGUL SYLLABLE REOH
+ {0xB808, 0xB808, prH2, gcLo}, // HANGUL SYLLABLE RE
+ {0xB809, 0xB823, prH3, gcLo}, // [27] HANGUL SYLLABLE REG..HANGUL SYLLABLE REH
+ {0xB824, 0xB824, prH2, gcLo}, // HANGUL SYLLABLE RYEO
+ {0xB825, 0xB83F, prH3, gcLo}, // [27] HANGUL SYLLABLE RYEOG..HANGUL SYLLABLE RYEOH
+ {0xB840, 0xB840, prH2, gcLo}, // HANGUL SYLLABLE RYE
+ {0xB841, 0xB85B, prH3, gcLo}, // [27] HANGUL SYLLABLE RYEG..HANGUL SYLLABLE RYEH
+ {0xB85C, 0xB85C, prH2, gcLo}, // HANGUL SYLLABLE RO
+ {0xB85D, 0xB877, prH3, gcLo}, // [27] HANGUL SYLLABLE ROG..HANGUL SYLLABLE ROH
+ {0xB878, 0xB878, prH2, gcLo}, // HANGUL SYLLABLE RWA
+ {0xB879, 0xB893, prH3, gcLo}, // [27] HANGUL SYLLABLE RWAG..HANGUL SYLLABLE RWAH
+ {0xB894, 0xB894, prH2, gcLo}, // HANGUL SYLLABLE RWAE
+ {0xB895, 0xB8AF, prH3, gcLo}, // [27] HANGUL SYLLABLE RWAEG..HANGUL SYLLABLE RWAEH
+ {0xB8B0, 0xB8B0, prH2, gcLo}, // HANGUL SYLLABLE ROE
+ {0xB8B1, 0xB8CB, prH3, gcLo}, // [27] HANGUL SYLLABLE ROEG..HANGUL SYLLABLE ROEH
+ {0xB8CC, 0xB8CC, prH2, gcLo}, // HANGUL SYLLABLE RYO
+ {0xB8CD, 0xB8E7, prH3, gcLo}, // [27] HANGUL SYLLABLE RYOG..HANGUL SYLLABLE RYOH
+ {0xB8E8, 0xB8E8, prH2, gcLo}, // HANGUL SYLLABLE RU
+ {0xB8E9, 0xB903, prH3, gcLo}, // [27] HANGUL SYLLABLE RUG..HANGUL SYLLABLE RUH
+ {0xB904, 0xB904, prH2, gcLo}, // HANGUL SYLLABLE RWEO
+ {0xB905, 0xB91F, prH3, gcLo}, // [27] HANGUL SYLLABLE RWEOG..HANGUL SYLLABLE RWEOH
+ {0xB920, 0xB920, prH2, gcLo}, // HANGUL SYLLABLE RWE
+ {0xB921, 0xB93B, prH3, gcLo}, // [27] HANGUL SYLLABLE RWEG..HANGUL SYLLABLE RWEH
+ {0xB93C, 0xB93C, prH2, gcLo}, // HANGUL SYLLABLE RWI
+ {0xB93D, 0xB957, prH3, gcLo}, // [27] HANGUL SYLLABLE RWIG..HANGUL SYLLABLE RWIH
+ {0xB958, 0xB958, prH2, gcLo}, // HANGUL SYLLABLE RYU
+ {0xB959, 0xB973, prH3, gcLo}, // [27] HANGUL SYLLABLE RYUG..HANGUL SYLLABLE RYUH
+ {0xB974, 0xB974, prH2, gcLo}, // HANGUL SYLLABLE REU
+ {0xB975, 0xB98F, prH3, gcLo}, // [27] HANGUL SYLLABLE REUG..HANGUL SYLLABLE REUH
+ {0xB990, 0xB990, prH2, gcLo}, // HANGUL SYLLABLE RYI
+ {0xB991, 0xB9AB, prH3, gcLo}, // [27] HANGUL SYLLABLE RYIG..HANGUL SYLLABLE RYIH
+ {0xB9AC, 0xB9AC, prH2, gcLo}, // HANGUL SYLLABLE RI
+ {0xB9AD, 0xB9C7, prH3, gcLo}, // [27] HANGUL SYLLABLE RIG..HANGUL SYLLABLE RIH
+ {0xB9C8, 0xB9C8, prH2, gcLo}, // HANGUL SYLLABLE MA
+ {0xB9C9, 0xB9E3, prH3, gcLo}, // [27] HANGUL SYLLABLE MAG..HANGUL SYLLABLE MAH
+ {0xB9E4, 0xB9E4, prH2, gcLo}, // HANGUL SYLLABLE MAE
+ {0xB9E5, 0xB9FF, prH3, gcLo}, // [27] HANGUL SYLLABLE MAEG..HANGUL SYLLABLE MAEH
+ {0xBA00, 0xBA00, prH2, gcLo}, // HANGUL SYLLABLE MYA
+ {0xBA01, 0xBA1B, prH3, gcLo}, // [27] HANGUL SYLLABLE MYAG..HANGUL SYLLABLE MYAH
+ {0xBA1C, 0xBA1C, prH2, gcLo}, // HANGUL SYLLABLE MYAE
+ {0xBA1D, 0xBA37, prH3, gcLo}, // [27] HANGUL SYLLABLE MYAEG..HANGUL SYLLABLE MYAEH
+ {0xBA38, 0xBA38, prH2, gcLo}, // HANGUL SYLLABLE MEO
+ {0xBA39, 0xBA53, prH3, gcLo}, // [27] HANGUL SYLLABLE MEOG..HANGUL SYLLABLE MEOH
+ {0xBA54, 0xBA54, prH2, gcLo}, // HANGUL SYLLABLE ME
+ {0xBA55, 0xBA6F, prH3, gcLo}, // [27] HANGUL SYLLABLE MEG..HANGUL SYLLABLE MEH
+ {0xBA70, 0xBA70, prH2, gcLo}, // HANGUL SYLLABLE MYEO
+ {0xBA71, 0xBA8B, prH3, gcLo}, // [27] HANGUL SYLLABLE MYEOG..HANGUL SYLLABLE MYEOH
+ {0xBA8C, 0xBA8C, prH2, gcLo}, // HANGUL SYLLABLE MYE
+ {0xBA8D, 0xBAA7, prH3, gcLo}, // [27] HANGUL SYLLABLE MYEG..HANGUL SYLLABLE MYEH
+ {0xBAA8, 0xBAA8, prH2, gcLo}, // HANGUL SYLLABLE MO
+ {0xBAA9, 0xBAC3, prH3, gcLo}, // [27] HANGUL SYLLABLE MOG..HANGUL SYLLABLE MOH
+ {0xBAC4, 0xBAC4, prH2, gcLo}, // HANGUL SYLLABLE MWA
+ {0xBAC5, 0xBADF, prH3, gcLo}, // [27] HANGUL SYLLABLE MWAG..HANGUL SYLLABLE MWAH
+ {0xBAE0, 0xBAE0, prH2, gcLo}, // HANGUL SYLLABLE MWAE
+ {0xBAE1, 0xBAFB, prH3, gcLo}, // [27] HANGUL SYLLABLE MWAEG..HANGUL SYLLABLE MWAEH
+ {0xBAFC, 0xBAFC, prH2, gcLo}, // HANGUL SYLLABLE MOE
+ {0xBAFD, 0xBB17, prH3, gcLo}, // [27] HANGUL SYLLABLE MOEG..HANGUL SYLLABLE MOEH
+ {0xBB18, 0xBB18, prH2, gcLo}, // HANGUL SYLLABLE MYO
+ {0xBB19, 0xBB33, prH3, gcLo}, // [27] HANGUL SYLLABLE MYOG..HANGUL SYLLABLE MYOH
+ {0xBB34, 0xBB34, prH2, gcLo}, // HANGUL SYLLABLE MU
+ {0xBB35, 0xBB4F, prH3, gcLo}, // [27] HANGUL SYLLABLE MUG..HANGUL SYLLABLE MUH
+ {0xBB50, 0xBB50, prH2, gcLo}, // HANGUL SYLLABLE MWEO
+ {0xBB51, 0xBB6B, prH3, gcLo}, // [27] HANGUL SYLLABLE MWEOG..HANGUL SYLLABLE MWEOH
+ {0xBB6C, 0xBB6C, prH2, gcLo}, // HANGUL SYLLABLE MWE
+ {0xBB6D, 0xBB87, prH3, gcLo}, // [27] HANGUL SYLLABLE MWEG..HANGUL SYLLABLE MWEH
+ {0xBB88, 0xBB88, prH2, gcLo}, // HANGUL SYLLABLE MWI
+ {0xBB89, 0xBBA3, prH3, gcLo}, // [27] HANGUL SYLLABLE MWIG..HANGUL SYLLABLE MWIH
+ {0xBBA4, 0xBBA4, prH2, gcLo}, // HANGUL SYLLABLE MYU
+ {0xBBA5, 0xBBBF, prH3, gcLo}, // [27] HANGUL SYLLABLE MYUG..HANGUL SYLLABLE MYUH
+ {0xBBC0, 0xBBC0, prH2, gcLo}, // HANGUL SYLLABLE MEU
+ {0xBBC1, 0xBBDB, prH3, gcLo}, // [27] HANGUL SYLLABLE MEUG..HANGUL SYLLABLE MEUH
+ {0xBBDC, 0xBBDC, prH2, gcLo}, // HANGUL SYLLABLE MYI
+ {0xBBDD, 0xBBF7, prH3, gcLo}, // [27] HANGUL SYLLABLE MYIG..HANGUL SYLLABLE MYIH
+ {0xBBF8, 0xBBF8, prH2, gcLo}, // HANGUL SYLLABLE MI
+ {0xBBF9, 0xBC13, prH3, gcLo}, // [27] HANGUL SYLLABLE MIG..HANGUL SYLLABLE MIH
+ {0xBC14, 0xBC14, prH2, gcLo}, // HANGUL SYLLABLE BA
+ {0xBC15, 0xBC2F, prH3, gcLo}, // [27] HANGUL SYLLABLE BAG..HANGUL SYLLABLE BAH
+ {0xBC30, 0xBC30, prH2, gcLo}, // HANGUL SYLLABLE BAE
+ {0xBC31, 0xBC4B, prH3, gcLo}, // [27] HANGUL SYLLABLE BAEG..HANGUL SYLLABLE BAEH
+ {0xBC4C, 0xBC4C, prH2, gcLo}, // HANGUL SYLLABLE BYA
+ {0xBC4D, 0xBC67, prH3, gcLo}, // [27] HANGUL SYLLABLE BYAG..HANGUL SYLLABLE BYAH
+ {0xBC68, 0xBC68, prH2, gcLo}, // HANGUL SYLLABLE BYAE
+ {0xBC69, 0xBC83, prH3, gcLo}, // [27] HANGUL SYLLABLE BYAEG..HANGUL SYLLABLE BYAEH
+ {0xBC84, 0xBC84, prH2, gcLo}, // HANGUL SYLLABLE BEO
+ {0xBC85, 0xBC9F, prH3, gcLo}, // [27] HANGUL SYLLABLE BEOG..HANGUL SYLLABLE BEOH
+ {0xBCA0, 0xBCA0, prH2, gcLo}, // HANGUL SYLLABLE BE
+ {0xBCA1, 0xBCBB, prH3, gcLo}, // [27] HANGUL SYLLABLE BEG..HANGUL SYLLABLE BEH
+ {0xBCBC, 0xBCBC, prH2, gcLo}, // HANGUL SYLLABLE BYEO
+ {0xBCBD, 0xBCD7, prH3, gcLo}, // [27] HANGUL SYLLABLE BYEOG..HANGUL SYLLABLE BYEOH
+ {0xBCD8, 0xBCD8, prH2, gcLo}, // HANGUL SYLLABLE BYE
+ {0xBCD9, 0xBCF3, prH3, gcLo}, // [27] HANGUL SYLLABLE BYEG..HANGUL SYLLABLE BYEH
+ {0xBCF4, 0xBCF4, prH2, gcLo}, // HANGUL SYLLABLE BO
+ {0xBCF5, 0xBD0F, prH3, gcLo}, // [27] HANGUL SYLLABLE BOG..HANGUL SYLLABLE BOH
+ {0xBD10, 0xBD10, prH2, gcLo}, // HANGUL SYLLABLE BWA
+ {0xBD11, 0xBD2B, prH3, gcLo}, // [27] HANGUL SYLLABLE BWAG..HANGUL SYLLABLE BWAH
+ {0xBD2C, 0xBD2C, prH2, gcLo}, // HANGUL SYLLABLE BWAE
+ {0xBD2D, 0xBD47, prH3, gcLo}, // [27] HANGUL SYLLABLE BWAEG..HANGUL SYLLABLE BWAEH
+ {0xBD48, 0xBD48, prH2, gcLo}, // HANGUL SYLLABLE BOE
+ {0xBD49, 0xBD63, prH3, gcLo}, // [27] HANGUL SYLLABLE BOEG..HANGUL SYLLABLE BOEH
+ {0xBD64, 0xBD64, prH2, gcLo}, // HANGUL SYLLABLE BYO
+ {0xBD65, 0xBD7F, prH3, gcLo}, // [27] HANGUL SYLLABLE BYOG..HANGUL SYLLABLE BYOH
+ {0xBD80, 0xBD80, prH2, gcLo}, // HANGUL SYLLABLE BU
+ {0xBD81, 0xBD9B, prH3, gcLo}, // [27] HANGUL SYLLABLE BUG..HANGUL SYLLABLE BUH
+ {0xBD9C, 0xBD9C, prH2, gcLo}, // HANGUL SYLLABLE BWEO
+ {0xBD9D, 0xBDB7, prH3, gcLo}, // [27] HANGUL SYLLABLE BWEOG..HANGUL SYLLABLE BWEOH
+ {0xBDB8, 0xBDB8, prH2, gcLo}, // HANGUL SYLLABLE BWE
+ {0xBDB9, 0xBDD3, prH3, gcLo}, // [27] HANGUL SYLLABLE BWEG..HANGUL SYLLABLE BWEH
+ {0xBDD4, 0xBDD4, prH2, gcLo}, // HANGUL SYLLABLE BWI
+ {0xBDD5, 0xBDEF, prH3, gcLo}, // [27] HANGUL SYLLABLE BWIG..HANGUL SYLLABLE BWIH
+ {0xBDF0, 0xBDF0, prH2, gcLo}, // HANGUL SYLLABLE BYU
+ {0xBDF1, 0xBE0B, prH3, gcLo}, // [27] HANGUL SYLLABLE BYUG..HANGUL SYLLABLE BYUH
+ {0xBE0C, 0xBE0C, prH2, gcLo}, // HANGUL SYLLABLE BEU
+ {0xBE0D, 0xBE27, prH3, gcLo}, // [27] HANGUL SYLLABLE BEUG..HANGUL SYLLABLE BEUH
+ {0xBE28, 0xBE28, prH2, gcLo}, // HANGUL SYLLABLE BYI
+ {0xBE29, 0xBE43, prH3, gcLo}, // [27] HANGUL SYLLABLE BYIG..HANGUL SYLLABLE BYIH
+ {0xBE44, 0xBE44, prH2, gcLo}, // HANGUL SYLLABLE BI
+ {0xBE45, 0xBE5F, prH3, gcLo}, // [27] HANGUL SYLLABLE BIG..HANGUL SYLLABLE BIH
+ {0xBE60, 0xBE60, prH2, gcLo}, // HANGUL SYLLABLE BBA
+ {0xBE61, 0xBE7B, prH3, gcLo}, // [27] HANGUL SYLLABLE BBAG..HANGUL SYLLABLE BBAH
+ {0xBE7C, 0xBE7C, prH2, gcLo}, // HANGUL SYLLABLE BBAE
+ {0xBE7D, 0xBE97, prH3, gcLo}, // [27] HANGUL SYLLABLE BBAEG..HANGUL SYLLABLE BBAEH
+ {0xBE98, 0xBE98, prH2, gcLo}, // HANGUL SYLLABLE BBYA
+ {0xBE99, 0xBEB3, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYAG..HANGUL SYLLABLE BBYAH
+ {0xBEB4, 0xBEB4, prH2, gcLo}, // HANGUL SYLLABLE BBYAE
+ {0xBEB5, 0xBECF, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYAEG..HANGUL SYLLABLE BBYAEH
+ {0xBED0, 0xBED0, prH2, gcLo}, // HANGUL SYLLABLE BBEO
+ {0xBED1, 0xBEEB, prH3, gcLo}, // [27] HANGUL SYLLABLE BBEOG..HANGUL SYLLABLE BBEOH
+ {0xBEEC, 0xBEEC, prH2, gcLo}, // HANGUL SYLLABLE BBE
+ {0xBEED, 0xBF07, prH3, gcLo}, // [27] HANGUL SYLLABLE BBEG..HANGUL SYLLABLE BBEH
+ {0xBF08, 0xBF08, prH2, gcLo}, // HANGUL SYLLABLE BBYEO
+ {0xBF09, 0xBF23, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYEOG..HANGUL SYLLABLE BBYEOH
+ {0xBF24, 0xBF24, prH2, gcLo}, // HANGUL SYLLABLE BBYE
+ {0xBF25, 0xBF3F, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYEG..HANGUL SYLLABLE BBYEH
+ {0xBF40, 0xBF40, prH2, gcLo}, // HANGUL SYLLABLE BBO
+ {0xBF41, 0xBF5B, prH3, gcLo}, // [27] HANGUL SYLLABLE BBOG..HANGUL SYLLABLE BBOH
+ {0xBF5C, 0xBF5C, prH2, gcLo}, // HANGUL SYLLABLE BBWA
+ {0xBF5D, 0xBF77, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWAG..HANGUL SYLLABLE BBWAH
+ {0xBF78, 0xBF78, prH2, gcLo}, // HANGUL SYLLABLE BBWAE
+ {0xBF79, 0xBF93, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWAEG..HANGUL SYLLABLE BBWAEH
+ {0xBF94, 0xBF94, prH2, gcLo}, // HANGUL SYLLABLE BBOE
+ {0xBF95, 0xBFAF, prH3, gcLo}, // [27] HANGUL SYLLABLE BBOEG..HANGUL SYLLABLE BBOEH
+ {0xBFB0, 0xBFB0, prH2, gcLo}, // HANGUL SYLLABLE BBYO
+ {0xBFB1, 0xBFCB, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYOG..HANGUL SYLLABLE BBYOH
+ {0xBFCC, 0xBFCC, prH2, gcLo}, // HANGUL SYLLABLE BBU
+ {0xBFCD, 0xBFE7, prH3, gcLo}, // [27] HANGUL SYLLABLE BBUG..HANGUL SYLLABLE BBUH
+ {0xBFE8, 0xBFE8, prH2, gcLo}, // HANGUL SYLLABLE BBWEO
+ {0xBFE9, 0xC003, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWEOG..HANGUL SYLLABLE BBWEOH
+ {0xC004, 0xC004, prH2, gcLo}, // HANGUL SYLLABLE BBWE
+ {0xC005, 0xC01F, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWEG..HANGUL SYLLABLE BBWEH
+ {0xC020, 0xC020, prH2, gcLo}, // HANGUL SYLLABLE BBWI
+ {0xC021, 0xC03B, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWIG..HANGUL SYLLABLE BBWIH
+ {0xC03C, 0xC03C, prH2, gcLo}, // HANGUL SYLLABLE BBYU
+ {0xC03D, 0xC057, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYUG..HANGUL SYLLABLE BBYUH
+ {0xC058, 0xC058, prH2, gcLo}, // HANGUL SYLLABLE BBEU
+ {0xC059, 0xC073, prH3, gcLo}, // [27] HANGUL SYLLABLE BBEUG..HANGUL SYLLABLE BBEUH
+ {0xC074, 0xC074, prH2, gcLo}, // HANGUL SYLLABLE BBYI
+ {0xC075, 0xC08F, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYIG..HANGUL SYLLABLE BBYIH
+ {0xC090, 0xC090, prH2, gcLo}, // HANGUL SYLLABLE BBI
+ {0xC091, 0xC0AB, prH3, gcLo}, // [27] HANGUL SYLLABLE BBIG..HANGUL SYLLABLE BBIH
+ {0xC0AC, 0xC0AC, prH2, gcLo}, // HANGUL SYLLABLE SA
+ {0xC0AD, 0xC0C7, prH3, gcLo}, // [27] HANGUL SYLLABLE SAG..HANGUL SYLLABLE SAH
+ {0xC0C8, 0xC0C8, prH2, gcLo}, // HANGUL SYLLABLE SAE
+ {0xC0C9, 0xC0E3, prH3, gcLo}, // [27] HANGUL SYLLABLE SAEG..HANGUL SYLLABLE SAEH
+ {0xC0E4, 0xC0E4, prH2, gcLo}, // HANGUL SYLLABLE SYA
+ {0xC0E5, 0xC0FF, prH3, gcLo}, // [27] HANGUL SYLLABLE SYAG..HANGUL SYLLABLE SYAH
+ {0xC100, 0xC100, prH2, gcLo}, // HANGUL SYLLABLE SYAE
+ {0xC101, 0xC11B, prH3, gcLo}, // [27] HANGUL SYLLABLE SYAEG..HANGUL SYLLABLE SYAEH
+ {0xC11C, 0xC11C, prH2, gcLo}, // HANGUL SYLLABLE SEO
+ {0xC11D, 0xC137, prH3, gcLo}, // [27] HANGUL SYLLABLE SEOG..HANGUL SYLLABLE SEOH
+ {0xC138, 0xC138, prH2, gcLo}, // HANGUL SYLLABLE SE
+ {0xC139, 0xC153, prH3, gcLo}, // [27] HANGUL SYLLABLE SEG..HANGUL SYLLABLE SEH
+ {0xC154, 0xC154, prH2, gcLo}, // HANGUL SYLLABLE SYEO
+ {0xC155, 0xC16F, prH3, gcLo}, // [27] HANGUL SYLLABLE SYEOG..HANGUL SYLLABLE SYEOH
+ {0xC170, 0xC170, prH2, gcLo}, // HANGUL SYLLABLE SYE
+ {0xC171, 0xC18B, prH3, gcLo}, // [27] HANGUL SYLLABLE SYEG..HANGUL SYLLABLE SYEH
+ {0xC18C, 0xC18C, prH2, gcLo}, // HANGUL SYLLABLE SO
+ {0xC18D, 0xC1A7, prH3, gcLo}, // [27] HANGUL SYLLABLE SOG..HANGUL SYLLABLE SOH
+ {0xC1A8, 0xC1A8, prH2, gcLo}, // HANGUL SYLLABLE SWA
+ {0xC1A9, 0xC1C3, prH3, gcLo}, // [27] HANGUL SYLLABLE SWAG..HANGUL SYLLABLE SWAH
+ {0xC1C4, 0xC1C4, prH2, gcLo}, // HANGUL SYLLABLE SWAE
+ {0xC1C5, 0xC1DF, prH3, gcLo}, // [27] HANGUL SYLLABLE SWAEG..HANGUL SYLLABLE SWAEH
+ {0xC1E0, 0xC1E0, prH2, gcLo}, // HANGUL SYLLABLE SOE
+ {0xC1E1, 0xC1FB, prH3, gcLo}, // [27] HANGUL SYLLABLE SOEG..HANGUL SYLLABLE SOEH
+ {0xC1FC, 0xC1FC, prH2, gcLo}, // HANGUL SYLLABLE SYO
+ {0xC1FD, 0xC217, prH3, gcLo}, // [27] HANGUL SYLLABLE SYOG..HANGUL SYLLABLE SYOH
+ {0xC218, 0xC218, prH2, gcLo}, // HANGUL SYLLABLE SU
+ {0xC219, 0xC233, prH3, gcLo}, // [27] HANGUL SYLLABLE SUG..HANGUL SYLLABLE SUH
+ {0xC234, 0xC234, prH2, gcLo}, // HANGUL SYLLABLE SWEO
+ {0xC235, 0xC24F, prH3, gcLo}, // [27] HANGUL SYLLABLE SWEOG..HANGUL SYLLABLE SWEOH
+ {0xC250, 0xC250, prH2, gcLo}, // HANGUL SYLLABLE SWE
+ {0xC251, 0xC26B, prH3, gcLo}, // [27] HANGUL SYLLABLE SWEG..HANGUL SYLLABLE SWEH
+ {0xC26C, 0xC26C, prH2, gcLo}, // HANGUL SYLLABLE SWI
+ {0xC26D, 0xC287, prH3, gcLo}, // [27] HANGUL SYLLABLE SWIG..HANGUL SYLLABLE SWIH
+ {0xC288, 0xC288, prH2, gcLo}, // HANGUL SYLLABLE SYU
+ {0xC289, 0xC2A3, prH3, gcLo}, // [27] HANGUL SYLLABLE SYUG..HANGUL SYLLABLE SYUH
+ {0xC2A4, 0xC2A4, prH2, gcLo}, // HANGUL SYLLABLE SEU
+ {0xC2A5, 0xC2BF, prH3, gcLo}, // [27] HANGUL SYLLABLE SEUG..HANGUL SYLLABLE SEUH
+ {0xC2C0, 0xC2C0, prH2, gcLo}, // HANGUL SYLLABLE SYI
+ {0xC2C1, 0xC2DB, prH3, gcLo}, // [27] HANGUL SYLLABLE SYIG..HANGUL SYLLABLE SYIH
+ {0xC2DC, 0xC2DC, prH2, gcLo}, // HANGUL SYLLABLE SI
+ {0xC2DD, 0xC2F7, prH3, gcLo}, // [27] HANGUL SYLLABLE SIG..HANGUL SYLLABLE SIH
+ {0xC2F8, 0xC2F8, prH2, gcLo}, // HANGUL SYLLABLE SSA
+ {0xC2F9, 0xC313, prH3, gcLo}, // [27] HANGUL SYLLABLE SSAG..HANGUL SYLLABLE SSAH
+ {0xC314, 0xC314, prH2, gcLo}, // HANGUL SYLLABLE SSAE
+ {0xC315, 0xC32F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSAEG..HANGUL SYLLABLE SSAEH
+ {0xC330, 0xC330, prH2, gcLo}, // HANGUL SYLLABLE SSYA
+ {0xC331, 0xC34B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYAG..HANGUL SYLLABLE SSYAH
+ {0xC34C, 0xC34C, prH2, gcLo}, // HANGUL SYLLABLE SSYAE
+ {0xC34D, 0xC367, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYAEG..HANGUL SYLLABLE SSYAEH
+ {0xC368, 0xC368, prH2, gcLo}, // HANGUL SYLLABLE SSEO
+ {0xC369, 0xC383, prH3, gcLo}, // [27] HANGUL SYLLABLE SSEOG..HANGUL SYLLABLE SSEOH
+ {0xC384, 0xC384, prH2, gcLo}, // HANGUL SYLLABLE SSE
+ {0xC385, 0xC39F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSEG..HANGUL SYLLABLE SSEH
+ {0xC3A0, 0xC3A0, prH2, gcLo}, // HANGUL SYLLABLE SSYEO
+ {0xC3A1, 0xC3BB, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYEOG..HANGUL SYLLABLE SSYEOH
+ {0xC3BC, 0xC3BC, prH2, gcLo}, // HANGUL SYLLABLE SSYE
+ {0xC3BD, 0xC3D7, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYEG..HANGUL SYLLABLE SSYEH
+ {0xC3D8, 0xC3D8, prH2, gcLo}, // HANGUL SYLLABLE SSO
+ {0xC3D9, 0xC3F3, prH3, gcLo}, // [27] HANGUL SYLLABLE SSOG..HANGUL SYLLABLE SSOH
+ {0xC3F4, 0xC3F4, prH2, gcLo}, // HANGUL SYLLABLE SSWA
+ {0xC3F5, 0xC40F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWAG..HANGUL SYLLABLE SSWAH
+ {0xC410, 0xC410, prH2, gcLo}, // HANGUL SYLLABLE SSWAE
+ {0xC411, 0xC42B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWAEG..HANGUL SYLLABLE SSWAEH
+ {0xC42C, 0xC42C, prH2, gcLo}, // HANGUL SYLLABLE SSOE
+ {0xC42D, 0xC447, prH3, gcLo}, // [27] HANGUL SYLLABLE SSOEG..HANGUL SYLLABLE SSOEH
+ {0xC448, 0xC448, prH2, gcLo}, // HANGUL SYLLABLE SSYO
+ {0xC449, 0xC463, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYOG..HANGUL SYLLABLE SSYOH
+ {0xC464, 0xC464, prH2, gcLo}, // HANGUL SYLLABLE SSU
+ {0xC465, 0xC47F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSUG..HANGUL SYLLABLE SSUH
+ {0xC480, 0xC480, prH2, gcLo}, // HANGUL SYLLABLE SSWEO
+ {0xC481, 0xC49B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWEOG..HANGUL SYLLABLE SSWEOH
+ {0xC49C, 0xC49C, prH2, gcLo}, // HANGUL SYLLABLE SSWE
+ {0xC49D, 0xC4B7, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWEG..HANGUL SYLLABLE SSWEH
+ {0xC4B8, 0xC4B8, prH2, gcLo}, // HANGUL SYLLABLE SSWI
+ {0xC4B9, 0xC4D3, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWIG..HANGUL SYLLABLE SSWIH
+ {0xC4D4, 0xC4D4, prH2, gcLo}, // HANGUL SYLLABLE SSYU
+ {0xC4D5, 0xC4EF, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYUG..HANGUL SYLLABLE SSYUH
+ {0xC4F0, 0xC4F0, prH2, gcLo}, // HANGUL SYLLABLE SSEU
+ {0xC4F1, 0xC50B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSEUG..HANGUL SYLLABLE SSEUH
+ {0xC50C, 0xC50C, prH2, gcLo}, // HANGUL SYLLABLE SSYI
+ {0xC50D, 0xC527, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYIG..HANGUL SYLLABLE SSYIH
+ {0xC528, 0xC528, prH2, gcLo}, // HANGUL SYLLABLE SSI
+ {0xC529, 0xC543, prH3, gcLo}, // [27] HANGUL SYLLABLE SSIG..HANGUL SYLLABLE SSIH
+ {0xC544, 0xC544, prH2, gcLo}, // HANGUL SYLLABLE A
+ {0xC545, 0xC55F, prH3, gcLo}, // [27] HANGUL SYLLABLE AG..HANGUL SYLLABLE AH
+ {0xC560, 0xC560, prH2, gcLo}, // HANGUL SYLLABLE AE
+ {0xC561, 0xC57B, prH3, gcLo}, // [27] HANGUL SYLLABLE AEG..HANGUL SYLLABLE AEH
+ {0xC57C, 0xC57C, prH2, gcLo}, // HANGUL SYLLABLE YA
+ {0xC57D, 0xC597, prH3, gcLo}, // [27] HANGUL SYLLABLE YAG..HANGUL SYLLABLE YAH
+ {0xC598, 0xC598, prH2, gcLo}, // HANGUL SYLLABLE YAE
+ {0xC599, 0xC5B3, prH3, gcLo}, // [27] HANGUL SYLLABLE YAEG..HANGUL SYLLABLE YAEH
+ {0xC5B4, 0xC5B4, prH2, gcLo}, // HANGUL SYLLABLE EO
+ {0xC5B5, 0xC5CF, prH3, gcLo}, // [27] HANGUL SYLLABLE EOG..HANGUL SYLLABLE EOH
+ {0xC5D0, 0xC5D0, prH2, gcLo}, // HANGUL SYLLABLE E
+ {0xC5D1, 0xC5EB, prH3, gcLo}, // [27] HANGUL SYLLABLE EG..HANGUL SYLLABLE EH
+ {0xC5EC, 0xC5EC, prH2, gcLo}, // HANGUL SYLLABLE YEO
+ {0xC5ED, 0xC607, prH3, gcLo}, // [27] HANGUL SYLLABLE YEOG..HANGUL SYLLABLE YEOH
+ {0xC608, 0xC608, prH2, gcLo}, // HANGUL SYLLABLE YE
+ {0xC609, 0xC623, prH3, gcLo}, // [27] HANGUL SYLLABLE YEG..HANGUL SYLLABLE YEH
+ {0xC624, 0xC624, prH2, gcLo}, // HANGUL SYLLABLE O
+ {0xC625, 0xC63F, prH3, gcLo}, // [27] HANGUL SYLLABLE OG..HANGUL SYLLABLE OH
+ {0xC640, 0xC640, prH2, gcLo}, // HANGUL SYLLABLE WA
+ {0xC641, 0xC65B, prH3, gcLo}, // [27] HANGUL SYLLABLE WAG..HANGUL SYLLABLE WAH
+ {0xC65C, 0xC65C, prH2, gcLo}, // HANGUL SYLLABLE WAE
+ {0xC65D, 0xC677, prH3, gcLo}, // [27] HANGUL SYLLABLE WAEG..HANGUL SYLLABLE WAEH
+ {0xC678, 0xC678, prH2, gcLo}, // HANGUL SYLLABLE OE
+ {0xC679, 0xC693, prH3, gcLo}, // [27] HANGUL SYLLABLE OEG..HANGUL SYLLABLE OEH
+ {0xC694, 0xC694, prH2, gcLo}, // HANGUL SYLLABLE YO
+ {0xC695, 0xC6AF, prH3, gcLo}, // [27] HANGUL SYLLABLE YOG..HANGUL SYLLABLE YOH
+ {0xC6B0, 0xC6B0, prH2, gcLo}, // HANGUL SYLLABLE U
+ {0xC6B1, 0xC6CB, prH3, gcLo}, // [27] HANGUL SYLLABLE UG..HANGUL SYLLABLE UH
+ {0xC6CC, 0xC6CC, prH2, gcLo}, // HANGUL SYLLABLE WEO
+ {0xC6CD, 0xC6E7, prH3, gcLo}, // [27] HANGUL SYLLABLE WEOG..HANGUL SYLLABLE WEOH
+ {0xC6E8, 0xC6E8, prH2, gcLo}, // HANGUL SYLLABLE WE
+ {0xC6E9, 0xC703, prH3, gcLo}, // [27] HANGUL SYLLABLE WEG..HANGUL SYLLABLE WEH
+ {0xC704, 0xC704, prH2, gcLo}, // HANGUL SYLLABLE WI
+ {0xC705, 0xC71F, prH3, gcLo}, // [27] HANGUL SYLLABLE WIG..HANGUL SYLLABLE WIH
+ {0xC720, 0xC720, prH2, gcLo}, // HANGUL SYLLABLE YU
+ {0xC721, 0xC73B, prH3, gcLo}, // [27] HANGUL SYLLABLE YUG..HANGUL SYLLABLE YUH
+ {0xC73C, 0xC73C, prH2, gcLo}, // HANGUL SYLLABLE EU
+ {0xC73D, 0xC757, prH3, gcLo}, // [27] HANGUL SYLLABLE EUG..HANGUL SYLLABLE EUH
+ {0xC758, 0xC758, prH2, gcLo}, // HANGUL SYLLABLE YI
+ {0xC759, 0xC773, prH3, gcLo}, // [27] HANGUL SYLLABLE YIG..HANGUL SYLLABLE YIH
+ {0xC774, 0xC774, prH2, gcLo}, // HANGUL SYLLABLE I
+ {0xC775, 0xC78F, prH3, gcLo}, // [27] HANGUL SYLLABLE IG..HANGUL SYLLABLE IH
+ {0xC790, 0xC790, prH2, gcLo}, // HANGUL SYLLABLE JA
+ {0xC791, 0xC7AB, prH3, gcLo}, // [27] HANGUL SYLLABLE JAG..HANGUL SYLLABLE JAH
+ {0xC7AC, 0xC7AC, prH2, gcLo}, // HANGUL SYLLABLE JAE
+ {0xC7AD, 0xC7C7, prH3, gcLo}, // [27] HANGUL SYLLABLE JAEG..HANGUL SYLLABLE JAEH
+ {0xC7C8, 0xC7C8, prH2, gcLo}, // HANGUL SYLLABLE JYA
+ {0xC7C9, 0xC7E3, prH3, gcLo}, // [27] HANGUL SYLLABLE JYAG..HANGUL SYLLABLE JYAH
+ {0xC7E4, 0xC7E4, prH2, gcLo}, // HANGUL SYLLABLE JYAE
+ {0xC7E5, 0xC7FF, prH3, gcLo}, // [27] HANGUL SYLLABLE JYAEG..HANGUL SYLLABLE JYAEH
+ {0xC800, 0xC800, prH2, gcLo}, // HANGUL SYLLABLE JEO
+ {0xC801, 0xC81B, prH3, gcLo}, // [27] HANGUL SYLLABLE JEOG..HANGUL SYLLABLE JEOH
+ {0xC81C, 0xC81C, prH2, gcLo}, // HANGUL SYLLABLE JE
+ {0xC81D, 0xC837, prH3, gcLo}, // [27] HANGUL SYLLABLE JEG..HANGUL SYLLABLE JEH
+ {0xC838, 0xC838, prH2, gcLo}, // HANGUL SYLLABLE JYEO
+ {0xC839, 0xC853, prH3, gcLo}, // [27] HANGUL SYLLABLE JYEOG..HANGUL SYLLABLE JYEOH
+ {0xC854, 0xC854, prH2, gcLo}, // HANGUL SYLLABLE JYE
+ {0xC855, 0xC86F, prH3, gcLo}, // [27] HANGUL SYLLABLE JYEG..HANGUL SYLLABLE JYEH
+ {0xC870, 0xC870, prH2, gcLo}, // HANGUL SYLLABLE JO
+ {0xC871, 0xC88B, prH3, gcLo}, // [27] HANGUL SYLLABLE JOG..HANGUL SYLLABLE JOH
+ {0xC88C, 0xC88C, prH2, gcLo}, // HANGUL SYLLABLE JWA
+ {0xC88D, 0xC8A7, prH3, gcLo}, // [27] HANGUL SYLLABLE JWAG..HANGUL SYLLABLE JWAH
+ {0xC8A8, 0xC8A8, prH2, gcLo}, // HANGUL SYLLABLE JWAE
+ {0xC8A9, 0xC8C3, prH3, gcLo}, // [27] HANGUL SYLLABLE JWAEG..HANGUL SYLLABLE JWAEH
+ {0xC8C4, 0xC8C4, prH2, gcLo}, // HANGUL SYLLABLE JOE
+ {0xC8C5, 0xC8DF, prH3, gcLo}, // [27] HANGUL SYLLABLE JOEG..HANGUL SYLLABLE JOEH
+ {0xC8E0, 0xC8E0, prH2, gcLo}, // HANGUL SYLLABLE JYO
+ {0xC8E1, 0xC8FB, prH3, gcLo}, // [27] HANGUL SYLLABLE JYOG..HANGUL SYLLABLE JYOH
+ {0xC8FC, 0xC8FC, prH2, gcLo}, // HANGUL SYLLABLE JU
+ {0xC8FD, 0xC917, prH3, gcLo}, // [27] HANGUL SYLLABLE JUG..HANGUL SYLLABLE JUH
+ {0xC918, 0xC918, prH2, gcLo}, // HANGUL SYLLABLE JWEO
+ {0xC919, 0xC933, prH3, gcLo}, // [27] HANGUL SYLLABLE JWEOG..HANGUL SYLLABLE JWEOH
+ {0xC934, 0xC934, prH2, gcLo}, // HANGUL SYLLABLE JWE
+ {0xC935, 0xC94F, prH3, gcLo}, // [27] HANGUL SYLLABLE JWEG..HANGUL SYLLABLE JWEH
+ {0xC950, 0xC950, prH2, gcLo}, // HANGUL SYLLABLE JWI
+ {0xC951, 0xC96B, prH3, gcLo}, // [27] HANGUL SYLLABLE JWIG..HANGUL SYLLABLE JWIH
+ {0xC96C, 0xC96C, prH2, gcLo}, // HANGUL SYLLABLE JYU
+ {0xC96D, 0xC987, prH3, gcLo}, // [27] HANGUL SYLLABLE JYUG..HANGUL SYLLABLE JYUH
+ {0xC988, 0xC988, prH2, gcLo}, // HANGUL SYLLABLE JEU
+ {0xC989, 0xC9A3, prH3, gcLo}, // [27] HANGUL SYLLABLE JEUG..HANGUL SYLLABLE JEUH
+ {0xC9A4, 0xC9A4, prH2, gcLo}, // HANGUL SYLLABLE JYI
+ {0xC9A5, 0xC9BF, prH3, gcLo}, // [27] HANGUL SYLLABLE JYIG..HANGUL SYLLABLE JYIH
+ {0xC9C0, 0xC9C0, prH2, gcLo}, // HANGUL SYLLABLE JI
+ {0xC9C1, 0xC9DB, prH3, gcLo}, // [27] HANGUL SYLLABLE JIG..HANGUL SYLLABLE JIH
+ {0xC9DC, 0xC9DC, prH2, gcLo}, // HANGUL SYLLABLE JJA
+ {0xC9DD, 0xC9F7, prH3, gcLo}, // [27] HANGUL SYLLABLE JJAG..HANGUL SYLLABLE JJAH
+ {0xC9F8, 0xC9F8, prH2, gcLo}, // HANGUL SYLLABLE JJAE
+ {0xC9F9, 0xCA13, prH3, gcLo}, // [27] HANGUL SYLLABLE JJAEG..HANGUL SYLLABLE JJAEH
+ {0xCA14, 0xCA14, prH2, gcLo}, // HANGUL SYLLABLE JJYA
+ {0xCA15, 0xCA2F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYAG..HANGUL SYLLABLE JJYAH
+ {0xCA30, 0xCA30, prH2, gcLo}, // HANGUL SYLLABLE JJYAE
+ {0xCA31, 0xCA4B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYAEG..HANGUL SYLLABLE JJYAEH
+ {0xCA4C, 0xCA4C, prH2, gcLo}, // HANGUL SYLLABLE JJEO
+ {0xCA4D, 0xCA67, prH3, gcLo}, // [27] HANGUL SYLLABLE JJEOG..HANGUL SYLLABLE JJEOH
+ {0xCA68, 0xCA68, prH2, gcLo}, // HANGUL SYLLABLE JJE
+ {0xCA69, 0xCA83, prH3, gcLo}, // [27] HANGUL SYLLABLE JJEG..HANGUL SYLLABLE JJEH
+ {0xCA84, 0xCA84, prH2, gcLo}, // HANGUL SYLLABLE JJYEO
+ {0xCA85, 0xCA9F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYEOG..HANGUL SYLLABLE JJYEOH
+ {0xCAA0, 0xCAA0, prH2, gcLo}, // HANGUL SYLLABLE JJYE
+ {0xCAA1, 0xCABB, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYEG..HANGUL SYLLABLE JJYEH
+ {0xCABC, 0xCABC, prH2, gcLo}, // HANGUL SYLLABLE JJO
+ {0xCABD, 0xCAD7, prH3, gcLo}, // [27] HANGUL SYLLABLE JJOG..HANGUL SYLLABLE JJOH
+ {0xCAD8, 0xCAD8, prH2, gcLo}, // HANGUL SYLLABLE JJWA
+ {0xCAD9, 0xCAF3, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWAG..HANGUL SYLLABLE JJWAH
+ {0xCAF4, 0xCAF4, prH2, gcLo}, // HANGUL SYLLABLE JJWAE
+ {0xCAF5, 0xCB0F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWAEG..HANGUL SYLLABLE JJWAEH
+ {0xCB10, 0xCB10, prH2, gcLo}, // HANGUL SYLLABLE JJOE
+ {0xCB11, 0xCB2B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJOEG..HANGUL SYLLABLE JJOEH
+ {0xCB2C, 0xCB2C, prH2, gcLo}, // HANGUL SYLLABLE JJYO
+ {0xCB2D, 0xCB47, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYOG..HANGUL SYLLABLE JJYOH
+ {0xCB48, 0xCB48, prH2, gcLo}, // HANGUL SYLLABLE JJU
+ {0xCB49, 0xCB63, prH3, gcLo}, // [27] HANGUL SYLLABLE JJUG..HANGUL SYLLABLE JJUH
+ {0xCB64, 0xCB64, prH2, gcLo}, // HANGUL SYLLABLE JJWEO
+ {0xCB65, 0xCB7F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWEOG..HANGUL SYLLABLE JJWEOH
+ {0xCB80, 0xCB80, prH2, gcLo}, // HANGUL SYLLABLE JJWE
+ {0xCB81, 0xCB9B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWEG..HANGUL SYLLABLE JJWEH
+ {0xCB9C, 0xCB9C, prH2, gcLo}, // HANGUL SYLLABLE JJWI
+ {0xCB9D, 0xCBB7, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWIG..HANGUL SYLLABLE JJWIH
+ {0xCBB8, 0xCBB8, prH2, gcLo}, // HANGUL SYLLABLE JJYU
+ {0xCBB9, 0xCBD3, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYUG..HANGUL SYLLABLE JJYUH
+ {0xCBD4, 0xCBD4, prH2, gcLo}, // HANGUL SYLLABLE JJEU
+ {0xCBD5, 0xCBEF, prH3, gcLo}, // [27] HANGUL SYLLABLE JJEUG..HANGUL SYLLABLE JJEUH
+ {0xCBF0, 0xCBF0, prH2, gcLo}, // HANGUL SYLLABLE JJYI
+ {0xCBF1, 0xCC0B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYIG..HANGUL SYLLABLE JJYIH
+ {0xCC0C, 0xCC0C, prH2, gcLo}, // HANGUL SYLLABLE JJI
+ {0xCC0D, 0xCC27, prH3, gcLo}, // [27] HANGUL SYLLABLE JJIG..HANGUL SYLLABLE JJIH
+ {0xCC28, 0xCC28, prH2, gcLo}, // HANGUL SYLLABLE CA
+ {0xCC29, 0xCC43, prH3, gcLo}, // [27] HANGUL SYLLABLE CAG..HANGUL SYLLABLE CAH
+ {0xCC44, 0xCC44, prH2, gcLo}, // HANGUL SYLLABLE CAE
+ {0xCC45, 0xCC5F, prH3, gcLo}, // [27] HANGUL SYLLABLE CAEG..HANGUL SYLLABLE CAEH
+ {0xCC60, 0xCC60, prH2, gcLo}, // HANGUL SYLLABLE CYA
+ {0xCC61, 0xCC7B, prH3, gcLo}, // [27] HANGUL SYLLABLE CYAG..HANGUL SYLLABLE CYAH
+ {0xCC7C, 0xCC7C, prH2, gcLo}, // HANGUL SYLLABLE CYAE
+ {0xCC7D, 0xCC97, prH3, gcLo}, // [27] HANGUL SYLLABLE CYAEG..HANGUL SYLLABLE CYAEH
+ {0xCC98, 0xCC98, prH2, gcLo}, // HANGUL SYLLABLE CEO
+ {0xCC99, 0xCCB3, prH3, gcLo}, // [27] HANGUL SYLLABLE CEOG..HANGUL SYLLABLE CEOH
+ {0xCCB4, 0xCCB4, prH2, gcLo}, // HANGUL SYLLABLE CE
+ {0xCCB5, 0xCCCF, prH3, gcLo}, // [27] HANGUL SYLLABLE CEG..HANGUL SYLLABLE CEH
+ {0xCCD0, 0xCCD0, prH2, gcLo}, // HANGUL SYLLABLE CYEO
+ {0xCCD1, 0xCCEB, prH3, gcLo}, // [27] HANGUL SYLLABLE CYEOG..HANGUL SYLLABLE CYEOH
+ {0xCCEC, 0xCCEC, prH2, gcLo}, // HANGUL SYLLABLE CYE
+ {0xCCED, 0xCD07, prH3, gcLo}, // [27] HANGUL SYLLABLE CYEG..HANGUL SYLLABLE CYEH
+ {0xCD08, 0xCD08, prH2, gcLo}, // HANGUL SYLLABLE CO
+ {0xCD09, 0xCD23, prH3, gcLo}, // [27] HANGUL SYLLABLE COG..HANGUL SYLLABLE COH
+ {0xCD24, 0xCD24, prH2, gcLo}, // HANGUL SYLLABLE CWA
+ {0xCD25, 0xCD3F, prH3, gcLo}, // [27] HANGUL SYLLABLE CWAG..HANGUL SYLLABLE CWAH
+ {0xCD40, 0xCD40, prH2, gcLo}, // HANGUL SYLLABLE CWAE
+ {0xCD41, 0xCD5B, prH3, gcLo}, // [27] HANGUL SYLLABLE CWAEG..HANGUL SYLLABLE CWAEH
+ {0xCD5C, 0xCD5C, prH2, gcLo}, // HANGUL SYLLABLE COE
+ {0xCD5D, 0xCD77, prH3, gcLo}, // [27] HANGUL SYLLABLE COEG..HANGUL SYLLABLE COEH
+ {0xCD78, 0xCD78, prH2, gcLo}, // HANGUL SYLLABLE CYO
+ {0xCD79, 0xCD93, prH3, gcLo}, // [27] HANGUL SYLLABLE CYOG..HANGUL SYLLABLE CYOH
+ {0xCD94, 0xCD94, prH2, gcLo}, // HANGUL SYLLABLE CU
+ {0xCD95, 0xCDAF, prH3, gcLo}, // [27] HANGUL SYLLABLE CUG..HANGUL SYLLABLE CUH
+ {0xCDB0, 0xCDB0, prH2, gcLo}, // HANGUL SYLLABLE CWEO
+ {0xCDB1, 0xCDCB, prH3, gcLo}, // [27] HANGUL SYLLABLE CWEOG..HANGUL SYLLABLE CWEOH
+ {0xCDCC, 0xCDCC, prH2, gcLo}, // HANGUL SYLLABLE CWE
+ {0xCDCD, 0xCDE7, prH3, gcLo}, // [27] HANGUL SYLLABLE CWEG..HANGUL SYLLABLE CWEH
+ {0xCDE8, 0xCDE8, prH2, gcLo}, // HANGUL SYLLABLE CWI
+ {0xCDE9, 0xCE03, prH3, gcLo}, // [27] HANGUL SYLLABLE CWIG..HANGUL SYLLABLE CWIH
+ {0xCE04, 0xCE04, prH2, gcLo}, // HANGUL SYLLABLE CYU
+ {0xCE05, 0xCE1F, prH3, gcLo}, // [27] HANGUL SYLLABLE CYUG..HANGUL SYLLABLE CYUH
+ {0xCE20, 0xCE20, prH2, gcLo}, // HANGUL SYLLABLE CEU
+ {0xCE21, 0xCE3B, prH3, gcLo}, // [27] HANGUL SYLLABLE CEUG..HANGUL SYLLABLE CEUH
+ {0xCE3C, 0xCE3C, prH2, gcLo}, // HANGUL SYLLABLE CYI
+ {0xCE3D, 0xCE57, prH3, gcLo}, // [27] HANGUL SYLLABLE CYIG..HANGUL SYLLABLE CYIH
+ {0xCE58, 0xCE58, prH2, gcLo}, // HANGUL SYLLABLE CI
+ {0xCE59, 0xCE73, prH3, gcLo}, // [27] HANGUL SYLLABLE CIG..HANGUL SYLLABLE CIH
+ {0xCE74, 0xCE74, prH2, gcLo}, // HANGUL SYLLABLE KA
+ {0xCE75, 0xCE8F, prH3, gcLo}, // [27] HANGUL SYLLABLE KAG..HANGUL SYLLABLE KAH
+ {0xCE90, 0xCE90, prH2, gcLo}, // HANGUL SYLLABLE KAE
+ {0xCE91, 0xCEAB, prH3, gcLo}, // [27] HANGUL SYLLABLE KAEG..HANGUL SYLLABLE KAEH
+ {0xCEAC, 0xCEAC, prH2, gcLo}, // HANGUL SYLLABLE KYA
+ {0xCEAD, 0xCEC7, prH3, gcLo}, // [27] HANGUL SYLLABLE KYAG..HANGUL SYLLABLE KYAH
+ {0xCEC8, 0xCEC8, prH2, gcLo}, // HANGUL SYLLABLE KYAE
+ {0xCEC9, 0xCEE3, prH3, gcLo}, // [27] HANGUL SYLLABLE KYAEG..HANGUL SYLLABLE KYAEH
+ {0xCEE4, 0xCEE4, prH2, gcLo}, // HANGUL SYLLABLE KEO
+ {0xCEE5, 0xCEFF, prH3, gcLo}, // [27] HANGUL SYLLABLE KEOG..HANGUL SYLLABLE KEOH
+ {0xCF00, 0xCF00, prH2, gcLo}, // HANGUL SYLLABLE KE
+ {0xCF01, 0xCF1B, prH3, gcLo}, // [27] HANGUL SYLLABLE KEG..HANGUL SYLLABLE KEH
+ {0xCF1C, 0xCF1C, prH2, gcLo}, // HANGUL SYLLABLE KYEO
+ {0xCF1D, 0xCF37, prH3, gcLo}, // [27] HANGUL SYLLABLE KYEOG..HANGUL SYLLABLE KYEOH
+ {0xCF38, 0xCF38, prH2, gcLo}, // HANGUL SYLLABLE KYE
+ {0xCF39, 0xCF53, prH3, gcLo}, // [27] HANGUL SYLLABLE KYEG..HANGUL SYLLABLE KYEH
+ {0xCF54, 0xCF54, prH2, gcLo}, // HANGUL SYLLABLE KO
+ {0xCF55, 0xCF6F, prH3, gcLo}, // [27] HANGUL SYLLABLE KOG..HANGUL SYLLABLE KOH
+ {0xCF70, 0xCF70, prH2, gcLo}, // HANGUL SYLLABLE KWA
+ {0xCF71, 0xCF8B, prH3, gcLo}, // [27] HANGUL SYLLABLE KWAG..HANGUL SYLLABLE KWAH
+ {0xCF8C, 0xCF8C, prH2, gcLo}, // HANGUL SYLLABLE KWAE
+ {0xCF8D, 0xCFA7, prH3, gcLo}, // [27] HANGUL SYLLABLE KWAEG..HANGUL SYLLABLE KWAEH
+ {0xCFA8, 0xCFA8, prH2, gcLo}, // HANGUL SYLLABLE KOE
+ {0xCFA9, 0xCFC3, prH3, gcLo}, // [27] HANGUL SYLLABLE KOEG..HANGUL SYLLABLE KOEH
+ {0xCFC4, 0xCFC4, prH2, gcLo}, // HANGUL SYLLABLE KYO
+ {0xCFC5, 0xCFDF, prH3, gcLo}, // [27] HANGUL SYLLABLE KYOG..HANGUL SYLLABLE KYOH
+ {0xCFE0, 0xCFE0, prH2, gcLo}, // HANGUL SYLLABLE KU
+ {0xCFE1, 0xCFFB, prH3, gcLo}, // [27] HANGUL SYLLABLE KUG..HANGUL SYLLABLE KUH
+ {0xCFFC, 0xCFFC, prH2, gcLo}, // HANGUL SYLLABLE KWEO
+ {0xCFFD, 0xD017, prH3, gcLo}, // [27] HANGUL SYLLABLE KWEOG..HANGUL SYLLABLE KWEOH
+ {0xD018, 0xD018, prH2, gcLo}, // HANGUL SYLLABLE KWE
+ {0xD019, 0xD033, prH3, gcLo}, // [27] HANGUL SYLLABLE KWEG..HANGUL SYLLABLE KWEH
+ {0xD034, 0xD034, prH2, gcLo}, // HANGUL SYLLABLE KWI
+ {0xD035, 0xD04F, prH3, gcLo}, // [27] HANGUL SYLLABLE KWIG..HANGUL SYLLABLE KWIH
+ {0xD050, 0xD050, prH2, gcLo}, // HANGUL SYLLABLE KYU
+ {0xD051, 0xD06B, prH3, gcLo}, // [27] HANGUL SYLLABLE KYUG..HANGUL SYLLABLE KYUH
+ {0xD06C, 0xD06C, prH2, gcLo}, // HANGUL SYLLABLE KEU
+ {0xD06D, 0xD087, prH3, gcLo}, // [27] HANGUL SYLLABLE KEUG..HANGUL SYLLABLE KEUH
+ {0xD088, 0xD088, prH2, gcLo}, // HANGUL SYLLABLE KYI
+ {0xD089, 0xD0A3, prH3, gcLo}, // [27] HANGUL SYLLABLE KYIG..HANGUL SYLLABLE KYIH
+ {0xD0A4, 0xD0A4, prH2, gcLo}, // HANGUL SYLLABLE KI
+ {0xD0A5, 0xD0BF, prH3, gcLo}, // [27] HANGUL SYLLABLE KIG..HANGUL SYLLABLE KIH
+ {0xD0C0, 0xD0C0, prH2, gcLo}, // HANGUL SYLLABLE TA
+ {0xD0C1, 0xD0DB, prH3, gcLo}, // [27] HANGUL SYLLABLE TAG..HANGUL SYLLABLE TAH
+ {0xD0DC, 0xD0DC, prH2, gcLo}, // HANGUL SYLLABLE TAE
+ {0xD0DD, 0xD0F7, prH3, gcLo}, // [27] HANGUL SYLLABLE TAEG..HANGUL SYLLABLE TAEH
+ {0xD0F8, 0xD0F8, prH2, gcLo}, // HANGUL SYLLABLE TYA
+ {0xD0F9, 0xD113, prH3, gcLo}, // [27] HANGUL SYLLABLE TYAG..HANGUL SYLLABLE TYAH
+ {0xD114, 0xD114, prH2, gcLo}, // HANGUL SYLLABLE TYAE
+ {0xD115, 0xD12F, prH3, gcLo}, // [27] HANGUL SYLLABLE TYAEG..HANGUL SYLLABLE TYAEH
+ {0xD130, 0xD130, prH2, gcLo}, // HANGUL SYLLABLE TEO
+ {0xD131, 0xD14B, prH3, gcLo}, // [27] HANGUL SYLLABLE TEOG..HANGUL SYLLABLE TEOH
+ {0xD14C, 0xD14C, prH2, gcLo}, // HANGUL SYLLABLE TE
+ {0xD14D, 0xD167, prH3, gcLo}, // [27] HANGUL SYLLABLE TEG..HANGUL SYLLABLE TEH
+ {0xD168, 0xD168, prH2, gcLo}, // HANGUL SYLLABLE TYEO
+ {0xD169, 0xD183, prH3, gcLo}, // [27] HANGUL SYLLABLE TYEOG..HANGUL SYLLABLE TYEOH
+ {0xD184, 0xD184, prH2, gcLo}, // HANGUL SYLLABLE TYE
+ {0xD185, 0xD19F, prH3, gcLo}, // [27] HANGUL SYLLABLE TYEG..HANGUL SYLLABLE TYEH
+ {0xD1A0, 0xD1A0, prH2, gcLo}, // HANGUL SYLLABLE TO
+ {0xD1A1, 0xD1BB, prH3, gcLo}, // [27] HANGUL SYLLABLE TOG..HANGUL SYLLABLE TOH
+ {0xD1BC, 0xD1BC, prH2, gcLo}, // HANGUL SYLLABLE TWA
+ {0xD1BD, 0xD1D7, prH3, gcLo}, // [27] HANGUL SYLLABLE TWAG..HANGUL SYLLABLE TWAH
+ {0xD1D8, 0xD1D8, prH2, gcLo}, // HANGUL SYLLABLE TWAE
+ {0xD1D9, 0xD1F3, prH3, gcLo}, // [27] HANGUL SYLLABLE TWAEG..HANGUL SYLLABLE TWAEH
+ {0xD1F4, 0xD1F4, prH2, gcLo}, // HANGUL SYLLABLE TOE
+ {0xD1F5, 0xD20F, prH3, gcLo}, // [27] HANGUL SYLLABLE TOEG..HANGUL SYLLABLE TOEH
+ {0xD210, 0xD210, prH2, gcLo}, // HANGUL SYLLABLE TYO
+ {0xD211, 0xD22B, prH3, gcLo}, // [27] HANGUL SYLLABLE TYOG..HANGUL SYLLABLE TYOH
+ {0xD22C, 0xD22C, prH2, gcLo}, // HANGUL SYLLABLE TU
+ {0xD22D, 0xD247, prH3, gcLo}, // [27] HANGUL SYLLABLE TUG..HANGUL SYLLABLE TUH
+ {0xD248, 0xD248, prH2, gcLo}, // HANGUL SYLLABLE TWEO
+ {0xD249, 0xD263, prH3, gcLo}, // [27] HANGUL SYLLABLE TWEOG..HANGUL SYLLABLE TWEOH
+ {0xD264, 0xD264, prH2, gcLo}, // HANGUL SYLLABLE TWE
+ {0xD265, 0xD27F, prH3, gcLo}, // [27] HANGUL SYLLABLE TWEG..HANGUL SYLLABLE TWEH
+ {0xD280, 0xD280, prH2, gcLo}, // HANGUL SYLLABLE TWI
+ {0xD281, 0xD29B, prH3, gcLo}, // [27] HANGUL SYLLABLE TWIG..HANGUL SYLLABLE TWIH
+ {0xD29C, 0xD29C, prH2, gcLo}, // HANGUL SYLLABLE TYU
+ {0xD29D, 0xD2B7, prH3, gcLo}, // [27] HANGUL SYLLABLE TYUG..HANGUL SYLLABLE TYUH
+ {0xD2B8, 0xD2B8, prH2, gcLo}, // HANGUL SYLLABLE TEU
+ {0xD2B9, 0xD2D3, prH3, gcLo}, // [27] HANGUL SYLLABLE TEUG..HANGUL SYLLABLE TEUH
+ {0xD2D4, 0xD2D4, prH2, gcLo}, // HANGUL SYLLABLE TYI
+ {0xD2D5, 0xD2EF, prH3, gcLo}, // [27] HANGUL SYLLABLE TYIG..HANGUL SYLLABLE TYIH
+ {0xD2F0, 0xD2F0, prH2, gcLo}, // HANGUL SYLLABLE TI
+ {0xD2F1, 0xD30B, prH3, gcLo}, // [27] HANGUL SYLLABLE TIG..HANGUL SYLLABLE TIH
+ {0xD30C, 0xD30C, prH2, gcLo}, // HANGUL SYLLABLE PA
+ {0xD30D, 0xD327, prH3, gcLo}, // [27] HANGUL SYLLABLE PAG..HANGUL SYLLABLE PAH
+ {0xD328, 0xD328, prH2, gcLo}, // HANGUL SYLLABLE PAE
+ {0xD329, 0xD343, prH3, gcLo}, // [27] HANGUL SYLLABLE PAEG..HANGUL SYLLABLE PAEH
+ {0xD344, 0xD344, prH2, gcLo}, // HANGUL SYLLABLE PYA
+ {0xD345, 0xD35F, prH3, gcLo}, // [27] HANGUL SYLLABLE PYAG..HANGUL SYLLABLE PYAH
+ {0xD360, 0xD360, prH2, gcLo}, // HANGUL SYLLABLE PYAE
+ {0xD361, 0xD37B, prH3, gcLo}, // [27] HANGUL SYLLABLE PYAEG..HANGUL SYLLABLE PYAEH
+ {0xD37C, 0xD37C, prH2, gcLo}, // HANGUL SYLLABLE PEO
+ {0xD37D, 0xD397, prH3, gcLo}, // [27] HANGUL SYLLABLE PEOG..HANGUL SYLLABLE PEOH
+ {0xD398, 0xD398, prH2, gcLo}, // HANGUL SYLLABLE PE
+ {0xD399, 0xD3B3, prH3, gcLo}, // [27] HANGUL SYLLABLE PEG..HANGUL SYLLABLE PEH
+ {0xD3B4, 0xD3B4, prH2, gcLo}, // HANGUL SYLLABLE PYEO
+ {0xD3B5, 0xD3CF, prH3, gcLo}, // [27] HANGUL SYLLABLE PYEOG..HANGUL SYLLABLE PYEOH
+ {0xD3D0, 0xD3D0, prH2, gcLo}, // HANGUL SYLLABLE PYE
+ {0xD3D1, 0xD3EB, prH3, gcLo}, // [27] HANGUL SYLLABLE PYEG..HANGUL SYLLABLE PYEH
+ {0xD3EC, 0xD3EC, prH2, gcLo}, // HANGUL SYLLABLE PO
+ {0xD3ED, 0xD407, prH3, gcLo}, // [27] HANGUL SYLLABLE POG..HANGUL SYLLABLE POH
+ {0xD408, 0xD408, prH2, gcLo}, // HANGUL SYLLABLE PWA
+ {0xD409, 0xD423, prH3, gcLo}, // [27] HANGUL SYLLABLE PWAG..HANGUL SYLLABLE PWAH
+ {0xD424, 0xD424, prH2, gcLo}, // HANGUL SYLLABLE PWAE
+ {0xD425, 0xD43F, prH3, gcLo}, // [27] HANGUL SYLLABLE PWAEG..HANGUL SYLLABLE PWAEH
+ {0xD440, 0xD440, prH2, gcLo}, // HANGUL SYLLABLE POE
+ {0xD441, 0xD45B, prH3, gcLo}, // [27] HANGUL SYLLABLE POEG..HANGUL SYLLABLE POEH
+ {0xD45C, 0xD45C, prH2, gcLo}, // HANGUL SYLLABLE PYO
+ {0xD45D, 0xD477, prH3, gcLo}, // [27] HANGUL SYLLABLE PYOG..HANGUL SYLLABLE PYOH
+ {0xD478, 0xD478, prH2, gcLo}, // HANGUL SYLLABLE PU
+ {0xD479, 0xD493, prH3, gcLo}, // [27] HANGUL SYLLABLE PUG..HANGUL SYLLABLE PUH
+ {0xD494, 0xD494, prH2, gcLo}, // HANGUL SYLLABLE PWEO
+ {0xD495, 0xD4AF, prH3, gcLo}, // [27] HANGUL SYLLABLE PWEOG..HANGUL SYLLABLE PWEOH
+ {0xD4B0, 0xD4B0, prH2, gcLo}, // HANGUL SYLLABLE PWE
+ {0xD4B1, 0xD4CB, prH3, gcLo}, // [27] HANGUL SYLLABLE PWEG..HANGUL SYLLABLE PWEH
+ {0xD4CC, 0xD4CC, prH2, gcLo}, // HANGUL SYLLABLE PWI
+ {0xD4CD, 0xD4E7, prH3, gcLo}, // [27] HANGUL SYLLABLE PWIG..HANGUL SYLLABLE PWIH
+ {0xD4E8, 0xD4E8, prH2, gcLo}, // HANGUL SYLLABLE PYU
+ {0xD4E9, 0xD503, prH3, gcLo}, // [27] HANGUL SYLLABLE PYUG..HANGUL SYLLABLE PYUH
+ {0xD504, 0xD504, prH2, gcLo}, // HANGUL SYLLABLE PEU
+ {0xD505, 0xD51F, prH3, gcLo}, // [27] HANGUL SYLLABLE PEUG..HANGUL SYLLABLE PEUH
+ {0xD520, 0xD520, prH2, gcLo}, // HANGUL SYLLABLE PYI
+ {0xD521, 0xD53B, prH3, gcLo}, // [27] HANGUL SYLLABLE PYIG..HANGUL SYLLABLE PYIH
+ {0xD53C, 0xD53C, prH2, gcLo}, // HANGUL SYLLABLE PI
+ {0xD53D, 0xD557, prH3, gcLo}, // [27] HANGUL SYLLABLE PIG..HANGUL SYLLABLE PIH
+ {0xD558, 0xD558, prH2, gcLo}, // HANGUL SYLLABLE HA
+ {0xD559, 0xD573, prH3, gcLo}, // [27] HANGUL SYLLABLE HAG..HANGUL SYLLABLE HAH
+ {0xD574, 0xD574, prH2, gcLo}, // HANGUL SYLLABLE HAE
+ {0xD575, 0xD58F, prH3, gcLo}, // [27] HANGUL SYLLABLE HAEG..HANGUL SYLLABLE HAEH
+ {0xD590, 0xD590, prH2, gcLo}, // HANGUL SYLLABLE HYA
+ {0xD591, 0xD5AB, prH3, gcLo}, // [27] HANGUL SYLLABLE HYAG..HANGUL SYLLABLE HYAH
+ {0xD5AC, 0xD5AC, prH2, gcLo}, // HANGUL SYLLABLE HYAE
+ {0xD5AD, 0xD5C7, prH3, gcLo}, // [27] HANGUL SYLLABLE HYAEG..HANGUL SYLLABLE HYAEH
+ {0xD5C8, 0xD5C8, prH2, gcLo}, // HANGUL SYLLABLE HEO
+ {0xD5C9, 0xD5E3, prH3, gcLo}, // [27] HANGUL SYLLABLE HEOG..HANGUL SYLLABLE HEOH
+ {0xD5E4, 0xD5E4, prH2, gcLo}, // HANGUL SYLLABLE HE
+ {0xD5E5, 0xD5FF, prH3, gcLo}, // [27] HANGUL SYLLABLE HEG..HANGUL SYLLABLE HEH
+ {0xD600, 0xD600, prH2, gcLo}, // HANGUL SYLLABLE HYEO
+ {0xD601, 0xD61B, prH3, gcLo}, // [27] HANGUL SYLLABLE HYEOG..HANGUL SYLLABLE HYEOH
+ {0xD61C, 0xD61C, prH2, gcLo}, // HANGUL SYLLABLE HYE
+ {0xD61D, 0xD637, prH3, gcLo}, // [27] HANGUL SYLLABLE HYEG..HANGUL SYLLABLE HYEH
+ {0xD638, 0xD638, prH2, gcLo}, // HANGUL SYLLABLE HO
+ {0xD639, 0xD653, prH3, gcLo}, // [27] HANGUL SYLLABLE HOG..HANGUL SYLLABLE HOH
+ {0xD654, 0xD654, prH2, gcLo}, // HANGUL SYLLABLE HWA
+ {0xD655, 0xD66F, prH3, gcLo}, // [27] HANGUL SYLLABLE HWAG..HANGUL SYLLABLE HWAH
+ {0xD670, 0xD670, prH2, gcLo}, // HANGUL SYLLABLE HWAE
+ {0xD671, 0xD68B, prH3, gcLo}, // [27] HANGUL SYLLABLE HWAEG..HANGUL SYLLABLE HWAEH
+ {0xD68C, 0xD68C, prH2, gcLo}, // HANGUL SYLLABLE HOE
+ {0xD68D, 0xD6A7, prH3, gcLo}, // [27] HANGUL SYLLABLE HOEG..HANGUL SYLLABLE HOEH
+ {0xD6A8, 0xD6A8, prH2, gcLo}, // HANGUL SYLLABLE HYO
+ {0xD6A9, 0xD6C3, prH3, gcLo}, // [27] HANGUL SYLLABLE HYOG..HANGUL SYLLABLE HYOH
+ {0xD6C4, 0xD6C4, prH2, gcLo}, // HANGUL SYLLABLE HU
+ {0xD6C5, 0xD6DF, prH3, gcLo}, // [27] HANGUL SYLLABLE HUG..HANGUL SYLLABLE HUH
+ {0xD6E0, 0xD6E0, prH2, gcLo}, // HANGUL SYLLABLE HWEO
+ {0xD6E1, 0xD6FB, prH3, gcLo}, // [27] HANGUL SYLLABLE HWEOG..HANGUL SYLLABLE HWEOH
+ {0xD6FC, 0xD6FC, prH2, gcLo}, // HANGUL SYLLABLE HWE
+ {0xD6FD, 0xD717, prH3, gcLo}, // [27] HANGUL SYLLABLE HWEG..HANGUL SYLLABLE HWEH
+ {0xD718, 0xD718, prH2, gcLo}, // HANGUL SYLLABLE HWI
+ {0xD719, 0xD733, prH3, gcLo}, // [27] HANGUL SYLLABLE HWIG..HANGUL SYLLABLE HWIH
+ {0xD734, 0xD734, prH2, gcLo}, // HANGUL SYLLABLE HYU
+ {0xD735, 0xD74F, prH3, gcLo}, // [27] HANGUL SYLLABLE HYUG..HANGUL SYLLABLE HYUH
+ {0xD750, 0xD750, prH2, gcLo}, // HANGUL SYLLABLE HEU
+ {0xD751, 0xD76B, prH3, gcLo}, // [27] HANGUL SYLLABLE HEUG..HANGUL SYLLABLE HEUH
+ {0xD76C, 0xD76C, prH2, gcLo}, // HANGUL SYLLABLE HYI
+ {0xD76D, 0xD787, prH3, gcLo}, // [27] HANGUL SYLLABLE HYIG..HANGUL SYLLABLE HYIH
+ {0xD788, 0xD788, prH2, gcLo}, // HANGUL SYLLABLE HI
+ {0xD789, 0xD7A3, prH3, gcLo}, // [27] HANGUL SYLLABLE HIG..HANGUL SYLLABLE HIH
+ {0xD7B0, 0xD7C6, prJV, gcLo}, // [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E
+ {0xD7CB, 0xD7FB, prJT, gcLo}, // [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH
+ {0xD800, 0xDB7F, prSG, gcCs}, // [896] ..
+ {0xDB80, 0xDBFF, prSG, gcCs}, // [128] ..
+ {0xDC00, 0xDFFF, prSG, gcCs}, // [1024] ..
+ {0xE000, 0xF8FF, prXX, gcCo}, // [6400] ..
+ {0xF900, 0xFA6D, prID, gcLo}, // [366] CJK COMPATIBILITY IDEOGRAPH-F900..CJK COMPATIBILITY IDEOGRAPH-FA6D
+ {0xFA6E, 0xFA6F, prID, gcCn}, // [2] ..
+ {0xFA70, 0xFAD9, prID, gcLo}, // [106] CJK COMPATIBILITY IDEOGRAPH-FA70..CJK COMPATIBILITY IDEOGRAPH-FAD9
+ {0xFADA, 0xFAFF, prID, gcCn}, // [38] ..
+ {0xFB00, 0xFB06, prAL, gcLl}, // [7] LATIN SMALL LIGATURE FF..LATIN SMALL LIGATURE ST
+ {0xFB13, 0xFB17, prAL, gcLl}, // [5] ARMENIAN SMALL LIGATURE MEN NOW..ARMENIAN SMALL LIGATURE MEN XEH
+ {0xFB1D, 0xFB1D, prHL, gcLo}, // HEBREW LETTER YOD WITH HIRIQ
+ {0xFB1E, 0xFB1E, prCM, gcMn}, // HEBREW POINT JUDEO-SPANISH VARIKA
+ {0xFB1F, 0xFB28, prHL, gcLo}, // [10] HEBREW LIGATURE YIDDISH YOD YOD PATAH..HEBREW LETTER WIDE TAV
+ {0xFB29, 0xFB29, prAL, gcSm}, // HEBREW LETTER ALTERNATIVE PLUS SIGN
+ {0xFB2A, 0xFB36, prHL, gcLo}, // [13] HEBREW LETTER SHIN WITH SHIN DOT..HEBREW LETTER ZAYIN WITH DAGESH
+ {0xFB38, 0xFB3C, prHL, gcLo}, // [5] HEBREW LETTER TET WITH DAGESH..HEBREW LETTER LAMED WITH DAGESH
+ {0xFB3E, 0xFB3E, prHL, gcLo}, // HEBREW LETTER MEM WITH DAGESH
+ {0xFB40, 0xFB41, prHL, gcLo}, // [2] HEBREW LETTER NUN WITH DAGESH..HEBREW LETTER SAMEKH WITH DAGESH
+ {0xFB43, 0xFB44, prHL, gcLo}, // [2] HEBREW LETTER FINAL PE WITH DAGESH..HEBREW LETTER PE WITH DAGESH
+ {0xFB46, 0xFB4F, prHL, gcLo}, // [10] HEBREW LETTER TSADI WITH DAGESH..HEBREW LIGATURE ALEF LAMED
+ {0xFB50, 0xFBB1, prAL, gcLo}, // [98] ARABIC LETTER ALEF WASLA ISOLATED FORM..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE FINAL FORM
+ {0xFBB2, 0xFBC2, prAL, gcSk}, // [17] ARABIC SYMBOL DOT ABOVE..ARABIC SYMBOL WASLA ABOVE
+ {0xFBD3, 0xFD3D, prAL, gcLo}, // [363] ARABIC LETTER NG ISOLATED FORM..ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM
+ {0xFD3E, 0xFD3E, prCL, gcPe}, // ORNATE LEFT PARENTHESIS
+ {0xFD3F, 0xFD3F, prOP, gcPs}, // ORNATE RIGHT PARENTHESIS
+ {0xFD40, 0xFD4F, prAL, gcSo}, // [16] ARABIC LIGATURE RAHIMAHU ALLAAH..ARABIC LIGATURE RAHIMAHUM ALLAAH
+ {0xFD50, 0xFD8F, prAL, gcLo}, // [64] ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM..ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM
+ {0xFD92, 0xFDC7, prAL, gcLo}, // [54] ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM..ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM
+ {0xFDCF, 0xFDCF, prAL, gcSo}, // ARABIC LIGATURE SALAAMUHU ALAYNAA
+ {0xFDF0, 0xFDFB, prAL, gcLo}, // [12] ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM..ARABIC LIGATURE JALLAJALALOUHOU
+ {0xFDFC, 0xFDFC, prPO, gcSc}, // RIAL SIGN
+ {0xFDFD, 0xFDFF, prAL, gcSo}, // [3] ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM..ARABIC LIGATURE AZZA WA JALL
+ {0xFE00, 0xFE0F, prCM, gcMn}, // [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16
+ {0xFE10, 0xFE10, prIS, gcPo}, // PRESENTATION FORM FOR VERTICAL COMMA
+ {0xFE11, 0xFE12, prCL, gcPo}, // [2] PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC COMMA..PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC FULL STOP
+ {0xFE13, 0xFE14, prIS, gcPo}, // [2] PRESENTATION FORM FOR VERTICAL COLON..PRESENTATION FORM FOR VERTICAL SEMICOLON
+ {0xFE15, 0xFE16, prEX, gcPo}, // [2] PRESENTATION FORM FOR VERTICAL EXCLAMATION MARK..PRESENTATION FORM FOR VERTICAL QUESTION MARK
+ {0xFE17, 0xFE17, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT WHITE LENTICULAR BRACKET
+ {0xFE18, 0xFE18, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRAKCET
+ {0xFE19, 0xFE19, prIN, gcPo}, // PRESENTATION FORM FOR VERTICAL HORIZONTAL ELLIPSIS
+ {0xFE20, 0xFE2F, prCM, gcMn}, // [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF
+ {0xFE30, 0xFE30, prID, gcPo}, // PRESENTATION FORM FOR VERTICAL TWO DOT LEADER
+ {0xFE31, 0xFE32, prID, gcPd}, // [2] PRESENTATION FORM FOR VERTICAL EM DASH..PRESENTATION FORM FOR VERTICAL EN DASH
+ {0xFE33, 0xFE34, prID, gcPc}, // [2] PRESENTATION FORM FOR VERTICAL LOW LINE..PRESENTATION FORM FOR VERTICAL WAVY LOW LINE
+ {0xFE35, 0xFE35, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS
+ {0xFE36, 0xFE36, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS
+ {0xFE37, 0xFE37, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET
+ {0xFE38, 0xFE38, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET
+ {0xFE39, 0xFE39, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT TORTOISE SHELL BRACKET
+ {0xFE3A, 0xFE3A, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT TORTOISE SHELL BRACKET
+ {0xFE3B, 0xFE3B, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT BLACK LENTICULAR BRACKET
+ {0xFE3C, 0xFE3C, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT BLACK LENTICULAR BRACKET
+ {0xFE3D, 0xFE3D, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT DOUBLE ANGLE BRACKET
+ {0xFE3E, 0xFE3E, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT DOUBLE ANGLE BRACKET
+ {0xFE3F, 0xFE3F, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT ANGLE BRACKET
+ {0xFE40, 0xFE40, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT ANGLE BRACKET
+ {0xFE41, 0xFE41, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET
+ {0xFE42, 0xFE42, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET
+ {0xFE43, 0xFE43, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET
+ {0xFE44, 0xFE44, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET
+ {0xFE45, 0xFE46, prID, gcPo}, // [2] SESAME DOT..WHITE SESAME DOT
+ {0xFE47, 0xFE47, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET
+ {0xFE48, 0xFE48, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET
+ {0xFE49, 0xFE4C, prID, gcPo}, // [4] DASHED OVERLINE..DOUBLE WAVY OVERLINE
+ {0xFE4D, 0xFE4F, prID, gcPc}, // [3] DASHED LOW LINE..WAVY LOW LINE
+ {0xFE50, 0xFE50, prCL, gcPo}, // SMALL COMMA
+ {0xFE51, 0xFE51, prID, gcPo}, // SMALL IDEOGRAPHIC COMMA
+ {0xFE52, 0xFE52, prCL, gcPo}, // SMALL FULL STOP
+ {0xFE54, 0xFE55, prNS, gcPo}, // [2] SMALL SEMICOLON..SMALL COLON
+ {0xFE56, 0xFE57, prEX, gcPo}, // [2] SMALL QUESTION MARK..SMALL EXCLAMATION MARK
+ {0xFE58, 0xFE58, prID, gcPd}, // SMALL EM DASH
+ {0xFE59, 0xFE59, prOP, gcPs}, // SMALL LEFT PARENTHESIS
+ {0xFE5A, 0xFE5A, prCL, gcPe}, // SMALL RIGHT PARENTHESIS
+ {0xFE5B, 0xFE5B, prOP, gcPs}, // SMALL LEFT CURLY BRACKET
+ {0xFE5C, 0xFE5C, prCL, gcPe}, // SMALL RIGHT CURLY BRACKET
+ {0xFE5D, 0xFE5D, prOP, gcPs}, // SMALL LEFT TORTOISE SHELL BRACKET
+ {0xFE5E, 0xFE5E, prCL, gcPe}, // SMALL RIGHT TORTOISE SHELL BRACKET
+ {0xFE5F, 0xFE61, prID, gcPo}, // [3] SMALL NUMBER SIGN..SMALL ASTERISK
+ {0xFE62, 0xFE62, prID, gcSm}, // SMALL PLUS SIGN
+ {0xFE63, 0xFE63, prID, gcPd}, // SMALL HYPHEN-MINUS
+ {0xFE64, 0xFE66, prID, gcSm}, // [3] SMALL LESS-THAN SIGN..SMALL EQUALS SIGN
+ {0xFE68, 0xFE68, prID, gcPo}, // SMALL REVERSE SOLIDUS
+ {0xFE69, 0xFE69, prPR, gcSc}, // SMALL DOLLAR SIGN
+ {0xFE6A, 0xFE6A, prPO, gcPo}, // SMALL PERCENT SIGN
+ {0xFE6B, 0xFE6B, prID, gcPo}, // SMALL COMMERCIAL AT
+ {0xFE70, 0xFE74, prAL, gcLo}, // [5] ARABIC FATHATAN ISOLATED FORM..ARABIC KASRATAN ISOLATED FORM
+ {0xFE76, 0xFEFC, prAL, gcLo}, // [135] ARABIC FATHA ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF FINAL FORM
+ {0xFEFF, 0xFEFF, prWJ, gcCf}, // ZERO WIDTH NO-BREAK SPACE
+ {0xFF01, 0xFF01, prEX, gcPo}, // FULLWIDTH EXCLAMATION MARK
+ {0xFF02, 0xFF03, prID, gcPo}, // [2] FULLWIDTH QUOTATION MARK..FULLWIDTH NUMBER SIGN
+ {0xFF04, 0xFF04, prPR, gcSc}, // FULLWIDTH DOLLAR SIGN
+ {0xFF05, 0xFF05, prPO, gcPo}, // FULLWIDTH PERCENT SIGN
+ {0xFF06, 0xFF07, prID, gcPo}, // [2] FULLWIDTH AMPERSAND..FULLWIDTH APOSTROPHE
+ {0xFF08, 0xFF08, prOP, gcPs}, // FULLWIDTH LEFT PARENTHESIS
+ {0xFF09, 0xFF09, prCL, gcPe}, // FULLWIDTH RIGHT PARENTHESIS
+ {0xFF0A, 0xFF0A, prID, gcPo}, // FULLWIDTH ASTERISK
+ {0xFF0B, 0xFF0B, prID, gcSm}, // FULLWIDTH PLUS SIGN
+ {0xFF0C, 0xFF0C, prCL, gcPo}, // FULLWIDTH COMMA
+ {0xFF0D, 0xFF0D, prID, gcPd}, // FULLWIDTH HYPHEN-MINUS
+ {0xFF0E, 0xFF0E, prCL, gcPo}, // FULLWIDTH FULL STOP
+ {0xFF0F, 0xFF0F, prID, gcPo}, // FULLWIDTH SOLIDUS
+ {0xFF10, 0xFF19, prID, gcNd}, // [10] FULLWIDTH DIGIT ZERO..FULLWIDTH DIGIT NINE
+ {0xFF1A, 0xFF1B, prNS, gcPo}, // [2] FULLWIDTH COLON..FULLWIDTH SEMICOLON
+ {0xFF1C, 0xFF1E, prID, gcSm}, // [3] FULLWIDTH LESS-THAN SIGN..FULLWIDTH GREATER-THAN SIGN
+ {0xFF1F, 0xFF1F, prEX, gcPo}, // FULLWIDTH QUESTION MARK
+ {0xFF20, 0xFF20, prID, gcPo}, // FULLWIDTH COMMERCIAL AT
+ {0xFF21, 0xFF3A, prID, gcLu}, // [26] FULLWIDTH LATIN CAPITAL LETTER A..FULLWIDTH LATIN CAPITAL LETTER Z
+ {0xFF3B, 0xFF3B, prOP, gcPs}, // FULLWIDTH LEFT SQUARE BRACKET
+ {0xFF3C, 0xFF3C, prID, gcPo}, // FULLWIDTH REVERSE SOLIDUS
+ {0xFF3D, 0xFF3D, prCL, gcPe}, // FULLWIDTH RIGHT SQUARE BRACKET
+ {0xFF3E, 0xFF3E, prID, gcSk}, // FULLWIDTH CIRCUMFLEX ACCENT
+ {0xFF3F, 0xFF3F, prID, gcPc}, // FULLWIDTH LOW LINE
+ {0xFF40, 0xFF40, prID, gcSk}, // FULLWIDTH GRAVE ACCENT
+ {0xFF41, 0xFF5A, prID, gcLl}, // [26] FULLWIDTH LATIN SMALL LETTER A..FULLWIDTH LATIN SMALL LETTER Z
+ {0xFF5B, 0xFF5B, prOP, gcPs}, // FULLWIDTH LEFT CURLY BRACKET
+ {0xFF5C, 0xFF5C, prID, gcSm}, // FULLWIDTH VERTICAL LINE
+ {0xFF5D, 0xFF5D, prCL, gcPe}, // FULLWIDTH RIGHT CURLY BRACKET
+ {0xFF5E, 0xFF5E, prID, gcSm}, // FULLWIDTH TILDE
+ {0xFF5F, 0xFF5F, prOP, gcPs}, // FULLWIDTH LEFT WHITE PARENTHESIS
+ {0xFF60, 0xFF60, prCL, gcPe}, // FULLWIDTH RIGHT WHITE PARENTHESIS
+ {0xFF61, 0xFF61, prCL, gcPo}, // HALFWIDTH IDEOGRAPHIC FULL STOP
+ {0xFF62, 0xFF62, prOP, gcPs}, // HALFWIDTH LEFT CORNER BRACKET
+ {0xFF63, 0xFF63, prCL, gcPe}, // HALFWIDTH RIGHT CORNER BRACKET
+ {0xFF64, 0xFF64, prCL, gcPo}, // HALFWIDTH IDEOGRAPHIC COMMA
+ {0xFF65, 0xFF65, prNS, gcPo}, // HALFWIDTH KATAKANA MIDDLE DOT
+ {0xFF66, 0xFF66, prID, gcLo}, // HALFWIDTH KATAKANA LETTER WO
+ {0xFF67, 0xFF6F, prCJ, gcLo}, // [9] HALFWIDTH KATAKANA LETTER SMALL A..HALFWIDTH KATAKANA LETTER SMALL TU
+ {0xFF70, 0xFF70, prCJ, gcLm}, // HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK
+ {0xFF71, 0xFF9D, prID, gcLo}, // [45] HALFWIDTH KATAKANA LETTER A..HALFWIDTH KATAKANA LETTER N
+ {0xFF9E, 0xFF9F, prNS, gcLm}, // [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
+ {0xFFA0, 0xFFBE, prID, gcLo}, // [31] HALFWIDTH HANGUL FILLER..HALFWIDTH HANGUL LETTER HIEUH
+ {0xFFC2, 0xFFC7, prID, gcLo}, // [6] HALFWIDTH HANGUL LETTER A..HALFWIDTH HANGUL LETTER E
+ {0xFFCA, 0xFFCF, prID, gcLo}, // [6] HALFWIDTH HANGUL LETTER YEO..HALFWIDTH HANGUL LETTER OE
+ {0xFFD2, 0xFFD7, prID, gcLo}, // [6] HALFWIDTH HANGUL LETTER YO..HALFWIDTH HANGUL LETTER YU
+ {0xFFDA, 0xFFDC, prID, gcLo}, // [3] HALFWIDTH HANGUL LETTER EU..HALFWIDTH HANGUL LETTER I
+ {0xFFE0, 0xFFE0, prPO, gcSc}, // FULLWIDTH CENT SIGN
+ {0xFFE1, 0xFFE1, prPR, gcSc}, // FULLWIDTH POUND SIGN
+ {0xFFE2, 0xFFE2, prID, gcSm}, // FULLWIDTH NOT SIGN
+ {0xFFE3, 0xFFE3, prID, gcSk}, // FULLWIDTH MACRON
+ {0xFFE4, 0xFFE4, prID, gcSo}, // FULLWIDTH BROKEN BAR
+ {0xFFE5, 0xFFE6, prPR, gcSc}, // [2] FULLWIDTH YEN SIGN..FULLWIDTH WON SIGN
+ {0xFFE8, 0xFFE8, prAL, gcSo}, // HALFWIDTH FORMS LIGHT VERTICAL
+ {0xFFE9, 0xFFEC, prAL, gcSm}, // [4] HALFWIDTH LEFTWARDS ARROW..HALFWIDTH DOWNWARDS ARROW
+ {0xFFED, 0xFFEE, prAL, gcSo}, // [2] HALFWIDTH BLACK SQUARE..HALFWIDTH WHITE CIRCLE
+ {0xFFF9, 0xFFFB, prCM, gcCf}, // [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR
+ {0xFFFC, 0xFFFC, prCB, gcSo}, // OBJECT REPLACEMENT CHARACTER
+ {0xFFFD, 0xFFFD, prAI, gcSo}, // REPLACEMENT CHARACTER
+ {0x10000, 0x1000B, prAL, gcLo}, // [12] LINEAR B SYLLABLE B008 A..LINEAR B SYLLABLE B046 JE
+ {0x1000D, 0x10026, prAL, gcLo}, // [26] LINEAR B SYLLABLE B036 JO..LINEAR B SYLLABLE B032 QO
+ {0x10028, 0x1003A, prAL, gcLo}, // [19] LINEAR B SYLLABLE B060 RA..LINEAR B SYLLABLE B042 WO
+ {0x1003C, 0x1003D, prAL, gcLo}, // [2] LINEAR B SYLLABLE B017 ZA..LINEAR B SYLLABLE B074 ZE
+ {0x1003F, 0x1004D, prAL, gcLo}, // [15] LINEAR B SYLLABLE B020 ZO..LINEAR B SYLLABLE B091 TWO
+ {0x10050, 0x1005D, prAL, gcLo}, // [14] LINEAR B SYMBOL B018..LINEAR B SYMBOL B089
+ {0x10080, 0x100FA, prAL, gcLo}, // [123] LINEAR B IDEOGRAM B100 MAN..LINEAR B IDEOGRAM VESSEL B305
+ {0x10100, 0x10102, prBA, gcPo}, // [3] AEGEAN WORD SEPARATOR LINE..AEGEAN CHECK MARK
+ {0x10107, 0x10133, prAL, gcNo}, // [45] AEGEAN NUMBER ONE..AEGEAN NUMBER NINETY THOUSAND
+ {0x10137, 0x1013F, prAL, gcSo}, // [9] AEGEAN WEIGHT BASE UNIT..AEGEAN MEASURE THIRD SUBUNIT
+ {0x10140, 0x10174, prAL, gcNl}, // [53] GREEK ACROPHONIC ATTIC ONE QUARTER..GREEK ACROPHONIC STRATIAN FIFTY MNAS
+ {0x10175, 0x10178, prAL, gcNo}, // [4] GREEK ONE HALF SIGN..GREEK THREE QUARTERS SIGN
+ {0x10179, 0x10189, prAL, gcSo}, // [17] GREEK YEAR SIGN..GREEK TRYBLION BASE SIGN
+ {0x1018A, 0x1018B, prAL, gcNo}, // [2] GREEK ZERO SIGN..GREEK ONE QUARTER SIGN
+ {0x1018C, 0x1018E, prAL, gcSo}, // [3] GREEK SINUSOID SIGN..NOMISMA SIGN
+ {0x10190, 0x1019C, prAL, gcSo}, // [13] ROMAN SEXTANS SIGN..ASCIA SYMBOL
+ {0x101A0, 0x101A0, prAL, gcSo}, // GREEK SYMBOL TAU RHO
+ {0x101D0, 0x101FC, prAL, gcSo}, // [45] PHAISTOS DISC SIGN PEDESTRIAN..PHAISTOS DISC SIGN WAVY BAND
+ {0x101FD, 0x101FD, prCM, gcMn}, // PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE
+ {0x10280, 0x1029C, prAL, gcLo}, // [29] LYCIAN LETTER A..LYCIAN LETTER X
+ {0x102A0, 0x102D0, prAL, gcLo}, // [49] CARIAN LETTER A..CARIAN LETTER UUU3
+ {0x102E0, 0x102E0, prCM, gcMn}, // COPTIC EPACT THOUSANDS MARK
+ {0x102E1, 0x102FB, prAL, gcNo}, // [27] COPTIC EPACT DIGIT ONE..COPTIC EPACT NUMBER NINE HUNDRED
+ {0x10300, 0x1031F, prAL, gcLo}, // [32] OLD ITALIC LETTER A..OLD ITALIC LETTER ESS
+ {0x10320, 0x10323, prAL, gcNo}, // [4] OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY
+ {0x1032D, 0x1032F, prAL, gcLo}, // [3] OLD ITALIC LETTER YE..OLD ITALIC LETTER SOUTHERN TSE
+ {0x10330, 0x10340, prAL, gcLo}, // [17] GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA
+ {0x10341, 0x10341, prAL, gcNl}, // GOTHIC LETTER NINETY
+ {0x10342, 0x10349, prAL, gcLo}, // [8] GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL
+ {0x1034A, 0x1034A, prAL, gcNl}, // GOTHIC LETTER NINE HUNDRED
+ {0x10350, 0x10375, prAL, gcLo}, // [38] OLD PERMIC LETTER AN..OLD PERMIC LETTER IA
+ {0x10376, 0x1037A, prCM, gcMn}, // [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII
+ {0x10380, 0x1039D, prAL, gcLo}, // [30] UGARITIC LETTER ALPA..UGARITIC LETTER SSU
+ {0x1039F, 0x1039F, prBA, gcPo}, // UGARITIC WORD DIVIDER
+ {0x103A0, 0x103C3, prAL, gcLo}, // [36] OLD PERSIAN SIGN A..OLD PERSIAN SIGN HA
+ {0x103C8, 0x103CF, prAL, gcLo}, // [8] OLD PERSIAN SIGN AURAMAZDAA..OLD PERSIAN SIGN BUUMISH
+ {0x103D0, 0x103D0, prBA, gcPo}, // OLD PERSIAN WORD DIVIDER
+ {0x103D1, 0x103D5, prAL, gcNl}, // [5] OLD PERSIAN NUMBER ONE..OLD PERSIAN NUMBER HUNDRED
+ {0x10400, 0x1044F, prAL, gcLC}, // [80] DESERET CAPITAL LETTER LONG I..DESERET SMALL LETTER EW
+ {0x10450, 0x1047F, prAL, gcLo}, // [48] SHAVIAN LETTER PEEP..SHAVIAN LETTER YEW
+ {0x10480, 0x1049D, prAL, gcLo}, // [30] OSMANYA LETTER ALEF..OSMANYA LETTER OO
+ {0x104A0, 0x104A9, prNU, gcNd}, // [10] OSMANYA DIGIT ZERO..OSMANYA DIGIT NINE
+ {0x104B0, 0x104D3, prAL, gcLu}, // [36] OSAGE CAPITAL LETTER A..OSAGE CAPITAL LETTER ZHA
+ {0x104D8, 0x104FB, prAL, gcLl}, // [36] OSAGE SMALL LETTER A..OSAGE SMALL LETTER ZHA
+ {0x10500, 0x10527, prAL, gcLo}, // [40] ELBASAN LETTER A..ELBASAN LETTER KHE
+ {0x10530, 0x10563, prAL, gcLo}, // [52] CAUCASIAN ALBANIAN LETTER ALT..CAUCASIAN ALBANIAN LETTER KIW
+ {0x1056F, 0x1056F, prAL, gcPo}, // CAUCASIAN ALBANIAN CITATION MARK
+ {0x10570, 0x1057A, prAL, gcLu}, // [11] VITHKUQI CAPITAL LETTER A..VITHKUQI CAPITAL LETTER GA
+ {0x1057C, 0x1058A, prAL, gcLu}, // [15] VITHKUQI CAPITAL LETTER HA..VITHKUQI CAPITAL LETTER RE
+ {0x1058C, 0x10592, prAL, gcLu}, // [7] VITHKUQI CAPITAL LETTER SE..VITHKUQI CAPITAL LETTER XE
+ {0x10594, 0x10595, prAL, gcLu}, // [2] VITHKUQI CAPITAL LETTER Y..VITHKUQI CAPITAL LETTER ZE
+ {0x10597, 0x105A1, prAL, gcLl}, // [11] VITHKUQI SMALL LETTER A..VITHKUQI SMALL LETTER GA
+ {0x105A3, 0x105B1, prAL, gcLl}, // [15] VITHKUQI SMALL LETTER HA..VITHKUQI SMALL LETTER RE
+ {0x105B3, 0x105B9, prAL, gcLl}, // [7] VITHKUQI SMALL LETTER SE..VITHKUQI SMALL LETTER XE
+ {0x105BB, 0x105BC, prAL, gcLl}, // [2] VITHKUQI SMALL LETTER Y..VITHKUQI SMALL LETTER ZE
+ {0x10600, 0x10736, prAL, gcLo}, // [311] LINEAR A SIGN AB001..LINEAR A SIGN A664
+ {0x10740, 0x10755, prAL, gcLo}, // [22] LINEAR A SIGN A701 A..LINEAR A SIGN A732 JE
+ {0x10760, 0x10767, prAL, gcLo}, // [8] LINEAR A SIGN A800..LINEAR A SIGN A807
+ {0x10780, 0x10785, prAL, gcLm}, // [6] MODIFIER LETTER SMALL CAPITAL AA..MODIFIER LETTER SMALL B WITH HOOK
+ {0x10787, 0x107B0, prAL, gcLm}, // [42] MODIFIER LETTER SMALL DZ DIGRAPH..MODIFIER LETTER SMALL V WITH RIGHT HOOK
+ {0x107B2, 0x107BA, prAL, gcLm}, // [9] MODIFIER LETTER SMALL CAPITAL Y..MODIFIER LETTER SMALL S WITH CURL
+ {0x10800, 0x10805, prAL, gcLo}, // [6] CYPRIOT SYLLABLE A..CYPRIOT SYLLABLE JA
+ {0x10808, 0x10808, prAL, gcLo}, // CYPRIOT SYLLABLE JO
+ {0x1080A, 0x10835, prAL, gcLo}, // [44] CYPRIOT SYLLABLE KA..CYPRIOT SYLLABLE WO
+ {0x10837, 0x10838, prAL, gcLo}, // [2] CYPRIOT SYLLABLE XA..CYPRIOT SYLLABLE XE
+ {0x1083C, 0x1083C, prAL, gcLo}, // CYPRIOT SYLLABLE ZA
+ {0x1083F, 0x1083F, prAL, gcLo}, // CYPRIOT SYLLABLE ZO
+ {0x10840, 0x10855, prAL, gcLo}, // [22] IMPERIAL ARAMAIC LETTER ALEPH..IMPERIAL ARAMAIC LETTER TAW
+ {0x10857, 0x10857, prBA, gcPo}, // IMPERIAL ARAMAIC SECTION SIGN
+ {0x10858, 0x1085F, prAL, gcNo}, // [8] IMPERIAL ARAMAIC NUMBER ONE..IMPERIAL ARAMAIC NUMBER TEN THOUSAND
+ {0x10860, 0x10876, prAL, gcLo}, // [23] PALMYRENE LETTER ALEPH..PALMYRENE LETTER TAW
+ {0x10877, 0x10878, prAL, gcSo}, // [2] PALMYRENE LEFT-POINTING FLEURON..PALMYRENE RIGHT-POINTING FLEURON
+ {0x10879, 0x1087F, prAL, gcNo}, // [7] PALMYRENE NUMBER ONE..PALMYRENE NUMBER TWENTY
+ {0x10880, 0x1089E, prAL, gcLo}, // [31] NABATAEAN LETTER FINAL ALEPH..NABATAEAN LETTER TAW
+ {0x108A7, 0x108AF, prAL, gcNo}, // [9] NABATAEAN NUMBER ONE..NABATAEAN NUMBER ONE HUNDRED
+ {0x108E0, 0x108F2, prAL, gcLo}, // [19] HATRAN LETTER ALEPH..HATRAN LETTER QOPH
+ {0x108F4, 0x108F5, prAL, gcLo}, // [2] HATRAN LETTER SHIN..HATRAN LETTER TAW
+ {0x108FB, 0x108FF, prAL, gcNo}, // [5] HATRAN NUMBER ONE..HATRAN NUMBER ONE HUNDRED
+ {0x10900, 0x10915, prAL, gcLo}, // [22] PHOENICIAN LETTER ALF..PHOENICIAN LETTER TAU
+ {0x10916, 0x1091B, prAL, gcNo}, // [6] PHOENICIAN NUMBER ONE..PHOENICIAN NUMBER THREE
+ {0x1091F, 0x1091F, prBA, gcPo}, // PHOENICIAN WORD SEPARATOR
+ {0x10920, 0x10939, prAL, gcLo}, // [26] LYDIAN LETTER A..LYDIAN LETTER C
+ {0x1093F, 0x1093F, prAL, gcPo}, // LYDIAN TRIANGULAR MARK
+ {0x10980, 0x1099F, prAL, gcLo}, // [32] MEROITIC HIEROGLYPHIC LETTER A..MEROITIC HIEROGLYPHIC SYMBOL VIDJ-2
+ {0x109A0, 0x109B7, prAL, gcLo}, // [24] MEROITIC CURSIVE LETTER A..MEROITIC CURSIVE LETTER DA
+ {0x109BC, 0x109BD, prAL, gcNo}, // [2] MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS..MEROITIC CURSIVE FRACTION ONE HALF
+ {0x109BE, 0x109BF, prAL, gcLo}, // [2] MEROITIC CURSIVE LOGOGRAM RMT..MEROITIC CURSIVE LOGOGRAM IMN
+ {0x109C0, 0x109CF, prAL, gcNo}, // [16] MEROITIC CURSIVE NUMBER ONE..MEROITIC CURSIVE NUMBER SEVENTY
+ {0x109D2, 0x109FF, prAL, gcNo}, // [46] MEROITIC CURSIVE NUMBER ONE HUNDRED..MEROITIC CURSIVE FRACTION TEN TWELFTHS
+ {0x10A00, 0x10A00, prAL, gcLo}, // KHAROSHTHI LETTER A
+ {0x10A01, 0x10A03, prCM, gcMn}, // [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R
+ {0x10A05, 0x10A06, prCM, gcMn}, // [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O
+ {0x10A0C, 0x10A0F, prCM, gcMn}, // [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA
+ {0x10A10, 0x10A13, prAL, gcLo}, // [4] KHAROSHTHI LETTER KA..KHAROSHTHI LETTER GHA
+ {0x10A15, 0x10A17, prAL, gcLo}, // [3] KHAROSHTHI LETTER CA..KHAROSHTHI LETTER JA
+ {0x10A19, 0x10A35, prAL, gcLo}, // [29] KHAROSHTHI LETTER NYA..KHAROSHTHI LETTER VHA
+ {0x10A38, 0x10A3A, prCM, gcMn}, // [3] KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW
+ {0x10A3F, 0x10A3F, prCM, gcMn}, // KHAROSHTHI VIRAMA
+ {0x10A40, 0x10A48, prAL, gcNo}, // [9] KHAROSHTHI DIGIT ONE..KHAROSHTHI FRACTION ONE HALF
+ {0x10A50, 0x10A57, prBA, gcPo}, // [8] KHAROSHTHI PUNCTUATION DOT..KHAROSHTHI PUNCTUATION DOUBLE DANDA
+ {0x10A58, 0x10A58, prAL, gcPo}, // KHAROSHTHI PUNCTUATION LINES
+ {0x10A60, 0x10A7C, prAL, gcLo}, // [29] OLD SOUTH ARABIAN LETTER HE..OLD SOUTH ARABIAN LETTER THETH
+ {0x10A7D, 0x10A7E, prAL, gcNo}, // [2] OLD SOUTH ARABIAN NUMBER ONE..OLD SOUTH ARABIAN NUMBER FIFTY
+ {0x10A7F, 0x10A7F, prAL, gcPo}, // OLD SOUTH ARABIAN NUMERIC INDICATOR
+ {0x10A80, 0x10A9C, prAL, gcLo}, // [29] OLD NORTH ARABIAN LETTER HEH..OLD NORTH ARABIAN LETTER ZAH
+ {0x10A9D, 0x10A9F, prAL, gcNo}, // [3] OLD NORTH ARABIAN NUMBER ONE..OLD NORTH ARABIAN NUMBER TWENTY
+ {0x10AC0, 0x10AC7, prAL, gcLo}, // [8] MANICHAEAN LETTER ALEPH..MANICHAEAN LETTER WAW
+ {0x10AC8, 0x10AC8, prAL, gcSo}, // MANICHAEAN SIGN UD
+ {0x10AC9, 0x10AE4, prAL, gcLo}, // [28] MANICHAEAN LETTER ZAYIN..MANICHAEAN LETTER TAW
+ {0x10AE5, 0x10AE6, prCM, gcMn}, // [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
+ {0x10AEB, 0x10AEF, prAL, gcNo}, // [5] MANICHAEAN NUMBER ONE..MANICHAEAN NUMBER ONE HUNDRED
+ {0x10AF0, 0x10AF5, prBA, gcPo}, // [6] MANICHAEAN PUNCTUATION STAR..MANICHAEAN PUNCTUATION TWO DOTS
+ {0x10AF6, 0x10AF6, prIN, gcPo}, // MANICHAEAN PUNCTUATION LINE FILLER
+ {0x10B00, 0x10B35, prAL, gcLo}, // [54] AVESTAN LETTER A..AVESTAN LETTER HE
+ {0x10B39, 0x10B3F, prBA, gcPo}, // [7] AVESTAN ABBREVIATION MARK..LARGE ONE RING OVER TWO RINGS PUNCTUATION
+ {0x10B40, 0x10B55, prAL, gcLo}, // [22] INSCRIPTIONAL PARTHIAN LETTER ALEPH..INSCRIPTIONAL PARTHIAN LETTER TAW
+ {0x10B58, 0x10B5F, prAL, gcNo}, // [8] INSCRIPTIONAL PARTHIAN NUMBER ONE..INSCRIPTIONAL PARTHIAN NUMBER ONE THOUSAND
+ {0x10B60, 0x10B72, prAL, gcLo}, // [19] INSCRIPTIONAL PAHLAVI LETTER ALEPH..INSCRIPTIONAL PAHLAVI LETTER TAW
+ {0x10B78, 0x10B7F, prAL, gcNo}, // [8] INSCRIPTIONAL PAHLAVI NUMBER ONE..INSCRIPTIONAL PAHLAVI NUMBER ONE THOUSAND
+ {0x10B80, 0x10B91, prAL, gcLo}, // [18] PSALTER PAHLAVI LETTER ALEPH..PSALTER PAHLAVI LETTER TAW
+ {0x10B99, 0x10B9C, prAL, gcPo}, // [4] PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT
+ {0x10BA9, 0x10BAF, prAL, gcNo}, // [7] PSALTER PAHLAVI NUMBER ONE..PSALTER PAHLAVI NUMBER ONE HUNDRED
+ {0x10C00, 0x10C48, prAL, gcLo}, // [73] OLD TURKIC LETTER ORKHON A..OLD TURKIC LETTER ORKHON BASH
+ {0x10C80, 0x10CB2, prAL, gcLu}, // [51] OLD HUNGARIAN CAPITAL LETTER A..OLD HUNGARIAN CAPITAL LETTER US
+ {0x10CC0, 0x10CF2, prAL, gcLl}, // [51] OLD HUNGARIAN SMALL LETTER A..OLD HUNGARIAN SMALL LETTER US
+ {0x10CFA, 0x10CFF, prAL, gcNo}, // [6] OLD HUNGARIAN NUMBER ONE..OLD HUNGARIAN NUMBER ONE THOUSAND
+ {0x10D00, 0x10D23, prAL, gcLo}, // [36] HANIFI ROHINGYA LETTER A..HANIFI ROHINGYA MARK NA KHONNA
+ {0x10D24, 0x10D27, prCM, gcMn}, // [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
+ {0x10D30, 0x10D39, prNU, gcNd}, // [10] HANIFI ROHINGYA DIGIT ZERO..HANIFI ROHINGYA DIGIT NINE
+ {0x10E60, 0x10E7E, prAL, gcNo}, // [31] RUMI DIGIT ONE..RUMI FRACTION TWO THIRDS
+ {0x10E80, 0x10EA9, prAL, gcLo}, // [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET
+ {0x10EAB, 0x10EAC, prCM, gcMn}, // [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10EAD, 0x10EAD, prBA, gcPd}, // YEZIDI HYPHENATION MARK
+ {0x10EB0, 0x10EB1, prAL, gcLo}, // [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10F00, 0x10F1C, prAL, gcLo}, // [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
+ {0x10F1D, 0x10F26, prAL, gcNo}, // [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF
+ {0x10F27, 0x10F27, prAL, gcLo}, // OLD SOGDIAN LIGATURE AYIN-DALETH
+ {0x10F30, 0x10F45, prAL, gcLo}, // [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN
+ {0x10F46, 0x10F50, prCM, gcMn}, // [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
+ {0x10F51, 0x10F54, prAL, gcNo}, // [4] SOGDIAN NUMBER ONE..SOGDIAN NUMBER ONE HUNDRED
+ {0x10F55, 0x10F59, prAL, gcPo}, // [5] SOGDIAN PUNCTUATION TWO VERTICAL BARS..SOGDIAN PUNCTUATION HALF CIRCLE WITH DOT
+ {0x10F70, 0x10F81, prAL, gcLo}, // [18] OLD UYGHUR LETTER ALEPH..OLD UYGHUR LETTER LESH
+ {0x10F82, 0x10F85, prCM, gcMn}, // [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
+ {0x10F86, 0x10F89, prAL, gcPo}, // [4] OLD UYGHUR PUNCTUATION BAR..OLD UYGHUR PUNCTUATION FOUR DOTS
+ {0x10FB0, 0x10FC4, prAL, gcLo}, // [21] CHORASMIAN LETTER ALEPH..CHORASMIAN LETTER TAW
+ {0x10FC5, 0x10FCB, prAL, gcNo}, // [7] CHORASMIAN NUMBER ONE..CHORASMIAN NUMBER ONE HUNDRED
+ {0x10FE0, 0x10FF6, prAL, gcLo}, // [23] ELYMAIC LETTER ALEPH..ELYMAIC LIGATURE ZAYIN-YODH
+ {0x11000, 0x11000, prCM, gcMc}, // BRAHMI SIGN CANDRABINDU
+ {0x11001, 0x11001, prCM, gcMn}, // BRAHMI SIGN ANUSVARA
+ {0x11002, 0x11002, prCM, gcMc}, // BRAHMI SIGN VISARGA
+ {0x11003, 0x11037, prAL, gcLo}, // [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI LETTER OLD TAMIL NNNA
+ {0x11038, 0x11046, prCM, gcMn}, // [15] BRAHMI VOWEL SIGN AA..BRAHMI VIRAMA
+ {0x11047, 0x11048, prBA, gcPo}, // [2] BRAHMI DANDA..BRAHMI DOUBLE DANDA
+ {0x11049, 0x1104D, prAL, gcPo}, // [5] BRAHMI PUNCTUATION DOT..BRAHMI PUNCTUATION LOTUS
+ {0x11052, 0x11065, prAL, gcNo}, // [20] BRAHMI NUMBER ONE..BRAHMI NUMBER ONE THOUSAND
+ {0x11066, 0x1106F, prNU, gcNd}, // [10] BRAHMI DIGIT ZERO..BRAHMI DIGIT NINE
+ {0x11070, 0x11070, prCM, gcMn}, // BRAHMI SIGN OLD TAMIL VIRAMA
+ {0x11071, 0x11072, prAL, gcLo}, // [2] BRAHMI LETTER OLD TAMIL SHORT E..BRAHMI LETTER OLD TAMIL SHORT O
+ {0x11073, 0x11074, prCM, gcMn}, // [2] BRAHMI VOWEL SIGN OLD TAMIL SHORT E..BRAHMI VOWEL SIGN OLD TAMIL SHORT O
+ {0x11075, 0x11075, prAL, gcLo}, // BRAHMI LETTER OLD TAMIL LLA
+ {0x1107F, 0x1107F, prCM, gcMn}, // BRAHMI NUMBER JOINER
+ {0x11080, 0x11081, prCM, gcMn}, // [2] KAITHI SIGN CANDRABINDU..KAITHI SIGN ANUSVARA
+ {0x11082, 0x11082, prCM, gcMc}, // KAITHI SIGN VISARGA
+ {0x11083, 0x110AF, prAL, gcLo}, // [45] KAITHI LETTER A..KAITHI LETTER HA
+ {0x110B0, 0x110B2, prCM, gcMc}, // [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II
+ {0x110B3, 0x110B6, prCM, gcMn}, // [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI
+ {0x110B7, 0x110B8, prCM, gcMc}, // [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU
+ {0x110B9, 0x110BA, prCM, gcMn}, // [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA
+ {0x110BB, 0x110BC, prAL, gcPo}, // [2] KAITHI ABBREVIATION SIGN..KAITHI ENUMERATION SIGN
+ {0x110BD, 0x110BD, prAL, gcCf}, // KAITHI NUMBER SIGN
+ {0x110BE, 0x110C1, prBA, gcPo}, // [4] KAITHI SECTION MARK..KAITHI DOUBLE DANDA
+ {0x110C2, 0x110C2, prCM, gcMn}, // KAITHI VOWEL SIGN VOCALIC R
+ {0x110CD, 0x110CD, prAL, gcCf}, // KAITHI NUMBER SIGN ABOVE
+ {0x110D0, 0x110E8, prAL, gcLo}, // [25] SORA SOMPENG LETTER SAH..SORA SOMPENG LETTER MAE
+ {0x110F0, 0x110F9, prNU, gcNd}, // [10] SORA SOMPENG DIGIT ZERO..SORA SOMPENG DIGIT NINE
+ {0x11100, 0x11102, prCM, gcMn}, // [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA
+ {0x11103, 0x11126, prAL, gcLo}, // [36] CHAKMA LETTER AA..CHAKMA LETTER HAA
+ {0x11127, 0x1112B, prCM, gcMn}, // [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU
+ {0x1112C, 0x1112C, prCM, gcMc}, // CHAKMA VOWEL SIGN E
+ {0x1112D, 0x11134, prCM, gcMn}, // [8] CHAKMA VOWEL SIGN AI..CHAKMA MAAYYAA
+ {0x11136, 0x1113F, prNU, gcNd}, // [10] CHAKMA DIGIT ZERO..CHAKMA DIGIT NINE
+ {0x11140, 0x11143, prBA, gcPo}, // [4] CHAKMA SECTION MARK..CHAKMA QUESTION MARK
+ {0x11144, 0x11144, prAL, gcLo}, // CHAKMA LETTER LHAA
+ {0x11145, 0x11146, prCM, gcMc}, // [2] CHAKMA VOWEL SIGN AA..CHAKMA VOWEL SIGN EI
+ {0x11147, 0x11147, prAL, gcLo}, // CHAKMA LETTER VAA
+ {0x11150, 0x11172, prAL, gcLo}, // [35] MAHAJANI LETTER A..MAHAJANI LETTER RRA
+ {0x11173, 0x11173, prCM, gcMn}, // MAHAJANI SIGN NUKTA
+ {0x11174, 0x11174, prAL, gcPo}, // MAHAJANI ABBREVIATION SIGN
+ {0x11175, 0x11175, prBB, gcPo}, // MAHAJANI SECTION MARK
+ {0x11176, 0x11176, prAL, gcLo}, // MAHAJANI LIGATURE SHRI
+ {0x11180, 0x11181, prCM, gcMn}, // [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA
+ {0x11182, 0x11182, prCM, gcMc}, // SHARADA SIGN VISARGA
+ {0x11183, 0x111B2, prAL, gcLo}, // [48] SHARADA LETTER A..SHARADA LETTER HA
+ {0x111B3, 0x111B5, prCM, gcMc}, // [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II
+ {0x111B6, 0x111BE, prCM, gcMn}, // [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O
+ {0x111BF, 0x111C0, prCM, gcMc}, // [2] SHARADA VOWEL SIGN AU..SHARADA SIGN VIRAMA
+ {0x111C1, 0x111C4, prAL, gcLo}, // [4] SHARADA SIGN AVAGRAHA..SHARADA OM
+ {0x111C5, 0x111C6, prBA, gcPo}, // [2] SHARADA DANDA..SHARADA DOUBLE DANDA
+ {0x111C7, 0x111C7, prAL, gcPo}, // SHARADA ABBREVIATION SIGN
+ {0x111C8, 0x111C8, prBA, gcPo}, // SHARADA SEPARATOR
+ {0x111C9, 0x111CC, prCM, gcMn}, // [4] SHARADA SANDHI MARK..SHARADA EXTRA SHORT VOWEL MARK
+ {0x111CD, 0x111CD, prAL, gcPo}, // SHARADA SUTRA MARK
+ {0x111CE, 0x111CE, prCM, gcMc}, // SHARADA VOWEL SIGN PRISHTHAMATRA E
+ {0x111CF, 0x111CF, prCM, gcMn}, // SHARADA SIGN INVERTED CANDRABINDU
+ {0x111D0, 0x111D9, prNU, gcNd}, // [10] SHARADA DIGIT ZERO..SHARADA DIGIT NINE
+ {0x111DA, 0x111DA, prAL, gcLo}, // SHARADA EKAM
+ {0x111DB, 0x111DB, prBB, gcPo}, // SHARADA SIGN SIDDHAM
+ {0x111DC, 0x111DC, prAL, gcLo}, // SHARADA HEADSTROKE
+ {0x111DD, 0x111DF, prBA, gcPo}, // [3] SHARADA CONTINUATION SIGN..SHARADA SECTION MARK-2
+ {0x111E1, 0x111F4, prAL, gcNo}, // [20] SINHALA ARCHAIC DIGIT ONE..SINHALA ARCHAIC NUMBER ONE THOUSAND
+ {0x11200, 0x11211, prAL, gcLo}, // [18] KHOJKI LETTER A..KHOJKI LETTER JJA
+ {0x11213, 0x1122B, prAL, gcLo}, // [25] KHOJKI LETTER NYA..KHOJKI LETTER LLA
+ {0x1122C, 0x1122E, prCM, gcMc}, // [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II
+ {0x1122F, 0x11231, prCM, gcMn}, // [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI
+ {0x11232, 0x11233, prCM, gcMc}, // [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU
+ {0x11234, 0x11234, prCM, gcMn}, // KHOJKI SIGN ANUSVARA
+ {0x11235, 0x11235, prCM, gcMc}, // KHOJKI SIGN VIRAMA
+ {0x11236, 0x11237, prCM, gcMn}, // [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
+ {0x11238, 0x11239, prBA, gcPo}, // [2] KHOJKI DANDA..KHOJKI DOUBLE DANDA
+ {0x1123A, 0x1123A, prAL, gcPo}, // KHOJKI WORD SEPARATOR
+ {0x1123B, 0x1123C, prBA, gcPo}, // [2] KHOJKI SECTION MARK..KHOJKI DOUBLE SECTION MARK
+ {0x1123D, 0x1123D, prAL, gcPo}, // KHOJKI ABBREVIATION SIGN
+ {0x1123E, 0x1123E, prCM, gcMn}, // KHOJKI SIGN SUKUN
+ {0x11280, 0x11286, prAL, gcLo}, // [7] MULTANI LETTER A..MULTANI LETTER GA
+ {0x11288, 0x11288, prAL, gcLo}, // MULTANI LETTER GHA
+ {0x1128A, 0x1128D, prAL, gcLo}, // [4] MULTANI LETTER CA..MULTANI LETTER JJA
+ {0x1128F, 0x1129D, prAL, gcLo}, // [15] MULTANI LETTER NYA..MULTANI LETTER BA
+ {0x1129F, 0x112A8, prAL, gcLo}, // [10] MULTANI LETTER BHA..MULTANI LETTER RHA
+ {0x112A9, 0x112A9, prBA, gcPo}, // MULTANI SECTION MARK
+ {0x112B0, 0x112DE, prAL, gcLo}, // [47] KHUDAWADI LETTER A..KHUDAWADI LETTER HA
+ {0x112DF, 0x112DF, prCM, gcMn}, // KHUDAWADI SIGN ANUSVARA
+ {0x112E0, 0x112E2, prCM, gcMc}, // [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
+ {0x112E3, 0x112EA, prCM, gcMn}, // [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
+ {0x112F0, 0x112F9, prNU, gcNd}, // [10] KHUDAWADI DIGIT ZERO..KHUDAWADI DIGIT NINE
+ {0x11300, 0x11301, prCM, gcMn}, // [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU
+ {0x11302, 0x11303, prCM, gcMc}, // [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA
+ {0x11305, 0x1130C, prAL, gcLo}, // [8] GRANTHA LETTER A..GRANTHA LETTER VOCALIC L
+ {0x1130F, 0x11310, prAL, gcLo}, // [2] GRANTHA LETTER EE..GRANTHA LETTER AI
+ {0x11313, 0x11328, prAL, gcLo}, // [22] GRANTHA LETTER OO..GRANTHA LETTER NA
+ {0x1132A, 0x11330, prAL, gcLo}, // [7] GRANTHA LETTER PA..GRANTHA LETTER RA
+ {0x11332, 0x11333, prAL, gcLo}, // [2] GRANTHA LETTER LA..GRANTHA LETTER LLA
+ {0x11335, 0x11339, prAL, gcLo}, // [5] GRANTHA LETTER VA..GRANTHA LETTER HA
+ {0x1133B, 0x1133C, prCM, gcMn}, // [2] COMBINING BINDU BELOW..GRANTHA SIGN NUKTA
+ {0x1133D, 0x1133D, prAL, gcLo}, // GRANTHA SIGN AVAGRAHA
+ {0x1133E, 0x1133F, prCM, gcMc}, // [2] GRANTHA VOWEL SIGN AA..GRANTHA VOWEL SIGN I
+ {0x11340, 0x11340, prCM, gcMn}, // GRANTHA VOWEL SIGN II
+ {0x11341, 0x11344, prCM, gcMc}, // [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR
+ {0x11347, 0x11348, prCM, gcMc}, // [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI
+ {0x1134B, 0x1134D, prCM, gcMc}, // [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA
+ {0x11350, 0x11350, prAL, gcLo}, // GRANTHA OM
+ {0x11357, 0x11357, prCM, gcMc}, // GRANTHA AU LENGTH MARK
+ {0x1135D, 0x11361, prAL, gcLo}, // [5] GRANTHA SIGN PLUTA..GRANTHA LETTER VOCALIC LL
+ {0x11362, 0x11363, prCM, gcMc}, // [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL
+ {0x11366, 0x1136C, prCM, gcMn}, // [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX
+ {0x11370, 0x11374, prCM, gcMn}, // [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA
+ {0x11400, 0x11434, prAL, gcLo}, // [53] NEWA LETTER A..NEWA LETTER HA
+ {0x11435, 0x11437, prCM, gcMc}, // [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II
+ {0x11438, 0x1143F, prCM, gcMn}, // [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI
+ {0x11440, 0x11441, prCM, gcMc}, // [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU
+ {0x11442, 0x11444, prCM, gcMn}, // [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA
+ {0x11445, 0x11445, prCM, gcMc}, // NEWA SIGN VISARGA
+ {0x11446, 0x11446, prCM, gcMn}, // NEWA SIGN NUKTA
+ {0x11447, 0x1144A, prAL, gcLo}, // [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI
+ {0x1144B, 0x1144E, prBA, gcPo}, // [4] NEWA DANDA..NEWA GAP FILLER
+ {0x1144F, 0x1144F, prAL, gcPo}, // NEWA ABBREVIATION SIGN
+ {0x11450, 0x11459, prNU, gcNd}, // [10] NEWA DIGIT ZERO..NEWA DIGIT NINE
+ {0x1145A, 0x1145B, prBA, gcPo}, // [2] NEWA DOUBLE COMMA..NEWA PLACEHOLDER MARK
+ {0x1145D, 0x1145D, prAL, gcPo}, // NEWA INSERTION SIGN
+ {0x1145E, 0x1145E, prCM, gcMn}, // NEWA SANDHI MARK
+ {0x1145F, 0x11461, prAL, gcLo}, // [3] NEWA LETTER VEDIC ANUSVARA..NEWA SIGN UPADHMANIYA
+ {0x11480, 0x114AF, prAL, gcLo}, // [48] TIRHUTA ANJI..TIRHUTA LETTER HA
+ {0x114B0, 0x114B2, prCM, gcMc}, // [3] TIRHUTA VOWEL SIGN AA..TIRHUTA VOWEL SIGN II
+ {0x114B3, 0x114B8, prCM, gcMn}, // [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL
+ {0x114B9, 0x114B9, prCM, gcMc}, // TIRHUTA VOWEL SIGN E
+ {0x114BA, 0x114BA, prCM, gcMn}, // TIRHUTA VOWEL SIGN SHORT E
+ {0x114BB, 0x114BE, prCM, gcMc}, // [4] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN AU
+ {0x114BF, 0x114C0, prCM, gcMn}, // [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA
+ {0x114C1, 0x114C1, prCM, gcMc}, // TIRHUTA SIGN VISARGA
+ {0x114C2, 0x114C3, prCM, gcMn}, // [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA
+ {0x114C4, 0x114C5, prAL, gcLo}, // [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA GVANG
+ {0x114C6, 0x114C6, prAL, gcPo}, // TIRHUTA ABBREVIATION SIGN
+ {0x114C7, 0x114C7, prAL, gcLo}, // TIRHUTA OM
+ {0x114D0, 0x114D9, prNU, gcNd}, // [10] TIRHUTA DIGIT ZERO..TIRHUTA DIGIT NINE
+ {0x11580, 0x115AE, prAL, gcLo}, // [47] SIDDHAM LETTER A..SIDDHAM LETTER HA
+ {0x115AF, 0x115B1, prCM, gcMc}, // [3] SIDDHAM VOWEL SIGN AA..SIDDHAM VOWEL SIGN II
+ {0x115B2, 0x115B5, prCM, gcMn}, // [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR
+ {0x115B8, 0x115BB, prCM, gcMc}, // [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU
+ {0x115BC, 0x115BD, prCM, gcMn}, // [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA
+ {0x115BE, 0x115BE, prCM, gcMc}, // SIDDHAM SIGN VISARGA
+ {0x115BF, 0x115C0, prCM, gcMn}, // [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA
+ {0x115C1, 0x115C1, prBB, gcPo}, // SIDDHAM SIGN SIDDHAM
+ {0x115C2, 0x115C3, prBA, gcPo}, // [2] SIDDHAM DANDA..SIDDHAM DOUBLE DANDA
+ {0x115C4, 0x115C5, prEX, gcPo}, // [2] SIDDHAM SEPARATOR DOT..SIDDHAM SEPARATOR BAR
+ {0x115C6, 0x115C8, prAL, gcPo}, // [3] SIDDHAM REPETITION MARK-1..SIDDHAM REPETITION MARK-3
+ {0x115C9, 0x115D7, prBA, gcPo}, // [15] SIDDHAM END OF TEXT MARK..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES
+ {0x115D8, 0x115DB, prAL, gcLo}, // [4] SIDDHAM LETTER THREE-CIRCLE ALTERNATE I..SIDDHAM LETTER ALTERNATE U
+ {0x115DC, 0x115DD, prCM, gcMn}, // [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU
+ {0x11600, 0x1162F, prAL, gcLo}, // [48] MODI LETTER A..MODI LETTER LLA
+ {0x11630, 0x11632, prCM, gcMc}, // [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II
+ {0x11633, 0x1163A, prCM, gcMn}, // [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI
+ {0x1163B, 0x1163C, prCM, gcMc}, // [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU
+ {0x1163D, 0x1163D, prCM, gcMn}, // MODI SIGN ANUSVARA
+ {0x1163E, 0x1163E, prCM, gcMc}, // MODI SIGN VISARGA
+ {0x1163F, 0x11640, prCM, gcMn}, // [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA
+ {0x11641, 0x11642, prBA, gcPo}, // [2] MODI DANDA..MODI DOUBLE DANDA
+ {0x11643, 0x11643, prAL, gcPo}, // MODI ABBREVIATION SIGN
+ {0x11644, 0x11644, prAL, gcLo}, // MODI SIGN HUVA
+ {0x11650, 0x11659, prNU, gcNd}, // [10] MODI DIGIT ZERO..MODI DIGIT NINE
+ {0x11660, 0x1166C, prBB, gcPo}, // [13] MONGOLIAN BIRGA WITH ORNAMENT..MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT
+ {0x11680, 0x116AA, prAL, gcLo}, // [43] TAKRI LETTER A..TAKRI LETTER RRA
+ {0x116AB, 0x116AB, prCM, gcMn}, // TAKRI SIGN ANUSVARA
+ {0x116AC, 0x116AC, prCM, gcMc}, // TAKRI SIGN VISARGA
+ {0x116AD, 0x116AD, prCM, gcMn}, // TAKRI VOWEL SIGN AA
+ {0x116AE, 0x116AF, prCM, gcMc}, // [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II
+ {0x116B0, 0x116B5, prCM, gcMn}, // [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU
+ {0x116B6, 0x116B6, prCM, gcMc}, // TAKRI SIGN VIRAMA
+ {0x116B7, 0x116B7, prCM, gcMn}, // TAKRI SIGN NUKTA
+ {0x116B8, 0x116B8, prAL, gcLo}, // TAKRI LETTER ARCHAIC KHA
+ {0x116B9, 0x116B9, prAL, gcPo}, // TAKRI ABBREVIATION SIGN
+ {0x116C0, 0x116C9, prNU, gcNd}, // [10] TAKRI DIGIT ZERO..TAKRI DIGIT NINE
+ {0x11700, 0x1171A, prSA, gcLo}, // [27] AHOM LETTER KA..AHOM LETTER ALTERNATE BA
+ {0x1171D, 0x1171F, prSA, gcMn}, // [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA
+ {0x11720, 0x11721, prSA, gcMc}, // [2] AHOM VOWEL SIGN A..AHOM VOWEL SIGN AA
+ {0x11722, 0x11725, prSA, gcMn}, // [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU
+ {0x11726, 0x11726, prSA, gcMc}, // AHOM VOWEL SIGN E
+ {0x11727, 0x1172B, prSA, gcMn}, // [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER
+ {0x11730, 0x11739, prNU, gcNd}, // [10] AHOM DIGIT ZERO..AHOM DIGIT NINE
+ {0x1173A, 0x1173B, prSA, gcNo}, // [2] AHOM NUMBER TEN..AHOM NUMBER TWENTY
+ {0x1173C, 0x1173E, prBA, gcPo}, // [3] AHOM SIGN SMALL SECTION..AHOM SIGN RULAI
+ {0x1173F, 0x1173F, prSA, gcSo}, // AHOM SYMBOL VI
+ {0x11740, 0x11746, prSA, gcLo}, // [7] AHOM LETTER CA..AHOM LETTER LLA
+ {0x11800, 0x1182B, prAL, gcLo}, // [44] DOGRA LETTER A..DOGRA LETTER RRA
+ {0x1182C, 0x1182E, prCM, gcMc}, // [3] DOGRA VOWEL SIGN AA..DOGRA VOWEL SIGN II
+ {0x1182F, 0x11837, prCM, gcMn}, // [9] DOGRA VOWEL SIGN U..DOGRA SIGN ANUSVARA
+ {0x11838, 0x11838, prCM, gcMc}, // DOGRA SIGN VISARGA
+ {0x11839, 0x1183A, prCM, gcMn}, // [2] DOGRA SIGN VIRAMA..DOGRA SIGN NUKTA
+ {0x1183B, 0x1183B, prAL, gcPo}, // DOGRA ABBREVIATION SIGN
+ {0x118A0, 0x118DF, prAL, gcLC}, // [64] WARANG CITI CAPITAL LETTER NGAA..WARANG CITI SMALL LETTER VIYO
+ {0x118E0, 0x118E9, prNU, gcNd}, // [10] WARANG CITI DIGIT ZERO..WARANG CITI DIGIT NINE
+ {0x118EA, 0x118F2, prAL, gcNo}, // [9] WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY
+ {0x118FF, 0x118FF, prAL, gcLo}, // WARANG CITI OM
+ {0x11900, 0x11906, prAL, gcLo}, // [7] DIVES AKURU LETTER A..DIVES AKURU LETTER E
+ {0x11909, 0x11909, prAL, gcLo}, // DIVES AKURU LETTER O
+ {0x1190C, 0x11913, prAL, gcLo}, // [8] DIVES AKURU LETTER KA..DIVES AKURU LETTER JA
+ {0x11915, 0x11916, prAL, gcLo}, // [2] DIVES AKURU LETTER NYA..DIVES AKURU LETTER TTA
+ {0x11918, 0x1192F, prAL, gcLo}, // [24] DIVES AKURU LETTER DDA..DIVES AKURU LETTER ZA
+ {0x11930, 0x11935, prCM, gcMc}, // [6] DIVES AKURU VOWEL SIGN AA..DIVES AKURU VOWEL SIGN E
+ {0x11937, 0x11938, prCM, gcMc}, // [2] DIVES AKURU VOWEL SIGN AI..DIVES AKURU VOWEL SIGN O
+ {0x1193B, 0x1193C, prCM, gcMn}, // [2] DIVES AKURU SIGN ANUSVARA..DIVES AKURU SIGN CANDRABINDU
+ {0x1193D, 0x1193D, prCM, gcMc}, // DIVES AKURU SIGN HALANTA
+ {0x1193E, 0x1193E, prCM, gcMn}, // DIVES AKURU VIRAMA
+ {0x1193F, 0x1193F, prAL, gcLo}, // DIVES AKURU PREFIXED NASAL SIGN
+ {0x11940, 0x11940, prCM, gcMc}, // DIVES AKURU MEDIAL YA
+ {0x11941, 0x11941, prAL, gcLo}, // DIVES AKURU INITIAL RA
+ {0x11942, 0x11942, prCM, gcMc}, // DIVES AKURU MEDIAL RA
+ {0x11943, 0x11943, prCM, gcMn}, // DIVES AKURU SIGN NUKTA
+ {0x11944, 0x11946, prBA, gcPo}, // [3] DIVES AKURU DOUBLE DANDA..DIVES AKURU END OF TEXT MARK
+ {0x11950, 0x11959, prNU, gcNd}, // [10] DIVES AKURU DIGIT ZERO..DIVES AKURU DIGIT NINE
+ {0x119A0, 0x119A7, prAL, gcLo}, // [8] NANDINAGARI LETTER A..NANDINAGARI LETTER VOCALIC RR
+ {0x119AA, 0x119D0, prAL, gcLo}, // [39] NANDINAGARI LETTER E..NANDINAGARI LETTER RRA
+ {0x119D1, 0x119D3, prCM, gcMc}, // [3] NANDINAGARI VOWEL SIGN AA..NANDINAGARI VOWEL SIGN II
+ {0x119D4, 0x119D7, prCM, gcMn}, // [4] NANDINAGARI VOWEL SIGN U..NANDINAGARI VOWEL SIGN VOCALIC RR
+ {0x119DA, 0x119DB, prCM, gcMn}, // [2] NANDINAGARI VOWEL SIGN E..NANDINAGARI VOWEL SIGN AI
+ {0x119DC, 0x119DF, prCM, gcMc}, // [4] NANDINAGARI VOWEL SIGN O..NANDINAGARI SIGN VISARGA
+ {0x119E0, 0x119E0, prCM, gcMn}, // NANDINAGARI SIGN VIRAMA
+ {0x119E1, 0x119E1, prAL, gcLo}, // NANDINAGARI SIGN AVAGRAHA
+ {0x119E2, 0x119E2, prBB, gcPo}, // NANDINAGARI SIGN SIDDHAM
+ {0x119E3, 0x119E3, prAL, gcLo}, // NANDINAGARI HEADSTROKE
+ {0x119E4, 0x119E4, prCM, gcMc}, // NANDINAGARI VOWEL SIGN PRISHTHAMATRA E
+ {0x11A00, 0x11A00, prAL, gcLo}, // ZANABAZAR SQUARE LETTER A
+ {0x11A01, 0x11A0A, prCM, gcMn}, // [10] ZANABAZAR SQUARE VOWEL SIGN I..ZANABAZAR SQUARE VOWEL LENGTH MARK
+ {0x11A0B, 0x11A32, prAL, gcLo}, // [40] ZANABAZAR SQUARE LETTER KA..ZANABAZAR SQUARE LETTER KSSA
+ {0x11A33, 0x11A38, prCM, gcMn}, // [6] ZANABAZAR SQUARE FINAL CONSONANT MARK..ZANABAZAR SQUARE SIGN ANUSVARA
+ {0x11A39, 0x11A39, prCM, gcMc}, // ZANABAZAR SQUARE SIGN VISARGA
+ {0x11A3A, 0x11A3A, prAL, gcLo}, // ZANABAZAR SQUARE CLUSTER-INITIAL LETTER RA
+ {0x11A3B, 0x11A3E, prCM, gcMn}, // [4] ZANABAZAR SQUARE CLUSTER-FINAL LETTER YA..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
+ {0x11A3F, 0x11A3F, prBB, gcPo}, // ZANABAZAR SQUARE INITIAL HEAD MARK
+ {0x11A40, 0x11A40, prAL, gcPo}, // ZANABAZAR SQUARE CLOSING HEAD MARK
+ {0x11A41, 0x11A44, prBA, gcPo}, // [4] ZANABAZAR SQUARE MARK TSHEG..ZANABAZAR SQUARE MARK LONG TSHEG
+ {0x11A45, 0x11A45, prBB, gcPo}, // ZANABAZAR SQUARE INITIAL DOUBLE-LINED HEAD MARK
+ {0x11A46, 0x11A46, prAL, gcPo}, // ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK
+ {0x11A47, 0x11A47, prCM, gcMn}, // ZANABAZAR SQUARE SUBJOINER
+ {0x11A50, 0x11A50, prAL, gcLo}, // SOYOMBO LETTER A
+ {0x11A51, 0x11A56, prCM, gcMn}, // [6] SOYOMBO VOWEL SIGN I..SOYOMBO VOWEL SIGN OE
+ {0x11A57, 0x11A58, prCM, gcMc}, // [2] SOYOMBO VOWEL SIGN AI..SOYOMBO VOWEL SIGN AU
+ {0x11A59, 0x11A5B, prCM, gcMn}, // [3] SOYOMBO VOWEL SIGN VOCALIC R..SOYOMBO VOWEL LENGTH MARK
+ {0x11A5C, 0x11A89, prAL, gcLo}, // [46] SOYOMBO LETTER KA..SOYOMBO CLUSTER-INITIAL LETTER SA
+ {0x11A8A, 0x11A96, prCM, gcMn}, // [13] SOYOMBO FINAL CONSONANT SIGN G..SOYOMBO SIGN ANUSVARA
+ {0x11A97, 0x11A97, prCM, gcMc}, // SOYOMBO SIGN VISARGA
+ {0x11A98, 0x11A99, prCM, gcMn}, // [2] SOYOMBO GEMINATION MARK..SOYOMBO SUBJOINER
+ {0x11A9A, 0x11A9C, prBA, gcPo}, // [3] SOYOMBO MARK TSHEG..SOYOMBO MARK DOUBLE SHAD
+ {0x11A9D, 0x11A9D, prAL, gcLo}, // SOYOMBO MARK PLUTA
+ {0x11A9E, 0x11AA0, prBB, gcPo}, // [3] SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO HEAD MARK WITH MOON AND SUN
+ {0x11AA1, 0x11AA2, prBA, gcPo}, // [2] SOYOMBO TERMINAL MARK-1..SOYOMBO TERMINAL MARK-2
+ {0x11AB0, 0x11ABF, prAL, gcLo}, // [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA
+ {0x11AC0, 0x11AF8, prAL, gcLo}, // [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL
+ {0x11C00, 0x11C08, prAL, gcLo}, // [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L
+ {0x11C0A, 0x11C2E, prAL, gcLo}, // [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA
+ {0x11C2F, 0x11C2F, prCM, gcMc}, // BHAIKSUKI VOWEL SIGN AA
+ {0x11C30, 0x11C36, prCM, gcMn}, // [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L
+ {0x11C38, 0x11C3D, prCM, gcMn}, // [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA
+ {0x11C3E, 0x11C3E, prCM, gcMc}, // BHAIKSUKI SIGN VISARGA
+ {0x11C3F, 0x11C3F, prCM, gcMn}, // BHAIKSUKI SIGN VIRAMA
+ {0x11C40, 0x11C40, prAL, gcLo}, // BHAIKSUKI SIGN AVAGRAHA
+ {0x11C41, 0x11C45, prBA, gcPo}, // [5] BHAIKSUKI DANDA..BHAIKSUKI GAP FILLER-2
+ {0x11C50, 0x11C59, prNU, gcNd}, // [10] BHAIKSUKI DIGIT ZERO..BHAIKSUKI DIGIT NINE
+ {0x11C5A, 0x11C6C, prAL, gcNo}, // [19] BHAIKSUKI NUMBER ONE..BHAIKSUKI HUNDREDS UNIT MARK
+ {0x11C70, 0x11C70, prBB, gcPo}, // MARCHEN HEAD MARK
+ {0x11C71, 0x11C71, prEX, gcPo}, // MARCHEN MARK SHAD
+ {0x11C72, 0x11C8F, prAL, gcLo}, // [30] MARCHEN LETTER KA..MARCHEN LETTER A
+ {0x11C92, 0x11CA7, prCM, gcMn}, // [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
+ {0x11CA9, 0x11CA9, prCM, gcMc}, // MARCHEN SUBJOINED LETTER YA
+ {0x11CAA, 0x11CB0, prCM, gcMn}, // [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA
+ {0x11CB1, 0x11CB1, prCM, gcMc}, // MARCHEN VOWEL SIGN I
+ {0x11CB2, 0x11CB3, prCM, gcMn}, // [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E
+ {0x11CB4, 0x11CB4, prCM, gcMc}, // MARCHEN VOWEL SIGN O
+ {0x11CB5, 0x11CB6, prCM, gcMn}, // [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU
+ {0x11D00, 0x11D06, prAL, gcLo}, // [7] MASARAM GONDI LETTER A..MASARAM GONDI LETTER E
+ {0x11D08, 0x11D09, prAL, gcLo}, // [2] MASARAM GONDI LETTER AI..MASARAM GONDI LETTER O
+ {0x11D0B, 0x11D30, prAL, gcLo}, // [38] MASARAM GONDI LETTER AU..MASARAM GONDI LETTER TRA
+ {0x11D31, 0x11D36, prCM, gcMn}, // [6] MASARAM GONDI VOWEL SIGN AA..MASARAM GONDI VOWEL SIGN VOCALIC R
+ {0x11D3A, 0x11D3A, prCM, gcMn}, // MASARAM GONDI VOWEL SIGN E
+ {0x11D3C, 0x11D3D, prCM, gcMn}, // [2] MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
+ {0x11D3F, 0x11D45, prCM, gcMn}, // [7] MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI VIRAMA
+ {0x11D46, 0x11D46, prAL, gcLo}, // MASARAM GONDI REPHA
+ {0x11D47, 0x11D47, prCM, gcMn}, // MASARAM GONDI RA-KARA
+ {0x11D50, 0x11D59, prNU, gcNd}, // [10] MASARAM GONDI DIGIT ZERO..MASARAM GONDI DIGIT NINE
+ {0x11D60, 0x11D65, prAL, gcLo}, // [6] GUNJALA GONDI LETTER A..GUNJALA GONDI LETTER UU
+ {0x11D67, 0x11D68, prAL, gcLo}, // [2] GUNJALA GONDI LETTER EE..GUNJALA GONDI LETTER AI
+ {0x11D6A, 0x11D89, prAL, gcLo}, // [32] GUNJALA GONDI LETTER OO..GUNJALA GONDI LETTER SA
+ {0x11D8A, 0x11D8E, prCM, gcMc}, // [5] GUNJALA GONDI VOWEL SIGN AA..GUNJALA GONDI VOWEL SIGN UU
+ {0x11D90, 0x11D91, prCM, gcMn}, // [2] GUNJALA GONDI VOWEL SIGN EE..GUNJALA GONDI VOWEL SIGN AI
+ {0x11D93, 0x11D94, prCM, gcMc}, // [2] GUNJALA GONDI VOWEL SIGN OO..GUNJALA GONDI VOWEL SIGN AU
+ {0x11D95, 0x11D95, prCM, gcMn}, // GUNJALA GONDI SIGN ANUSVARA
+ {0x11D96, 0x11D96, prCM, gcMc}, // GUNJALA GONDI SIGN VISARGA
+ {0x11D97, 0x11D97, prCM, gcMn}, // GUNJALA GONDI VIRAMA
+ {0x11D98, 0x11D98, prAL, gcLo}, // GUNJALA GONDI OM
+ {0x11DA0, 0x11DA9, prNU, gcNd}, // [10] GUNJALA GONDI DIGIT ZERO..GUNJALA GONDI DIGIT NINE
+ {0x11EE0, 0x11EF2, prAL, gcLo}, // [19] MAKASAR LETTER KA..MAKASAR ANGKA
+ {0x11EF3, 0x11EF4, prCM, gcMn}, // [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
+ {0x11EF5, 0x11EF6, prCM, gcMc}, // [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x11EF7, 0x11EF8, prAL, gcPo}, // [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION
+ {0x11FB0, 0x11FB0, prAL, gcLo}, // LISU LETTER YHA
+ {0x11FC0, 0x11FD4, prAL, gcNo}, // [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH
+ {0x11FD5, 0x11FDC, prAL, gcSo}, // [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI
+ {0x11FDD, 0x11FE0, prPO, gcSc}, // [4] TAMIL SIGN KAACU..TAMIL SIGN VARAAKAN
+ {0x11FE1, 0x11FF1, prAL, gcSo}, // [17] TAMIL SIGN PAARAM..TAMIL SIGN VAKAIYARAA
+ {0x11FFF, 0x11FFF, prBA, gcPo}, // TAMIL PUNCTUATION END OF TEXT
+ {0x12000, 0x12399, prAL, gcLo}, // [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U
+ {0x12400, 0x1246E, prAL, gcNl}, // [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM
+ {0x12470, 0x12474, prBA, gcPo}, // [5] CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON
+ {0x12480, 0x12543, prAL, gcLo}, // [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU
+ {0x12F90, 0x12FF0, prAL, gcLo}, // [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114
+ {0x12FF1, 0x12FF2, prAL, gcPo}, // [2] CYPRO-MINOAN SIGN CM301..CYPRO-MINOAN SIGN CM302
+ {0x13000, 0x13257, prAL, gcLo}, // [600] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH O006
+ {0x13258, 0x1325A, prOP, gcLo}, // [3] EGYPTIAN HIEROGLYPH O006A..EGYPTIAN HIEROGLYPH O006C
+ {0x1325B, 0x1325D, prCL, gcLo}, // [3] EGYPTIAN HIEROGLYPH O006D..EGYPTIAN HIEROGLYPH O006F
+ {0x1325E, 0x13281, prAL, gcLo}, // [36] EGYPTIAN HIEROGLYPH O007..EGYPTIAN HIEROGLYPH O033
+ {0x13282, 0x13282, prCL, gcLo}, // EGYPTIAN HIEROGLYPH O033A
+ {0x13283, 0x13285, prAL, gcLo}, // [3] EGYPTIAN HIEROGLYPH O034..EGYPTIAN HIEROGLYPH O036
+ {0x13286, 0x13286, prOP, gcLo}, // EGYPTIAN HIEROGLYPH O036A
+ {0x13287, 0x13287, prCL, gcLo}, // EGYPTIAN HIEROGLYPH O036B
+ {0x13288, 0x13288, prOP, gcLo}, // EGYPTIAN HIEROGLYPH O036C
+ {0x13289, 0x13289, prCL, gcLo}, // EGYPTIAN HIEROGLYPH O036D
+ {0x1328A, 0x13378, prAL, gcLo}, // [239] EGYPTIAN HIEROGLYPH O037..EGYPTIAN HIEROGLYPH V011
+ {0x13379, 0x13379, prOP, gcLo}, // EGYPTIAN HIEROGLYPH V011A
+ {0x1337A, 0x1337B, prCL, gcLo}, // [2] EGYPTIAN HIEROGLYPH V011B..EGYPTIAN HIEROGLYPH V011C
+ {0x1337C, 0x1342E, prAL, gcLo}, // [179] EGYPTIAN HIEROGLYPH V012..EGYPTIAN HIEROGLYPH AA032
+ {0x13430, 0x13436, prGL, gcCf}, // [7] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH OVERLAY MIDDLE
+ {0x13437, 0x13437, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN SEGMENT
+ {0x13438, 0x13438, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x14400, 0x145CD, prAL, gcLo}, // [462] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A409
+ {0x145CE, 0x145CE, prOP, gcLo}, // ANATOLIAN HIEROGLYPH A410 BEGIN LOGOGRAM MARK
+ {0x145CF, 0x145CF, prCL, gcLo}, // ANATOLIAN HIEROGLYPH A410A END LOGOGRAM MARK
+ {0x145D0, 0x14646, prAL, gcLo}, // [119] ANATOLIAN HIEROGLYPH A411..ANATOLIAN HIEROGLYPH A530
+ {0x16800, 0x16A38, prAL, gcLo}, // [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ
+ {0x16A40, 0x16A5E, prAL, gcLo}, // [31] MRO LETTER TA..MRO LETTER TEK
+ {0x16A60, 0x16A69, prNU, gcNd}, // [10] MRO DIGIT ZERO..MRO DIGIT NINE
+ {0x16A6E, 0x16A6F, prBA, gcPo}, // [2] MRO DANDA..MRO DOUBLE DANDA
+ {0x16A70, 0x16ABE, prAL, gcLo}, // [79] TANGSA LETTER OZ..TANGSA LETTER ZA
+ {0x16AC0, 0x16AC9, prNU, gcNd}, // [10] TANGSA DIGIT ZERO..TANGSA DIGIT NINE
+ {0x16AD0, 0x16AED, prAL, gcLo}, // [30] BASSA VAH LETTER ENNI..BASSA VAH LETTER I
+ {0x16AF0, 0x16AF4, prCM, gcMn}, // [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
+ {0x16AF5, 0x16AF5, prBA, gcPo}, // BASSA VAH FULL STOP
+ {0x16B00, 0x16B2F, prAL, gcLo}, // [48] PAHAWH HMONG VOWEL KEEB..PAHAWH HMONG CONSONANT CAU
+ {0x16B30, 0x16B36, prCM, gcMn}, // [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
+ {0x16B37, 0x16B39, prBA, gcPo}, // [3] PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN CIM CHEEM
+ {0x16B3A, 0x16B3B, prAL, gcPo}, // [2] PAHAWH HMONG SIGN VOS THIAB..PAHAWH HMONG SIGN VOS FEEM
+ {0x16B3C, 0x16B3F, prAL, gcSo}, // [4] PAHAWH HMONG SIGN XYEEM NTXIV..PAHAWH HMONG SIGN XYEEM FAIB
+ {0x16B40, 0x16B43, prAL, gcLm}, // [4] PAHAWH HMONG SIGN VOS SEEV..PAHAWH HMONG SIGN IB YAM
+ {0x16B44, 0x16B44, prBA, gcPo}, // PAHAWH HMONG SIGN XAUS
+ {0x16B45, 0x16B45, prAL, gcSo}, // PAHAWH HMONG SIGN CIM TSOV ROG
+ {0x16B50, 0x16B59, prNU, gcNd}, // [10] PAHAWH HMONG DIGIT ZERO..PAHAWH HMONG DIGIT NINE
+ {0x16B5B, 0x16B61, prAL, gcNo}, // [7] PAHAWH HMONG NUMBER TENS..PAHAWH HMONG NUMBER TRILLIONS
+ {0x16B63, 0x16B77, prAL, gcLo}, // [21] PAHAWH HMONG SIGN VOS LUB..PAHAWH HMONG SIGN CIM NRES TOS
+ {0x16B7D, 0x16B8F, prAL, gcLo}, // [19] PAHAWH HMONG CLAN SIGN TSHEEJ..PAHAWH HMONG CLAN SIGN VWJ
+ {0x16E40, 0x16E7F, prAL, gcLC}, // [64] MEDEFAIDRIN CAPITAL LETTER M..MEDEFAIDRIN SMALL LETTER Y
+ {0x16E80, 0x16E96, prAL, gcNo}, // [23] MEDEFAIDRIN DIGIT ZERO..MEDEFAIDRIN DIGIT THREE ALTERNATE FORM
+ {0x16E97, 0x16E98, prBA, gcPo}, // [2] MEDEFAIDRIN COMMA..MEDEFAIDRIN FULL STOP
+ {0x16E99, 0x16E9A, prAL, gcPo}, // [2] MEDEFAIDRIN SYMBOL AIVA..MEDEFAIDRIN EXCLAMATION OH
+ {0x16F00, 0x16F4A, prAL, gcLo}, // [75] MIAO LETTER PA..MIAO LETTER RTE
+ {0x16F4F, 0x16F4F, prCM, gcMn}, // MIAO SIGN CONSONANT MODIFIER BAR
+ {0x16F50, 0x16F50, prAL, gcLo}, // MIAO LETTER NASALIZATION
+ {0x16F51, 0x16F87, prCM, gcMc}, // [55] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN UI
+ {0x16F8F, 0x16F92, prCM, gcMn}, // [4] MIAO TONE RIGHT..MIAO TONE BELOW
+ {0x16F93, 0x16F9F, prAL, gcLm}, // [13] MIAO LETTER TONE-2..MIAO LETTER REFORMED TONE-8
+ {0x16FE0, 0x16FE1, prNS, gcLm}, // [2] TANGUT ITERATION MARK..NUSHU ITERATION MARK
+ {0x16FE2, 0x16FE2, prNS, gcPo}, // OLD CHINESE HOOK MARK
+ {0x16FE3, 0x16FE3, prNS, gcLm}, // OLD CHINESE ITERATION MARK
+ {0x16FE4, 0x16FE4, prGL, gcMn}, // KHITAN SMALL SCRIPT FILLER
+ {0x16FF0, 0x16FF1, prCM, gcMc}, // [2] VIETNAMESE ALTERNATE READING MARK CA..VIETNAMESE ALTERNATE READING MARK NHAY
+ {0x17000, 0x187F7, prID, gcLo}, // [6136] TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187F7
+ {0x18800, 0x18AFF, prID, gcLo}, // [768] TANGUT COMPONENT-001..TANGUT COMPONENT-768
+ {0x18B00, 0x18CD5, prAL, gcLo}, // [470] KHITAN SMALL SCRIPT CHARACTER-18B00..KHITAN SMALL SCRIPT CHARACTER-18CD5
+ {0x18D00, 0x18D08, prID, gcLo}, // [9] TANGUT IDEOGRAPH-18D00..TANGUT IDEOGRAPH-18D08
+ {0x1AFF0, 0x1AFF3, prAL, gcLm}, // [4] KATAKANA LETTER MINNAN TONE-2..KATAKANA LETTER MINNAN TONE-5
+ {0x1AFF5, 0x1AFFB, prAL, gcLm}, // [7] KATAKANA LETTER MINNAN TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-5
+ {0x1AFFD, 0x1AFFE, prAL, gcLm}, // [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
+ {0x1B000, 0x1B0FF, prID, gcLo}, // [256] KATAKANA LETTER ARCHAIC E..HENTAIGANA LETTER RE-2
+ {0x1B100, 0x1B122, prID, gcLo}, // [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU
+ {0x1B150, 0x1B152, prCJ, gcLo}, // [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO
+ {0x1B164, 0x1B167, prCJ, gcLo}, // [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
+ {0x1B170, 0x1B2FB, prID, gcLo}, // [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
+ {0x1BC00, 0x1BC6A, prAL, gcLo}, // [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
+ {0x1BC70, 0x1BC7C, prAL, gcLo}, // [13] DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK
+ {0x1BC80, 0x1BC88, prAL, gcLo}, // [9] DUPLOYAN AFFIX HIGH ACUTE..DUPLOYAN AFFIX HIGH VERTICAL
+ {0x1BC90, 0x1BC99, prAL, gcLo}, // [10] DUPLOYAN AFFIX LOW ACUTE..DUPLOYAN AFFIX LOW ARROW
+ {0x1BC9C, 0x1BC9C, prAL, gcSo}, // DUPLOYAN SIGN O WITH CROSS
+ {0x1BC9D, 0x1BC9E, prCM, gcMn}, // [2] DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK
+ {0x1BC9F, 0x1BC9F, prBA, gcPo}, // DUPLOYAN PUNCTUATION CHINOOK FULL STOP
+ {0x1BCA0, 0x1BCA3, prCM, gcCf}, // [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP
+ {0x1CF00, 0x1CF2D, prCM, gcMn}, // [46] ZNAMENNY COMBINING MARK GORAZDO NIZKO S KRYZHEM ON LEFT..ZNAMENNY COMBINING MARK KRYZH ON LEFT
+ {0x1CF30, 0x1CF46, prCM, gcMn}, // [23] ZNAMENNY COMBINING TONAL RANGE MARK MRACHNO..ZNAMENNY PRIZNAK MODIFIER ROG
+ {0x1CF50, 0x1CFC3, prAL, gcSo}, // [116] ZNAMENNY NEUME KRYUK..ZNAMENNY NEUME PAUK
+ {0x1D000, 0x1D0F5, prAL, gcSo}, // [246] BYZANTINE MUSICAL SYMBOL PSILI..BYZANTINE MUSICAL SYMBOL GORGON NEO KATO
+ {0x1D100, 0x1D126, prAL, gcSo}, // [39] MUSICAL SYMBOL SINGLE BARLINE..MUSICAL SYMBOL DRUM CLEF-2
+ {0x1D129, 0x1D164, prAL, gcSo}, // [60] MUSICAL SYMBOL MULTIPLE MEASURE REST..MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE
+ {0x1D165, 0x1D166, prCM, gcMc}, // [2] MUSICAL SYMBOL COMBINING STEM..MUSICAL SYMBOL COMBINING SPRECHGESANG STEM
+ {0x1D167, 0x1D169, prCM, gcMn}, // [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3
+ {0x1D16A, 0x1D16C, prAL, gcSo}, // [3] MUSICAL SYMBOL FINGERED TREMOLO-1..MUSICAL SYMBOL FINGERED TREMOLO-3
+ {0x1D16D, 0x1D172, prCM, gcMc}, // [6] MUSICAL SYMBOL COMBINING AUGMENTATION DOT..MUSICAL SYMBOL COMBINING FLAG-5
+ {0x1D173, 0x1D17A, prCM, gcCf}, // [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE
+ {0x1D17B, 0x1D182, prCM, gcMn}, // [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE
+ {0x1D183, 0x1D184, prAL, gcSo}, // [2] MUSICAL SYMBOL ARPEGGIATO UP..MUSICAL SYMBOL ARPEGGIATO DOWN
+ {0x1D185, 0x1D18B, prCM, gcMn}, // [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE
+ {0x1D18C, 0x1D1A9, prAL, gcSo}, // [30] MUSICAL SYMBOL RINFORZANDO..MUSICAL SYMBOL DEGREE SLASH
+ {0x1D1AA, 0x1D1AD, prCM, gcMn}, // [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO
+ {0x1D1AE, 0x1D1EA, prAL, gcSo}, // [61] MUSICAL SYMBOL PEDAL MARK..MUSICAL SYMBOL KORON
+ {0x1D200, 0x1D241, prAL, gcSo}, // [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54
+ {0x1D242, 0x1D244, prCM, gcMn}, // [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
+ {0x1D245, 0x1D245, prAL, gcSo}, // GREEK MUSICAL LEIMMA
+ {0x1D2E0, 0x1D2F3, prAL, gcNo}, // [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN
+ {0x1D300, 0x1D356, prAL, gcSo}, // [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING
+ {0x1D360, 0x1D378, prAL, gcNo}, // [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE
+ {0x1D400, 0x1D454, prAL, gcLC}, // [85] MATHEMATICAL BOLD CAPITAL A..MATHEMATICAL ITALIC SMALL G
+ {0x1D456, 0x1D49C, prAL, gcLC}, // [71] MATHEMATICAL ITALIC SMALL I..MATHEMATICAL SCRIPT CAPITAL A
+ {0x1D49E, 0x1D49F, prAL, gcLu}, // [2] MATHEMATICAL SCRIPT CAPITAL C..MATHEMATICAL SCRIPT CAPITAL D
+ {0x1D4A2, 0x1D4A2, prAL, gcLu}, // MATHEMATICAL SCRIPT CAPITAL G
+ {0x1D4A5, 0x1D4A6, prAL, gcLu}, // [2] MATHEMATICAL SCRIPT CAPITAL J..MATHEMATICAL SCRIPT CAPITAL K
+ {0x1D4A9, 0x1D4AC, prAL, gcLu}, // [4] MATHEMATICAL SCRIPT CAPITAL N..MATHEMATICAL SCRIPT CAPITAL Q
+ {0x1D4AE, 0x1D4B9, prAL, gcLC}, // [12] MATHEMATICAL SCRIPT CAPITAL S..MATHEMATICAL SCRIPT SMALL D
+ {0x1D4BB, 0x1D4BB, prAL, gcLl}, // MATHEMATICAL SCRIPT SMALL F
+ {0x1D4BD, 0x1D4C3, prAL, gcLl}, // [7] MATHEMATICAL SCRIPT SMALL H..MATHEMATICAL SCRIPT SMALL N
+ {0x1D4C5, 0x1D505, prAL, gcLC}, // [65] MATHEMATICAL SCRIPT SMALL P..MATHEMATICAL FRAKTUR CAPITAL B
+ {0x1D507, 0x1D50A, prAL, gcLu}, // [4] MATHEMATICAL FRAKTUR CAPITAL D..MATHEMATICAL FRAKTUR CAPITAL G
+ {0x1D50D, 0x1D514, prAL, gcLu}, // [8] MATHEMATICAL FRAKTUR CAPITAL J..MATHEMATICAL FRAKTUR CAPITAL Q
+ {0x1D516, 0x1D51C, prAL, gcLu}, // [7] MATHEMATICAL FRAKTUR CAPITAL S..MATHEMATICAL FRAKTUR CAPITAL Y
+ {0x1D51E, 0x1D539, prAL, gcLC}, // [28] MATHEMATICAL FRAKTUR SMALL A..MATHEMATICAL DOUBLE-STRUCK CAPITAL B
+ {0x1D53B, 0x1D53E, prAL, gcLu}, // [4] MATHEMATICAL DOUBLE-STRUCK CAPITAL D..MATHEMATICAL DOUBLE-STRUCK CAPITAL G
+ {0x1D540, 0x1D544, prAL, gcLu}, // [5] MATHEMATICAL DOUBLE-STRUCK CAPITAL I..MATHEMATICAL DOUBLE-STRUCK CAPITAL M
+ {0x1D546, 0x1D546, prAL, gcLu}, // MATHEMATICAL DOUBLE-STRUCK CAPITAL O
+ {0x1D54A, 0x1D550, prAL, gcLu}, // [7] MATHEMATICAL DOUBLE-STRUCK CAPITAL S..MATHEMATICAL DOUBLE-STRUCK CAPITAL Y
+ {0x1D552, 0x1D6A5, prAL, gcLC}, // [340] MATHEMATICAL DOUBLE-STRUCK SMALL A..MATHEMATICAL ITALIC SMALL DOTLESS J
+ {0x1D6A8, 0x1D6C0, prAL, gcLu}, // [25] MATHEMATICAL BOLD CAPITAL ALPHA..MATHEMATICAL BOLD CAPITAL OMEGA
+ {0x1D6C1, 0x1D6C1, prAL, gcSm}, // MATHEMATICAL BOLD NABLA
+ {0x1D6C2, 0x1D6DA, prAL, gcLl}, // [25] MATHEMATICAL BOLD SMALL ALPHA..MATHEMATICAL BOLD SMALL OMEGA
+ {0x1D6DB, 0x1D6DB, prAL, gcSm}, // MATHEMATICAL BOLD PARTIAL DIFFERENTIAL
+ {0x1D6DC, 0x1D6FA, prAL, gcLC}, // [31] MATHEMATICAL BOLD EPSILON SYMBOL..MATHEMATICAL ITALIC CAPITAL OMEGA
+ {0x1D6FB, 0x1D6FB, prAL, gcSm}, // MATHEMATICAL ITALIC NABLA
+ {0x1D6FC, 0x1D714, prAL, gcLl}, // [25] MATHEMATICAL ITALIC SMALL ALPHA..MATHEMATICAL ITALIC SMALL OMEGA
+ {0x1D715, 0x1D715, prAL, gcSm}, // MATHEMATICAL ITALIC PARTIAL DIFFERENTIAL
+ {0x1D716, 0x1D734, prAL, gcLC}, // [31] MATHEMATICAL ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD ITALIC CAPITAL OMEGA
+ {0x1D735, 0x1D735, prAL, gcSm}, // MATHEMATICAL BOLD ITALIC NABLA
+ {0x1D736, 0x1D74E, prAL, gcLl}, // [25] MATHEMATICAL BOLD ITALIC SMALL ALPHA..MATHEMATICAL BOLD ITALIC SMALL OMEGA
+ {0x1D74F, 0x1D74F, prAL, gcSm}, // MATHEMATICAL BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D750, 0x1D76E, prAL, gcLC}, // [31] MATHEMATICAL BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA
+ {0x1D76F, 0x1D76F, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD NABLA
+ {0x1D770, 0x1D788, prAL, gcLl}, // [25] MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA
+ {0x1D789, 0x1D789, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD PARTIAL DIFFERENTIAL
+ {0x1D78A, 0x1D7A8, prAL, gcLC}, // [31] MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA
+ {0x1D7A9, 0x1D7A9, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD ITALIC NABLA
+ {0x1D7AA, 0x1D7C2, prAL, gcLl}, // [25] MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA
+ {0x1D7C3, 0x1D7C3, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D7C4, 0x1D7CB, prAL, gcLC}, // [8] MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD SMALL DIGAMMA
+ {0x1D7CE, 0x1D7FF, prNU, gcNd}, // [50] MATHEMATICAL BOLD DIGIT ZERO..MATHEMATICAL MONOSPACE DIGIT NINE
+ {0x1D800, 0x1D9FF, prAL, gcSo}, // [512] SIGNWRITING HAND-FIST INDEX..SIGNWRITING HEAD
+ {0x1DA00, 0x1DA36, prCM, gcMn}, // [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+ {0x1DA37, 0x1DA3A, prAL, gcSo}, // [4] SIGNWRITING AIR BLOW SMALL ROTATIONS..SIGNWRITING BREATH EXHALE
+ {0x1DA3B, 0x1DA6C, prCM, gcMn}, // [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+ {0x1DA6D, 0x1DA74, prAL, gcSo}, // [8] SIGNWRITING SHOULDER HIP SPINE..SIGNWRITING TORSO-FLOORPLANE TWISTING
+ {0x1DA75, 0x1DA75, prCM, gcMn}, // SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+ {0x1DA76, 0x1DA83, prAL, gcSo}, // [14] SIGNWRITING LIMB COMBINATION..SIGNWRITING LOCATION DEPTH
+ {0x1DA84, 0x1DA84, prCM, gcMn}, // SIGNWRITING LOCATION HEAD NECK
+ {0x1DA85, 0x1DA86, prAL, gcSo}, // [2] SIGNWRITING LOCATION TORSO..SIGNWRITING LOCATION LIMBS DIGITS
+ {0x1DA87, 0x1DA8A, prBA, gcPo}, // [4] SIGNWRITING COMMA..SIGNWRITING COLON
+ {0x1DA8B, 0x1DA8B, prAL, gcPo}, // SIGNWRITING PARENTHESIS
+ {0x1DA9B, 0x1DA9F, prCM, gcMn}, // [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+ {0x1DAA1, 0x1DAAF, prCM, gcMn}, // [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+ {0x1DF00, 0x1DF09, prAL, gcLl}, // [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
+ {0x1DF0A, 0x1DF0A, prAL, gcLo}, // LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
+ {0x1DF0B, 0x1DF1E, prAL, gcLl}, // [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1E000, 0x1E006, prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+ {0x1E008, 0x1E018, prCM, gcMn}, // [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+ {0x1E01B, 0x1E021, prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+ {0x1E023, 0x1E024, prCM, gcMn}, // [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+ {0x1E026, 0x1E02A, prCM, gcMn}, // [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E100, 0x1E12C, prAL, gcLo}, // [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
+ {0x1E130, 0x1E136, prCM, gcMn}, // [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
+ {0x1E137, 0x1E13D, prAL, gcLm}, // [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
+ {0x1E140, 0x1E149, prNU, gcNd}, // [10] NYIAKENG PUACHUE HMONG DIGIT ZERO..NYIAKENG PUACHUE HMONG DIGIT NINE
+ {0x1E14E, 0x1E14E, prAL, gcLo}, // NYIAKENG PUACHUE HMONG LOGOGRAM NYAJ
+ {0x1E14F, 0x1E14F, prAL, gcSo}, // NYIAKENG PUACHUE HMONG CIRCLED CA
+ {0x1E290, 0x1E2AD, prAL, gcLo}, // [30] TOTO LETTER PA..TOTO LETTER A
+ {0x1E2AE, 0x1E2AE, prCM, gcMn}, // TOTO SIGN RISING TONE
+ {0x1E2C0, 0x1E2EB, prAL, gcLo}, // [44] WANCHO LETTER AA..WANCHO LETTER YIH
+ {0x1E2EC, 0x1E2EF, prCM, gcMn}, // [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E2F0, 0x1E2F9, prNU, gcNd}, // [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
+ {0x1E2FF, 0x1E2FF, prPR, gcSc}, // WANCHO NGUN SIGN
+ {0x1E7E0, 0x1E7E6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
+ {0x1E7E8, 0x1E7EB, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
+ {0x1E7ED, 0x1E7EE, prAL, gcLo}, // [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
+ {0x1E7F0, 0x1E7FE, prAL, gcLo}, // [15] ETHIOPIC SYLLABLE GURAGE QWI..ETHIOPIC SYLLABLE GURAGE PWEE
+ {0x1E800, 0x1E8C4, prAL, gcLo}, // [197] MENDE KIKAKUI SYLLABLE M001 KI..MENDE KIKAKUI SYLLABLE M060 NYON
+ {0x1E8C7, 0x1E8CF, prAL, gcNo}, // [9] MENDE KIKAKUI DIGIT ONE..MENDE KIKAKUI DIGIT NINE
+ {0x1E8D0, 0x1E8D6, prCM, gcMn}, // [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+ {0x1E900, 0x1E943, prAL, gcLC}, // [68] ADLAM CAPITAL LETTER ALIF..ADLAM SMALL LETTER SHA
+ {0x1E944, 0x1E94A, prCM, gcMn}, // [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
+ {0x1E94B, 0x1E94B, prAL, gcLm}, // ADLAM NASALIZATION MARK
+ {0x1E950, 0x1E959, prNU, gcNd}, // [10] ADLAM DIGIT ZERO..ADLAM DIGIT NINE
+ {0x1E95E, 0x1E95F, prOP, gcPo}, // [2] ADLAM INITIAL EXCLAMATION MARK..ADLAM INITIAL QUESTION MARK
+ {0x1EC71, 0x1ECAB, prAL, gcNo}, // [59] INDIC SIYAQ NUMBER ONE..INDIC SIYAQ NUMBER PREFIXED NINE
+ {0x1ECAC, 0x1ECAC, prPO, gcSo}, // INDIC SIYAQ PLACEHOLDER
+ {0x1ECAD, 0x1ECAF, prAL, gcNo}, // [3] INDIC SIYAQ FRACTION ONE QUARTER..INDIC SIYAQ FRACTION THREE QUARTERS
+ {0x1ECB0, 0x1ECB0, prPO, gcSc}, // INDIC SIYAQ RUPEE MARK
+ {0x1ECB1, 0x1ECB4, prAL, gcNo}, // [4] INDIC SIYAQ NUMBER ALTERNATE ONE..INDIC SIYAQ ALTERNATE LAKH MARK
+ {0x1ED01, 0x1ED2D, prAL, gcNo}, // [45] OTTOMAN SIYAQ NUMBER ONE..OTTOMAN SIYAQ NUMBER NINETY THOUSAND
+ {0x1ED2E, 0x1ED2E, prAL, gcSo}, // OTTOMAN SIYAQ MARRATAN
+ {0x1ED2F, 0x1ED3D, prAL, gcNo}, // [15] OTTOMAN SIYAQ ALTERNATE NUMBER TWO..OTTOMAN SIYAQ FRACTION ONE SIXTH
+ {0x1EE00, 0x1EE03, prAL, gcLo}, // [4] ARABIC MATHEMATICAL ALEF..ARABIC MATHEMATICAL DAL
+ {0x1EE05, 0x1EE1F, prAL, gcLo}, // [27] ARABIC MATHEMATICAL WAW..ARABIC MATHEMATICAL DOTLESS QAF
+ {0x1EE21, 0x1EE22, prAL, gcLo}, // [2] ARABIC MATHEMATICAL INITIAL BEH..ARABIC MATHEMATICAL INITIAL JEEM
+ {0x1EE24, 0x1EE24, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL HEH
+ {0x1EE27, 0x1EE27, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL HAH
+ {0x1EE29, 0x1EE32, prAL, gcLo}, // [10] ARABIC MATHEMATICAL INITIAL YEH..ARABIC MATHEMATICAL INITIAL QAF
+ {0x1EE34, 0x1EE37, prAL, gcLo}, // [4] ARABIC MATHEMATICAL INITIAL SHEEN..ARABIC MATHEMATICAL INITIAL KHAH
+ {0x1EE39, 0x1EE39, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL DAD
+ {0x1EE3B, 0x1EE3B, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL GHAIN
+ {0x1EE42, 0x1EE42, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED JEEM
+ {0x1EE47, 0x1EE47, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED HAH
+ {0x1EE49, 0x1EE49, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED YEH
+ {0x1EE4B, 0x1EE4B, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED LAM
+ {0x1EE4D, 0x1EE4F, prAL, gcLo}, // [3] ARABIC MATHEMATICAL TAILED NOON..ARABIC MATHEMATICAL TAILED AIN
+ {0x1EE51, 0x1EE52, prAL, gcLo}, // [2] ARABIC MATHEMATICAL TAILED SAD..ARABIC MATHEMATICAL TAILED QAF
+ {0x1EE54, 0x1EE54, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED SHEEN
+ {0x1EE57, 0x1EE57, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED KHAH
+ {0x1EE59, 0x1EE59, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED DAD
+ {0x1EE5B, 0x1EE5B, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED GHAIN
+ {0x1EE5D, 0x1EE5D, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED DOTLESS NOON
+ {0x1EE5F, 0x1EE5F, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED DOTLESS QAF
+ {0x1EE61, 0x1EE62, prAL, gcLo}, // [2] ARABIC MATHEMATICAL STRETCHED BEH..ARABIC MATHEMATICAL STRETCHED JEEM
+ {0x1EE64, 0x1EE64, prAL, gcLo}, // ARABIC MATHEMATICAL STRETCHED HEH
+ {0x1EE67, 0x1EE6A, prAL, gcLo}, // [4] ARABIC MATHEMATICAL STRETCHED HAH..ARABIC MATHEMATICAL STRETCHED KAF
+ {0x1EE6C, 0x1EE72, prAL, gcLo}, // [7] ARABIC MATHEMATICAL STRETCHED MEEM..ARABIC MATHEMATICAL STRETCHED QAF
+ {0x1EE74, 0x1EE77, prAL, gcLo}, // [4] ARABIC MATHEMATICAL STRETCHED SHEEN..ARABIC MATHEMATICAL STRETCHED KHAH
+ {0x1EE79, 0x1EE7C, prAL, gcLo}, // [4] ARABIC MATHEMATICAL STRETCHED DAD..ARABIC MATHEMATICAL STRETCHED DOTLESS BEH
+ {0x1EE7E, 0x1EE7E, prAL, gcLo}, // ARABIC MATHEMATICAL STRETCHED DOTLESS FEH
+ {0x1EE80, 0x1EE89, prAL, gcLo}, // [10] ARABIC MATHEMATICAL LOOPED ALEF..ARABIC MATHEMATICAL LOOPED YEH
+ {0x1EE8B, 0x1EE9B, prAL, gcLo}, // [17] ARABIC MATHEMATICAL LOOPED LAM..ARABIC MATHEMATICAL LOOPED GHAIN
+ {0x1EEA1, 0x1EEA3, prAL, gcLo}, // [3] ARABIC MATHEMATICAL DOUBLE-STRUCK BEH..ARABIC MATHEMATICAL DOUBLE-STRUCK DAL
+ {0x1EEA5, 0x1EEA9, prAL, gcLo}, // [5] ARABIC MATHEMATICAL DOUBLE-STRUCK WAW..ARABIC MATHEMATICAL DOUBLE-STRUCK YEH
+ {0x1EEAB, 0x1EEBB, prAL, gcLo}, // [17] ARABIC MATHEMATICAL DOUBLE-STRUCK LAM..ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN
+ {0x1EEF0, 0x1EEF1, prAL, gcSm}, // [2] ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL..ARABIC MATHEMATICAL OPERATOR HAH WITH DAL
+ {0x1F000, 0x1F02B, prID, gcSo}, // [44] MAHJONG TILE EAST WIND..MAHJONG TILE BACK
+ {0x1F02C, 0x1F02F, prID, gcCn}, // [4] <reserved-1F02C>..<reserved-1F02F>
+ {0x1F030, 0x1F093, prID, gcSo}, // [100] DOMINO TILE HORIZONTAL BACK..DOMINO TILE VERTICAL-06-06
+ {0x1F094, 0x1F09F, prID, gcCn}, // [12] <reserved-1F094>..<reserved-1F09F>
+ {0x1F0A0, 0x1F0AE, prID, gcSo}, // [15] PLAYING CARD BACK..PLAYING CARD KING OF SPADES
+ {0x1F0AF, 0x1F0B0, prID, gcCn}, // [2] <reserved-1F0AF>..<reserved-1F0B0>