mirror of
https://github.com/cwinfo/matterbridge.git
synced 2025-07-03 10:47:44 +00:00
Add vendor files for spf13/viper
27
vendor/golang.org/x/text/internal/gen/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
226
vendor/golang.org/x/text/internal/gen/bitfield/bitfield.go
generated
vendored
Normal file
@@ -0,0 +1,226 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package bitfield converts annotated structs into integer values.
//
// Any field that is marked with a bitfield tag is compacted. The tag value has
// two parts. The part before the comma determines the method name for a
// generated type. If left blank the name of the field is used.
// The part after the comma determines the number of bits to use for the
// representation.
package bitfield

import (
	"bytes"
	"fmt"
	"io"
	"reflect"
	"strconv"
	"strings"
)

// Config determines settings for packing and generation. If a Config is used,
// the same Config should be used for packing and generation.
type Config struct {
	// NumBits fixes the maximum allowed bits for the integer representation.
	// If NumBits is not 8, 16, 32, or 64, the actual underlying integer size
	// will be the next largest available.
	NumBits uint

	// If Package is set, code generation will write a package clause.
	Package string

	// TypeName is the name for the generated type. By default it is the name
	// of the type of the value passed to Gen.
	TypeName string
}

var nullConfig = &Config{}

// Pack packs annotated bit ranges of struct x in an integer.
//
// Only fields that have a "bitfield" tag are compacted.
func Pack(x interface{}, c *Config) (packed uint64, err error) {
	packed, _, err = pack(x, c)
	return
}

func pack(x interface{}, c *Config) (packed uint64, nBit uint, err error) {
	if c == nil {
		c = nullConfig
	}
	nBits := c.NumBits
	v := reflect.ValueOf(x)
	v = reflect.Indirect(v)
	t := v.Type()
	pos := 64 - nBits
	if nBits == 0 {
		pos = 0
	}
	for i := 0; i < v.NumField(); i++ {
		v := v.Field(i)
		field := t.Field(i)
		f, err := parseField(field)

		if err != nil {
			return 0, 0, err
		}
		if f.nBits == 0 {
			continue
		}
		value := uint64(0)
		switch v.Kind() {
		case reflect.Bool:
			if v.Bool() {
				value = 1
			}
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			value = v.Uint()
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			x := v.Int()
			if x < 0 {
				return 0, 0, fmt.Errorf("bitfield: negative value for field %q not allowed", field.Name)
			}
			value = uint64(x)
		}
		if value > (1<<f.nBits)-1 {
			return 0, 0, fmt.Errorf("bitfield: value %#x of field %q does not fit in %d bits", value, field.Name, f.nBits)
		}
		shift := 64 - pos - f.nBits
		if pos += f.nBits; pos > 64 {
			return 0, 0, fmt.Errorf("bitfield: no more bits left for field %q", field.Name)
		}
		packed |= value << shift
	}
	if nBits == 0 {
		nBits = posToBits(pos)
		packed >>= (64 - nBits)
	}
	return packed, nBits, nil
}

type field struct {
	name  string
	value uint64
	nBits uint
}

// parseField parses a tag of the form [<name>][:<nBits>][,<pos>[..<end>]]
func parseField(field reflect.StructField) (f field, err error) {
	s, ok := field.Tag.Lookup("bitfield")
	if !ok {
		return f, nil
	}
	switch field.Type.Kind() {
	case reflect.Bool:
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
	default:
		return f, fmt.Errorf("bitfield: field %q is not an integer or bool type", field.Name)
	}
	bits := s
	f.name = ""

	if i := strings.IndexByte(s, ','); i >= 0 {
		bits = s[:i]
		f.name = s[i+1:]
	}
	if bits != "" {
		nBits, err := strconv.ParseUint(bits, 10, 8)
		if err != nil {
			return f, fmt.Errorf("bitfield: invalid bit size for field %q: %v", field.Name, err)
		}
		f.nBits = uint(nBits)
	}
	if f.nBits == 0 {
		if field.Type.Kind() == reflect.Bool {
			f.nBits = 1
		} else {
			f.nBits = uint(field.Type.Bits())
		}
	}
	if f.name == "" {
		f.name = field.Name
	}
	return f, err
}

func posToBits(pos uint) (bits uint) {
	switch {
	case pos <= 8:
		bits = 8
	case pos <= 16:
		bits = 16
	case pos <= 32:
		bits = 32
	case pos <= 64:
		bits = 64
	default:
		panic("unreachable")
	}
	return bits
}

// Gen generates code for unpacking integers created with Pack.
func Gen(w io.Writer, x interface{}, c *Config) error {
	if c == nil {
		c = nullConfig
	}
	_, nBits, err := pack(x, c)
	if err != nil {
		return err
	}

	t := reflect.TypeOf(x)
	if t.Kind() == reflect.Ptr {
		t = t.Elem()
	}
	if c.TypeName == "" {
		c.TypeName = t.Name()
	}
	firstChar := []rune(c.TypeName)[0]

	buf := &bytes.Buffer{}

	print := func(w io.Writer, format string, args ...interface{}) {
		if _, e := fmt.Fprintf(w, format+"\n", args...); e != nil && err == nil {
			err = fmt.Errorf("bitfield: write failed: %v", e)
		}
	}

	pos := uint(0)
	for i := 0; i < t.NumField(); i++ {
		field := t.Field(i)
		f, _ := parseField(field)
		if f.nBits == 0 {
			continue
		}
		shift := nBits - pos - f.nBits
		pos += f.nBits

		retType := field.Type.Name()
		print(buf, "\nfunc (%c %s) %s() %s {", firstChar, c.TypeName, f.name, retType)
		if field.Type.Kind() == reflect.Bool {
			print(buf, "\tconst bit = 1 << %d", shift)
			print(buf, "\treturn %c&bit == bit", firstChar)
		} else {
			print(buf, "\treturn %s((%c >> %d) & %#x)", retType, firstChar, shift, (1<<f.nBits)-1)
		}
		print(buf, "}")
	}

	if c.Package != "" {
		print(w, "// Code generated by golang.org/x/text/internal/gen/bitfield. DO NOT EDIT.\n")
		print(w, "package %s\n", c.Package)
	}

	bits := posToBits(pos)

	print(w, "type %s uint%d", c.TypeName, bits)

	if _, err := io.Copy(w, buf); err != nil {
		return fmt.Errorf("bitfield: write failed: %v", err)
	}
	return nil
}
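As context for review: a minimal sketch (not part of the vendored code) of the Pack/Gen workflow this file provides. The entry struct, its tag values, and the "tables" package name are made up for illustration; the tag format "<bits>,<name>" follows parseField above.

package main

import (
	"log"
	"os"

	"golang.org/x/text/internal/gen/bitfield"
)

// entry is a hypothetical annotated struct. CCC is packed into 4 bits
// behind a generated method named cccc; the bool gets 1 bit and keeps
// its field name as the method name.
type entry struct {
	CCC  uint8 `bitfield:"4,cccc"`
	Flag bool  `bitfield:""`
}

func main() {
	// Pack compacts the tagged fields into one integer (the 5 used
	// bits are rounded up to an 8-bit representation).
	packed, err := bitfield.Pack(entry{CCC: 5, Flag: true}, nil)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("packed: %#x", packed)

	// Gen emits the integer type plus cccc() and Flag() accessors.
	if err := bitfield.Gen(os.Stdout, entry{}, &bitfield.Config{Package: "tables"}); err != nil {
		log.Fatal(err)
	}
}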
371
vendor/golang.org/x/text/internal/gen/code.go
generated
vendored
Normal file
@@ -0,0 +1,371 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gen

import (
	"bytes"
	"encoding/gob"
	"fmt"
	"hash"
	"hash/fnv"
	"io"
	"log"
	"os"
	"reflect"
	"strings"
	"unicode"
	"unicode/utf8"
)

// This file contains utilities for generating code.

// TODO: other write methods like:
// - slices, maps, types, etc.

// CodeWriter is a utility for writing structured code. It computes the content
// hash and size of written content. It ensures there are newlines between
// written code blocks.
type CodeWriter struct {
	buf  bytes.Buffer
	Size int
	Hash hash.Hash32 // content hash
	gob  *gob.Encoder
	// For comments we skip the usual one-line separator if they are followed by
	// a code block.
	skipSep bool
}

func (w *CodeWriter) Write(p []byte) (n int, err error) {
	return w.buf.Write(p)
}

// NewCodeWriter returns a new CodeWriter.
func NewCodeWriter() *CodeWriter {
	h := fnv.New32()
	return &CodeWriter{Hash: h, gob: gob.NewEncoder(h)}
}

// WriteGoFile appends the buffer with the total size of all created structures
// and writes it as a Go file to the given file with the given package name.
func (w *CodeWriter) WriteGoFile(filename, pkg string) {
	f, err := os.Create(filename)
	if err != nil {
		log.Fatalf("Could not create file %s: %v", filename, err)
	}
	defer f.Close()
	if _, err = w.WriteGo(f, pkg, ""); err != nil {
		log.Fatalf("Error writing file %s: %v", filename, err)
	}
}

// WriteVersionedGoFile appends the buffer with the total size of all created
// structures and writes it as a Go file to the given file with the given
// package name and build tags for the current Unicode version.
func (w *CodeWriter) WriteVersionedGoFile(filename, pkg string) {
	tags := buildTags()
	if tags != "" {
		filename = insertVersion(filename, UnicodeVersion())
	}
	f, err := os.Create(filename)
	if err != nil {
		log.Fatalf("Could not create file %s: %v", filename, err)
	}
	defer f.Close()
	if _, err = w.WriteGo(f, pkg, tags); err != nil {
		log.Fatalf("Error writing file %s: %v", filename, err)
	}
}

// WriteGo appends the buffer with the total size of all created structures and
// writes it as a Go file to the given writer with the given package name.
func (w *CodeWriter) WriteGo(out io.Writer, pkg, tags string) (n int, err error) {
	sz := w.Size
	w.WriteComment("Total table size %d bytes (%dKiB); checksum: %X\n", sz, sz/1024, w.Hash.Sum32())
	defer w.buf.Reset()
	return WriteGo(out, pkg, tags, w.buf.Bytes())
}

func (w *CodeWriter) printf(f string, x ...interface{}) {
	fmt.Fprintf(w, f, x...)
}

func (w *CodeWriter) insertSep() {
	if w.skipSep {
		w.skipSep = false
		return
	}
	// Use at least two newlines to ensure a blank space between the previous
	// block. WriteGoFile will remove extraneous newlines.
	w.printf("\n\n")
}

// WriteComment writes a comment block. All line starts are prefixed with "//".
// Initial empty lines are gobbled. The indentation for the first line is
// stripped from consecutive lines.
func (w *CodeWriter) WriteComment(comment string, args ...interface{}) {
	s := fmt.Sprintf(comment, args...)
	s = strings.Trim(s, "\n")

	// Use at least two newlines to ensure a blank space between the previous
	// block. WriteGoFile will remove extraneous newlines.
	w.printf("\n\n// ")
	w.skipSep = true

	// strip first indent level.
	sep := "\n"
	for ; len(s) > 0 && (s[0] == '\t' || s[0] == ' '); s = s[1:] {
		sep += s[:1]
	}

	strings.NewReplacer(sep, "\n// ", "\n", "\n// ").WriteString(w, s)

	w.printf("\n")
}

func (w *CodeWriter) writeSizeInfo(size int) {
	w.printf("// Size: %d bytes\n", size)
}

// WriteConst writes a constant of the given name and value.
func (w *CodeWriter) WriteConst(name string, x interface{}) {
	w.insertSep()
	v := reflect.ValueOf(x)

	switch v.Type().Kind() {
	case reflect.String:
		w.printf("const %s %s = ", name, typeName(x))
		w.WriteString(v.String())
		w.printf("\n")
	default:
		w.printf("const %s = %#v\n", name, x)
	}
}

// WriteVar writes a variable of the given name and value.
func (w *CodeWriter) WriteVar(name string, x interface{}) {
	w.insertSep()
	v := reflect.ValueOf(x)
	oldSize := w.Size
	sz := int(v.Type().Size())
	w.Size += sz

	switch v.Type().Kind() {
	case reflect.String:
		w.printf("var %s %s = ", name, typeName(x))
		w.WriteString(v.String())
	case reflect.Struct:
		w.gob.Encode(x)
		fallthrough
	case reflect.Slice, reflect.Array:
		w.printf("var %s = ", name)
		w.writeValue(v)
		w.writeSizeInfo(w.Size - oldSize)
	default:
		w.printf("var %s %s = ", name, typeName(x))
		w.gob.Encode(x)
		w.writeValue(v)
		w.writeSizeInfo(w.Size - oldSize)
	}
	w.printf("\n")
}

func (w *CodeWriter) writeValue(v reflect.Value) {
	x := v.Interface()
	switch v.Kind() {
	case reflect.String:
		w.WriteString(v.String())
	case reflect.Array:
		// Don't double count: callers of WriteArray count on the size being
		// added, so we need to discount it here.
		w.Size -= int(v.Type().Size())
		w.writeSlice(x, true)
	case reflect.Slice:
		w.writeSlice(x, false)
	case reflect.Struct:
		w.printf("%s{\n", typeName(v.Interface()))
		t := v.Type()
		for i := 0; i < v.NumField(); i++ {
			w.printf("%s: ", t.Field(i).Name)
			w.writeValue(v.Field(i))
			w.printf(",\n")
		}
		w.printf("}")
	default:
		w.printf("%#v", x)
	}
}

// WriteString writes a string literal.
func (w *CodeWriter) WriteString(s string) {
	io.WriteString(w.Hash, s) // content hash
	w.Size += len(s)

	const maxInline = 40
	if len(s) <= maxInline {
		w.printf("%q", s)
		return
	}

	// We will render the string as a multi-line string.
	const maxWidth = 80 - 4 - len(`"`) - len(`" +`)

	// When starting on its own line, go fmt indents line 2+ an extra level.
	n, max := maxWidth, maxWidth-4

	// As per https://golang.org/issue/18078, the compiler has trouble
	// compiling the concatenation of many strings, s0 + s1 + s2 + ... + sN,
	// for large N. We insert redundant, explicit parentheses to work around
	// that, lowering the N at any given step: (s0 + s1 + ... + s63) + (s64 +
	// ... + s127) + etc + (etc + ... + sN).
	explicitParens, extraComment := len(s) > 128*1024, ""
	if explicitParens {
		w.printf(`(`)
		extraComment = "; the redundant, explicit parens are for https://golang.org/issue/18078"
	}

	// Print "" +\n, if a string does not start on its own line.
	b := w.buf.Bytes()
	if p := len(bytes.TrimRight(b, " \t")); p > 0 && b[p-1] != '\n' {
		w.printf("\"\" + // Size: %d bytes%s\n", len(s), extraComment)
		n, max = maxWidth, maxWidth
	}

	w.printf(`"`)

	for sz, p, nLines := 0, 0, 0; p < len(s); {
		var r rune
		r, sz = utf8.DecodeRuneInString(s[p:])
		out := s[p : p+sz]
		chars := 1
		if !unicode.IsPrint(r) || r == utf8.RuneError || r == '"' {
			switch sz {
			case 1:
				out = fmt.Sprintf("\\x%02x", s[p])
			case 2, 3:
				out = fmt.Sprintf("\\u%04x", r)
			case 4:
				out = fmt.Sprintf("\\U%08x", r)
			}
			chars = len(out)
		} else if r == '\\' {
			out = "\\" + string(r)
			chars = 2
		}
		if n -= chars; n < 0 {
			nLines++
			if explicitParens && nLines&63 == 63 {
				w.printf("\") + (\"")
			}
			w.printf("\" +\n\"")
			n = max - len(out)
		}
		w.printf("%s", out)
		p += sz
	}
	w.printf(`"`)
	if explicitParens {
		w.printf(`)`)
	}
}

// WriteSlice writes a slice value.
func (w *CodeWriter) WriteSlice(x interface{}) {
	w.writeSlice(x, false)
}

// WriteArray writes an array value.
func (w *CodeWriter) WriteArray(x interface{}) {
	w.writeSlice(x, true)
}

func (w *CodeWriter) writeSlice(x interface{}, isArray bool) {
	v := reflect.ValueOf(x)
	w.gob.Encode(v.Len())
	w.Size += v.Len() * int(v.Type().Elem().Size())
	name := typeName(x)
	if isArray {
		name = fmt.Sprintf("[%d]%s", v.Len(), name[strings.Index(name, "]")+1:])
	}
	if isArray {
		w.printf("%s{\n", name)
	} else {
		w.printf("%s{ // %d elements\n", name, v.Len())
	}

	switch kind := v.Type().Elem().Kind(); kind {
	case reflect.String:
		for _, s := range x.([]string) {
			w.WriteString(s)
			w.printf(",\n")
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		// nLine and nBlock are the number of elements per line and block.
		nLine, nBlock, format := 8, 64, "%d,"
		switch kind {
		case reflect.Uint8:
			format = "%#02x,"
		case reflect.Uint16:
			format = "%#04x,"
		case reflect.Uint32:
			nLine, nBlock, format = 4, 32, "%#08x,"
		case reflect.Uint, reflect.Uint64:
			nLine, nBlock, format = 4, 32, "%#016x,"
		case reflect.Int8:
			nLine = 16
		}
		n := nLine
		for i := 0; i < v.Len(); i++ {
			if i%nBlock == 0 && v.Len() > nBlock {
				w.printf("// Entry %X - %X\n", i, i+nBlock-1)
			}
			x := v.Index(i).Interface()
			w.gob.Encode(x)
			w.printf(format, x)
			if n--; n == 0 {
				n = nLine
				w.printf("\n")
			}
		}
		w.printf("\n")
	case reflect.Struct:
		zero := reflect.Zero(v.Type().Elem()).Interface()
		for i := 0; i < v.Len(); i++ {
			x := v.Index(i).Interface()
			w.gob.EncodeValue(v)
			if !reflect.DeepEqual(zero, x) {
				line := fmt.Sprintf("%#v,\n", x)
				line = line[strings.IndexByte(line, '{'):]
				w.printf("%d: ", i)
				w.printf(line)
			}
		}
	case reflect.Array:
		for i := 0; i < v.Len(); i++ {
			w.printf("%d: %#v,\n", i, v.Index(i).Interface())
		}
	default:
		panic("gen: slice elem type not supported")
	}
	w.printf("}")
}

// WriteType writes a definition of the type of the given value and returns the
// type name.
func (w *CodeWriter) WriteType(x interface{}) string {
	t := reflect.TypeOf(x)
	w.printf("type %s struct {\n", t.Name())
	for i := 0; i < t.NumField(); i++ {
		w.printf("\t%s %s\n", t.Field(i).Name, t.Field(i).Type)
	}
	w.printf("}\n")
	return t.Name()
}

// typeName returns the name of the Go type of x.
func typeName(x interface{}) string {
	t := reflect.ValueOf(x).Type()
	return strings.Replace(fmt.Sprint(t), "main.", "", 1)
}
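For orientation: a small sketch (not from the repository) of how a generator typically drives CodeWriter. The table contents and the tables.go/tables names are illustrative only.

package main

import "golang.org/x/text/internal/gen"

func main() {
	w := gen.NewCodeWriter()

	// Every Write* call feeds the content hash and size counters, so
	// the emitted file ends with a total-size and checksum comment.
	w.WriteComment("machineTable maps scanner states to actions.")
	w.WriteConst("numStates", 4)
	w.WriteVar("machineTable", []uint8{0x00, 0x01, 0x02, 0x03})

	// WriteGoFile runs the buffer through gofmt into the named file;
	// errors are fatal by design in this package.
	w.WriteGoFile("tables.go", "tables")
}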
333
vendor/golang.org/x/text/internal/gen/gen.go
generated
vendored
Normal file
@@ -0,0 +1,333 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package gen contains common code for the various code generation tools in the
// text repository. Its usage ensures consistency between tools.
//
// This package defines command line flags that are common to most generation
// tools. The flags allow for specifying specific Unicode and CLDR versions
// in the public Unicode data repository (http://www.unicode.org/Public).
//
// A local Unicode data mirror can be set through the flag -local or the
// environment variable UNICODE_DIR. The former takes precedence. The local
// directory should follow the same structure as the public repository.
//
// IANA data can also optionally be mirrored by putting it in the iana directory
// rooted at the top of the local mirror. Beware, though, that IANA data is not
// versioned. So it is up to the developer to use the right version.
package gen // import "golang.org/x/text/internal/gen"

import (
	"bytes"
	"flag"
	"fmt"
	"go/build"
	"go/format"
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"path"
	"path/filepath"
	"strings"
	"sync"
	"unicode"

	"golang.org/x/text/unicode/cldr"
)

var (
	url = flag.String("url",
		"http://www.unicode.org/Public",
		"URL of Unicode database directory")
	iana = flag.String("iana",
		"http://www.iana.org",
		"URL of the IANA repository")
	unicodeVersion = flag.String("unicode",
		getEnv("UNICODE_VERSION", unicode.Version),
		"unicode version to use")
	cldrVersion = flag.String("cldr",
		getEnv("CLDR_VERSION", cldr.Version),
		"cldr version to use")
)

func getEnv(name, def string) string {
	if v := os.Getenv(name); v != "" {
		return v
	}
	return def
}

// Init performs common initialization for a gen command. It parses the flags
// and sets up the standard logging parameters.
func Init() {
	log.SetPrefix("")
	log.SetFlags(log.Lshortfile)
	flag.Parse()
}

const header = `// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.

`

// UnicodeVersion reports the requested Unicode version.
func UnicodeVersion() string {
	return *unicodeVersion
}

// CLDRVersion reports the requested CLDR version.
func CLDRVersion() string {
	return *cldrVersion
}

var tags = []struct{ version, buildTags string }{
	{"10.0.0", "go1.10"},
	{"", "!go1.10"},
}

// buildTags reports the build tags used for the current Unicode version.
func buildTags() string {
	v := UnicodeVersion()
	for _, x := range tags {
		// We should do a numeric comparison, but including the collate package
		// would create an import cycle. We approximate it by assuming that
		// longer version strings are later.
		if len(x.version) < len(v) {
			return x.buildTags
		}
		if len(x.version) == len(v) && x.version <= v {
			return x.buildTags
		}
	}
	return tags[0].buildTags
}

// IsLocal reports whether data files are available locally.
func IsLocal() bool {
	dir, err := localReadmeFile()
	if err != nil {
		return false
	}
	if _, err = os.Stat(dir); err != nil {
		return false
	}
	return true
}

// OpenUCDFile opens the requested UCD file. The file is specified relative to
// the public Unicode root directory. It will call log.Fatal if there are any
// errors.
func OpenUCDFile(file string) io.ReadCloser {
	return openUnicode(path.Join(*unicodeVersion, "ucd", file))
}

// OpenCLDRCoreZip opens the CLDR core zip file. It will call log.Fatal if there
// are any errors.
func OpenCLDRCoreZip() io.ReadCloser {
	return OpenUnicodeFile("cldr", *cldrVersion, "core.zip")
}

// OpenUnicodeFile opens the requested file of the requested category from the
// root of the Unicode data archive. The file is specified relative to the
// public Unicode root directory. If version is "", it will use the default
// Unicode version. It will call log.Fatal if there are any errors.
func OpenUnicodeFile(category, version, file string) io.ReadCloser {
	if version == "" {
		version = UnicodeVersion()
	}
	return openUnicode(path.Join(category, version, file))
}

// OpenIANAFile opens the requested IANA file. The file is specified relative
// to the IANA root, which is typically either http://www.iana.org or the
// iana directory in the local mirror. It will call log.Fatal if there are any
// errors.
func OpenIANAFile(path string) io.ReadCloser {
	return Open(*iana, "iana", path)
}

var (
	dirMutex sync.Mutex
	localDir string
)

const permissions = 0755

func localReadmeFile() (string, error) {
	p, err := build.Import("golang.org/x/text", "", build.FindOnly)
	if err != nil {
		return "", fmt.Errorf("Could not locate package: %v", err)
	}
	return filepath.Join(p.Dir, "DATA", "README"), nil
}

func getLocalDir() string {
	dirMutex.Lock()
	defer dirMutex.Unlock()

	readme, err := localReadmeFile()
	if err != nil {
		log.Fatal(err)
	}
	dir := filepath.Dir(readme)
	if _, err := os.Stat(readme); err != nil {
		if err := os.MkdirAll(dir, permissions); err != nil {
			log.Fatalf("Could not create directory: %v", err)
		}
		ioutil.WriteFile(readme, []byte(readmeTxt), permissions)
	}
	return dir
}

const readmeTxt = `Generated by golang.org/x/text/internal/gen. DO NOT EDIT.

This directory contains downloaded files used to generate the various tables
in the golang.org/x/text subrepo.

Note that the language subtag repo (iana/assignments/language-subtag-registry)
and all other files in the iana subdirectory are not versioned and will need
to be periodically manually updated. The easiest way to do this is to remove
the entire iana directory. This is mostly of concern when updating the language
package.
`

// Open opens subdir/path if a local directory is specified and the file exists,
// where subdir is a directory relative to the local root, or fetches it from
// urlRoot/path otherwise. It will call log.Fatal if there are any errors.
func Open(urlRoot, subdir, path string) io.ReadCloser {
	file := filepath.Join(getLocalDir(), subdir, filepath.FromSlash(path))
	return open(file, urlRoot, path)
}

func openUnicode(path string) io.ReadCloser {
	file := filepath.Join(getLocalDir(), filepath.FromSlash(path))
	return open(file, *url, path)
}

// TODO: automatically periodically update non-versioned files.

func open(file, urlRoot, path string) io.ReadCloser {
	if f, err := os.Open(file); err == nil {
		return f
	}
	r := get(urlRoot, path)
	defer r.Close()
	b, err := ioutil.ReadAll(r)
	if err != nil {
		log.Fatalf("Could not download file: %v", err)
	}
	os.MkdirAll(filepath.Dir(file), permissions)
	if err := ioutil.WriteFile(file, b, permissions); err != nil {
		log.Fatalf("Could not create file: %v", err)
	}
	return ioutil.NopCloser(bytes.NewReader(b))
}

func get(root, path string) io.ReadCloser {
	url := root + "/" + path
	fmt.Printf("Fetching %s...", url)
	defer fmt.Println(" done.")
	resp, err := http.Get(url)
	if err != nil {
		log.Fatalf("HTTP GET: %v", err)
	}
	if resp.StatusCode != 200 {
		log.Fatalf("Bad GET status for %q: %q", url, resp.Status)
	}
	return resp.Body
}

// TODO: use Write*Version in all applicable packages.

// WriteUnicodeVersion writes a constant for the Unicode version from which the
// tables are generated.
func WriteUnicodeVersion(w io.Writer) {
	fmt.Fprintf(w, "// UnicodeVersion is the Unicode version from which the tables in this package are derived.\n")
	fmt.Fprintf(w, "const UnicodeVersion = %q\n\n", UnicodeVersion())
}

// WriteCLDRVersion writes a constant for the CLDR version from which the
// tables are generated.
func WriteCLDRVersion(w io.Writer) {
	fmt.Fprintf(w, "// CLDRVersion is the CLDR version from which the tables in this package are derived.\n")
	fmt.Fprintf(w, "const CLDRVersion = %q\n\n", CLDRVersion())
}

// WriteGoFile prepends a standard file comment and package statement to the
// given bytes, applies gofmt, and writes them to a file with the given name.
// It will call log.Fatal if there are any errors.
func WriteGoFile(filename, pkg string, b []byte) {
	w, err := os.Create(filename)
	if err != nil {
		log.Fatalf("Could not create file %s: %v", filename, err)
	}
	defer w.Close()
	if _, err = WriteGo(w, pkg, "", b); err != nil {
		log.Fatalf("Error writing file %s: %v", filename, err)
	}
}

func insertVersion(filename, version string) string {
	suffix := ".go"
	if strings.HasSuffix(filename, "_test.go") {
		suffix = "_test.go"
	}
	return fmt.Sprint(filename[:len(filename)-len(suffix)], version, suffix)
}

// WriteVersionedGoFile prepends a standard file comment and package statement
// to the given bytes, adds build tags versioning the file for the current
// Unicode version, applies gofmt, and writes them to a file with the given
// name. It will call log.Fatal if there are any errors.
func WriteVersionedGoFile(filename, pkg string, b []byte) {
	tags := buildTags()
	if tags != "" {
		filename = insertVersion(filename, UnicodeVersion())
	}
	w, err := os.Create(filename)
	if err != nil {
		log.Fatalf("Could not create file %s: %v", filename, err)
	}
	defer w.Close()
	if _, err = WriteGo(w, pkg, tags, b); err != nil {
		log.Fatalf("Error writing file %s: %v", filename, err)
	}
}

// WriteGo prepends a standard file comment and package statement to the given
// bytes, applies gofmt, and writes them to w.
func WriteGo(w io.Writer, pkg, tags string, b []byte) (n int, err error) {
	src := []byte(header)
	if tags != "" {
		src = append(src, fmt.Sprintf("// +build %s\n\n", tags)...)
	}
	src = append(src, fmt.Sprintf("package %s\n\n", pkg)...)
	src = append(src, b...)
	formatted, err := format.Source(src)
	if err != nil {
		// Print the generated code even in case of an error so that the
		// returned error can be meaningfully interpreted.
		n, _ = w.Write(src)
		return n, err
	}
	return w.Write(formatted)
}

// Repackage rewrites a Go file from belonging to package main to belonging to
// the given package.
func Repackage(inFile, outFile, pkg string) {
	src, err := ioutil.ReadFile(inFile)
	if err != nil {
		log.Fatalf("reading %s: %v", inFile, err)
	}
	const toDelete = "package main\n\n"
	i := bytes.Index(src, []byte(toDelete))
	if i < 0 {
		log.Fatalf("Could not find %q in %s.", toDelete, inFile)
	}
	w := &bytes.Buffer{}
	w.Write(src[i+len(toDelete):])
	WriteGoFile(outFile, pkg, w.Bytes())
}
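A sketch of a typical generator entry point built on this package; the file choice and the line-counting body are illustrative only.

package main

import (
	"bufio"
	"log"

	"golang.org/x/text/internal/gen"
)

func main() {
	gen.Init() // parse -url/-iana/-unicode/-cldr and set up logging

	// OpenUCDFile transparently downloads into the local DATA mirror
	// on first use and reads from it afterwards; any failure is fatal
	// by design.
	r := gen.OpenUCDFile("UnicodeData.txt")
	defer r.Close()

	n := 0
	s := bufio.NewScanner(r)
	for s.Scan() {
		n++
	}
	if err := s.Err(); err != nil {
		log.Fatal(err)
	}
	log.Printf("UnicodeData.txt: %d entries", n)
}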
27
vendor/golang.org/x/text/internal/triegen/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
58
vendor/golang.org/x/text/internal/triegen/compact.go
generated
vendored
Normal file
@@ -0,0 +1,58 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package triegen

// This file defines Compacter and its implementations.

import "io"

// A Compacter generates an alternative, more space-efficient way to store a
// trie value block. A trie value block holds all possible values for the last
// byte of a UTF-8 encoded rune. Excluding ASCII characters, a trie value block
// always has 64 values, as a UTF-8 encoding ends with a byte in [0x80, 0xC0).
type Compacter interface {
	// Size returns whether the Compacter could encode the given block as well
	// as its size in case it can. len(v) is always 64.
	Size(v []uint64) (sz int, ok bool)

	// Store stores the block using the Compacter's compression method.
	// It returns a handle with which the block can be retrieved.
	// len(v) is always 64.
	Store(v []uint64) uint32

	// Print writes the data structures associated with the given store to w.
	Print(w io.Writer) error

	// Handler returns the name of a function that gets called during trie
	// lookup for blocks generated by the Compacter. The function should be of
	// the form func (n uint32, b byte) uint64, where n is the index returned by
	// the Compacter's Store method and b is the last byte of the UTF-8
	// encoding, where 0x80 <= b < 0xC0, for which to do the lookup in the
	// block.
	Handler() string
}

// simpleCompacter is the default Compacter used by builder. It implements a
// normal trie block.
type simpleCompacter builder

func (b *simpleCompacter) Size([]uint64) (sz int, ok bool) {
	return blockSize * b.ValueSize, true
}

func (b *simpleCompacter) Store(v []uint64) uint32 {
	h := uint32(len(b.ValueBlocks) - blockOffset)
	b.ValueBlocks = append(b.ValueBlocks, v)
	return h
}

func (b *simpleCompacter) Print(io.Writer) error {
	// Structures are printed in print.go.
	return nil
}

func (b *simpleCompacter) Handler() string {
	panic("Handler should be special-cased for this Compacter")
}
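To make the interface contract concrete, here is a toy Compacter (not part of the vendored code) that claims only blocks whose 64 values are all identical, so each such block costs a single stored value. It would be wired in with triegen.Compact(&uniformCompacter{}); the uniformCompacter and lookupUniform names are made up.

package main

import (
	"fmt"
	"io"
)

// uniformCompacter is an illustrative triegen.Compacter. Size answers
// "can I store this block, and how cheaply?"; Store must then accept
// any block for which Size reported ok.
type uniformCompacter struct {
	vals []uint64
}

func (c *uniformCompacter) Size(v []uint64) (sz int, ok bool) {
	for _, x := range v[1:] {
		if x != v[0] {
			return 0, false // mixed block: leave it to another Compacter
		}
	}
	return 8, true // one uint64 per block
}

func (c *uniformCompacter) Store(v []uint64) uint32 {
	h := uint32(len(c.vals)) // handle handed back to the trie builder
	c.vals = append(c.vals, v[0])
	return h
}

func (c *uniformCompacter) Print(w io.Writer) error {
	// Emit the side table that the generated lookup code consults.
	_, err := fmt.Fprintf(w, "var uniformValues = %#v\n", c.vals)
	return err
}

// Handler names a func(n uint32, b byte) uint64 that the generated
// lookupValue switch calls for blocks stored by this Compacter; the
// function itself must be provided alongside the generated tables.
func (c *uniformCompacter) Handler() string {
	return "lookupUniform"
}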
251
vendor/golang.org/x/text/internal/triegen/print.go
generated
vendored
Normal file
@@ -0,0 +1,251 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package triegen

import (
	"bytes"
	"fmt"
	"io"
	"strings"
	"text/template"
)

// print writes all the data structures as well as the code necessary to use
// the trie to w.
func (b *builder) print(w io.Writer) error {
	b.Stats.NValueEntries = len(b.ValueBlocks) * blockSize
	b.Stats.NValueBytes = len(b.ValueBlocks) * blockSize * b.ValueSize
	b.Stats.NIndexEntries = len(b.IndexBlocks) * blockSize
	b.Stats.NIndexBytes = len(b.IndexBlocks) * blockSize * b.IndexSize
	b.Stats.NHandleBytes = len(b.Trie) * 2 * b.IndexSize

	// If we only have one root trie, all starter blocks are at position 0 and
	// we can access the arrays directly.
	if len(b.Trie) == 1 {
		// At this point we cannot refer to the generated tables directly.
		b.ASCIIBlock = b.Name + "Values"
		b.StarterBlock = b.Name + "Index"
	} else {
		// Otherwise we need to have explicit starter indexes in the trie
		// structure.
		b.ASCIIBlock = "t.ascii"
		b.StarterBlock = "t.utf8Start"
	}

	b.SourceType = "[]byte"
	if err := lookupGen.Execute(w, b); err != nil {
		return err
	}

	b.SourceType = "string"
	if err := lookupGen.Execute(w, b); err != nil {
		return err
	}

	if err := trieGen.Execute(w, b); err != nil {
		return err
	}

	for _, c := range b.Compactions {
		if err := c.c.Print(w); err != nil {
			return err
		}
	}

	return nil
}

func printValues(n int, values []uint64) string {
	w := &bytes.Buffer{}
	boff := n * blockSize
	fmt.Fprintf(w, "\t// Block %#x, offset %#x", n, boff)
	var newline bool
	for i, v := range values {
		if i%6 == 0 {
			newline = true
		}
		if v != 0 {
			if newline {
				fmt.Fprintf(w, "\n")
				newline = false
			}
			fmt.Fprintf(w, "\t%#02x:%#04x, ", boff+i, v)
		}
	}
	return w.String()
}

func printIndex(b *builder, nr int, n *node) string {
	w := &bytes.Buffer{}
	boff := nr * blockSize
	fmt.Fprintf(w, "\t// Block %#x, offset %#x", nr, boff)
	var newline bool
	for i, c := range n.children {
		if i%8 == 0 {
			newline = true
		}
		if c != nil {
			v := b.Compactions[c.index.compaction].Offset + uint32(c.index.index)
			if v != 0 {
				if newline {
					fmt.Fprintf(w, "\n")
					newline = false
				}
				fmt.Fprintf(w, "\t%#02x:%#02x, ", boff+i, v)
			}
		}
	}
	return w.String()
}

var (
	trieGen = template.Must(template.New("trie").Funcs(template.FuncMap{
		"printValues": printValues,
		"printIndex":  printIndex,
		"title":       strings.Title,
		"dec":         func(x int) int { return x - 1 },
		"psize": func(n int) string {
			return fmt.Sprintf("%d bytes (%.2f KiB)", n, float64(n)/1024)
		},
	}).Parse(trieTemplate))
	lookupGen = template.Must(template.New("lookup").Parse(lookupTemplate))
)

// TODO: consider the return type of lookup. It could be uint64, even if the
// internal value type is smaller. We will have to verify this with the
// performance of unicode/norm, which is very sensitive to such changes.
const trieTemplate = `{{$b := .}}{{$multi := gt (len .Trie) 1}}
// {{.Name}}Trie. Total size: {{psize .Size}}. Checksum: {{printf "%08x" .Checksum}}.
type {{.Name}}Trie struct { {{if $multi}}
	ascii []{{.ValueType}} // index for ASCII bytes
	utf8Start []{{.IndexType}} // index for UTF-8 bytes >= 0xC0
{{end}}}

func new{{title .Name}}Trie(i int) *{{.Name}}Trie { {{if $multi}}
	h := {{.Name}}TrieHandles[i]
	return &{{.Name}}Trie{ {{.Name}}Values[uint32(h.ascii)<<6:], {{.Name}}Index[uint32(h.multi)<<6:] }
}

type {{.Name}}TrieHandle struct {
	ascii, multi {{.IndexType}}
}

// {{.Name}}TrieHandles: {{len .Trie}} handles, {{.Stats.NHandleBytes}} bytes
var {{.Name}}TrieHandles = [{{len .Trie}}]{{.Name}}TrieHandle{
{{range .Trie}}	{ {{.ASCIIIndex}}, {{.StarterIndex}} }, // {{printf "%08x" .Checksum}}: {{.Name}}
{{end}}}{{else}}
	return &{{.Name}}Trie{}
}
{{end}}
// lookupValue determines the type of block n and looks up the value for b.
func (t *{{.Name}}Trie) lookupValue(n uint32, b byte) {{.ValueType}}{{$last := dec (len .Compactions)}} {
	switch { {{range $i, $c := .Compactions}}
	{{if eq $i $last}}default{{else}}case n < {{$c.Cutoff}}{{end}}:{{if ne $i 0}}
		n -= {{$c.Offset}}{{end}}
		return {{print $b.ValueType}}({{$c.Handler}}){{end}}
	}
}

// {{.Name}}Values: {{len .ValueBlocks}} blocks, {{.Stats.NValueEntries}} entries, {{.Stats.NValueBytes}} bytes
// The third block is the zero block.
var {{.Name}}Values = [{{.Stats.NValueEntries}}]{{.ValueType}} {
{{range $i, $v := .ValueBlocks}}{{printValues $i $v}}
{{end}}}

// {{.Name}}Index: {{len .IndexBlocks}} blocks, {{.Stats.NIndexEntries}} entries, {{.Stats.NIndexBytes}} bytes
// Block 0 is the zero block.
var {{.Name}}Index = [{{.Stats.NIndexEntries}}]{{.IndexType}} {
{{range $i, $v := .IndexBlocks}}{{printIndex $b $i $v}}
{{end}}}
`

// TODO: consider allowing zero-length strings after evaluating performance with
// unicode/norm.
const lookupTemplate = `
// lookup{{if eq .SourceType "string"}}String{{end}} returns the trie value for the first UTF-8 encoding in s and
// the width in bytes of this encoding. The size will be 0 if s does not
// hold enough bytes to complete the encoding. len(s) must be greater than 0.
func (t *{{.Name}}Trie) lookup{{if eq .SourceType "string"}}String{{end}}(s {{.SourceType}}) (v {{.ValueType}}, sz int) {
	c0 := s[0]
	switch {
	case c0 < 0x80: // is ASCII
		return {{.ASCIIBlock}}[c0], 1
	case c0 < 0xC2:
		return 0, 1 // Illegal UTF-8: not a starter, not ASCII.
	case c0 < 0xE0: // 2-byte UTF-8
		if len(s) < 2 {
			return 0, 0
		}
		i := {{.StarterBlock}}[c0]
		c1 := s[1]
		if c1 < 0x80 || 0xC0 <= c1 {
			return 0, 1 // Illegal UTF-8: not a continuation byte.
		}
		return t.lookupValue(uint32(i), c1), 2
	case c0 < 0xF0: // 3-byte UTF-8
		if len(s) < 3 {
			return 0, 0
		}
		i := {{.StarterBlock}}[c0]
		c1 := s[1]
		if c1 < 0x80 || 0xC0 <= c1 {
			return 0, 1 // Illegal UTF-8: not a continuation byte.
		}
		o := uint32(i)<<6 + uint32(c1)
		i = {{.Name}}Index[o]
		c2 := s[2]
		if c2 < 0x80 || 0xC0 <= c2 {
			return 0, 2 // Illegal UTF-8: not a continuation byte.
		}
		return t.lookupValue(uint32(i), c2), 3
	case c0 < 0xF8: // 4-byte UTF-8
		if len(s) < 4 {
			return 0, 0
		}
		i := {{.StarterBlock}}[c0]
		c1 := s[1]
		if c1 < 0x80 || 0xC0 <= c1 {
			return 0, 1 // Illegal UTF-8: not a continuation byte.
		}
		o := uint32(i)<<6 + uint32(c1)
		i = {{.Name}}Index[o]
		c2 := s[2]
		if c2 < 0x80 || 0xC0 <= c2 {
			return 0, 2 // Illegal UTF-8: not a continuation byte.
		}
		o = uint32(i)<<6 + uint32(c2)
		i = {{.Name}}Index[o]
		c3 := s[3]
		if c3 < 0x80 || 0xC0 <= c3 {
			return 0, 3 // Illegal UTF-8: not a continuation byte.
		}
		return t.lookupValue(uint32(i), c3), 4
	}
	// Illegal rune
	return 0, 1
}

// lookup{{if eq .SourceType "string"}}String{{end}}Unsafe returns the trie value for the first UTF-8 encoding in s.
// s must start with a full and valid UTF-8 encoded rune.
func (t *{{.Name}}Trie) lookup{{if eq .SourceType "string"}}String{{end}}Unsafe(s {{.SourceType}}) {{.ValueType}} {
	c0 := s[0]
	if c0 < 0x80 { // is ASCII
		return {{.ASCIIBlock}}[c0]
	}
	i := {{.StarterBlock}}[c0]
	if c0 < 0xE0 { // 2-byte UTF-8
		return t.lookupValue(uint32(i), s[1])
	}
	i = {{.Name}}Index[uint32(i)<<6+uint32(s[1])]
	if c0 < 0xF0 { // 3-byte UTF-8
		return t.lookupValue(uint32(i), s[2])
	}
	i = {{.Name}}Index[uint32(i)<<6+uint32(s[2])]
	if c0 < 0xF8 { // 4-byte UTF-8
		return t.lookupValue(uint32(i), s[3])
	}
	return 0
}
`
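Tying the templates above to the builder API (defined in triegen.go below), a minimal sketch of generating a trie; the trie name "case" and the inserted values are illustrative only.

package main

import (
	"log"
	"os"

	"golang.org/x/text/internal/triegen"
)

func main() {
	t := triegen.NewTrie("case")

	// Only non-zero values need to be inserted; all-zero blocks are
	// shared, which is what keeps the generated tables small.
	t.Insert('A', 1)
	t.Insert('Å', 2) // 2-byte UTF-8, exercises the index blocks

	// Gen emits caseTrie, newCaseTrie, the lookup/lookupString methods
	// from lookupTemplate, and the caseValues/caseIndex tables.
	sz, err := t.Gen(os.Stdout)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("total table size: %d bytes", sz)
}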
494
vendor/golang.org/x/text/internal/triegen/triegen.go
generated
vendored
Normal file
@ -0,0 +1,494 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package triegen implements a code generator for a trie for associating
|
||||
// unsigned integer values with UTF-8 encoded runes.
|
||||
//
|
||||
// Many of the go.text packages use tries for storing per-rune information. A
|
||||
// trie is especially useful if many of the runes have the same value. If this
|
||||
// is the case, many blocks can be expected to be shared allowing for
|
||||
// information on many runes to be stored in little space.
|
||||
//
|
||||
// As most of the lookups are done directly on []byte slices, the tries use the
|
||||
// UTF-8 bytes directly for the lookup. This saves a conversion from UTF-8 to
|
||||
// runes and contributes a little bit to better performance. It also naturally
|
||||
// provides a fast path for ASCII.
|
||||
//
|
||||
// Space is also an issue. There are many code points defined in Unicode and as
|
||||
// a result tables can get quite large. So every byte counts. The triegen
|
||||
// package automatically chooses the smallest integer values to represent the
|
||||
// tables. Compacters allow further compression of the trie by allowing for
|
||||
// alternative representations of individual trie blocks.
|
||||
//
|
||||
// triegen allows generating multiple tries as a single structure. This is
|
||||
// useful when, for example, one wants to generate tries for several languages
|
||||
// that have a lot of values in common. Some existing libraries for
|
||||
// internationalization store all per-language data as a dynamically loadable
|
||||
// chunk. The go.text packages are designed with the assumption that the user
|
||||
// typically wants to compile in support for all supported languages, in line
|
||||
// with the approach common to Go to create a single standalone binary. The
|
||||
// multi-root trie approach can give significant storage savings in this
|
||||
// scenario.
|
||||
//
|
||||
// triegen generates both tables and code. The code is optimized to use the
|
||||
// automatically chosen data types. The following code is generated for a Trie
|
||||
// or multiple Tries named "foo":
|
||||
// - type fooTrie
|
||||
// The trie type.
|
||||
//
|
||||
// - func newFooTrie(x int) *fooTrie
|
||||
// Trie constructor, where x is the index of the trie passed to Gen.
|
||||
//
|
||||
// - func (t *fooTrie) lookup(s []byte) (v uintX, sz int)
|
||||
// The lookup method, where uintX is automatically chosen.
|
||||
//
|
||||
// - func lookupString, lookupUnsafe and lookupStringUnsafe
|
||||
// Variants of the above.
|
||||
//
|
||||
// - var fooValues and fooIndex and any tables generated by Compacters.
|
||||
// The core trie data.
|
||||
//
|
||||
// - var fooTrieHandles
|
||||
// Indexes of starter blocks in case of multiple trie roots.
|
||||
//
|
||||
// It is recommended that users test the generated trie by checking the returned
|
||||
// value for every rune. Such exhaustive tests are possible as the the number of
|
||||
// runes in Unicode is limited.
|
||||
package triegen // import "golang.org/x/text/internal/triegen"
|
||||
|
||||
// TODO: Arguably, the internally optimized data types would not have to be
|
||||
// exposed in the generated API. We could also investigate not generating the
|
||||
// code, but using it through a package. We would have to investigate the impact
|
||||
// on performance of making such change, though. For packages like unicode/norm,
|
||||
// small changes like this could tank performance.
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"hash/crc64"
|
||||
"io"
|
||||
"log"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// builder builds a set of tries for associating values with runes. The set of
|
||||
// tries can share common index and value blocks.
|
||||
type builder struct {
|
||||
Name string
|
||||
|
||||
// ValueType is the type of the trie values looked up.
|
||||
ValueType string
|
||||
|
||||
// ValueSize is the byte size of the ValueType.
|
||||
ValueSize int
|
||||
|
||||
// IndexType is the type of trie index values used for all UTF-8 bytes of
|
||||
// a rune except the last one.
|
||||
IndexType string
|
||||
|
||||
// IndexSize is the byte size of the IndexType.
|
||||
IndexSize int
|
||||
|
||||
// SourceType is used when generating the lookup functions. If the user
|
||||
// requests StringSupport, all lookup functions will be generated for
|
||||
// string input as well.
|
||||
SourceType string
|
||||
|
||||
Trie []*Trie
|
||||
|
||||
IndexBlocks []*node
|
||||
ValueBlocks [][]uint64
|
||||
Compactions []compaction
|
||||
Checksum uint64
|
||||
|
||||
ASCIIBlock string
|
||||
StarterBlock string
|
||||
|
||||
indexBlockIdx map[uint64]int
|
||||
valueBlockIdx map[uint64]nodeIndex
|
||||
asciiBlockIdx map[uint64]int
|
||||
|
||||
// Stats are used to fill out the template.
|
||||
Stats struct {
|
||||
NValueEntries int
|
||||
NValueBytes int
|
||||
NIndexEntries int
|
||||
NIndexBytes int
|
||||
NHandleBytes int
|
||||
}
|
||||
|
||||
err error
|
||||
}
|
||||
|
||||
// A nodeIndex encodes the index of a node, which is defined by the compaction
|
||||
// which stores it and an index within the compaction. For internal nodes, the
|
||||
// compaction is always 0.
|
||||
type nodeIndex struct {
|
||||
compaction int
|
||||
index int
|
||||
}
|
||||
|
||||
// compaction keeps track of stats used for the compaction.
|
||||
type compaction struct {
|
||||
c Compacter
|
||||
blocks []*node
|
||||
maxHandle uint32
|
||||
totalSize int
|
||||
|
||||
// Used by template-based generator and thus exported.
|
||||
Cutoff uint32
|
||||
Offset uint32
|
||||
Handler string
|
||||
}
|
||||
|
||||
func (b *builder) setError(err error) {
|
||||
if b.err == nil {
|
||||
b.err = err
|
||||
}
|
||||
}
|
||||
|
||||
// An Option can be passed to Gen.
|
||||
type Option func(b *builder) error
|
||||
|
||||
// Compact configures the trie generator to use the given Compacter.
|
||||
func Compact(c Compacter) Option {
|
||||
return func(b *builder) error {
|
||||
b.Compactions = append(b.Compactions, compaction{
|
||||
c: c,
|
||||
Handler: c.Handler() + "(n, b)"})
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Gen writes Go code for a shared trie lookup structure to w for the given
|
||||
// Tries. The generated trie type will be called nameTrie. newNameTrie(x) will
|
||||
// return the *nameTrie for tries[x]. A value can be looked up by using one of
|
||||
// the various lookup methods defined on nameTrie. It returns the table size of
|
||||
// the generated trie.
|
||||
func Gen(w io.Writer, name string, tries []*Trie, opts ...Option) (sz int, err error) {
|
||||
// The index contains two dummy blocks, followed by the zero block. The zero
|
||||
// block is at offset 0x80, so that the offset for the zero block for
|
||||
// continuation bytes is 0.
|
||||
b := &builder{
|
||||
Name: name,
|
||||
Trie: tries,
|
||||
IndexBlocks: []*node{{}, {}, {}},
|
||||
Compactions: []compaction{{
|
||||
Handler: name + "Values[n<<6+uint32(b)]",
|
||||
}},
|
||||
// The 0 key in indexBlockIdx and valueBlockIdx is the hash of the zero
|
||||
// block.
|
||||
indexBlockIdx: map[uint64]int{0: 0},
|
||||
valueBlockIdx: map[uint64]nodeIndex{0: {}},
|
||||
asciiBlockIdx: map[uint64]int{},
|
||||
}
|
||||
b.Compactions[0].c = (*simpleCompacter)(b)
|
||||
|
||||
for _, f := range opts {
|
||||
if err := f(b); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
}
|
||||
b.build()
|
||||
if b.err != nil {
|
||||
return 0, b.err
|
||||
}
|
||||
if err = b.print(w); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return b.Size(), nil
|
||||
}
|
||||
|
||||
// A Trie represents a single root node of a trie. A builder may build several
|
||||
// overlapping tries at once.
|
||||
type Trie struct {
|
||||
root *node
|
||||
|
||||
hiddenTrie
|
||||
}
|
||||
|
||||
// hiddenTrie contains values we want to be visible to the template generator,
|
||||
// but hidden from the API documentation.
|
||||
type hiddenTrie struct {
|
||||
Name string
|
||||
Checksum uint64
|
||||
ASCIIIndex int
|
||||
StarterIndex int
|
||||
}
|
||||
|
||||
// NewTrie returns a new trie root.
|
||||
func NewTrie(name string) *Trie {
|
||||
return &Trie{
|
||||
&node{
|
||||
children: make([]*node, blockSize),
|
||||
values: make([]uint64, utf8.RuneSelf),
|
||||
},
|
||||
hiddenTrie{Name: name},
|
||||
}
|
||||
}
|
||||
|
||||
// Gen is a convenience wrapper around the Gen func passing t as the only trie
|
||||
// and uses the name passed to NewTrie. It returns the size of the generated
|
||||
// tables.
|
||||
func (t *Trie) Gen(w io.Writer, opts ...Option) (sz int, err error) {
|
||||
return Gen(w, t.Name, []*Trie{t}, opts...)
|
||||
}
|
||||
|
||||
// node is a node of the intermediate trie structure.
type node struct {
	// children holds this node's children. It is always of length 64.
	// A child node may be nil.
	children []*node

	// values contains the values of this node. If it is non-nil, this node is
	// either a root or leaf node:
	// For root nodes, len(values) == 128 and it maps the bytes in [0x00, 0x7F].
	// For leaf nodes, len(values) == 64 and it maps the bytes in [0x80, 0xBF].
	values []uint64

	index nodeIndex
}

// Insert associates value with the given rune. Insert will panic if a non-zero
// value is passed for an invalid rune.
func (t *Trie) Insert(r rune, value uint64) {
	if value == 0 {
		return
	}
	s := string(r)
	if []rune(s)[0] != r && value != 0 {
		// Note: The UCD tables will always assign what amounts to a zero value
		// to a surrogate. Allowing a zero value for an illegal rune allows
		// users to iterate over [0..MaxRune] without having to explicitly
		// exclude surrogates, which would be tedious.
		panic(fmt.Sprintf("triegen: non-zero value for invalid rune %U", r))
	}
	if len(s) == 1 {
		// It is a root node value (ASCII).
		t.root.values[s[0]] = value
		return
	}

	n := t.root
	for ; len(s) > 1; s = s[1:] {
		if n.children == nil {
			n.children = make([]*node, blockSize)
		}
		p := s[0] % blockSize
		c := n.children[p]
		if c == nil {
			c = &node{}
			n.children[p] = c
		}
		if len(s) > 2 && c.values != nil {
			log.Fatalf("triegen: insert(%U): found internal node with values", r)
		}
		n = c
	}
	if n.values == nil {
		n.values = make([]uint64, blockSize)
	}
	if n.children != nil {
		log.Fatalf("triegen: insert(%U): found leaf node that also has child nodes", r)
	}
	n.values[s[0]-0x80] = value
}

// Size returns the number of bytes the generated trie will take to store. It
// needs to be exported as it is used in the templates.
func (b *builder) Size() int {
	// Index blocks.
	sz := len(b.IndexBlocks) * blockSize * b.IndexSize

	// Skip the first compaction, which represents the normal value blocks, as
	// its totalSize does not account for the ASCII blocks, which are managed
	// separately.
	sz += len(b.ValueBlocks) * blockSize * b.ValueSize
	for _, c := range b.Compactions[1:] {
		sz += c.totalSize
	}

	// TODO: this computation does not account for the fixed overhead of using
	// a compaction, either code or data. As for data, though, the typical
	// overhead of data is in the order of bytes (2 bytes for cases). Further,
	// the savings of using a compaction should anyway be substantial for it to
	// be worth it.

	// For multi-root tries, we also need to account for the handles.
	if len(b.Trie) > 1 {
		sz += 2 * b.IndexSize * len(b.Trie)
	}
	return sz
}

func (b *builder) build() {
	// Compute the sizes of the values.
	var vmax uint64
	for _, t := range b.Trie {
		vmax = maxValue(t.root, vmax)
	}
	b.ValueType, b.ValueSize = getIntType(vmax)

	// Compute all block allocations.
	// TODO: first compute the ASCII blocks for all tries and then the other
	// nodes. ASCII blocks are more restricted in placement, as they require two
	// blocks to be placed consecutively. Processing them first may improve
	// sharing (at least one zero block can be expected to be saved).
	for _, t := range b.Trie {
		b.Checksum += b.buildTrie(t)
	}

	// Compute the offsets for all the Compacters.
	offset := uint32(0)
	for i := range b.Compactions {
		c := &b.Compactions[i]
		c.Offset = offset
		offset += c.maxHandle + 1
		c.Cutoff = offset
	}

	// Compute the sizes of indexes.
	// TODO: different byte positions could have different sizes. So far we have
	// not found a case where this is beneficial.
	imax := uint64(b.Compactions[len(b.Compactions)-1].Cutoff)
	for _, ib := range b.IndexBlocks {
		if x := uint64(ib.index.index); x > imax {
			imax = x
		}
	}
	b.IndexType, b.IndexSize = getIntType(imax)
}

func maxValue(n *node, max uint64) uint64 {
	if n == nil {
		return max
	}
	for _, c := range n.children {
		max = maxValue(c, max)
	}
	for _, v := range n.values {
		if max < v {
			max = v
		}
	}
	return max
}

func getIntType(v uint64) (string, int) {
	switch {
	case v < 1<<8:
		return "uint8", 1
	case v < 1<<16:
		return "uint16", 2
	case v < 1<<32:
		return "uint32", 4
	}
	return "uint64", 8
}

const (
	blockSize = 64

	// Subtract two blocks to offset 0x80, the first continuation byte.
	blockOffset = 2

	// Subtract three blocks to offset 0xC0, the first non-ASCII starter.
	rootBlockOffset = 3
)

var crcTable = crc64.MakeTable(crc64.ISO)

func (b *builder) buildTrie(t *Trie) uint64 {
	n := t.root

	// Get the ASCII offset. For the first trie, the ASCII block will be at
	// position 0.
	hasher := crc64.New(crcTable)
	binary.Write(hasher, binary.BigEndian, n.values)
	hash := hasher.Sum64()

	v, ok := b.asciiBlockIdx[hash]
	if !ok {
		v = len(b.ValueBlocks)
		b.asciiBlockIdx[hash] = v

		b.ValueBlocks = append(b.ValueBlocks, n.values[:blockSize], n.values[blockSize:])
		if v == 0 {
			// Add the zero block at position 2 so that it will be assigned a
			// zero reference in the lookup blocks.
			// TODO: always do this? This would allow us to remove a check from
			// the trie lookup, but at the expense of extra space. Analyze
			// performance for unicode/norm.
			b.ValueBlocks = append(b.ValueBlocks, make([]uint64, blockSize))
		}
	}
	t.ASCIIIndex = v

	// Compute remaining offsets.
	t.Checksum = b.computeOffsets(n, true)
	// We already subtracted the normal blockOffset from the index. Subtract the
	// difference for starter bytes.
	t.StarterIndex = n.index.index - (rootBlockOffset - blockOffset)
	return t.Checksum
}

func (b *builder) computeOffsets(n *node, root bool) uint64 {
	// For the first trie, the root lookup block will be at position 3, which is
	// the offset for UTF-8 non-ASCII starter bytes.
	first := len(b.IndexBlocks) == rootBlockOffset
	if first {
		b.IndexBlocks = append(b.IndexBlocks, n)
	}

	// We special-case the cases where all values recursively are 0. This allows
	// for the use of a zero block to which all such values can be directed.
	hash := uint64(0)
	if n.children != nil || n.values != nil {
		hasher := crc64.New(crcTable)
		for _, c := range n.children {
			var v uint64
			if c != nil {
				v = b.computeOffsets(c, false)
			}
			binary.Write(hasher, binary.BigEndian, v)
		}
		binary.Write(hasher, binary.BigEndian, n.values)
		hash = hasher.Sum64()
	}

	if first {
		b.indexBlockIdx[hash] = rootBlockOffset - blockOffset
	}

	// Compacters don't apply to internal nodes.
	if n.children != nil {
		v, ok := b.indexBlockIdx[hash]
		if !ok {
			v = len(b.IndexBlocks) - blockOffset
			b.IndexBlocks = append(b.IndexBlocks, n)
			b.indexBlockIdx[hash] = v
		}
		n.index = nodeIndex{0, v}
	} else {
		h, ok := b.valueBlockIdx[hash]
		if !ok {
			bestI, bestSize := 0, blockSize*b.ValueSize
			for i, c := range b.Compactions[1:] {
				if sz, ok := c.c.Size(n.values); ok && bestSize > sz {
					bestI, bestSize = i+1, sz
				}
			}
			c := &b.Compactions[bestI]
			c.totalSize += bestSize
			v := c.c.Store(n.values)
			if c.maxHandle < v {
				c.maxHandle = v
			}
			h = nodeIndex{bestI, int(v)}
			b.valueBlockIdx[hash] = h
		}
		n.index = h
	}
	return hash
}
27
vendor/golang.org/x/text/internal/ucd/LICENSE
generated
vendored
Normal file
@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
371
vendor/golang.org/x/text/internal/ucd/ucd.go
generated
vendored
Normal file
@ -0,0 +1,371 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package ucd provides a parser for Unicode Character Database files, the
// format of which is defined in http://www.unicode.org/reports/tr44/. See
// http://www.unicode.org/Public/UCD/latest/ucd/ for example files.
//
// It currently does not support substitutions of missing fields.
package ucd // import "golang.org/x/text/internal/ucd"

import (
	"bufio"
	"errors"
	"fmt"
	"io"
	"log"
	"regexp"
	"strconv"
	"strings"
)

// UnicodeData.txt fields.
const (
	CodePoint = iota
	Name
	GeneralCategory
	CanonicalCombiningClass
	BidiClass
	DecompMapping
	DecimalValue
	DigitValue
	NumericValue
	BidiMirrored
	Unicode1Name
	ISOComment
	SimpleUppercaseMapping
	SimpleLowercaseMapping
	SimpleTitlecaseMapping
)

// Parse calls f for each entry in the given reader of a UCD file. It will close
// the reader upon return. It will call log.Fatal if any error occurs.
//
// This implements the most common usage pattern of using Parser.
func Parse(r io.ReadCloser, f func(p *Parser)) {
	defer r.Close()

	p := New(r)
	for p.Next() {
		f(p)
	}
	if err := p.Err(); err != nil {
		r.Close() // os.Exit will cause defers not to be called.
		log.Fatal(err)
	}
}
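
As a concrete illustration of that usage pattern, here is a hypothetical program that streams UnicodeData.txt and collects the general category of each code point. The file name and map are assumptions for the sketch:

package main

import (
	"fmt"
	"os"

	"golang.org/x/text/internal/ucd"
)

func main() {
	f, err := os.Open("UnicodeData.txt")
	if err != nil {
		panic(err)
	}
	cats := map[rune]string{}
	// Parse closes f and calls log.Fatal on any parse error.
	ucd.Parse(f, func(p *ucd.Parser) {
		cats[p.Rune(ucd.CodePoint)] = p.String(ucd.GeneralCategory)
	})
	fmt.Println(len(cats), "code points")
}
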
// An Option is used to configure a Parser.
type Option func(p *Parser)

func keepRanges(p *Parser) {
	p.keepRanges = true
}

var (
	// KeepRanges prevents the expansion of ranges. The raw ranges can be
	// obtained by calling Range(0) on the parser.
	KeepRanges Option = keepRanges
)

// The Part option registers a handler for lines starting with a '@'. The text
// after a '@' is available as the first field. Comments are handled as usual.
func Part(f func(p *Parser)) Option {
	return func(p *Parser) {
		p.partHandler = f
	}
}

// The CommentHandler option passes comments that are on a line by themselves to
// a given handler.
func CommentHandler(f func(s string)) Option {
	return func(p *Parser) {
		p.commentHandler = f
	}
}

// A Parser parses Unicode Character Database (UCD) files.
type Parser struct {
	scanner *bufio.Scanner

	keepRanges bool // Don't expand rune ranges in field 0.

	err     error
	comment string
	field   []string
	// parsedRange is needed in case Range(0) is called more than once for one
	// field. In some cases this requires scanning ahead.
	line                 int
	parsedRange          bool
	rangeStart, rangeEnd rune

	partHandler    func(p *Parser)
	commentHandler func(s string)
}

func (p *Parser) setError(err error, msg string) {
	if p.err == nil && err != nil {
		if msg == "" {
			p.err = fmt.Errorf("ucd:line:%d: %v", p.line, err)
		} else {
			p.err = fmt.Errorf("ucd:line:%d:%s: %v", p.line, msg, err)
		}
	}
}

func (p *Parser) getField(i int) string {
	if i >= len(p.field) {
		return ""
	}
	return p.field[i]
}

// Err returns a non-nil error if any error occurred during parsing.
func (p *Parser) Err() error {
	return p.err
}

// New returns a Parser for the given Reader.
func New(r io.Reader, o ...Option) *Parser {
	p := &Parser{
		scanner: bufio.NewScanner(r),
	}
	for _, f := range o {
		f(p)
	}
	return p
}

// Next parses the next line in the file. It returns true if a line was parsed
// and false if it reached the end of the file.
func (p *Parser) Next() bool {
	if !p.keepRanges && p.rangeStart < p.rangeEnd {
		p.rangeStart++
		return true
	}
	p.comment = ""
	p.field = p.field[:0]
	p.parsedRange = false

	for p.scanner.Scan() && p.err == nil {
		p.line++
		s := p.scanner.Text()
		if s == "" {
			continue
		}
		if s[0] == '#' {
			if p.commentHandler != nil {
				p.commentHandler(strings.TrimSpace(s[1:]))
			}
			continue
		}

		// Parse line
		if i := strings.IndexByte(s, '#'); i != -1 {
			p.comment = strings.TrimSpace(s[i+1:])
			s = s[:i]
		}
		if s[0] == '@' {
			if p.partHandler != nil {
				p.field = append(p.field, strings.TrimSpace(s[1:]))
				p.partHandler(p)
				p.field = p.field[:0]
			}
			p.comment = ""
			continue
		}
		for {
			i := strings.IndexByte(s, ';')
			if i == -1 {
				p.field = append(p.field, strings.TrimSpace(s))
				break
			}
			p.field = append(p.field, strings.TrimSpace(s[:i]))
			s = s[i+1:]
		}
		if !p.keepRanges {
			p.rangeStart, p.rangeEnd = p.getRange(0)
		}
		return true
	}
	p.setError(p.scanner.Err(), "scanner failed")
	return false
}

func parseRune(b string) (rune, error) {
	if len(b) > 2 && b[0] == 'U' && b[1] == '+' {
		b = b[2:]
	}
	x, err := strconv.ParseUint(b, 16, 32)
	return rune(x), err
}

func (p *Parser) parseRune(s string) rune {
	x, err := parseRune(s)
	p.setError(err, "failed to parse rune")
	return x
}

// Rune parses and returns field i as a rune.
func (p *Parser) Rune(i int) rune {
	if i > 0 || p.keepRanges {
		return p.parseRune(p.getField(i))
	}
	return p.rangeStart
}

// Runes interprets and returns field i as a sequence of runes.
func (p *Parser) Runes(i int) (runes []rune) {
	add := func(s string) {
		if s = strings.TrimSpace(s); len(s) > 0 {
			runes = append(runes, p.parseRune(s))
		}
	}
	for b := p.getField(i); ; {
		i := strings.IndexByte(b, ' ')
		if i == -1 {
			add(b)
			break
		}
		add(b[:i])
		b = b[i+1:]
	}
	return
}

var (
	errIncorrectLegacyRange = errors.New("ucd: unmatched <* First>")

	// reRange matches one line of a legacy rune range.
	reRange = regexp.MustCompile("^([0-9A-F]*);<([^,]*), ([^>]*)>(.*)$")
)

// Range parses and returns field i as a rune range. A range is inclusive at
// both ends. If the field only has one rune, first and last will be identical.
// It supports the legacy format for ranges used in UnicodeData.txt.
func (p *Parser) Range(i int) (first, last rune) {
	if !p.keepRanges {
		return p.rangeStart, p.rangeStart
	}
	return p.getRange(i)
}

func (p *Parser) getRange(i int) (first, last rune) {
	b := p.getField(i)
	if k := strings.Index(b, ".."); k != -1 {
		return p.parseRune(b[:k]), p.parseRune(b[k+2:])
	}
	// The first field may not be a rune, in which case we may ignore any error
	// and set the range as 0..0.
	x, err := parseRune(b)
	if err != nil {
		// Disable range parsing henceforth. This ensures that an error will be
		// returned if the user subsequently tries to parse this field as
		// a Rune.
		p.keepRanges = true
	}
	// Special case for UnicodeData that was retained for backwards compatibility.
	if i == 0 && len(p.field) > 1 && strings.HasSuffix(p.field[1], "First>") {
		if p.parsedRange {
			return p.rangeStart, p.rangeEnd
		}
		mf := reRange.FindStringSubmatch(p.scanner.Text())
		p.line++
		if mf == nil || !p.scanner.Scan() {
			p.setError(errIncorrectLegacyRange, "")
			return x, x
		}
		// Using Bytes would be more efficient here, but Text is a lot easier
		// and this is not a frequent case.
		ml := reRange.FindStringSubmatch(p.scanner.Text())
		if ml == nil || mf[2] != ml[2] || ml[3] != "Last" || mf[4] != ml[4] {
			p.setError(errIncorrectLegacyRange, "")
			return x, x
		}
		p.rangeStart, p.rangeEnd = x, p.parseRune(p.scanner.Text()[:len(ml[1])])
		p.parsedRange = true
		return p.rangeStart, p.rangeEnd
	}
	return x, x
}
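
A sketch of reading raw ranges with the KeepRanges option, for a file such as Blocks.txt (the function name and file are illustrative; "fmt", "io", and this package are assumed imported):

// printBlocks prints each unexpanded range in a UCD range file.
func printBlocks(r io.Reader) error {
	p := ucd.New(r, ucd.KeepRanges)
	for p.Next() {
		// With KeepRanges, field 0 stays unexpanded and Range(0)
		// yields the inclusive bounds directly.
		first, last := p.Range(0)
		fmt.Printf("%U..%U: %s\n", first, last, p.String(1))
	}
	return p.Err()
}
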
// bools recognizes all valid UCD boolean values.
var bools = map[string]bool{
	"":      false,
	"N":     false,
	"No":    false,
	"F":     false,
	"False": false,
	"Y":     true,
	"Yes":   true,
	"T":     true,
	"True":  true,
}

// Bool parses and returns field i as a boolean value.
func (p *Parser) Bool(i int) bool {
	// A direct map lookup; behavior is identical to scanning the map.
	if v, ok := bools[p.getField(i)]; ok {
		return v
	}
	p.setError(strconv.ErrSyntax, "error parsing bool")
	return false
}

// Int parses and returns field i as an integer value.
func (p *Parser) Int(i int) int {
	x, err := strconv.ParseInt(p.getField(i), 10, 64)
	p.setError(err, "error parsing int")
	return int(x)
}

// Uint parses and returns field i as an unsigned integer value.
func (p *Parser) Uint(i int) uint {
	x, err := strconv.ParseUint(p.getField(i), 10, 64)
	p.setError(err, "error parsing uint")
	return uint(x)
}

// Float parses and returns field i as a decimal value.
func (p *Parser) Float(i int) float64 {
	x, err := strconv.ParseFloat(p.getField(i), 64)
	p.setError(err, "error parsing float")
	return x
}

// String parses and returns field i as a string value.
func (p *Parser) String(i int) string {
	return p.getField(i)
}

// Strings parses and returns field i as a space-separated list of strings.
func (p *Parser) Strings(i int) []string {
	ss := strings.Split(p.getField(i), " ")
	for i, s := range ss {
		ss[i] = strings.TrimSpace(s)
	}
	return ss
}

// Comment returns the comments for the current line.
func (p *Parser) Comment() string {
	return p.comment
}

var errUndefinedEnum = errors.New("ucd: undefined enum value")

// Enum interprets and returns field i as a value that must be one of the values
// in enum.
func (p *Parser) Enum(i int, enum ...string) string {
	f := p.getField(i)
	for _, s := range enum {
		if f == s {
			return s
		}
	}
	p.setError(errUndefinedEnum, "error parsing enum")
	return ""
}
27
vendor/golang.org/x/text/unicode/cldr/LICENSE
generated
vendored
Normal file
@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
105
vendor/golang.org/x/text/unicode/cldr/base.go
generated
vendored
Normal file
@ -0,0 +1,105 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cldr

import (
	"encoding/xml"
	"regexp"
	"strconv"
)

// Elem is implemented by every XML element.
type Elem interface {
	setEnclosing(Elem)
	setName(string)
	enclosing() Elem

	GetCommon() *Common
}

type hidden struct {
	CharData string `xml:",chardata"`
	Alias    *struct {
		Common
		Source string `xml:"source,attr"`
		Path   string `xml:"path,attr"`
	} `xml:"alias"`
	Def *struct {
		Common
		Choice string `xml:"choice,attr,omitempty"`
		Type   string `xml:"type,attr,omitempty"`
	} `xml:"default"`
}

// Common holds several of the most common attributes and sub elements
// of an XML element.
type Common struct {
	XMLName         xml.Name
	name            string
	enclElem        Elem
	Type            string `xml:"type,attr,omitempty"`
	Reference       string `xml:"reference,attr,omitempty"`
	Alt             string `xml:"alt,attr,omitempty"`
	ValidSubLocales string `xml:"validSubLocales,attr,omitempty"`
	Draft           string `xml:"draft,attr,omitempty"`
	hidden
}

// Default returns the default type to select from the enclosed list
// or "" if no default value is specified.
func (e *Common) Default() string {
	if e.Def == nil {
		return ""
	}
	if e.Def.Choice != "" {
		return e.Def.Choice
	} else if e.Def.Type != "" {
		// Type is still used by the default element in collation.
		return e.Def.Type
	}
	return ""
}

// Element returns the XML element name.
func (e *Common) Element() string {
	return e.name
}

// GetCommon returns e. It is provided such that Common implements Elem.
func (e *Common) GetCommon() *Common {
	return e
}

// Data returns the character data accumulated for this element.
func (e *Common) Data() string {
	e.CharData = charRe.ReplaceAllStringFunc(e.CharData, replaceUnicode)
	return e.CharData
}

func (e *Common) setName(s string) {
	e.name = s
}

func (e *Common) enclosing() Elem {
	return e.enclElem
}

func (e *Common) setEnclosing(en Elem) {
	e.enclElem = en
}

// charRe matches the escape sequences that replaceUnicode can decode.
var charRe = regexp.MustCompile(`&#x[0-9a-fA-F]*;|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\x[0-9a-fA-F]{2}|\\[0-7]{3}|\\[abtnvfr]`)

// replaceUnicode converts hexadecimal Unicode codepoint notations to a one-rune string.
// It assumes the input string is correctly formatted.
func replaceUnicode(s string) string {
	if s[1] == '#' {
		r, _ := strconv.ParseInt(s[3:len(s)-1], 16, 32)
		return string(r)
	}
	r, _, _, _ := strconv.UnquoteChar(s, 0)
	return string(r)
}
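
A quick illustration of the two decoding paths in replaceUnicode (an in-package sketch; the function and inputs here are examples, and "fmt" would need to be imported):

func exampleReplaceUnicode() {
	fmt.Println(replaceUnicode("&#x41;")) // "A": XML numeric character reference path
	fmt.Println(replaceUnicode(`\u0042`)) // "B": Go-style escape via strconv.UnquoteChar
}
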
130
vendor/golang.org/x/text/unicode/cldr/cldr.go
generated
vendored
Normal file
@ -0,0 +1,130 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:generate go run makexml.go -output xml.go

// Package cldr provides a parser for LDML and related XML formats.
// This package is intended to be used by the table generation tools
// for the various internationalization-related packages.
// As the XML types are generated from the CLDR DTD, and as the CLDR standard
// is periodically amended, this package may change considerably over time.
// This mostly means that data may appear and disappear between versions.
// That is, old code should keep compiling for newer versions, but data
// may have moved or changed.
// CLDR version 22 is the first version supported by this package.
// Older versions may not work.
package cldr // import "golang.org/x/text/unicode/cldr"

import (
	"fmt"
	"sort"
)

// CLDR provides access to parsed data of the Unicode Common Locale Data Repository.
type CLDR struct {
	parent   map[string][]string
	locale   map[string]*LDML
	resolved map[string]*LDML
	bcp47    *LDMLBCP47
	supp     *SupplementalData
}

func makeCLDR() *CLDR {
	return &CLDR{
		parent:   make(map[string][]string),
		locale:   make(map[string]*LDML),
		resolved: make(map[string]*LDML),
		bcp47:    &LDMLBCP47{},
		supp:     &SupplementalData{},
	}
}

// BCP47 returns the parsed BCP47 LDML data. If no such data was parsed, nil is
// returned.
func (cldr *CLDR) BCP47() *LDMLBCP47 {
	return cldr.bcp47
}

// Draft indicates the draft level of an element.
type Draft int

const (
	Approved Draft = iota
	Contributed
	Provisional
	Unconfirmed
)

var drafts = []string{"unconfirmed", "provisional", "contributed", "approved", ""}

// ParseDraft returns the Draft value corresponding to the given string. The
// empty string corresponds to Approved.
func ParseDraft(level string) (Draft, error) {
	if level == "" {
		return Approved, nil
	}
	for i, s := range drafts {
		if level == s {
			return Unconfirmed - Draft(i), nil
		}
	}
	return Approved, fmt.Errorf("cldr: unknown draft level %q", level)
}

func (d Draft) String() string {
	return drafts[len(drafts)-2-int(d)]
}

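A small sketch of the round trip between ParseDraft and Draft.String (the function is illustrative and assumes this package is importable as "golang.org/x/text/unicode/cldr" with "fmt" imported):

func draftDemo() {
	d, err := cldr.ParseDraft("provisional")
	fmt.Println(d, err) // provisional <nil>

	d, _ = cldr.ParseDraft("") // the empty string maps to Approved
	fmt.Println(d == cldr.Approved) // true
}
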
// SetDraftLevel sets which draft levels to include in the evaluated LDML.
// Any draft element for which the draft level is higher than lev will be excluded.
// If multiple draft levels are available for a single element, the one with the
// lowest draft level will be selected, unless preferDraft is true, in which case
// the highest draft will be chosen.
// It is assumed that the underlying LDML is canonicalized.
func (cldr *CLDR) SetDraftLevel(lev Draft, preferDraft bool) {
	// TODO: implement
	cldr.resolved = make(map[string]*LDML)
}

// RawLDML returns the LDML XML for loc in unresolved form.
// loc must be one of the strings returned by Locales.
func (cldr *CLDR) RawLDML(loc string) *LDML {
	return cldr.locale[loc]
}

// LDML returns the fully resolved LDML XML for loc, which must be one of
// the strings returned by Locales.
func (cldr *CLDR) LDML(loc string) (*LDML, error) {
	return cldr.resolve(loc)
}

// Supplemental returns the parsed supplemental data. If no such data was parsed,
// nil is returned.
func (cldr *CLDR) Supplemental() *SupplementalData {
	return cldr.supp
}

// Locales returns the locales for which there exist files.
// Valid sublocales for which there is no file are not included.
// The root locale is always sorted first.
func (cldr *CLDR) Locales() []string {
	loc := []string{"root"}
	hasRoot := false
	for l := range cldr.locale {
		if l == "root" {
			hasRoot = true
			continue
		}
		loc = append(loc, l)
	}
	sort.Strings(loc[1:])
	if !hasRoot {
		return loc[1:]
	}
	return loc
}

// Get returns the element designated by the XPath path, relative to e.
func Get(e Elem, path string) (res Elem, err error) {
	return walkXPath(e, path)
}
359
vendor/golang.org/x/text/unicode/cldr/collate.go
generated
vendored
Normal file
@ -0,0 +1,359 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cldr

import (
	"bufio"
	"encoding/xml"
	"errors"
	"fmt"
	"strconv"
	"strings"
	"unicode"
	"unicode/utf8"
)

// RuleProcessor can be passed to Collator's Process method, which
// parses the rules and calls the respective method for each rule found.
type RuleProcessor interface {
	Reset(anchor string, before int) error
	Insert(level int, str, context, extend string) error
	Index(id string)
}

const (
	// cldrIndex is a Unicode-reserved sentinel value used to mark the start
	// of a grouping within an index.
	// We ignore any rule that starts with this rune.
	// See http://unicode.org/reports/tr35/#Collation_Elements for details.
	cldrIndex = "\uFDD0"

	// specialAnchor is the format in which to represent logical reset positions,
	// such as "first tertiary ignorable".
	specialAnchor = "<%s/>"
)

// Process parses the rules for the tailorings of this collation
// and calls the respective methods of p for each rule found.
func (c Collation) Process(p RuleProcessor) (err error) {
	if len(c.Cr) > 0 {
		if len(c.Cr) > 1 {
			return fmt.Errorf("multiple cr elements, want 0 or 1")
		}
		return processRules(p, c.Cr[0].Data())
	}
	if c.Rules.Any != nil {
		return c.processXML(p)
	}
	return errors.New("no tailoring data")
}

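A minimal RuleProcessor that simply logs what the parser feeds it might look like the following (a sketch; the type and its output format are illustrative, not part of the vendored source, and "fmt" is assumed imported):

// printProcessor implements RuleProcessor by printing each parsed rule.
type printProcessor struct{}

func (printProcessor) Reset(anchor string, before int) error {
	fmt.Printf("reset to %q (before level %d)\n", anchor, before)
	return nil
}

func (printProcessor) Insert(level int, str, context, extend string) error {
	fmt.Printf("insert %q at level %d (context %q, extend %q)\n", str, level, context, extend)
	return nil
}

func (printProcessor) Index(id string) {
	fmt.Printf("index %q\n", id)
}

// Usage, given a Collation c obtained from parsed LDML:
//	err := c.Process(printProcessor{})
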
// processRules parses rules in the Collation Rule Syntax defined in
// http://www.unicode.org/reports/tr35/tr35-collation.html#Collation_Tailorings.
func processRules(p RuleProcessor, s string) (err error) {
	chk := func(s string, e error) string {
		if err == nil {
			err = e
		}
		return s
	}
	i := 0 // Save the line number for use after the loop.
	scanner := bufio.NewScanner(strings.NewReader(s))
	for ; scanner.Scan() && err == nil; i++ {
		for s := skipSpace(scanner.Text()); s != "" && s[0] != '#'; s = skipSpace(s) {
			level := 5
			var ch byte
			switch ch, s = s[0], s[1:]; ch {
			case '&': // followed by <anchor> or '[' <key> ']'
				if s = skipSpace(s); consume(&s, '[') {
					s = chk(parseSpecialAnchor(p, s))
				} else {
					s = chk(parseAnchor(p, 0, s))
				}
			case '<': // sort relation '<'{1,4}, optionally followed by '*'.
				for level = 1; consume(&s, '<'); level++ {
				}
				if level > 4 {
					err = fmt.Errorf("level %d > 4", level)
				}
				fallthrough
			case '=': // identity relation, optionally followed by *.
				if consume(&s, '*') {
					s = chk(parseSequence(p, level, s))
				} else {
					s = chk(parseOrder(p, level, s))
				}
			default:
				chk("", fmt.Errorf("illegal operator %q", ch))
				break
			}
		}
	}
	if chk("", scanner.Err()); err != nil {
		return fmt.Errorf("%d: %v", i, err)
	}
	return nil
}

// parseSpecialAnchor parses the anchor syntax which is either of the form
//	['before' <level>] <anchor>
// or
//	[<label>]
// The starting '[' should already be consumed.
func parseSpecialAnchor(p RuleProcessor, s string) (tail string, err error) {
	i := strings.IndexByte(s, ']')
	if i == -1 {
		return "", errors.New("unmatched bracket")
	}
	a := strings.TrimSpace(s[:i])
	s = s[i+1:]
	if strings.HasPrefix(a, "before ") {
		l, err := strconv.ParseUint(skipSpace(a[len("before "):]), 10, 3)
		if err != nil {
			return s, err
		}
		return parseAnchor(p, int(l), s)
	}
	return s, p.Reset(fmt.Sprintf(specialAnchor, a), 0)
}

func parseAnchor(p RuleProcessor, level int, s string) (tail string, err error) {
	anchor, s, err := scanString(s)
	if err != nil {
		return s, err
	}
	return s, p.Reset(anchor, level)
}

func parseOrder(p RuleProcessor, level int, s string) (tail string, err error) {
	var value, context, extend string
	if value, s, err = scanString(s); err != nil {
		return s, err
	}
	if strings.HasPrefix(value, cldrIndex) {
		p.Index(value[len(cldrIndex):])
		return
	}
	if consume(&s, '|') {
		if context, s, err = scanString(s); err != nil {
			return s, errors.New("missing string after context")
		}
	}
	if consume(&s, '/') {
		if extend, s, err = scanString(s); err != nil {
			return s, errors.New("missing string after extension")
		}
	}
	return s, p.Insert(level, value, context, extend)
}

// scanString scans a single input string.
func scanString(s string) (str, tail string, err error) {
	if s = skipSpace(s); s == "" {
		return s, s, errors.New("missing string")
	}
	buf := [16]byte{} // small but enough to hold most cases.
	value := buf[:0]
	for s != "" {
		if consume(&s, '\'') {
			i := strings.IndexByte(s, '\'')
			if i == -1 {
				return "", "", errors.New(`unmatched single quote`)
			}
			if i == 0 {
				value = append(value, '\'')
			} else {
				value = append(value, s[:i]...)
			}
			s = s[i+1:]
			continue
		}
		r, sz := utf8.DecodeRuneInString(s)
		if unicode.IsSpace(r) || strings.ContainsRune("&<=#", r) {
			break
		}
		value = append(value, s[:sz]...)
		s = s[sz:]
	}
	return string(value), skipSpace(s), nil
}

func parseSequence(p RuleProcessor, level int, s string) (tail string, err error) {
	if s = skipSpace(s); s == "" {
		return s, errors.New("empty sequence")
	}
	last := rune(0)
	for s != "" {
		r, sz := utf8.DecodeRuneInString(s)
		s = s[sz:]

		if r == '-' {
			// We have a range. The first element was already written.
			if last == 0 {
				return s, errors.New("range without starter value")
			}
			r, sz = utf8.DecodeRuneInString(s)
			s = s[sz:]
			if r == utf8.RuneError || r < last {
				return s, fmt.Errorf("invalid range %q-%q", last, r)
			}
			for i := last + 1; i <= r; i++ {
				if err := p.Insert(level, string(i), "", ""); err != nil {
					return s, err
				}
			}
			last = 0
			continue
		}

		if unicode.IsSpace(r) || unicode.IsPunct(r) {
			break
		}

		// normal case
		if err := p.Insert(level, string(r), "", ""); err != nil {
			return s, err
		}
		last = r
	}
	return s, nil
}

func skipSpace(s string) string {
	return strings.TrimLeftFunc(s, unicode.IsSpace)
}

// consume reports whether the next byte is ch. If so, it gobbles it by
// updating s.
func consume(s *string, ch byte) (ok bool) {
	if *s == "" || (*s)[0] != ch {
		return false
	}
	*s = (*s)[1:]
	return true
}

// The following code parses Collation rules of CLDR version 24 and before.

var lmap = map[byte]int{
	'p': 1,
	's': 2,
	't': 3,
	'i': 5,
}

type rulesElem struct {
	Rules struct {
		Common
		Any []*struct {
			XMLName xml.Name
			rule
		} `xml:",any"`
	} `xml:"rules"`
}

type rule struct {
	Value  string `xml:",chardata"`
	Before string `xml:"before,attr"`
	Any    []*struct {
		XMLName xml.Name
		rule
	} `xml:",any"`
}

var emptyValueError = errors.New("cldr: empty rule value")

func (r *rule) value() (string, error) {
	// Convert hexadecimal Unicode codepoint notation to a string.
	s := charRe.ReplaceAllStringFunc(r.Value, replaceUnicode)
	r.Value = s
	if s == "" {
		if len(r.Any) != 1 {
			return "", emptyValueError
		}
		r.Value = fmt.Sprintf(specialAnchor, r.Any[0].XMLName.Local)
		r.Any = nil
	} else if len(r.Any) != 0 {
		return "", fmt.Errorf("cldr: XML elements found in collation rule: %v", r.Any)
	}
	return r.Value, nil
}

func (r rule) process(p RuleProcessor, name, context, extend string) error {
	v, err := r.value()
	if err != nil {
		return err
	}
	switch name {
	case "p", "s", "t", "i":
		if strings.HasPrefix(v, cldrIndex) {
			p.Index(v[len(cldrIndex):])
			return nil
		}
		if err := p.Insert(lmap[name[0]], v, context, extend); err != nil {
			return err
		}
	case "pc", "sc", "tc", "ic":
		level := lmap[name[0]]
		for _, s := range v {
			if err := p.Insert(level, string(s), context, extend); err != nil {
				return err
			}
		}
	default:
		return fmt.Errorf("cldr: unsupported tag: %q", name)
	}
	return nil
}

// processXML parses the format of CLDR versions 24 and older.
func (c Collation) processXML(p RuleProcessor) (err error) {
	// Collation is generated and defined in xml.go.
	var v string
	for _, r := range c.Rules.Any {
		switch r.XMLName.Local {
		case "reset":
			level := 0
			switch r.Before {
			case "primary", "1":
				level = 1
			case "secondary", "2":
				level = 2
			case "tertiary", "3":
				level = 3
			case "":
			default:
				return fmt.Errorf("cldr: unknown level %q", r.Before)
			}
			v, err = r.value()
			if err == nil {
				err = p.Reset(v, level)
			}
		case "x":
			var context, extend string
			for _, r1 := range r.Any {
				v, err = r1.value()
				switch r1.XMLName.Local {
				case "context":
					context = v
				case "extend":
					extend = v
				}
			}
			for _, r1 := range r.Any {
				if t := r1.XMLName.Local; t == "context" || t == "extend" {
					continue
				}
				r1.rule.process(p, r1.XMLName.Local, context, extend)
			}
		default:
			err = r.rule.process(p, r.XMLName.Local, "", "")
		}
		if err != nil {
			return err
		}
	}
	return nil
}
171
vendor/golang.org/x/text/unicode/cldr/decode.go
generated
vendored
Normal file
@ -0,0 +1,171 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cldr

import (
	"archive/zip"
	"bytes"
	"encoding/xml"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"os"
	"path/filepath"
	"regexp"
)

// A Decoder loads an archive of CLDR data.
type Decoder struct {
	dirFilter     []string
	sectionFilter []string
	loader        Loader
	cldr          *CLDR
	curLocale     string
}

// SetSectionFilter takes a list of top-level LDML element names to which
// evaluation of LDML should be limited. It automatically calls SetDirFilter.
func (d *Decoder) SetSectionFilter(filter ...string) {
	d.sectionFilter = filter
	// TODO: automatically set dir filter
}

// SetDirFilter limits the loading of LDML XML files to the specified directories.
// Note that sections may be split across directories differently for different CLDR versions.
// For more robust code, use SetSectionFilter.
func (d *Decoder) SetDirFilter(dir ...string) {
	d.dirFilter = dir
}

// A Loader provides access to the files of a CLDR archive.
type Loader interface {
	Len() int
	Path(i int) string
	Reader(i int) (io.ReadCloser, error)
}

var fileRe = regexp.MustCompile(`.*[/\\](.*)[/\\](.*)\.xml`)

// Decode loads and decodes the files represented by l.
func (d *Decoder) Decode(l Loader) (cldr *CLDR, err error) {
	d.cldr = makeCLDR()
	for i := 0; i < l.Len(); i++ {
		fname := l.Path(i)
		if m := fileRe.FindStringSubmatch(fname); m != nil {
			if len(d.dirFilter) > 0 && !in(d.dirFilter, m[1]) {
				continue
			}
			var r io.Reader
			if r, err = l.Reader(i); err == nil {
				err = d.decode(m[1], m[2], r)
			}
			if err != nil {
				return nil, err
			}
		}
	}
	d.cldr.finalize(d.sectionFilter)
	return d.cldr, nil
}

func (d *Decoder) decode(dir, id string, r io.Reader) error {
	var v interface{}
	var l *LDML
	cldr := d.cldr
	switch {
	case dir == "supplemental":
		v = cldr.supp
	case dir == "transforms":
		return nil
	case dir == "bcp47":
		v = cldr.bcp47
	case dir == "validity":
		return nil
	default:
		ok := false
		if v, ok = cldr.locale[id]; !ok {
			l = &LDML{}
			v, cldr.locale[id] = l, l
		}
	}
	x := xml.NewDecoder(r)
	if err := x.Decode(v); err != nil {
		log.Printf("%s/%s: %v", dir, id, err)
		return err
	}
	if l != nil {
		if l.Identity == nil {
			return fmt.Errorf("%s/%s: missing identity element", dir, id)
		}
		// TODO: verify when CLDR bug http://unicode.org/cldr/trac/ticket/8970
		// is resolved.
		// path := strings.Split(id, "_")
		// if lang := l.Identity.Language.Type; lang != path[0] {
		// 	return fmt.Errorf("%s/%s: language was %s; want %s", dir, id, lang, path[0])
		// }
	}
	return nil
}

type pathLoader []string

func makePathLoader(path string) (pl pathLoader, err error) {
	err = filepath.Walk(path, func(path string, _ os.FileInfo, err error) error {
		pl = append(pl, path)
		return err
	})
	return pl, err
}

func (pl pathLoader) Len() int {
	return len(pl)
}

func (pl pathLoader) Path(i int) string {
	return pl[i]
}

func (pl pathLoader) Reader(i int) (io.ReadCloser, error) {
	return os.Open(pl[i])
}

// DecodePath loads CLDR data from the given path.
func (d *Decoder) DecodePath(path string) (cldr *CLDR, err error) {
	loader, err := makePathLoader(path)
	if err != nil {
		return nil, err
	}
	return d.Decode(loader)
}

type zipLoader struct {
	r *zip.Reader
}

func (zl zipLoader) Len() int {
	return len(zl.r.File)
}

func (zl zipLoader) Path(i int) string {
	return zl.r.File[i].Name
}

func (zl zipLoader) Reader(i int) (io.ReadCloser, error) {
	return zl.r.File[i].Open()
}

// DecodeZip loads CLDR data from the zip archive for which r is the source.
func (d *Decoder) DecodeZip(r io.Reader) (cldr *CLDR, err error) {
	buffer, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	archive, err := zip.NewReader(bytes.NewReader(buffer), int64(len(buffer)))
	if err != nil {
		return nil, err
	}
	return d.Decode(zipLoader{archive})
}
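
A hypothetical loading sequence using this Decoder (the archive path is illustrative; "os" and this package are assumed imported):

func loadCLDR() (*cldr.CLDR, error) {
	var d cldr.Decoder
	// Restrict loading to the main and supplemental directories.
	d.SetDirFilter("main", "supplemental")
	f, err := os.Open("core.zip") // a CLDR core archive, e.g. downloaded from unicode.org
	if err != nil {
		return nil, err
	}
	defer f.Close()
	return d.DecodeZip(f)
}
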
400
vendor/golang.org/x/text/unicode/cldr/makexml.go
generated
vendored
Normal file
@ -0,0 +1,400 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build ignore
|
||||
|
||||
// This tool generates types for the various XML formats of CLDR.
|
||||
package main
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"encoding/xml"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/text/internal/gen"
|
||||
)
|
||||
|
||||
var outputFile = flag.String("output", "xml.go", "output file name")
|
||||
|
||||
func main() {
|
||||
flag.Parse()
|
||||
|
||||
r := gen.OpenCLDRCoreZip()
|
||||
buffer, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
log.Fatal("Could not read zip file")
|
||||
}
|
||||
r.Close()
|
||||
z, err := zip.NewReader(bytes.NewReader(buffer), int64(len(buffer)))
|
||||
if err != nil {
|
||||
log.Fatalf("Could not read zip archive: %v", err)
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
|
||||
version := gen.CLDRVersion()
|
||||
|
||||
for _, dtd := range files {
|
||||
for _, f := range z.File {
|
||||
if strings.HasSuffix(f.Name, dtd.file+".dtd") {
|
||||
r, err := f.Open()
|
||||
failOnError(err)
|
||||
|
||||
b := makeBuilder(&buf, dtd)
|
||||
b.parseDTD(r)
|
||||
b.resolve(b.index[dtd.top[0]])
|
||||
b.write()
|
||||
if b.version != "" && version != b.version {
|
||||
println(f.Name)
|
||||
log.Fatalf("main: inconsistent versions: found %s; want %s", b.version, version)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
fmt.Fprintln(&buf, "// Version is the version of CLDR from which the XML definitions are generated.")
|
||||
fmt.Fprintf(&buf, "const Version = %q\n", version)
|
||||
|
||||
gen.WriteGoFile(*outputFile, "cldr", buf.Bytes())
|
||||
}
|
||||
|
||||
func failOnError(err error) {
|
||||
if err != nil {
|
||||
log.New(os.Stderr, "", log.Lshortfile).Output(2, err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// configuration data per DTD type
|
||||
type dtd struct {
|
||||
file string // base file name
|
||||
root string // Go name of the root XML element
|
||||
top []string // create a different type for this section
|
||||
|
||||
skipElem []string // hard-coded or deprecated elements
|
||||
skipAttr []string // attributes to exclude
|
||||
predefined []string // hard-coded elements exist of the form <name>Elem
|
||||
forceRepeat []string // elements to make slices despite DTD
|
||||
}
|
||||
|
||||
var files = []dtd{
|
||||
{
|
||||
file: "ldmlBCP47",
|
||||
root: "LDMLBCP47",
|
||||
top: []string{"ldmlBCP47"},
|
||||
skipElem: []string{
|
||||
"cldrVersion", // deprecated, not used
|
||||
},
|
||||
},
|
||||
{
|
||||
file: "ldmlSupplemental",
|
||||
root: "SupplementalData",
|
||||
top: []string{"supplementalData"},
|
||||
skipElem: []string{
|
||||
"cldrVersion", // deprecated, not used
|
||||
},
|
||||
forceRepeat: []string{
|
||||
"plurals", // data defined in plurals.xml and ordinals.xml
|
||||
},
|
||||
},
|
||||
{
|
||||
file: "ldml",
|
||||
root: "LDML",
|
||||
top: []string{
|
||||
"ldml", "collation", "calendar", "timeZoneNames", "localeDisplayNames", "numbers",
|
||||
},
|
||||
skipElem: []string{
|
||||
"cp", // not used anywhere
|
||||
"special", // not used anywhere
|
||||
"fallback", // deprecated, not used
|
||||
"alias", // in Common
|
||||
"default", // in Common
|
||||
},
|
||||
skipAttr: []string{
|
||||
"hiraganaQuarternary", // typo in DTD, correct version included as well
|
||||
},
|
||||
predefined: []string{"rules"},
|
||||
},
|
||||
}
|
||||
|
||||
var comments = map[string]string{
|
||||
"ldmlBCP47": `
|
||||
// LDMLBCP47 holds information on allowable values for various variables in LDML.
|
||||
`,
|
||||
"supplementalData": `
|
||||
// SupplementalData holds information relevant for internationalization
|
||||
// and proper use of CLDR, but that is not contained in the locale hierarchy.
|
||||
`,
|
||||
"ldml": `
|
||||
// LDML is the top-level type for locale-specific data.
|
||||
`,
|
||||
"collation": `
|
||||
// Collation contains rules that specify a certain sort-order,
|
||||
// as a tailoring of the root order.
|
||||
// The parsed rules are obtained by passing a RuleProcessor to Collation's
|
||||
// Process method.
|
||||
`,
|
||||
"calendar": `
|
||||
// Calendar specifies the fields used for formatting and parsing dates and times.
|
||||
// The month and quarter names are identified numerically, starting at 1.
|
||||
// The day (of the week) names are identified with short strings, since there is
|
||||
// no universally-accepted numeric designation.
|
||||
`,
|
||||
"dates": `
|
||||
// Dates contains information regarding the format and parsing of dates and times.
|
||||
`,
|
||||
"localeDisplayNames": `
|
||||
// LocaleDisplayNames specifies localized display names for for scripts, languages,
|
||||
// countries, currencies, and variants.
|
||||
`,
|
||||
"numbers": `
|
||||
// Numbers supplies information for formatting and parsing numbers and currencies.
|
||||
`,
|
||||
}
|
||||
|
||||
type element struct {
|
||||
name string // XML element name
|
||||
category string // elements contained by this element
|
||||
signature string // category + attrKey*
|
||||
|
||||
attr []*attribute // attributes supported by this element.
|
||||
sub []struct { // parsed and evaluated sub elements of this element.
|
||||
e *element
|
||||
repeat bool // true if the element needs to be a slice
|
||||
}
|
||||
|
||||
resolved bool // prevent multiple resolutions of this element.
|
||||
}
|
||||
|
||||
type attribute struct {
|
||||
name string
|
||||
key string
|
||||
list []string
|
||||
|
||||
tag string // Go tag
|
||||
}
|
||||
|
||||
var (
|
||||
reHead = regexp.MustCompile(` *(\w+) +([\w\-]+)`)
|
||||
reAttr = regexp.MustCompile(` *(\w+) *(?:(\w+)|\(([\w\- \|]+)\)) *(?:#([A-Z]*) *(?:\"([\.\d+])\")?)? *("[\w\-:]*")?`)
|
||||
reElem = regexp.MustCompile(`^ *(EMPTY|ANY|\(.*\)[\*\+\?]?) *$`)
|
||||
reToken = regexp.MustCompile(`\w\-`)
|
||||
)
|
||||
|
||||
// builder is used to read in the DTD files from CLDR and generate Go code
|
||||
// to be used with the encoding/xml package.
|
||||
type builder struct {
|
||||
w io.Writer
|
||||
index map[string]*element
|
||||
elem []*element
|
||||
info dtd
|
||||
version string
|
||||
}
|
||||
|
||||
func makeBuilder(w io.Writer, d dtd) builder {
|
||||
return builder{
|
||||
w: w,
|
||||
index: make(map[string]*element),
|
||||
elem: []*element{},
|
||||
info: d,
|
||||
}
|
||||
}
|
||||
|
||||
// parseDTD parses a DTD file.
|
||||
func (b *builder) parseDTD(r io.Reader) {
|
||||
for d := xml.NewDecoder(r); ; {
|
||||
t, err := d.Token()
|
||||
if t == nil {
|
||||
break
|
||||
}
|
||||
failOnError(err)
|
||||
dir, ok := t.(xml.Directive)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
m := reHead.FindSubmatch(dir)
|
||||
dir = dir[len(m[0]):]
|
||||
ename := string(m[2])
|
||||
el, elementFound := b.index[ename]
|
||||
switch string(m[1]) {
|
||||
case "ELEMENT":
|
||||
if elementFound {
|
||||
log.Fatal("parseDTD: duplicate entry for element %q", ename)
|
||||
}
|
||||
m := reElem.FindSubmatch(dir)
|
||||
if m == nil {
|
||||
log.Fatalf("parseDTD: invalid element %q", string(dir))
|
||||
}
|
||||
if len(m[0]) != len(dir) {
|
||||
log.Fatal("parseDTD: invalid element %q", string(dir), len(dir), len(m[0]), string(m[0]))
|
||||
}
|
||||
s := string(m[1])
|
||||
el = &element{
|
||||
name: ename,
|
||||
category: s,
|
||||
}
|
||||
b.index[ename] = el
|
||||
case "ATTLIST":
|
||||
if !elementFound {
|
||||
log.Fatalf("parseDTD: unknown element %q", ename)
|
||||
}
|
||||
s := string(dir)
|
||||
m := reAttr.FindStringSubmatch(s)
|
||||
if m == nil {
|
||||
log.Fatal(fmt.Errorf("parseDTD: invalid attribute %q", string(dir)))
|
||||
}
|
||||
if m[4] == "FIXED" {
|
||||
b.version = m[5]
|
||||
} else {
|
||||
switch m[1] {
|
||||
case "draft", "references", "alt", "validSubLocales", "standard" /* in Common */ :
|
||||
case "type", "choice":
|
||||
default:
|
||||
el.attr = append(el.attr, &attribute{
|
||||
name: m[1],
|
||||
key: s,
|
||||
list: reToken.FindAllString(m[3], -1),
|
||||
})
|
||||
el.signature = fmt.Sprintf("%s=%s+%s", el.signature, m[1], m[2])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var reCat = regexp.MustCompile(`[ ,\|]*(?:(\(|\)|\#?[\w_-]+)([\*\+\?]?))?`)
|
||||
|
||||
// resolve takes a parsed element and converts it into structured data
|
||||
// that can be used to generate the XML code.
|
||||
func (b *builder) resolve(e *element) {
|
||||
if e.resolved {
|
||||
return
|
||||
}
|
||||
b.elem = append(b.elem, e)
|
||||
e.resolved = true
|
||||
s := e.category
|
||||
found := make(map[string]bool)
|
||||
sequenceStart := []int{}
|
||||
for len(s) > 0 {
|
||||
m := reCat.FindStringSubmatch(s)
|
||||
if m == nil {
|
||||
log.Fatalf("%s: invalid category string %q", e.name, s)
|
||||
}
|
||||
repeat := m[2] == "*" || m[2] == "+" || in(b.info.forceRepeat, m[1])
|
||||
switch m[1] {
|
||||
case "":
|
||||
case "(":
|
||||
sequenceStart = append(sequenceStart, len(e.sub))
|
||||
case ")":
|
||||
if len(sequenceStart) == 0 {
|
||||
log.Fatalf("%s: unmatched closing parenthesis", e.name)
|
||||
}
|
||||
for i := sequenceStart[len(sequenceStart)-1]; i < len(e.sub); i++ {
|
||||
e.sub[i].repeat = e.sub[i].repeat || repeat
|
||||
}
|
||||
sequenceStart = sequenceStart[:len(sequenceStart)-1]
|
||||
default:
|
||||
if in(b.info.skipElem, m[1]) {
|
||||
} else if sub, ok := b.index[m[1]]; ok {
|
||||
if !found[sub.name] {
|
||||
e.sub = append(e.sub, struct {
|
||||
e *element
|
||||
repeat bool
|
||||
}{sub, repeat})
|
||||
found[sub.name] = true
|
||||
b.resolve(sub)
|
||||
}
|
||||
} else if m[1] == "#PCDATA" || m[1] == "ANY" {
|
||||
} else if m[1] != "EMPTY" {
|
||||
log.Fatalf("resolve:%s: element %q not found", e.name, m[1])
|
||||
}
|
||||
}
|
||||
s = s[len(m[0]):]
|
||||
}
|
||||
}
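As a minimal sketch of the tokenization step resolve performs, this standalone program (reusing only the reCat pattern above; the content model string is invented) walks a DTD content model and prints each token with its repeat marker:

package main

import (
	"fmt"
	"regexp"
)

var reCat = regexp.MustCompile(`[ ,\|]*(?:(\(|\)|\#?[\w_-]+)([\*\+\?]?))?`)

func main() {
	s := "(alias | (identity, special*))"
	for len(s) > 0 {
		m := reCat.FindStringSubmatch(s)
		if m == nil || len(m[0]) == 0 {
			break // guard against a zero-length match
		}
		fmt.Printf("token=%q repeat=%q\n", m[1], m[2])
		s = s[len(m[0]):]
	}
}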
|
||||
|
||||
// in returns true if s is contained in set.
|
||||
func in(set []string, s string) bool {
|
||||
for _, v := range set {
|
||||
if v == s {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
var repl = strings.NewReplacer("-", " ", "_", " ")
|
||||
|
||||
// title puts the first character and each character following '-' or '_' in title case and
|
||||
// removes all occurrences of '-' and '_'.
|
||||
func title(s string) string {
|
||||
return strings.Replace(strings.Title(repl.Replace(s)), " ", "", -1)
|
||||
}
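The mapping from DTD names to exported Go identifiers is easiest to see in isolation. This self-contained snippet reproduces the two helpers above (strings.Title matches the vendored code, though it is deprecated in current Go); the inputs are invented:

package main

import (
	"fmt"
	"strings"
)

var repl = strings.NewReplacer("-", " ", "_", " ")

func title(s string) string {
	return strings.Replace(strings.Title(repl.Replace(s)), " ", "", -1)
}

func main() {
	fmt.Println(title("calendar_data")) // CalendarData
	fmt.Println(title("day-periods"))   // DayPeriods
}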
|
||||
|
||||
// writeElem generates Go code for a single element, recursively.
|
||||
func (b *builder) writeElem(tab int, e *element) {
|
||||
p := func(f string, x ...interface{}) {
|
||||
f = strings.Replace(f, "\n", "\n"+strings.Repeat("\t", tab), -1)
|
||||
fmt.Fprintf(b.w, f, x...)
|
||||
}
|
||||
if len(e.sub) == 0 && len(e.attr) == 0 {
|
||||
p("Common")
|
||||
return
|
||||
}
|
||||
p("struct {")
|
||||
tab++
|
||||
p("\nCommon")
|
||||
for _, attr := range e.attr {
|
||||
if !in(b.info.skipAttr, attr.name) {
|
||||
p("\n%s string `xml:\"%s,attr\"`", title(attr.name), attr.name)
|
||||
}
|
||||
}
|
||||
for _, sub := range e.sub {
|
||||
if in(b.info.predefined, sub.e.name) {
|
||||
p("\n%sElem", sub.e.name)
|
||||
continue
|
||||
}
|
||||
if in(b.info.skipElem, sub.e.name) {
|
||||
continue
|
||||
}
|
||||
p("\n%s ", title(sub.e.name))
|
||||
if sub.repeat {
|
||||
p("[]")
|
||||
}
|
||||
p("*")
|
||||
if in(b.info.top, sub.e.name) {
|
||||
p(title(sub.e.name))
|
||||
} else {
|
||||
b.writeElem(tab, sub.e)
|
||||
}
|
||||
p(" `xml:\"%s\"`", sub.e.name)
|
||||
}
|
||||
tab--
|
||||
p("\n}")
|
||||
}
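For orientation, a hand-written sketch of the shape writeElem emits. The element and field names here are invented for illustration, and Common stands in for the package's shared base type:

// Illustrative sketch only (invented names); the real generator embeds
// the package's own Common type and emits this shape inline.
package cldrsketch

type Common struct {
	Type string `xml:"type,attr"`
}

// What writeElem would emit for an element "symbols" with one attribute
// and one repeated sub-element "decimal".
type Symbols struct {
	Common
	NumberSystem string `xml:"numberSystem,attr"`
	Decimal      []*struct {
		Common
	} `xml:"decimal"`
}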
|
||||
|
||||
// write generates the Go XML code.
|
||||
func (b *builder) write() {
|
||||
for i, name := range b.info.top {
|
||||
e := b.index[name]
|
||||
if e != nil {
|
||||
fmt.Fprint(b.w, comments[name])
|
||||
name := title(e.name)
|
||||
if i == 0 {
|
||||
name = b.info.root
|
||||
}
|
||||
fmt.Fprintf(b.w, "type %s ", name)
|
||||
b.writeElem(0, e)
|
||||
fmt.Fprint(b.w, "\n")
|
||||
}
|
||||
}
|
||||
}
|
602
vendor/golang.org/x/text/unicode/cldr/resolve.go
generated
vendored
Normal file
@ -0,0 +1,602 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package cldr
|
||||
|
||||
// This file implements the various inheritance constructs defined by LDML.
|
||||
// See http://www.unicode.org/reports/tr35/#Inheritance_and_Validity
|
||||
// for more details.
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// fieldIter iterates over fields in a struct. It includes
|
||||
// fields of embedded structs.
|
||||
type fieldIter struct {
|
||||
v reflect.Value
|
||||
index, n []int
|
||||
}
|
||||
|
||||
func iter(v reflect.Value) fieldIter {
|
||||
if v.Kind() != reflect.Struct {
|
||||
log.Panicf("value %v must be a struct", v)
|
||||
}
|
||||
i := fieldIter{
|
||||
v: v,
|
||||
index: []int{0},
|
||||
n: []int{v.NumField()},
|
||||
}
|
||||
i.descent()
|
||||
return i
|
||||
}
|
||||
|
||||
func (i *fieldIter) descent() {
|
||||
for f := i.field(); f.Anonymous && f.Type.NumField() > 0; f = i.field() {
|
||||
i.index = append(i.index, 0)
|
||||
i.n = append(i.n, f.Type.NumField())
|
||||
}
|
||||
}
|
||||
|
||||
func (i *fieldIter) done() bool {
|
||||
return len(i.index) == 1 && i.index[0] >= i.n[0]
|
||||
}
|
||||
|
||||
func skip(f reflect.StructField) bool {
|
||||
return !f.Anonymous && (f.Name[0] < 'A' || f.Name[0] > 'Z')
|
||||
}
|
||||
|
||||
func (i *fieldIter) next() {
|
||||
for {
|
||||
k := len(i.index) - 1
|
||||
i.index[k]++
|
||||
if i.index[k] < i.n[k] {
|
||||
if !skip(i.field()) {
|
||||
break
|
||||
}
|
||||
} else {
|
||||
if k == 0 {
|
||||
return
|
||||
}
|
||||
i.index = i.index[:k]
|
||||
i.n = i.n[:k]
|
||||
}
|
||||
}
|
||||
i.descent()
|
||||
}
|
||||
|
||||
func (i *fieldIter) value() reflect.Value {
|
||||
return i.v.FieldByIndex(i.index)
|
||||
}
|
||||
|
||||
func (i *fieldIter) field() reflect.StructField {
|
||||
return i.v.Type().FieldByIndex(i.index)
|
||||
}
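A standalone sketch of the traversal fieldIter implements: anonymous (embedded) struct fields are descended into, so their fields are visited in-line with the outer struct's own fields. The type names are invented:

package main

import (
	"fmt"
	"reflect"
)

type Common struct{ Type string }

type Language struct {
	Common
	Name string
}

func main() {
	var walk func(v reflect.Value)
	walk = func(v reflect.Value) {
		t := v.Type()
		for i := 0; i < v.NumField(); i++ {
			if t.Field(i).Anonymous {
				walk(v.Field(i)) // descend, as fieldIter.descent does
				continue
			}
			fmt.Printf("%s = %v\n", t.Field(i).Name, v.Field(i).Interface())
		}
	}
	walk(reflect.ValueOf(Language{Common{"living"}, "gd"}))
	// Output:
	// Type = living
	// Name = gd
}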
|
||||
|
||||
type visitor func(v reflect.Value) error
|
||||
|
||||
var stopDescent = fmt.Errorf("do not recurse")
|
||||
|
||||
func (f visitor) visit(x interface{}) error {
|
||||
return f.visitRec(reflect.ValueOf(x))
|
||||
}
|
||||
|
||||
// visit recursively calls f on all nodes in v.
|
||||
func (f visitor) visitRec(v reflect.Value) error {
|
||||
if v.Kind() == reflect.Ptr {
|
||||
if v.IsNil() {
|
||||
return nil
|
||||
}
|
||||
return f.visitRec(v.Elem())
|
||||
}
|
||||
if err := f(v); err != nil {
|
||||
if err == stopDescent {
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
switch v.Kind() {
|
||||
case reflect.Struct:
|
||||
for i := iter(v); !i.done(); i.next() {
|
||||
if err := f.visitRec(i.value()); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case reflect.Slice:
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
if err := f.visitRec(v.Index(i)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// getPath is used for error reporting purposes only.
|
||||
func getPath(e Elem) string {
|
||||
if e == nil {
|
||||
return "<nil>"
|
||||
}
|
||||
if e.enclosing() == nil {
|
||||
return e.GetCommon().name
|
||||
}
|
||||
if e.GetCommon().Type == "" {
|
||||
return fmt.Sprintf("%s.%s", getPath(e.enclosing()), e.GetCommon().name)
|
||||
}
|
||||
return fmt.Sprintf("%s.%s[type=%s]", getPath(e.enclosing()), e.GetCommon().name, e.GetCommon().Type)
|
||||
}
|
||||
|
||||
// xmlName returns the xml name of the element or attribute
|
||||
func xmlName(f reflect.StructField) (name string, attr bool) {
|
||||
tags := strings.Split(f.Tag.Get("xml"), ",")
|
||||
for _, s := range tags {
|
||||
attr = attr || s == "attr"
|
||||
}
|
||||
return tags[0], attr
|
||||
}
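A self-contained check of what xmlName extracts from struct tags; the helper is copied from above and the identity struct is invented:

package main

import (
	"fmt"
	"reflect"
	"strings"
)

func xmlName(f reflect.StructField) (name string, attr bool) {
	tags := strings.Split(f.Tag.Get("xml"), ",")
	for _, s := range tags {
		attr = attr || s == "attr"
	}
	return tags[0], attr
}

func main() {
	type identity struct {
		Version string `xml:"version,attr"`
		Script  string `xml:"script"`
	}
	t := reflect.TypeOf(identity{})
	for i := 0; i < t.NumField(); i++ {
		n, a := xmlName(t.Field(i))
		fmt.Printf("name=%q attr=%v\n", n, a) // version/true, then script/false
	}
}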
|
||||
|
||||
func findField(v reflect.Value, key string) (reflect.Value, error) {
|
||||
v = reflect.Indirect(v)
|
||||
for i := iter(v); !i.done(); i.next() {
|
||||
if n, _ := xmlName(i.field()); n == key {
|
||||
return i.value(), nil
|
||||
}
|
||||
}
|
||||
return reflect.Value{}, fmt.Errorf("cldr: no field %q in element %#v", key, v.Interface())
|
||||
}
|
||||
|
||||
var xpathPart = regexp.MustCompile(`(\pL+)(?:\[@(\pL+)='([\w-]+)'\])?`)
|
||||
|
||||
func walkXPath(e Elem, path string) (res Elem, err error) {
|
||||
for _, c := range strings.Split(path, "/") {
|
||||
if c == ".." {
|
||||
if e = e.enclosing(); e == nil {
|
||||
return nil, fmt.Errorf(`cldr: ".." moves past root in path %q`, path)
|
||||
}
|
||||
continue
|
||||
} else if c == "" {
|
||||
continue
|
||||
}
|
||||
m := xpathPart.FindStringSubmatch(c)
|
||||
if len(m) == 0 || len(m[0]) != len(c) {
|
||||
return nil, fmt.Errorf("cldr: syntax error in path component %q", c)
|
||||
}
|
||||
v, err := findField(reflect.ValueOf(e), m[1])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch v.Kind() {
|
||||
case reflect.Slice:
|
||||
i := 0
|
||||
if m[2] != "" || v.Len() > 1 {
|
||||
if m[2] == "" {
|
||||
m[2] = "type"
|
||||
if m[3] = e.GetCommon().Default(); m[3] == "" {
|
||||
return nil, fmt.Errorf("cldr: type selector or default value needed for element %s", m[1])
|
||||
}
|
||||
}
|
||||
for ; i < v.Len(); i++ {
|
||||
vi := v.Index(i)
|
||||
key, err := findField(vi.Elem(), m[2])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
key = reflect.Indirect(key)
|
||||
if key.Kind() == reflect.String && key.String() == m[3] {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if i == v.Len() || v.Index(i).IsNil() {
|
||||
return nil, fmt.Errorf("no %s found with %s==%s", m[1], m[2], m[3])
|
||||
}
|
||||
e = v.Index(i).Interface().(Elem)
|
||||
case reflect.Ptr:
|
||||
if v.IsNil() {
|
||||
return nil, fmt.Errorf("cldr: element %q not found within element %q", m[1], e.GetCommon().name)
|
||||
}
|
||||
var ok bool
|
||||
if e, ok = v.Interface().(Elem); !ok {
|
||||
return nil, fmt.Errorf("cldr: %q is not an XML element", m[1])
|
||||
} else if m[2] != "" || m[3] != "" {
|
||||
return nil, fmt.Errorf("cldr: no type selector allowed for element %s", m[1])
|
||||
}
|
||||
default:
|
||||
return nil, fmt.Errorf("cldr: %q is not an XML element", m[1])
|
||||
}
|
||||
}
|
||||
return e, nil
|
||||
}
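The restricted XPath dialect walkXPath accepts is exactly what xpathPart captures per path component. A standalone demonstration on a typical CLDR-style alias path (the path itself is illustrative):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

var xpathPart = regexp.MustCompile(`(\pL+)(?:\[@(\pL+)='([\w-]+)'\])?`)

func main() {
	path := "dates/calendars/calendar[@type='gregorian']/months"
	for _, c := range strings.Split(path, "/") {
		m := xpathPart.FindStringSubmatch(c)
		fmt.Printf("elem=%q attr=%q value=%q\n", m[1], m[2], m[3])
	}
}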
|
||||
|
||||
const absPrefix = "//ldml/"
|
||||
|
||||
func (cldr *CLDR) resolveAlias(e Elem, src, path string) (res Elem, err error) {
|
||||
if src != "locale" {
|
||||
if !strings.HasPrefix(path, absPrefix) {
|
||||
return nil, fmt.Errorf("cldr: expected absolute path, found %q", path)
|
||||
}
|
||||
path = path[len(absPrefix):]
|
||||
if e, err = cldr.resolve(src); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return walkXPath(e, path)
|
||||
}
|
||||
|
||||
func (cldr *CLDR) resolveAndMergeAlias(e Elem) error {
|
||||
alias := e.GetCommon().Alias
|
||||
if alias == nil {
|
||||
return nil
|
||||
}
|
||||
a, err := cldr.resolveAlias(e, alias.Source, alias.Path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%v: error evaluating path %q: %v", getPath(e), alias.Path, err)
|
||||
}
|
||||
// Ensure alias node was already evaluated. TODO: avoid double evaluation.
|
||||
err = cldr.resolveAndMergeAlias(a)
|
||||
v := reflect.ValueOf(e).Elem()
|
||||
for i := iter(reflect.ValueOf(a).Elem()); !i.done(); i.next() {
|
||||
if vv := i.value(); vv.Kind() != reflect.Ptr || !vv.IsNil() {
|
||||
if _, attr := xmlName(i.field()); !attr {
|
||||
v.FieldByIndex(i.index).Set(vv)
|
||||
}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (cldr *CLDR) aliasResolver() visitor {
|
||||
return func(v reflect.Value) (err error) {
|
||||
if e, ok := v.Addr().Interface().(Elem); ok {
|
||||
err = cldr.resolveAndMergeAlias(e)
|
||||
if err == nil && blocking[e.GetCommon().name] {
|
||||
return stopDescent
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Elements within blocking elements do not inherit.
|
||||
// Taken from CLDR's supplementalMetaData.xml.
|
||||
var blocking = map[string]bool{
|
||||
"identity": true,
|
||||
"supplementalData": true,
|
||||
"cldrTest": true,
|
||||
"collation": true,
|
||||
"transform": true,
|
||||
}
|
||||
|
||||
// Distinguishing attributes affect inheritance; two elements with different
|
||||
// distinguishing attributes are treated as different for purposes of inheritance,
|
||||
// except when such attributes occur in the indicated elements.
|
||||
// Taken from CLDR's supplementalMetaData.xml.
|
||||
var distinguishing = map[string][]string{
|
||||
"key": nil,
|
||||
"request_id": nil,
|
||||
"id": nil,
|
||||
"registry": nil,
|
||||
"alt": nil,
|
||||
"iso4217": nil,
|
||||
"iso3166": nil,
|
||||
"mzone": nil,
|
||||
"from": nil,
|
||||
"to": nil,
|
||||
"type": []string{
|
||||
"abbreviationFallback",
|
||||
"default",
|
||||
"mapping",
|
||||
"measurementSystem",
|
||||
"preferenceOrdering",
|
||||
},
|
||||
"numberSystem": nil,
|
||||
}
|
||||
|
||||
func in(set []string, s string) bool {
|
||||
for _, v := range set {
|
||||
if v == s {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// attrKey computes a key based on the distinguishing attributes of
|
||||
// an element and its values.
|
||||
func attrKey(v reflect.Value, exclude ...string) string {
|
||||
parts := []string{}
|
||||
ename := v.Interface().(Elem).GetCommon().name
|
||||
v = v.Elem()
|
||||
for i := iter(v); !i.done(); i.next() {
|
||||
if name, attr := xmlName(i.field()); attr {
|
||||
if except, ok := distinguishing[name]; ok && !in(exclude, name) && !in(except, ename) {
|
||||
v := i.value()
|
||||
if v.Kind() == reflect.Ptr {
|
||||
v = v.Elem()
|
||||
}
|
||||
if v.IsValid() {
|
||||
parts = append(parts, fmt.Sprintf("%s=%s", name, v.String()))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
sort.Strings(parts)
|
||||
return strings.Join(parts, ";")
|
||||
}
|
||||
|
||||
// Key returns a key for e derived from all distinguishing attributes
|
||||
// except those specified by exclude.
|
||||
func Key(e Elem, exclude ...string) string {
|
||||
return attrKey(reflect.ValueOf(e), exclude...)
|
||||
}
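The key format produced by attrKey and Key is simply the sorted name=value pairs of the distinguishing attributes joined by semicolons. A sketch of the same computation on plain data, for a hypothetical <month type="1" yeartype="leap"> element:

package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	// Distinguishing attributes of the hypothetical element.
	attrs := map[string]string{"yeartype": "leap", "type": "1"}
	parts := []string{}
	for k, v := range attrs {
		parts = append(parts, fmt.Sprintf("%s=%s", k, v))
	}
	sort.Strings(parts)
	fmt.Println(strings.Join(parts, ";")) // type=1;yeartype=leap
}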
|
||||
|
||||
// linkEnclosing sets the enclosing element as well as the name
|
||||
// for all sub-elements of child, recursively.
|
||||
func linkEnclosing(parent, child Elem) {
|
||||
child.setEnclosing(parent)
|
||||
v := reflect.ValueOf(child).Elem()
|
||||
for i := iter(v); !i.done(); i.next() {
|
||||
vf := i.value()
|
||||
if vf.Kind() == reflect.Slice {
|
||||
for j := 0; j < vf.Len(); j++ {
|
||||
linkEnclosing(child, vf.Index(j).Interface().(Elem))
|
||||
}
|
||||
} else if vf.Kind() == reflect.Ptr && !vf.IsNil() && vf.Elem().Kind() == reflect.Struct {
|
||||
linkEnclosing(child, vf.Interface().(Elem))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func setNames(e Elem, name string) {
|
||||
e.setName(name)
|
||||
v := reflect.ValueOf(e).Elem()
|
||||
for i := iter(v); !i.done(); i.next() {
|
||||
vf := i.value()
|
||||
name, _ = xmlName(i.field())
|
||||
if vf.Kind() == reflect.Slice {
|
||||
for j := 0; j < vf.Len(); j++ {
|
||||
setNames(vf.Index(j).Interface().(Elem), name)
|
||||
}
|
||||
} else if vf.Kind() == reflect.Ptr && !vf.IsNil() && vf.Elem().Kind() == reflect.Struct {
|
||||
setNames(vf.Interface().(Elem), name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// deepCopy copies elements of v recursively. All elements of v that may
|
||||
// be modified by inheritance are explicitly copied.
|
||||
func deepCopy(v reflect.Value) reflect.Value {
|
||||
switch v.Kind() {
|
||||
case reflect.Ptr:
|
||||
if v.IsNil() || v.Elem().Kind() != reflect.Struct {
|
||||
return v
|
||||
}
|
||||
nv := reflect.New(v.Elem().Type())
|
||||
nv.Elem().Set(v.Elem())
|
||||
deepCopyRec(nv.Elem(), v.Elem())
|
||||
return nv
|
||||
case reflect.Slice:
|
||||
nv := reflect.MakeSlice(v.Type(), v.Len(), v.Len())
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
deepCopyRec(nv.Index(i), v.Index(i))
|
||||
}
|
||||
return nv
|
||||
}
|
||||
panic("deepCopy: must be called with pointer or slice")
|
||||
}
|
||||
|
||||
// deepCopyRec is only called by deepCopy.
|
||||
func deepCopyRec(nv, v reflect.Value) {
|
||||
if v.Kind() == reflect.Struct {
|
||||
t := v.Type()
|
||||
for i := 0; i < v.NumField(); i++ {
|
||||
if name, attr := xmlName(t.Field(i)); name != "" && !attr {
|
||||
deepCopyRec(nv.Field(i), v.Field(i))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
nv.Set(deepCopy(v))
|
||||
}
|
||||
}
|
||||
|
||||
// newNode is used to insert a missing node during inheritance.
|
||||
func (cldr *CLDR) newNode(v, enc reflect.Value) reflect.Value {
|
||||
n := reflect.New(v.Type())
|
||||
for i := iter(v); !i.done(); i.next() {
|
||||
if name, attr := xmlName(i.field()); name == "" || attr {
|
||||
n.Elem().FieldByIndex(i.index).Set(i.value())
|
||||
}
|
||||
}
|
||||
n.Interface().(Elem).GetCommon().setEnclosing(enc.Addr().Interface().(Elem))
|
||||
return n
|
||||
}
|
||||
|
||||
// v and parent must be pointers to structs.
|
||||
func (cldr *CLDR) inheritFields(v, parent reflect.Value) (res reflect.Value, err error) {
|
||||
t := v.Type()
|
||||
nv := reflect.New(t)
|
||||
nv.Elem().Set(v)
|
||||
for i := iter(v); !i.done(); i.next() {
|
||||
vf := i.value()
|
||||
f := i.field()
|
||||
name, attr := xmlName(f)
|
||||
if name == "" || attr {
|
||||
continue
|
||||
}
|
||||
pf := parent.FieldByIndex(i.index)
|
||||
if blocking[name] {
|
||||
if vf.IsNil() {
|
||||
vf = pf
|
||||
}
|
||||
nv.Elem().FieldByIndex(i.index).Set(deepCopy(vf))
|
||||
continue
|
||||
}
|
||||
switch f.Type.Kind() {
|
||||
case reflect.Ptr:
|
||||
if f.Type.Elem().Kind() == reflect.Struct {
|
||||
if !vf.IsNil() {
|
||||
if vf, err = cldr.inheritStructPtr(vf, pf); err != nil {
|
||||
return reflect.Value{}, err
|
||||
}
|
||||
vf.Interface().(Elem).setEnclosing(nv.Interface().(Elem))
|
||||
nv.Elem().FieldByIndex(i.index).Set(vf)
|
||||
} else if !pf.IsNil() {
|
||||
n := cldr.newNode(pf.Elem(), v)
|
||||
if vf, err = cldr.inheritStructPtr(n, pf); err != nil {
|
||||
return reflect.Value{}, err
|
||||
}
|
||||
vf.Interface().(Elem).setEnclosing(nv.Interface().(Elem))
|
||||
nv.Elem().FieldByIndex(i.index).Set(vf)
|
||||
}
|
||||
}
|
||||
case reflect.Slice:
|
||||
vf, err := cldr.inheritSlice(nv.Elem(), vf, pf)
|
||||
if err != nil {
|
||||
return reflect.Zero(t), err
|
||||
}
|
||||
nv.Elem().FieldByIndex(i.index).Set(vf)
|
||||
}
|
||||
}
|
||||
return nv, nil
|
||||
}
|
||||
|
||||
func root(e Elem) *LDML {
|
||||
for ; e.enclosing() != nil; e = e.enclosing() {
|
||||
}
|
||||
return e.(*LDML)
|
||||
}
|
||||
|
||||
// inheritStructPtr first merges possible aliases in with v and then inherits
|
||||
// any underspecified elements from parent.
|
||||
func (cldr *CLDR) inheritStructPtr(v, parent reflect.Value) (r reflect.Value, err error) {
|
||||
if !v.IsNil() {
|
||||
e := v.Interface().(Elem).GetCommon()
|
||||
alias := e.Alias
|
||||
if alias == nil && !parent.IsNil() {
|
||||
alias = parent.Interface().(Elem).GetCommon().Alias
|
||||
}
|
||||
if alias != nil {
|
||||
a, err := cldr.resolveAlias(v.Interface().(Elem), alias.Source, alias.Path)
|
||||
if a != nil {
|
||||
if v, err = cldr.inheritFields(v.Elem(), reflect.ValueOf(a).Elem()); err != nil {
|
||||
return reflect.Value{}, err
|
||||
}
|
||||
}
|
||||
}
|
||||
if !parent.IsNil() {
|
||||
return cldr.inheritFields(v.Elem(), parent.Elem())
|
||||
}
|
||||
} else if parent.IsNil() {
|
||||
panic("should not reach here")
|
||||
}
|
||||
return v, nil
|
||||
}
|
||||
|
||||
// Must be slice of struct pointers.
|
||||
func (cldr *CLDR) inheritSlice(enc, v, parent reflect.Value) (res reflect.Value, err error) {
|
||||
t := v.Type()
|
||||
index := make(map[string]reflect.Value)
|
||||
if !v.IsNil() {
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
vi := v.Index(i)
|
||||
key := attrKey(vi)
|
||||
index[key] = vi
|
||||
}
|
||||
}
|
||||
if !parent.IsNil() {
|
||||
for i := 0; i < parent.Len(); i++ {
|
||||
vi := parent.Index(i)
|
||||
key := attrKey(vi)
|
||||
if w, ok := index[key]; ok {
|
||||
index[key], err = cldr.inheritStructPtr(w, vi)
|
||||
} else {
|
||||
n := cldr.newNode(vi.Elem(), enc)
|
||||
index[key], err = cldr.inheritStructPtr(n, vi)
|
||||
}
|
||||
index[key].Interface().(Elem).setEnclosing(enc.Addr().Interface().(Elem))
|
||||
if err != nil {
|
||||
return v, err
|
||||
}
|
||||
}
|
||||
}
|
||||
keys := make([]string, 0, len(index))
|
||||
for k := range index {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
sl := reflect.MakeSlice(t, len(index), len(index))
|
||||
for i, k := range keys {
|
||||
sl.Index(i).Set(index[k])
|
||||
}
|
||||
return sl, nil
|
||||
}
|
||||
|
||||
func parentLocale(loc string) string {
|
||||
parts := strings.Split(loc, "_")
|
||||
if len(parts) == 1 {
|
||||
return "root"
|
||||
}
|
||||
parts = parts[:len(parts)-1]
|
||||
key := strings.Join(parts, "_")
|
||||
return key
|
||||
}
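parentLocale implements plain truncation fallback. A standalone loop printing the chain the resolver climbs for a three-part locale:

package main

import (
	"fmt"
	"strings"
)

func parentLocale(loc string) string {
	parts := strings.Split(loc, "_")
	if len(parts) == 1 {
		return "root"
	}
	return strings.Join(parts[:len(parts)-1], "_")
}

func main() {
	for loc := "sr_Latn_RS"; ; loc = parentLocale(loc) {
		fmt.Println(loc) // sr_Latn_RS, sr_Latn, sr, root
		if loc == "root" {
			break
		}
	}
}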
|
||||
|
||||
func (cldr *CLDR) resolve(loc string) (res *LDML, err error) {
|
||||
if r := cldr.resolved[loc]; r != nil {
|
||||
return r, nil
|
||||
}
|
||||
x := cldr.RawLDML(loc)
|
||||
if x == nil {
|
||||
return nil, fmt.Errorf("cldr: unknown locale %q", loc)
|
||||
}
|
||||
var v reflect.Value
|
||||
if loc == "root" {
|
||||
x = deepCopy(reflect.ValueOf(x)).Interface().(*LDML)
|
||||
linkEnclosing(nil, x)
|
||||
err = cldr.aliasResolver().visit(x)
|
||||
} else {
|
||||
key := parentLocale(loc)
|
||||
var parent *LDML
|
||||
for ; cldr.locale[key] == nil; key = parentLocale(key) {
|
||||
}
|
||||
if parent, err = cldr.resolve(key); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
v, err = cldr.inheritFields(reflect.ValueOf(x).Elem(), reflect.ValueOf(parent).Elem())
|
||||
x = v.Interface().(*LDML)
|
||||
linkEnclosing(nil, x)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cldr.resolved[loc] = x
|
||||
return x, err
|
||||
}
|
||||
|
||||
// finalize finalizes the initialization of the raw LDML structs. It also
|
||||
// removes unwanted fields, as specified by filter, so that they will not
|
||||
// be unnecessarily evaluated.
|
||||
func (cldr *CLDR) finalize(filter []string) {
|
||||
for _, x := range cldr.locale {
|
||||
if filter != nil {
|
||||
v := reflect.ValueOf(x).Elem()
|
||||
t := v.Type()
|
||||
for i := 0; i < v.NumField(); i++ {
|
||||
f := t.Field(i)
|
||||
name, _ := xmlName(f)
|
||||
if name != "" && name != "identity" && !in(filter, name) {
|
||||
v.Field(i).Set(reflect.Zero(f.Type))
|
||||
}
|
||||
}
|
||||
}
|
||||
linkEnclosing(nil, x) // for resolving aliases and paths
|
||||
setNames(x, "ldml")
|
||||
}
|
||||
}
|
144
vendor/golang.org/x/text/unicode/cldr/slice.go
generated
vendored
Normal file
@ -0,0 +1,144 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package cldr
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// Slice provides utilities for modifying slices of elements.
|
||||
// It can be wrapped around any slice of which the element type implements
|
||||
// interface Elem.
|
||||
type Slice struct {
|
||||
ptr reflect.Value
|
||||
typ reflect.Type
|
||||
}
|
||||
|
||||
// Value returns the reflect.Value of the underlying slice.
|
||||
func (s *Slice) Value() reflect.Value {
|
||||
return s.ptr.Elem()
|
||||
}
|
||||
|
||||
// MakeSlice wraps a pointer to a slice of Elems.
|
||||
// It replaces the array pointed to by the slice so that subsequent modifications
|
||||
// do not alter the data in a CLDR type.
|
||||
// It panics if an incorrect type is passed.
|
||||
func MakeSlice(slicePtr interface{}) Slice {
|
||||
ptr := reflect.ValueOf(slicePtr)
|
||||
if ptr.Kind() != reflect.Ptr {
|
||||
panic(fmt.Sprintf("MakeSlice: argument must be pointer to slice, found %v", ptr.Type()))
|
||||
}
|
||||
sl := ptr.Elem()
|
||||
if sl.Kind() != reflect.Slice {
|
||||
panic(fmt.Sprintf("MakeSlice: argument must point to a slice, found %v", sl.Type()))
|
||||
}
|
||||
intf := reflect.TypeOf((*Elem)(nil)).Elem()
|
||||
if !sl.Type().Elem().Implements(intf) {
|
||||
panic(fmt.Sprintf("MakeSlice: element type of slice (%v) does not implement Elem", sl.Type().Elem()))
|
||||
}
|
||||
nsl := reflect.MakeSlice(sl.Type(), sl.Len(), sl.Len())
|
||||
reflect.Copy(nsl, sl)
|
||||
sl.Set(nsl)
|
||||
return Slice{
|
||||
ptr: ptr,
|
||||
typ: sl.Type().Elem().Elem(),
|
||||
}
|
||||
}
|
||||
|
||||
func (s Slice) indexForAttr(a string) []int {
|
||||
for i := iter(reflect.Zero(s.typ)); !i.done(); i.next() {
|
||||
if n, _ := xmlName(i.field()); n == a {
|
||||
return i.index
|
||||
}
|
||||
}
|
||||
panic(fmt.Sprintf("MakeSlice: no attribute %q for type %v", a, s.typ))
|
||||
}
|
||||
|
||||
// Filter filters s to only include elements for which fn returns true.
|
||||
func (s Slice) Filter(fn func(e Elem) bool) {
|
||||
k := 0
|
||||
sl := s.Value()
|
||||
for i := 0; i < sl.Len(); i++ {
|
||||
vi := sl.Index(i)
|
||||
if fn(vi.Interface().(Elem)) {
|
||||
sl.Index(k).Set(vi)
|
||||
k++
|
||||
}
|
||||
}
|
||||
sl.Set(sl.Slice(0, k))
|
||||
}
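Filter uses the standard in-place compaction idiom: a write index trails the read loop, and the slice is cut to the kept length. The same pattern on a plain slice, for reference:

package main

import "fmt"

func main() {
	xs := []int{1, 2, 3, 4, 5, 6}
	k := 0
	for _, x := range xs {
		if x%2 == 0 { // the predicate, standing in for fn
			xs[k] = x
			k++
		}
	}
	xs = xs[:k]
	fmt.Println(xs) // [2 4 6]
}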
|
||||
|
||||
// Group finds elements in s for which fn returns the same value and groups
|
||||
// them in a new Slice.
|
||||
func (s Slice) Group(fn func(e Elem) string) []Slice {
|
||||
m := make(map[string][]reflect.Value)
|
||||
sl := s.Value()
|
||||
for i := 0; i < sl.Len(); i++ {
|
||||
vi := sl.Index(i)
|
||||
key := fn(vi.Interface().(Elem))
|
||||
m[key] = append(m[key], vi)
|
||||
}
|
||||
keys := []string{}
|
||||
for k := range m {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
res := []Slice{}
|
||||
for _, k := range keys {
|
||||
nsl := reflect.New(sl.Type())
|
||||
nsl.Elem().Set(reflect.Append(nsl.Elem(), m[k]...))
|
||||
res = append(res, MakeSlice(nsl.Interface()))
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// SelectAnyOf filters s to contain only elements for which attr matches
|
||||
// any of the values.
|
||||
func (s Slice) SelectAnyOf(attr string, values ...string) {
|
||||
index := s.indexForAttr(attr)
|
||||
s.Filter(func(e Elem) bool {
|
||||
vf := reflect.ValueOf(e).Elem().FieldByIndex(index)
|
||||
return in(values, vf.String())
|
||||
})
|
||||
}
|
||||
|
||||
// SelectOnePerGroup filters s to include at most one element e per group of
|
||||
// elements matching Key(attr), where e has an attribute a that matches any
|
||||
// of the values in v.
|
||||
// If more than one element in a group matches a value in v preference
|
||||
// is given to the element that matches the first value in v.
|
||||
func (s Slice) SelectOnePerGroup(a string, v []string) {
|
||||
index := s.indexForAttr(a)
|
||||
grouped := s.Group(func(e Elem) string { return Key(e, a) })
|
||||
sl := s.Value()
|
||||
sl.Set(sl.Slice(0, 0))
|
||||
for _, g := range grouped {
|
||||
e := reflect.Value{}
|
||||
found := len(v)
|
||||
gsl := g.Value()
|
||||
for i := 0; i < gsl.Len(); i++ {
|
||||
vi := gsl.Index(i).Elem().FieldByIndex(index)
|
||||
j := 0
|
||||
for ; j < len(v) && v[j] != vi.String(); j++ {
|
||||
}
|
||||
if j < found {
|
||||
found = j
|
||||
e = gsl.Index(i)
|
||||
}
|
||||
}
|
||||
if found < len(v) {
|
||||
sl.Set(reflect.Append(sl, e))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// SelectDraft drops all elements from the list with a draft level smaller than d
|
||||
// and selects the highest draft level of the remaining.
|
||||
// This method assumes that the input CLDR is canonicalized.
|
||||
func (s Slice) SelectDraft(d Draft) {
|
||||
s.SelectOnePerGroup("draft", drafts[len(drafts)-2-int(d):])
|
||||
}
|
1494
vendor/golang.org/x/text/unicode/cldr/xml.go
generated
vendored
Normal file
File diff suppressed because it is too large
27
vendor/golang.org/x/text/unicode/norm/LICENSE
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
Copyright (c) 2009 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
508
vendor/golang.org/x/text/unicode/norm/composition.go
generated
vendored
Normal file
@ -0,0 +1,508 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package norm
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
const (
|
||||
maxNonStarters = 30
|
||||
// The maximum number of characters needed for a buffer is
|
||||
// maxNonStarters + 1 for the starter + 1 for the CGJ
|
||||
maxBufferSize = maxNonStarters + 2
|
||||
maxNFCExpansion = 3 // NFC(0x1D160)
|
||||
maxNFKCExpansion = 18 // NFKC(0xFDFA)
|
||||
|
||||
maxByteBufferSize = utf8.UTFMax * maxBufferSize // 128
|
||||
)
|
||||
|
||||
// ssState is used for reporting the segment state after inserting a rune.
|
||||
// It is returned by streamSafe.next.
|
||||
type ssState int
|
||||
|
||||
const (
|
||||
// Indicates a rune was successfully added to the segment.
|
||||
ssSuccess ssState = iota
|
||||
// Indicates a rune starts a new segment and should not be added.
|
||||
ssStarter
|
||||
// Indicates a rune caused a segment overflow and a CGJ should be inserted.
|
||||
ssOverflow
|
||||
)
|
||||
|
||||
// streamSafe implements the policy of when a CGJ should be inserted.
|
||||
type streamSafe uint8
|
||||
|
||||
// first inserts the first rune of a segment. It is a faster version of next if
|
||||
// it is known p represents the first rune in a segment.
|
||||
func (ss *streamSafe) first(p Properties) {
|
||||
*ss = streamSafe(p.nTrailingNonStarters())
|
||||
}
|
||||
|
||||
// next returns a ssState value to indicate whether a rune represented by p
|
||||
// can be inserted.
|
||||
func (ss *streamSafe) next(p Properties) ssState {
|
||||
if *ss > maxNonStarters {
|
||||
panic("streamSafe was not reset")
|
||||
}
|
||||
n := p.nLeadingNonStarters()
|
||||
if *ss += streamSafe(n); *ss > maxNonStarters {
|
||||
*ss = 0
|
||||
return ssOverflow
|
||||
}
|
||||
// The Stream-Safe Text Processing prescribes that the counting can stop
|
||||
// as soon as a starter is encountered. However, there are some starters,
|
||||
// like Jamo V and T, that can combine with other runes, leaving their
|
||||
// successive non-starters appended to the previous, possibly causing an
|
||||
// overflow. We will therefore consider any rune with a non-zero nLead to
|
||||
// be a non-starter. Note that it always holds that if nLead > 0 then
|
||||
// nLead == nTrail.
|
||||
if n == 0 {
|
||||
*ss = streamSafe(p.nTrailingNonStarters())
|
||||
return ssStarter
|
||||
}
|
||||
return ssSuccess
|
||||
}
|
||||
|
||||
// backwards is used for checking for overflow and segment starts
|
||||
// when traversing a string backwards. Users do not need to call first
|
||||
// for the first rune. The state of the streamSafe retains the count of
|
||||
// the non-starters loaded.
|
||||
func (ss *streamSafe) backwards(p Properties) ssState {
|
||||
if *ss > maxNonStarters {
|
||||
panic("streamSafe was not reset")
|
||||
}
|
||||
c := *ss + streamSafe(p.nTrailingNonStarters())
|
||||
if c > maxNonStarters {
|
||||
return ssOverflow
|
||||
}
|
||||
*ss = c
|
||||
if p.nLeadingNonStarters() == 0 {
|
||||
return ssStarter
|
||||
}
|
||||
return ssSuccess
|
||||
}
|
||||
|
||||
func (ss streamSafe) isMax() bool {
|
||||
return ss == maxNonStarters
|
||||
}
|
||||
|
||||
// GraphemeJoiner is inserted after maxNonStarters non-starter runes.
|
||||
const GraphemeJoiner = "\u034F"
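The effect is observable through the package's public API: pushing more than maxNonStarters combining marks through a normalizer makes it emit GraphemeJoiner. A minimal check, assuming the conventional import path of this vendored package:

package main

import (
	"fmt"
	"strings"

	"golang.org/x/text/unicode/norm"
)

func main() {
	// 40 combining acute accents exceed the 30-non-starter limit,
	// so NFC inserts a CGJ (U+034F) to keep the text stream-safe.
	s := "e" + strings.Repeat("\u0301", 40)
	fmt.Println(strings.Contains(norm.NFC.String(s), norm.GraphemeJoiner))
}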
|
||||
|
||||
// reorderBuffer is used to normalize a single segment. Characters inserted with
|
||||
// insert are decomposed and reordered based on CCC. The compose method can
|
||||
// be used to recombine characters. Note that the byte buffer does not hold
|
||||
// the UTF-8 characters in order. Only the rune array is maintained in sorted
|
||||
// order. flush writes the resulting segment to a byte array.
|
||||
type reorderBuffer struct {
|
||||
rune [maxBufferSize]Properties // Per character info.
|
||||
byte [maxByteBufferSize]byte // UTF-8 buffer. Referenced by runeInfo.pos.
|
||||
nbyte uint8 // Number of bytes.
|
||||
ss streamSafe // For limiting length of non-starter sequence.
|
||||
nrune int // Number of runeInfos.
|
||||
f formInfo
|
||||
|
||||
src input
|
||||
nsrc int
|
||||
tmpBytes input
|
||||
|
||||
out []byte
|
||||
flushF func(*reorderBuffer) bool
|
||||
}
|
||||
|
||||
func (rb *reorderBuffer) init(f Form, src []byte) {
|
||||
rb.f = *formTable[f]
|
||||
rb.src.setBytes(src)
|
||||
rb.nsrc = len(src)
|
||||
rb.ss = 0
|
||||
}
|
||||
|
||||
func (rb *reorderBuffer) initString(f Form, src string) {
|
||||
rb.f = *formTable[f]
|
||||
rb.src.setString(src)
|
||||
rb.nsrc = len(src)
|
||||
rb.ss = 0
|
||||
}
|
||||
|
||||
func (rb *reorderBuffer) setFlusher(out []byte, f func(*reorderBuffer) bool) {
|
||||
rb.out = out
|
||||
rb.flushF = f
|
||||
}
|
||||
|
||||
// reset discards all characters from the buffer.
|
||||
func (rb *reorderBuffer) reset() {
|
||||
rb.nrune = 0
|
||||
rb.nbyte = 0
|
||||
}
|
||||
|
||||
func (rb *reorderBuffer) doFlush() bool {
|
||||
if rb.f.composing {
|
||||
rb.compose()
|
||||
}
|
||||
res := rb.flushF(rb)
|
||||
rb.reset()
|
||||
return res
|
||||
}
|
||||
|
||||
// appendFlush appends the normalized segment to rb.out.
|
||||
func appendFlush(rb *reorderBuffer) bool {
|
||||
for i := 0; i < rb.nrune; i++ {
|
||||
start := rb.rune[i].pos
|
||||
end := start + rb.rune[i].size
|
||||
rb.out = append(rb.out, rb.byte[start:end]...)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// flush appends the normalized segment to out and resets rb.
|
||||
func (rb *reorderBuffer) flush(out []byte) []byte {
|
||||
for i := 0; i < rb.nrune; i++ {
|
||||
start := rb.rune[i].pos
|
||||
end := start + rb.rune[i].size
|
||||
out = append(out, rb.byte[start:end]...)
|
||||
}
|
||||
rb.reset()
|
||||
return out
|
||||
}
|
||||
|
||||
// flushCopy copies the normalized segment to buf and resets rb.
|
||||
// It returns the number of bytes written to buf.
|
||||
func (rb *reorderBuffer) flushCopy(buf []byte) int {
|
||||
p := 0
|
||||
for i := 0; i < rb.nrune; i++ {
|
||||
runep := rb.rune[i]
|
||||
p += copy(buf[p:], rb.byte[runep.pos:runep.pos+runep.size])
|
||||
}
|
||||
rb.reset()
|
||||
return p
|
||||
}
|
||||
|
||||
// insertOrdered inserts a rune in the buffer, ordered by Canonical Combining Class.
|
||||
// The caller must ensure the buffer is large enough to hold the rune.
|
||||
// It is used internally by insert and insertString only.
|
||||
func (rb *reorderBuffer) insertOrdered(info Properties) {
|
||||
n := rb.nrune
|
||||
b := rb.rune[:]
|
||||
cc := info.ccc
|
||||
if cc > 0 {
|
||||
// Find insertion position + move elements to make room.
|
||||
for ; n > 0; n-- {
|
||||
if b[n-1].ccc <= cc {
|
||||
break
|
||||
}
|
||||
b[n] = b[n-1]
|
||||
}
|
||||
}
|
||||
rb.nrune += 1
|
||||
pos := uint8(rb.nbyte)
|
||||
rb.nbyte += utf8.UTFMax
|
||||
info.pos = pos
|
||||
b[n] = info
|
||||
}
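insertOrdered is a single insertion-sort step keyed on CCC. The same move-and-place logic on a bare slice of combining classes, as a sketch:

package main

import "fmt"

func main() {
	ccc := []int{0, 10, 230, 0} // last slot reserved for the new entry
	newCCC := 220
	n := 3 // current number of entries
	for ; n > 0 && ccc[n-1] > newCCC; n-- {
		ccc[n] = ccc[n-1] // shift larger classes right
	}
	ccc[n] = newCCC
	fmt.Println(ccc) // [0 10 220 230]
}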
|
||||
|
||||
// insertErr is an error code returned by insert. Using this type instead
|
||||
// of error improves performance up to 20% for many of the benchmarks.
|
||||
type insertErr int
|
||||
|
||||
const (
|
||||
iSuccess insertErr = -iota
|
||||
iShortDst
|
||||
iShortSrc
|
||||
)
|
||||
|
||||
// insertFlush inserts the given rune in the buffer ordered by CCC.
|
||||
// If a decomposition with multiple segments is encountered, the leading
|
||||
// ones are flushed.
|
||||
// It returns a non-zero error code if the rune was not inserted.
|
||||
func (rb *reorderBuffer) insertFlush(src input, i int, info Properties) insertErr {
|
||||
if rune := src.hangul(i); rune != 0 {
|
||||
rb.decomposeHangul(rune)
|
||||
return iSuccess
|
||||
}
|
||||
if info.hasDecomposition() {
|
||||
return rb.insertDecomposed(info.Decomposition())
|
||||
}
|
||||
rb.insertSingle(src, i, info)
|
||||
return iSuccess
|
||||
}
|
||||
|
||||
// insertUnsafe inserts the given rune in the buffer ordered by CCC.
|
||||
// It is assumed there is sufficient space to hold the runes. It is the
|
||||
// responsibility of the caller to ensure this. This can be done by checking
|
||||
// the state returned by the streamSafe type.
|
||||
func (rb *reorderBuffer) insertUnsafe(src input, i int, info Properties) {
|
||||
if rune := src.hangul(i); rune != 0 {
|
||||
rb.decomposeHangul(rune)
|
||||
}
|
||||
if info.hasDecomposition() {
|
||||
// TODO: inline.
|
||||
rb.insertDecomposed(info.Decomposition())
|
||||
} else {
|
||||
rb.insertSingle(src, i, info)
|
||||
}
|
||||
}
|
||||
|
||||
// insertDecomposed inserts an entry into the reorderBuffer for each rune
|
||||
// in dcomp. dcomp must be a sequence of decomposed UTF-8-encoded runes.
|
||||
// It flushes the buffer on each new segment start.
|
||||
func (rb *reorderBuffer) insertDecomposed(dcomp []byte) insertErr {
|
||||
rb.tmpBytes.setBytes(dcomp)
|
||||
// As the streamSafe accounting already handles the counting for modifiers,
|
||||
// we don't have to call next. However, we do need to keep the accounting
|
||||
// intact when flushing the buffer.
|
||||
for i := 0; i < len(dcomp); {
|
||||
info := rb.f.info(rb.tmpBytes, i)
|
||||
if info.BoundaryBefore() && rb.nrune > 0 && !rb.doFlush() {
|
||||
return iShortDst
|
||||
}
|
||||
i += copy(rb.byte[rb.nbyte:], dcomp[i:i+int(info.size)])
|
||||
rb.insertOrdered(info)
|
||||
}
|
||||
return iSuccess
|
||||
}
|
||||
|
||||
// insertSingle inserts an entry in the reorderBuffer for the rune at
|
||||
// position i. info is the runeInfo for the rune at position i.
|
||||
func (rb *reorderBuffer) insertSingle(src input, i int, info Properties) {
|
||||
src.copySlice(rb.byte[rb.nbyte:], i, i+int(info.size))
|
||||
rb.insertOrdered(info)
|
||||
}
|
||||
|
||||
// insertCGJ inserts a Combining Grapheme Joiner (0x034f) into rb.
|
||||
func (rb *reorderBuffer) insertCGJ() {
|
||||
rb.insertSingle(input{str: GraphemeJoiner}, 0, Properties{size: uint8(len(GraphemeJoiner))})
|
||||
}
|
||||
|
||||
// appendRune inserts a rune at the end of the buffer. It is used for Hangul.
|
||||
func (rb *reorderBuffer) appendRune(r rune) {
|
||||
bn := rb.nbyte
|
||||
sz := utf8.EncodeRune(rb.byte[bn:], rune(r))
|
||||
rb.nbyte += utf8.UTFMax
|
||||
rb.rune[rb.nrune] = Properties{pos: bn, size: uint8(sz)}
|
||||
rb.nrune++
|
||||
}
|
||||
|
||||
// assignRune sets a rune at position pos. It is used for Hangul and recomposition.
|
||||
func (rb *reorderBuffer) assignRune(pos int, r rune) {
|
||||
bn := rb.rune[pos].pos
|
||||
sz := utf8.EncodeRune(rb.byte[bn:], rune(r))
|
||||
rb.rune[pos] = Properties{pos: bn, size: uint8(sz)}
|
||||
}
|
||||
|
||||
// runeAt returns the rune at position n. It is used for Hangul and recomposition.
|
||||
func (rb *reorderBuffer) runeAt(n int) rune {
|
||||
inf := rb.rune[n]
|
||||
r, _ := utf8.DecodeRune(rb.byte[inf.pos : inf.pos+inf.size])
|
||||
return r
|
||||
}
|
||||
|
||||
// bytesAt returns the UTF-8 encoding of the rune at position n.
|
||||
// It is used for Hangul and recomposition.
|
||||
func (rb *reorderBuffer) bytesAt(n int) []byte {
|
||||
inf := rb.rune[n]
|
||||
return rb.byte[inf.pos : int(inf.pos)+int(inf.size)]
|
||||
}
|
||||
|
||||
// For Hangul we combine algorithmically, instead of using tables.
|
||||
const (
|
||||
hangulBase = 0xAC00 // UTF-8(hangulBase) -> EA B0 80
|
||||
hangulBase0 = 0xEA
|
||||
hangulBase1 = 0xB0
|
||||
hangulBase2 = 0x80
|
||||
|
||||
hangulEnd = hangulBase + jamoLVTCount // UTF-8(0xD7A4) -> ED 9E A4
|
||||
hangulEnd0 = 0xED
|
||||
hangulEnd1 = 0x9E
|
||||
hangulEnd2 = 0xA4
|
||||
|
||||
jamoLBase = 0x1100 // UTF-8(jamoLBase) -> E1 84 00
|
||||
jamoLBase0 = 0xE1
|
||||
jamoLBase1 = 0x84
|
||||
jamoLEnd = 0x1113
|
||||
jamoVBase = 0x1161
|
||||
jamoVEnd = 0x1176
|
||||
jamoTBase = 0x11A7
|
||||
jamoTEnd = 0x11C3
|
||||
|
||||
jamoTCount = 28
|
||||
jamoVCount = 21
|
||||
jamoVTCount = 21 * 28
|
||||
jamoLVTCount = 19 * 21 * 28
|
||||
)
|
||||
|
||||
const hangulUTF8Size = 3
|
||||
|
||||
func isHangul(b []byte) bool {
|
||||
if len(b) < hangulUTF8Size {
|
||||
return false
|
||||
}
|
||||
b0 := b[0]
|
||||
if b0 < hangulBase0 {
|
||||
return false
|
||||
}
|
||||
b1 := b[1]
|
||||
switch {
|
||||
case b0 == hangulBase0:
|
||||
return b1 >= hangulBase1
|
||||
case b0 < hangulEnd0:
|
||||
return true
|
||||
case b0 > hangulEnd0:
|
||||
return false
|
||||
case b1 < hangulEnd1:
|
||||
return true
|
||||
}
|
||||
return b1 == hangulEnd1 && b[2] < hangulEnd2
|
||||
}
|
||||
|
||||
func isHangulString(b string) bool {
|
||||
if len(b) < hangulUTF8Size {
|
||||
return false
|
||||
}
|
||||
b0 := b[0]
|
||||
if b0 < hangulBase0 {
|
||||
return false
|
||||
}
|
||||
b1 := b[1]
|
||||
switch {
|
||||
case b0 == hangulBase0:
|
||||
return b1 >= hangulBase1
|
||||
case b0 < hangulEnd0:
|
||||
return true
|
||||
case b0 > hangulEnd0:
|
||||
return false
|
||||
case b1 < hangulEnd1:
|
||||
return true
|
||||
}
|
||||
return b1 == hangulEnd1 && b[2] < hangulEnd2
|
||||
}
|
||||
|
||||
// Caller must ensure len(b) >= 2.
|
||||
func isJamoVT(b []byte) bool {
|
||||
// True if (rune & 0xff00) == jamoLBase
|
||||
return b[0] == jamoLBase0 && (b[1]&0xFC) == jamoLBase1
|
||||
}
|
||||
|
||||
func isHangulWithoutJamoT(b []byte) bool {
|
||||
c, _ := utf8.DecodeRune(b)
|
||||
c -= hangulBase
|
||||
return c < jamoLVTCount && c%jamoTCount == 0
|
||||
}
|
||||
|
||||
// decomposeHangul writes the decomposed Hangul to buf and returns the number
|
||||
// of bytes written. len(buf) should be at least 9.
|
||||
func decomposeHangul(buf []byte, r rune) int {
|
||||
const JamoUTF8Len = 3
|
||||
r -= hangulBase
|
||||
x := r % jamoTCount
|
||||
r /= jamoTCount
|
||||
utf8.EncodeRune(buf, jamoLBase+r/jamoVCount)
|
||||
utf8.EncodeRune(buf[JamoUTF8Len:], jamoVBase+r%jamoVCount)
|
||||
if x != 0 {
|
||||
utf8.EncodeRune(buf[2*JamoUTF8Len:], jamoTBase+x)
|
||||
return 3 * JamoUTF8Len
|
||||
}
|
||||
return 2 * JamoUTF8Len
|
||||
}
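A worked example of the arithmetic with the constants above: U+D55C (HAN) decomposes to HIEUH, A, and NIEUN.

package main

import "fmt"

const (
	hangulBase = 0xAC00
	jamoLBase  = 0x1100
	jamoVBase  = 0x1161
	jamoTBase  = 0x11A7
	jamoTCount = 28
	jamoVCount = 21
)

func main() {
	r := rune(0xD55C) - hangulBase // U+D55C HAN
	x := r % jamoTCount
	r /= jamoTCount
	fmt.Printf("L=%U V=%U T=%U\n",
		jamoLBase+r/jamoVCount, // U+1112 HIEUH
		jamoVBase+r%jamoVCount, // U+1161 A
		jamoTBase+x)            // U+11AB NIEUN
}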
|
||||
|
||||
// decomposeHangul algorithmically decomposes a Hangul rune into
|
||||
// its Jamo components.
|
||||
// See http://unicode.org/reports/tr15/#Hangul for details on decomposing Hangul.
|
||||
func (rb *reorderBuffer) decomposeHangul(r rune) {
|
||||
r -= hangulBase
|
||||
x := r % jamoTCount
|
||||
r /= jamoTCount
|
||||
rb.appendRune(jamoLBase + r/jamoVCount)
|
||||
rb.appendRune(jamoVBase + r%jamoVCount)
|
||||
if x != 0 {
|
||||
rb.appendRune(jamoTBase + x)
|
||||
}
|
||||
}
|
||||
|
||||
// combineHangul algorithmically combines Jamo character components into Hangul.
|
||||
// See http://unicode.org/reports/tr15/#Hangul for details on combining Hangul.
|
||||
func (rb *reorderBuffer) combineHangul(s, i, k int) {
|
||||
b := rb.rune[:]
|
||||
bn := rb.nrune
|
||||
for ; i < bn; i++ {
|
||||
cccB := b[k-1].ccc
|
||||
cccC := b[i].ccc
|
||||
if cccB == 0 {
|
||||
s = k - 1
|
||||
}
|
||||
if s != k-1 && cccB >= cccC {
|
||||
// b[i] is blocked by greater-equal cccX below it
|
||||
b[k] = b[i]
|
||||
k++
|
||||
} else {
|
||||
l := rb.runeAt(s) // also used to compare to hangulBase
|
||||
v := rb.runeAt(i) // also used to compare to jamoT
|
||||
switch {
|
||||
case jamoLBase <= l && l < jamoLEnd &&
|
||||
jamoVBase <= v && v < jamoVEnd:
|
||||
// 11xx plus 116x to LV
|
||||
rb.assignRune(s, hangulBase+
|
||||
(l-jamoLBase)*jamoVTCount+(v-jamoVBase)*jamoTCount)
|
||||
case hangulBase <= l && l < hangulEnd &&
|
||||
jamoTBase < v && v < jamoTEnd &&
|
||||
((l-hangulBase)%jamoTCount) == 0:
|
||||
// ACxx plus 11Ax to LVT
|
||||
rb.assignRune(s, l+v-jamoTBase)
|
||||
default:
|
||||
b[k] = b[i]
|
||||
k++
|
||||
}
|
||||
}
|
||||
}
|
||||
rb.nrune = k
|
||||
}
|
||||
|
||||
// compose recombines the runes in the buffer.
|
||||
// It should only be used to recompose a single segment, as it will not
|
||||
// handle alternations between Hangul and non-Hangul characters correctly.
|
||||
func (rb *reorderBuffer) compose() {
|
||||
// UAX #15, section X5, including Corrigendum #5
|
||||
// "In any character sequence beginning with starter S, a character C is
|
||||
// blocked from S if and only if there is some character B between S
|
||||
// and C, and either B is a starter or it has the same or higher
|
||||
// combining class as C."
|
||||
bn := rb.nrune
|
||||
if bn == 0 {
|
||||
return
|
||||
}
|
||||
k := 1
|
||||
b := rb.rune[:]
|
||||
for s, i := 0, 1; i < bn; i++ {
|
||||
if isJamoVT(rb.bytesAt(i)) {
|
||||
// Redo from start in Hangul mode. Necessary to support
|
||||
// U+320E..U+321E in NFKC mode.
|
||||
rb.combineHangul(s, i, k)
|
||||
return
|
||||
}
|
||||
ii := b[i]
|
||||
// We can only use combineForward as a filter if we later
|
||||
// get the info for the combined character. This is more
|
||||
// expensive than using the filter. Using combinesBackward()
|
||||
// is safe.
|
||||
if ii.combinesBackward() {
|
||||
cccB := b[k-1].ccc
|
||||
cccC := ii.ccc
|
||||
blocked := false // b[i] blocked by starter or greater or equal CCC?
|
||||
if cccB == 0 {
|
||||
s = k - 1
|
||||
} else {
|
||||
blocked = s != k-1 && cccB >= cccC
|
||||
}
|
||||
if !blocked {
|
||||
combined := combine(rb.runeAt(s), rb.runeAt(i))
|
||||
if combined != 0 {
|
||||
rb.assignRune(s, combined)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
b[k] = b[i]
|
||||
k++
|
||||
}
|
||||
rb.nrune = k
|
||||
}
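End to end, this machinery is what lets the public forms recompose canonical pairs; a one-line sanity check through the package API (conventional import path assumed):

package main

import (
	"fmt"

	"golang.org/x/text/unicode/norm"
)

func main() {
	// 'e' followed by U+0301 recomposes to the single rune é under NFC.
	fmt.Println(norm.NFC.String("e\u0301") == "\u00e9") // true
}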
|
259
vendor/golang.org/x/text/unicode/norm/forminfo.go
generated
vendored
Normal file
@ -0,0 +1,259 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package norm
|
||||
|
||||
// This file contains Form-specific logic and wrappers for data in tables.go.
|
||||
|
||||
// Rune info is stored in a separate trie per composing form. A composing form
|
||||
// and its corresponding decomposing form share the same trie. Each trie maps
|
||||
// a rune to a uint16. The values take two forms. For v >= 0x8000:
|
||||
// bits
|
||||
// 15: 1 (inverse of NFD_QC bit of qcInfo)
|
||||
// 13..7: qcInfo (see below). isYesD is always true (no decomposition).
|
||||
// 6..0: ccc (compressed CCC value).
|
||||
// For v < 0x8000, the respective rune has a decomposition and v is an index
|
||||
// into a byte array of UTF-8 decomposition sequences and additional info and
|
||||
// has the form:
|
||||
// <header> <decomp_byte>* [<tccc> [<lccc>]]
|
||||
// The header contains the number of bytes in the decomposition (excluding this
|
||||
// length byte). The two most significant bits of this length byte correspond
|
||||
// to bits 5 and 4 of qcInfo (see below). The byte sequence itself starts at v+1.
|
||||
// The byte sequence is followed by a trailing and leading CCC if the values
|
||||
// for these are not zero. The value of v determines which ccc are appended
|
||||
// to the sequences. For v < firstCCC, there are none, for v >= firstCCC,
|
||||
// the sequence is followed by a trailing ccc, and for v >= firstLeadingCC
|
||||
// there is an additional leading ccc. The value of tccc itself is the
|
||||
// trailing CCC shifted left 2 bits. The two least-significant bits of tccc
|
||||
// are the number of trailing non-starters.
|
||||
|
||||
const (
|
||||
qcInfoMask = 0x3F // to clear all but the relevant bits in a qcInfo
|
||||
headerLenMask = 0x3F // extract the length value from the header byte
|
||||
headerFlagsMask = 0xC0 // extract the qcInfo bits from the header byte
|
||||
)
|
||||
|
||||
// Properties provides access to normalization properties of a rune.
|
||||
type Properties struct {
|
||||
pos uint8 // start position in reorderBuffer; used in composition.go
|
||||
size uint8 // length of UTF-8 encoding of this rune
|
||||
ccc uint8 // leading canonical combining class (ccc if not decomposition)
|
||||
tccc uint8 // trailing canonical combining class (ccc if not decomposition)
|
||||
nLead uint8 // number of leading non-starters.
|
||||
flags qcInfo // quick check flags
|
||||
index uint16
|
||||
}
|
||||
|
||||
// functions dispatchable per form
|
||||
type lookupFunc func(b input, i int) Properties
|
||||
|
||||
// formInfo holds Form-specific functions and tables.
|
||||
type formInfo struct {
|
||||
form Form
|
||||
composing, compatibility bool // form type
|
||||
info lookupFunc
|
||||
nextMain iterFunc
|
||||
}
|
||||
|
||||
var formTable = []*formInfo{{
|
||||
form: NFC,
|
||||
composing: true,
|
||||
compatibility: false,
|
||||
info: lookupInfoNFC,
|
||||
nextMain: nextComposed,
|
||||
}, {
|
||||
form: NFD,
|
||||
composing: false,
|
||||
compatibility: false,
|
||||
info: lookupInfoNFC,
|
||||
nextMain: nextDecomposed,
|
||||
}, {
|
||||
form: NFKC,
|
||||
composing: true,
|
||||
compatibility: true,
|
||||
info: lookupInfoNFKC,
|
||||
nextMain: nextComposed,
|
||||
}, {
|
||||
form: NFKD,
|
||||
composing: false,
|
||||
compatibility: true,
|
||||
info: lookupInfoNFKC,
|
||||
nextMain: nextDecomposed,
|
||||
}}
|
||||
|
||||
// We do not distinguish between boundaries for NFC, NFD, etc. to avoid
|
||||
// unexpected behavior for the user. For example, in NFD, there is a boundary
|
||||
// after 'a'. However, 'a' might combine with modifiers, so from the application's
|
||||
// perspective it is not a good boundary. We will therefore always use the
|
||||
// boundaries for the combining variants.
|
||||
|
||||
// BoundaryBefore returns true if this rune starts a new segment and
|
||||
// cannot combine with any rune on the left.
|
||||
func (p Properties) BoundaryBefore() bool {
|
||||
if p.ccc == 0 && !p.combinesBackward() {
|
||||
return true
|
||||
}
|
||||
// We assume that the CCC of the first character in a decomposition
|
||||
// is always non-zero if different from info.ccc and that we can return
|
||||
// false at this point. This is verified by maketables.
|
||||
return false
|
||||
}
|
||||
|
||||
// BoundaryAfter returns true if runes cannot combine with or otherwise
|
||||
// interact with this or previous runes.
|
||||
func (p Properties) BoundaryAfter() bool {
|
||||
// TODO: loosen these conditions.
|
||||
return p.isInert()
|
||||
}
|
||||
|
||||
// We pack quick check data in 6 bits:
|
||||
// 5: Combines forward (0 == false, 1 == true)
|
||||
// 4..3: NFC_QC Yes(00), No (10), or Maybe (11)
|
||||
// 2: NFD_QC Yes (0) or No (1). No also means there is a decomposition.
|
||||
// 1..0: Number of trailing non-starters.
|
||||
//
|
||||
// When all 6 bits are zero, the character is inert, meaning it is never
|
||||
// influenced by normalization.
|
||||
type qcInfo uint8
|
||||
|
||||
func (p Properties) isYesC() bool { return p.flags&0x10 == 0 }
|
||||
func (p Properties) isYesD() bool { return p.flags&0x4 == 0 }
|
||||
|
||||
func (p Properties) combinesForward() bool { return p.flags&0x20 != 0 }
|
||||
func (p Properties) combinesBackward() bool { return p.flags&0x8 != 0 } // == isMaybe
|
||||
func (p Properties) hasDecomposition() bool { return p.flags&0x4 != 0 } // == isNoD
|
||||
|
||||
func (p Properties) isInert() bool {
|
||||
return p.flags&qcInfoMask == 0 && p.ccc == 0
|
||||
}
|
||||
|
||||
func (p Properties) multiSegment() bool {
|
||||
return p.index >= firstMulti && p.index < endMulti
|
||||
}
|
||||
|
||||
func (p Properties) nLeadingNonStarters() uint8 {
|
||||
return p.nLead
|
||||
}
|
||||
|
||||
func (p Properties) nTrailingNonStarters() uint8 {
|
||||
return uint8(p.flags & 0x03)
|
||||
}
|
||||
|
||||
// Decomposition returns the decomposition for the underlying rune
|
||||
// or nil if there is none.
|
||||
func (p Properties) Decomposition() []byte {
|
||||
// TODO: create the decomposition for Hangul?
|
||||
if p.index == 0 {
|
||||
return nil
|
||||
}
|
||||
i := p.index
|
||||
n := decomps[i] & headerLenMask
|
||||
i++
|
||||
return decomps[i : i+uint16(n)]
|
||||
}
|
||||
|
||||
// Size returns the length of UTF-8 encoding of the rune.
|
||||
func (p Properties) Size() int {
|
||||
return int(p.size)
|
||||
}
|
||||
|
||||
// CCC returns the canonical combining class of the underlying rune.
|
||||
func (p Properties) CCC() uint8 {
|
||||
if p.index >= firstCCCZeroExcept {
|
||||
return 0
|
||||
}
|
||||
return ccc[p.ccc]
|
||||
}
|
||||
|
||||
// LeadCCC returns the CCC of the first rune in the decomposition.
|
||||
// If there is no decomposition, LeadCCC equals CCC.
|
||||
func (p Properties) LeadCCC() uint8 {
|
||||
return ccc[p.ccc]
|
||||
}
|
||||
|
||||
// TrailCCC returns the CCC of the last rune in the decomposition.
|
||||
// If there is no decomposition, TrailCCC equals CCC.
|
||||
func (p Properties) TrailCCC() uint8 {
|
||||
return ccc[p.tccc]
|
||||
}
|
||||
|
||||
// Recomposition
|
||||
// We use 32-bit keys instead of 64-bit for the two codepoint keys.
|
||||
// This clips off the bits of three entries, but we know this will not
|
||||
// result in a collision. In the unlikely event that changes to
|
||||
// UnicodeData.txt introduce collisions, the compiler will catch it.
|
||||
// Note that the recomposition map for NFC and NFKC are identical.
|
||||
|
||||
// combine returns the combined rune or 0 if it doesn't exist.
|
||||
func combine(a, b rune) rune {
|
||||
key := uint32(uint16(a))<<16 + uint32(uint16(b))
|
||||
return recompMap[key]
|
||||
}
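The key construction in combine packs both code points into a single uint32, high half from the first rune, low half from the second; shown standalone:

package main

import "fmt"

func main() {
	a, b := rune(0x0065), rune(0x0301) // 'e' + combining acute
	key := uint32(uint16(a))<<16 + uint32(uint16(b))
	fmt.Printf("%#08x\n", key) // 0x650301
}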
|
||||
|
||||
func lookupInfoNFC(b input, i int) Properties {
|
||||
v, sz := b.charinfoNFC(i)
|
||||
return compInfo(v, sz)
|
||||
}
|
||||
|
||||
func lookupInfoNFKC(b input, i int) Properties {
|
||||
v, sz := b.charinfoNFKC(i)
|
||||
return compInfo(v, sz)
|
||||
}
|
||||
|
||||
// Properties returns properties for the first rune in s.
|
||||
func (f Form) Properties(s []byte) Properties {
|
||||
if f == NFC || f == NFD {
|
||||
return compInfo(nfcData.lookup(s))
|
||||
}
|
||||
return compInfo(nfkcData.lookup(s))
|
||||
}
|
||||
|
||||
// PropertiesString returns properties for the first rune in s.
|
||||
func (f Form) PropertiesString(s string) Properties {
|
||||
if f == NFC || f == NFD {
|
||||
return compInfo(nfcData.lookupString(s))
|
||||
}
|
||||
return compInfo(nfkcData.lookupString(s))
|
||||
}
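Properties and PropertiesString are public API; a small usage example, with expected values for U+00E9 noted in comments (conventional import path assumed):

package main

import (
	"fmt"

	"golang.org/x/text/unicode/norm"
)

func main() {
	p := norm.NFD.PropertiesString("\u00e9") // é
	fmt.Println(p.Size())                    // 2: UTF-8 length of é
	fmt.Println(p.CCC())                     // 0: é is a starter
	fmt.Printf("%q\n", p.Decomposition())    // the canonical decomposition e + U+0301
}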
|
||||
|
||||
// compInfo converts the information contained in v and sz
|
||||
// to a Properties. See the comment at the top of the file
|
||||
// for more information on the format.
|
||||
func compInfo(v uint16, sz int) Properties {
|
||||
if v == 0 {
|
||||
return Properties{size: uint8(sz)}
|
||||
} else if v >= 0x8000 {
|
||||
p := Properties{
|
||||
size: uint8(sz),
|
||||
ccc: uint8(v),
|
||||
tccc: uint8(v),
|
||||
flags: qcInfo(v >> 8),
|
||||
}
|
||||
if p.ccc > 0 || p.combinesBackward() {
|
||||
p.nLead = uint8(p.flags & 0x3)
|
||||
}
|
||||
return p
|
||||
}
|
||||
// has decomposition
|
||||
h := decomps[v]
|
||||
f := (qcInfo(h&headerFlagsMask) >> 2) | 0x4
|
||||
p := Properties{size: uint8(sz), flags: f, index: v}
|
||||
if v >= firstCCC {
|
||||
v += uint16(h&headerLenMask) + 1
|
||||
c := decomps[v]
|
||||
p.tccc = c >> 2
|
||||
p.flags |= qcInfo(c & 0x3)
|
||||
if v >= firstLeadingCCC {
|
||||
p.nLead = c & 0x3
|
||||
if v >= firstStarterWithNLead {
|
||||
// We were tricked. Remove the decomposition.
|
||||
p.flags &= 0x03
|
||||
p.index = 0
|
||||
return p
|
||||
}
|
||||
p.ccc = decomps[v+1]
|
||||
}
|
||||
}
|
||||
return p
|
||||
}
|
109
vendor/golang.org/x/text/unicode/norm/input.go
generated
vendored
Normal file
@ -0,0 +1,109 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package norm

import "unicode/utf8"

type input struct {
	str   string
	bytes []byte
}

func inputBytes(str []byte) input {
	return input{bytes: str}
}

func inputString(str string) input {
	return input{str: str}
}

func (in *input) setBytes(str []byte) {
	in.str = ""
	in.bytes = str
}

func (in *input) setString(str string) {
	in.str = str
	in.bytes = nil
}

func (in *input) _byte(p int) byte {
	if in.bytes == nil {
		return in.str[p]
	}
	return in.bytes[p]
}

func (in *input) skipASCII(p, max int) int {
	if in.bytes == nil {
		for ; p < max && in.str[p] < utf8.RuneSelf; p++ {
		}
	} else {
		for ; p < max && in.bytes[p] < utf8.RuneSelf; p++ {
		}
	}
	return p
}

func (in *input) skipContinuationBytes(p int) int {
	if in.bytes == nil {
		for ; p < len(in.str) && !utf8.RuneStart(in.str[p]); p++ {
		}
	} else {
		for ; p < len(in.bytes) && !utf8.RuneStart(in.bytes[p]); p++ {
		}
	}
	return p
}
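
// Note: utf8.RuneStart reports whether a byte can begin an encoded rune,
// i.e. b&0xC0 != 0x80, so the loops above skip over UTF-8 continuation
// bytes in the 0x80-0xBF range.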

func (in *input) appendSlice(buf []byte, b, e int) []byte {
	if in.bytes != nil {
		return append(buf, in.bytes[b:e]...)
	}
	for i := b; i < e; i++ {
		buf = append(buf, in.str[i])
	}
	return buf
}

func (in *input) copySlice(buf []byte, b, e int) int {
	if in.bytes == nil {
		return copy(buf, in.str[b:e])
	}
	return copy(buf, in.bytes[b:e])
}

func (in *input) charinfoNFC(p int) (uint16, int) {
	if in.bytes == nil {
		return nfcData.lookupString(in.str[p:])
	}
	return nfcData.lookup(in.bytes[p:])
}

func (in *input) charinfoNFKC(p int) (uint16, int) {
	if in.bytes == nil {
		return nfkcData.lookupString(in.str[p:])
	}
	return nfkcData.lookup(in.bytes[p:])
}

func (in *input) hangul(p int) (r rune) {
	var size int
	if in.bytes == nil {
		if !isHangulString(in.str[p:]) {
			return 0
		}
		r, size = utf8.DecodeRuneInString(in.str[p:])
	} else {
		if !isHangul(in.bytes[p:]) {
			return 0
		}
		r, size = utf8.DecodeRune(in.bytes[p:])
	}
	if size != hangulUTF8Size {
		return 0
	}
	return r
}
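
// Hangul syllables (U+AC00..U+D7A3) always occupy three bytes in UTF-8, so
// a decoded size other than hangulUTF8Size signals a non-Hangul rune here.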
457
vendor/golang.org/x/text/unicode/norm/iter.go
generated
vendored
Normal file
@ -0,0 +1,457 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package norm

import (
	"fmt"
	"unicode/utf8"
)

// MaxSegmentSize is the maximum size of a byte buffer needed to consider any
// sequence of starter and non-starter runes for the purpose of normalization.
const MaxSegmentSize = maxByteBufferSize

// An Iter iterates over a string or byte slice, while normalizing it
// to a given Form.
type Iter struct {
	rb     reorderBuffer
	buf    [maxByteBufferSize]byte
	info   Properties // first character saved from previous iteration
	next   iterFunc   // implementation of next depends on form
	asciiF iterFunc

	p        int    // current position in input source
	multiSeg []byte // remainder of multi-segment decomposition
}

type iterFunc func(*Iter) []byte

// Init initializes i to iterate over src after normalizing it to Form f.
func (i *Iter) Init(f Form, src []byte) {
	i.p = 0
	if len(src) == 0 {
		i.setDone()
		i.rb.nsrc = 0
		return
	}
	i.multiSeg = nil
	i.rb.init(f, src)
	i.next = i.rb.f.nextMain
	i.asciiF = nextASCIIBytes
	i.info = i.rb.f.info(i.rb.src, i.p)
	i.rb.ss.first(i.info)
}

// InitString initializes i to iterate over src after normalizing it to Form f.
func (i *Iter) InitString(f Form, src string) {
	i.p = 0
	if len(src) == 0 {
		i.setDone()
		i.rb.nsrc = 0
		return
	}
	i.multiSeg = nil
	i.rb.initString(f, src)
	i.next = i.rb.f.nextMain
	i.asciiF = nextASCIIString
	i.info = i.rb.f.info(i.rb.src, i.p)
	i.rb.ss.first(i.info)
}

// Seek sets the segment to be returned by the next call to Next to start
// at position p. It is the responsibility of the caller to set p to the
// start of a segment.
func (i *Iter) Seek(offset int64, whence int) (int64, error) {
	var abs int64
	switch whence {
	case 0:
		abs = offset
	case 1:
		abs = int64(i.p) + offset
	case 2:
		abs = int64(i.rb.nsrc) + offset
	default:
		return 0, fmt.Errorf("norm: invalid whence")
	}
	if abs < 0 {
		return 0, fmt.Errorf("norm: negative position")
	}
	if int(abs) >= i.rb.nsrc {
		i.setDone()
		return int64(i.p), nil
	}
	i.p = int(abs)
	i.multiSeg = nil
	i.next = i.rb.f.nextMain
	i.info = i.rb.f.info(i.rb.src, i.p)
	i.rb.ss.first(i.info)
	return abs, nil
}
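
// The whence values 0, 1 and 2 accepted above correspond to io.SeekStart,
// io.SeekCurrent and io.SeekEnd, and the method signature matches io.Seeker.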

// returnSlice returns a slice of the underlying input type as a byte slice.
// If the underlying is of type []byte, it will simply return a slice.
// If the underlying is of type string, it will copy the slice to the buffer
// and return that.
func (i *Iter) returnSlice(a, b int) []byte {
	if i.rb.src.bytes == nil {
		return i.buf[:copy(i.buf[:], i.rb.src.str[a:b])]
	}
	return i.rb.src.bytes[a:b]
}

// Pos returns the byte position at which the next call to Next will commence processing.
func (i *Iter) Pos() int {
	return i.p
}

func (i *Iter) setDone() {
	i.next = nextDone
	i.p = i.rb.nsrc
}

// Done returns true if there is no more input to process.
func (i *Iter) Done() bool {
	return i.p >= i.rb.nsrc
}

// Next returns f(i.input[i.Pos():n]), where n is a boundary of i.input.
// For any input a and b for which f(a) == f(b), subsequent calls
// to Next will return the same segments.
// Modifying runes are grouped together with the preceding starter, if such a starter exists.
// Although not guaranteed, n will typically be the smallest possible n.
func (i *Iter) Next() []byte {
	return i.next(i)
}
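
// Typical use (an illustrative sketch):
//
//	var it Iter
//	it.InitString(NFC, s)
//	for !it.Done() {
//		seg := it.Next()
//		// process the normalized segment seg ...
//	}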

func nextASCIIBytes(i *Iter) []byte {
	p := i.p + 1
	if p >= i.rb.nsrc {
		i.setDone()
		return i.rb.src.bytes[i.p:p]
	}
	if i.rb.src.bytes[p] < utf8.RuneSelf {
		p0 := i.p
		i.p = p
		return i.rb.src.bytes[p0:p]
	}
	i.info = i.rb.f.info(i.rb.src, i.p)
	i.next = i.rb.f.nextMain
	return i.next(i)
}

func nextASCIIString(i *Iter) []byte {
	p := i.p + 1
	if p >= i.rb.nsrc {
		i.buf[0] = i.rb.src.str[i.p]
		i.setDone()
		return i.buf[:1]
	}
	if i.rb.src.str[p] < utf8.RuneSelf {
		i.buf[0] = i.rb.src.str[i.p]
		i.p = p
		return i.buf[:1]
	}
	i.info = i.rb.f.info(i.rb.src, i.p)
	i.next = i.rb.f.nextMain
	return i.next(i)
}

func nextHangul(i *Iter) []byte {
	p := i.p
	next := p + hangulUTF8Size
	if next >= i.rb.nsrc {
		i.setDone()
	} else if i.rb.src.hangul(next) == 0 {
		i.rb.ss.next(i.info)
		i.info = i.rb.f.info(i.rb.src, i.p)
		i.next = i.rb.f.nextMain
		return i.next(i)
	}
	i.p = next
	return i.buf[:decomposeHangul(i.buf[:], i.rb.src.hangul(p))]
}

func nextDone(i *Iter) []byte {
	return nil
}

// nextMulti is used for iterating over multi-segment decompositions
// for decomposing normal forms.
func nextMulti(i *Iter) []byte {
	j := 0
	d := i.multiSeg
	// skip first rune
	for j = 1; j < len(d) && !utf8.RuneStart(d[j]); j++ {
	}
	for j < len(d) {
		info := i.rb.f.info(input{bytes: d}, j)
		if info.BoundaryBefore() {
			i.multiSeg = d[j:]
			return d[:j]
		}
		j += int(info.size)
	}
	// treat last segment as normal decomposition
	i.next = i.rb.f.nextMain
	return i.next(i)
}

// nextMultiNorm is used for iterating over multi-segment decompositions
// for composing normal forms.
func nextMultiNorm(i *Iter) []byte {
	j := 0
	d := i.multiSeg
	for j < len(d) {
		info := i.rb.f.info(input{bytes: d}, j)
		if info.BoundaryBefore() {
			i.rb.compose()
			seg := i.buf[:i.rb.flushCopy(i.buf[:])]
			i.rb.insertUnsafe(input{bytes: d}, j, info)
			i.multiSeg = d[j+int(info.size):]
			return seg
		}
		i.rb.insertUnsafe(input{bytes: d}, j, info)
		j += int(info.size)
	}
	i.multiSeg = nil
	i.next = nextComposed
	return doNormComposed(i)
}

// nextDecomposed is the implementation of Next for forms NFD and NFKD.
func nextDecomposed(i *Iter) (next []byte) {
	outp := 0
	inCopyStart, outCopyStart := i.p, 0
	for {
		if sz := int(i.info.size); sz <= 1 {
			i.rb.ss = 0
			p := i.p
			i.p++ // ASCII or illegal byte. Either way, advance by 1.
			if i.p >= i.rb.nsrc {
				i.setDone()
				return i.returnSlice(p, i.p)
			} else if i.rb.src._byte(i.p) < utf8.RuneSelf {
				i.next = i.asciiF
				return i.returnSlice(p, i.p)
			}
			outp++
		} else if d := i.info.Decomposition(); d != nil {
			// Note: If leading CCC != 0, then len(d) == 2 and last is also non-zero.
			// Case 1: there is a leftover to copy. In this case the decomposition
			// must begin with a modifier and should always be appended.
			// Case 2: no leftover. Simply return d if followed by a ccc == 0 value.
			p := outp + len(d)
			if outp > 0 {
				i.rb.src.copySlice(i.buf[outCopyStart:], inCopyStart, i.p)
				// TODO: this condition should not be possible, but we leave it
				// in for defensive purposes.
				if p > len(i.buf) {
					return i.buf[:outp]
				}
			} else if i.info.multiSegment() {
				// outp must be 0 as multi-segment decompositions always
				// start a new segment.
				if i.multiSeg == nil {
					i.multiSeg = d
					i.next = nextMulti
					return nextMulti(i)
				}
				// We are in the last segment. Treat as normal decomposition.
				d = i.multiSeg
				i.multiSeg = nil
				p = len(d)
			}
			prevCC := i.info.tccc
			if i.p += sz; i.p >= i.rb.nsrc {
				i.setDone()
				i.info = Properties{} // Force BoundaryBefore to succeed.
			} else {
				i.info = i.rb.f.info(i.rb.src, i.p)
			}
			switch i.rb.ss.next(i.info) {
			case ssOverflow:
				i.next = nextCGJDecompose
				fallthrough
			case ssStarter:
				if outp > 0 {
					copy(i.buf[outp:], d)
					return i.buf[:p]
				}
				return d
			}
			copy(i.buf[outp:], d)
			outp = p
			inCopyStart, outCopyStart = i.p, outp
			if i.info.ccc < prevCC {
				goto doNorm
			}
			continue
		} else if r := i.rb.src.hangul(i.p); r != 0 {
			outp = decomposeHangul(i.buf[:], r)
			i.p += hangulUTF8Size
			inCopyStart, outCopyStart = i.p, outp
			if i.p >= i.rb.nsrc {
				i.setDone()
				break
			} else if i.rb.src.hangul(i.p) != 0 {
				i.next = nextHangul
				return i.buf[:outp]
			}
		} else {
			p := outp + sz
			if p > len(i.buf) {
				break
			}
			outp = p
			i.p += sz
		}
		if i.p >= i.rb.nsrc {
			i.setDone()
			break
		}
		prevCC := i.info.tccc
		i.info = i.rb.f.info(i.rb.src, i.p)
		if v := i.rb.ss.next(i.info); v == ssStarter {
			break
		} else if v == ssOverflow {
			i.next = nextCGJDecompose
			break
		}
		if i.info.ccc < prevCC {
			goto doNorm
		}
	}
	if outCopyStart == 0 {
		return i.returnSlice(inCopyStart, i.p)
	} else if inCopyStart < i.p {
		i.rb.src.copySlice(i.buf[outCopyStart:], inCopyStart, i.p)
	}
	return i.buf[:outp]
doNorm:
	// Insert what we have decomposed so far in the reorderBuffer.
	// As we will only reorder, there will always be enough room.
	i.rb.src.copySlice(i.buf[outCopyStart:], inCopyStart, i.p)
	i.rb.insertDecomposed(i.buf[0:outp])
	return doNormDecomposed(i)
}

func doNormDecomposed(i *Iter) []byte {
	for {
		i.rb.insertUnsafe(i.rb.src, i.p, i.info)
		if i.p += int(i.info.size); i.p >= i.rb.nsrc {
			i.setDone()
			break
		}
		i.info = i.rb.f.info(i.rb.src, i.p)
		if i.info.ccc == 0 {
			break
		}
		if s := i.rb.ss.next(i.info); s == ssOverflow {
			i.next = nextCGJDecompose
			break
		}
	}
	// new segment or too many combining characters: exit normalization
	return i.buf[:i.rb.flushCopy(i.buf[:])]
}

func nextCGJDecompose(i *Iter) []byte {
	i.rb.ss = 0
	i.rb.insertCGJ()
	i.next = nextDecomposed
	i.rb.ss.first(i.info)
	buf := doNormDecomposed(i)
	return buf
}

// nextComposed is the implementation of Next for forms NFC and NFKC.
func nextComposed(i *Iter) []byte {
	outp, startp := 0, i.p
	var prevCC uint8
	for {
		if !i.info.isYesC() {
			goto doNorm
		}
		prevCC = i.info.tccc
		sz := int(i.info.size)
		if sz == 0 {
			sz = 1 // illegal rune: copy byte-by-byte
		}
		p := outp + sz
		if p > len(i.buf) {
			break
		}
		outp = p
		i.p += sz
		if i.p >= i.rb.nsrc {
			i.setDone()
			break
		} else if i.rb.src._byte(i.p) < utf8.RuneSelf {
			i.rb.ss = 0
			i.next = i.asciiF
			break
		}
		i.info = i.rb.f.info(i.rb.src, i.p)
		if v := i.rb.ss.next(i.info); v == ssStarter {
			break
		} else if v == ssOverflow {
			i.next = nextCGJCompose
			break
		}
		if i.info.ccc < prevCC {
			goto doNorm
		}
	}
	return i.returnSlice(startp, i.p)
doNorm:
	// reset to start position
	i.p = startp
	i.info = i.rb.f.info(i.rb.src, i.p)
	i.rb.ss.first(i.info)
	if i.info.multiSegment() {
		d := i.info.Decomposition()
		info := i.rb.f.info(input{bytes: d}, 0)
		i.rb.insertUnsafe(input{bytes: d}, 0, info)
		i.multiSeg = d[int(info.size):]
		i.next = nextMultiNorm
		return nextMultiNorm(i)
	}
	i.rb.ss.first(i.info)
	i.rb.insertUnsafe(i.rb.src, i.p, i.info)
	return doNormComposed(i)
}

func doNormComposed(i *Iter) []byte {
	// First rune should already be inserted.
	for {
		if i.p += int(i.info.size); i.p >= i.rb.nsrc {
			i.setDone()
			break
		}
		i.info = i.rb.f.info(i.rb.src, i.p)
		if s := i.rb.ss.next(i.info); s == ssStarter {
			break
		} else if s == ssOverflow {
			i.next = nextCGJCompose
			break
		}
		i.rb.insertUnsafe(i.rb.src, i.p, i.info)
	}
	i.rb.compose()
	seg := i.buf[:i.rb.flushCopy(i.buf[:])]
	return seg
}

func nextCGJCompose(i *Iter) []byte {
	i.rb.ss = 0 // instead of first
	i.rb.insertCGJ()
	i.next = nextComposed
	// Note that we treat any rune with nLeadingNonStarters > 0 as a non-starter,
	// even if it is not. This is particularly dubious for U+FF9E and U+FF9F.
	// If we ever change that, insert a check here.
	i.rb.ss.first(i.info)
	i.rb.insertUnsafe(i.rb.src, i.p, i.info)
	return doNormComposed(i)
}
976
vendor/golang.org/x/text/unicode/norm/maketables.go
generated
vendored
Normal file
@ -0,0 +1,976 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Normalization table generator.
// Data read from the web.
// See forminfo.go for a description of the trie values associated with each rune.

package main

import (
	"bytes"
	"flag"
	"fmt"
	"io"
	"log"
	"sort"
	"strconv"
	"strings"

	"golang.org/x/text/internal/gen"
	"golang.org/x/text/internal/triegen"
	"golang.org/x/text/internal/ucd"
)

func main() {
	gen.Init()
	loadUnicodeData()
	compactCCC()
	loadCompositionExclusions()
	completeCharFields(FCanonical)
	completeCharFields(FCompatibility)
	computeNonStarterCounts()
	verifyComputed()
	printChars()
	testDerived()
	printTestdata()
	makeTables()
}

var (
	tablelist = flag.String("tables",
		"all",
		"comma-separated list of which tables to generate; "+
			"can be 'decomp', 'recomp', 'info' and 'all'")
	test = flag.Bool("test",
		false,
		"test existing tables against DerivedNormalizationProps and generate test data for regression testing")
	verbose = flag.Bool("verbose",
		false,
		"write data to stdout as it is parsed")
)

const MaxChar = 0x10FFFF // anything above this shouldn't exist

// Quick Check properties of runes allow us to quickly
// determine whether a rune may occur in a normal form.
// For a given normal form, a rune may be guaranteed to occur
// verbatim (QC=Yes), may or may not combine with another
// rune (QC=Maybe), or may not occur (QC=No).
type QCResult int

const (
	QCUnknown QCResult = iota
	QCYes
	QCNo
	QCMaybe
)

func (r QCResult) String() string {
	switch r {
	case QCYes:
		return "Yes"
	case QCNo:
		return "No"
	case QCMaybe:
		return "Maybe"
	}
	return "***UNKNOWN***"
}

const (
	FCanonical     = iota // NFC or NFD
	FCompatibility        // NFKC or NFKD
	FNumberOfFormTypes
)

const (
	MComposed   = iota // NFC or NFKC
	MDecomposed        // NFD or NFKD
	MNumberOfModes
)

// This contains only the properties we're interested in.
type Char struct {
	name          string
	codePoint     rune  // if zero, this index is not a valid code point.
	ccc           uint8 // canonical combining class
	origCCC       uint8
	excludeInComp bool // from CompositionExclusions.txt
	compatDecomp  bool // it has a compatibility expansion

	nTrailingNonStarters uint8
	nLeadingNonStarters  uint8 // must be equal to trailing if non-zero

	forms [FNumberOfFormTypes]FormInfo // For FCanonical and FCompatibility

	state State
}

var chars = make([]Char, MaxChar+1)
var cccMap = make(map[uint8]uint8)

func (c Char) String() string {
	buf := new(bytes.Buffer)

	fmt.Fprintf(buf, "%U [%s]:\n", c.codePoint, c.name)
	fmt.Fprintf(buf, "  ccc: %v\n", c.ccc)
	fmt.Fprintf(buf, "  excludeInComp: %v\n", c.excludeInComp)
	fmt.Fprintf(buf, "  compatDecomp: %v\n", c.compatDecomp)
	fmt.Fprintf(buf, "  state: %v\n", c.state)
	fmt.Fprintf(buf, "  NFC:\n")
	fmt.Fprint(buf, c.forms[FCanonical])
	fmt.Fprintf(buf, "  NFKC:\n")
	fmt.Fprint(buf, c.forms[FCompatibility])

	return buf.String()
}

// In UnicodeData.txt, some ranges are marked like this:
//	3400;<CJK Ideograph Extension A, First>;Lo;0;L;;;;;N;;;;;
//	4DB5;<CJK Ideograph Extension A, Last>;Lo;0;L;;;;;N;;;;;
// parseCharacter keeps a state variable indicating the weirdness.
type State int

const (
	SNormal State = iota // known to be zero for the type
	SFirst
	SLast
	SMissing
)

var lastChar = rune('\u0000')

func (c Char) isValid() bool {
	return c.codePoint != 0 && c.state != SMissing
}

type FormInfo struct {
	quickCheck [MNumberOfModes]QCResult // index: MComposed or MDecomposed
	verified   [MNumberOfModes]bool     // index: MComposed or MDecomposed

	combinesForward  bool // May combine with rune on the right
	combinesBackward bool // May combine with rune on the left
	isOneWay         bool // Never appears in result
	inDecomp         bool // Some decompositions result in this char.
	decomp           Decomposition
	expandedDecomp   Decomposition
}

func (f FormInfo) String() string {
	buf := bytes.NewBuffer(make([]byte, 0))

	fmt.Fprintf(buf, "    quickCheck[C]: %v\n", f.quickCheck[MComposed])
	fmt.Fprintf(buf, "    quickCheck[D]: %v\n", f.quickCheck[MDecomposed])
	fmt.Fprintf(buf, "    cmbForward: %v\n", f.combinesForward)
	fmt.Fprintf(buf, "    cmbBackward: %v\n", f.combinesBackward)
	fmt.Fprintf(buf, "    isOneWay: %v\n", f.isOneWay)
	fmt.Fprintf(buf, "    inDecomp: %v\n", f.inDecomp)
	fmt.Fprintf(buf, "    decomposition: %X\n", f.decomp)
	fmt.Fprintf(buf, "    expandedDecomp: %X\n", f.expandedDecomp)

	return buf.String()
}

type Decomposition []rune

func parseDecomposition(s string, skipfirst bool) (a []rune, err error) {
	decomp := strings.Split(s, " ")
	if len(decomp) > 0 && skipfirst {
		decomp = decomp[1:]
	}
	for _, d := range decomp {
		point, err := strconv.ParseUint(d, 16, 64)
		if err != nil {
			return a, err
		}
		a = append(a, rune(point))
	}
	return a, nil
}

func loadUnicodeData() {
	f := gen.OpenUCDFile("UnicodeData.txt")
	defer f.Close()
	p := ucd.New(f)
	for p.Next() {
		r := p.Rune(ucd.CodePoint)
		char := &chars[r]

		char.ccc = uint8(p.Uint(ucd.CanonicalCombiningClass))
		decmap := p.String(ucd.DecompMapping)

		exp, err := parseDecomposition(decmap, false)
		isCompat := false
		if err != nil {
			if len(decmap) > 0 {
				exp, err = parseDecomposition(decmap, true)
				if err != nil {
					log.Fatalf(`%U: bad decomp |%v|: "%s"`, r, decmap, err)
				}
				isCompat = true
			}
		}

		char.name = p.String(ucd.Name)
		char.codePoint = r
		char.forms[FCompatibility].decomp = exp
		if !isCompat {
			char.forms[FCanonical].decomp = exp
		} else {
			char.compatDecomp = true
		}
		if len(decmap) > 0 {
			char.forms[FCompatibility].decomp = exp
		}
	}
	if err := p.Err(); err != nil {
		log.Fatal(err)
	}
}

// compactCCC converts the sparse set of CCC values to a contiguous one,
// reducing the number of bits needed from 8 to 6.
func compactCCC() {
	m := make(map[uint8]uint8)
	for i := range chars {
		c := &chars[i]
		m[c.ccc] = 0
	}
	cccs := []int{}
	for v := range m {
		cccs = append(cccs, int(v))
	}
	sort.Ints(cccs)
	for i, c := range cccs {
		cccMap[uint8(i)] = uint8(c)
		m[uint8(c)] = uint8(i)
	}
	for i := range chars {
		c := &chars[i]
		c.origCCC = c.ccc
		c.ccc = m[c.ccc]
	}
	if len(m) >= 1<<6 {
		log.Fatalf("too many distinct CCC values: %d >= 64", len(m))
	}
}

// CompositionExclusions.txt has form:
//	0958    # ...
// See http://unicode.org/reports/tr44/ for full explanation
func loadCompositionExclusions() {
	f := gen.OpenUCDFile("CompositionExclusions.txt")
	defer f.Close()
	p := ucd.New(f)
	for p.Next() {
		c := &chars[p.Rune(0)]
		if c.excludeInComp {
			log.Fatalf("%U: Duplicate entry in exclusions.", c.codePoint)
		}
		c.excludeInComp = true
	}
	if e := p.Err(); e != nil {
		log.Fatal(e)
	}
}

// hasCompatDecomp returns true if any of the recursive
// decompositions contains a compatibility expansion.
// In this case, the character may not occur in NFK*.
func hasCompatDecomp(r rune) bool {
	c := &chars[r]
	if c.compatDecomp {
		return true
	}
	for _, d := range c.forms[FCompatibility].decomp {
		if hasCompatDecomp(d) {
			return true
		}
	}
	return false
}

// Hangul related constants.
const (
	HangulBase = 0xAC00
	HangulEnd  = 0xD7A4 // hangulBase + Jamo combinations (19 * 21 * 28)

	JamoLBase = 0x1100
	JamoLEnd  = 0x1113
	JamoVBase = 0x1161
	JamoVEnd  = 0x1176
	JamoTBase = 0x11A8
	JamoTEnd  = 0x11C3

	JamoLVTCount = 19 * 21 * 28
	JamoTCount   = 28
)
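
// Sanity check: 19 * 21 * 28 = 11172 LVT syllable combinations, and
// 0xAC00 + 11172 = 0xD7A4, which matches HangulEnd above.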

func isHangul(r rune) bool {
	return HangulBase <= r && r < HangulEnd
}

func isHangulWithoutJamoT(r rune) bool {
	if !isHangul(r) {
		return false
	}
	r -= HangulBase
	return r < JamoLVTCount && r%JamoTCount == 0
}

func ccc(r rune) uint8 {
	return chars[r].ccc
}

// Insert a rune in a buffer, ordered by Canonical Combining Class.
func insertOrdered(b Decomposition, r rune) Decomposition {
	n := len(b)
	b = append(b, 0)
	cc := ccc(r)
	if cc > 0 {
		// Use bubble sort.
		for ; n > 0; n-- {
			if ccc(b[n-1]) <= cc {
				break
			}
			b[n] = b[n-1]
		}
	}
	b[n] = r
	return b
}
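
// For example, inserting U+0327 COMBINING CEDILLA (canonical ccc 202) into a
// buffer ending in U+0300 COMBINING GRAVE ACCENT (ccc 230) bubbles the cedilla
// before the grave accent; the compacted ccc values preserve this ordering.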

// Recursively decompose.
func decomposeRecursive(form int, r rune, d Decomposition) Decomposition {
	dcomp := chars[r].forms[form].decomp
	if len(dcomp) == 0 {
		return insertOrdered(d, r)
	}
	for _, c := range dcomp {
		d = decomposeRecursive(form, c, d)
	}
	return d
}

func completeCharFields(form int) {
	// Phase 0: pre-expand decomposition.
	for i := range chars {
		f := &chars[i].forms[form]
		if len(f.decomp) == 0 {
			continue
		}
		exp := make(Decomposition, 0)
		for _, c := range f.decomp {
			exp = decomposeRecursive(form, c, exp)
		}
		f.expandedDecomp = exp
	}

	// Phase 1: composition exclusion, mark decomposition.
	for i := range chars {
		c := &chars[i]
		f := &c.forms[form]

		// Marks script-specific exclusions and version restricted.
		f.isOneWay = c.excludeInComp

		// Singletons
		f.isOneWay = f.isOneWay || len(f.decomp) == 1

		// Non-starter decompositions
		if len(f.decomp) > 1 {
			chk := c.ccc != 0 || chars[f.decomp[0]].ccc != 0
			f.isOneWay = f.isOneWay || chk
		}

		// Runes that decompose into more than two runes.
		f.isOneWay = f.isOneWay || len(f.decomp) > 2

		if form == FCompatibility {
			f.isOneWay = f.isOneWay || hasCompatDecomp(c.codePoint)
		}

		for _, r := range f.decomp {
			chars[r].forms[form].inDecomp = true
		}
	}

	// Phase 2: forward and backward combining.
	for i := range chars {
		c := &chars[i]
		f := &c.forms[form]

		if !f.isOneWay && len(f.decomp) == 2 {
			f0 := &chars[f.decomp[0]].forms[form]
			f1 := &chars[f.decomp[1]].forms[form]
			if !f0.isOneWay {
				f0.combinesForward = true
			}
			if !f1.isOneWay {
				f1.combinesBackward = true
			}
		}
		if isHangulWithoutJamoT(rune(i)) {
			f.combinesForward = true
		}
	}

	// Phase 3: quick check values.
	for i := range chars {
		c := &chars[i]
		f := &c.forms[form]

		switch {
		case len(f.decomp) > 0:
			f.quickCheck[MDecomposed] = QCNo
		case isHangul(rune(i)):
			f.quickCheck[MDecomposed] = QCNo
		default:
			f.quickCheck[MDecomposed] = QCYes
		}
		switch {
		case f.isOneWay:
			f.quickCheck[MComposed] = QCNo
		case (i & 0xffff00) == JamoLBase:
			f.quickCheck[MComposed] = QCYes
			if JamoLBase <= i && i < JamoLEnd {
				f.combinesForward = true
			}
			if JamoVBase <= i && i < JamoVEnd {
				f.quickCheck[MComposed] = QCMaybe
				f.combinesBackward = true
				f.combinesForward = true
			}
			if JamoTBase <= i && i < JamoTEnd {
				f.quickCheck[MComposed] = QCMaybe
				f.combinesBackward = true
			}
		case !f.combinesBackward:
			f.quickCheck[MComposed] = QCYes
		default:
			f.quickCheck[MComposed] = QCMaybe
		}
	}
}

func computeNonStarterCounts() {
	// Phase 4: leading and trailing non-starter count
	for i := range chars {
		c := &chars[i]

		runes := []rune{rune(i)}
		// We always use FCompatibility so that the CGJ insertion points do not
		// change for repeated normalizations with different forms.
		if exp := c.forms[FCompatibility].expandedDecomp; len(exp) > 0 {
			runes = exp
		}
		// We consider runes that combine backwards to be non-starters for the
		// purpose of Stream-Safe Text Processing.
		for _, r := range runes {
			if cr := &chars[r]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward {
				break
			}
			c.nLeadingNonStarters++
		}
		for i := len(runes) - 1; i >= 0; i-- {
			if cr := &chars[runes[i]]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward {
				break
			}
			c.nTrailingNonStarters++
		}
		if c.nTrailingNonStarters > 3 {
			log.Fatalf("%U: Decomposition with more than 3 (%d) trailing modifiers (%U)", i, c.nTrailingNonStarters, runes)
		}

		if isHangul(rune(i)) {
			c.nTrailingNonStarters = 2
			if isHangulWithoutJamoT(rune(i)) {
				c.nTrailingNonStarters = 1
			}
		}

		if l, t := c.nLeadingNonStarters, c.nTrailingNonStarters; l > 0 && l != t {
			log.Fatalf("%U: number of leading and trailing non-starters should be equal (%d vs %d)", i, l, t)
		}
		if t := c.nTrailingNonStarters; t > 3 {
			log.Fatalf("%U: number of trailing non-starters is %d > 3", i, t)
		}
	}
}

func printBytes(w io.Writer, b []byte, name string) {
	fmt.Fprintf(w, "// %s: %d bytes\n", name, len(b))
	fmt.Fprintf(w, "var %s = [...]byte {", name)
	for i, c := range b {
		switch {
		case i%64 == 0:
			fmt.Fprintf(w, "\n// Bytes %x - %x\n", i, i+63)
		case i%8 == 0:
			fmt.Fprintf(w, "\n")
		}
		fmt.Fprintf(w, "0x%.2X, ", c)
	}
	fmt.Fprint(w, "\n}\n\n")
}

// See forminfo.go for format.
func makeEntry(f *FormInfo, c *Char) uint16 {
	e := uint16(0)
	if r := c.codePoint; HangulBase <= r && r < HangulEnd {
		e |= 0x40
	}
	if f.combinesForward {
		e |= 0x20
	}
	if f.quickCheck[MDecomposed] == QCNo {
		e |= 0x4
	}
	switch f.quickCheck[MComposed] {
	case QCYes:
	case QCNo:
		e |= 0x10
	case QCMaybe:
		e |= 0x18
	default:
		log.Fatalf("Illegal quickcheck value %v.", f.quickCheck[MComposed])
	}
	e |= uint16(c.nTrailingNonStarters)
	return e
}
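
// Summary of the flag bits set above:
//	0x40: Hangul syllable
//	0x20: combines forward
//	0x10: composed quick check No (0x18 encodes Maybe)
//	0x04: decomposed quick check No
//	0x03: number of trailing non-starters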

// decompSet keeps track of unique decompositions, grouped by whether
// the decomposition is followed by a trailing and/or leading CCC.
type decompSet [7]map[string]bool

const (
	normalDecomp = iota
	firstMulti
	firstCCC
	endMulti
	firstLeadingCCC
	firstCCCZeroExcept
	firstStarterWithNLead
	lastDecomp
)

var cname = []string{"firstMulti", "firstCCC", "endMulti", "firstLeadingCCC", "firstCCCZeroExcept", "firstStarterWithNLead", "lastDecomp"}

func makeDecompSet() decompSet {
	m := decompSet{}
	for i := range m {
		m[i] = make(map[string]bool)
	}
	return m
}
func (m *decompSet) insert(key int, s string) {
	m[key][s] = true
}

func printCharInfoTables(w io.Writer) int {
	mkstr := func(r rune, f *FormInfo) (int, string) {
		d := f.expandedDecomp
		s := string([]rune(d))
		if max := 1 << 6; len(s) >= max {
			const msg = "%U: too many bytes in decomposition: %d >= %d"
			log.Fatalf(msg, r, len(s), max)
		}
		head := uint8(len(s))
		if f.quickCheck[MComposed] != QCYes {
			head |= 0x40
		}
		if f.combinesForward {
			head |= 0x80
		}
		s = string([]byte{head}) + s

		lccc := ccc(d[0])
		tccc := ccc(d[len(d)-1])
		cc := ccc(r)
		if cc != 0 && lccc == 0 && tccc == 0 {
			log.Fatalf("%U: trailing and leading ccc are 0 for non-zero ccc %d", r, cc)
		}
		if tccc < lccc && lccc != 0 {
			const msg = "%U: lccc (%d) must be <= tccc (%d)"
			log.Fatalf(msg, r, lccc, tccc)
		}
		index := normalDecomp
		nTrail := chars[r].nTrailingNonStarters
		nLead := chars[r].nLeadingNonStarters
		if tccc > 0 || lccc > 0 || nTrail > 0 {
			tccc <<= 2
			tccc |= nTrail
			s += string([]byte{tccc})
			index = endMulti
			for _, r := range d[1:] {
				if ccc(r) == 0 {
					index = firstCCC
				}
			}
			if lccc > 0 || nLead > 0 {
				s += string([]byte{lccc})
				if index == firstCCC {
					log.Fatalf("%U: multi-segment decomposition not supported for decompositions with leading CCC != 0", r)
				}
				index = firstLeadingCCC
			}
			if cc != lccc {
				if cc != 0 {
					log.Fatalf("%U: for lccc != ccc, expected ccc to be 0; was %d", r, cc)
				}
				index = firstCCCZeroExcept
			}
		} else if len(d) > 1 {
			index = firstMulti
		}
		return index, s
	}

	decompSet := makeDecompSet()
	const nLeadStr = "\x00\x01" // 0-byte length and tccc with nTrail.
	decompSet.insert(firstStarterWithNLead, nLeadStr)

	// Store the uniqued decompositions in a byte buffer,
	// preceded by their byte length.
	for _, c := range chars {
		for _, f := range c.forms {
			if len(f.expandedDecomp) == 0 {
				continue
			}
			if f.combinesBackward {
				log.Fatalf("%U: combinesBackward and decompose", c.codePoint)
			}
			index, s := mkstr(c.codePoint, &f)
			decompSet.insert(index, s)
		}
	}

	decompositions := bytes.NewBuffer(make([]byte, 0, 10000))
	size := 0
	positionMap := make(map[string]uint16)
	decompositions.WriteString("\000")
	fmt.Fprintln(w, "const (")
	for i, m := range decompSet {
		sa := []string{}
		for s := range m {
			sa = append(sa, s)
		}
		sort.Strings(sa)
		for _, s := range sa {
			p := decompositions.Len()
			decompositions.WriteString(s)
			positionMap[s] = uint16(p)
		}
		if cname[i] != "" {
			fmt.Fprintf(w, "%s = 0x%X\n", cname[i], decompositions.Len())
		}
	}
	fmt.Fprintln(w, "maxDecomp = 0x8000")
	fmt.Fprintln(w, ")")
	b := decompositions.Bytes()
	printBytes(w, b, "decomps")
	size += len(b)

	varnames := []string{"nfc", "nfkc"}
	for i := 0; i < FNumberOfFormTypes; i++ {
		trie := triegen.NewTrie(varnames[i])

		for r, c := range chars {
			f := c.forms[i]
			d := f.expandedDecomp
			if len(d) != 0 {
				_, key := mkstr(c.codePoint, &f)
				trie.Insert(rune(r), uint64(positionMap[key]))
				if c.ccc != ccc(d[0]) {
					// We assume the lead ccc of a decomposition !=0 in this case.
					if ccc(d[0]) == 0 {
						log.Fatalf("Expected leading CCC to be non-zero; ccc is %d", c.ccc)
					}
				}
			} else if c.nLeadingNonStarters > 0 && len(f.expandedDecomp) == 0 && c.ccc == 0 && !f.combinesBackward {
				// Handle cases where it can't be detected that the nLead should be equal
				// to nTrail.
				trie.Insert(c.codePoint, uint64(positionMap[nLeadStr]))
			} else if v := makeEntry(&f, &c)<<8 | uint16(c.ccc); v != 0 {
				trie.Insert(c.codePoint, uint64(0x8000|v))
			}
		}
		sz, err := trie.Gen(w, triegen.Compact(&normCompacter{name: varnames[i]}))
		if err != nil {
			log.Fatal(err)
		}
		size += sz
	}
	return size
}

func contains(sa []string, s string) bool {
	for _, a := range sa {
		if a == s {
			return true
		}
	}
	return false
}

func makeTables() {
	w := &bytes.Buffer{}

	size := 0
	if *tablelist == "" {
		return
	}
	list := strings.Split(*tablelist, ",")
	if *tablelist == "all" {
		list = []string{"recomp", "info"}
	}

	// Compute maximum decomposition size.
	max := 0
	for _, c := range chars {
		if n := len(string(c.forms[FCompatibility].expandedDecomp)); n > max {
			max = n
		}
	}

	fmt.Fprintln(w, "const (")
	fmt.Fprintln(w, "\t// Version is the Unicode edition from which the tables are derived.")
	fmt.Fprintf(w, "\tVersion = %q\n", gen.UnicodeVersion())
	fmt.Fprintln(w)
	fmt.Fprintln(w, "\t// MaxTransformChunkSize indicates the maximum number of bytes that Transform")
	fmt.Fprintln(w, "\t// may need to write atomically for any Form. Making a destination buffer at")
	fmt.Fprintln(w, "\t// least this size ensures that Transform can always make progress and that")
	fmt.Fprintln(w, "\t// the user does not need to grow the buffer on an ErrShortDst.")
	fmt.Fprintf(w, "\tMaxTransformChunkSize = %d+maxNonStarters*4\n", len(string(rune(0x034F)))+max)
	fmt.Fprintln(w, ")\n")

	// Print the CCC remap table.
	size += len(cccMap)
	fmt.Fprintf(w, "var ccc = [%d]uint8{", len(cccMap))
	for i := 0; i < len(cccMap); i++ {
		if i%8 == 0 {
			fmt.Fprintln(w)
		}
		fmt.Fprintf(w, "%3d, ", cccMap[uint8(i)])
	}
	fmt.Fprintln(w, "\n}\n")

	if contains(list, "info") {
		size += printCharInfoTables(w)
	}

	if contains(list, "recomp") {
		// Note that we use 32 bit keys, instead of 64 bit.
		// This clips the bits of three entries, but we know
		// this won't cause a collision. The compiler will catch
		// any changes made to UnicodeData.txt that introduce
		// a collision.
		// Note that the recomposition maps for NFC and NFKC
		// are identical.

		// Recomposition map
		nrentries := 0
		for _, c := range chars {
			f := c.forms[FCanonical]
			if !f.isOneWay && len(f.decomp) > 0 {
				nrentries++
			}
		}
		sz := nrentries * 8
		size += sz
		fmt.Fprintf(w, "// recompMap: %d bytes (entries only)\n", sz)
		fmt.Fprintln(w, "var recompMap = map[uint32]rune{")
		for i, c := range chars {
			f := c.forms[FCanonical]
			d := f.decomp
			if !f.isOneWay && len(d) > 0 {
				key := uint32(uint16(d[0]))<<16 + uint32(uint16(d[1]))
				fmt.Fprintf(w, "0x%.8X: 0x%.4X,\n", key, i)
			}
		}
		fmt.Fprintf(w, "}\n\n")
	}

	fmt.Fprintf(w, "// Total size of tables: %dKB (%d bytes)\n", (size+512)/1024, size)
	gen.WriteVersionedGoFile("tables.go", "norm", w.Bytes())
}

func printChars() {
	if *verbose {
		for _, c := range chars {
			if !c.isValid() || c.state == SMissing {
				continue
			}
			fmt.Println(c)
		}
	}
}

// verifyComputed does various consistency tests.
func verifyComputed() {
	for i, c := range chars {
		for _, f := range c.forms {
			isNo := (f.quickCheck[MDecomposed] == QCNo)
			if (len(f.decomp) > 0) != isNo && !isHangul(rune(i)) {
				log.Fatalf("%U: NF*D QC must be No if rune decomposes", i)
			}

			isMaybe := f.quickCheck[MComposed] == QCMaybe
			if f.combinesBackward != isMaybe {
				log.Fatalf("%U: NF*C QC must be Maybe if combinesBackward", i)
			}
			if len(f.decomp) > 0 && f.combinesForward && isMaybe {
				log.Fatalf("%U: NF*C QC must be Yes or No if combinesForward and decomposes", i)
			}

			if len(f.expandedDecomp) != 0 {
				continue
			}
			if a, b := c.nLeadingNonStarters > 0, (c.ccc > 0 || f.combinesBackward); a != b {
				// We accept these runes to be treated differently (it only affects
				// segment breaking in iteration, most likely on improper use), but
				// reconsider if more characters are added.
				// U+FF9E HALFWIDTH KATAKANA VOICED SOUND MARK;Lm;0;L;<narrow> 3099;;;;N;;;;;
				// U+FF9F HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK;Lm;0;L;<narrow> 309A;;;;N;;;;;
				// U+3133 HANGUL LETTER KIYEOK-SIOS;Lo;0;L;<compat> 11AA;;;;N;HANGUL LETTER GIYEOG SIOS;;;;
				// U+318E HANGUL LETTER ARAEAE;Lo;0;L;<compat> 11A1;;;;N;HANGUL LETTER ALAE AE;;;;
				// U+FFA3 HALFWIDTH HANGUL LETTER KIYEOK-SIOS;Lo;0;L;<narrow> 3133;;;;N;HALFWIDTH HANGUL LETTER GIYEOG SIOS;;;;
				// U+FFDC HALFWIDTH HANGUL LETTER I;Lo;0;L;<narrow> 3163;;;;N;;;;;
				if i != 0xFF9E && i != 0xFF9F && !(0x3133 <= i && i <= 0x318E) && !(0xFFA3 <= i && i <= 0xFFDC) {
					log.Fatalf("%U: nLead was %v; want %v", i, a, b)
				}
			}
		}
		nfc := c.forms[FCanonical]
		nfkc := c.forms[FCompatibility]
		if nfc.combinesBackward != nfkc.combinesBackward {
			log.Fatalf("%U: Cannot combine combinesBackward\n", c.codePoint)
		}
	}
}

// Use values in DerivedNormalizationProps.txt to compare against the
// values we computed.
// DerivedNormalizationProps.txt has form:
//	00C0..00C5    ; NFD_QC; N # ...
//	0374          ; NFD_QC; N # ...
// See http://unicode.org/reports/tr44/ for full explanation
func testDerived() {
	f := gen.OpenUCDFile("DerivedNormalizationProps.txt")
	defer f.Close()
	p := ucd.New(f)
	for p.Next() {
		r := p.Rune(0)
		c := &chars[r]

		var ftype, mode int
		qt := p.String(1)
		switch qt {
		case "NFC_QC":
			ftype, mode = FCanonical, MComposed
		case "NFD_QC":
			ftype, mode = FCanonical, MDecomposed
		case "NFKC_QC":
			ftype, mode = FCompatibility, MComposed
		case "NFKD_QC":
			ftype, mode = FCompatibility, MDecomposed
		default:
			continue
		}
		var qr QCResult
		switch p.String(2) {
		case "Y":
			qr = QCYes
		case "N":
			qr = QCNo
		case "M":
			qr = QCMaybe
		default:
			log.Fatalf(`Unexpected quick check value "%s"`, p.String(2))
		}
		if got := c.forms[ftype].quickCheck[mode]; got != qr {
			log.Printf("%U: FAILED %s (was %v need %v)\n", r, qt, got, qr)
		}
		c.forms[ftype].verified[mode] = true
	}
	if err := p.Err(); err != nil {
		log.Fatal(err)
	}
	// Any unspecified value must be QCYes. Verify this.
	for i, c := range chars {
		for j, fd := range c.forms {
			for k, qr := range fd.quickCheck {
				if !fd.verified[k] && qr != QCYes {
					m := "%U: FAIL F:%d M:%d (was %v need Yes) %s\n"
					log.Printf(m, i, j, k, qr, c.name)
				}
			}
		}
	}
}

var testHeader = `const (
	Yes = iota
	No
	Maybe
)

type formData struct {
	qc              uint8
	combinesForward bool
	decomposition   string
}

type runeData struct {
	r      rune
	ccc    uint8
	nLead  uint8
	nTrail uint8
	f      [2]formData // 0: canonical; 1: compatibility
}

func f(qc uint8, cf bool, dec string) [2]formData {
	return [2]formData{{qc, cf, dec}, {qc, cf, dec}}
}

func g(qc, qck uint8, cf, cfk bool, d, dk string) [2]formData {
	return [2]formData{{qc, cf, d}, {qck, cfk, dk}}
}

var testData = []runeData{
`

func printTestdata() {
	type lastInfo struct {
		ccc    uint8
		nLead  uint8
		nTrail uint8
		f      string
	}

	last := lastInfo{}
	w := &bytes.Buffer{}
	fmt.Fprint(w, testHeader)
	for r, c := range chars {
		f := c.forms[FCanonical]
		qc, cf, d := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
		f = c.forms[FCompatibility]
		qck, cfk, dk := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
		s := ""
		if d == dk && qc == qck && cf == cfk {
			s = fmt.Sprintf("f(%s, %v, %q)", qc, cf, d)
		} else {
			s = fmt.Sprintf("g(%s, %s, %v, %v, %q, %q)", qc, qck, cf, cfk, d, dk)
		}
		current := lastInfo{c.ccc, c.nLeadingNonStarters, c.nTrailingNonStarters, s}
		if last != current {
			fmt.Fprintf(w, "\t{0x%x, %d, %d, %d, %s},\n", r, c.origCCC, c.nLeadingNonStarters, c.nTrailingNonStarters, s)
			last = current
		}
	}
	fmt.Fprintln(w, "}")
	gen.WriteVersionedGoFile("data_test.go", "norm", w.Bytes())
}
609
vendor/golang.org/x/text/unicode/norm/normalize.go
generated
vendored
Normal file
@ -0,0 +1,609 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Note: the file data_test.go that is generated should not be checked in.
//go:generate go run maketables.go triegen.go
//go:generate go test -tags test

// Package norm contains types and functions for normalizing Unicode strings.
package norm // import "golang.org/x/text/unicode/norm"

import (
	"unicode/utf8"

	"golang.org/x/text/transform"
)

// A Form denotes a canonical representation of Unicode code points.
// The Unicode-defined normalization and equivalence forms are:
//
//	NFC   Unicode Normalization Form C
//	NFD   Unicode Normalization Form D
//	NFKC  Unicode Normalization Form KC
//	NFKD  Unicode Normalization Form KD
//
// For a Form f, this documentation uses the notation f(x) to mean
// the bytes or string x converted to the given form.
// A position n in x is called a boundary if conversion to the form can
// proceed independently on both sides:
//	f(x) == append(f(x[0:n]), f(x[n:])...)
//
// References: http://unicode.org/reports/tr15/ and
// http://unicode.org/notes/tn5/.
type Form int

const (
	NFC Form = iota
	NFD
	NFKC
	NFKD
)

// Bytes returns f(b). May return b if f(b) = b.
func (f Form) Bytes(b []byte) []byte {
	src := inputBytes(b)
	ft := formTable[f]
	n, ok := ft.quickSpan(src, 0, len(b), true)
	if ok {
		return b
	}
	out := make([]byte, n, len(b))
	copy(out, b[0:n])
	rb := reorderBuffer{f: *ft, src: src, nsrc: len(b), out: out, flushF: appendFlush}
	return doAppendInner(&rb, n)
}

// String returns f(s).
func (f Form) String(s string) string {
	src := inputString(s)
	ft := formTable[f]
	n, ok := ft.quickSpan(src, 0, len(s), true)
	if ok {
		return s
	}
	out := make([]byte, n, len(s))
	copy(out, s[0:n])
	rb := reorderBuffer{f: *ft, src: src, nsrc: len(s), out: out, flushF: appendFlush}
	return string(doAppendInner(&rb, n))
}
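
// For example (illustrative), NFC.String("e\u0301") returns "é" (U+00E9):
// the combining acute accent is composed with the preceding 'e'.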

// IsNormal returns true if b == f(b).
func (f Form) IsNormal(b []byte) bool {
	src := inputBytes(b)
	ft := formTable[f]
	bp, ok := ft.quickSpan(src, 0, len(b), true)
	if ok {
		return true
	}
	rb := reorderBuffer{f: *ft, src: src, nsrc: len(b)}
	rb.setFlusher(nil, cmpNormalBytes)
	for bp < len(b) {
		rb.out = b[bp:]
		if bp = decomposeSegment(&rb, bp, true); bp < 0 {
			return false
		}
		bp, _ = rb.f.quickSpan(rb.src, bp, len(b), true)
	}
	return true
}

func cmpNormalBytes(rb *reorderBuffer) bool {
	b := rb.out
	for i := 0; i < rb.nrune; i++ {
		info := rb.rune[i]
		if int(info.size) > len(b) {
			return false
		}
		p := info.pos
		pe := p + info.size
		for ; p < pe; p++ {
			if b[0] != rb.byte[p] {
				return false
			}
			b = b[1:]
		}
	}
	return true
}

// IsNormalString returns true if s == f(s).
func (f Form) IsNormalString(s string) bool {
	src := inputString(s)
	ft := formTable[f]
	bp, ok := ft.quickSpan(src, 0, len(s), true)
	if ok {
		return true
	}
	rb := reorderBuffer{f: *ft, src: src, nsrc: len(s)}
	rb.setFlusher(nil, func(rb *reorderBuffer) bool {
		for i := 0; i < rb.nrune; i++ {
			info := rb.rune[i]
			if bp+int(info.size) > len(s) {
				return false
			}
			p := info.pos
			pe := p + info.size
			for ; p < pe; p++ {
				if s[bp] != rb.byte[p] {
					return false
				}
				bp++
			}
		}
		return true
	})
	for bp < len(s) {
		if bp = decomposeSegment(&rb, bp, true); bp < 0 {
			return false
		}
		bp, _ = rb.f.quickSpan(rb.src, bp, len(s), true)
	}
	return true
}

// patchTail fixes a case where a rune may be incorrectly normalized
// if it is followed by illegal continuation bytes. It patches rb.out
// in place and reports whether the decomposition is still in progress.
func patchTail(rb *reorderBuffer) bool {
	info, p := lastRuneStart(&rb.f, rb.out)
	if p == -1 || info.size == 0 {
		return true
	}
	end := p + int(info.size)
	extra := len(rb.out) - end
	if extra > 0 {
		// Potentially allocating memory. However, this only
		// happens with ill-formed UTF-8.
		x := make([]byte, 0)
		x = append(x, rb.out[len(rb.out)-extra:]...)
		rb.out = rb.out[:end]
		decomposeToLastBoundary(rb)
		rb.doFlush()
		rb.out = append(rb.out, x...)
		return false
	}
	buf := rb.out[p:]
	rb.out = rb.out[:p]
	decomposeToLastBoundary(rb)
	if s := rb.ss.next(info); s == ssStarter {
		rb.doFlush()
		rb.ss.first(info)
	} else if s == ssOverflow {
		rb.doFlush()
		rb.insertCGJ()
		rb.ss = 0
	}
	rb.insertUnsafe(inputBytes(buf), 0, info)
	return true
}

func appendQuick(rb *reorderBuffer, i int) int {
	if rb.nsrc == i {
		return i
	}
	end, _ := rb.f.quickSpan(rb.src, i, rb.nsrc, true)
	rb.out = rb.src.appendSlice(rb.out, i, end)
	return end
}

// Append returns f(append(out, b...)).
// The buffer out must be nil, empty, or equal to f(out).
func (f Form) Append(out []byte, src ...byte) []byte {
	return f.doAppend(out, inputBytes(src), len(src))
}
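
// Because Append requires out to equal f(out), it supports incremental use:
// repeatedly calling out = NFC.Append(out, chunk...) keeps out normalized
// across chunk boundaries (an illustrative streaming pattern).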

func (f Form) doAppend(out []byte, src input, n int) []byte {
	if n == 0 {
		return out
	}
	ft := formTable[f]
	// Attempt to do a quickSpan first so we can avoid initializing the reorderBuffer.
	if len(out) == 0 {
		p, _ := ft.quickSpan(src, 0, n, true)
		out = src.appendSlice(out, 0, p)
		if p == n {
			return out
		}
		rb := reorderBuffer{f: *ft, src: src, nsrc: n, out: out, flushF: appendFlush}
		return doAppendInner(&rb, p)
	}
	rb := reorderBuffer{f: *ft, src: src, nsrc: n}
	return doAppend(&rb, out, 0)
}

func doAppend(rb *reorderBuffer, out []byte, p int) []byte {
	rb.setFlusher(out, appendFlush)
	src, n := rb.src, rb.nsrc
	doMerge := len(out) > 0
	if q := src.skipContinuationBytes(p); q > p {
		// Move leading non-starters to destination.
		rb.out = src.appendSlice(rb.out, p, q)
		p = q
		doMerge = patchTail(rb)
	}
	fd := &rb.f
	if doMerge {
		var info Properties
		if p < n {
			info = fd.info(src, p)
			if !info.BoundaryBefore() || info.nLeadingNonStarters() > 0 {
				if p == 0 {
					decomposeToLastBoundary(rb)
				}
				p = decomposeSegment(rb, p, true)
			}
		}
		if info.size == 0 {
			rb.doFlush()
			// Append incomplete UTF-8 encoding.
			return src.appendSlice(rb.out, p, n)
		}
		if rb.nrune > 0 {
			return doAppendInner(rb, p)
		}
	}
	p = appendQuick(rb, p)
	return doAppendInner(rb, p)
}

func doAppendInner(rb *reorderBuffer, p int) []byte {
	for n := rb.nsrc; p < n; {
		p = decomposeSegment(rb, p, true)
		p = appendQuick(rb, p)
	}
	return rb.out
}

// AppendString returns f(append(out, []byte(s))).
// The buffer out must be nil, empty, or equal to f(out).
func (f Form) AppendString(out []byte, src string) []byte {
	return f.doAppend(out, inputString(src), len(src))
}

// QuickSpan returns a boundary n such that b[0:n] == f(b[0:n]).
// It is not guaranteed to return the largest such n.
func (f Form) QuickSpan(b []byte) int {
	n, _ := formTable[f].quickSpan(inputBytes(b), 0, len(b), true)
	return n
}

// Span implements transform.SpanningTransformer. It returns a boundary n such
// that b[0:n] == f(b[0:n]). It is not guaranteed to return the largest such n.
func (f Form) Span(b []byte, atEOF bool) (n int, err error) {
	n, ok := formTable[f].quickSpan(inputBytes(b), 0, len(b), atEOF)
	if n < len(b) {
		if !ok {
			err = transform.ErrEndOfSpan
		} else {
			err = transform.ErrShortSrc
		}
	}
	return n, err
}

// SpanString returns a boundary n such that s[0:n] == f(s[0:n]).
// It is not guaranteed to return the largest such n.
func (f Form) SpanString(s string, atEOF bool) (n int, err error) {
	n, ok := formTable[f].quickSpan(inputString(s), 0, len(s), atEOF)
	if n < len(s) {
		if !ok {
			err = transform.ErrEndOfSpan
		} else {
			err = transform.ErrShortSrc
		}
	}
	return n, err
}

// quickSpan returns a boundary n such that src[0:n] == f(src[0:n]) and
// whether any non-normalized parts were found. If atEOF is false, n will
// not point past the last segment if this segment might become
// non-normalized by appending other runes.
func (f *formInfo) quickSpan(src input, i, end int, atEOF bool) (n int, ok bool) {
	var lastCC uint8
	ss := streamSafe(0)
	lastSegStart := i
	for n = end; i < n; {
		if j := src.skipASCII(i, n); i != j {
			i = j
			lastSegStart = i - 1
			lastCC = 0
			ss = 0
			continue
		}
		info := f.info(src, i)
		if info.size == 0 {
			if atEOF {
				// include incomplete runes
				return n, true
			}
			return lastSegStart, true
		}
		// This block needs to be before the next, because it is possible to
		// have an overflow for runes that are starters (e.g. with U+FF9E).
		switch ss.next(info) {
		case ssStarter:
			lastSegStart = i
		case ssOverflow:
			return lastSegStart, false
		case ssSuccess:
			if lastCC > info.ccc {
				return lastSegStart, false
			}
		}
		if f.composing {
			if !info.isYesC() {
				break
			}
		} else {
			if !info.isYesD() {
				break
			}
		}
		lastCC = info.ccc
		i += int(info.size)
	}
	if i == n {
		if !atEOF {
			n = lastSegStart
		}
		return n, true
	}
	return lastSegStart, false
}
|
||||
|
||||
// QuickSpanString returns a boundary n such that s[0:n] == f(s[0:n]).
|
||||
// It is not guaranteed to return the largest such n.
|
||||
func (f Form) QuickSpanString(s string) int {
|
||||
n, _ := formTable[f].quickSpan(inputString(s), 0, len(s), true)
|
||||
return n
|
||||
}
|
||||
|
||||
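// Illustrative sketch (editor's addition): QuickSpan lets a caller skip the
// already-normalized prefix and hand only the remainder to Append, a common
// pattern for incremental normalization.
//
//	n := norm.NFC.QuickSpan(b) // b[:n] is already NFC
//	out := append([]byte{}, b[:n]...)
//	out = norm.NFC.Append(out, b[n:]...)
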
// FirstBoundary returns the position i of the first boundary in b
// or -1 if b contains no boundary.
func (f Form) FirstBoundary(b []byte) int {
	return f.firstBoundary(inputBytes(b), len(b))
}

func (f Form) firstBoundary(src input, nsrc int) int {
	i := src.skipContinuationBytes(0)
	if i >= nsrc {
		return -1
	}
	fd := formTable[f]
	ss := streamSafe(0)
	// We should call ss.first here, but we can't as the first rune is
	// skipped already. This means FirstBoundary can't really determine
	// CGJ insertion points correctly. Luckily it doesn't have to.
	for {
		info := fd.info(src, i)
		if info.size == 0 {
			return -1
		}
		if s := ss.next(info); s != ssSuccess {
			return i
		}
		i += int(info.size)
		if i >= nsrc {
			if !info.BoundaryAfter() && !ss.isMax() {
				return -1
			}
			return nsrc
		}
	}
}

// FirstBoundaryInString returns the position i of the first boundary in s
// or -1 if s contains no boundary.
func (f Form) FirstBoundaryInString(s string) int {
	return f.firstBoundary(inputString(s), len(s))
}
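// Illustrative sketch (editor's addition): FirstBoundary reports how far
// leading combining marks extend, which is what a streaming normalizer needs
// when new input might still combine with previously written output.
//
//	b := []byte("\u0301x") // leading COMBINING ACUTE ACCENT, then 'x'
//	i := norm.NFC.FirstBoundary(b)
//	// i == 2: the combining mark belongs to the preceding segment; the
//	// first boundary is at the start of 'x'.
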
// NextBoundary reports the index of the boundary between the first and next
// segment in b or -1 if atEOF is false and there are not enough bytes to
// determine this boundary.
func (f Form) NextBoundary(b []byte, atEOF bool) int {
	return f.nextBoundary(inputBytes(b), len(b), atEOF)
}

// NextBoundaryInString reports the index of the boundary between the first and
// next segment in s or -1 if atEOF is false and there are not enough bytes to
// determine this boundary.
func (f Form) NextBoundaryInString(s string, atEOF bool) int {
	return f.nextBoundary(inputString(s), len(s), atEOF)
}

func (f Form) nextBoundary(src input, nsrc int, atEOF bool) int {
	if nsrc == 0 {
		if atEOF {
			return 0
		}
		return -1
	}
	fd := formTable[f]
	info := fd.info(src, 0)
	if info.size == 0 {
		if atEOF {
			return 1
		}
		return -1
	}
	ss := streamSafe(0)
	ss.first(info)

	for i := int(info.size); i < nsrc; i += int(info.size) {
		info = fd.info(src, i)
		if info.size == 0 {
			if atEOF {
				return i
			}
			return -1
		}
		// TODO: Using streamSafe to determine the boundary isn't the same as
		// using BoundaryBefore. Determine which should be used.
		if s := ss.next(info); s != ssSuccess {
			return i
		}
	}
	if !atEOF && !info.BoundaryAfter() && !ss.isMax() {
		return -1
	}
	return nsrc
}
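// Illustrative sketch (editor's addition): NextBoundary supports walking a
// byte slice one normalization segment at a time. process is a hypothetical
// per-segment handler.
//
//	for len(b) > 0 {
//		n := norm.NFC.NextBoundary(b, true) // atEOF: b is the whole input
//		process(b[:n])
//		b = b[n:]
//	}
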
// LastBoundary returns the position i of the last boundary in b
// or -1 if b contains no boundary.
func (f Form) LastBoundary(b []byte) int {
	return lastBoundary(formTable[f], b)
}

func lastBoundary(fd *formInfo, b []byte) int {
	i := len(b)
	info, p := lastRuneStart(fd, b)
	if p == -1 {
		return -1
	}
	if info.size == 0 { // ends with incomplete rune
		if p == 0 { // starts with incomplete rune
			return -1
		}
		i = p
		info, p = lastRuneStart(fd, b[:i])
		if p == -1 { // incomplete UTF-8 encoding or non-starter bytes without a starter
			return i
		}
	}
	if p+int(info.size) != i { // trailing non-starter bytes: illegal UTF-8
		return i
	}
	if info.BoundaryAfter() {
		return i
	}
	ss := streamSafe(0)
	v := ss.backwards(info)
	for i = p; i >= 0 && v != ssStarter; i = p {
		info, p = lastRuneStart(fd, b[:i])
		if v = ss.backwards(info); v == ssOverflow {
			break
		}
		if p+int(info.size) != i {
			if p == -1 { // no boundary found
				return -1
			}
			return i // boundary after an illegal UTF-8 encoding
		}
	}
	return i
}
// decomposeSegment scans the first segment in src into rb. It inserts 0x034f
// (Grapheme Joiner) when it encounters a sequence of more than 30 non-starters
// and returns the number of bytes consumed from src or iShortDst or iShortSrc.
func decomposeSegment(rb *reorderBuffer, sp int, atEOF bool) int {
	// Force one character to be consumed.
	info := rb.f.info(rb.src, sp)
	if info.size == 0 {
		return 0
	}
	if s := rb.ss.next(info); s == ssStarter {
		// TODO: this could be removed if we don't support merging.
		if rb.nrune > 0 {
			goto end
		}
	} else if s == ssOverflow {
		rb.insertCGJ()
		goto end
	}
	if err := rb.insertFlush(rb.src, sp, info); err != iSuccess {
		return int(err)
	}
	for {
		sp += int(info.size)
		if sp >= rb.nsrc {
			if !atEOF && !info.BoundaryAfter() {
				return int(iShortSrc)
			}
			break
		}
		info = rb.f.info(rb.src, sp)
		if info.size == 0 {
			if !atEOF {
				return int(iShortSrc)
			}
			break
		}
		if s := rb.ss.next(info); s == ssStarter {
			break
		} else if s == ssOverflow {
			rb.insertCGJ()
			break
		}
		if err := rb.insertFlush(rb.src, sp, info); err != iSuccess {
			return int(err)
		}
	}
end:
	if !rb.doFlush() {
		return int(iShortDst)
	}
	return sp
}
// lastRuneStart returns the runeInfo and position of the last
// rune in buf or the zero runeInfo and -1 if no rune was found.
func lastRuneStart(fd *formInfo, buf []byte) (Properties, int) {
	p := len(buf) - 1
	for ; p >= 0 && !utf8.RuneStart(buf[p]); p-- {
	}
	if p < 0 {
		return Properties{}, -1
	}
	return fd.info(inputBytes(buf), p), p
}

// decomposeToLastBoundary finds an open segment at the end of the buffer
// and scans it into rb. Returns the buffer minus the last segment.
func decomposeToLastBoundary(rb *reorderBuffer) {
	fd := &rb.f
	info, i := lastRuneStart(fd, rb.out)
	if int(info.size) != len(rb.out)-i {
		// illegal trailing continuation bytes
		return
	}
	if info.BoundaryAfter() {
		return
	}
	var add [maxNonStarters + 1]Properties // stores runeInfo in reverse order
	padd := 0
	ss := streamSafe(0)
	p := len(rb.out)
	for {
		add[padd] = info
		v := ss.backwards(info)
		if v == ssOverflow {
			// Note that if we have an overflow, it means the string we are
			// appending to is not correctly normalized. In this case the
			// behavior is undefined.
			break
		}
		padd++
		p -= int(info.size)
		if v == ssStarter || p < 0 {
			break
		}
		info, i = lastRuneStart(fd, rb.out[:p])
		if int(info.size) != p-i {
			break
		}
	}
	rb.ss = ss
	// Copy bytes for insertion as we may need to overwrite rb.out.
	var buf [maxBufferSize * utf8.UTFMax]byte
	cp := buf[:copy(buf[:], rb.out[p:])]
	rb.out = rb.out[:p]
	for padd--; padd >= 0; padd-- {
		info = add[padd]
		rb.insertUnsafe(inputBytes(cp), 0, info)
		cp = cp[info.size:]
	}
}
125
vendor/golang.org/x/text/unicode/norm/readwriter.go
generated
vendored
Normal file
125
vendor/golang.org/x/text/unicode/norm/readwriter.go
generated
vendored
Normal file
@ -0,0 +1,125 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package norm

import "io"

type normWriter struct {
	rb  reorderBuffer
	w   io.Writer
	buf []byte
}

// Write implements the standard write interface. If the last characters are
// not at a normalization boundary, the bytes will be buffered for the next
// write. The remaining bytes will be written on close.
func (w *normWriter) Write(data []byte) (n int, err error) {
	// Process data in pieces to keep w.buf size bounded.
	const chunk = 4000

	for len(data) > 0 {
		// Normalize into w.buf.
		m := len(data)
		if m > chunk {
			m = chunk
		}
		w.rb.src = inputBytes(data[:m])
		w.rb.nsrc = m
		w.buf = doAppend(&w.rb, w.buf, 0)
		data = data[m:]
		n += m

		// Write out complete prefix, save remainder.
		// Note that lastBoundary looks back at most 31 runes.
		i := lastBoundary(&w.rb.f, w.buf)
		if i == -1 {
			i = 0
		}
		if i > 0 {
			if _, err = w.w.Write(w.buf[:i]); err != nil {
				break
			}
			bn := copy(w.buf, w.buf[i:])
			w.buf = w.buf[:bn]
		}
	}
	return n, err
}

// Close forces data that remains in the buffer to be written.
func (w *normWriter) Close() error {
	if len(w.buf) > 0 {
		_, err := w.w.Write(w.buf)
		if err != nil {
			return err
		}
	}
	return nil
}
// Writer returns a new writer that implements Write(b)
// by writing f(b) to w. The returned writer may use an
// internal buffer to maintain state across Write calls.
// Calling its Close method writes any buffered data to w.
func (f Form) Writer(w io.Writer) io.WriteCloser {
	wr := &normWriter{rb: reorderBuffer{}, w: w}
	wr.rb.init(f, nil)
	return wr
}
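// Illustrative sketch (editor's addition): wrapping an io.Writer so that
// everything written through it comes out NFC-normalized. Close must be
// called to flush any bytes held back at a non-boundary.
//
//	var dst bytes.Buffer
//	w := norm.NFC.Writer(&dst)
//	io.WriteString(w, "e\u0301") // may be buffered: more marks could follow
//	w.Close()                    // flushes; dst now holds "\u00e9"
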
type normReader struct {
	rb           reorderBuffer
	r            io.Reader
	inbuf        []byte
	outbuf       []byte
	bufStart     int
	lastBoundary int
	err          error
}

// Read implements the standard read interface.
func (r *normReader) Read(p []byte) (int, error) {
	for {
		if r.lastBoundary-r.bufStart > 0 {
			n := copy(p, r.outbuf[r.bufStart:r.lastBoundary])
			r.bufStart += n
			if r.lastBoundary-r.bufStart > 0 {
				return n, nil
			}
			return n, r.err
		}
		if r.err != nil {
			return 0, r.err
		}
		outn := copy(r.outbuf, r.outbuf[r.lastBoundary:])
		r.outbuf = r.outbuf[0:outn]
		r.bufStart = 0

		n, err := r.r.Read(r.inbuf)
		r.rb.src = inputBytes(r.inbuf[0:n])
		r.rb.nsrc, r.err = n, err
		if n > 0 {
			r.outbuf = doAppend(&r.rb, r.outbuf, 0)
		}
		if err == io.EOF {
			r.lastBoundary = len(r.outbuf)
		} else {
			r.lastBoundary = lastBoundary(&r.rb.f, r.outbuf)
			if r.lastBoundary == -1 {
				r.lastBoundary = 0
			}
		}
	}
}
// Reader returns a new reader that implements Read
// by reading data from r and returning f(data).
func (f Form) Reader(r io.Reader) io.Reader {
	const chunk = 4000
	buf := make([]byte, chunk)
	rr := &normReader{rb: reorderBuffer{}, r: r, inbuf: buf}
	rr.rb.init(f, buf)
	return rr
}
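// Illustrative sketch (editor's addition): normalizing on the read side,
// e.g. when consuming text of unknown form from a file or network stream.
//
//	var buf bytes.Buffer
//	r := norm.NFC.Reader(src) // src is any io.Reader
//	_, err := io.Copy(&buf, r) // buf receives the NFC form of the stream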
7653
vendor/golang.org/x/text/unicode/norm/tables10.0.0.go
generated
vendored
Normal file
7653
vendor/golang.org/x/text/unicode/norm/tables10.0.0.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
7633
vendor/golang.org/x/text/unicode/norm/tables9.0.0.go
generated
vendored
Normal file
7633
vendor/golang.org/x/text/unicode/norm/tables9.0.0.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
88
vendor/golang.org/x/text/unicode/norm/transform.go
generated
vendored
Normal file
88
vendor/golang.org/x/text/unicode/norm/transform.go
generated
vendored
Normal file
@ -0,0 +1,88 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package norm

import (
	"unicode/utf8"

	"golang.org/x/text/transform"
)

// Reset implements the Reset method of the transform.Transformer interface.
func (Form) Reset() {}

// Transform implements the Transform method of the transform.Transformer
// interface. It may need to write segments of up to MaxSegmentSize at once.
// Users should either catch ErrShortDst and allow dst to grow or have dst be at
// least of size MaxTransformChunkSize to be guaranteed of progress.
func (f Form) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	n := 0
	// Cap the maximum number of src bytes to check.
	b := src
	eof := atEOF
	if ns := len(dst); ns < len(b) {
		err = transform.ErrShortDst
		eof = false
		b = b[:ns]
	}
	i, ok := formTable[f].quickSpan(inputBytes(b), n, len(b), eof)
	n += copy(dst[n:], b[n:i])
	if !ok {
		nDst, nSrc, err = f.transform(dst[n:], src[n:], atEOF)
		return nDst + n, nSrc + n, err
	}
	if n < len(src) && !atEOF {
		err = transform.ErrShortSrc
	}
	return n, n, err
}
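// Illustrative sketch (editor's addition): because Form implements
// transform.Transformer, it composes with the transform package directly.
//
//	s, _, err := transform.String(norm.NFD, "\u00e9") // decompose "é"
//	// s == "e\u0301" on success: base letter plus combining accent.
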
func flushTransform(rb *reorderBuffer) bool {
	// Write out (must fully fit in dst, or else it is an ErrShortDst).
	if len(rb.out) < rb.nrune*utf8.UTFMax {
		return false
	}
	rb.out = rb.out[rb.flushCopy(rb.out):]
	return true
}

var errs = []error{nil, transform.ErrShortDst, transform.ErrShortSrc}

// transform implements the transform.Transformer interface. It is only called
// when quickSpan does not pass for a given string.
func (f Form) transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	// TODO: get rid of reorderBuffer. See CL 23460044.
	rb := reorderBuffer{}
	rb.init(f, src)
	for {
		// Load segment into reorder buffer.
		rb.setFlusher(dst[nDst:], flushTransform)
		end := decomposeSegment(&rb, nSrc, atEOF)
		if end < 0 {
			return nDst, nSrc, errs[-end]
		}
		nDst = len(dst) - len(rb.out)
		nSrc = end

		// Next quickSpan.
		end = rb.nsrc
		eof := atEOF
		if n := nSrc + len(dst) - nDst; n < end {
			err = transform.ErrShortDst
			end = n
			eof = false
		}
		end, ok := rb.f.quickSpan(rb.src, nSrc, end, eof)
		n := copy(dst[nDst:], rb.src.bytes[nSrc:end])
		nSrc += n
		nDst += n
		if ok {
			if n < rb.nsrc && !atEOF {
				err = transform.ErrShortSrc
			}
			return nDst, nSrc, err
		}
	}
}
54
vendor/golang.org/x/text/unicode/norm/trie.go
generated
vendored
Normal file
54
vendor/golang.org/x/text/unicode/norm/trie.go
generated
vendored
Normal file
@ -0,0 +1,54 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package norm

type valueRange struct {
	value  uint16 // header: value:stride
	lo, hi byte   // header: lo:n
}

type sparseBlocks struct {
	values []valueRange
	offset []uint16
}

var nfcSparse = sparseBlocks{
	values: nfcSparseValues[:],
	offset: nfcSparseOffset[:],
}

var nfkcSparse = sparseBlocks{
	values: nfkcSparseValues[:],
	offset: nfkcSparseOffset[:],
}

var (
	nfcData  = newNfcTrie(0)
	nfkcData = newNfkcTrie(0)
)

// lookupValue determines the type of block n and looks up the value for b.
// For n < t.cutoff, the block is a simple lookup table. Otherwise, the block
// is a list of ranges with an accompanying value. Given a matching range r,
// the value for b is given by r.value + (b - r.lo) * stride.
func (t *sparseBlocks) lookup(n uint32, b byte) uint16 {
	offset := t.offset[n]
	header := t.values[offset]
	lo := offset + 1
	hi := lo + uint16(header.lo)
	for lo < hi {
		m := lo + (hi-lo)/2
		r := t.values[m]
		if r.lo <= b && b <= r.hi {
			return r.value + uint16(b-r.lo)*header.value
		}
		if b < r.lo {
			hi = m
		} else {
			lo = m + 1
		}
	}
	return 0
}
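// Worked example (editor's addition, hypothetical values): suppose block n has
// header {value: 2, lo: 1} (stride 2, one range) followed by the range
// {value: 0x10, lo: 0x80, hi: 0x8f}. Looking up b = 0x83 finds that range by
// binary search and returns 0x10 + (0x83-0x80)*2 = 0x16. A byte outside every
// range returns 0.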
117
vendor/golang.org/x/text/unicode/norm/triegen.go
generated
vendored
Normal file
117
vendor/golang.org/x/text/unicode/norm/triegen.go
generated
vendored
Normal file
@ -0,0 +1,117 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// Trie table generator.
// Used by make*tables tools to generate a go file with trie data structures
// for mapping UTF-8 to a 16-bit value. All but the last byte in a UTF-8 byte
// sequence are used to look up offsets in the index table to be used for the
// next byte. The last byte is used to index into a table with 16-bit values.

package main

import (
	"fmt"
	"io"
)

const maxSparseEntries = 16

type normCompacter struct {
	sparseBlocks [][]uint64
	sparseOffset []uint16
	sparseCount  int
	name         string
}
func mostFrequentStride(a []uint64) int {
	counts := make(map[int]int)
	var v int
	for _, x := range a {
		if stride := int(x) - v; v != 0 && stride >= 0 {
			counts[stride]++
		}
		v = int(x)
	}
	var maxs, maxc int
	for stride, cnt := range counts {
		if cnt > maxc || (cnt == maxc && stride < maxs) {
			maxs, maxc = stride, cnt
		}
	}
	return maxs
}

func countSparseEntries(a []uint64) int {
	stride := mostFrequentStride(a)
	var v, count int
	for _, tv := range a {
		if int(tv)-v != stride {
			if tv != 0 {
				count++
			}
		}
		v = int(tv)
	}
	return count
}
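// Worked example (editor's addition, hypothetical input): for the block
// [0, 5, 7, 9, 11, 0], the strides measured from nonzero predecessors are
// 2, 2 and 2, so mostFrequentStride returns 2 (negative strides are ignored).
// countSparseEntries then counts the nonzero values that break the stride and
// so start a new run: here only the 5, giving 1 sparse entry covering the run
// 5, 7, 9, 11.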
func (c *normCompacter) Size(v []uint64) (sz int, ok bool) {
	if n := countSparseEntries(v); n <= maxSparseEntries {
		return (n+1)*4 + 2, true
	}
	return 0, false
}

func (c *normCompacter) Store(v []uint64) uint32 {
	h := uint32(len(c.sparseOffset))
	c.sparseBlocks = append(c.sparseBlocks, v)
	c.sparseOffset = append(c.sparseOffset, uint16(c.sparseCount))
	c.sparseCount += countSparseEntries(v) + 1
	return h
}

func (c *normCompacter) Handler() string {
	return c.name + "Sparse.lookup"
}

func (c *normCompacter) Print(w io.Writer) (retErr error) {
	p := func(f string, x ...interface{}) {
		if _, err := fmt.Fprintf(w, f, x...); retErr == nil && err != nil {
			retErr = err
		}
	}

	ls := len(c.sparseBlocks)
	p("// %sSparseOffset: %d entries, %d bytes\n", c.name, ls, ls*2)
	p("var %sSparseOffset = %#v\n\n", c.name, c.sparseOffset)

	ns := c.sparseCount
	p("// %sSparseValues: %d entries, %d bytes\n", c.name, ns, ns*4)
	p("var %sSparseValues = [%d]valueRange {", c.name, ns)
	for i, b := range c.sparseBlocks {
		p("\n// Block %#x, offset %#x", i, c.sparseOffset[i])
		var v int
		stride := mostFrequentStride(b)
		n := countSparseEntries(b)
		p("\n{value:%#04x,lo:%#02x},", stride, uint8(n))
		for i, nv := range b {
			if int(nv)-v != stride {
				if v != 0 {
					p(",hi:%#02x},", 0x80+i-1)
				}
				if nv != 0 {
					p("\n{value:%#04x,lo:%#02x", nv, 0x80+i)
				}
			}
			v = int(nv)
		}
		if v != 0 {
			p(",hi:%#02x},", 0x80+len(b)-1)
		}
	}
	p("\n}\n\n")
	return
}