commit 59911aebb9
parent a9bb79b01c
2024-05-14 12:10:58 +02:00
645 changed files with 263320 additions and 0 deletions

vendor/github.com/agext/levenshtein/.gitignore generated vendored Normal file

@@ -0,0 +1,2 @@
README.html
coverage.out

vendor/github.com/agext/levenshtein/.travis.yml generated vendored Normal file

@@ -0,0 +1,70 @@
language: go
sudo: false
go:
- 1.8
- 1.7.5
- 1.7.4
- 1.7.3
- 1.7.2
- 1.7.1
- 1.7
- tip
- 1.6.4
- 1.6.3
- 1.6.2
- 1.6.1
- 1.6
- 1.5.4
- 1.5.3
- 1.5.2
- 1.5.1
- 1.5
- 1.4.3
- 1.4.2
- 1.4.1
- 1.4
- 1.3.3
- 1.3.2
- 1.3.1
- 1.3
- 1.2.2
- 1.2.1
- 1.2
- 1.1.2
- 1.1.1
- 1.1
before_install:
- go get github.com/mattn/goveralls
script:
- $HOME/gopath/bin/goveralls -service=travis-ci
notifications:
email:
on_success: never
matrix:
fast_finish: true
allow_failures:
- go: tip
- go: 1.6.4
- go: 1.6.3
- go: 1.6.2
- go: 1.6.1
- go: 1.6
- go: 1.5.4
- go: 1.5.3
- go: 1.5.2
- go: 1.5.1
- go: 1.5
- go: 1.4.3
- go: 1.4.2
- go: 1.4.1
- go: 1.4
- go: 1.3.3
- go: 1.3.2
- go: 1.3.1
- go: 1.3
- go: 1.2.2
- go: 1.2.1
- go: 1.2
- go: 1.1.2
- go: 1.1.1
- go: 1.1

vendor/github.com/agext/levenshtein/DCO generated vendored Normal file

@@ -0,0 +1,36 @@
Developer Certificate of Origin
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.

vendor/github.com/agext/levenshtein/LICENSE generated vendored Normal file

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

vendor/github.com/agext/levenshtein/MAINTAINERS generated vendored Normal file

@@ -0,0 +1 @@
Alex Bucataru <alex@alrux.com> (@AlexBucataru)

vendor/github.com/agext/levenshtein/NOTICE generated vendored Normal file

@@ -0,0 +1,5 @@
Alrux Go EXTensions (AGExt) - package levenshtein
Copyright 2016 ALRUX Inc.
This product includes software developed at ALRUX Inc.
(http://www.alrux.com/).

vendor/github.com/agext/levenshtein/README.md generated vendored Normal file

@@ -0,0 +1,38 @@
# A Go package for calculating the Levenshtein distance between two strings
[![Release](https://img.shields.io/github/release/agext/levenshtein.svg?style=flat)](https://github.com/agext/levenshtein/releases/latest)
[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg?style=flat)](https://godoc.org/github.com/agext/levenshtein) 
[![Build Status](https://travis-ci.org/agext/levenshtein.svg?branch=master&style=flat)](https://travis-ci.org/agext/levenshtein)
[![Coverage Status](https://coveralls.io/repos/github/agext/levenshtein/badge.svg?style=flat)](https://coveralls.io/github/agext/levenshtein)
[![Go Report Card](https://goreportcard.com/badge/github.com/agext/levenshtein?style=flat)](https://goreportcard.com/report/github.com/agext/levenshtein)
This package implements distance and similarity metrics for strings, based on the Levenshtein measure, in [Go](http://golang.org).
## Project Status
v1.2.1 Stable: Guaranteed no breaking changes to the API in future v1.x releases. Probably safe to use in production, though provided on "AS IS" basis.
This package is being actively maintained. If you encounter any problems or have any suggestions for improvement, please [open an issue](https://github.com/agext/levenshtein/issues). Pull requests are welcome.
## Overview
The Levenshtein `Distance` between two strings is the minimum total cost of edits that would convert the first string into the second. The allowed edit operations are insertions, deletions, and substitutions, all at character (one UTF-8 code point) level. Each operation has a default cost of 1, but each can be assigned its own cost equal to or greater than 0.
A `Distance` of 0 means the two strings are identical, and the higher the value the more different the strings. Since in practice we are interested in finding if the two strings are "close enough", it often does not make sense to continue the calculation once the result is mathematically guaranteed to exceed a desired threshold. Providing this value to the `Distance` function allows it to take a shortcut and return a lower bound instead of an exact cost when the threshold is exceeded.
The `Similarity` function calculates the distance, then converts it into a normalized metric within the range 0..1, with 1 meaning the strings are identical, and 0 that they have nothing in common. A minimum similarity threshold can be provided to speed up the calculation of the metric for strings that are far too dissimilar for the purpose at hand. All values under this threshold are rounded down to 0.
The `Match` function provides a similarity metric, with the same range and meaning as `Similarity`, but with a bonus for string pairs that share a common prefix and have a similarity above a "bonus threshold". It uses the same method as proposed by Winkler for the Jaro distance, and the reasoning behind it is that these string pairs are very likely spelling variations or errors, and they are more closely linked than the edit distance alone would suggest.
The underlying `Calculate` function is also exported, to allow the building of other derivative metrics, if needed.
## Installation
```
go get github.com/agext/levenshtein
```
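A minimal usage sketch of the functions described above (the example strings are arbitrary; a nil `*Params` selects the default cost of 1 for all operations):
```go
package main

import (
	"fmt"

	"github.com/agext/levenshtein"
)

func main() {
	// Exact distance with default parameters (nil): 3 edits.
	fmt.Println(levenshtein.Distance("kitten", "sitting", nil))

	// With a non-zero MaxCost the calculation may stop early; any result
	// above the maximum is only a lower bound, not the exact distance.
	p := levenshtein.NewParams().MaxCost(1)
	fmt.Println(levenshtein.Distance("kitten", "sitting", p))

	// Normalized scores in the range 0..1.
	fmt.Println(levenshtein.Similarity("kitten", "sitting", nil))
	fmt.Println(levenshtein.Match("kitten", "sitting", nil))
}
```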
## License
Package levenshtein is released under the Apache 2.0 license. See the [LICENSE](LICENSE) file for details.

vendor/github.com/agext/levenshtein/levenshtein.go generated vendored Normal file

@@ -0,0 +1,290 @@
// Copyright 2016 ALRUX Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package levenshtein implements distance and similarity metrics for strings, based on the Levenshtein measure.
The Levenshtein `Distance` between two strings is the minimum total cost of edits that would convert the first string into the second. The allowed edit operations are insertions, deletions, and substitutions, all at character (one UTF-8 code point) level. Each operation has a default cost of 1, but each can be assigned its own cost equal to or greater than 0.
A `Distance` of 0 means the two strings are identical, and the higher the value the more different the strings. Since in practice we are interested in finding if the two strings are "close enough", it often does not make sense to continue the calculation once the result is mathematically guaranteed to exceed a desired threshold. Providing this value to the `Distance` function allows it to take a shortcut and return a lower bound instead of an exact cost when the threshold is exceeded.
The `Similarity` function calculates the distance, then converts it into a normalized metric within the range 0..1, with 1 meaning the strings are identical, and 0 that they have nothing in common. A minimum similarity threshold can be provided to speed up the calculation of the metric for strings that are far too dissimilar for the purpose at hand. All values under this threshold are rounded down to 0.
The `Match` function provides a similarity metric, with the same range and meaning as `Similarity`, but with a bonus for string pairs that share a common prefix and have a similarity above a "bonus threshold". It uses the same method as proposed by Winkler for the Jaro distance, and the reasoning behind it is that these string pairs are very likely spelling variations or errors, and they are more closely linked than the edit distance alone would suggest.
The underlying `Calculate` function is also exported, to allow the building of other derivative metrics, if needed.
*/
package levenshtein
// Calculate determines the Levenshtein distance between two strings, using
// the given costs for each edit operation. It returns the distance along with
// the lengths of the longest common prefix and suffix.
//
// If maxCost is non-zero, the calculation stops as soon as the distance is determined
// to be greater than maxCost. Therefore, any return value higher than maxCost is a
// lower bound for the actual distance.
func Calculate(str1, str2 []rune, maxCost, insCost, subCost, delCost int) (dist, prefixLen, suffixLen int) {
l1, l2 := len(str1), len(str2)
// trim common prefix, if any, as it doesn't affect the distance
for ; prefixLen < l1 && prefixLen < l2; prefixLen++ {
if str1[prefixLen] != str2[prefixLen] {
break
}
}
str1, str2 = str1[prefixLen:], str2[prefixLen:]
l1 -= prefixLen
l2 -= prefixLen
// trim common suffix, if any, as it doesn't affect the distance
for 0 < l1 && 0 < l2 {
if str1[l1-1] != str2[l2-1] {
str1, str2 = str1[:l1], str2[:l2]
break
}
l1--
l2--
suffixLen++
}
// if the first string is empty, the distance is the length of the second string times the cost of insertion
if l1 == 0 {
dist = l2 * insCost
return
}
// if the second string is empty, the distance is the length of the first string times the cost of deletion
if l2 == 0 {
dist = l1 * delCost
return
}
// variables used in inner "for" loops
var y, dy, c, l int
// if maxCost is greater than or equal to the maximum possible distance, it's equivalent to 'unlimited'
if maxCost > 0 {
if subCost < delCost+insCost {
if maxCost >= l1*subCost+(l2-l1)*insCost {
maxCost = 0
}
} else {
if maxCost >= l1*delCost+l2*insCost {
maxCost = 0
}
}
}
if maxCost > 0 {
// prefer the longer string first, to minimize time;
// a swap also transposes the meanings of insertion and deletion.
if l1 < l2 {
str1, str2, l1, l2, insCost, delCost = str2, str1, l2, l1, delCost, insCost
}
// the length differential times cost of deletion is a lower bound for the cost;
// if it is higher than the maxCost, there is no point going into the main calculation.
if dist = (l1 - l2) * delCost; dist > maxCost {
return
}
d := make([]int, l1+1)
// offset and length of d in the current row
doff, dlen := 0, 1
for y, dy = 1, delCost; y <= l1 && dy <= maxCost; dlen++ {
d[y] = dy
y++
dy = y * delCost
}
// fmt.Printf("%q -> %q: init doff=%d dlen=%d d[%d:%d]=%v\n", str1, str2, doff, dlen, doff, doff+dlen, d[doff:doff+dlen])
for x := 0; x < l2; x++ {
dy, d[doff] = d[doff], d[doff]+insCost
for d[doff] > maxCost && dlen > 0 {
if str1[doff] != str2[x] {
dy += subCost
}
doff++
dlen--
if c = d[doff] + insCost; c < dy {
dy = c
}
dy, d[doff] = d[doff], dy
}
for y, l = doff, doff+dlen-1; y < l; dy, d[y] = d[y], dy {
if str1[y] != str2[x] {
dy += subCost
}
if c = d[y] + delCost; c < dy {
dy = c
}
y++
if c = d[y] + insCost; c < dy {
dy = c
}
}
if y < l1 {
if str1[y] != str2[x] {
dy += subCost
}
if c = d[y] + delCost; c < dy {
dy = c
}
for ; dy <= maxCost && y < l1; dy, d[y] = dy+delCost, dy {
y++
dlen++
}
}
// fmt.Printf("%q -> %q: x=%d doff=%d dlen=%d d[%d:%d]=%v\n", str1, str2, x, doff, dlen, doff, doff+dlen, d[doff:doff+dlen])
if dlen == 0 {
dist = maxCost + 1
return
}
}
if doff+dlen-1 < l1 {
dist = maxCost + 1
return
}
dist = d[l1]
} else {
// ToDo: This is O(l1*l2) time and O(min(l1,l2)) space; investigate if it is
// worth to implement diagonal approach - O(l1*(1+dist)) time, up to O(l1*l2) space
// http://www.csse.monash.edu.au/~lloyd/tildeStrings/Alignment/92.IPL.html
// prefer the shorter string first, to minimize space; time is O(l1*l2) anyway;
// a swap also transposes the meanings of insertion and deletion.
if l1 > l2 {
str1, str2, l1, l2, insCost, delCost = str2, str1, l2, l1, delCost, insCost
}
d := make([]int, l1+1)
for y = 1; y <= l1; y++ {
d[y] = y * delCost
}
for x := 0; x < l2; x++ {
dy, d[0] = d[0], d[0]+insCost
for y = 0; y < l1; dy, d[y] = d[y], dy {
if str1[y] != str2[x] {
dy += subCost
}
if c = d[y] + delCost; c < dy {
dy = c
}
y++
if c = d[y] + insCost; c < dy {
dy = c
}
}
}
dist = d[l1]
}
return
}
// Distance returns the Levenshtein distance between str1 and str2, using the
// default or provided cost values. Pass nil for the third argument to use the
// default cost of 1 for all three operations, with no maximum.
func Distance(str1, str2 string, p *Params) int {
if p == nil {
p = defaultParams
}
dist, _, _ := Calculate([]rune(str1), []rune(str2), p.maxCost, p.insCost, p.subCost, p.delCost)
return dist
}
// Similarity returns a score in the range of 0..1 for how similar the two strings are.
// A score of 1 means the strings are identical, and 0 means they have nothing in common.
//
// A nil third argument uses the default cost of 1 for all three operations.
//
// If a non-zero MinScore value is provided in the parameters, scores lower than it
// will be returned as 0.
func Similarity(str1, str2 string, p *Params) float64 {
return Match(str1, str2, p.Clone().BonusThreshold(1.1)) // guaranteed no bonus
}
// Match returns a similarity score adjusted by the same method as proposed by Winkler for
// the Jaro distance - giving a bonus to string pairs that share a common prefix, only if their
// similarity score is already over a threshold.
//
// The score is in the range of 0..1, with 1 meaning the strings are identical,
// and 0 meaning they have nothing in common.
//
// A nil third argument uses the default cost of 1 for all three operations, maximum length of
// common prefix to consider for bonus of 4, scaling factor of 0.1, and bonus threshold of 0.7.
//
// If a non-zero MinScore value is provided in the parameters, scores lower than it
// will be returned as 0.
func Match(str1, str2 string, p *Params) float64 {
s1, s2 := []rune(str1), []rune(str2)
l1, l2 := len(s1), len(s2)
// two empty strings are identical; shortcut also avoids divByZero issues later on.
if l1 == 0 && l2 == 0 {
return 1
}
if p == nil {
p = defaultParams
}
// a min over 1 can never be satisfied, so the score is 0.
if p.minScore > 1 {
return 0
}
insCost, delCost, maxDist, max := p.insCost, p.delCost, 0, 0
if l1 > l2 {
l1, l2, insCost, delCost = l2, l1, delCost, insCost
}
if p.subCost < delCost+insCost {
maxDist = l1*p.subCost + (l2-l1)*insCost
} else {
maxDist = l1*delCost + l2*insCost
}
// a zero min is always satisfied, so no need to set a max cost.
if p.minScore > 0 {
// if p.minScore is lower than p.bonusThreshold, we can use a simplified formula
// for the max cost, because a sim score below min cannot receive a bonus.
if p.minScore < p.bonusThreshold {
// round down the max - a cost equal to a rounded up max would already be under min.
max = int((1 - p.minScore) * float64(maxDist))
} else {
// p.minScore <= sim + p.bonusPrefix*p.bonusScale*(1-sim)
// p.minScore <= (1-dist/maxDist) + p.bonusPrefix*p.bonusScale*(1-(1-dist/maxDist))
// p.minScore <= 1 - dist/maxDist + p.bonusPrefix*p.bonusScale*dist/maxDist
// 1 - p.minScore >= dist/maxDist - p.bonusPrefix*p.bonusScale*dist/maxDist
// (1-p.minScore)*maxDist/(1-p.bonusPrefix*p.bonusScale) >= dist
max = int((1 - p.minScore) * float64(maxDist) / (1 - float64(p.bonusPrefix)*p.bonusScale))
}
}
dist, pl, _ := Calculate(s1, s2, max, p.insCost, p.subCost, p.delCost)
if max > 0 && dist > max {
return 0
}
sim := 1 - float64(dist)/float64(maxDist)
if sim >= p.bonusThreshold && sim < 1 && p.bonusPrefix > 0 && p.bonusScale > 0 {
if pl > p.bonusPrefix {
pl = p.bonusPrefix
}
sim += float64(pl) * p.bonusScale * (1 - sim)
}
if sim < p.minScore {
return 0
}
return sim
}
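For reference, a small sketch (not part of the vendored file) of calling `Calculate` directly, illustrating the prefix/suffix return values and the lower-bound behaviour documented above:
```go
package main

import (
	"fmt"

	"github.com/agext/levenshtein"
)

func main() {
	a, b := []rune("levenshtein"), []rune("levenstein")

	// Unit costs, no maximum: one deletion, common prefix "levens" (6),
	// common suffix "tein" (4).
	dist, prefixLen, suffixLen := levenshtein.Calculate(a, b, 0, 1, 1, 1)
	fmt.Println(dist, prefixLen, suffixLen) // 1 6 4

	// With maxCost = 1 on a pair whose true distance is 3, the returned
	// value merely exceeds the maximum and is only a lower bound.
	dist, _, _ = levenshtein.Calculate([]rune("kitten"), []rune("sitting"), 1, 1, 1, 1)
	fmt.Println(dist > 1) // true
}
```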

vendor/github.com/agext/levenshtein/params.go generated vendored Normal file

@@ -0,0 +1,152 @@
// Copyright 2016 ALRUX Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package levenshtein
// Params represents a set of parameter values for the various formulas involved
// in the calculation of the Levenshtein string metrics.
type Params struct {
insCost int
subCost int
delCost int
maxCost int
minScore float64
bonusPrefix int
bonusScale float64
bonusThreshold float64
}
var (
defaultParams = NewParams()
)
// NewParams creates a new set of parameters and initializes it with the default values.
func NewParams() *Params {
return &Params{
insCost: 1,
subCost: 1,
delCost: 1,
maxCost: 0,
minScore: 0,
bonusPrefix: 4,
bonusScale: .1,
bonusThreshold: .7,
}
}
// Clone returns a pointer to a copy of the receiver parameter set, or of a new
// default parameter set if the receiver is nil.
func (p *Params) Clone() *Params {
if p == nil {
return NewParams()
}
return &Params{
insCost: p.insCost,
subCost: p.subCost,
delCost: p.delCost,
maxCost: p.maxCost,
minScore: p.minScore,
bonusPrefix: p.bonusPrefix,
bonusScale: p.bonusScale,
bonusThreshold: p.bonusThreshold,
}
}
// InsCost overrides the default value of 1 for the cost of insertion.
// The new value must be zero or positive.
func (p *Params) InsCost(v int) *Params {
if v >= 0 {
p.insCost = v
}
return p
}
// SubCost overrides the default value of 1 for the cost of substitution.
// The new value must be zero or positive.
func (p *Params) SubCost(v int) *Params {
if v >= 0 {
p.subCost = v
}
return p
}
// DelCost overrides the default value of 1 for the cost of deletion.
// The new value must be zero or positive.
func (p *Params) DelCost(v int) *Params {
if v >= 0 {
p.delCost = v
}
return p
}
// MaxCost overrides the default value of 0 (meaning unlimited) for the maximum cost.
// The calculation of Distance() stops when the result is guaranteed to exceed
// this maximum, returning a lower-bound rather than exact value.
// The new value must be zero or positive.
func (p *Params) MaxCost(v int) *Params {
if v >= 0 {
p.maxCost = v
}
return p
}
// MinScore overrides the default value of 0 for the minimum similarity score.
// Scores below this threshold are returned as 0 by Similarity() and Match().
// The new value must be zero or positive. Note that a minimum greater than 1
// can never be satisfied, resulting in a score of 0 for any pair of strings.
func (p *Params) MinScore(v float64) *Params {
if v >= 0 {
p.minScore = v
}
return p
}
// BonusPrefix overrides the default value for the maximum length of
// common prefix to be considered for bonus by Match().
// The new value must be zero or positive.
func (p *Params) BonusPrefix(v int) *Params {
if v >= 0 {
p.bonusPrefix = v
}
return p
}
// BonusScale overrides the default value for the scaling factor used by Match()
// in calculating the bonus.
// The new value must be zero or positive. To guarantee that the similarity score
// remains in the interval 0..1, this scaling factor is not allowed to exceed
// 1 / BonusPrefix.
func (p *Params) BonusScale(v float64) *Params {
if v >= 0 {
p.bonusScale = v
}
// the bonus cannot exceed (1-sim), or the score may become greater than 1.
if float64(p.bonusPrefix)*p.bonusScale > 1 {
p.bonusScale = 1 / float64(p.bonusPrefix)
}
return p
}
// BonusThreshold overrides the default value for the minimum similarity score
// for which Match() can assign a bonus.
// The new value must be zero or positive. Note that a threshold greater than 1
// effectively makes Match() become the equivalent of Similarity().
func (p *Params) BonusThreshold(v float64) *Params {
if v >= 0 {
p.bonusThreshold = v
}
return p
}
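The setters above all return the receiver, so a parameter set is usually built fluently. A small sketch of that pattern (the specific costs are arbitrary):
```go
package main

import (
	"fmt"

	"github.com/agext/levenshtein"
)

func main() {
	// Substitutions cost 1, insertions and deletions cost 2, and the
	// calculation may stop once the cost is known to exceed 5.
	p := levenshtein.NewParams().
		SubCost(1).
		InsCost(2).
		DelCost(2).
		MaxCost(5)

	// "colour" needs one insertion relative to "color", so the distance is 2.
	fmt.Println(levenshtein.Distance("color", "colour", p))

	// MinScore makes Similarity and Match report 0 for scores below it.
	fmt.Println(levenshtein.Similarity("color", "colour", p.Clone().MinScore(0.9)))
}
```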

vendor/github.com/alecthomas/repr/COPYING generated vendored Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 Alec Thomas
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

vendor/github.com/alecthomas/repr/README.md generated vendored Normal file

@@ -0,0 +1,90 @@
# Python's repr() for Go [![](https://godoc.org/github.com/alecthomas/repr?status.svg)](http://godoc.org/github.com/alecthomas/repr) [![CircleCI](https://img.shields.io/circleci/project/github/alecthomas/repr.svg)](https://circleci.com/gh/alecthomas/repr)
This package attempts to represent Go values in a form that can be used almost directly in Go source
code.
Unfortunately, some values (such as pointers to basic types) can not be represented directly in Go.
These values will be represented as `&<value>`. eg. `&23`
## Example
```go
type test struct {
S string
I int
A []int
}
func main() {
repr.Print(&test{
S: "String",
I: 123,
A: []int{1, 2, 3},
})
}
```
Outputs
```
&main.test{S: "String", I: 123, A: []int{1, 2, 3}}
```
## Why repr and not [pp](https://github.com/k0kubun/pp)?
pp is designed for printing coloured output to consoles, with (seemingly?) no way to disable this. If you don't want coloured output (eg. for use in diffs, logs, etc.) repr is for you.
## Why repr and not [go-spew](https://github.com/davecgh/go-spew)?
Repr deliberately contains much less metadata about values. It is designed to (generally) be copyable directly into source code.
Compare go-spew:
```go
(parser.expression) (len=1 cap=1) {
(parser.alternative) (len=1 cap=1) {
([]interface {}) (len=1 cap=1) {
(*parser.repitition)(0xc82000b220)({
expression: (parser.expression) (len=2 cap=2) {
(parser.alternative) (len=1 cap=1) {
([]interface {}) (len=1 cap=1) {
(parser.str) (len=1) "a"
}
},
(parser.alternative) (len=1 cap=1) {
([]interface {}) (len=1 cap=1) {
(*parser.self)(0x593ef0)({
})
}
}
}
})
}
}
}
```
To repr:
```go
parser.expression{
parser.alternative{
[]interface {}{
&parser.repitition{
expression: parser.expression{
parser.alternative{
[]interface {}{
parser.str("a"),
},
},
parser.alternative{
[]interface {}{
&parser.self{ },
},
},
},
},
},
},
}
```

vendor/github.com/alecthomas/repr/repr.go generated vendored Normal file

@@ -0,0 +1,353 @@
// Package repr attempts to represent Go values in a form that can be copy-and-pasted into source
// code directly.
//
// Some values (such as pointers to basic types) can not be represented directly in
// Go. These values will be output as `&<value>`. eg. `&23`
package repr
import (
"bytes"
"fmt"
"io"
"os"
"reflect"
"sort"
"time"
"unsafe"
)
var (
// "Real" names of basic kinds, used to differentiate type aliases.
realKindName = map[reflect.Kind]string{
reflect.Bool: "bool",
reflect.Int: "int",
reflect.Int8: "int8",
reflect.Int16: "int16",
reflect.Int32: "int32",
reflect.Int64: "int64",
reflect.Uint: "uint",
reflect.Uint8: "uint8",
reflect.Uint16: "uint16",
reflect.Uint32: "uint32",
reflect.Uint64: "uint64",
reflect.Uintptr: "uintptr",
reflect.Float32: "float32",
reflect.Float64: "float64",
reflect.Complex64: "complex64",
reflect.Complex128: "complex128",
reflect.Array: "array",
reflect.Chan: "chan",
reflect.Func: "func",
reflect.Map: "map",
reflect.Slice: "slice",
reflect.String: "string",
}
goStringerType = reflect.TypeOf((*fmt.GoStringer)(nil)).Elem()
byteSliceType = reflect.TypeOf([]byte{})
)
// Default prints to os.Stdout with two space indentation.
var Default = New(os.Stdout, Indent(" "))
// An Option modifies the default behaviour of a Printer.
type Option func(o *Printer)
// Indent output by this much.
func Indent(indent string) Option { return func(o *Printer) { o.indent = indent } }
// NoIndent disables indenting.
func NoIndent() Option { return Indent("") }
// OmitEmpty sets whether empty field members should be omitted from output.
func OmitEmpty(omitEmpty bool) Option { return func(o *Printer) { o.omitEmpty = omitEmpty } }
// ExplicitTypes adds explicit typing to slice and map struct values that would normally be inferred by Go.
func ExplicitTypes(ok bool) Option { return func(o *Printer) { o.explicitTypes = true } }
// IgnoreGoStringer disables use of the .GoString() method.
func IgnoreGoStringer() Option { return func(o *Printer) { o.ignoreGoStringer = true } }
// Hide excludes the given types from representation, instead just printing the name of the type.
func Hide(ts ...interface{}) Option {
return func(o *Printer) {
for _, t := range ts {
rt := reflect.Indirect(reflect.ValueOf(t)).Type()
o.exclude[rt] = true
}
}
}
// AlwaysIncludeType always includes explicit type information for each item.
func AlwaysIncludeType() Option { return func(o *Printer) { o.alwaysIncludeType = true } }
// Printer represents structs in a printable manner.
type Printer struct {
indent string
omitEmpty bool
ignoreGoStringer bool
alwaysIncludeType bool
explicitTypes bool
exclude map[reflect.Type]bool
w io.Writer
}
// New creates a new Printer on w with the given Options.
func New(w io.Writer, options ...Option) *Printer {
p := &Printer{
w: w,
indent: " ",
omitEmpty: true,
exclude: map[reflect.Type]bool{},
}
for _, option := range options {
option(p)
}
return p
}
func (p *Printer) nextIndent(indent string) string {
if p.indent != "" {
return indent + p.indent
}
return ""
}
func (p *Printer) thisIndent(indent string) string {
if p.indent != "" {
return indent
}
return ""
}
// Print the values.
func (p *Printer) Print(vs ...interface{}) {
for i, v := range vs {
if i > 0 {
fmt.Fprint(p.w, " ")
}
p.reprValue(map[reflect.Value]bool{}, reflect.ValueOf(v), "", true)
}
}
// Println prints each value on a new line.
func (p *Printer) Println(vs ...interface{}) {
for i, v := range vs {
if i > 0 {
fmt.Fprint(p.w, " ")
}
p.reprValue(map[reflect.Value]bool{}, reflect.ValueOf(v), "", true)
}
fmt.Fprintln(p.w)
}
func (p *Printer) reprValue(seen map[reflect.Value]bool, v reflect.Value, indent string, showType bool) { // nolint: gocyclo
if seen[v] {
fmt.Fprint(p.w, "...")
return
}
seen[v] = true
defer delete(seen, v)
if v.Kind() == reflect.Invalid || (v.Kind() == reflect.Ptr || v.Kind() == reflect.Map || v.Kind() == reflect.Chan || v.Kind() == reflect.Slice || v.Kind() == reflect.Func || v.Kind() == reflect.Interface) && v.IsNil() {
fmt.Fprint(p.w, "nil")
return
}
if p.exclude[v.Type()] {
fmt.Fprintf(p.w, "%s...", v.Type().Name())
return
}
t := v.Type()
if t == byteSliceType {
fmt.Fprintf(p.w, "[]byte(%q)", v.Bytes())
return
}
// If we can't access a private field directly with reflection, try and do so via unsafe.
if !v.CanInterface() && v.CanAddr() {
uv := reflect.NewAt(t, unsafe.Pointer(v.UnsafeAddr())).Elem()
if uv.CanInterface() {
v = uv
}
}
// Attempt to use fmt.GoStringer interface.
if !p.ignoreGoStringer && t.Implements(goStringerType) {
fmt.Fprint(p.w, v.Interface().(fmt.GoStringer).GoString())
return
}
in := p.thisIndent(indent)
ni := p.nextIndent(indent)
switch v.Kind() {
case reflect.Slice, reflect.Array:
fmt.Fprintf(p.w, "%s{", v.Type())
if v.Len() == 0 {
fmt.Fprint(p.w, "}")
} else {
if p.indent != "" {
fmt.Fprintf(p.w, "\n")
}
for i := 0; i < v.Len(); i++ {
e := v.Index(i)
fmt.Fprintf(p.w, "%s", ni)
p.reprValue(seen, e, ni, p.alwaysIncludeType || p.explicitTypes)
if p.indent != "" {
fmt.Fprintf(p.w, ",\n")
} else if i < v.Len()-1 {
fmt.Fprintf(p.w, ", ")
}
}
fmt.Fprintf(p.w, "%s}", in)
}
case reflect.Chan:
fmt.Fprintf(p.w, "make(")
fmt.Fprintf(p.w, "%s", v.Type())
fmt.Fprintf(p.w, ", %d)", v.Cap())
case reflect.Map:
fmt.Fprintf(p.w, "%s{", v.Type())
if p.indent != "" && v.Len() != 0 {
fmt.Fprintf(p.w, "\n")
}
keys := v.MapKeys()
sort.Slice(keys, func(i, j int) bool {
return fmt.Sprint(keys[i]) < fmt.Sprint(keys[j])
})
for i, k := range keys {
kv := v.MapIndex(k)
fmt.Fprintf(p.w, "%s", ni)
p.reprValue(seen, k, ni, p.alwaysIncludeType || p.explicitTypes)
fmt.Fprintf(p.w, ": ")
p.reprValue(seen, kv, ni, true)
if p.indent != "" {
fmt.Fprintf(p.w, ",\n")
} else if i < v.Len()-1 {
fmt.Fprintf(p.w, ", ")
}
}
fmt.Fprintf(p.w, "%s}", in)
case reflect.Struct:
if td, ok := asTime(v); ok {
timeToGo(p.w, td)
} else {
if showType {
fmt.Fprintf(p.w, "%s{", v.Type())
} else {
fmt.Fprint(p.w, "{")
}
if p.indent != "" && v.NumField() != 0 {
fmt.Fprintf(p.w, "\n")
}
for i := 0; i < v.NumField(); i++ {
t := v.Type().Field(i)
f := v.Field(i)
if p.omitEmpty && f.IsZero() {
continue
}
fmt.Fprintf(p.w, "%s%s: ", ni, t.Name)
p.reprValue(seen, f, ni, true)
if p.indent != "" {
fmt.Fprintf(p.w, ",\n")
} else if i < v.NumField()-1 {
fmt.Fprintf(p.w, ", ")
}
}
fmt.Fprintf(p.w, "%s}", indent)
}
case reflect.Ptr:
if v.IsNil() {
fmt.Fprintf(p.w, "nil")
return
}
if showType {
fmt.Fprintf(p.w, "&")
}
p.reprValue(seen, v.Elem(), indent, showType)
case reflect.String:
if t.Name() != "string" || p.alwaysIncludeType {
fmt.Fprintf(p.w, "%s(%q)", t, v.String())
} else {
fmt.Fprintf(p.w, "%q", v.String())
}
case reflect.Interface:
if v.IsNil() {
fmt.Fprintf(p.w, "interface {}(nil)")
} else {
p.reprValue(seen, v.Elem(), indent, true)
}
default:
if t.Name() != realKindName[t.Kind()] || p.alwaysIncludeType {
fmt.Fprintf(p.w, "%s(%v)", t, v)
} else {
fmt.Fprintf(p.w, "%v", v)
}
}
}
func asTime(v reflect.Value) (time.Time, bool) {
if !v.CanInterface() {
return time.Time{}, false
}
t, ok := v.Interface().(time.Time)
return t, ok
}
// String returns a string representing v.
func String(v interface{}, options ...Option) string {
w := bytes.NewBuffer(nil)
options = append([]Option{NoIndent()}, options...)
p := New(w, options...)
p.Print(v)
return w.String()
}
func extractOptions(vs ...interface{}) (args []interface{}, options []Option) {
for _, v := range vs {
if o, ok := v.(Option); ok {
options = append(options, o)
} else {
args = append(args, v)
}
}
return
}
// Println prints v to os.Stdout, one per line.
func Println(vs ...interface{}) {
args, options := extractOptions(vs...)
New(os.Stdout, options...).Println(args...)
}
// Print writes a representation of v to os.Stdout, separated by spaces.
func Print(vs ...interface{}) {
args, options := extractOptions(vs...)
New(os.Stdout, options...).Print(args...)
}
func timeToGo(w io.Writer, t time.Time) {
if t.IsZero() {
fmt.Fprint(w, "time.Time{}")
return
}
var zone string
switch loc := t.Location(); loc {
case nil:
zone = "nil"
case time.UTC:
zone = "time.UTC"
case time.Local:
zone = "time.Local"
default:
n, off := t.Zone()
zone = fmt.Sprintf("time.FixedZone(%q, %d)", n, off)
}
y, m, d := t.Date()
fmt.Fprintf(w, `time.Date(%d, %d, %d, %d, %d, %d, %d, %s)`, y, m, d, t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), zone)
}
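A small usage sketch (not part of the vendored file) showing the package-level helpers and a custom Printer with some of the options defined above; the user struct is made up for illustration:
```go
package main

import (
	"fmt"
	"os"

	"github.com/alecthomas/repr"
)

type user struct {
	Name string
	Age  int
	Tags []string
}

func main() {
	u := user{Name: "ada", Tags: []string{"a", "b"}}

	// Package-level helper with default settings (two-space indent).
	repr.Println(u)

	// Options can be mixed into the arguments of the package-level helpers...
	repr.Println(u, repr.NoIndent())

	// ...or passed to a custom Printer.
	p := repr.New(os.Stdout, repr.Indent("\t"), repr.OmitEmpty(false), repr.AlwaysIncludeType())
	p.Println(u)

	// String returns the representation instead of printing it.
	fmt.Println(repr.String(u, repr.OmitEmpty(false)))
}
```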


@@ -0,0 +1,95 @@
Copyright (c) 2017 Martin Atkins
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
---------
Unicode table generation programs are under a separate copyright and license:
Copyright (c) 2014 Couchbase, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
---------
Grapheme break data is provided as part of the Unicode character database,
copyright 2016 Unicode, Inc, which is provided with the following license:
Unicode Data Files include all data files under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
Unicode Data Files do not include PDF online code charts under the
directory http://www.unicode.org/Public/.
Software includes any source code published in the Unicode Standard
or under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
NOTICE TO USER: Carefully read the following legal agreement.
BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
TERMS AND CONDITIONS OF THIS AGREEMENT.
IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
THE DATA FILES OR SOFTWARE.
COPYRIGHT AND PERMISSION NOTICE
Copyright © 1991-2017 Unicode, Inc. All rights reserved.
Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Unicode data files and any associated documentation
(the "Data Files") or Unicode software and any associated documentation
(the "Software") to deal in the Data Files or Software
without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, and/or sell copies of
the Data Files or Software, and to permit persons to whom the Data Files
or Software are furnished to do so, provided that either
(a) this copyright and permission notice appear with all copies
of the Data Files or Software, or
(b) this copyright and permission notice appear in associated
Documentation.
THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT OF THIRD PARTY RIGHTS.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THE DATA FILES OR SOFTWARE.
Except as contained in this notice, the name of a copyright holder
shall not be used in advertising or otherwise to promote the sale,
use or other dealings in these Data Files or Software without prior
written authorization of the copyright holder.


@@ -0,0 +1,30 @@
package textseg
import (
"bufio"
"bytes"
)
// AllTokens is a utility that uses a bufio.SplitFunc to produce a slice of
// all of the recognized tokens in the given buffer.
func AllTokens(buf []byte, splitFunc bufio.SplitFunc) ([][]byte, error) {
scanner := bufio.NewScanner(bytes.NewReader(buf))
scanner.Split(splitFunc)
var ret [][]byte
for scanner.Scan() {
ret = append(ret, scanner.Bytes())
}
return ret, scanner.Err()
}
// TokenCount is a utility that uses a bufio.SplitFunc to count the number of
// recognized tokens in the given buffer.
func TokenCount(buf []byte, splitFunc bufio.SplitFunc) (int, error) {
scanner := bufio.NewScanner(bytes.NewReader(buf))
scanner.Split(splitFunc)
var ret int
for scanner.Scan() {
ret++
}
return ret, scanner.Err()
}
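Both helpers accept any bufio.SplitFunc, so the standard library's bufio.ScanWords is enough for a quick sketch; the import path below is assumed from the vendor layout and may differ:
```go
package main

import (
	"bufio"
	"fmt"

	// Import path assumed for this vendored package; adjust if it differs.
	"github.com/apparentlymart/go-textseg/textseg"
)

func main() {
	buf := []byte("one two three")

	// AllTokens collects every token the SplitFunc recognizes.
	tokens, err := textseg.AllTokens(buf, bufio.ScanWords)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%q\n", tokens) // ["one" "two" "three"]

	// TokenCount only counts them.
	n, err := textseg.TokenCount(buf, bufio.ScanWords)
	if err != nil {
		panic(err)
	}
	fmt.Println(n) // 3
}
```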


@@ -0,0 +1,525 @@
# The following Ragel file was autogenerated with unicode2ragel.rb
# from: https://www.unicode.org/Public/13.0.0/ucd/emoji/emoji-data.txt
#
# It defines ["Extended_Pictographic"].
#
# To use this, make sure that your alphtype is set to byte,
# and that your input is in utf8.
%%{
machine Emoji;
Extended_Pictographic =
0xC2 0xA9 #E0.6 [1] (©️) copyright
| 0xC2 0xAE #E0.6 [1] (®️) registered
| 0xE2 0x80 0xBC #E0.6 [1] (‼️) double exclamation mark
| 0xE2 0x81 0x89 #E0.6 [1] (⁉️) exclamation question ...
| 0xE2 0x84 0xA2 #E0.6 [1] (™️) trade mark
| 0xE2 0x84 0xB9 #E0.6 [1] (ℹ️) information
| 0xE2 0x86 0x94..0x99 #E0.6 [6] (↔️..↙️) left-right arrow..do...
| 0xE2 0x86 0xA9..0xAA #E0.6 [2] (↩️..↪️) right arrow curving ...
| 0xE2 0x8C 0x9A..0x9B #E0.6 [2] (⌚..⌛) watch..hourglass done
| 0xE2 0x8C 0xA8 #E1.0 [1] (⌨️) keyboard
| 0xE2 0x8E 0x88 #E0.0 [1] (⎈) HELM SYMBOL
| 0xE2 0x8F 0x8F #E1.0 [1] (⏏️) eject button
| 0xE2 0x8F 0xA9..0xAC #E0.6 [4] (⏩..⏬) fast-forward button..f...
| 0xE2 0x8F 0xAD..0xAE #E0.7 [2] (⏭️..⏮️) next track button..l...
| 0xE2 0x8F 0xAF #E1.0 [1] (⏯️) play or pause button
| 0xE2 0x8F 0xB0 #E0.6 [1] (⏰) alarm clock
| 0xE2 0x8F 0xB1..0xB2 #E1.0 [2] (⏱️..⏲️) stopwatch..timer clock
| 0xE2 0x8F 0xB3 #E0.6 [1] (⏳) hourglass not done
| 0xE2 0x8F 0xB8..0xBA #E0.7 [3] (⏸️..⏺️) pause button..record...
| 0xE2 0x93 0x82 #E0.6 [1] (Ⓜ️) circled M
| 0xE2 0x96 0xAA..0xAB #E0.6 [2] (▪️..▫️) black small square.....
| 0xE2 0x96 0xB6 #E0.6 [1] (▶️) play button
| 0xE2 0x97 0x80 #E0.6 [1] (◀️) reverse button
| 0xE2 0x97 0xBB..0xBE #E0.6 [4] (◻️..◾) white medium square.....
| 0xE2 0x98 0x80..0x81 #E0.6 [2] (☀️..☁️) sun..cloud
| 0xE2 0x98 0x82..0x83 #E0.7 [2] (☂️..☃️) umbrella..snowman
| 0xE2 0x98 0x84 #E1.0 [1] (☄️) comet
| 0xE2 0x98 0x85 #E0.0 [1] (★) BLACK STAR
| 0xE2 0x98 0x87..0x8D #E0.0 [7] (☇..☍) LIGHTNING..OPPOSITION
| 0xE2 0x98 0x8E #E0.6 [1] (☎️) telephone
| 0xE2 0x98 0x8F..0x90 #E0.0 [2] (☏..☐) WHITE TELEPHONE..BALLO...
| 0xE2 0x98 0x91 #E0.6 [1] (☑️) check box with check
| 0xE2 0x98 0x92 #E0.0 [1] (☒) BALLOT BOX WITH X
| 0xE2 0x98 0x94..0x95 #E0.6 [2] (☔..☕) umbrella with rain dro...
| 0xE2 0x98 0x96..0x97 #E0.0 [2] (☖..☗) WHITE SHOGI PIECE..BLA...
| 0xE2 0x98 0x98 #E1.0 [1] (☘️) shamrock
| 0xE2 0x98 0x99..0x9C #E0.0 [4] (☙..☜) REVERSED ROTATED FLORA...
| 0xE2 0x98 0x9D #E0.6 [1] (☝️) index pointing up
| 0xE2 0x98 0x9E..0x9F #E0.0 [2] (☞..☟) WHITE RIGHT POINTING I...
| 0xE2 0x98 0xA0 #E1.0 [1] (☠️) skull and crossbones
| 0xE2 0x98 0xA1 #E0.0 [1] (☡) CAUTION SIGN
| 0xE2 0x98 0xA2..0xA3 #E1.0 [2] (☢️..☣️) radioactive..biohazard
| 0xE2 0x98 0xA4..0xA5 #E0.0 [2] (☤..☥) CADUCEUS..ANKH
| 0xE2 0x98 0xA6 #E1.0 [1] (☦️) orthodox cross
| 0xE2 0x98 0xA7..0xA9 #E0.0 [3] (☧..☩) CHI RHO..CROSS OF JERU...
| 0xE2 0x98 0xAA #E0.7 [1] (☪️) star and crescent
| 0xE2 0x98 0xAB..0xAD #E0.0 [3] (☫..☭) FARSI SYMBOL..HAMMER A...
| 0xE2 0x98 0xAE #E1.0 [1] (☮️) peace symbol
| 0xE2 0x98 0xAF #E0.7 [1] (☯️) yin yang
| 0xE2 0x98 0xB0..0xB7 #E0.0 [8] (☰..☷) TRIGRAM FOR HEAVEN..TR...
| 0xE2 0x98 0xB8..0xB9 #E0.7 [2] (☸️..☹️) wheel of dharma..fro...
| 0xE2 0x98 0xBA #E0.6 [1] (☺️) smiling face
| 0xE2 0x98 0xBB..0xBF #E0.0 [5] (☻..☿) BLACK SMILING FACE..ME...
| 0xE2 0x99 0x80 #E4.0 [1] (♀️) female sign
| 0xE2 0x99 0x81 #E0.0 [1] (♁) EARTH
| 0xE2 0x99 0x82 #E4.0 [1] (♂️) male sign
| 0xE2 0x99 0x83..0x87 #E0.0 [5] (♃..♇) JUPITER..PLUTO
| 0xE2 0x99 0x88..0x93 #E0.6 [12] (♈..♓) Aries..Pisces
| 0xE2 0x99 0x94..0x9E #E0.0 [11] (♔..♞) WHITE CHESS KING..BLAC...
| 0xE2 0x99 0x9F #E11.0 [1] (♟️) chess pawn
| 0xE2 0x99 0xA0 #E0.6 [1] (♠️) spade suit
| 0xE2 0x99 0xA1..0xA2 #E0.0 [2] (♡..♢) WHITE HEART SUIT..WHIT...
| 0xE2 0x99 0xA3 #E0.6 [1] (♣️) club suit
| 0xE2 0x99 0xA4 #E0.0 [1] (♤) WHITE SPADE SUIT
| 0xE2 0x99 0xA5..0xA6 #E0.6 [2] (♥️..♦️) heart suit..diamond ...
| 0xE2 0x99 0xA7 #E0.0 [1] (♧) WHITE CLUB SUIT
| 0xE2 0x99 0xA8 #E0.6 [1] (♨️) hot springs
| 0xE2 0x99 0xA9..0xBA #E0.0 [18] (♩..♺) QUARTER NOTE..RECYCLIN...
| 0xE2 0x99 0xBB #E0.6 [1] (♻️) recycling symbol
| 0xE2 0x99 0xBC..0xBD #E0.0 [2] (♼..♽) RECYCLED PAPER SYMBOL....
| 0xE2 0x99 0xBE #E11.0 [1] (♾️) infinity
| 0xE2 0x99 0xBF #E0.6 [1] (♿) wheelchair symbol
| 0xE2 0x9A 0x80..0x85 #E0.0 [6] (⚀..⚅) DIE FACE-1..DIE FACE-6
| 0xE2 0x9A 0x90..0x91 #E0.0 [2] (⚐..⚑) WHITE FLAG..BLACK FLAG
| 0xE2 0x9A 0x92 #E1.0 [1] (⚒️) hammer and pick
| 0xE2 0x9A 0x93 #E0.6 [1] (⚓) anchor
| 0xE2 0x9A 0x94 #E1.0 [1] (⚔️) crossed swords
| 0xE2 0x9A 0x95 #E4.0 [1] (⚕️) medical symbol
| 0xE2 0x9A 0x96..0x97 #E1.0 [2] (⚖️..⚗️) balance scale..alembic
| 0xE2 0x9A 0x98 #E0.0 [1] (⚘) FLOWER
| 0xE2 0x9A 0x99 #E1.0 [1] (⚙️) gear
| 0xE2 0x9A 0x9A #E0.0 [1] (⚚) STAFF OF HERMES
| 0xE2 0x9A 0x9B..0x9C #E1.0 [2] (⚛️..⚜️) atom symbol..fleur-d...
| 0xE2 0x9A 0x9D..0x9F #E0.0 [3] (⚝..⚟) OUTLINED WHITE STAR..T...
| 0xE2 0x9A 0xA0..0xA1 #E0.6 [2] (⚠️..⚡) warning..high voltage
| 0xE2 0x9A 0xA2..0xA6 #E0.0 [5] (⚢..⚦) DOUBLED FEMALE SIGN..M...
| 0xE2 0x9A 0xA7 #E13.0 [1] (⚧️) transgender symbol
| 0xE2 0x9A 0xA8..0xA9 #E0.0 [2] (⚨..⚩) VERTICAL MALE WITH STR...
| 0xE2 0x9A 0xAA..0xAB #E0.6 [2] (⚪..⚫) white circle..black ci...
| 0xE2 0x9A 0xAC..0xAF #E0.0 [4] (⚬..⚯) MEDIUM SMALL WHITE CIR...
| 0xE2 0x9A 0xB0..0xB1 #E1.0 [2] (⚰️..⚱️) coffin..funeral urn
| 0xE2 0x9A 0xB2..0xBC #E0.0 [11] (⚲..⚼) NEUTER..SESQUIQUADRATE
| 0xE2 0x9A 0xBD..0xBE #E0.6 [2] (⚽..⚾) soccer ball..baseball
| 0xE2 0x9A 0xBF..0xFF #E0.0 [5] (⚿..⛃) SQUARED KEY..BLACK DRA...
| 0xE2 0x9B 0x00..0x83 #
| 0xE2 0x9B 0x84..0x85 #E0.6 [2] (⛄..⛅) snowman without snow.....
| 0xE2 0x9B 0x86..0x87 #E0.0 [2] (⛆..⛇) RAIN..BLACK SNOWMAN
| 0xE2 0x9B 0x88 #E0.7 [1] (⛈️) cloud with lightning ...
| 0xE2 0x9B 0x89..0x8D #E0.0 [5] (⛉..⛍) TURNED WHITE SHOGI PIE...
| 0xE2 0x9B 0x8E #E0.6 [1] (⛎) Ophiuchus
| 0xE2 0x9B 0x8F #E0.7 [1] (⛏️) pick
| 0xE2 0x9B 0x90 #E0.0 [1] (⛐) CAR SLIDING
| 0xE2 0x9B 0x91 #E0.7 [1] (⛑️) rescue workers helmet
| 0xE2 0x9B 0x92 #E0.0 [1] (⛒) CIRCLED CROSSING LANES
| 0xE2 0x9B 0x93 #E0.7 [1] (⛓️) chains
| 0xE2 0x9B 0x94 #E0.6 [1] (⛔) no entry
| 0xE2 0x9B 0x95..0xA8 #E0.0 [20] (⛕..⛨) ALTERNATE ONE-WAY LEFT...
| 0xE2 0x9B 0xA9 #E0.7 [1] (⛩️) shinto shrine
| 0xE2 0x9B 0xAA #E0.6 [1] (⛪) church
| 0xE2 0x9B 0xAB..0xAF #E0.0 [5] (⛫..⛯) CASTLE..MAP SYMBOL FOR...
| 0xE2 0x9B 0xB0..0xB1 #E0.7 [2] (⛰️..⛱️) mountain..umbrella o...
| 0xE2 0x9B 0xB2..0xB3 #E0.6 [2] (⛲..⛳) fountain..flag in hole
| 0xE2 0x9B 0xB4 #E0.7 [1] (⛴️) ferry
| 0xE2 0x9B 0xB5 #E0.6 [1] (⛵) sailboat
| 0xE2 0x9B 0xB6 #E0.0 [1] (⛶) SQUARE FOUR CORNERS
| 0xE2 0x9B 0xB7..0xB9 #E0.7 [3] (⛷️..⛹️) skier..person bounci...
| 0xE2 0x9B 0xBA #E0.6 [1] (⛺) tent
| 0xE2 0x9B 0xBB..0xBC #E0.0 [2] (⛻..⛼) JAPANESE BANK SYMBOL.....
| 0xE2 0x9B 0xBD #E0.6 [1] (⛽) fuel pump
| 0xE2 0x9B 0xBE..0xFF #E0.0 [4] (⛾..✁) CUP ON BLACK SQUARE..U...
| 0xE2 0x9C 0x00..0x81 #
| 0xE2 0x9C 0x82 #E0.6 [1] (✂️) scissors
| 0xE2 0x9C 0x83..0x84 #E0.0 [2] (✃..✄) LOWER BLADE SCISSORS.....
| 0xE2 0x9C 0x85 #E0.6 [1] (✅) check mark button
| 0xE2 0x9C 0x88..0x8C #E0.6 [5] (✈️..✌️) airplane..victory hand
| 0xE2 0x9C 0x8D #E0.7 [1] (✍️) writing hand
| 0xE2 0x9C 0x8E #E0.0 [1] (✎) LOWER RIGHT PENCIL
| 0xE2 0x9C 0x8F #E0.6 [1] (✏️) pencil
| 0xE2 0x9C 0x90..0x91 #E0.0 [2] (✐..✑) UPPER RIGHT PENCIL..WH...
| 0xE2 0x9C 0x92 #E0.6 [1] (✒️) black nib
| 0xE2 0x9C 0x94 #E0.6 [1] (✔️) check mark
| 0xE2 0x9C 0x96 #E0.6 [1] (✖️) multiply
| 0xE2 0x9C 0x9D #E0.7 [1] (✝️) latin cross
| 0xE2 0x9C 0xA1 #E0.7 [1] (✡️) star of David
| 0xE2 0x9C 0xA8 #E0.6 [1] (✨) sparkles
| 0xE2 0x9C 0xB3..0xB4 #E0.6 [2] (✳️..✴️) eight-spoked asteris...
| 0xE2 0x9D 0x84 #E0.6 [1] (❄️) snowflake
| 0xE2 0x9D 0x87 #E0.6 [1] (❇️) sparkle
| 0xE2 0x9D 0x8C #E0.6 [1] (❌) cross mark
| 0xE2 0x9D 0x8E #E0.6 [1] (❎) cross mark button
| 0xE2 0x9D 0x93..0x95 #E0.6 [3] (❓..❕) question mark..white e...
| 0xE2 0x9D 0x97 #E0.6 [1] (❗) exclamation mark
| 0xE2 0x9D 0xA3 #E1.0 [1] (❣️) heart exclamation
| 0xE2 0x9D 0xA4 #E0.6 [1] (❤️) red heart
| 0xE2 0x9D 0xA5..0xA7 #E0.0 [3] (❥..❧) ROTATED HEAVY BLACK HE...
| 0xE2 0x9E 0x95..0x97 #E0.6 [3] (➕..➗) plus..divide
| 0xE2 0x9E 0xA1 #E0.6 [1] (➡️) right arrow
| 0xE2 0x9E 0xB0 #E0.6 [1] (➰) curly loop
| 0xE2 0x9E 0xBF #E1.0 [1] (➿) double curly loop
| 0xE2 0xA4 0xB4..0xB5 #E0.6 [2] (⤴️..⤵️) right arrow curving ...
| 0xE2 0xAC 0x85..0x87 #E0.6 [3] (⬅️..⬇️) left arrow..down arrow
| 0xE2 0xAC 0x9B..0x9C #E0.6 [2] (⬛..⬜) black large square..wh...
| 0xE2 0xAD 0x90 #E0.6 [1] (⭐) star
| 0xE2 0xAD 0x95 #E0.6 [1] (⭕) hollow red circle
| 0xE3 0x80 0xB0 #E0.6 [1] (〰️) wavy dash
| 0xE3 0x80 0xBD #E0.6 [1] (〽️) part alternation mark
| 0xE3 0x8A 0x97 #E0.6 [1] (㊗️) Japanese “congratulat...
| 0xE3 0x8A 0x99 #E0.6 [1] (㊙️) Japanese “secret” button
| 0xF0 0x9F 0x80 0x80..0x83 #E0.0 [4] (🀀..🀃) MAHJONG TILE EAST W...
| 0xF0 0x9F 0x80 0x84 #E0.6 [1] (🀄) mahjong red dragon
| 0xF0 0x9F 0x80 0x85..0xFF #E0.0 [202] (🀅..🃎) MAHJONG TILE ...
| 0xF0 0x9F 0x81..0x82 0x00..0xFF #
| 0xF0 0x9F 0x83 0x00..0x8E #
| 0xF0 0x9F 0x83 0x8F #E0.6 [1] (🃏) joker
| 0xF0 0x9F 0x83 0x90..0xBF #E0.0 [48] (🃐..🃿) <reserved-1F0D0>..<...
| 0xF0 0x9F 0x84 0x8D..0x8F #E0.0 [3] (🄍..🄏) CIRCLED ZERO WITH S...
| 0xF0 0x9F 0x84 0xAF #E0.0 [1] (🄯) COPYLEFT SYMBOL
| 0xF0 0x9F 0x85 0xAC..0xAF #E0.0 [4] (🅬..🅯) RAISED MR SIGN..CIR...
| 0xF0 0x9F 0x85 0xB0..0xB1 #E0.6 [2] (🅰️..🅱️) A button (blood t...
| 0xF0 0x9F 0x85 0xBE..0xBF #E0.6 [2] (🅾️..🅿️) O button (blood t...
| 0xF0 0x9F 0x86 0x8E #E0.6 [1] (🆎) AB button (blood type)
| 0xF0 0x9F 0x86 0x91..0x9A #E0.6 [10] (🆑..🆚) CL button..VS button
| 0xF0 0x9F 0x86 0xAD..0xFF #E0.0 [57] (🆭..🇥) MASK WORK SYMBOL..<...
| 0xF0 0x9F 0x87 0x00..0xA5 #
| 0xF0 0x9F 0x88 0x81..0x82 #E0.6 [2] (🈁..🈂️) Japanese “here” bu...
| 0xF0 0x9F 0x88 0x83..0x8F #E0.0 [13] (🈃..🈏) <reserved-1F203>..<...
| 0xF0 0x9F 0x88 0x9A #E0.6 [1] (🈚) Japanese “free of char...
| 0xF0 0x9F 0x88 0xAF #E0.6 [1] (🈯) Japanese “reserved” bu...
| 0xF0 0x9F 0x88 0xB2..0xBA #E0.6 [9] (🈲..🈺) Japanese “prohibite...
| 0xF0 0x9F 0x88 0xBC..0xBF #E0.0 [4] (🈼..🈿) <reserved-1F23C>..<...
| 0xF0 0x9F 0x89 0x89..0x8F #E0.0 [7] (🉉..🉏) <reserved-1F249>..<...
| 0xF0 0x9F 0x89 0x90..0x91 #E0.6 [2] (🉐..🉑) Japanese “bargain” ...
| 0xF0 0x9F 0x89 0x92..0xFF #E0.0 [174] (🉒..🋿) <reserved-1F2...
| 0xF0 0x9F 0x8A..0x8A 0x00..0xFF #
| 0xF0 0x9F 0x8B 0x00..0xBF #
| 0xF0 0x9F 0x8C 0x80..0x8C #E0.6 [13] (🌀..🌌) cyclone..milky way
| 0xF0 0x9F 0x8C 0x8D..0x8E #E0.7 [2] (🌍..🌎) globe showing Europ...
| 0xF0 0x9F 0x8C 0x8F #E0.6 [1] (🌏) globe showing Asia-Aus...
| 0xF0 0x9F 0x8C 0x90 #E1.0 [1] (🌐) globe with meridians
| 0xF0 0x9F 0x8C 0x91 #E0.6 [1] (🌑) new moon
| 0xF0 0x9F 0x8C 0x92 #E1.0 [1] (🌒) waxing crescent moon
| 0xF0 0x9F 0x8C 0x93..0x95 #E0.6 [3] (🌓..🌕) first quarter moon....
| 0xF0 0x9F 0x8C 0x96..0x98 #E1.0 [3] (🌖..🌘) waning gibbous moon...
| 0xF0 0x9F 0x8C 0x99 #E0.6 [1] (🌙) crescent moon
| 0xF0 0x9F 0x8C 0x9A #E1.0 [1] (🌚) new moon face
| 0xF0 0x9F 0x8C 0x9B #E0.6 [1] (🌛) first quarter moon face
| 0xF0 0x9F 0x8C 0x9C #E0.7 [1] (🌜) last quarter moon face
| 0xF0 0x9F 0x8C 0x9D..0x9E #E1.0 [2] (🌝..🌞) full moon face..sun...
| 0xF0 0x9F 0x8C 0x9F..0xA0 #E0.6 [2] (🌟..🌠) glowing star..shoot...
| 0xF0 0x9F 0x8C 0xA1 #E0.7 [1] (🌡️) thermometer
| 0xF0 0x9F 0x8C 0xA2..0xA3 #E0.0 [2] (🌢..🌣) BLACK DROPLET..WHIT...
| 0xF0 0x9F 0x8C 0xA4..0xAC #E0.7 [9] (🌤️..🌬️) sun behind small ...
| 0xF0 0x9F 0x8C 0xAD..0xAF #E1.0 [3] (🌭..🌯) hot dog..burrito
| 0xF0 0x9F 0x8C 0xB0..0xB1 #E0.6 [2] (🌰..🌱) chestnut..seedling
| 0xF0 0x9F 0x8C 0xB2..0xB3 #E1.0 [2] (🌲..🌳) evergreen tree..dec...
| 0xF0 0x9F 0x8C 0xB4..0xB5 #E0.6 [2] (🌴..🌵) palm tree..cactus
| 0xF0 0x9F 0x8C 0xB6 #E0.7 [1] (🌶️) hot pepper
| 0xF0 0x9F 0x8C 0xB7..0xFF #E0.6 [20] (🌷..🍊) tulip..tangerine
| 0xF0 0x9F 0x8D 0x00..0x8A #
| 0xF0 0x9F 0x8D 0x8B #E1.0 [1] (🍋) lemon
| 0xF0 0x9F 0x8D 0x8C..0x8F #E0.6 [4] (🍌..🍏) banana..green apple
| 0xF0 0x9F 0x8D 0x90 #E1.0 [1] (🍐) pear
| 0xF0 0x9F 0x8D 0x91..0xBB #E0.6 [43] (🍑..🍻) peach..clinking bee...
| 0xF0 0x9F 0x8D 0xBC #E1.0 [1] (🍼) baby bottle
| 0xF0 0x9F 0x8D 0xBD #E0.7 [1] (🍽️) fork and knife with p...
| 0xF0 0x9F 0x8D 0xBE..0xBF #E1.0 [2] (🍾..🍿) bottle with popping...
| 0xF0 0x9F 0x8E 0x80..0x93 #E0.6 [20] (🎀..🎓) ribbon..graduation cap
| 0xF0 0x9F 0x8E 0x94..0x95 #E0.0 [2] (🎔..🎕) HEART WITH TIP ON T...
| 0xF0 0x9F 0x8E 0x96..0x97 #E0.7 [2] (🎖️..🎗️) military medal..r...
| 0xF0 0x9F 0x8E 0x98 #E0.0 [1] (🎘) MUSICAL KEYBOARD WITH ...
| 0xF0 0x9F 0x8E 0x99..0x9B #E0.7 [3] (🎙️..🎛️) studio microphone...
| 0xF0 0x9F 0x8E 0x9C..0x9D #E0.0 [2] (🎜..🎝) BEAMED ASCENDING MU...
| 0xF0 0x9F 0x8E 0x9E..0x9F #E0.7 [2] (🎞️..🎟️) film frames..admi...
| 0xF0 0x9F 0x8E 0xA0..0xFF #E0.6 [37] (🎠..🏄) carousel horse..per...
| 0xF0 0x9F 0x8F 0x00..0x84 #
| 0xF0 0x9F 0x8F 0x85 #E1.0 [1] (🏅) sports medal
| 0xF0 0x9F 0x8F 0x86 #E0.6 [1] (🏆) trophy
| 0xF0 0x9F 0x8F 0x87 #E1.0 [1] (🏇) horse racing
| 0xF0 0x9F 0x8F 0x88 #E0.6 [1] (🏈) american football
| 0xF0 0x9F 0x8F 0x89 #E1.0 [1] (🏉) rugby football
| 0xF0 0x9F 0x8F 0x8A #E0.6 [1] (🏊) person swimming
| 0xF0 0x9F 0x8F 0x8B..0x8E #E0.7 [4] (🏋️..🏎️) person lifting we...
| 0xF0 0x9F 0x8F 0x8F..0x93 #E1.0 [5] (🏏..🏓) cricket game..ping ...
| 0xF0 0x9F 0x8F 0x94..0x9F #E0.7 [12] (🏔️..🏟️) snow-capped mount...
| 0xF0 0x9F 0x8F 0xA0..0xA3 #E0.6 [4] (🏠..🏣) house..Japanese pos...
| 0xF0 0x9F 0x8F 0xA4 #E1.0 [1] (🏤) post office
| 0xF0 0x9F 0x8F 0xA5..0xB0 #E0.6 [12] (🏥..🏰) hospital..castle
| 0xF0 0x9F 0x8F 0xB1..0xB2 #E0.0 [2] (🏱..🏲) WHITE PENNANT..BLAC...
| 0xF0 0x9F 0x8F 0xB3 #E0.7 [1] (🏳️) white flag
| 0xF0 0x9F 0x8F 0xB4 #E1.0 [1] (🏴) black flag
| 0xF0 0x9F 0x8F 0xB5 #E0.7 [1] (🏵️) rosette
| 0xF0 0x9F 0x8F 0xB6 #E0.0 [1] (🏶) BLACK ROSETTE
| 0xF0 0x9F 0x8F 0xB7 #E0.7 [1] (🏷️) label
| 0xF0 0x9F 0x8F 0xB8..0xBA #E1.0 [3] (🏸..🏺) badminton..amphora
| 0xF0 0x9F 0x90 0x80..0x87 #E1.0 [8] (🐀..🐇) rat..rabbit
| 0xF0 0x9F 0x90 0x88 #E0.7 [1] (🐈) cat
| 0xF0 0x9F 0x90 0x89..0x8B #E1.0 [3] (🐉..🐋) dragon..whale
| 0xF0 0x9F 0x90 0x8C..0x8E #E0.6 [3] (🐌..🐎) snail..horse
| 0xF0 0x9F 0x90 0x8F..0x90 #E1.0 [2] (🐏..🐐) ram..goat
| 0xF0 0x9F 0x90 0x91..0x92 #E0.6 [2] (🐑..🐒) ewe..monkey
| 0xF0 0x9F 0x90 0x93 #E1.0 [1] (🐓) rooster
| 0xF0 0x9F 0x90 0x94 #E0.6 [1] (🐔) chicken
| 0xF0 0x9F 0x90 0x95 #E0.7 [1] (🐕) dog
| 0xF0 0x9F 0x90 0x96 #E1.0 [1] (🐖) pig
| 0xF0 0x9F 0x90 0x97..0xA9 #E0.6 [19] (🐗..🐩) boar..poodle
| 0xF0 0x9F 0x90 0xAA #E1.0 [1] (🐪) camel
| 0xF0 0x9F 0x90 0xAB..0xBE #E0.6 [20] (🐫..🐾) two-hump camel..paw...
| 0xF0 0x9F 0x90 0xBF #E0.7 [1] (🐿️) chipmunk
| 0xF0 0x9F 0x91 0x80 #E0.6 [1] (👀) eyes
| 0xF0 0x9F 0x91 0x81 #E0.7 [1] (👁️) eye
| 0xF0 0x9F 0x91 0x82..0xA4 #E0.6 [35] (👂..👤) ear..bust in silhou...
| 0xF0 0x9F 0x91 0xA5 #E1.0 [1] (👥) busts in silhouette
| 0xF0 0x9F 0x91 0xA6..0xAB #E0.6 [6] (👦..👫) boy..woman and man ...
| 0xF0 0x9F 0x91 0xAC..0xAD #E1.0 [2] (👬..👭) men holding hands.....
| 0xF0 0x9F 0x91 0xAE..0xFF #E0.6 [63] (👮..💬) police officer..spe...
| 0xF0 0x9F 0x92 0x00..0xAC #
| 0xF0 0x9F 0x92 0xAD #E1.0 [1] (💭) thought balloon
| 0xF0 0x9F 0x92 0xAE..0xB5 #E0.6 [8] (💮..💵) white flower..dolla...
| 0xF0 0x9F 0x92 0xB6..0xB7 #E1.0 [2] (💶..💷) euro banknote..poun...
| 0xF0 0x9F 0x92 0xB8..0xFF #E0.6 [52] (💸..📫) money with wings..c...
| 0xF0 0x9F 0x93 0x00..0xAB #
| 0xF0 0x9F 0x93 0xAC..0xAD #E0.7 [2] (📬..📭) open mailbox with r...
| 0xF0 0x9F 0x93 0xAE #E0.6 [1] (📮) postbox
| 0xF0 0x9F 0x93 0xAF #E1.0 [1] (📯) postal horn
| 0xF0 0x9F 0x93 0xB0..0xB4 #E0.6 [5] (📰..📴) newspaper..mobile p...
| 0xF0 0x9F 0x93 0xB5 #E1.0 [1] (📵) no mobile phones
| 0xF0 0x9F 0x93 0xB6..0xB7 #E0.6 [2] (📶..📷) antenna bars..camera
| 0xF0 0x9F 0x93 0xB8 #E1.0 [1] (📸) camera with flash
| 0xF0 0x9F 0x93 0xB9..0xBC #E0.6 [4] (📹..📼) video camera..video...
| 0xF0 0x9F 0x93 0xBD #E0.7 [1] (📽️) film projector
| 0xF0 0x9F 0x93 0xBE #E0.0 [1] (📾) PORTABLE STEREO
| 0xF0 0x9F 0x93 0xBF..0xFF #E1.0 [4] (📿..🔂) prayer beads..repea...
| 0xF0 0x9F 0x94 0x00..0x82 #
| 0xF0 0x9F 0x94 0x83 #E0.6 [1] (🔃) clockwise vertical arrows
| 0xF0 0x9F 0x94 0x84..0x87 #E1.0 [4] (🔄..🔇) counterclockwise ar...
| 0xF0 0x9F 0x94 0x88 #E0.7 [1] (🔈) speaker low volume
| 0xF0 0x9F 0x94 0x89 #E1.0 [1] (🔉) speaker medium volume
| 0xF0 0x9F 0x94 0x8A..0x94 #E0.6 [11] (🔊..🔔) speaker high volume...
| 0xF0 0x9F 0x94 0x95 #E1.0 [1] (🔕) bell with slash
| 0xF0 0x9F 0x94 0x96..0xAB #E0.6 [22] (🔖..🔫) bookmark..pistol
| 0xF0 0x9F 0x94 0xAC..0xAD #E1.0 [2] (🔬..🔭) microscope..telescope
| 0xF0 0x9F 0x94 0xAE..0xBD #E0.6 [16] (🔮..🔽) crystal ball..downw...
| 0xF0 0x9F 0x95 0x86..0x88 #E0.0 [3] (🕆..🕈) WHITE LATIN CROSS.....
| 0xF0 0x9F 0x95 0x89..0x8A #E0.7 [2] (🕉️..🕊️) om..dove
| 0xF0 0x9F 0x95 0x8B..0x8E #E1.0 [4] (🕋..🕎) kaaba..menorah
| 0xF0 0x9F 0x95 0x8F #E0.0 [1] (🕏) BOWL OF HYGIEIA
| 0xF0 0x9F 0x95 0x90..0x9B #E0.6 [12] (🕐..🕛) one o’clock..twelve...
| 0xF0 0x9F 0x95 0x9C..0xA7 #E0.7 [12] (🕜..🕧) one-thirty..twelve-...
| 0xF0 0x9F 0x95 0xA8..0xAE #E0.0 [7] (🕨..🕮) RIGHT SPEAKER..BOOK
| 0xF0 0x9F 0x95 0xAF..0xB0 #E0.7 [2] (🕯️..🕰️) candle..mantelpie...
| 0xF0 0x9F 0x95 0xB1..0xB2 #E0.0 [2] (🕱..🕲) BLACK SKULL AND CRO...
| 0xF0 0x9F 0x95 0xB3..0xB9 #E0.7 [7] (🕳️..🕹️) hole..joystick
| 0xF0 0x9F 0x95 0xBA #E3.0 [1] (🕺) man dancing
| 0xF0 0x9F 0x95 0xBB..0xFF #E0.0 [12] (🕻..🖆) LEFT HAND TELEPHONE...
| 0xF0 0x9F 0x96 0x00..0x86 #
| 0xF0 0x9F 0x96 0x87 #E0.7 [1] (🖇️) linked paperclips
| 0xF0 0x9F 0x96 0x88..0x89 #E0.0 [2] (🖈..🖉) BLACK PUSHPIN..LOWE...
| 0xF0 0x9F 0x96 0x8A..0x8D #E0.7 [4] (🖊️..🖍️) pen..crayon
| 0xF0 0x9F 0x96 0x8E..0x8F #E0.0 [2] (🖎..🖏) LEFT WRITING HAND.....
| 0xF0 0x9F 0x96 0x90 #E0.7 [1] (🖐️) hand with fingers spl...
| 0xF0 0x9F 0x96 0x91..0x94 #E0.0 [4] (🖑..🖔) REVERSED RAISED HAN...
| 0xF0 0x9F 0x96 0x95..0x96 #E1.0 [2] (🖕..🖖) middle finger..vulc...
| 0xF0 0x9F 0x96 0x97..0xA3 #E0.0 [13] (🖗..🖣) WHITE DOWN POINTING...
| 0xF0 0x9F 0x96 0xA4 #E3.0 [1] (🖤) black heart
| 0xF0 0x9F 0x96 0xA5 #E0.7 [1] (🖥️) desktop computer
| 0xF0 0x9F 0x96 0xA6..0xA7 #E0.0 [2] (🖦..🖧) KEYBOARD AND MOUSE....
| 0xF0 0x9F 0x96 0xA8 #E0.7 [1] (🖨️) printer
| 0xF0 0x9F 0x96 0xA9..0xB0 #E0.0 [8] (🖩..🖰) POCKET CALCULATOR.....
| 0xF0 0x9F 0x96 0xB1..0xB2 #E0.7 [2] (🖱️..🖲️) computer mouse..t...
| 0xF0 0x9F 0x96 0xB3..0xBB #E0.0 [9] (🖳..🖻) OLD PERSONAL COMPUT...
| 0xF0 0x9F 0x96 0xBC #E0.7 [1] (🖼️) framed picture
| 0xF0 0x9F 0x96 0xBD..0xFF #E0.0 [5] (🖽..🗁) FRAME WITH TILES..O...
| 0xF0 0x9F 0x97 0x00..0x81 #
| 0xF0 0x9F 0x97 0x82..0x84 #E0.7 [3] (🗂️..🗄️) card index divide...
| 0xF0 0x9F 0x97 0x85..0x90 #E0.0 [12] (🗅..🗐) EMPTY NOTE..PAGES
| 0xF0 0x9F 0x97 0x91..0x93 #E0.7 [3] (🗑️..🗓️) wastebasket..spir...
| 0xF0 0x9F 0x97 0x94..0x9B #E0.0 [8] (🗔..🗛) DESKTOP WINDOW..DEC...
| 0xF0 0x9F 0x97 0x9C..0x9E #E0.7 [3] (🗜️..🗞️) clamp..rolled-up ...
| 0xF0 0x9F 0x97 0x9F..0xA0 #E0.0 [2] (🗟..🗠) PAGE WITH CIRCLED T...
| 0xF0 0x9F 0x97 0xA1 #E0.7 [1] (🗡️) dagger
| 0xF0 0x9F 0x97 0xA2 #E0.0 [1] (🗢) LIPS
| 0xF0 0x9F 0x97 0xA3 #E0.7 [1] (🗣️) speaking head
| 0xF0 0x9F 0x97 0xA4..0xA7 #E0.0 [4] (🗤..🗧) THREE RAYS ABOVE..T...
| 0xF0 0x9F 0x97 0xA8 #E2.0 [1] (🗨️) left speech bubble
| 0xF0 0x9F 0x97 0xA9..0xAE #E0.0 [6] (🗩..🗮) RIGHT SPEECH BUBBLE...
| 0xF0 0x9F 0x97 0xAF #E0.7 [1] (🗯️) right anger bubble
| 0xF0 0x9F 0x97 0xB0..0xB2 #E0.0 [3] (🗰..🗲) MOOD BUBBLE..LIGHTN...
| 0xF0 0x9F 0x97 0xB3 #E0.7 [1] (🗳️) ballot box with ballot
| 0xF0 0x9F 0x97 0xB4..0xB9 #E0.0 [6] (🗴..🗹) BALLOT SCRIPT X..BA...
| 0xF0 0x9F 0x97 0xBA #E0.7 [1] (🗺️) world map
| 0xF0 0x9F 0x97 0xBB..0xBF #E0.6 [5] (🗻..🗿) mount fuji..moai
| 0xF0 0x9F 0x98 0x80 #E1.0 [1] (😀) grinning face
| 0xF0 0x9F 0x98 0x81..0x86 #E0.6 [6] (😁..😆) beaming face with s...
| 0xF0 0x9F 0x98 0x87..0x88 #E1.0 [2] (😇..😈) smiling face with h...
| 0xF0 0x9F 0x98 0x89..0x8D #E0.6 [5] (😉..😍) winking face..smili...
| 0xF0 0x9F 0x98 0x8E #E1.0 [1] (😎) smiling face with sung...
| 0xF0 0x9F 0x98 0x8F #E0.6 [1] (😏) smirking face
| 0xF0 0x9F 0x98 0x90 #E0.7 [1] (😐) neutral face
| 0xF0 0x9F 0x98 0x91 #E1.0 [1] (😑) expressionless face
| 0xF0 0x9F 0x98 0x92..0x94 #E0.6 [3] (😒..😔) unamused face..pens...
| 0xF0 0x9F 0x98 0x95 #E1.0 [1] (😕) confused face
| 0xF0 0x9F 0x98 0x96 #E0.6 [1] (😖) confounded face
| 0xF0 0x9F 0x98 0x97 #E1.0 [1] (😗) kissing face
| 0xF0 0x9F 0x98 0x98 #E0.6 [1] (😘) face blowing a kiss
| 0xF0 0x9F 0x98 0x99 #E1.0 [1] (😙) kissing face with smil...
| 0xF0 0x9F 0x98 0x9A #E0.6 [1] (😚) kissing face with clos...
| 0xF0 0x9F 0x98 0x9B #E1.0 [1] (😛) face with tongue
| 0xF0 0x9F 0x98 0x9C..0x9E #E0.6 [3] (😜..😞) winking face with t...
| 0xF0 0x9F 0x98 0x9F #E1.0 [1] (😟) worried face
| 0xF0 0x9F 0x98 0xA0..0xA5 #E0.6 [6] (😠..😥) angry face..sad but...
| 0xF0 0x9F 0x98 0xA6..0xA7 #E1.0 [2] (😦..😧) frowning face with ...
| 0xF0 0x9F 0x98 0xA8..0xAB #E0.6 [4] (😨..😫) fearful face..tired...
| 0xF0 0x9F 0x98 0xAC #E1.0 [1] (😬) grimacing face
| 0xF0 0x9F 0x98 0xAD #E0.6 [1] (😭) loudly crying face
| 0xF0 0x9F 0x98 0xAE..0xAF #E1.0 [2] (😮..😯) face with open mout...
| 0xF0 0x9F 0x98 0xB0..0xB3 #E0.6 [4] (😰..😳) anxious face with s...
| 0xF0 0x9F 0x98 0xB4 #E1.0 [1] (😴) sleeping face
| 0xF0 0x9F 0x98 0xB5 #E0.6 [1] (😵) dizzy face
| 0xF0 0x9F 0x98 0xB6 #E1.0 [1] (😶) face without mouth
| 0xF0 0x9F 0x98 0xB7..0xFF #E0.6 [10] (😷..🙀) face with medical m...
| 0xF0 0x9F 0x99 0x00..0x80 #
| 0xF0 0x9F 0x99 0x81..0x84 #E1.0 [4] (🙁..🙄) slightly frowning f...
| 0xF0 0x9F 0x99 0x85..0x8F #E0.6 [11] (🙅..🙏) person gesturing NO...
| 0xF0 0x9F 0x9A 0x80 #E0.6 [1] (🚀) rocket
| 0xF0 0x9F 0x9A 0x81..0x82 #E1.0 [2] (🚁..🚂) helicopter..locomotive
| 0xF0 0x9F 0x9A 0x83..0x85 #E0.6 [3] (🚃..🚅) railway car..bullet...
| 0xF0 0x9F 0x9A 0x86 #E1.0 [1] (🚆) train
| 0xF0 0x9F 0x9A 0x87 #E0.6 [1] (🚇) metro
| 0xF0 0x9F 0x9A 0x88 #E1.0 [1] (🚈) light rail
| 0xF0 0x9F 0x9A 0x89 #E0.6 [1] (🚉) station
| 0xF0 0x9F 0x9A 0x8A..0x8B #E1.0 [2] (🚊..🚋) tram..tram car
| 0xF0 0x9F 0x9A 0x8C #E0.6 [1] (🚌) bus
| 0xF0 0x9F 0x9A 0x8D #E0.7 [1] (🚍) oncoming bus
| 0xF0 0x9F 0x9A 0x8E #E1.0 [1] (🚎) trolleybus
| 0xF0 0x9F 0x9A 0x8F #E0.6 [1] (🚏) bus stop
| 0xF0 0x9F 0x9A 0x90 #E1.0 [1] (🚐) minibus
| 0xF0 0x9F 0x9A 0x91..0x93 #E0.6 [3] (🚑..🚓) ambulance..police car
| 0xF0 0x9F 0x9A 0x94 #E0.7 [1] (🚔) oncoming police car
| 0xF0 0x9F 0x9A 0x95 #E0.6 [1] (🚕) taxi
| 0xF0 0x9F 0x9A 0x96 #E1.0 [1] (🚖) oncoming taxi
| 0xF0 0x9F 0x9A 0x97 #E0.6 [1] (🚗) automobile
| 0xF0 0x9F 0x9A 0x98 #E0.7 [1] (🚘) oncoming automobile
| 0xF0 0x9F 0x9A 0x99..0x9A #E0.6 [2] (🚙..🚚) sport utility vehic...
| 0xF0 0x9F 0x9A 0x9B..0xA1 #E1.0 [7] (🚛..🚡) articulated lorry.....
| 0xF0 0x9F 0x9A 0xA2 #E0.6 [1] (🚢) ship
| 0xF0 0x9F 0x9A 0xA3 #E1.0 [1] (🚣) person rowing boat
| 0xF0 0x9F 0x9A 0xA4..0xA5 #E0.6 [2] (🚤..🚥) speedboat..horizont...
| 0xF0 0x9F 0x9A 0xA6 #E1.0 [1] (🚦) vertical traffic light
| 0xF0 0x9F 0x9A 0xA7..0xAD #E0.6 [7] (🚧..🚭) construction..no sm...
| 0xF0 0x9F 0x9A 0xAE..0xB1 #E1.0 [4] (🚮..🚱) litter in bin sign....
| 0xF0 0x9F 0x9A 0xB2 #E0.6 [1] (🚲) bicycle
| 0xF0 0x9F 0x9A 0xB3..0xB5 #E1.0 [3] (🚳..🚵) no bicycles..person...
| 0xF0 0x9F 0x9A 0xB6 #E0.6 [1] (🚶) person walking
| 0xF0 0x9F 0x9A 0xB7..0xB8 #E1.0 [2] (🚷..🚸) no pedestrians..chi...
| 0xF0 0x9F 0x9A 0xB9..0xBE #E0.6 [6] (🚹..🚾) mens room..water c...
| 0xF0 0x9F 0x9A 0xBF #E1.0 [1] (🚿) shower
| 0xF0 0x9F 0x9B 0x80 #E0.6 [1] (🛀) person taking bath
| 0xF0 0x9F 0x9B 0x81..0x85 #E1.0 [5] (🛁..🛅) bathtub..left luggage
| 0xF0 0x9F 0x9B 0x86..0x8A #E0.0 [5] (🛆..🛊) TRIANGLE WITH ROUND...
| 0xF0 0x9F 0x9B 0x8B #E0.7 [1] (🛋️) couch and lamp
| 0xF0 0x9F 0x9B 0x8C #E1.0 [1] (🛌) person in bed
| 0xF0 0x9F 0x9B 0x8D..0x8F #E0.7 [3] (🛍️..🛏️) shopping bags..bed
| 0xF0 0x9F 0x9B 0x90 #E1.0 [1] (🛐) place of worship
| 0xF0 0x9F 0x9B 0x91..0x92 #E3.0 [2] (🛑..🛒) stop sign..shopping...
| 0xF0 0x9F 0x9B 0x93..0x94 #E0.0 [2] (🛓..🛔) STUPA..PAGODA
| 0xF0 0x9F 0x9B 0x95 #E12.0 [1] (🛕) hindu temple
| 0xF0 0x9F 0x9B 0x96..0x97 #E13.0 [2] (🛖..🛗) hut..elevator
| 0xF0 0x9F 0x9B 0x98..0x9F #E0.0 [8] (🛘..🛟) <reserved-1F6D8>..<...
| 0xF0 0x9F 0x9B 0xA0..0xA5 #E0.7 [6] (🛠️..🛥️) hammer and wrench...
| 0xF0 0x9F 0x9B 0xA6..0xA8 #E0.0 [3] (🛦..🛨) UP-POINTING MILITAR...
| 0xF0 0x9F 0x9B 0xA9 #E0.7 [1] (🛩️) small airplane
| 0xF0 0x9F 0x9B 0xAA #E0.0 [1] (🛪) NORTHEAST-POINTING AIR...
| 0xF0 0x9F 0x9B 0xAB..0xAC #E1.0 [2] (🛫..🛬) airplane departure....
| 0xF0 0x9F 0x9B 0xAD..0xAF #E0.0 [3] (🛭..🛯) <reserved-1F6ED>..<...
| 0xF0 0x9F 0x9B 0xB0 #E0.7 [1] (🛰️) satellite
| 0xF0 0x9F 0x9B 0xB1..0xB2 #E0.0 [2] (🛱..🛲) ONCOMING FIRE ENGIN...
| 0xF0 0x9F 0x9B 0xB3 #E0.7 [1] (🛳️) passenger ship
| 0xF0 0x9F 0x9B 0xB4..0xB6 #E3.0 [3] (🛴..🛶) kick scooter..canoe
| 0xF0 0x9F 0x9B 0xB7..0xB8 #E5.0 [2] (🛷..🛸) sled..flying saucer
| 0xF0 0x9F 0x9B 0xB9 #E11.0 [1] (🛹) skateboard
| 0xF0 0x9F 0x9B 0xBA #E12.0 [1] (🛺) auto rickshaw
| 0xF0 0x9F 0x9B 0xBB..0xBC #E13.0 [2] (🛻..🛼) pickup truck..rolle...
| 0xF0 0x9F 0x9B 0xBD..0xBF #E0.0 [3] (🛽..🛿) <reserved-1F6FD>..<...
| 0xF0 0x9F 0x9D 0xB4..0xBF #E0.0 [12] (🝴..🝿) <reserved-1F774>..<...
| 0xF0 0x9F 0x9F 0x95..0x9F #E0.0 [11] (🟕..🟟) CIRCLED TRIANGLE..<...
| 0xF0 0x9F 0x9F 0xA0..0xAB #E12.0 [12] (🟠..🟫) orange circle..brow...
| 0xF0 0x9F 0x9F 0xAC..0xBF #E0.0 [20] (🟬..🟿) <reserved-1F7EC>..<...
| 0xF0 0x9F 0xA0 0x8C..0x8F #E0.0 [4] (🠌..🠏) <reserved-1F80C>..<...
| 0xF0 0x9F 0xA1 0x88..0x8F #E0.0 [8] (🡈..🡏) <reserved-1F848>..<...
| 0xF0 0x9F 0xA1 0x9A..0x9F #E0.0 [6] (🡚..🡟) <reserved-1F85A>..<...
| 0xF0 0x9F 0xA2 0x88..0x8F #E0.0 [8] (🢈..🢏) <reserved-1F888>..<...
| 0xF0 0x9F 0xA2 0xAE..0xFF #E0.0 [82] (🢮..🣿) <reserved-1F8AE>..<...
| 0xF0 0x9F 0xA3 0x00..0xBF #
| 0xF0 0x9F 0xA4 0x8C #E13.0 [1] (🤌) pinched fingers
| 0xF0 0x9F 0xA4 0x8D..0x8F #E12.0 [3] (🤍..🤏) white heart..pinchi...
| 0xF0 0x9F 0xA4 0x90..0x98 #E1.0 [9] (🤐..🤘) zipper-mouth face.....
| 0xF0 0x9F 0xA4 0x99..0x9E #E3.0 [6] (🤙..🤞) call me hand..cross...
| 0xF0 0x9F 0xA4 0x9F #E5.0 [1] (🤟) love-you gesture
| 0xF0 0x9F 0xA4 0xA0..0xA7 #E3.0 [8] (🤠..🤧) cowboy hat face..sn...
| 0xF0 0x9F 0xA4 0xA8..0xAF #E5.0 [8] (🤨..🤯) face with raised ey...
| 0xF0 0x9F 0xA4 0xB0 #E3.0 [1] (🤰) pregnant woman
| 0xF0 0x9F 0xA4 0xB1..0xB2 #E5.0 [2] (🤱..🤲) breast-feeding..pal...
| 0xF0 0x9F 0xA4 0xB3..0xBA #E3.0 [8] (🤳..🤺) selfie..person fencing
| 0xF0 0x9F 0xA4 0xBC..0xBE #E3.0 [3] (🤼..🤾) people wrestling..p...
| 0xF0 0x9F 0xA4 0xBF #E12.0 [1] (🤿) diving mask
| 0xF0 0x9F 0xA5 0x80..0x85 #E3.0 [6] (🥀..🥅) wilted flower..goal...
| 0xF0 0x9F 0xA5 0x87..0x8B #E3.0 [5] (🥇..🥋) 1st place medal..ma...
| 0xF0 0x9F 0xA5 0x8C #E5.0 [1] (🥌) curling stone
| 0xF0 0x9F 0xA5 0x8D..0x8F #E11.0 [3] (🥍..🥏) lacrosse..flying disc
| 0xF0 0x9F 0xA5 0x90..0x9E #E3.0 [15] (🥐..🥞) croissant..pancakes
| 0xF0 0x9F 0xA5 0x9F..0xAB #E5.0 [13] (🥟..🥫) dumpling..canned food
| 0xF0 0x9F 0xA5 0xAC..0xB0 #E11.0 [5] (🥬..🥰) leafy green..smilin...
| 0xF0 0x9F 0xA5 0xB1 #E12.0 [1] (🥱) yawning face
| 0xF0 0x9F 0xA5 0xB2 #E13.0 [1] (🥲) smiling face with tear
| 0xF0 0x9F 0xA5 0xB3..0xB6 #E11.0 [4] (🥳..🥶) partying face..cold...
| 0xF0 0x9F 0xA5 0xB7..0xB8 #E13.0 [2] (🥷..🥸) ninja..disguised face
| 0xF0 0x9F 0xA5 0xB9 #E0.0 [1] (🥹) <reserved-1F979>
| 0xF0 0x9F 0xA5 0xBA #E11.0 [1] (🥺) pleading face
| 0xF0 0x9F 0xA5 0xBB #E12.0 [1] (🥻) sari
| 0xF0 0x9F 0xA5 0xBC..0xBF #E11.0 [4] (🥼..🥿) lab coat..flat shoe
| 0xF0 0x9F 0xA6 0x80..0x84 #E1.0 [5] (🦀..🦄) crab..unicorn
| 0xF0 0x9F 0xA6 0x85..0x91 #E3.0 [13] (🦅..🦑) eagle..squid
| 0xF0 0x9F 0xA6 0x92..0x97 #E5.0 [6] (🦒..🦗) giraffe..cricket
| 0xF0 0x9F 0xA6 0x98..0xA2 #E11.0 [11] (🦘..🦢) kangaroo..swan
| 0xF0 0x9F 0xA6 0xA3..0xA4 #E13.0 [2] (🦣..🦤) mammoth..dodo
| 0xF0 0x9F 0xA6 0xA5..0xAA #E12.0 [6] (🦥..🦪) sloth..oyster
| 0xF0 0x9F 0xA6 0xAB..0xAD #E13.0 [3] (🦫..🦭) beaver..seal
| 0xF0 0x9F 0xA6 0xAE..0xAF #E12.0 [2] (🦮..🦯) guide dog..white cane
| 0xF0 0x9F 0xA6 0xB0..0xB9 #E11.0 [10] (🦰..🦹) red hair..supervillain
| 0xF0 0x9F 0xA6 0xBA..0xBF #E12.0 [6] (🦺..🦿) safety vest..mechan...
| 0xF0 0x9F 0xA7 0x80 #E1.0 [1] (🧀) cheese wedge
| 0xF0 0x9F 0xA7 0x81..0x82 #E11.0 [2] (🧁..🧂) cupcake..salt
| 0xF0 0x9F 0xA7 0x83..0x8A #E12.0 [8] (🧃..🧊) beverage box..ice
| 0xF0 0x9F 0xA7 0x8B #E13.0 [1] (🧋) bubble tea
| 0xF0 0x9F 0xA7 0x8C #E0.0 [1] (🧌) <reserved-1F9CC>
| 0xF0 0x9F 0xA7 0x8D..0x8F #E12.0 [3] (🧍..🧏) person standing..de...
| 0xF0 0x9F 0xA7 0x90..0xA6 #E5.0 [23] (🧐..🧦) face with monocle.....
| 0xF0 0x9F 0xA7 0xA7..0xBF #E11.0 [25] (🧧..🧿) red envelope..nazar...
| 0xF0 0x9F 0xA8 0x80..0xFF #E0.0 [112] (🨀..🩯) NEUTRAL CHESS KING....
| 0xF0 0x9F 0xA9 0x00..0xAF #
| 0xF0 0x9F 0xA9 0xB0..0xB3 #E12.0 [4] (🩰..🩳) ballet shoes..shorts
| 0xF0 0x9F 0xA9 0xB4 #E13.0 [1] (🩴) thong sandal
| 0xF0 0x9F 0xA9 0xB5..0xB7 #E0.0 [3] (🩵..🩷) <reserved-1FA75>..<...
| 0xF0 0x9F 0xA9 0xB8..0xBA #E12.0 [3] (🩸..🩺) drop of blood..stet...
| 0xF0 0x9F 0xA9 0xBB..0xBF #E0.0 [5] (🩻..🩿) <reserved-1FA7B>..<...
| 0xF0 0x9F 0xAA 0x80..0x82 #E12.0 [3] (🪀..🪂) yo-yo..parachute
| 0xF0 0x9F 0xAA 0x83..0x86 #E13.0 [4] (🪃..🪆) boomerang..nesting ...
| 0xF0 0x9F 0xAA 0x87..0x8F #E0.0 [9] (🪇..🪏) <reserved-1FA87>..<...
| 0xF0 0x9F 0xAA 0x90..0x95 #E12.0 [6] (🪐..🪕) ringed planet..banjo
| 0xF0 0x9F 0xAA 0x96..0xA8 #E13.0 [19] (🪖..🪨) military helmet..rock
| 0xF0 0x9F 0xAA 0xA9..0xAF #E0.0 [7] (🪩..🪯) <reserved-1FAA9>..<...
| 0xF0 0x9F 0xAA 0xB0..0xB6 #E13.0 [7] (🪰..🪶) fly..feather
| 0xF0 0x9F 0xAA 0xB7..0xBF #E0.0 [9] (🪷..🪿) <reserved-1FAB7>..<...
| 0xF0 0x9F 0xAB 0x80..0x82 #E13.0 [3] (🫀..🫂) anatomical heart..p...
| 0xF0 0x9F 0xAB 0x83..0x8F #E0.0 [13] (🫃..🫏) <reserved-1FAC3>..<...
| 0xF0 0x9F 0xAB 0x90..0x96 #E13.0 [7] (🫐..🫖) blueberries..teapot
| 0xF0 0x9F 0xAB 0x97..0xBF #E0.0 [41] (🫗..🫿) <reserved-1FAD7>..<...
| 0xF0 0x9F 0xB0 0x80..0xFF #E0.0[1022] (🰀..🿽) <reserved-1FC...
| 0xF0 0x9F 0xB1..0xBE 0x00..0xFF #
| 0xF0 0x9F 0xBF 0x00..0xBD #
;
}%%

View File

@@ -0,0 +1,8 @@
package textseg
//go:generate go run make_tables.go -output tables.go
//go:generate go run make_test_tables.go -output tables_test.go
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/13.0.0/ucd/auxiliary/GraphemeBreakProperty.txt -m GraphemeCluster -p "Prepend,CR,LF,Control,Extend,Regional_Indicator,SpacingMark,L,V,T,LV,LVT,ZWJ" -o grapheme_clusters_table.rl
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/13.0.0/ucd/emoji/emoji-data.txt -m Emoji -p "Extended_Pictographic" -o emoji_table.rl
//go:generate ragel -Z grapheme_clusters.rl
//go:generate gofmt -w grapheme_clusters.go

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,133 @@
package textseg
import (
"errors"
"unicode/utf8"
)
// Generated from grapheme_clusters.rl. DO NOT EDIT
%%{
# (except you are actually in grapheme_clusters.rl here, so edit away!)
machine graphclust;
write data;
}%%
var Error = errors.New("invalid UTF8 text")
// ScanGraphemeClusters is a split function for bufio.Scanner that splits
// on grapheme cluster boundaries.
func ScanGraphemeClusters(data []byte, atEOF bool) (int, []byte, error) {
if len(data) == 0 {
return 0, nil, nil
}
// Ragel state
cs := 0 // Current State
p := 0 // "Pointer" into data
pe := len(data) // End-of-data "pointer"
ts := 0
te := 0
act := 0
eof := pe
// Make Go compiler happy
_ = ts
_ = te
_ = act
_ = eof
startPos := 0
endPos := 0
%%{
include GraphemeCluster "grapheme_clusters_table.rl";
include Emoji "emoji_table.rl";
action start {
startPos = p
}
action end {
endPos = p
}
action emit {
return endPos+1, data[startPos:endPos+1], nil
}
ZWJGlue = ZWJ (Extended_Pictographic Extend*)?;
AnyExtender = Extend | ZWJGlue | SpacingMark;
Extension = AnyExtender*;
ReplacementChar = (0xEF 0xBF 0xBD);
CRLFSeq = CR LF;
ControlSeq = Control | ReplacementChar;
HangulSeq = (
L+ (((LV? V+ | LVT) T*)?|LV?) |
LV V* T* |
V+ T* |
LVT T* |
T+
) Extension;
EmojiSeq = Extended_Pictographic Extend* Extension;
ZWJSeq = ZWJ (ZWJ | Extend | SpacingMark)*;
EmojiFlagSeq = Regional_Indicator Regional_Indicator? Extension;
UTF8Cont = 0x80 .. 0xBF;
AnyUTF8 = (
0x00..0x7F |
0xC0..0xDF . UTF8Cont |
0xE0..0xEF . UTF8Cont . UTF8Cont |
0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
);
# OtherSeq is any character that isn't at the start of one of the extended sequences above, followed by extension
OtherSeq = (AnyUTF8 - (CR|LF|Control|ReplacementChar|L|LV|V|LVT|T|Extended_Pictographic|ZWJ|Regional_Indicator|Prepend)) (Extend | ZWJ | SpacingMark)*;
# PrependSeq is prepend followed by any of the other patterns above, except control characters which explicitly break
PrependSeq = Prepend+ (HangulSeq|EmojiSeq|ZWJSeq|EmojiFlagSeq|OtherSeq)?;
CRLFTok = CRLFSeq >start @end;
ControlTok = ControlSeq >start @end;
HangulTok = HangulSeq >start @end;
EmojiTok = EmojiSeq >start @end;
ZWJTok = ZWJSeq >start @end;
EmojiFlagTok = EmojiFlagSeq >start @end;
OtherTok = OtherSeq >start @end;
PrependTok = PrependSeq >start @end;
main := |*
CRLFTok => emit;
ControlTok => emit;
HangulTok => emit;
EmojiTok => emit;
ZWJTok => emit;
EmojiFlagTok => emit;
PrependTok => emit;
OtherTok => emit;
# any single valid UTF-8 character would also be valid per spec,
# but we'll handle that separately after the loop so we can deal
# with requesting more bytes if we're not at EOF.
*|;
write init;
write exec;
}%%
// If we fall out here then we were unable to complete a sequence:
// either we've reached the end of a partial buffer (so there's more
// data to come) or we have an isolated symbol that would normally be
// part of a grapheme cluster but has appeared in isolation here.
if !atEOF {
// Request more
return 0, nil, nil
}
// Just take the first UTF-8 sequence and return that.
_, seqLen := utf8.DecodeRune(data)
return seqLen, data[:seqLen], nil
}
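For orientation, a minimal usage sketch follows. It is not part of the vendored sources; the import path is an assumption (the conventional upstream path for this package) and may differ in a particular vendor tree.
package main
import (
	"bufio"
	"fmt"
	"strings"
	"github.com/apparentlymart/go-textseg/textseg" // assumed import path, not confirmed by this diff
)
func main() {
	// Split a string into grapheme clusters using the scanner above.
	sc := bufio.NewScanner(strings.NewReader("héllo 👍🏽"))
	sc.Split(textseg.ScanGraphemeClusters)
	for sc.Scan() {
		fmt.Printf("%q\n", sc.Text()) // one grapheme cluster per token
	}
	if err := sc.Err(); err != nil {
		panic(err)
	}
}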

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,335 @@
#!/usr/bin/env ruby
#
# This script has been updated to accept more command-line arguments:
#
# -u, --url URL to process
# -m, --machine Machine name
# -p, --properties Properties to add to the machine
# -o, --output Write output to file
#
# Updated by: Marty Schoch <marty.schoch@gmail.com>
#
# This script uses the unicode spec to generate a Ragel state machine
# that recognizes unicode alphanumeric characters. It generates 5
# character classes: uupper, ulower, ualpha, udigit, and ualnum.
# Currently supported encodings are UTF-8 [default] and UCS-4.
#
# Usage: unicode2ragel.rb [options]
# -e, --encoding [ucs4 | utf8] Data encoding
# -h, --help Show this message
#
# This script was originally written as part of the Ferret search
# engine library.
#
# Author: Rakan El-Khalil <rakan@well.com>
require 'optparse'
require 'open-uri'
ENCODINGS = [ :utf8, :ucs4 ]
ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" }
DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt"
DEFAULT_MACHINE_NAME= "WChar"
###
# Display vars & default option
TOTAL_WIDTH = 80
RANGE_WIDTH = 23
@encoding = :utf8
@chart_url = DEFAULT_CHART_URL
machine_name = DEFAULT_MACHINE_NAME
properties = []
@output = $stdout
###
# Option parsing
cli_opts = OptionParser.new do |opts|
opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o|
@encoding = o.downcase.to_sym
end
opts.on("-h", "--help", "Show this message") do
puts opts
exit
end
opts.on("-u", "--url URL", "URL to process") do |o|
@chart_url = o
end
opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o|
machine_name = o
end
opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o|
properties = o
end
opts.on("-o", "--output FILE", "output file") do |o|
@output = File.new(o, "w+")
end
end
cli_opts.parse(ARGV)
unless ENCODINGS.member? @encoding
puts "Invalid encoding: #{@encoding}"
puts cli_opts
exit
end
##
# Downloads the document at url and yields every alpha line's hex
# range and description.
def each_alpha( url, property )
URI.open( url ) do |file|
file.each_line do |line|
next if line =~ /^#/;
next if line !~ /; #{property} *#/;
range, description = line.split(/;/)
range.strip!
description.gsub!(/.*#/, '').strip!
if range =~ /\.\./
start, stop = range.split '..'
else start = stop = range
end
yield start.hex .. stop.hex, description
end
end
end
###
# Formats to hex at minimum width
def to_hex( n )
r = "%0X" % n
r = "0#{r}" unless (r.length % 2).zero?
r
end
###
# UCS4 is just a straight hex conversion of the unicode codepoint.
def to_ucs4( range )
rangestr = "0x" + to_hex(range.begin)
rangestr << "..0x" + to_hex(range.end) if range.begin != range.end
[ rangestr ]
end
##
# 0x00 - 0x7f -> 0zzzzzzz[7]
# 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6]
# 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6]
# 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6]
UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff]
def to_utf8_enc( n )
r = 0
if n <= 0x7f
r = n
elsif n <= 0x7ff
y = 0xc0 | (n >> 6)
z = 0x80 | (n & 0x3f)
r = y << 8 | z
elsif n <= 0xffff
x = 0xe0 | (n >> 12)
y = 0x80 | (n >> 6) & 0x3f
z = 0x80 | n & 0x3f
r = x << 16 | y << 8 | z
elsif n <= 0x10ffff
w = 0xf0 | (n >> 18)
x = 0x80 | (n >> 12) & 0x3f
y = 0x80 | (n >> 6) & 0x3f
z = 0x80 | n & 0x3f
r = w << 24 | x << 16 | y << 8 | z
end
to_hex(r)
end
def from_utf8_enc( n )
n = n.hex
r = 0
if n <= 0x7f
r = n
elsif n <= 0xdfff
y = (n >> 8) & 0x1f
z = n & 0x3f
r = y << 6 | z
elsif n <= 0xefffff
x = (n >> 16) & 0x0f
y = (n >> 8) & 0x3f
z = n & 0x3f
r = x << 10 | y << 6 | z
elsif n <= 0xf7ffffff
w = (n >> 24) & 0x07
x = (n >> 16) & 0x3f
y = (n >> 8) & 0x3f
z = n & 0x3f
r = w << 18 | x << 12 | y << 6 | z
end
r
end
###
# Given a range, splits it up into ranges that can be continuously
# encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff]
# This is not strictly needed since the current [5.1] unicode standard
# doesn't have ranges that straddle utf8 boundaries. This is included
# for completeness as there is no telling if that will ever change.
def utf8_ranges( range )
ranges = []
UTF8_BOUNDARIES.each do |max|
if range.begin <= max
if range.end <= max
ranges << range
return ranges
end
ranges << (range.begin .. max)
range = (max + 1) .. range.end
end
end
ranges
end
def build_range( start, stop )
size = start.size/2
left = size - 1
return [""] if size < 1
a = start[0..1]
b = stop[0..1]
###
# Shared prefix
if a == b
return build_range(start[2..-1], stop[2..-1]).map do |elt|
"0x#{a} " + elt
end
end
###
# Unshared prefix, end of run
return ["0x#{a}..0x#{b} "] if left.zero?
###
# Unshared prefix, not end of run
# Range can be 0x123456..0x56789A
# Which is equivalent to:
# 0x123456 .. 0x12FFFF
# 0x130000 .. 0x55FFFF
# 0x560000 .. 0x56789A
ret = []
ret << build_range(start, a + "FF" * left)
###
# Only generate middle range if need be.
if a.hex+1 != b.hex
max = to_hex(b.hex - 1)
max = "FF" if b == "FF"
ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left
end
###
# Don't generate last range if it is covered by first range
ret << build_range(b + "00" * left, stop) unless b == "FF"
ret.flatten!
end
def to_utf8( range )
utf8_ranges( range ).map do |r|
begin_enc = to_utf8_enc(r.begin)
end_enc = to_utf8_enc(r.end)
build_range begin_enc, end_enc
end.flatten!
end
##
# Perform a 3-way comparison of the number of codepoints advertised by
# the unicode spec for the given range, the originally parsed range,
# and the resulting utf8 encoded range.
def count_codepoints( code )
code.split(' ').inject(1) do |acc, elt|
if elt =~ /0x(.+)\.\.0x(.+)/
if @encoding == :utf8
acc * (from_utf8_enc($2) - from_utf8_enc($1) + 1)
else
acc * ($2.hex - $1.hex + 1)
end
else
acc
end
end
end
def is_valid?( range, desc, codes )
spec_count = 1
spec_count = $1.to_i if desc =~ /\[(\d+)\]/
range_count = range.end - range.begin + 1
sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) }
sum == spec_count and sum == range_count
end
##
# Generate the state machine and write it to @output (stdout by default)
def generate_machine( name, property )
pipe = " "
@output.puts " #{name} = "
each_alpha( @chart_url, property ) do |range, desc|
codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range)
#raise "Invalid encoding of range #{range}: #{codes.inspect}" unless
# is_valid? range, desc, codes
range_width = codes.map { |a| a.size }.max
range_width = RANGE_WIDTH if range_width < RANGE_WIDTH
desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11
desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH
if desc.size > desc_width
desc = desc[0..desc_width - 4] + "..."
end
codes.each_with_index do |r, idx|
desc = "" unless idx.zero?
code = "%-#{range_width}s" % r
@output.puts " #{pipe} #{code} ##{desc}"
pipe = "|"
end
end
@output.puts " ;"
@output.puts ""
end
@output.puts <<EOF
# The following Ragel file was autogenerated with #{$0}
# from: #{@chart_url}
#
# It defines #{properties}.
#
# To use this, make sure that your alphtype is set to #{ALPHTYPES[@encoding]},
# and that your input is in #{@encoding}.
%%{
machine #{machine_name};
EOF
properties.each { |x| generate_machine( x, x ) }
@output.puts <<EOF
}%%
EOF
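One step in the script that is easy to misread is the UTF8_BOUNDARIES / utf8_ranges pair: a codepoint range is cut at the last codepoint encodable in 1, 2, 3 and 4 UTF-8 bytes, so that every sub-range has a uniform encoded length before build_range expands it byte by byte. The following Go sketch is not part of the vendored sources and is offered only as an illustration of that splitting step.
package main
import "fmt"
// Mirrors UTF8_BOUNDARIES in unicode2ragel.rb: the last codepoint that fits
// in 1, 2, 3 and 4 UTF-8 bytes respectively.
var utf8Boundaries = []rune{0x7F, 0x7FF, 0xFFFF, 0x10FFFF}
// splitByEncodedLen breaks [lo, hi] into sub-ranges whose codepoints all
// encode to the same number of UTF-8 bytes, as utf8_ranges does above.
func splitByEncodedLen(lo, hi rune) [][2]rune {
	var out [][2]rune
	for _, max := range utf8Boundaries {
		if lo > max {
			continue
		}
		if hi <= max {
			return append(out, [2]rune{lo, hi})
		}
		out = append(out, [2]rune{lo, max})
		lo = max + 1
	}
	return out
}
func main() {
	// 0x70..0x820 straddles the 1-, 2- and 3-byte regions.
	fmt.Println(splitByEncodedLen(0x70, 0x820)) // [[112 127] [128 2047] [2048 2080]]
}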

View File

@@ -0,0 +1,19 @@
package textseg
import "unicode/utf8"
// ScanUTF8Sequences is a split function for bufio.Scanner that splits
// on UTF-8 sequence boundaries.
//
// This is included largely for completeness, since this behavior is already
// built in to Go when ranging over a string.
func ScanUTF8Sequences(data []byte, atEOF bool) (int, []byte, error) {
if len(data) == 0 {
return 0, nil, nil
}
r, seqLen := utf8.DecodeRune(data)
if r == utf8.RuneError && !atEOF {
return 0, nil, nil
}
return seqLen, data[:seqLen], nil
}
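As with the grapheme-cluster scanner, a short hedged usage sketch follows; it is not part of the vendored sources and the import path is an assumption.
package main
import (
	"bufio"
	"fmt"
	"strings"
	"github.com/apparentlymart/go-textseg/textseg" // assumed import path
)
func main() {
	sc := bufio.NewScanner(strings.NewReader("héllo"))
	sc.Split(textseg.ScanUTF8Sequences)
	n := 0
	for sc.Scan() {
		n++ // one token per UTF-8 sequence, i.e. per rune
	}
	fmt.Println(n) // 5 tokens, even though the string is 6 bytes long
}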

View File

@@ -0,0 +1,95 @@
Copyright (c) 2017 Martin Atkins
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
---------
Unicode table generation programs are under a separate copyright and license:
Copyright (c) 2014 Couchbase, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
---------
Grapheme break data is provided as part of the Unicode character database,
copyright 2016 Unicode, Inc, which is provided with the following license:
Unicode Data Files include all data files under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
Unicode Data Files do not include PDF online code charts under the
directory http://www.unicode.org/Public/.
Software includes any source code published in the Unicode Standard
or under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
NOTICE TO USER: Carefully read the following legal agreement.
BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
TERMS AND CONDITIONS OF THIS AGREEMENT.
IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
THE DATA FILES OR SOFTWARE.
COPYRIGHT AND PERMISSION NOTICE
Copyright © 1991-2017 Unicode, Inc. All rights reserved.
Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Unicode data files and any associated documentation
(the "Data Files") or Unicode software and any associated documentation
(the "Software") to deal in the Data Files or Software
without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, and/or sell copies of
the Data Files or Software, and to permit persons to whom the Data Files
or Software are furnished to do so, provided that either
(a) this copyright and permission notice appear with all copies
of the Data Files or Software, or
(b) this copyright and permission notice appear in associated
Documentation.
THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT OF THIRD PARTY RIGHTS.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THE DATA FILES OR SOFTWARE.
Except as contained in this notice, the name of a copyright holder
shall not be used in advertising or otherwise to promote the sale,
use or other dealings in these Data Files or Software without prior
written authorization of the copyright holder.

View File

@@ -0,0 +1,30 @@
package textseg
import (
"bufio"
"bytes"
)
// AllTokens is a utility that uses a bufio.SplitFunc to produce a slice of
// all of the recognized tokens in the given buffer.
func AllTokens(buf []byte, splitFunc bufio.SplitFunc) ([][]byte, error) {
scanner := bufio.NewScanner(bytes.NewReader(buf))
scanner.Split(splitFunc)
var ret [][]byte
for scanner.Scan() {
ret = append(ret, scanner.Bytes())
}
return ret, scanner.Err()
}
// TokenCount is a utility that uses a bufio.SplitFunc to count the number of
// recognized tokens in the given buffer.
func TokenCount(buf []byte, splitFunc bufio.SplitFunc) (int, error) {
scanner := bufio.NewScanner(bytes.NewReader(buf))
scanner.Split(splitFunc)
var ret int
for scanner.Scan() {
ret++
}
return ret, scanner.Err()
}
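A hedged usage sketch tying these helpers to the split functions defined elsewhere in the package (not part of the vendored sources; the import path is an assumption):
package main
import (
	"fmt"
	"github.com/apparentlymart/go-textseg/textseg" // assumed import path
)
func main() {
	buf := []byte("🇳🇱🇳🇱") // two flag emoji: four regional indicator runes, 16 bytes
	clusters, err := textseg.AllTokens(buf, textseg.ScanGraphemeClusters)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(clusters)) // 2: each regional-indicator pair is one grapheme cluster
	runes, err := textseg.TokenCount(buf, textseg.ScanUTF8Sequences)
	if err != nil {
		panic(err)
	}
	fmt.Println(runes) // 4: one token per UTF-8 sequence
}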

View File

@@ -0,0 +1,545 @@
# The following Ragel file was autogenerated with unicode2ragel.rb
# from: https://www.unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
#
# It defines ["Extended_Pictographic"].
#
# To use this, make sure that your alphtype is set to byte,
# and that your input is in utf8.
%%{
machine Emoji;
Extended_Pictographic =
0xC2 0xA9 #E0.6 [1] (©️) copyright
| 0xC2 0xAE #E0.6 [1] (®️) registered
| 0xE2 0x80 0xBC #E0.6 [1] (‼️) double exclamation mark
| 0xE2 0x81 0x89 #E0.6 [1] (⁉️) exclamation question ...
| 0xE2 0x84 0xA2 #E0.6 [1] (™️) trade mark
| 0xE2 0x84 0xB9          #E0.6 [1] (ℹ️) information
| 0xE2 0x86 0x94..0x99 #E0.6 [6] (↔️..↙️) left-right arrow..do...
| 0xE2 0x86 0xA9..0xAA #E0.6 [2] (↩️..↪️) right arrow curving ...
| 0xE2 0x8C 0x9A..0x9B #E0.6 [2] (⌚..⌛) watch..hourglass done
| 0xE2 0x8C 0xA8 #E1.0 [1] (⌨️) keyboard
| 0xE2 0x8E 0x88 #E0.0 [1] (⎈) HELM SYMBOL
| 0xE2 0x8F 0x8F #E1.0 [1] (⏏️) eject button
| 0xE2 0x8F 0xA9..0xAC #E0.6 [4] (⏩..⏬) fast-forward button..f...
| 0xE2 0x8F 0xAD..0xAE #E0.7 [2] (⏭️..⏮️) next track button..l...
| 0xE2 0x8F 0xAF #E1.0 [1] (⏯️) play or pause button
| 0xE2 0x8F 0xB0 #E0.6 [1] (⏰) alarm clock
| 0xE2 0x8F 0xB1..0xB2 #E1.0 [2] (⏱️..⏲️) stopwatch..timer clock
| 0xE2 0x8F 0xB3 #E0.6 [1] (⏳) hourglass not done
| 0xE2 0x8F 0xB8..0xBA #E0.7 [3] (⏸️..⏺️) pause button..record...
| 0xE2 0x93 0x82 #E0.6 [1] (Ⓜ️) circled M
| 0xE2 0x96 0xAA..0xAB #E0.6 [2] (▪️..▫️) black small square.....
| 0xE2 0x96 0xB6 #E0.6 [1] (▶️) play button
| 0xE2 0x97 0x80 #E0.6 [1] (◀️) reverse button
| 0xE2 0x97 0xBB..0xBE #E0.6 [4] (◻️..◾) white medium square.....
| 0xE2 0x98 0x80..0x81 #E0.6 [2] (☀️..☁️) sun..cloud
| 0xE2 0x98 0x82..0x83 #E0.7 [2] (☂️..☃️) umbrella..snowman
| 0xE2 0x98 0x84 #E1.0 [1] (☄️) comet
| 0xE2 0x98 0x85 #E0.0 [1] (★) BLACK STAR
| 0xE2 0x98 0x87..0x8D #E0.0 [7] (☇..☍) LIGHTNING..OPPOSITION
| 0xE2 0x98 0x8E #E0.6 [1] (☎️) telephone
| 0xE2 0x98 0x8F..0x90 #E0.0 [2] (☏..☐) WHITE TELEPHONE..BALLO...
| 0xE2 0x98 0x91 #E0.6 [1] (☑️) check box with check
| 0xE2 0x98 0x92 #E0.0 [1] (☒) BALLOT BOX WITH X
| 0xE2 0x98 0x94..0x95 #E0.6 [2] (☔..☕) umbrella with rain dro...
| 0xE2 0x98 0x96..0x97 #E0.0 [2] (☖..☗) WHITE SHOGI PIECE..BLA...
| 0xE2 0x98 0x98 #E1.0 [1] (☘️) shamrock
| 0xE2 0x98 0x99..0x9C #E0.0 [4] (☙..☜) REVERSED ROTATED FLORA...
| 0xE2 0x98 0x9D #E0.6 [1] (☝️) index pointing up
| 0xE2 0x98 0x9E..0x9F #E0.0 [2] (☞..☟) WHITE RIGHT POINTING I...
| 0xE2 0x98 0xA0 #E1.0 [1] (☠️) skull and crossbones
| 0xE2 0x98 0xA1 #E0.0 [1] (☡) CAUTION SIGN
| 0xE2 0x98 0xA2..0xA3 #E1.0 [2] (☢️..☣️) radioactive..biohazard
| 0xE2 0x98 0xA4..0xA5 #E0.0 [2] (☤..☥) CADUCEUS..ANKH
| 0xE2 0x98 0xA6 #E1.0 [1] (☦️) orthodox cross
| 0xE2 0x98 0xA7..0xA9 #E0.0 [3] (☧..☩) CHI RHO..CROSS OF JERU...
| 0xE2 0x98 0xAA #E0.7 [1] (☪️) star and crescent
| 0xE2 0x98 0xAB..0xAD #E0.0 [3] (☫..☭) FARSI SYMBOL..HAMMER A...
| 0xE2 0x98 0xAE #E1.0 [1] (☮️) peace symbol
| 0xE2 0x98 0xAF #E0.7 [1] (☯️) yin yang
| 0xE2 0x98 0xB0..0xB7 #E0.0 [8] (☰..☷) TRIGRAM FOR HEAVEN..TR...
| 0xE2 0x98 0xB8..0xB9 #E0.7 [2] (☸️..☹️) wheel of dharma..fro...
| 0xE2 0x98 0xBA #E0.6 [1] (☺️) smiling face
| 0xE2 0x98 0xBB..0xBF #E0.0 [5] (☻..☿) BLACK SMILING FACE..ME...
| 0xE2 0x99 0x80 #E4.0 [1] (♀️) female sign
| 0xE2 0x99 0x81 #E0.0 [1] (♁) EARTH
| 0xE2 0x99 0x82 #E4.0 [1] (♂️) male sign
| 0xE2 0x99 0x83..0x87 #E0.0 [5] (♃..♇) JUPITER..PLUTO
| 0xE2 0x99 0x88..0x93 #E0.6 [12] (♈..♓) Aries..Pisces
| 0xE2 0x99 0x94..0x9E #E0.0 [11] (♔..♞) WHITE CHESS KING..BLAC...
| 0xE2 0x99 0x9F #E11.0 [1] (♟️) chess pawn
| 0xE2 0x99 0xA0 #E0.6 [1] (♠️) spade suit
| 0xE2 0x99 0xA1..0xA2 #E0.0 [2] (♡..♢) WHITE HEART SUIT..WHIT...
| 0xE2 0x99 0xA3 #E0.6 [1] (♣️) club suit
| 0xE2 0x99 0xA4 #E0.0 [1] (♤) WHITE SPADE SUIT
| 0xE2 0x99 0xA5..0xA6 #E0.6 [2] (♥️..♦️) heart suit..diamond ...
| 0xE2 0x99 0xA7 #E0.0 [1] (♧) WHITE CLUB SUIT
| 0xE2 0x99 0xA8 #E0.6 [1] (♨️) hot springs
| 0xE2 0x99 0xA9..0xBA #E0.0 [18] (♩..♺) QUARTER NOTE..RECYCLIN...
| 0xE2 0x99 0xBB #E0.6 [1] (♻️) recycling symbol
| 0xE2 0x99 0xBC..0xBD #E0.0 [2] (♼..♽) RECYCLED PAPER SYMBOL....
| 0xE2 0x99 0xBE #E11.0 [1] (♾️) infinity
| 0xE2 0x99 0xBF #E0.6 [1] (♿) wheelchair symbol
| 0xE2 0x9A 0x80..0x85 #E0.0 [6] (⚀..⚅) DIE FACE-1..DIE FACE-6
| 0xE2 0x9A 0x90..0x91 #E0.0 [2] (⚐..⚑) WHITE FLAG..BLACK FLAG
| 0xE2 0x9A 0x92 #E1.0 [1] (⚒️) hammer and pick
| 0xE2 0x9A 0x93 #E0.6 [1] (⚓) anchor
| 0xE2 0x9A 0x94 #E1.0 [1] (⚔️) crossed swords
| 0xE2 0x9A 0x95 #E4.0 [1] (⚕️) medical symbol
| 0xE2 0x9A 0x96..0x97 #E1.0 [2] (⚖️..⚗️) balance scale..alembic
| 0xE2 0x9A 0x98 #E0.0 [1] (⚘) FLOWER
| 0xE2 0x9A 0x99 #E1.0 [1] (⚙️) gear
| 0xE2 0x9A 0x9A #E0.0 [1] (⚚) STAFF OF HERMES
| 0xE2 0x9A 0x9B..0x9C #E1.0 [2] (⚛️..⚜️) atom symbol..fleur-d...
| 0xE2 0x9A 0x9D..0x9F #E0.0 [3] (⚝..⚟) OUTLINED WHITE STAR..T...
| 0xE2 0x9A 0xA0..0xA1 #E0.6 [2] (⚠️..⚡) warning..high voltage
| 0xE2 0x9A 0xA2..0xA6 #E0.0 [5] (⚢..⚦) DOUBLED FEMALE SIGN..M...
| 0xE2 0x9A 0xA7 #E13.0 [1] (⚧️) transgender symbol
| 0xE2 0x9A 0xA8..0xA9 #E0.0 [2] (⚨..⚩) VERTICAL MALE WITH STR...
| 0xE2 0x9A 0xAA..0xAB #E0.6 [2] (⚪..⚫) white circle..black ci...
| 0xE2 0x9A 0xAC..0xAF #E0.0 [4] (⚬..⚯) MEDIUM SMALL WHITE CIR...
| 0xE2 0x9A 0xB0..0xB1 #E1.0 [2] (⚰️..⚱️) coffin..funeral urn
| 0xE2 0x9A 0xB2..0xBC #E0.0 [11] (⚲..⚼) NEUTER..SESQUIQUADRATE
| 0xE2 0x9A 0xBD..0xBE #E0.6 [2] (⚽..⚾) soccer ball..baseball
| 0xE2 0x9A 0xBF..0xFF #E0.0 [5] (⚿..⛃) SQUARED KEY..BLACK DRA...
| 0xE2 0x9B 0x00..0x83 #
| 0xE2 0x9B 0x84..0x85 #E0.6 [2] (⛄..⛅) snowman without snow.....
| 0xE2 0x9B 0x86..0x87 #E0.0 [2] (⛆..⛇) RAIN..BLACK SNOWMAN
| 0xE2 0x9B 0x88 #E0.7 [1] (⛈️) cloud with lightning ...
| 0xE2 0x9B 0x89..0x8D #E0.0 [5] (⛉..⛍) TURNED WHITE SHOGI PIE...
| 0xE2 0x9B 0x8E #E0.6 [1] (⛎) Ophiuchus
| 0xE2 0x9B 0x8F #E0.7 [1] (⛏️) pick
| 0xE2 0x9B 0x90 #E0.0 [1] (⛐) CAR SLIDING
| 0xE2 0x9B 0x91 #E0.7 [1] (⛑️) rescue workers helmet
| 0xE2 0x9B 0x92 #E0.0 [1] (⛒) CIRCLED CROSSING LANES
| 0xE2 0x9B 0x93 #E0.7 [1] (⛓️) chains
| 0xE2 0x9B 0x94 #E0.6 [1] (⛔) no entry
| 0xE2 0x9B 0x95..0xA8 #E0.0 [20] (⛕..⛨) ALTERNATE ONE-WAY LEFT...
| 0xE2 0x9B 0xA9 #E0.7 [1] (⛩️) shinto shrine
| 0xE2 0x9B 0xAA #E0.6 [1] (⛪) church
| 0xE2 0x9B 0xAB..0xAF #E0.0 [5] (⛫..⛯) CASTLE..MAP SYMBOL FOR...
| 0xE2 0x9B 0xB0..0xB1 #E0.7 [2] (⛰️..⛱️) mountain..umbrella o...
| 0xE2 0x9B 0xB2..0xB3 #E0.6 [2] (⛲..⛳) fountain..flag in hole
| 0xE2 0x9B 0xB4 #E0.7 [1] (⛴️) ferry
| 0xE2 0x9B 0xB5 #E0.6 [1] (⛵) sailboat
| 0xE2 0x9B 0xB6 #E0.0 [1] (⛶) SQUARE FOUR CORNERS
| 0xE2 0x9B 0xB7..0xB9 #E0.7 [3] (⛷️..⛹️) skier..person bounci...
| 0xE2 0x9B 0xBA #E0.6 [1] (⛺) tent
| 0xE2 0x9B 0xBB..0xBC #E0.0 [2] (⛻..⛼) JAPANESE BANK SYMBOL.....
| 0xE2 0x9B 0xBD #E0.6 [1] (⛽) fuel pump
| 0xE2 0x9B 0xBE..0xFF #E0.0 [4] (⛾..✁) CUP ON BLACK SQUARE..U...
| 0xE2 0x9C 0x00..0x81 #
| 0xE2 0x9C 0x82 #E0.6 [1] (✂️) scissors
| 0xE2 0x9C 0x83..0x84 #E0.0 [2] (✃..✄) LOWER BLADE SCISSORS.....
| 0xE2 0x9C 0x85 #E0.6 [1] (✅) check mark button
| 0xE2 0x9C 0x88..0x8C #E0.6 [5] (✈️..✌️) airplane..victory hand
| 0xE2 0x9C 0x8D #E0.7 [1] (✍️) writing hand
| 0xE2 0x9C 0x8E #E0.0 [1] (✎) LOWER RIGHT PENCIL
| 0xE2 0x9C 0x8F #E0.6 [1] (✏️) pencil
| 0xE2 0x9C 0x90..0x91 #E0.0 [2] (✐..✑) UPPER RIGHT PENCIL..WH...
| 0xE2 0x9C 0x92 #E0.6 [1] (✒️) black nib
| 0xE2 0x9C 0x94 #E0.6 [1] (✔️) check mark
| 0xE2 0x9C 0x96 #E0.6 [1] (✖️) multiply
| 0xE2 0x9C 0x9D #E0.7 [1] (✝️) latin cross
| 0xE2 0x9C 0xA1 #E0.7 [1] (✡️) star of David
| 0xE2 0x9C 0xA8 #E0.6 [1] (✨) sparkles
| 0xE2 0x9C 0xB3..0xB4 #E0.6 [2] (✳️..✴️) eight-spoked asteris...
| 0xE2 0x9D 0x84 #E0.6 [1] (❄️) snowflake
| 0xE2 0x9D 0x87 #E0.6 [1] (❇️) sparkle
| 0xE2 0x9D 0x8C #E0.6 [1] (❌) cross mark
| 0xE2 0x9D 0x8E #E0.6 [1] (❎) cross mark button
| 0xE2 0x9D 0x93..0x95 #E0.6 [3] (❓..❕) red question mark..whi...
| 0xE2 0x9D 0x97 #E0.6 [1] (❗) red exclamation mark
| 0xE2 0x9D 0xA3 #E1.0 [1] (❣️) heart exclamation
| 0xE2 0x9D 0xA4 #E0.6 [1] (❤️) red heart
| 0xE2 0x9D 0xA5..0xA7 #E0.0 [3] (❥..❧) ROTATED HEAVY BLACK HE...
| 0xE2 0x9E 0x95..0x97    #E0.6 [3] (➕..➗) plus..divide
| 0xE2 0x9E 0xA1 #E0.6 [1] (➡️) right arrow
| 0xE2 0x9E 0xB0 #E0.6 [1] (➰) curly loop
| 0xE2 0x9E 0xBF #E1.0 [1] (➿) double curly loop
| 0xE2 0xA4 0xB4..0xB5 #E0.6 [2] (⤴️..⤵️) right arrow curving ...
| 0xE2 0xAC 0x85..0x87 #E0.6 [3] (⬅️..⬇️) left arrow..down arrow
| 0xE2 0xAC 0x9B..0x9C #E0.6 [2] (⬛..⬜) black large square..wh...
| 0xE2 0xAD 0x90 #E0.6 [1] (⭐) star
| 0xE2 0xAD 0x95 #E0.6 [1] (⭕) hollow red circle
| 0xE3 0x80 0xB0 #E0.6 [1] (〰️) wavy dash
| 0xE3 0x80 0xBD #E0.6 [1] (〽️) part alternation mark
| 0xE3 0x8A 0x97 #E0.6 [1] (㊗️) Japanese “congratulat...
| 0xE3 0x8A 0x99 #E0.6 [1] (㊙️) Japanese “secret” button
| 0xF0 0x9F 0x80 0x80..0x83 #E0.0 [4] (🀀..🀃) MAHJONG TILE EAST W...
| 0xF0 0x9F 0x80 0x84 #E0.6 [1] (🀄) mahjong red dragon
| 0xF0 0x9F 0x80 0x85..0xFF #E0.0 [202] (🀅..🃎) MAHJONG TILE ...
| 0xF0 0x9F 0x81..0x82 0x00..0xFF #
| 0xF0 0x9F 0x83 0x00..0x8E #
| 0xF0 0x9F 0x83 0x8F #E0.6 [1] (🃏) joker
| 0xF0 0x9F 0x83 0x90..0xBF #E0.0 [48] (🃐..🃿) <reserved-1F0D0>..<...
| 0xF0 0x9F 0x84 0x8D..0x8F #E0.0 [3] (🄍..🄏) CIRCLED ZERO WITH S...
| 0xF0 0x9F 0x84 0xAF #E0.0 [1] (🄯) COPYLEFT SYMBOL
| 0xF0 0x9F 0x85 0xAC..0xAF #E0.0 [4] (🅬..🅯) RAISED MR SIGN..CIR...
| 0xF0 0x9F 0x85 0xB0..0xB1 #E0.6 [2] (🅰️..🅱️) A button (blood t...
| 0xF0 0x9F 0x85 0xBE..0xBF #E0.6 [2] (🅾️..🅿️) O button (blood t...
| 0xF0 0x9F 0x86 0x8E #E0.6 [1] (🆎) AB button (blood type)
| 0xF0 0x9F 0x86 0x91..0x9A #E0.6 [10] (🆑..🆚) CL button..VS button
| 0xF0 0x9F 0x86 0xAD..0xFF #E0.0 [57] (🆭..🇥) MASK WORK SYMBOL..<...
| 0xF0 0x9F 0x87 0x00..0xA5 #
| 0xF0 0x9F 0x88 0x81..0x82 #E0.6 [2] (🈁..🈂️) Japanese “here” bu...
| 0xF0 0x9F 0x88 0x83..0x8F #E0.0 [13] (🈃..🈏) <reserved-1F203>..<...
| 0xF0 0x9F 0x88 0x9A #E0.6 [1] (🈚) Japanese “free of char...
| 0xF0 0x9F 0x88 0xAF #E0.6 [1] (🈯) Japanese “reserved” bu...
| 0xF0 0x9F 0x88 0xB2..0xBA #E0.6 [9] (🈲..🈺) Japanese “prohibite...
| 0xF0 0x9F 0x88 0xBC..0xBF #E0.0 [4] (🈼..🈿) <reserved-1F23C>..<...
| 0xF0 0x9F 0x89 0x89..0x8F #E0.0 [7] (🉉..🉏) <reserved-1F249>..<...
| 0xF0 0x9F 0x89 0x90..0x91 #E0.6 [2] (🉐..🉑) Japanese “bargain” ...
| 0xF0 0x9F 0x89 0x92..0xFF #E0.0 [174] (🉒..🋿) <reserved-1F2...
| 0xF0 0x9F 0x8A..0x8A 0x00..0xFF #
| 0xF0 0x9F 0x8B 0x00..0xBF #
| 0xF0 0x9F 0x8C 0x80..0x8C #E0.6 [13] (🌀..🌌) cyclone..milky way
| 0xF0 0x9F 0x8C 0x8D..0x8E #E0.7 [2] (🌍..🌎) globe showing Europ...
| 0xF0 0x9F 0x8C 0x8F #E0.6 [1] (🌏) globe showing Asia-Aus...
| 0xF0 0x9F 0x8C 0x90 #E1.0 [1] (🌐) globe with meridians
| 0xF0 0x9F 0x8C 0x91 #E0.6 [1] (🌑) new moon
| 0xF0 0x9F 0x8C 0x92 #E1.0 [1] (🌒) waxing crescent moon
| 0xF0 0x9F 0x8C 0x93..0x95 #E0.6 [3] (🌓..🌕) first quarter moon....
| 0xF0 0x9F 0x8C 0x96..0x98 #E1.0 [3] (🌖..🌘) waning gibbous moon...
| 0xF0 0x9F 0x8C 0x99 #E0.6 [1] (🌙) crescent moon
| 0xF0 0x9F 0x8C 0x9A #E1.0 [1] (🌚) new moon face
| 0xF0 0x9F 0x8C 0x9B #E0.6 [1] (🌛) first quarter moon face
| 0xF0 0x9F 0x8C 0x9C #E0.7 [1] (🌜) last quarter moon face
| 0xF0 0x9F 0x8C 0x9D..0x9E #E1.0 [2] (🌝..🌞) full moon face..sun...
| 0xF0 0x9F 0x8C 0x9F..0xA0 #E0.6 [2] (🌟..🌠) glowing star..shoot...
| 0xF0 0x9F 0x8C 0xA1 #E0.7 [1] (🌡️) thermometer
| 0xF0 0x9F 0x8C 0xA2..0xA3 #E0.0 [2] (🌢..🌣) BLACK DROPLET..WHIT...
| 0xF0 0x9F 0x8C 0xA4..0xAC #E0.7 [9] (🌤️..🌬️) sun behind small ...
| 0xF0 0x9F 0x8C 0xAD..0xAF #E1.0 [3] (🌭..🌯) hot dog..burrito
| 0xF0 0x9F 0x8C 0xB0..0xB1 #E0.6 [2] (🌰..🌱) chestnut..seedling
| 0xF0 0x9F 0x8C 0xB2..0xB3 #E1.0 [2] (🌲..🌳) evergreen tree..dec...
| 0xF0 0x9F 0x8C 0xB4..0xB5 #E0.6 [2] (🌴..🌵) palm tree..cactus
| 0xF0 0x9F 0x8C 0xB6 #E0.7 [1] (🌶️) hot pepper
| 0xF0 0x9F 0x8C 0xB7..0xFF #E0.6 [20] (🌷..🍊) tulip..tangerine
| 0xF0 0x9F 0x8D 0x00..0x8A #
| 0xF0 0x9F 0x8D 0x8B #E1.0 [1] (🍋) lemon
| 0xF0 0x9F 0x8D 0x8C..0x8F #E0.6 [4] (🍌..🍏) banana..green apple
| 0xF0 0x9F 0x8D 0x90 #E1.0 [1] (🍐) pear
| 0xF0 0x9F 0x8D 0x91..0xBB #E0.6 [43] (🍑..🍻) peach..clinking bee...
| 0xF0 0x9F 0x8D 0xBC #E1.0 [1] (🍼) baby bottle
| 0xF0 0x9F 0x8D 0xBD #E0.7 [1] (🍽️) fork and knife with p...
| 0xF0 0x9F 0x8D 0xBE..0xBF #E1.0 [2] (🍾..🍿) bottle with popping...
| 0xF0 0x9F 0x8E 0x80..0x93 #E0.6 [20] (🎀..🎓) ribbon..graduation cap
| 0xF0 0x9F 0x8E 0x94..0x95 #E0.0 [2] (🎔..🎕) HEART WITH TIP ON T...
| 0xF0 0x9F 0x8E 0x96..0x97 #E0.7 [2] (🎖️..🎗️) military medal..r...
| 0xF0 0x9F 0x8E 0x98 #E0.0 [1] (🎘) MUSICAL KEYBOARD WITH ...
| 0xF0 0x9F 0x8E 0x99..0x9B #E0.7 [3] (🎙️..🎛️) studio microphone...
| 0xF0 0x9F 0x8E 0x9C..0x9D #E0.0 [2] (🎜..🎝) BEAMED ASCENDING MU...
| 0xF0 0x9F 0x8E 0x9E..0x9F #E0.7 [2] (🎞️..🎟️) film frames..admi...
| 0xF0 0x9F 0x8E 0xA0..0xFF #E0.6 [37] (🎠..🏄) carousel horse..per...
| 0xF0 0x9F 0x8F 0x00..0x84 #
| 0xF0 0x9F 0x8F 0x85 #E1.0 [1] (🏅) sports medal
| 0xF0 0x9F 0x8F 0x86 #E0.6 [1] (🏆) trophy
| 0xF0 0x9F 0x8F 0x87 #E1.0 [1] (🏇) horse racing
| 0xF0 0x9F 0x8F 0x88 #E0.6 [1] (🏈) american football
| 0xF0 0x9F 0x8F 0x89 #E1.0 [1] (🏉) rugby football
| 0xF0 0x9F 0x8F 0x8A #E0.6 [1] (🏊) person swimming
| 0xF0 0x9F 0x8F 0x8B..0x8E #E0.7 [4] (🏋️..🏎️) person lifting we...
| 0xF0 0x9F 0x8F 0x8F..0x93 #E1.0 [5] (🏏..🏓) cricket game..ping ...
| 0xF0 0x9F 0x8F 0x94..0x9F #E0.7 [12] (🏔️..🏟️) snow-capped mount...
| 0xF0 0x9F 0x8F 0xA0..0xA3 #E0.6 [4] (🏠..🏣) house..Japanese pos...
| 0xF0 0x9F 0x8F 0xA4 #E1.0 [1] (🏤) post office
| 0xF0 0x9F 0x8F 0xA5..0xB0 #E0.6 [12] (🏥..🏰) hospital..castle
| 0xF0 0x9F 0x8F 0xB1..0xB2 #E0.0 [2] (🏱..🏲) WHITE PENNANT..BLAC...
| 0xF0 0x9F 0x8F 0xB3 #E0.7 [1] (🏳️) white flag
| 0xF0 0x9F 0x8F 0xB4 #E1.0 [1] (🏴) black flag
| 0xF0 0x9F 0x8F 0xB5 #E0.7 [1] (🏵️) rosette
| 0xF0 0x9F 0x8F 0xB6 #E0.0 [1] (🏶) BLACK ROSETTE
| 0xF0 0x9F 0x8F 0xB7 #E0.7 [1] (🏷️) label
| 0xF0 0x9F 0x8F 0xB8..0xBA #E1.0 [3] (🏸..🏺) badminton..amphora
| 0xF0 0x9F 0x90 0x80..0x87 #E1.0 [8] (🐀..🐇) rat..rabbit
| 0xF0 0x9F 0x90 0x88 #E0.7 [1] (🐈) cat
| 0xF0 0x9F 0x90 0x89..0x8B #E1.0 [3] (🐉..🐋) dragon..whale
| 0xF0 0x9F 0x90 0x8C..0x8E #E0.6 [3] (🐌..🐎) snail..horse
| 0xF0 0x9F 0x90 0x8F..0x90 #E1.0 [2] (🐏..🐐) ram..goat
| 0xF0 0x9F 0x90 0x91..0x92 #E0.6 [2] (🐑..🐒) ewe..monkey
| 0xF0 0x9F 0x90 0x93 #E1.0 [1] (🐓) rooster
| 0xF0 0x9F 0x90 0x94 #E0.6 [1] (🐔) chicken
| 0xF0 0x9F 0x90 0x95 #E0.7 [1] (🐕) dog
| 0xF0 0x9F 0x90 0x96 #E1.0 [1] (🐖) pig
| 0xF0 0x9F 0x90 0x97..0xA9 #E0.6 [19] (🐗..🐩) boar..poodle
| 0xF0 0x9F 0x90 0xAA #E1.0 [1] (🐪) camel
| 0xF0 0x9F 0x90 0xAB..0xBE #E0.6 [20] (🐫..🐾) two-hump camel..paw...
| 0xF0 0x9F 0x90 0xBF #E0.7 [1] (🐿️) chipmunk
| 0xF0 0x9F 0x91 0x80 #E0.6 [1] (👀) eyes
| 0xF0 0x9F 0x91 0x81 #E0.7 [1] (👁️) eye
| 0xF0 0x9F 0x91 0x82..0xA4 #E0.6 [35] (👂..👤) ear..bust in silhou...
| 0xF0 0x9F 0x91 0xA5 #E1.0 [1] (👥) busts in silhouette
| 0xF0 0x9F 0x91 0xA6..0xAB #E0.6 [6] (👦..👫) boy..woman and man ...
| 0xF0 0x9F 0x91 0xAC..0xAD #E1.0 [2] (👬..👭) men holding hands.....
| 0xF0 0x9F 0x91 0xAE..0xFF #E0.6 [63] (👮..💬) police officer..spe...
| 0xF0 0x9F 0x92 0x00..0xAC #
| 0xF0 0x9F 0x92 0xAD #E1.0 [1] (💭) thought balloon
| 0xF0 0x9F 0x92 0xAE..0xB5 #E0.6 [8] (💮..💵) white flower..dolla...
| 0xF0 0x9F 0x92 0xB6..0xB7 #E1.0 [2] (💶..💷) euro banknote..poun...
| 0xF0 0x9F 0x92 0xB8..0xFF #E0.6 [52] (💸..📫) money with wings..c...
| 0xF0 0x9F 0x93 0x00..0xAB #
| 0xF0 0x9F 0x93 0xAC..0xAD #E0.7 [2] (📬..📭) open mailbox with r...
| 0xF0 0x9F 0x93 0xAE #E0.6 [1] (📮) postbox
| 0xF0 0x9F 0x93 0xAF #E1.0 [1] (📯) postal horn
| 0xF0 0x9F 0x93 0xB0..0xB4 #E0.6 [5] (📰..📴) newspaper..mobile p...
| 0xF0 0x9F 0x93 0xB5 #E1.0 [1] (📵) no mobile phones
| 0xF0 0x9F 0x93 0xB6..0xB7 #E0.6 [2] (📶..📷) antenna bars..camera
| 0xF0 0x9F 0x93 0xB8 #E1.0 [1] (📸) camera with flash
| 0xF0 0x9F 0x93 0xB9..0xBC #E0.6 [4] (📹..📼) video camera..video...
| 0xF0 0x9F 0x93 0xBD #E0.7 [1] (📽️) film projector
| 0xF0 0x9F 0x93 0xBE #E0.0 [1] (📾) PORTABLE STEREO
| 0xF0 0x9F 0x93 0xBF..0xFF #E1.0 [4] (📿..🔂) prayer beads..repea...
| 0xF0 0x9F 0x94 0x00..0x82 #
| 0xF0 0x9F 0x94 0x83 #E0.6 [1] (🔃) clockwise vertical arrows
| 0xF0 0x9F 0x94 0x84..0x87 #E1.0 [4] (🔄..🔇) counterclockwise ar...
| 0xF0 0x9F 0x94 0x88 #E0.7 [1] (🔈) speaker low volume
| 0xF0 0x9F 0x94 0x89 #E1.0 [1] (🔉) speaker medium volume
| 0xF0 0x9F 0x94 0x8A..0x94 #E0.6 [11] (🔊..🔔) speaker high volume...
| 0xF0 0x9F 0x94 0x95 #E1.0 [1] (🔕) bell with slash
| 0xF0 0x9F 0x94 0x96..0xAB #E0.6 [22] (🔖..🔫) bookmark..water pistol
| 0xF0 0x9F 0x94 0xAC..0xAD #E1.0 [2] (🔬..🔭) microscope..telescope
| 0xF0 0x9F 0x94 0xAE..0xBD #E0.6 [16] (🔮..🔽) crystal ball..downw...
| 0xF0 0x9F 0x95 0x86..0x88 #E0.0 [3] (🕆..🕈) WHITE LATIN CROSS.....
| 0xF0 0x9F 0x95 0x89..0x8A #E0.7 [2] (🕉️..🕊️) om..dove
| 0xF0 0x9F 0x95 0x8B..0x8E #E1.0 [4] (🕋..🕎) kaaba..menorah
| 0xF0 0x9F 0x95 0x8F #E0.0 [1] (🕏) BOWL OF HYGIEIA
| 0xF0 0x9F 0x95 0x90..0x9B #E0.6 [12] (🕐..🕛) one o’clock..twelve...
| 0xF0 0x9F 0x95 0x9C..0xA7 #E0.7 [12] (🕜..🕧) one-thirty..twelve-...
| 0xF0 0x9F 0x95 0xA8..0xAE #E0.0 [7] (🕨..🕮) RIGHT SPEAKER..BOOK
| 0xF0 0x9F 0x95 0xAF..0xB0 #E0.7 [2] (🕯️..🕰️) candle..mantelpie...
| 0xF0 0x9F 0x95 0xB1..0xB2 #E0.0 [2] (🕱..🕲) BLACK SKULL AND CRO...
| 0xF0 0x9F 0x95 0xB3..0xB9 #E0.7 [7] (🕳️..🕹️) hole..joystick
| 0xF0 0x9F 0x95 0xBA #E3.0 [1] (🕺) man dancing
| 0xF0 0x9F 0x95 0xBB..0xFF #E0.0 [12] (🕻..🖆) LEFT HAND TELEPHONE...
| 0xF0 0x9F 0x96 0x00..0x86 #
| 0xF0 0x9F 0x96 0x87 #E0.7 [1] (🖇️) linked paperclips
| 0xF0 0x9F 0x96 0x88..0x89 #E0.0 [2] (🖈..🖉) BLACK PUSHPIN..LOWE...
| 0xF0 0x9F 0x96 0x8A..0x8D #E0.7 [4] (🖊️..🖍️) pen..crayon
| 0xF0 0x9F 0x96 0x8E..0x8F #E0.0 [2] (🖎..🖏) LEFT WRITING HAND.....
| 0xF0 0x9F 0x96 0x90 #E0.7 [1] (🖐️) hand with fingers spl...
| 0xF0 0x9F 0x96 0x91..0x94 #E0.0 [4] (🖑..🖔) REVERSED RAISED HAN...
| 0xF0 0x9F 0x96 0x95..0x96 #E1.0 [2] (🖕..🖖) middle finger..vulc...
| 0xF0 0x9F 0x96 0x97..0xA3 #E0.0 [13] (🖗..🖣) WHITE DOWN POINTING...
| 0xF0 0x9F 0x96 0xA4 #E3.0 [1] (🖤) black heart
| 0xF0 0x9F 0x96 0xA5 #E0.7 [1] (🖥️) desktop computer
| 0xF0 0x9F 0x96 0xA6..0xA7 #E0.0 [2] (🖦..🖧) KEYBOARD AND MOUSE....
| 0xF0 0x9F 0x96 0xA8 #E0.7 [1] (🖨️) printer
| 0xF0 0x9F 0x96 0xA9..0xB0 #E0.0 [8] (🖩..🖰) POCKET CALCULATOR.....
| 0xF0 0x9F 0x96 0xB1..0xB2 #E0.7 [2] (🖱️..🖲️) computer mouse..t...
| 0xF0 0x9F 0x96 0xB3..0xBB #E0.0 [9] (🖳..🖻) OLD PERSONAL COMPUT...
| 0xF0 0x9F 0x96 0xBC #E0.7 [1] (🖼️) framed picture
| 0xF0 0x9F 0x96 0xBD..0xFF #E0.0 [5] (🖽..🗁) FRAME WITH TILES..O...
| 0xF0 0x9F 0x97 0x00..0x81 #
| 0xF0 0x9F 0x97 0x82..0x84 #E0.7 [3] (🗂️..🗄️) card index divide...
| 0xF0 0x9F 0x97 0x85..0x90 #E0.0 [12] (🗅..🗐) EMPTY NOTE..PAGES
| 0xF0 0x9F 0x97 0x91..0x93 #E0.7 [3] (🗑️..🗓️) wastebasket..spir...
| 0xF0 0x9F 0x97 0x94..0x9B #E0.0 [8] (🗔..🗛) DESKTOP WINDOW..DEC...
| 0xF0 0x9F 0x97 0x9C..0x9E #E0.7 [3] (🗜️..🗞️) clamp..rolled-up ...
| 0xF0 0x9F 0x97 0x9F..0xA0 #E0.0 [2] (🗟..🗠) PAGE WITH CIRCLED T...
| 0xF0 0x9F 0x97 0xA1 #E0.7 [1] (🗡️) dagger
| 0xF0 0x9F 0x97 0xA2 #E0.0 [1] (🗢) LIPS
| 0xF0 0x9F 0x97 0xA3 #E0.7 [1] (🗣️) speaking head
| 0xF0 0x9F 0x97 0xA4..0xA7 #E0.0 [4] (🗤..🗧) THREE RAYS ABOVE..T...
| 0xF0 0x9F 0x97 0xA8 #E2.0 [1] (🗨️) left speech bubble
| 0xF0 0x9F 0x97 0xA9..0xAE #E0.0 [6] (🗩..🗮) RIGHT SPEECH BUBBLE...
| 0xF0 0x9F 0x97 0xAF #E0.7 [1] (🗯️) right anger bubble
| 0xF0 0x9F 0x97 0xB0..0xB2 #E0.0 [3] (🗰..🗲) MOOD BUBBLE..LIGHTN...
| 0xF0 0x9F 0x97 0xB3 #E0.7 [1] (🗳️) ballot box with ballot
| 0xF0 0x9F 0x97 0xB4..0xB9 #E0.0 [6] (🗴..🗹) BALLOT SCRIPT X..BA...
| 0xF0 0x9F 0x97 0xBA #E0.7 [1] (🗺️) world map
| 0xF0 0x9F 0x97 0xBB..0xBF #E0.6 [5] (🗻..🗿) mount fuji..moai
| 0xF0 0x9F 0x98 0x80 #E1.0 [1] (😀) grinning face
| 0xF0 0x9F 0x98 0x81..0x86 #E0.6 [6] (😁..😆) beaming face with s...
| 0xF0 0x9F 0x98 0x87..0x88 #E1.0 [2] (😇..😈) smiling face with h...
| 0xF0 0x9F 0x98 0x89..0x8D #E0.6 [5] (😉..😍) winking face..smili...
| 0xF0 0x9F 0x98 0x8E #E1.0 [1] (😎) smiling face with sung...
| 0xF0 0x9F 0x98 0x8F #E0.6 [1] (😏) smirking face
| 0xF0 0x9F 0x98 0x90 #E0.7 [1] (😐) neutral face
| 0xF0 0x9F 0x98 0x91 #E1.0 [1] (😑) expressionless face
| 0xF0 0x9F 0x98 0x92..0x94 #E0.6 [3] (😒..😔) unamused face..pens...
| 0xF0 0x9F 0x98 0x95 #E1.0 [1] (😕) confused face
| 0xF0 0x9F 0x98 0x96 #E0.6 [1] (😖) confounded face
| 0xF0 0x9F 0x98 0x97 #E1.0 [1] (😗) kissing face
| 0xF0 0x9F 0x98 0x98 #E0.6 [1] (😘) face blowing a kiss
| 0xF0 0x9F 0x98 0x99 #E1.0 [1] (😙) kissing face with smil...
| 0xF0 0x9F 0x98 0x9A #E0.6 [1] (😚) kissing face with clos...
| 0xF0 0x9F 0x98 0x9B #E1.0 [1] (😛) face with tongue
| 0xF0 0x9F 0x98 0x9C..0x9E #E0.6 [3] (😜..😞) winking face with t...
| 0xF0 0x9F 0x98 0x9F #E1.0 [1] (😟) worried face
| 0xF0 0x9F 0x98 0xA0..0xA5 #E0.6 [6] (😠..😥) angry face..sad but...
| 0xF0 0x9F 0x98 0xA6..0xA7 #E1.0 [2] (😦..😧) frowning face with ...
| 0xF0 0x9F 0x98 0xA8..0xAB #E0.6 [4] (😨..😫) fearful face..tired...
| 0xF0 0x9F 0x98 0xAC #E1.0 [1] (😬) grimacing face
| 0xF0 0x9F 0x98 0xAD #E0.6 [1] (😭) loudly crying face
| 0xF0 0x9F 0x98 0xAE..0xAF #E1.0 [2] (😮..😯) face with open mout...
| 0xF0 0x9F 0x98 0xB0..0xB3 #E0.6 [4] (😰..😳) anxious face with s...
| 0xF0 0x9F 0x98 0xB4 #E1.0 [1] (😴) sleeping face
| 0xF0 0x9F 0x98 0xB5 #E0.6 [1] (😵) face with crossed-out ...
| 0xF0 0x9F 0x98 0xB6 #E1.0 [1] (😶) face without mouth
| 0xF0 0x9F 0x98 0xB7..0xFF #E0.6 [10] (😷..🙀) face with medical m...
| 0xF0 0x9F 0x99 0x00..0x80 #
| 0xF0 0x9F 0x99 0x81..0x84 #E1.0 [4] (🙁..🙄) slightly frowning f...
| 0xF0 0x9F 0x99 0x85..0x8F #E0.6 [11] (🙅..🙏) person gesturing NO...
| 0xF0 0x9F 0x9A 0x80 #E0.6 [1] (🚀) rocket
| 0xF0 0x9F 0x9A 0x81..0x82 #E1.0 [2] (🚁..🚂) helicopter..locomotive
| 0xF0 0x9F 0x9A 0x83..0x85 #E0.6 [3] (🚃..🚅) railway car..bullet...
| 0xF0 0x9F 0x9A 0x86 #E1.0 [1] (🚆) train
| 0xF0 0x9F 0x9A 0x87 #E0.6 [1] (🚇) metro
| 0xF0 0x9F 0x9A 0x88 #E1.0 [1] (🚈) light rail
| 0xF0 0x9F 0x9A 0x89 #E0.6 [1] (🚉) station
| 0xF0 0x9F 0x9A 0x8A..0x8B #E1.0 [2] (🚊..🚋) tram..tram car
| 0xF0 0x9F 0x9A 0x8C #E0.6 [1] (🚌) bus
| 0xF0 0x9F 0x9A 0x8D #E0.7 [1] (🚍) oncoming bus
| 0xF0 0x9F 0x9A 0x8E #E1.0 [1] (🚎) trolleybus
| 0xF0 0x9F 0x9A 0x8F #E0.6 [1] (🚏) bus stop
| 0xF0 0x9F 0x9A 0x90 #E1.0 [1] (🚐) minibus
| 0xF0 0x9F 0x9A 0x91..0x93 #E0.6 [3] (🚑..🚓) ambulance..police car
| 0xF0 0x9F 0x9A 0x94 #E0.7 [1] (🚔) oncoming police car
| 0xF0 0x9F 0x9A 0x95 #E0.6 [1] (🚕) taxi
| 0xF0 0x9F 0x9A 0x96 #E1.0 [1] (🚖) oncoming taxi
| 0xF0 0x9F 0x9A 0x97 #E0.6 [1] (🚗) automobile
| 0xF0 0x9F 0x9A 0x98 #E0.7 [1] (🚘) oncoming automobile
| 0xF0 0x9F 0x9A 0x99..0x9A #E0.6 [2] (🚙..🚚) sport utility vehic...
| 0xF0 0x9F 0x9A 0x9B..0xA1 #E1.0 [7] (🚛..🚡) articulated lorry.....
| 0xF0 0x9F 0x9A 0xA2 #E0.6 [1] (🚢) ship
| 0xF0 0x9F 0x9A 0xA3 #E1.0 [1] (🚣) person rowing boat
| 0xF0 0x9F 0x9A 0xA4..0xA5 #E0.6 [2] (🚤..🚥) speedboat..horizont...
| 0xF0 0x9F 0x9A 0xA6 #E1.0 [1] (🚦) vertical traffic light
| 0xF0 0x9F 0x9A 0xA7..0xAD #E0.6 [7] (🚧..🚭) construction..no sm...
| 0xF0 0x9F 0x9A 0xAE..0xB1 #E1.0 [4] (🚮..🚱) litter in bin sign....
| 0xF0 0x9F 0x9A 0xB2 #E0.6 [1] (🚲) bicycle
| 0xF0 0x9F 0x9A 0xB3..0xB5 #E1.0 [3] (🚳..🚵) no bicycles..person...
| 0xF0 0x9F 0x9A 0xB6 #E0.6 [1] (🚶) person walking
| 0xF0 0x9F 0x9A 0xB7..0xB8 #E1.0 [2] (🚷..🚸) no pedestrians..chi...
| 0xF0 0x9F 0x9A 0xB9..0xBE #E0.6 [6] (🚹..🚾) mens room..water c...
| 0xF0 0x9F 0x9A 0xBF #E1.0 [1] (🚿) shower
| 0xF0 0x9F 0x9B 0x80 #E0.6 [1] (🛀) person taking bath
| 0xF0 0x9F 0x9B 0x81..0x85 #E1.0 [5] (🛁..🛅) bathtub..left luggage
| 0xF0 0x9F 0x9B 0x86..0x8A #E0.0 [5] (🛆..🛊) TRIANGLE WITH ROUND...
| 0xF0 0x9F 0x9B 0x8B #E0.7 [1] (🛋️) couch and lamp
| 0xF0 0x9F 0x9B 0x8C #E1.0 [1] (🛌) person in bed
| 0xF0 0x9F 0x9B 0x8D..0x8F #E0.7 [3] (🛍️..🛏️) shopping bags..bed
| 0xF0 0x9F 0x9B 0x90 #E1.0 [1] (🛐) place of worship
| 0xF0 0x9F 0x9B 0x91..0x92 #E3.0 [2] (🛑..🛒) stop sign..shopping...
| 0xF0 0x9F 0x9B 0x93..0x94 #E0.0 [2] (🛓..🛔) STUPA..PAGODA
| 0xF0 0x9F 0x9B 0x95 #E12.0 [1] (🛕) hindu temple
| 0xF0 0x9F 0x9B 0x96..0x97 #E13.0 [2] (🛖..🛗) hut..elevator
| 0xF0 0x9F 0x9B 0x98..0x9B #E0.0 [4] (🛘..🛛) <reserved-1F6D8>..<...
| 0xF0 0x9F 0x9B 0x9C #E15.0 [1] (🛜) wireless
| 0xF0 0x9F 0x9B 0x9D..0x9F #E14.0 [3] (🛝..🛟) playground slide..r...
| 0xF0 0x9F 0x9B 0xA0..0xA5 #E0.7 [6] (🛠️..🛥️) hammer and wrench...
| 0xF0 0x9F 0x9B 0xA6..0xA8 #E0.0 [3] (🛦..🛨) UP-POINTING MILITAR...
| 0xF0 0x9F 0x9B 0xA9 #E0.7 [1] (🛩️) small airplane
| 0xF0 0x9F 0x9B 0xAA #E0.0 [1] (🛪) NORTHEAST-POINTING AIR...
| 0xF0 0x9F 0x9B 0xAB..0xAC #E1.0 [2] (🛫..🛬) airplane departure....
| 0xF0 0x9F 0x9B 0xAD..0xAF #E0.0 [3] (🛭..🛯) <reserved-1F6ED>..<...
| 0xF0 0x9F 0x9B 0xB0 #E0.7 [1] (🛰️) satellite
| 0xF0 0x9F 0x9B 0xB1..0xB2 #E0.0 [2] (🛱..🛲) ONCOMING FIRE ENGIN...
| 0xF0 0x9F 0x9B 0xB3 #E0.7 [1] (🛳️) passenger ship
| 0xF0 0x9F 0x9B 0xB4..0xB6 #E3.0 [3] (🛴..🛶) kick scooter..canoe
| 0xF0 0x9F 0x9B 0xB7..0xB8 #E5.0 [2] (🛷..🛸) sled..flying saucer
| 0xF0 0x9F 0x9B 0xB9 #E11.0 [1] (🛹) skateboard
| 0xF0 0x9F 0x9B 0xBA #E12.0 [1] (🛺) auto rickshaw
| 0xF0 0x9F 0x9B 0xBB..0xBC #E13.0 [2] (🛻..🛼) pickup truck..rolle...
| 0xF0 0x9F 0x9B 0xBD..0xBF #E0.0 [3] (🛽..🛿) <reserved-1F6FD>..<...
| 0xF0 0x9F 0x9D 0xB4..0xBF #E0.0 [12] (🝴..🝿) LOT OF FORTUNE..ORCUS
| 0xF0 0x9F 0x9F 0x95..0x9F #E0.0 [11] (🟕..🟟) CIRCLED TRIANGLE..<...
| 0xF0 0x9F 0x9F 0xA0..0xAB #E12.0 [12] (🟠..🟫) orange circle..brow...
| 0xF0 0x9F 0x9F 0xAC..0xAF #E0.0 [4] (🟬..🟯) <reserved-1F7EC>..<...
| 0xF0 0x9F 0x9F 0xB0 #E14.0 [1] (🟰) heavy equals sign
| 0xF0 0x9F 0x9F 0xB1..0xBF #E0.0 [15] (🟱..🟿) <reserved-1F7F1>..<...
| 0xF0 0x9F 0xA0 0x8C..0x8F #E0.0 [4] (🠌..🠏) <reserved-1F80C>..<...
| 0xF0 0x9F 0xA1 0x88..0x8F #E0.0 [8] (🡈..🡏) <reserved-1F848>..<...
| 0xF0 0x9F 0xA1 0x9A..0x9F #E0.0 [6] (🡚..🡟) <reserved-1F85A>..<...
| 0xF0 0x9F 0xA2 0x88..0x8F #E0.0 [8] (🢈..🢏) <reserved-1F888>..<...
| 0xF0 0x9F 0xA2 0xAE..0xFF #E0.0 [82] (🢮..🣿) <reserved-1F8AE>..<...
| 0xF0 0x9F 0xA3 0x00..0xBF #
| 0xF0 0x9F 0xA4 0x8C #E13.0 [1] (🤌) pinched fingers
| 0xF0 0x9F 0xA4 0x8D..0x8F #E12.0 [3] (🤍..🤏) white heart..pinchi...
| 0xF0 0x9F 0xA4 0x90..0x98 #E1.0 [9] (🤐..🤘) zipper-mouth face.....
| 0xF0 0x9F 0xA4 0x99..0x9E #E3.0 [6] (🤙..🤞) call me hand..cross...
| 0xF0 0x9F 0xA4 0x9F #E5.0 [1] (🤟) love-you gesture
| 0xF0 0x9F 0xA4 0xA0..0xA7 #E3.0 [8] (🤠..🤧) cowboy hat face..sn...
| 0xF0 0x9F 0xA4 0xA8..0xAF #E5.0 [8] (🤨..🤯) face with raised ey...
| 0xF0 0x9F 0xA4 0xB0 #E3.0 [1] (🤰) pregnant woman
| 0xF0 0x9F 0xA4 0xB1..0xB2 #E5.0 [2] (🤱..🤲) breast-feeding..pal...
| 0xF0 0x9F 0xA4 0xB3..0xBA #E3.0 [8] (🤳..🤺) selfie..person fencing
| 0xF0 0x9F 0xA4 0xBC..0xBE #E3.0 [3] (🤼..🤾) people wrestling..p...
| 0xF0 0x9F 0xA4 0xBF #E12.0 [1] (🤿) diving mask
| 0xF0 0x9F 0xA5 0x80..0x85 #E3.0 [6] (🥀..🥅) wilted flower..goal...
| 0xF0 0x9F 0xA5 0x87..0x8B #E3.0 [5] (🥇..🥋) 1st place medal..ma...
| 0xF0 0x9F 0xA5 0x8C #E5.0 [1] (🥌) curling stone
| 0xF0 0x9F 0xA5 0x8D..0x8F #E11.0 [3] (🥍..🥏) lacrosse..flying disc
| 0xF0 0x9F 0xA5 0x90..0x9E #E3.0 [15] (🥐..🥞) croissant..pancakes
| 0xF0 0x9F 0xA5 0x9F..0xAB #E5.0 [13] (🥟..🥫) dumpling..canned food
| 0xF0 0x9F 0xA5 0xAC..0xB0 #E11.0 [5] (🥬..🥰) leafy green..smilin...
| 0xF0 0x9F 0xA5 0xB1 #E12.0 [1] (🥱) yawning face
| 0xF0 0x9F 0xA5 0xB2 #E13.0 [1] (🥲) smiling face with tear
| 0xF0 0x9F 0xA5 0xB3..0xB6 #E11.0 [4] (🥳..🥶) partying face..cold...
| 0xF0 0x9F 0xA5 0xB7..0xB8 #E13.0 [2] (🥷..🥸) ninja..disguised face
| 0xF0 0x9F 0xA5 0xB9 #E14.0 [1] (🥹) face holding back tears
| 0xF0 0x9F 0xA5 0xBA #E11.0 [1] (🥺) pleading face
| 0xF0 0x9F 0xA5 0xBB #E12.0 [1] (🥻) sari
| 0xF0 0x9F 0xA5 0xBC..0xBF #E11.0 [4] (🥼..🥿) lab coat..flat shoe
| 0xF0 0x9F 0xA6 0x80..0x84 #E1.0 [5] (🦀..🦄) crab..unicorn
| 0xF0 0x9F 0xA6 0x85..0x91 #E3.0 [13] (🦅..🦑) eagle..squid
| 0xF0 0x9F 0xA6 0x92..0x97 #E5.0 [6] (🦒..🦗) giraffe..cricket
| 0xF0 0x9F 0xA6 0x98..0xA2 #E11.0 [11] (🦘..🦢) kangaroo..swan
| 0xF0 0x9F 0xA6 0xA3..0xA4 #E13.0 [2] (🦣..🦤) mammoth..dodo
| 0xF0 0x9F 0xA6 0xA5..0xAA #E12.0 [6] (🦥..🦪) sloth..oyster
| 0xF0 0x9F 0xA6 0xAB..0xAD #E13.0 [3] (🦫..🦭) beaver..seal
| 0xF0 0x9F 0xA6 0xAE..0xAF #E12.0 [2] (🦮..🦯) guide dog..white cane
| 0xF0 0x9F 0xA6 0xB0..0xB9 #E11.0 [10] (🦰..🦹) red hair..supervillain
| 0xF0 0x9F 0xA6 0xBA..0xBF #E12.0 [6] (🦺..🦿) safety vest..mechan...
| 0xF0 0x9F 0xA7 0x80 #E1.0 [1] (🧀) cheese wedge
| 0xF0 0x9F 0xA7 0x81..0x82 #E11.0 [2] (🧁..🧂) cupcake..salt
| 0xF0 0x9F 0xA7 0x83..0x8A #E12.0 [8] (🧃..🧊) beverage box..ice
| 0xF0 0x9F 0xA7 0x8B #E13.0 [1] (🧋) bubble tea
| 0xF0 0x9F 0xA7 0x8C #E14.0 [1] (🧌) troll
| 0xF0 0x9F 0xA7 0x8D..0x8F #E12.0 [3] (🧍..🧏) person standing..de...
| 0xF0 0x9F 0xA7 0x90..0xA6 #E5.0 [23] (🧐..🧦) face with monocle.....
| 0xF0 0x9F 0xA7 0xA7..0xBF #E11.0 [25] (🧧..🧿) red envelope..nazar...
| 0xF0 0x9F 0xA8 0x80..0xFF #E0.0 [112] (🨀..🩯) NEUTRAL CHESS KING....
| 0xF0 0x9F 0xA9 0x00..0xAF #
| 0xF0 0x9F 0xA9 0xB0..0xB3 #E12.0 [4] (🩰..🩳) ballet shoes..shorts
| 0xF0 0x9F 0xA9 0xB4 #E13.0 [1] (🩴) thong sandal
| 0xF0 0x9F 0xA9 0xB5..0xB7 #E15.0 [3] (🩵..🩷) light blue heart..p...
| 0xF0 0x9F 0xA9 0xB8..0xBA #E12.0 [3] (🩸..🩺) drop of blood..stet...
| 0xF0 0x9F 0xA9 0xBB..0xBC #E14.0 [2] (🩻..🩼) x-ray..crutch
| 0xF0 0x9F 0xA9 0xBD..0xBF #E0.0 [3] (🩽..🩿) <reserved-1FA7D>..<...
| 0xF0 0x9F 0xAA 0x80..0x82 #E12.0 [3] (🪀..🪂) yo-yo..parachute
| 0xF0 0x9F 0xAA 0x83..0x86 #E13.0 [4] (🪃..🪆) boomerang..nesting ...
| 0xF0 0x9F 0xAA 0x87..0x88 #E15.0 [2] (🪇..🪈) maracas..flute
| 0xF0 0x9F 0xAA 0x89..0x8F #E0.0 [7] (🪉..🪏) <reserved-1FA89>..<...
| 0xF0 0x9F 0xAA 0x90..0x95 #E12.0 [6] (🪐..🪕) ringed planet..banjo
| 0xF0 0x9F 0xAA 0x96..0xA8 #E13.0 [19] (🪖..🪨) military helmet..rock
| 0xF0 0x9F 0xAA 0xA9..0xAC #E14.0 [4] (🪩..🪬) mirror ball..hamsa
| 0xF0 0x9F 0xAA 0xAD..0xAF #E15.0 [3] (🪭..🪯) folding hand fan..k...
| 0xF0 0x9F 0xAA 0xB0..0xB6 #E13.0 [7] (🪰..🪶) fly..feather
| 0xF0 0x9F 0xAA 0xB7..0xBA #E14.0 [4] (🪷..🪺) lotus..nest with eggs
| 0xF0 0x9F 0xAA 0xBB..0xBD #E15.0 [3] (🪻..🪽) hyacinth..wing
| 0xF0 0x9F 0xAA 0xBE #E0.0 [1] (🪾) <reserved-1FABE>
| 0xF0 0x9F 0xAA 0xBF #E15.0 [1] (🪿) goose
| 0xF0 0x9F 0xAB 0x80..0x82 #E13.0 [3] (🫀..🫂) anatomical heart..p...
| 0xF0 0x9F 0xAB 0x83..0x85 #E14.0 [3] (🫃..🫅) pregnant man..perso...
| 0xF0 0x9F 0xAB 0x86..0x8D #E0.0 [8] (🫆..🫍) <reserved-1FAC6>..<...
| 0xF0 0x9F 0xAB 0x8E..0x8F #E15.0 [2] (🫎..🫏) moose..donkey
| 0xF0 0x9F 0xAB 0x90..0x96 #E13.0 [7] (🫐..🫖) blueberries..teapot
| 0xF0 0x9F 0xAB 0x97..0x99 #E14.0 [3] (🫗..🫙) pouring liquid..jar
| 0xF0 0x9F 0xAB 0x9A..0x9B #E15.0 [2] (🫚..🫛) ginger root..pea pod
| 0xF0 0x9F 0xAB 0x9C..0x9F #E0.0 [4] (🫜..🫟) <reserved-1FADC>..<...
| 0xF0 0x9F 0xAB 0xA0..0xA7 #E14.0 [8] (🫠..🫧) melting face..bubbles
| 0xF0 0x9F 0xAB 0xA8 #E15.0 [1] (🫨) shaking face
| 0xF0 0x9F 0xAB 0xA9..0xAF #E0.0 [7] (🫩..🫯) <reserved-1FAE9>..<...
| 0xF0 0x9F 0xAB 0xB0..0xB6 #E14.0 [7] (🫰..🫶) hand with index fin...
| 0xF0 0x9F 0xAB 0xB7..0xB8 #E15.0 [2] (🫷..🫸) leftwards pushing h...
| 0xF0 0x9F 0xAB 0xB9..0xBF #E0.0 [7] (🫹..🫿) <reserved-1FAF9>..<...
| 0xF0 0x9F 0xB0 0x80..0xFF #E0.0[1022] (🰀..🿽) <reserved-1FC...
| 0xF0 0x9F 0xB1..0xBE 0x00..0xFF #
| 0xF0 0x9F 0xBF 0x00..0xBD #
;
}%%

View File

@@ -0,0 +1,8 @@
package textseg
//go:generate go run make_tables.go -output tables.go
//go:generate go run make_test_tables.go -output tables_test.go
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/15.0.0/ucd/auxiliary/GraphemeBreakProperty.txt -m GraphemeCluster -p "Prepend,CR,LF,Control,Extend,Regional_Indicator,SpacingMark,L,V,T,LV,LVT,ZWJ" -o grapheme_clusters_table.rl
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt -m Emoji -p "Extended_Pictographic" -o emoji_table.rl
//go:generate ragel -Z grapheme_clusters.rl
//go:generate gofmt -w grapheme_clusters.go

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,133 @@
package textseg
import (
"errors"
"unicode/utf8"
)
// Generated from grapheme_clusters.rl. DO NOT EDIT
%%{
# (except you are actually in grapheme_clusters.rl here, so edit away!)
machine graphclust;
write data;
}%%
var Error = errors.New("invalid UTF8 text")
// ScanGraphemeClusters is a split function for bufio.Scanner that splits
// on grapheme cluster boundaries.
func ScanGraphemeClusters(data []byte, atEOF bool) (int, []byte, error) {
if len(data) == 0 {
return 0, nil, nil
}
// Ragel state
cs := 0 // Current State
p := 0 // "Pointer" into data
pe := len(data) // End-of-data "pointer"
ts := 0
te := 0
act := 0
eof := pe
// Make Go compiler happy
_ = ts
_ = te
_ = act
_ = eof
startPos := 0
endPos := 0
%%{
include GraphemeCluster "grapheme_clusters_table.rl";
include Emoji "emoji_table.rl";
action start {
startPos = p
}
action end {
endPos = p
}
action emit {
return endPos+1, data[startPos:endPos+1], nil
}
ZWJGlue = ZWJ (Extended_Pictographic Extend*)?;
AnyExtender = Extend | ZWJGlue | SpacingMark;
Extension = AnyExtender*;
ReplacementChar = (0xEF 0xBF 0xBD);
CRLFSeq = CR LF;
ControlSeq = Control | ReplacementChar;
HangulSeq = (
L+ (((LV? V+ | LVT) T*)?|LV?) |
LV V* T* |
V+ T* |
LVT T* |
T+
) Extension;
EmojiSeq = Extended_Pictographic Extend* Extension;
ZWJSeq = ZWJ (ZWJ | Extend | SpacingMark)*;
EmojiFlagSeq = Regional_Indicator Regional_Indicator? Extension;
UTF8Cont = 0x80 .. 0xBF;
AnyUTF8 = (
0x00..0x7F |
0xC0..0xDF . UTF8Cont |
0xE0..0xEF . UTF8Cont . UTF8Cont |
0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
);
# OtherSeq is any character that isn't at the start of one of the extended sequences above, followed by extension
OtherSeq = (AnyUTF8 - (CR|LF|Control|ReplacementChar|L|LV|V|LVT|T|Extended_Pictographic|ZWJ|Regional_Indicator|Prepend)) (Extend | ZWJ | SpacingMark)*;
# PrependSeq is prepend followed by any of the other patterns above, except control characters which explicitly break
PrependSeq = Prepend+ (HangulSeq|EmojiSeq|ZWJSeq|EmojiFlagSeq|OtherSeq)?;
CRLFTok = CRLFSeq >start @end;
ControlTok = ControlSeq >start @end;
HangulTok = HangulSeq >start @end;
EmojiTok = EmojiSeq >start @end;
ZWJTok = ZWJSeq >start @end;
EmojiFlagTok = EmojiFlagSeq >start @end;
OtherTok = OtherSeq >start @end;
PrependTok = PrependSeq >start @end;
main := |*
CRLFTok => emit;
ControlTok => emit;
HangulTok => emit;
EmojiTok => emit;
ZWJTok => emit;
EmojiFlagTok => emit;
PrependTok => emit;
OtherTok => emit;
# any single valid UTF-8 character would also be valid per spec,
# but we'll handle that separately after the loop so we can deal
# with requesting more bytes if we're not at EOF.
*|;
write init;
write exec;
}%%
// If we fall out here then we were unable to complete a sequence:
// either we've reached the end of a partial buffer (so there's more
// data to come) or we have an isolated symbol that would normally be
// part of a grapheme cluster but has appeared in isolation here.
if !atEOF {
// Request more
return 0, nil, nil
}
// Just take the first UTF-8 sequence and return that.
_, seqLen := utf8.DecodeRune(data)
return seqLen, data[:seqLen], nil
}
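// The function below is an illustrative sketch added for this document, not
// part of the original file: it shows one way to consume a byte slice one
// grapheme cluster at a time by calling ScanGraphemeClusters directly with
// atEOF=true. In typical use the function is instead passed to
// (*bufio.Scanner).Split. The function name is invented for illustration.
func exampleSplitClusters(data []byte) ([][]byte, error) {
	var clusters [][]byte
	for len(data) > 0 {
		// atEOF=true tells the split function that no further input is
		// coming, so it emits even a trailing partial sequence.
		advance, token, err := ScanGraphemeClusters(data, true)
		if err != nil {
			return nil, err
		}
		clusters = append(clusters, token)
		data = data[advance:]
	}
	return clusters, nil
}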

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,335 @@
#!/usr/bin/env ruby
#
# This script has been updated to accept more command-line arguments:
#
# -u, --url URL to process
# -m, --machine Machine name
# -p, --properties Properties to add to the machine
# -o, --output Write output to file
#
# Updated by: Marty Schoch <marty.schoch@gmail.com>
#
# This script uses the unicode spec to generate a Ragel state machine
# that recognizes unicode alphanumeric characters. It generates 5
# character classes: uupper, ulower, ualpha, udigit, and ualnum.
# Currently supported encodings are UTF-8 [default] and UCS-4.
#
# Usage: unicode2ragel.rb [options]
# -e, --encoding [ucs4 | utf8] Data encoding
# -h, --help Show this message
#
# This script was originally written as part of the Ferret search
# engine library.
#
# Author: Rakan El-Khalil <rakan@well.com>
require 'optparse'
require 'open-uri'
ENCODINGS = [ :utf8, :ucs4 ]
ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" }
DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt"
DEFAULT_MACHINE_NAME= "WChar"
###
# Display vars & default option
TOTAL_WIDTH = 80
RANGE_WIDTH = 23
@encoding = :utf8
@chart_url = DEFAULT_CHART_URL
machine_name = DEFAULT_MACHINE_NAME
properties = []
@output = $stdout
###
# Option parsing
cli_opts = OptionParser.new do |opts|
opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o|
@encoding = o.downcase.to_sym
end
opts.on("-h", "--help", "Show this message") do
puts opts
exit
end
opts.on("-u", "--url URL", "URL to process") do |o|
@chart_url = o
end
opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o|
machine_name = o
end
opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o|
properties = o
end
opts.on("-o", "--output FILE", "output file") do |o|
@output = File.new(o, "w+")
end
end
cli_opts.parse(ARGV)
unless ENCODINGS.member? @encoding
puts "Invalid encoding: #{@encoding}"
puts cli_opts
exit
end
##
# Downloads the document at url and yields every alpha line's hex
# range and description.
def each_alpha( url, property )
URI.open( url ) do |file|
file.each_line do |line|
next if line =~ /^#/;
next if line !~ /; #{property} *#/;
range, description = line.split(/;/)
range.strip!
description.gsub!(/.*#/, '').strip!
if range =~ /\.\./
start, stop = range.split '..'
else start = stop = range
end
yield start.hex .. stop.hex, description
end
end
end
###
# Formats to hex, zero-padded to an even number of digits (whole bytes)
def to_hex( n )
r = "%0X" % n
r = "0#{r}" unless (r.length % 2).zero?
r
end
###
# UCS4 is just a straight hex conversion of the unicode codepoint.
def to_ucs4( range )
rangestr = "0x" + to_hex(range.begin)
rangestr << "..0x" + to_hex(range.end) if range.begin != range.end
[ rangestr ]
end
##
# 0x00 - 0x7f -> 0zzzzzzz[7]
# 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6]
# 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6]
# 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6]
UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff]
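# Example (added for this document): to_utf8_enc(0xE9) => "C3A9",
# i.e. the UTF-8 byte sequence 0xC3 0xA9 that encodes U+00E9.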
def to_utf8_enc( n )
r = 0
if n <= 0x7f
r = n
elsif n <= 0x7ff
y = 0xc0 | (n >> 6)
z = 0x80 | (n & 0x3f)
r = y << 8 | z
elsif n <= 0xffff
x = 0xe0 | (n >> 12)
y = 0x80 | (n >> 6) & 0x3f
z = 0x80 | n & 0x3f
r = x << 16 | y << 8 | z
elsif n <= 0x10ffff
w = 0xf0 | (n >> 18)
x = 0x80 | (n >> 12) & 0x3f
y = 0x80 | (n >> 6) & 0x3f
z = 0x80 | n & 0x3f
r = w << 24 | x << 16 | y << 8 | z
end
to_hex(r)
end
def from_utf8_enc( n )
n = n.hex
r = 0
if n <= 0x7f
r = n
elsif n <= 0xdfff
y = (n >> 8) & 0x1f
z = n & 0x3f
r = y << 6 | z
elsif n <= 0xefffff
x = (n >> 16) & 0x0f
y = (n >> 8) & 0x3f
z = n & 0x3f
r = x << 10 | y << 6 | z
elsif n <= 0xf7ffffff
w = (n >> 24) & 0x07
x = (n >> 16) & 0x3f
y = (n >> 8) & 0x3f
z = n & 0x3f
r = w << 18 | x << 12 | y << 6 | z
end
r
end
###
# Given a range, splits it up into ranges that can be continuously
# encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff]
# This is not strictly needed since the current [5.1] unicode standard
# doesn't have ranges that straddle utf8 boundaries. This is included
# for completeness as there is no telling if that will ever change.
def utf8_ranges( range )
ranges = []
UTF8_BOUNDARIES.each do |max|
if range.begin <= max
if range.end <= max
ranges << range
return ranges
end
ranges << (range.begin .. max)
range = (max + 1) .. range.end
end
end
ranges
end
def build_range( start, stop )
size = start.size/2
left = size - 1
return [""] if size < 1
a = start[0..1]
b = stop[0..1]
###
# Shared prefix
if a == b
return build_range(start[2..-1], stop[2..-1]).map do |elt|
"0x#{a} " + elt
end
end
###
# Unshared prefix, end of run
return ["0x#{a}..0x#{b} "] if left.zero?
###
# Unshared prefix, not end of run
# Range can be 0x123456..0x56789A
# Which is equivalent to:
# 0x123456 .. 0x12FFFF
# 0x130000 .. 0x55FFFF
# 0x560000 .. 0x56789A
ret = []
ret << build_range(start, a + "FF" * left)
###
# Only generate middle range if need be.
if a.hex+1 != b.hex
max = to_hex(b.hex - 1)
max = "FF" if b == "FF"
ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left
end
###
# Don't generate last range if it is covered by first range
ret << build_range(b + "00" * left, stop) unless b == "FF"
ret.flatten!
end
def to_utf8( range )
utf8_ranges( range ).map do |r|
begin_enc = to_utf8_enc(r.begin)
end_enc = to_utf8_enc(r.end)
build_range begin_enc, end_enc
end.flatten!
end
##
# Perform a 3-way comparison of the number of codepoints advertised by
# the unicode spec for the given range, the originally parsed range,
# and the resulting utf8 encoded range.
def count_codepoints( code )
code.split(' ').inject(1) do |acc, elt|
if elt =~ /0x(.+)\.\.0x(.+)/
if @encoding == :utf8
acc * (from_utf8_enc($2) - from_utf8_enc($1) + 1)
else
acc * ($2.hex - $1.hex + 1)
end
else
acc
end
end
end
def is_valid?( range, desc, codes )
spec_count = 1
spec_count = $1.to_i if desc =~ /\[(\d+)\]/
range_count = range.end - range.begin + 1
sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) }
sum == spec_count and sum == range_count
end
##
# Generate the state machine and write it to the configured output (stdout by default)
def generate_machine( name, property )
pipe = " "
@output.puts " #{name} = "
each_alpha( @chart_url, property ) do |range, desc|
codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range)
#raise "Invalid encoding of range #{range}: #{codes.inspect}" unless
# is_valid? range, desc, codes
range_width = codes.map { |a| a.size }.max
range_width = RANGE_WIDTH if range_width < RANGE_WIDTH
desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11
desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH
if desc.size > desc_width
desc = desc[0..desc_width - 4] + "..."
end
codes.each_with_index do |r, idx|
desc = "" unless idx.zero?
code = "%-#{range_width}s" % r
@output.puts " #{pipe} #{code} ##{desc}"
pipe = "|"
end
end
@output.puts " ;"
@output.puts ""
end
@output.puts <<EOF
# The following Ragel file was autogenerated with #{$0}
# from: #{@chart_url}
#
# It defines #{properties}.
#
# To use this, make sure that your alphtype is set to #{ALPHTYPES[@encoding]},
# and that your input is in #{@encoding}.
%%{
machine #{machine_name};
EOF
properties.each { |x| generate_machine( x, x ) }
@output.puts <<EOF
}%%
EOF

View File

@@ -0,0 +1,19 @@
package textseg
import "unicode/utf8"
// ScanUTF8Sequences is a split function for bufio.Scanner that splits
// on UTF8 sequence boundaries.
//
// This is included largely for completeness, since this behavior is already
// built in to Go when ranging over a string.
func ScanUTF8Sequences(data []byte, atEOF bool) (int, []byte, error) {
if len(data) == 0 {
return 0, nil, nil
}
r, seqLen := utf8.DecodeRune(data)
if r == utf8.RuneError && !atEOF {
return 0, nil, nil
}
return seqLen, data[:seqLen], nil
}

13
vendor/github.com/araddon/dateparse/.travis.yml generated vendored Normal file
View File

@@ -0,0 +1,13 @@
language: go
go:
- 1.13.x
before_install:
- go get -t -v ./...
script:
- go test -race -coverprofile=coverage.txt -covermode=atomic
after_success:
- bash <(curl -s https://codecov.io/bash)

21
vendor/github.com/araddon/dateparse/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015-2017 Aaron Raddon
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

323
vendor/github.com/araddon/dateparse/README.md generated vendored Normal file
View File

@@ -0,0 +1,323 @@
Go Date Parser
---------------------------
Parse many date strings without knowing the format in advance. It uses a scanner to read bytes and a state machine to find the format, which is much faster than shotgun-based parse methods. See [bench_test.go](https://github.com/araddon/dateparse/blob/master/bench_test.go) for a performance comparison.
[![Code Coverage](https://codecov.io/gh/araddon/dateparse/branch/master/graph/badge.svg)](https://codecov.io/gh/araddon/dateparse)
[![GoDoc](https://godoc.org/github.com/araddon/dateparse?status.svg)](http://godoc.org/github.com/araddon/dateparse)
[![Build Status](https://travis-ci.org/araddon/dateparse.svg?branch=master)](https://travis-ci.org/araddon/dateparse)
[![Go ReportCard](https://goreportcard.com/badge/araddon/dateparse)](https://goreportcard.com/report/araddon/dateparse)
**MM/DD/YYYY VS DD/MM/YYYY** Right now this uses mm/dd/yyyy when ambiguous. If this is not the desired behavior, use `ParseStrict`, which will fail on ambiguous date strings.
**Timezones** The location your server is configured to use affects the results! See the example below, https://play.golang.org/p/IDHRalIyXh, and the last paragraph of https://golang.org/pkg/time/#Parse.
```go
// Normal parse. Equivalent Timezone rules as time.Parse()
t, err := dateparse.ParseAny("3/1/2014")
// Parse Strict, returns an error on ambiguous mm/dd vs dd/mm dates
t, err := dateparse.ParseStrict("3/1/2014")
> returns error
// Return a string that represents the layout to parse the given date-time.
layout, err := dateparse.ParseFormat("May 8, 2009 5:57:51 PM")
> "Jan 2, 2006 3:04:05 PM"
```
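If you need strings without an explicit zone interpreted in a particular location without changing `time.Local`, the package also provides `ParseIn`, which takes a `*time.Location` (minimal sketch):

```go
loc, err := time.LoadLocation("America/Denver")
if err != nil {
	panic(err.Error())
}
// Strings with no zone information are interpreted in loc rather than time.Local.
t, err := dateparse.ParseIn("3/1/2014 10:00", loc)
```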
cli tool for testing date formats
----------------------------------
[Date Parse CLI](https://github.com/araddon/dateparse/blob/master/dateparse)
Extended example
-------------------
https://github.com/araddon/dateparse/blob/master/example/main.go
```go
package main
import (
"flag"
"fmt"
"time"
"github.com/scylladb/termtables"
"github.com/araddon/dateparse"
)
var examples = []string{
"May 8, 2009 5:57:51 PM",
"oct 7, 1970",
"oct 7, '70",
"oct. 7, 1970",
"oct. 7, 70",
"Mon Jan 2 15:04:05 2006",
"Mon Jan 2 15:04:05 MST 2006",
"Mon Jan 02 15:04:05 -0700 2006",
"Monday, 02-Jan-06 15:04:05 MST",
"Mon, 02 Jan 2006 15:04:05 MST",
"Tue, 11 Jul 2017 16:28:13 +0200 (CEST)",
"Mon, 02 Jan 2006 15:04:05 -0700",
"Mon 30 Sep 2018 09:09:09 PM UTC",
"Mon Aug 10 15:44:11 UTC+0100 2015",
"Thu, 4 Jan 2018 17:53:36 +0000",
"Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)",
"Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00)",
"September 17, 2012 10:09am",
"September 17, 2012 at 10:09am PST-08",
"September 17, 2012, 10:10:09",
"October 7, 1970",
"October 7th, 1970",
"12 Feb 2006, 19:17",
"12 Feb 2006 19:17",
"14 May 2019 19:11:40.164",
"7 oct 70",
"7 oct 1970",
"03 February 2013",
"1 July 2013",
"2013-Feb-03",
// dd/Mon/yyy alpha Months
"06/Jan/2008:15:04:05 -0700",
"06/Jan/2008 15:04:05 -0700",
// mm/dd/yy
"3/31/2014",
"03/31/2014",
"08/21/71",
"8/1/71",
"4/8/2014 22:05",
"04/08/2014 22:05",
"4/8/14 22:05",
"04/2/2014 03:00:51",
"8/8/1965 12:00:00 AM",
"8/8/1965 01:00:01 PM",
"8/8/1965 01:00 PM",
"8/8/1965 1:00 PM",
"8/8/1965 12:00 AM",
"4/02/2014 03:00:51",
"03/19/2012 10:11:59",
"03/19/2012 10:11:59.3186369",
// yyyy/mm/dd
"2014/3/31",
"2014/03/31",
"2014/4/8 22:05",
"2014/04/08 22:05",
"2014/04/2 03:00:51",
"2014/4/02 03:00:51",
"2012/03/19 10:11:59",
"2012/03/19 10:11:59.3186369",
// yyyy:mm:dd
"2014:3:31",
"2014:03:31",
"2014:4:8 22:05",
"2014:04:08 22:05",
"2014:04:2 03:00:51",
"2014:4:02 03:00:51",
"2012:03:19 10:11:59",
"2012:03:19 10:11:59.3186369",
// Chinese
"2014年04月08日",
// yyyy-mm-ddThh
"2006-01-02T15:04:05+0000",
"2009-08-12T22:15:09-07:00",
"2009-08-12T22:15:09",
"2009-08-12T22:15:09.988",
"2009-08-12T22:15:09Z",
"2017-07-19T03:21:51:897+0100",
"2019-05-29T08:41-04", // no seconds, 2 digit TZ offset
// yyyy-mm-dd hh:mm:ss
"2014-04-26 17:24:37.3186369",
"2012-08-03 18:31:59.257000000",
"2014-04-26 17:24:37.123",
"2013-04-01 22:43",
"2013-04-01 22:43:22",
"2014-12-16 06:20:00 UTC",
"2014-12-16 06:20:00 GMT",
"2014-04-26 05:24:37 PM",
"2014-04-26 13:13:43 +0800",
"2014-04-26 13:13:43 +0800 +08",
"2014-04-26 13:13:44 +09:00",
"2012-08-03 18:31:59.257000000 +0000 UTC",
"2015-09-30 18:48:56.35272715 +0000 UTC",
"2015-02-18 00:12:00 +0000 GMT",
"2015-02-18 00:12:00 +0000 UTC",
"2015-02-08 03:02:00 +0300 MSK m=+0.000000001",
"2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001",
"2017-07-19 03:21:51+00:00",
"2014-04-26",
"2014-04",
"2014",
"2014-05-11 08:20:13,787",
// yyyy-mm-dd-07:00
"2020-07-20+08:00",
// mm.dd.yy
"3.31.2014",
"03.31.2014",
"08.21.71",
"2014.03",
"2014.03.30",
// yyyymmdd and similar
"20140601",
"20140722105203",
// yymmdd hh:mm:yy mysql log
// 080313 05:21:55 mysqld started
"171113 14:14:20",
// unix seconds, ms, micro, nano
"1332151919",
"1384216367189",
"1384216367111222",
"1384216367111222333",
}
var (
timezone = ""
)
func main() {
flag.StringVar(&timezone, "timezone", "UTC", "Timezone aka `America/Los_Angeles` formatted time-zone")
flag.Parse()
if timezone != "" {
// NOTE: This is very, very important to understand
// time-parsing in go
loc, err := time.LoadLocation(timezone)
if err != nil {
panic(err.Error())
}
time.Local = loc
}
table := termtables.CreateTable()
table.AddHeaders("Input", "Parsed, and Output as %v")
for _, dateExample := range examples {
t, err := dateparse.ParseLocal(dateExample)
if err != nil {
panic(err.Error())
}
table.AddRow(dateExample, fmt.Sprintf("%v", t))
}
fmt.Println(table.Render())
}
/*
+-------------------------------------------------------+-----------------------------------------+
| Input | Parsed, and Output as %v |
+-------------------------------------------------------+-----------------------------------------+
| May 8, 2009 5:57:51 PM | 2009-05-08 17:57:51 +0000 UTC |
| oct 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
| oct 7, '70 | 1970-10-07 00:00:00 +0000 UTC |
| oct. 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
| oct. 7, 70 | 1970-10-07 00:00:00 +0000 UTC |
| Mon Jan 2 15:04:05 2006 | 2006-01-02 15:04:05 +0000 UTC |
| Mon Jan 2 15:04:05 MST 2006 | 2006-01-02 15:04:05 +0000 MST |
| Mon Jan 02 15:04:05 -0700 2006 | 2006-01-02 15:04:05 -0700 -0700 |
| Monday, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST |
| Mon, 02 Jan 2006 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST |
| Tue, 11 Jul 2017 16:28:13 +0200 (CEST) | 2017-07-11 16:28:13 +0200 +0200 |
| Mon, 02 Jan 2006 15:04:05 -0700 | 2006-01-02 15:04:05 -0700 -0700 |
| Mon 30 Sep 2018 09:09:09 PM UTC | 2018-09-30 21:09:09 +0000 UTC |
| Mon Aug 10 15:44:11 UTC+0100 2015 | 2015-08-10 15:44:11 +0000 UTC |
| Thu, 4 Jan 2018 17:53:36 +0000 | 2018-01-04 17:53:36 +0000 UTC |
| Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) | 2015-07-03 18:04:07 +0100 GMT |
| Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00) | 2021-01-03 00:12:23 +0800 +0800 |
| September 17, 2012 10:09am | 2012-09-17 10:09:00 +0000 UTC |
| September 17, 2012 at 10:09am PST-08 | 2012-09-17 10:09:00 -0800 PST |
| September 17, 2012, 10:10:09 | 2012-09-17 10:10:09 +0000 UTC |
| October 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
| October 7th, 1970 | 1970-10-07 00:00:00 +0000 UTC |
| 12 Feb 2006, 19:17 | 2006-02-12 19:17:00 +0000 UTC |
| 12 Feb 2006 19:17 | 2006-02-12 19:17:00 +0000 UTC |
| 14 May 2019 19:11:40.164 | 2019-05-14 19:11:40.164 +0000 UTC |
| 7 oct 70 | 1970-10-07 00:00:00 +0000 UTC |
| 7 oct 1970 | 1970-10-07 00:00:00 +0000 UTC |
| 03 February 2013 | 2013-02-03 00:00:00 +0000 UTC |
| 1 July 2013 | 2013-07-01 00:00:00 +0000 UTC |
| 2013-Feb-03 | 2013-02-03 00:00:00 +0000 UTC |
| 06/Jan/2008:15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
| 06/Jan/2008 15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
| 3/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
| 03/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
| 08/21/71 | 1971-08-21 00:00:00 +0000 UTC |
| 8/1/71 | 1971-08-01 00:00:00 +0000 UTC |
| 4/8/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC |
| 04/08/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC |
| 4/8/14 22:05 | 2014-04-08 22:05:00 +0000 UTC |
| 04/2/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
| 8/8/1965 12:00:00 AM | 1965-08-08 00:00:00 +0000 UTC |
| 8/8/1965 01:00:01 PM | 1965-08-08 13:00:01 +0000 UTC |
| 8/8/1965 01:00 PM | 1965-08-08 13:00:00 +0000 UTC |
| 8/8/1965 1:00 PM | 1965-08-08 13:00:00 +0000 UTC |
| 8/8/1965 12:00 AM | 1965-08-08 00:00:00 +0000 UTC |
| 4/02/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
| 03/19/2012 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
| 03/19/2012 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
| 2014/3/31 | 2014-03-31 00:00:00 +0000 UTC |
| 2014/03/31 | 2014-03-31 00:00:00 +0000 UTC |
| 2014/4/8 22:05 | 2014-04-08 22:05:00 +0000 UTC |
| 2014/04/08 22:05 | 2014-04-08 22:05:00 +0000 UTC |
| 2014/04/2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
| 2014/4/02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
| 2012/03/19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
| 2012/03/19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
| 2014:3:31 | 2014-03-31 00:00:00 +0000 UTC |
| 2014:03:31 | 2014-03-31 00:00:00 +0000 UTC |
| 2014:4:8 22:05 | 2014-04-08 22:05:00 +0000 UTC |
| 2014:04:08 22:05 | 2014-04-08 22:05:00 +0000 UTC |
| 2014:04:2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
| 2014:4:02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
| 2012:03:19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
| 2012:03:19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
| 2014年04月08日 | 2014-04-08 00:00:00 +0000 UTC |
| 2006-01-02T15:04:05+0000 | 2006-01-02 15:04:05 +0000 UTC |
| 2009-08-12T22:15:09-07:00 | 2009-08-12 22:15:09 -0700 -0700 |
| 2009-08-12T22:15:09 | 2009-08-12 22:15:09 +0000 UTC |
| 2009-08-12T22:15:09.988 | 2009-08-12 22:15:09.988 +0000 UTC |
| 2009-08-12T22:15:09Z | 2009-08-12 22:15:09 +0000 UTC |
| 2017-07-19T03:21:51:897+0100 | 2017-07-19 03:21:51.897 +0100 +0100 |
| 2019-05-29T08:41-04 | 2019-05-29 08:41:00 -0400 -0400 |
| 2014-04-26 17:24:37.3186369 | 2014-04-26 17:24:37.3186369 +0000 UTC |
| 2012-08-03 18:31:59.257000000 | 2012-08-03 18:31:59.257 +0000 UTC |
| 2014-04-26 17:24:37.123 | 2014-04-26 17:24:37.123 +0000 UTC |
| 2013-04-01 22:43 | 2013-04-01 22:43:00 +0000 UTC |
| 2013-04-01 22:43:22 | 2013-04-01 22:43:22 +0000 UTC |
| 2014-12-16 06:20:00 UTC | 2014-12-16 06:20:00 +0000 UTC |
| 2014-12-16 06:20:00 GMT | 2014-12-16 06:20:00 +0000 UTC |
| 2014-04-26 05:24:37 PM | 2014-04-26 17:24:37 +0000 UTC |
| 2014-04-26 13:13:43 +0800 | 2014-04-26 13:13:43 +0800 +0800 |
| 2014-04-26 13:13:43 +0800 +08 | 2014-04-26 13:13:43 +0800 +0800 |
| 2014-04-26 13:13:44 +09:00 | 2014-04-26 13:13:44 +0900 +0900 |
| 2012-08-03 18:31:59.257000000 +0000 UTC | 2012-08-03 18:31:59.257 +0000 UTC |
| 2015-09-30 18:48:56.35272715 +0000 UTC | 2015-09-30 18:48:56.35272715 +0000 UTC |
| 2015-02-18 00:12:00 +0000 GMT | 2015-02-18 00:12:00 +0000 UTC |
| 2015-02-18 00:12:00 +0000 UTC | 2015-02-18 00:12:00 +0000 UTC |
| 2015-02-08 03:02:00 +0300 MSK m=+0.000000001 | 2015-02-08 03:02:00 +0300 +0300 |
| 2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001 | 2015-02-08 03:02:00.001 +0300 +0300 |
| 2017-07-19 03:21:51+00:00 | 2017-07-19 03:21:51 +0000 UTC |
| 2014-04-26 | 2014-04-26 00:00:00 +0000 UTC |
| 2014-04 | 2014-04-01 00:00:00 +0000 UTC |
| 2014 | 2014-01-01 00:00:00 +0000 UTC |
| 2014-05-11 08:20:13,787 | 2014-05-11 08:20:13.787 +0000 UTC |
| 2020-07-20+08:00 | 2020-07-20 00:00:00 +0800 +0800 |
| 3.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
| 03.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
| 08.21.71 | 1971-08-21 00:00:00 +0000 UTC |
| 2014.03 | 2014-03-01 00:00:00 +0000 UTC |
| 2014.03.30 | 2014-03-30 00:00:00 +0000 UTC |
| 20140601 | 2014-06-01 00:00:00 +0000 UTC |
| 20140722105203 | 2014-07-22 10:52:03 +0000 UTC |
| 171113 14:14:20 | 2017-11-13 14:14:20 +0000 UTC |
| 1332151919 | 2012-03-19 10:11:59 +0000 UTC |
| 1384216367189 | 2013-11-12 00:32:47.189 +0000 UTC |
| 1384216367111222 | 2013-11-12 00:32:47.111222 +0000 UTC |
| 1384216367111222333 | 2013-11-12 00:32:47.111222333 +0000 UTC |
+-------------------------------------------------------+-----------------------------------------+
*/
```

2189
vendor/github.com/araddon/dateparse/parseany.go generated vendored Normal file

File diff suppressed because it is too large Load Diff

23
vendor/github.com/glycerine/blake2b/README generated vendored Normal file
View File

@@ -0,0 +1,23 @@
Go implementation of BLAKE2b collision-resistant cryptographic hash function
created by Jean-Philippe Aumasson, Samuel Neves, Zooko Wilcox-O'Hearn, and
Christian Winnerlein (https://blake2.net).
INSTALLATION
$ go get github.com/dchest/blake2b
DOCUMENTATION
See http://godoc.org/github.com/dchest/blake2b
PUBLIC DOMAIN DEDICATION
Written in 2012 by Dmitry Chestnykh.
To the extent possible under law, the author have dedicated all copyright
and related and neighboring rights to this software to the public domain
worldwide. This software is distributed without any warranty.
http://creativecommons.org/publicdomain/zero/1.0/

299
vendor/github.com/glycerine/blake2b/blake2b.go generated vendored Normal file
View File

@@ -0,0 +1,299 @@
// Written in 2012 by Dmitry Chestnykh.
//
// To the extent possible under law, the author have dedicated all copyright
// and related and neighboring rights to this software to the public domain
// worldwide. This software is distributed without any warranty.
// http://creativecommons.org/publicdomain/zero/1.0/
// Package blake2b implements BLAKE2b cryptographic hash function.
package blake2b
import (
"encoding/binary"
"errors"
"hash"
)
const (
BlockSize = 128 // block size of algorithm
Size = 64 // maximum digest size
SaltSize = 16 // maximum salt size
PersonSize = 16 // maximum personalization string size
KeySize = 64 // maximum size of key
)
type digest struct {
h [8]uint64 // current chain value
t [2]uint64 // message bytes counter
f [2]uint64 // finalization flags
x [BlockSize]byte // buffer for data not yet compressed
nx int // number of bytes in buffer
ih [8]uint64 // initial chain value (after config)
paddedKey [BlockSize]byte // copy of key, padded with zeros
isKeyed bool // indicates whether hash was keyed
size uint8 // digest size in bytes
isLastNode bool // indicates processing of the last node in tree hashing
}
// Initialization values.
var iv = [8]uint64{
0x6a09e667f3bcc908, 0xbb67ae8584caa73b,
0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1,
0x510e527fade682d1, 0x9b05688c2b3e6c1f,
0x1f83d9abfb41bd6b, 0x5be0cd19137e2179,
}
// Config is used to configure hash function parameters and keying.
// All parameters are optional.
type Config struct {
Size uint8 // digest size (if zero, default size of 64 bytes is used)
Key []byte // key for prefix-MAC
Salt []byte // salt (if < 16 bytes, padded with zeros)
Person []byte // personalization (if < 16 bytes, padded with zeros)
Tree *Tree // parameters for tree hashing
}
// Tree represents parameters for tree hashing.
type Tree struct {
Fanout uint8 // fanout
MaxDepth uint8 // maximal depth
LeafSize uint32 // leaf maximal byte length (0 for unlimited)
NodeOffset uint64 // node offset (0 for first, leftmost or leaf)
NodeDepth uint8 // node depth (0 for leaves)
InnerHashSize uint8 // inner hash byte length
IsLastNode bool // indicates processing of the last node of layer
}
var (
defaultConfig = &Config{Size: Size}
config256 = &Config{Size: 32}
)
func verifyConfig(c *Config) error {
if c.Size > Size {
return errors.New("digest size is too large")
}
if len(c.Key) > KeySize {
return errors.New("key is too large")
}
if len(c.Salt) > SaltSize {
// Smaller salt is okay: it will be padded with zeros.
return errors.New("salt is too large")
}
if len(c.Person) > PersonSize {
// Smaller personalization is okay: it will be padded with zeros.
return errors.New("personalization is too large")
}
if c.Tree != nil {
if c.Tree.Fanout == 1 {
return errors.New("fanout of 1 is not allowed in tree mode")
}
if c.Tree.MaxDepth < 2 {
return errors.New("incorrect tree depth")
}
if c.Tree.InnerHashSize < 1 || c.Tree.InnerHashSize > Size {
return errors.New("incorrect tree inner hash size")
}
}
return nil
}
// New returns a new hash.Hash configured with the given Config.
// Config can be nil, in which case the default one is used, calculating 64-byte digest.
// Returns non-nil error if Config contains invalid parameters.
func New(c *Config) (hash.Hash, error) {
if c == nil {
c = defaultConfig
} else {
if c.Size == 0 {
// Set default size if it's zero.
c.Size = Size
}
if err := verifyConfig(c); err != nil {
return nil, err
}
}
d := new(digest)
d.initialize(c)
return d, nil
}
// initialize initializes digest with the given
// config, which must be non-nil and verified.
func (d *digest) initialize(c *Config) {
// Create parameter block.
var p [BlockSize]byte
p[0] = c.Size
p[1] = uint8(len(c.Key))
if c.Salt != nil {
copy(p[32:], c.Salt)
}
if c.Person != nil {
copy(p[48:], c.Person)
}
if c.Tree != nil {
p[2] = c.Tree.Fanout
p[3] = c.Tree.MaxDepth
binary.LittleEndian.PutUint32(p[4:], c.Tree.LeafSize)
binary.LittleEndian.PutUint64(p[8:], c.Tree.NodeOffset)
p[16] = c.Tree.NodeDepth
p[17] = c.Tree.InnerHashSize
} else {
p[2] = 1
p[3] = 1
}
// Initialize.
d.size = c.Size
for i := 0; i < 8; i++ {
d.h[i] = iv[i] ^ binary.LittleEndian.Uint64(p[i*8:])
}
if c.Tree != nil && c.Tree.IsLastNode {
d.isLastNode = true
}
// Process key.
if c.Key != nil {
copy(d.paddedKey[:], c.Key)
d.Write(d.paddedKey[:])
d.isKeyed = true
}
// Save a copy of initialized state.
copy(d.ih[:], d.h[:])
}
// New512 returns a new hash.Hash computing the BLAKE2b 64-byte checksum.
func New512() hash.Hash {
d := new(digest)
d.initialize(defaultConfig)
return d
}
// New256 returns a new hash.Hash computing the BLAKE2b 32-byte checksum.
func New256() hash.Hash {
d := new(digest)
d.initialize(config256)
return d
}
// NewMAC returns a new hash.Hash computing BLAKE2b prefix-
// Message Authentication Code of the given size in bytes
// (up to 64) with the given key (up to 64 bytes in length).
func NewMAC(outBytes uint8, key []byte) hash.Hash {
d, err := New(&Config{Size: outBytes, Key: key})
if err != nil {
panic(err.Error())
}
return d
}
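// exampleDigests is an illustrative sketch added for this document (not part
// of the original source): it shows typical use of the constructors above,
// streaming a message into an unkeyed hash and into a keyed prefix-MAC and
// reading back the digests. The function name is invented for illustration.
func exampleDigests(msg, key []byte) (sum512 []byte, mac32 []byte) {
	h := New512() // plain 64-byte BLAKE2b digest
	h.Write(msg)
	m := NewMAC(32, key) // 32-byte BLAKE2b prefix-MAC keyed with key (up to 64 bytes)
	m.Write(msg)
	return h.Sum(nil), m.Sum(nil)
}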
// Reset resets the state of digest to the initial state
// after configuration and keying.
func (d *digest) Reset() {
copy(d.h[:], d.ih[:])
d.t[0] = 0
d.t[1] = 0
d.f[0] = 0
d.f[1] = 0
d.nx = 0
if d.isKeyed {
d.Write(d.paddedKey[:])
}
}
// Size returns the digest size in bytes.
func (d *digest) Size() int { return int(d.size) }
// BlockSize returns the algorithm block size in bytes.
func (d *digest) BlockSize() int { return BlockSize }
func (d *digest) Write(p []byte) (nn int, err error) {
nn = len(p)
left := BlockSize - d.nx
if len(p) > left {
// Process buffer.
copy(d.x[d.nx:], p[:left])
p = p[left:]
blocks(d, d.x[:])
d.nx = 0
}
// Process full blocks except for the last one.
if len(p) > BlockSize {
n := len(p) &^ (BlockSize - 1)
if n == len(p) {
n -= BlockSize
}
blocks(d, p[:n])
p = p[n:]
}
// Fill buffer.
d.nx += copy(d.x[d.nx:], p)
return
}
// Sum returns the calculated checksum.
func (d0 *digest) Sum(in []byte) []byte {
// Make a copy of d0 so that caller can keep writing and summing.
d := *d0
hash := d.checkSum()
return append(in, hash[:d.size]...)
}
func (d *digest) checkSum() [Size]byte {
// Do not create unnecessary copies of the key.
if d.isKeyed {
for i := 0; i < len(d.paddedKey); i++ {
d.paddedKey[i] = 0
}
}
dec := BlockSize - uint64(d.nx)
if d.t[0] < dec {
d.t[1]--
}
d.t[0] -= dec
// Pad buffer with zeros.
for i := d.nx; i < len(d.x); i++ {
d.x[i] = 0
}
// Set last block flag.
d.f[0] = 0xffffffffffffffff
if d.isLastNode {
d.f[1] = 0xffffffffffffffff
}
// Compress last block.
blocks(d, d.x[:])
var out [Size]byte
j := 0
for _, s := range d.h[:(d.size-1)/8+1] {
out[j+0] = byte(s >> 0)
out[j+1] = byte(s >> 8)
out[j+2] = byte(s >> 16)
out[j+3] = byte(s >> 24)
out[j+4] = byte(s >> 32)
out[j+5] = byte(s >> 40)
out[j+6] = byte(s >> 48)
out[j+7] = byte(s >> 56)
j += 8
}
return out
}
// Sum512 returns a 64-byte BLAKE2b hash of data.
func Sum512(data []byte) [64]byte {
var d digest
d.initialize(defaultConfig)
d.Write(data)
return d.checkSum()
}
// Sum256 returns a 32-byte BLAKE2b hash of data.
func Sum256(data []byte) (out [32]byte) {
var d digest
d.initialize(config256)
d.Write(data)
sum := d.checkSum()
copy(out[:], sum[:32])
return
}

1420
vendor/github.com/glycerine/blake2b/block.go generated vendored Normal file

File diff suppressed because it is too large Load Diff

25
vendor/github.com/glycerine/greenpack/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,25 @@
MIT License
Portions Copyright (c) 2016 Jason E. Aten
Portions Copyright (c) 2014 Philip Hofer
Portions Copyright (c) 2009 The Go Authors (license at http://golang.org) where indicated
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom
the Software is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,24 @@
// +build linux,!appengine
package msgp
import (
"os"
"syscall"
)
func adviseRead(mem []byte) {
syscall.Madvise(mem, syscall.MADV_SEQUENTIAL|syscall.MADV_WILLNEED)
}
func adviseWrite(mem []byte) {
syscall.Madvise(mem, syscall.MADV_SEQUENTIAL)
}
func fallocate(f *os.File, sz int64) error {
err := syscall.Fallocate(int(f.Fd()), 0, 0, sz)
if err == syscall.ENOTSUP {
return f.Truncate(sz)
}
return err
}

View File

@@ -0,0 +1,17 @@
// +build !linux appengine
package msgp
import (
"os"
)
// TODO: darwin, BSD support
func adviseRead(mem []byte) {}
func adviseWrite(mem []byte) {}
func fallocate(f *os.File, sz int64) error {
return f.Truncate(sz)
}

View File

@@ -0,0 +1,15 @@
// +build appengine
package msgp
// let's just assume appengine
// uses 64-bit hardware...
const smallint = false
func UnsafeString(b []byte) string {
return string(b)
}
func UnsafeBytes(s string) []byte {
return []byte(s)
}

39
vendor/github.com/glycerine/greenpack/msgp/circular.go generated vendored Normal file
View File

@@ -0,0 +1,39 @@
package msgp
type timer interface {
StartTimer()
StopTimer()
}
// EndlessReader is an io.Reader
// that loops over the same data
// endlessly. It is used for benchmarking.
type EndlessReader struct {
tb timer
data []byte
offset int
}
// NewEndlessReader returns a new endless reader
func NewEndlessReader(b []byte, tb timer) *EndlessReader {
return &EndlessReader{tb: tb, data: b, offset: 0}
}
// Read implements io.Reader. In practice, it
// always returns (len(p), nil), although it
// fills the supplied slice while the benchmark
// timer is stopped.
func (c *EndlessReader) Read(p []byte) (int, error) {
c.tb.StopTimer()
var n int
l := len(p)
m := len(c.data)
for n < l {
nn := copy(p[n:], c.data[c.offset:])
n += nn
c.offset += nn
c.offset %= m
}
c.tb.StartTimer()
return n, nil
}

37
vendor/github.com/glycerine/greenpack/msgp/clue.go generated vendored Normal file
View File

@@ -0,0 +1,37 @@
package msgp
import (
"fmt"
"strconv"
"strings"
)
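// Clue2Field composes the wire name for a struct field from its Go name, its
// type clue, and its numeric id (zid): for zid >= 0 the result is
// "name_zidNN_clue", e.g. Clue2Field("Email", "str", 2) == "Email_zid02_str",
// while a missing zid (zid < 0) yields "name__clue". Field2Clue below parses
// such names back into their parts.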
func Clue2Field(name string, clue string, zid int64) string {
if zid >= 0 {
return fmt.Sprintf("%s_zid%02d_%s", name, zid, clue)
}
// handle the missing zid, and don't write -1 as the zid.
return fmt.Sprintf("%s__%s", name, clue)
}
func Field2Clue(fieldname string) (name string, clue string, zid int64, err error) {
parts := strings.Split(fieldname, "_")
n := len(parts)
if n < 3 {
err = fmt.Errorf("too few underscore (expect at least two) in fieldname '%s'", fieldname)
return
}
clue = parts[n-1]
if strings.HasPrefix(parts[n-2], "zid") {
tmp, err2 := strconv.Atoi(parts[n-2][3:])
if err2 == nil {
zid = int64(tmp)
} else {
err = fmt.Errorf("problem parsing out _zid field in fieldname '%s': '%v'", fieldname, err2)
return
}
}
used := len(parts[n-1]) + len(parts[n-2]) + 2
name = fieldname[:len(fieldname)-used]
return
}

146
vendor/github.com/glycerine/greenpack/msgp/dedup.go generated vendored Normal file
View File

@@ -0,0 +1,146 @@
package msgp
import (
"fmt"
"reflect"
)
var _ = fmt.Printf
// Methods for deduplicating repeated occurrences of the same pointer.
//
// When writing, we track the sequence of pointers written.
// When we see a duplicate pointer, we write the special
// extension "duplicate" value along with the pointer's
// occurrence order in the serialization.
//
// As we read back, we keep a count that increments for every
// pointer we read, and we save a map from the count to the pointer.
// When we encounter a value that is the special value indicating reuse, then
// we refer back to the pointer k (k being found within the special extension value)
// and we plug in the k-th pointer instead.
// writer then reader methods
// ===============================
// ===============================
// Writer methods
// ===============================
// ===============================
func (mw *Writer) DedupReset() {
mw.ptrWrit = make(map[interface{}]int)
mw.ptrCountNext = 0
}
// diagnostic
func (mw *Writer) DedupPointerCount() int {
return len(mw.ptrWrit)
}
// upon writing each pointer, first check if it is a duplicate;
// i.e. appears more than once, pointing to the same object.
func (mw *Writer) DedupWriteIsDup(v interface{}) (res bool, err error) {
defer func() {
// This recover allows test 911 (_generated/gen_test.go:67) to run green.
// It turns indexing by []byte msgp.Raw into a no-op, which it
// should be.
if recover() != nil {
return
}
}()
if v == nil || reflect.ValueOf(v).IsNil() {
return false, nil
}
k, dup := mw.ptrWrit[v]
if !dup {
mw.ptrWrit[v] = mw.ptrCountNext
//fmt.Printf("\n\n $$$ NOT dup write %p -> k=%v / %#v\n\n", v, mw.ptrCountNext, v)
mw.ptrCountNext++
return false, nil
} else {
//fmt.Printf("\n\n $$$ DUP write %p -> k=%v / %#v\n\n", v, k, v)
}
return true, mw.DedupWriteExt(k)
}
// write DedupExtension with k integer count
// of the pointer that is duplicated here. k is
// runtime appearance order.
func (mw *Writer) DedupWriteExt(k int) error {
var by [8]byte
kby := AppendInt(by[:0], k)
ext := RawExtension{
Data: kby,
Type: DedupExtension,
}
return mw.WriteExtension(&ext)
}
// =============================
// =============================
// Reader side
// =============================
// =============================
func (m *Reader) DedupReadExt() (int, error) {
ext := RawExtension{
Type: DedupExtension,
}
err := m.ReadExtension(&ext)
if err != nil {
return -1, err
}
var nbs NilBitsStack
k, _, err := nbs.ReadIntBytes(ext.Data)
if err != nil {
return -1, err
}
return k, nil
}
func (r *Reader) DedupReset() {
r.dedupPointers = r.dedupPointers[:0]
}
func (r *Reader) DedupPtr(k int) interface{} {
if k >= 0 && k < len(r.dedupPointers) {
return r.dedupPointers[k]
}
panic(fmt.Sprintf("Reader.DedupPtr requested for k=%v but that was out of range! (avail=%v)", k, len(r.dedupPointers)))
return nil
}
func (m *Reader) DedupIndexEachPtr(ptr interface{}) {
//fmt.Printf("\n DedupIndexEachPtr called for ptr=%p/%T/val='%#v'\n", ptr, ptr, ptr)
if ptr == nil {
return
}
va := reflect.ValueOf(ptr)
if va.IsNil() {
return
}
m.dedupPointers = append(m.dedupPointers, ptr)
//fmt.Printf("\n\n *** Reader.DedupIndexEachPtr stored ptr '%#v', as sequence k=%v\n\n", ptr, len(m.dedupPointers)-1)
}
func (m *Reader) DedupReadIsDup(field, typeTarget string) (iface interface{}, res bool) {
//fmt.Printf("\n+++ Reader.DedupReadIsDup(field:'%s', type:'%s') starting.\n", field, typeTarget)
//defer func() {
// fmt.Printf("\n^^^ Reader.DedupReadIsDup() returning res=%v\n", res)
//}()
typ, err := m.peekExtensionType()
if err != nil {
return nil, false
}
if typ != DedupExtension {
return nil, false
}
k, err := m.DedupReadExt()
if err != nil {
return nil, false
}
ptr := m.DedupPtr(k)
//fmt.Printf("\n m.DedupReadIsDup() substituting, ptr= %p b/c read k=%v\n", ptr, k)
return ptr, true
}

142
vendor/github.com/glycerine/greenpack/msgp/defs.go generated vendored Normal file
View File

@@ -0,0 +1,142 @@
// This package is the support library for the greenpack code generator (http://github.com/glycerine/greenpack).
//
// This package defines the utilities used by the greenpack code generator for encoding and decoding MessagePack
// from []byte and io.Reader/io.Writer types. Much of this package is devoted to helping the greenpack code
// generator implement the Marshaler/Unmarshaler and Encodable/Decodable interfaces.
//
// This package defines four "families" of functions:
// - AppendXxxx() appends an object to a []byte in MessagePack encoding.
// - ReadXxxxBytes() reads an object from a []byte and returns the remaining bytes.
// - (*Writer).WriteXxxx() writes an object to the buffered *Writer type.
// - (*Reader).ReadXxxx() reads an object from a buffered *Reader type.
//
// Once a type has satisfied the `Encodable` and `Decodable` interfaces,
// it can be written to and read from arbitrary `io.Writer`s and `io.Reader`s using
// msgp.Encode(io.Writer, msgp.Encodable)
// and
// msgp.Decode(io.Reader, msgp.Decodable)
//
// There are also methods for converting MessagePack to JSON without
// an explicit de-serialization step.
//
// For additional tips, tricks, and gotchas, please visit
// the wiki at http://github.com/glycerine/greenpack
package msgp
const last4 = 0x0f
const first4 = 0xf0
const last5 = 0x1f
const first3 = 0xe0
const last7 = 0x7f
func isfixint(b byte) bool {
return b>>7 == 0
}
func isnfixint(b byte) bool {
return b&first3 == mnfixint
}
func isfixmap(b byte) bool {
return b&first4 == mfixmap
}
func isfixarray(b byte) bool {
return b&first4 == mfixarray
}
func isfixstr(b byte) bool {
return b&first3 == mfixstr
}
func wfixint(u uint8) byte {
return u & last7
}
func rfixint(b byte) uint8 {
return b
}
func wnfixint(i int8) byte {
return byte(i) | mnfixint
}
func rnfixint(b byte) int8 {
return int8(b)
}
func rfixmap(b byte) uint8 {
return b & last4
}
func wfixmap(u uint8) byte {
return mfixmap | (u & last4)
}
func rfixstr(b byte) uint8 {
return b & last5
}
func wfixstr(u uint8) byte {
return (u & last5) | mfixstr
}
func rfixarray(b byte) uint8 {
return (b & last4)
}
func wfixarray(u uint8) byte {
return (u & last4) | mfixarray
}
// These are all the byte
// prefixes defined by the
// msgpack standard
const (
// 0XXXXXXX
mfixint uint8 = 0x00
// 111XXXXX
mnfixint uint8 = 0xe0
// 1000XXXX
mfixmap uint8 = 0x80
// 1001XXXX
mfixarray uint8 = 0x90
// 101XXXXX
mfixstr uint8 = 0xa0
mnil uint8 = 0xc0
mfalse uint8 = 0xc2
mtrue uint8 = 0xc3
mbin8 uint8 = 0xc4
mbin16 uint8 = 0xc5
mbin32 uint8 = 0xc6
mext8 uint8 = 0xc7
mext16 uint8 = 0xc8
mext32 uint8 = 0xc9
mfloat32 uint8 = 0xca
mfloat64 uint8 = 0xcb
muint8 uint8 = 0xcc
muint16 uint8 = 0xcd
muint32 uint8 = 0xce
muint64 uint8 = 0xcf
mint8 uint8 = 0xd0
mint16 uint8 = 0xd1
mint32 uint8 = 0xd2
mint64 uint8 = 0xd3
mfixext1 uint8 = 0xd4
mfixext2 uint8 = 0xd5
mfixext4 uint8 = 0xd6
mfixext8 uint8 = 0xd7
mfixext16 uint8 = 0xd8
mstr8 uint8 = 0xd9
mstr16 uint8 = 0xda
mstr32 uint8 = 0xdb
marray16 uint8 = 0xdc
marray32 uint8 = 0xdd
mmap16 uint8 = 0xde
mmap32 uint8 = 0xdf
)
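// The function below is an illustrative sketch added for this document, not
// part of the original file: it round-trips a single integer using the
// AppendXxxx and ReadXxxxBytes families described in the package comment
// above (the ReadXxxxBytes helpers used here are methods on NilBitsStack in
// this fork). The function name is invented for illustration.
func exampleAppendReadInt() (int, error) {
	buf := AppendInt(nil, 42) // AppendXxxx family: encode onto a (possibly nil) []byte
	var nbs NilBitsStack
	v, _, err := nbs.ReadIntBytes(buf) // ReadXxxxBytes family: decode and return leftover bytes
	return v, err
}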

245
vendor/github.com/glycerine/greenpack/msgp/edit.go generated vendored Normal file
View File

@@ -0,0 +1,245 @@
package msgp
import (
"math"
)
// Locate returns a []byte pointing to the field
// in a messagepack map with the provided key. (The returned []byte
// points to a sub-slice of 'raw'; Locate does no allocations.) If the
// key doesn't exist in the map, a zero-length []byte will be returned.
func Locate(key string, raw []byte) []byte {
s, n := locate(raw, key)
return raw[s:n]
}
// Replace takes a key ("key") in a messagepack map ("raw")
// and replaces its value with the one provided and returns
// the new []byte. The returned []byte may point to the same
// memory as "raw". Replace makes no effort to evaluate the validity
// of the contents of 'val'. It may use up to the full capacity of 'raw'.
// Replace returns 'nil' if the field doesn't exist or if the object in 'raw'
// is not a map.
func Replace(key string, raw []byte, val []byte) []byte {
start, end := locate(raw, key)
if start == end {
return nil
}
return replace(raw, start, end, val, true)
}
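// exampleReplaceValue is an illustrative sketch (not part of the upstream
// API) of the intended Locate/Replace workflow. It assumes the Append*
// helpers defined elsewhere in this package (AppendMapHeader, AppendString,
// AppendInt): build the map {"a": 1}, then overwrite the value of "a" with 2.
func exampleReplaceValue() []byte {
	raw := AppendMapHeader(nil, 1)
	raw = AppendString(raw, "a")
	raw = AppendInt(raw, 1)
	if len(Locate("a", raw)) == 0 {
		return nil // key not present
	}
	// Replace may reuse raw's memory; here old and new values are both one byte.
	return Replace("a", raw, AppendInt(nil, 2))
}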
// CopyReplace works similarly to Replace except that the returned
// byte slice does not point to the same memory as 'raw'. CopyReplace
// returns 'nil' if the field doesn't exist or 'raw' isn't a map.
func CopyReplace(key string, raw []byte, val []byte) []byte {
start, end := locate(raw, key)
if start == end {
return nil
}
return replace(raw, start, end, val, false)
}
// Remove removes a key-value pair from 'raw'. It returns
// 'raw' unchanged if the key didn't exist.
func Remove(key string, raw []byte) []byte {
start, end := locateKV(raw, key)
if start == end {
return raw
}
raw = raw[:start+copy(raw[start:], raw[end:])]
return resizeMap(raw, -1)
}
// HasKey returns whether the map in 'raw' has
// a field with key 'key'
func HasKey(key string, raw []byte) bool {
var nbs *NilBitsStack
sz, bts, err := nbs.ReadMapHeaderBytes(raw)
if err != nil {
return false
}
var field []byte
for i := uint32(0); i < sz; i++ {
field, bts, err = nbs.ReadStringZC(bts)
if err != nil {
return false
}
if UnsafeString(field) == key {
return true
}
}
return false
}
func replace(raw []byte, start int, end int, val []byte, inplace bool) []byte {
ll := end - start // length of segment to replace
lv := len(val)
if inplace {
extra := lv - ll
// fastest case: we're doing
// a 1:1 replacement
if extra == 0 {
copy(raw[start:], val)
return raw
} else if extra < 0 {
// 'val' smaller than replaced value
// copy in place and shift back
x := copy(raw[start:], val)
y := copy(raw[start+x:], raw[end:])
return raw[:start+x+y]
} else if extra < cap(raw)-len(raw) {
// 'val' less than (cap-len) extra bytes
// copy in place and shift forward
raw = raw[0 : len(raw)+extra]
// shift end forward
copy(raw[end+extra:], raw[end:])
copy(raw[start:], val)
return raw
}
}
// we have to allocate new space
out := make([]byte, len(raw)+len(val)-ll)
x := copy(out, raw[:start])
y := copy(out[x:], val)
copy(out[x+y:], raw[end:])
return out
}
// locate does a naive O(n) search for the map key; returns start, end
// (returns 0,0 on error)
func locate(raw []byte, key string) (start int, end int) {
var (
sz uint32
bts []byte
field []byte
err error
)
var nbs *NilBitsStack
sz, bts, err = nbs.ReadMapHeaderBytes(raw)
if err != nil {
return
}
// loop and locate field
for i := uint32(0); i < sz; i++ {
field, bts, err = nbs.ReadStringZC(bts)
if err != nil {
return 0, 0
}
if UnsafeString(field) == key {
// start location
l := len(raw)
start = l - len(bts)
bts, err = Skip(bts)
if err != nil {
return 0, 0
}
end = l - len(bts)
return
}
bts, err = Skip(bts)
if err != nil {
return 0, 0
}
}
return 0, 0
}
// locate key AND value
func locateKV(raw []byte, key string) (start int, end int) {
var nbs *NilBitsStack
var (
sz uint32
bts []byte
field []byte
err error
)
sz, bts, err = nbs.ReadMapHeaderBytes(raw)
if err != nil {
return 0, 0
}
for i := uint32(0); i < sz; i++ {
tmp := len(bts)
field, bts, err = nbs.ReadStringZC(bts)
if err != nil {
return 0, 0
}
if UnsafeString(field) == key {
start = len(raw) - tmp
bts, err = Skip(bts)
if err != nil {
return 0, 0
}
end = len(raw) - len(bts)
return
}
bts, err = Skip(bts)
if err != nil {
return 0, 0
}
}
return 0, 0
}
// delta is delta on map size
func resizeMap(raw []byte, delta int64) []byte {
var sz int64
switch raw[0] {
case mmap16:
sz = int64(big.Uint16(raw[1:]))
if sz+delta <= math.MaxUint16 {
big.PutUint16(raw[1:], uint16(sz+delta))
return raw
}
if cap(raw)-len(raw) >= 2 {
raw = raw[0 : len(raw)+2]
copy(raw[5:], raw[3:])
raw[0] = mmap32
big.PutUint32(raw[1:], uint32(sz+delta))
return raw
}
n := make([]byte, 0, len(raw)+5)
n = AppendMapHeader(n, uint32(sz+delta))
return append(n, raw[3:]...)
case mmap32:
sz = int64(big.Uint32(raw[1:]))
big.PutUint32(raw[1:], uint32(sz+delta))
return raw
default:
sz = int64(rfixmap(raw[0]))
if sz+delta < 16 {
raw[0] = wfixmap(uint8(sz + delta))
return raw
} else if sz+delta <= math.MaxUint16 {
if cap(raw)-len(raw) >= 2 {
raw = raw[0 : len(raw)+2]
copy(raw[3:], raw[1:])
raw[0] = mmap16
big.PutUint16(raw[1:], uint16(sz+delta))
return raw
}
n := make([]byte, 0, len(raw)+5)
n = AppendMapHeader(n, uint32(sz+delta))
return append(n, raw[1:]...)
}
if cap(raw)-len(raw) >= 4 {
raw = raw[0 : len(raw)+4]
copy(raw[5:], raw[1:])
raw[0] = mmap32
big.PutUint32(raw[1:], uint32(sz+delta))
return raw
}
n := make([]byte, 0, len(raw)+5)
n = AppendMapHeader(n, uint32(sz+delta))
return append(n, raw[1:]...)
}
}

99
vendor/github.com/glycerine/greenpack/msgp/elsize.go generated vendored Normal file
View File

@@ -0,0 +1,99 @@
package msgp
// size of every object on the wire,
// plus type information. gives us
// constant-time type information
// for traversing composite objects.
//
var sizes = [256]bytespec{
mnil: {size: 1, extra: constsize, typ: NilType},
mfalse: {size: 1, extra: constsize, typ: BoolType},
mtrue: {size: 1, extra: constsize, typ: BoolType},
mbin8: {size: 2, extra: extra8, typ: BinType},
mbin16: {size: 3, extra: extra16, typ: BinType},
mbin32: {size: 5, extra: extra32, typ: BinType},
mext8: {size: 3, extra: extra8, typ: ExtensionType},
mext16: {size: 4, extra: extra16, typ: ExtensionType},
mext32: {size: 6, extra: extra32, typ: ExtensionType},
mfloat32: {size: 5, extra: constsize, typ: Float32Type},
mfloat64: {size: 9, extra: constsize, typ: Float64Type},
muint8: {size: 2, extra: constsize, typ: UintType},
muint16: {size: 3, extra: constsize, typ: UintType},
muint32: {size: 5, extra: constsize, typ: UintType},
muint64: {size: 9, extra: constsize, typ: UintType},
mint8: {size: 2, extra: constsize, typ: IntType},
mint16: {size: 3, extra: constsize, typ: IntType},
mint32: {size: 5, extra: constsize, typ: IntType},
mint64: {size: 9, extra: constsize, typ: IntType},
mfixext1: {size: 3, extra: constsize, typ: ExtensionType},
mfixext2: {size: 4, extra: constsize, typ: ExtensionType},
mfixext4: {size: 6, extra: constsize, typ: ExtensionType},
mfixext8: {size: 10, extra: constsize, typ: ExtensionType},
mfixext16: {size: 18, extra: constsize, typ: ExtensionType},
mstr8: {size: 2, extra: extra8, typ: StrType},
mstr16: {size: 3, extra: extra16, typ: StrType},
mstr32: {size: 5, extra: extra32, typ: StrType},
marray16: {size: 3, extra: array16v, typ: ArrayType},
marray32: {size: 5, extra: array32v, typ: ArrayType},
mmap16: {size: 3, extra: map16v, typ: MapType},
mmap32: {size: 5, extra: map32v, typ: MapType},
}
func init() {
// set up fixed fields
// fixint
for i := mfixint; i < 0x80; i++ {
sizes[i] = bytespec{size: 1, extra: constsize, typ: IntType}
}
// nfixint
for i := uint16(mnfixint); i < 0x100; i++ {
sizes[uint8(i)] = bytespec{size: 1, extra: constsize, typ: IntType}
}
// fixstr gets constsize,
// since the prefix yields the size
for i := mfixstr; i < 0xc0; i++ {
sizes[i] = bytespec{size: 1 + rfixstr(i), extra: constsize, typ: StrType}
}
// fixmap
for i := mfixmap; i < 0x90; i++ {
sizes[i] = bytespec{size: 1, extra: varmode(2 * rfixmap(i)), typ: MapType}
}
// fixarray
for i := mfixarray; i < 0xa0; i++ {
sizes[i] = bytespec{size: 1, extra: varmode(rfixarray(i)), typ: ArrayType}
}
}
// a valid bytespec has
// non-zero 'size' and
// non-zero 'typ'
type bytespec struct {
size uint8 // prefix size information
extra varmode // extra size information
typ Type // type
_ byte // makes bytespec 4 bytes (yes, this matters)
}
// size mode
// if positive, # elements for composites
type varmode int8
const (
constsize varmode = 0 // constant size (size bytes + uint8(varmode) objects)
extra8 = -1 // has uint8(p[1]) extra bytes
extra16 = -2 // has be16(p[1:]) extra bytes
extra32 = -3 // has be32(p[1:]) extra bytes
map16v = -4 // use map16
map32v = -5 // use map32
array16v = -6 // use array16
array32v = -7 // use array32
)
func getType(v byte) Type {
return sizes[v].typ
}
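// exampleLeadByte is an illustrative sketch (not part of the upstream API):
// the sizes table gives constant-time access to the wire type and fixed
// prefix size of any lead byte; a negative 'extra' mode means the payload
// length or element count must be read from the bytes that follow.
func exampleLeadByte(lead byte) (Type, uint8) {
	spec := sizes[lead]
	return spec.typ, spec.size
}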

142
vendor/github.com/glycerine/greenpack/msgp/errors.go generated vendored Normal file
View File

@@ -0,0 +1,142 @@
package msgp
import (
"fmt"
"reflect"
)
var (
// ErrShortBytes is returned when the
// slice being decoded is too short to
// contain the contents of the message
ErrShortBytes error = errShort{}
// this error is only returned
// if we reach code that should
// be unreachable
fatal error = errFatal{}
)
// Error is the interface satisfied
// by all of the errors that originate
// from this package.
type Error interface {
error
// Resumable returns true when it is
// safe to keep decoding from the same
// stream, and false when the data is
// malformed beyond recovery.
Resumable() bool
}
type errShort struct{}
func (e errShort) Error() string { return "msgp: too few bytes left to read object" }
func (e errShort) Resumable() bool { return false }
type errFatal struct{}
func (f errFatal) Error() string { return "msgp: fatal decoding error (unreachable code)" }
func (f errFatal) Resumable() bool { return false }
// ArrayError is an error returned
// when decoding a fix-sized array
// of the wrong size
type ArrayError struct {
Wanted uint32
Got uint32
}
// Error implements the error interface
func (a ArrayError) Error() string {
return fmt.Sprintf("msgp: wanted array of size %d; got %d", a.Wanted, a.Got)
}
// Resumable is always 'true' for ArrayErrors
func (a ArrayError) Resumable() bool { return true }
// IntOverflow is returned when a call
// would downcast an integer to a type
// with too few bits to hold its value.
type IntOverflow struct {
Value int64 // the value of the integer
FailedBitsize int // the bit size that the int64 could not fit into
}
// Error implements the error interface
func (i IntOverflow) Error() string {
return fmt.Sprintf("msgp: %d overflows int%d", i.Value, i.FailedBitsize)
}
// Resumable is always 'true' for overflows
func (i IntOverflow) Resumable() bool { return true }
// UintOverflow is returned when a call
// would downcast an unsigned integer to a type
// with too few bits to hold its value
type UintOverflow struct {
Value uint64 // value of the uint
FailedBitsize int // the bit size that couldn't fit the value
}
// Error implements the error interface
func (u UintOverflow) Error() string {
return fmt.Sprintf("msgp: %d overflows uint%d", u.Value, u.FailedBitsize)
}
// Resumable is always 'true' for overflows
func (u UintOverflow) Resumable() bool { return true }
// A TypeError is returned when a particular
// decoding method is unsuitable for decoding
// a particular MessagePack value.
type TypeError struct {
Method Type // Type expected by method
Encoded Type // Type actually encoded
}
// Error implements the error interface
func (t TypeError) Error() string {
return fmt.Sprintf("msgp: attempted to decode type %q with method for %q", t.Encoded, t.Method)
}
// Resumable returns 'true' for TypeErrors
func (t TypeError) Resumable() bool { return true }
// returns either InvalidPrefixError or
// TypeError depending on whether or not
// the prefix is recognized
func badPrefix(want Type, lead byte) error {
t := sizes[lead].typ
if t == InvalidType {
return InvalidPrefixError(lead)
}
return TypeError{Method: want, Encoded: t}
}
// InvalidPrefixError is returned when a bad encoding
// uses a prefix that is not recognized in the MessagePack standard.
// This kind of error is unrecoverable.
type InvalidPrefixError byte
// Error implements the error interface
func (i InvalidPrefixError) Error() string {
return fmt.Sprintf("msgp: unrecognized type prefix 0x%x", byte(i))
}
// Resumable returns 'false' for InvalidPrefixErrors
func (i InvalidPrefixError) Resumable() bool { return false }
// ErrUnsupportedType is returned
// when a bad argument is supplied
// to a function that takes `interface{}`.
type ErrUnsupportedType struct {
T reflect.Type
}
// Error implements error
func (e *ErrUnsupportedType) Error() string { return fmt.Sprintf("msgp: type %q not supported(4)", e.T) }
// Resumable returns 'true' for ErrUnsupportedType
func (e *ErrUnsupportedType) Resumable() bool { return true }

559
vendor/github.com/glycerine/greenpack/msgp/extension.go generated vendored Normal file
View File

@@ -0,0 +1,559 @@
package msgp
import (
"fmt"
"math"
)
const (
// Complex64Extension is the extension number used for complex64
Complex64Extension = 3
// Complex128Extension is the extension number used for complex128
Complex128Extension = 4
// TimeExtension is the extension number used for time.Time
TimeExtension = 5
// DedupExtension allows us to avoid cycles and avoid excess
// space consumption for graphs that are not strictly trees.
DedupExtension = 6
// DurationExtension is used for time.Duration
DurationExtension = 7
)
// our extensions live here
var extensionReg = make(map[int8]func() Extension)
// RegisterExtension registers extensions so that they
// can be initialized and returned by methods that
// decode `interface{}` values. This should only
// be called during initialization. f() should return
// a newly-initialized zero value of the extension. Keep in
// mind that extension types 3 through 7 are reserved for
// complex64, complex128, time.Time, dedup references, and
// time.Duration, respectively, and that MessagePack reserves
// extension types from -128 to -1 for predefined types.
//
// For example, if you wanted to register a user-defined struct:
//
// msgp.RegisterExtension(10, func() msgp.Extension { return &MyExtension{} })
//
// RegisterExtension will panic if you call it multiple times
// with the same 'typ' argument, or if you use a reserved
// type (3, 4, 5, 6, 7).
func RegisterExtension(typ int8, f func() Extension) {
switch typ {
case Complex64Extension, Complex128Extension, TimeExtension, DedupExtension, DurationExtension:
panic(fmt.Sprint("msgp: forbidden extension type:", typ))
}
if _, ok := extensionReg[typ]; ok {
panic(fmt.Sprint("msgp: RegisterExtension() called with typ", typ, "more than once"))
}
extensionReg[typ] = f
}
// ExtensionTypeError is an error type returned
// when there is a mis-match between an extension type
// and the type encoded on the wire
type ExtensionTypeError struct {
Got int8
Want int8
}
// Error implements the error interface
func (e ExtensionTypeError) Error() string {
return fmt.Sprintf("msgp: error decoding extension: wanted type %d; got type %d", e.Want, e.Got)
}
// Resumable returns 'true' for ExtensionTypeErrors
func (e ExtensionTypeError) Resumable() bool { return true }
func errExt(got int8, wanted int8) error {
return ExtensionTypeError{Got: got, Want: wanted}
}
// Extension is the interface fulfilled
// by types that want to define their
// own binary encoding.
type Extension interface {
// ExtensionType should return
// a int8 that identifies the concrete
// type of the extension. (Types <0 are
// officially reserved by the MessagePack
// specifications.)
ExtensionType() int8
// Len should return the length
// of the data to be encoded
Len() int
// MarshalBinaryTo should copy
// the data into the supplied slice,
// assuming that the slice has length Len()
MarshalBinaryTo([]byte) error
UnmarshalBinary([]byte) error
}
// RawExtension implements the Extension interface
type RawExtension struct {
Data []byte
Type int8
}
// ExtensionType implements Extension.ExtensionType, and returns r.Type
func (r *RawExtension) ExtensionType() int8 { return r.Type }
// Len implements Extension.Len, and returns len(r.Data)
func (r *RawExtension) Len() int { return len(r.Data) }
// MarshalBinaryTo implements Extension.MarshalBinaryTo,
// copying r.Data into the supplied slice
func (r *RawExtension) MarshalBinaryTo(d []byte) error {
copy(d, r.Data)
return nil
}
// UnmarshalBinary implements Extension.UnmarshalBinary,
// and sets r.Data to the contents of the provided slice
func (r *RawExtension) UnmarshalBinary(b []byte) error {
if cap(r.Data) >= len(b) {
r.Data = r.Data[0:len(b)]
} else {
r.Data = make([]byte, len(b))
}
copy(r.Data, b)
return nil
}
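// exampleRegisterRaw is an illustrative sketch (not part of the upstream
// API): it registers a RawExtension factory under the hypothetical,
// application-chosen type number 10 so that generic decoders can
// materialize it. Call something like this once, during initialization.
func exampleRegisterRaw() {
	RegisterExtension(10, func() Extension { return &RawExtension{Type: 10} })
}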
// WriteExtension writes an extension type to the writer
func (mw *Writer) WriteExtension(e Extension) error {
l := e.Len()
var err error
switch l {
case 0:
o, err := mw.require(3)
if err != nil {
return err
}
mw.buf[o] = mext8
mw.buf[o+1] = 0
mw.buf[o+2] = byte(e.ExtensionType())
case 1:
o, err := mw.require(2)
if err != nil {
return err
}
mw.buf[o] = mfixext1
mw.buf[o+1] = byte(e.ExtensionType())
case 2:
o, err := mw.require(2)
if err != nil {
return err
}
mw.buf[o] = mfixext2
mw.buf[o+1] = byte(e.ExtensionType())
case 4:
o, err := mw.require(2)
if err != nil {
return err
}
mw.buf[o] = mfixext4
mw.buf[o+1] = byte(e.ExtensionType())
case 8:
o, err := mw.require(2)
if err != nil {
return err
}
mw.buf[o] = mfixext8
mw.buf[o+1] = byte(e.ExtensionType())
case 16:
o, err := mw.require(2)
if err != nil {
return err
}
mw.buf[o] = mfixext16
mw.buf[o+1] = byte(e.ExtensionType())
default:
switch {
case l < math.MaxUint8:
o, err := mw.require(3)
if err != nil {
return err
}
mw.buf[o] = mext8
mw.buf[o+1] = byte(uint8(l))
mw.buf[o+2] = byte(e.ExtensionType())
case l < math.MaxUint16:
o, err := mw.require(4)
if err != nil {
return err
}
mw.buf[o] = mext16
big.PutUint16(mw.buf[o+1:], uint16(l))
mw.buf[o+3] = byte(e.ExtensionType())
default:
o, err := mw.require(6)
if err != nil {
return err
}
mw.buf[o] = mext32
big.PutUint32(mw.buf[o+1:], uint32(l))
mw.buf[o+5] = byte(e.ExtensionType())
}
}
// we can only write directly to the
// buffer if we're sure that it
// fits the object
if l <= mw.bufsize() {
o, err := mw.require(l)
if err != nil {
return err
}
return e.MarshalBinaryTo(mw.buf[o:])
}
// here we create a new buffer
// just large enough for the body
// and save it as the write buffer
err = mw.flush()
if err != nil {
return err
}
buf := make([]byte, l)
err = e.MarshalBinaryTo(buf)
if err != nil {
return err
}
mw.buf = buf
mw.wloc = l
return nil
}
// peek at the extension type, assuming the next
// kind to be read is Extension
func (m *Reader) peekExtensionType() (int8, error) {
p, err := m.R.Peek(2)
if err != nil {
return 0, err
}
spec := sizes[p[0]]
if spec.typ != ExtensionType {
return 0, badPrefix(ExtensionType, p[0])
}
if spec.extra == constsize {
return int8(p[1]), nil
}
size := spec.size
p, err = m.R.Peek(int(size))
if err != nil {
return 0, err
}
return int8(p[size-1]), nil
}
// peekExtension peeks at the extension encoding type
// (must guarantee at least 1 byte in 'b')
func peekExtension(b []byte) (int8, error) {
spec := sizes[b[0]]
size := spec.size
if spec.typ != ExtensionType {
return 0, badPrefix(ExtensionType, b[0])
}
if len(b) < int(size) {
return 0, ErrShortBytes
}
// for fixed extensions,
// the type information is in
// the second byte
if spec.extra == constsize {
return int8(b[1]), nil
}
// otherwise, it's in the last
// part of the prefix
return int8(b[size-1]), nil
}
// ReadExtension reads the next object from the reader
// as an extension. ReadExtension will fail if the next
// object in the stream is not an extension, or if
// e.Type() is not the same as the wire type.
func (m *Reader) ReadExtension(e Extension) (err error) {
var p []byte
p, err = m.R.Peek(2)
if err != nil {
return
}
lead := p[0]
var read int
var off int
switch lead {
case mfixext1:
if int8(p[1]) != e.ExtensionType() {
err = errExt(int8(p[1]), e.ExtensionType())
return
}
p, err = m.R.Peek(3)
if err != nil {
return
}
err = e.UnmarshalBinary(p[2:])
if err == nil {
_, err = m.R.Skip(3)
}
return
case mfixext2:
if int8(p[1]) != e.ExtensionType() {
err = errExt(int8(p[1]), e.ExtensionType())
return
}
p, err = m.R.Peek(4)
if err != nil {
return
}
err = e.UnmarshalBinary(p[2:])
if err == nil {
_, err = m.R.Skip(4)
}
return
case mfixext4:
if int8(p[1]) != e.ExtensionType() {
err = errExt(int8(p[1]), e.ExtensionType())
return
}
p, err = m.R.Peek(6)
if err != nil {
return
}
err = e.UnmarshalBinary(p[2:])
if err == nil {
_, err = m.R.Skip(6)
}
return
case mfixext8:
if int8(p[1]) != e.ExtensionType() {
err = errExt(int8(p[1]), e.ExtensionType())
return
}
p, err = m.R.Peek(10)
if err != nil {
return
}
err = e.UnmarshalBinary(p[2:])
if err == nil {
_, err = m.R.Skip(10)
}
return
case mfixext16:
if int8(p[1]) != e.ExtensionType() {
err = errExt(int8(p[1]), e.ExtensionType())
return
}
p, err = m.R.Peek(18)
if err != nil {
return
}
err = e.UnmarshalBinary(p[2:])
if err == nil {
_, err = m.R.Skip(18)
}
return
case mext8:
p, err = m.R.Peek(3)
if err != nil {
return
}
if int8(p[2]) != e.ExtensionType() {
err = errExt(int8(p[2]), e.ExtensionType())
return
}
read = int(uint8(p[1]))
off = 3
case mext16:
p, err = m.R.Peek(4)
if err != nil {
return
}
if int8(p[3]) != e.ExtensionType() {
err = errExt(int8(p[3]), e.ExtensionType())
return
}
read = int(big.Uint16(p[1:]))
off = 4
case mext32:
p, err = m.R.Peek(6)
if err != nil {
return
}
if int8(p[5]) != e.ExtensionType() {
err = errExt(int8(p[5]), e.ExtensionType())
return
}
read = int(big.Uint32(p[1:]))
off = 6
default:
err = badPrefix(ExtensionType, lead)
return
}
p, err = m.R.Peek(read + off)
if err != nil {
return
}
err = e.UnmarshalBinary(p[off:])
if err == nil {
_, err = m.R.Skip(read + off)
}
return
}
// AppendExtension appends a MessagePack extension to the provided slice
func AppendExtension(b []byte, e Extension) ([]byte, error) {
l := e.Len()
var o []byte
var n int
switch l {
case 0:
o, n = ensure(b, 3)
o[n] = mext8
o[n+1] = 0
o[n+2] = byte(e.ExtensionType())
return o[:n+3], nil
case 1:
o, n = ensure(b, 3)
o[n] = mfixext1
o[n+1] = byte(e.ExtensionType())
n += 2
case 2:
o, n = ensure(b, 4)
o[n] = mfixext2
o[n+1] = byte(e.ExtensionType())
n += 2
case 4:
o, n = ensure(b, 6)
o[n] = mfixext4
o[n+1] = byte(e.ExtensionType())
n += 2
case 8:
o, n = ensure(b, 10)
o[n] = mfixext8
o[n+1] = byte(e.ExtensionType())
n += 2
case 16:
o, n = ensure(b, 18)
o[n] = mfixext16
o[n+1] = byte(e.ExtensionType())
n += 2
default:
// variable-length extension: pick the smallest ext8/ext16/ext32 header
switch {
case l < math.MaxUint8:
o, n = ensure(b, l+3)
o[n] = mext8
o[n+1] = byte(uint8(l))
o[n+2] = byte(e.ExtensionType())
n += 3
case l < math.MaxUint16:
o, n = ensure(b, l+4)
o[n] = mext16
big.PutUint16(o[n+1:], uint16(l))
o[n+3] = byte(e.ExtensionType())
n += 4
default:
o, n = ensure(b, l+6)
o[n] = mext32
big.PutUint32(o[n+1:], uint32(l))
o[n+5] = byte(e.ExtensionType())
n += 6
}
}
return o, e.MarshalBinaryTo(o[n:])
}
// ReadExtensionBytes reads an extension from 'b' into 'e'
// and returns any remaining bytes.
// Possible errors:
// - ErrShortBytes ('b' not long enough)
// - ExtensionTypeError{} (wire type not the same as e.Type())
// - TypeError{} (next object not an extension)
// - InvalidPrefixError
// - An unmarshal error returned from e.UnmarshalBinary
func (nbs *NilBitsStack) ReadExtensionBytes(b []byte, e Extension) ([]byte, error) {
if nbs != nil && nbs.AlwaysNil {
return b, nil
}
l := len(b)
if l < 3 {
return b, ErrShortBytes
}
lead := b[0]
var (
sz int // size of 'data'
off int // offset of 'data'
typ int8
)
switch lead {
case mfixext1:
typ = int8(b[1])
sz = 1
off = 2
case mfixext2:
typ = int8(b[1])
sz = 2
off = 2
case mfixext4:
typ = int8(b[1])
sz = 4
off = 2
case mfixext8:
typ = int8(b[1])
sz = 8
off = 2
case mfixext16:
typ = int8(b[1])
sz = 16
off = 2
case mext8:
sz = int(uint8(b[1]))
typ = int8(b[2])
off = 3
if sz == 0 {
return b[3:], e.UnmarshalBinary(b[3:3])
}
case mext16:
if l < 4 {
return b, ErrShortBytes
}
sz = int(big.Uint16(b[1:]))
typ = int8(b[3])
off = 4
case mext32:
if l < 6 {
return b, ErrShortBytes
}
sz = int(big.Uint32(b[1:]))
typ = int8(b[5])
off = 6
default:
return b, badPrefix(ExtensionType, lead)
}
if typ != e.ExtensionType() {
return b, errExt(typ, e.ExtensionType())
}
// the data of the extension starts
// at 'off' and is 'sz' bytes long
if len(b[off:]) < sz {
return b, ErrShortBytes
}
tot := off + sz
return b[tot:], e.UnmarshalBinary(b[off:tot])
}

91
vendor/github.com/glycerine/greenpack/msgp/file.go generated vendored Normal file
View File

@@ -0,0 +1,91 @@
// +build linux,!appengine darwin dragonfly freebsd netbsd openbsd
package msgp
import (
"os"
"syscall"
)
// ReadFile reads a file into 'dst' using
// a read-only memory mapping. Consequently,
// the file must be mmap-able, and the
// Unmarshaler should never write to
// the source memory. (Methods generated
// by the msgp tool obey that constraint, but
// user-defined implementations may not.)
//
// Reading and writing through file mappings
// is only efficient for large files; small
// files are best read and written using
// the ordinary streaming interfaces.
//
func ReadFile(dst Unmarshaler, file *os.File) error {
stat, err := file.Stat()
if err != nil {
return err
}
data, err := syscall.Mmap(int(file.Fd()), 0, int(stat.Size()), syscall.PROT_READ, syscall.MAP_SHARED)
if err != nil {
return err
}
adviseRead(data)
_, err = dst.UnmarshalMsg(data)
uerr := syscall.Munmap(data)
if err == nil {
err = uerr
}
return err
}
// MarshalSizer is the combination
// of the Marshaler and Sizer
// interfaces.
type MarshalSizer interface {
Marshaler
Sizer
}
// WriteFile writes a file from 'src' using
// memory mapping. It overwrites the entire
// contents of the previous file.
// The mapping size is calculated
// using the `Msgsize()` method
// of 'src', so it must produce a result
// equal to or greater than the actual encoded
// size of the object. Otherwise,
// a fault (SIGBUS) will occur.
//
// Reading and writing through file mappings
// is only efficient for large files; small
// files are best read and written using
// the ordinary streaming interfaces.
//
// NOTE: The performance of this call
// is highly OS- and filesystem-dependent.
// Users should take care to test that this
// performs as expected in a production environment.
// (Linux users should run a kernel and filesystem
// that support fallocate(2) for the best results.)
func WriteFile(src MarshalSizer, file *os.File) error {
sz := src.Msgsize()
err := fallocate(file, int64(sz))
if err != nil {
return err
}
data, err := syscall.Mmap(int(file.Fd()), 0, sz, syscall.PROT_READ|syscall.PROT_WRITE, syscall.MAP_SHARED)
if err != nil {
return err
}
adviseWrite(data)
chunk := data[:0]
chunk, err = src.MarshalMsg(chunk)
if err != nil {
return err
}
uerr := syscall.Munmap(data)
if uerr != nil {
return uerr
}
return file.Truncate(int64(len(chunk)))
}
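// exampleFileRoundTrip is an illustrative sketch (not part of the upstream
// API): it writes a MarshalSizer to 'path' through the mmap-backed
// WriteFile, then reads it back into 'dst' with ReadFile. 'path', 'src'
// and 'dst' are caller-supplied assumptions.
func exampleFileRoundTrip(path string, src MarshalSizer, dst Unmarshaler) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	if err = WriteFile(src, f); err != nil {
		f.Close()
		return err
	}
	if err = f.Close(); err != nil {
		return err
	}
	g, err := os.Open(path)
	if err != nil {
		return err
	}
	defer g.Close()
	return ReadFile(dst, g)
}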

View File

@@ -0,0 +1,47 @@
// +build windows appengine
package msgp
import (
"io/ioutil"
"os"
)
// MarshalSizer is the combination
// of the Marshaler and Sizer
// interfaces.
type MarshalSizer interface {
Marshaler
Sizer
}
func ReadFile(dst Unmarshaler, file *os.File) error {
if u, ok := dst.(Decodable); ok {
return u.DecodeMsg(NewReader(file))
}
data, err := ioutil.ReadAll(file)
if err != nil {
return err
}
_, err = dst.UnmarshalMsg(data)
return err
}
func WriteFile(src MarshalSizer, file *os.File) error {
if e, ok := src.(Encodable); ok {
w := NewWriter(file)
err := e.EncodeMsg(w)
if err == nil {
err = w.Flush()
}
return err
}
raw, err := src.MarshalMsg(nil)
if err != nil {
return err
}
_, err = file.Write(raw)
return err
}

174
vendor/github.com/glycerine/greenpack/msgp/integers.go generated vendored Normal file
View File

@@ -0,0 +1,174 @@
package msgp
/* ----------------------------------
integer encoding utilities
(inline-able)
TODO(tinylib): there are faster,
albeit non-portable solutions
to the code below. implement
byteswap?
---------------------------------- */
func putMint64(b []byte, i int64) {
b[0] = mint64
b[1] = byte(i >> 56)
b[2] = byte(i >> 48)
b[3] = byte(i >> 40)
b[4] = byte(i >> 32)
b[5] = byte(i >> 24)
b[6] = byte(i >> 16)
b[7] = byte(i >> 8)
b[8] = byte(i)
}
func getMint64(b []byte) int64 {
return (int64(b[1]) << 56) | (int64(b[2]) << 48) |
(int64(b[3]) << 40) | (int64(b[4]) << 32) |
(int64(b[5]) << 24) | (int64(b[6]) << 16) |
(int64(b[7]) << 8) | (int64(b[8]))
}
func putMint32(b []byte, i int32) {
b[0] = mint32
b[1] = byte(i >> 24)
b[2] = byte(i >> 16)
b[3] = byte(i >> 8)
b[4] = byte(i)
}
func getMint32(b []byte) int32 {
return (int32(b[1]) << 24) | (int32(b[2]) << 16) | (int32(b[3]) << 8) | (int32(b[4]))
}
func putMint16(b []byte, i int16) {
b[0] = mint16
b[1] = byte(i >> 8)
b[2] = byte(i)
}
func getMint16(b []byte) (i int16) {
return (int16(b[1]) << 8) | int16(b[2])
}
func putMint8(b []byte, i int8) {
b[0] = mint8
b[1] = byte(i)
}
func getMint8(b []byte) (i int8) {
return int8(b[1])
}
func putMuint64(b []byte, u uint64) {
b[0] = muint64
b[1] = byte(u >> 56)
b[2] = byte(u >> 48)
b[3] = byte(u >> 40)
b[4] = byte(u >> 32)
b[5] = byte(u >> 24)
b[6] = byte(u >> 16)
b[7] = byte(u >> 8)
b[8] = byte(u)
}
func getMuint64(b []byte) uint64 {
return (uint64(b[1]) << 56) | (uint64(b[2]) << 48) |
(uint64(b[3]) << 40) | (uint64(b[4]) << 32) |
(uint64(b[5]) << 24) | (uint64(b[6]) << 16) |
(uint64(b[7]) << 8) | (uint64(b[8]))
}
func putMuint32(b []byte, u uint32) {
b[0] = muint32
b[1] = byte(u >> 24)
b[2] = byte(u >> 16)
b[3] = byte(u >> 8)
b[4] = byte(u)
}
func getMuint32(b []byte) uint32 {
return (uint32(b[1]) << 24) | (uint32(b[2]) << 16) | (uint32(b[3]) << 8) | (uint32(b[4]))
}
func putMuint16(b []byte, u uint16) {
b[0] = muint16
b[1] = byte(u >> 8)
b[2] = byte(u)
}
func getMuint16(b []byte) uint16 {
return (uint16(b[1]) << 8) | uint16(b[2])
}
func putMuint8(b []byte, u uint8) {
b[0] = muint8
b[1] = byte(u)
}
func getMuint8(b []byte) uint8 {
return uint8(b[1])
}
func getUnix(b []byte) (sec int64, nsec int32) {
sec = (int64(b[0]) << 56) | (int64(b[1]) << 48) |
(int64(b[2]) << 40) | (int64(b[3]) << 32) |
(int64(b[4]) << 24) | (int64(b[5]) << 16) |
(int64(b[6]) << 8) | (int64(b[7]))
nsec = (int32(b[8]) << 24) | (int32(b[9]) << 16) | (int32(b[10]) << 8) | (int32(b[11]))
return
}
func putUnix(b []byte, sec int64, nsec int32) {
b[0] = byte(sec >> 56)
b[1] = byte(sec >> 48)
b[2] = byte(sec >> 40)
b[3] = byte(sec >> 32)
b[4] = byte(sec >> 24)
b[5] = byte(sec >> 16)
b[6] = byte(sec >> 8)
b[7] = byte(sec)
b[8] = byte(nsec >> 24)
b[9] = byte(nsec >> 16)
b[10] = byte(nsec >> 8)
b[11] = byte(nsec)
}
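// exampleUnixRoundTrip is an illustrative sketch (not part of the upstream
// API): the 12-byte payload used by the time extension is 8 bytes of
// big-endian seconds followed by 4 bytes of big-endian nanoseconds.
func exampleUnixRoundTrip(sec int64, nsec int32) (int64, int32) {
	var buf [12]byte
	putUnix(buf[:], sec, nsec)
	return getUnix(buf[:])
}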
/* -----------------------------
prefix utilities
----------------------------- */
// write prefix and uint8
func prefixu8(b []byte, pre byte, sz uint8) {
b[0] = pre
b[1] = byte(sz)
}
// write prefix and big-endian uint16
func prefixu16(b []byte, pre byte, sz uint16) {
b[0] = pre
b[1] = byte(sz >> 8)
b[2] = byte(sz)
}
// write prefix and big-endian uint32
func prefixu32(b []byte, pre byte, sz uint32) {
b[0] = pre
b[1] = byte(sz >> 24)
b[2] = byte(sz >> 16)
b[3] = byte(sz >> 8)
b[4] = byte(sz)
}
func prefixu64(b []byte, pre byte, sz uint64) {
b[0] = pre
b[1] = byte(sz >> 56)
b[2] = byte(sz >> 48)
b[3] = byte(sz >> 40)
b[4] = byte(sz >> 32)
b[5] = byte(sz >> 24)
b[6] = byte(sz >> 16)
b[7] = byte(sz >> 8)
b[8] = byte(sz)
}

555
vendor/github.com/glycerine/greenpack/msgp/json.go generated vendored Normal file
View File

@@ -0,0 +1,555 @@
package msgp
import (
"bufio"
"encoding/base64"
"encoding/json"
"io"
"strconv"
"unicode/utf8"
)
var (
null = []byte("null")
hex = []byte("0123456789abcdef")
)
var defuns [_maxtype]func(jsWriter, *Reader) (int, error)
// note: there is an initialization loop if
// this isn't set up during init()
func init() {
// since none of these functions are inline-able,
// there is not much of a penalty to the indirect
// call. however, this is best expressed as a jump-table...
defuns = [_maxtype]func(jsWriter, *Reader) (int, error){
StrType: rwString,
BinType: rwBytes,
MapType: rwMap,
ArrayType: rwArray,
Float64Type: rwFloat64,
Float32Type: rwFloat32,
BoolType: rwBool,
IntType: rwInt,
UintType: rwUint,
NilType: rwNil,
ExtensionType: rwExtension,
Complex64Type: rwExtension,
Complex128Type: rwExtension,
TimeType: rwTime,
DurationType: rwDuration,
}
}
// this is the interface
// used to write json
type jsWriter interface {
io.Writer
io.ByteWriter
WriteString(string) (int, error)
}
// CopyToJSON reads MessagePack from 'src' and copies it
// as JSON to 'dst' until EOF.
func CopyToJSON(dst io.Writer, src io.Reader) (n int64, err error) {
r := NewReader(src)
n, err = r.WriteToJSON(dst)
freeR(r)
return
}
// WriteToJSON translates MessagePack from 'r' and writes it as
// JSON to 'w' until the underlying reader returns io.EOF. It returns
// the number of bytes written, and an error if it stopped before EOF.
func (r *Reader) WriteToJSON(w io.Writer) (n int64, err error) {
var j jsWriter
var bf *bufio.Writer
if jsw, ok := w.(jsWriter); ok {
j = jsw
} else {
bf = bufio.NewWriter(w)
j = bf
}
var nn int
for err == nil {
nn, err = rwNext(j, r)
n += int64(nn)
}
if err != io.EOF {
if bf != nil {
bf.Flush()
}
return
}
err = nil
if bf != nil {
err = bf.Flush()
}
return
}
func rwNext(w jsWriter, src *Reader) (int, error) {
t, err := src.NextType()
if err != nil {
return 0, err
}
return defuns[t](w, src)
}
func rwMap(dst jsWriter, src *Reader) (n int, err error) {
var comma bool
var sz uint32
var field []byte
sz, err = src.ReadMapHeader()
if err != nil {
return
}
if sz == 0 {
return dst.WriteString("{}")
}
err = dst.WriteByte('{')
if err != nil {
return
}
n++
var nn int
for i := uint32(0); i < sz; i++ {
if comma {
err = dst.WriteByte(',')
if err != nil {
return
}
n++
}
field, err = src.ReadMapKeyPtr()
if err != nil {
return
}
nn, err = rwquoted(dst, field)
n += nn
if err != nil {
return
}
err = dst.WriteByte(':')
if err != nil {
return
}
n++
nn, err = rwNext(dst, src)
n += nn
if err != nil {
return
}
if !comma {
comma = true
}
}
err = dst.WriteByte('}')
if err != nil {
return
}
n++
return
}
func rwArray(dst jsWriter, src *Reader) (n int, err error) {
err = dst.WriteByte('[')
if err != nil {
return
}
var sz uint32
var nn int
sz, err = src.ReadArrayHeader()
if err != nil {
return
}
comma := false
for i := uint32(0); i < sz; i++ {
if comma {
err = dst.WriteByte(',')
if err != nil {
return
}
n++
}
nn, err = rwNext(dst, src)
n += nn
if err != nil {
return
}
comma = true
}
err = dst.WriteByte(']')
if err != nil {
return
}
n++
return
}
func rwNil(dst jsWriter, src *Reader) (int, error) {
err := src.ReadNil()
if err != nil {
return 0, err
}
return dst.Write(null)
}
func rwFloat32(dst jsWriter, src *Reader) (int, error) {
f, err := src.ReadFloat32()
if err != nil {
return 0, err
}
src.scratch = strconv.AppendFloat(src.scratch[:0], float64(f), 'f', -1, 32)
return dst.Write(src.scratch)
}
func rwFloat64(dst jsWriter, src *Reader) (int, error) {
f, err := src.ReadFloat64()
if err != nil {
return 0, err
}
src.scratch = strconv.AppendFloat(src.scratch[:0], f, 'f', -1, 64)
return dst.Write(src.scratch)
}
func rwInt(dst jsWriter, src *Reader) (int, error) {
i, err := src.ReadInt64()
if err != nil {
return 0, err
}
src.scratch = strconv.AppendInt(src.scratch[:0], i, 10)
return dst.Write(src.scratch)
}
func rwUint(dst jsWriter, src *Reader) (int, error) {
u, err := src.ReadUint64()
if err != nil {
return 0, err
}
src.scratch = strconv.AppendUint(src.scratch[:0], u, 10)
return dst.Write(src.scratch)
}
func rwBool(dst jsWriter, src *Reader) (int, error) {
b, err := src.ReadBool()
if err != nil {
return 0, err
}
if b {
return dst.WriteString("true")
}
return dst.WriteString("false")
}
func rwTime(dst jsWriter, src *Reader) (int, error) {
t, err := src.ReadTime()
if err != nil {
return 0, err
}
bts, err := t.MarshalJSON()
if err != nil {
return 0, err
}
return dst.Write(bts)
}
func rwDuration(dst jsWriter, src *Reader) (int, error) {
dur, err := src.ReadDuration()
if err != nil {
return 0, err
}
bts, err := durationMarshalJSON(dur)
if err != nil {
return 0, err
}
return dst.Write(bts)
}
func rwExtension(dst jsWriter, src *Reader) (n int, err error) {
et, err := src.peekExtensionType()
if err != nil {
return 0, err
}
// registered extensions can override
// the JSON encoding
if j, ok := extensionReg[et]; ok {
var bts []byte
e := j()
err = src.ReadExtension(e)
if err != nil {
return
}
bts, err = json.Marshal(e)
if err != nil {
return
}
return dst.Write(bts)
}
e := RawExtension{}
e.Type = et
err = src.ReadExtension(&e)
if err != nil {
return
}
var nn int
err = dst.WriteByte('{')
if err != nil {
return
}
n++
nn, err = dst.WriteString(`"type":`)
n += nn
if err != nil {
return
}
src.scratch = strconv.AppendInt(src.scratch[0:0], int64(e.Type), 10)
nn, err = dst.Write(src.scratch)
n += nn
if err != nil {
return
}
nn, err = dst.WriteString(`,"data":"`)
n += nn
if err != nil {
return
}
enc := base64.NewEncoder(base64.StdEncoding, dst)
nn, err = enc.Write(e.Data)
n += nn
if err != nil {
return
}
err = enc.Close()
if err != nil {
return
}
nn, err = dst.WriteString(`"}`)
n += nn
return
}
func rwString(dst jsWriter, src *Reader) (n int, err error) {
var p []byte
p, err = src.R.Peek(1)
if err != nil {
return
}
lead := p[0]
var read int
if isfixstr(lead) {
read = int(rfixstr(lead))
src.R.Skip(1)
goto write
}
switch lead {
case mstr8:
p, err = src.R.Next(2)
if err != nil {
return
}
read = int(uint8(p[1]))
case mstr16:
p, err = src.R.Next(3)
if err != nil {
return
}
read = int(big.Uint16(p[1:]))
case mstr32:
p, err = src.R.Next(5)
if err != nil {
return
}
read = int(big.Uint32(p[1:]))
default:
err = badPrefix(StrType, lead)
return
}
write:
p, err = src.R.Next(read)
if err != nil {
return
}
n, err = rwquoted(dst, p)
return
}
func rwBytes(dst jsWriter, src *Reader) (n int, err error) {
var nn int
err = dst.WriteByte('"')
if err != nil {
return
}
n++
src.scratch, err = src.ReadBytes(src.scratch[:0])
if err != nil {
return
}
enc := base64.NewEncoder(base64.StdEncoding, dst)
nn, err = enc.Write(src.scratch)
n += nn
if err != nil {
return
}
err = enc.Close()
if err != nil {
return
}
err = dst.WriteByte('"')
if err != nil {
return
}
n++
return
}
// Below (c) The Go Authors, 2009-2014
// Subject to the BSD-style license found at http://golang.org
//
// see: encoding/json/encode.go:(*encodeState).stringbytes()
func rwquoted(dst jsWriter, s []byte) (n int, err error) {
var nn int
err = dst.WriteByte('"')
if err != nil {
return
}
n++
start := 0
for i := 0; i < len(s); {
if b := s[i]; b < utf8.RuneSelf {
if 0x20 <= b && b != '\\' && b != '"' && b != '<' && b != '>' && b != '&' {
i++
continue
}
if start < i {
nn, err = dst.Write(s[start:i])
n += nn
if err != nil {
return
}
}
switch b {
case '\\', '"':
err = dst.WriteByte('\\')
if err != nil {
return
}
n++
err = dst.WriteByte(b)
if err != nil {
return
}
n++
case '\n':
err = dst.WriteByte('\\')
if err != nil {
return
}
n++
err = dst.WriteByte('n')
if err != nil {
return
}
n++
case '\r':
err = dst.WriteByte('\\')
if err != nil {
return
}
n++
err = dst.WriteByte('r')
if err != nil {
return
}
n++
default:
nn, err = dst.WriteString(`\u00`)
n += nn
if err != nil {
return
}
err = dst.WriteByte(hex[b>>4])
if err != nil {
return
}
n++
err = dst.WriteByte(hex[b&0xF])
if err != nil {
return
}
n++
}
i++
start = i
continue
}
c, size := utf8.DecodeRune(s[i:])
if c == utf8.RuneError && size == 1 {
if start < i {
nn, err = dst.Write(s[start:i])
n += nn
if err != nil {
return
}
nn, err = dst.WriteString(`\ufffd`)
n += nn
if err != nil {
return
}
i += size
start = i
continue
}
}
if c == '\u2028' || c == '\u2029' {
if start < i {
nn, err = dst.Write(s[start:i])
n += nn
if err != nil {
return
}
nn, err = dst.WriteString(`\u202`)
n += nn
if err != nil {
return
}
err = dst.WriteByte(hex[c&0xF])
if err != nil {
return
}
n++
}
}
i += size
}
if start < len(s) {
nn, err = dst.Write(s[start:])
n += nn
if err != nil {
return
}
}
err = dst.WriteByte('"')
if err != nil {
return
}
n++
return
}

View File

@@ -0,0 +1,416 @@
package msgp
import (
"bufio"
"bytes"
"encoding/base64"
"encoding/json"
"io"
"strconv"
"time"
)
var unfuns [_maxtype]func(jsWriter, []byte, []byte) ([]byte, []byte, error)
func init() {
// NOTE(pmh): this is best expressed as a jump table,
// but gc doesn't do that yet. revisit post-go1.5.
unfuns = [_maxtype]func(jsWriter, []byte, []byte) ([]byte, []byte, error){
StrType: rwStringBytes,
BinType: rwBytesBytes,
MapType: rwMapBytes,
ArrayType: rwArrayBytes,
Float64Type: rwFloat64Bytes,
Float32Type: rwFloat32Bytes,
BoolType: rwBoolBytes,
IntType: rwIntBytes,
UintType: rwUintBytes,
NilType: rwNullBytes,
ExtensionType: rwExtensionBytes,
Complex64Type: rwExtensionBytes,
Complex128Type: rwExtensionBytes,
TimeType: rwTimeBytes,
}
}
// UnmarshalAsJSON takes raw messagepack and writes
// it as JSON to 'w'. If an error is returned, the
// bytes not translated will also be returned. If
// no errors are encountered, the length of the returned
// slice will be zero.
func UnmarshalAsJSON(w io.Writer, msg []byte) ([]byte, error) {
var (
scratch []byte
cast bool
dst jsWriter
err error
)
if jsw, ok := w.(jsWriter); ok {
dst = jsw
cast = true
} else {
dst = bufio.NewWriterSize(w, 512)
}
for len(msg) > 0 && err == nil {
msg, scratch, err = writeNext(dst, msg, scratch)
}
if !cast && err == nil {
err = dst.(*bufio.Writer).Flush()
}
return msg, err
}
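// exampleMsgpackToJSON is an illustrative sketch (not part of the upstream
// API): it translates one raw MessagePack message to a JSON string in
// memory using UnmarshalAsJSON. 'msg' is assumed to hold a complete object.
func exampleMsgpackToJSON(msg []byte) (string, error) {
	var out bytes.Buffer
	_, err := UnmarshalAsJSON(&out, msg)
	return out.String(), err
}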
func writeNext(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
if len(msg) < 1 {
return msg, scratch, ErrShortBytes
}
t := getType(msg[0])
if t == InvalidType {
return msg, scratch, InvalidPrefixError(msg[0])
}
if t == ExtensionType {
et, err := peekExtension(msg)
if err != nil {
return nil, scratch, err
}
if et == TimeExtension {
t = TimeType
} else if et == DurationExtension {
t = DurationType
}
}
return unfuns[t](w, msg, scratch)
}
func rwArrayBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
sz, msg, err := nbs.ReadArrayHeaderBytes(msg)
if err != nil {
return msg, scratch, err
}
err = w.WriteByte('[')
if err != nil {
return msg, scratch, err
}
for i := uint32(0); i < sz; i++ {
if i != 0 {
err = w.WriteByte(',')
if err != nil {
return msg, scratch, err
}
}
msg, scratch, err = writeNext(w, msg, scratch)
if err != nil {
return msg, scratch, err
}
}
err = w.WriteByte(']')
return msg, scratch, err
}
func rwMapBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
sz, msg, err := nbs.ReadMapHeaderBytes(msg)
if err != nil {
return msg, scratch, err
}
err = w.WriteByte('{')
if err != nil {
return msg, scratch, err
}
for i := uint32(0); i < sz; i++ {
if i != 0 {
err = w.WriteByte(',')
if err != nil {
return msg, scratch, err
}
}
msg, scratch, err = rwMapKeyBytes(w, msg, scratch)
if err != nil {
return msg, scratch, err
}
err = w.WriteByte(':')
if err != nil {
return msg, scratch, err
}
msg, scratch, err = writeNext(w, msg, scratch)
if err != nil {
return msg, scratch, err
}
}
err = w.WriteByte('}')
return msg, scratch, err
}
func rwMapKeyBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
msg, scratch, err := rwStringBytes(w, msg, scratch)
if err != nil {
if tperr, ok := err.(TypeError); ok && tperr.Encoded == BinType {
return rwBytesBytes(w, msg, scratch)
}
}
return msg, scratch, err
}
func rwStringBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
str, msg, err := nbs.ReadStringZC(msg)
if err != nil {
return msg, scratch, err
}
_, err = rwquoted(w, str)
return msg, scratch, err
}
func rwBytesBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
bts, msg, err := nbs.ReadBytesZC(msg)
if err != nil {
return msg, scratch, err
}
l := base64.StdEncoding.EncodedLen(len(bts))
if cap(scratch) >= l {
scratch = scratch[0:l]
} else {
scratch = make([]byte, l)
}
base64.StdEncoding.Encode(scratch, bts)
err = w.WriteByte('"')
if err != nil {
return msg, scratch, err
}
_, err = w.Write(scratch)
if err != nil {
return msg, scratch, err
}
err = w.WriteByte('"')
return msg, scratch, err
}
func rwNullBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
msg, err := nbs.ReadNilBytes(msg)
if err != nil {
return msg, scratch, err
}
_, err = w.Write(null)
return msg, scratch, err
}
func rwBoolBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
b, msg, err := nbs.ReadBoolBytes(msg)
if err != nil {
return msg, scratch, err
}
if b {
_, err = w.WriteString("true")
return msg, scratch, err
}
_, err = w.WriteString("false")
return msg, scratch, err
}
func rwIntBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
i, msg, err := nbs.ReadInt64Bytes(msg)
if err != nil {
return msg, scratch, err
}
scratch = strconv.AppendInt(scratch[0:0], i, 10)
_, err = w.Write(scratch)
return msg, scratch, err
}
func rwUintBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
u, msg, err := nbs.ReadUint64Bytes(msg)
if err != nil {
return msg, scratch, err
}
scratch = strconv.AppendUint(scratch[0:0], u, 10)
_, err = w.Write(scratch)
return msg, scratch, err
}
func rwFloatBytes(w jsWriter, msg []byte, f64 bool, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
var f float64
var err error
var sz int
if f64 {
sz = 64
f, msg, err = nbs.ReadFloat64Bytes(msg)
} else {
sz = 32
var v float32
v, msg, err = nbs.ReadFloat32Bytes(msg)
f = float64(v)
}
if err != nil {
return msg, scratch, err
}
scratch = strconv.AppendFloat(scratch, f, 'f', -1, sz)
_, err = w.Write(scratch)
return msg, scratch, err
}
func rwFloat32Bytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
var f float32
var err error
f, msg, err = nbs.ReadFloat32Bytes(msg)
if err != nil {
return msg, scratch, err
}
scratch = strconv.AppendFloat(scratch[:0], float64(f), 'f', -1, 32)
_, err = w.Write(scratch)
return msg, scratch, err
}
func rwFloat64Bytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
var f float64
var err error
f, msg, err = nbs.ReadFloat64Bytes(msg)
if err != nil {
return msg, scratch, err
}
scratch = strconv.AppendFloat(scratch[:0], f, 'f', -1, 64)
_, err = w.Write(scratch)
return msg, scratch, err
}
func rwTimeBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
var t time.Time
var err error
t, msg, err = nbs.ReadTimeBytes(msg)
if err != nil {
return msg, scratch, err
}
bts, err := t.MarshalJSON()
if err != nil {
return msg, scratch, err
}
_, err = w.Write(bts)
return msg, scratch, err
}
func rwExtensionBytes(w jsWriter, msg []byte, scratch []byte) ([]byte, []byte, error) {
var nbs *NilBitsStack
var err error
var et int8
et, err = peekExtension(msg)
if err != nil {
return msg, scratch, err
}
// if it's time.Time
if et == TimeExtension {
var tm time.Time
tm, msg, err = nbs.ReadTimeBytes(msg)
if err != nil {
return msg, scratch, err
}
bts, err := tm.MarshalJSON()
if err != nil {
return msg, scratch, err
}
_, err = w.Write(bts)
return msg, scratch, err
} else if et == DurationExtension {
var dur time.Duration
dur, msg, err = nbs.ReadDurationBytes(msg)
if err != nil {
return msg, scratch, err
}
_, err := w.WriteString(`{"time.Duration":`)
if err != nil {
return msg, scratch, err
}
scratch = strconv.AppendInt(scratch[0:0], int64(dur), 10)
_, err = w.Write(scratch)
if err != nil {
return msg, scratch, err
}
_, err = w.WriteString(`}`)
return msg, scratch, err
}
// if the extension is registered,
// use its canonical JSON form
if f, ok := extensionReg[et]; ok {
e := f()
msg, err = nbs.ReadExtensionBytes(msg, e)
if err != nil {
return msg, scratch, err
}
bts, err := json.Marshal(e)
if err != nil {
return msg, scratch, err
}
_, err = w.Write(bts)
return msg, scratch, err
}
// otherwise, write `{"type": <num>, "data": "<base64data>"}`
r := RawExtension{}
r.Type = et
msg, err = nbs.ReadExtensionBytes(msg, &r)
if err != nil {
return msg, scratch, err
}
scratch, err = writeExt(w, r, scratch)
return msg, scratch, err
}
func durationMarshalJSON(dur time.Duration) ([]byte, error) {
var w bytes.Buffer
_, err := w.WriteString(`{"time.Duration":`)
if err != nil {
return nil, err
}
scratch := make([]byte, 32)
scratch = strconv.AppendInt(scratch[0:0], int64(dur), 10)
_, err = w.Write(scratch)
if err != nil {
return nil, err
}
_, err = w.WriteString(`}`)
return w.Bytes(), err
}
func writeExt(w jsWriter, r RawExtension, scratch []byte) ([]byte, error) {
_, err := w.WriteString(`{"type":`)
if err != nil {
return scratch, err
}
scratch = strconv.AppendInt(scratch[0:0], int64(r.Type), 10)
_, err = w.Write(scratch)
if err != nil {
return scratch, err
}
_, err = w.WriteString(`,"data":"`)
if err != nil {
return scratch, err
}
l := base64.StdEncoding.EncodedLen(len(r.Data))
if cap(scratch) >= l {
scratch = scratch[0:l]
} else {
scratch = make([]byte, l)
}
base64.StdEncoding.Encode(scratch, r.Data)
_, err = w.Write(scratch)
if err != nil {
return scratch, err
}
_, err = w.WriteString(`"}`)
return scratch, err
}

126
vendor/github.com/glycerine/greenpack/msgp/nilbits.go generated vendored Normal file
View File

@@ -0,0 +1,126 @@
package msgp
import (
"fmt"
//"runtime/debug"
)
const MaxNestedStructPointerDepth = 8
// NilBitsStack is a helper for Unmarshal
// methods to track where we are when
// deserializing from empty/nil/missing
// fields.
type NilBitsStack struct {
// simulate getting nils on the wire
AlwaysNil bool
LifoAlwaysNil int // depth of nested always-nil scopes; formerly a LIFO of bools, now just a counter.
LifoBts [MaxNestedStructPointerDepth][]byte // caps our static tree depth at 8, but avoids all memory allocation during the decode hotpath.
// UnsafeZeroCopy will make strings point to the
// original msgpack buffers. This is unsafe because
// if the buffer changes the string will change/be
// invalid/not protected against re-use. But for
// processing and disposing of single messages, one at a time,
// without re-using any part of a message (or making a copy of strings explicitly with copy()
// if you must) then we can avoid all allocations for strings.
UnsafeZeroCopy bool
}
func (r *NilBitsStack) Init(cfg *RuntimeConfig) {
if cfg != nil {
r.UnsafeZeroCopy = cfg.UnsafeZeroCopy
}
}
func (r *NilBitsStack) IsNil(bts []byte) bool {
if r.AlwaysNil {
return true
}
if len(bts) != 0 && bts[0] == mnil {
return true
}
return false
}
// OnlyNilSlice is a slice that contains
// only the msgpack nil (0xc0) bytes.
var OnlyNilSlice = []byte{mnil}
// AlwaysNilString returns a string representation
// of the internal state of the NilBitsStack for
// debugging purposes.
func (r *NilBitsStack) AlwaysNilString() string {
s := "bottom: "
for i := 0; i < r.LifoAlwaysNil; i++ {
s += "T"
}
/* for _, v := range r.LifoAlwaysNil {
if v {
s += "T"
} else {
s += "f"
}
}
*/
return s
}
// PushAlwaysNil will set r.AlwaysNil to true
// and store bts on the internal stack.
func (r *NilBitsStack) PushAlwaysNil(bts []byte) []byte {
//fmt.Printf("PushAlwaysNil(), pre we are '%v'. Called from: stack is '%v'\n", r.AlwaysNilString(), string(debug.Stack()))
// save current state
if r.LifoAlwaysNil == MaxNestedStructPointerDepth {
panic(fmt.Sprintf("we hit our maximum nested struct-inside-struct depth! you must recompile msgp with github.com/glycerine/greenpack/msgp/nilbits.go: MaxNestedStructPointerDepth set to greater than the current value of %v, and then regenerate your msgp Unmarshaling code.", MaxNestedStructPointerDepth))
}
r.LifoBts[r.LifoAlwaysNil] = bts
r.LifoAlwaysNil++
// set reader r to always return nils
r.AlwaysNil = true
return OnlyNilSlice
}
// PopAlwaysNil pops the last []byte off the internal
// stack and returns it. If the stack is empty
// we panic.
func (r *NilBitsStack) PopAlwaysNil() (bts []byte) {
//fmt.Printf("my NilBitsTack.PopAlwaysNil() called!! ... stack is '%v'\n", string(debug.Stack()))
// defer func() {
// fmt.Printf("len of bts returned by PopAlwaysNil: %v, and debug string: '%v'\n",
// len(bts), r.AlwaysNilString())
// }()
n := r.LifoAlwaysNil
if n == 0 {
panic("PopAlwaysNil called on empty lifo")
}
if n < 0 {
panic("PopAlwaysNil called on illegal-less-thab-empty lifo")
}
bts = r.LifoBts[n-1]
r.LifoAlwaysNil--
if r.LifoAlwaysNil == 0 {
r.AlwaysNil = false
}
return bts
}
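// exampleNilScope is an illustrative sketch (not part of the upstream API)
// of how generated Unmarshal code uses the stack: while a nil pointer
// field is being decoded, the real buffer is parked and nested reads see
// only the 0xc0 nil byte until the matching pop restores it.
func exampleNilScope(r *NilBitsStack, bts []byte) []byte {
	nilView := r.PushAlwaysNil(bts) // nested decodes read nils from nilView
	_ = nilView                     // ... decode the nested struct here ...
	return r.PopAlwaysNil()         // hands back the parked buffer; AlwaysNil clears once the stack empties
}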
func ShowFound(found []bool) string {
s := "["
for i := range found {
if found[i] {
s += "1,"
} else {
s += "0,"
}
}
s += "]"
return s
}

268
vendor/github.com/glycerine/greenpack/msgp/number.go generated vendored Normal file
View File

@@ -0,0 +1,268 @@
package msgp
import (
"math"
"strconv"
)
// The portable parts of the Number implementation
// Number can be
// an int64, uint64, float32,
// or float64 internally.
// It can decode itself
// from any of the native
// messagepack number types.
// The zero-value of Number
// is Int(0). Using the equality
// operator with Number compares
// both the type and the value
// of the number.
type Number struct {
// internally, this
// is just a tagged union.
// the raw bits of the number
// are stored the same way regardless.
bits uint64
typ Type
}
// AsInt sets the number to an int64.
func (n *Number) AsInt(i int64) {
// we always store int(0)
// as {0, InvalidType} in
// order to preserve
// the behavior of the == operator
if i == 0 {
n.typ = InvalidType
n.bits = 0
return
}
n.typ = IntType
n.bits = uint64(i)
}
// AsUint sets the number to a uint64.
func (n *Number) AsUint(u uint64) {
n.typ = UintType
n.bits = u
}
// AsFloat32 sets the value of the number
// to a float32.
func (n *Number) AsFloat32(f float32) {
n.typ = Float32Type
n.bits = uint64(math.Float32bits(f))
}
// AsFloat64 sets the value of the
// number to a float64.
func (n *Number) AsFloat64(f float64) {
n.typ = Float64Type
n.bits = math.Float64bits(f)
}
// Int casts the number as an int64, and
// returns whether or not that was the
// underlying type.
func (n *Number) Int() (int64, bool) {
return int64(n.bits), n.typ == IntType || n.typ == InvalidType
}
// Uint casts the number as a uint64, and returns
// whether or not that was the underlying type.
func (n *Number) Uint() (uint64, bool) {
return n.bits, n.typ == UintType
}
// Float casts the number to a float64, and
// returns whether or not that was the underlying
// type (either a float64 or a float32).
func (n *Number) Float() (float64, bool) {
switch n.typ {
case Float32Type:
return float64(math.Float32frombits(uint32(n.bits))), true
case Float64Type:
return math.Float64frombits(n.bits), true
default:
return 0.0, false
}
}
// Type will return one of:
// Float64Type, Float32Type, UintType, or IntType.
func (n *Number) Type() Type {
if n.typ == InvalidType {
return IntType
}
return n.typ
}
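// exampleNumber is an illustrative sketch (not part of the upstream API):
// the zero Number behaves as the integer 0, and setting a float records
// both the type tag and the raw bits for a lossless round trip.
func exampleNumber() (int64, bool, float64, bool) {
	var n Number
	i, okInt := n.Int() // 0, true for the zero value
	n.AsFloat64(3.5)
	f, okFloat := n.Float() // 3.5, true
	return i, okInt, f, okFloat
}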
// DecodeMsg implements msgp.Decodable
func (n *Number) DecodeMsg(r *Reader) error {
typ, err := r.NextType()
if err != nil {
return err
}
switch typ {
case Float32Type:
f, err := r.ReadFloat32()
if err != nil {
return err
}
n.AsFloat32(f)
return nil
case Float64Type:
f, err := r.ReadFloat64()
if err != nil {
return err
}
n.AsFloat64(f)
return nil
case IntType:
i, err := r.ReadInt64()
if err != nil {
return err
}
n.AsInt(i)
return nil
case UintType:
u, err := r.ReadUint64()
if err != nil {
return err
}
n.AsUint(u)
return nil
default:
return TypeError{Encoded: typ, Method: IntType}
}
}
// UnmarshalMsg implements msgp.Unmarshaler
func (n *Number) UnmarshalMsg(b []byte) ([]byte, error) {
var nbs *NilBitsStack
typ := NextType(b)
switch typ {
case IntType:
i, o, err := nbs.ReadInt64Bytes(b)
if err != nil {
return b, err
}
n.AsInt(i)
return o, nil
case UintType:
u, o, err := nbs.ReadUint64Bytes(b)
if err != nil {
return b, err
}
n.AsUint(u)
return o, nil
case Float64Type:
f, o, err := nbs.ReadFloat64Bytes(b)
if err != nil {
return b, err
}
n.AsFloat64(f)
return o, nil
case Float32Type:
f, o, err := nbs.ReadFloat32Bytes(b)
if err != nil {
return b, err
}
n.AsFloat32(f)
return o, nil
default:
return b, TypeError{Method: IntType, Encoded: typ}
}
}
// MarshalMsg implements msgp.Marshaler
func (n *Number) MarshalMsg(b []byte) ([]byte, error) {
switch n.typ {
case IntType:
return AppendInt64(b, int64(n.bits)), nil
case UintType:
return AppendUint64(b, uint64(n.bits)), nil
case Float64Type:
return AppendFloat64(b, math.Float64frombits(n.bits)), nil
case Float32Type:
return AppendFloat32(b, math.Float32frombits(uint32(n.bits))), nil
default:
return AppendInt64(b, 0), nil
}
}
// EncodeMsg implements msgp.Encodable
func (n *Number) EncodeMsg(w *Writer) error {
switch n.typ {
case IntType:
return w.WriteInt64(int64(n.bits))
case UintType:
return w.WriteUint64(n.bits)
case Float64Type:
return w.WriteFloat64(math.Float64frombits(n.bits))
case Float32Type:
return w.WriteFloat32(math.Float32frombits(uint32(n.bits)))
default:
return w.WriteInt64(0)
}
}
// Msgsize implements msgp.Sizer
func (n *Number) Msgsize() int {
switch n.typ {
case Float32Type:
return Float32Size
case Float64Type:
return Float64Size
case IntType:
return Int64Size
case UintType:
return Uint64Size
default:
return 1 // fixint(0)
}
}
// MarshalJSON implements json.Marshaler
func (n *Number) MarshalJSON() ([]byte, error) {
t := n.Type()
if t == InvalidType {
return []byte{'0'}, nil
}
out := make([]byte, 0, 32)
switch t {
case Float32Type, Float64Type:
f, _ := n.Float()
return strconv.AppendFloat(out, f, 'f', -1, 64), nil
case IntType:
i, _ := n.Int()
return strconv.AppendInt(out, i, 10), nil
case UintType:
u, _ := n.Uint()
return strconv.AppendUint(out, u, 10), nil
default:
panic("(*Number).typ is invalid")
}
}
// String implements fmt.Stringer
func (n *Number) String() string {
switch n.typ {
case InvalidType:
return "0"
case Float32Type, Float64Type:
f, _ := n.Float()
return strconv.FormatFloat(f, 'f', -1, 64)
case IntType:
i, _ := n.Int()
return strconv.FormatInt(i, 10)
case UintType:
u, _ := n.Uint()
return strconv.FormatUint(u, 10)
default:
panic("(*Number).typ is invalid")
}
}
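
A hedged usage sketch of the Number methods above. The import path is assumed from the vendor directory, and AsInt is the setter referenced by DecodeMsg (it is not defined in this excerpt).

```go
package main

import (
	"fmt"

	"github.com/glycerine/greenpack/msgp" // assumed import path, derived from the vendor directory
)

func main() {
	var n msgp.Number
	n.AsInt(-42) // AsInt is the setter referenced by DecodeMsg above (not shown here)

	buf, err := n.MarshalMsg(nil) // append a MessagePack int to an empty slice
	if err != nil {
		panic(err)
	}

	var m msgp.Number
	if _, err := m.UnmarshalMsg(buf); err != nil {
		panic(err)
	}
	fmt.Println(m.String(), m.Type() == msgp.IntType) // -42 true
}
```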

1597
vendor/github.com/glycerine/greenpack/msgp/read.go generated vendored Normal file

File diff suppressed because it is too large Load Diff

1483
vendor/github.com/glycerine/greenpack/msgp/read_bytes.go generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,13 @@
package msgp
type RuntimeConfig struct {
	// UnsafeZeroCopy will make strings point into the
	// original msgpack buffer. This is unsafe because
	// if the buffer changes or is re-used, the strings
	// change with it or become invalid. But when
	// messages are processed and disposed of one at a
	// time, without re-using any part of a message
	// (or while copying strings explicitly with copy()
	// where they must outlive the buffer), this avoids
	// all allocations for strings.
UnsafeZeroCopy bool
}

39
vendor/github.com/glycerine/greenpack/msgp/size.go generated vendored Normal file
View File

@@ -0,0 +1,39 @@
package msgp
// The sizes provided
// are the worst-case
// encoded sizes for
// each type. For variable-
// length types ([]byte, string),
// the total encoded size is
// the prefix size plus the
// length of the object.
const (
Int64Size = 9
IntSize = Int64Size
UintSize = Int64Size
Int8Size = 2
Int16Size = 3
Int32Size = 5
Uint8Size = 2
ByteSize = Uint8Size
Uint16Size = 3
Uint32Size = 5
Uint64Size = Int64Size
Float64Size = 9
Float32Size = 5
Complex64Size = 10
Complex128Size = 18
TimeSize = 15
DurationSize = 12
BoolSize = 1
NilSize = 1
MapHeaderSize = 5
ArrayHeaderSize = 5
BytesPrefixSize = 5
StringPrefixSize = 5
ExtensionPrefixSize = 6
)
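
A hedged illustration of the prefix-plus-length rule above: a hand-written Msgsize-style estimate for a hypothetical two-field record, written as if it lived in this package.

```go
// Worst-case encoded size of a map {"name": <string>, "id": <int64>}.
// The record shape is made up for the example.
func exampleMsgsize(name string) int {
	return MapHeaderSize +
		StringPrefixSize + len("name") + // key
		StringPrefixSize + len(name) + // variable-length value: prefix + length
		StringPrefixSize + len("id") + // key
		Int64Size // fixed-size value
}
```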

40
vendor/github.com/glycerine/greenpack/msgp/unsafe.go generated vendored Normal file
View File

@@ -0,0 +1,40 @@
// +build !appengine
package msgp
import (
"reflect"
"unsafe"
)
// NOTE:
// all of the definition in this file
// should be repeated in appengine.go,
// but without using unsafe
const (
// spec says int and uint are always
// the same size, but that int/uint
// size may not be machine word size
smallint = unsafe.Sizeof(int(0)) == 4
)
// UnsafeString returns the byte slice as a volatile string
// THIS SHOULD ONLY BE USED BY THE CODE GENERATOR.
// THIS IS EVIL CODE.
// YOU HAVE BEEN WARNED.
func UnsafeString(b []byte) string {
return *(*string)(unsafe.Pointer(&reflect.StringHeader{Data: uintptr(unsafe.Pointer(&b[0])), Len: len(b)}))
}
// UnsafeBytes returns the string as a byte slice
// THIS SHOULD ONLY BE USED BY THE CODE GENERATOR.
// THIS IS EVIL CODE.
// YOU HAVE BEEN WARNED.
func UnsafeBytes(s string) []byte {
return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{
Len: len(s),
Cap: len(s),
Data: (*(*reflect.StringHeader)(unsafe.Pointer(&s))).Data,
}))
}

869
vendor/github.com/glycerine/greenpack/msgp/write.go generated vendored Normal file
View File

@@ -0,0 +1,869 @@
package msgp
import (
"errors"
"fmt"
"io"
"math"
"reflect"
"sync"
"time"
)
// Sizer is an interface implemented
// by types that can estimate their
// size when MessagePack encoded.
// This interface is optional, but
// encoding/marshaling implementations
// may use this as a way to pre-allocate
// memory for serialization.
type Sizer interface {
Msgsize() int
}
var (
// Nowhere is an io.Writer to nowhere
Nowhere io.Writer = nwhere{}
btsType = reflect.TypeOf(([]byte)(nil))
writerPool = sync.Pool{
New: func() interface{} {
return &Writer{
buf: make([]byte, 2048),
ptrWrit: make(map[interface{}]int),
}
},
}
)
func popWriter(w io.Writer) *Writer {
wr := writerPool.Get().(*Writer)
wr.Reset(w)
wr.DedupReset()
return wr
}
func pushWriter(wr *Writer) {
wr.w = nil
wr.wloc = 0
writerPool.Put(wr)
}
// freeW frees a writer for use
// by other processes. It is not necessary
// to call freeW on a writer. However, maintaining
// a reference to a *Writer after calling freeW on
// it will cause undefined behavior.
func freeW(w *Writer) { pushWriter(w) }
// Require ensures that cap(old)-len(old) >= extra.
func Require(old []byte, extra int) []byte {
l := len(old)
c := cap(old)
r := l + extra
if c >= r {
return old
} else if l == 0 {
return make([]byte, 0, extra)
}
// the new size is the greater
// of double the old capacity
// and the sum of the old length
// and the number of new bytes
// necessary.
c <<= 1
if c < r {
c = r
}
n := make([]byte, l, c)
copy(n, old)
return n
}
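
A short sketch of how Require pairs with the append-style helpers defined later in this package (the snippet assumes it compiles inside package msgp; appendPoint is a made-up helper).

```go
// Pre-grow a buffer so the next two appends cannot reallocate.
func appendPoint(b []byte, x float64, y int64) []byte {
	b = Require(b, Float64Size+Int64Size) // cap(b)-len(b) >= 18 afterwards
	b = AppendFloat64(b, x)
	b = AppendInt64(b, y)
	return b
}
```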
// nowhere writer
type nwhere struct{}
func (n nwhere) Write(p []byte) (int, error) { return len(p), nil }
// Marshaler is the interface implemented
// by types that know how to marshal themselves
// as MessagePack. MarshalMsg appends the marshalled
// form of the object to the provided
// byte slice, returning the extended
// slice and any errors encountered.
type Marshaler interface {
MarshalMsg([]byte) ([]byte, error)
}
// Encodable is the interface implemented
// by types that know how to write themselves
// as MessagePack using a *msgp.Writer.
type Encodable interface {
EncodeMsg(*Writer) error
}
// Writer is a buffered writer
// that can be used to write
// MessagePack objects to an io.Writer.
// You must call *Writer.Flush() in order
// to flush all of the buffered data
// to the underlying writer.
type Writer struct {
w io.Writer
buf []byte
wloc int
ptrWrit map[interface{}]int
ptrCountNext int
}
// NewWriter returns a new *Writer.
func NewWriter(w io.Writer) *Writer {
if wr, ok := w.(*Writer); ok {
return wr
}
return popWriter(w)
}
// NewWriterSize returns a writer with a custom buffer size.
func NewWriterSize(w io.Writer, sz int) *Writer {
// we must be able to require() 18
// contiguous bytes, so that is the
// practical minimum buffer size
if sz < 18 {
sz = 18
}
return &Writer{
w: w,
buf: make([]byte, sz),
}
}
// Encode encodes an Encodable to an io.Writer.
func Encode(w io.Writer, e Encodable) error {
wr := NewWriter(w)
err := e.EncodeMsg(wr)
if err == nil {
err = wr.Flush()
}
freeW(wr)
return err
}
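
A usage sketch for Encode with a hand-written Encodable. The pair type and the import path are assumptions for the example; generated code would normally supply EncodeMsg.

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/glycerine/greenpack/msgp" // assumed import path, derived from the vendor directory
)

// pair is a hypothetical hand-written Encodable.
type pair struct {
	Key string
	Val int64
}

func (p *pair) EncodeMsg(w *msgp.Writer) error {
	if err := w.WriteMapHeader(1); err != nil {
		return err
	}
	if err := w.WriteString(p.Key); err != nil {
		return err
	}
	return w.WriteInt64(p.Val)
}

func main() {
	var buf bytes.Buffer
	// Encode wraps the io.Writer, calls EncodeMsg, and flushes the buffered Writer.
	if err := msgp.Encode(&buf, &pair{Key: "answer", Val: 42}); err != nil {
		panic(err)
	}
	fmt.Println(buf.Len(), "bytes")
}
```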
func (mw *Writer) flush() error {
if mw.wloc == 0 {
return nil
}
n, err := mw.w.Write(mw.buf[:mw.wloc])
if err != nil {
if n > 0 {
mw.wloc = copy(mw.buf, mw.buf[n:mw.wloc])
}
return err
}
mw.wloc = 0
return nil
}
// Flush flushes all of the buffered
// data to the underlying writer.
func (mw *Writer) Flush() error { return mw.flush() }
// Buffered returns the number of bytes in the write buffer
func (mw *Writer) Buffered() int { return len(mw.buf) - mw.wloc }
func (mw *Writer) avail() int { return len(mw.buf) - mw.wloc }
func (mw *Writer) bufsize() int { return len(mw.buf) }
// NOTE: this should only be called with
// a number that is guaranteed to be less than
// len(mw.buf). typically, it is called with a constant.
//
// NOTE: this is a hot code path
func (mw *Writer) require(n int) (int, error) {
c := len(mw.buf)
wl := mw.wloc
if c-wl < n {
if err := mw.flush(); err != nil {
return 0, err
}
wl = mw.wloc
}
mw.wloc += n
return wl, nil
}
func (mw *Writer) Append(b ...byte) error {
if mw.avail() < len(b) {
err := mw.flush()
if err != nil {
return err
}
}
mw.wloc += copy(mw.buf[mw.wloc:], b)
return nil
}
// push one byte onto the buffer
//
// NOTE: this is a hot code path
func (mw *Writer) push(b byte) error {
if mw.wloc == len(mw.buf) {
if err := mw.flush(); err != nil {
return err
}
}
mw.buf[mw.wloc] = b
mw.wloc++
return nil
}
func (mw *Writer) prefix8(b byte, u uint8) error {
const need = 2
if len(mw.buf)-mw.wloc < need {
if err := mw.flush(); err != nil {
return err
}
}
prefixu8(mw.buf[mw.wloc:], b, u)
mw.wloc += need
return nil
}
func (mw *Writer) prefix16(b byte, u uint16) error {
const need = 3
if len(mw.buf)-mw.wloc < need {
if err := mw.flush(); err != nil {
return err
}
}
prefixu16(mw.buf[mw.wloc:], b, u)
mw.wloc += need
return nil
}
func (mw *Writer) prefix32(b byte, u uint32) error {
const need = 5
if len(mw.buf)-mw.wloc < need {
if err := mw.flush(); err != nil {
return err
}
}
prefixu32(mw.buf[mw.wloc:], b, u)
mw.wloc += need
return nil
}
func (mw *Writer) prefix64(b byte, u uint64) error {
const need = 9
if len(mw.buf)-mw.wloc < need {
if err := mw.flush(); err != nil {
return err
}
}
prefixu64(mw.buf[mw.wloc:], b, u)
mw.wloc += need
return nil
}
// Write implements io.Writer, and writes
// data directly to the buffer.
func (mw *Writer) Write(p []byte) (int, error) {
l := len(p)
if mw.avail() < l {
if err := mw.flush(); err != nil {
return 0, err
}
if l > len(mw.buf) {
return mw.w.Write(p)
}
}
mw.wloc += copy(mw.buf[mw.wloc:], p)
return l, nil
}
// writeString writes a string to the buffer, analogous to io.WriteString
func (mw *Writer) writeString(s string) error {
l := len(s)
if mw.avail() < l {
if err := mw.flush(); err != nil {
return err
}
if l > len(mw.buf) {
_, err := io.WriteString(mw.w, s)
return err
}
}
mw.wloc += copy(mw.buf[mw.wloc:], s)
return nil
}
// Reset changes the underlying writer used by the Writer
func (mw *Writer) Reset(w io.Writer) {
mw.buf = mw.buf[:cap(mw.buf)]
mw.w = w
mw.wloc = 0
}
// WriteMapHeader writes a map header of the given
// size to the writer
func (mw *Writer) WriteMapHeader(sz uint32) error {
switch {
case sz <= 15:
return mw.push(wfixmap(uint8(sz)))
case sz <= math.MaxUint16:
return mw.prefix16(mmap16, uint16(sz))
default:
return mw.prefix32(mmap32, sz)
}
}
// WriteArrayHeader writes an array header of the
// given size to the writer
func (mw *Writer) WriteArrayHeader(sz uint32) error {
switch {
case sz <= 15:
return mw.push(wfixarray(uint8(sz)))
case sz <= math.MaxUint16:
return mw.prefix16(marray16, uint16(sz))
default:
return mw.prefix32(marray32, sz)
}
}
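
A sketch of the contract behind the two header writers above: the header is written first, and exactly that many elements (or key/value pairs) must follow. Written as if it lived in this package; writeTags is a made-up helper.

```go
// Encode a []string as a MessagePack array by hand.
func writeTags(w *Writer, tags []string) error {
	if err := w.WriteArrayHeader(uint32(len(tags))); err != nil {
		return err
	}
	for _, t := range tags {
		if err := w.WriteString(t); err != nil {
			return err
		}
	}
	return nil
}
```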
// WriteNil writes a nil byte to the buffer
func (mw *Writer) WriteNil() error {
return mw.push(mnil)
}
// WriteFloat64 writes a float64 to the writer
func (mw *Writer) WriteFloat64(f float64) error {
return mw.prefix64(mfloat64, math.Float64bits(f))
}
// WriteFloat32 writes a float32 to the writer
func (mw *Writer) WriteFloat32(f float32) error {
return mw.prefix32(mfloat32, math.Float32bits(f))
}
// WriteInt64 writes an int64 to the writer
func (mw *Writer) WriteInt64(i int64) error {
if i >= 0 {
switch {
case i <= math.MaxInt8:
return mw.push(wfixint(uint8(i)))
case i <= math.MaxInt16:
return mw.prefix16(mint16, uint16(i))
case i <= math.MaxInt32:
return mw.prefix32(mint32, uint32(i))
default:
return mw.prefix64(mint64, uint64(i))
}
}
switch {
case i >= -32:
return mw.push(wnfixint(int8(i)))
case i >= math.MinInt8:
return mw.prefix8(mint8, uint8(i))
case i >= math.MinInt16:
return mw.prefix16(mint16, uint16(i))
case i >= math.MinInt32:
return mw.prefix32(mint32, uint32(i))
default:
return mw.prefix64(mint64, uint64(i))
}
}
// WriteInt8 writes an int8 to the writer
func (mw *Writer) WriteInt8(i int8) error { return mw.WriteInt64(int64(i)) }
// WriteInt16 writes an int16 to the writer
func (mw *Writer) WriteInt16(i int16) error { return mw.WriteInt64(int64(i)) }
// WriteInt32 writes an int32 to the writer
func (mw *Writer) WriteInt32(i int32) error { return mw.WriteInt64(int64(i)) }
// WriteInt writes an int to the writer
func (mw *Writer) WriteInt(i int) error { return mw.WriteInt64(int64(i)) }
// WriteUint64 writes a uint64 to the writer
func (mw *Writer) WriteUint64(u uint64) error {
switch {
case u <= (1<<7)-1:
return mw.push(wfixint(uint8(u)))
case u <= math.MaxUint8:
return mw.prefix8(muint8, uint8(u))
case u <= math.MaxUint16:
return mw.prefix16(muint16, uint16(u))
case u <= math.MaxUint32:
return mw.prefix32(muint32, uint32(u))
default:
return mw.prefix64(muint64, u)
}
}
// WriteByte is analogous to WriteUint8
func (mw *Writer) WriteByte(u byte) error { return mw.WriteUint8(uint8(u)) }
// WriteUint8 writes a uint8 to the writer
func (mw *Writer) WriteUint8(u uint8) error { return mw.WriteUint64(uint64(u)) }
// WriteUint16 writes a uint16 to the writer
func (mw *Writer) WriteUint16(u uint16) error { return mw.WriteUint64(uint64(u)) }
// WriteUint32 writes a uint32 to the writer
func (mw *Writer) WriteUint32(u uint32) error { return mw.WriteUint64(uint64(u)) }
// WriteUint writes a uint to the writer
func (mw *Writer) WriteUint(u uint) error { return mw.WriteUint64(uint64(u)) }
// WriteBytes writes binary as 'bin' to the writer
func (mw *Writer) WriteBytes(b []byte) error {
sz := uint32(len(b))
var err error
switch {
case sz <= math.MaxUint8:
err = mw.prefix8(mbin8, uint8(sz))
case sz <= math.MaxUint16:
err = mw.prefix16(mbin16, uint16(sz))
default:
err = mw.prefix32(mbin32, sz)
}
if err != nil {
return err
}
_, err = mw.Write(b)
return err
}
// WriteBytesHeader writes just the size header
// of a MessagePack 'bin' object. The user is responsible
// for then writing 'sz' more bytes into the stream.
func (mw *Writer) WriteBytesHeader(sz uint32) error {
switch {
case sz <= math.MaxUint8:
return mw.prefix8(mbin8, uint8(sz))
case sz <= math.MaxUint16:
return mw.prefix16(mbin16, uint16(sz))
default:
return mw.prefix32(mbin32, sz)
}
}
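
Because *Writer itself implements io.Writer (see Write below), WriteBytesHeader lets a large payload be streamed after the header instead of being held in one []byte. A hedged sketch; streamBlob and its arguments are made up for the example.

```go
// Stream exactly 'size' bytes from r as a single MessagePack 'bin' object.
func streamBlob(w *Writer, r io.Reader, size uint32) error {
	if err := w.WriteBytesHeader(size); err != nil {
		return err
	}
	_, err := io.CopyN(w, r, int64(size)) // the raw payload follows the header
	return err
}
```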
// WriteBool writes a bool to the writer
func (mw *Writer) WriteBool(b bool) error {
if b {
return mw.push(mtrue)
}
return mw.push(mfalse)
}
// WriteString writes a messagepack string to the writer.
// (This is NOT an implementation of io.StringWriter)
func (mw *Writer) WriteString(s string) error {
sz := uint32(len(s))
var err error
switch {
case sz <= 31:
err = mw.push(wfixstr(uint8(sz)))
case sz <= math.MaxUint8:
err = mw.prefix8(mstr8, uint8(sz))
case sz <= math.MaxUint16:
err = mw.prefix16(mstr16, uint16(sz))
default:
err = mw.prefix32(mstr32, sz)
}
if err != nil {
return err
}
return mw.writeString(s)
}
// WriteStringHeader writes just the string size
// header of a MessagePack 'str' object. The user
// is responsible for writing 'sz' more valid UTF-8
// bytes to the stream.
func (mw *Writer) WriteStringHeader(sz uint32) error {
switch {
case sz <= 31:
return mw.push(wfixstr(uint8(sz)))
case sz <= math.MaxUint8:
return mw.prefix8(mstr8, uint8(sz))
case sz <= math.MaxUint16:
return mw.prefix16(mstr16, uint16(sz))
default:
return mw.prefix32(mstr32, sz)
}
}
// WriteStringFromBytes writes a 'str' object
// from a []byte.
func (mw *Writer) WriteStringFromBytes(str []byte) error {
sz := uint32(len(str))
var err error
switch {
case sz <= 31:
err = mw.push(wfixstr(uint8(sz)))
case sz <= math.MaxUint8:
err = mw.prefix8(mstr8, uint8(sz))
case sz <= math.MaxUint16:
err = mw.prefix16(mstr16, uint16(sz))
default:
err = mw.prefix32(mstr32, sz)
}
if err != nil {
return err
}
_, err = mw.Write(str)
return err
}
// WriteComplex64 writes a complex64 to the writer
func (mw *Writer) WriteComplex64(f complex64) error {
o, err := mw.require(10)
if err != nil {
return err
}
mw.buf[o] = mfixext8
mw.buf[o+1] = Complex64Extension
big.PutUint32(mw.buf[o+2:], math.Float32bits(real(f)))
big.PutUint32(mw.buf[o+6:], math.Float32bits(imag(f)))
return nil
}
// WriteComplex128 writes a complex128 to the writer
func (mw *Writer) WriteComplex128(f complex128) error {
o, err := mw.require(18)
if err != nil {
return err
}
mw.buf[o] = mfixext16
mw.buf[o+1] = Complex128Extension
big.PutUint64(mw.buf[o+2:], math.Float64bits(real(f)))
big.PutUint64(mw.buf[o+10:], math.Float64bits(imag(f)))
return nil
}
// WriteMapStrStr writes a map[string]string to the writer
func (mw *Writer) WriteMapStrStr(mp map[string]string) (err error) {
err = mw.WriteMapHeader(uint32(len(mp)))
if err != nil {
return
}
for key, val := range mp {
err = mw.WriteString(key)
if err != nil {
return
}
err = mw.WriteString(val)
if err != nil {
return
}
}
return nil
}
// WriteMapStrIntf writes a map[string]interface to the writer
func (mw *Writer) WriteMapStrIntf(mp map[string]interface{}) (err error) {
err = mw.WriteMapHeader(uint32(len(mp)))
if err != nil {
return
}
for key, val := range mp {
err = mw.WriteString(key)
if err != nil {
return
}
err = mw.WriteIntf(val)
if err != nil {
return
}
}
return
}
// WriteTime writes a time.Time object to the wire.
//
// Time is encoded as Unix time, which means that
// location (time zone) data is removed from the object.
// The encoded object itself is 12 bytes: 8 bytes for
// a big-endian 64-bit integer denoting seconds
// elapsed since "zero" Unix time, followed by 4 bytes
// for a big-endian 32-bit signed integer denoting
// the nanosecond offset of the time. This encoding
// is intended to ease portability across languages.
// (Note that this is *not* the standard time.Time
// binary encoding, because its implementation relies
// heavily on the internal representation used by the
// time package.)
func (mw *Writer) WriteTime(t time.Time) error {
t = t.UTC().Truncate(0) // strip out monotone clock
o, err := mw.require(15)
if err != nil {
return err
}
mw.buf[o] = mext8
mw.buf[o+1] = 12
mw.buf[o+2] = TimeExtension
putUnix(mw.buf[o+3:], t.Unix(), int32(t.Nanosecond()))
return nil
}
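
A worked sketch of the 15-byte layout described above, using the append-style AppendTime from this package; 0xc7 is the ext 8 marker in the MessagePack spec.

```go
func inspectTimeEncoding() {
	b := AppendTime(nil, time.Unix(1700000000, 42).UTC())
	// b[0]    = 0xc7 (ext 8 marker, written as mext8 above)
	// b[1]    = 12   (payload length: 8-byte seconds + 4-byte nanoseconds)
	// b[2]    = TimeExtension
	// b[3:11] = big-endian seconds; b[11:15] = big-endian nanoseconds
	fmt.Println(len(b) == TimeSize) // true: 15 bytes in total
}
```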
// WriteDuration writes a time.Duration object to the wire.
//
// Duration is encoded as int64.
func (mw *Writer) WriteDuration(dur time.Duration) error {
o, err := mw.require(12)
if err != nil {
return err
}
mw.buf[o] = mext8
mw.buf[o+1] = 9
mw.buf[o+2] = DurationExtension
putMint64(mw.buf[o+3:], int64(dur))
return nil
}
// WriteIntf writes the concrete type of 'v'.
// WriteIntf will error if 'v' is not one of the following:
// - A bool, float, string, []byte, int, uint, or complex
// - A map of supported types (with string keys)
// - An array or slice of supported types
// - A pointer to a supported type
// - A type that satisfies the msgp.Encodable interface
// - A type that satisfies the msgp.Extension interface
func (mw *Writer) WriteIntf(v interface{}) error {
if v == nil {
return mw.WriteNil()
}
switch v := v.(type) {
// preferred interfaces
case Encodable:
return v.EncodeMsg(mw)
case Extension:
return mw.WriteExtension(v)
// concrete types
case bool:
return mw.WriteBool(v)
case float32:
return mw.WriteFloat32(v)
case float64:
return mw.WriteFloat64(v)
case complex64:
return mw.WriteComplex64(v)
case complex128:
return mw.WriteComplex128(v)
case uint8:
return mw.WriteUint8(v)
case uint16:
return mw.WriteUint16(v)
case uint32:
return mw.WriteUint32(v)
case uint64:
return mw.WriteUint64(v)
case uint:
return mw.WriteUint(v)
case int8:
return mw.WriteInt8(v)
case int16:
return mw.WriteInt16(v)
case int32:
return mw.WriteInt32(v)
case int64:
return mw.WriteInt64(v)
case int:
return mw.WriteInt(v)
case string:
return mw.WriteString(v)
case []byte:
return mw.WriteBytes(v)
case map[string]string:
return mw.WriteMapStrStr(v)
case map[string]interface{}:
return mw.WriteMapStrIntf(v)
case time.Time:
return mw.WriteTime(v)
case time.Duration:
return mw.WriteDuration(v)
}
val := reflect.ValueOf(v)
if !isSupported(val.Kind()) || !val.IsValid() {
return fmt.Errorf("msgp: type %s not supported(1)", val)
}
switch val.Kind() {
case reflect.Ptr:
if val.IsNil() {
return mw.WriteNil()
}
return mw.WriteIntf(val.Elem().Interface())
case reflect.Slice:
return mw.writeSlice(val)
case reflect.Map:
return mw.writeMap(val)
}
return &ErrUnsupportedType{val.Type()}
}
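
A sketch of WriteIntf's dynamic dispatch over the types listed above; writeEnvelope and its payload are made up for the example.

```go
// Write a heterogeneous payload in a single call.
func writeEnvelope(w *Writer) error {
	return w.WriteIntf(map[string]interface{}{
		"ok":   true,                    // concrete-type case
		"when": time.Now(),              // time.Time case
		"tags": []interface{}{"a", "b"}, // handled via the reflect.Slice path
	})
}
```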
func (mw *Writer) writeMap(v reflect.Value) (err error) {
	// the map's key type must be string (calling Elem on a map Value would panic)
	if v.Type().Key().Kind() != reflect.String {
return errors.New("msgp: map keys must be strings")
}
ks := v.MapKeys()
err = mw.WriteMapHeader(uint32(len(ks)))
if err != nil {
return
}
for _, key := range ks {
val := v.MapIndex(key)
err = mw.WriteString(key.String())
if err != nil {
return
}
err = mw.WriteIntf(val.Interface())
if err != nil {
return
}
}
return
}
func (mw *Writer) writeSlice(v reflect.Value) (err error) {
// is []byte
if v.Type().ConvertibleTo(btsType) {
return mw.WriteBytes(v.Bytes())
}
sz := uint32(v.Len())
err = mw.WriteArrayHeader(sz)
if err != nil {
return
}
for i := uint32(0); i < sz; i++ {
err = mw.WriteIntf(v.Index(int(i)).Interface())
if err != nil {
return
}
}
return
}
func (mw *Writer) writeStruct(v reflect.Value) error {
if enc, ok := v.Interface().(Encodable); ok {
return enc.EncodeMsg(mw)
}
return fmt.Errorf("msgp: unsupported type: %s", v.Type())
}
func (mw *Writer) writeVal(v reflect.Value) error {
if !isSupported(v.Kind()) {
return fmt.Errorf("msgp: msgp/enc: type %q not supported(2)", v.Type())
}
	// shortcut for nil values of nil-able kinds
	// (reflect.Value.IsNil panics on value kinds such as bool or int)
	switch v.Kind() {
	case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
		if v.IsNil() {
			return mw.WriteNil()
		}
	}
switch v.Kind() {
case reflect.Bool:
return mw.WriteBool(v.Bool())
case reflect.Float32, reflect.Float64:
return mw.WriteFloat64(v.Float())
case reflect.Complex64, reflect.Complex128:
return mw.WriteComplex128(v.Complex())
case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int8:
return mw.WriteInt64(v.Int())
case reflect.Interface, reflect.Ptr:
		if v.IsNil() {
			return mw.WriteNil()
		}
return mw.writeVal(v.Elem())
case reflect.Map:
return mw.writeMap(v)
case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint8:
return mw.WriteUint64(v.Uint())
case reflect.String:
return mw.WriteString(v.String())
case reflect.Slice, reflect.Array:
return mw.writeSlice(v)
case reflect.Struct:
return mw.writeStruct(v)
}
return fmt.Errorf("msgp: msgp/enc: type %q not supported(3)", v.Type())
}
// is the reflect.Kind encodable?
func isSupported(k reflect.Kind) bool {
switch k {
case reflect.Func, reflect.Chan, reflect.Invalid, reflect.UnsafePointer:
return false
default:
return true
}
}
// GuessSize guesses the size of the underlying
// value of 'i'. If the underlying value is not
// a simple builtin (or []byte), GuessSize defaults
// to 512.
func GuessSize(i interface{}) int {
if i == nil {
return NilSize
}
switch i := i.(type) {
case Sizer:
return i.Msgsize()
case Extension:
return ExtensionPrefixSize + i.Len()
case float64:
return Float64Size
case float32:
return Float32Size
case uint8, uint16, uint32, uint64, uint:
return UintSize
case int8, int16, int32, int64, int:
return IntSize
case []byte:
return BytesPrefixSize + len(i)
case string:
return StringPrefixSize + len(i)
case complex64:
return Complex64Size
case complex128:
return Complex128Size
case bool:
return BoolSize
case map[string]interface{}:
s := MapHeaderSize
for key, val := range i {
s += StringPrefixSize + len(key) + GuessSize(val)
}
return s
case map[string]string:
s := MapHeaderSize
for key, val := range i {
s += 2*StringPrefixSize + len(key) + len(val)
}
return s
default:
return 512
}
}
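
A sketch tying GuessSize to the append-style API in the next file: use the guess to pre-size the destination buffer. marshalIntf is a made-up helper.

```go
// Marshal an arbitrary supported value into a pre-sized buffer.
func marshalIntf(v interface{}) ([]byte, error) {
	return AppendIntf(make([]byte, 0, GuessSize(v)), v)
}
```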

View File

@@ -0,0 +1,464 @@
package msgp
import (
"math"
"reflect"
"time"
)
// ensure grows 'b' by 'sz' bytes and returns the grown slice
// along with the offset at which the new bytes begin
func ensure(b []byte, sz int) ([]byte, int) {
l := len(b)
c := cap(b)
if c-l < sz {
o := make([]byte, (2*c)+sz) // exponential growth
n := copy(o, b)
return o[:n+sz], n
}
return b[:l+sz], l
}
// AppendMapHeader appends a map header with the
// given size to the slice
func AppendMapHeader(b []byte, sz uint32) []byte {
switch {
case sz <= 15:
return append(b, wfixmap(uint8(sz)))
case sz <= math.MaxUint16:
o, n := ensure(b, 3)
prefixu16(o[n:], mmap16, uint16(sz))
return o
default:
o, n := ensure(b, 5)
prefixu32(o[n:], mmap32, sz)
return o
}
}
// AppendArrayHeader appends an array header with
// the given size to the slice
func AppendArrayHeader(b []byte, sz uint32) []byte {
switch {
case sz <= 15:
return append(b, wfixarray(uint8(sz)))
case sz <= math.MaxUint16:
o, n := ensure(b, 3)
prefixu16(o[n:], marray16, uint16(sz))
return o
default:
o, n := ensure(b, 5)
prefixu32(o[n:], marray32, sz)
return o
}
}
// AppendNil appends a 'nil' byte to the slice
func AppendNil(b []byte) []byte { return append(b, mnil) }
// AppendFloat64 appends a float64 to the slice
func AppendFloat64(b []byte, f float64) []byte {
o, n := ensure(b, Float64Size)
prefixu64(o[n:], mfloat64, math.Float64bits(f))
return o
}
// AppendFloat32 appends a float32 to the slice
func AppendFloat32(b []byte, f float32) []byte {
o, n := ensure(b, Float32Size)
prefixu32(o[n:], mfloat32, math.Float32bits(f))
return o
}
// AppendInt64 appends an int64 to the slice
func AppendInt64(b []byte, i int64) []byte {
if i >= 0 {
switch {
case i <= math.MaxInt8:
return append(b, wfixint(uint8(i)))
case i <= math.MaxInt16:
o, n := ensure(b, 3)
putMint16(o[n:], int16(i))
return o
case i <= math.MaxInt32:
o, n := ensure(b, 5)
putMint32(o[n:], int32(i))
return o
default:
o, n := ensure(b, 9)
putMint64(o[n:], i)
return o
}
}
switch {
case i >= -32:
return append(b, wnfixint(int8(i)))
case i >= math.MinInt8:
o, n := ensure(b, 2)
putMint8(o[n:], int8(i))
return o
case i >= math.MinInt16:
o, n := ensure(b, 3)
putMint16(o[n:], int16(i))
return o
case i >= math.MinInt32:
o, n := ensure(b, 5)
putMint32(o[n:], int32(i))
return o
default:
o, n := ensure(b, 9)
putMint64(o[n:], i)
return o
}
}
// AppendInt appends an int to the slice
func AppendInt(b []byte, i int) []byte { return AppendInt64(b, int64(i)) }
// AppendInt8 appends an int8 to the slice
func AppendInt8(b []byte, i int8) []byte { return AppendInt64(b, int64(i)) }
// AppendInt16 appends an int16 to the slice
func AppendInt16(b []byte, i int16) []byte { return AppendInt64(b, int64(i)) }
// AppendInt32 appends an int32 to the slice
func AppendInt32(b []byte, i int32) []byte { return AppendInt64(b, int64(i)) }
// AppendUint64 appends a uint64 to the slice
func AppendUint64(b []byte, u uint64) []byte {
switch {
case u <= (1<<7)-1:
return append(b, wfixint(uint8(u)))
case u <= math.MaxUint8:
o, n := ensure(b, 2)
putMuint8(o[n:], uint8(u))
return o
case u <= math.MaxUint16:
o, n := ensure(b, 3)
putMuint16(o[n:], uint16(u))
return o
case u <= math.MaxUint32:
o, n := ensure(b, 5)
putMuint32(o[n:], uint32(u))
return o
default:
o, n := ensure(b, 9)
putMuint64(o[n:], u)
return o
}
}
// AppendUint appends a uint to the slice
func AppendUint(b []byte, u uint) []byte { return AppendUint64(b, uint64(u)) }
// AppendUint8 appends a uint8 to the slice
func AppendUint8(b []byte, u uint8) []byte { return AppendUint64(b, uint64(u)) }
// AppendByte is analogous to AppendUint8
func AppendByte(b []byte, u byte) []byte { return AppendUint8(b, uint8(u)) }
// AppendUint16 appends a uint16 to the slice
func AppendUint16(b []byte, u uint16) []byte { return AppendUint64(b, uint64(u)) }
// AppendUint32 appends a uint32 to the slice
func AppendUint32(b []byte, u uint32) []byte { return AppendUint64(b, uint64(u)) }
// AppendBytes appends bytes to the slice as MessagePack 'bin' data
func AppendBytes(b []byte, bts []byte) []byte {
sz := len(bts)
var o []byte
var n int
switch {
case sz <= math.MaxUint8:
o, n = ensure(b, 2+sz)
prefixu8(o[n:], mbin8, uint8(sz))
n += 2
case sz <= math.MaxUint16:
o, n = ensure(b, 3+sz)
prefixu16(o[n:], mbin16, uint16(sz))
n += 3
default:
o, n = ensure(b, 5+sz)
prefixu32(o[n:], mbin32, uint32(sz))
n += 5
}
return o[:n+copy(o[n:], bts)]
}
// AppendBool appends a bool to the slice
func AppendBool(b []byte, t bool) []byte {
if t {
return append(b, mtrue)
}
return append(b, mfalse)
}
// AppendString appends a string as a MessagePack 'str' to the slice
func AppendString(b []byte, s string) []byte {
sz := len(s)
var n int
var o []byte
switch {
case sz <= 31:
o, n = ensure(b, 1+sz)
o[n] = wfixstr(uint8(sz))
n++
case sz <= math.MaxUint8:
o, n = ensure(b, 2+sz)
prefixu8(o[n:], mstr8, uint8(sz))
n += 2
case sz <= math.MaxUint16:
o, n = ensure(b, 3+sz)
prefixu16(o[n:], mstr16, uint16(sz))
n += 3
default:
o, n = ensure(b, 5+sz)
prefixu32(o[n:], mstr32, uint32(sz))
n += 5
}
return o[:n+copy(o[n:], s)]
}
// AppendStringFromBytes appends a []byte
// as a MessagePack 'str' to the slice 'b'.
func AppendStringFromBytes(b []byte, str []byte) []byte {
sz := len(str)
var n int
var o []byte
switch {
case sz <= 31:
o, n = ensure(b, 1+sz)
o[n] = wfixstr(uint8(sz))
n++
case sz <= math.MaxUint8:
o, n = ensure(b, 2+sz)
prefixu8(o[n:], mstr8, uint8(sz))
n += 2
case sz <= math.MaxUint16:
o, n = ensure(b, 3+sz)
prefixu16(o[n:], mstr16, uint16(sz))
n += 3
default:
o, n = ensure(b, 5+sz)
prefixu32(o[n:], mstr32, uint32(sz))
n += 5
}
return o[:n+copy(o[n:], str)]
}
// AppendComplex64 appends a complex64 to the slice as a MessagePack extension
func AppendComplex64(b []byte, c complex64) []byte {
o, n := ensure(b, Complex64Size)
o[n] = mfixext8
o[n+1] = Complex64Extension
big.PutUint32(o[n+2:], math.Float32bits(real(c)))
big.PutUint32(o[n+6:], math.Float32bits(imag(c)))
return o
}
// AppendComplex128 appends a complex128 to the slice as a MessagePack extension
func AppendComplex128(b []byte, c complex128) []byte {
o, n := ensure(b, Complex128Size)
o[n] = mfixext16
o[n+1] = Complex128Extension
big.PutUint64(o[n+2:], math.Float64bits(real(c)))
big.PutUint64(o[n+10:], math.Float64bits(imag(c)))
return o
}
// AppendTime appends a time.Time to the slice as a MessagePack extension
func AppendTime(b []byte, t time.Time) []byte {
o, n := ensure(b, TimeSize)
t = t.UTC()
o[n] = mext8
o[n+1] = 12
o[n+2] = TimeExtension
putUnix(o[n+3:], t.Unix(), int32(t.Nanosecond()))
return o
}
// AppendDuration appends a time.Duration to the slice as a MessagePack extension
func AppendDuration(b []byte, dur time.Duration) []byte {
o, n := ensure(b, DurationSize)
o[n] = mext8
o[n+1] = 9
o[n+2] = DurationExtension
putMint64(o[n+3:], int64(dur))
return o
}
// AppendMapStrStr appends a map[string]string to the slice
// as a MessagePack map with 'str'-type keys and values
func AppendMapStrStr(b []byte, m map[string]string) []byte {
sz := uint32(len(m))
b = AppendMapHeader(b, sz)
for key, val := range m {
b = AppendString(b, key)
b = AppendString(b, val)
}
return b
}
// AppendMapStrIntf appends a map[string]interface{} to the slice
// as a MessagePack map with 'str'-type keys.
func AppendMapStrIntf(b []byte, m map[string]interface{}) ([]byte, error) {
sz := uint32(len(m))
b = AppendMapHeader(b, sz)
var err error
for key, val := range m {
b = AppendString(b, key)
b, err = AppendIntf(b, val)
if err != nil {
return b, err
}
}
return b, nil
}
// AppendMapStrSomething appends a map[string]* to the slice
// as a MessagePack map with 'str'-type keys. * must be
// serializable by AppendIntf().
func AppendMapStrSomething(b []byte, m reflect.Value) ([]byte, error) {
keys := m.MapKeys()
sz := uint32(len(keys))
if sz == 0 {
b = AppendMapHeader(b, sz)
return b, nil
}
var err error
for i, key := range keys {
if i == 0 {
if key.Type().Kind() != reflect.String {
return b, &ErrUnsupportedType{T: m.Type()}
}
// lazy because we try hard not to write
// half a value to b and then error out.
b = AppendMapHeader(b, sz)
}
b = AppendString(b, key.String())
val := m.MapIndex(key)
b, err = AppendIntf(b, val.Interface())
if err != nil {
return b, err
}
}
return b, nil
}
// AppendIntf appends the concrete type of 'i' to the
// provided []byte. 'i' must be one of the following:
// - 'nil'
// - A bool, float, string, []byte, int, uint, or complex
// - A map[string]interface{} or map[string]string
// - A []T, where T is another supported type
// - A *T, where T is another supported type
// - A type that satisfies the msgp.Marshaler interface
// - A type that satisfies the msgp.Extension interface
func AppendIntf(b []byte, i interface{}) ([]byte, error) {
if i == nil {
return AppendNil(b), nil
}
// all the concrete types
// for which we have methods
switch i := i.(type) {
case Marshaler:
return i.MarshalMsg(b)
case Extension:
return AppendExtension(b, i)
case bool:
return AppendBool(b, i), nil
case float32:
return AppendFloat32(b, i), nil
case float64:
return AppendFloat64(b, i), nil
case complex64:
return AppendComplex64(b, i), nil
case complex128:
return AppendComplex128(b, i), nil
case string:
return AppendString(b, i), nil
case []byte:
return AppendBytes(b, i), nil
case int8:
return AppendInt8(b, i), nil
case int16:
return AppendInt16(b, i), nil
case int32:
return AppendInt32(b, i), nil
case int64:
return AppendInt64(b, i), nil
case int:
return AppendInt64(b, int64(i)), nil
case uint:
return AppendUint64(b, uint64(i)), nil
case uint8:
return AppendUint8(b, i), nil
case uint16:
return AppendUint16(b, i), nil
case uint32:
return AppendUint32(b, i), nil
case uint64:
return AppendUint64(b, i), nil
case time.Time:
return AppendTime(b, i), nil
case time.Duration:
return AppendDuration(b, i), nil
case map[string]interface{}:
return AppendMapStrIntf(b, i)
case map[string]string:
return AppendMapStrStr(b, i), nil
case []interface{}:
b = AppendArrayHeader(b, uint32(len(i)))
var err error
for _, k := range i {
b, err = AppendIntf(b, k)
if err != nil {
return b, err
}
}
return b, nil
}
var err error
v := reflect.ValueOf(i)
switch v.Kind() {
case reflect.Array, reflect.Slice:
l := v.Len()
b = AppendArrayHeader(b, uint32(l))
for i := 0; i < l; i++ {
b, err = AppendIntf(b, v.Index(i).Interface())
if err != nil {
return b, err
}
}
return b, nil
case reflect.Ptr:
if v.IsNil() {
return AppendNil(b), err
}
b, err = AppendIntf(b, v.Elem().Interface())
return b, err
case reflect.Map:
return AppendMapStrSomething(b, v)
default:
return b, &ErrUnsupportedType{T: v.Type()}
}
}
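
A sketch of the allocation-friendly append style these helpers support, mirroring the map written with the streaming Writer earlier; appendProfile is a made-up helper.

```go
// Build a two-entry MessagePack map directly into a byte slice.
func appendProfile(b []byte) []byte {
	b = AppendMapHeader(b, 2)
	b = AppendString(b, "name")
	b = AppendString(b, "ann")
	b = AppendString(b, "score")
	b = AppendFloat64(b, 9.5)
	return b
}
```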
// AppendNegativeOneAndStringAsBytes appends -1 followed by 'str' as a
// MessagePack 'str'; it is a helper for runtime struct ids
func AppendNegativeOneAndStringAsBytes(b []byte, str []byte) []byte {
o := AppendInt64(b, -1)
return AppendStringFromBytes(o, str)
}

21
vendor/github.com/glycerine/liner/COPYING generated vendored Normal file
View File

@@ -0,0 +1,21 @@
Copyright © 2012 Peter Harris
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice (including the next
paragraph) shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

100
vendor/github.com/glycerine/liner/README.md generated vendored Normal file
View File

@@ -0,0 +1,100 @@
Liner
=====
Liner is a command line editor with history. It was inspired by linenoise;
everything Unix-like is a VT100 (or is trying very hard to be). If your
terminal is not pretending to be a VT100, change it. Liner also supports
Windows.
Liner is released under the X11 license (which is similar to the new BSD
license).
Line Editing
------------
The following line editing commands are supported on platforms and terminals
that Liner supports:
Keystroke | Action
--------- | ------
Ctrl-A, Home | Move cursor to beginning of line
Ctrl-E, End | Move cursor to end of line
Ctrl-B, Left | Move cursor one character left
Ctrl-F, Right| Move cursor one character right
Ctrl-Left, Alt-B | Move cursor to previous word
Ctrl-Right, Alt-F | Move cursor to next word
Ctrl-D, Del | (if line is *not* empty) Delete character under cursor
Ctrl-D | (if line *is* empty) End of File - usually quits application
Ctrl-C | Reset input (create new empty prompt)
Ctrl-L | Clear screen (line is unmodified)
Ctrl-T | Transpose previous character with current character
Ctrl-H, BackSpace | Delete character before cursor
Ctrl-W | Delete word leading up to cursor
Ctrl-K | Delete from cursor to end of line
Ctrl-U | Delete from start of line to cursor
Ctrl-P, Up | Previous match from history
Ctrl-N, Down | Next match from history
Ctrl-R | Reverse Search history (Ctrl-S forward, Ctrl-G cancel)
Ctrl-Y | Paste from Yank buffer (Alt-Y to paste next yank instead)
Tab | Next completion
Shift-Tab | (after Tab) Previous completion
Getting started
-----------------
```go
package main
import (
"log"
"os"
"path/filepath"
"strings"
"github.com/peterh/liner"
)
var (
history_fn = filepath.Join(os.TempDir(), ".liner_example_history")
names = []string{"john", "james", "mary", "nancy"}
)
func main() {
line := liner.NewLiner()
defer line.Close()
line.SetCtrlCAborts(true)
line.SetCompleter(func(line string) (c []string) {
for _, n := range names {
if strings.HasPrefix(n, strings.ToLower(line)) {
c = append(c, n)
}
}
return
})
if f, err := os.Open(history_fn); err == nil {
line.ReadHistory(f)
f.Close()
}
if name, err := line.Prompt("What is your name? "); err == nil {
log.Print("Got: ", name)
line.AppendHistory(name)
} else if err == liner.ErrPromptAborted {
log.Print("Aborted")
} else {
log.Print("Error reading line: ", err)
}
if f, err := os.Create(history_fn); err != nil {
log.Print("Error writing history file: ", err)
} else {
line.WriteHistory(f)
f.Close()
}
}
```
For documentation, see http://godoc.org/github.com/peterh/liner

39
vendor/github.com/glycerine/liner/bsdinput.go generated vendored Normal file
View File

@@ -0,0 +1,39 @@
// +build openbsd freebsd netbsd
package liner
import "syscall"
const (
getTermios = syscall.TIOCGETA
setTermios = syscall.TIOCSETA
)
const (
// Input flags
inpck = 0x010
istrip = 0x020
icrnl = 0x100
ixon = 0x200
// Output flags
opost = 0x1
// Control flags
cs8 = 0x300
// Local flags
isig = 0x080
icanon = 0x100
iexten = 0x400
)
type termios struct {
Iflag uint32
Oflag uint32
Cflag uint32
Lflag uint32
Cc [20]byte
Ispeed int32
Ospeed int32
}

226
vendor/github.com/glycerine/liner/common.go generated vendored Normal file
View File

@@ -0,0 +1,226 @@
/*
Package liner implements a simple command line editor, inspired by linenoise
(https://github.com/antirez/linenoise/). This package supports WIN32 in
addition to the xterm codes supported by everything else.
*/
package liner
import (
"bufio"
"container/ring"
"errors"
"fmt"
"io"
"strings"
"sync"
"unicode/utf8"
)
type commonState struct {
terminalSupported bool
outputRedirected bool
inputRedirected bool
history []string
historyMutex sync.RWMutex
completer WordCompleter
columns int
killRing *ring.Ring
ctrlCAborts bool
r *bufio.Reader
tabStyle TabStyle
multiLineMode bool
cursorRows int
maxRows int
}
// TabStyle is used to select how tab completions are displayed.
type TabStyle int
// Two tab styles are currently available:
//
// TabCircular cycles through each completion item and displays it directly on
// the prompt
//
// TabPrints prints the list of completion items to the screen after a second
// tab key is pressed. This behaves similar to GNU readline and BASH (which
// uses readline)
const (
TabCircular TabStyle = iota
TabPrints
)
// ErrPromptAborted is returned from Prompt or PasswordPrompt when the user presses Ctrl-C
// if SetCtrlCAborts(true) has been called on the State
var ErrPromptAborted = errors.New("prompt aborted")
// ErrNotTerminalOutput is returned from Prompt or PasswordPrompt if the
// platform is normally supported, but stdout has been redirected
var ErrNotTerminalOutput = errors.New("standard output is not a terminal")
// Max elements to save on the killring
const KillRingMax = 60
// HistoryLimit is the maximum number of entries saved in the scrollback history.
const HistoryLimit = 1000
// ReadHistory reads scrollback history from r. Returns the number of lines
// read, and any read error (except io.EOF).
func (s *State) ReadHistory(r io.Reader) (num int, err error) {
s.historyMutex.Lock()
defer s.historyMutex.Unlock()
in := bufio.NewReader(r)
num = 0
for {
line, part, err := in.ReadLine()
if err == io.EOF {
break
}
if err != nil {
return num, err
}
if part {
return num, fmt.Errorf("line %d is too long", num+1)
}
if !utf8.Valid(line) {
return num, fmt.Errorf("invalid string at line %d", num+1)
}
num++
s.history = append(s.history, string(line))
if len(s.history) > HistoryLimit {
s.history = s.history[1:]
}
}
return num, nil
}
// WriteHistory writes scrollback history to w. Returns the number of lines
// successfully written, and any write error.
//
// Unlike the rest of liner's API, WriteHistory is safe to call
// from another goroutine while Prompt is in progress.
// This exception is to facilitate the saving of the history buffer
// during an unexpected exit (for example, due to Ctrl-C being invoked)
func (s *State) WriteHistory(w io.Writer) (num int, err error) {
s.historyMutex.RLock()
defer s.historyMutex.RUnlock()
for _, item := range s.history {
_, err := fmt.Fprintln(w, item)
if err != nil {
return num, err
}
num++
}
return num, nil
}
// AppendHistory appends an entry to the scrollback history. AppendHistory
// should be called iff Prompt returns a valid command.
func (s *State) AppendHistory(item string) {
s.historyMutex.Lock()
defer s.historyMutex.Unlock()
if len(s.history) > 0 {
if item == s.history[len(s.history)-1] {
return
}
}
s.history = append(s.history, item)
if len(s.history) > HistoryLimit {
s.history = s.history[1:]
}
}
// Returns the history lines starting with prefix
func (s *State) getHistoryByPrefix(prefix string) (ph []string) {
for _, h := range s.history {
if strings.HasPrefix(h, prefix) {
ph = append(ph, h)
}
}
return
}
// Returns the history lines matching the intelligent search
func (s *State) getHistoryByPattern(pattern string) (ph []string, pos []int) {
if pattern == "" {
return
}
for _, h := range s.history {
if i := strings.Index(h, pattern); i >= 0 {
ph = append(ph, h)
pos = append(pos, i)
}
}
return
}
// Completer takes the currently edited line content at the left of the cursor
// and returns a list of completion candidates.
// If the line is "Hello, wo!!!" and the cursor is before the first '!', "Hello, wo" is passed
// to the completer which may return {"Hello, world", "Hello, Word"} to have "Hello, world!!!".
type Completer func(line string) []string
// WordCompleter takes the currently edited line with the cursor position and
// returns the completion candidates for the partial word to be completed.
// If the line is "Hello, wo!!!" and the cursor is before the first '!', ("Hello, wo!!!", 9) is passed
// to the completer, which may return ("Hello, ", {"world", "Word"}, "!!!") to produce "Hello, world!!!".
type WordCompleter func(line string, pos int) (head string, completions []string, tail string)
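
A hedged sketch of a WordCompleter that reproduces the "Hello, wo!!!" example above; it indexes by byte for simplicity (so it assumes ASCII input), and `line` is a *State as in the README's getting-started example.

```go
line.SetWordCompleter(func(buf string, pos int) (head string, completions []string, tail string) {
	head = buf[:strings.LastIndex(buf[:pos], " ")+1] // "Hello, "
	word := buf[len(head):pos]                       // "wo"
	for _, c := range []string{"world", "Word"} {
		if strings.HasPrefix(strings.ToLower(c), strings.ToLower(word)) {
			completions = append(completions, c)
		}
	}
	return head, completions, buf[pos:] // tail: "!!!"
})
```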
// SetCompleter sets the completion function that Liner will call to
// fetch completion candidates when the user presses tab.
func (s *State) SetCompleter(f Completer) {
if f == nil {
s.completer = nil
return
}
s.completer = func(line string, pos int) (string, []string, string) {
return "", f(string([]rune(line)[:pos])), string([]rune(line)[pos:])
}
}
// SetWordCompleter sets the completion function that Liner will call to
// fetch completion candidates when the user presses tab.
func (s *State) SetWordCompleter(f WordCompleter) {
s.completer = f
}
// SetTabCompletionStyle sets the behavior when the Tab key is pressed
// for auto-completion. TabCircular is the default behavior and cycles
// through the list of candidates at the prompt. TabPrints will print
// the available completion candidates to the screen similar to BASH
// and GNU Readline
func (s *State) SetTabCompletionStyle(tabStyle TabStyle) {
s.tabStyle = tabStyle
}
// ModeApplier is the interface that wraps a representation of the terminal
// mode. ApplyMode sets the terminal to this mode.
type ModeApplier interface {
ApplyMode() error
}
// SetCtrlCAborts sets whether Prompt on a supported terminal will return an
// ErrPromptAborted when Ctrl-C is pressed. The default is false (will not
// return when Ctrl-C is pressed). Unsupported terminals typically raise SIGINT
// (and Prompt does not return) regardless of the value passed to SetCtrlCAborts.
func (s *State) SetCtrlCAborts(aborts bool) {
s.ctrlCAborts = aborts
}
// SetMultiLineMode sets whether line is auto-wrapped. The default is false (single line).
func (s *State) SetMultiLineMode(mlmode bool) {
s.multiLineMode = mlmode
}
func (s *State) promptUnsupported(p string) (string, error) {
if !s.inputRedirected || !s.terminalSupported {
fmt.Print(p)
}
linebuf, _, err := s.r.ReadLine()
if err != nil {
return "", err
}
return string(linebuf), nil
}

57
vendor/github.com/glycerine/liner/fallbackinput.go generated vendored Normal file
View File

@@ -0,0 +1,57 @@
// +build !windows,!linux,!darwin,!openbsd,!freebsd,!netbsd
package liner
import (
"bufio"
"errors"
"os"
)
// State represents an open terminal
type State struct {
commonState
}
// Prompt displays p, and then waits for user input. Prompt does not support
// line editing on this operating system.
func (s *State) Prompt(p string) (string, error) {
return s.promptUnsupported(p)
}
// PasswordPrompt is not supported in this OS.
func (s *State) PasswordPrompt(p string) (string, error) {
return "", errors.New("liner: function not supported in this terminal")
}
// NewLiner initializes a new *State
//
// Note that this operating system uses a fallback mode without line
// editing. Patches welcome.
func NewLiner() *State {
var s State
s.r = bufio.NewReader(os.Stdin)
return &s
}
// Close returns the terminal to its previous mode
func (s *State) Close() error {
return nil
}
// TerminalSupported returns false because line editing is not
// supported on this platform.
func TerminalSupported() bool {
return false
}
type noopMode struct{}
func (n noopMode) ApplyMode() error {
return nil
}
// TerminalMode returns a noop InputModeSetter on this platform.
func TerminalMode() (ModeApplier, error) {
return noopMode{}, nil
}

368
vendor/github.com/glycerine/liner/input.go generated vendored Normal file
View File

@@ -0,0 +1,368 @@
// +build linux darwin openbsd freebsd netbsd
package liner
import (
"bufio"
"errors"
"os"
"os/signal"
"strconv"
"strings"
"syscall"
"time"
)
type nexter struct {
r rune
err error
}
// State represents an open terminal
type State struct {
commonState
origMode termios
defaultMode termios
next <-chan nexter
winch chan os.Signal
pending []rune
useCHA bool
}
// NewLiner initializes a new *State, and sets the terminal into raw mode. To
// restore the terminal to its previous state, call State.Close().
//
// Note if you are still using Go 1.0: NewLiner handles SIGWINCH, so it will
// leak a channel every time you call it. Therefore, it is recommended that you
// upgrade to a newer release of Go, or ensure that NewLiner is only called
// once.
func NewLiner() *State {
var s State
s.r = bufio.NewReader(os.Stdin)
s.terminalSupported = TerminalSupported()
if m, err := TerminalMode(); err == nil {
s.origMode = *m.(*termios)
} else {
s.inputRedirected = true
}
if _, err := getMode(syscall.Stdout); err != 0 {
s.outputRedirected = true
}
if s.inputRedirected && s.outputRedirected {
s.terminalSupported = false
}
if s.terminalSupported && !s.inputRedirected && !s.outputRedirected {
mode := s.origMode
mode.Iflag &^= icrnl | inpck | istrip | ixon
mode.Cflag |= cs8
mode.Lflag &^= syscall.ECHO | icanon | iexten
mode.ApplyMode()
winch := make(chan os.Signal, 1)
signal.Notify(winch, syscall.SIGWINCH)
s.winch = winch
s.checkOutput()
}
if !s.outputRedirected {
s.getColumns()
s.outputRedirected = s.columns <= 0
}
return &s
}
var errTimedOut = errors.New("timeout")
func (s *State) startPrompt() {
if s.terminalSupported {
if m, err := TerminalMode(); err == nil {
s.defaultMode = *m.(*termios)
mode := s.defaultMode
mode.Lflag &^= isig
mode.ApplyMode()
}
}
s.restartPrompt()
}
func (s *State) restartPrompt() {
next := make(chan nexter)
go func() {
for {
var n nexter
n.r, _, n.err = s.r.ReadRune()
next <- n
// Shut down nexter loop when an end condition has been reached
if n.err != nil || n.r == '\n' || n.r == '\r' || n.r == ctrlC || n.r == ctrlD {
close(next)
return
}
}
}()
s.next = next
}
func (s *State) stopPrompt() {
if s.terminalSupported {
s.defaultMode.ApplyMode()
}
}
func (s *State) nextPending(timeout <-chan time.Time) (rune, error) {
select {
case thing, ok := <-s.next:
if !ok {
return 0, errors.New("liner: internal error")
}
if thing.err != nil {
return 0, thing.err
}
s.pending = append(s.pending, thing.r)
return thing.r, nil
case <-timeout:
rv := s.pending[0]
s.pending = s.pending[1:]
return rv, errTimedOut
}
// not reached
return 0, nil
}
func (s *State) readNext() (interface{}, error) {
if len(s.pending) > 0 {
rv := s.pending[0]
s.pending = s.pending[1:]
return rv, nil
}
var r rune
select {
case thing, ok := <-s.next:
if !ok {
return 0, errors.New("liner: internal error")
}
if thing.err != nil {
return nil, thing.err
}
r = thing.r
case <-s.winch:
s.getColumns()
return winch, nil
}
if r != esc {
return r, nil
}
s.pending = append(s.pending, r)
// Wait at most 50 ms for the rest of the escape sequence
// If nothing else arrives, it was an actual press of the esc key
timeout := time.After(50 * time.Millisecond)
flag, err := s.nextPending(timeout)
if err != nil {
if err == errTimedOut {
return flag, nil
}
return unknown, err
}
switch flag {
case '[':
code, err := s.nextPending(timeout)
if err != nil {
if err == errTimedOut {
return code, nil
}
return unknown, err
}
switch code {
case 'A':
s.pending = s.pending[:0] // escape code complete
return up, nil
case 'B':
s.pending = s.pending[:0] // escape code complete
return down, nil
case 'C':
s.pending = s.pending[:0] // escape code complete
return right, nil
case 'D':
s.pending = s.pending[:0] // escape code complete
return left, nil
case 'F':
s.pending = s.pending[:0] // escape code complete
return end, nil
case 'H':
s.pending = s.pending[:0] // escape code complete
return home, nil
case 'Z':
s.pending = s.pending[:0] // escape code complete
return shiftTab, nil
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
num := []rune{code}
for {
code, err := s.nextPending(timeout)
if err != nil {
if err == errTimedOut {
return code, nil
}
return nil, err
}
switch code {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
num = append(num, code)
case ';':
// Modifier code to follow
// This only supports Ctrl-left and Ctrl-right for now
x, _ := strconv.ParseInt(string(num), 10, 32)
if x != 1 {
// Can't be left or right
rv := s.pending[0]
s.pending = s.pending[1:]
return rv, nil
}
num = num[:0]
for {
code, err = s.nextPending(timeout)
if err != nil {
if err == errTimedOut {
rv := s.pending[0]
s.pending = s.pending[1:]
return rv, nil
}
return nil, err
}
switch code {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
num = append(num, code)
case 'C', 'D':
// right, left
mod, _ := strconv.ParseInt(string(num), 10, 32)
if mod != 5 {
// Not bare Ctrl
rv := s.pending[0]
s.pending = s.pending[1:]
return rv, nil
}
s.pending = s.pending[:0] // escape code complete
if code == 'C' {
return wordRight, nil
}
return wordLeft, nil
default:
// Not left or right
rv := s.pending[0]
s.pending = s.pending[1:]
return rv, nil
}
}
case '~':
s.pending = s.pending[:0] // escape code complete
x, _ := strconv.ParseInt(string(num), 10, 32)
switch x {
case 2:
return insert, nil
case 3:
return del, nil
case 5:
return pageUp, nil
case 6:
return pageDown, nil
case 7:
return home, nil
case 8:
return end, nil
case 15:
return f5, nil
case 17:
return f6, nil
case 18:
return f7, nil
case 19:
return f8, nil
case 20:
return f9, nil
case 21:
return f10, nil
case 23:
return f11, nil
case 24:
return f12, nil
default:
return unknown, nil
}
default:
// unrecognized escape code
rv := s.pending[0]
s.pending = s.pending[1:]
return rv, nil
}
}
}
case 'O':
code, err := s.nextPending(timeout)
if err != nil {
if err == errTimedOut {
return code, nil
}
return nil, err
}
s.pending = s.pending[:0] // escape code complete
switch code {
case 'c':
return wordRight, nil
case 'd':
return wordLeft, nil
case 'H':
return home, nil
case 'F':
return end, nil
case 'P':
return f1, nil
case 'Q':
return f2, nil
case 'R':
return f3, nil
case 'S':
return f4, nil
default:
return unknown, nil
}
case 'b':
s.pending = s.pending[:0] // escape code complete
return altB, nil
case 'f':
s.pending = s.pending[:0] // escape code complete
return altF, nil
case 'y':
s.pending = s.pending[:0] // escape code complete
return altY, nil
default:
rv := s.pending[0]
s.pending = s.pending[1:]
return rv, nil
}
// not reached
return r, nil
}
// Close returns the terminal to its previous mode
func (s *State) Close() error {
stopSignal(s.winch)
if !s.inputRedirected {
s.origMode.ApplyMode()
}
return nil
}
// TerminalSupported returns true if the current terminal supports
// line editing features, and false if liner will use the 'dumb'
// fallback for input.
// Note that TerminalSupported does not check all factors that may keep
// liner from fully supporting the terminal (such as stdin redirection).
func TerminalSupported() bool {
bad := map[string]bool{"": true, "dumb": true, "cons25": true}
return !bad[strings.ToLower(os.Getenv("TERM"))]
}

39
vendor/github.com/glycerine/liner/input_darwin.go generated vendored Normal file
View File

@@ -0,0 +1,39 @@
// +build darwin
package liner
import "syscall"
const (
getTermios = syscall.TIOCGETA
setTermios = syscall.TIOCSETA
)
const (
// Input flags
inpck = 0x010
istrip = 0x020
icrnl = 0x100
ixon = 0x200
// Output flags
opost = 0x1
// Control flags
cs8 = 0x300
// Local flags
isig = 0x080
icanon = 0x100
iexten = 0x400
)
type termios struct {
Iflag uintptr
Oflag uintptr
Cflag uintptr
Lflag uintptr
Cc [20]byte
Ispeed uintptr
Ospeed uintptr
}

26
vendor/github.com/glycerine/liner/input_linux.go generated vendored Normal file
View File

@@ -0,0 +1,26 @@
// +build linux
package liner
import "syscall"
const (
getTermios = syscall.TCGETS
setTermios = syscall.TCSETS
)
const (
icrnl = syscall.ICRNL
inpck = syscall.INPCK
istrip = syscall.ISTRIP
ixon = syscall.IXON
opost = syscall.OPOST
cs8 = syscall.CS8
isig = syscall.ISIG
icanon = syscall.ICANON
iexten = syscall.IEXTEN
)
type termios struct {
syscall.Termios
}

321
vendor/github.com/glycerine/liner/input_windows.go generated vendored Normal file
View File

@@ -0,0 +1,321 @@
package liner
import (
"bufio"
"os"
"syscall"
"unsafe"
)
var (
kernel32 = syscall.NewLazyDLL("kernel32.dll")
procGetStdHandle = kernel32.NewProc("GetStdHandle")
procReadConsoleInput = kernel32.NewProc("ReadConsoleInputW")
procGetConsoleMode = kernel32.NewProc("GetConsoleMode")
procSetConsoleMode = kernel32.NewProc("SetConsoleMode")
procSetConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition")
procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
procFillConsoleOutputCharacter = kernel32.NewProc("FillConsoleOutputCharacterW")
)
// These names are from the Win32 api, so they use underscores (contrary to
// what golint suggests)
const (
std_input_handle = uint32(-10 & 0xFFFFFFFF)
std_output_handle = uint32(-11 & 0xFFFFFFFF)
std_error_handle = uint32(-12 & 0xFFFFFFFF)
invalid_handle_value = ^uintptr(0)
)
type inputMode uint32
// State represents an open terminal
type State struct {
commonState
handle syscall.Handle
hOut syscall.Handle
origMode inputMode
defaultMode inputMode
key interface{}
repeat uint16
}
const (
enableEchoInput = 0x4
enableInsertMode = 0x20
enableLineInput = 0x2
enableMouseInput = 0x10
enableProcessedInput = 0x1
enableQuickEditMode = 0x40
enableWindowInput = 0x8
)
// NewLiner initializes a new *State, and sets the terminal into raw mode. To
// restore the terminal to its previous state, call State.Close().
func NewLiner() *State {
var s State
hIn, _, _ := procGetStdHandle.Call(uintptr(std_input_handle))
s.handle = syscall.Handle(hIn)
hOut, _, _ := procGetStdHandle.Call(uintptr(std_output_handle))
s.hOut = syscall.Handle(hOut)
s.terminalSupported = true
if m, err := TerminalMode(); err == nil {
s.origMode = m.(inputMode)
mode := s.origMode
mode &^= enableEchoInput
mode &^= enableInsertMode
mode &^= enableLineInput
mode &^= enableMouseInput
mode |= enableWindowInput
mode.ApplyMode()
} else {
s.inputRedirected = true
s.r = bufio.NewReader(os.Stdin)
}
s.getColumns()
s.outputRedirected = s.columns <= 0
return &s
}
// These names are from the Win32 api, so they use underscores (contrary to
// what golint suggests)
const (
focus_event = 0x0010
key_event = 0x0001
menu_event = 0x0008
mouse_event = 0x0002
window_buffer_size_event = 0x0004
)
type input_record struct {
eventType uint16
pad uint16
blob [16]byte
}
type key_event_record struct {
KeyDown int32
RepeatCount uint16
VirtualKeyCode uint16
VirtualScanCode uint16
Char int16
ControlKeyState uint32
}
// These names are from the Win32 api, so they use underscores (contrary to
// what golint suggests)
const (
vk_tab = 0x09
vk_prior = 0x21
vk_next = 0x22
vk_end = 0x23
vk_home = 0x24
vk_left = 0x25
vk_up = 0x26
vk_right = 0x27
vk_down = 0x28
vk_insert = 0x2d
vk_delete = 0x2e
vk_f1 = 0x70
vk_f2 = 0x71
vk_f3 = 0x72
vk_f4 = 0x73
vk_f5 = 0x74
vk_f6 = 0x75
vk_f7 = 0x76
vk_f8 = 0x77
vk_f9 = 0x78
vk_f10 = 0x79
vk_f11 = 0x7a
vk_f12 = 0x7b
bKey = 0x42
fKey = 0x46
yKey = 0x59
)
const (
shiftPressed = 0x0010
leftAltPressed = 0x0002
leftCtrlPressed = 0x0008
rightAltPressed = 0x0001
rightCtrlPressed = 0x0004
modKeys = shiftPressed | leftAltPressed | rightAltPressed | leftCtrlPressed | rightCtrlPressed
)
func (s *State) readNext() (interface{}, error) {
if s.repeat > 0 {
s.repeat--
return s.key, nil
}
var input input_record
pbuf := uintptr(unsafe.Pointer(&input))
var rv uint32
prv := uintptr(unsafe.Pointer(&rv))
for {
ok, _, err := procReadConsoleInput.Call(uintptr(s.handle), pbuf, 1, prv)
if ok == 0 {
return nil, err
}
if input.eventType == window_buffer_size_event {
xy := (*coord)(unsafe.Pointer(&input.blob[0]))
s.columns = int(xy.x)
return winch, nil
}
if input.eventType != key_event {
continue
}
ke := (*key_event_record)(unsafe.Pointer(&input.blob[0]))
if ke.KeyDown == 0 {
continue
}
if ke.VirtualKeyCode == vk_tab && ke.ControlKeyState&modKeys == shiftPressed {
s.key = shiftTab
} else if ke.VirtualKeyCode == bKey && (ke.ControlKeyState&modKeys == leftAltPressed ||
ke.ControlKeyState&modKeys == rightAltPressed) {
s.key = altB
} else if ke.VirtualKeyCode == fKey && (ke.ControlKeyState&modKeys == leftAltPressed ||
ke.ControlKeyState&modKeys == rightAltPressed) {
s.key = altF
} else if ke.VirtualKeyCode == yKey && (ke.ControlKeyState&modKeys == leftAltPressed ||
ke.ControlKeyState&modKeys == rightAltPressed) {
s.key = altY
} else if ke.Char > 0 {
s.key = rune(ke.Char)
} else {
switch ke.VirtualKeyCode {
case vk_prior:
s.key = pageUp
case vk_next:
s.key = pageDown
case vk_end:
s.key = end
case vk_home:
s.key = home
case vk_left:
s.key = left
if ke.ControlKeyState&(leftCtrlPressed|rightCtrlPressed) != 0 {
if ke.ControlKeyState&modKeys == ke.ControlKeyState&(leftCtrlPressed|rightCtrlPressed) {
s.key = wordLeft
}
}
case vk_right:
s.key = right
if ke.ControlKeyState&(leftCtrlPressed|rightCtrlPressed) != 0 {
if ke.ControlKeyState&modKeys == ke.ControlKeyState&(leftCtrlPressed|rightCtrlPressed) {
s.key = wordRight
}
}
case vk_up:
s.key = up
case vk_down:
s.key = down
case vk_insert:
s.key = insert
case vk_delete:
s.key = del
case vk_f1:
s.key = f1
case vk_f2:
s.key = f2
case vk_f3:
s.key = f3
case vk_f4:
s.key = f4
case vk_f5:
s.key = f5
case vk_f6:
s.key = f6
case vk_f7:
s.key = f7
case vk_f8:
s.key = f8
case vk_f9:
s.key = f9
case vk_f10:
s.key = f10
case vk_f11:
s.key = f11
case vk_f12:
s.key = f12
default:
// Eat modifier keys
// TODO: return Action(Unknown) if the key isn't a
// modifier.
continue
}
}
if ke.RepeatCount > 1 {
s.repeat = ke.RepeatCount - 1
}
return s.key, nil
}
return unknown, nil
}
// Close returns the terminal to its previous mode
func (s *State) Close() error {
s.origMode.ApplyMode()
return nil
}
func (s *State) startPrompt() {
if m, err := TerminalMode(); err == nil {
s.defaultMode = m.(inputMode)
mode := s.defaultMode
mode &^= enableProcessedInput
mode.ApplyMode()
}
}
func (s *State) restartPrompt() {
}
func (s *State) stopPrompt() {
s.defaultMode.ApplyMode()
}
// TerminalSupported returns true because line editing is always
// supported on Windows.
func TerminalSupported() bool {
return true
}
func (mode inputMode) ApplyMode() error {
hIn, _, err := procGetStdHandle.Call(uintptr(std_input_handle))
if hIn == invalid_handle_value || hIn == 0 {
return err
}
ok, _, err := procSetConsoleMode.Call(hIn, uintptr(mode))
if ok != 0 {
err = nil
}
return err
}
// TerminalMode returns the current terminal input mode as an InputModeSetter.
//
// This function is provided for convenience, and should
// not be necessary for most users of liner.
func TerminalMode() (ModeApplier, error) {
var mode inputMode
hIn, _, err := procGetStdHandle.Call(uintptr(std_input_handle))
if hIn == invalid_handle_value || hIn == 0 {
return nil, err
}
ok, _, err := procGetConsoleMode.Call(hIn, uintptr(unsafe.Pointer(&mode)))
if ok != 0 {
err = nil
}
return mode, err
}

1007
vendor/github.com/glycerine/liner/line.go generated vendored Normal file

File diff suppressed because it is too large

75
vendor/github.com/glycerine/liner/output.go generated vendored Normal file
View File

@@ -0,0 +1,75 @@
// +build linux darwin openbsd freebsd netbsd

package liner
import (
"fmt"
"os"
"strings"
"syscall"
"unsafe"
)
func (s *State) cursorPos(x int) {
if s.useCHA {
// 'G' is "Cursor Character Absolute (CHA)"
fmt.Printf("\x1b[%dG", x+1)
} else {
// 'C' is "Cursor Forward (CUF)"
fmt.Print("\r")
if x > 0 {
fmt.Printf("\x1b[%dC", x)
}
}
}
func (s *State) eraseLine() {
fmt.Print("\x1b[0K")
}
func (s *State) eraseScreen() {
fmt.Print("\x1b[H\x1b[2J")
}
func (s *State) moveUp(lines int) {
fmt.Printf("\x1b[%dA", lines)
}
func (s *State) moveDown(lines int) {
fmt.Printf("\x1b[%dB", lines)
}
func (s *State) emitNewLine() {
fmt.Print("\n")
}
type winSize struct {
row, col uint16
xpixel, ypixel uint16
}
func (s *State) getColumns() {
var ws winSize
_, _, errno := syscall.Syscall(syscall.SYS_IOCTL, uintptr(syscall.Stdout),
syscall.TIOCGWINSZ, uintptr(unsafe.Pointer(&ws)))
if errno != 0 || ws.col == 0 {
// the ioctl failed or reported no width; fall back to a sane default
s.columns = 80
return
}
s.columns = int(ws.col)
}
func (s *State) checkOutput() {
// xterm is known to support CHA
if strings.Contains(strings.ToLower(os.Getenv("TERM")), "xterm") {
s.useCHA = true
return
}
// The test for functional ANSI CHA is unreliable (e.g. the Windows
// telnet command does not support reading the cursor position with
// an ANSI DSR request, despite setting TERM=ansi).
// Assume CHA isn't supported (which should be safe, although it
// does result in occasional visible cursor jitter).
s.useCHA = false
}

72
vendor/github.com/glycerine/liner/output_windows.go generated vendored Normal file
View File

@@ -0,0 +1,72 @@
package liner
import (
"unsafe"
)
type coord struct {
x, y int16
}
type smallRect struct {
left, top, right, bottom int16
}
type consoleScreenBufferInfo struct {
dwSize coord
dwCursorPosition coord
wAttributes int16
srWindow smallRect
dwMaximumWindowSize coord
}
func (s *State) cursorPos(x int) {
var sbi consoleScreenBufferInfo
procGetConsoleScreenBufferInfo.Call(uintptr(s.hOut), uintptr(unsafe.Pointer(&sbi)))
procSetConsoleCursorPosition.Call(uintptr(s.hOut),
uintptr(int(x)&0xFFFF|int(sbi.dwCursorPosition.y)<<16))
}
func (s *State) eraseLine() {
var sbi consoleScreenBufferInfo
procGetConsoleScreenBufferInfo.Call(uintptr(s.hOut), uintptr(unsafe.Pointer(&sbi)))
var numWritten uint32
procFillConsoleOutputCharacter.Call(uintptr(s.hOut), uintptr(' '),
uintptr(sbi.dwSize.x-sbi.dwCursorPosition.x),
uintptr(int(sbi.dwCursorPosition.x)&0xFFFF|int(sbi.dwCursorPosition.y)<<16),
uintptr(unsafe.Pointer(&numWritten)))
}
func (s *State) eraseScreen() {
var sbi consoleScreenBufferInfo
procGetConsoleScreenBufferInfo.Call(uintptr(s.hOut), uintptr(unsafe.Pointer(&sbi)))
var numWritten uint32
procFillConsoleOutputCharacter.Call(uintptr(s.hOut), uintptr(' '),
uintptr(sbi.dwSize.x)*uintptr(sbi.dwSize.y),
0,
uintptr(unsafe.Pointer(&numWritten)))
procSetConsoleCursorPosition.Call(uintptr(s.hOut), 0)
}
func (s *State) moveUp(lines int) {
var sbi consoleScreenBufferInfo
procGetConsoleScreenBufferInfo.Call(uintptr(s.hOut), uintptr(unsafe.Pointer(&sbi)))
procSetConsoleCursorPosition.Call(uintptr(s.hOut),
uintptr(int(sbi.dwCursorPosition.x)&0xFFFF|(int(sbi.dwCursorPosition.y)-lines)<<16))
}
func (s *State) moveDown(lines int) {
var sbi consoleScreenBufferInfo
procGetConsoleScreenBufferInfo.Call(uintptr(s.hOut), uintptr(unsafe.Pointer(&sbi)))
procSetConsoleCursorPosition.Call(uintptr(s.hOut),
uintptr(int(sbi.dwCursorPosition.x)&0xFFFF|(int(sbi.dwCursorPosition.y)+lines)<<16))
}
func (s *State) emitNewLine() {
// windows doesn't need to emit a new line
}
func (s *State) getColumns() {
var sbi consoleScreenBufferInfo
procGetConsoleScreenBufferInfo.Call(uintptr(s.hOut), uintptr(unsafe.Pointer(&sbi)))
s.columns = int(sbi.dwSize.x)
}

12
vendor/github.com/glycerine/liner/signal.go generated vendored Normal file
View File

@@ -0,0 +1,12 @@
// +build go1.1,!windows

package liner
import (
"os"
"os/signal"
)
func stopSignal(c chan<- os.Signal) {
signal.Stop(c)
}

11
vendor/github.com/glycerine/liner/signal_legacy.go generated vendored Normal file
View File

@@ -0,0 +1,11 @@
// +build !go1.1,!windows

package liner
import (
"os"
)
func stopSignal(c chan<- os.Signal) {
// signal.Stop does not exist before Go 1.1
}

37
vendor/github.com/glycerine/liner/unixmode.go generated vendored Normal file
View File

@@ -0,0 +1,37 @@
// +build linux darwin freebsd openbsd netbsd

package liner
import (
"syscall"
"unsafe"
)
func (mode *termios) ApplyMode() error {
_, _, errno := syscall.Syscall(syscall.SYS_IOCTL, uintptr(syscall.Stdin), setTermios, uintptr(unsafe.Pointer(mode)))
if errno != 0 {
return errno
}
return nil
}
// TerminalMode returns the current terminal input mode as an InputModeSetter.
//
// This function is provided for convenience, and should
// not be necessary for most users of liner.
func TerminalMode() (ModeApplier, error) {
mode, errno := getMode(syscall.Stdin)
if errno != 0 {
return nil, errno
}
return mode, nil
}
func getMode(handle int) (*termios, syscall.Errno) {
var mode termios
_, _, errno := syscall.Syscall(syscall.SYS_IOCTL, uintptr(handle), getTermios, uintptr(unsafe.Pointer(&mode)))
return &mode, errno
}

79
vendor/github.com/glycerine/liner/width.go generated vendored Normal file
View File

@@ -0,0 +1,79 @@
package liner
import "unicode"
// These character classes are mostly zero width (when combined).
// A few might not be, depending on the user's font. Fixing this
// is non-trivial, given that some terminals don't support
// ANSI DSR/CPR (cursor position reporting).
var zeroWidth = []*unicode.RangeTable{
unicode.Mn,
unicode.Me,
unicode.Cc,
unicode.Cf,
}
var doubleWidth = []*unicode.RangeTable{
unicode.Han,
unicode.Hangul,
unicode.Hiragana,
unicode.Katakana,
}
// countGlyphs considers zero-width characters to be zero glyphs wide,
// and members of Chinese, Japanese, and Korean scripts to be 2 glyphs wide.
func countGlyphs(s []rune) int {
n := 0
for _, r := range s {
switch {
case unicode.IsOneOf(zeroWidth, r):
case unicode.IsOneOf(doubleWidth, r):
n += 2
default:
n++
}
}
return n
}
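// exampleCountGlyphs is an illustrative sketch added by the editor, not part
// of the upstream liner package. It shows how the width rules above combine;
// the literal strings are assumptions, not upstream test vectors.
func exampleCountGlyphs() int {
	ascii := countGlyphs([]rune("abc"))         // 3: plain ASCII, one glyph each
	cjk := countGlyphs([]rune("日本"))          // 4: Han runes count as double width
	combining := countGlyphs([]rune("e\u0301")) // 1: the combining accent is zero width
	return ascii + cjk + combining              // 8
}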
func countMultiLineGlyphs(s []rune, columns int, start int) int {
n := start
for _, r := range s {
switch {
case unicode.IsOneOf(zeroWidth, r):
case unicode.IsOneOf(doubleWidth, r):
n += 2
// no room for a 2-glyph-wide char at the end of the line,
// so skip a column and display it at the beginning of the next line
if n%columns == 1 {
n++
}
default:
n++
}
}
return n
}
func getPrefixGlyphs(s []rune, num int) []rune {
p := 0
for n := 0; n < num && p < len(s); p++ {
if !unicode.IsOneOf(zeroWidth, s[p]) {
n++
}
}
for p < len(s) && unicode.IsOneOf(zeroWidth, s[p]) {
p++
}
return s[:p]
}
func getSuffixGlyphs(s []rune, num int) []rune {
p := len(s)
for n := 0; n < num && p > 0; p-- {
if !unicode.IsOneOf(zeroWidth, s[p-1]) {
n++
}
}
return s[p:]
}

22
vendor/github.com/glycerine/zygomys/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,22 @@
Copyright (c) 2016, The zygomys authors.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

21
vendor/github.com/glycerine/zygomys/zygo/address.go generated vendored Normal file
View File

@@ -0,0 +1,21 @@
package zygo
type Address struct {
function *SexpFunction
position int
}
func (a Address) IsStackElem() {}
func (stack *Stack) PushAddr(function *SexpFunction, pc int) {
stack.Push(Address{function, pc})
}
func (stack *Stack) PopAddr() (*SexpFunction, int, error) {
elem, err := stack.Pop()
if err != nil {
return MissingFunction, 0, err
}
addr := elem.(Address)
return addr.function, addr.position, nil
}

221
vendor/github.com/glycerine/zygomys/zygo/arrayutils.go generated vendored Normal file
View File

@@ -0,0 +1,221 @@
package zygo
import "fmt"
func MapArray(env *Zlisp, fun *SexpFunction, arr *SexpArray) (Sexp, error) {
result := make([]Sexp, len(arr.Val))
var err error
var firstTyp *RegisteredType
for i := range arr.Val {
result[i], err = env.Apply(fun, arr.Val[i:i+1])
if err != nil {
return &SexpArray{Val: result, Typ: firstTyp, Env: env}, err
}
if firstTyp == nil {
firstTyp = result[i].Type()
}
}
return &SexpArray{Val: result, Typ: firstTyp, Env: env}, nil
}
func ConcatArray(arr *SexpArray, rest []Sexp) (Sexp, error) {
if arr == nil {
return SexpNull, fmt.Errorf("ConcatArray called with nil arr")
}
var res SexpArray
res.Val = arr.Val
for i, x := range rest {
switch t := x.(type) {
case *SexpArray:
res.Val = append(res.Val, t.Val...)
default:
return &res, fmt.Errorf("ConcatArray error: %d-th argument "+
"(0-based) is not an array", i)
}
}
return &res, nil
}
// (arrayidx ar [0 1])
func ArrayIndexFunction(env *Zlisp, name string, args []Sexp) (Sexp, error) {
Q("in ArrayIndexFunction, args = '%#v'", args)
narg := len(args)
if narg != 2 {
return SexpNull, WrongNargs
}
var err error
args, err = env.ResolveDotSym(args)
if err != nil {
return SexpNull, err
}
var ar *SexpArray
switch ar2 := args[0].(type) {
case *SexpArraySelector:
x, err := ar2.RHS(env)
if err != nil {
return SexpNull, err
}
switch xArr := x.(type) {
case *SexpArray:
ar = xArr
case *SexpHash:
return HashIndexFunction(env, name, []Sexp{xArr, args[1]})
default:
return SexpNull, fmt.Errorf("bad (arrayidx ar index) call: ar as arrayidx, but that did not resolve to an array, instead '%s'/type %T", x.SexpString(nil), x)
}
case *SexpArray:
ar = ar2
case *SexpHash:
return HashIndexFunction(env, name, args)
case *SexpHashSelector:
Q("ArrayIndexFunction sees args[0] is a hashSelector")
return HashIndexFunction(env, name, args)
default:
return SexpNull, fmt.Errorf("bad (arrayidx ar index) call: ar was not an array, instead '%s'/type %T",
args[0].SexpString(nil), args[0])
}
var idx *SexpArray
switch idx2 := args[1].(type) {
case *SexpArray:
idx = idx2
default:
return SexpNull, fmt.Errorf("bad (arrayidx ar index) call: index was not an array, instead '%s'/type %T",
args[1].SexpString(nil), args[1])
}
ret := SexpArraySelector{
Select: idx,
Container: ar,
}
return &ret, nil
}
// IndexBy subsets one array (possibly multidimensional) by another.
// e.g. if arr is [a b c] and idx is [0], we'll return a.
func (arr *SexpArray) IndexBy(idx *SexpArray) (Sexp, error) {
nIdx := len(idx.Val)
nTarget := arr.NumDim()
if nIdx > nTarget {
return SexpNull, fmt.Errorf("bad (arrayidx ar index) call: index requested %d dimensions, only have %d",
nIdx, nTarget)
}
if len(idx.Val) == 0 {
return SexpNull, fmt.Errorf("bad (arrayidx ar index) call: no index supplied")
}
if len(idx.Val) != 1 {
return SexpNull, fmt.Errorf("bad (arrayidx ar index) call: we only support a single index value atm")
}
i := 0
myInt, isInt := idx.Val[i].(*SexpInt)
if !isInt {
return SexpNull, fmt.Errorf("bad (arrayidx ar index) call: index with non-integer '%v'",
idx.Val[i].SexpString(nil))
}
k := myInt.Val
pos := k % int64(len(arr.Val))
if k < 0 {
mk := -k
mod := mk % int64(len(arr.Val))
pos = int64(len(arr.Val)) - mod
}
//Q("return pos %v", pos)
return arr.Val[pos], nil
}
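// exampleIndexBy is an illustrative sketch added by the editor, not upstream
// code. It shows the wrap-around indexing implemented by IndexBy above: a
// negative position counts back from the end of the array.
func exampleIndexBy(env *Zlisp) (Sexp, error) {
	arr := env.NewSexpArray([]Sexp{&SexpInt{Val: 10}, &SexpInt{Val: 20}, &SexpInt{Val: 30}})
	idx := env.NewSexpArray([]Sexp{&SexpInt{Val: -1}})
	return arr.IndexBy(idx) // yields the 30 element: -1 wraps to the last position
}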
func (arr *SexpArray) NumDim() int {
return 1
}
// SexpArraySelector selects a subset of an array.
// It can hold a multidimensional index/slice, and
// hence knows its container and its position(s),
// so it is able to read and write that position as
// needed.
type SexpArraySelector struct {
Select *SexpArray
Container *SexpArray
}
func (si *SexpArraySelector) SexpString(ps *PrintState) string {
Q("in SexpArraySelector.SexpString(), si.Container.Env = %p", si.Container.Env)
rhs, err := si.RHS(si.Container.Env)
if err != nil {
return fmt.Sprintf("(arraySelector %v %v)", si.Container.SexpString(ps), si.Select.SexpString(ps))
}
Q("in SexpArraySelector.SexpString(), rhs = %v", rhs.SexpString(ps))
Q("in SexpArraySelector.SexpString(), si.Container = %v", si.Container.SexpString(ps))
Q("in SexpArraySelector.SexpString(), si.Select = %v", si.Select.SexpString(ps))
return fmt.Sprintf("%v /*(arraySelector %v %v)*/", rhs.SexpString(ps), si.Container.SexpString(ps), si.Select.SexpString(ps))
}
// Type returns the type of the value.
func (si *SexpArraySelector) Type() *RegisteredType {
return GoStructRegistry.Lookup("arraySelector")
}
// RHS applies the selector to the container and returns
// the value obtained.
func (x *SexpArraySelector) RHS(env *Zlisp) (Sexp, error) {
if len(x.Select.Val) != 1 {
return SexpNull, fmt.Errorf("SexpArraySelector: only " +
"size 1 selectors implemented")
}
var i int64
switch asInt := x.Select.Val[0].(type) {
case *SexpInt:
i = asInt.Val
default:
return SexpNull, fmt.Errorf("SexpArraySelector: int "+
"selector required; we saw %T", x.Select.Val[0])
}
if i < 0 {
return SexpNull, fmt.Errorf("SexpArraySelector: negative "+
"indexes not supported; we saw %v", i)
}
if i >= int64(len(x.Container.Val)) {
return SexpNull, fmt.Errorf("SexpArraySelector: index "+
"%v is out-of-bounds; length is %v", i, len(x.Container.Val))
}
ret := x.Container.Val[i]
Q("arraySelector returning ret = %#v", ret)
return ret, nil
}
// Selector stores indexing information that isn't
// yet materialized for getting or setting.
//
type Selector interface {
// RHS (right-hand-side) is used to dereference
// the pointer-like Selector, yielding a value suitable for the
// right-hand-side of an assignment statement.
//
RHS(env *Zlisp) (Sexp, error)
// AssignToSelection sets the selection to rhs
// The selected elements are the left-hand-side of the
// assignment *lhs = rhs
AssignToSelection(env *Zlisp, rhs Sexp) error
}
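// exampleSelectorRoundTrip is an illustrative sketch added by the editor, not
// upstream code. It exercises the Selector contract above with any concrete
// implementation (e.g. *SexpArraySelector from this file): write through the
// selection, then read the same position back.
func exampleSelectorRoundTrip(env *Zlisp, sel Selector, rhs Sexp) (Sexp, error) {
	if err := sel.AssignToSelection(env, rhs); err != nil { // *lhs = rhs
		return SexpNull, err
	}
	return sel.RHS(env) // dereference: should now yield rhs
}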
func (x *SexpArraySelector) AssignToSelection(env *Zlisp, rhs Sexp) error {
_, err := x.RHS(x.Container.Env) // check for errors
if err != nil {
return err
}
x.Container.Val[x.Select.Val[0].(*SexpInt).Val] = rhs
return nil
}
func (env *Zlisp) NewSexpArray(arr []Sexp) *SexpArray {
return &SexpArray{Val: arr, Env: env}
}

View File

@@ -0,0 +1 @@
package zygo

26
vendor/github.com/glycerine/zygomys/zygo/blake2.go generated vendored Normal file
View File

@@ -0,0 +1,26 @@
package zygo
import (
"encoding/binary"
"github.com/glycerine/blake2b"
)
// Blake2bUint64 returns an 8 byte BLAKE2b cryptographic
// hash of the raw.
//
// we're using the pure go: https://github.com/dchest/blake2b
//
// but the C-wrapped reference may be helpful as well --
//
// reference: https://godoc.org/github.com/codahale/blake2
// reference: https://blake2.net/
// reference: https://tools.ietf.org/html/rfc7693
//
func Blake2bUint64(raw []byte) uint64 {
cfg := &blake2b.Config{Size: 8}
h, err := blake2b.New(cfg)
panicOn(err)
h.Write(raw)
by := h.Sum(nil)
return binary.LittleEndian.Uint64(by[:8])
}
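// exampleBlake2bKey is an illustrative sketch added by the editor, not
// upstream code: it derives a compact uint64 key from a byte payload using
// Blake2bUint64 above. The payload literal is an assumption.
func exampleBlake2bKey() uint64 {
	return Blake2bUint64([]byte("hello zygo")) // first 8 digest bytes, little-endian
}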

128
vendor/github.com/glycerine/zygomys/zygo/bsave.go generated vendored Normal file
View File

@@ -0,0 +1,128 @@
package zygo
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"github.com/glycerine/greenpack/msgp"
)
// (bsave value path) writes value as greenpack to file.
//
// (greenpack value) writes value as greenpack to SexpRaw in memory.
//
// bsave converts to binary with (togo) then saves the binary to file.
func WriteShadowGreenpackToFileFunction(env *Zlisp, name string, args []Sexp) (Sexp, error) {
narg := len(args)
if narg < 1 || narg > 2 {
return SexpNull, WrongNargs
}
// check arg[0]
var asHash *SexpHash
switch x := args[0].(type) {
default:
return SexpNull, fmt.Errorf("%s error: top value must be a hash or defmap; we see '%T'", name, args[0])
case *SexpHash:
// okay, good
asHash = x
}
switch name {
case "bsave":
if narg != 2 {
return SexpNull, WrongNargs
}
case "greenpack":
if narg != 1 {
return SexpNull, WrongNargs
}
var buf bytes.Buffer
_, err := toGreenpackHelper(env, asHash, &buf, "memory")
if err != nil {
return SexpNull, err
}
return &SexpRaw{Val: buf.Bytes()}, nil
}
// check arg[1]
var fn string
switch fna := args[1].(type) {
case *SexpStr:
fn = fna.S
default:
return SexpNull, fmt.Errorf("error: %s requires a string (SexpStr) path to write to as the second argument. we got type %T / value = %v", name, args[1], args[1])
}
// don't overwrite existing file
if FileExists(fn) {
return SexpNull, fmt.Errorf("error: %s refusing to write to existing file '%s'",
name, fn)
}
f, err := os.Create(fn)
if err != nil {
return SexpNull, fmt.Errorf("error: %s sees error trying to create file '%s': '%v'", name, fn, err)
}
defer f.Close()
_, err = toGreenpackHelper(env, asHash, f, fn)
return SexpNull, err
}
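// Editor note (hedged, not upstream documentation): at the zygo level the two
// entry points above are meant to be called roughly like this; the record
// name and path are illustrative assumptions:
//
//	(bsave myRecord "out.greenpack")
//	(greenpack myRecord)
//
// bsave refuses to overwrite an existing file, so the path must be new.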
func toGreenpackHelper(env *Zlisp, asHash *SexpHash, f io.Writer, fn string) (Sexp, error) {
// create shadow structs
_, err := ToGoFunction(env, "togo", []Sexp{asHash})
if err != nil {
return SexpNull, fmt.Errorf("ToGo call sees error: '%v'", err)
}
if asHash.GoShadowStruct == nil {
return SexpNull, fmt.Errorf("GoShadowStruct was nil, on attempt to write to '%s'", fn)
}
enc, ok := interface{}(asHash.GoShadowStruct).(msgp.Encodable)
if !ok {
return SexpNull, fmt.Errorf("error: GoShadowStruct was not greenpack Encodable -- run `go generate` or add greenpack to the source file for type '%T'. on attempt to save to '%s'", asHash.GoShadowStruct, fn)
}
w := msgp.NewWriter(f)
err = msgp.Encode(w, enc)
if err != nil {
return SexpNull, fmt.Errorf("error: greenpack encoding to file '%s' of type '%T' sees error '%v'", fn, asHash.GoShadowStruct, err)
}
err = w.Flush()
return SexpNull, err
}
func ReadGreenpackFromFileFunction(env *Zlisp, name string, args []Sexp) (Sexp, error) {
narg := len(args)
if narg != 1 {
return SexpNull, WrongNargs
}
var fn string
switch fna := args[0].(type) {
case *SexpStr:
fn = fna.S
default:
return SexpNull, fmt.Errorf("%s requires a string path to read. we got type %T / value = %v", name, args[0], args[0])
}
if !FileExists(string(fn)) {
return SexpNull, fmt.Errorf("file '%s' does not exist", fn)
}
f, err := os.Open(fn)
if err != nil {
return SexpNull, err
}
defer f.Close()
by, err := ioutil.ReadAll(f)
if err != nil {
return SexpNull, err
}
return MsgpackToSexp(by, env)
}

879
vendor/github.com/glycerine/zygomys/zygo/builders.go generated vendored Normal file
View File

@@ -0,0 +1,879 @@
package zygo
import (
"fmt"
"reflect"
"strings"
"time"
)
// package.go: declare package, structs, function types
// A builder is a special kind of function. Like
// a macro it receives the un-evaluated tree
// of symbols from its caller. A builder
// can therefore be used to build new types
// and to declare new functions/methods.
//
// Like a function, a builder is called at
// run/evaluation time, not at definition time.
//
// Since it receives an un-evaluated tree of
// symbols, a builder must manually evaluate
// any arguments it wishes to find bindings for.
//
// The primary use here is to be able to define
// packages, structs, interfaces, functions,
// methods, and type aliases.
//
func (env *Zlisp) ImportPackageBuilder() {
env.AddBuilder("infixExpand", InfixBuilder)
env.AddBuilder("infix", InfixBuilder)
env.AddBuilder(":", ColonAccessBuilder)
env.AddBuilder("sys", SystemBuilder)
env.AddBuilder("struct", StructBuilder)
env.AddBuilder("func", FuncBuilder)
env.AddBuilder("method", FuncBuilder)
env.AddBuilder("interface", InterfaceBuilder)
//env.AddBuilder("package", PackageBuilder)
//env.AddBuilder("import", ImportBuilder)
env.AddBuilder("var", VarBuilder)
env.AddBuilder("expectError", ExpectErrorBuilder)
env.AddBuilder("comma", CommaBuilder)
// env.AddBuilder("&", AddressOfBuilder)
env.AddBuilder("import", ImportPackageBuilder)
env.AddFunction("sliceOf", SliceOfFunction)
env.AddFunction("ptr", PointerToFunction)
}
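// exampleGreetBuilder is an illustrative sketch added by the editor, not part
// of upstream zygomys. It shows the builder calling convention described
// above: arguments arrive unevaluated, so the builder evaluates what it needs
// by hand (here via EvalFunction). The "greet" name is an assumption; a caller
// would wire it in with env.AddBuilder("greet", exampleGreetBuilder).
func exampleGreetBuilder(env *Zlisp, name string, args []Sexp) (Sexp, error) {
	if len(args) != 1 {
		return SexpNull, WrongNargs
	}
	// Builders receive the raw symbol tree; evaluate the one argument we use.
	ev, err := EvalFunction(env, "evalGreet", args[0:1])
	if err != nil {
		return SexpNull, err
	}
	return &SexpStr{S: "hello " + ev.SexpString(nil)}, nil
}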
var sxSliceOf *SexpFunction = MakeUserFunction("sliceOf", SliceOfFunction)
var sxArrayOf *SexpFunction = MakeUserFunction("arrayOf", ArrayOfFunction)
type SexpUserVarDefn struct {
Name string
}
type RecordDefn struct {
Name string
Fields []*SexpField
FieldType map[string]*RegisteredType
}
func NewRecordDefn() *RecordDefn {
return &RecordDefn{
FieldType: make(map[string]*RegisteredType),
}
}
func (r *RecordDefn) SetName(name string) {
r.Name = name
}
func (r *RecordDefn) SetFields(flds []*SexpField) {
r.Fields = flds
for _, f := range flds {
g := (*SexpHash)(f)
rt, err := g.HashGet(nil, f.KeyOrder[0])
panicOn(err)
r.FieldType[g.KeyOrder[0].(*SexpSymbol).name] = rt.(*RegisteredType)
}
}
func (p *RecordDefn) Type() *RegisteredType {
rt := GoStructRegistry.Registry[p.Name]
//Q("RecordDefn) Type() sees rt = %v", rt)
return rt
}
// pretty print a struct
func (p *RecordDefn) SexpString(ps *PrintState) string {
Q("RecordDefn::SexpString() called!")
if len(p.Fields) == 0 {
return fmt.Sprintf("(struct %s)", p.Name)
}
s := fmt.Sprintf("(struct %s [\n", p.Name)
w := make([][]int, len(p.Fields))
maxnfield := 0
for i, f := range p.Fields {
w[i] = f.FieldWidths()
Q("w[i=%v] = %v", i, w[i])
maxnfield = maxi(maxnfield, len(w[i]))
}
biggestCol := make([]int, maxnfield)
Q("\n")
for j := 0; j < maxnfield; j++ {
for i := range p.Fields {
Q("i= %v, j=%v, len(w[i])=%v check=%v", i, j, len(w[i]), len(w[i]) < j)
if j < len(w[i]) {
biggestCol[j] = maxi(biggestCol[j], w[i][j]+1)
}
}
}
Q("RecordDefn::SexpString(): maxnfield = %v, out of %v", maxnfield, len(p.Fields))
Q("RecordDefn::SexpString(): biggestCol = %#v", biggestCol)
// computing padding
// x
// xx xx
// xxxxxxx x
// xxx x x x
//
// becomes
//
// x
// xx xx
// xxxxxxx
// xxx x x x
Q("pad = %#v", biggestCol)
for _, f := range p.Fields {
s += " " + f.AlignString(biggestCol) + "\n"
}
s += " ])\n"
return s
}
func maxi(a, b int) int {
if a > b {
return a
}
return b
}
type SexpField SexpHash
func (r SexpField) Type() *RegisteredType {
return r.GoStructFactory
}
// compute key and value widths to assist alignment
func (f *SexpField) FieldWidths() []int {
hash := (*SexpHash)(f)
wide := []int{}
for _, key := range hash.KeyOrder {
val, err := hash.HashGet(nil, key)
str := ""
if err == nil {
switch s := key.(type) {
case *SexpStr:
str += s.S + ":"
case *SexpSymbol:
str += s.name + ":"
default:
str += key.SexpString(nil) + ":"
}
wide = append(wide, len(str))
wide = append(wide, len(val.SexpString(nil))+1)
} else {
panic(err)
}
}
return wide
}
func (f *SexpField) AlignString(pad []int) string {
hash := (*SexpHash)(f)
str := " (" + hash.TypeName + " "
spc := " "
for i, key := range hash.KeyOrder {
val, err := hash.HashGet(nil, key)
r := ""
if err == nil {
switch s := key.(type) {
case *SexpStr:
r += s.S + ":"
case *SexpSymbol:
r += s.name + ":"
default:
r += key.SexpString(nil) + ":"
}
xtra := pad[i*2] - len(r)
if xtra < 0 {
panic(fmt.Sprintf("xtra = %d, pad[i=%v]=%v, len(r)=%v (r=%v)", xtra, i, pad[i], len(r), r))
}
leftpad := strings.Repeat(" ", xtra)
vs := val.SexpString(nil)
rightpad := strings.Repeat(" ", pad[(i*2)+1]-len(vs))
if i == 0 {
spc = " "
} else {
spc = ""
}
r = leftpad + r + spc + vs + rightpad
} else {
panic(err)
}
str += r
}
if len(hash.Map) > 0 {
return str[:len(str)-1] + ")"
}
return str + ")"
}
func (f *SexpField) SexpString(ps *PrintState) string {
hash := (*SexpHash)(f)
str := " (" + hash.TypeName + " "
for i, key := range hash.KeyOrder {
val, err := hash.HashGet(nil, key)
if err == nil {
switch s := key.(type) {
case *SexpStr:
str += s.S + ":"
case *SexpSymbol:
str += s.name + ":"
default:
str += key.SexpString(nil) + ":"
}
if i > 0 {
str += val.SexpString(nil) + " "
} else {
str += val.SexpString(nil) + " "
}
} else {
panic(err)
}
}
if len(hash.Map) > 0 {
return str[:len(str)-1] + ")"
}
return str + ")"
}
func StructBuilder(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
n := len(args)
if n < 1 {
return SexpNull, fmt.Errorf("struct name is missing. use: " +
"(struct struct-name ...)\n")
}
Q("in struct builder, name = '%s', args = ", name)
for i := range args {
Q("args[%v] = '%s' of type %T", i, args[i].SexpString(nil), args[i])
}
var symN *SexpSymbol
switch b := args[0].(type) {
case *SexpSymbol:
symN = b
case *SexpPair:
sy, isQuo := isQuotedSymbol(b)
if isQuo {
symN = sy.(*SexpSymbol)
} else {
return SexpNull, fmt.Errorf("bad struct name: symbol required")
}
default:
return SexpNull, fmt.Errorf("bad struct name: symbol required")
}
Q("good: have struct name '%v'", symN)
env.datastack.PushExpr(SexpNull)
structName := symN.name
{
// begin enable recursion -- add ourselves to the env early, then
// update later, so that structs can refer to themselves.
udsR := NewRecordDefn()
udsR.SetName(structName)
rtR := NewRegisteredType(func(env *Zlisp, h *SexpHash) (interface{}, error) {
return udsR, nil
})
rtR.UserStructDefn = udsR
rtR.DisplayAs = structName
GoStructRegistry.RegisterUserdef(rtR, false, structName)
// overwrite any existing definition, deliberately ignore any error,
// as there may not be a prior definition present at all.
env.linearstack.DeleteSymbolFromTopOfStackScope(symN)
err := env.LexicalBindSymbol(symN, rtR)
if err != nil {
return SexpNull, fmt.Errorf("struct builder could not bind symbol '%s': '%v'",
structName, err)
}
// end enable recursion
}
var xar []Sexp
var flat []*SexpField
if n > 2 {
return SexpNull, fmt.Errorf("bad struct declaration: more than two arguments." +
"prototype is (struct name [(field ...)*] )")
}
if n == 2 {
Q("in case n == 2")
switch ar := args[1].(type) {
default:
return SexpNull, fmt.Errorf("bad struct declaration '%v': second argument "+
"must be a slice of fields."+
" prototype is (struct name [(field ...)*] )", structName)
case *SexpArray:
arr := ar.Val
if len(arr) == 0 {
// allow this
} else {
// dup to avoid messing with the stack on eval:
//dup := env.Duplicate()
for i, ele := range arr {
Q("about to eval i=%v", i)
//ev, err := dup.EvalExpressions([]Sexp{ele})
ev, err := EvalFunction(env, "evalStructBuilder", []Sexp{ele})
Q("done with eval i=%v. ev=%v", i, ev.SexpString(nil))
if err != nil {
return SexpNull, fmt.Errorf("bad struct declaration '%v': bad "+
"field at array entry %v; error was '%v'", structName, i, err)
}
Q("checking for isHash at i=%v", i)
asHash, isHash := ev.(*SexpField)
if !isHash {
Q("was not hash, instead was %T", ev)
return SexpNull, fmt.Errorf("bad struct declaration '%v': bad "+
"field array at entry %v; a (field ...) is required. Instead saw '%T'/with value = '%v'",
structName, i, ev, ev.SexpString(nil))
}
Q("good eval i=%v, ev=%#v / %v", i, ev, ev.SexpString(nil))
ko := asHash.KeyOrder
if len(ko) == 0 {
return SexpNull, fmt.Errorf("bad struct declaration '%v': bad "+
"field array at entry %v; field had no name",
structName, i)
}
Q("ko = '%#v'", ko)
first := ko[0]
Q("first = '%#v'", first)
xar = append(xar, first)
xar = append(xar, ev)
flat = append(flat, ev.(*SexpField))
}
Q("no err from EvalExpressions, got xar = '%#v'", xar)
}
}
} // end n == 2
uds := NewRecordDefn()
uds.SetName(structName)
uds.SetFields(flat)
Q("good: made typeDefnHash: '%s'", uds.SexpString(nil))
rt := NewRegisteredType(func(env *Zlisp, h *SexpHash) (interface{}, error) {
return uds, nil
})
rt.UserStructDefn = uds
rt.DisplayAs = structName
GoStructRegistry.RegisterUserdef(rt, false, structName)
Q("good: registered new userdefined struct '%s'", structName)
// replace our recursive-reference-enabling symbol with the real one.
err := env.linearstack.DeleteSymbolFromTopOfStackScope(symN)
if err != nil {
return SexpNull, fmt.Errorf("internal error: should have already had symbol '%s' "+
"bound, but DeleteSymbolFromTopOfStackScope returned error: '%v'",
symN.name, err)
}
err = env.LexicalBindSymbol(symN, rt)
if err != nil {
return SexpNull, fmt.Errorf("late: struct builder could not bind symbol '%s': '%v'",
structName, err)
}
Q("good: bound symbol '%s' to RegisteredType '%s'", symN.SexpString(nil), rt.SexpString(nil))
return rt, nil
}
func InterfaceBuilder(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
nargs := len(args)
switch {
case nargs < 1:
return SexpNull, fmt.Errorf("interface name is missing. use: " +
"(interface interface-name [...])\n")
case nargs == 1:
return SexpNull, fmt.Errorf("interface array of methods missing. use: " +
"(interface interface-name [...])\n")
case nargs > 2:
return SexpNull, WrongNargs
}
// P("in interface builder, past arg check")
var iname string
var symN *SexpSymbol
switch sy := args[0].(type) {
case *SexpSymbol:
symN = sy
iname = sy.name
default:
return SexpNull, fmt.Errorf("interface name must be a symbol; we got %T", args[0])
}
// sanity check the name
builtin, builtTyp := env.IsBuiltinSym(symN)
if builtin {
return SexpNull,
fmt.Errorf("already have %s '%s', refusing to overwrite with interface",
builtTyp, symN.name)
}
if env.HasMacro(symN) {
return SexpNull, fmt.Errorf("Already have macro named '%s': refusing"+
" to define interface of same name.", symN.name)
}
// end sanity check the name
var arrMeth *SexpArray
switch ar := args[1].(type) {
case *SexpArray:
arrMeth = ar
default:
return SexpNull, fmt.Errorf("interface method vector expected after name; we got %T", args[1])
}
// P("in interface builder, args = ")
// for i := range args {
// P("args[%v] = '%s'", i, args[i].SexpString(nil))
// }
methods := make([]*SexpFunction, 0)
methodSlice := arrMeth.Val
if len(methodSlice) > 0 {
//dup := env.Duplicate()
for i := range methodSlice {
//ev, err := dup.EvalExpressions([]Sexp{methodSlice[i]})
ev, err := EvalFunction(env, "evalInterface", []Sexp{methodSlice[i]})
if err != nil {
return SexpNull, fmt.Errorf("error parsing the %v-th method in interface definition: '%v'", i, err)
}
me, gotFunc := ev.(*SexpFunction)
if !gotFunc {
return SexpNull,
fmt.Errorf("error parsing the %v-th method in interface: was not function but rather %T",
i, ev)
}
methods = append(methods, me)
}
}
decl := &SexpInterfaceDecl{
name: iname,
methods: methods,
}
return decl, nil
}
func SliceOfFunction(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
if len(args) != 1 {
return SexpNull, fmt.Errorf("argument x to (%s x) is missing. use: "+
"(%s a-regtype)\n", name, name)
}
Q("in SliceOfFunction")
var rt *RegisteredType
switch arg := args[0].(type) {
case *RegisteredType:
rt = arg
case *SexpHash:
rt = arg.GoStructFactory
default:
return SexpNull, fmt.Errorf("argument tx in (%s x) was not regtype, "+
"instead type %T displaying as '%v' ",
name, arg, arg.SexpString(nil))
}
Q("sliceOf arg = '%s' with type %T", args[0].SexpString(nil), args[0])
sliceRt := GoStructRegistry.GetOrCreateSliceType(rt)
Q("in SliceOfFunction: returning sliceRt = '%#v'", sliceRt)
return sliceRt, nil
}
func PointerToFunction(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
if len(args) != 1 {
return SexpNull, fmt.Errorf("argument to pointer-to is missing. use: "+
"(%s a-regtype)\n", name)
}
//P("in PointerToFunction(): args[0] = '%#v'", args[0])
var rt *RegisteredType
switch arg := args[0].(type) {
case *RegisteredType:
rt = arg
case *SexpHash:
rt = arg.GoStructFactory
case *SexpPointer:
// dereference operation, rather than type declaration
//P("dereference operation on *SexpPointer detected, returning target")
if arg == nil || arg.Target == nil {
return SexpNull, fmt.Errorf("illegal to dereference nil pointer")
}
return arg.Target, nil
case *SexpReflect:
Q("dereference operation on SexpReflect detected")
// TODO what goes here?
return SexpNull, fmt.Errorf("illegal to dereference nil pointer")
case *SexpSymbol:
if arg.isDot {
// (* h.a) dereferencing a dot symbol
resolved, err := dotGetSetHelper(env, arg.name, nil)
if err != nil {
return nil, err
}
return resolved, nil
} else {
panic("TODO: what goes here, for (* sym) where sym is a regular symbol")
}
default:
return SexpNull, fmt.Errorf("argument x in (%s x) was not regtype or SexpPointer, "+
"instead type %T displaying as '%v' ",
name, arg, arg.SexpString(nil))
}
Q("pointer-to arg = '%s' with type %T", args[0].SexpString(nil), args[0])
ptrRt := GoStructRegistry.GetOrCreatePointerType(rt)
return ptrRt, nil
}
func StructConstructorFunction(env *Zlisp, name string, args []Sexp) (Sexp, error) {
Q("in struct ctor, name = '%s', args = %#v", name, args)
return MakeHash(args, name, env)
}
func BaseTypeConstructorFunction(env *Zlisp, name string, args []Sexp) (Sexp, error) {
Q("in base type ctor, args = '%#v'", args)
if len(args) < 1 {
return SexpNull, WrongNargs
}
Q("in base ctor, name = '%s', args = %#v", name, args)
return SexpNull, nil
}
func baseConstruct(env *Zlisp, f *RegisteredType, nargs int) (Sexp, error) {
if nargs > 1 {
return SexpNull, fmt.Errorf("%d is too many arguments for a base type constructor", nargs)
}
v, err := f.Factory(env, nil)
if err != nil {
return SexpNull, err
}
Q("see call to baseConstruct, v = %v/type=%T", v, v)
if nargs == 0 {
switch v.(type) {
case *int, *uint8, *uint16, *uint32, *uint64, *int8, *int16, *int32, *int64:
return &SexpInt{}, nil
case *float32, *float64:
return &SexpFloat{}, nil
case *string:
return &SexpStr{S: ""}, nil
case *bool:
return &SexpBool{Val: false}, nil
case *time.Time:
return &SexpTime{}, nil
default:
return SexpNull, fmt.Errorf("unhandled no-arg case in baseConstruct, v has type=%T", v)
}
}
// get our one argument
args, err := env.datastack.PopExpressions(1)
if err != nil {
return SexpNull, err
}
arg := args[0]
switch v.(type) {
case *int, *uint8, *uint16, *uint32, *uint64, *int8, *int16, *int32, *int64:
myint, ok := arg.(*SexpInt)
if !ok {
return SexpNull, fmt.Errorf("cannot convert %T to int", arg)
}
return myint, nil
case *float32, *float64:
myfloat, ok := arg.(*SexpFloat)
if !ok {
return SexpNull, fmt.Errorf("cannot convert %T to float", arg)
}
return myfloat, nil
case *string:
mystring, ok := arg.(*SexpStr)
if !ok {
return SexpNull, fmt.Errorf("cannot convert %T to string", arg)
}
return mystring, nil
case *bool:
mybool, ok := arg.(*SexpBool)
if !ok {
return SexpNull, fmt.Errorf("cannot convert %T to bool", arg)
}
return mybool, nil
default:
return SexpNull, fmt.Errorf("unhandled case in baseConstruct, arg = %#v/type=%T", arg, arg)
}
//return SexpNull, fmt.Errorf("unhandled no-arg case in baseConstruct, v has type=%T", v)
}
// generate fixed size array
func ArrayOfFunction(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
if len(args) != 2 {
return SexpNull, fmt.Errorf("insufficient arguments to ([size] regtype) array constructor. use: " +
"([size...] a-regtype)\n")
}
sz := 0
Q("args = %#v in ArrayOfFunction", args)
switch ar := args[1].(type) {
case *SexpArray:
if len(ar.Val) == 0 {
return SexpNull, fmt.Errorf("at least one size must be specified in array constructor; e.g. ([size ...] regtype)")
}
asInt, isInt := ar.Val[0].(*SexpInt)
if !isInt {
return SexpNull, fmt.Errorf("size must be an int (not %T) in array constructor; e.g. ([size ...] regtype)", ar.Val[0])
}
sz = int(asInt.Val)
// TODO: implement multiple dimensional arrays (matrixes etc).
default:
return SexpNull, fmt.Errorf("at least one size must be specified in array constructor; e.g. ([size ...] regtype)")
}
var rt *RegisteredType
switch arg := args[0].(type) {
case *RegisteredType:
rt = arg
case *SexpHash:
rt = arg.GoStructFactory
default:
return SexpNull, fmt.Errorf("argument tx in (%s x) was not regtype, "+
"instead type %T displaying as '%v' ",
name, arg, arg.SexpString(nil))
}
//Q("arrayOf arg = '%s' with type %T", args[0].SexpString(nil), args[0])
derivedType := reflect.ArrayOf(sz, rt.TypeCache)
arrayRt := NewRegisteredType(func(env *Zlisp, h *SexpHash) (interface{}, error) {
return reflect.New(derivedType), nil
})
arrayRt.DisplayAs = fmt.Sprintf("(%s %s)", name, rt.DisplayAs)
arrayName := "arrayOf" + rt.RegisteredName
GoStructRegistry.RegisterUserdef(arrayRt, false, arrayName)
return arrayRt, nil
}
func VarBuilder(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
n := len(args)
if n != 2 {
return SexpNull, fmt.Errorf("var name/type missing. use: " +
"(var name regtype)\n")
}
Q("in var builder, name = '%s', args = ", name)
for i := range args {
Q("args[%v] = '%s' of type %T", i, args[i].SexpString(nil), args[i])
}
var symN *SexpSymbol
switch b := args[0].(type) {
case *SexpSymbol:
symN = b
case *SexpPair:
sy, isQuo := isQuotedSymbol(b)
if isQuo {
symN = sy.(*SexpSymbol)
} else {
return SexpNull, fmt.Errorf("bad var name: symbol required")
}
default:
return SexpNull, fmt.Errorf("bad var name: symbol required")
}
Q("good: have var name '%v'", symN)
//dup := env.Duplicate()
Q("about to eval args[1]=%v", args[1])
//ev, err := dup.EvalExpressions(args[1:2])
ev, err := EvalFunction(env, "evalVar", args[1:2])
Q("done with eval, ev=%v / type %T", ev.SexpString(nil), ev)
if err != nil {
return SexpNull, fmt.Errorf("bad var declaration, problem with type '%v': %v", args[1].SexpString(nil), err)
}
var rt *RegisteredType
switch myrt := ev.(type) {
case *RegisteredType:
rt = myrt
default:
return SexpNull, fmt.Errorf("bad var declaration, type '%v' is unknown", rt.SexpString(nil))
}
val, err := rt.Factory(env, nil)
if err != nil {
return SexpNull, fmt.Errorf("var declaration error: could not make type '%s': %v",
rt.SexpString(nil), err)
}
var valSexp Sexp
Q("val is of type %T", val)
switch v := val.(type) {
case Sexp:
valSexp = v
case reflect.Value:
Q("v is of type %T", v.Interface())
switch rd := v.Interface().(type) {
case ***RecordDefn:
Q("we have RecordDefn rd = %#v", *rd)
}
valSexp = &SexpReflect{Val: reflect.ValueOf(v)}
default:
valSexp = &SexpReflect{Val: reflect.ValueOf(v)}
}
Q("var decl: valSexp is '%v'", valSexp.SexpString(nil))
err = env.LexicalBindSymbol(symN, valSexp)
if err != nil {
return SexpNull, fmt.Errorf("var declaration error: could not bind symbol '%s': %v",
symN.name, err)
}
Q("good: var decl bound symbol '%s' to '%s' of type '%s'", symN.SexpString(nil), valSexp.SexpString(nil), rt.SexpString(nil))
env.datastack.PushExpr(valSexp)
return SexpNull, nil
}
func ExpectErrorBuilder(env *Zlisp, name string, args []Sexp) (Sexp, error) {
narg := len(args)
if narg != 2 {
return SexpNull, WrongNargs
}
dup := env.Duplicate()
es, err := dup.EvalExpressions(args[0:1])
if err != nil {
return SexpNull, fmt.Errorf("error evaluating the error string to expect: %s", err)
}
var expectedError *SexpStr
switch e := es.(type) {
case *SexpStr:
expectedError = e
default:
return SexpNull, fmt.Errorf("first arg to expectError must be the string of the error to expect")
}
Q("expectedError = %v", expectedError)
ev, err := dup.EvalExpressions(args[1:2])
Q("done with eval, ev=%v / type %T. err = %v", ev.SexpString(nil), ev, err)
if err != nil {
if err.Error() == expectedError.S {
return SexpNull, nil
}
return SexpNull, fmt.Errorf("expectError expected '%s' but saw '%s'", expectedError.S, err)
}
if expectedError.S == "" {
return SexpNull, nil
}
return SexpNull, fmt.Errorf("expectError expected '%s' but got no error", expectedError.S)
}
func ColonAccessBuilder(env *Zlisp, name string, args []Sexp) (Sexp, error) {
if len(args) < 1 || len(args) > 3 {
return SexpNull, WrongNargs
}
////Q("ColonAccessBuilder, args = %#v", args)
name = "hget"
//dup := env.Duplicate()
//collec, err := dup.EvalExpressions(args[1:2])
collec, err := EvalFunction(env, "evalColonAccess", args[1:2])
if err != nil {
return SexpNull, err
}
swapped := args
swapped[1] = swapped[0]
swapped[0] = collec
if len(args) == 3 {
// have default, needs eval too
//defaul, err := dup.EvalExpressions(args[2:3])
defaul, err := EvalFunction(env, "evalColonDefault", args[2:3])
if err != nil {
return SexpNull, err
}
swapped[2] = defaul
}
switch sx := collec.(type) {
case *SexpHash:
return HashAccessFunction(env, name, swapped)
case *SexpArray:
return ArrayAccessFunction(env, name, swapped)
case *SexpArraySelector:
Q("*SexpSelector seen in : operator form.")
return sx.RHS(env)
}
return SexpNull, fmt.Errorf("second argument to ':' function must be hash or array")
}
// CommaBuilder turns expressions on the LHS and RHS like {a,b,c = 1,2,3}
// into arrays (set [a b c] [1 2 3])
func CommaBuilder(env *Zlisp, name string, args []Sexp) (Sexp, error) {
n := len(args)
if n < 1 {
return SexpNull, nil
}
res := make([]Sexp, 0)
for i := range args {
commaHelper(args[i], &res)
}
return &SexpArray{Val: res}, nil
}
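// Editor note (hedged, not upstream documentation): per the comment above,
// an expression like {a,b,c = 1,2,3} reaches CommaBuilder as nested
// (comma ...) forms, and each side is flattened into a plain array, e.g.
// (comma a b c) -> [a b c], so the assignment becomes (set [a b c] [1 2 3]).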
func commaHelper(a Sexp, res *[]Sexp) {
//Q("top of commaHelper with a = '%s'", a.SexpString(nil))
switch x := a.(type) {
case *SexpPair:
sy, isQuo := isQuotedSymbol(x)
if isQuo {
symN := sy.(*SexpSymbol)
//Q("have quoted symbol symN=%v", symN.SexpString(nil))
*res = append(*res, symN)
return
}
ar, err := ListToArray(x)
if err != nil || len(ar) < 1 {
return
}
switch sym := ar[0].(type) {
case *SexpSymbol:
if sym.name == "comma" {
//Q("have recursive comma")
over := ar[1:]
for i := range over {
commaHelper(over[i], res)
}
} else {
//Q("have symbol sym = '%v'", sym.SexpString(nil))
*res = append(*res, a)
}
default:
*res = append(*res, a)
}
default:
*res = append(*res, a)
}
}

209
vendor/github.com/glycerine/zygomys/zygo/callgo.go generated vendored Normal file
View File

@@ -0,0 +1,209 @@
package zygo
import (
"fmt"
"reflect"
"runtime"
)
// call Go methods
// Using reflection, invoke a Go method on a struct or interface.
// args[0] is a hash (record) with an attached GoStruct.
// args[1] is the method name, given as a symbol or string; any remaining
// args are converted and passed to the call.
// The returned Sexp is an array holding the converted results of the call.
func CallGoMethodFunction(env *Zlisp, name string, args []Sexp) (Sexp, error) {
Q("_method user func running!\n")
// protect against bad calls/bad reflection
var wasPanic bool
var recovered interface{}
tr := make([]byte, 16384)
trace := &tr
sx, err := func() (Sexp, error) {
defer func() {
recovered = recover()
if recovered != nil {
wasPanic = true
nbyte := runtime.Stack(*trace, false)
*trace = (*trace)[:nbyte]
}
}()
narg := len(args)
if narg < 2 {
return SexpNull, WrongNargs
}
obj, isHash := args[0].(*SexpHash)
if !isHash {
return SexpNull, fmt.Errorf("_method error: first argument must be a hash or defmap (a record) with an attached GoObject")
}
var methodname string
switch m := args[1].(type) {
case *SexpSymbol:
methodname = m.name
case *SexpStr:
methodname = m.S
default:
return SexpNull, fmt.Errorf("_method error: second argument must be a method name in symbol or string form (got %T)", args[1])
}
// get the method list, verify the method exists and get its type
if obj.NumMethod == -1 {
err := obj.SetMethodList(env)
if err != nil {
return SexpNull, fmt.Errorf("could not get method list for object: %s", err)
}
}
var method reflect.Method
found := false
for _, me := range obj.GoMethods {
if me.Name == methodname {
method = me
found = true
break
}
}
if !found {
return SexpNull, fmt.Errorf("no such method '%s' on %s. choices are: %s",
methodname, obj.TypeName,
(obj.GoMethSx).SexpString(nil))
}
// INVAR: var method holds our call target
// try always expecting this to be already done... test crashes
//P("in CallGoMethod '%s' obj.GoShadowStructVa = '%#v'", methodname, obj.GoShadowStructVa)
if obj.GoShadowStructVa.Kind() == reflect.Invalid {
// ready the struct... but only because there isn't already a shadow struct there!!
if !obj.ShadowSet {
_, err := ToGoFunction(env, "togo", []Sexp{obj})
if err != nil {
return SexpNull, fmt.Errorf("error converting object to Go struct: '%s'", err)
}
}
}
inputVa := []reflect.Value{(obj.GoShadowStructVa)}
// prep args.
needed := method.Type.NumIn() - 1 // one for the receiver
avail := narg - 2
if needed != avail {
// TODO: support varargs eventually
return SexpNull, fmt.Errorf("method %s needs %d arguments, but we have %d", method.Name, needed, avail)
}
var va reflect.Value
for i := 2; i < narg; i++ {
typ := method.Type.In(i - 1)
pdepth := PointerDepth(typ)
// we only handle 0 and 1 for now
Q("pdepth = %v\n", pdepth)
switch pdepth {
case 0:
va = reflect.New(typ)
case 1:
// handle the common single pointer to struct case
va = reflect.New(typ.Elem())
default:
return SexpNull, fmt.Errorf("error converting %d-th argument to "+
"Go: we don't handle double pointers", i-2)
}
Q("converting to go '%#v' into -> %#v\n", args[i], va.Interface())
iface, err := SexpToGoStructs(args[i], va.Interface(), env, nil)
if err != nil {
return SexpNull, fmt.Errorf("error converting %d-th "+
"argument to Go: '%s'", i-2, err)
}
switch pdepth {
case 0:
inputVa = append(inputVa, reflect.ValueOf(iface).Elem())
case 1:
inputVa = append(inputVa, reflect.ValueOf(iface))
}
Q("\n allocated new %T/val=%#v /i=%#v\n", va, va, va.Interface())
}
//P("_method: about to .Call by reflection!\n")
out := method.Func.Call(inputVa)
var iout []interface{}
for _, o := range out {
iout = append(iout, o.Interface())
}
Q("done with _method call, iout = %#v\n", iout)
Q("done with _method call, iout[0] = %#v\n", iout[0])
nout := len(out)
r := make([]Sexp, 0)
for i := 0; i < nout; i++ {
f := out[i].Interface()
switch e := f.(type) {
case nil:
r = append(r, SexpNull)
case int64:
r = append(r, &SexpInt{Val: e})
case int:
r = append(r, &SexpInt{Val: int64(e)})
case error:
r = append(r, &SexpError{e})
case string:
r = append(r, &SexpStr{S: e})
case float64:
r = append(r, &SexpFloat{Val: e})
case []byte:
r = append(r, &SexpRaw{Val: e})
case rune:
r = append(r, &SexpChar{Val: e})
default:
// go through the type registry
found := false
for hashName, factory := range GoStructRegistry.Registry {
st, err := factory.Factory(env, nil)
if err != nil {
return SexpNull, fmt.Errorf("MakeHash '%s' problem on Factory call: %s",
hashName, err)
}
Q("got st from Factory, checking if types match")
if reflect.ValueOf(st).Type() == out[i].Type() {
Q("types match")
retHash, err := MakeHash([]Sexp{}, factory.RegisteredName, env)
if err != nil {
return SexpNull, fmt.Errorf("MakeHash '%s' problem: %s",
hashName, err)
}
Q("filling from shadow")
err = retHash.FillHashFromShadow(env, f)
if err != nil {
return SexpNull, err
}
r = append(r, retHash)
found = true
break
}
}
if !found {
r = append(r, &SexpReflect{Val: out[i]})
}
}
}
return env.NewSexpArray(r), nil
}()
if wasPanic {
return SexpNull, fmt.Errorf("\n recovered from panic "+
"during CallGo. panic on = '%v'\n"+
"stack trace:\n%s\n", recovered, string(*trace))
}
return sx, err
}
// NilOrHoldsNil reports whether the interface is nil or holds a nil value.
func NilOrHoldsNil(iface interface{}) bool {
if iface == nil {
return true
}
return reflect.ValueOf(iface).IsNil()
}
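// exampleNilOrHoldsNil is an illustrative sketch added by the editor, not
// upstream code. It shows why NilOrHoldsNil above exists: an interface that
// wraps a typed nil pointer is not == nil, but it still "holds nil".
func exampleNilOrHoldsNil() (plainCheck, heldNil bool) {
	var p *SexpInt            // typed nil pointer
	var iface interface{} = p // non-nil interface value holding a nil pointer
	return iface == nil, NilOrHoldsNil(iface) // false, true
}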

54
vendor/github.com/glycerine/zygomys/zygo/cfg.go generated vendored Normal file
View File

@@ -0,0 +1,54 @@
package zygo
import (
"flag"
)
// configure a glisp repl
type ZlispConfig struct {
CpuProfile string
MemProfile string
ExitOnFailure bool
CountFuncCalls bool
Flags *flag.FlagSet
ExtensionsVersion string
Command string
Sandboxed bool
Quiet bool
Trace bool
LoadDemoStructs bool
AfterScriptDontExit bool
// liner bombs under emacs, avoid it with this flag.
NoLiner bool
Prompt string // default "zygo> "
}
func NewZlispConfig(cmdname string) *ZlispConfig {
return &ZlispConfig{
Flags: flag.NewFlagSet(cmdname, flag.ExitOnError),
}
}
// call DefineFlags before myflags.Parse()
func (c *ZlispConfig) DefineFlags() {
c.Flags.StringVar(&c.CpuProfile, "cpuprofile", "", "write cpu profile to file")
c.Flags.StringVar(&c.MemProfile, "memprofile", "", "write mem profile to file")
c.Flags.BoolVar(&c.ExitOnFailure, "exitonfail", false, "exit on failure instead of starting repl")
c.Flags.BoolVar(&c.CountFuncCalls, "countcalls", false, "count how many times each function is run")
c.Flags.StringVar(&c.Command, "c", "", "expressions to evaluate")
c.Flags.BoolVar(&c.AfterScriptDontExit, "i", false, "after running the command line script, remain interactive rather than exiting")
c.Flags.BoolVar(&c.Sandboxed, "sandbox", false, "run sandboxed; disallow system/external interaction functions")
c.Flags.BoolVar(&c.Quiet, "quiet", false, "start repl without printing the version/mode/help banner")
c.Flags.BoolVar(&c.Trace, "trace", false, "trace execution (warning: very verbose and slow)")
c.Flags.BoolVar(&c.LoadDemoStructs, "demo", false, "load the demo structs: Event, Snoopy, Hornet, Weather and friends.")
}
// call c.ValidateConfig() after myflags.Parse()
func (c *ZlispConfig) ValidateConfig() error {
if c.Prompt == "" {
c.Prompt = "zygo> "
}
return nil
}
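// exampleConfigUsage is an illustrative sketch added by the editor, not
// upstream code. It follows the call order stated in the comments above:
// DefineFlags before Parse, ValidateConfig after Parse. The "zygo" command
// name is an assumption.
func exampleConfigUsage(args []string) (*ZlispConfig, error) {
	cfg := NewZlispConfig("zygo")
	cfg.DefineFlags()
	if err := cfg.Flags.Parse(args); err != nil {
		return nil, err
	}
	if err := cfg.ValidateConfig(); err != nil {
		return nil, err
	}
	return cfg, nil // cfg.Prompt now defaults to "zygo> " if it was empty
}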

70
vendor/github.com/glycerine/zygomys/zygo/channels.go generated vendored Normal file
View File

@@ -0,0 +1,70 @@
package zygo
import (
"errors"
"fmt"
)
type SexpChannel struct {
Val chan Sexp
Typ *RegisteredType
}
func (ch *SexpChannel) SexpString(ps *PrintState) string {
return "[chan]"
}
func (ch *SexpChannel) Type() *RegisteredType {
return ch.Typ // TODO what should this be?
}
func MakeChanFunction(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
if len(args) > 1 {
return SexpNull, WrongNargs
}
size := 0
if len(args) == 1 {
switch t := args[0].(type) {
case *SexpInt:
size = int(t.Val)
default:
return SexpNull, errors.New(
fmt.Sprintf("argument to %s must be int", name))
}
}
return &SexpChannel{Val: make(chan Sexp, size)}, nil
}
func ChanTxFunction(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
if len(args) < 1 {
return SexpNull, WrongNargs
}
var channel chan Sexp
switch t := args[0].(type) {
case *SexpChannel:
channel = chan Sexp(t.Val)
default:
return SexpNull, errors.New(
fmt.Sprintf("argument 0 of %s must be channel", name))
}
if name == "send" {
if len(args) != 2 {
return SexpNull, WrongNargs
}
channel <- args[1]
return SexpNull, nil
}
return <-channel, nil
}
func (env *Zlisp) ImportChannels() {
env.AddFunction("makeChan", MakeChanFunction)
env.AddFunction("send", ChanTxFunction)
env.AddFunction("<!", ChanTxFunction)
}
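// Editor note (hedged, not upstream documentation): reading the registrations
// above, the zygo-level surface is roughly: (makeChan n) builds a channel with
// buffer size n, (send ch x) transmits on it, and (<! ch) receives from it.
// How the channel is bound to a name in a script is not shown here.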

93
vendor/github.com/glycerine/zygomys/zygo/check.go generated vendored Normal file
View File

@@ -0,0 +1,93 @@
package zygo
import (
"fmt"
)
// FunctionCallNameTypeCheck type checks a function call.
func (env *Zlisp) FunctionCallNameTypeCheck(f *SexpFunction, nargs *int) error {
if f.inputTypes != nil {
Q("FunctionCallNameTypeCheck sees inputTypes: '%v'", f.inputTypes.SexpString(nil))
} else {
return nil // no type checking requested
}
if f.varargs {
return nil // name/type checking for varargs not currently implemented.
}
// our call arguments prepared -- will be pushed to the datastack
finalArgs := make([]Sexp, f.inputTypes.NumKeys)
// pop everything off the stack, will push finalArgs later
exprs, err := env.datastack.PopExpressions(*nargs)
if err != nil {
return err
}
// map the named submitted args, for fast lookup by name
submittedByName := make(map[string]Sexp)
// prep submittedByName
for i := 0; i < *nargs; i++ {
switch sym := exprs[i].(type) {
case *SexpSymbol:
if sym.colonTail {
Q("in env.CallFunction, have symbol.colonTail: exprs[%v]='%#v'", i, sym)
typ, err := f.inputTypes.HashGet(env, sym)
if err != nil {
return fmt.Errorf("%s takes no argument '%s'", f.name, sym.name)
}
if i == (*nargs)-1 {
return fmt.Errorf("named parameter '%s' not followed by value", sym.name)
}
val := exprs[i+1]
i++
_, already := submittedByName[sym.name]
if already {
return fmt.Errorf("duplicate named parameter '%s'", sym.name)
}
submittedByName[sym.name] = val
valtyp := val.Type()
if typ != nil && typ != valtyp {
return fmt.Errorf("type mismatch for parameter '%s': expected '%s', got '%s'",
sym.name, typ.SexpString(nil), valtyp.SexpString(nil))
}
} else {
Q("in env.CallFunction, exprs[%v]='%v'/type=%T", i, exprs[i].SexpString(nil), exprs[i])
}
default:
Q("in env.CallFunction, exprs[%v]='%v'/type=%T", i, exprs[i].SexpString(nil), exprs[i])
}
}
// simplify name matching for now with this rule: all by name, or none.
haveByName := len(submittedByName)
if haveByName > 0 {
if haveByName != f.inputTypes.NumKeys {
return fmt.Errorf("named arguments count %v != expected %v", haveByName, f.inputTypes.NumKeys)
}
// prep finalArgs in the order dictated
for i, key := range f.inputTypes.KeyOrder {
switch sy := key.(type) {
case *SexpSymbol:
// search for sy.name in our submittedByName args
a, found := submittedByName[sy.name]
if found {
Q("%s call: matching %v-th argument named '%s': passing value '%s'",
f.name, i, sy.name, a.SexpString(nil))
finalArgs[i] = a
}
default:
return fmt.Errorf("unsupported argument-name type %T", key)
}
}
} else {
// not using named parameters, restore the arguments to the stack as they were.
finalArgs = exprs
}
*nargs = len(finalArgs)
return env.datastack.PushExpressions(finalArgs)
}
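// Editor note (hedged, not upstream documentation): the "all by name, or
// none" rule above means a call either names every declared parameter
// (name: value pairs, matched and reordered against f.inputTypes) or names
// none of them, in which case the arguments are restored positionally.
// Mixing the two styles generally fails the count check and is rejected.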

45
vendor/github.com/glycerine/zygomys/zygo/closing.go generated vendored Normal file
View File

@@ -0,0 +1,45 @@
package zygo
// Closing stores the closure-supporting stack pointers.
type Closing struct {
Stack *Stack
Name string
env *Zlisp
}
func NewClosing(name string, env *Zlisp) *Closing {
stk := env.linearstack.Clone()
// be super strict: only store up to our
// enclosing function definition, because after
// that, the definition time of that function
// should be what we use.
return &Closing{
Stack: stk,
Name: name,
env: env}
}
func NewEmptyClosing(name string, env *Zlisp) *Closing {
return &Closing{
Stack: env.NewStack(0),
Name: name,
env: env}
}
func (c *Closing) IsStackElem() {}
func (c *Closing) LookupSymbolUntilFunction(sym *SexpSymbol, setVal *Sexp, maximumFuncToSearch int, checkCaptures bool) (Sexp, error, *Scope) {
return c.Stack.LookupSymbolUntilFunction(sym, setVal, maximumFuncToSearch, checkCaptures)
}
func (c *Closing) LookupSymbol(sym *SexpSymbol, setVal *Sexp) (Sexp, error, *Scope) {
return c.Stack.LookupSymbol(sym, setVal)
}
func (c *Closing) Show(env *Zlisp, ps *PrintState, label string) (string, error) {
return c.Stack.Show(env, ps, label)
}
func (c *Closing) TopScope() *Scope {
return c.Stack.GetTop().(*Scope)
}
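As a quick illustration of the Closing API above (not part of the vendored file), the sketch below captures the current lexical stack and resolves a symbol through it; it assumes an already-constructed *Zlisp environment and that MakeSymbol returns a *SexpSymbol, as it is used elsewhere in this package.
// illustrative only: capture the current lexical scopes and look a symbol
// up through them. Assumes `env` was built elsewhere.
func demoClosing(env *Zlisp) (Sexp, error) {
	clo := NewClosing("demo", env)
	sym := env.MakeSymbol("x")
	val, err, _ := clo.LookupSymbol(sym, nil) // third return is the owning *Scope
	return val, err
}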

105
vendor/github.com/glycerine/zygomys/zygo/comment.go generated vendored Normal file
View File

@@ -0,0 +1,105 @@
package zygo
type SexpComment struct {
Comment string
Block bool
}
func (p *SexpComment) SexpString(ps *PrintState) string {
return p.Comment
}
func (p *SexpComment) Type() *RegisteredType {
return GoStructRegistry.Registry["comment"]
}
// Filters return true to keep, false to drop.
type Filter func(x Sexp) bool
func RemoveCommentsFilter(x Sexp) bool {
switch x.(type) {
case *SexpComment:
//P("RemoveCommentsFilter called on x= %T/val=%v. return false", x, x)
return false
default:
//P("RemoveCommentsFilter called on x= %T/val=%v. return true", x, x)
return true
}
}
// detect SexpEnd values and return false on them to filter them out.
func RemoveEndsFilter(x Sexp) bool {
switch n := x.(type) {
case *SexpSentinel:
if n.Val == SexpEnd.Val {
return false
}
}
return true
}
// detect SexpComma values and return false on them to filter them out.
func RemoveCommasFilter(x Sexp) bool {
switch x.(type) {
case *SexpComma:
return false
}
return true
}
func (env *Zlisp) FilterAny(x Sexp, f Filter) (filtered Sexp, keep bool) {
switch ele := x.(type) {
case *SexpArray:
res := &SexpArray{Val: env.FilterArray(ele.Val, f), Typ: ele.Typ, IsFuncDeclTypeArray: ele.IsFuncDeclTypeArray, Env: env}
return res, true
case *SexpPair:
return env.FilterList(ele, f), true
case *SexpHash:
return env.FilterHash(ele, f), true
default:
keep = f(x)
if keep {
return x, true
}
return SexpNull, false
}
}
func (env *Zlisp) FilterArray(x []Sexp, f Filter) []Sexp {
//P("FilterArray: before: %d in size", len(x))
//for i := range x {
//P("x[i=%d] = %v", i, x[i].SexpString())
//}
res := []Sexp{}
for i := range x {
filtered, keep := env.FilterAny(x[i], f)
if keep {
res = append(res, filtered)
}
}
//P("FilterArray: after: %d in size", len(res))
//for i := range res {
//P("x[i=%d] = %v", i, res[i].SexpString())
//}
return res
}
func (env *Zlisp) FilterHash(h *SexpHash, f Filter) *SexpHash {
// should not actually need this, since hashes
// don't yet exist in parsed symbols. (they are
// still lists).
//P("in FilterHash")
return h
}
func (env *Zlisp) FilterList(h *SexpPair, f Filter) Sexp {
	//P("in FilterList")
	arr, err := ListToArray(h)
	if err == NotAList {
		// don't filter pair lists
		return h
	}
	return MakeList(env.FilterArray(arr, f))
}
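A short, illustrative-only sketch of how the filters above are typically combined (not part of the vendored file); it assumes an already-constructed *Zlisp environment and an already-parsed slice of expressions.
// illustrative only: strip comment and comma nodes from a parsed
// expression slice using the filters defined above.
func demoStripComments(env *Zlisp, parsed []Sexp) []Sexp {
	noComments := env.FilterArray(parsed, RemoveCommentsFilter)
	return env.FilterArray(noComments, RemoveCommasFilter)
}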

323
vendor/github.com/glycerine/zygomys/zygo/comparisons.go generated vendored Normal file
View File

@@ -0,0 +1,323 @@
package zygo
import (
"bytes"
"errors"
"fmt"
"math"
"reflect"
)
func IsNaNFunction(env *Zlisp, name string, args []Sexp) (Sexp, error) {
if len(args) != 1 {
return SexpNull, WrongNargs
}
var err error
a := args[0]
if sel, isSel := a.(Selector); isSel {
a, err = sel.RHS(env)
if err != nil {
return SexpNull, err
}
}
switch at := a.(type) {
case *SexpFloat:
if math.IsNaN(at.Val) {
return &SexpBool{Val: true}, nil
}
}
return &SexpBool{Val: false}, nil
}
func signumFloat(f float64) int {
if f > 0 {
return 1
}
if f < 0 {
return -1
}
return 0
}
func signumInt(i int64) int {
if i > 0 {
return 1
}
if i < 0 {
return -1
}
return 0
}
func compareFloat(f *SexpFloat, expr Sexp) (int, error) {
switch e := expr.(type) {
case *SexpInt:
if math.IsNaN(f.Val) {
return 2, nil
}
return signumFloat(f.Val - float64(e.Val)), nil
case *SexpFloat:
nanCount := 0
if math.IsNaN(f.Val) {
nanCount++
}
if math.IsNaN(e.Val) {
nanCount++
}
if nanCount > 0 {
return 1 + nanCount, nil
}
return signumFloat(f.Val - e.Val), nil
case *SexpChar:
if math.IsNaN(f.Val) {
return 2, nil
}
return signumFloat(f.Val - float64(e.Val)), nil
}
errmsg := fmt.Sprintf("err 91: cannot compare %T to %T", f, expr)
return 0, errors.New(errmsg)
}
func compareInt(i *SexpInt, expr Sexp) (int, error) {
switch e := expr.(type) {
case *SexpInt:
return signumInt(i.Val - e.Val), nil
case *SexpFloat:
return signumFloat(float64(i.Val) - e.Val), nil
case *SexpChar:
return signumInt(i.Val - int64(e.Val)), nil
case *SexpReflect:
r := reflect.Value(e.Val)
ifa := r.Interface()
switch z := ifa.(type) {
case *int64:
return signumInt(i.Val - *z), nil
}
P("compareInt(): ifa = %v/%T", ifa, ifa)
P("compareInt(): r.Elem() = %v/%T", r.Elem(), r.Elem())
P("compareInt(): r.Elem().Interface() = %v/%T", r.Elem().Interface(), r.Elem().Interface())
P("compareInt(): r.Elem().Type() = %v/%T", r.Elem().Type(), r.Elem().Type())
P("compareInt(): r.Elem().Type().Name() = %v/%T", r.Elem().Type().Name(), r.Elem().Type().Name())
}
errmsg := fmt.Sprintf("err 92: cannot compare %T to %T", i, expr)
return 0, errors.New(errmsg)
}
func compareChar(c *SexpChar, expr Sexp) (int, error) {
switch e := expr.(type) {
case *SexpInt:
return signumInt(int64(c.Val) - e.Val), nil
case *SexpFloat:
return signumFloat(float64(c.Val) - e.Val), nil
case *SexpChar:
return signumInt(int64(c.Val) - int64(e.Val)), nil
}
errmsg := fmt.Sprintf("err 93: cannot compare %T to %T", c, expr)
return 0, errors.New(errmsg)
}
func compareString(s *SexpStr, expr Sexp) (int, error) {
switch e := expr.(type) {
case *SexpStr:
return bytes.Compare([]byte(s.S), []byte(e.S)), nil
case *SexpReflect:
r := reflect.Value(e.Val)
ifa := r.Interface()
switch z := ifa.(type) {
case *string:
return bytes.Compare([]byte(s.S), []byte(*z)), nil
}
}
errmsg := fmt.Sprintf("err 94: cannot compare %T to %T", s, expr)
return 0, errors.New(errmsg)
}
func (env *Zlisp) compareSymbol(sym *SexpSymbol, expr Sexp) (int, error) {
switch e := expr.(type) {
case *SexpSymbol:
return signumInt(int64(sym.number - e.number)), nil
}
errmsg := fmt.Sprintf("err 95: cannot compare %T to %T", sym, expr)
return 0, errors.New(errmsg)
}
func (env *Zlisp) comparePair(a *SexpPair, b Sexp) (int, error) {
var bp *SexpPair
switch t := b.(type) {
case *SexpPair:
bp = t
default:
errmsg := fmt.Sprintf("err 96: cannot compare %T to %T", a, b)
return 0, errors.New(errmsg)
}
res, err := env.Compare(a.Head, bp.Head)
if err != nil {
return 0, err
}
if res != 0 {
return res, nil
}
return env.Compare(a.Tail, bp.Tail)
}
func (env *Zlisp) compareArray(a *SexpArray, b Sexp) (int, error) {
var ba *SexpArray
switch t := b.(type) {
case *SexpArray:
ba = t
default:
errmsg := fmt.Sprintf("err 97: cannot compare %T to %T", a, b)
return 0, errors.New(errmsg)
}
var length int
if len(a.Val) < len(ba.Val) {
length = len(a.Val)
} else {
length = len(ba.Val)
}
for i := 0; i < length; i++ {
res, err := env.Compare(a.Val[i], ba.Val[i])
if err != nil {
return 0, err
}
if res != 0 {
return res, nil
}
}
return signumInt(int64(len(a.Val) - len(ba.Val))), nil
}
func compareBool(a *SexpBool, b Sexp) (int, error) {
var bb *SexpBool
switch bt := b.(type) {
case *SexpBool:
bb = bt
default:
errmsg := fmt.Sprintf("err 98: cannot compare %T to %T", a, b)
return 0, errors.New(errmsg)
}
// true > false
if a.Val && bb.Val {
return 0, nil
}
if a.Val {
return 1, nil
}
if bb.Val {
return -1, nil
}
return 0, nil
}
func comparePointers(a *SexpPointer, bs Sexp) (int, error) {
var b *SexpPointer
switch bt := bs.(type) {
case *SexpPointer:
b = bt
default:
return 0, fmt.Errorf("err 99: cannot compare %T to %T", a, bs)
}
if a.Target == b.Target {
return 0, nil
}
return 1, nil
}
func (env *Zlisp) Compare(a Sexp, b Sexp) (int, error) {
var err error
if sel, isSel := a.(Selector); isSel {
a, err = sel.RHS(env)
if err != nil {
return 0, err
}
}
if sel, isSel := b.(Selector); isSel {
b, err = sel.RHS(env)
if err != nil {
return 0, err
}
}
switch at := a.(type) {
case *SexpInt:
return compareInt(at, b)
case *SexpUint64:
return compareUint64(at, b)
case *SexpChar:
return compareChar(at, b)
case *SexpFloat:
return compareFloat(at, b)
case *SexpBool:
return compareBool(at, b)
case *SexpStr:
return compareString(at, b)
case *SexpSymbol:
return env.compareSymbol(at, b)
case *SexpPair:
return env.comparePair(at, b)
case *SexpArray:
return env.compareArray(at, b)
case *SexpHash:
return compareHash(at, b)
case *RegisteredType:
return compareRegisteredTypes(at, b)
case *SexpPointer:
return comparePointers(at, b)
case *SexpSentinel:
if at == SexpNull && b == SexpNull {
return 0, nil
} else {
return -1, nil
}
case *SexpTime:
switch bt := b.(type) {
case *SexpTime:
if bt.Tm.Unix() == at.Tm.Unix() {
return 0, nil
}
return -1, nil
}
case *SexpReflect:
r := reflect.Value(at.Val)
ifa := r.Interface()
//P("Compare(): ifa = %v/%t", ifa, ifa)
//P("Compare(): r.Elem() = %v/%T", r.Elem(), r.Elem())
switch z := ifa.(type) {
case *int64:
return compareInt(&SexpInt{Val: *z}, b)
case *string:
return compareString(&SexpStr{S: *z}, b)
}
}
errmsg := fmt.Sprintf("err 100: cannot compare %T to %T", a, b)
return 0, errors.New(errmsg)
}
// only compare uint64 to uint64
func compareUint64(i *SexpUint64, expr Sexp) (int, error) {
	switch e := expr.(type) {
	case *SexpUint64:
		// compare directly rather than via subtraction and signum:
		// unsigned subtraction wraps around when i.Val < e.Val.
		switch {
		case i.Val < e.Val:
			return -1, nil
		case i.Val > e.Val:
			return 1, nil
		}
		return 0, nil
	}
	errmsg := fmt.Sprintf("err 101: cannot compare %T to %T", i, expr)
	return 0, errors.New(errmsg)
}
func signumUint64(i uint64) int {
	if i > 0 {
		return 1
	}
	// a uint64 can never be negative, so zero is the only remaining case.
	return 0
}
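For orientation, an illustrative-only use of the comparison entry point above (not part of the vendored file); it assumes an already-constructed *Zlisp environment.
// illustrative only: Compare returns a signum-style int (-1, 0, +1) for
// ordered operands and an error for incomparable ones.
func demoCompare(env *Zlisp) (int, error) {
	return env.Compare(&SexpInt{Val: 3}, &SexpFloat{Val: 2.5}) // yields 1, since 3 > 2.5
}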

46
vendor/github.com/glycerine/zygomys/zygo/coroutines.go generated vendored Normal file
View File

@@ -0,0 +1,46 @@
package zygo
import (
"errors"
)
type SexpGoroutine struct {
env *Zlisp
}
func (goro *SexpGoroutine) SexpString(ps *PrintState) string {
return "[coroutine]"
}
func (goro *SexpGoroutine) Type() *RegisteredType {
return nil // TODO what goes here
}
func StartGoroutineFunction(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
switch t := args[0].(type) {
case *SexpGoroutine:
go t.env.Run()
default:
return SexpNull, errors.New("not a goroutine")
}
return SexpNull, nil
}
func CreateGoroutineMacro(env *Zlisp, name string,
args []Sexp) (Sexp, error) {
goroenv := env.Duplicate()
err := goroenv.LoadExpressions(args)
if err != nil {
return SexpNull, err
}
goro := &SexpGoroutine{goroenv}
// (apply StartGoroutineFunction [goro])
return MakeList([]Sexp{env.MakeSymbol("apply"),
MakeUserFunction("__start", StartGoroutineFunction),
&SexpArray{Val: []Sexp{goro}, Env: env}}), nil
}
func (env *Zlisp) ImportGoroutines() {
env.AddMacro("go", CreateGoroutineMacro)
}
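A minimal, illustrative-only wiring sketch (not part of the vendored file); the zygo-level call form in the comment is inferred from the macro registration above.
// illustrative only: register the `go` macro so scripts evaluated by this
// environment can spawn a duplicated interpreter running concurrently,
// e.g. something along the lines of (go (someWork)) at the zygo level.
func setupGoroutines(env *Zlisp) {
	env.ImportGoroutines()
}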

75
vendor/github.com/glycerine/zygomys/zygo/datastack.go generated vendored Normal file
View File

@@ -0,0 +1,75 @@
package zygo
import (
"errors"
"fmt"
)
type DataStackElem struct {
expr Sexp
}
func (d DataStackElem) IsStackElem() {}
func (stack *Stack) PushExpr(expr Sexp) {
stack.Push(DataStackElem{expr})
}
func (stack *Stack) PushExpressions(expr []Sexp) error {
for _, x := range expr {
stack.Push(DataStackElem{x})
}
return nil
}
func (stack *Stack) PopExpr() (Sexp, error) {
elem, err := stack.Pop()
if err != nil {
return nil, err
}
return elem.(DataStackElem).expr, nil
}
func (stack *Stack) GetExpressions(n int) ([]Sexp, error) {
stack_start := stack.tos - n + 1
if stack_start < 0 {
return nil, errors.New("not enough items on stack")
}
arr := make([]Sexp, n)
for i := 0; i < n; i++ {
arr[i] = stack.elements[stack_start+i].(DataStackElem).expr
}
return arr, nil
}
func (stack *Stack) PopExpressions(n int) ([]Sexp, error) {
origSz := stack.Size()
expressions, err := stack.GetExpressions(n)
if err != nil {
return nil, err
}
stack.TruncateToSize(origSz - n)
return expressions, nil
}
func (stack *Stack) GetExpr(n int) (Sexp, error) {
elem, err := stack.Get(n)
if err != nil {
return nil, err
}
return elem.(DataStackElem).expr, nil
}
func (stack *Stack) PrintStack() {
for i := 0; i <= stack.tos; i++ {
expr := stack.elements[i].(DataStackElem).expr
fmt.Println("\t" + expr.SexpString(nil))
}
}
func (stack *Stack) PrintScopeStack() {
for i := 0; i <= stack.tos; i++ {
scop := stack.elements[i].(*Scope)
scop.Show(stack.env, NewPrintStateWithIndent(4), "")
}
}
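An illustrative-only exercise of the datastack helpers above (not part of the vendored file); it assumes an already-constructed *Zlisp environment.
// illustrative only: push a few expressions and pop them back off,
// using the helpers defined above.
func demoDatastack(env *Zlisp) error {
	stack := env.NewStack(0)
	stack.PushExpr(&SexpStr{S: "hello"})
	stack.PushExpressions([]Sexp{&SexpInt{Val: 1}, &SexpInt{Val: 2}})
	top, err := stack.PopExpr() // the SexpInt{Val: 2}, last pushed
	if err != nil {
		return err
	}
	rest, err := stack.PopExpressions(2) // remaining two, bottom-most first
	if err != nil {
		return err
	}
	_, _ = top, rest
	return nil
}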

View File

@@ -0,0 +1,149 @@
package zygo
import (
"fmt"
"time"
)
//go:generate msgp
//msgp:ignore Plane Wings Snoopy Hornet Hellcat SetOfPlanes
// NestOuter/NestInner are a test case: at one point the Inner pointer wasn't getting followed.
type NestOuter struct {
Inner *NestInner `msg:"inner" json:"inner" zid:"0"`
}
type NestInner struct {
Hello string `msg:"hello" json:"hello" zid:"0"`
}
type Event struct {
Id int `json:"id" msg:"id"`
User Person `json:"user" msg:"user"`
Flight string `json:"flight" msg:"flight"`
Pilot []string `json:"pilot" msg:"pilot"`
Cancelled bool `json:"cancelled" msg:"cancelled"`
}
type Person struct {
First string `json:"first" msg:"first"`
Last string `json:"last" msg:"last"`
}
func (ev *Event) DisplayEvent(from string) {
fmt.Printf("%s %#v", from, ev)
}
type Wings struct {
SpanCm int
}
type SetOfPlanes struct {
Flyers []Flyer `json:"flyers" msg:"flyers"`
}
// the Flyer interface confounds the msgp msgpack code generator,
// hence the msgp:ignore directive for Plane above.
type Plane struct {
Wings
ID int `json:"id" msg:"id"`
Speed int `json:"speed" msg:"speed"`
Chld Flyer `json:"chld" msg:"chld"`
Friends []Flyer `json:"friends"`
}
type Snoopy struct {
Plane `json:"plane" msg:"plane"`
Cry string `json:"cry" msg:"cry"`
Pack []int `json:"pack"`
Carrying []Flyer `json:"carrying"`
}
type Hornet struct {
Plane `json:"plane" msg:"plane"`
Mass float64
Nickname string
}
type Hellcat struct {
Plane `json:"plane" msg:"plane"`
}
func (p *Snoopy) Fly(w *Weather) (s string, err error) {
w.Type = "VERY " + w.Type // side-effect, for demo purposes
s = fmt.Sprintf("Snoopy sees weather '%s', cries '%s'", w.Type, p.Cry)
fmt.Println(s)
for _, flyer := range p.Friends {
flyer.Fly(w)
}
return
}
func (p *Snoopy) GetCry() string {
return p.Cry
}
func (p *Snoopy) EchoWeather(w *Weather) *Weather {
return w
}
func (p *Snoopy) Sideeffect() {
fmt.Printf("Sideeffect() called! p = %p\n", p)
}
func (b *Hornet) Fly(w *Weather) (s string, err error) {
fmt.Printf("Hornet.Fly() called. I see weather %v\n", w.Type)
return
}
func (b *Hellcat) Fly(w *Weather) (s string, err error) {
fmt.Printf("Hellcat.Fly() called. I see weather %v\n", w.Type)
return
}
type Flyer interface {
Fly(w *Weather) (s string, err error)
}
type Weather struct {
Time time.Time `json:"time" msg:"time"`
Size int64 `json:"size" msg:"size"`
Type string `json:"type" msg:"type"`
Details []byte `json:"details" msg:"details"`
}
func (w *Weather) IsSunny() bool {
return w.Type == "sunny"
}
func (env *Zlisp) ImportDemoData() {
env.AddFunction("nestouter", DemoNestInnerOuterFunction)
env.AddFunction("nestinner", DemoNestInnerOuterFunction)
rt := &RegisteredType{GenDefMap: true, Factory: func(env *Zlisp, h *SexpHash) (interface{}, error) {
return &NestOuter{}, nil
}}
GoStructRegistry.RegisterUserdef(rt, true, "nestouter", "NestOuter")
rt = &RegisteredType{GenDefMap: true, Factory: func(env *Zlisp, h *SexpHash) (interface{}, error) {
return &NestInner{}, nil
}}
GoStructRegistry.RegisterUserdef(rt, true, "nestinner", "NestInner")
}
// constructor
func DemoNestInnerOuterFunction(env *Zlisp, name string, args []Sexp) (Sexp, error) {
n := len(args)
switch n {
case 0:
return SexpNull, WrongNargs
default:
// many parameters, treat as key:value pairs in the hash/record.
return ConstructorFunction(env, "msgmap", append([]Sexp{&SexpStr{S: name}}, MakeList(args)))
}
}
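A plain-Go, illustrative-only walk through the demo types above (not part of the vendored file):
// illustrative only: Snoopy.Fly mutates the shared Weather as a demo of
// side effects, so IsSunny reports false afterwards.
func demoFlight() bool {
	w := &Weather{Type: "sunny", Size: 3}
	snoopy := &Snoopy{Cry: "Curse you, Red Baron!"}
	if _, err := snoopy.Fly(w); err != nil {
		return false
	}
	return w.IsSunny() // false: Fly changed Type to "VERY sunny"
}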

View File

@@ -0,0 +1,845 @@
package zygo
// NOTE: THIS FILE WAS PRODUCED BY THE
// MSGP CODE GENERATION TOOL (github.com/tinylib/msgp)
// DO NOT EDIT
import (
"github.com/tinylib/msgp/msgp"
)
// DecodeMsg implements msgp.Decodable
func (z *Event) DecodeMsg(dc *msgp.Reader) (err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, err = dc.ReadMapHeader()
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, err = dc.ReadMapKeyPtr()
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "id":
z.Id, err = dc.ReadInt()
if err != nil {
return
}
case "user":
var zb0002 uint32
zb0002, err = dc.ReadMapHeader()
if err != nil {
return
}
for zb0002 > 0 {
zb0002--
field, err = dc.ReadMapKeyPtr()
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "first":
z.User.First, err = dc.ReadString()
if err != nil {
return
}
case "last":
z.User.Last, err = dc.ReadString()
if err != nil {
return
}
default:
err = dc.Skip()
if err != nil {
return
}
}
}
case "flight":
z.Flight, err = dc.ReadString()
if err != nil {
return
}
case "pilot":
var zb0003 uint32
zb0003, err = dc.ReadArrayHeader()
if err != nil {
return
}
if cap(z.Pilot) >= int(zb0003) {
z.Pilot = (z.Pilot)[:zb0003]
} else {
z.Pilot = make([]string, zb0003)
}
for za0001 := range z.Pilot {
z.Pilot[za0001], err = dc.ReadString()
if err != nil {
return
}
}
case "cancelled":
z.Cancelled, err = dc.ReadBool()
if err != nil {
return
}
default:
err = dc.Skip()
if err != nil {
return
}
}
}
return
}
// EncodeMsg implements msgp.Encodable
func (z *Event) EncodeMsg(en *msgp.Writer) (err error) {
// map header, size 5
// write "id"
err = en.Append(0x85, 0xa2, 0x69, 0x64)
if err != nil {
return
}
err = en.WriteInt(z.Id)
if err != nil {
return
}
// write "user"
// map header, size 2
// write "first"
err = en.Append(0xa4, 0x75, 0x73, 0x65, 0x72, 0x82, 0xa5, 0x66, 0x69, 0x72, 0x73, 0x74)
if err != nil {
return
}
err = en.WriteString(z.User.First)
if err != nil {
return
}
// write "last"
err = en.Append(0xa4, 0x6c, 0x61, 0x73, 0x74)
if err != nil {
return
}
err = en.WriteString(z.User.Last)
if err != nil {
return
}
// write "flight"
err = en.Append(0xa6, 0x66, 0x6c, 0x69, 0x67, 0x68, 0x74)
if err != nil {
return
}
err = en.WriteString(z.Flight)
if err != nil {
return
}
// write "pilot"
err = en.Append(0xa5, 0x70, 0x69, 0x6c, 0x6f, 0x74)
if err != nil {
return
}
err = en.WriteArrayHeader(uint32(len(z.Pilot)))
if err != nil {
return
}
for za0001 := range z.Pilot {
err = en.WriteString(z.Pilot[za0001])
if err != nil {
return
}
}
// write "cancelled"
err = en.Append(0xa9, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x65, 0x64)
if err != nil {
return
}
err = en.WriteBool(z.Cancelled)
if err != nil {
return
}
return
}
// MarshalMsg implements msgp.Marshaler
func (z *Event) MarshalMsg(b []byte) (o []byte, err error) {
o = msgp.Require(b, z.Msgsize())
// map header, size 5
// string "id"
o = append(o, 0x85, 0xa2, 0x69, 0x64)
o = msgp.AppendInt(o, z.Id)
// string "user"
// map header, size 2
// string "first"
o = append(o, 0xa4, 0x75, 0x73, 0x65, 0x72, 0x82, 0xa5, 0x66, 0x69, 0x72, 0x73, 0x74)
o = msgp.AppendString(o, z.User.First)
// string "last"
o = append(o, 0xa4, 0x6c, 0x61, 0x73, 0x74)
o = msgp.AppendString(o, z.User.Last)
// string "flight"
o = append(o, 0xa6, 0x66, 0x6c, 0x69, 0x67, 0x68, 0x74)
o = msgp.AppendString(o, z.Flight)
// string "pilot"
o = append(o, 0xa5, 0x70, 0x69, 0x6c, 0x6f, 0x74)
o = msgp.AppendArrayHeader(o, uint32(len(z.Pilot)))
for za0001 := range z.Pilot {
o = msgp.AppendString(o, z.Pilot[za0001])
}
// string "cancelled"
o = append(o, 0xa9, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x65, 0x64)
o = msgp.AppendBool(o, z.Cancelled)
return
}
// UnmarshalMsg implements msgp.Unmarshaler
func (z *Event) UnmarshalMsg(bts []byte) (o []byte, err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, bts, err = msgp.ReadMapHeaderBytes(bts)
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, bts, err = msgp.ReadMapKeyZC(bts)
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "id":
z.Id, bts, err = msgp.ReadIntBytes(bts)
if err != nil {
return
}
case "user":
var zb0002 uint32
zb0002, bts, err = msgp.ReadMapHeaderBytes(bts)
if err != nil {
return
}
for zb0002 > 0 {
zb0002--
field, bts, err = msgp.ReadMapKeyZC(bts)
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "first":
z.User.First, bts, err = msgp.ReadStringBytes(bts)
if err != nil {
return
}
case "last":
z.User.Last, bts, err = msgp.ReadStringBytes(bts)
if err != nil {
return
}
default:
bts, err = msgp.Skip(bts)
if err != nil {
return
}
}
}
case "flight":
z.Flight, bts, err = msgp.ReadStringBytes(bts)
if err != nil {
return
}
case "pilot":
var zb0003 uint32
zb0003, bts, err = msgp.ReadArrayHeaderBytes(bts)
if err != nil {
return
}
if cap(z.Pilot) >= int(zb0003) {
z.Pilot = (z.Pilot)[:zb0003]
} else {
z.Pilot = make([]string, zb0003)
}
for za0001 := range z.Pilot {
z.Pilot[za0001], bts, err = msgp.ReadStringBytes(bts)
if err != nil {
return
}
}
case "cancelled":
z.Cancelled, bts, err = msgp.ReadBoolBytes(bts)
if err != nil {
return
}
default:
bts, err = msgp.Skip(bts)
if err != nil {
return
}
}
}
o = bts
return
}
// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message
func (z *Event) Msgsize() (s int) {
s = 1 + 3 + msgp.IntSize + 5 + 1 + 6 + msgp.StringPrefixSize + len(z.User.First) + 5 + msgp.StringPrefixSize + len(z.User.Last) + 7 + msgp.StringPrefixSize + len(z.Flight) + 6 + msgp.ArrayHeaderSize
for za0001 := range z.Pilot {
s += msgp.StringPrefixSize + len(z.Pilot[za0001])
}
s += 10 + msgp.BoolSize
return
}
// DecodeMsg implements msgp.Decodable
func (z *NestInner) DecodeMsg(dc *msgp.Reader) (err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, err = dc.ReadMapHeader()
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, err = dc.ReadMapKeyPtr()
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "hello":
z.Hello, err = dc.ReadString()
if err != nil {
return
}
default:
err = dc.Skip()
if err != nil {
return
}
}
}
return
}
// EncodeMsg implements msgp.Encodable
func (z NestInner) EncodeMsg(en *msgp.Writer) (err error) {
// map header, size 1
// write "hello"
err = en.Append(0x81, 0xa5, 0x68, 0x65, 0x6c, 0x6c, 0x6f)
if err != nil {
return
}
err = en.WriteString(z.Hello)
if err != nil {
return
}
return
}
// MarshalMsg implements msgp.Marshaler
func (z NestInner) MarshalMsg(b []byte) (o []byte, err error) {
o = msgp.Require(b, z.Msgsize())
// map header, size 1
// string "hello"
o = append(o, 0x81, 0xa5, 0x68, 0x65, 0x6c, 0x6c, 0x6f)
o = msgp.AppendString(o, z.Hello)
return
}
// UnmarshalMsg implements msgp.Unmarshaler
func (z *NestInner) UnmarshalMsg(bts []byte) (o []byte, err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, bts, err = msgp.ReadMapHeaderBytes(bts)
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, bts, err = msgp.ReadMapKeyZC(bts)
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "hello":
z.Hello, bts, err = msgp.ReadStringBytes(bts)
if err != nil {
return
}
default:
bts, err = msgp.Skip(bts)
if err != nil {
return
}
}
}
o = bts
return
}
// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message
func (z NestInner) Msgsize() (s int) {
s = 1 + 6 + msgp.StringPrefixSize + len(z.Hello)
return
}
// DecodeMsg implements msgp.Decodable
func (z *NestOuter) DecodeMsg(dc *msgp.Reader) (err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, err = dc.ReadMapHeader()
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, err = dc.ReadMapKeyPtr()
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "inner":
if dc.IsNil() {
err = dc.ReadNil()
if err != nil {
return
}
z.Inner = nil
} else {
if z.Inner == nil {
z.Inner = new(NestInner)
}
var zb0002 uint32
zb0002, err = dc.ReadMapHeader()
if err != nil {
return
}
for zb0002 > 0 {
zb0002--
field, err = dc.ReadMapKeyPtr()
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "hello":
z.Inner.Hello, err = dc.ReadString()
if err != nil {
return
}
default:
err = dc.Skip()
if err != nil {
return
}
}
}
}
default:
err = dc.Skip()
if err != nil {
return
}
}
}
return
}
// EncodeMsg implements msgp.Encodable
func (z *NestOuter) EncodeMsg(en *msgp.Writer) (err error) {
// map header, size 1
// write "inner"
err = en.Append(0x81, 0xa5, 0x69, 0x6e, 0x6e, 0x65, 0x72)
if err != nil {
return
}
if z.Inner == nil {
err = en.WriteNil()
if err != nil {
return
}
} else {
// map header, size 1
// write "hello"
err = en.Append(0x81, 0xa5, 0x68, 0x65, 0x6c, 0x6c, 0x6f)
if err != nil {
return
}
err = en.WriteString(z.Inner.Hello)
if err != nil {
return
}
}
return
}
// MarshalMsg implements msgp.Marshaler
func (z *NestOuter) MarshalMsg(b []byte) (o []byte, err error) {
o = msgp.Require(b, z.Msgsize())
// map header, size 1
// string "inner"
o = append(o, 0x81, 0xa5, 0x69, 0x6e, 0x6e, 0x65, 0x72)
if z.Inner == nil {
o = msgp.AppendNil(o)
} else {
// map header, size 1
// string "hello"
o = append(o, 0x81, 0xa5, 0x68, 0x65, 0x6c, 0x6c, 0x6f)
o = msgp.AppendString(o, z.Inner.Hello)
}
return
}
// UnmarshalMsg implements msgp.Unmarshaler
func (z *NestOuter) UnmarshalMsg(bts []byte) (o []byte, err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, bts, err = msgp.ReadMapHeaderBytes(bts)
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, bts, err = msgp.ReadMapKeyZC(bts)
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "inner":
if msgp.IsNil(bts) {
bts, err = msgp.ReadNilBytes(bts)
if err != nil {
return
}
z.Inner = nil
} else {
if z.Inner == nil {
z.Inner = new(NestInner)
}
var zb0002 uint32
zb0002, bts, err = msgp.ReadMapHeaderBytes(bts)
if err != nil {
return
}
for zb0002 > 0 {
zb0002--
field, bts, err = msgp.ReadMapKeyZC(bts)
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "hello":
z.Inner.Hello, bts, err = msgp.ReadStringBytes(bts)
if err != nil {
return
}
default:
bts, err = msgp.Skip(bts)
if err != nil {
return
}
}
}
}
default:
bts, err = msgp.Skip(bts)
if err != nil {
return
}
}
}
o = bts
return
}
// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message
func (z *NestOuter) Msgsize() (s int) {
s = 1 + 6
if z.Inner == nil {
s += msgp.NilSize
} else {
s += 1 + 6 + msgp.StringPrefixSize + len(z.Inner.Hello)
}
return
}
// DecodeMsg implements msgp.Decodable
func (z *Person) DecodeMsg(dc *msgp.Reader) (err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, err = dc.ReadMapHeader()
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, err = dc.ReadMapKeyPtr()
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "first":
z.First, err = dc.ReadString()
if err != nil {
return
}
case "last":
z.Last, err = dc.ReadString()
if err != nil {
return
}
default:
err = dc.Skip()
if err != nil {
return
}
}
}
return
}
// EncodeMsg implements msgp.Encodable
func (z Person) EncodeMsg(en *msgp.Writer) (err error) {
// map header, size 2
// write "first"
err = en.Append(0x82, 0xa5, 0x66, 0x69, 0x72, 0x73, 0x74)
if err != nil {
return
}
err = en.WriteString(z.First)
if err != nil {
return
}
// write "last"
err = en.Append(0xa4, 0x6c, 0x61, 0x73, 0x74)
if err != nil {
return
}
err = en.WriteString(z.Last)
if err != nil {
return
}
return
}
// MarshalMsg implements msgp.Marshaler
func (z Person) MarshalMsg(b []byte) (o []byte, err error) {
o = msgp.Require(b, z.Msgsize())
// map header, size 2
// string "first"
o = append(o, 0x82, 0xa5, 0x66, 0x69, 0x72, 0x73, 0x74)
o = msgp.AppendString(o, z.First)
// string "last"
o = append(o, 0xa4, 0x6c, 0x61, 0x73, 0x74)
o = msgp.AppendString(o, z.Last)
return
}
// UnmarshalMsg implements msgp.Unmarshaler
func (z *Person) UnmarshalMsg(bts []byte) (o []byte, err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, bts, err = msgp.ReadMapHeaderBytes(bts)
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, bts, err = msgp.ReadMapKeyZC(bts)
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "first":
z.First, bts, err = msgp.ReadStringBytes(bts)
if err != nil {
return
}
case "last":
z.Last, bts, err = msgp.ReadStringBytes(bts)
if err != nil {
return
}
default:
bts, err = msgp.Skip(bts)
if err != nil {
return
}
}
}
o = bts
return
}
// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message
func (z Person) Msgsize() (s int) {
s = 1 + 6 + msgp.StringPrefixSize + len(z.First) + 5 + msgp.StringPrefixSize + len(z.Last)
return
}
// DecodeMsg implements msgp.Decodable
func (z *Weather) DecodeMsg(dc *msgp.Reader) (err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, err = dc.ReadMapHeader()
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, err = dc.ReadMapKeyPtr()
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "time":
z.Time, err = dc.ReadTime()
if err != nil {
return
}
case "size":
z.Size, err = dc.ReadInt64()
if err != nil {
return
}
case "type":
z.Type, err = dc.ReadString()
if err != nil {
return
}
case "details":
z.Details, err = dc.ReadBytes(z.Details)
if err != nil {
return
}
default:
err = dc.Skip()
if err != nil {
return
}
}
}
return
}
// EncodeMsg implements msgp.Encodable
func (z *Weather) EncodeMsg(en *msgp.Writer) (err error) {
// map header, size 4
// write "time"
err = en.Append(0x84, 0xa4, 0x74, 0x69, 0x6d, 0x65)
if err != nil {
return
}
err = en.WriteTime(z.Time)
if err != nil {
return
}
// write "size"
err = en.Append(0xa4, 0x73, 0x69, 0x7a, 0x65)
if err != nil {
return
}
err = en.WriteInt64(z.Size)
if err != nil {
return
}
// write "type"
err = en.Append(0xa4, 0x74, 0x79, 0x70, 0x65)
if err != nil {
return
}
err = en.WriteString(z.Type)
if err != nil {
return
}
// write "details"
err = en.Append(0xa7, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73)
if err != nil {
return
}
err = en.WriteBytes(z.Details)
if err != nil {
return
}
return
}
// MarshalMsg implements msgp.Marshaler
func (z *Weather) MarshalMsg(b []byte) (o []byte, err error) {
o = msgp.Require(b, z.Msgsize())
// map header, size 4
// string "time"
o = append(o, 0x84, 0xa4, 0x74, 0x69, 0x6d, 0x65)
o = msgp.AppendTime(o, z.Time)
// string "size"
o = append(o, 0xa4, 0x73, 0x69, 0x7a, 0x65)
o = msgp.AppendInt64(o, z.Size)
// string "type"
o = append(o, 0xa4, 0x74, 0x79, 0x70, 0x65)
o = msgp.AppendString(o, z.Type)
// string "details"
o = append(o, 0xa7, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73)
o = msgp.AppendBytes(o, z.Details)
return
}
// UnmarshalMsg implements msgp.Unmarshaler
func (z *Weather) UnmarshalMsg(bts []byte) (o []byte, err error) {
var field []byte
_ = field
var zb0001 uint32
zb0001, bts, err = msgp.ReadMapHeaderBytes(bts)
if err != nil {
return
}
for zb0001 > 0 {
zb0001--
field, bts, err = msgp.ReadMapKeyZC(bts)
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "time":
z.Time, bts, err = msgp.ReadTimeBytes(bts)
if err != nil {
return
}
case "size":
z.Size, bts, err = msgp.ReadInt64Bytes(bts)
if err != nil {
return
}
case "type":
z.Type, bts, err = msgp.ReadStringBytes(bts)
if err != nil {
return
}
case "details":
z.Details, bts, err = msgp.ReadBytesBytes(bts, z.Details)
if err != nil {
return
}
default:
bts, err = msgp.Skip(bts)
if err != nil {
return
}
}
}
o = bts
return
}
// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message
func (z *Weather) Msgsize() (s int) {
s = 1 + 5 + msgp.TimeSize + 5 + msgp.Int64Size + 5 + msgp.StringPrefixSize + len(z.Type) + 8 + msgp.BytesPrefixSize + len(z.Details)
return
}
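An illustrative-only round trip through the generated codecs above (not part of the generated file):
// illustrative only: marshal a Person with the generated MarshalMsg and
// read it back with UnmarshalMsg.
func demoMsgpRoundTrip() (Person, error) {
	in := Person{First: "Charlie", Last: "Brown"}
	buf, err := in.MarshalMsg(nil)
	if err != nil {
		return Person{}, err
	}
	var out Person
	if _, err := out.UnmarshalMsg(buf); err != nil {
		return Person{}, err
	}
	return out, nil // out now equals in
}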

8
vendor/github.com/glycerine/zygomys/zygo/doc.go generated vendored Normal file
View File

@@ -0,0 +1,8 @@
/*
This project does not use godoc. Instead there is extensive
and detailed description of the language features maintained
on the wiki. See the following link.
https://github.com/glycerine/zygomys/wiki
*/
package zygo

Some files were not shown because too many files have changed in this diff.