mirror of
https://codeberg.org/scip/tablizer.git
synced 2025-12-18 13:01:11 +01:00
Compare commits
110 Commits
nointernal
...
v1.5.10
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8bdb3db105 | ||
| 4ce6c30f54 | |||
|
|
ec0b210167 | ||
| 253ef8262e | |||
| da48994744 | |||
| 39f06fddc8 | |||
|
|
50a9378d92 | ||
|
|
35b726fee4 | ||
|
|
8c87da34f2 | ||
|
|
6f0f5afb27 | ||
|
|
62b606e7da | ||
|
|
567d23b175 | ||
|
|
14f24533f0 | ||
|
|
4e413c02b5 | ||
|
|
6d8c0c0936 | ||
|
|
21b607af7c | ||
|
|
06a5d74fb6 | ||
|
|
5f3f7c417c | ||
|
|
687f4b7bb2 | ||
| 24b66b8a6b | |||
| d87c6878a4 | |||
| 4cdc4c8e18 | |||
| 9cb9a66332 | |||
| 24277cd716 | |||
| e51b141032 | |||
| 7af7304529 | |||
| b4c833a0ba | |||
| 1c36d93d65 | |||
|
|
ec864f42d6 | ||
|
|
4eaa676510 | ||
|
|
c600fb1136 | ||
|
|
abf9fac5c7 | ||
|
|
80dd6849ae | ||
| e2b82515f5 | |||
|
|
1976b4046e | ||
|
|
b1a2b3059e | ||
|
|
e3d6ef130c | ||
|
|
92fffaae9a | ||
|
|
f1c5ee5797 | ||
|
|
5168b04339 | ||
|
|
787178b17e | ||
|
|
eae39bbff1 | ||
| 40fbf17779 | |||
| 832841c1ff | |||
| 5726ed3f7f | |||
|
|
5e52cd9ce0 | ||
|
|
8c7c89c9ea | ||
| 25aa172c41 | |||
|
|
c436a92bcb | ||
|
|
65732a58d0 | ||
|
|
ace7f76210 | ||
| fda365bd8b | |||
| c1cfc08c23 | |||
| 150fdddd2a | |||
| 6b659773f1 | |||
| 74d82fa356 | |||
| 3949411c57 | |||
| a455f6b79a | |||
| 2c08687c29 | |||
| 200f1f32f8 | |||
| 768a19b4d6 | |||
|
|
dc718392b6 | ||
|
|
e8f4fef41c | ||
| 6566dd66f0 | |||
| 1593799c03 | |||
| ea3dd75fec | |||
| a306f2c601 | |||
| 82f54c120d | |||
|
|
2d5799e2f2 | ||
| 8e33cadcaa | |||
| 03f3225f24 | |||
| 63c7ef26b6 | |||
|
|
c2e7d8037a | ||
| 323c070caa | |||
| 53cf1e2ebe | |||
| 16c5053752 | |||
| 7d2d9a55d3 | |||
| 14c50b4e63 | |||
| 0e68dc585d | |||
| 6ca835add1 | |||
| 306f583522 | |||
| 9f971ed3b9 | |||
| 2ae2d2b33d | |||
| cf1a555b9b | |||
| 4d894a728b | |||
| 8792c5a40f | |||
| 7ab1a1178a | |||
| 1e44da4f6e | |||
| 59171f0fab | |||
| 8098ccf000 | |||
| 4dc87ac22e | |||
| ef5211e45f | |||
| 1a80e72737 | |||
| 8e765b167f | |||
| 30f4b67538 | |||
| 383b5db47e | |||
| f7d812b372 | |||
| 480f5f617d | |||
| 586e36c181 | |||
|
|
13c789b800 | ||
|
|
81e1394fd2 | ||
|
|
b8099fe389 | ||
|
|
1dc072aa67 | ||
|
|
d92f63ca30 | ||
| 78ccb8f54b | |||
| a29104aeab | |||
|
|
45d9e219a5 | ||
|
|
3eda59beeb | ||
|
|
7ada75c1d6 | ||
| 83d5628430 |
96
.gh-dash.yml
Normal file
96
.gh-dash.yml
Normal file
@@ -0,0 +1,96 @@
|
||||
prSections:
|
||||
- title: Responsible PRs
|
||||
filters: repo:tlinden/tablizer is:open NOT dependabot
|
||||
layout:
|
||||
repoName:
|
||||
hidden: true
|
||||
|
||||
- title: Responsible Dependabot PRs
|
||||
filters: repo:tlinden/tablizer is:open dependabot
|
||||
layout:
|
||||
repoName:
|
||||
hidden: true
|
||||
|
||||
issuesSections:
|
||||
- title: Responsible Issues
|
||||
filters: is:open repo:tlinden/tablizer -author:@me
|
||||
layout:
|
||||
repoName:
|
||||
hidden: true
|
||||
|
||||
- title: Note-to-Self Issues
|
||||
filters: is:open repo:tlinden/tablizer author:@me
|
||||
layout:
|
||||
creator:
|
||||
hidden: true
|
||||
repoName:
|
||||
hidden: true
|
||||
|
||||
defaults:
|
||||
preview:
|
||||
open: false
|
||||
width: 100
|
||||
|
||||
keybindings:
|
||||
universal:
|
||||
- key: "shift+down"
|
||||
builtin: pageDown
|
||||
- key: "shift+up"
|
||||
builtin: pageUp
|
||||
prs:
|
||||
- key: g
|
||||
name: gitu
|
||||
command: >
|
||||
cd {{.RepoPath}} && /home/scip/bin/gitu
|
||||
- key: M
|
||||
name: squash-merge
|
||||
command: gh pr merge --rebase --squash --admin --repo {{.RepoName}} {{.PrNumber}}
|
||||
- key: i
|
||||
name: show ci checks
|
||||
command: gh pr checks --repo {{.RepoName}} {{.PrNumber}} | glow -p
|
||||
- key: e
|
||||
name: edit pr
|
||||
command: ~/.config/gh-dash/edit-gh-pr {{.RepoName}} {{.PrNumber}}
|
||||
- key: E
|
||||
name: open repo in emacs
|
||||
command: emacsclient {{.RepoPath}} &
|
||||
issues:
|
||||
- key: v
|
||||
name: view
|
||||
command: gh issue view --repo {{.RepoName}} {{.IssueNumber}} | glow -p
|
||||
- key: l
|
||||
name: add label
|
||||
command: gh issue --repo {{.RepoName}} edit {{.IssueNumber}} --add-label $(gum choose bug enhancement question dependencies wontfix)
|
||||
- key: L
|
||||
name: remove label
|
||||
command: gh issue --repo {{.RepoName}} edit {{.IssueNumber}} --remove-label $(gum choose bug enhancement question dependencies wontfix)
|
||||
- key: E
|
||||
name: open repo in emacs
|
||||
command: emacsclient {{.RepoPath}} &
|
||||
|
||||
theme:
|
||||
ui:
|
||||
sectionsShowCount: true
|
||||
table:
|
||||
compact: false
|
||||
showSeparator: true
|
||||
colors:
|
||||
text:
|
||||
primary: "#E2E1ED"
|
||||
secondary: "#6770cb"
|
||||
inverted: "#242347"
|
||||
faint: "#b0793b"
|
||||
warning: "#E0AF68"
|
||||
success: "#3DF294"
|
||||
background:
|
||||
selected: "#1B1B33"
|
||||
border:
|
||||
primary: "#383B5B"
|
||||
secondary: "#39386B"
|
||||
faint: "#8d3e0b"
|
||||
|
||||
repoPaths:
|
||||
:owner/:repo: ~/dev/:repo
|
||||
|
||||
pager:
|
||||
diff: delta
|
||||
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -7,7 +7,7 @@ assignees: TLINDEN
|
||||
|
||||
---
|
||||
|
||||
**Describtion**
|
||||
**Description**
|
||||
<!-- Please provide a clear and concise description of the issue: -->
|
||||
|
||||
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/feature_request.md
vendored
2
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -7,7 +7,7 @@ assignees: TLINDEN
|
||||
|
||||
---
|
||||
|
||||
**Describtion**
|
||||
**Description**
|
||||
<!-- Please provide a clear and concise description of the feature you desire: -->
|
||||
|
||||
|
||||
|
||||
10
.github/dependabot.yml
vendored
Normal file
10
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "gomod"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
22
.github/workflows/ci.yaml
vendored
22
.github/workflows/ci.yaml
vendored
@@ -1,22 +1,22 @@
|
||||
name: build-and-test-tablizer
|
||||
on: [push, pull_request]
|
||||
on: [push]
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
matrix:
|
||||
version: [1.18, 1.19]
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
version: ['1.24']
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
name: Build
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Set up Go 1.18
|
||||
uses: actions/setup-go@v3
|
||||
- name: Set up Go ${{ matrix.version }}
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: ${{ matrix.version }}
|
||||
go-version: '${{ matrix.version }}'
|
||||
id: go
|
||||
|
||||
- name: checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: build
|
||||
run: make
|
||||
@@ -28,11 +28,11 @@ jobs:
|
||||
name: lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/setup-go@v3
|
||||
- uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: 1.18
|
||||
- uses: actions/checkout@v3
|
||||
go-version: 1.24
|
||||
- uses: actions/checkout@v5
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
uses: golangci/golangci-lint-action@v6
|
||||
with:
|
||||
skip-cache: true
|
||||
|
||||
87
.github/workflows/release.yaml
vendored
Normal file
87
.github/workflows/release.yaml
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
name: build-release
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*.*.*"
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: Build Release Assets
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: 1.24.0
|
||||
|
||||
- name: Build the executables
|
||||
run: ./mkrel.sh tablizer ${{ github.ref_name}}
|
||||
|
||||
- name: List the executables
|
||||
run: ls -l ./releases
|
||||
|
||||
- name: Upload the binaries
|
||||
uses: svenstaro/upload-release-action@v2
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
tag: ${{ github.ref_name }}
|
||||
file: ./releases/*
|
||||
file_glob: true
|
||||
|
||||
- name: Build Changelog
|
||||
id: github_release
|
||||
uses: mikepenz/release-changelog-builder-action@v5
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
mode: "PR"
|
||||
configurationJson: |
|
||||
{
|
||||
"template": "#{{CHANGELOG}}\n\n**Full Changelog**: #{{RELEASE_DIFF}}",
|
||||
"pr_template": "- #{{TITLE}} (##{{NUMBER}}) by #{{AUTHOR}}\n#{{BODY}}",
|
||||
"empty_template": "- no changes",
|
||||
"categories": [
|
||||
{
|
||||
"title": "## New Features",
|
||||
"labels": ["add", "feature"]
|
||||
},
|
||||
{
|
||||
"title": "## Bug Fixes",
|
||||
"labels": ["fix", "bug", "revert"]
|
||||
},
|
||||
{
|
||||
"title": "## Documentation Enhancements",
|
||||
"labels": ["doc"]
|
||||
},
|
||||
{
|
||||
"title": "## Refactoring Efforts",
|
||||
"labels": ["refactor"]
|
||||
},
|
||||
{
|
||||
"title": "## Miscellaneus Changes",
|
||||
"labels": []
|
||||
}
|
||||
],
|
||||
"ignore_labels": [
|
||||
"duplicate", "good first issue", "help wanted", "invalid", "question", "wontfix"
|
||||
],
|
||||
"label_extractor": [
|
||||
{
|
||||
"pattern": "(.) (.+)",
|
||||
"target": "$1"
|
||||
},
|
||||
{
|
||||
"pattern": "(.) (.+)",
|
||||
"target": "$1",
|
||||
"on_property": "title"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
- name: Create Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
body: ${{steps.github_release.outputs.changelog}}
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,2 +1,3 @@
|
||||
releases
|
||||
tablizer
|
||||
*.out
|
||||
|
||||
12
Makefile
12
Makefile
@@ -53,8 +53,7 @@ buildlocal:
|
||||
go build -ldflags "-X 'github.com/tlinden/tablizer/cfg.VERSION=$(VERSION)'"
|
||||
|
||||
release:
|
||||
./mkrel.sh $(tool) $(version)
|
||||
gh release create $(version) --generate-notes releases/*
|
||||
gh release create $(version) --generate-notes
|
||||
|
||||
install: buildlocal
|
||||
install -d -o $(UID) -g $(GID) $(PREFIX)/bin
|
||||
@@ -65,13 +64,12 @@ install: buildlocal
|
||||
clean:
|
||||
rm -rf $(tool) releases coverage.out
|
||||
|
||||
test:
|
||||
go test -v ./...
|
||||
bash t/test.sh
|
||||
test: clean
|
||||
go test -count=1 -cover ./... $(OPTS)
|
||||
|
||||
singletest:
|
||||
@echo "Call like this: ''make singletest TEST=TestPrepareColumns MOD=lib"
|
||||
go test -run $(TEST) github.com/tlinden/tablizer/$(MOD)
|
||||
@echo "Call like this: 'make singletest TEST=TestPrepareColumns MOD=lib'"
|
||||
go test -run $(TEST) github.com/tlinden/tablizer/$(MOD) $(OPTS)
|
||||
|
||||
cover-report:
|
||||
go test ./... -cover -coverprofile=coverage.out
|
||||
|
||||
130
README.md
130
README.md
@@ -6,7 +6,65 @@
|
||||
|
||||
Tablizer can be used to re-format tabular output of other
|
||||
programs. While you could do this using standard unix tools, in some
|
||||
cases it's a hard job.
|
||||
cases it's a hard job. With tablizer you can filter by column[s],
|
||||
ignore certain column[s] by regex, name or number. It can output the
|
||||
tabular data in a range of formats (see below). There's even an
|
||||
interactive filter/selection tool available.
|
||||
|
||||
## Demo
|
||||
|
||||

|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```default
|
||||
Usage:
|
||||
tablizer [regex,...] [file, ...] [flags]
|
||||
|
||||
Operational Flags:
|
||||
-c, --columns string Only show the speficied columns (separated by ,)
|
||||
-v, --invert-match select non-matching rows
|
||||
-n, --numbering Enable header numbering
|
||||
-N, --no-color Disable pattern highlighting
|
||||
-H, --no-headers Disable headers display
|
||||
-s, --separator <string> Custom field separator
|
||||
-k, --sort-by <int|name> Sort by column (default: 1)
|
||||
-z, --fuzzy Use fuzzy search [experimental]
|
||||
-F, --filter <field[!]=reg> Filter given field with regex, can be used multiple times
|
||||
-T, --transpose-columns string Transpose the speficied columns (separated by ,)
|
||||
-R, --regex-transposer </from/to/> Apply /search/replace/ regexp to fields given in -T
|
||||
-j, --json Read JSON input (must be array of hashes)
|
||||
-I, --interactive Interactively filter and select rows
|
||||
|
||||
Output Flags (mutually exclusive):
|
||||
-X, --extended Enable extended output
|
||||
-M, --markdown Enable markdown table output
|
||||
-O, --orgtbl Enable org-mode table output
|
||||
-S, --shell Enable shell evaluable output
|
||||
-Y, --yaml Enable yaml output
|
||||
-C, --csv Enable CSV output
|
||||
-A, --ascii Default output mode, ascii tabular
|
||||
-L, --hightlight-lines Use alternating background colors for tables
|
||||
-y, --yank-columns Yank specified columns (separated by ,) to clipboard,
|
||||
space separated
|
||||
--ofs <char> Output field separator, used by -A and -C.
|
||||
|
||||
Sort Mode Flags (mutually exclusive):
|
||||
-a, --sort-age sort according to age (duration) string
|
||||
-D, --sort-desc Sort in descending order (default: ascending)
|
||||
-i, --sort-numeric sort according to string numerical value
|
||||
-t, --sort-time sort according to time string
|
||||
|
||||
Other Flags:
|
||||
-r --read-file <file> Use <file> as input instead of STDIN
|
||||
--completion <shell> Generate the autocompletion script for <shell>
|
||||
-f, --config <file> Configuration file (default: ~/.config/tablizer/config)
|
||||
-d, --debug Enable debugging
|
||||
-h, --help help for tablizer
|
||||
-m, --man Display manual page
|
||||
-V, --version Print program version
|
||||
```
|
||||
|
||||
Let's take this output:
|
||||
```
|
||||
@@ -22,13 +80,13 @@ to do this with tablizer:
|
||||
|
||||
```
|
||||
% kubectl get pods | tablizer
|
||||
NAME(1) READY(2) STATUS(3) RESTARTS(4) AGE(5)
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
repldepl-7bcd8d5b64-7zq4l 1/1 Running 1 (69m ago) 5h26m
|
||||
repldepl-7bcd8d5b64-m48n8 1/1 Running 1 (69m ago) 5h26m
|
||||
repldepl-7bcd8d5b64-q2bf4 1/1 Running 1 (69m ago) 5h26m
|
||||
|
||||
% kubectl get pods | tablizer -c 1,3
|
||||
NAME(1) STATUS(3)
|
||||
NAME STATUS
|
||||
repldepl-7bcd8d5b64-7zq4l Running
|
||||
repldepl-7bcd8d5b64-m48n8 Running
|
||||
repldepl-7bcd8d5b64-q2bf4 Running
|
||||
@@ -66,14 +124,14 @@ You can also specify a regex pattern to reduce the output:
|
||||
|
||||
```
|
||||
% kubectl get pods | tablizer q2bf4
|
||||
NAME(1) READY(2) STATUS(3) RESTARTS(4) AGE(5)
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
repldepl-7bcd8d5b64-q2bf4 1/1 Running 1 (69m ago) 5h26m
|
||||
```
|
||||
|
||||
Sometimes a filter regex is to broad and you wish to filter only on a
|
||||
particular column. This is possible using `-F`:
|
||||
```
|
||||
% kubectl get pods | tablizer -n -Fname=2
|
||||
% kubectl get pods | tablizer -Fname=2
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
repldepl-7bcd8d5b64-q2bf4 1/1 Running 1 (69m ago) 5h26m
|
||||
```
|
||||
@@ -83,9 +141,27 @@ otherwise on all rows.
|
||||
|
||||
There are more output modes like org-mode (orgtbl) and markdown.
|
||||
|
||||
## Demo
|
||||
You can also use it to modify certain cells using regular expression
|
||||
matching. For example:
|
||||
|
||||
```shell
|
||||
kubectl get pods | tablizer -T4 -R '/ /-/'
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
repldepl-7bcd8d5b64-7zq4l 1/1 Running 1-(69m-ago) 5h26m
|
||||
repldepl-7bcd8d5b64-m48n8 1/1 Running 1-(69m-ago) 5h26m
|
||||
repldepl-7bcd8d5b64-q2bf4 1/1 Running 1-(69m-ago) 5h26m
|
||||
```
|
||||
|
||||
Here, we modified the 4th column (`-T4`) by replacing every space with
|
||||
a dash. If you need to work with `/` characters, you can also use any
|
||||
other separator, for instance: `-R '| |-|'`.
|
||||
|
||||
There's also an interactive mode, invoked with the option B<-I>, where
|
||||
you can interactively filter and select rows:
|
||||
|
||||
<img width="937" height="293" alt="interactive" src="https://github.com/user-attachments/assets/0d4d65e2-d156-43ed-8021-39047c7939ed" />
|
||||
|
||||
|
||||
[](https://asciinema.org/a/9FKc3HPnlg8D2X8otheleEa9t)
|
||||
|
||||
## Installation
|
||||
|
||||
@@ -116,10 +192,9 @@ hesitate to ask me about it, I'll add it.
|
||||
## Documentation
|
||||
|
||||
The documentation is provided as a unix man-page. It will be
|
||||
automatically installed if you install from source. However, you can
|
||||
read the man-page online:
|
||||
automatically installed if you install from source.
|
||||
|
||||
https://github.com/TLINDEN/tablizer/blob/main/tablizer.pod
|
||||
[However, you can read the man-page online](https://github.com/TLINDEN/tablizer/blob/main/tablizer.pod).
|
||||
|
||||
Or if you cloned the repository you can read it this way (perl needs
|
||||
to be installed though): `perldoc tablizer.pod`.
|
||||
@@ -138,6 +213,41 @@ In order to report a bug, unexpected behavior, feature requests
|
||||
or to submit a patch, please open an issue on github:
|
||||
https://github.com/TLINDEN/tablizer/issues.
|
||||
|
||||
## Prior Art
|
||||
|
||||
When I started with tablizer I was not aware that other tools
|
||||
exist. Here is a non-exhausive list of the ones I find especially
|
||||
awesome:
|
||||
|
||||
### [miller](https://github.com/johnkerl/miller)
|
||||
|
||||
This is a really powerful tool to work with tabular data and it also
|
||||
allows other inputs as json, csv etc. You can filter, manipulate,
|
||||
create pipelines, there's even a programming language builtin to do
|
||||
even more amazing things.
|
||||
|
||||
### [csvq](https://github.com/mithrandie/csvq)
|
||||
|
||||
Csvq allows you to query CSV and TSV data using SQL queries. How nice
|
||||
is that? Highly recommended if you have to work with a large (and
|
||||
wide) dataset and need to apply a complicated set of rules.
|
||||
|
||||
### [goawk](https://github.com/benhoyt/goawk)
|
||||
|
||||
Goawk is a 100% POSIX compliant AWK implementation in GO, which also
|
||||
supports CSV and TSV data as input (using `-i csv` for example). You
|
||||
can apply any kind of awk code to your tabular data, there are no
|
||||
limit to your creativity!
|
||||
|
||||
### [teip](https://github.com/greymd/teip)
|
||||
|
||||
I particularly like teip, it's a real gem. You can use it to drill
|
||||
"holes" into your tabular data and modify these "holes" using small
|
||||
external unix commands such as grep or sed. The possibilities are
|
||||
endless, you can even use teip to modify data inside a hole created by
|
||||
teip. Highly recommended.
|
||||
|
||||
|
||||
## Copyright and license
|
||||
|
||||
This software is licensed under the GNU GENERAL PUBLIC LICENSE version 3.
|
||||
|
||||
10
TODO.md
10
TODO.md
@@ -6,13 +6,3 @@
|
||||
|
||||
- add --no-headers option
|
||||
|
||||
### Lisp Plugin Infrastructure using zygo
|
||||
|
||||
Hooks:
|
||||
|
||||
| Filter | Purpose | Args | Return |
|
||||
|-----------|-------------------------------------------------------------|---------------------|--------|
|
||||
| filter | include or exclude lines | row as hash | bool |
|
||||
| process | do calculations with data, store results in global lisp env | whole dataset | nil |
|
||||
| transpose | modify a cell | headername and cell | cell |
|
||||
| append | add one or more rows to the dataset (use this to add stats) | nil | rows |
|
||||
|
||||
191
cfg/config.go
191
cfg/config.go
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022-2024 Thomas von Dein
|
||||
Copyright © 2022-2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
@@ -23,19 +23,30 @@ import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/glycerine/zygomys/zygo"
|
||||
"github.com/gookit/color"
|
||||
"github.com/hashicorp/hcl/v2/hclsimple"
|
||||
)
|
||||
|
||||
const DefaultSeparator string = `(\s\s+|\t)`
|
||||
const Version string = "v1.2.0"
|
||||
const MAXPARTS = 2
|
||||
const (
|
||||
Version = "v1.5.10"
|
||||
MAXPARTS = 2
|
||||
)
|
||||
|
||||
var DefaultLoadPath = os.Getenv("HOME") + "/.config/tablizer/lisp"
|
||||
var DefaultConfigfile = os.Getenv("HOME") + "/.config/tablizer/config"
|
||||
var (
|
||||
DefaultConfigfile = os.Getenv("HOME") + "/.config/tablizer/config"
|
||||
VERSION string // maintained by -x
|
||||
|
||||
var VERSION string // maintained by -x
|
||||
SeparatorTemplates = map[string]string{
|
||||
":tab:": `\s*\t\s*`, // tab but eats spaces around
|
||||
":spaces:": `\s{2,}`, // 2 or more spaces
|
||||
":pipe:": `\s*\|\s*`, // one pipe eating spaces around
|
||||
":default:": `(\s\s+|\t)`, // 2 or more spaces or tab
|
||||
":nonword:": `\W`, // word boundary
|
||||
":nondigit:": `\D`, // same for numbers
|
||||
":special:": `[\*\+\-_\(\)\[\]\{\}?\\/<>=&$§"':,\^]+`, // match any special char
|
||||
":nonprint:": `[[:^print:]]+`, // non printables
|
||||
}
|
||||
)
|
||||
|
||||
// public config, set via config file or using defaults
|
||||
type Settings struct {
|
||||
@@ -49,24 +60,51 @@ type Settings struct {
|
||||
HighlightHdrBG string `hcl:"HighlightHdrBG"`
|
||||
}
|
||||
|
||||
type Transposer struct {
|
||||
Search regexp.Regexp
|
||||
Replace string
|
||||
}
|
||||
|
||||
type Pattern struct {
|
||||
Pattern string
|
||||
PatternRe *regexp.Regexp
|
||||
Negate bool
|
||||
}
|
||||
|
||||
type Filter struct {
|
||||
Regex *regexp.Regexp
|
||||
Negate bool
|
||||
}
|
||||
|
||||
// internal config
|
||||
type Config struct {
|
||||
Debug bool
|
||||
NoNumbering bool
|
||||
Numbering bool
|
||||
NoHeaders bool
|
||||
Columns string
|
||||
UseColumns []int
|
||||
YankColumns string
|
||||
UseYankColumns []int
|
||||
Separator string
|
||||
OutputMode int
|
||||
InvertMatch bool
|
||||
Pattern string
|
||||
PatternR *regexp.Regexp
|
||||
Patterns []*Pattern
|
||||
UseFuzzySearch bool
|
||||
UseHighlight bool
|
||||
Interactive bool
|
||||
InputJSON bool
|
||||
AutoHeaders bool
|
||||
CustomHeaders []string
|
||||
|
||||
SortMode string
|
||||
SortDescending bool
|
||||
SortByColumn int
|
||||
SortMode string
|
||||
SortDescending bool
|
||||
SortByColumn string // 1,2
|
||||
UseSortByColumn []int // []int{1,2}
|
||||
|
||||
TransposeColumns string // 1,2
|
||||
UseTransposeColumns []int // []int{1,2}
|
||||
Transposers []string // []string{"/ /-/", "/foo/bar/"}
|
||||
UseTransposers []Transposer // {Search: re, Replace: string}
|
||||
|
||||
/*
|
||||
FIXME: make configurable somehow, config file or ENV
|
||||
@@ -79,13 +117,6 @@ type Config struct {
|
||||
|
||||
NoColor bool
|
||||
|
||||
// special case: we use the config struct to transport the lisp
|
||||
// env trough the program
|
||||
Lisp *zygo.Zlisp
|
||||
|
||||
// a path containing lisp scripts to be loaded on startup
|
||||
LispLoadPath string
|
||||
|
||||
// config file, optional
|
||||
Configfile string
|
||||
|
||||
@@ -93,7 +124,12 @@ type Config struct {
|
||||
|
||||
// used for field filtering
|
||||
Rawfilters []string
|
||||
Filters map[string]*regexp.Regexp
|
||||
Filters map[string]Filter //map[string]*regexp.Regexp
|
||||
|
||||
// -r <file>
|
||||
InputFile string
|
||||
|
||||
OFS string
|
||||
}
|
||||
|
||||
// maps outputmode short flags to output mode, ie. -O => -o orgtbl
|
||||
@@ -125,9 +161,6 @@ type Sortmode struct {
|
||||
Age bool
|
||||
}
|
||||
|
||||
// valid lisp hooks
|
||||
var ValidHooks []string
|
||||
|
||||
// default color schemes
|
||||
func (conf *Config) Colors() map[color.Level]map[string]color.Color {
|
||||
colors := map[color.Level]map[string]color.Color{
|
||||
@@ -263,12 +296,20 @@ func (conf *Config) PrepareModeFlags(flag Modeflag) {
|
||||
}
|
||||
|
||||
func (conf *Config) PrepareFilters() error {
|
||||
conf.Filters = make(map[string]*regexp.Regexp, len(conf.Rawfilters))
|
||||
conf.Filters = make(map[string]Filter, len(conf.Rawfilters))
|
||||
|
||||
for _, filter := range conf.Rawfilters {
|
||||
parts := strings.Split(filter, "=")
|
||||
for _, rawfilter := range conf.Rawfilters {
|
||||
filter := Filter{}
|
||||
|
||||
parts := strings.Split(rawfilter, "!=")
|
||||
if len(parts) != MAXPARTS {
|
||||
return errors.New("filter field and value must be separated by =")
|
||||
parts = strings.Split(rawfilter, "=")
|
||||
|
||||
if len(parts) != MAXPARTS {
|
||||
return errors.New("filter field and value must be separated by '=' or '!='")
|
||||
}
|
||||
} else {
|
||||
filter.Negate = true
|
||||
}
|
||||
|
||||
reg, err := regexp.Compile(parts[1])
|
||||
@@ -277,7 +318,31 @@ func (conf *Config) PrepareFilters() error {
|
||||
parts[0], err)
|
||||
}
|
||||
|
||||
conf.Filters[strings.ToLower(parts[0])] = reg
|
||||
filter.Regex = reg
|
||||
conf.Filters[strings.ToLower(parts[0])] = filter
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// check if transposers match transposer columns and prepare transposer structs
|
||||
func (conf *Config) PrepareTransposers() error {
|
||||
if len(conf.Transposers) != len(conf.UseTransposeColumns) {
|
||||
return fmt.Errorf("the number of transposers needs to correspond to the number of transpose columns: %d != %d",
|
||||
len(conf.Transposers), len(conf.UseTransposeColumns))
|
||||
}
|
||||
|
||||
for _, transposer := range conf.Transposers {
|
||||
parts := strings.Split(transposer, string(transposer[0]))
|
||||
if len(parts) != 4 {
|
||||
return fmt.Errorf("transposer function must have the format /regexp/replace-string/")
|
||||
}
|
||||
|
||||
conf.UseTransposers = append(conf.UseTransposers,
|
||||
Transposer{
|
||||
Search: *regexp.MustCompile(parts[1]),
|
||||
Replace: parts[2]},
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -286,10 +351,10 @@ func (conf *Config) PrepareFilters() error {
|
||||
func (conf *Config) CheckEnv() {
|
||||
// check for environment vars, command line flags have precedence,
|
||||
// NO_COLOR is being checked by the color module itself.
|
||||
if !conf.NoNumbering {
|
||||
_, set := os.LookupEnv("T_NO_HEADER_NUMBERING")
|
||||
if !conf.Numbering {
|
||||
_, set := os.LookupEnv("T_HEADER_NUMBERING")
|
||||
if set {
|
||||
conf.NoNumbering = true
|
||||
conf.Numbering = true
|
||||
}
|
||||
}
|
||||
|
||||
@@ -304,31 +369,73 @@ func (conf *Config) CheckEnv() {
|
||||
func (conf *Config) ApplyDefaults() {
|
||||
// mode specific defaults
|
||||
if conf.OutputMode == Yaml || conf.OutputMode == CSV {
|
||||
conf.NoNumbering = true
|
||||
conf.Numbering = false
|
||||
}
|
||||
|
||||
ValidHooks = []string{"filter", "process", "transpose", "append"}
|
||||
if conf.Separator[0] == ':' && conf.Separator[len(conf.Separator)-1] == ':' {
|
||||
separator, ok := SeparatorTemplates[conf.Separator]
|
||||
if ok {
|
||||
conf.Separator = separator
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (conf *Config) PreparePattern(pattern string) error {
|
||||
PatternR, err := regexp.Compile(pattern)
|
||||
func (conf *Config) PreparePattern(patterns []*Pattern) error {
|
||||
// regex checks if a pattern looks like /$pattern/[i!]
|
||||
flagre := regexp.MustCompile(`^/(.*)/([i!]*)$`)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("regexp pattern %s is invalid: %w", conf.Pattern, err)
|
||||
for _, pattern := range patterns {
|
||||
matches := flagre.FindAllStringSubmatch(pattern.Pattern, -1)
|
||||
|
||||
// we have a regex with flags
|
||||
for _, match := range matches {
|
||||
pattern.Pattern = match[1] // the inner part is our actual pattern
|
||||
flags := match[2] // the flags
|
||||
|
||||
for _, flag := range flags {
|
||||
switch flag {
|
||||
case 'i':
|
||||
pattern.Pattern = `(?i)` + pattern.Pattern
|
||||
case '!':
|
||||
pattern.Negate = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PatternRe, err := regexp.Compile(pattern.Pattern)
|
||||
if err != nil {
|
||||
return fmt.Errorf("regexp pattern %s is invalid: %w", pattern.Pattern, err)
|
||||
}
|
||||
|
||||
pattern.PatternRe = PatternRe
|
||||
}
|
||||
|
||||
conf.PatternR = PatternR
|
||||
conf.Pattern = pattern
|
||||
conf.Patterns = patterns
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (conf *Config) PrepareCustomHeaders(custom string) {
|
||||
if len(custom) > 0 {
|
||||
conf.CustomHeaders = strings.Split(custom, ",")
|
||||
}
|
||||
}
|
||||
|
||||
// Parse config file. Ignore if the file doesn't exist but return an
|
||||
// error if it exists but fails to read or parse
|
||||
func (conf *Config) ParseConfigfile() error {
|
||||
path, err := os.Stat(conf.Configfile)
|
||||
|
||||
if os.IsNotExist(err) || path.IsDir() {
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
// ignore non-existent files
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("failed to stat config file: %w", err)
|
||||
}
|
||||
|
||||
if path.IsDir() {
|
||||
// ignore non-existent or dirs
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -21,6 +21,8 @@ import (
|
||||
"fmt"
|
||||
// "reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestPrepareModeFlags(t *testing.T) {
|
||||
@@ -44,9 +46,8 @@ func TestPrepareModeFlags(t *testing.T) {
|
||||
conf := Config{}
|
||||
|
||||
conf.PrepareModeFlags(testdata.flag)
|
||||
if conf.OutputMode != testdata.expect {
|
||||
t.Errorf("got: %d, expect: %d", conf.OutputMode, testdata.expect)
|
||||
}
|
||||
|
||||
assert.EqualValues(t, testdata.expect, conf.OutputMode)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -70,34 +71,67 @@ func TestPrepareSortFlags(t *testing.T) {
|
||||
|
||||
conf.PrepareSortFlags(testdata.flag)
|
||||
|
||||
if conf.SortMode != testdata.expect {
|
||||
t.Errorf("got: %s, expect: %s", conf.SortMode, testdata.expect)
|
||||
}
|
||||
assert.EqualValues(t, testdata.expect, conf.SortMode)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPreparePattern(t *testing.T) {
|
||||
var tests = []struct {
|
||||
pattern string
|
||||
wanterr bool
|
||||
patterns []*Pattern
|
||||
name string
|
||||
wanterror bool
|
||||
wanticase bool
|
||||
wantneg bool
|
||||
}{
|
||||
{"[A-Z]+", false},
|
||||
{"[a-z", true},
|
||||
{
|
||||
[]*Pattern{{Pattern: "[A-Z]+"}},
|
||||
"simple",
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
},
|
||||
{
|
||||
[]*Pattern{{Pattern: "[a-z"}},
|
||||
"regfail",
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
},
|
||||
{
|
||||
[]*Pattern{{Pattern: "/[A-Z]+/i"}},
|
||||
"icase",
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
},
|
||||
{
|
||||
[]*Pattern{{Pattern: "/[A-Z]+/!"}},
|
||||
"negate",
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
},
|
||||
{
|
||||
[]*Pattern{{Pattern: "/[A-Z]+/!i"}},
|
||||
"negicase",
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, testdata := range tests {
|
||||
testname := fmt.Sprintf("PreparePattern-pattern-%s-wanterr-%t",
|
||||
testdata.pattern, testdata.wanterr)
|
||||
testname := fmt.Sprintf("PreparePattern-pattern-%s-wanterr-%t", testdata.name, testdata.wanterror)
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
conf := Config{}
|
||||
|
||||
err := conf.PreparePattern(testdata.pattern)
|
||||
err := conf.PreparePattern(testdata.patterns)
|
||||
|
||||
if err != nil {
|
||||
if !testdata.wanterr {
|
||||
t.Errorf("PreparePattern returned error: %s", err)
|
||||
}
|
||||
if testdata.wanterror {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
102
cmd/root.go
102
cmd/root.go
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022-2024 Thomas von Dein
|
||||
Copyright © 2022-2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
@@ -17,12 +17,10 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
@@ -30,24 +28,6 @@ import (
|
||||
"github.com/tlinden/tablizer/lib"
|
||||
)
|
||||
|
||||
func man() {
|
||||
man := exec.Command("less", "-")
|
||||
|
||||
var buffer bytes.Buffer
|
||||
|
||||
buffer.Write([]byte(manpage))
|
||||
|
||||
man.Stdout = os.Stdout
|
||||
man.Stdin = &buffer
|
||||
man.Stderr = os.Stderr
|
||||
|
||||
err := man.Run()
|
||||
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func completion(cmd *cobra.Command, mode string) error {
|
||||
switch mode {
|
||||
case "bash":
|
||||
@@ -63,6 +43,14 @@ func completion(cmd *cobra.Command, mode string) error {
|
||||
}
|
||||
}
|
||||
|
||||
// we die with exit 1 if there's an error
|
||||
func wrapE(err error) {
|
||||
if err != nil {
|
||||
fmt.Println(err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func Execute() {
|
||||
var (
|
||||
conf cfg.Config
|
||||
@@ -71,61 +59,55 @@ func Execute() {
|
||||
ShowCompletion string
|
||||
modeflag cfg.Modeflag
|
||||
sortmode cfg.Sortmode
|
||||
headers string
|
||||
)
|
||||
|
||||
var rootCmd = &cobra.Command{
|
||||
Use: "tablizer [regex] [file, ...]",
|
||||
Short: "[Re-]tabularize tabular data",
|
||||
Long: `Manipulate tabular output of other programs`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if ShowVersion {
|
||||
fmt.Println(cfg.Getversion())
|
||||
|
||||
return nil
|
||||
return
|
||||
}
|
||||
|
||||
if ShowManual {
|
||||
man()
|
||||
lib.Pager("tablizer manual page", manpage)
|
||||
|
||||
return nil
|
||||
return
|
||||
}
|
||||
|
||||
if len(ShowCompletion) > 0 {
|
||||
return completion(cmd, ShowCompletion)
|
||||
wrapE(completion(cmd, ShowCompletion))
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Setup
|
||||
err := conf.ParseConfigfile()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
wrapE(conf.ParseConfigfile())
|
||||
|
||||
conf.CheckEnv()
|
||||
conf.PrepareModeFlags(modeflag)
|
||||
conf.PrepareSortFlags(sortmode)
|
||||
conf.PrepareCustomHeaders(headers)
|
||||
|
||||
if err = conf.PrepareFilters(); err != nil {
|
||||
return err
|
||||
}
|
||||
wrapE(conf.PrepareFilters())
|
||||
|
||||
conf.DetermineColormode()
|
||||
conf.ApplyDefaults()
|
||||
|
||||
// setup lisp env, load plugins etc
|
||||
err = lib.SetupLisp(&conf)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// actual execution starts here
|
||||
return lib.ProcessFiles(&conf, args)
|
||||
wrapE(lib.ProcessFiles(&conf, args))
|
||||
},
|
||||
}
|
||||
|
||||
// options
|
||||
rootCmd.PersistentFlags().BoolVarP(&conf.Debug, "debug", "d", false,
|
||||
"Enable debugging")
|
||||
rootCmd.PersistentFlags().BoolVarP(&conf.NoNumbering, "no-numbering", "n", false,
|
||||
rootCmd.PersistentFlags().BoolVarP(&conf.Numbering, "numbering", "n", false,
|
||||
"Disable header numbering")
|
||||
rootCmd.PersistentFlags().BoolVarP(&conf.NoHeaders, "no-headers", "H", false,
|
||||
"Disable header display")
|
||||
@@ -143,13 +125,27 @@ func Execute() {
|
||||
"Use alternating background colors")
|
||||
rootCmd.PersistentFlags().StringVarP(&ShowCompletion, "completion", "", "",
|
||||
"Display completion code")
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.Separator, "separator", "s", cfg.DefaultSeparator,
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.Separator, "separator", "s", cfg.SeparatorTemplates[":default:"],
|
||||
"Custom field separator")
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.Columns, "columns", "c", "",
|
||||
"Only show the speficied columns (separated by ,)")
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.YankColumns, "yank-columns", "y", "",
|
||||
"Yank the speficied columns (separated by ,) to the clipboard")
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.TransposeColumns, "transpose-columns", "T", "",
|
||||
"Transpose the speficied columns (separated by ,)")
|
||||
rootCmd.PersistentFlags().BoolVarP(&conf.Interactive, "interactive", "I", false,
|
||||
"interactive mode")
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.OFS, "ofs", "", "",
|
||||
"Output field separator (' ' for ascii table, ',' for CSV)")
|
||||
rootCmd.PersistentFlags().BoolVarP(&conf.InputJSON, "json", "j", false,
|
||||
"JSON input mode")
|
||||
rootCmd.PersistentFlags().BoolVarP(&conf.AutoHeaders, "auto-headers", "", false,
|
||||
"Generate headers automatically")
|
||||
rootCmd.PersistentFlags().StringVarP(&headers, "custom-headers", "", "",
|
||||
"Custom headers")
|
||||
|
||||
// sort options
|
||||
rootCmd.PersistentFlags().IntVarP(&conf.SortByColumn, "sort-by", "k", 0,
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.SortByColumn, "sort-by", "k", "",
|
||||
"Sort by column (default: 1)")
|
||||
|
||||
// sort mode, only 1 allowed
|
||||
@@ -182,19 +178,27 @@ func Execute() {
|
||||
rootCmd.MarkFlagsMutuallyExclusive("extended", "markdown", "orgtbl",
|
||||
"shell", "yaml", "csv")
|
||||
|
||||
// lisp options
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.LispLoadPath, "load-path", "l", cfg.DefaultLoadPath,
|
||||
"Load path for lisp plugins (expects *.zy files)")
|
||||
|
||||
// config file
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.Configfile, "config", "f", cfg.DefaultConfigfile,
|
||||
"config file (default: ~/.config/tablizer/config)")
|
||||
|
||||
// filters
|
||||
rootCmd.PersistentFlags().StringArrayVarP(&conf.Rawfilters, "filter", "F", nil, "Filter by field (field=regexp)")
|
||||
rootCmd.PersistentFlags().StringArrayVarP(&conf.Rawfilters,
|
||||
"filter", "F", nil, "Filter by field (field=regexp || field!=regexp)")
|
||||
rootCmd.PersistentFlags().StringArrayVarP(&conf.Transposers,
|
||||
"regex-transposer", "R", nil, "apply /search/replace/ regexp to fields given in -T")
|
||||
|
||||
// input
|
||||
rootCmd.PersistentFlags().StringVarP(&conf.InputFile, "read-file", "r", "",
|
||||
"Read input data from file")
|
||||
|
||||
rootCmd.SetUsageTemplate(strings.TrimSpace(usage) + "\n")
|
||||
|
||||
if slices.Contains(os.Args, "-h") {
|
||||
fmt.Println(shortusage)
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
err := rootCmd.Execute()
|
||||
if err != nil {
|
||||
os.Exit(1)
|
||||
|
||||
16
cmd/shortusage.go
Normal file
16
cmd/shortusage.go
Normal file
@@ -0,0 +1,16 @@
|
||||
package cmd
|
||||
|
||||
const shortusage = `tablizer [regex,...] [-r file] [flags]
|
||||
-c col,... show specified columns -L highlight matching lines
|
||||
-k col,... sort by specified columns -j read JSON input
|
||||
-F col=reg filter field with regexp -v invert match
|
||||
-T col,... transpose specified columns -n numberize columns
|
||||
-R /from/to/ apply replacement to columns in -T -N do not use colors
|
||||
-y col,... yank columns to clipboard -H do not show headers
|
||||
--ofs char output field separator -s specify field separator
|
||||
-r file read input from file -z use fuzzy search
|
||||
-f file read config from file -I interactive filter mode
|
||||
-d debug
|
||||
-O org -C CSV -M md -X ext -S shell -Y yaml -D sort descending order
|
||||
-m show manual --help show detailed help -v show version
|
||||
-a sort by age -i sort numerically -t sort by time`
|
||||
332
cmd/tablizer.go
332
cmd/tablizer.go
@@ -6,42 +6,52 @@ NAME
|
||||
|
||||
SYNOPSIS
|
||||
Usage:
|
||||
tablizer [regex] [file, ...] [flags]
|
||||
tablizer [regex,...] [-r file] [flags]
|
||||
|
||||
Operational Flags:
|
||||
-c, --columns string Only show the speficied columns (separated by ,)
|
||||
-v, --invert-match select non-matching rows
|
||||
-n, --no-numbering Disable header numbering
|
||||
-N, --no-color Disable pattern highlighting
|
||||
-H, --no-headers Disable headers display
|
||||
-s, --separator string Custom field separator
|
||||
-k, --sort-by int Sort by column (default: 1)
|
||||
-z, --fuzzy Use fuzzy search [experimental]
|
||||
-F, --filter field=reg Filter given field with regex, can be used multiple times
|
||||
-c, --columns string Only show the speficied columns (separated by ,)
|
||||
-v, --invert-match select non-matching rows
|
||||
-n, --numbering Enable header numbering
|
||||
-N, --no-color Disable pattern highlighting
|
||||
-H, --no-headers Disable headers display
|
||||
-s, --separator <string> Custom field separator (maybe char, string or :class:)
|
||||
-k, --sort-by <int|name> Sort by column (default: 1)
|
||||
-z, --fuzzy Use fuzzy search [experimental]
|
||||
-F, --filter <field[!]=reg> Filter given field with regex, can be used multiple times
|
||||
-T, --transpose-columns string Transpose the speficied columns (separated by ,)
|
||||
-R, --regex-transposer </from/to/> Apply /search/replace/ regexp to fields given in -T
|
||||
-j, --json Read JSON input (must be array of hashes)
|
||||
-I, --interactive Interactively filter and select rows
|
||||
--auto-headers Generate headers if there are none present in input
|
||||
--custom-headers a,b,... Use custom headers, separated by comma
|
||||
|
||||
Output Flags (mutually exclusive):
|
||||
-X, --extended Enable extended output
|
||||
-M, --markdown Enable markdown table output
|
||||
-O, --orgtbl Enable org-mode table output
|
||||
-S, --shell Enable shell evaluable output
|
||||
-Y, --yaml Enable yaml output
|
||||
-C, --csv Enable CSV output
|
||||
-A, --ascii Default output mode, ascii tabular
|
||||
-L, --hightlight-lines Use alternating background colors for tables
|
||||
-X, --extended Enable extended output
|
||||
-M, --markdown Enable markdown table output
|
||||
-O, --orgtbl Enable org-mode table output
|
||||
-S, --shell Enable shell evaluable output
|
||||
-Y, --yaml Enable yaml output
|
||||
-C, --csv Enable CSV output
|
||||
-A, --ascii Default output mode, ascii tabular
|
||||
-L, --hightlight-lines Use alternating background colors for tables
|
||||
-y, --yank-columns Yank specified columns (separated by ,) to clipboard,
|
||||
space separated
|
||||
--ofs <char> Output field separator, used by -A and -C.
|
||||
|
||||
Sort Mode Flags (mutually exclusive):
|
||||
-a, --sort-age sort according to age (duration) string
|
||||
-D, --sort-desc Sort in descending order (default: ascending)
|
||||
-i, --sort-numeric sort according to string numerical value
|
||||
-t, --sort-time sort according to time string
|
||||
-a, --sort-age sort according to age (duration) string
|
||||
-D, --sort-desc Sort in descending order (default: ascending)
|
||||
-i, --sort-numeric sort according to string numerical value
|
||||
-t, --sort-time sort according to time string
|
||||
|
||||
Other Flags:
|
||||
--completion <shell> Generate the autocompletion script for <shell>
|
||||
-f, --config <file> Configuration file (default: ~/.config/tablizer/config)
|
||||
-d, --debug Enable debugging
|
||||
-h, --help help for tablizer
|
||||
-m, --man Display manual page
|
||||
-V, --version Print program version
|
||||
-r --read-file <file> Use <file> as input instead of STDIN
|
||||
--completion <shell> Generate the autocompletion script for <shell>
|
||||
-f, --config <file> Configuration file (default: ~/.config/tablizer/config)
|
||||
-d, --debug Enable debugging
|
||||
-h, --help help for tablizer
|
||||
-m, --man Display manual page
|
||||
-V, --version Print program version
|
||||
|
||||
DESCRIPTION
|
||||
Many programs generate tabular output. But sometimes you need to
|
||||
@@ -69,16 +79,16 @@ DESCRIPTION
|
||||
kubectl get pods | tablizer
|
||||
|
||||
# read a file
|
||||
tablizer filename
|
||||
tablizer -r filename
|
||||
|
||||
# search for pattern in a file (works like grep)
|
||||
tablizer regex filename
|
||||
tablizer regex -r filename
|
||||
|
||||
# search for pattern in STDIN
|
||||
kubectl get pods | tablizer regex
|
||||
|
||||
The output looks like the original one but every header field will have
|
||||
a numer associated with it, e.g.:
|
||||
The output looks like the original one. You can add the option -n, then
|
||||
every header field will have a numer associated with it, e.g.:
|
||||
|
||||
NAME(1) READY(2) STATUS(3) RESTARTS(4) AGE(5)
|
||||
|
||||
@@ -90,7 +100,13 @@ DESCRIPTION
|
||||
You can specify the numbers in any order but output will always follow
|
||||
the original order.
|
||||
|
||||
The numbering can be suppressed by using the -n option.
|
||||
However, you may also just use the header names instead of numbers, eg:
|
||||
|
||||
kubectl get pods | tablizer -cname,status
|
||||
|
||||
You can also use regular expressions with -c, eg:
|
||||
|
||||
kubectl get pods | tablizer -c '[ae]'
|
||||
|
||||
By default tablizer shows a header containing the names of each column.
|
||||
This can be disabled using the -H option. Be aware that this only
|
||||
@@ -101,10 +117,19 @@ DESCRIPTION
|
||||
highlighted. You can disable this behavior with the -N option.
|
||||
|
||||
Use the -k option to specify by which column to sort the tabular data
|
||||
(as in GNU sort(1)). The default sort column is the first one. To
|
||||
disable sorting at all, supply 0 (Zero) to -k. The default sort order is
|
||||
ascending. You can change this to descending order using the option -D.
|
||||
The default sort order is by string, but there are other sort modes:
|
||||
(as in GNU sort(1)). The default sort column is the first one. You can
|
||||
specify column numbers or names. Column numbers start with 1, names are
|
||||
case insensitive. You can specify multiple columns separated by comma to
|
||||
sort, but the type must be the same. For example if you want to sort
|
||||
numerically, all columns must be numbers. If you use column numbers,
|
||||
then be aware, that these are the numbers before column extraction. For
|
||||
example if you have a table with 4 columns and specify "-c4", then only
|
||||
1 column (the fourth) will be printed, however if you want to sort by
|
||||
this column, you'll have to specify "-k4".
|
||||
|
||||
The default sort order is ascending. You can change this to descending
|
||||
order using the option -D. The default sort order is by alphanumeric
|
||||
string, but there are other sort modes:
|
||||
|
||||
-a --sort-age
|
||||
Sorts duration strings like "1d4h32m51s".
|
||||
@@ -118,31 +143,95 @@ DESCRIPTION
|
||||
Finally the -d option enables debugging output which is mostly useful
|
||||
for the developer.
|
||||
|
||||
SEPARATOR
|
||||
The option -s can be a single character, in which case the CSV parser
|
||||
will be invoked. You can also specify a string as separator. The string
|
||||
will be interpreted as literal string unless it is a valid go regular
|
||||
expression. For example:
|
||||
|
||||
-s '\t{2,}\'
|
||||
|
||||
is being used as a regexp and will match two or more consecutive tabs.
|
||||
|
||||
-s 'foo'
|
||||
|
||||
on the other hand is no regular expression and will be used literally.
|
||||
|
||||
To make live easier, there are a couple of predefined regular
|
||||
expressions, which you can specify as classes:
|
||||
|
||||
* :tab:
|
||||
|
||||
Matches a tab and eats spaces around it.
|
||||
|
||||
* :spaces:
|
||||
|
||||
Matches 2 or more spaces.
|
||||
|
||||
* :pipe:
|
||||
|
||||
Matches a pipe character and eats spaces around it.
|
||||
|
||||
* :default:
|
||||
|
||||
Matches 2 or more spaces or tab. This is the default separator if
|
||||
none is specified.
|
||||
|
||||
* :nonword:
|
||||
|
||||
Matches a non-word character.
|
||||
|
||||
* :nondigit:
|
||||
|
||||
Matches a non-digit character.
|
||||
|
||||
* :special:
|
||||
|
||||
Matches one or more special chars like brackets, dollar sign,
|
||||
slashes etc.
|
||||
|
||||
* :nonprint:
|
||||
|
||||
Matches one or more non-printable characters.
|
||||
|
||||
PATTERNS AND FILTERING
|
||||
You can reduce the rows being displayed by using a regular expression
|
||||
pattern. The regexp is PCRE compatible, refer to the syntax cheat sheet
|
||||
here: <https://github.com/google/re2/wiki/Syntax>. If you want to read a
|
||||
more comprehensive documentation about the topic and have perl installed
|
||||
you can read it with:
|
||||
You can reduce the rows being displayed by using one or more regular
|
||||
expression patterns. The regexp language being used is the one of
|
||||
GOLANG, refer to the syntax cheat sheet here:
|
||||
<https://pkg.go.dev/regexp/syntax>.
|
||||
|
||||
If you want to read a more comprehensive documentation about the topic
|
||||
and have perl installed you can read it with:
|
||||
|
||||
perldoc perlre
|
||||
|
||||
Or read it online: <https://perldoc.perl.org/perlre>.
|
||||
Or read it online: <https://perldoc.perl.org/perlre>. But please note
|
||||
that the GO regexp engine does NOT support all perl regex terms,
|
||||
especially look-ahead and look-behind.
|
||||
|
||||
A note on modifiers: the regexp engine used in tablizer uses another
|
||||
modifier syntax:
|
||||
If you want to supply flags to a regex, then surround it with slashes
|
||||
and append the flag. The following flags are supported:
|
||||
|
||||
(?MODIFIER)
|
||||
|
||||
The most important modifiers are:
|
||||
|
||||
"i" ignore case "m" multiline mode "s" single line mode
|
||||
i => case insensitive
|
||||
! => negative match
|
||||
|
||||
Example for a case insensitive search:
|
||||
|
||||
kubectl get pods -A | tablizer "(?i)account"
|
||||
kubectl get pods -A | tablizer "/account/i"
|
||||
|
||||
You can use the experimental fuzzy search feature by providing the
|
||||
If you use the "!" flag, then the regex match will be negated, that is,
|
||||
if a line in the input matches the given regex, but "!" is supplied,
|
||||
tablizer will NOT include it in the output.
|
||||
|
||||
For example, here we want to get all lines matching "foo" but not "bar":
|
||||
|
||||
cat table | tablizer foo '/bar/!'
|
||||
|
||||
This would match a line "foo zorro" but not "foo bar".
|
||||
|
||||
The flags can also be combined.
|
||||
|
||||
You can also use the experimental fuzzy search feature by providing the
|
||||
option -z, in which case the pattern is regarded as a fuzzy search term,
|
||||
not a regexp.
|
||||
|
||||
@@ -157,8 +246,26 @@ DESCRIPTION
|
||||
If you specify more than one filter, both filters have to match (AND
|
||||
operation).
|
||||
|
||||
These field filters can also be negated:
|
||||
|
||||
fieldname!=regexp
|
||||
|
||||
If the option -v is specified, the filtering is inverted.
|
||||
|
||||
INTERACTIVE FILTERING
|
||||
You can also use the interactive mode, enabled with "-I" to filter and
|
||||
select rows. This mode is complementary, that is, other filter options
|
||||
are still being respected.
|
||||
|
||||
To enter e filter, hit "/", enter a filter string and finish with
|
||||
"ENTER". Use "SPACE" to select/deselect rows, use "a" to select all
|
||||
(visible) rows.
|
||||
|
||||
Commit your selection with "q". The selected rows are being fed to the
|
||||
requested output mode as usual. Abort with "CTRL-c", in which case the
|
||||
results of the interactive mode are being ignored and all rows are being
|
||||
fed to output.
|
||||
|
||||
COLUMNS
|
||||
The parameter -c can be used to specify, which columns to display. By
|
||||
default tablizer numerizes the header names and these numbers can be
|
||||
@@ -185,6 +292,44 @@ DESCRIPTION
|
||||
|
||||
where "C" is our regexp which matches CMD.
|
||||
|
||||
If a column specifier doesn't look like a regular expression, matching
|
||||
against header fields will be case insensitive. So, if you have a field
|
||||
with the name "ID" then these will all match: "-c id", "-c Id". The same
|
||||
rule applies to the options "-T" and "-F".
|
||||
|
||||
TRANSPOSE FIELDS USING REGEXPS
|
||||
You can manipulate field contents using regular expressions. You have to
|
||||
tell tablizer which field[s] to operate on using the option "-T" and the
|
||||
search/replace pattern using "-R". The number of columns and patterns
|
||||
must match.
|
||||
|
||||
A search/replace pattern consists of the following elements:
|
||||
|
||||
/search-regexp/replace-string/
|
||||
|
||||
The separator can be any valid character. Especially if you want to use
|
||||
a regexp containing the "/" character, eg:
|
||||
|
||||
|search-regexp|replace-string|
|
||||
|
||||
Example:
|
||||
|
||||
cat t/testtable2
|
||||
NAME DURATION
|
||||
x 10
|
||||
a 100
|
||||
z 0
|
||||
u 4
|
||||
k 6
|
||||
|
||||
cat t/testtable2 | tablizer -T2 -R '/^\d/4/' -n
|
||||
NAME DURATION
|
||||
x 40
|
||||
a 400
|
||||
z 4
|
||||
u 4
|
||||
k 4
|
||||
|
||||
OUTPUT MODES
|
||||
There might be cases when the tabular output of a program is way too
|
||||
large for your current terminal but you still need to see every column.
|
||||
@@ -218,12 +363,24 @@ DESCRIPTION
|
||||
markdown which prints a Markdown table, yaml, which prints yaml encoding
|
||||
and CSV mode, which prints a comma separated value file.
|
||||
|
||||
PUT FIELDS TO CLIPBOARD
|
||||
You can let tablizer put fields to the clipboard using the option "-y".
|
||||
This best fits the use-case when the result of your filtering yields
|
||||
just one row. For example:
|
||||
|
||||
cloudctl cluster ls | tablizer -yid matchbox
|
||||
|
||||
If "matchbox" matches one cluster, you can immediately use the id of
|
||||
that cluster somewhere else and paste it. Of course, if there are
|
||||
multiple matches, then all id's will be put into the clipboard separated
|
||||
by one space.
|
||||
|
||||
ENVIRONMENT VARIABLES
|
||||
tablizer supports certain environment variables which use can use to
|
||||
influence program behavior. Commandline flags have always precedence
|
||||
over environment variables.
|
||||
|
||||
<T_NO_HEADER_NUMBERING> - disable numbering of header fields, like -n.
|
||||
<T_HEADER_NUMBERING> - enable numbering of header fields, like -n.
|
||||
<T_COLUMNS> - comma separated list of columns to output, like -c
|
||||
<NO_COLORS> - disable colorization of matches, like -N
|
||||
|
||||
@@ -336,6 +493,9 @@ LICENSE
|
||||
Released under the MIT License, Copyright (c) 2006-2011 Kirill
|
||||
Simonov
|
||||
|
||||
bubble-table (https://github.com/Evertras/bubble-table)
|
||||
Released under the MIT License, Copyright (c) 2022 Brandon Fulljames
|
||||
|
||||
AUTHORS
|
||||
Thomas von Dein tom AT vondein DOT org
|
||||
|
||||
@@ -343,42 +503,52 @@ AUTHORS
|
||||
var usage = `
|
||||
|
||||
Usage:
|
||||
tablizer [regex] [file, ...] [flags]
|
||||
tablizer [regex,...] [-r file] [flags]
|
||||
|
||||
Operational Flags:
|
||||
-c, --columns string Only show the speficied columns (separated by ,)
|
||||
-v, --invert-match select non-matching rows
|
||||
-n, --no-numbering Disable header numbering
|
||||
-N, --no-color Disable pattern highlighting
|
||||
-H, --no-headers Disable headers display
|
||||
-s, --separator string Custom field separator
|
||||
-k, --sort-by int Sort by column (default: 1)
|
||||
-z, --fuzzy Use fuzzy search [experimental]
|
||||
-F, --filter field=reg Filter given field with regex, can be used multiple times
|
||||
-c, --columns string Only show the speficied columns (separated by ,)
|
||||
-v, --invert-match select non-matching rows
|
||||
-n, --numbering Enable header numbering
|
||||
-N, --no-color Disable pattern highlighting
|
||||
-H, --no-headers Disable headers display
|
||||
-s, --separator <string> Custom field separator (maybe char, string or :class:)
|
||||
-k, --sort-by <int|name> Sort by column (default: 1)
|
||||
-z, --fuzzy Use fuzzy search [experimental]
|
||||
-F, --filter <field[!]=reg> Filter given field with regex, can be used multiple times
|
||||
-T, --transpose-columns string Transpose the speficied columns (separated by ,)
|
||||
-R, --regex-transposer </from/to/> Apply /search/replace/ regexp to fields given in -T
|
||||
-j, --json Read JSON input (must be array of hashes)
|
||||
-I, --interactive Interactively filter and select rows
|
||||
--auto-headers Generate headers if there are none present in input
|
||||
--custom-headers a,b,... Use custom headers, separated by comma
|
||||
|
||||
Output Flags (mutually exclusive):
|
||||
-X, --extended Enable extended output
|
||||
-M, --markdown Enable markdown table output
|
||||
-O, --orgtbl Enable org-mode table output
|
||||
-S, --shell Enable shell evaluable output
|
||||
-Y, --yaml Enable yaml output
|
||||
-C, --csv Enable CSV output
|
||||
-A, --ascii Default output mode, ascii tabular
|
||||
-L, --hightlight-lines Use alternating background colors for tables
|
||||
-X, --extended Enable extended output
|
||||
-M, --markdown Enable markdown table output
|
||||
-O, --orgtbl Enable org-mode table output
|
||||
-S, --shell Enable shell evaluable output
|
||||
-Y, --yaml Enable yaml output
|
||||
-C, --csv Enable CSV output
|
||||
-A, --ascii Default output mode, ascii tabular
|
||||
-L, --hightlight-lines Use alternating background colors for tables
|
||||
-y, --yank-columns Yank specified columns (separated by ,) to clipboard,
|
||||
space separated
|
||||
--ofs <char> Output field separator, used by -A and -C.
|
||||
|
||||
Sort Mode Flags (mutually exclusive):
|
||||
-a, --sort-age sort according to age (duration) string
|
||||
-D, --sort-desc Sort in descending order (default: ascending)
|
||||
-i, --sort-numeric sort according to string numerical value
|
||||
-t, --sort-time sort according to time string
|
||||
-a, --sort-age sort according to age (duration) string
|
||||
-D, --sort-desc Sort in descending order (default: ascending)
|
||||
-i, --sort-numeric sort according to string numerical value
|
||||
-t, --sort-time sort according to time string
|
||||
|
||||
Other Flags:
|
||||
--completion <shell> Generate the autocompletion script for <shell>
|
||||
-f, --config <file> Configuration file (default: ~/.config/tablizer/config)
|
||||
-d, --debug Enable debugging
|
||||
-h, --help help for tablizer
|
||||
-m, --man Display manual page
|
||||
-V, --version Print program version
|
||||
-r --read-file <file> Use <file> as input instead of STDIN
|
||||
--completion <shell> Generate the autocompletion script for <shell>
|
||||
-f, --config <file> Configuration file (default: ~/.config/tablizer/config)
|
||||
-d, --debug Enable debugging
|
||||
-h, --help help for tablizer
|
||||
-m, --man Display manual page
|
||||
-V, --version Print program version
|
||||
|
||||
|
||||
`
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
all:
|
||||
LC_ALL=en_US.UTF-8 asciinema rec --cols 50 --row 30 -c ./demo.sh --overwrite tmp.cast
|
||||
agg tmp.cast tmp.gif
|
||||
31
demo/demo.sh
31
demo/demo.sh
@@ -1,31 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
prompt() {
|
||||
if test -n "$1"; then
|
||||
echo
|
||||
echo -n "% $*"
|
||||
sleep 1
|
||||
echo
|
||||
$*
|
||||
echo
|
||||
echo -n "% "
|
||||
else
|
||||
echo -n "% "
|
||||
fi
|
||||
}
|
||||
|
||||
PATH=..:$PATH
|
||||
clear
|
||||
while IFS=$'\t' read -r flags table msg source _; do
|
||||
echo "#"
|
||||
echo "# source tabular data:"
|
||||
cat $table
|
||||
echo
|
||||
echo "#"
|
||||
echo "# $msg:"
|
||||
prompt "tablizer $flags $table"
|
||||
|
||||
sleep 4
|
||||
clear
|
||||
done < <(yq -r tables.yaml \
|
||||
| yq -r '.tables[] | [.flags, .table, .msg, .source] | @tsv')
|
||||
@@ -1,4 +0,0 @@
|
||||
NAME DURATION COUNT WHEN
|
||||
beta 1d10h5m1s 33 3/1/2014
|
||||
alpha 4h35m 170 2013-Feb-03
|
||||
ceta 33d12h 9 06/Jan/2008 15:04:05 -0700
|
||||
@@ -1,3 +0,0 @@
|
||||
PID TTY TIME CMD
|
||||
30912 pts/0 00:00:00 bash
|
||||
49526 pts/0 00:00:00 ps
|
||||
@@ -1,54 +0,0 @@
|
||||
tables:
|
||||
# OUTPUTS
|
||||
- flags: -A
|
||||
table: table.demo1
|
||||
msg: default output mode
|
||||
- flags: -O
|
||||
table: table.demo1
|
||||
msg: orgmode output mode
|
||||
- flags: -M
|
||||
table: table.demo1
|
||||
msg: markdown output mode
|
||||
- flags: -S
|
||||
table: table.demo1
|
||||
msg: shell output mode
|
||||
- flags: -X
|
||||
table: table.demo1
|
||||
msg: extended output mode
|
||||
- flags: -Y
|
||||
table: table.demo1
|
||||
msg: yaml output mode
|
||||
- flags: -C
|
||||
table: table.demo1
|
||||
msg: CSV output mode
|
||||
|
||||
# SORTS
|
||||
- flags: -A -k 3
|
||||
table: table.demo1
|
||||
msg: sort by column 3
|
||||
- flags: -A -k 4 -t
|
||||
table: table.demo1
|
||||
msg: sort by column 4 and sort type time
|
||||
- flags: -A -k 2 -a
|
||||
table: table.demo1
|
||||
msg: sort by column 2 and sort type duration
|
||||
|
||||
# REDUCE
|
||||
- flags: -A -c 1,3
|
||||
table: table.demo1
|
||||
msg: only display column 1 and 3
|
||||
- flags: -A -c AM,RA
|
||||
table: table.demo1
|
||||
msg: only display columns matching /(RA|AM)/
|
||||
- flags: -X -c 1,3
|
||||
table: table.demo1
|
||||
msg: only display column 1 and 3 in extended mode
|
||||
|
||||
# SEARCH
|
||||
- flags: /20 -A
|
||||
table: table.demo1
|
||||
msg: only show rows matching /20
|
||||
- flags: /20 -A -v
|
||||
table: table.demo1
|
||||
msg: only show rows NOT matching /20
|
||||
|
||||
@@ -1,119 +0,0 @@
|
||||
{"version": 2, "width": 80, "height": 25, "timestamp": 1666890777, "env": {"SHELL": "/bin/bash", "TERM": "xterm-256color"}}
|
||||
[0.004618, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[0.010297, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[0.010898, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[0.011125, "o", "\r\n#\r\n"]
|
||||
[0.011177, "o", "# default output mode:\r\n"]
|
||||
[0.011219, "o", "\r\n% tablizer -A table.demo1"]
|
||||
[1.011851, "o", "\r\n"]
|
||||
[1.013635, "o", "NAME(1)\tDURATION(2)\tCOUNT(3)\tWHEN(4) \r\nbeta \t1d10h5m1s \t33 \t3/1/2014 \t\r\nalpha \t4h35m \t170 \t2013-Feb-03 \t\r\nceta \t33d12h \t9 \t06/Jan/2008 15:04:05 -0700\t\r\n"]
|
||||
[1.014021, "o", "\r\n% "]
|
||||
[5.015241, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[5.015339, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[5.015688, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[5.015776, "o", "\r\n#\r\n# orgmode output mode:\r\n\r\n% tablizer -O table.demo1"]
|
||||
[6.016322, "o", "\r\n"]
|
||||
[6.01823, "o", "+---------+-------------+----------+----------------------------+\r\n| NAME(1) | DURATION(2) | COUNT(3) | WHEN(4) |\r\n+---------+-------------+----------+----------------------------+\r\n| beta | 1d10h5m1s | 33 | 3/1/2014 |\r\n| alpha | 4h35m | 170 | 2013-Feb-03 |\r\n| ceta | 33d12h | 9 | 06/Jan/2008 15:04:05 -0700 |\r\n+---------+-------------+----------+----------------------------+\r\n"]
|
||||
[6.018497, "o", "\r\n% "]
|
||||
[10.020014, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[10.020112, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[10.020573, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[10.020643, "o", "\r\n#\r\n"]
|
||||
[10.02068, "o", "# markdown output mode:\r\n\r\n% tablizer -M table.demo1"]
|
||||
[11.021559, "o", "\r\n"]
|
||||
[11.023551, "o", "| NAME(1) | DURATION(2) | COUNT(3) | WHEN(4) |\r\n|---------|-------------|----------|----------------------------|\r\n| beta | 1d10h5m1s | 33 | 3/1/2014 |\r\n| alpha | 4h35m | 170 | 2013-Feb-03 |\r\n| ceta | 33d12h | 9 | 06/Jan/2008 15:04:05 -0700 |\r\n"]
|
||||
[11.023838, "o", "\r\n% "]
|
||||
[15.025244, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[15.025345, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[15.025829, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[15.025915, "o", "\r\n#\r\n# shell output mode:\r\n"]
|
||||
[15.025931, "o", "\r\n"]
|
||||
[15.025948, "o", "% tablizer -S table.demo1"]
|
||||
[16.026714, "o", "\r\n"]
|
||||
[16.028606, "o", "NAME(1)=\"beta\" DURATION(2)=\"1d10h5m1s\" COUNT(3)=\"33\" WHEN(4)=\"3/1/2014\"\r\nNAME(1)=\"alpha\" DURATION(2)=\"4h35m\" COUNT(3)=\"170\" WHEN(4)=\"2013-Feb-03\"\r\nNAME(1)=\"ceta\" DURATION(2)=\"33d12h\" COUNT(3)=\"9\" WHEN(4)=\"06/Jan/2008 15:04:05 -0700\"\r\n"]
|
||||
[16.029144, "o", "\r\n% "]
|
||||
[20.030593, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[20.030706, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[20.03121, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[20.031277, "o", "\r\n#\r\n# extended output mode:\r\n"]
|
||||
[20.031327, "o", "\r\n% tablizer -X table.demo1"]
|
||||
[21.032053, "o", "\r\n"]
|
||||
[21.033787, "o", " NAME(1): beta\r\nDURATION(2): 1d10h5m1s\r\n COUNT(3): 33\r\n WHEN(4): 3/1/2014\r\n\r\n NAME(1): alpha\r\nDURATION(2): 4h35m\r\n COUNT(3): 170\r\n WHEN(4): 2013-Feb-03\r\n\r\n NAME(1): ceta\r\nDURATION(2): 33d12h\r\n COUNT(3): 9\r\n WHEN(4): 06/Jan/2008 15:04:05 -0700\r\n\r\n"]
|
||||
[21.034132, "o", "\r\n% "]
|
||||
[25.035531, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[25.035585, "o", "#\r\n"]
|
||||
[25.035681, "o", "# source tabular data:\r\n"]
|
||||
[25.036179, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[25.036232, "o", "\r\n#\r\n"]
|
||||
[25.036274, "o", "# yaml output mode:\r\n\r\n% tablizer -Y table.demo1"]
|
||||
[26.036928, "o", "\r\n"]
|
||||
[26.038674, "o", "entries:\r\n - count: 33\r\n duration: \"1d10h5m1s\"\r\n name: \"beta\"\r\n when: \"3/1/2014\"\r\n - count: 170\r\n duration: \"4h35m\"\r\n name: \"alpha\"\r\n when: \"2013-Feb-03\"\r\n - count: 9\r\n duration: \"33d12h\"\r\n name: \"ceta\"\r\n when: \"06/Jan/2008 15:04:05 -0700\"\r\n"]
|
||||
[26.038975, "o", "\r\n% "]
|
||||
[30.040539, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[30.040659, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[30.041167, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[30.041246, "o", "\r\n#\r\n# CSV output mode:\r\n\r\n% tablizer -C table.demo1"]
|
||||
[31.042088, "o", "\r\n"]
|
||||
[31.043721, "o", "NAME,DURATION,COUNT,WHEN\r\nbeta,1d10h5m1s,33,3/1/2014\r\nalpha,4h35m,170,2013-Feb-03\r\nceta,33d12h,9,06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[31.043997, "o", "\r\n% "]
|
||||
[35.045523, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[35.04563, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[35.046209, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[35.046275, "o", "\r\n#\r\n# sort by column 3:\r\n\r\n% tablizer -A -k 3 table.demo1"]
|
||||
[36.047083, "o", "\r\n"]
|
||||
[36.048793, "o", "NAME(1)\tDURATION(2)\tCOUNT(3)\tWHEN(4) \r\nalpha \t4h35m \t170 \t2013-Feb-03 \t\r\nbeta \t1d10h5m1s \t33 \t3/1/2014 \t\r\nceta \t33d12h \t9 \t06/Jan/2008 15:04:05 -0700\t\r\n"]
|
||||
[36.049077, "o", "\r\n% "]
|
||||
[40.050739, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[40.050925, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[40.051481, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[40.051671, "o", "\r\n#\r\n# sort by column 4 and sort type time:\r\n\r\n% tablizer -A -k 4 -t table.demo1"]
|
||||
[41.052486, "o", "\r\n"]
|
||||
[41.05454, "o", "NAME(1)\tDURATION(2)\tCOUNT(3)\tWHEN(4) \r\nceta \t33d12h \t9 \t06/Jan/2008 15:04:05 -0700\t\r\nalpha \t4h35m \t170 \t2013-Feb-03 \t\r\nbeta \t1d10h5m1s \t33 \t3/1/2014 \t\r\n"]
|
||||
[41.054864, "o", "\r\n% "]
|
||||
[45.056297, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[45.056405, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[45.056895, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[45.056978, "o", "\r\n#\r\n"]
|
||||
[45.057023, "o", "# sort by column 2 and sort type duration:\r\n"]
|
||||
[45.057073, "o", "\r\n% tablizer -A -k 2 -a table.demo1"]
|
||||
[46.057895, "o", "\r\n"]
|
||||
[46.059684, "o", "NAME(1)\tDURATION(2)\tCOUNT(3)\tWHEN(4) \r\nalpha \t4h35m \t170 \t2013-Feb-03 \t\r\nbeta \t1d10h5m1s \t33 \t3/1/2014 \t\r\nceta \t33d12h \t9 \t06/Jan/2008 15:04:05 -0700\t\r\n"]
|
||||
[46.059988, "o", "\r\n% "]
|
||||
[50.061514, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[50.061622, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[50.062091, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[50.062188, "o", "\r\n#\r\n# only display column 1 and 3:\r\n\r\n% tablizer -A -c 1,3 table.demo1"]
|
||||
[51.062985, "o", "\r\n"]
|
||||
[51.066293, "o", "NAME(1)\tCOUNT(3) \r\nbeta \t33 \t\r\nalpha \t170 \t\r\nceta \t9 \t\r\n"]
|
||||
[51.066843, "o", "\r\n% "]
|
||||
[55.070781, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[55.071327, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[55.073499, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[55.073822, "o", "\r\n#\r\n# only display columns matching /(RA|AM)/:\r\n"]
|
||||
[55.074188, "o", "\r\n% tablizer -A -c AM,RA table.demo1"]
|
||||
[56.07636, "o", "\r\n"]
|
||||
[56.078603, "o", "NAME(1)\tDURATION(2) \r\nbeta \t1d10h5m1s \t\r\nalpha \t4h35m \t\r\nceta \t33d12h \t\r\n"]
|
||||
[56.078957, "o", "\r\n% "]
|
||||
[60.080574, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[60.080734, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[60.081286, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[60.081418, "o", "\r\n#\r\n# only display column 1 and 3 in extended mode:\r\n\r\n% tablizer -X -c 1,3 table.demo1"]
|
||||
[61.082844, "o", "\r\n"]
|
||||
[61.089822, "o", " NAME(1): beta\r\nCOUNT(3): 33\r\n\r\n NAME(1): alpha\r\nCOUNT(3): 170\r\n\r\n NAME(1): ceta\r\nCOUNT(3): 9\r\n\r\n"]
|
||||
[61.090969, "o", "\r\n% "]
|
||||
[65.096092, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[65.096571, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[65.098736, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[65.099085, "o", "\r\n#\r\n# only show rows matching /20:\r\n"]
|
||||
[65.099283, "o", "\r\n% tablizer /20 -A table.demo1"]
|
||||
[66.101537, "o", "\r\n"]
|
||||
[66.109112, "o", "NAME(1)\tDURATION(2)\tCOUNT(3)\tWHEN(4) \r\nbeta \t1d10h5m1s \t33 \t3/1\u001b[102;30m/20\u001b[0m14 \t\r\nceta \t33d12h \t9 \t06/Jan\u001b[102;30m/20\u001b[0m08 15:04:05 -0700\t\r\n"]
|
||||
[66.109405, "o", "\r\n% "]
|
||||
[70.11076, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
[70.110873, "o", "#\r\n# source tabular data:\r\n"]
|
||||
[70.111365, "o", "NAME DURATION COUNT WHEN\r\nbeta 1d10h5m1s 33 3/1/2014\r\nalpha 4h35m 170 2013-Feb-03\r\nceta 33d12h 9 06/Jan/2008 15:04:05 -0700\r\n"]
|
||||
[70.111469, "o", "\r\n#\r\n# only show rows NOT matching /20:\r\n\r\n% tablizer /20 -A -v table.demo1"]
|
||||
[71.112738, "o", "\r\n"]
|
||||
[71.120032, "o", "NAME(1)\tDURATION(2)\tCOUNT(3)\tWHEN(4) \r\nalpha \t4h35m \t170 \t2013-Feb-03\t\r\n"]
|
||||
[71.121127, "o", "\r\n% "]
|
||||
[75.126199, "o", "\u001b[H\u001b[2J\u001b[3J"]
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 243 KiB |
82
go.mod
82
go.mod
@@ -1,43 +1,61 @@
|
||||
module github.com/tlinden/tablizer
|
||||
|
||||
go 1.18
|
||||
go 1.24.0
|
||||
|
||||
require (
|
||||
github.com/alecthomas/repr v0.1.1
|
||||
github.com/alecthomas/repr v0.5.2
|
||||
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de
|
||||
github.com/glycerine/zygomys v5.1.2+incompatible
|
||||
github.com/gookit/color v1.5.2
|
||||
github.com/hashicorp/hcl/v2 v2.19.1
|
||||
github.com/lithammer/fuzzysearch v1.1.7
|
||||
github.com/olekukonko/tablewriter v0.0.5
|
||||
github.com/spf13/cobra v1.6.1
|
||||
github.com/charmbracelet/bubbles v0.21.0
|
||||
github.com/charmbracelet/bubbletea v1.3.10
|
||||
github.com/charmbracelet/lipgloss v1.1.0
|
||||
github.com/evertras/bubble-table v0.19.2
|
||||
github.com/gookit/color v1.6.0
|
||||
github.com/hashicorp/hcl/v2 v2.24.0
|
||||
github.com/lithammer/fuzzysearch v1.1.8
|
||||
github.com/mattn/go-isatty v0.0.20
|
||||
github.com/olekukonko/tablewriter v1.1.0
|
||||
github.com/rogpeppe/go-internal v1.14.1
|
||||
github.com/spf13/cobra v1.10.1
|
||||
github.com/stretchr/testify v1.11.1
|
||||
github.com/tiagomelo/go-clipboard v0.1.2
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/agext/levenshtein v1.2.1 // indirect
|
||||
github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
|
||||
github.com/agext/levenshtein v1.2.3 // indirect
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||
github.com/glycerine/blake2b v0.0.0-20151022103502-3c8c640cd7be // indirect
|
||||
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31 // indirect
|
||||
github.com/glycerine/greenpack v5.1.1+incompatible // indirect
|
||||
github.com/glycerine/liner v0.0.0-20160121172638-72909af234e0 // indirect
|
||||
github.com/google/go-cmp v0.5.6 // indirect
|
||||
github.com/gopherjs/gopherjs v1.17.2 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.1 // indirect
|
||||
github.com/jtolds/gls v4.20.0+incompatible // indirect
|
||||
github.com/mattn/go-runewidth v0.0.10 // indirect
|
||||
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect
|
||||
github.com/philhofer/fwd v1.1.2 // indirect
|
||||
github.com/rivo/uniseg v0.1.0 // indirect
|
||||
github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636 // indirect
|
||||
github.com/shurcooL/go-goon v1.0.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/tinylib/msgp v1.1.9 // indirect
|
||||
github.com/ugorji/go/codec v1.2.11 // indirect
|
||||
github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778 // indirect
|
||||
github.com/zclconf/go-cty v1.13.0 // indirect
|
||||
golang.org/x/sys v0.13.0 // indirect
|
||||
golang.org/x/text v0.11.0 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
|
||||
github.com/atotto/clipboard v0.1.4 // indirect
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
||||
github.com/charmbracelet/colorprofile v0.3.1 // indirect
|
||||
github.com/charmbracelet/x/ansi v0.10.1 // indirect
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
|
||||
github.com/charmbracelet/x/term v0.2.1 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
|
||||
github.com/fatih/color v1.18.0 // indirect
|
||||
github.com/google/go-cmp v0.6.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-localereader v0.0.1 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
|
||||
github.com/muesli/cancelreader v0.2.2 // indirect
|
||||
github.com/muesli/reflow v0.3.0 // indirect
|
||||
github.com/muesli/termenv v0.16.0 // indirect
|
||||
github.com/olekukonko/errors v1.1.0 // indirect
|
||||
github.com/olekukonko/ll v0.0.9 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/spf13/pflag v1.0.9 // indirect
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
|
||||
github.com/zclconf/go-cty v1.16.3 // indirect
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect
|
||||
golang.org/x/mod v0.21.0 // indirect
|
||||
golang.org/x/sync v0.15.0 // indirect
|
||||
golang.org/x/sys v0.36.0 // indirect
|
||||
golang.org/x/text v0.25.0 // indirect
|
||||
golang.org/x/tools v0.26.0 // indirect
|
||||
)
|
||||
|
||||
178
go.sum
178
go.sum
@@ -1,88 +1,118 @@
|
||||
github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8=
|
||||
github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
||||
github.com/alecthomas/repr v0.1.1 h1:87P60cSmareLAxMc4Hro0r2RBY4ROm0dYwkJNpS4pPs=
|
||||
github.com/alecthomas/repr v0.1.1/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||
github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
|
||||
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
|
||||
github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
|
||||
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
||||
github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs=
|
||||
github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
|
||||
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA=
|
||||
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
|
||||
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
||||
github.com/charmbracelet/bubbles v0.21.0 h1:9TdC97SdRVg/1aaXNVWfFH3nnLAwOXr8Fn6u6mfQdFs=
|
||||
github.com/charmbracelet/bubbles v0.21.0/go.mod h1:HF+v6QUR4HkEpz62dx7ym2xc71/KBHg+zKwJtMw+qtg=
|
||||
github.com/charmbracelet/bubbletea v1.3.10 h1:otUDHWMMzQSB0Pkc87rm691KZ3SWa4KUlvF9nRvCICw=
|
||||
github.com/charmbracelet/bubbletea v1.3.10/go.mod h1:ORQfo0fk8U+po9VaNvnV95UPWA1BitP1E0N6xJPlHr4=
|
||||
github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40=
|
||||
github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0=
|
||||
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
|
||||
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
|
||||
github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
|
||||
github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k=
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
|
||||
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
|
||||
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/glycerine/blake2b v0.0.0-20151022103502-3c8c640cd7be h1:XBJdPGgA3qqhW+p9CANCAVdF7ZIXdu3pZAkypMkKAjE=
|
||||
github.com/glycerine/blake2b v0.0.0-20151022103502-3c8c640cd7be/go.mod h1:OSCrScrFAjcBObrulk6BEQlytA462OkG1UGB5NYj9kE=
|
||||
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31 h1:gclg6gY70GLy3PbkQ1AERPfmLMMagS60DKF78eWwLn8=
|
||||
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
|
||||
github.com/glycerine/greenpack v5.1.1+incompatible h1:fDr9i6MkSGZmAy4VXPfJhW+SyK2/LNnzIp5nHyDiaIM=
|
||||
github.com/glycerine/greenpack v5.1.1+incompatible/go.mod h1:us0jVISAESGjsEuLlAfCd5nkZm6W6WQF18HPuOecIg4=
|
||||
github.com/glycerine/liner v0.0.0-20160121172638-72909af234e0 h1:4ZegphJXBTc4uFQ08UVoWYmQXorGa+ipXetUj83sMBc=
|
||||
github.com/glycerine/liner v0.0.0-20160121172638-72909af234e0/go.mod h1:AqJLs6UeoC65dnHxyCQ6MO31P5STpjcmgaANAU+No8Q=
|
||||
github.com/glycerine/zygomys v5.1.2+incompatible h1:jmcdmA3XPxgfOunAXFpipE9LQoUL6eX6d2mhYyjV4GE=
|
||||
github.com/glycerine/zygomys v5.1.2+incompatible/go.mod h1:i3SPKZpmy9dwF/3iWrXJ/ZLyzZucegwypwOmqRkUUaQ=
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
|
||||
github.com/evertras/bubble-table v0.19.2 h1:u77oiM6JlRR+CvS5FZc3Hz+J6iEsvEDcR5kO8OFb1Yw=
|
||||
github.com/evertras/bubble-table v0.19.2/go.mod h1:ifHujS1YxwnYSOgcR2+m3GnJ84f7CVU/4kUOxUCjEbQ=
|
||||
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
|
||||
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
|
||||
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
|
||||
github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=
|
||||
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/gookit/color v1.5.2 h1:uLnfXcaFjlrDnQDT+NCBcfhrXqYTx/rcCa6xn01Y8yI=
|
||||
github.com/gookit/color v1.5.2/go.mod h1:w8h4bGiHeeBpvQVePTutdbERIUf3oJE5lZ8HM0UgXyg=
|
||||
github.com/gopherjs/gopherjs v1.17.2 h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g=
|
||||
github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k=
|
||||
github.com/hashicorp/hcl/v2 v2.19.1 h1://i05Jqznmb2EXqa39Nsvyan2o5XyMowW5fnCKW5RPI=
|
||||
github.com/hashicorp/hcl/v2 v2.19.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE=
|
||||
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
|
||||
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4=
|
||||
github.com/lithammer/fuzzysearch v1.1.7 h1:q8rZNmBIUkqxsxb/IlwsXVbCoPIH/0juxjFHY0UIwhU=
|
||||
github.com/lithammer/fuzzysearch v1.1.7/go.mod h1:ZhIlfRGxnD8qa9car/yplC6GmnM14CS07BYAKJJBK2I=
|
||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
github.com/mattn/go-runewidth v0.0.10 h1:CoZ3S2P7pvtP45xOtBw+/mDL2z0RKI576gSkzRRpdGg=
|
||||
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/gookit/assert v0.1.1 h1:lh3GcawXe/p+cU7ESTZ5Ui3Sm/x8JWpIis4/1aF0mY0=
|
||||
github.com/gookit/assert v0.1.1/go.mod h1:jS5bmIVQZTIwk42uXl4lyj4iaaxx32tqH16CFj0VX2E=
|
||||
github.com/gookit/color v1.6.0 h1:JjJXBTk1ETNyqyilJhkTXJYYigHG24TM9Xa2M1xAhRA=
|
||||
github.com/gookit/color v1.6.0/go.mod h1:9ACFc7/1IpHGBW8RwuDm/0YEnhg3dwwXpoMsmtyHfjs=
|
||||
github.com/hashicorp/hcl/v2 v2.24.0 h1:2QJdZ454DSsYGoaE6QheQZjtKZSUs9Nh2izTWiwQxvE=
|
||||
github.com/hashicorp/hcl/v2 v2.24.0/go.mod h1:oGoO1FIQYfn/AgyOhlg9qLC6/nOJPX3qGbkZpYAcqfM=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/lithammer/fuzzysearch v1.1.8 h1:/HIuJnjHuXS8bKaiTMeeDlW2/AyIWk2brx1V8LFgLN4=
|
||||
github.com/lithammer/fuzzysearch v1.1.8/go.mod h1:IdqeyBClc3FFqSzYq/MXESsS4S0FsZ5ajtkr5xPLts4=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
|
||||
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
|
||||
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
|
||||
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
|
||||
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM=
|
||||
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
|
||||
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
|
||||
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
|
||||
github.com/philhofer/fwd v1.1.2 h1:bnDivRJ1EWPjUIRXV5KfORO897HTbpFAQddBdE8t7Gw=
|
||||
github.com/philhofer/fwd v1.1.2/go.mod h1:qkPdfjR2SIEbspLqpe1tO4n5yICnr2DY7mqEx2tUTP0=
|
||||
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
|
||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
|
||||
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
|
||||
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
|
||||
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
|
||||
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
|
||||
github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
|
||||
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
|
||||
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
|
||||
github.com/olekukonko/errors v1.1.0 h1:RNuGIh15QdDenh+hNvKrJkmxxjV4hcS50Db478Ou5sM=
|
||||
github.com/olekukonko/errors v1.1.0/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y=
|
||||
github.com/olekukonko/ll v0.0.9 h1:Y+1YqDfVkqMWuEQMclsF9HUR5+a82+dxJuL1HHSRpxI=
|
||||
github.com/olekukonko/ll v0.0.9/go.mod h1:En+sEW0JNETl26+K8eZ6/W4UQ7CYSrrgg/EdIYT2H8g=
|
||||
github.com/olekukonko/tablewriter v1.1.0 h1:N0LHrshF4T39KvI96fn6GT8HEjXRXYNDrDjKFDB7RIY=
|
||||
github.com/olekukonko/tablewriter v1.1.0/go.mod h1:5c+EBPeSqvXnLLgkm9isDdzR3wjfBkHR9Nhfp3NWrzo=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rivo/uniseg v0.1.0 h1:+2KBaVoUmb9XzDsrx/Ct0W/EYOSFf/nWTauy++DprtY=
|
||||
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
|
||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||
github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636 h1:aSISeOcal5irEhJd1M+IrApc0PdcN7e7Aj4yuEnOrfQ=
|
||||
github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
|
||||
github.com/shurcooL/go-goon v1.0.0 h1:BCQPvxGkHHJ4WpBO4m/9FXbITVIsvAm/T66cCcCGI7E=
|
||||
github.com/shurcooL/go-goon v1.0.0/go.mod h1:2wTHMsGo7qnpmqA8ADYZtP4I1DD94JpXGQ3Dxq2YQ5w=
|
||||
github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
|
||||
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
|
||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
|
||||
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
|
||||
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
|
||||
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/tinylib/msgp v1.1.9 h1:SHf3yoO2sGA0veCJeCBYLHuttAVFHGm2RHgNodW7wQU=
|
||||
github.com/tinylib/msgp v1.1.9/go.mod h1:BCXGB54lDD8qUEPmiG0cQQUANC4IUQyB2ItS2UDlO/k=
|
||||
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
|
||||
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778 h1:QldyIu/L63oPpyvQmHgvgickp1Yw510KJOqX7H24mg8=
|
||||
github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/tiagomelo/go-clipboard v0.1.2 h1:Ph2icR0vZRIj3v5ExvsGweBwsbbDUTlS6HoF40MkQD8=
|
||||
github.com/tiagomelo/go-clipboard v0.1.2/go.mod h1:kXtjJBIMimZaGbxmcKZ8+JqK+acSNf5tAJiChlZBOr8=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/zclconf/go-cty v1.13.0 h1:It5dfKTTZHe9aeppbNOda3mN7Ag7sg6QkBNm6TkyFa0=
|
||||
github.com/zclconf/go-cty v1.13.0/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0=
|
||||
github.com/zclconf/go-cty v1.16.3 h1:osr++gw2T61A8KVYHoQiFbFd1Lh3JOCXc/jFLJXKTxk=
|
||||
github.com/zclconf/go-cty v1.16.3/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
|
||||
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo=
|
||||
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0=
|
||||
golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
@@ -90,15 +120,18 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
|
||||
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
|
||||
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
|
||||
golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
@@ -106,17 +139,16 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
|
||||
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
|
||||
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ=
|
||||
golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
120
lib/filter.go
120
lib/filter.go
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022-2024 Thomas von Dein
|
||||
Copyright © 2022-2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
@@ -19,7 +19,6 @@ package lib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
@@ -28,15 +27,46 @@ import (
|
||||
)
|
||||
|
||||
/*
|
||||
* [!]Match a line, use fuzzy search for normal pattern strings and
|
||||
* regexp otherwise.
|
||||
*/
|
||||
* [!]Match a line, use fuzzy search for normal pattern strings and
|
||||
* regexp otherwise.
|
||||
|
||||
'foo bar' foo, /bar/! => false => line contains foo and not (not bar)
|
||||
'foo nix' foo, /bar/! => ture => line contains foo and (not bar)
|
||||
'foo bar' foo, /bar/ => true => line contains both foo and bar
|
||||
'foo nix' foo, /bar/ => false => line does not contain bar
|
||||
'foo bar' foo, /nix/ => false => line does not contain nix
|
||||
*/
|
||||
func matchPattern(conf cfg.Config, line string) bool {
|
||||
if conf.UseFuzzySearch {
|
||||
return fuzzy.MatchFold(conf.Pattern, line)
|
||||
if len(conf.Patterns) == 0 {
|
||||
// any line always matches ""
|
||||
return true
|
||||
}
|
||||
|
||||
return conf.PatternR.MatchString(line)
|
||||
if conf.UseFuzzySearch {
|
||||
// fuzzy search only considers the 1st pattern
|
||||
return fuzzy.MatchFold(conf.Patterns[0].Pattern, line)
|
||||
}
|
||||
|
||||
var match int
|
||||
|
||||
//fmt.Printf("<%s>\n", line)
|
||||
for _, re := range conf.Patterns {
|
||||
patmatch := re.PatternRe.MatchString(line)
|
||||
if re.Negate {
|
||||
// toggle the meaning of match
|
||||
patmatch = !patmatch
|
||||
}
|
||||
|
||||
if patmatch {
|
||||
match++
|
||||
}
|
||||
|
||||
//fmt.Printf("patmatch: %t, match: %d, pattern: %s, negate: %t\n", patmatch, match, re.Pattern, re.Negate)
|
||||
}
|
||||
|
||||
// fmt.Printf("result: %t\n", match == len(conf.Patterns))
|
||||
//fmt.Println()
|
||||
return match == len(conf.Patterns)
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -44,10 +74,10 @@ func matchPattern(conf cfg.Config, line string) bool {
|
||||
* more filters match on a row, it will be kept, otherwise it will be
|
||||
* excluded.
|
||||
*/
|
||||
func FilterByFields(conf cfg.Config, data Tabdata) (Tabdata, bool, error) {
|
||||
func FilterByFields(conf cfg.Config, data *Tabdata) (*Tabdata, bool, error) {
|
||||
if len(conf.Filters) == 0 {
|
||||
// no filters, no checking
|
||||
return Tabdata{}, false, nil
|
||||
return nil, false, nil
|
||||
}
|
||||
|
||||
newdata := data.CloneEmpty()
|
||||
@@ -56,15 +86,19 @@ func FilterByFields(conf cfg.Config, data Tabdata) (Tabdata, bool, error) {
|
||||
keep := true
|
||||
|
||||
for idx, header := range data.headers {
|
||||
if !Exists(conf.Filters, strings.ToLower(header)) {
|
||||
lcheader := strings.ToLower(header)
|
||||
if !Exists(conf.Filters, lcheader) {
|
||||
// do not filter by unspecified field
|
||||
continue
|
||||
}
|
||||
|
||||
if !conf.Filters[strings.ToLower(header)].MatchString(row[idx]) {
|
||||
// there IS a filter, but it doesn't match
|
||||
keep = false
|
||||
match := conf.Filters[lcheader].Regex.MatchString(row[idx])
|
||||
if conf.Filters[lcheader].Negate {
|
||||
match = !match
|
||||
}
|
||||
|
||||
if !match {
|
||||
keep = false
|
||||
break
|
||||
}
|
||||
}
|
||||
@@ -75,7 +109,44 @@ func FilterByFields(conf cfg.Config, data Tabdata) (Tabdata, bool, error) {
|
||||
}
|
||||
}
|
||||
|
||||
return newdata, true, nil
|
||||
return &newdata, true, nil
|
||||
}
|
||||
|
||||
/*
|
||||
* Transpose fields using search/replace regexp.
|
||||
*/
|
||||
func TransposeFields(conf cfg.Config, data *Tabdata) (*Tabdata, bool, error) {
|
||||
if len(conf.UseTransposers) == 0 {
|
||||
// nothing to be done
|
||||
return nil, false, nil
|
||||
}
|
||||
|
||||
newdata := data.CloneEmpty()
|
||||
transposed := false
|
||||
|
||||
for _, row := range data.entries {
|
||||
transposedrow := false
|
||||
|
||||
for idx := range data.headers {
|
||||
transposeidx, hasone := findindex(conf.UseTransposeColumns, idx+1)
|
||||
if hasone {
|
||||
row[idx] =
|
||||
conf.UseTransposers[transposeidx].Search.ReplaceAllString(
|
||||
row[idx],
|
||||
conf.UseTransposers[transposeidx].Replace,
|
||||
)
|
||||
transposedrow = true
|
||||
}
|
||||
}
|
||||
|
||||
if transposedrow {
|
||||
// also apply -v
|
||||
newdata.entries = append(newdata.entries, row)
|
||||
transposed = true
|
||||
}
|
||||
}
|
||||
|
||||
return &newdata, transposed, nil
|
||||
}
|
||||
|
||||
/* generic map.Exists(key) */
|
||||
@@ -87,8 +158,11 @@ func Exists[K comparable, V any](m map[K]V, v K) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
/*
|
||||
* Filters the whole input lines, returns filtered lines
|
||||
*/
|
||||
func FilterByPattern(conf cfg.Config, input io.Reader) (io.Reader, error) {
|
||||
if conf.Pattern == "" {
|
||||
if len(conf.Patterns) == 0 {
|
||||
return input, nil
|
||||
}
|
||||
|
||||
@@ -100,25 +174,13 @@ func FilterByPattern(conf cfg.Config, input io.Reader) (io.Reader, error) {
|
||||
line := strings.TrimSpace(scanner.Text())
|
||||
if hadFirst {
|
||||
// don't match 1st line, it's the header
|
||||
if conf.Pattern != "" && matchPattern(conf, line) == conf.InvertMatch {
|
||||
if matchPattern(conf, line) == conf.InvertMatch {
|
||||
// by default -v is false, so if a line does NOT
|
||||
// match the pattern, we will ignore it. However,
|
||||
// if the user specified -v, the matching is inverted,
|
||||
// so we ignore all lines, which DO match.
|
||||
continue
|
||||
}
|
||||
|
||||
// apply user defined lisp filters, if any
|
||||
accept, err := RunFilterHooks(conf, line)
|
||||
if err != nil {
|
||||
return input, fmt.Errorf("failed to apply filter hook: %w", err)
|
||||
}
|
||||
|
||||
if !accept {
|
||||
// IF there are filter hook[s] and IF one of them
|
||||
// returns false on the current line, reject it
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
lines = append(lines, line)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2024 Thomas von Dein
|
||||
Copyright © 2024-2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
@@ -19,29 +19,29 @@ package lib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
func TestMatchPattern(t *testing.T) {
|
||||
var input = []struct {
|
||||
name string
|
||||
fuzzy bool
|
||||
pattern string
|
||||
line string
|
||||
name string
|
||||
fuzzy bool
|
||||
patterns []*cfg.Pattern
|
||||
line string
|
||||
}{
|
||||
{
|
||||
name: "normal",
|
||||
pattern: "haus",
|
||||
line: "hausparty",
|
||||
name: "normal",
|
||||
patterns: []*cfg.Pattern{{Pattern: "haus"}},
|
||||
line: "hausparty",
|
||||
},
|
||||
{
|
||||
name: "fuzzy",
|
||||
pattern: "hpt",
|
||||
line: "haus-party-termin",
|
||||
fuzzy: true,
|
||||
name: "fuzzy",
|
||||
patterns: []*cfg.Pattern{{Pattern: "hpt"}},
|
||||
line: "haus-party-termin",
|
||||
fuzzy: true,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -55,14 +55,12 @@ func TestMatchPattern(t *testing.T) {
|
||||
conf.UseFuzzySearch = true
|
||||
}
|
||||
|
||||
err := conf.PreparePattern(inputdata.pattern)
|
||||
if err != nil {
|
||||
t.Errorf("PreparePattern returned error: %s", err)
|
||||
}
|
||||
err := conf.PreparePattern(inputdata.patterns)
|
||||
|
||||
if !matchPattern(conf, inputdata.line) {
|
||||
t.Errorf("matchPattern() did not match\nExp: true\nGot: false\n")
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
|
||||
res := matchPattern(conf, inputdata.line)
|
||||
assert.EqualValues(t, true, res)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -98,6 +96,20 @@ func TestFilterByFields(t *testing.T) {
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "one-field-negative",
|
||||
filter: []string{"one!=asd"},
|
||||
expect: Tabdata{
|
||||
headers: []string{
|
||||
"ONE", "TWO", "THREE",
|
||||
},
|
||||
entries: [][]string{
|
||||
{"19191", "EDD 1", "x"},
|
||||
{"8d8", "AN 1", "y"},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "one-field-inverted",
|
||||
filter: []string{"one=19"},
|
||||
@@ -149,14 +161,12 @@ func TestFilterByFields(t *testing.T) {
|
||||
conf := cfg.Config{Rawfilters: inputdata.filter, InvertMatch: inputdata.invert}
|
||||
|
||||
err := conf.PrepareFilters()
|
||||
if err != nil {
|
||||
t.Errorf("PrepareFilters returned error: %s", err)
|
||||
}
|
||||
|
||||
data, _, _ := FilterByFields(conf, data)
|
||||
if !reflect.DeepEqual(data, inputdata.expect) {
|
||||
t.Errorf("Filtered data does not match expected data:\ngot: %+v\nexp: %+v", data, inputdata.expect)
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
|
||||
data, _, _ := FilterByFields(conf, &data)
|
||||
|
||||
assert.EqualValues(t, inputdata.expect, *data)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
240
lib/helpers.go
240
lib/helpers.go
@@ -22,7 +22,7 @@ import (
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
@@ -30,42 +30,127 @@ import (
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
func contains(s []int, e int) bool {
|
||||
for _, a := range s {
|
||||
func findindex(s []int, e int) (int, bool) {
|
||||
for i, a := range s {
|
||||
if a == e {
|
||||
return true
|
||||
return i, true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// validate the consitency of parsed data
|
||||
func ValidateConsistency(data *Tabdata) error {
|
||||
expectedfields := len(data.headers)
|
||||
|
||||
for idx, row := range data.entries {
|
||||
if len(row) != expectedfields {
|
||||
return fmt.Errorf("row %d does not contain expected %d elements, but %d",
|
||||
idx, expectedfields, len(row))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// parse columns list given with -c, modifies config.UseColumns based
|
||||
// on eventually given regex
|
||||
// on eventually given regex.
|
||||
// This is an output filter, because -cN,N,... is being applied AFTER
|
||||
// processing of the input data.
|
||||
func PrepareColumns(conf *cfg.Config, data *Tabdata) error {
|
||||
if conf.Columns == "" {
|
||||
return nil
|
||||
// -c columns
|
||||
usecolumns, err := PrepareColumnVars(conf.Columns, data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, use := range strings.Split(conf.Columns, ",") {
|
||||
if len(use) == 0 {
|
||||
return fmt.Errorf("could not parse columns list %s: empty column", conf.Columns)
|
||||
conf.UseColumns = usecolumns
|
||||
|
||||
// -y columns
|
||||
useyankcolumns, err := PrepareColumnVars(conf.YankColumns, data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
conf.UseYankColumns = useyankcolumns
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Same thing as above but for -T option, which is an input option,
|
||||
// because transposers are being applied before output.
|
||||
func PrepareTransposerColumns(conf *cfg.Config, data *Tabdata) error {
|
||||
// -T columns
|
||||
usetransposecolumns, err := PrepareColumnVars(conf.TransposeColumns, data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
conf.UseTransposeColumns = usetransposecolumns
|
||||
|
||||
// verify that columns and transposers match and prepare transposer structs
|
||||
if err := conf.PrepareTransposers(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// output option, prepare -k1,2 sort fields
|
||||
func PrepareSortColumns(conf *cfg.Config, data *Tabdata) error {
|
||||
// -c columns
|
||||
usecolumns, err := PrepareColumnVars(conf.SortByColumn, data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
conf.UseSortByColumn = usecolumns
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func PrepareColumnVars(columns string, data *Tabdata) ([]int, error) {
|
||||
if columns == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
usecolumns := []int{}
|
||||
|
||||
isregex := regexp.MustCompile(`\W`)
|
||||
|
||||
for _, columnpattern := range strings.Split(columns, ",") {
|
||||
if len(columnpattern) == 0 {
|
||||
return nil, fmt.Errorf("could not parse columns list %s: empty column", columns)
|
||||
}
|
||||
|
||||
usenum, err := strconv.Atoi(use)
|
||||
usenum, err := strconv.Atoi(columnpattern)
|
||||
if err != nil {
|
||||
// might be a regexp
|
||||
colPattern, err := regexp.Compile(use)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("Could not parse columns list %s: %v", conf.Columns, err)
|
||||
// not a number
|
||||
|
||||
return errors.New(msg)
|
||||
}
|
||||
if !isregex.MatchString(columnpattern) {
|
||||
// is not a regexp (contains no non-word chars)
|
||||
// lc() it so that word searches are case insensitive
|
||||
columnpattern = strings.ToLower(columnpattern)
|
||||
|
||||
// find matching header fields
|
||||
for i, head := range data.headers {
|
||||
if colPattern.MatchString(head) {
|
||||
conf.UseColumns = append(conf.UseColumns, i+1)
|
||||
for i, head := range data.headers {
|
||||
if columnpattern == strings.ToLower(head) {
|
||||
usecolumns = append(usecolumns, i+1)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
colPattern, err := regexp.Compile("(?i)" + columnpattern)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("Could not parse columns list %s: %v", columns, err)
|
||||
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
|
||||
// find matching header fields, ignoring case
|
||||
for i, head := range data.headers {
|
||||
if colPattern.MatchString(strings.ToLower(head)) {
|
||||
usecolumns = append(usecolumns, i+1)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -73,52 +158,37 @@ func PrepareColumns(conf *cfg.Config, data *Tabdata) error {
|
||||
// a colum spec is not a number, we process them above
|
||||
// inside the err handler for atoi(). so only add the
|
||||
// number, if it's really just a number.
|
||||
conf.UseColumns = append(conf.UseColumns, usenum)
|
||||
usecolumns = append(usecolumns, usenum)
|
||||
}
|
||||
}
|
||||
|
||||
// deduplicate: put all values into a map (value gets map key)
|
||||
// thereby removing duplicates, extract keys into new slice
|
||||
// and sort it
|
||||
imap := make(map[int]int, len(conf.UseColumns))
|
||||
for _, i := range conf.UseColumns {
|
||||
imap[i] = 0
|
||||
// deduplicate columns, preserve order
|
||||
deduped := []int{}
|
||||
for _, i := range usecolumns {
|
||||
if !slices.Contains(deduped, i) {
|
||||
deduped = append(deduped, i)
|
||||
}
|
||||
}
|
||||
|
||||
conf.UseColumns = nil
|
||||
|
||||
for k := range imap {
|
||||
conf.UseColumns = append(conf.UseColumns, k)
|
||||
}
|
||||
|
||||
sort.Ints(conf.UseColumns)
|
||||
|
||||
return nil
|
||||
return deduped, nil
|
||||
}
|
||||
|
||||
// prepare headers: add numbers to headers
|
||||
func numberizeAndReduceHeaders(conf cfg.Config, data *Tabdata) {
|
||||
numberedHeaders := []string{}
|
||||
numberedHeaders := make([]string, len(data.headers))
|
||||
|
||||
maxwidth := 0 // start from scratch, so we only look at displayed column widths
|
||||
|
||||
// add numbers to headers if needed, get widest cell width
|
||||
for idx, head := range data.headers {
|
||||
var headlen int
|
||||
|
||||
if len(conf.Columns) > 0 {
|
||||
// -c specified
|
||||
if !contains(conf.UseColumns, idx+1) {
|
||||
// ignore this one
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if conf.NoNumbering {
|
||||
numberedHeaders = append(numberedHeaders, head)
|
||||
headlen = len(head)
|
||||
if conf.Numbering {
|
||||
newhead := fmt.Sprintf("%s(%d)", head, idx+1)
|
||||
numberedHeaders[idx] = newhead
|
||||
headlen = len(newhead)
|
||||
} else {
|
||||
numhead := fmt.Sprintf("%s(%d)", head, idx+1)
|
||||
headlen = len(numhead)
|
||||
numberedHeaders = append(numberedHeaders, numhead)
|
||||
headlen = len(head)
|
||||
}
|
||||
|
||||
if headlen > maxwidth {
|
||||
@@ -126,7 +196,24 @@ func numberizeAndReduceHeaders(conf cfg.Config, data *Tabdata) {
|
||||
}
|
||||
}
|
||||
|
||||
data.headers = numberedHeaders
|
||||
if conf.Numbering {
|
||||
data.headers = numberedHeaders
|
||||
}
|
||||
|
||||
if len(conf.UseColumns) > 0 {
|
||||
// re-align headers based on user requested column list
|
||||
headers := make([]string, len(conf.UseColumns))
|
||||
|
||||
for i, col := range conf.UseColumns {
|
||||
for idx := range data.headers {
|
||||
if col-1 == idx {
|
||||
headers[i] = data.headers[col-1]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data.headers = headers
|
||||
}
|
||||
|
||||
if data.maxwidthHeader != maxwidth && maxwidth > 0 {
|
||||
data.maxwidthHeader = maxwidth
|
||||
@@ -138,17 +225,17 @@ func reduceColumns(conf cfg.Config, data *Tabdata) {
|
||||
if len(conf.Columns) > 0 {
|
||||
reducedEntries := [][]string{}
|
||||
|
||||
var reducedEntry []string
|
||||
|
||||
for _, entry := range data.entries {
|
||||
reducedEntry = nil
|
||||
var reducedEntry []string
|
||||
|
||||
for i, value := range entry {
|
||||
if !contains(conf.UseColumns, i+1) {
|
||||
continue
|
||||
for _, col := range conf.UseColumns {
|
||||
col--
|
||||
|
||||
for idx, value := range entry {
|
||||
if idx == col {
|
||||
reducedEntry = append(reducedEntry, value)
|
||||
}
|
||||
}
|
||||
|
||||
reducedEntry = append(reducedEntry, value)
|
||||
}
|
||||
|
||||
reducedEntries = append(reducedEntries, reducedEntry)
|
||||
@@ -158,17 +245,6 @@ func reduceColumns(conf cfg.Config, data *Tabdata) {
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: remove this when we only use Tablewriter and strip in ParseFile()!
|
||||
func trimRow(row []string) []string {
|
||||
var fixedrow = make([]string, len(row))
|
||||
|
||||
for idx, cell := range row {
|
||||
fixedrow[idx] = strings.TrimSpace(cell)
|
||||
}
|
||||
|
||||
return fixedrow
|
||||
}
|
||||
|
||||
// FIXME: refactor this beast!
|
||||
func colorizeData(conf cfg.Config, output string) string {
|
||||
switch {
|
||||
@@ -205,12 +281,20 @@ func colorizeData(conf cfg.Config, output string) string {
|
||||
|
||||
return colorized
|
||||
|
||||
case len(conf.Pattern) > 0 && !conf.NoColor && color.IsConsole(os.Stdout):
|
||||
r := regexp.MustCompile("(" + conf.Pattern + ")")
|
||||
case len(conf.Patterns) > 0 && !conf.NoColor && color.IsConsole(os.Stdout):
|
||||
out := output
|
||||
|
||||
return r.ReplaceAllStringFunc(output, func(in string) string {
|
||||
return conf.ColorStyle.Sprint(in)
|
||||
})
|
||||
for _, re := range conf.Patterns {
|
||||
if !re.Negate {
|
||||
r := regexp.MustCompile("(" + re.Pattern + ")")
|
||||
|
||||
out = r.ReplaceAllStringFunc(out, func(in string) string {
|
||||
return conf.ColorStyle.Sprint(in)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
|
||||
default:
|
||||
return output
|
||||
|
||||
@@ -19,9 +19,10 @@ package lib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
@@ -38,10 +39,9 @@ func TestContains(t *testing.T) {
|
||||
for _, tt := range tests {
|
||||
testname := fmt.Sprintf("contains-%d,%d,%t", tt.list, tt.search, tt.want)
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
answer := contains(tt.list, tt.search)
|
||||
if answer != tt.want {
|
||||
t.Errorf("got %t, want %t", answer, tt.want)
|
||||
}
|
||||
answer := slices.Contains(tt.list, tt.search)
|
||||
|
||||
assert.EqualValues(t, tt.want, answer)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -67,24 +67,98 @@ func TestPrepareColumns(t *testing.T) {
|
||||
}{
|
||||
{"1,2,3", []int{1, 2, 3}, false},
|
||||
{"1,2,", []int{}, true},
|
||||
{"T", []int{2, 3}, false},
|
||||
{"T,2,3", []int{2, 3}, false},
|
||||
{"T.", []int{2, 3}, false},
|
||||
{"T.,2,3", []int{2, 3}, false},
|
||||
{"[a-z,4,5", []int{4, 5}, true}, // invalid regexp
|
||||
}
|
||||
|
||||
for _, testdata := range tests {
|
||||
testname := fmt.Sprintf("PrepareColumns-%s-%t", testdata.input, testdata.wanterror)
|
||||
testname := fmt.Sprintf("PrepareColumns-%s-%t",
|
||||
testdata.input, testdata.wanterror)
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
conf := cfg.Config{Columns: testdata.input}
|
||||
err := PrepareColumns(&conf, &data)
|
||||
if err != nil {
|
||||
if !testdata.wanterror {
|
||||
t.Errorf("got error: %v", err)
|
||||
}
|
||||
|
||||
if testdata.wanterror {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
if !reflect.DeepEqual(conf.UseColumns, testdata.exp) {
|
||||
t.Errorf("got: %v, expected: %v", conf.UseColumns, testdata.exp)
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, testdata.exp, conf.UseColumns)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPrepareTransposerColumns(t *testing.T) {
|
||||
data := Tabdata{
|
||||
maxwidthHeader: 5,
|
||||
columns: 3,
|
||||
headers: []string{
|
||||
"ONE", "TWO", "THREE",
|
||||
},
|
||||
entries: [][]string{
|
||||
{
|
||||
"2", "3", "4",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
var tests = []struct {
|
||||
input string
|
||||
transp []string
|
||||
exp int
|
||||
wanterror bool // expect error
|
||||
}{
|
||||
{
|
||||
"1",
|
||||
[]string{`/\d/x/`},
|
||||
1,
|
||||
false,
|
||||
},
|
||||
{
|
||||
"T.", // will match [T]WO and [T]HREE
|
||||
[]string{`/\d/x/`, `/.//`},
|
||||
2,
|
||||
false,
|
||||
},
|
||||
{
|
||||
"TH.,2",
|
||||
[]string{`/\d/x/`, `/.//`},
|
||||
2,
|
||||
false,
|
||||
},
|
||||
{
|
||||
"1",
|
||||
[]string{},
|
||||
1,
|
||||
true,
|
||||
},
|
||||
{
|
||||
"",
|
||||
[]string{`|.|N|`},
|
||||
0,
|
||||
true,
|
||||
},
|
||||
{
|
||||
"1",
|
||||
[]string{`|.|N|`},
|
||||
1,
|
||||
false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, testdata := range tests {
|
||||
testname := fmt.Sprintf("PrepareTransposerColumns-%s-%t", testdata.input, testdata.wanterror)
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
conf := cfg.Config{TransposeColumns: testdata.input, Transposers: testdata.transp}
|
||||
err := PrepareTransposerColumns(&conf, &data)
|
||||
|
||||
if testdata.wanterror {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, testdata.exp, len(conf.UseTransposeColumns))
|
||||
assert.EqualValues(t, len(conf.UseTransposeColumns), len(conf.Transposers))
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -122,10 +196,8 @@ func TestReduceColumns(t *testing.T) {
|
||||
c := cfg.Config{Columns: "x", UseColumns: testdata.columns}
|
||||
data := Tabdata{entries: input}
|
||||
reduceColumns(c, &data)
|
||||
if !reflect.DeepEqual(data.entries, testdata.expect) {
|
||||
t.Errorf("reduceColumns returned invalid data:\ngot: %+v\nexp: %+v",
|
||||
data.entries, testdata.expect)
|
||||
}
|
||||
|
||||
assert.EqualValues(t, testdata.expect, data.entries)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -136,27 +208,25 @@ func TestNumberizeHeaders(t *testing.T) {
|
||||
}
|
||||
|
||||
var tests = []struct {
|
||||
expect []string
|
||||
columns []int
|
||||
nonum bool
|
||||
expect []string
|
||||
columns []int
|
||||
numberize bool
|
||||
}{
|
||||
{[]string{"ONE(1)", "TWO(2)", "THREE(3)"}, []int{1, 2, 3}, false},
|
||||
{[]string{"ONE(1)", "TWO(2)"}, []int{1, 2}, false},
|
||||
{[]string{"ONE", "TWO"}, []int{1, 2}, true},
|
||||
{[]string{"ONE(1)", "TWO(2)", "THREE(3)"}, []int{1, 2, 3}, true},
|
||||
{[]string{"ONE(1)", "TWO(2)"}, []int{1, 2}, true},
|
||||
{[]string{"ONE", "TWO"}, []int{1, 2}, false},
|
||||
}
|
||||
|
||||
for _, testdata := range tests {
|
||||
testname := fmt.Sprintf("numberize-headers-columns-%+v-nonum-%t",
|
||||
testdata.columns, testdata.nonum)
|
||||
testdata.columns, testdata.numberize)
|
||||
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
conf := cfg.Config{Columns: "x", UseColumns: testdata.columns, NoNumbering: testdata.nonum}
|
||||
conf := cfg.Config{Columns: "x", UseColumns: testdata.columns, Numbering: testdata.numberize}
|
||||
usedata := data
|
||||
numberizeAndReduceHeaders(conf, &usedata)
|
||||
if !reflect.DeepEqual(usedata.headers, testdata.expect) {
|
||||
t.Errorf("numberizeAndReduceHeaders returned invalid data:\ngot: %+v\nexp: %+v",
|
||||
usedata.headers, testdata.expect)
|
||||
}
|
||||
|
||||
assert.EqualValues(t, testdata.expect, usedata.headers)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
112
lib/io.go
112
lib/io.go
@@ -29,86 +29,88 @@ import (
|
||||
const RWRR = 0755
|
||||
|
||||
func ProcessFiles(conf *cfg.Config, args []string) error {
|
||||
fds, pattern, err := determineIO(conf, args)
|
||||
fd, patterns, err := determineIO(conf, args)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := conf.PreparePattern(pattern); err != nil {
|
||||
if err := conf.PreparePattern(patterns); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, fd := range fds {
|
||||
data, err := Parse(*conf, fd)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = PrepareColumns(conf, &data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
printData(os.Stdout, *conf, &data)
|
||||
data, err := Parse(*conf, fd)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = ValidateConsistency(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = PrepareSortColumns(conf, &data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = PrepareColumns(conf, &data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if conf.Interactive {
|
||||
newdata, err := tableEditor(conf, &data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
data = *newdata
|
||||
}
|
||||
|
||||
printData(os.Stdout, *conf, &data)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func determineIO(conf *cfg.Config, args []string) ([]io.Reader, string, error) {
|
||||
var filehandles []io.Reader
|
||||
|
||||
var pattern string
|
||||
|
||||
func determineIO(conf *cfg.Config, args []string) (io.Reader, []*cfg.Pattern, error) {
|
||||
var filehandle io.Reader
|
||||
var patterns []*cfg.Pattern
|
||||
var haveio bool
|
||||
|
||||
stat, _ := os.Stdin.Stat()
|
||||
if (stat.Mode() & os.ModeCharDevice) == 0 {
|
||||
// we're reading from STDIN, which takes precedence over file args
|
||||
filehandles = append(filehandles, os.Stdin)
|
||||
switch {
|
||||
case conf.InputFile == "-":
|
||||
filehandle = os.Stdin
|
||||
haveio = true
|
||||
case conf.InputFile != "":
|
||||
fd, err := os.OpenFile(conf.InputFile, os.O_RDONLY, RWRR)
|
||||
|
||||
if len(args) > 0 {
|
||||
// ignore any args > 1
|
||||
pattern = args[0]
|
||||
conf.Pattern = args[0] // used for colorization by printData()
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to read input file %s: %w", conf.InputFile, err)
|
||||
}
|
||||
|
||||
filehandle = fd
|
||||
haveio = true
|
||||
} else if len(args) > 0 {
|
||||
// threre were args left, take a look
|
||||
if args[0] == "-" {
|
||||
// in traditional unix programs a dash denotes STDIN (forced)
|
||||
filehandles = append(filehandles, os.Stdin)
|
||||
}
|
||||
|
||||
if !haveio {
|
||||
stat, _ := os.Stdin.Stat()
|
||||
if (stat.Mode() & os.ModeCharDevice) == 0 {
|
||||
// we're reading from STDIN, which takes precedence over file args
|
||||
filehandle = os.Stdin
|
||||
haveio = true
|
||||
} else {
|
||||
if _, err := os.Stat(args[0]); err != nil {
|
||||
// first one is not a file, consider it as regexp and
|
||||
// shift arg list
|
||||
pattern = args[0]
|
||||
conf.Pattern = args[0] // used for colorization by printData()
|
||||
args = args[1:]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(args) > 0 {
|
||||
// consider any other args as files
|
||||
for _, file := range args {
|
||||
filehandle, err := os.OpenFile(file, os.O_RDONLY, RWRR)
|
||||
|
||||
if err != nil {
|
||||
return nil, "", fmt.Errorf("failed to read input file %s: %w", file, err)
|
||||
}
|
||||
|
||||
filehandles = append(filehandles, filehandle)
|
||||
haveio = true
|
||||
}
|
||||
}
|
||||
if len(args) > 0 {
|
||||
patterns = make([]*cfg.Pattern, len(args))
|
||||
for i, arg := range args {
|
||||
patterns[i] = &cfg.Pattern{Pattern: arg}
|
||||
}
|
||||
}
|
||||
|
||||
if !haveio {
|
||||
return nil, "", errors.New("no file specified and nothing to read on stdin")
|
||||
return nil, nil, errors.New("no file specified and nothing to read on stdin")
|
||||
}
|
||||
|
||||
return filehandles, pattern, nil
|
||||
return filehandle, patterns, nil
|
||||
}
|
||||
|
||||
313
lib/lisp.go
313
lib/lisp.go
@@ -1,313 +0,0 @@
|
||||
/*
|
||||
Copyright © 2023 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
package lib
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/glycerine/zygomys/zygo"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
/*
|
||||
needs to be global because we can't feed an cfg object to AddHook()
|
||||
which is being called from user lisp code
|
||||
*/
|
||||
var Hooks map[string][]*zygo.SexpSymbol
|
||||
|
||||
/*
|
||||
AddHook() (called addhook from lisp code) can be used by the user to
|
||||
add a function to one of the available hooks provided by tablizer.
|
||||
*/
|
||||
func AddHook(env *zygo.Zlisp, name string, args []zygo.Sexp) (zygo.Sexp, error) {
|
||||
var hookname string
|
||||
|
||||
if len(args) < 2 {
|
||||
return zygo.SexpNull, errors.New("argument of %add-hook should be: %hook-name %your-function")
|
||||
}
|
||||
|
||||
switch sexptype := args[0].(type) {
|
||||
case *zygo.SexpSymbol:
|
||||
if !HookExists(sexptype.Name()) {
|
||||
return zygo.SexpNull, errors.New("Unknown hook " + sexptype.Name())
|
||||
}
|
||||
|
||||
hookname = sexptype.Name()
|
||||
|
||||
default:
|
||||
return zygo.SexpNull, errors.New("hook name must be a symbol ")
|
||||
}
|
||||
|
||||
switch sexptype := args[1].(type) {
|
||||
case *zygo.SexpSymbol:
|
||||
_, exists := Hooks[hookname]
|
||||
if !exists {
|
||||
Hooks[hookname] = []*zygo.SexpSymbol{sexptype}
|
||||
} else {
|
||||
Hooks[hookname] = append(Hooks[hookname], sexptype)
|
||||
}
|
||||
|
||||
default:
|
||||
return zygo.SexpNull, errors.New("hook function must be a symbol ")
|
||||
}
|
||||
|
||||
return zygo.SexpNull, nil
|
||||
}
|
||||
|
||||
/*
|
||||
Check if a hook exists
|
||||
*/
|
||||
func HookExists(key string) bool {
|
||||
for _, hook := range cfg.ValidHooks {
|
||||
if hook == key {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/*
|
||||
* Basic sanity checks and load lisp file
|
||||
*/
|
||||
func LoadAndEvalFile(env *zygo.Zlisp, path string) error {
|
||||
if strings.HasSuffix(path, `.zy`) {
|
||||
code, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read lisp file %s: %w", path, err)
|
||||
}
|
||||
|
||||
// FIXME: check what res (_ here) could be and mean
|
||||
_, err = env.EvalString(string(code))
|
||||
if err != nil {
|
||||
log.Fatalf(env.GetStackTrace(err))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
/*
|
||||
* Setup lisp interpreter environment
|
||||
*/
|
||||
func SetupLisp(conf *cfg.Config) error {
|
||||
// iterate over load-path and evaluate all *.zy files there, if any
|
||||
// we ignore if load-path does not exist, which is the default anyway
|
||||
path, err := os.Stat(conf.LispLoadPath)
|
||||
if os.IsNotExist(err) {
|
||||
return nil
|
||||
}
|
||||
|
||||
// init global hooks
|
||||
Hooks = make(map[string][]*zygo.SexpSymbol)
|
||||
|
||||
// init sandbox
|
||||
env := zygo.NewZlispSandbox()
|
||||
env.AddFunction("addhook", AddHook)
|
||||
|
||||
if !path.IsDir() {
|
||||
// load single lisp file
|
||||
err = LoadAndEvalFile(env, conf.LispLoadPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// load all lisp file in load dir
|
||||
dir, err := os.ReadDir(conf.LispLoadPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read lisp dir %s: %w",
|
||||
conf.LispLoadPath, err)
|
||||
}
|
||||
|
||||
for _, entry := range dir {
|
||||
if !entry.IsDir() {
|
||||
err := LoadAndEvalFile(env, conf.LispLoadPath+"/"+entry.Name())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
RegisterLib(env)
|
||||
|
||||
conf.Lisp = env
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
/*
|
||||
Execute every user lisp function registered as filter hook.
|
||||
|
||||
Each function is given the current line as argument and is expected to
|
||||
return a boolean. True indicates to keep the line, false to skip
|
||||
it.
|
||||
|
||||
If there are multiple such functions registered, then the first one
|
||||
returning false wins, that is if each function returns true the line
|
||||
will be kept, if at least one of them returns false, it will be
|
||||
skipped.
|
||||
*/
|
||||
func RunFilterHooks(conf cfg.Config, line string) (bool, error) {
|
||||
for _, hook := range Hooks["filter"] {
|
||||
var result bool
|
||||
|
||||
conf.Lisp.Clear()
|
||||
|
||||
res, err := conf.Lisp.EvalString(fmt.Sprintf("(%s `%s`)", hook.Name(), line))
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to evaluate hook loader: %w", err)
|
||||
}
|
||||
|
||||
switch sexptype := res.(type) {
|
||||
case *zygo.SexpBool:
|
||||
result = sexptype.Val
|
||||
default:
|
||||
return false, fmt.Errorf("filter hook shall return bool")
|
||||
}
|
||||
|
||||
if !result {
|
||||
// the first hook which returns false leads to complete false
|
||||
return result, nil
|
||||
}
|
||||
}
|
||||
|
||||
// if no hook returned false, we succeed and accept the given line
|
||||
return true, nil
|
||||
}
|
||||
|
||||
/*
|
||||
These hooks get the data (Tabdata) readily processed by tablizer as
|
||||
argument. They are expected to return a SexpPair containing a boolean
|
||||
denoting if the data has been modified and the actual modified
|
||||
data. Columns must be the same, rows may differ. Cells may also have
|
||||
been modified.
|
||||
|
||||
Replaces the internal data structure Tabdata with the user supplied
|
||||
version.
|
||||
|
||||
Only one process hook function is supported.
|
||||
|
||||
The somewhat complicated code is being caused by the fact, that we
|
||||
need to convert our internal structure to a lisp variable and vice
|
||||
versa afterwards.
|
||||
*/
|
||||
func RunProcessHooks(conf cfg.Config, data Tabdata) (Tabdata, bool, error) {
|
||||
var userdata Tabdata
|
||||
|
||||
lisplist := []zygo.Sexp{}
|
||||
|
||||
if len(Hooks["process"]) == 0 {
|
||||
return userdata, false, nil
|
||||
}
|
||||
|
||||
if len(Hooks["process"]) > 1 {
|
||||
fmt.Println("Warning: only one process hook is allowed!")
|
||||
}
|
||||
|
||||
// there are hook[s] installed, convert the go data structure 'data to lisp
|
||||
for _, row := range data.entries {
|
||||
var entry zygo.SexpHash
|
||||
|
||||
for idx, cell := range row {
|
||||
err := entry.HashSet(&zygo.SexpStr{S: data.headers[idx]}, &zygo.SexpStr{S: cell})
|
||||
if err != nil {
|
||||
return userdata, false, fmt.Errorf("failed to convert to lisp data: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
lisplist = append(lisplist, &entry)
|
||||
}
|
||||
|
||||
// we need to add it to the env so that the function can use the struct directly
|
||||
conf.Lisp.AddGlobal("data", &zygo.SexpArray{Val: lisplist, Env: conf.Lisp})
|
||||
|
||||
// execute the actual hook
|
||||
hook := Hooks["process"][0]
|
||||
|
||||
conf.Lisp.Clear()
|
||||
|
||||
var result bool
|
||||
|
||||
res, err := conf.Lisp.EvalString(fmt.Sprintf("(%s data)", hook.Name()))
|
||||
if err != nil {
|
||||
return userdata, false, fmt.Errorf("failed to eval lisp loader: %w", err)
|
||||
}
|
||||
|
||||
// we expect (bool, array(hash)) as return from the function
|
||||
switch sexptype := res.(type) {
|
||||
case *zygo.SexpPair:
|
||||
switch th := sexptype.Head.(type) {
|
||||
case *zygo.SexpBool:
|
||||
result = th.Val
|
||||
default:
|
||||
return userdata, false, errors.New("xpect (bool, array(hash)) as return value")
|
||||
}
|
||||
|
||||
switch sexptailtype := sexptype.Tail.(type) {
|
||||
case *zygo.SexpArray:
|
||||
lisplist = sexptailtype.Val
|
||||
default:
|
||||
return userdata, false, errors.New("expect (bool, array(hash)) as return value ")
|
||||
}
|
||||
default:
|
||||
return userdata, false, errors.New("filter hook shall return array of hashes ")
|
||||
}
|
||||
|
||||
if !result {
|
||||
// no further processing required
|
||||
return userdata, result, nil
|
||||
}
|
||||
|
||||
// finally convert lispdata back to Tabdata
|
||||
for _, item := range lisplist {
|
||||
row := []string{}
|
||||
|
||||
switch hash := item.(type) {
|
||||
case *zygo.SexpHash:
|
||||
for _, header := range data.headers {
|
||||
entry, err := hash.HashGetDefault(
|
||||
conf.Lisp,
|
||||
&zygo.SexpStr{S: header},
|
||||
&zygo.SexpStr{S: ""})
|
||||
if err != nil {
|
||||
return userdata, false, fmt.Errorf("failed to get lisp hash entry: %w", err)
|
||||
}
|
||||
|
||||
switch sexptype := entry.(type) {
|
||||
case *zygo.SexpStr:
|
||||
row = append(row, sexptype.S)
|
||||
default:
|
||||
return userdata, false, errors.New("hsh values should be string ")
|
||||
}
|
||||
}
|
||||
default:
|
||||
return userdata, false, errors.New("rturned array should contain hashes ")
|
||||
}
|
||||
|
||||
userdata.entries = append(userdata.entries, row)
|
||||
}
|
||||
|
||||
userdata.headers = data.headers
|
||||
|
||||
return userdata, result, nil
|
||||
}
|
||||
@@ -1,88 +0,0 @@
|
||||
/*
|
||||
Copyright © 2023 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
package lib
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
|
||||
"github.com/glycerine/zygomys/zygo"
|
||||
)
|
||||
|
||||
func Splice2SexpList(list []string) zygo.Sexp {
|
||||
slist := []zygo.Sexp{}
|
||||
|
||||
for _, item := range list {
|
||||
slist = append(slist, &zygo.SexpStr{S: item})
|
||||
}
|
||||
|
||||
return zygo.MakeList(slist)
|
||||
}
|
||||
|
||||
func StringReSplit(env *zygo.Zlisp, name string, args []zygo.Sexp) (zygo.Sexp, error) {
|
||||
if len(args) < 2 {
|
||||
return zygo.SexpNull, errors.New("expecting 2 arguments")
|
||||
}
|
||||
|
||||
var separator, input string
|
||||
|
||||
switch t := args[0].(type) {
|
||||
case *zygo.SexpStr:
|
||||
input = t.S
|
||||
default:
|
||||
return zygo.SexpNull, errors.New("second argument must be a string")
|
||||
}
|
||||
|
||||
switch t := args[1].(type) {
|
||||
case *zygo.SexpStr:
|
||||
separator = t.S
|
||||
default:
|
||||
return zygo.SexpNull, errors.New("first argument must be a string")
|
||||
}
|
||||
|
||||
sep := regexp.MustCompile(separator)
|
||||
|
||||
return Splice2SexpList(sep.Split(input, -1)), nil
|
||||
}
|
||||
|
||||
func String2Int(env *zygo.Zlisp, name string, args []zygo.Sexp) (zygo.Sexp, error) {
|
||||
var number int
|
||||
|
||||
switch t := args[0].(type) {
|
||||
case *zygo.SexpStr:
|
||||
num, err := strconv.Atoi(t.S)
|
||||
|
||||
if err != nil {
|
||||
return zygo.SexpNull, fmt.Errorf("failed to convert string to number: %w", err)
|
||||
}
|
||||
|
||||
number = num
|
||||
|
||||
default:
|
||||
return zygo.SexpNull, errors.New("argument must be a string")
|
||||
}
|
||||
|
||||
return &zygo.SexpInt{Val: int64(number)}, nil
|
||||
}
|
||||
|
||||
func RegisterLib(env *zygo.Zlisp) {
|
||||
env.AddFunction("resplit", StringReSplit)
|
||||
env.AddFunction("atoi", String2Int)
|
||||
}
|
||||
120
lib/pager.go
Normal file
120
lib/pager.go
Normal file
@@ -0,0 +1,120 @@
|
||||
package lib
|
||||
|
||||
// pager setup using bubbletea
|
||||
// file shamlelessly copied from:
|
||||
// https://github.com/charmbracelet/bubbletea/tree/main/examples/pager
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/charmbracelet/bubbles/viewport"
|
||||
tea "github.com/charmbracelet/bubbletea"
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
)
|
||||
|
||||
var (
|
||||
titleStyle = func() lipgloss.Style {
|
||||
b := lipgloss.RoundedBorder()
|
||||
b.Right = "├"
|
||||
return lipgloss.NewStyle().BorderStyle(b).Padding(0, 1)
|
||||
}()
|
||||
|
||||
infoStyle = func() lipgloss.Style {
|
||||
b := lipgloss.RoundedBorder()
|
||||
b.Left = "┤"
|
||||
return titleStyle.BorderStyle(b)
|
||||
}()
|
||||
)
|
||||
|
||||
type Doc struct {
|
||||
content string
|
||||
title string
|
||||
ready bool
|
||||
viewport viewport.Model
|
||||
}
|
||||
|
||||
func (m Doc) Init() tea.Cmd {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m Doc) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
var (
|
||||
cmd tea.Cmd
|
||||
cmds []tea.Cmd
|
||||
)
|
||||
|
||||
switch msg := msg.(type) {
|
||||
case tea.KeyMsg:
|
||||
if k := msg.String(); k == "ctrl+c" || k == "q" || k == "esc" {
|
||||
return m, tea.Quit
|
||||
}
|
||||
|
||||
case tea.WindowSizeMsg:
|
||||
headerHeight := lipgloss.Height(m.headerView())
|
||||
footerHeight := lipgloss.Height(m.footerView())
|
||||
verticalMarginHeight := headerHeight + footerHeight
|
||||
|
||||
if !m.ready {
|
||||
// Since this program is using the full size of the viewport we
|
||||
// need to wait until we've received the window dimensions before
|
||||
// we can initialize the viewport. The initial dimensions come in
|
||||
// quickly, though asynchronously, which is why we wait for them
|
||||
// here.
|
||||
m.viewport = viewport.New(msg.Width, msg.Height-verticalMarginHeight)
|
||||
m.viewport.YPosition = headerHeight
|
||||
m.viewport.SetContent(m.content)
|
||||
m.ready = true
|
||||
} else {
|
||||
m.viewport.Width = msg.Width
|
||||
m.viewport.Height = msg.Height - verticalMarginHeight
|
||||
}
|
||||
}
|
||||
|
||||
// Handle keyboard and mouse events in the viewport
|
||||
m.viewport, cmd = m.viewport.Update(msg)
|
||||
cmds = append(cmds, cmd)
|
||||
|
||||
return m, tea.Batch(cmds...)
|
||||
}
|
||||
|
||||
func (m Doc) View() string {
|
||||
if !m.ready {
|
||||
return "\n Initializing..."
|
||||
}
|
||||
return fmt.Sprintf("%s\n%s\n%s", m.headerView(), m.viewport.View(), m.footerView())
|
||||
}
|
||||
|
||||
func (m Doc) headerView() string {
|
||||
// title := titleStyle.Render("RPN Help Overview")
|
||||
title := titleStyle.Render(m.title)
|
||||
line := strings.Repeat("─", max(0, m.viewport.Width-lipgloss.Width(title)))
|
||||
return lipgloss.JoinHorizontal(lipgloss.Center, title, line)
|
||||
}
|
||||
|
||||
func (m Doc) footerView() string {
|
||||
info := infoStyle.Render(fmt.Sprintf("%3.f%%", m.viewport.ScrollPercent()*100))
|
||||
line := strings.Repeat("─", max(0, m.viewport.Width-lipgloss.Width(info)))
|
||||
return lipgloss.JoinHorizontal(lipgloss.Center, line, info)
|
||||
}
|
||||
|
||||
func max(a, b int) int {
|
||||
if a > b {
|
||||
return a
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func Pager(title, message string) {
|
||||
p := tea.NewProgram(
|
||||
Doc{content: message, title: title},
|
||||
tea.WithAltScreen(), // use the full size of the terminal in its "alternate screen buffer"
|
||||
tea.WithMouseCellMotion(), // turn on mouse support so we can track the mouse wheel
|
||||
)
|
||||
|
||||
if _, err := p.Run(); err != nil {
|
||||
fmt.Println("could not run pager:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
278
lib/parser.go
278
lib/parser.go
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022-2024 Thomas von Dein
|
||||
Copyright © 2022-2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
@@ -20,8 +20,12 @@ package lib
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"math"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
@@ -33,11 +37,70 @@ import (
|
||||
Parser switch
|
||||
*/
|
||||
func Parse(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
var data Tabdata
|
||||
var err error
|
||||
|
||||
// first step, parse the data
|
||||
if len(conf.Separator) == 1 {
|
||||
return parseCSV(conf, input)
|
||||
data, err = parseCSV(conf, input)
|
||||
} else if conf.InputJSON {
|
||||
data, err = parseJSON(conf, input)
|
||||
} else {
|
||||
data, err = parseTabular(conf, input)
|
||||
}
|
||||
|
||||
return parseTabular(conf, input)
|
||||
if err != nil {
|
||||
return data, err
|
||||
}
|
||||
|
||||
// 2nd step, apply filters, code or transposers, if any
|
||||
postdata, changed, err := PostProcess(conf, &data)
|
||||
if err != nil {
|
||||
return data, err
|
||||
}
|
||||
|
||||
if changed {
|
||||
return *postdata, nil
|
||||
}
|
||||
|
||||
return data, err
|
||||
}
|
||||
|
||||
/*
|
||||
* Setup headers, given headers might be usable headers or just the
|
||||
* first row, which we use to determine how many headers to generate,
|
||||
* if enabled.
|
||||
*/
|
||||
func SetHeaders(conf cfg.Config, headers []string) []string {
|
||||
if !conf.AutoHeaders && len(conf.CustomHeaders) == 0 {
|
||||
return headers
|
||||
}
|
||||
|
||||
if conf.AutoHeaders {
|
||||
heads := make([]string, len(headers))
|
||||
for idx := range headers {
|
||||
heads[idx] = fmt.Sprintf("%d", idx+1)
|
||||
}
|
||||
|
||||
return heads
|
||||
}
|
||||
|
||||
if len(conf.CustomHeaders) == len(headers) {
|
||||
return conf.CustomHeaders
|
||||
}
|
||||
|
||||
// use as much custom ones we have, generate the remainder
|
||||
heads := make([]string, len(headers))
|
||||
|
||||
for idx := range headers {
|
||||
if idx < len(conf.CustomHeaders) {
|
||||
heads[idx] = conf.CustomHeaders[idx]
|
||||
} else {
|
||||
heads[idx] = fmt.Sprintf("%d", idx+1)
|
||||
}
|
||||
}
|
||||
|
||||
return heads
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -61,7 +124,7 @@ func parseCSV(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
}
|
||||
|
||||
if len(records) >= 1 {
|
||||
data.headers = records[0]
|
||||
data.headers = SetHeaders(conf, records[0])
|
||||
data.columns = len(records)
|
||||
|
||||
for _, head := range data.headers {
|
||||
@@ -72,19 +135,14 @@ func parseCSV(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
}
|
||||
}
|
||||
|
||||
if len(records) > 1 {
|
||||
data.entries = records[1:]
|
||||
if len(records) >= 1 {
|
||||
if conf.AutoHeaders || len(conf.CustomHeaders) > 0 {
|
||||
data.entries = records
|
||||
} else {
|
||||
data.entries = records[1:]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// apply user defined lisp process hooks, if any
|
||||
userdata, changed, err := RunProcessHooks(conf, data)
|
||||
if err != nil {
|
||||
return data, fmt.Errorf("failed to apply filter hook: %w", err)
|
||||
}
|
||||
|
||||
if changed {
|
||||
data = userdata
|
||||
}
|
||||
|
||||
return data, nil
|
||||
@@ -110,12 +168,11 @@ func parseTabular(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
if !hadFirst {
|
||||
// header processing
|
||||
data.columns = len(parts)
|
||||
// if Debug {
|
||||
// fmt.Println(parts)
|
||||
// }
|
||||
|
||||
// process all header fields
|
||||
for _, part := range parts {
|
||||
firstrow := make([]string, len(parts))
|
||||
|
||||
for idx, part := range parts {
|
||||
// register widest header field
|
||||
headerlen := len(part)
|
||||
if headerlen > data.maxwidthHeader {
|
||||
@@ -123,14 +180,25 @@ func parseTabular(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
}
|
||||
|
||||
// register fields data
|
||||
data.headers = append(data.headers, strings.TrimSpace(part))
|
||||
firstrow[idx] = strings.TrimSpace(part)
|
||||
|
||||
// done
|
||||
hadFirst = true
|
||||
}
|
||||
|
||||
data.headers = SetHeaders(conf, firstrow)
|
||||
|
||||
if conf.AutoHeaders || len(conf.CustomHeaders) > 0 {
|
||||
// we do not use generated headers, consider as row
|
||||
if matchPattern(conf, line) == conf.InvertMatch {
|
||||
continue
|
||||
}
|
||||
|
||||
data.entries = append(data.entries, firstrow)
|
||||
}
|
||||
} else {
|
||||
// data processing
|
||||
if conf.Pattern != "" && matchPattern(conf, line) == conf.InvertMatch {
|
||||
if matchPattern(conf, line) == conf.InvertMatch {
|
||||
// by default -v is false, so if a line does NOT
|
||||
// match the pattern, we will ignore it. However,
|
||||
// if the user specified -v, the matching is inverted,
|
||||
@@ -138,18 +206,6 @@ func parseTabular(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
continue
|
||||
}
|
||||
|
||||
// apply user defined lisp filters, if any
|
||||
accept, err := RunFilterHooks(conf, line)
|
||||
if err != nil {
|
||||
return data, fmt.Errorf("failed to apply filter hook: %w", err)
|
||||
}
|
||||
|
||||
if !accept {
|
||||
// IF there are filter hook[s] and IF one of them
|
||||
// returns false on the current line, reject it
|
||||
continue
|
||||
}
|
||||
|
||||
idx := 0 // we cannot use the header index, because we could exclude columns
|
||||
values := []string{}
|
||||
for _, part := range parts {
|
||||
@@ -174,29 +230,173 @@ func parseTabular(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
return data, fmt.Errorf("failed to read from io.Reader: %w", scanner.Err())
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
/*
|
||||
Parse JSON input. We only support an array of maps.
|
||||
*/
|
||||
func parseRawJSON(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
dec := json.NewDecoder(input)
|
||||
headers := []string{}
|
||||
idxmap := map[string]int{}
|
||||
data := [][]string{}
|
||||
row := []string{}
|
||||
iskey := true
|
||||
haveheaders := false
|
||||
var currentfield string
|
||||
var idx int
|
||||
var isjson bool
|
||||
|
||||
for {
|
||||
t, err := dec.Token()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
switch val := t.(type) {
|
||||
case string:
|
||||
if iskey {
|
||||
if !haveheaders {
|
||||
// consider only the keys of the first item as headers
|
||||
headers = append(headers, val)
|
||||
}
|
||||
currentfield = val
|
||||
} else {
|
||||
if !haveheaders {
|
||||
// the first row uses the order as it comes in
|
||||
row = append(row, val)
|
||||
} else {
|
||||
// use the pre-determined order, that way items
|
||||
// can be in any order as long as they contain all
|
||||
// neccessary fields. They may also contain less
|
||||
// fields than the first item, these will contain
|
||||
// the empty string
|
||||
row[idxmap[currentfield]] = val
|
||||
}
|
||||
}
|
||||
|
||||
case float64:
|
||||
var value string
|
||||
|
||||
// we set precision to 0 if the float is a whole number
|
||||
if val == math.Trunc(val) {
|
||||
value = fmt.Sprintf("%.f", val)
|
||||
} else {
|
||||
value = fmt.Sprintf("%f", val)
|
||||
}
|
||||
|
||||
if !haveheaders {
|
||||
row = append(row, value)
|
||||
} else {
|
||||
row[idxmap[currentfield]] = value
|
||||
}
|
||||
|
||||
case nil:
|
||||
// we ignore here if a value shall be an int or a string,
|
||||
// because tablizer only works with strings anyway
|
||||
if !haveheaders {
|
||||
row = append(row, "")
|
||||
} else {
|
||||
row[idxmap[currentfield]] = ""
|
||||
}
|
||||
|
||||
case json.Delim:
|
||||
if val.String() == "}" {
|
||||
data = append(data, row)
|
||||
row = make([]string, len(headers))
|
||||
idx++
|
||||
|
||||
if !haveheaders {
|
||||
// remember the array position of header fields,
|
||||
// which we use to assign elements to the correct
|
||||
// row index
|
||||
for i, header := range headers {
|
||||
idxmap[header] = i
|
||||
}
|
||||
}
|
||||
|
||||
haveheaders = true
|
||||
}
|
||||
isjson = true
|
||||
default:
|
||||
fmt.Printf("unknown token: %v type: %T\n", t, t)
|
||||
}
|
||||
|
||||
iskey = !iskey
|
||||
}
|
||||
|
||||
if isjson && (len(headers) == 0 || len(data) == 0) {
|
||||
return Tabdata{}, errors.New("failed to parse JSON, input did not contain array of hashes")
|
||||
}
|
||||
|
||||
return Tabdata{headers: headers, entries: data, columns: len(headers)}, nil
|
||||
}
|
||||
|
||||
func parseJSON(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
// parse raw json
|
||||
data, err := parseRawJSON(conf, input)
|
||||
if err != nil {
|
||||
return data, err
|
||||
}
|
||||
|
||||
// apply filter, if any
|
||||
filtered := [][]string{}
|
||||
var line string
|
||||
|
||||
for _, row := range data.entries {
|
||||
line = strings.Join(row, " ")
|
||||
|
||||
if matchPattern(conf, line) == conf.InvertMatch {
|
||||
continue
|
||||
}
|
||||
|
||||
filtered = append(filtered, row)
|
||||
}
|
||||
|
||||
if len(filtered) != len(data.entries) {
|
||||
data.entries = filtered
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
func PostProcess(conf cfg.Config, data *Tabdata) (*Tabdata, bool, error) {
|
||||
var modified bool
|
||||
|
||||
// filter by field filters, if any
|
||||
filtereddata, changed, err := FilterByFields(conf, data)
|
||||
if err != nil {
|
||||
return data, fmt.Errorf("failed to filter fields: %w", err)
|
||||
return data, false, fmt.Errorf("failed to filter fields: %w", err)
|
||||
}
|
||||
|
||||
if changed {
|
||||
data = filtereddata
|
||||
modified = true
|
||||
}
|
||||
|
||||
// apply user defined lisp process hooks, if any
|
||||
userdata, changed, err := RunProcessHooks(conf, data)
|
||||
// check if transposers are valid and turn into Transposer structs
|
||||
if err := PrepareTransposerColumns(&conf, data); err != nil {
|
||||
return data, false, err
|
||||
}
|
||||
|
||||
// transpose if demanded
|
||||
modifieddata, changed, err := TransposeFields(conf, data)
|
||||
if err != nil {
|
||||
return data, fmt.Errorf("failed to apply filter hook: %w", err)
|
||||
return data, false, fmt.Errorf("failed to transpose fields: %w", err)
|
||||
}
|
||||
|
||||
if changed {
|
||||
data = userdata
|
||||
data = modifieddata
|
||||
modified = true
|
||||
}
|
||||
|
||||
if conf.Debug {
|
||||
repr.Print(data)
|
||||
}
|
||||
|
||||
return data, nil
|
||||
return data, modified, nil
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 Thomas von Dein
|
||||
Copyright © 2022-2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
@@ -19,10 +19,11 @@ package lib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"io"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
@@ -33,7 +34,7 @@ var input = []struct {
|
||||
}{
|
||||
{
|
||||
name: "tabular-data",
|
||||
separator: cfg.DefaultSeparator,
|
||||
separator: cfg.SeparatorTemplates[":default:"],
|
||||
text: `
|
||||
ONE TWO THREE
|
||||
asd igig cxxxncnc
|
||||
@@ -67,66 +68,61 @@ func TestParser(t *testing.T) {
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
readFd := strings.NewReader(strings.TrimSpace(testdata.text))
|
||||
conf := cfg.Config{Separator: testdata.separator}
|
||||
gotdata, err := Parse(conf, readFd)
|
||||
gotdata, err := wrapValidateParser(conf, readFd)
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("Parser returned error: %s\nData processed so far: %+v", err, gotdata)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(data, gotdata) {
|
||||
t.Errorf("Parser returned invalid data\nExp: %+v\nGot: %+v\n",
|
||||
data, gotdata)
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, data, gotdata)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParserPatternmatching(t *testing.T) {
|
||||
var tests = []struct {
|
||||
entries [][]string
|
||||
pattern string
|
||||
invert bool
|
||||
want bool
|
||||
name string
|
||||
entries [][]string
|
||||
patterns []*cfg.Pattern
|
||||
invert bool
|
||||
wanterror bool
|
||||
}{
|
||||
{
|
||||
name: "match",
|
||||
entries: [][]string{
|
||||
{"asd", "igig", "cxxxncnc"},
|
||||
},
|
||||
pattern: "ig",
|
||||
invert: false,
|
||||
patterns: []*cfg.Pattern{{Pattern: "ig"}},
|
||||
invert: false,
|
||||
},
|
||||
{
|
||||
name: "invert",
|
||||
entries: [][]string{
|
||||
{"19191", "EDD 1", "X"},
|
||||
},
|
||||
pattern: "ig",
|
||||
invert: true,
|
||||
patterns: []*cfg.Pattern{{Pattern: "ig"}},
|
||||
invert: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, inputdata := range input {
|
||||
for _, testdata := range tests {
|
||||
testname := fmt.Sprintf("parse-%s-with-pattern-%s-inverted-%t",
|
||||
inputdata.name, testdata.pattern, testdata.invert)
|
||||
inputdata.name, testdata.name, testdata.invert)
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
conf := cfg.Config{InvertMatch: testdata.invert, Pattern: testdata.pattern,
|
||||
Separator: inputdata.separator}
|
||||
conf := cfg.Config{
|
||||
InvertMatch: testdata.invert,
|
||||
Patterns: testdata.patterns,
|
||||
Separator: inputdata.separator,
|
||||
}
|
||||
|
||||
_ = conf.PreparePattern(testdata.pattern)
|
||||
_ = conf.PreparePattern(testdata.patterns)
|
||||
|
||||
readFd := strings.NewReader(strings.TrimSpace(inputdata.text))
|
||||
gotdata, err := Parse(conf, readFd)
|
||||
data, err := wrapValidateParser(conf, readFd)
|
||||
|
||||
if err != nil {
|
||||
if !testdata.want {
|
||||
t.Errorf("Parser returned error: %s\nData processed so far: %+v",
|
||||
err, gotdata)
|
||||
}
|
||||
if testdata.wanterror {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
if !reflect.DeepEqual(testdata.entries, gotdata.entries) {
|
||||
t.Errorf("Parser returned invalid data (pattern: %s, invert: %t)\nExp: %+v\nGot: %+v\n",
|
||||
testdata.pattern, testdata.invert, testdata.entries, gotdata.entries)
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, testdata.entries, data.entries)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -152,15 +148,282 @@ asd igig
|
||||
19191 EDD 1 X`
|
||||
|
||||
readFd := strings.NewReader(strings.TrimSpace(table))
|
||||
conf := cfg.Config{Separator: cfg.DefaultSeparator}
|
||||
gotdata, err := Parse(conf, readFd)
|
||||
conf := cfg.Config{Separator: cfg.SeparatorTemplates[":default:"]}
|
||||
gotdata, err := wrapValidateParser(conf, readFd)
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("Parser returned error: %s\nData processed so far: %+v", err, gotdata)
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, data, gotdata)
|
||||
}
|
||||
|
||||
func TestParserJSONInput(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
input string
|
||||
expect Tabdata
|
||||
wanterror bool // true: expect fail, false: expect success
|
||||
}{
|
||||
{
|
||||
// too deep nesting
|
||||
name: "invalidjson",
|
||||
wanterror: true,
|
||||
input: `[
|
||||
{
|
||||
"item": {
|
||||
"NAME": "postgres-operator-7f4c7c8485-ntlns",
|
||||
"READY": "1/1",
|
||||
"STATUS": "Running",
|
||||
"RESTARTS": "0",
|
||||
"AGE": "24h"
|
||||
}
|
||||
}
|
||||
`,
|
||||
expect: Tabdata{},
|
||||
},
|
||||
|
||||
{
|
||||
// contains nil, int and float values
|
||||
name: "niljson",
|
||||
wanterror: false,
|
||||
input: `[
|
||||
{
|
||||
"NAME": "postgres-operator-7f4c7c8485-ntlns",
|
||||
"READY": "1/1",
|
||||
"STATUS": "Running",
|
||||
"RESTARTS": 0,
|
||||
"AGE": null,
|
||||
"X": 12,
|
||||
"Y": 34.222
|
||||
}
|
||||
]`,
|
||||
expect: Tabdata{
|
||||
columns: 7,
|
||||
headers: []string{"NAME", "READY", "STATUS", "RESTARTS", "AGE", "X", "Y"},
|
||||
entries: [][]string{
|
||||
[]string{
|
||||
"postgres-operator-7f4c7c8485-ntlns",
|
||||
"1/1",
|
||||
"Running",
|
||||
"0",
|
||||
"",
|
||||
"12",
|
||||
"34.222000",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
// one field missing + different order
|
||||
// but shall not fail
|
||||
name: "kgpfail",
|
||||
wanterror: false,
|
||||
input: `[
|
||||
{
|
||||
"NAME": "postgres-operator-7f4c7c8485-ntlns",
|
||||
"READY": "1/1",
|
||||
"STATUS": "Running",
|
||||
"RESTARTS": "0",
|
||||
"AGE": "24h"
|
||||
},
|
||||
{
|
||||
"NAME": "wal-g-exporter-778dcd95f5-wcjzn",
|
||||
"RESTARTS": "0",
|
||||
"READY": "1/1",
|
||||
"AGE": "24h"
|
||||
}
|
||||
]`,
|
||||
expect: Tabdata{
|
||||
columns: 5,
|
||||
headers: []string{"NAME", "READY", "STATUS", "RESTARTS", "AGE"},
|
||||
entries: [][]string{
|
||||
[]string{
|
||||
"postgres-operator-7f4c7c8485-ntlns",
|
||||
"1/1",
|
||||
"Running",
|
||||
"0",
|
||||
"24h",
|
||||
},
|
||||
[]string{
|
||||
"wal-g-exporter-778dcd95f5-wcjzn",
|
||||
"1/1",
|
||||
"",
|
||||
"0",
|
||||
"24h",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "kgp",
|
||||
wanterror: false,
|
||||
input: `[
|
||||
{
|
||||
"NAME": "postgres-operator-7f4c7c8485-ntlns",
|
||||
"READY": "1/1",
|
||||
"STATUS": "Running",
|
||||
"RESTARTS": "0",
|
||||
"AGE": "24h"
|
||||
},
|
||||
{
|
||||
"NAME": "wal-g-exporter-778dcd95f5-wcjzn",
|
||||
"STATUS": "Running",
|
||||
"READY": "1/1",
|
||||
"RESTARTS": "0",
|
||||
"AGE": "24h"
|
||||
}
|
||||
]`,
|
||||
expect: Tabdata{
|
||||
columns: 5,
|
||||
headers: []string{"NAME", "READY", "STATUS", "RESTARTS", "AGE"},
|
||||
entries: [][]string{
|
||||
[]string{
|
||||
"postgres-operator-7f4c7c8485-ntlns",
|
||||
"1/1",
|
||||
"Running",
|
||||
"0",
|
||||
"24h",
|
||||
},
|
||||
[]string{
|
||||
"wal-g-exporter-778dcd95f5-wcjzn",
|
||||
"1/1",
|
||||
"Running",
|
||||
"0",
|
||||
"24h",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(data, gotdata) {
|
||||
t.Errorf("Parser returned invalid data, Regex: %s\nExp: %+v\nGot: %+v\n",
|
||||
conf.Separator, data, gotdata)
|
||||
for _, testdata := range tests {
|
||||
testname := fmt.Sprintf("parse-json-%s", testdata.name)
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
conf := cfg.Config{InputJSON: true}
|
||||
|
||||
readFd := strings.NewReader(strings.TrimSpace(testdata.input))
|
||||
data, err := wrapValidateParser(conf, readFd)
|
||||
|
||||
if testdata.wanterror {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, testdata.expect, data)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParserSeparators(t *testing.T) {
|
||||
list := []string{"alpha", "beta", "delta"}
|
||||
|
||||
tests := []struct {
|
||||
input string
|
||||
sep string
|
||||
}{
|
||||
{
|
||||
input: `🎲`,
|
||||
sep: ":nonprint:",
|
||||
},
|
||||
{
|
||||
input: `|`,
|
||||
sep: ":pipe:",
|
||||
},
|
||||
{
|
||||
input: ` `,
|
||||
sep: ":spaces:",
|
||||
},
|
||||
{
|
||||
input: " \t ",
|
||||
sep: ":tab:",
|
||||
},
|
||||
{
|
||||
input: `-`,
|
||||
sep: ":nonword:",
|
||||
},
|
||||
{
|
||||
input: `//$`,
|
||||
sep: ":special:",
|
||||
},
|
||||
}
|
||||
|
||||
for _, testdata := range tests {
|
||||
testname := fmt.Sprintf("parse-%s", testdata.sep)
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
header := strings.Join(list, testdata.input)
|
||||
row := header
|
||||
content := header + "\n" + row
|
||||
|
||||
readFd := strings.NewReader(strings.TrimSpace(content))
|
||||
conf := cfg.Config{Separator: testdata.sep}
|
||||
conf.ApplyDefaults()
|
||||
|
||||
gotdata, err := wrapValidateParser(conf, readFd)
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, [][]string{list}, gotdata.entries)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParserSetHeaders(t *testing.T) {
|
||||
row := []string{"c", "b", "c", "d", "e"}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
custom []string
|
||||
expect []string
|
||||
auto bool
|
||||
}{
|
||||
{
|
||||
name: "default",
|
||||
expect: row,
|
||||
},
|
||||
{
|
||||
name: "auto",
|
||||
expect: strings.Split("1 2 3 4 5", " "),
|
||||
auto: true,
|
||||
},
|
||||
{
|
||||
name: "custom-complete",
|
||||
custom: strings.Split("A B C D E", " "),
|
||||
expect: strings.Split("A B C D E", " "),
|
||||
},
|
||||
{
|
||||
name: "custom-too-short",
|
||||
custom: strings.Split("A B", " "),
|
||||
expect: strings.Split("A B 3 4 5", " "),
|
||||
},
|
||||
{
|
||||
name: "custom-too-long",
|
||||
custom: strings.Split("A B C D E F G", " "),
|
||||
expect: strings.Split("A B C D E", " "),
|
||||
},
|
||||
}
|
||||
|
||||
for _, testdata := range tests {
|
||||
testname := fmt.Sprintf("parse-%s", testdata.name)
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
conf := cfg.Config{
|
||||
AutoHeaders: testdata.auto,
|
||||
CustomHeaders: testdata.custom,
|
||||
}
|
||||
headers := SetHeaders(conf, row)
|
||||
|
||||
assert.NotNil(t, headers)
|
||||
assert.EqualValues(t, testdata.expect, headers)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func wrapValidateParser(conf cfg.Config, input io.Reader) (Tabdata, error) {
|
||||
data, err := Parse(conf, input)
|
||||
|
||||
if err != nil {
|
||||
return data, err
|
||||
}
|
||||
|
||||
err = ValidateConsistency(&data)
|
||||
|
||||
return data, err
|
||||
}
|
||||
|
||||
211
lib/printer.go
211
lib/printer.go
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 Thomas von Dein
|
||||
Copyright © 2022-2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
@@ -22,26 +22,32 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/gookit/color"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
"github.com/olekukonko/tablewriter/renderer"
|
||||
"github.com/olekukonko/tablewriter/tw"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func printData(writer io.Writer, conf cfg.Config, data *Tabdata) {
|
||||
// add numbers to headers and remove this we're not interested in
|
||||
// Sort the data first, before headers+entries are being
|
||||
// reduced. That way the user can specify any valid column to sort
|
||||
// by, independently if it's being used for display or not.
|
||||
sortTable(conf, data)
|
||||
|
||||
// put one or more columns into clipboard
|
||||
yankColumns(conf, data)
|
||||
|
||||
// add numbers to headers and remove those we're not interested in
|
||||
numberizeAndReduceHeaders(conf, data)
|
||||
|
||||
// remove unwanted columns, if any
|
||||
reduceColumns(conf, data)
|
||||
|
||||
// sort the data
|
||||
sortTable(conf, data)
|
||||
|
||||
switch conf.OutputMode {
|
||||
case cfg.Extended:
|
||||
printExtendedData(writer, conf, data)
|
||||
@@ -56,7 +62,7 @@ func printData(writer io.Writer, conf cfg.Config, data *Tabdata) {
|
||||
case cfg.Yaml:
|
||||
printYamlData(writer, data)
|
||||
case cfg.CSV:
|
||||
printCSVData(writer, data)
|
||||
printCSVData(writer, conf, data)
|
||||
default:
|
||||
printASCIIData(writer, conf, data)
|
||||
}
|
||||
@@ -71,36 +77,58 @@ Emacs org-mode compatible table (also orgtbl-mode)
|
||||
*/
|
||||
func printOrgmodeData(writer io.Writer, conf cfg.Config, data *Tabdata) {
|
||||
tableString := &strings.Builder{}
|
||||
table := tablewriter.NewWriter(tableString)
|
||||
|
||||
table := tablewriter.NewTable(tableString,
|
||||
tablewriter.WithRenderer(
|
||||
renderer.NewBlueprint(
|
||||
tw.Rendition{
|
||||
Borders: tw.Border{
|
||||
Left: tw.On,
|
||||
Right: tw.On,
|
||||
Top: tw.On,
|
||||
Bottom: tw.On,
|
||||
},
|
||||
Settings: tw.Settings{
|
||||
Separators: tw.Separators{
|
||||
ShowHeader: tw.On,
|
||||
ShowFooter: tw.Off,
|
||||
BetweenRows: tw.Off,
|
||||
BetweenColumns: 0,
|
||||
},
|
||||
},
|
||||
Symbols: tw.NewSymbols(tw.StyleASCII),
|
||||
})),
|
||||
|
||||
tablewriter.WithConfig(
|
||||
tablewriter.Config{
|
||||
Header: tw.CellConfig{
|
||||
Formatting: tw.CellFormatting{
|
||||
Alignment: tw.AlignLeft,
|
||||
AutoFormat: tw.Off,
|
||||
},
|
||||
},
|
||||
Row: tw.CellConfig{
|
||||
Formatting: tw.CellFormatting{
|
||||
Alignment: tw.AlignLeft,
|
||||
},
|
||||
},
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
if !conf.NoHeaders {
|
||||
table.SetHeader(data.headers)
|
||||
table.Header(data.headers)
|
||||
}
|
||||
|
||||
for _, row := range data.entries {
|
||||
table.Append(trimRow(row))
|
||||
if err := table.Bulk(data.entries); err != nil {
|
||||
log.Fatalf("Failed to add data to table renderer: %s", err)
|
||||
}
|
||||
|
||||
table.Render()
|
||||
if err := table.Render(); err != nil {
|
||||
log.Fatalf("Failed to render table: %s", err)
|
||||
}
|
||||
|
||||
/* fix output for org-mode (orgtbl)
|
||||
tableWriter output:
|
||||
+------+------+
|
||||
| cell | cell |
|
||||
+------+------+
|
||||
|
||||
Needed for org-mode compatibility:
|
||||
|------+------|
|
||||
| cell | cell |
|
||||
|------+------|
|
||||
*/
|
||||
leftR := regexp.MustCompile(`(?m)^\\+`)
|
||||
rightR := regexp.MustCompile(`\\+(?m)$`)
|
||||
|
||||
output(writer, color.Sprint(
|
||||
colorizeData(conf,
|
||||
rightR.ReplaceAllString(
|
||||
leftR.ReplaceAllString(tableString.String(), "|"), "|"))))
|
||||
output(writer, color.Sprint(colorizeData(conf, tableString.String())))
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -108,20 +136,57 @@ Markdown table
|
||||
*/
|
||||
func printMarkdownData(writer io.Writer, conf cfg.Config, data *Tabdata) {
|
||||
tableString := &strings.Builder{}
|
||||
table := tablewriter.NewWriter(tableString)
|
||||
|
||||
table := tablewriter.NewTable(tableString,
|
||||
tablewriter.WithRenderer(
|
||||
renderer.NewBlueprint(
|
||||
tw.Rendition{
|
||||
Borders: tw.Border{
|
||||
Left: tw.On,
|
||||
Right: tw.On,
|
||||
Top: tw.Off,
|
||||
Bottom: tw.Off,
|
||||
},
|
||||
Settings: tw.Settings{
|
||||
Separators: tw.Separators{
|
||||
ShowHeader: tw.On,
|
||||
ShowFooter: tw.Off,
|
||||
BetweenRows: tw.Off,
|
||||
BetweenColumns: 0,
|
||||
},
|
||||
},
|
||||
Symbols: tw.NewSymbols(tw.StyleMarkdown),
|
||||
})),
|
||||
|
||||
tablewriter.WithConfig(
|
||||
tablewriter.Config{
|
||||
Header: tw.CellConfig{
|
||||
Formatting: tw.CellFormatting{
|
||||
Alignment: tw.AlignLeft,
|
||||
AutoFormat: tw.Off,
|
||||
},
|
||||
},
|
||||
Row: tw.CellConfig{
|
||||
Formatting: tw.CellFormatting{
|
||||
Alignment: tw.AlignLeft,
|
||||
},
|
||||
},
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
if !conf.NoHeaders {
|
||||
table.SetHeader(data.headers)
|
||||
table.Header(data.headers)
|
||||
}
|
||||
|
||||
for _, row := range data.entries {
|
||||
table.Append(trimRow(row))
|
||||
if err := table.Bulk(data.entries); err != nil {
|
||||
log.Fatalf("Failed to add data to table renderer: %s", err)
|
||||
}
|
||||
|
||||
table.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false})
|
||||
table.SetCenterSeparator("|")
|
||||
if err := table.Render(); err != nil {
|
||||
log.Fatalf("Failed to render table: %s", err)
|
||||
}
|
||||
|
||||
table.Render()
|
||||
output(writer, color.Sprint(colorizeData(conf, tableString.String())))
|
||||
}
|
||||
|
||||
@@ -129,34 +194,56 @@ func printMarkdownData(writer io.Writer, conf cfg.Config, data *Tabdata) {
|
||||
Simple ASCII table without any borders etc, just like the input we expect
|
||||
*/
|
||||
func printASCIIData(writer io.Writer, conf cfg.Config, data *Tabdata) {
|
||||
OFS := " "
|
||||
if conf.OFS != "" {
|
||||
OFS = conf.OFS
|
||||
}
|
||||
|
||||
tableString := &strings.Builder{}
|
||||
table := tablewriter.NewWriter(tableString)
|
||||
|
||||
styleTSV := tw.NewSymbolCustom("space").WithColumn("\t")
|
||||
|
||||
table := tablewriter.NewTable(tableString,
|
||||
tablewriter.WithRenderer(
|
||||
renderer.NewBlueprint(tw.Rendition{
|
||||
Borders: tw.BorderNone,
|
||||
Symbols: styleTSV,
|
||||
Settings: tw.Settings{
|
||||
Separators: tw.SeparatorsNone,
|
||||
Lines: tw.LinesNone,
|
||||
},
|
||||
})),
|
||||
tablewriter.WithConfig(tablewriter.Config{
|
||||
Header: tw.CellConfig{
|
||||
Formatting: tw.CellFormatting{
|
||||
AutoFormat: tw.Off,
|
||||
},
|
||||
Padding: tw.CellPadding{Global: tw.Padding{Left: "", Right: OFS}},
|
||||
},
|
||||
Row: tw.CellConfig{
|
||||
Formatting: tw.CellFormatting{
|
||||
AutoWrap: tw.WrapNone,
|
||||
Alignment: tw.AlignLeft,
|
||||
},
|
||||
Padding: tw.CellPadding{Global: tw.Padding{Right: OFS}},
|
||||
},
|
||||
|
||||
Debug: true,
|
||||
}),
|
||||
)
|
||||
|
||||
if !conf.NoHeaders {
|
||||
table.SetHeader(data.headers)
|
||||
table.Header(data.headers)
|
||||
}
|
||||
|
||||
table.AppendBulk(data.entries)
|
||||
|
||||
table.SetAutoWrapText(false)
|
||||
table.SetAutoFormatHeaders(true)
|
||||
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetCenterSeparator("")
|
||||
table.SetColumnSeparator("")
|
||||
table.SetRowSeparator("")
|
||||
table.SetHeaderLine(false)
|
||||
table.SetBorder(false)
|
||||
table.SetNoWhiteSpace(true)
|
||||
|
||||
if !conf.UseHighlight {
|
||||
// the tabs destroy the highlighting
|
||||
table.SetTablePadding("\t") // pad with tabs
|
||||
} else {
|
||||
table.SetTablePadding(" ")
|
||||
if err := table.Bulk(data.entries); err != nil {
|
||||
log.Fatalf("Failed to add data to table renderer: %s", err)
|
||||
}
|
||||
|
||||
if err := table.Render(); err != nil {
|
||||
log.Fatalf("Failed to render table: %s", err)
|
||||
}
|
||||
|
||||
table.Render()
|
||||
output(writer, color.Sprint(colorizeData(conf, tableString.String())))
|
||||
}
|
||||
|
||||
@@ -241,8 +328,14 @@ func printYamlData(writer io.Writer, data *Tabdata) {
|
||||
output(writer, string(yamlstr))
|
||||
}
|
||||
|
||||
func printCSVData(writer io.Writer, data *Tabdata) {
|
||||
func printCSVData(writer io.Writer, conf cfg.Config, data *Tabdata) {
|
||||
OFS := ","
|
||||
if conf.OFS != "" {
|
||||
OFS = conf.OFS
|
||||
}
|
||||
|
||||
csvout := csv.NewWriter(writer)
|
||||
csvout.Comma = []rune(OFS)[0]
|
||||
|
||||
if err := csvout.Write(data.headers); err != nil {
|
||||
log.Fatalln("error writing record to csv:", err)
|
||||
|
||||
@@ -23,6 +23,7 @@ import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
@@ -63,9 +64,9 @@ var tests = []struct {
|
||||
name string // so we can identify which one fails, can be the same
|
||||
// for multiple tests, because flags will be appended to the name
|
||||
sortby string // empty == default
|
||||
column int // sort by this column, 0 == default first or NO Sort
|
||||
column int // sort by this column (numbers start by 1)
|
||||
desc bool // sort in descending order, default == ascending
|
||||
nonum bool // hide numbering
|
||||
numberize bool // add header numbering
|
||||
mode int // shell, orgtbl, etc. empty == default: ascii
|
||||
usecol []int // columns to display, empty == display all
|
||||
usecolstr string // for testname, must match usecol
|
||||
@@ -73,17 +74,19 @@ var tests = []struct {
|
||||
}{
|
||||
// --------------------- Default settings mode tests ``
|
||||
{
|
||||
mode: cfg.ASCII,
|
||||
name: "default",
|
||||
mode: cfg.ASCII,
|
||||
numberize: true,
|
||||
name: "default",
|
||||
expect: `
|
||||
NAME(1) DURATION(2) COUNT(3) WHEN(4)
|
||||
beta 1d10h5m1s 33 3/1/2014
|
||||
alpha 4h35m 170 2013-Feb-03
|
||||
ceta 33d12h 9 06/Jan/2008 15:04:05 -0700`,
|
||||
NAME(1) DURATION(2) COUNT(3) WHEN(4)
|
||||
beta 1d10h5m1s 33 3/1/2014
|
||||
alpha 4h35m 170 2013-Feb-03
|
||||
ceta 33d12h 9 06/Jan/2008 15:04:05 -0700`,
|
||||
},
|
||||
{
|
||||
mode: cfg.CSV,
|
||||
name: "csv",
|
||||
mode: cfg.CSV,
|
||||
numberize: false,
|
||||
name: "csv",
|
||||
expect: `
|
||||
NAME,DURATION,COUNT,WHEN
|
||||
beta,1d10h5m1s,33,3/1/2014
|
||||
@@ -91,40 +94,42 @@ alpha,4h35m,170,2013-Feb-03
|
||||
ceta,33d12h,9,06/Jan/2008 15:04:05 -0700`,
|
||||
},
|
||||
{
|
||||
name: "default",
|
||||
mode: cfg.Orgtbl,
|
||||
name: "orgtbl",
|
||||
numberize: true,
|
||||
mode: cfg.Orgtbl,
|
||||
expect: `
|
||||
+---------+-------------+----------+----------------------------+
|
||||
| NAME(1) | DURATION(2) | COUNT(3) | WHEN(4) |
|
||||
| NAME(1) | DURATION(2) | COUNT(3) | WHEN(4) |
|
||||
+---------+-------------+----------+----------------------------+
|
||||
| beta | 1d10h5m1s | 33 | 3/1/2014 |
|
||||
| alpha | 4h35m | 170 | 2013-Feb-03 |
|
||||
| ceta | 33d12h | 9 | 06/Jan/2008 15:04:05 -0700 |
|
||||
| beta | 1d10h5m1s | 33 | 3/1/2014 |
|
||||
| alpha | 4h35m | 170 | 2013-Feb-03 |
|
||||
| ceta | 33d12h | 9 | 06/Jan/2008 15:04:05 -0700 |
|
||||
+---------+-------------+----------+----------------------------+`,
|
||||
},
|
||||
{
|
||||
name: "default",
|
||||
mode: cfg.Markdown,
|
||||
name: "markdown",
|
||||
mode: cfg.Markdown,
|
||||
numberize: true,
|
||||
expect: `
|
||||
| NAME(1) | DURATION(2) | COUNT(3) | WHEN(4) |
|
||||
| NAME(1) | DURATION(2) | COUNT(3) | WHEN(4) |
|
||||
|---------|-------------|----------|----------------------------|
|
||||
| beta | 1d10h5m1s | 33 | 3/1/2014 |
|
||||
| alpha | 4h35m | 170 | 2013-Feb-03 |
|
||||
| ceta | 33d12h | 9 | 06/Jan/2008 15:04:05 -0700 |`,
|
||||
| beta | 1d10h5m1s | 33 | 3/1/2014 |
|
||||
| alpha | 4h35m | 170 | 2013-Feb-03 |
|
||||
| ceta | 33d12h | 9 | 06/Jan/2008 15:04:05 -0700 |`,
|
||||
},
|
||||
{
|
||||
name: "default",
|
||||
mode: cfg.Shell,
|
||||
nonum: true,
|
||||
name: "shell",
|
||||
mode: cfg.Shell,
|
||||
numberize: false,
|
||||
expect: `
|
||||
NAME="beta" DURATION="1d10h5m1s" COUNT="33" WHEN="3/1/2014"
|
||||
NAME="alpha" DURATION="4h35m" COUNT="170" WHEN="2013-Feb-03"
|
||||
NAME="ceta" DURATION="33d12h" COUNT="9" WHEN="06/Jan/2008 15:04:05 -0700"`,
|
||||
},
|
||||
{
|
||||
name: "default",
|
||||
mode: cfg.Yaml,
|
||||
nonum: true,
|
||||
name: "yaml",
|
||||
mode: cfg.Yaml,
|
||||
numberize: false,
|
||||
expect: `
|
||||
entries:
|
||||
- count: 33
|
||||
@@ -141,8 +146,9 @@ entries:
|
||||
when: "06/Jan/2008 15:04:05 -0700"`,
|
||||
},
|
||||
{
|
||||
name: "default",
|
||||
mode: cfg.Extended,
|
||||
name: "extended",
|
||||
mode: cfg.Extended,
|
||||
numberize: true,
|
||||
expect: `
|
||||
NAME(1): beta
|
||||
DURATION(2): 1d10h5m1s
|
||||
@@ -162,112 +168,131 @@ DURATION(2): 33d12h
|
||||
|
||||
//------------------------ SORT TESTS
|
||||
{
|
||||
name: "sortbycolumn",
|
||||
column: 3,
|
||||
sortby: "numeric",
|
||||
desc: false,
|
||||
name: "sortbycolumn3",
|
||||
column: 3,
|
||||
sortby: "numeric",
|
||||
numberize: true,
|
||||
desc: false,
|
||||
expect: `
|
||||
NAME(1) DURATION(2) COUNT(3) WHEN(4)
|
||||
ceta 33d12h 9 06/Jan/2008 15:04:05 -0700
|
||||
beta 1d10h5m1s 33 3/1/2014
|
||||
alpha 4h35m 170 2013-Feb-03`,
|
||||
NAME(1) DURATION(2) COUNT(3) WHEN(4)
|
||||
ceta 33d12h 9 06/Jan/2008 15:04:05 -0700
|
||||
beta 1d10h5m1s 33 3/1/2014
|
||||
alpha 4h35m 170 2013-Feb-03`,
|
||||
},
|
||||
{
|
||||
name: "sortbycolumn",
|
||||
column: 4,
|
||||
sortby: "time",
|
||||
desc: false,
|
||||
name: "sortbycolumn4",
|
||||
column: 4,
|
||||
sortby: "time",
|
||||
desc: false,
|
||||
numberize: true,
|
||||
expect: `
|
||||
NAME(1) DURATION(2) COUNT(3) WHEN(4)
|
||||
ceta 33d12h 9 06/Jan/2008 15:04:05 -0700
|
||||
alpha 4h35m 170 2013-Feb-03
|
||||
beta 1d10h5m1s 33 3/1/2014`,
|
||||
NAME(1) DURATION(2) COUNT(3) WHEN(4)
|
||||
ceta 33d12h 9 06/Jan/2008 15:04:05 -0700
|
||||
alpha 4h35m 170 2013-Feb-03
|
||||
beta 1d10h5m1s 33 3/1/2014`,
|
||||
},
|
||||
{
|
||||
name: "sortbycolumn",
|
||||
column: 2,
|
||||
sortby: "duration",
|
||||
desc: false,
|
||||
name: "sortbycolumn2",
|
||||
column: 2,
|
||||
sortby: "duration",
|
||||
numberize: true,
|
||||
desc: false,
|
||||
expect: `
|
||||
NAME(1) DURATION(2) COUNT(3) WHEN(4)
|
||||
alpha 4h35m 170 2013-Feb-03
|
||||
beta 1d10h5m1s 33 3/1/2014
|
||||
ceta 33d12h 9 06/Jan/2008 15:04:05 -0700`,
|
||||
NAME(1) DURATION(2) COUNT(3) WHEN(4)
|
||||
alpha 4h35m 170 2013-Feb-03
|
||||
beta 1d10h5m1s 33 3/1/2014
|
||||
ceta 33d12h 9 06/Jan/2008 15:04:05 -0700`,
|
||||
},
|
||||
|
||||
// ----------------------- UseColumns Tests
|
||||
{
|
||||
name: "usecolumns",
|
||||
usecol: []int{1, 4},
|
||||
numberize: true,
|
||||
usecolstr: "1,4",
|
||||
expect: `
|
||||
NAME(1) WHEN(4)
|
||||
beta 3/1/2014
|
||||
alpha 2013-Feb-03
|
||||
ceta 06/Jan/2008 15:04:05 -0700`,
|
||||
NAME(1) WHEN(4)
|
||||
beta 3/1/2014
|
||||
alpha 2013-Feb-03
|
||||
ceta 06/Jan/2008 15:04:05 -0700`,
|
||||
},
|
||||
{
|
||||
name: "usecolumns",
|
||||
name: "usecolumns2",
|
||||
usecol: []int{2},
|
||||
numberize: true,
|
||||
usecolstr: "2",
|
||||
expect: `
|
||||
DURATION(2)
|
||||
1d10h5m1s
|
||||
4h35m
|
||||
1d10h5m1s
|
||||
4h35m
|
||||
33d12h`,
|
||||
},
|
||||
{
|
||||
name: "usecolumns",
|
||||
name: "usecolumns3",
|
||||
usecol: []int{3},
|
||||
numberize: true,
|
||||
usecolstr: "3",
|
||||
expect: `
|
||||
COUNT(3)
|
||||
33
|
||||
170
|
||||
33
|
||||
170
|
||||
9`,
|
||||
},
|
||||
{
|
||||
name: "usecolumns",
|
||||
name: "usecolumns4",
|
||||
column: 0,
|
||||
usecol: []int{1, 3},
|
||||
numberize: true,
|
||||
usecolstr: "1,3",
|
||||
expect: `
|
||||
NAME(1) COUNT(3)
|
||||
beta 33
|
||||
alpha 170
|
||||
ceta 9`,
|
||||
NAME(1) COUNT(3)
|
||||
beta 33
|
||||
alpha 170
|
||||
ceta 9`,
|
||||
},
|
||||
{
|
||||
name: "usecolumns",
|
||||
usecol: []int{2, 4},
|
||||
numberize: true,
|
||||
usecolstr: "2,4",
|
||||
expect: `
|
||||
DURATION(2) WHEN(4)
|
||||
1d10h5m1s 3/1/2014
|
||||
4h35m 2013-Feb-03
|
||||
33d12h 06/Jan/2008 15:04:05 -0700`,
|
||||
DURATION(2) WHEN(4)
|
||||
1d10h5m1s 3/1/2014
|
||||
4h35m 2013-Feb-03
|
||||
33d12h 06/Jan/2008 15:04:05 -0700`,
|
||||
},
|
||||
}
|
||||
|
||||
func TestPrinter(t *testing.T) {
|
||||
for _, testdata := range tests {
|
||||
testname := fmt.Sprintf("print-sortcol-%d-desc-%t-sortby-%s-mode-%d-usecolumns-%s",
|
||||
testdata.column, testdata.desc, testdata.sortby, testdata.mode, testdata.usecolstr)
|
||||
testname := fmt.Sprintf("print-%s-%d-desc-%t-sortby-%s-mode-%d-usecolumns-%s-numberize-%t",
|
||||
testdata.name, testdata.column, testdata.desc, testdata.sortby,
|
||||
testdata.mode, testdata.usecolstr, testdata.numberize)
|
||||
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
// replaces os.Stdout, but we ignore it
|
||||
var writer bytes.Buffer
|
||||
|
||||
// cmd flags
|
||||
conf := cfg.Config{
|
||||
SortByColumn: testdata.column,
|
||||
SortDescending: testdata.desc,
|
||||
SortMode: testdata.sortby,
|
||||
OutputMode: testdata.mode,
|
||||
NoNumbering: testdata.nonum,
|
||||
Numbering: testdata.numberize,
|
||||
UseColumns: testdata.usecol,
|
||||
NoColor: true,
|
||||
OFS: " ",
|
||||
}
|
||||
|
||||
if conf.OutputMode == cfg.CSV {
|
||||
conf.OFS = ","
|
||||
}
|
||||
|
||||
if testdata.column > 0 {
|
||||
conf.UseSortByColumn = []int{testdata.column}
|
||||
}
|
||||
|
||||
conf.Separator = cfg.SeparatorTemplates[":default:"]
|
||||
conf.ApplyDefaults()
|
||||
|
||||
// the test checks the len!
|
||||
@@ -284,10 +309,7 @@ func TestPrinter(t *testing.T) {
|
||||
|
||||
got := strings.TrimSpace(writer.String())
|
||||
|
||||
if got != exp {
|
||||
t.Errorf("not rendered correctly:\n+++ got:\n%s\n+++ want:\n%s",
|
||||
got, exp)
|
||||
}
|
||||
assert.EqualValues(t, exp, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
41
lib/sort.go
41
lib/sort.go
@@ -18,6 +18,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
package lib
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
@@ -27,34 +28,41 @@ import (
|
||||
)
|
||||
|
||||
func sortTable(conf cfg.Config, data *Tabdata) {
|
||||
if conf.SortByColumn <= 0 {
|
||||
if len(conf.UseSortByColumn) == 0 {
|
||||
// no sorting wanted
|
||||
return
|
||||
}
|
||||
|
||||
// slightly modified here to match internal array indicies
|
||||
col := conf.SortByColumn
|
||||
|
||||
col-- // ui starts counting by 1, but use 0 internally
|
||||
|
||||
// sanity checks
|
||||
if len(data.entries) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
if col >= len(data.headers) {
|
||||
// fall back to default column
|
||||
col = 0
|
||||
}
|
||||
|
||||
// actual sorting
|
||||
sort.SliceStable(data.entries, func(i, j int) bool {
|
||||
return compare(&conf, data.entries[i][col], data.entries[j][col])
|
||||
// holds the result of a sort of one column
|
||||
comparators := []int{}
|
||||
|
||||
// iterate over all columns to be sorted, conf.SortMode must be identical!
|
||||
for _, column := range conf.UseSortByColumn {
|
||||
comparators = append(comparators, compare(&conf, data.entries[i][column-1], data.entries[j][column-1]))
|
||||
}
|
||||
|
||||
// return the combined result
|
||||
res := cmp.Or(comparators...)
|
||||
|
||||
switch res {
|
||||
case 0:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
|
||||
})
|
||||
}
|
||||
|
||||
// config is not modified here, but it would be inefficient to copy it every loop
|
||||
func compare(conf *cfg.Config, left string, right string) bool {
|
||||
func compare(conf *cfg.Config, left string, right string) int {
|
||||
var comp bool
|
||||
|
||||
switch conf.SortMode {
|
||||
@@ -88,7 +96,12 @@ func compare(conf *cfg.Config, left string, right string) bool {
|
||||
comp = !comp
|
||||
}
|
||||
|
||||
return comp
|
||||
switch comp {
|
||||
case true:
|
||||
return 0
|
||||
default:
|
||||
return 1
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
@@ -41,9 +42,7 @@ func TestDuration2Seconds(t *testing.T) {
|
||||
testname := fmt.Sprintf("duration-%s", testdata.dur)
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
seconds := duration2int(testdata.dur)
|
||||
if seconds != testdata.expect {
|
||||
t.Errorf("got %d, want %d", seconds, testdata.expect)
|
||||
}
|
||||
assert.EqualValues(t, testdata.expect, seconds)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -53,18 +52,18 @@ func TestCompare(t *testing.T) {
|
||||
mode string
|
||||
a string
|
||||
b string
|
||||
want bool
|
||||
want int
|
||||
desc bool
|
||||
}{
|
||||
// ascending
|
||||
{"numeric", "10", "20", true, false},
|
||||
{"duration", "2d4h5m", "45m", false, false},
|
||||
{"time", "12/24/2022", "1/1/1970", false, false},
|
||||
{"numeric", "10", "20", 0, false},
|
||||
{"duration", "2d4h5m", "45m", 1, false},
|
||||
{"time", "12/24/2022", "1/1/1970", 1, false},
|
||||
|
||||
// descending
|
||||
{"numeric", "10", "20", false, true},
|
||||
{"duration", "2d4h5m", "45m", true, true},
|
||||
{"time", "12/24/2022", "1/1/1970", true, true},
|
||||
{"numeric", "10", "20", 1, true},
|
||||
{"duration", "2d4h5m", "45m", 0, true},
|
||||
{"time", "12/24/2022", "1/1/1970", 0, true},
|
||||
}
|
||||
|
||||
for _, testdata := range tests {
|
||||
@@ -74,9 +73,7 @@ func TestCompare(t *testing.T) {
|
||||
t.Run(testname, func(t *testing.T) {
|
||||
c := cfg.Config{SortMode: testdata.mode, SortDescending: testdata.desc}
|
||||
got := compare(&c, testdata.a, testdata.b)
|
||||
if got != testdata.want {
|
||||
t.Errorf("got %t, want %t", got, testdata.want)
|
||||
}
|
||||
assert.EqualValues(t, testdata.want, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
524
lib/tableeditor.go
Normal file
524
lib/tableeditor.go
Normal file
@@ -0,0 +1,524 @@
|
||||
/*
|
||||
Copyright © 2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
package lib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
tea "github.com/charmbracelet/bubbletea"
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/evertras/bubble-table/table"
|
||||
"github.com/mattn/go-isatty"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
// The context exists outside of the bubble loop, and is being used as
|
||||
// pointer reciever. That way we can use it as our primary storage
|
||||
// container.
|
||||
type Context struct {
|
||||
selectedColumn int
|
||||
showHelp bool
|
||||
descending bool
|
||||
data *Tabdata
|
||||
|
||||
// Window dimensions
|
||||
totalWidth int
|
||||
totalHeight int
|
||||
|
||||
// Table dimensions
|
||||
horizontalMargin int
|
||||
verticalMargin int
|
||||
}
|
||||
|
||||
// Execute tablizer sort function, feed it with fresh config, we do
|
||||
// NOT use the existing runtime config, because sorting is
|
||||
// configurable in the UI separately.
|
||||
func (ctx *Context) Sort(mode string) {
|
||||
conf := cfg.Config{
|
||||
SortMode: mode,
|
||||
SortDescending: ctx.descending,
|
||||
UseSortByColumn: []int{ctx.selectedColumn + 1},
|
||||
}
|
||||
|
||||
ctx.descending = !ctx.descending
|
||||
|
||||
sortTable(conf, ctx.data)
|
||||
}
|
||||
|
||||
// The actual table model, holds the context pointer, a copy of the
|
||||
// pre-processed data and some flags
|
||||
type FilterTable struct {
|
||||
Table table.Model
|
||||
|
||||
Rows int
|
||||
|
||||
quitting bool
|
||||
unchanged bool
|
||||
|
||||
maxColumns int
|
||||
headerIdx map[string]int
|
||||
|
||||
ctx *Context
|
||||
|
||||
columns []table.Column
|
||||
}
|
||||
|
||||
type HelpLine []string
|
||||
type HelpColumn []HelpLine
|
||||
|
||||
const (
|
||||
// header+footer
|
||||
ExtraRows = 5
|
||||
|
||||
HelpFooter = "?:help | "
|
||||
)
|
||||
|
||||
var (
	// we use our own custom border style
	customBorder = table.Border{
		Top:    "─",
		Left:   "│",
		Right:  "│",
		Bottom: "─",

		TopRight:    "╮",
		TopLeft:     "╭",
		BottomRight: "╯",
		BottomLeft:  "╰",

		TopJunction:    "┬",
		LeftJunction:   "├",
		RightJunction:  "┤",
		BottomJunction: "┴",
		InnerJunction:  "┼",

		InnerDivider: "│",
	}

	// Cells in selected columns will be highlighted
	StyleSelected = lipgloss.NewStyle().
			Background(lipgloss.Color("#696969")).
			Foreground(lipgloss.Color("#ffffff")).
			Align(lipgloss.Left)

	// header cells: bold, orange-red
	StyleHeader = lipgloss.NewStyle().
			Foreground(lipgloss.Color("#ff4500")).
			Align(lipgloss.Left).Bold(true)

	// help buffer styles
	StyleKey  = lipgloss.NewStyle().Bold(true)
	StyleHelp = lipgloss.NewStyle().Foreground(lipgloss.Color("#ff4500"))

	// the default style
	NoStyle = lipgloss.NewStyle().Align(lipgloss.Left)

	// HelpData describes the help buffer: each HelpColumn is rendered
	// side by side, each HelpLine is a key/description pair.
	HelpData = []HelpColumn{
		{
			HelpLine{"up", "navigate up"},
			HelpLine{"down", "navigate down"},
			HelpLine{"tab", "navigate columns"},
		},
		{
			HelpLine{"s", "sort alpha-numerically"},
			HelpLine{"n", "sort numerically"},
			HelpLine{"t", "sort by time"},
			HelpLine{"d", "sort by duration"},
		},
		{
			HelpLine{"spc", "[de]select a row"},
			HelpLine{"a", "[de]select all visible rows"},
			HelpLine{"f", "enter fuzzy filter"},
			HelpLine{"esc", "finish filter input"},
		},
		{
			HelpLine{"?", "show help buffer"},
			HelpLine{"q", "commit and quit"},
			HelpLine{"c-c", "discard and quit"},
		},
	}

	// rendered from HelpData above (by generateHelp)
	Help = ""

	// number of terminal lines taken by the help buffer below the
	// table, maintained by generateHelp
	HelpRows = 0
)
|
||||
|
||||
// generate a lipgloss styled help buffer consisting of various
|
||||
// columns
|
||||
func generateHelp() {
|
||||
help := strings.Builder{}
|
||||
helpcols := []string{}
|
||||
maxrows := 0
|
||||
|
||||
for _, col := range HelpData {
|
||||
help.Reset()
|
||||
|
||||
// determine max key width to avoid excess spaces between keys and help
|
||||
keylen := 0
|
||||
for _, line := range col {
|
||||
if len(line[0]) > keylen {
|
||||
keylen = len(line[0])
|
||||
}
|
||||
}
|
||||
|
||||
keylenstr := fmt.Sprintf("%d", keylen)
|
||||
|
||||
for _, line := range col {
|
||||
// 0: key, 1: help text
|
||||
help.WriteString(StyleKey.Render(fmt.Sprintf("%-"+keylenstr+"s", line[0])))
|
||||
help.WriteString(" " + StyleHelp.Render(line[1]) + " \n")
|
||||
}
|
||||
|
||||
helpcols = append(helpcols, help.String())
|
||||
|
||||
if len(col) > maxrows {
|
||||
maxrows = len(col)
|
||||
}
|
||||
}
|
||||
|
||||
HelpRows = maxrows + 1
|
||||
Help = "\n" + lipgloss.JoinHorizontal(lipgloss.Top, helpcols...)
|
||||
}
|
||||
|
||||
// initializes the table model
|
||||
func NewModel(data *Tabdata, ctx *Context) FilterTable {
|
||||
columns := make([]table.Column, len(data.headers))
|
||||
lengths := make([]int, len(data.headers))
|
||||
hidx := make(map[string]int, len(data.headers))
|
||||
|
||||
// give columns at least the header width
|
||||
for idx, header := range data.headers {
|
||||
lengths[idx] = len(header)
|
||||
hidx[strings.ToLower(header)] = idx
|
||||
}
|
||||
|
||||
// determine max width per column
|
||||
for _, entry := range data.entries {
|
||||
for i, cell := range entry {
|
||||
if len(cell) > lengths[i] {
|
||||
lengths[i] = len(cell)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// determine flexFactor with base 10, used by flexColumns
|
||||
for i, len := range lengths {
|
||||
if len <= 10 {
|
||||
lengths[i] = 1
|
||||
} else {
|
||||
lengths[i] = len / 10
|
||||
}
|
||||
}
|
||||
|
||||
// setup column data with flexColumns
|
||||
for idx, header := range data.headers {
|
||||
columns[idx] = table.NewFlexColumn(
|
||||
strings.ToLower(header),
|
||||
StyleHeader.Render(header),
|
||||
lengths[idx]).WithFiltered(true).WithStyle(NoStyle)
|
||||
}
|
||||
|
||||
// separate variable so we can share the row filling code
|
||||
filtertbl := FilterTable{
|
||||
maxColumns: len(data.headers),
|
||||
Rows: len(data.entries),
|
||||
headerIdx: hidx,
|
||||
ctx: ctx,
|
||||
columns: columns,
|
||||
}
|
||||
|
||||
filtertbl.Table = table.New(columns)
|
||||
filtertbl.fillRows()
|
||||
|
||||
// finally construct help buffer
|
||||
generateHelp()
|
||||
|
||||
return filtertbl
|
||||
}
|
||||
|
||||
// Applied to every cell on every change (TAB,up,down key, resize
|
||||
// event etc)
|
||||
func CellController(input table.StyledCellFuncInput, m FilterTable) lipgloss.Style {
|
||||
if m.headerIdx[input.Column.Key()] == m.ctx.selectedColumn {
|
||||
return StyleSelected
|
||||
}
|
||||
|
||||
return NoStyle
|
||||
}
|
||||
|
||||
// Selects or deselects ALL rows
|
||||
func (m *FilterTable) ToggleAllSelected() {
|
||||
rows := m.Table.GetVisibleRows()
|
||||
selected := m.Table.SelectedRows()
|
||||
|
||||
if len(selected) > 0 {
|
||||
for i, row := range selected {
|
||||
rows[i] = row.Selected(false)
|
||||
}
|
||||
} else {
|
||||
for i, row := range rows {
|
||||
rows[i] = row.Selected(true)
|
||||
}
|
||||
}
|
||||
|
||||
m.Table.WithRows(rows)
|
||||
}
|
||||
|
||||
// ? pressed, display help message
func (m FilterTable) ToggleHelp() {
	// value receiver is fine here: the flag lives behind the shared
	// ctx pointer, so the change is visible to all model copies
	m.ctx.showHelp = !m.ctx.showHelp
}
|
||||
|
||||
// Init is part of the bubbletea Model interface; there is no initial
// command to run.
func (m FilterTable) Init() tea.Cmd {
	return nil
}
|
||||
|
||||
// Forward call to context sort
func (m *FilterTable) Sort(mode string) {
	// re-sort the underlying data in place, then rebuild the rows so
	// the table reflects the new order
	m.ctx.Sort(mode)
	m.fillRows()
}
|
||||
|
||||
// Fills the table rows with our data. Called once on startup and
// repeatedly if the user changes the sort order in some way
func (m *FilterTable) fillRows() {
	// required to be able to feed the model to the controller
	controllerWrapper := func(input table.StyledCellFuncInput) lipgloss.Style {
		return CellController(input, *m)
	}

	// fill the rows with style
	rows := make([]table.Row, len(m.ctx.data.entries))
	for idx, entry := range m.ctx.data.entries {
		rowdata := make(table.RowData, len(entry))

		for i, cell := range entry {
			// key cells by lowercased header name, matching the
			// column keys set up in NewModel; the trailing space adds
			// a little visual padding between columns
			rowdata[strings.ToLower(m.ctx.data.headers[i])] =
				table.NewStyledCellWithStyleFunc(cell+" ", controllerWrapper)
		}

		rows[idx] = table.NewRow(rowdata)
	}

	// (re)configure the table: fuzzy filtering, selectable rows and
	// our custom border; WithRows replaces any previous row set
	m.Table = m.Table.
		WithRows(rows).
		Filtered(true).
		WithFuzzyFilter().
		Focused(true).
		SelectableRows(true).
		WithSelectedText(" ", "✓").
		WithFooterVisibility(true).
		WithHeaderVisibility(true).
		HighlightStyle(StyleSelected).
		Border(customBorder)
}
|
||||
|
||||
// Part of the bubbletea event loop, called every tick
func (m FilterTable) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	var (
		cmd  tea.Cmd
		cmds []tea.Cmd
	)

	// let the embedded table handle the message first (cursor
	// movement, space selection, filter input, ...)
	m.Table, cmd = m.Table.Update(msg)
	cmds = append(cmds, cmd)

	// If the user is about to enter filter text, do NOT respond to
	// key bindings, as they might be part of the filter!
	if !m.Table.GetIsFilterInputFocused() {
		switch msg := msg.(type) {
		case tea.KeyMsg:
			switch msg.String() {
			case "q":
				// commit: keep the (possibly modified) selection
				m.quitting = true
				m.unchanged = false
				cmds = append(cmds, tea.Quit)

			case "ctrl+c":
				// discard: caller gets the input data back untouched
				m.quitting = true
				m.unchanged = true
				cmds = append(cmds, tea.Quit)

			case "a":
				m.ToggleAllSelected()

			case "tab":
				m.SelectNextColumn()

			case "?":
				// toggling help changes the vertical space available
				// to the table, so dimensions must be recomputed
				m.ToggleHelp()
				m.recalculateTable()

			case "s":
				m.Sort("alphanumeric")

			case "n":
				m.Sort("numeric")

			case "d":
				m.Sort("duration")

			case "t":
				m.Sort("time")
			}
		}
	}

	// Happens when the terminal window has been resized
	switch msg := msg.(type) {
	case tea.WindowSizeMsg:
		m.ctx.totalWidth = msg.Width
		m.ctx.totalHeight = msg.Height

		m.recalculateTable()
	}

	m.updateFooter()

	return m, tea.Batch(cmds...)
}
|
||||
|
||||
// Add some info to the footer
|
||||
func (m *FilterTable) updateFooter() {
|
||||
selected := m.Table.SelectedRows()
|
||||
footer := fmt.Sprintf("selected: %d ", len(selected))
|
||||
|
||||
if m.Table.GetIsFilterInputFocused() {
|
||||
footer = fmt.Sprintf("/%s %s", m.Table.GetCurrentFilter(), footer)
|
||||
} else if m.Table.GetIsFilterActive() {
|
||||
footer = fmt.Sprintf("Filter: %s %s", m.Table.GetCurrentFilter(), footer)
|
||||
}
|
||||
|
||||
m.Table = m.Table.WithStaticFooter(HelpFooter + footer)
|
||||
}
|
||||
|
||||
// Called on resize event (or if help has been toggled)
|
||||
func (m *FilterTable) recalculateTable() {
|
||||
m.Table = m.Table.
|
||||
WithTargetWidth(m.calculateWidth()).
|
||||
WithMinimumHeight(m.calculateHeight()).
|
||||
WithPageSize(m.calculateHeight() - ExtraRows)
|
||||
}
|
||||
|
||||
// calculateWidth returns the usable table width: terminal width
// minus the configured horizontal margin.
func (m *FilterTable) calculateWidth() int {
	return m.ctx.totalWidth - m.ctx.horizontalMargin
}
|
||||
|
||||
// Take help height into account, if enabled
func (m *FilterTable) calculateHeight() int {
	// desired height: all data rows plus header/footer decoration
	height := m.Rows + ExtraRows

	if height >= m.ctx.totalHeight {
		// table is taller than the terminal: clamp to the terminal
		// height minus the vertical margin
		height = m.ctx.totalHeight - m.ctx.verticalMargin
	} else {
		// NOTE(review): a table smaller than the terminal is
		// stretched to the full terminal height rather than shrunk to
		// its content — presumably to keep the footer at the bottom;
		// confirm this is intended
		height = m.ctx.totalHeight
	}

	if m.ctx.showHelp {
		// reserve space for the help buffer rendered below the table
		height = height - HelpRows
	}

	return height
}
|
||||
|
||||
// Part of the bubbletable event view, called every tick
|
||||
func (m FilterTable) View() string {
|
||||
body := strings.Builder{}
|
||||
|
||||
if !m.quitting {
|
||||
body.WriteString(m.Table.View())
|
||||
|
||||
if m.ctx.showHelp {
|
||||
body.WriteString(Help)
|
||||
}
|
||||
}
|
||||
|
||||
return body.String()
|
||||
}
|
||||
|
||||
// User hit the TAB key
|
||||
func (m *FilterTable) SelectNextColumn() {
|
||||
if m.ctx.selectedColumn == m.maxColumns-1 {
|
||||
m.ctx.selectedColumn = 0
|
||||
} else {
|
||||
m.ctx.selectedColumn++
|
||||
}
|
||||
}
|
||||
|
||||
// entry point from outside tablizer into table editor
//
// NOTE(review): the conf parameter is currently unused here.
func tableEditor(conf *cfg.Config, data *Tabdata) (*Tabdata, error) {
	// we render to STDERR to avoid dead lock when the user redirects STDOUT
	// see https://github.com/charmbracelet/bubbletea/issues/860
	//
	// TODO: doesn't work with libgloss v2 anymore!

	out := os.Stderr

	// if STDOUT is a terminal there is no redirection going on, so it
	// is safe to render there directly
	if isatty.IsTerminal(os.Stdout.Fd()) {
		out = os.Stdout
	}

	lipgloss.SetDefaultRenderer(lipgloss.NewRenderer(out))

	// shared UI state, handed by pointer into the model
	ctx := &Context{data: data}

	// Output to STDERR because there's a known bubbletea/lipgloss
	// issue: if a program with a tui is expected to write something
	// to STDOUT when the tui is finished, then the styles do not
	// work. So we write to STDERR (which works) and tablizer can
	// still be used inside pipes.
	program := tea.NewProgram(
		NewModel(data, ctx),
		tea.WithOutput(out),
		tea.WithAltScreen())

	m, err := program.Run()

	if err != nil {
		return nil, err
	}

	// ctrl+c: hand the input back unmodified
	if m.(FilterTable).unchanged {
		return data, err
	}

	// Data has been modified. Extract it, put it back into our own
	// structure and give control back to cmdline tablizer.
	filteredtable := m.(FilterTable)

	// only the rows selected in the UI survive
	data.entries = make([][]string, len(filteredtable.Table.SelectedRows()))
	for pos, row := range m.(FilterTable).Table.SelectedRows() {
		entry := make([]string, len(data.headers))
		for idx, field := range data.headers {
			// cells are keyed by lowercased header name (see fillRows)
			cell := row.Data[strings.ToLower(field)]
			// cells were stored as styled cells in fillRows, but be
			// lenient and accept plain strings too
			switch value := cell.(type) {
			case string:
				entry[idx] = value
			case table.StyledCell:
				entry[idx] = value.Data.(string)
			}
		}

		data.entries[pos] = entry
	}

	return data, err
}
|
||||
51
lib/yank.go
Normal file
51
lib/yank.go
Normal file
@@ -0,0 +1,51 @@
|
||||
/*
|
||||
Copyright © 2022-2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
package lib
|
||||
|
||||
import (
|
||||
"log"
|
||||
"strings"
|
||||
|
||||
"github.com/tiagomelo/go-clipboard/clipboard"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
func yankColumns(conf cfg.Config, data *Tabdata) {
|
||||
var yank []string
|
||||
|
||||
if len(data.entries) == 0 || len(conf.UseYankColumns) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
for _, row := range data.entries {
|
||||
for i, field := range row {
|
||||
for _, idx := range conf.UseYankColumns {
|
||||
if i == idx-1 {
|
||||
yank = append(yank, field)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(yank) > 0 {
|
||||
cb := clipboard.New(clipboard.ClipboardOptions{Primary: true})
|
||||
if err := cb.CopyText(strings.Join(yank, " ")); err != nil {
|
||||
log.Fatalln("error writing string to clipboard:", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
68
lib/yank_test.go
Normal file
68
lib/yank_test.go
Normal file
@@ -0,0 +1,68 @@
|
||||
/*
|
||||
Copyright © 2025 Thomas von Dein
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
package lib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/tiagomelo/go-clipboard/clipboard"
|
||||
"github.com/tlinden/tablizer/cfg"
|
||||
)
|
||||
|
||||
// Table-driven cases for the (currently disabled) yank test below.
var yanktests = []struct {
	name   string
	yank   []int // -y$colum,$column... after processing
	filter string
	expect string // expected clipboard content; NOTE(review): unset in the case below
}{
	{
		name:   "one",
		yank:   []int{1},
		filter: "beta",
	},
}
|
||||
|
||||
// DISABLED_TestYankColumns exercises column yanking end-to-end via
// the system clipboard. The DISABLED_ prefix keeps `go test` from
// picking it up — presumably because no clipboard is available in
// CI; confirm before re-enabling.
func DISABLED_TestYankColumns(t *testing.T) {
	cb := clipboard.New()

	for _, testdata := range yanktests {
		testname := fmt.Sprintf("yank-%s-filter-%s",
			testdata.name, testdata.filter)
		t.Run(testname, func(t *testing.T) {
			conf := cfg.Config{
				OutputMode:     cfg.ASCII,
				UseYankColumns: testdata.yank,
				NoColor:        true,
			}

			conf.ApplyDefaults()
			data := newData() // defined in printer_test.go, reused here

			// printing triggers the yank as a side effect
			var writer bytes.Buffer
			printData(&writer, conf, &data)

			// read back what ended up on the clipboard
			got, err := cb.PasteText()

			assert.NoError(t, err)
			assert.EqualValues(t, testdata.expect, got)
		})
	}
}
|
||||
10
main.go
10
main.go
@@ -18,9 +18,17 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/tlinden/tablizer/cmd"
|
||||
)
|
||||
|
||||
func main() {
|
||||
cmd.Execute()
|
||||
os.Exit(Main())
|
||||
}
|
||||
|
||||
func Main() int {
|
||||
cmd.Execute()
|
||||
|
||||
return 0 // cmd takes care of exit 1 itself
|
||||
}
|
||||
|
||||
19
main_test.go
Normal file
19
main_test.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/rogpeppe/go-internal/testscript"
|
||||
)
|
||||
|
||||
// TestMain registers the tablizer binary with testscript so the
// txtar scripts in t/ can invoke it via `exec tablizer ...`.
func TestMain(m *testing.M) {
	testscript.Main(m, map[string]func(){
		"tablizer": main,
	})
}
|
||||
|
||||
// TestTablizer runs all txtar test scripts found in the t/ directory.
func TestTablizer(t *testing.T) {
	testscript.Run(t, testscript.Params{
		Dir: "t",
	})
}
|
||||
9
mkrel.sh
9
mkrel.sh
@@ -42,8 +42,15 @@ for D in $DIST; do
|
||||
binfile="releases/${tool}-${os}-${arch}-${version}"
|
||||
tardir="${tool}-${os}-${arch}-${version}"
|
||||
tarfile="releases/${tool}-${os}-${arch}-${version}.tar.gz"
|
||||
pie=""
|
||||
|
||||
if test "$D" = "linux/amd64"; then
|
||||
pie="-buildmode=pie"
|
||||
fi
|
||||
|
||||
set -x
|
||||
GOOS=${os} GOARCH=${arch} go build -o ${binfile} -ldflags "-X 'github.com/tlinden/tablizer/cfg.VERSION=${version}'"
|
||||
GOOS=${os} GOARCH=${arch} go build -tags osusergo,netgo -ldflags "-extldflags=-static -w -X 'github.com/tlinden/tablizer/cfg.VERSION=${version}'" --trimpath $pie -o ${binfile}
|
||||
strip --strip-all ${binfile}
|
||||
mkdir -p ${tardir}
|
||||
cp ${binfile} README.md LICENSE ${tardir}/
|
||||
echo 'tool = tablizer
|
||||
|
||||
42
t/test-basics.txtar
Normal file
42
t/test-basics.txtar
Normal file
@@ -0,0 +1,42 @@
|
||||
# usage
|
||||
exec tablizer --help
|
||||
stdout Usage
|
||||
|
||||
exec tablizer -h
|
||||
stdout show
|
||||
|
||||
# version
|
||||
exec tablizer -V
|
||||
stdout version
|
||||
|
||||
# completion
|
||||
exec tablizer --completion bash
|
||||
stdout __tablizer_init_completion
|
||||
|
||||
# use config (configures colors, but these are not being used, since
|
||||
# this env doesn't support it, but at least it should succeed.
|
||||
exec tablizer -f config.hcl -r testtable.txt Runn
|
||||
stdout Runn
|
||||
|
||||
|
||||
|
||||
# will be automatically created in work dir
|
||||
-- testtable.txt --
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
alertmanager-kube-prometheus-alertmanager-0 2/2 Running 35 (45m ago) 11d
|
||||
grafana-fcc54cbc9-bk7s8 1/1 Running 17 (45m ago) 1d
|
||||
kube-prometheus-blackbox-exporter-5d85b5d8f4-tskh7 1/1 Running 17 (45m ago) 1h44m
|
||||
kube-prometheus-kube-state-metrics-b4cd9487-75p7f 1/1 Running 20 (45m ago) 45m
|
||||
kube-prometheus-node-exporter-bfzpl 1/1 Running 17 (45m ago) 54s
|
||||
|
||||
|
||||
-- config.hcl --
|
||||
BG = "lightGreen"
|
||||
FG = "white"
|
||||
HighlightBG = "lightGreen"
|
||||
HighlightFG = "white"
|
||||
NoHighlightBG = "white"
|
||||
NoHighlightFG = "lightGreen"
|
||||
HighlightHdrBG = "red"
|
||||
HighlightHdrFG = "white"
|
||||
|
||||
26
t/test-csv.txtar
Normal file
26
t/test-csv.txtar
Normal file
@@ -0,0 +1,26 @@
|
||||
# reading from file and matching with lowercase words
|
||||
exec tablizer -c name,status -r testtable.csv -s,
|
||||
stdout grafana.*Runn
|
||||
|
||||
# matching mixed case
|
||||
exec tablizer -c NAME,staTUS -r testtable.csv -s,
|
||||
stdout grafana.*Runn
|
||||
|
||||
# matching using numbers
|
||||
exec tablizer -c 1,3 -r testtable.csv -s,
|
||||
stdout grafana.*Runn
|
||||
|
||||
# matching using regex
|
||||
exec tablizer -c 'na.*,stat.' -r testtable.csv -s,
|
||||
stdout grafana.*Runn
|
||||
|
||||
|
||||
# will be automatically created in work dir
|
||||
-- testtable.csv --
|
||||
NAME,READY,STATUS,RESTARTS,AGE
|
||||
alertmanager-kube-prometheus-alertmanager-0,2/2,Running,35 (45m ago),11d
|
||||
grafana-fcc54cbc9-bk7s8,1/1,Running,17 (45m ago),1d
|
||||
kube-prometheus-blackbox-exporter-5d85b5d8f4-tskh7,1/1,Running,17 (45m ago),1h44m
|
||||
kube-prometheus-kube-state-metrics-b4cd9487-75p7f,1/1,Running,20 (45m ago),45m
|
||||
kube-prometheus-node-exporter-bfzpl,1/1,Running,17 (45m ago),54s
|
||||
|
||||
21
t/test-filtering.txtar
Normal file
21
t/test-filtering.txtar
Normal file
@@ -0,0 +1,21 @@
|
||||
# filtering
|
||||
exec tablizer -r testtable.txt -F name=grafana
|
||||
stdout grafana.*Runn
|
||||
|
||||
# filtering two columns
|
||||
exec tablizer -r testtable.txt -F name=prometh -F age=1h
|
||||
stdout blackbox.*Runn
|
||||
|
||||
# filtering two same columns
|
||||
exec tablizer -r testtable.txt -F name=prometh -F name=alert
|
||||
stdout prometheus-alertmanager.*Runn
|
||||
|
||||
|
||||
# will be automatically created in work dir
|
||||
-- testtable.txt --
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
alertmanager-kube-prometheus-alertmanager-0 2/2 Running 35 (45m ago) 11d
|
||||
grafana-fcc54cbc9-bk7s8 1/1 Running 17 (45m ago) 1d
|
||||
kube-prometheus-blackbox-exporter-5d85b5d8f4-tskh7 1/1 Running 17 (45m ago) 1h44m
|
||||
kube-prometheus-kube-state-metrics-b4cd9487-75p7f 1/1 Running 20 (45m ago) 45m
|
||||
kube-prometheus-node-exporter-bfzpl 1/1 Running 17 (45m ago) 54s
|
||||
25
t/test-headermatching.txtar
Normal file
25
t/test-headermatching.txtar
Normal file
@@ -0,0 +1,25 @@
|
||||
# reading from file and matching with lowercase words
|
||||
exec tablizer -c name,status -r testtable.txt
|
||||
stdout grafana.*Runn
|
||||
|
||||
# matching mixed case
|
||||
exec tablizer -c NAME,staTUS -r testtable.txt
|
||||
stdout grafana.*Runn
|
||||
|
||||
# matching using numbers
|
||||
exec tablizer -c 1,3 -r testtable.txt
|
||||
stdout grafana.*Runn
|
||||
|
||||
# matching using regex
|
||||
exec tablizer -c 'na.*,stat.' -r testtable.txt
|
||||
stdout grafana.*Runn
|
||||
|
||||
|
||||
# will be automatically created in work dir
|
||||
-- testtable.txt --
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
alertmanager-kube-prometheus-alertmanager-0 2/2 Running 35 (45m ago) 11d
|
||||
grafana-fcc54cbc9-bk7s8 1/1 Running 17 (45m ago) 1d
|
||||
kube-prometheus-blackbox-exporter-5d85b5d8f4-tskh7 1/1 Running 17 (45m ago) 1h44m
|
||||
kube-prometheus-kube-state-metrics-b4cd9487-75p7f 1/1 Running 20 (45m ago) 45m
|
||||
kube-prometheus-node-exporter-bfzpl 1/1 Running 17 (45m ago) 54s
|
||||
46
t/test-multipatterns.txtar
Normal file
46
t/test-multipatterns.txtar
Normal file
@@ -0,0 +1,46 @@
|
||||
# filtering
|
||||
|
||||
# a AND b
|
||||
exec tablizer -r testtable.txt -H -cspecies invasive imperium
|
||||
stdout 'namak'
|
||||
! stdout human
|
||||
|
||||
# a AND !b
|
||||
exec tablizer -r testtable.txt -H -cspecies invasive '/imperium/!'
|
||||
stdout 'human'
|
||||
! stdout namak
|
||||
|
||||
# a AND !b AND c
|
||||
exec tablizer -r testtable.txt -H -cspecies peaceful '/imperium/!' planetary
|
||||
stdout 'kenaha'
|
||||
! stdout 'namak|heduu|riedl'
|
||||
|
||||
# case insensitive
|
||||
exec tablizer -r testtable.txt -H -cspecies '/REGIONAL/i'
|
||||
stdout namak
|
||||
! stdout 'human|riedl|heduu|kenaa'
|
||||
|
||||
# case insensitive negated
|
||||
exec tablizer -r testtable.txt -H -cspecies '/REGIONAL/!i'
|
||||
stdout 'human|riedl|heduu|kenaa'
|
||||
! stdout namak
|
||||
|
||||
# !a AND !b
|
||||
exec tablizer -r testtable.txt -H -cspecies '/galactic/!' '/planetary/!'
|
||||
stdout namak
|
||||
! stdout 'human|riedl|heduu|kenaa'
|
||||
|
||||
# same case insensitive
|
||||
exec tablizer -r testtable.txt -H -cspecies '/GALACTIC/i!' '/PLANETARY/!i'
|
||||
stdout namak
|
||||
! stdout 'human|riedl|heduu|kenaa'
|
||||
|
||||
# will be automatically created in work dir
|
||||
-- testtable.txt --
|
||||
SPECIES TYPE HOME STAGE SPREAD
|
||||
human invasive earth brink planetary
|
||||
riedl peaceful keauna civilized pangalactic
|
||||
namak invasive namak imperium regional
|
||||
heduu peaceful iu imperium galactic
|
||||
kenaha peaceful kohi hunter-gatherer planetary
|
||||
|
||||
49
t/test-sort.txtar
Normal file
49
t/test-sort.txtar
Normal file
@@ -0,0 +1,49 @@
|
||||
# sort by name
|
||||
exec tablizer -r testtable.txt -k 1
|
||||
stdout '^alert.*\n^grafana.*\n^kube'
|
||||
|
||||
# sort by name reversed
|
||||
exec tablizer -r testtable.txt -k 1 -D
|
||||
stdout 'kube.*\n^grafana.*\n^alert'
|
||||
|
||||
# sort by starts numerically
|
||||
exec tablizer -r testtable.txt -k 4 -i -c4
|
||||
stdout '17\s*\n^20\s*\n^35'
|
||||
|
||||
# sort by starts numerically reversed
|
||||
exec tablizer -r testtable.txt -k 4 -i -c4 -D
|
||||
stdout '35\s*\n^20\s*\n^17'
|
||||
|
||||
# sort by age
|
||||
exec tablizer -r testtable.txt -k 5 -a
|
||||
stdout '45m\s*\n.*1h44m'
|
||||
|
||||
# sort by age reverse
|
||||
exec tablizer -r testtable.txt -k 5 -a -D
|
||||
stdout '1h44m\s*\n.*45m'
|
||||
|
||||
# sort by time
|
||||
exec tablizer -r timetable.txt -k 2 -t
|
||||
stdout '^sel.*\n^foo.*\nbar'
|
||||
|
||||
# sort by time reverse
|
||||
exec tablizer -r timetable.txt -k 2 -t -D
|
||||
stdout '^bar.*\n^foo.*\nsel'
|
||||
|
||||
|
||||
# will be automatically created in work dir
|
||||
-- testtable.txt --
|
||||
NAME READY STATUS STARTS AGE
|
||||
alertmanager-kube-prometheus-alertmanager-0 2/2 Running 35 11d
|
||||
kube-prometheus-blackbox-exporter-5d85b5d8f4-tskh7 1/1 Running 17 1h44m
|
||||
grafana-fcc54cbc9-bk7s8 1/1 Running 17 1d
|
||||
kube-prometheus-kube-state-metrics-b4cd9487-75p7f 1/1 Running 20 45m
|
||||
kube-prometheus-node-exporter-bfzpl 1/1 Running 17 54s
|
||||
|
||||
|
||||
|
||||
-- timetable.txt --
|
||||
NAME TIME
|
||||
foo 2024-11-18T12:00:00+01:00
|
||||
bar 2024-11-18T12:45:00+01:00
|
||||
sel 2024-07-18T12:00:00+01:00
|
||||
18
t/test-stdin.txtar
Normal file
18
t/test-stdin.txtar
Normal file
@@ -0,0 +1,18 @@
|
||||
# reading from stdin and matching with lowercase words
|
||||
stdin testtable.txt
|
||||
exec tablizer -c name,status
|
||||
stdout grafana.*Runn
|
||||
|
||||
# reading from -r stdin and matching with lowercase words
|
||||
stdin testtable.txt
|
||||
exec tablizer -c name,status -r -
|
||||
stdout grafana.*Runn
|
||||
|
||||
# will be automatically created in work dir
|
||||
-- testtable.txt --
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
alertmanager-kube-prometheus-alertmanager-0 2/2 Running 35 (45m ago) 11d
|
||||
grafana-fcc54cbc9-bk7s8 1/1 Running 17 (45m ago) 1d
|
||||
kube-prometheus-blackbox-exporter-5d85b5d8f4-tskh7 1/1 Running 17 (45m ago) 1h44m
|
||||
kube-prometheus-kube-state-metrics-b4cd9487-75p7f 1/1 Running 20 (45m ago) 45m
|
||||
kube-prometheus-node-exporter-bfzpl 1/1 Running 17 (45m ago) 54s
|
||||
21
t/test-transpose.txtar
Normal file
21
t/test-transpose.txtar
Normal file
@@ -0,0 +1,21 @@
|
||||
# transpose one field
|
||||
exec tablizer -r testtable.txt -T status -R '/Running/OK/'
|
||||
stdout grafana.*OK
|
||||
|
||||
# transpose two fields
|
||||
exec tablizer -r testtable.txt -T name,status -R '/alertmanager-//' -R '/Running/OK/'
|
||||
stdout prometheus-0.*OK
|
||||
|
||||
# transpose one field and show one column
|
||||
exec tablizer -r testtable.txt -T status -R '/Running/OK/' -c name
|
||||
! stdout grafana.*OK
|
||||
|
||||
|
||||
# will be automatically created in work dir
|
||||
-- testtable.txt --
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
alertmanager-kube-prometheus-alertmanager-0 2/2 Running 35 (45m ago) 11d
|
||||
grafana-fcc54cbc9-bk7s8 1/1 Running 17 (45m ago) 1d
|
||||
kube-prometheus-blackbox-exporter-5d85b5d8f4-tskh7 1/1 Running 17 (45m ago) 1h44m
|
||||
kube-prometheus-kube-state-metrics-b4cd9487-75p7f 1/1 Running 20 (45m ago) 45m
|
||||
kube-prometheus-node-exporter-bfzpl 1/1 Running 17 (45m ago) 54s
|
||||
45
t/test.sh
45
t/test.sh
@@ -1,45 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# simple commandline unit test script
|
||||
|
||||
t="../tablizer"
|
||||
fail=0
|
||||
|
||||
ex() {
|
||||
# execute a test, report+exit on error, stay silent otherwise
|
||||
log="/tmp/test-tablizer.$$.log"
|
||||
name=$1
|
||||
shift
|
||||
|
||||
echo -n "TEST $name "
|
||||
|
||||
$* > $log 2>&1
|
||||
|
||||
if test $? -ne 0; then
|
||||
echo "failed, see $log"
|
||||
fail=1
|
||||
else
|
||||
echo "ok"
|
||||
rm -f $log
|
||||
fi
|
||||
}
|
||||
|
||||
# only use files in test dir
|
||||
cd $(dirname $0)
|
||||
|
||||
echo "Executing commandline tests ..."
|
||||
|
||||
# io pattern tests
|
||||
ex io-pattern-and-file $t bk7 testtable
|
||||
cat testtable | ex io-pattern-and-stdin $t bk7
|
||||
cat testtable | ex io-pattern-and-stdin-dash $t bk7 -
|
||||
|
||||
# same w/o pattern
|
||||
ex io-just-file $t testtable
|
||||
cat testtable | ex io-just-stdin $t
|
||||
cat testtable | ex io-just-stdin-dash $t -
|
||||
|
||||
if test $fail -ne 0; then
|
||||
echo "!!! Some tests failed !!!"
|
||||
exit 1
|
||||
fi
|
||||
6
t/testtable.csv
Normal file
6
t/testtable.csv
Normal file
@@ -0,0 +1,6 @@
|
||||
NAME,DURATION
|
||||
x,10
|
||||
a,100
|
||||
z,0
|
||||
u,4
|
||||
k,6
|
||||
|
6
t/testtable3
Normal file
6
t/testtable3
Normal file
@@ -0,0 +1,6 @@
|
||||
NAME READY STATUS STARTS AGE
|
||||
alertmanager-kube-prometheus-alertmanager-0 2/2 Running 35 11d
|
||||
kube-prometheus-blackbox-exporter-5d85b5d8f4-tskh7 1/1 Running 17 1h44m
|
||||
grafana-fcc54cbc9-bk7s8 1/1 Running 17 1d
|
||||
kube-prometheus-kube-state-metrics-b4cd9487-75p7f 1/1 Running 20 45m
|
||||
kube-prometheus-node-exporter-bfzpl 1/1 Running 17 54s
|
||||
4
t/testtable4
Normal file
4
t/testtable4
Normal file
@@ -0,0 +1,4 @@
|
||||
ONE TWO
|
||||
1 4
|
||||
3 1
|
||||
5 2
|
||||
6
t/testtable5
Normal file
6
t/testtable5
Normal file
@@ -0,0 +1,6 @@
|
||||
SPECIES TYPE HOME STAGE
|
||||
human invasive earth brink
|
||||
riedl peaceful keauna civilized
|
||||
namak invasive namak imperium
|
||||
heduu peaceful iu imperium
|
||||
kenaha peaceful kohi hunter-gatherer
|
||||
299
tablizer.1
299
tablizer.1
@@ -133,7 +133,7 @@
|
||||
.\" ========================================================================
|
||||
.\"
|
||||
.IX Title "TABLIZER 1"
|
||||
.TH TABLIZER 1 "2024-05-07" "1" "User Commands"
|
||||
.TH TABLIZER 1 "2025-10-10" "1" "User Commands"
|
||||
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
|
||||
.\" way too many mistakes in technical documents.
|
||||
.if n .ad l
|
||||
@@ -144,42 +144,52 @@ tablizer \- Manipulate tabular output of other programs
|
||||
.IX Header "SYNOPSIS"
|
||||
.Vb 2
|
||||
\& Usage:
|
||||
\& tablizer [regex] [file, ...] [flags]
|
||||
\& tablizer [regex,...] [\-r file] [flags]
|
||||
\&
|
||||
\& Operational Flags:
|
||||
\& \-c, \-\-columns string Only show the specified columns (separated by ,)
|
||||
\& \-v, \-\-invert\-match select non\-matching rows
|
||||
\& \-n, \-\-no\-numbering Disable header numbering
|
||||
\& \-N, \-\-no\-color Disable pattern highlighting
|
||||
\& \-H, \-\-no\-headers Disable headers display
|
||||
\& \-s, \-\-separator string Custom field separator
|
||||
\& \-k, \-\-sort\-by int Sort by column (default: 1)
|
||||
\& \-z, \-\-fuzzy Use fuzzy search [experimental]
|
||||
\& \-F, \-\-filter field=reg Filter given field with regex, can be used multiple times
|
||||
\& \-c, \-\-columns string Only show the specified columns (separated by ,)
|
||||
\& \-v, \-\-invert\-match select non\-matching rows
|
||||
\& \-n, \-\-numbering Enable header numbering
|
||||
\& \-N, \-\-no\-color Disable pattern highlighting
|
||||
\& \-H, \-\-no\-headers Disable headers display
|
||||
\& \-s, \-\-separator <string> Custom field separator (maybe char, string or :class:)
|
||||
\& \-k, \-\-sort\-by <int|name> Sort by column (default: 1)
|
||||
\& \-z, \-\-fuzzy Use fuzzy search [experimental]
|
||||
\& \-F, \-\-filter <field[!]=reg> Filter given field with regex, can be used multiple times
|
||||
\& \-T, \-\-transpose\-columns string Transpose the specified columns (separated by ,)
|
||||
\& \-R, \-\-regex\-transposer </from/to/> Apply /search/replace/ regexp to fields given in \-T
|
||||
\& \-j, \-\-json Read JSON input (must be array of hashes)
|
||||
\& \-I, \-\-interactive Interactively filter and select rows
|
||||
\& \-\-auto\-headers Generate headers if there are none present in input
|
||||
\& \-\-custom\-headers a,b,... Use custom headers, separated by comma
|
||||
\&
|
||||
\& Output Flags (mutually exclusive):
|
||||
\& \-X, \-\-extended Enable extended output
|
||||
\& \-M, \-\-markdown Enable markdown table output
|
||||
\& \-O, \-\-orgtbl Enable org\-mode table output
|
||||
\& \-S, \-\-shell Enable shell evaluable output
|
||||
\& \-Y, \-\-yaml Enable yaml output
|
||||
\& \-C, \-\-csv Enable CSV output
|
||||
\& \-A, \-\-ascii Default output mode, ascii tabular
|
||||
\& \-L, \-\-hightlight\-lines Use alternating background colors for tables
|
||||
\& \-X, \-\-extended Enable extended output
|
||||
\& \-M, \-\-markdown Enable markdown table output
|
||||
\& \-O, \-\-orgtbl Enable org\-mode table output
|
||||
\& \-S, \-\-shell Enable shell evaluable output
|
||||
\& \-Y, \-\-yaml Enable yaml output
|
||||
\& \-C, \-\-csv Enable CSV output
|
||||
\& \-A, \-\-ascii Default output mode, ascii tabular
|
||||
\& \-L, \-\-hightlight\-lines Use alternating background colors for tables
|
||||
\& \-y, \-\-yank\-columns Yank specified columns (separated by ,) to clipboard,
|
||||
\& space separated
|
||||
\& \-\-ofs <char> Output field separator, used by \-A and \-C.
|
||||
\&
|
||||
\& Sort Mode Flags (mutually exclusive):
|
||||
\& \-a, \-\-sort\-age sort according to age (duration) string
|
||||
\& \-D, \-\-sort\-desc Sort in descending order (default: ascending)
|
||||
\& \-i, \-\-sort\-numeric sort according to string numerical value
|
||||
\& \-t, \-\-sort\-time sort according to time string
|
||||
\& \-a, \-\-sort\-age sort according to age (duration) string
|
||||
\& \-D, \-\-sort\-desc Sort in descending order (default: ascending)
|
||||
\& \-i, \-\-sort\-numeric sort according to string numerical value
|
||||
\& \-t, \-\-sort\-time sort according to time string
|
||||
\&
|
||||
\& Other Flags:
|
||||
\& \-\-completion <shell> Generate the autocompletion script for <shell>
|
||||
\& \-f, \-\-config <file> Configuration file (default: ~/.config/tablizer/config)
|
||||
\& \-d, \-\-debug Enable debugging
|
||||
\& \-h, \-\-help help for tablizer
|
||||
\& \-m, \-\-man Display manual page
|
||||
\& \-V, \-\-version Print program version
|
||||
\& \-r \-\-read\-file <file> Use <file> as input instead of STDIN
|
||||
\& \-\-completion <shell> Generate the autocompletion script for <shell>
|
||||
\& \-f, \-\-config <file> Configuration file (default: ~/.config/tablizer/config)
|
||||
\& \-d, \-\-debug Enable debugging
|
||||
\& \-h, \-\-help help for tablizer
|
||||
\& \-m, \-\-man Display manual page
|
||||
\& \-V, \-\-version Print program version
|
||||
.Ve
|
||||
.SH "DESCRIPTION"
|
||||
.IX Header "DESCRIPTION"
|
||||
@@ -211,17 +221,17 @@ pattern. Hence:
|
||||
\& kubectl get pods | tablizer
|
||||
\&
|
||||
\& # read a file
|
||||
\& tablizer filename
|
||||
\& tablizer \-r filename
|
||||
\&
|
||||
\& # search for pattern in a file (works like grep)
|
||||
\& tablizer regex filename
|
||||
\& tablizer regex \-r filename
|
||||
\&
|
||||
\& # search for pattern in STDIN
|
||||
\& kubectl get pods | tablizer regex
|
||||
.Ve
|
||||
.PP
|
||||
The output looks like the original one but every header field will
|
||||
have a numer associated with it, e.g.:
|
||||
The output looks like the original one. You can add the option \fB\-n\fR,
|
||||
then every header field will have a numer associated with it, e.g.:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& NAME(1) READY(2) STATUS(3) RESTARTS(4) AGE(5)
|
||||
@@ -237,7 +247,18 @@ columns you want to have in your output (see \s-1COLUMNS\s0:
|
||||
You can specify the numbers in any order but output will always follow
|
||||
the original order.
|
||||
.PP
|
||||
The numbering can be suppressed by using the \fB\-n\fR option.
|
||||
However, you may also just use the header names instead of numbers,
|
||||
eg:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& kubectl get pods | tablizer \-cname,status
|
||||
.Ve
|
||||
.PP
|
||||
You can also use regular expressions with \fB\-c\fR, eg:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& kubectl get pods | tablizer \-c \*(Aq[ae]\*(Aq
|
||||
.Ve
|
||||
.PP
|
||||
By default tablizer shows a header containing the names of each
|
||||
column. This can be disabled using the \fB\-H\fR option. Be aware that
|
||||
@@ -248,11 +269,20 @@ By default, if a \fBpattern\fR has been speficied, matches will be
|
||||
highlighted. You can disable this behavior with the \fB\-N\fR option.
|
||||
.PP
|
||||
Use the \fB\-k\fR option to specify by which column to sort the tabular
|
||||
data (as in \s-1GNU\s0 \fBsort\fR\|(1)). The default sort column is the first one. To
|
||||
disable sorting at all, supply 0 (Zero) to \-k. The default sort order
|
||||
is ascending. You can change this to descending order using the option
|
||||
\&\fB\-D\fR. The default sort order is by string, but there are other sort
|
||||
modes:
|
||||
data (as in \s-1GNU\s0 \fBsort\fR\|(1)). The default sort column is the first
|
||||
one. You can specify column numbers or names. Column numbers start
|
||||
with 1, names are case insensitive. You can specify multiple columns
|
||||
separated by comma to sort, but the type must be the same. For example
|
||||
if you want to sort numerically, all columns must be numbers. If you
|
||||
use column numbers, then be aware, that these are the numbers before
|
||||
column extraction. For example if you have a table with 4 columns and
|
||||
specify \f(CW\*(C`\-c4\*(C'\fR, then only 1 column (the fourth) will be printed,
|
||||
however if you want to sort by this column, you'll have to specify
|
||||
\&\f(CW\*(C`\-k4\*(C'\fR.
|
||||
.PP
|
||||
The default sort order is ascending. You can change this to
|
||||
descending order using the option \fB\-D\fR. The default sort order is by
|
||||
alphanumeric string, but there are other sort modes:
|
||||
.IP "\fB\-a \-\-sort\-age\fR" 4
|
||||
.IX Item "-a --sort-age"
|
||||
Sorts duration strings like \*(L"1d4h32m51s\*(R".
|
||||
@@ -265,40 +295,110 @@ Sorts timestamps.
|
||||
.PP
|
||||
Finally the \fB\-d\fR option enables debugging output which is mostly
|
||||
useful for the developer.
|
||||
.SS "\s-1SEPARATOR\s0"
|
||||
.IX Subsection "SEPARATOR"
|
||||
The option \fB\-s\fR can be a single character, in which case the \s-1CSV\s0
|
||||
parser will be invoked. You can also specify a string as
|
||||
separator. The string will be interpreted as literal string unless it
|
||||
is a valid go regular expression. For example:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& \-s \*(Aq\et{2,}\e\*(Aq
|
||||
.Ve
|
||||
.PP
|
||||
is being used as a regexp and will match two or more consecutive tabs.
|
||||
.PP
|
||||
.Vb 1
|
||||
\& \-s \*(Aqfoo\*(Aq
|
||||
.Ve
|
||||
.PP
|
||||
on the other hand is no regular expression and will be used literally.
|
||||
.PP
|
||||
To make live easier, there are a couple of predefined regular
|
||||
expressions, which you can specify as classes:
|
||||
.Sp
|
||||
.RS 4
|
||||
* :tab:
|
||||
.Sp
|
||||
Matches a tab and eats spaces around it.
|
||||
.Sp
|
||||
* :spaces:
|
||||
.Sp
|
||||
Matches 2 or more spaces.
|
||||
.Sp
|
||||
* :pipe:
|
||||
.Sp
|
||||
Matches a pipe character and eats spaces around it.
|
||||
.Sp
|
||||
* :default:
|
||||
.Sp
|
||||
Matches 2 or more spaces or tab. This is the default separator if none
|
||||
is specified.
|
||||
.Sp
|
||||
* :nonword:
|
||||
.Sp
|
||||
Matches a non-word character.
|
||||
.Sp
|
||||
* :nondigit:
|
||||
.Sp
|
||||
Matches a non-digit character.
|
||||
.Sp
|
||||
* :special:
|
||||
.Sp
|
||||
Matches one or more special chars like brackets, dollar sign, slashes etc.
|
||||
.Sp
|
||||
* :nonprint:
|
||||
.Sp
|
||||
Matches one or more non-printable characters.
|
||||
.RE
|
||||
.SS "\s-1PATTERNS AND FILTERING\s0"
|
||||
.IX Subsection "PATTERNS AND FILTERING"
|
||||
You can reduce the rows being displayed by using a regular expression
|
||||
pattern. The regexp is \s-1PCRE\s0 compatible, refer to the syntax cheat
|
||||
sheet here: <https://github.com/google/re2/wiki/Syntax>. If you want
|
||||
to read a more comprehensive documentation about the topic and have
|
||||
perl installed you can read it with:
|
||||
You can reduce the rows being displayed by using one or more regular
|
||||
expression patterns. The regexp language being used is the one of
|
||||
\&\s-1GOLANG,\s0 refer to the syntax cheat sheet here:
|
||||
<https://pkg.go.dev/regexp/syntax>.
|
||||
.PP
|
||||
If you want to read a more comprehensive documentation about the
|
||||
topic and have perl installed you can read it with:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& perldoc perlre
|
||||
.Ve
|
||||
.PP
|
||||
Or read it online: <https://perldoc.perl.org/perlre>.
|
||||
Or read it online: <https://perldoc.perl.org/perlre>. But please note
|
||||
that the \s-1GO\s0 regexp engine does \s-1NOT\s0 support all perl regex terms,
|
||||
especially look-ahead and look-behind.
|
||||
.PP
|
||||
A note on modifiers: the regexp engine used in tablizer uses another
|
||||
modifier syntax:
|
||||
If you want to supply flags to a regex, then surround it with slashes
|
||||
and append the flag. The following flags are supported:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& (?MODIFIER)
|
||||
.Vb 2
|
||||
\& i => case insensitive
|
||||
\& ! => negative match
|
||||
.Ve
|
||||
.PP
|
||||
The most important modifiers are:
|
||||
.PP
|
||||
\&\f(CW\*(C`i\*(C'\fR ignore case
|
||||
\&\f(CW\*(C`m\*(C'\fR multiline mode
|
||||
\&\f(CW\*(C`s\*(C'\fR single line mode
|
||||
.PP
|
||||
Example for a case insensitive search:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& kubectl get pods \-A | tablizer "(?i)account"
|
||||
\& kubectl get pods \-A | tablizer "/account/i"
|
||||
.Ve
|
||||
.PP
|
||||
You can use the experimental fuzzy search feature by providing the
|
||||
If you use the \f(CW\*(C`!\*(C'\fR flag, then the regex match will be negated, that
|
||||
is, if a line in the input matches the given regex, but \f(CW\*(C`!\*(C'\fR is
|
||||
supplied, tablizer will \s-1NOT\s0 include it in the output.
|
||||
.PP
|
||||
For example, here we want to get all lines matching \*(L"foo\*(R" but not
|
||||
\&\*(L"bar\*(R":
|
||||
.PP
|
||||
.Vb 1
|
||||
\& cat table | tablizer foo \*(Aq/bar/!\*(Aq
|
||||
.Ve
|
||||
.PP
|
||||
This would match a line \*(L"foo zorro\*(R" but not \*(L"foo bar\*(R".
|
||||
.PP
|
||||
The flags can also be combined.
|
||||
.PP
|
||||
You can also use the experimental fuzzy search feature by providing the
|
||||
option \fB\-z\fR, in which case the pattern is regarded as a fuzzy search
|
||||
term, not a regexp.
|
||||
.PP
|
||||
@@ -315,7 +415,27 @@ Fieldnames (== columns headers) are case insensitive.
|
||||
If you specify more than one filter, both filters have to match (\s-1AND\s0
|
||||
operation).
|
||||
.PP
|
||||
These field filters can also be negated:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& fieldname!=regexp
|
||||
.Ve
|
||||
.PP
|
||||
If the option \fB\-v\fR is specified, the filtering is inverted.
|
||||
.SS "\s-1INTERACTIVE FILTERING\s0"
|
||||
.IX Subsection "INTERACTIVE FILTERING"
|
||||
You can also use the interactive mode, enabled with \f(CW\*(C`\-I\*(C'\fR to filter
|
||||
and select rows. This mode is complementary, that is, other filter
|
||||
options are still being respected.
|
||||
.PP
|
||||
To enter e filter, hit \f(CW\*(C`/\*(C'\fR, enter a filter string and finish with
|
||||
\&\f(CW\*(C`ENTER\*(C'\fR. Use \f(CW\*(C`SPACE\*(C'\fR to select/deselect rows, use \f(CW\*(C`a\*(C'\fR to select all
|
||||
(visible) rows.
|
||||
.PP
|
||||
Commit your selection with \f(CW\*(C`q\*(C'\fR. The selected rows are being fed to
|
||||
the requested output mode as usual. Abort with \f(CW\*(C`CTRL\-c\*(C'\fR, in which
|
||||
case the results of the interactive mode are being ignored and all
|
||||
rows are being fed to output.
|
||||
.SS "\s-1COLUMNS\s0"
|
||||
.IX Subsection "COLUMNS"
|
||||
The parameter \fB\-c\fR can be used to specify, which columns to
|
||||
@@ -348,6 +468,50 @@ We want to see only the \s-1CMD\s0 column and use a regex for this:
|
||||
.Ve
|
||||
.PP
|
||||
where \*(L"C\*(R" is our regexp which matches \s-1CMD.\s0
|
||||
.PP
|
||||
If a column specifier doesn't look like a regular expression, matching
|
||||
against header fields will be case insensitive. So, if you have a
|
||||
field with the name \f(CW\*(C`ID\*(C'\fR then these will all match: \f(CW\*(C`\-c id\*(C'\fR, \f(CW\*(C`\-c
|
||||
Id\*(C'\fR. The same rule applies to the options \f(CW\*(C`\-T\*(C'\fR and \f(CW\*(C`\-F\*(C'\fR.
|
||||
.SS "\s-1TRANSPOSE FIELDS USING REGEXPS\s0"
|
||||
.IX Subsection "TRANSPOSE FIELDS USING REGEXPS"
|
||||
You can manipulate field contents using regular expressions. You have
|
||||
to tell tablizer which field[s] to operate on using the option \f(CW\*(C`\-T\*(C'\fR
|
||||
and the search/replace pattern using \f(CW\*(C`\-R\*(C'\fR. The number of columns and
|
||||
patterns must match.
|
||||
.PP
|
||||
A search/replace pattern consists of the following elements:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& /search\-regexp/replace\-string/
|
||||
.Ve
|
||||
.PP
|
||||
The separator can be any valid character. Especially if you want to
|
||||
use a regexp containing the \f(CW\*(C`/\*(C'\fR character, eg:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& |search\-regexp|replace\-string|
|
||||
.Ve
|
||||
.PP
|
||||
Example:
|
||||
.PP
|
||||
.Vb 7
|
||||
\& cat t/testtable2
|
||||
\& NAME DURATION
|
||||
\& x 10
|
||||
\& a 100
|
||||
\& z 0
|
||||
\& u 4
|
||||
\& k 6
|
||||
\&
|
||||
\& cat t/testtable2 | tablizer \-T2 \-R \*(Aq/^\ed/4/\*(Aq \-n
|
||||
\& NAME DURATION
|
||||
\& x 40
|
||||
\& a 400
|
||||
\& z 4
|
||||
\& u 4
|
||||
\& k 4
|
||||
.Ve
|
||||
.SS "\s-1OUTPUT MODES\s0"
|
||||
.IX Subsection "OUTPUT MODES"
|
||||
There might be cases when the tabular output of a program is way too
|
||||
@@ -387,13 +551,27 @@ more output modes available: \fBorgtbl\fR which prints an Emacs org-mode
|
||||
table and \fBmarkdown\fR which prints a Markdown table, \fByaml\fR, which
|
||||
prints yaml encoding and \s-1CSV\s0 mode, which prints a comma separated
|
||||
value file.
|
||||
.SS "\s-1PUT FIELDS TO CLIPBOARD\s0"
|
||||
.IX Subsection "PUT FIELDS TO CLIPBOARD"
|
||||
You can let tablizer put fields to the clipboard using the option
|
||||
\&\f(CW\*(C`\-y\*(C'\fR. This best fits the use-case when the result of your filtering
|
||||
yields just one row. For example:
|
||||
.PP
|
||||
.Vb 1
|
||||
\& cloudctl cluster ls | tablizer \-yid matchbox
|
||||
.Ve
|
||||
.PP
|
||||
If \*(L"matchbox\*(R" matches one cluster, you can immediately use the id of
|
||||
that cluster somewhere else and paste it. Of course, if there are
|
||||
multiple matches, then all id's will be put into the clipboard
|
||||
separated by one space.
|
||||
.SS "\s-1ENVIRONMENT VARIABLES\s0"
|
||||
.IX Subsection "ENVIRONMENT VARIABLES"
|
||||
\&\fBtablizer\fR supports certain environment variables which use can use
|
||||
to influence program behavior. Commandline flags have always
|
||||
precedence over environment variables.
|
||||
.IP "<T_NO_HEADER_NUMBERING> \- disable numbering of header fields, like \fB\-n\fR." 4
|
||||
.IX Item "<T_NO_HEADER_NUMBERING> - disable numbering of header fields, like -n."
|
||||
.IP "<T_HEADER_NUMBERING> \- enable numbering of header fields, like \fB\-n\fR." 4
|
||||
.IX Item "<T_HEADER_NUMBERING> - enable numbering of header fields, like -n."
|
||||
.PD 0
|
||||
.IP "<T_COLUMNS> \- comma separated list of columns to output, like \fB\-c\fR" 4
|
||||
.IX Item "<T_COLUMNS> - comma separated list of columns to output, like -c"
|
||||
@@ -524,6 +702,9 @@ Released under the \s-1MIT\s0 License, Copyright (c) 201 by Oleku Konko
|
||||
.IP "yaml (gopkg.in/yaml.v3)" 4
|
||||
.IX Item "yaml (gopkg.in/yaml.v3)"
|
||||
Released under the \s-1MIT\s0 License, Copyright (c) 2006\-2011 Kirill Simonov
|
||||
.IP "bubble-table (https://github.com/Evertras/bubble\-table)" 4
|
||||
.IX Item "bubble-table (https://github.com/Evertras/bubble-table)"
|
||||
Released under the \s-1MIT\s0 License, Copyright (c) 2022 Brandon Fulljames
|
||||
.SH "AUTHORS"
|
||||
.IX Header "AUTHORS"
|
||||
Thomas von Dein \fBtom \s-1AT\s0 vondein \s-1DOT\s0 org\fR
|
||||
|
||||
283
tablizer.pod
283
tablizer.pod
@@ -5,42 +5,52 @@ tablizer - Manipulate tabular output of other programs
|
||||
=head1 SYNOPSIS
|
||||
|
||||
Usage:
|
||||
tablizer [regex] [file, ...] [flags]
|
||||
tablizer [regex,...] [-r file] [flags]
|
||||
|
||||
Operational Flags:
|
||||
-c, --columns string Only show the speficied columns (separated by ,)
|
||||
-v, --invert-match select non-matching rows
|
||||
-n, --no-numbering Disable header numbering
|
||||
-N, --no-color Disable pattern highlighting
|
||||
-H, --no-headers Disable headers display
|
||||
-s, --separator string Custom field separator
|
||||
-k, --sort-by int Sort by column (default: 1)
|
||||
-z, --fuzzy Use fuzzy search [experimental]
|
||||
-F, --filter field=reg Filter given field with regex, can be used multiple times
|
||||
-c, --columns string Only show the speficied columns (separated by ,)
|
||||
-v, --invert-match select non-matching rows
|
||||
-n, --numbering Enable header numbering
|
||||
-N, --no-color Disable pattern highlighting
|
||||
-H, --no-headers Disable headers display
|
||||
-s, --separator <string> Custom field separator (maybe char, string or :class:)
|
||||
-k, --sort-by <int|name> Sort by column (default: 1)
|
||||
-z, --fuzzy Use fuzzy search [experimental]
|
||||
-F, --filter <field[!]=reg> Filter given field with regex, can be used multiple times
|
||||
-T, --transpose-columns string Transpose the speficied columns (separated by ,)
|
||||
-R, --regex-transposer </from/to/> Apply /search/replace/ regexp to fields given in -T
|
||||
-j, --json Read JSON input (must be array of hashes)
|
||||
-I, --interactive Interactively filter and select rows
|
||||
--auto-headers Generate headers if there are none present in input
|
||||
--custom-headers a,b,... Use custom headers, separated by comma
|
||||
|
||||
Output Flags (mutually exclusive):
|
||||
-X, --extended Enable extended output
|
||||
-M, --markdown Enable markdown table output
|
||||
-O, --orgtbl Enable org-mode table output
|
||||
-S, --shell Enable shell evaluable output
|
||||
-Y, --yaml Enable yaml output
|
||||
-C, --csv Enable CSV output
|
||||
-A, --ascii Default output mode, ascii tabular
|
||||
-L, --hightlight-lines Use alternating background colors for tables
|
||||
-X, --extended Enable extended output
|
||||
-M, --markdown Enable markdown table output
|
||||
-O, --orgtbl Enable org-mode table output
|
||||
-S, --shell Enable shell evaluable output
|
||||
-Y, --yaml Enable yaml output
|
||||
-C, --csv Enable CSV output
|
||||
-A, --ascii Default output mode, ascii tabular
|
||||
-L, --hightlight-lines Use alternating background colors for tables
|
||||
-y, --yank-columns Yank specified columns (separated by ,) to clipboard,
|
||||
space separated
|
||||
--ofs <char> Output field separator, used by -A and -C.
|
||||
|
||||
Sort Mode Flags (mutually exclusive):
|
||||
-a, --sort-age sort according to age (duration) string
|
||||
-D, --sort-desc Sort in descending order (default: ascending)
|
||||
-i, --sort-numeric sort according to string numerical value
|
||||
-t, --sort-time sort according to time string
|
||||
-a, --sort-age sort according to age (duration) string
|
||||
-D, --sort-desc Sort in descending order (default: ascending)
|
||||
-i, --sort-numeric sort according to string numerical value
|
||||
-t, --sort-time sort according to time string
|
||||
|
||||
Other Flags:
|
||||
--completion <shell> Generate the autocompletion script for <shell>
|
||||
-f, --config <file> Configuration file (default: ~/.config/tablizer/config)
|
||||
-d, --debug Enable debugging
|
||||
-h, --help help for tablizer
|
||||
-m, --man Display manual page
|
||||
-V, --version Print program version
|
||||
-r --read-file <file> Use <file> as input instead of STDIN
|
||||
--completion <shell> Generate the autocompletion script for <shell>
|
||||
-f, --config <file> Configuration file (default: ~/.config/tablizer/config)
|
||||
-d, --debug Enable debugging
|
||||
-h, --help help for tablizer
|
||||
-m, --man Display manual page
|
||||
-V, --version Print program version
|
||||
|
||||
|
||||
=head1 DESCRIPTION
|
||||
@@ -72,16 +82,16 @@ pattern. Hence:
|
||||
kubectl get pods | tablizer
|
||||
|
||||
# read a file
|
||||
tablizer filename
|
||||
tablizer -r filename
|
||||
|
||||
# search for pattern in a file (works like grep)
|
||||
tablizer regex filename
|
||||
tablizer regex -r filename
|
||||
|
||||
# search for pattern in STDIN
|
||||
kubectl get pods | tablizer regex
|
||||
|
||||
The output looks like the original one but every header field will
|
||||
have a numer associated with it, e.g.:
|
||||
The output looks like the original one. You can add the option B<-n>,
|
||||
then every header field will have a numer associated with it, e.g.:
|
||||
|
||||
NAME(1) READY(2) STATUS(3) RESTARTS(4) AGE(5)
|
||||
|
||||
@@ -93,7 +103,14 @@ columns you want to have in your output (see L<COLUMNS>:
|
||||
You can specify the numbers in any order but output will always follow
|
||||
the original order.
|
||||
|
||||
The numbering can be suppressed by using the B<-n> option.
|
||||
However, you may also just use the header names instead of numbers,
|
||||
eg:
|
||||
|
||||
kubectl get pods | tablizer -cname,status
|
||||
|
||||
You can also use regular expressions with B<-c>, eg:
|
||||
|
||||
kubectl get pods | tablizer -c '[ae]'
|
||||
|
||||
By default tablizer shows a header containing the names of each
|
||||
column. This can be disabled using the B<-H> option. Be aware that
|
||||
@@ -104,11 +121,20 @@ By default, if a B<pattern> has been speficied, matches will be
|
||||
highlighted. You can disable this behavior with the B<-N> option.
|
||||
|
||||
Use the B<-k> option to specify by which column to sort the tabular
|
||||
data (as in GNU sort(1)). The default sort column is the first one. To
|
||||
disable sorting at all, supply 0 (Zero) to -k. The default sort order
|
||||
is ascending. You can change this to descending order using the option
|
||||
B<-D>. The default sort order is by string, but there are other sort
|
||||
modes:
|
||||
data (as in GNU sort(1)). The default sort column is the first
|
||||
one. You can specify column numbers or names. Column numbers start
|
||||
with 1, names are case insensitive. You can specify multiple columns
|
||||
separated by comma to sort, but the type must be the same. For example
|
||||
if you want to sort numerically, all columns must be numbers. If you
|
||||
use column numbers, then be aware, that these are the numbers before
|
||||
column extraction. For example if you have a table with 4 columns and
|
||||
specify C<-c4>, then only 1 column (the fourth) will be printed,
|
||||
however if you want to sort by this column, you'll have to specify
|
||||
C<-k4>.
|
||||
|
||||
The default sort order is ascending. You can change this to
|
||||
descending order using the option B<-D>. The default sort order is by
|
||||
alphanumeric string, but there are other sort modes:
|
||||
|
||||
=over
|
||||
|
||||
@@ -129,34 +155,102 @@ Sorts timestamps.
|
||||
Finally the B<-d> option enables debugging output which is mostly
|
||||
useful for the developer.
|
||||
|
||||
=head2 SEPARATOR
|
||||
|
||||
The option B<-s> can be a single character, in which case the CSV
|
||||
parser will be invoked. You can also specify a string as
|
||||
separator. The string will be interpreted as literal string unless it
|
||||
is a valid go regular expression. For example:
|
||||
|
||||
-s '\t{2,}\'
|
||||
|
||||
is being used as a regexp and will match two or more consecutive tabs.
|
||||
|
||||
-s 'foo'
|
||||
|
||||
on the other hand is no regular expression and will be used literally.
|
||||
|
||||
To make live easier, there are a couple of predefined regular
|
||||
expressions, which you can specify as classes:
|
||||
|
||||
=over
|
||||
|
||||
* :tab:
|
||||
|
||||
Matches a tab and eats spaces around it.
|
||||
|
||||
* :spaces:
|
||||
|
||||
Matches 2 or more spaces.
|
||||
|
||||
* :pipe:
|
||||
|
||||
Matches a pipe character and eats spaces around it.
|
||||
|
||||
* :default:
|
||||
|
||||
Matches 2 or more spaces or tab. This is the default separator if none
|
||||
is specified.
|
||||
|
||||
* :nonword:
|
||||
|
||||
Matches a non-word character.
|
||||
|
||||
* :nondigit:
|
||||
|
||||
Matches a non-digit character.
|
||||
|
||||
* :special:
|
||||
|
||||
Matches one or more special chars like brackets, dollar sign, slashes etc.
|
||||
|
||||
* :nonprint:
|
||||
|
||||
Matches one or more non-printable characters.
|
||||
|
||||
|
||||
=back
|
||||
|
||||
=head2 PATTERNS AND FILTERING
|
||||
|
||||
You can reduce the rows being displayed by using a regular expression
|
||||
pattern. The regexp is PCRE compatible, refer to the syntax cheat
|
||||
sheet here: L<https://github.com/google/re2/wiki/Syntax>. If you want
|
||||
to read a more comprehensive documentation about the topic and have
|
||||
perl installed you can read it with:
|
||||
You can reduce the rows being displayed by using one or more regular
|
||||
expression patterns. The regexp language being used is the one of
|
||||
GOLANG, refer to the syntax cheat sheet here:
|
||||
L<https://pkg.go.dev/regexp/syntax>.
|
||||
|
||||
If you want to read a more comprehensive documentation about the
|
||||
topic and have perl installed you can read it with:
|
||||
|
||||
perldoc perlre
|
||||
|
||||
Or read it online: L<https://perldoc.perl.org/perlre>.
|
||||
Or read it online: L<https://perldoc.perl.org/perlre>. But please note
|
||||
that the GO regexp engine does NOT support all perl regex terms,
|
||||
especially look-ahead and look-behind.
|
||||
|
||||
A note on modifiers: the regexp engine used in tablizer uses another
|
||||
modifier syntax:
|
||||
If you want to supply flags to a regex, then surround it with slashes
|
||||
and append the flag. The following flags are supported:
|
||||
|
||||
(?MODIFIER)
|
||||
|
||||
The most important modifiers are:
|
||||
|
||||
C<i> ignore case
|
||||
C<m> multiline mode
|
||||
C<s> single line mode
|
||||
i => case insensitive
|
||||
! => negative match
|
||||
|
||||
Example for a case insensitive search:
|
||||
|
||||
kubectl get pods -A | tablizer "(?i)account"
|
||||
kubectl get pods -A | tablizer "/account/i"
|
||||
|
||||
You can use the experimental fuzzy search feature by providing the
|
||||
If you use the C<!> flag, then the regex match will be negated, that
|
||||
is, if a line in the input matches the given regex, but C<!> is
|
||||
supplied, tablizer will NOT include it in the output.
|
||||
|
||||
For example, here we want to get all lines matching "foo" but not
|
||||
"bar":
|
||||
|
||||
cat table | tablizer foo '/bar/!'
|
||||
|
||||
This would match a line "foo zorro" but not "foo bar".
|
||||
|
||||
The flags can also be combined.
|
||||
|
||||
You can also use the experimental fuzzy search feature by providing the
|
||||
option B<-z>, in which case the pattern is regarded as a fuzzy search
|
||||
term, not a regexp.
|
||||
|
||||
@@ -171,8 +265,26 @@ Fieldnames (== columns headers) are case insensitive.
|
||||
If you specify more than one filter, both filters have to match (AND
|
||||
operation).
|
||||
|
||||
These field filters can also be negated:
|
||||
|
||||
fieldname!=regexp
|
||||
|
||||
If the option B<-v> is specified, the filtering is inverted.
|
||||
|
||||
=head2 INTERACTIVE FILTERING
|
||||
|
||||
You can also use the interactive mode, enabled with C<-I> to filter
|
||||
and select rows. This mode is complementary, that is, other filter
|
||||
options are still being respected.
|
||||
|
||||
To enter e filter, hit C</>, enter a filter string and finish with
|
||||
C<ENTER>. Use C<SPACE> to select/deselect rows, use C<a> to select all
|
||||
(visible) rows.
|
||||
|
||||
Commit your selection with C<q>. The selected rows are being fed to
|
||||
the requested output mode as usual. Abort with C<CTRL-c>, in which
|
||||
case the results of the interactive mode are being ignored and all
|
||||
rows are being fed to output.
|
||||
|
||||
=head2 COLUMNS
|
||||
|
||||
@@ -203,6 +315,46 @@ We want to see only the CMD column and use a regex for this:
|
||||
|
||||
where "C" is our regexp which matches CMD.
|
||||
|
||||
If a column specifier doesn't look like a regular expression, matching
|
||||
against header fields will be case insensitive. So, if you have a
|
||||
field with the name C<ID> then these will all match: C<-c id>, C<-c
|
||||
Id>. The same rule applies to the options C<-T> and C<-F>.
|
||||
|
||||
|
||||
=head2 TRANSPOSE FIELDS USING REGEXPS
|
||||
|
||||
You can manipulate field contents using regular expressions. You have
|
||||
to tell tablizer which field[s] to operate on using the option C<-T>
|
||||
and the search/replace pattern using C<-R>. The number of columns and
|
||||
patterns must match.
|
||||
|
||||
A search/replace pattern consists of the following elements:
|
||||
|
||||
/search-regexp/replace-string/
|
||||
|
||||
The separator can be any valid character. Especially if you want to
|
||||
use a regexp containing the C</> character, eg:
|
||||
|
||||
|search-regexp|replace-string|
|
||||
|
||||
Example:
|
||||
|
||||
cat t/testtable2
|
||||
NAME DURATION
|
||||
x 10
|
||||
a 100
|
||||
z 0
|
||||
u 4
|
||||
k 6
|
||||
|
||||
cat t/testtable2 | tablizer -T2 -R '/^\d/4/' -n
|
||||
NAME DURATION
|
||||
x 40
|
||||
a 400
|
||||
z 4
|
||||
u 4
|
||||
k 4
|
||||
|
||||
=head2 OUTPUT MODES
|
||||
|
||||
There might be cases when the tabular output of a program is way too
|
||||
@@ -239,6 +391,19 @@ table and B<markdown> which prints a Markdown table, B<yaml>, which
|
||||
prints yaml encoding and CSV mode, which prints a comma separated
|
||||
value file.
|
||||
|
||||
=head2 PUT FIELDS TO CLIPBOARD
|
||||
|
||||
You can let tablizer put fields to the clipboard using the option
|
||||
C<-y>. This best fits the use-case when the result of your filtering
|
||||
yields just one row. For example:
|
||||
|
||||
cloudctl cluster ls | tablizer -yid matchbox
|
||||
|
||||
If "matchbox" matches one cluster, you can immediately use the id of
|
||||
that cluster somewhere else and paste it. Of course, if there are
|
||||
multiple matches, then all id's will be put into the clipboard
|
||||
separated by one space.
|
||||
|
||||
=head2 ENVIRONMENT VARIABLES
|
||||
|
||||
B<tablizer> supports certain environment variables which use can use
|
||||
@@ -247,7 +412,7 @@ precedence over environment variables.
|
||||
|
||||
=over
|
||||
|
||||
=item <T_NO_HEADER_NUMBERING> - disable numbering of header fields, like B<-n>.
|
||||
=item <T_HEADER_NUMBERING> - enable numbering of header fields, like B<-n>.
|
||||
|
||||
=item <T_COLUMNS> - comma separated list of columns to output, like B<-c>
|
||||
|
||||
@@ -341,6 +506,8 @@ the C<-L> parameter).
|
||||
Colorization can be turned off completely either by setting the
|
||||
parameter C<-N> or the environment variable B<NO_COLOR> to a true value.
|
||||
|
||||
|
||||
|
||||
=head1 BUGS
|
||||
|
||||
In order to report a bug, unexpected behavior, feature requests
|
||||
@@ -381,6 +548,10 @@ Released under the MIT License, Copyright (c) 201 by Oleku Konko
|
||||
|
||||
Released under the MIT License, Copyright (c) 2006-2011 Kirill Simonov
|
||||
|
||||
=item bubble-table (https://github.com/Evertras/bubble-table)
|
||||
|
||||
Released under the MIT License, Copyright (c) 2022 Brandon Fulljames
|
||||
|
||||
=back
|
||||
|
||||
=head1 AUTHORS
|
||||
|
||||
2
vendor/github.com/agext/levenshtein/.gitignore
generated
vendored
2
vendor/github.com/agext/levenshtein/.gitignore
generated
vendored
@@ -1,2 +0,0 @@
|
||||
README.html
|
||||
coverage.out
|
||||
70
vendor/github.com/agext/levenshtein/.travis.yml
generated
vendored
70
vendor/github.com/agext/levenshtein/.travis.yml
generated
vendored
@@ -1,70 +0,0 @@
|
||||
language: go
|
||||
sudo: false
|
||||
go:
|
||||
- 1.8
|
||||
- 1.7.5
|
||||
- 1.7.4
|
||||
- 1.7.3
|
||||
- 1.7.2
|
||||
- 1.7.1
|
||||
- 1.7
|
||||
- tip
|
||||
- 1.6.4
|
||||
- 1.6.3
|
||||
- 1.6.2
|
||||
- 1.6.1
|
||||
- 1.6
|
||||
- 1.5.4
|
||||
- 1.5.3
|
||||
- 1.5.2
|
||||
- 1.5.1
|
||||
- 1.5
|
||||
- 1.4.3
|
||||
- 1.4.2
|
||||
- 1.4.1
|
||||
- 1.4
|
||||
- 1.3.3
|
||||
- 1.3.2
|
||||
- 1.3.1
|
||||
- 1.3
|
||||
- 1.2.2
|
||||
- 1.2.1
|
||||
- 1.2
|
||||
- 1.1.2
|
||||
- 1.1.1
|
||||
- 1.1
|
||||
before_install:
|
||||
- go get github.com/mattn/goveralls
|
||||
script:
|
||||
- $HOME/gopath/bin/goveralls -service=travis-ci
|
||||
notifications:
|
||||
email:
|
||||
on_success: never
|
||||
matrix:
|
||||
fast_finish: true
|
||||
allow_failures:
|
||||
- go: tip
|
||||
- go: 1.6.4
|
||||
- go: 1.6.3
|
||||
- go: 1.6.2
|
||||
- go: 1.6.1
|
||||
- go: 1.6
|
||||
- go: 1.5.4
|
||||
- go: 1.5.3
|
||||
- go: 1.5.2
|
||||
- go: 1.5.1
|
||||
- go: 1.5
|
||||
- go: 1.4.3
|
||||
- go: 1.4.2
|
||||
- go: 1.4.1
|
||||
- go: 1.4
|
||||
- go: 1.3.3
|
||||
- go: 1.3.2
|
||||
- go: 1.3.1
|
||||
- go: 1.3
|
||||
- go: 1.2.2
|
||||
- go: 1.2.1
|
||||
- go: 1.2
|
||||
- go: 1.1.2
|
||||
- go: 1.1.1
|
||||
- go: 1.1
|
||||
36
vendor/github.com/agext/levenshtein/DCO
generated
vendored
36
vendor/github.com/agext/levenshtein/DCO
generated
vendored
@@ -1,36 +0,0 @@
|
||||
Developer Certificate of Origin
|
||||
Version 1.1
|
||||
|
||||
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
||||
660 York Street, Suite 102,
|
||||
San Francisco, CA 94110 USA
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
|
||||
Developer's Certificate of Origin 1.1
|
||||
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
(a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
(b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
(c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
(d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
||||
201
vendor/github.com/agext/levenshtein/LICENSE
generated
vendored
201
vendor/github.com/agext/levenshtein/LICENSE
generated
vendored
@@ -1,201 +0,0 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
1
vendor/github.com/agext/levenshtein/MAINTAINERS
generated
vendored
1
vendor/github.com/agext/levenshtein/MAINTAINERS
generated
vendored
@@ -1 +0,0 @@
|
||||
Alex Bucataru <alex@alrux.com> (@AlexBucataru)
|
||||
5
vendor/github.com/agext/levenshtein/NOTICE
generated
vendored
5
vendor/github.com/agext/levenshtein/NOTICE
generated
vendored
@@ -1,5 +0,0 @@
|
||||
Alrux Go EXTensions (AGExt) - package levenshtein
|
||||
Copyright 2016 ALRUX Inc.
|
||||
|
||||
This product includes software developed at ALRUX Inc.
|
||||
(http://www.alrux.com/).
|
||||
38
vendor/github.com/agext/levenshtein/README.md
generated
vendored
38
vendor/github.com/agext/levenshtein/README.md
generated
vendored
@@ -1,38 +0,0 @@
|
||||
# A Go package for calculating the Levenshtein distance between two strings
|
||||
|
||||
[](https://github.com/agext/levenshtein/releases/latest)
|
||||
[](https://godoc.org/github.com/agext/levenshtein)
|
||||
[](https://travis-ci.org/agext/levenshtein)
|
||||
[](https://coveralls.io/github/agext/levenshtein)
|
||||
[](https://goreportcard.com/report/github.com/agext/levenshtein)
|
||||
|
||||
|
||||
This package implements distance and similarity metrics for strings, based on the Levenshtein measure, in [Go](http://golang.org).
|
||||
|
||||
## Project Status
|
||||
|
||||
v1.2.1 Stable: Guaranteed no breaking changes to the API in future v1.x releases. Probably safe to use in production, though provided on "AS IS" basis.
|
||||
|
||||
This package is being actively maintained. If you encounter any problems or have any suggestions for improvement, please [open an issue](https://github.com/agext/levenshtein/issues). Pull requests are welcome.
|
||||
|
||||
## Overview
|
||||
|
||||
The Levenshtein `Distance` between two strings is the minimum total cost of edits that would convert the first string into the second. The allowed edit operations are insertions, deletions, and substitutions, all at character (one UTF-8 code point) level. Each operation has a default cost of 1, but each can be assigned its own cost equal to or greater than 0.
|
||||
|
||||
A `Distance` of 0 means the two strings are identical, and the higher the value the more different the strings. Since in practice we are interested in finding if the two strings are "close enough", it often does not make sense to continue the calculation once the result is mathematically guaranteed to exceed a desired threshold. Providing this value to the `Distance` function allows it to take a shortcut and return a lower bound instead of an exact cost when the threshold is exceeded.
|
||||
|
||||
The `Similarity` function calculates the distance, then converts it into a normalized metric within the range 0..1, with 1 meaning the strings are identical, and 0 that they have nothing in common. A minimum similarity threshold can be provided to speed up the calculation of the metric for strings that are far too dissimilar for the purpose at hand. All values under this threshold are rounded down to 0.
|
||||
|
||||
The `Match` function provides a similarity metric, with the same range and meaning as `Similarity`, but with a bonus for string pairs that share a common prefix and have a similarity above a "bonus threshold". It uses the same method as proposed by Winkler for the Jaro distance, and the reasoning behind it is that these string pairs are very likely spelling variations or errors, and they are more closely linked than the edit distance alone would suggest.
|
||||
|
||||
The underlying `Calculate` function is also exported, to allow the building of other derivative metrics, if needed.
|
||||
|
||||
## Installation
|
||||
|
||||
```
|
||||
go get github.com/agext/levenshtein
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
Package levenshtein is released under the Apache 2.0 license. See the [LICENSE](LICENSE) file for details.
|
||||
290
vendor/github.com/agext/levenshtein/levenshtein.go
generated
vendored
290
vendor/github.com/agext/levenshtein/levenshtein.go
generated
vendored
@@ -1,290 +0,0 @@
|
||||
// Copyright 2016 ALRUX Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
/*
|
||||
Package levenshtein implements distance and similarity metrics for strings, based on the Levenshtein measure.
|
||||
|
||||
The Levenshtein `Distance` between two strings is the minimum total cost of edits that would convert the first string into the second. The allowed edit operations are insertions, deletions, and substitutions, all at character (one UTF-8 code point) level. Each operation has a default cost of 1, but each can be assigned its own cost equal to or greater than 0.
|
||||
|
||||
A `Distance` of 0 means the two strings are identical, and the higher the value the more different the strings. Since in practice we are interested in finding if the two strings are "close enough", it often does not make sense to continue the calculation once the result is mathematically guaranteed to exceed a desired threshold. Providing this value to the `Distance` function allows it to take a shortcut and return a lower bound instead of an exact cost when the threshold is exceeded.
|
||||
|
||||
The `Similarity` function calculates the distance, then converts it into a normalized metric within the range 0..1, with 1 meaning the strings are identical, and 0 that they have nothing in common. A minimum similarity threshold can be provided to speed up the calculation of the metric for strings that are far too dissimilar for the purpose at hand. All values under this threshold are rounded down to 0.
|
||||
|
||||
The `Match` function provides a similarity metric, with the same range and meaning as `Similarity`, but with a bonus for string pairs that share a common prefix and have a similarity above a "bonus threshold". It uses the same method as proposed by Winkler for the Jaro distance, and the reasoning behind it is that these string pairs are very likely spelling variations or errors, and they are more closely linked than the edit distance alone would suggest.
|
||||
|
||||
The underlying `Calculate` function is also exported, to allow the building of other derivative metrics, if needed.
|
||||
*/
|
||||
package levenshtein
|
||||
|
||||
// Calculate determines the Levenshtein distance between two strings, using
|
||||
// the given costs for each edit operation. It returns the distance along with
|
||||
// the lengths of the longest common prefix and suffix.
|
||||
//
|
||||
// If maxCost is non-zero, the calculation stops as soon as the distance is determined
|
||||
// to be greater than maxCost. Therefore, any return value higher than maxCost is a
|
||||
// lower bound for the actual distance.
|
||||
func Calculate(str1, str2 []rune, maxCost, insCost, subCost, delCost int) (dist, prefixLen, suffixLen int) {
|
||||
l1, l2 := len(str1), len(str2)
|
||||
// trim common prefix, if any, as it doesn't affect the distance
|
||||
for ; prefixLen < l1 && prefixLen < l2; prefixLen++ {
|
||||
if str1[prefixLen] != str2[prefixLen] {
|
||||
break
|
||||
}
|
||||
}
|
||||
str1, str2 = str1[prefixLen:], str2[prefixLen:]
|
||||
l1 -= prefixLen
|
||||
l2 -= prefixLen
|
||||
// trim common suffix, if any, as it doesn't affect the distance
|
||||
for 0 < l1 && 0 < l2 {
|
||||
if str1[l1-1] != str2[l2-1] {
|
||||
str1, str2 = str1[:l1], str2[:l2]
|
||||
break
|
||||
}
|
||||
l1--
|
||||
l2--
|
||||
suffixLen++
|
||||
}
|
||||
// if the first string is empty, the distance is the length of the second string times the cost of insertion
|
||||
if l1 == 0 {
|
||||
dist = l2 * insCost
|
||||
return
|
||||
}
|
||||
// if the second string is empty, the distance is the length of the first string times the cost of deletion
|
||||
if l2 == 0 {
|
||||
dist = l1 * delCost
|
||||
return
|
||||
}
|
||||
|
||||
// variables used in inner "for" loops
|
||||
var y, dy, c, l int
|
||||
|
||||
// if maxCost is greater than or equal to the maximum possible distance, it's equivalent to 'unlimited'
|
||||
if maxCost > 0 {
|
||||
if subCost < delCost+insCost {
|
||||
if maxCost >= l1*subCost+(l2-l1)*insCost {
|
||||
maxCost = 0
|
||||
}
|
||||
} else {
|
||||
if maxCost >= l1*delCost+l2*insCost {
|
||||
maxCost = 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if maxCost > 0 {
|
||||
// prefer the longer string first, to minimize time;
|
||||
// a swap also transposes the meanings of insertion and deletion.
|
||||
if l1 < l2 {
|
||||
str1, str2, l1, l2, insCost, delCost = str2, str1, l2, l1, delCost, insCost
|
||||
}
|
||||
|
||||
// the length differential times cost of deletion is a lower bound for the cost;
|
||||
// if it is higher than the maxCost, there is no point going into the main calculation.
|
||||
if dist = (l1 - l2) * delCost; dist > maxCost {
|
||||
return
|
||||
}
|
||||
|
||||
d := make([]int, l1+1)
|
||||
|
||||
// offset and length of d in the current row
|
||||
doff, dlen := 0, 1
|
||||
for y, dy = 1, delCost; y <= l1 && dy <= maxCost; dlen++ {
|
||||
d[y] = dy
|
||||
y++
|
||||
dy = y * delCost
|
||||
}
|
||||
// fmt.Printf("%q -> %q: init doff=%d dlen=%d d[%d:%d]=%v\n", str1, str2, doff, dlen, doff, doff+dlen, d[doff:doff+dlen])
|
||||
|
||||
for x := 0; x < l2; x++ {
|
||||
dy, d[doff] = d[doff], d[doff]+insCost
|
||||
for d[doff] > maxCost && dlen > 0 {
|
||||
if str1[doff] != str2[x] {
|
||||
dy += subCost
|
||||
}
|
||||
doff++
|
||||
dlen--
|
||||
if c = d[doff] + insCost; c < dy {
|
||||
dy = c
|
||||
}
|
||||
dy, d[doff] = d[doff], dy
|
||||
}
|
||||
for y, l = doff, doff+dlen-1; y < l; dy, d[y] = d[y], dy {
|
||||
if str1[y] != str2[x] {
|
||||
dy += subCost
|
||||
}
|
||||
if c = d[y] + delCost; c < dy {
|
||||
dy = c
|
||||
}
|
||||
y++
|
||||
if c = d[y] + insCost; c < dy {
|
||||
dy = c
|
||||
}
|
||||
}
|
||||
if y < l1 {
|
||||
if str1[y] != str2[x] {
|
||||
dy += subCost
|
||||
}
|
||||
if c = d[y] + delCost; c < dy {
|
||||
dy = c
|
||||
}
|
||||
for ; dy <= maxCost && y < l1; dy, d[y] = dy+delCost, dy {
|
||||
y++
|
||||
dlen++
|
||||
}
|
||||
}
|
||||
// fmt.Printf("%q -> %q: x=%d doff=%d dlen=%d d[%d:%d]=%v\n", str1, str2, x, doff, dlen, doff, doff+dlen, d[doff:doff+dlen])
|
||||
if dlen == 0 {
|
||||
dist = maxCost + 1
|
||||
return
|
||||
}
|
||||
}
|
||||
if doff+dlen-1 < l1 {
|
||||
dist = maxCost + 1
|
||||
return
|
||||
}
|
||||
dist = d[l1]
|
||||
} else {
|
||||
// ToDo: This is O(l1*l2) time and O(min(l1,l2)) space; investigate if it is
|
||||
// worth to implement diagonal approach - O(l1*(1+dist)) time, up to O(l1*l2) space
|
||||
// http://www.csse.monash.edu.au/~lloyd/tildeStrings/Alignment/92.IPL.html
|
||||
|
||||
// prefer the shorter string first, to minimize space; time is O(l1*l2) anyway;
|
||||
// a swap also transposes the meanings of insertion and deletion.
|
||||
if l1 > l2 {
|
||||
str1, str2, l1, l2, insCost, delCost = str2, str1, l2, l1, delCost, insCost
|
||||
}
|
||||
d := make([]int, l1+1)
|
||||
|
||||
for y = 1; y <= l1; y++ {
|
||||
d[y] = y * delCost
|
||||
}
|
||||
for x := 0; x < l2; x++ {
|
||||
dy, d[0] = d[0], d[0]+insCost
|
||||
for y = 0; y < l1; dy, d[y] = d[y], dy {
|
||||
if str1[y] != str2[x] {
|
||||
dy += subCost
|
||||
}
|
||||
if c = d[y] + delCost; c < dy {
|
||||
dy = c
|
||||
}
|
||||
y++
|
||||
if c = d[y] + insCost; c < dy {
|
||||
dy = c
|
||||
}
|
||||
}
|
||||
}
|
||||
dist = d[l1]
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Distance returns the Levenshtein distance between str1 and str2, using the
|
||||
// default or provided cost values. Pass nil for the third argument to use the
|
||||
// default cost of 1 for all three operations, with no maximum.
|
||||
func Distance(str1, str2 string, p *Params) int {
|
||||
if p == nil {
|
||||
p = defaultParams
|
||||
}
|
||||
dist, _, _ := Calculate([]rune(str1), []rune(str2), p.maxCost, p.insCost, p.subCost, p.delCost)
|
||||
return dist
|
||||
}
|
||||
|
||||
// Similarity returns a score in the range of 0..1 for how similar the two strings are.
|
||||
// A score of 1 means the strings are identical, and 0 means they have nothing in common.
|
||||
//
|
||||
// A nil third argument uses the default cost of 1 for all three operations.
|
||||
//
|
||||
// If a non-zero MinScore value is provided in the parameters, scores lower than it
|
||||
// will be returned as 0.
|
||||
func Similarity(str1, str2 string, p *Params) float64 {
|
||||
return Match(str1, str2, p.Clone().BonusThreshold(1.1)) // guaranteed no bonus
|
||||
}
|
||||
|
||||
// Match returns a similarity score adjusted by the same method as proposed by Winkler for
|
||||
// the Jaro distance - giving a bonus to string pairs that share a common prefix, only if their
|
||||
// similarity score is already over a threshold.
|
||||
//
|
||||
// The score is in the range of 0..1, with 1 meaning the strings are identical,
|
||||
// and 0 meaning they have nothing in common.
|
||||
//
|
||||
// A nil third argument uses the default cost of 1 for all three operations, maximum length of
|
||||
// common prefix to consider for bonus of 4, scaling factor of 0.1, and bonus threshold of 0.7.
|
||||
//
|
||||
// If a non-zero MinScore value is provided in the parameters, scores lower than it
|
||||
// will be returned as 0.
|
||||
func Match(str1, str2 string, p *Params) float64 {
|
||||
s1, s2 := []rune(str1), []rune(str2)
|
||||
l1, l2 := len(s1), len(s2)
|
||||
// two empty strings are identical; shortcut also avoids divByZero issues later on.
|
||||
if l1 == 0 && l2 == 0 {
|
||||
return 1
|
||||
}
|
||||
|
||||
if p == nil {
|
||||
p = defaultParams
|
||||
}
|
||||
|
||||
// a min over 1 can never be satisfied, so the score is 0.
|
||||
if p.minScore > 1 {
|
||||
return 0
|
||||
}
|
||||
|
||||
insCost, delCost, maxDist, max := p.insCost, p.delCost, 0, 0
|
||||
if l1 > l2 {
|
||||
l1, l2, insCost, delCost = l2, l1, delCost, insCost
|
||||
}
|
||||
|
||||
if p.subCost < delCost+insCost {
|
||||
maxDist = l1*p.subCost + (l2-l1)*insCost
|
||||
} else {
|
||||
maxDist = l1*delCost + l2*insCost
|
||||
}
|
||||
|
||||
// a zero min is always satisfied, so no need to set a max cost.
|
||||
if p.minScore > 0 {
|
||||
// if p.minScore is lower than p.bonusThreshold, we can use a simplified formula
|
||||
// for the max cost, because a sim score below min cannot receive a bonus.
|
||||
if p.minScore < p.bonusThreshold {
|
||||
// round down the max - a cost equal to a rounded up max would already be under min.
|
||||
max = int((1 - p.minScore) * float64(maxDist))
|
||||
} else {
|
||||
// p.minScore <= sim + p.bonusPrefix*p.bonusScale*(1-sim)
|
||||
// p.minScore <= (1-dist/maxDist) + p.bonusPrefix*p.bonusScale*(1-(1-dist/maxDist))
|
||||
// p.minScore <= 1 - dist/maxDist + p.bonusPrefix*p.bonusScale*dist/maxDist
|
||||
// 1 - p.minScore >= dist/maxDist - p.bonusPrefix*p.bonusScale*dist/maxDist
|
||||
// (1-p.minScore)*maxDist/(1-p.bonusPrefix*p.bonusScale) >= dist
|
||||
max = int((1 - p.minScore) * float64(maxDist) / (1 - float64(p.bonusPrefix)*p.bonusScale))
|
||||
}
|
||||
}
|
||||
|
||||
dist, pl, _ := Calculate(s1, s2, max, p.insCost, p.subCost, p.delCost)
|
||||
if max > 0 && dist > max {
|
||||
return 0
|
||||
}
|
||||
sim := 1 - float64(dist)/float64(maxDist)
|
||||
|
||||
if sim >= p.bonusThreshold && sim < 1 && p.bonusPrefix > 0 && p.bonusScale > 0 {
|
||||
if pl > p.bonusPrefix {
|
||||
pl = p.bonusPrefix
|
||||
}
|
||||
sim += float64(pl) * p.bonusScale * (1 - sim)
|
||||
}
|
||||
|
||||
if sim < p.minScore {
|
||||
return 0
|
||||
}
|
||||
|
||||
return sim
|
||||
}
|
||||
152
vendor/github.com/agext/levenshtein/params.go
generated
vendored
152
vendor/github.com/agext/levenshtein/params.go
generated
vendored
@@ -1,152 +0,0 @@
|
||||
// Copyright 2016 ALRUX Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package levenshtein
|
||||
|
||||
// Params represents a set of parameter values for the various formulas involved
|
||||
// in the calculation of the Levenshtein string metrics.
|
||||
type Params struct {
|
||||
insCost int
|
||||
subCost int
|
||||
delCost int
|
||||
maxCost int
|
||||
minScore float64
|
||||
bonusPrefix int
|
||||
bonusScale float64
|
||||
bonusThreshold float64
|
||||
}
|
||||
|
||||
var (
|
||||
defaultParams = NewParams()
|
||||
)
|
||||
|
||||
// NewParams creates a new set of parameters and initializes it with the default values.
|
||||
func NewParams() *Params {
|
||||
return &Params{
|
||||
insCost: 1,
|
||||
subCost: 1,
|
||||
delCost: 1,
|
||||
maxCost: 0,
|
||||
minScore: 0,
|
||||
bonusPrefix: 4,
|
||||
bonusScale: .1,
|
||||
bonusThreshold: .7,
|
||||
}
|
||||
}
|
||||
|
||||
// Clone returns a pointer to a copy of the receiver parameter set, or of a new
|
||||
// default parameter set if the receiver is nil.
|
||||
func (p *Params) Clone() *Params {
|
||||
if p == nil {
|
||||
return NewParams()
|
||||
}
|
||||
return &Params{
|
||||
insCost: p.insCost,
|
||||
subCost: p.subCost,
|
||||
delCost: p.delCost,
|
||||
maxCost: p.maxCost,
|
||||
minScore: p.minScore,
|
||||
bonusPrefix: p.bonusPrefix,
|
||||
bonusScale: p.bonusScale,
|
||||
bonusThreshold: p.bonusThreshold,
|
||||
}
|
||||
}
|
||||
|
||||
// InsCost overrides the default value of 1 for the cost of insertion.
|
||||
// The new value must be zero or positive.
|
||||
func (p *Params) InsCost(v int) *Params {
|
||||
if v >= 0 {
|
||||
p.insCost = v
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
// SubCost overrides the default value of 1 for the cost of substitution.
|
||||
// The new value must be zero or positive.
|
||||
func (p *Params) SubCost(v int) *Params {
|
||||
if v >= 0 {
|
||||
p.subCost = v
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
// DelCost overrides the default value of 1 for the cost of deletion.
|
||||
// The new value must be zero or positive.
|
||||
func (p *Params) DelCost(v int) *Params {
|
||||
if v >= 0 {
|
||||
p.delCost = v
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
// MaxCost overrides the default value of 0 (meaning unlimited) for the maximum cost.
|
||||
// The calculation of Distance() stops when the result is guaranteed to exceed
|
||||
// this maximum, returning a lower-bound rather than exact value.
|
||||
// The new value must be zero or positive.
|
||||
func (p *Params) MaxCost(v int) *Params {
|
||||
if v >= 0 {
|
||||
p.maxCost = v
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
// MinScore overrides the default value of 0 for the minimum similarity score.
|
||||
// Scores below this threshold are returned as 0 by Similarity() and Match().
|
||||
// The new value must be zero or positive. Note that a minimum greater than 1
|
||||
// can never be satisfied, resulting in a score of 0 for any pair of strings.
|
||||
func (p *Params) MinScore(v float64) *Params {
|
||||
if v >= 0 {
|
||||
p.minScore = v
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
// BonusPrefix overrides the default value for the maximum length of
|
||||
// common prefix to be considered for bonus by Match().
|
||||
// The new value must be zero or positive.
|
||||
func (p *Params) BonusPrefix(v int) *Params {
|
||||
if v >= 0 {
|
||||
p.bonusPrefix = v
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
// BonusScale overrides the default value for the scaling factor used by Match()
|
||||
// in calculating the bonus.
|
||||
// The new value must be zero or positive. To guarantee that the similarity score
|
||||
// remains in the interval 0..1, this scaling factor is not allowed to exceed
|
||||
// 1 / BonusPrefix.
|
||||
func (p *Params) BonusScale(v float64) *Params {
|
||||
if v >= 0 {
|
||||
p.bonusScale = v
|
||||
}
|
||||
|
||||
// the bonus cannot exceed (1-sim), or the score may become greater than 1.
|
||||
if float64(p.bonusPrefix)*p.bonusScale > 1 {
|
||||
p.bonusScale = 1 / float64(p.bonusPrefix)
|
||||
}
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
// BonusThreshold overrides the default value for the minimum similarity score
|
||||
// for which Match() can assign a bonus.
|
||||
// The new value must be zero or positive. Note that a threshold greater than 1
|
||||
// effectively makes Match() become the equivalent of Similarity().
|
||||
func (p *Params) BonusThreshold(v float64) *Params {
|
||||
if v >= 0 {
|
||||
p.bonusThreshold = v
|
||||
}
|
||||
return p
|
||||
}
|
||||
21
vendor/github.com/alecthomas/repr/COPYING
generated
vendored
21
vendor/github.com/alecthomas/repr/COPYING
generated
vendored
@@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Alec Thomas
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
90
vendor/github.com/alecthomas/repr/README.md
generated
vendored
90
vendor/github.com/alecthomas/repr/README.md
generated
vendored
@@ -1,90 +0,0 @@
|
||||
# Python's repr() for Go [](http://godoc.org/github.com/alecthomas/repr) [](https://circleci.com/gh/alecthomas/repr)
|
||||
|
||||
This package attempts to represent Go values in a form that can be used almost directly in Go source
|
||||
code.
|
||||
|
||||
Unfortunately, some values (such as pointers to basic types) can not be represented directly in Go.
|
||||
These values will be represented as `&<value>`. eg. `&23`
|
||||
|
||||
## Example
|
||||
|
||||
```go
|
||||
type test struct {
|
||||
S string
|
||||
I int
|
||||
A []int
|
||||
}
|
||||
|
||||
func main() {
|
||||
repr.Print(&test{
|
||||
S: "String",
|
||||
I: 123,
|
||||
A: []int{1, 2, 3},
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
Outputs
|
||||
|
||||
```
|
||||
&main.test{S: "String", I: 123, A: []int{1, 2, 3}}
|
||||
```
|
||||
|
||||
## Why repr and not [pp](https://github.com/k0kubun/pp)?
|
||||
|
||||
pp is designed for printing coloured output to consoles, with (seemingly?) no way to disable this. If you don't want coloured output (eg. for use in diffs, logs, etc.) repr is for you.
|
||||
|
||||
## Why repr and not [go-spew](https://github.com/davecgh/go-spew)?
|
||||
|
||||
Repr deliberately contains much less metadata about values. It is designed to (generally) be copyable directly into source code.
|
||||
|
||||
Compare go-spew:
|
||||
|
||||
```go
|
||||
(parser.expression) (len=1 cap=1) {
|
||||
(parser.alternative) (len=1 cap=1) {
|
||||
([]interface {}) (len=1 cap=1) {
|
||||
(*parser.repitition)(0xc82000b220)({
|
||||
expression: (parser.expression) (len=2 cap=2) {
|
||||
(parser.alternative) (len=1 cap=1) {
|
||||
([]interface {}) (len=1 cap=1) {
|
||||
(parser.str) (len=1) "a"
|
||||
}
|
||||
},
|
||||
(parser.alternative) (len=1 cap=1) {
|
||||
([]interface {}) (len=1 cap=1) {
|
||||
(*parser.self)(0x593ef0)({
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
To repr:
|
||||
|
||||
```go
|
||||
parser.expression{
|
||||
parser.alternative{
|
||||
[]interface {}{
|
||||
&parser.repitition{
|
||||
expression: parser.expression{
|
||||
parser.alternative{
|
||||
[]interface {}{
|
||||
parser.str("a"),
|
||||
},
|
||||
},
|
||||
parser.alternative{
|
||||
[]interface {}{
|
||||
&parser.self{ },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
353
vendor/github.com/alecthomas/repr/repr.go
generated
vendored
353
vendor/github.com/alecthomas/repr/repr.go
generated
vendored
@@ -1,353 +0,0 @@
|
||||
// Package repr attempts to represent Go values in a form that can be copy-and-pasted into source
|
||||
// code directly.
|
||||
//
|
||||
// Some values (such as pointers to basic types) can not be represented directly in
|
||||
// Go. These values will be output as `&<value>`. eg. `&23`
|
||||
package repr
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"reflect"
|
||||
"sort"
|
||||
"time"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
var (
|
||||
// "Real" names of basic kinds, used to differentiate type aliases.
|
||||
realKindName = map[reflect.Kind]string{
|
||||
reflect.Bool: "bool",
|
||||
reflect.Int: "int",
|
||||
reflect.Int8: "int8",
|
||||
reflect.Int16: "int16",
|
||||
reflect.Int32: "int32",
|
||||
reflect.Int64: "int64",
|
||||
reflect.Uint: "uint",
|
||||
reflect.Uint8: "uint8",
|
||||
reflect.Uint16: "uint16",
|
||||
reflect.Uint32: "uint32",
|
||||
reflect.Uint64: "uint64",
|
||||
reflect.Uintptr: "uintptr",
|
||||
reflect.Float32: "float32",
|
||||
reflect.Float64: "float64",
|
||||
reflect.Complex64: "complex64",
|
||||
reflect.Complex128: "complex128",
|
||||
reflect.Array: "array",
|
||||
reflect.Chan: "chan",
|
||||
reflect.Func: "func",
|
||||
reflect.Map: "map",
|
||||
reflect.Slice: "slice",
|
||||
reflect.String: "string",
|
||||
}
|
||||
|
||||
goStringerType = reflect.TypeOf((*fmt.GoStringer)(nil)).Elem()
|
||||
|
||||
byteSliceType = reflect.TypeOf([]byte{})
|
||||
)
|
||||
|
||||
// Default prints to os.Stdout with two space indentation.
|
||||
var Default = New(os.Stdout, Indent(" "))
|
||||
|
||||
// An Option modifies the default behaviour of a Printer.
|
||||
type Option func(o *Printer)
|
||||
|
||||
// Indent output by this much.
|
||||
func Indent(indent string) Option { return func(o *Printer) { o.indent = indent } }
|
||||
|
||||
// NoIndent disables indenting.
|
||||
func NoIndent() Option { return Indent("") }
|
||||
|
||||
// OmitEmpty sets whether empty field members should be omitted from output.
|
||||
func OmitEmpty(omitEmpty bool) Option { return func(o *Printer) { o.omitEmpty = omitEmpty } }
|
||||
|
||||
// ExplicitTypes adds explicit typing to slice and map struct values that would normally be inferred by Go.
|
||||
func ExplicitTypes(ok bool) Option { return func(o *Printer) { o.explicitTypes = true } }
|
||||
|
||||
// IgnoreGoStringer disables use of the .GoString() method.
|
||||
func IgnoreGoStringer() Option { return func(o *Printer) { o.ignoreGoStringer = true } }
|
||||
|
||||
// Hide excludes the given types from representation, instead just printing the name of the type.
|
||||
func Hide(ts ...interface{}) Option {
|
||||
return func(o *Printer) {
|
||||
for _, t := range ts {
|
||||
rt := reflect.Indirect(reflect.ValueOf(t)).Type()
|
||||
o.exclude[rt] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// AlwaysIncludeType always includes explicit type information for each item.
|
||||
func AlwaysIncludeType() Option { return func(o *Printer) { o.alwaysIncludeType = true } }
|
||||
|
||||
// Printer represents structs in a printable manner.
|
||||
type Printer struct {
|
||||
indent string
|
||||
omitEmpty bool
|
||||
ignoreGoStringer bool
|
||||
alwaysIncludeType bool
|
||||
explicitTypes bool
|
||||
exclude map[reflect.Type]bool
|
||||
w io.Writer
|
||||
}
|
||||
|
||||
// New creates a new Printer on w with the given Options.
|
||||
func New(w io.Writer, options ...Option) *Printer {
|
||||
p := &Printer{
|
||||
w: w,
|
||||
indent: " ",
|
||||
omitEmpty: true,
|
||||
exclude: map[reflect.Type]bool{},
|
||||
}
|
||||
for _, option := range options {
|
||||
option(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func (p *Printer) nextIndent(indent string) string {
|
||||
if p.indent != "" {
|
||||
return indent + p.indent
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (p *Printer) thisIndent(indent string) string {
|
||||
if p.indent != "" {
|
||||
return indent
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// Print the values.
|
||||
func (p *Printer) Print(vs ...interface{}) {
|
||||
for i, v := range vs {
|
||||
if i > 0 {
|
||||
fmt.Fprint(p.w, " ")
|
||||
}
|
||||
p.reprValue(map[reflect.Value]bool{}, reflect.ValueOf(v), "", true)
|
||||
}
|
||||
}
|
||||
|
||||
// Println prints each value on a new line.
|
||||
func (p *Printer) Println(vs ...interface{}) {
|
||||
for i, v := range vs {
|
||||
if i > 0 {
|
||||
fmt.Fprint(p.w, " ")
|
||||
}
|
||||
p.reprValue(map[reflect.Value]bool{}, reflect.ValueOf(v), "", true)
|
||||
}
|
||||
fmt.Fprintln(p.w)
|
||||
}
|
||||
|
||||
func (p *Printer) reprValue(seen map[reflect.Value]bool, v reflect.Value, indent string, showType bool) { // nolint: gocyclo
|
||||
if seen[v] {
|
||||
fmt.Fprint(p.w, "...")
|
||||
return
|
||||
}
|
||||
seen[v] = true
|
||||
defer delete(seen, v)
|
||||
|
||||
if v.Kind() == reflect.Invalid || (v.Kind() == reflect.Ptr || v.Kind() == reflect.Map || v.Kind() == reflect.Chan || v.Kind() == reflect.Slice || v.Kind() == reflect.Func || v.Kind() == reflect.Interface) && v.IsNil() {
|
||||
fmt.Fprint(p.w, "nil")
|
||||
return
|
||||
}
|
||||
if p.exclude[v.Type()] {
|
||||
fmt.Fprintf(p.w, "%s...", v.Type().Name())
|
||||
return
|
||||
}
|
||||
t := v.Type()
|
||||
|
||||
if t == byteSliceType {
|
||||
fmt.Fprintf(p.w, "[]byte(%q)", v.Bytes())
|
||||
return
|
||||
}
|
||||
|
||||
// If we can't access a private field directly with reflection, try and do so via unsafe.
|
||||
if !v.CanInterface() && v.CanAddr() {
|
||||
uv := reflect.NewAt(t, unsafe.Pointer(v.UnsafeAddr())).Elem()
|
||||
if uv.CanInterface() {
|
||||
v = uv
|
||||
}
|
||||
}
|
||||
// Attempt to use fmt.GoStringer interface.
|
||||
if !p.ignoreGoStringer && t.Implements(goStringerType) {
|
||||
fmt.Fprint(p.w, v.Interface().(fmt.GoStringer).GoString())
|
||||
return
|
||||
}
|
||||
in := p.thisIndent(indent)
|
||||
ni := p.nextIndent(indent)
|
||||
switch v.Kind() {
|
||||
case reflect.Slice, reflect.Array:
|
||||
fmt.Fprintf(p.w, "%s{", v.Type())
|
||||
if v.Len() == 0 {
|
||||
fmt.Fprint(p.w, "}")
|
||||
} else {
|
||||
if p.indent != "" {
|
||||
fmt.Fprintf(p.w, "\n")
|
||||
}
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
e := v.Index(i)
|
||||
fmt.Fprintf(p.w, "%s", ni)
|
||||
p.reprValue(seen, e, ni, p.alwaysIncludeType || p.explicitTypes)
|
||||
if p.indent != "" {
|
||||
fmt.Fprintf(p.w, ",\n")
|
||||
} else if i < v.Len()-1 {
|
||||
fmt.Fprintf(p.w, ", ")
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(p.w, "%s}", in)
|
||||
}
|
||||
|
||||
case reflect.Chan:
|
||||
fmt.Fprintf(p.w, "make(")
|
||||
fmt.Fprintf(p.w, "%s", v.Type())
|
||||
fmt.Fprintf(p.w, ", %d)", v.Cap())
|
||||
|
||||
case reflect.Map:
|
||||
fmt.Fprintf(p.w, "%s{", v.Type())
|
||||
if p.indent != "" && v.Len() != 0 {
|
||||
fmt.Fprintf(p.w, "\n")
|
||||
}
|
||||
keys := v.MapKeys()
|
||||
sort.Slice(keys, func(i, j int) bool {
|
||||
return fmt.Sprint(keys[i]) < fmt.Sprint(keys[j])
|
||||
})
|
||||
for i, k := range keys {
|
||||
kv := v.MapIndex(k)
|
||||
fmt.Fprintf(p.w, "%s", ni)
|
||||
p.reprValue(seen, k, ni, p.alwaysIncludeType || p.explicitTypes)
|
||||
fmt.Fprintf(p.w, ": ")
|
||||
p.reprValue(seen, kv, ni, true)
|
||||
if p.indent != "" {
|
||||
fmt.Fprintf(p.w, ",\n")
|
||||
} else if i < v.Len()-1 {
|
||||
fmt.Fprintf(p.w, ", ")
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(p.w, "%s}", in)
|
||||
|
||||
case reflect.Struct:
|
||||
if td, ok := asTime(v); ok {
|
||||
timeToGo(p.w, td)
|
||||
} else {
|
||||
if showType {
|
||||
fmt.Fprintf(p.w, "%s{", v.Type())
|
||||
} else {
|
||||
fmt.Fprint(p.w, "{")
|
||||
}
|
||||
if p.indent != "" && v.NumField() != 0 {
|
||||
fmt.Fprintf(p.w, "\n")
|
||||
}
|
||||
for i := 0; i < v.NumField(); i++ {
|
||||
t := v.Type().Field(i)
|
||||
f := v.Field(i)
|
||||
if p.omitEmpty && f.IsZero() {
|
||||
continue
|
||||
}
|
||||
fmt.Fprintf(p.w, "%s%s: ", ni, t.Name)
|
||||
p.reprValue(seen, f, ni, true)
|
||||
if p.indent != "" {
|
||||
fmt.Fprintf(p.w, ",\n")
|
||||
} else if i < v.NumField()-1 {
|
||||
fmt.Fprintf(p.w, ", ")
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(p.w, "%s}", indent)
|
||||
}
|
||||
case reflect.Ptr:
|
||||
if v.IsNil() {
|
||||
fmt.Fprintf(p.w, "nil")
|
||||
return
|
||||
}
|
||||
if showType {
|
||||
fmt.Fprintf(p.w, "&")
|
||||
}
|
||||
p.reprValue(seen, v.Elem(), indent, showType)
|
||||
|
||||
case reflect.String:
|
||||
if t.Name() != "string" || p.alwaysIncludeType {
|
||||
fmt.Fprintf(p.w, "%s(%q)", t, v.String())
|
||||
} else {
|
||||
fmt.Fprintf(p.w, "%q", v.String())
|
||||
}
|
||||
|
||||
case reflect.Interface:
|
||||
if v.IsNil() {
|
||||
fmt.Fprintf(p.w, "interface {}(nil)")
|
||||
} else {
|
||||
p.reprValue(seen, v.Elem(), indent, true)
|
||||
}
|
||||
|
||||
default:
|
||||
if t.Name() != realKindName[t.Kind()] || p.alwaysIncludeType {
|
||||
fmt.Fprintf(p.w, "%s(%v)", t, v)
|
||||
} else {
|
||||
fmt.Fprintf(p.w, "%v", v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func asTime(v reflect.Value) (time.Time, bool) {
|
||||
if !v.CanInterface() {
|
||||
return time.Time{}, false
|
||||
}
|
||||
t, ok := v.Interface().(time.Time)
|
||||
return t, ok
|
||||
}
|
||||
|
||||
// String returns a string representing v.
|
||||
func String(v interface{}, options ...Option) string {
|
||||
w := bytes.NewBuffer(nil)
|
||||
options = append([]Option{NoIndent()}, options...)
|
||||
p := New(w, options...)
|
||||
p.Print(v)
|
||||
return w.String()
|
||||
}
|
||||
|
||||
func extractOptions(vs ...interface{}) (args []interface{}, options []Option) {
|
||||
for _, v := range vs {
|
||||
if o, ok := v.(Option); ok {
|
||||
options = append(options, o)
|
||||
} else {
|
||||
args = append(args, v)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Println prints v to os.Stdout, one per line.
|
||||
func Println(vs ...interface{}) {
|
||||
args, options := extractOptions(vs...)
|
||||
New(os.Stdout, options...).Println(args...)
|
||||
}
|
||||
|
||||
// Print writes a representation of v to os.Stdout, separated by spaces.
|
||||
func Print(vs ...interface{}) {
|
||||
args, options := extractOptions(vs...)
|
||||
New(os.Stdout, options...).Print(args...)
|
||||
}
|
||||
|
||||
func timeToGo(w io.Writer, t time.Time) {
|
||||
if t.IsZero() {
|
||||
fmt.Fprint(w, "time.Time{}")
|
||||
return
|
||||
}
|
||||
|
||||
var zone string
|
||||
switch loc := t.Location(); loc {
|
||||
case nil:
|
||||
zone = "nil"
|
||||
case time.UTC:
|
||||
zone = "time.UTC"
|
||||
case time.Local:
|
||||
zone = "time.Local"
|
||||
default:
|
||||
n, off := t.Zone()
|
||||
zone = fmt.Sprintf("time.FixedZone(%q, %d)", n, off)
|
||||
}
|
||||
y, m, d := t.Date()
|
||||
fmt.Fprintf(w, `time.Date(%d, %d, %d, %d, %d, %d, %d, %s)`, y, m, d, t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), zone)
|
||||
}
|
||||
95
vendor/github.com/apparentlymart/go-textseg/v13/LICENSE
generated
vendored
95
vendor/github.com/apparentlymart/go-textseg/v13/LICENSE
generated
vendored
@@ -1,95 +0,0 @@
|
||||
Copyright (c) 2017 Martin Atkins
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
---------
|
||||
|
||||
Unicode table generation programs are under a separate copyright and license:
|
||||
|
||||
Copyright (c) 2014 Couchbase, Inc.
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
|
||||
except in compliance with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the
|
||||
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
either express or implied. See the License for the specific language governing permissions
|
||||
and limitations under the License.
|
||||
|
||||
---------
|
||||
|
||||
Grapheme break data is provided as part of the Unicode character database,
|
||||
copright 2016 Unicode, Inc, which is provided with the following license:
|
||||
|
||||
Unicode Data Files include all data files under the directories
|
||||
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
|
||||
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
|
||||
http://www.unicode.org/utility/trac/browser/.
|
||||
|
||||
Unicode Data Files do not include PDF online code charts under the
|
||||
directory http://www.unicode.org/Public/.
|
||||
|
||||
Software includes any source code published in the Unicode Standard
|
||||
or under the directories
|
||||
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
|
||||
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
|
||||
http://www.unicode.org/utility/trac/browser/.
|
||||
|
||||
NOTICE TO USER: Carefully read the following legal agreement.
|
||||
BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
|
||||
DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
|
||||
YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
|
||||
TERMS AND CONDITIONS OF THIS AGREEMENT.
|
||||
IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
|
||||
THE DATA FILES OR SOFTWARE.
|
||||
|
||||
COPYRIGHT AND PERMISSION NOTICE
|
||||
|
||||
Copyright © 1991-2017 Unicode, Inc. All rights reserved.
|
||||
Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of the Unicode data files and any associated documentation
|
||||
(the "Data Files") or Unicode software and any associated documentation
|
||||
(the "Software") to deal in the Data Files or Software
|
||||
without restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, and/or sell copies of
|
||||
the Data Files or Software, and to permit persons to whom the Data Files
|
||||
or Software are furnished to do so, provided that either
|
||||
(a) this copyright and permission notice appear with all copies
|
||||
of the Data Files or Software, or
|
||||
(b) this copyright and permission notice appear in associated
|
||||
Documentation.
|
||||
|
||||
THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT OF THIRD PARTY RIGHTS.
|
||||
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
|
||||
NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
|
||||
DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
||||
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
PERFORMANCE OF THE DATA FILES OR SOFTWARE.
|
||||
|
||||
Except as contained in this notice, the name of a copyright holder
|
||||
shall not be used in advertising or otherwise to promote the sale,
|
||||
use or other dealings in these Data Files or Software without prior
|
||||
written authorization of the copyright holder.
|
||||
30
vendor/github.com/apparentlymart/go-textseg/v13/textseg/all_tokens.go
generated
vendored
30
vendor/github.com/apparentlymart/go-textseg/v13/textseg/all_tokens.go
generated
vendored
@@ -1,30 +0,0 @@
|
||||
package textseg
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
)
|
||||
|
||||
// AllTokens is a utility that uses a bufio.SplitFunc to produce a slice of
|
||||
// all of the recognized tokens in the given buffer.
|
||||
func AllTokens(buf []byte, splitFunc bufio.SplitFunc) ([][]byte, error) {
|
||||
scanner := bufio.NewScanner(bytes.NewReader(buf))
|
||||
scanner.Split(splitFunc)
|
||||
var ret [][]byte
|
||||
for scanner.Scan() {
|
||||
ret = append(ret, scanner.Bytes())
|
||||
}
|
||||
return ret, scanner.Err()
|
||||
}
|
||||
|
||||
// TokenCount is a utility that uses a bufio.SplitFunc to count the number of
|
||||
// recognized tokens in the given buffer.
|
||||
func TokenCount(buf []byte, splitFunc bufio.SplitFunc) (int, error) {
|
||||
scanner := bufio.NewScanner(bytes.NewReader(buf))
|
||||
scanner.Split(splitFunc)
|
||||
var ret int
|
||||
for scanner.Scan() {
|
||||
ret++
|
||||
}
|
||||
return ret, scanner.Err()
|
||||
}
|
||||
525
vendor/github.com/apparentlymart/go-textseg/v13/textseg/emoji_table.rl
generated
vendored
525
vendor/github.com/apparentlymart/go-textseg/v13/textseg/emoji_table.rl
generated
vendored
@@ -1,525 +0,0 @@
|
||||
# The following Ragel file was autogenerated with unicode2ragel.rb
|
||||
# from: https://www.unicode.org/Public/13.0.0/ucd/emoji/emoji-data.txt
|
||||
#
|
||||
# It defines ["Extended_Pictographic"].
|
||||
#
|
||||
# To use this, make sure that your alphtype is set to byte,
|
||||
# and that your input is in utf8.
|
||||
|
||||
%%{
|
||||
machine Emoji;
|
||||
|
||||
Extended_Pictographic =
|
||||
0xC2 0xA9 #E0.6 [1] (©️) copyright
|
||||
| 0xC2 0xAE #E0.6 [1] (®️) registered
|
||||
| 0xE2 0x80 0xBC #E0.6 [1] (‼️) double exclamation mark
|
||||
| 0xE2 0x81 0x89 #E0.6 [1] (⁉️) exclamation question ...
|
||||
| 0xE2 0x84 0xA2 #E0.6 [1] (™️) trade mark
|
||||
| 0xE2 0x84 0xB9 #E0.6 [1] (ℹ️) information
|
||||
| 0xE2 0x86 0x94..0x99 #E0.6 [6] (↔️..↙️) left-right arrow..do...
|
||||
| 0xE2 0x86 0xA9..0xAA #E0.6 [2] (↩️..↪️) right arrow curving ...
|
||||
| 0xE2 0x8C 0x9A..0x9B #E0.6 [2] (⌚..⌛) watch..hourglass done
|
||||
| 0xE2 0x8C 0xA8 #E1.0 [1] (⌨️) keyboard
|
||||
| 0xE2 0x8E 0x88 #E0.0 [1] (⎈) HELM SYMBOL
|
||||
| 0xE2 0x8F 0x8F #E1.0 [1] (⏏️) eject button
|
||||
| 0xE2 0x8F 0xA9..0xAC #E0.6 [4] (⏩..⏬) fast-forward button..f...
|
||||
| 0xE2 0x8F 0xAD..0xAE #E0.7 [2] (⏭️..⏮️) next track button..l...
|
||||
| 0xE2 0x8F 0xAF #E1.0 [1] (⏯️) play or pause button
|
||||
| 0xE2 0x8F 0xB0 #E0.6 [1] (⏰) alarm clock
|
||||
| 0xE2 0x8F 0xB1..0xB2 #E1.0 [2] (⏱️..⏲️) stopwatch..timer clock
|
||||
| 0xE2 0x8F 0xB3 #E0.6 [1] (⏳) hourglass not done
|
||||
| 0xE2 0x8F 0xB8..0xBA #E0.7 [3] (⏸️..⏺️) pause button..record...
|
||||
| 0xE2 0x93 0x82 #E0.6 [1] (Ⓜ️) circled M
|
||||
| 0xE2 0x96 0xAA..0xAB #E0.6 [2] (▪️..▫️) black small square.....
|
||||
| 0xE2 0x96 0xB6 #E0.6 [1] (▶️) play button
|
||||
| 0xE2 0x97 0x80 #E0.6 [1] (◀️) reverse button
|
||||
| 0xE2 0x97 0xBB..0xBE #E0.6 [4] (◻️..◾) white medium square.....
|
||||
| 0xE2 0x98 0x80..0x81 #E0.6 [2] (☀️..☁️) sun..cloud
|
||||
| 0xE2 0x98 0x82..0x83 #E0.7 [2] (☂️..☃️) umbrella..snowman
|
||||
| 0xE2 0x98 0x84 #E1.0 [1] (☄️) comet
|
||||
| 0xE2 0x98 0x85 #E0.0 [1] (★) BLACK STAR
|
||||
| 0xE2 0x98 0x87..0x8D #E0.0 [7] (☇..☍) LIGHTNING..OPPOSITION
|
||||
| 0xE2 0x98 0x8E #E0.6 [1] (☎️) telephone
|
||||
| 0xE2 0x98 0x8F..0x90 #E0.0 [2] (☏..☐) WHITE TELEPHONE..BALLO...
|
||||
| 0xE2 0x98 0x91 #E0.6 [1] (☑️) check box with check
|
||||
| 0xE2 0x98 0x92 #E0.0 [1] (☒) BALLOT BOX WITH X
|
||||
| 0xE2 0x98 0x94..0x95 #E0.6 [2] (☔..☕) umbrella with rain dro...
|
||||
| 0xE2 0x98 0x96..0x97 #E0.0 [2] (☖..☗) WHITE SHOGI PIECE..BLA...
|
||||
| 0xE2 0x98 0x98 #E1.0 [1] (☘️) shamrock
|
||||
| 0xE2 0x98 0x99..0x9C #E0.0 [4] (☙..☜) REVERSED ROTATED FLORA...
|
||||
| 0xE2 0x98 0x9D #E0.6 [1] (☝️) index pointing up
|
||||
| 0xE2 0x98 0x9E..0x9F #E0.0 [2] (☞..☟) WHITE RIGHT POINTING I...
|
||||
| 0xE2 0x98 0xA0 #E1.0 [1] (☠️) skull and crossbones
|
||||
| 0xE2 0x98 0xA1 #E0.0 [1] (☡) CAUTION SIGN
|
||||
| 0xE2 0x98 0xA2..0xA3 #E1.0 [2] (☢️..☣️) radioactive..biohazard
|
||||
| 0xE2 0x98 0xA4..0xA5 #E0.0 [2] (☤..☥) CADUCEUS..ANKH
|
||||
| 0xE2 0x98 0xA6 #E1.0 [1] (☦️) orthodox cross
|
||||
| 0xE2 0x98 0xA7..0xA9 #E0.0 [3] (☧..☩) CHI RHO..CROSS OF JERU...
|
||||
| 0xE2 0x98 0xAA #E0.7 [1] (☪️) star and crescent
|
||||
| 0xE2 0x98 0xAB..0xAD #E0.0 [3] (☫..☭) FARSI SYMBOL..HAMMER A...
|
||||
| 0xE2 0x98 0xAE #E1.0 [1] (☮️) peace symbol
|
||||
| 0xE2 0x98 0xAF #E0.7 [1] (☯️) yin yang
|
||||
| 0xE2 0x98 0xB0..0xB7 #E0.0 [8] (☰..☷) TRIGRAM FOR HEAVEN..TR...
|
||||
| 0xE2 0x98 0xB8..0xB9 #E0.7 [2] (☸️..☹️) wheel of dharma..fro...
|
||||
| 0xE2 0x98 0xBA #E0.6 [1] (☺️) smiling face
|
||||
| 0xE2 0x98 0xBB..0xBF #E0.0 [5] (☻..☿) BLACK SMILING FACE..ME...
|
||||
| 0xE2 0x99 0x80 #E4.0 [1] (♀️) female sign
|
||||
| 0xE2 0x99 0x81 #E0.0 [1] (♁) EARTH
|
||||
| 0xE2 0x99 0x82 #E4.0 [1] (♂️) male sign
|
||||
| 0xE2 0x99 0x83..0x87 #E0.0 [5] (♃..♇) JUPITER..PLUTO
|
||||
| 0xE2 0x99 0x88..0x93 #E0.6 [12] (♈..♓) Aries..Pisces
|
||||
| 0xE2 0x99 0x94..0x9E #E0.0 [11] (♔..♞) WHITE CHESS KING..BLAC...
|
||||
| 0xE2 0x99 0x9F #E11.0 [1] (♟️) chess pawn
|
||||
| 0xE2 0x99 0xA0 #E0.6 [1] (♠️) spade suit
|
||||
| 0xE2 0x99 0xA1..0xA2 #E0.0 [2] (♡..♢) WHITE HEART SUIT..WHIT...
|
||||
| 0xE2 0x99 0xA3 #E0.6 [1] (♣️) club suit
|
||||
| 0xE2 0x99 0xA4 #E0.0 [1] (♤) WHITE SPADE SUIT
|
||||
| 0xE2 0x99 0xA5..0xA6 #E0.6 [2] (♥️..♦️) heart suit..diamond ...
|
||||
| 0xE2 0x99 0xA7 #E0.0 [1] (♧) WHITE CLUB SUIT
|
||||
| 0xE2 0x99 0xA8 #E0.6 [1] (♨️) hot springs
|
||||
| 0xE2 0x99 0xA9..0xBA #E0.0 [18] (♩..♺) QUARTER NOTE..RECYCLIN...
|
||||
| 0xE2 0x99 0xBB #E0.6 [1] (♻️) recycling symbol
|
||||
| 0xE2 0x99 0xBC..0xBD #E0.0 [2] (♼..♽) RECYCLED PAPER SYMBOL....
|
||||
| 0xE2 0x99 0xBE #E11.0 [1] (♾️) infinity
|
||||
| 0xE2 0x99 0xBF #E0.6 [1] (♿) wheelchair symbol
|
||||
| 0xE2 0x9A 0x80..0x85 #E0.0 [6] (⚀..⚅) DIE FACE-1..DIE FACE-6
|
||||
| 0xE2 0x9A 0x90..0x91 #E0.0 [2] (⚐..⚑) WHITE FLAG..BLACK FLAG
|
||||
| 0xE2 0x9A 0x92 #E1.0 [1] (⚒️) hammer and pick
|
||||
| 0xE2 0x9A 0x93 #E0.6 [1] (⚓) anchor
|
||||
| 0xE2 0x9A 0x94 #E1.0 [1] (⚔️) crossed swords
|
||||
| 0xE2 0x9A 0x95 #E4.0 [1] (⚕️) medical symbol
|
||||
| 0xE2 0x9A 0x96..0x97 #E1.0 [2] (⚖️..⚗️) balance scale..alembic
|
||||
| 0xE2 0x9A 0x98 #E0.0 [1] (⚘) FLOWER
|
||||
| 0xE2 0x9A 0x99 #E1.0 [1] (⚙️) gear
|
||||
| 0xE2 0x9A 0x9A #E0.0 [1] (⚚) STAFF OF HERMES
|
||||
| 0xE2 0x9A 0x9B..0x9C #E1.0 [2] (⚛️..⚜️) atom symbol..fleur-d...
|
||||
| 0xE2 0x9A 0x9D..0x9F #E0.0 [3] (⚝..⚟) OUTLINED WHITE STAR..T...
|
||||
| 0xE2 0x9A 0xA0..0xA1 #E0.6 [2] (⚠️..⚡) warning..high voltage
|
||||
| 0xE2 0x9A 0xA2..0xA6 #E0.0 [5] (⚢..⚦) DOUBLED FEMALE SIGN..M...
|
||||
| 0xE2 0x9A 0xA7 #E13.0 [1] (⚧️) transgender symbol
|
||||
| 0xE2 0x9A 0xA8..0xA9 #E0.0 [2] (⚨..⚩) VERTICAL MALE WITH STR...
|
||||
| 0xE2 0x9A 0xAA..0xAB #E0.6 [2] (⚪..⚫) white circle..black ci...
|
||||
| 0xE2 0x9A 0xAC..0xAF #E0.0 [4] (⚬..⚯) MEDIUM SMALL WHITE CIR...
|
||||
| 0xE2 0x9A 0xB0..0xB1 #E1.0 [2] (⚰️..⚱️) coffin..funeral urn
|
||||
| 0xE2 0x9A 0xB2..0xBC #E0.0 [11] (⚲..⚼) NEUTER..SESQUIQUADRATE
|
||||
| 0xE2 0x9A 0xBD..0xBE #E0.6 [2] (⚽..⚾) soccer ball..baseball
|
||||
| 0xE2 0x9A 0xBF..0xFF #E0.0 [5] (⚿..⛃) SQUARED KEY..BLACK DRA...
|
||||
| 0xE2 0x9B 0x00..0x83 #
|
||||
| 0xE2 0x9B 0x84..0x85 #E0.6 [2] (⛄..⛅) snowman without snow.....
|
||||
| 0xE2 0x9B 0x86..0x87 #E0.0 [2] (⛆..⛇) RAIN..BLACK SNOWMAN
|
||||
| 0xE2 0x9B 0x88 #E0.7 [1] (⛈️) cloud with lightning ...
|
||||
| 0xE2 0x9B 0x89..0x8D #E0.0 [5] (⛉..⛍) TURNED WHITE SHOGI PIE...
|
||||
| 0xE2 0x9B 0x8E #E0.6 [1] (⛎) Ophiuchus
|
||||
| 0xE2 0x9B 0x8F #E0.7 [1] (⛏️) pick
|
||||
| 0xE2 0x9B 0x90 #E0.0 [1] (⛐) CAR SLIDING
|
||||
| 0xE2 0x9B 0x91 #E0.7 [1] (⛑️) rescue worker’s helmet
|
||||
| 0xE2 0x9B 0x92 #E0.0 [1] (⛒) CIRCLED CROSSING LANES
|
||||
| 0xE2 0x9B 0x93 #E0.7 [1] (⛓️) chains
|
||||
| 0xE2 0x9B 0x94 #E0.6 [1] (⛔) no entry
|
||||
| 0xE2 0x9B 0x95..0xA8 #E0.0 [20] (⛕..⛨) ALTERNATE ONE-WAY LEFT...
|
||||
| 0xE2 0x9B 0xA9 #E0.7 [1] (⛩️) shinto shrine
|
||||
| 0xE2 0x9B 0xAA #E0.6 [1] (⛪) church
|
||||
| 0xE2 0x9B 0xAB..0xAF #E0.0 [5] (⛫..⛯) CASTLE..MAP SYMBOL FOR...
|
||||
| 0xE2 0x9B 0xB0..0xB1 #E0.7 [2] (⛰️..⛱️) mountain..umbrella o...
|
||||
| 0xE2 0x9B 0xB2..0xB3 #E0.6 [2] (⛲..⛳) fountain..flag in hole
|
||||
| 0xE2 0x9B 0xB4 #E0.7 [1] (⛴️) ferry
|
||||
| 0xE2 0x9B 0xB5 #E0.6 [1] (⛵) sailboat
|
||||
| 0xE2 0x9B 0xB6 #E0.0 [1] (⛶) SQUARE FOUR CORNERS
|
||||
| 0xE2 0x9B 0xB7..0xB9 #E0.7 [3] (⛷️..⛹️) skier..person bounci...
|
||||
| 0xE2 0x9B 0xBA #E0.6 [1] (⛺) tent
|
||||
| 0xE2 0x9B 0xBB..0xBC #E0.0 [2] (⛻..⛼) JAPANESE BANK SYMBOL.....
|
||||
| 0xE2 0x9B 0xBD #E0.6 [1] (⛽) fuel pump
|
||||
| 0xE2 0x9B 0xBE..0xFF #E0.0 [4] (⛾..✁) CUP ON BLACK SQUARE..U...
|
||||
| 0xE2 0x9C 0x00..0x81 #
|
||||
| 0xE2 0x9C 0x82 #E0.6 [1] (✂️) scissors
|
||||
| 0xE2 0x9C 0x83..0x84 #E0.0 [2] (✃..✄) LOWER BLADE SCISSORS.....
|
||||
| 0xE2 0x9C 0x85 #E0.6 [1] (✅) check mark button
|
||||
| 0xE2 0x9C 0x88..0x8C #E0.6 [5] (✈️..✌️) airplane..victory hand
|
||||
| 0xE2 0x9C 0x8D #E0.7 [1] (✍️) writing hand
|
||||
| 0xE2 0x9C 0x8E #E0.0 [1] (✎) LOWER RIGHT PENCIL
|
||||
| 0xE2 0x9C 0x8F #E0.6 [1] (✏️) pencil
|
||||
| 0xE2 0x9C 0x90..0x91 #E0.0 [2] (✐..✑) UPPER RIGHT PENCIL..WH...
|
||||
| 0xE2 0x9C 0x92 #E0.6 [1] (✒️) black nib
|
||||
| 0xE2 0x9C 0x94 #E0.6 [1] (✔️) check mark
|
||||
| 0xE2 0x9C 0x96 #E0.6 [1] (✖️) multiply
|
||||
| 0xE2 0x9C 0x9D #E0.7 [1] (✝️) latin cross
|
||||
| 0xE2 0x9C 0xA1 #E0.7 [1] (✡️) star of David
|
||||
| 0xE2 0x9C 0xA8 #E0.6 [1] (✨) sparkles
|
||||
| 0xE2 0x9C 0xB3..0xB4 #E0.6 [2] (✳️..✴️) eight-spoked asteris...
|
||||
| 0xE2 0x9D 0x84 #E0.6 [1] (❄️) snowflake
|
||||
| 0xE2 0x9D 0x87 #E0.6 [1] (❇️) sparkle
|
||||
| 0xE2 0x9D 0x8C #E0.6 [1] (❌) cross mark
|
||||
| 0xE2 0x9D 0x8E #E0.6 [1] (❎) cross mark button
|
||||
| 0xE2 0x9D 0x93..0x95 #E0.6 [3] (❓..❕) question mark..white e...
|
||||
| 0xE2 0x9D 0x97 #E0.6 [1] (❗) exclamation mark
|
||||
| 0xE2 0x9D 0xA3 #E1.0 [1] (❣️) heart exclamation
|
||||
| 0xE2 0x9D 0xA4 #E0.6 [1] (❤️) red heart
|
||||
| 0xE2 0x9D 0xA5..0xA7 #E0.0 [3] (❥..❧) ROTATED HEAVY BLACK HE...
|
||||
| 0xE2 0x9E 0x95..0x97 #E0.6 [3] (➕..➗) plus..divide
|
||||
| 0xE2 0x9E 0xA1 #E0.6 [1] (➡️) right arrow
|
||||
| 0xE2 0x9E 0xB0 #E0.6 [1] (➰) curly loop
|
||||
| 0xE2 0x9E 0xBF #E1.0 [1] (➿) double curly loop
|
||||
| 0xE2 0xA4 0xB4..0xB5 #E0.6 [2] (⤴️..⤵️) right arrow curving ...
|
||||
| 0xE2 0xAC 0x85..0x87 #E0.6 [3] (⬅️..⬇️) left arrow..down arrow
|
||||
| 0xE2 0xAC 0x9B..0x9C #E0.6 [2] (⬛..⬜) black large square..wh...
|
||||
| 0xE2 0xAD 0x90 #E0.6 [1] (⭐) star
|
||||
| 0xE2 0xAD 0x95 #E0.6 [1] (⭕) hollow red circle
|
||||
| 0xE3 0x80 0xB0 #E0.6 [1] (〰️) wavy dash
|
||||
| 0xE3 0x80 0xBD #E0.6 [1] (〽️) part alternation mark
|
||||
| 0xE3 0x8A 0x97 #E0.6 [1] (㊗️) Japanese “congratulat...
|
||||
| 0xE3 0x8A 0x99 #E0.6 [1] (㊙️) Japanese “secret” button
|
||||
| 0xF0 0x9F 0x80 0x80..0x83 #E0.0 [4] (🀀..🀃) MAHJONG TILE EAST W...
|
||||
| 0xF0 0x9F 0x80 0x84 #E0.6 [1] (🀄) mahjong red dragon
|
||||
| 0xF0 0x9F 0x80 0x85..0xFF #E0.0 [202] (🀅..🃎) MAHJONG TILE ...
|
||||
| 0xF0 0x9F 0x81..0x82 0x00..0xFF #
|
||||
| 0xF0 0x9F 0x83 0x00..0x8E #
|
||||
| 0xF0 0x9F 0x83 0x8F #E0.6 [1] (🃏) joker
|
||||
| 0xF0 0x9F 0x83 0x90..0xBF #E0.0 [48] (..) <reserved-1F0D0>..<...
|
||||
| 0xF0 0x9F 0x84 0x8D..0x8F #E0.0 [3] (🄍..🄏) CIRCLED ZERO WITH S...
|
||||
| 0xF0 0x9F 0x84 0xAF #E0.0 [1] (🄯) COPYLEFT SYMBOL
|
||||
| 0xF0 0x9F 0x85 0xAC..0xAF #E0.0 [4] (🅬..🅯) RAISED MR SIGN..CIR...
|
||||
| 0xF0 0x9F 0x85 0xB0..0xB1 #E0.6 [2] (🅰️..🅱️) A button (blood t...
|
||||
| 0xF0 0x9F 0x85 0xBE..0xBF #E0.6 [2] (🅾️..🅿️) O button (blood t...
|
||||
| 0xF0 0x9F 0x86 0x8E #E0.6 [1] (🆎) AB button (blood type)
|
||||
| 0xF0 0x9F 0x86 0x91..0x9A #E0.6 [10] (🆑..🆚) CL button..VS button
|
||||
| 0xF0 0x9F 0x86 0xAD..0xFF #E0.0 [57] (🆭..) MASK WORK SYMBOL..<...
|
||||
| 0xF0 0x9F 0x87 0x00..0xA5 #
|
||||
| 0xF0 0x9F 0x88 0x81..0x82 #E0.6 [2] (🈁..🈂️) Japanese “here” bu...
|
||||
| 0xF0 0x9F 0x88 0x83..0x8F #E0.0 [13] (..) <reserved-1F203>..<...
|
||||
| 0xF0 0x9F 0x88 0x9A #E0.6 [1] (🈚) Japanese “free of char...
|
||||
| 0xF0 0x9F 0x88 0xAF #E0.6 [1] (🈯) Japanese “reserved” bu...
|
||||
| 0xF0 0x9F 0x88 0xB2..0xBA #E0.6 [9] (🈲..🈺) Japanese “prohibite...
|
||||
| 0xF0 0x9F 0x88 0xBC..0xBF #E0.0 [4] (..) <reserved-1F23C>..<...
|
||||
| 0xF0 0x9F 0x89 0x89..0x8F #E0.0 [7] (..) <reserved-1F249>..<...
|
||||
| 0xF0 0x9F 0x89 0x90..0x91 #E0.6 [2] (🉐..🉑) Japanese “bargain” ...
|
||||
| 0xF0 0x9F 0x89 0x92..0xFF #E0.0 [174] (..) <reserved-1F2...
|
||||
| 0xF0 0x9F 0x8A..0x8A 0x00..0xFF #
|
||||
| 0xF0 0x9F 0x8B 0x00..0xBF #
|
||||
| 0xF0 0x9F 0x8C 0x80..0x8C #E0.6 [13] (🌀..🌌) cyclone..milky way
|
||||
| 0xF0 0x9F 0x8C 0x8D..0x8E #E0.7 [2] (🌍..🌎) globe showing Europ...
|
||||
| 0xF0 0x9F 0x8C 0x8F #E0.6 [1] (🌏) globe showing Asia-Aus...
|
||||
| 0xF0 0x9F 0x8C 0x90 #E1.0 [1] (🌐) globe with meridians
|
||||
| 0xF0 0x9F 0x8C 0x91 #E0.6 [1] (🌑) new moon
|
||||
| 0xF0 0x9F 0x8C 0x92 #E1.0 [1] (🌒) waxing crescent moon
|
||||
| 0xF0 0x9F 0x8C 0x93..0x95 #E0.6 [3] (🌓..🌕) first quarter moon....
|
||||
| 0xF0 0x9F 0x8C 0x96..0x98 #E1.0 [3] (🌖..🌘) waning gibbous moon...
|
||||
| 0xF0 0x9F 0x8C 0x99 #E0.6 [1] (🌙) crescent moon
|
||||
| 0xF0 0x9F 0x8C 0x9A #E1.0 [1] (🌚) new moon face
|
||||
| 0xF0 0x9F 0x8C 0x9B #E0.6 [1] (🌛) first quarter moon face
|
||||
| 0xF0 0x9F 0x8C 0x9C #E0.7 [1] (🌜) last quarter moon face
|
||||
| 0xF0 0x9F 0x8C 0x9D..0x9E #E1.0 [2] (🌝..🌞) full moon face..sun...
|
||||
| 0xF0 0x9F 0x8C 0x9F..0xA0 #E0.6 [2] (🌟..🌠) glowing star..shoot...
|
||||
| 0xF0 0x9F 0x8C 0xA1 #E0.7 [1] (🌡️) thermometer
|
||||
| 0xF0 0x9F 0x8C 0xA2..0xA3 #E0.0 [2] (🌢..🌣) BLACK DROPLET..WHIT...
|
||||
| 0xF0 0x9F 0x8C 0xA4..0xAC #E0.7 [9] (🌤️..🌬️) sun behind small ...
|
||||
| 0xF0 0x9F 0x8C 0xAD..0xAF #E1.0 [3] (🌭..🌯) hot dog..burrito
|
||||
| 0xF0 0x9F 0x8C 0xB0..0xB1 #E0.6 [2] (🌰..🌱) chestnut..seedling
|
||||
| 0xF0 0x9F 0x8C 0xB2..0xB3 #E1.0 [2] (🌲..🌳) evergreen tree..dec...
|
||||
| 0xF0 0x9F 0x8C 0xB4..0xB5 #E0.6 [2] (🌴..🌵) palm tree..cactus
|
||||
| 0xF0 0x9F 0x8C 0xB6 #E0.7 [1] (🌶️) hot pepper
|
||||
| 0xF0 0x9F 0x8C 0xB7..0xFF #E0.6 [20] (🌷..🍊) tulip..tangerine
|
||||
| 0xF0 0x9F 0x8D 0x00..0x8A #
|
||||
| 0xF0 0x9F 0x8D 0x8B #E1.0 [1] (🍋) lemon
|
||||
| 0xF0 0x9F 0x8D 0x8C..0x8F #E0.6 [4] (🍌..🍏) banana..green apple
|
||||
| 0xF0 0x9F 0x8D 0x90 #E1.0 [1] (🍐) pear
|
||||
| 0xF0 0x9F 0x8D 0x91..0xBB #E0.6 [43] (🍑..🍻) peach..clinking bee...
|
||||
| 0xF0 0x9F 0x8D 0xBC #E1.0 [1] (🍼) baby bottle
|
||||
| 0xF0 0x9F 0x8D 0xBD #E0.7 [1] (🍽️) fork and knife with p...
|
||||
| 0xF0 0x9F 0x8D 0xBE..0xBF #E1.0 [2] (🍾..🍿) bottle with popping...
|
||||
| 0xF0 0x9F 0x8E 0x80..0x93 #E0.6 [20] (🎀..🎓) ribbon..graduation cap
|
||||
| 0xF0 0x9F 0x8E 0x94..0x95 #E0.0 [2] (🎔..🎕) HEART WITH TIP ON T...
|
||||
| 0xF0 0x9F 0x8E 0x96..0x97 #E0.7 [2] (🎖️..🎗️) military medal..r...
|
||||
| 0xF0 0x9F 0x8E 0x98 #E0.0 [1] (🎘) MUSICAL KEYBOARD WITH ...
|
||||
| 0xF0 0x9F 0x8E 0x99..0x9B #E0.7 [3] (🎙️..🎛️) studio microphone...
|
||||
| 0xF0 0x9F 0x8E 0x9C..0x9D #E0.0 [2] (🎜..🎝) BEAMED ASCENDING MU...
|
||||
| 0xF0 0x9F 0x8E 0x9E..0x9F #E0.7 [2] (🎞️..🎟️) film frames..admi...
|
||||
| 0xF0 0x9F 0x8E 0xA0..0xFF #E0.6 [37] (🎠..🏄) carousel horse..per...
|
||||
| 0xF0 0x9F 0x8F 0x00..0x84 #
|
||||
| 0xF0 0x9F 0x8F 0x85 #E1.0 [1] (🏅) sports medal
|
||||
| 0xF0 0x9F 0x8F 0x86 #E0.6 [1] (🏆) trophy
|
||||
| 0xF0 0x9F 0x8F 0x87 #E1.0 [1] (🏇) horse racing
|
||||
| 0xF0 0x9F 0x8F 0x88 #E0.6 [1] (🏈) american football
|
||||
| 0xF0 0x9F 0x8F 0x89 #E1.0 [1] (🏉) rugby football
|
||||
| 0xF0 0x9F 0x8F 0x8A #E0.6 [1] (🏊) person swimming
|
||||
| 0xF0 0x9F 0x8F 0x8B..0x8E #E0.7 [4] (🏋️..🏎️) person lifting we...
|
||||
| 0xF0 0x9F 0x8F 0x8F..0x93 #E1.0 [5] (🏏..🏓) cricket game..ping ...
|
||||
| 0xF0 0x9F 0x8F 0x94..0x9F #E0.7 [12] (🏔️..🏟️) snow-capped mount...
|
||||
| 0xF0 0x9F 0x8F 0xA0..0xA3 #E0.6 [4] (🏠..🏣) house..Japanese pos...
|
||||
| 0xF0 0x9F 0x8F 0xA4 #E1.0 [1] (🏤) post office
|
||||
| 0xF0 0x9F 0x8F 0xA5..0xB0 #E0.6 [12] (🏥..🏰) hospital..castle
|
||||
| 0xF0 0x9F 0x8F 0xB1..0xB2 #E0.0 [2] (🏱..🏲) WHITE PENNANT..BLAC...
|
||||
| 0xF0 0x9F 0x8F 0xB3 #E0.7 [1] (🏳️) white flag
|
||||
| 0xF0 0x9F 0x8F 0xB4 #E1.0 [1] (🏴) black flag
|
||||
| 0xF0 0x9F 0x8F 0xB5 #E0.7 [1] (🏵️) rosette
|
||||
| 0xF0 0x9F 0x8F 0xB6 #E0.0 [1] (🏶) BLACK ROSETTE
|
||||
| 0xF0 0x9F 0x8F 0xB7 #E0.7 [1] (🏷️) label
|
||||
| 0xF0 0x9F 0x8F 0xB8..0xBA #E1.0 [3] (🏸..🏺) badminton..amphora
|
||||
| 0xF0 0x9F 0x90 0x80..0x87 #E1.0 [8] (🐀..🐇) rat..rabbit
|
||||
| 0xF0 0x9F 0x90 0x88 #E0.7 [1] (🐈) cat
|
||||
| 0xF0 0x9F 0x90 0x89..0x8B #E1.0 [3] (🐉..🐋) dragon..whale
|
||||
| 0xF0 0x9F 0x90 0x8C..0x8E #E0.6 [3] (🐌..🐎) snail..horse
|
||||
| 0xF0 0x9F 0x90 0x8F..0x90 #E1.0 [2] (🐏..🐐) ram..goat
|
||||
| 0xF0 0x9F 0x90 0x91..0x92 #E0.6 [2] (🐑..🐒) ewe..monkey
|
||||
| 0xF0 0x9F 0x90 0x93 #E1.0 [1] (🐓) rooster
|
||||
| 0xF0 0x9F 0x90 0x94 #E0.6 [1] (🐔) chicken
|
||||
| 0xF0 0x9F 0x90 0x95 #E0.7 [1] (🐕) dog
|
||||
| 0xF0 0x9F 0x90 0x96 #E1.0 [1] (🐖) pig
|
||||
| 0xF0 0x9F 0x90 0x97..0xA9 #E0.6 [19] (🐗..🐩) boar..poodle
|
||||
| 0xF0 0x9F 0x90 0xAA #E1.0 [1] (🐪) camel
|
||||
| 0xF0 0x9F 0x90 0xAB..0xBE #E0.6 [20] (🐫..🐾) two-hump camel..paw...
|
||||
| 0xF0 0x9F 0x90 0xBF #E0.7 [1] (🐿️) chipmunk
|
||||
| 0xF0 0x9F 0x91 0x80 #E0.6 [1] (👀) eyes
|
||||
| 0xF0 0x9F 0x91 0x81 #E0.7 [1] (👁️) eye
|
||||
| 0xF0 0x9F 0x91 0x82..0xA4 #E0.6 [35] (👂..👤) ear..bust in silhou...
|
||||
| 0xF0 0x9F 0x91 0xA5 #E1.0 [1] (👥) busts in silhouette
|
||||
| 0xF0 0x9F 0x91 0xA6..0xAB #E0.6 [6] (👦..👫) boy..woman and man ...
|
||||
| 0xF0 0x9F 0x91 0xAC..0xAD #E1.0 [2] (👬..👭) men holding hands.....
|
||||
| 0xF0 0x9F 0x91 0xAE..0xFF #E0.6 [63] (👮..💬) police officer..spe...
|
||||
| 0xF0 0x9F 0x92 0x00..0xAC #
|
||||
| 0xF0 0x9F 0x92 0xAD #E1.0 [1] (💭) thought balloon
|
||||
| 0xF0 0x9F 0x92 0xAE..0xB5 #E0.6 [8] (💮..💵) white flower..dolla...
|
||||
| 0xF0 0x9F 0x92 0xB6..0xB7 #E1.0 [2] (💶..💷) euro banknote..poun...
|
||||
| 0xF0 0x9F 0x92 0xB8..0xFF #E0.6 [52] (💸..📫) money with wings..c...
|
||||
| 0xF0 0x9F 0x93 0x00..0xAB #
|
||||
| 0xF0 0x9F 0x93 0xAC..0xAD #E0.7 [2] (📬..📭) open mailbox with r...
|
||||
| 0xF0 0x9F 0x93 0xAE #E0.6 [1] (📮) postbox
|
||||
| 0xF0 0x9F 0x93 0xAF #E1.0 [1] (📯) postal horn
|
||||
| 0xF0 0x9F 0x93 0xB0..0xB4 #E0.6 [5] (📰..📴) newspaper..mobile p...
|
||||
| 0xF0 0x9F 0x93 0xB5 #E1.0 [1] (📵) no mobile phones
|
||||
| 0xF0 0x9F 0x93 0xB6..0xB7 #E0.6 [2] (📶..📷) antenna bars..camera
|
||||
| 0xF0 0x9F 0x93 0xB8 #E1.0 [1] (📸) camera with flash
|
||||
| 0xF0 0x9F 0x93 0xB9..0xBC #E0.6 [4] (📹..📼) video camera..video...
|
||||
| 0xF0 0x9F 0x93 0xBD #E0.7 [1] (📽️) film projector
|
||||
| 0xF0 0x9F 0x93 0xBE #E0.0 [1] (📾) PORTABLE STEREO
|
||||
| 0xF0 0x9F 0x93 0xBF..0xFF #E1.0 [4] (📿..🔂) prayer beads..repea...
|
||||
| 0xF0 0x9F 0x94 0x00..0x82 #
|
||||
| 0xF0 0x9F 0x94 0x83 #E0.6 [1] (🔃) clockwise vertical arrows
|
||||
| 0xF0 0x9F 0x94 0x84..0x87 #E1.0 [4] (🔄..🔇) counterclockwise ar...
|
||||
| 0xF0 0x9F 0x94 0x88 #E0.7 [1] (🔈) speaker low volume
|
||||
| 0xF0 0x9F 0x94 0x89 #E1.0 [1] (🔉) speaker medium volume
|
||||
| 0xF0 0x9F 0x94 0x8A..0x94 #E0.6 [11] (🔊..🔔) speaker high volume...
|
||||
| 0xF0 0x9F 0x94 0x95 #E1.0 [1] (🔕) bell with slash
|
||||
| 0xF0 0x9F 0x94 0x96..0xAB #E0.6 [22] (🔖..🔫) bookmark..pistol
|
||||
| 0xF0 0x9F 0x94 0xAC..0xAD #E1.0 [2] (🔬..🔭) microscope..telescope
|
||||
| 0xF0 0x9F 0x94 0xAE..0xBD #E0.6 [16] (🔮..🔽) crystal ball..downw...
|
||||
| 0xF0 0x9F 0x95 0x86..0x88 #E0.0 [3] (🕆..🕈) WHITE LATIN CROSS.....
|
||||
| 0xF0 0x9F 0x95 0x89..0x8A #E0.7 [2] (🕉️..🕊️) om..dove
|
||||
| 0xF0 0x9F 0x95 0x8B..0x8E #E1.0 [4] (🕋..🕎) kaaba..menorah
|
||||
| 0xF0 0x9F 0x95 0x8F #E0.0 [1] (🕏) BOWL OF HYGIEIA
|
||||
| 0xF0 0x9F 0x95 0x90..0x9B #E0.6 [12] (🕐..🕛) one o’clock..twelve...
|
||||
| 0xF0 0x9F 0x95 0x9C..0xA7 #E0.7 [12] (🕜..🕧) one-thirty..twelve-...
|
||||
| 0xF0 0x9F 0x95 0xA8..0xAE #E0.0 [7] (🕨..🕮) RIGHT SPEAKER..BOOK
|
||||
| 0xF0 0x9F 0x95 0xAF..0xB0 #E0.7 [2] (🕯️..🕰️) candle..mantelpie...
|
||||
| 0xF0 0x9F 0x95 0xB1..0xB2 #E0.0 [2] (🕱..🕲) BLACK SKULL AND CRO...
|
||||
| 0xF0 0x9F 0x95 0xB3..0xB9 #E0.7 [7] (🕳️..🕹️) hole..joystick
|
||||
| 0xF0 0x9F 0x95 0xBA #E3.0 [1] (🕺) man dancing
|
||||
| 0xF0 0x9F 0x95 0xBB..0xFF #E0.0 [12] (🕻..🖆) LEFT HAND TELEPHONE...
|
||||
| 0xF0 0x9F 0x96 0x00..0x86 #
|
||||
| 0xF0 0x9F 0x96 0x87 #E0.7 [1] (🖇️) linked paperclips
|
||||
| 0xF0 0x9F 0x96 0x88..0x89 #E0.0 [2] (🖈..🖉) BLACK PUSHPIN..LOWE...
|
||||
| 0xF0 0x9F 0x96 0x8A..0x8D #E0.7 [4] (🖊️..🖍️) pen..crayon
|
||||
| 0xF0 0x9F 0x96 0x8E..0x8F #E0.0 [2] (🖎..🖏) LEFT WRITING HAND.....
|
||||
| 0xF0 0x9F 0x96 0x90 #E0.7 [1] (🖐️) hand with fingers spl...
|
||||
| 0xF0 0x9F 0x96 0x91..0x94 #E0.0 [4] (🖑..🖔) REVERSED RAISED HAN...
|
||||
| 0xF0 0x9F 0x96 0x95..0x96 #E1.0 [2] (🖕..🖖) middle finger..vulc...
|
||||
| 0xF0 0x9F 0x96 0x97..0xA3 #E0.0 [13] (🖗..🖣) WHITE DOWN POINTING...
|
||||
| 0xF0 0x9F 0x96 0xA4 #E3.0 [1] (🖤) black heart
|
||||
| 0xF0 0x9F 0x96 0xA5 #E0.7 [1] (🖥️) desktop computer
|
||||
| 0xF0 0x9F 0x96 0xA6..0xA7 #E0.0 [2] (🖦..🖧) KEYBOARD AND MOUSE....
|
||||
| 0xF0 0x9F 0x96 0xA8 #E0.7 [1] (🖨️) printer
|
||||
| 0xF0 0x9F 0x96 0xA9..0xB0 #E0.0 [8] (🖩..🖰) POCKET CALCULATOR.....
|
||||
| 0xF0 0x9F 0x96 0xB1..0xB2 #E0.7 [2] (🖱️..🖲️) computer mouse..t...
|
||||
| 0xF0 0x9F 0x96 0xB3..0xBB #E0.0 [9] (🖳..🖻) OLD PERSONAL COMPUT...
|
||||
| 0xF0 0x9F 0x96 0xBC #E0.7 [1] (🖼️) framed picture
|
||||
| 0xF0 0x9F 0x96 0xBD..0xFF #E0.0 [5] (🖽..🗁) FRAME WITH TILES..O...
|
||||
| 0xF0 0x9F 0x97 0x00..0x81 #
|
||||
| 0xF0 0x9F 0x97 0x82..0x84 #E0.7 [3] (🗂️..🗄️) card index divide...
|
||||
| 0xF0 0x9F 0x97 0x85..0x90 #E0.0 [12] (🗅..🗐) EMPTY NOTE..PAGES
|
||||
| 0xF0 0x9F 0x97 0x91..0x93 #E0.7 [3] (🗑️..🗓️) wastebasket..spir...
|
||||
| 0xF0 0x9F 0x97 0x94..0x9B #E0.0 [8] (🗔..🗛) DESKTOP WINDOW..DEC...
|
||||
| 0xF0 0x9F 0x97 0x9C..0x9E #E0.7 [3] (🗜️..🗞️) clamp..rolled-up ...
|
||||
| 0xF0 0x9F 0x97 0x9F..0xA0 #E0.0 [2] (🗟..🗠) PAGE WITH CIRCLED T...
|
||||
| 0xF0 0x9F 0x97 0xA1 #E0.7 [1] (🗡️) dagger
|
||||
| 0xF0 0x9F 0x97 0xA2 #E0.0 [1] (🗢) LIPS
|
||||
| 0xF0 0x9F 0x97 0xA3 #E0.7 [1] (🗣️) speaking head
|
||||
| 0xF0 0x9F 0x97 0xA4..0xA7 #E0.0 [4] (🗤..🗧) THREE RAYS ABOVE..T...
|
||||
| 0xF0 0x9F 0x97 0xA8 #E2.0 [1] (🗨️) left speech bubble
|
||||
| 0xF0 0x9F 0x97 0xA9..0xAE #E0.0 [6] (🗩..🗮) RIGHT SPEECH BUBBLE...
|
||||
| 0xF0 0x9F 0x97 0xAF #E0.7 [1] (🗯️) right anger bubble
|
||||
| 0xF0 0x9F 0x97 0xB0..0xB2 #E0.0 [3] (🗰..🗲) MOOD BUBBLE..LIGHTN...
|
||||
| 0xF0 0x9F 0x97 0xB3 #E0.7 [1] (🗳️) ballot box with ballot
|
||||
| 0xF0 0x9F 0x97 0xB4..0xB9 #E0.0 [6] (🗴..🗹) BALLOT SCRIPT X..BA...
|
||||
| 0xF0 0x9F 0x97 0xBA #E0.7 [1] (🗺️) world map
|
||||
| 0xF0 0x9F 0x97 0xBB..0xBF #E0.6 [5] (🗻..🗿) mount fuji..moai
|
||||
| 0xF0 0x9F 0x98 0x80 #E1.0 [1] (😀) grinning face
|
||||
| 0xF0 0x9F 0x98 0x81..0x86 #E0.6 [6] (😁..😆) beaming face with s...
|
||||
| 0xF0 0x9F 0x98 0x87..0x88 #E1.0 [2] (😇..😈) smiling face with h...
|
||||
| 0xF0 0x9F 0x98 0x89..0x8D #E0.6 [5] (😉..😍) winking face..smili...
|
||||
| 0xF0 0x9F 0x98 0x8E #E1.0 [1] (😎) smiling face with sung...
|
||||
| 0xF0 0x9F 0x98 0x8F #E0.6 [1] (😏) smirking face
|
||||
| 0xF0 0x9F 0x98 0x90 #E0.7 [1] (😐) neutral face
|
||||
| 0xF0 0x9F 0x98 0x91 #E1.0 [1] (😑) expressionless face
|
||||
| 0xF0 0x9F 0x98 0x92..0x94 #E0.6 [3] (😒..😔) unamused face..pens...
|
||||
| 0xF0 0x9F 0x98 0x95 #E1.0 [1] (😕) confused face
|
||||
| 0xF0 0x9F 0x98 0x96 #E0.6 [1] (😖) confounded face
|
||||
| 0xF0 0x9F 0x98 0x97 #E1.0 [1] (😗) kissing face
|
||||
| 0xF0 0x9F 0x98 0x98 #E0.6 [1] (😘) face blowing a kiss
|
||||
| 0xF0 0x9F 0x98 0x99 #E1.0 [1] (😙) kissing face with smil...
|
||||
| 0xF0 0x9F 0x98 0x9A #E0.6 [1] (😚) kissing face with clos...
|
||||
| 0xF0 0x9F 0x98 0x9B #E1.0 [1] (😛) face with tongue
|
||||
| 0xF0 0x9F 0x98 0x9C..0x9E #E0.6 [3] (😜..😞) winking face with t...
|
||||
| 0xF0 0x9F 0x98 0x9F #E1.0 [1] (😟) worried face
|
||||
| 0xF0 0x9F 0x98 0xA0..0xA5 #E0.6 [6] (😠..😥) angry face..sad but...
|
||||
| 0xF0 0x9F 0x98 0xA6..0xA7 #E1.0 [2] (😦..😧) frowning face with ...
|
||||
| 0xF0 0x9F 0x98 0xA8..0xAB #E0.6 [4] (😨..😫) fearful face..tired...
|
||||
| 0xF0 0x9F 0x98 0xAC #E1.0 [1] (😬) grimacing face
|
||||
| 0xF0 0x9F 0x98 0xAD #E0.6 [1] (😭) loudly crying face
|
||||
| 0xF0 0x9F 0x98 0xAE..0xAF #E1.0 [2] (😮..😯) face with open mout...
|
||||
| 0xF0 0x9F 0x98 0xB0..0xB3 #E0.6 [4] (😰..😳) anxious face with s...
|
||||
| 0xF0 0x9F 0x98 0xB4 #E1.0 [1] (😴) sleeping face
|
||||
| 0xF0 0x9F 0x98 0xB5 #E0.6 [1] (😵) dizzy face
|
||||
| 0xF0 0x9F 0x98 0xB6 #E1.0 [1] (😶) face without mouth
|
||||
| 0xF0 0x9F 0x98 0xB7..0xFF #E0.6 [10] (😷..🙀) face with medical m...
|
||||
| 0xF0 0x9F 0x99 0x00..0x80 #
|
||||
| 0xF0 0x9F 0x99 0x81..0x84 #E1.0 [4] (🙁..🙄) slightly frowning f...
|
||||
| 0xF0 0x9F 0x99 0x85..0x8F #E0.6 [11] (🙅..🙏) person gesturing NO...
|
||||
| 0xF0 0x9F 0x9A 0x80 #E0.6 [1] (🚀) rocket
|
||||
| 0xF0 0x9F 0x9A 0x81..0x82 #E1.0 [2] (🚁..🚂) helicopter..locomotive
|
||||
| 0xF0 0x9F 0x9A 0x83..0x85 #E0.6 [3] (🚃..🚅) railway car..bullet...
|
||||
| 0xF0 0x9F 0x9A 0x86 #E1.0 [1] (🚆) train
|
||||
| 0xF0 0x9F 0x9A 0x87 #E0.6 [1] (🚇) metro
|
||||
| 0xF0 0x9F 0x9A 0x88 #E1.0 [1] (🚈) light rail
|
||||
| 0xF0 0x9F 0x9A 0x89 #E0.6 [1] (🚉) station
|
||||
| 0xF0 0x9F 0x9A 0x8A..0x8B #E1.0 [2] (🚊..🚋) tram..tram car
|
||||
| 0xF0 0x9F 0x9A 0x8C #E0.6 [1] (🚌) bus
|
||||
| 0xF0 0x9F 0x9A 0x8D #E0.7 [1] (🚍) oncoming bus
|
||||
| 0xF0 0x9F 0x9A 0x8E #E1.0 [1] (🚎) trolleybus
|
||||
| 0xF0 0x9F 0x9A 0x8F #E0.6 [1] (🚏) bus stop
|
||||
| 0xF0 0x9F 0x9A 0x90 #E1.0 [1] (🚐) minibus
|
||||
| 0xF0 0x9F 0x9A 0x91..0x93 #E0.6 [3] (🚑..🚓) ambulance..police car
|
||||
| 0xF0 0x9F 0x9A 0x94 #E0.7 [1] (🚔) oncoming police car
|
||||
| 0xF0 0x9F 0x9A 0x95 #E0.6 [1] (🚕) taxi
|
||||
| 0xF0 0x9F 0x9A 0x96 #E1.0 [1] (🚖) oncoming taxi
|
||||
| 0xF0 0x9F 0x9A 0x97 #E0.6 [1] (🚗) automobile
|
||||
| 0xF0 0x9F 0x9A 0x98 #E0.7 [1] (🚘) oncoming automobile
|
||||
| 0xF0 0x9F 0x9A 0x99..0x9A #E0.6 [2] (🚙..🚚) sport utility vehic...
|
||||
| 0xF0 0x9F 0x9A 0x9B..0xA1 #E1.0 [7] (🚛..🚡) articulated lorry.....
|
||||
| 0xF0 0x9F 0x9A 0xA2 #E0.6 [1] (🚢) ship
|
||||
| 0xF0 0x9F 0x9A 0xA3 #E1.0 [1] (🚣) person rowing boat
|
||||
| 0xF0 0x9F 0x9A 0xA4..0xA5 #E0.6 [2] (🚤..🚥) speedboat..horizont...
|
||||
| 0xF0 0x9F 0x9A 0xA6 #E1.0 [1] (🚦) vertical traffic light
|
||||
| 0xF0 0x9F 0x9A 0xA7..0xAD #E0.6 [7] (🚧..🚭) construction..no sm...
|
||||
| 0xF0 0x9F 0x9A 0xAE..0xB1 #E1.0 [4] (🚮..🚱) litter in bin sign....
|
||||
| 0xF0 0x9F 0x9A 0xB2 #E0.6 [1] (🚲) bicycle
|
||||
| 0xF0 0x9F 0x9A 0xB3..0xB5 #E1.0 [3] (🚳..🚵) no bicycles..person...
|
||||
| 0xF0 0x9F 0x9A 0xB6 #E0.6 [1] (🚶) person walking
|
||||
| 0xF0 0x9F 0x9A 0xB7..0xB8 #E1.0 [2] (🚷..🚸) no pedestrians..chi...
|
||||
| 0xF0 0x9F 0x9A 0xB9..0xBE #E0.6 [6] (🚹..🚾) men’s room..water c...
|
||||
| 0xF0 0x9F 0x9A 0xBF #E1.0 [1] (🚿) shower
|
||||
| 0xF0 0x9F 0x9B 0x80 #E0.6 [1] (🛀) person taking bath
|
||||
| 0xF0 0x9F 0x9B 0x81..0x85 #E1.0 [5] (🛁..🛅) bathtub..left luggage
|
||||
| 0xF0 0x9F 0x9B 0x86..0x8A #E0.0 [5] (🛆..🛊) TRIANGLE WITH ROUND...
|
||||
| 0xF0 0x9F 0x9B 0x8B #E0.7 [1] (🛋️) couch and lamp
|
||||
| 0xF0 0x9F 0x9B 0x8C #E1.0 [1] (🛌) person in bed
|
||||
| 0xF0 0x9F 0x9B 0x8D..0x8F #E0.7 [3] (🛍️..🛏️) shopping bags..bed
|
||||
| 0xF0 0x9F 0x9B 0x90 #E1.0 [1] (🛐) place of worship
|
||||
| 0xF0 0x9F 0x9B 0x91..0x92 #E3.0 [2] (🛑..🛒) stop sign..shopping...
|
||||
| 0xF0 0x9F 0x9B 0x93..0x94 #E0.0 [2] (🛓..🛔) STUPA..PAGODA
|
||||
| 0xF0 0x9F 0x9B 0x95 #E12.0 [1] (🛕) hindu temple
|
||||
| 0xF0 0x9F 0x9B 0x96..0x97 #E13.0 [2] (🛖..🛗) hut..elevator
|
||||
| 0xF0 0x9F 0x9B 0x98..0x9F #E0.0 [8] (..🛟) <reserved-1F6D8>..<...
|
||||
| 0xF0 0x9F 0x9B 0xA0..0xA5 #E0.7 [6] (🛠️..🛥️) hammer and wrench...
|
||||
| 0xF0 0x9F 0x9B 0xA6..0xA8 #E0.0 [3] (🛦..🛨) UP-POINTING MILITAR...
|
||||
| 0xF0 0x9F 0x9B 0xA9 #E0.7 [1] (🛩️) small airplane
|
||||
| 0xF0 0x9F 0x9B 0xAA #E0.0 [1] (🛪) NORTHEAST-POINTING AIR...
|
||||
| 0xF0 0x9F 0x9B 0xAB..0xAC #E1.0 [2] (🛫..🛬) airplane departure....
|
||||
| 0xF0 0x9F 0x9B 0xAD..0xAF #E0.0 [3] (..) <reserved-1F6ED>..<...
|
||||
| 0xF0 0x9F 0x9B 0xB0 #E0.7 [1] (🛰️) satellite
|
||||
| 0xF0 0x9F 0x9B 0xB1..0xB2 #E0.0 [2] (🛱..🛲) ONCOMING FIRE ENGIN...
|
||||
| 0xF0 0x9F 0x9B 0xB3 #E0.7 [1] (🛳️) passenger ship
|
||||
| 0xF0 0x9F 0x9B 0xB4..0xB6 #E3.0 [3] (🛴..🛶) kick scooter..canoe
|
||||
| 0xF0 0x9F 0x9B 0xB7..0xB8 #E5.0 [2] (🛷..🛸) sled..flying saucer
|
||||
| 0xF0 0x9F 0x9B 0xB9 #E11.0 [1] (🛹) skateboard
|
||||
| 0xF0 0x9F 0x9B 0xBA #E12.0 [1] (🛺) auto rickshaw
|
||||
| 0xF0 0x9F 0x9B 0xBB..0xBC #E13.0 [2] (🛻..🛼) pickup truck..rolle...
|
||||
| 0xF0 0x9F 0x9B 0xBD..0xBF #E0.0 [3] (..) <reserved-1F6FD>..<...
|
||||
| 0xF0 0x9F 0x9D 0xB4..0xBF #E0.0 [12] (🝴..🝿) <reserved-1F774>..<...
|
||||
| 0xF0 0x9F 0x9F 0x95..0x9F #E0.0 [11] (🟕..) CIRCLED TRIANGLE..<...
|
||||
| 0xF0 0x9F 0x9F 0xA0..0xAB #E12.0 [12] (🟠..🟫) orange circle..brow...
|
||||
| 0xF0 0x9F 0x9F 0xAC..0xBF #E0.0 [20] (..) <reserved-1F7EC>..<...
|
||||
| 0xF0 0x9F 0xA0 0x8C..0x8F #E0.0 [4] (..) <reserved-1F80C>..<...
|
||||
| 0xF0 0x9F 0xA1 0x88..0x8F #E0.0 [8] (..) <reserved-1F848>..<...
|
||||
| 0xF0 0x9F 0xA1 0x9A..0x9F #E0.0 [6] (..) <reserved-1F85A>..<...
|
||||
| 0xF0 0x9F 0xA2 0x88..0x8F #E0.0 [8] (..) <reserved-1F888>..<...
|
||||
| 0xF0 0x9F 0xA2 0xAE..0xFF #E0.0 [82] (..) <reserved-1F8AE>..<...
|
||||
| 0xF0 0x9F 0xA3 0x00..0xBF #
|
||||
| 0xF0 0x9F 0xA4 0x8C #E13.0 [1] (🤌) pinched fingers
|
||||
| 0xF0 0x9F 0xA4 0x8D..0x8F #E12.0 [3] (🤍..🤏) white heart..pinchi...
|
||||
| 0xF0 0x9F 0xA4 0x90..0x98 #E1.0 [9] (🤐..🤘) zipper-mouth face.....
|
||||
| 0xF0 0x9F 0xA4 0x99..0x9E #E3.0 [6] (🤙..🤞) call me hand..cross...
|
||||
| 0xF0 0x9F 0xA4 0x9F #E5.0 [1] (🤟) love-you gesture
|
||||
| 0xF0 0x9F 0xA4 0xA0..0xA7 #E3.0 [8] (🤠..🤧) cowboy hat face..sn...
|
||||
| 0xF0 0x9F 0xA4 0xA8..0xAF #E5.0 [8] (🤨..🤯) face with raised ey...
|
||||
| 0xF0 0x9F 0xA4 0xB0 #E3.0 [1] (🤰) pregnant woman
|
||||
| 0xF0 0x9F 0xA4 0xB1..0xB2 #E5.0 [2] (🤱..🤲) breast-feeding..pal...
|
||||
| 0xF0 0x9F 0xA4 0xB3..0xBA #E3.0 [8] (🤳..🤺) selfie..person fencing
|
||||
| 0xF0 0x9F 0xA4 0xBC..0xBE #E3.0 [3] (🤼..🤾) people wrestling..p...
|
||||
| 0xF0 0x9F 0xA4 0xBF #E12.0 [1] (🤿) diving mask
|
||||
| 0xF0 0x9F 0xA5 0x80..0x85 #E3.0 [6] (🥀..🥅) wilted flower..goal...
|
||||
| 0xF0 0x9F 0xA5 0x87..0x8B #E3.0 [5] (🥇..🥋) 1st place medal..ma...
|
||||
| 0xF0 0x9F 0xA5 0x8C #E5.0 [1] (🥌) curling stone
|
||||
| 0xF0 0x9F 0xA5 0x8D..0x8F #E11.0 [3] (🥍..🥏) lacrosse..flying disc
|
||||
| 0xF0 0x9F 0xA5 0x90..0x9E #E3.0 [15] (🥐..🥞) croissant..pancakes
|
||||
| 0xF0 0x9F 0xA5 0x9F..0xAB #E5.0 [13] (🥟..🥫) dumpling..canned food
|
||||
| 0xF0 0x9F 0xA5 0xAC..0xB0 #E11.0 [5] (🥬..🥰) leafy green..smilin...
|
||||
| 0xF0 0x9F 0xA5 0xB1 #E12.0 [1] (🥱) yawning face
|
||||
| 0xF0 0x9F 0xA5 0xB2 #E13.0 [1] (🥲) smiling face with tear
|
||||
| 0xF0 0x9F 0xA5 0xB3..0xB6 #E11.0 [4] (🥳..🥶) partying face..cold...
|
||||
| 0xF0 0x9F 0xA5 0xB7..0xB8 #E13.0 [2] (🥷..🥸) ninja..disguised face
|
||||
| 0xF0 0x9F 0xA5 0xB9 #E0.0 [1] (🥹) <reserved-1F979>
|
||||
| 0xF0 0x9F 0xA5 0xBA #E11.0 [1] (🥺) pleading face
|
||||
| 0xF0 0x9F 0xA5 0xBB #E12.0 [1] (🥻) sari
|
||||
| 0xF0 0x9F 0xA5 0xBC..0xBF #E11.0 [4] (🥼..🥿) lab coat..flat shoe
|
||||
| 0xF0 0x9F 0xA6 0x80..0x84 #E1.0 [5] (🦀..🦄) crab..unicorn
|
||||
| 0xF0 0x9F 0xA6 0x85..0x91 #E3.0 [13] (🦅..🦑) eagle..squid
|
||||
| 0xF0 0x9F 0xA6 0x92..0x97 #E5.0 [6] (🦒..🦗) giraffe..cricket
|
||||
| 0xF0 0x9F 0xA6 0x98..0xA2 #E11.0 [11] (🦘..🦢) kangaroo..swan
|
||||
| 0xF0 0x9F 0xA6 0xA3..0xA4 #E13.0 [2] (🦣..🦤) mammoth..dodo
|
||||
| 0xF0 0x9F 0xA6 0xA5..0xAA #E12.0 [6] (🦥..🦪) sloth..oyster
|
||||
| 0xF0 0x9F 0xA6 0xAB..0xAD #E13.0 [3] (🦫..🦭) beaver..seal
|
||||
| 0xF0 0x9F 0xA6 0xAE..0xAF #E12.0 [2] (🦮..🦯) guide dog..white cane
|
||||
| 0xF0 0x9F 0xA6 0xB0..0xB9 #E11.0 [10] (🦰..🦹) red hair..supervillain
|
||||
| 0xF0 0x9F 0xA6 0xBA..0xBF #E12.0 [6] (🦺..🦿) safety vest..mechan...
|
||||
| 0xF0 0x9F 0xA7 0x80 #E1.0 [1] (🧀) cheese wedge
|
||||
| 0xF0 0x9F 0xA7 0x81..0x82 #E11.0 [2] (🧁..🧂) cupcake..salt
|
||||
| 0xF0 0x9F 0xA7 0x83..0x8A #E12.0 [8] (🧃..🧊) beverage box..ice
|
||||
| 0xF0 0x9F 0xA7 0x8B #E13.0 [1] (🧋) bubble tea
|
||||
| 0xF0 0x9F 0xA7 0x8C #E0.0 [1] (🧌) <reserved-1F9CC>
|
||||
| 0xF0 0x9F 0xA7 0x8D..0x8F #E12.0 [3] (🧍..🧏) person standing..de...
|
||||
| 0xF0 0x9F 0xA7 0x90..0xA6 #E5.0 [23] (🧐..🧦) face with monocle.....
|
||||
| 0xF0 0x9F 0xA7 0xA7..0xBF #E11.0 [25] (🧧..🧿) red envelope..nazar...
|
||||
| 0xF0 0x9F 0xA8 0x80..0xFF #E0.0 [112] (🨀..) NEUTRAL CHESS KING....
|
||||
| 0xF0 0x9F 0xA9 0x00..0xAF #
|
||||
| 0xF0 0x9F 0xA9 0xB0..0xB3 #E12.0 [4] (🩰..🩳) ballet shoes..shorts
|
||||
| 0xF0 0x9F 0xA9 0xB4 #E13.0 [1] (🩴) thong sandal
|
||||
| 0xF0 0x9F 0xA9 0xB5..0xB7 #E0.0 [3] (🩵..🩷) <reserved-1FA75>..<...
|
||||
| 0xF0 0x9F 0xA9 0xB8..0xBA #E12.0 [3] (🩸..🩺) drop of blood..stet...
|
||||
| 0xF0 0x9F 0xA9 0xBB..0xBF #E0.0 [5] (🩻..) <reserved-1FA7B>..<...
|
||||
| 0xF0 0x9F 0xAA 0x80..0x82 #E12.0 [3] (🪀..🪂) yo-yo..parachute
|
||||
| 0xF0 0x9F 0xAA 0x83..0x86 #E13.0 [4] (🪃..🪆) boomerang..nesting ...
|
||||
| 0xF0 0x9F 0xAA 0x87..0x8F #E0.0 [9] (🪇..) <reserved-1FA87>..<...
|
||||
| 0xF0 0x9F 0xAA 0x90..0x95 #E12.0 [6] (🪐..🪕) ringed planet..banjo
|
||||
| 0xF0 0x9F 0xAA 0x96..0xA8 #E13.0 [19] (🪖..🪨) military helmet..rock
|
||||
| 0xF0 0x9F 0xAA 0xA9..0xAF #E0.0 [7] (🪩..🪯) <reserved-1FAA9>..<...
|
||||
| 0xF0 0x9F 0xAA 0xB0..0xB6 #E13.0 [7] (🪰..🪶) fly..feather
|
||||
| 0xF0 0x9F 0xAA 0xB7..0xBF #E0.0 [9] (🪷..🪿) <reserved-1FAB7>..<...
|
||||
| 0xF0 0x9F 0xAB 0x80..0x82 #E13.0 [3] (🫀..🫂) anatomical heart..p...
|
||||
| 0xF0 0x9F 0xAB 0x83..0x8F #E0.0 [13] (🫃..🫏) <reserved-1FAC3>..<...
|
||||
| 0xF0 0x9F 0xAB 0x90..0x96 #E13.0 [7] (🫐..🫖) blueberries..teapot
|
||||
| 0xF0 0x9F 0xAB 0x97..0xBF #E0.0 [41] (🫗..) <reserved-1FAD7>..<...
|
||||
| 0xF0 0x9F 0xB0 0x80..0xFF #E0.0[1022] (..) <reserved-1FC...
|
||||
| 0xF0 0x9F 0xB1..0xBE 0x00..0xFF #
|
||||
| 0xF0 0x9F 0xBF 0x00..0xBD #
|
||||
;
|
||||
|
||||
}%%
|
||||
8
vendor/github.com/apparentlymart/go-textseg/v13/textseg/generate.go
generated
vendored
8
vendor/github.com/apparentlymart/go-textseg/v13/textseg/generate.go
generated
vendored
@@ -1,8 +0,0 @@
|
||||
package textseg
|
||||
|
||||
//go:generate go run make_tables.go -output tables.go
|
||||
//go:generate go run make_test_tables.go -output tables_test.go
|
||||
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/13.0.0/ucd/auxiliary/GraphemeBreakProperty.txt -m GraphemeCluster -p "Prepend,CR,LF,Control,Extend,Regional_Indicator,SpacingMark,L,V,T,LV,LVT,ZWJ" -o grapheme_clusters_table.rl
|
||||
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/13.0.0/ucd/emoji/emoji-data.txt -m Emoji -p "Extended_Pictographic" -o emoji_table.rl
|
||||
//go:generate ragel -Z grapheme_clusters.rl
|
||||
//go:generate gofmt -w grapheme_clusters.go
|
||||
4138
vendor/github.com/apparentlymart/go-textseg/v13/textseg/grapheme_clusters.go
generated
vendored
4138
vendor/github.com/apparentlymart/go-textseg/v13/textseg/grapheme_clusters.go
generated
vendored
File diff suppressed because it is too large
Load Diff
133
vendor/github.com/apparentlymart/go-textseg/v13/textseg/grapheme_clusters.rl
generated
vendored
133
vendor/github.com/apparentlymart/go-textseg/v13/textseg/grapheme_clusters.rl
generated
vendored
@@ -1,133 +0,0 @@
|
||||
package textseg
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// Generated from grapheme_clusters.rl. DO NOT EDIT
|
||||
%%{
|
||||
# (except you are actually in grapheme_clusters.rl here, so edit away!)
|
||||
|
||||
machine graphclust;
|
||||
write data;
|
||||
}%%
|
||||
|
||||
var Error = errors.New("invalid UTF8 text")
|
||||
|
||||
// ScanGraphemeClusters is a split function for bufio.Scanner that splits
|
||||
// on grapheme cluster boundaries.
|
||||
func ScanGraphemeClusters(data []byte, atEOF bool) (int, []byte, error) {
|
||||
if len(data) == 0 {
|
||||
return 0, nil, nil
|
||||
}
|
||||
|
||||
// Ragel state
|
||||
cs := 0 // Current State
|
||||
p := 0 // "Pointer" into data
|
||||
pe := len(data) // End-of-data "pointer"
|
||||
ts := 0
|
||||
te := 0
|
||||
act := 0
|
||||
eof := pe
|
||||
|
||||
// Make Go compiler happy
|
||||
_ = ts
|
||||
_ = te
|
||||
_ = act
|
||||
_ = eof
|
||||
|
||||
startPos := 0
|
||||
endPos := 0
|
||||
|
||||
%%{
|
||||
include GraphemeCluster "grapheme_clusters_table.rl";
|
||||
include Emoji "emoji_table.rl";
|
||||
|
||||
action start {
|
||||
startPos = p
|
||||
}
|
||||
|
||||
action end {
|
||||
endPos = p
|
||||
}
|
||||
|
||||
action emit {
|
||||
return endPos+1, data[startPos:endPos+1], nil
|
||||
}
|
||||
|
||||
ZWJGlue = ZWJ (Extended_Pictographic Extend*)?;
|
||||
AnyExtender = Extend | ZWJGlue | SpacingMark;
|
||||
Extension = AnyExtender*;
|
||||
ReplacementChar = (0xEF 0xBF 0xBD);
|
||||
|
||||
CRLFSeq = CR LF;
|
||||
ControlSeq = Control | ReplacementChar;
|
||||
HangulSeq = (
|
||||
L+ (((LV? V+ | LVT) T*)?|LV?) |
|
||||
LV V* T* |
|
||||
V+ T* |
|
||||
LVT T* |
|
||||
T+
|
||||
) Extension;
|
||||
EmojiSeq = Extended_Pictographic Extend* Extension;
|
||||
ZWJSeq = ZWJ (ZWJ | Extend | SpacingMark)*;
|
||||
EmojiFlagSeq = Regional_Indicator Regional_Indicator? Extension;
|
||||
|
||||
UTF8Cont = 0x80 .. 0xBF;
|
||||
AnyUTF8 = (
|
||||
0x00..0x7F |
|
||||
0xC0..0xDF . UTF8Cont |
|
||||
0xE0..0xEF . UTF8Cont . UTF8Cont |
|
||||
0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
|
||||
);
|
||||
|
||||
# OtherSeq is any character that isn't at the start of one of the extended sequences above, followed by extension
|
||||
OtherSeq = (AnyUTF8 - (CR|LF|Control|ReplacementChar|L|LV|V|LVT|T|Extended_Pictographic|ZWJ|Regional_Indicator|Prepend)) (Extend | ZWJ | SpacingMark)*;
|
||||
|
||||
# PrependSeq is prepend followed by any of the other patterns above, except control characters which explicitly break
|
||||
PrependSeq = Prepend+ (HangulSeq|EmojiSeq|ZWJSeq|EmojiFlagSeq|OtherSeq)?;
|
||||
|
||||
CRLFTok = CRLFSeq >start @end;
|
||||
ControlTok = ControlSeq >start @end;
|
||||
HangulTok = HangulSeq >start @end;
|
||||
EmojiTok = EmojiSeq >start @end;
|
||||
ZWJTok = ZWJSeq >start @end;
|
||||
EmojiFlagTok = EmojiFlagSeq >start @end;
|
||||
OtherTok = OtherSeq >start @end;
|
||||
PrependTok = PrependSeq >start @end;
|
||||
|
||||
main := |*
|
||||
CRLFTok => emit;
|
||||
ControlTok => emit;
|
||||
HangulTok => emit;
|
||||
EmojiTok => emit;
|
||||
ZWJTok => emit;
|
||||
EmojiFlagTok => emit;
|
||||
PrependTok => emit;
|
||||
OtherTok => emit;
|
||||
|
||||
# any single valid UTF-8 character would also be valid per spec,
|
||||
# but we'll handle that separately after the loop so we can deal
|
||||
# with requesting more bytes if we're not at EOF.
|
||||
*|;
|
||||
|
||||
write init;
|
||||
write exec;
|
||||
}%%
|
||||
|
||||
// If we fall out here then we were unable to complete a sequence.
|
||||
// If we weren't able to complete a sequence then either we've
|
||||
// reached the end of a partial buffer (so there's more data to come)
|
||||
// or we have an isolated symbol that would normally be part of a
|
||||
// grapheme cluster but has appeared in isolation here.
|
||||
|
||||
if !atEOF {
|
||||
// Request more
|
||||
return 0, nil, nil
|
||||
}
|
||||
|
||||
// Just take the first UTF-8 sequence and return that.
|
||||
_, seqLen := utf8.DecodeRune(data)
|
||||
return seqLen, data[:seqLen], nil
|
||||
}
|
||||
1609
vendor/github.com/apparentlymart/go-textseg/v13/textseg/grapheme_clusters_table.rl
generated
vendored
1609
vendor/github.com/apparentlymart/go-textseg/v13/textseg/grapheme_clusters_table.rl
generated
vendored
File diff suppressed because it is too large
Load Diff
5833
vendor/github.com/apparentlymart/go-textseg/v13/textseg/tables.go
generated
vendored
5833
vendor/github.com/apparentlymart/go-textseg/v13/textseg/tables.go
generated
vendored
File diff suppressed because it is too large
Load Diff
335
vendor/github.com/apparentlymart/go-textseg/v13/textseg/unicode2ragel.rb
generated
vendored
335
vendor/github.com/apparentlymart/go-textseg/v13/textseg/unicode2ragel.rb
generated
vendored
@@ -1,335 +0,0 @@
|
||||
#!/usr/bin/env ruby
|
||||
#
|
||||
# This scripted has been updated to accept more command-line arguments:
|
||||
#
|
||||
# -u, --url URL to process
|
||||
# -m, --machine Machine name
|
||||
# -p, --properties Properties to add to the machine
|
||||
# -o, --output Write output to file
|
||||
#
|
||||
# Updated by: Marty Schoch <marty.schoch@gmail.com>
|
||||
#
|
||||
# This script uses the unicode spec to generate a Ragel state machine
|
||||
# that recognizes unicode alphanumeric characters. It generates 5
|
||||
# character classes: uupper, ulower, ualpha, udigit, and ualnum.
|
||||
# Currently supported encodings are UTF-8 [default] and UCS-4.
|
||||
#
|
||||
# Usage: unicode2ragel.rb [options]
|
||||
# -e, --encoding [ucs4 | utf8] Data encoding
|
||||
# -h, --help Show this message
|
||||
#
|
||||
# This script was originally written as part of the Ferret search
|
||||
# engine library.
|
||||
#
|
||||
# Author: Rakan El-Khalil <rakan@well.com>
|
||||
|
||||
require 'optparse'
|
||||
require 'open-uri'
|
||||
|
||||
ENCODINGS = [ :utf8, :ucs4 ]
|
||||
ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" }
|
||||
DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt"
|
||||
DEFAULT_MACHINE_NAME= "WChar"
|
||||
|
||||
###
|
||||
# Display vars & default option
|
||||
|
||||
TOTAL_WIDTH = 80
|
||||
RANGE_WIDTH = 23
|
||||
@encoding = :utf8
|
||||
@chart_url = DEFAULT_CHART_URL
|
||||
machine_name = DEFAULT_MACHINE_NAME
|
||||
properties = []
|
||||
@output = $stdout
|
||||
|
||||
###
|
||||
# Option parsing
|
||||
|
||||
cli_opts = OptionParser.new do |opts|
|
||||
opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o|
|
||||
@encoding = o.downcase.to_sym
|
||||
end
|
||||
opts.on("-h", "--help", "Show this message") do
|
||||
puts opts
|
||||
exit
|
||||
end
|
||||
opts.on("-u", "--url URL", "URL to process") do |o|
|
||||
@chart_url = o
|
||||
end
|
||||
opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o|
|
||||
machine_name = o
|
||||
end
|
||||
opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o|
|
||||
properties = o
|
||||
end
|
||||
opts.on("-o", "--output FILE", "output file") do |o|
|
||||
@output = File.new(o, "w+")
|
||||
end
|
||||
end
|
||||
|
||||
cli_opts.parse(ARGV)
|
||||
unless ENCODINGS.member? @encoding
|
||||
puts "Invalid encoding: #{@encoding}"
|
||||
puts cli_opts
|
||||
exit
|
||||
end
|
||||
|
||||
##
|
||||
# Downloads the document at url and yields every alpha line's hex
|
||||
# range and description.
|
||||
|
||||
def each_alpha( url, property )
|
||||
URI.open( url ) do |file|
|
||||
file.each_line do |line|
|
||||
next if line =~ /^#/;
|
||||
next if line !~ /; #{property} *#/;
|
||||
|
||||
range, description = line.split(/;/)
|
||||
range.strip!
|
||||
description.gsub!(/.*#/, '').strip!
|
||||
|
||||
if range =~ /\.\./
|
||||
start, stop = range.split '..'
|
||||
else start = stop = range
|
||||
end
|
||||
|
||||
yield start.hex .. stop.hex, description
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
###
|
||||
# Formats to hex at minimum width
|
||||
|
||||
def to_hex( n )
|
||||
r = "%0X" % n
|
||||
r = "0#{r}" unless (r.length % 2).zero?
|
||||
r
|
||||
end
|
||||
|
||||
###
|
||||
# UCS4 is just a straight hex conversion of the unicode codepoint.
|
||||
|
||||
def to_ucs4( range )
|
||||
rangestr = "0x" + to_hex(range.begin)
|
||||
rangestr << "..0x" + to_hex(range.end) if range.begin != range.end
|
||||
[ rangestr ]
|
||||
end
|
||||
|
||||
##
|
||||
# 0x00 - 0x7f -> 0zzzzzzz[7]
|
||||
# 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6]
|
||||
# 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6]
|
||||
# 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6]
|
||||
|
||||
UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff]
|
||||
|
||||
def to_utf8_enc( n )
|
||||
r = 0
|
||||
if n <= 0x7f
|
||||
r = n
|
||||
elsif n <= 0x7ff
|
||||
y = 0xc0 | (n >> 6)
|
||||
z = 0x80 | (n & 0x3f)
|
||||
r = y << 8 | z
|
||||
elsif n <= 0xffff
|
||||
x = 0xe0 | (n >> 12)
|
||||
y = 0x80 | (n >> 6) & 0x3f
|
||||
z = 0x80 | n & 0x3f
|
||||
r = x << 16 | y << 8 | z
|
||||
elsif n <= 0x10ffff
|
||||
w = 0xf0 | (n >> 18)
|
||||
x = 0x80 | (n >> 12) & 0x3f
|
||||
y = 0x80 | (n >> 6) & 0x3f
|
||||
z = 0x80 | n & 0x3f
|
||||
r = w << 24 | x << 16 | y << 8 | z
|
||||
end
|
||||
|
||||
to_hex(r)
|
||||
end
|
||||
|
||||
def from_utf8_enc( n )
|
||||
n = n.hex
|
||||
r = 0
|
||||
if n <= 0x7f
|
||||
r = n
|
||||
elsif n <= 0xdfff
|
||||
y = (n >> 8) & 0x1f
|
||||
z = n & 0x3f
|
||||
r = y << 6 | z
|
||||
elsif n <= 0xefffff
|
||||
x = (n >> 16) & 0x0f
|
||||
y = (n >> 8) & 0x3f
|
||||
z = n & 0x3f
|
||||
r = x << 10 | y << 6 | z
|
||||
elsif n <= 0xf7ffffff
|
||||
w = (n >> 24) & 0x07
|
||||
x = (n >> 16) & 0x3f
|
||||
y = (n >> 8) & 0x3f
|
||||
z = n & 0x3f
|
||||
r = w << 18 | x << 12 | y << 6 | z
|
||||
end
|
||||
r
|
||||
end
|
||||
|
||||
###
|
||||
# Given a range, splits it up into ranges that can be continuously
|
||||
# encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff]
|
||||
# This is not strictly needed since the current [5.1] unicode standard
|
||||
# doesn't have ranges that straddle utf8 boundaries. This is included
|
||||
# for completeness as there is no telling if that will ever change.
|
||||
|
||||
def utf8_ranges( range )
|
||||
ranges = []
|
||||
UTF8_BOUNDARIES.each do |max|
|
||||
if range.begin <= max
|
||||
if range.end <= max
|
||||
ranges << range
|
||||
return ranges
|
||||
end
|
||||
|
||||
ranges << (range.begin .. max)
|
||||
range = (max + 1) .. range.end
|
||||
end
|
||||
end
|
||||
ranges
|
||||
end
|
||||
|
||||
def build_range( start, stop )
|
||||
size = start.size/2
|
||||
left = size - 1
|
||||
return [""] if size < 1
|
||||
|
||||
a = start[0..1]
|
||||
b = stop[0..1]
|
||||
|
||||
###
|
||||
# Shared prefix
|
||||
|
||||
if a == b
|
||||
return build_range(start[2..-1], stop[2..-1]).map do |elt|
|
||||
"0x#{a} " + elt
|
||||
end
|
||||
end
|
||||
|
||||
###
|
||||
# Unshared prefix, end of run
|
||||
|
||||
return ["0x#{a}..0x#{b} "] if left.zero?
|
||||
|
||||
###
|
||||
# Unshared prefix, not end of run
|
||||
# Range can be 0x123456..0x56789A
|
||||
# Which is equivalent to:
|
||||
# 0x123456 .. 0x12FFFF
|
||||
# 0x130000 .. 0x55FFFF
|
||||
# 0x560000 .. 0x56789A
|
||||
|
||||
ret = []
|
||||
ret << build_range(start, a + "FF" * left)
|
||||
|
||||
###
|
||||
# Only generate middle range if need be.
|
||||
|
||||
if a.hex+1 != b.hex
|
||||
max = to_hex(b.hex - 1)
|
||||
max = "FF" if b == "FF"
|
||||
ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left
|
||||
end
|
||||
|
||||
###
|
||||
# Don't generate last range if it is covered by first range
|
||||
|
||||
ret << build_range(b + "00" * left, stop) unless b == "FF"
|
||||
ret.flatten!
|
||||
end
|
||||
|
||||
def to_utf8( range )
|
||||
utf8_ranges( range ).map do |r|
|
||||
begin_enc = to_utf8_enc(r.begin)
|
||||
end_enc = to_utf8_enc(r.end)
|
||||
build_range begin_enc, end_enc
|
||||
end.flatten!
|
||||
end
|
||||
|
||||
##
|
||||
# Perform a 3-way comparison of the number of codepoints advertised by
|
||||
# the unicode spec for the given range, the originally parsed range,
|
||||
# and the resulting utf8 encoded range.
|
||||
|
||||
def count_codepoints( code )
|
||||
code.split(' ').inject(1) do |acc, elt|
|
||||
if elt =~ /0x(.+)\.\.0x(.+)/
|
||||
if @encoding == :utf8
|
||||
acc * (from_utf8_enc($2) - from_utf8_enc($1) + 1)
|
||||
else
|
||||
acc * ($2.hex - $1.hex + 1)
|
||||
end
|
||||
else
|
||||
acc
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def is_valid?( range, desc, codes )
|
||||
spec_count = 1
|
||||
spec_count = $1.to_i if desc =~ /\[(\d+)\]/
|
||||
range_count = range.end - range.begin + 1
|
||||
|
||||
sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) }
|
||||
sum == spec_count and sum == range_count
|
||||
end
|
||||
|
||||
##
|
||||
# Generate the state maching to stdout
|
||||
|
||||
def generate_machine( name, property )
|
||||
pipe = " "
|
||||
@output.puts " #{name} = "
|
||||
each_alpha( @chart_url, property ) do |range, desc|
|
||||
|
||||
codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range)
|
||||
|
||||
#raise "Invalid encoding of range #{range}: #{codes.inspect}" unless
|
||||
# is_valid? range, desc, codes
|
||||
|
||||
range_width = codes.map { |a| a.size }.max
|
||||
range_width = RANGE_WIDTH if range_width < RANGE_WIDTH
|
||||
|
||||
desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11
|
||||
desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH
|
||||
|
||||
if desc.size > desc_width
|
||||
desc = desc[0..desc_width - 4] + "..."
|
||||
end
|
||||
|
||||
codes.each_with_index do |r, idx|
|
||||
desc = "" unless idx.zero?
|
||||
code = "%-#{range_width}s" % r
|
||||
@output.puts " #{pipe} #{code} ##{desc}"
|
||||
pipe = "|"
|
||||
end
|
||||
end
|
||||
@output.puts " ;"
|
||||
@output.puts ""
|
||||
end
|
||||
|
||||
@output.puts <<EOF
|
||||
# The following Ragel file was autogenerated with #{$0}
|
||||
# from: #{@chart_url}
|
||||
#
|
||||
# It defines #{properties}.
|
||||
#
|
||||
# To use this, make sure that your alphtype is set to #{ALPHTYPES[@encoding]},
|
||||
# and that your input is in #{@encoding}.
|
||||
|
||||
%%{
|
||||
machine #{machine_name};
|
||||
|
||||
EOF
|
||||
|
||||
properties.each { |x| generate_machine( x, x ) }
|
||||
|
||||
@output.puts <<EOF
|
||||
}%%
|
||||
EOF
|
||||
19
vendor/github.com/apparentlymart/go-textseg/v13/textseg/utf8_seqs.go
generated
vendored
19
vendor/github.com/apparentlymart/go-textseg/v13/textseg/utf8_seqs.go
generated
vendored
@@ -1,19 +0,0 @@
|
||||
package textseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// ScanGraphemeClusters is a split function for bufio.Scanner that splits
|
||||
// on UTF8 sequence boundaries.
|
||||
//
|
||||
// This is included largely for completeness, since this behavior is already
|
||||
// built in to Go when ranging over a string.
|
||||
func ScanUTF8Sequences(data []byte, atEOF bool) (int, []byte, error) {
|
||||
if len(data) == 0 {
|
||||
return 0, nil, nil
|
||||
}
|
||||
r, seqLen := utf8.DecodeRune(data)
|
||||
if r == utf8.RuneError && !atEOF {
|
||||
return 0, nil, nil
|
||||
}
|
||||
return seqLen, data[:seqLen], nil
|
||||
}
|
||||
95
vendor/github.com/apparentlymart/go-textseg/v15/LICENSE
generated
vendored
95
vendor/github.com/apparentlymart/go-textseg/v15/LICENSE
generated
vendored
@@ -1,95 +0,0 @@
|
||||
Copyright (c) 2017 Martin Atkins
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
---------
|
||||
|
||||
Unicode table generation programs are under a separate copyright and license:
|
||||
|
||||
Copyright (c) 2014 Couchbase, Inc.
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
|
||||
except in compliance with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the
|
||||
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
either express or implied. See the License for the specific language governing permissions
|
||||
and limitations under the License.
|
||||
|
||||
---------
|
||||
|
||||
Grapheme break data is provided as part of the Unicode character database,
|
||||
copright 2016 Unicode, Inc, which is provided with the following license:
|
||||
|
||||
Unicode Data Files include all data files under the directories
|
||||
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
|
||||
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
|
||||
http://www.unicode.org/utility/trac/browser/.
|
||||
|
||||
Unicode Data Files do not include PDF online code charts under the
|
||||
directory http://www.unicode.org/Public/.
|
||||
|
||||
Software includes any source code published in the Unicode Standard
|
||||
or under the directories
|
||||
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
|
||||
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
|
||||
http://www.unicode.org/utility/trac/browser/.
|
||||
|
||||
NOTICE TO USER: Carefully read the following legal agreement.
|
||||
BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
|
||||
DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
|
||||
YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
|
||||
TERMS AND CONDITIONS OF THIS AGREEMENT.
|
||||
IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
|
||||
THE DATA FILES OR SOFTWARE.
|
||||
|
||||
COPYRIGHT AND PERMISSION NOTICE
|
||||
|
||||
Copyright © 1991-2017 Unicode, Inc. All rights reserved.
|
||||
Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of the Unicode data files and any associated documentation
|
||||
(the "Data Files") or Unicode software and any associated documentation
|
||||
(the "Software") to deal in the Data Files or Software
|
||||
without restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, and/or sell copies of
|
||||
the Data Files or Software, and to permit persons to whom the Data Files
|
||||
or Software are furnished to do so, provided that either
|
||||
(a) this copyright and permission notice appear with all copies
|
||||
of the Data Files or Software, or
|
||||
(b) this copyright and permission notice appear in associated
|
||||
Documentation.
|
||||
|
||||
THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT OF THIRD PARTY RIGHTS.
|
||||
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
|
||||
NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
|
||||
DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
||||
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
PERFORMANCE OF THE DATA FILES OR SOFTWARE.
|
||||
|
||||
Except as contained in this notice, the name of a copyright holder
|
||||
shall not be used in advertising or otherwise to promote the sale,
|
||||
use or other dealings in these Data Files or Software without prior
|
||||
written authorization of the copyright holder.
|
||||
30
vendor/github.com/apparentlymart/go-textseg/v15/textseg/all_tokens.go
generated
vendored
30
vendor/github.com/apparentlymart/go-textseg/v15/textseg/all_tokens.go
generated
vendored
@@ -1,30 +0,0 @@
|
||||
package textseg
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
)
|
||||
|
||||
// AllTokens is a utility that uses a bufio.SplitFunc to produce a slice of
|
||||
// all of the recognized tokens in the given buffer.
|
||||
func AllTokens(buf []byte, splitFunc bufio.SplitFunc) ([][]byte, error) {
|
||||
scanner := bufio.NewScanner(bytes.NewReader(buf))
|
||||
scanner.Split(splitFunc)
|
||||
var ret [][]byte
|
||||
for scanner.Scan() {
|
||||
ret = append(ret, scanner.Bytes())
|
||||
}
|
||||
return ret, scanner.Err()
|
||||
}
|
||||
|
||||
// TokenCount is a utility that uses a bufio.SplitFunc to count the number of
|
||||
// recognized tokens in the given buffer.
|
||||
func TokenCount(buf []byte, splitFunc bufio.SplitFunc) (int, error) {
|
||||
scanner := bufio.NewScanner(bytes.NewReader(buf))
|
||||
scanner.Split(splitFunc)
|
||||
var ret int
|
||||
for scanner.Scan() {
|
||||
ret++
|
||||
}
|
||||
return ret, scanner.Err()
|
||||
}
|
||||
545
vendor/github.com/apparentlymart/go-textseg/v15/textseg/emoji_table.rl
generated
vendored
545
vendor/github.com/apparentlymart/go-textseg/v15/textseg/emoji_table.rl
generated
vendored
@@ -1,545 +0,0 @@
|
||||
# The following Ragel file was autogenerated with unicode2ragel.rb
|
||||
# from: https://www.unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
|
||||
#
|
||||
# It defines ["Extended_Pictographic"].
|
||||
#
|
||||
# To use this, make sure that your alphtype is set to byte,
|
||||
# and that your input is in utf8.
|
||||
|
||||
%%{
|
||||
machine Emoji;
|
||||
|
||||
Extended_Pictographic =
|
||||
0xC2 0xA9 #E0.6 [1] (©️) copyright
|
||||
| 0xC2 0xAE #E0.6 [1] (®️) registered
|
||||
| 0xE2 0x80 0xBC #E0.6 [1] (‼️) double exclamation mark
|
||||
| 0xE2 0x81 0x89 #E0.6 [1] (⁉️) exclamation question ...
|
||||
| 0xE2 0x84 0xA2 #E0.6 [1] (™️) trade mark
|
||||
| 0xE2 0x84 0xB9 #E0.6 [1] (ℹ️) information
|
||||
| 0xE2 0x86 0x94..0x99 #E0.6 [6] (↔️..↙️) left-right arrow..do...
|
||||
| 0xE2 0x86 0xA9..0xAA #E0.6 [2] (↩️..↪️) right arrow curving ...
|
||||
| 0xE2 0x8C 0x9A..0x9B #E0.6 [2] (⌚..⌛) watch..hourglass done
|
||||
| 0xE2 0x8C 0xA8 #E1.0 [1] (⌨️) keyboard
|
||||
| 0xE2 0x8E 0x88 #E0.0 [1] (⎈) HELM SYMBOL
|
||||
| 0xE2 0x8F 0x8F #E1.0 [1] (⏏️) eject button
|
||||
| 0xE2 0x8F 0xA9..0xAC #E0.6 [4] (⏩..⏬) fast-forward button..f...
|
||||
| 0xE2 0x8F 0xAD..0xAE #E0.7 [2] (⏭️..⏮️) next track button..l...
|
||||
| 0xE2 0x8F 0xAF #E1.0 [1] (⏯️) play or pause button
|
||||
| 0xE2 0x8F 0xB0 #E0.6 [1] (⏰) alarm clock
|
||||
| 0xE2 0x8F 0xB1..0xB2 #E1.0 [2] (⏱️..⏲️) stopwatch..timer clock
|
||||
| 0xE2 0x8F 0xB3 #E0.6 [1] (⏳) hourglass not done
|
||||
| 0xE2 0x8F 0xB8..0xBA #E0.7 [3] (⏸️..⏺️) pause button..record...
|
||||
| 0xE2 0x93 0x82 #E0.6 [1] (Ⓜ️) circled M
|
||||
| 0xE2 0x96 0xAA..0xAB #E0.6 [2] (▪️..▫️) black small square.....
|
||||
| 0xE2 0x96 0xB6 #E0.6 [1] (▶️) play button
|
||||
| 0xE2 0x97 0x80 #E0.6 [1] (◀️) reverse button
|
||||
| 0xE2 0x97 0xBB..0xBE #E0.6 [4] (◻️..◾) white medium square.....
|
||||
| 0xE2 0x98 0x80..0x81 #E0.6 [2] (☀️..☁️) sun..cloud
|
||||
| 0xE2 0x98 0x82..0x83 #E0.7 [2] (☂️..☃️) umbrella..snowman
|
||||
| 0xE2 0x98 0x84 #E1.0 [1] (☄️) comet
|
||||
| 0xE2 0x98 0x85 #E0.0 [1] (★) BLACK STAR
|
||||
| 0xE2 0x98 0x87..0x8D #E0.0 [7] (☇..☍) LIGHTNING..OPPOSITION
|
||||
| 0xE2 0x98 0x8E #E0.6 [1] (☎️) telephone
|
||||
| 0xE2 0x98 0x8F..0x90 #E0.0 [2] (☏..☐) WHITE TELEPHONE..BALLO...
|
||||
| 0xE2 0x98 0x91 #E0.6 [1] (☑️) check box with check
|
||||
| 0xE2 0x98 0x92 #E0.0 [1] (☒) BALLOT BOX WITH X
|
||||
| 0xE2 0x98 0x94..0x95 #E0.6 [2] (☔..☕) umbrella with rain dro...
|
||||
| 0xE2 0x98 0x96..0x97 #E0.0 [2] (☖..☗) WHITE SHOGI PIECE..BLA...
|
||||
| 0xE2 0x98 0x98 #E1.0 [1] (☘️) shamrock
|
||||
| 0xE2 0x98 0x99..0x9C #E0.0 [4] (☙..☜) REVERSED ROTATED FLORA...
|
||||
| 0xE2 0x98 0x9D #E0.6 [1] (☝️) index pointing up
|
||||
| 0xE2 0x98 0x9E..0x9F #E0.0 [2] (☞..☟) WHITE RIGHT POINTING I...
|
||||
| 0xE2 0x98 0xA0 #E1.0 [1] (☠️) skull and crossbones
|
||||
| 0xE2 0x98 0xA1 #E0.0 [1] (☡) CAUTION SIGN
|
||||
| 0xE2 0x98 0xA2..0xA3 #E1.0 [2] (☢️..☣️) radioactive..biohazard
|
||||
| 0xE2 0x98 0xA4..0xA5 #E0.0 [2] (☤..☥) CADUCEUS..ANKH
|
||||
| 0xE2 0x98 0xA6 #E1.0 [1] (☦️) orthodox cross
|
||||
| 0xE2 0x98 0xA7..0xA9 #E0.0 [3] (☧..☩) CHI RHO..CROSS OF JERU...
|
||||
| 0xE2 0x98 0xAA #E0.7 [1] (☪️) star and crescent
|
||||
| 0xE2 0x98 0xAB..0xAD #E0.0 [3] (☫..☭) FARSI SYMBOL..HAMMER A...
|
||||
| 0xE2 0x98 0xAE #E1.0 [1] (☮️) peace symbol
|
||||
| 0xE2 0x98 0xAF #E0.7 [1] (☯️) yin yang
|
||||
| 0xE2 0x98 0xB0..0xB7 #E0.0 [8] (☰..☷) TRIGRAM FOR HEAVEN..TR...
|
||||
| 0xE2 0x98 0xB8..0xB9 #E0.7 [2] (☸️..☹️) wheel of dharma..fro...
|
||||
| 0xE2 0x98 0xBA #E0.6 [1] (☺️) smiling face
|
||||
| 0xE2 0x98 0xBB..0xBF #E0.0 [5] (☻..☿) BLACK SMILING FACE..ME...
|
||||
| 0xE2 0x99 0x80 #E4.0 [1] (♀️) female sign
|
||||
| 0xE2 0x99 0x81 #E0.0 [1] (♁) EARTH
|
||||
| 0xE2 0x99 0x82 #E4.0 [1] (♂️) male sign
|
||||
| 0xE2 0x99 0x83..0x87 #E0.0 [5] (♃..♇) JUPITER..PLUTO
|
||||
| 0xE2 0x99 0x88..0x93 #E0.6 [12] (♈..♓) Aries..Pisces
|
||||
| 0xE2 0x99 0x94..0x9E #E0.0 [11] (♔..♞) WHITE CHESS KING..BLAC...
|
||||
| 0xE2 0x99 0x9F #E11.0 [1] (♟️) chess pawn
|
||||
| 0xE2 0x99 0xA0 #E0.6 [1] (♠️) spade suit
|
||||
| 0xE2 0x99 0xA1..0xA2 #E0.0 [2] (♡..♢) WHITE HEART SUIT..WHIT...
|
||||
| 0xE2 0x99 0xA3 #E0.6 [1] (♣️) club suit
|
||||
| 0xE2 0x99 0xA4 #E0.0 [1] (♤) WHITE SPADE SUIT
|
||||
| 0xE2 0x99 0xA5..0xA6 #E0.6 [2] (♥️..♦️) heart suit..diamond ...
|
||||
| 0xE2 0x99 0xA7 #E0.0 [1] (♧) WHITE CLUB SUIT
|
||||
| 0xE2 0x99 0xA8 #E0.6 [1] (♨️) hot springs
|
||||
| 0xE2 0x99 0xA9..0xBA #E0.0 [18] (♩..♺) QUARTER NOTE..RECYCLIN...
|
||||
| 0xE2 0x99 0xBB #E0.6 [1] (♻️) recycling symbol
|
||||
| 0xE2 0x99 0xBC..0xBD #E0.0 [2] (♼..♽) RECYCLED PAPER SYMBOL....
|
||||
| 0xE2 0x99 0xBE #E11.0 [1] (♾️) infinity
|
||||
| 0xE2 0x99 0xBF #E0.6 [1] (♿) wheelchair symbol
|
||||
| 0xE2 0x9A 0x80..0x85 #E0.0 [6] (⚀..⚅) DIE FACE-1..DIE FACE-6
|
||||
| 0xE2 0x9A 0x90..0x91 #E0.0 [2] (⚐..⚑) WHITE FLAG..BLACK FLAG
|
||||
| 0xE2 0x9A 0x92 #E1.0 [1] (⚒️) hammer and pick
|
||||
| 0xE2 0x9A 0x93 #E0.6 [1] (⚓) anchor
|
||||
| 0xE2 0x9A 0x94 #E1.0 [1] (⚔️) crossed swords
|
||||
| 0xE2 0x9A 0x95 #E4.0 [1] (⚕️) medical symbol
|
||||
| 0xE2 0x9A 0x96..0x97 #E1.0 [2] (⚖️..⚗️) balance scale..alembic
|
||||
| 0xE2 0x9A 0x98 #E0.0 [1] (⚘) FLOWER
|
||||
| 0xE2 0x9A 0x99 #E1.0 [1] (⚙️) gear
|
||||
| 0xE2 0x9A 0x9A #E0.0 [1] (⚚) STAFF OF HERMES
|
||||
| 0xE2 0x9A 0x9B..0x9C #E1.0 [2] (⚛️..⚜️) atom symbol..fleur-d...
|
||||
| 0xE2 0x9A 0x9D..0x9F #E0.0 [3] (⚝..⚟) OUTLINED WHITE STAR..T...
|
||||
| 0xE2 0x9A 0xA0..0xA1 #E0.6 [2] (⚠️..⚡) warning..high voltage
|
||||
| 0xE2 0x9A 0xA2..0xA6 #E0.0 [5] (⚢..⚦) DOUBLED FEMALE SIGN..M...
|
||||
| 0xE2 0x9A 0xA7 #E13.0 [1] (⚧️) transgender symbol
|
||||
| 0xE2 0x9A 0xA8..0xA9 #E0.0 [2] (⚨..⚩) VERTICAL MALE WITH STR...
|
||||
| 0xE2 0x9A 0xAA..0xAB #E0.6 [2] (⚪..⚫) white circle..black ci...
|
||||
| 0xE2 0x9A 0xAC..0xAF #E0.0 [4] (⚬..⚯) MEDIUM SMALL WHITE CIR...
|
||||
| 0xE2 0x9A 0xB0..0xB1 #E1.0 [2] (⚰️..⚱️) coffin..funeral urn
|
||||
| 0xE2 0x9A 0xB2..0xBC #E0.0 [11] (⚲..⚼) NEUTER..SESQUIQUADRATE
|
||||
| 0xE2 0x9A 0xBD..0xBE #E0.6 [2] (⚽..⚾) soccer ball..baseball
|
||||
| 0xE2 0x9A 0xBF..0xFF #E0.0 [5] (⚿..⛃) SQUARED KEY..BLACK DRA...
|
||||
| 0xE2 0x9B 0x00..0x83 #
|
||||
| 0xE2 0x9B 0x84..0x85 #E0.6 [2] (⛄..⛅) snowman without snow.....
|
||||
| 0xE2 0x9B 0x86..0x87 #E0.0 [2] (⛆..⛇) RAIN..BLACK SNOWMAN
|
||||
| 0xE2 0x9B 0x88 #E0.7 [1] (⛈️) cloud with lightning ...
|
||||
| 0xE2 0x9B 0x89..0x8D #E0.0 [5] (⛉..⛍) TURNED WHITE SHOGI PIE...
|
||||
| 0xE2 0x9B 0x8E #E0.6 [1] (⛎) Ophiuchus
|
||||
| 0xE2 0x9B 0x8F #E0.7 [1] (⛏️) pick
|
||||
| 0xE2 0x9B 0x90 #E0.0 [1] (⛐) CAR SLIDING
|
||||
| 0xE2 0x9B 0x91 #E0.7 [1] (⛑️) rescue worker’s helmet
|
||||
| 0xE2 0x9B 0x92 #E0.0 [1] (⛒) CIRCLED CROSSING LANES
|
||||
| 0xE2 0x9B 0x93 #E0.7 [1] (⛓️) chains
|
||||
| 0xE2 0x9B 0x94 #E0.6 [1] (⛔) no entry
|
||||
| 0xE2 0x9B 0x95..0xA8 #E0.0 [20] (⛕..⛨) ALTERNATE ONE-WAY LEFT...
|
||||
| 0xE2 0x9B 0xA9 #E0.7 [1] (⛩️) shinto shrine
|
||||
| 0xE2 0x9B 0xAA #E0.6 [1] (⛪) church
|
||||
| 0xE2 0x9B 0xAB..0xAF #E0.0 [5] (⛫..⛯) CASTLE..MAP SYMBOL FOR...
|
||||
| 0xE2 0x9B 0xB0..0xB1 #E0.7 [2] (⛰️..⛱️) mountain..umbrella o...
|
||||
| 0xE2 0x9B 0xB2..0xB3 #E0.6 [2] (⛲..⛳) fountain..flag in hole
|
||||
| 0xE2 0x9B 0xB4 #E0.7 [1] (⛴️) ferry
|
||||
| 0xE2 0x9B 0xB5 #E0.6 [1] (⛵) sailboat
|
||||
| 0xE2 0x9B 0xB6 #E0.0 [1] (⛶) SQUARE FOUR CORNERS
|
||||
| 0xE2 0x9B 0xB7..0xB9 #E0.7 [3] (⛷️..⛹️) skier..person bounci...
|
||||
| 0xE2 0x9B 0xBA #E0.6 [1] (⛺) tent
|
||||
| 0xE2 0x9B 0xBB..0xBC #E0.0 [2] (⛻..⛼) JAPANESE BANK SYMBOL.....
|
||||
| 0xE2 0x9B 0xBD #E0.6 [1] (⛽) fuel pump
|
||||
| 0xE2 0x9B 0xBE..0xFF #E0.0 [4] (⛾..✁) CUP ON BLACK SQUARE..U...
|
||||
| 0xE2 0x9C 0x00..0x81 #
|
||||
| 0xE2 0x9C 0x82 #E0.6 [1] (✂️) scissors
|
||||
| 0xE2 0x9C 0x83..0x84 #E0.0 [2] (✃..✄) LOWER BLADE SCISSORS.....
|
||||
| 0xE2 0x9C 0x85 #E0.6 [1] (✅) check mark button
|
||||
| 0xE2 0x9C 0x88..0x8C #E0.6 [5] (✈️..✌️) airplane..victory hand
|
||||
| 0xE2 0x9C 0x8D #E0.7 [1] (✍️) writing hand
|
||||
| 0xE2 0x9C 0x8E #E0.0 [1] (✎) LOWER RIGHT PENCIL
|
||||
| 0xE2 0x9C 0x8F #E0.6 [1] (✏️) pencil
|
||||
| 0xE2 0x9C 0x90..0x91 #E0.0 [2] (✐..✑) UPPER RIGHT PENCIL..WH...
|
||||
| 0xE2 0x9C 0x92 #E0.6 [1] (✒️) black nib
|
||||
| 0xE2 0x9C 0x94 #E0.6 [1] (✔️) check mark
|
||||
| 0xE2 0x9C 0x96 #E0.6 [1] (✖️) multiply
|
||||
| 0xE2 0x9C 0x9D #E0.7 [1] (✝️) latin cross
|
||||
| 0xE2 0x9C 0xA1 #E0.7 [1] (✡️) star of David
|
||||
| 0xE2 0x9C 0xA8 #E0.6 [1] (✨) sparkles
|
||||
| 0xE2 0x9C 0xB3..0xB4 #E0.6 [2] (✳️..✴️) eight-spoked asteris...
|
||||
| 0xE2 0x9D 0x84 #E0.6 [1] (❄️) snowflake
|
||||
| 0xE2 0x9D 0x87 #E0.6 [1] (❇️) sparkle
|
||||
| 0xE2 0x9D 0x8C #E0.6 [1] (❌) cross mark
|
||||
| 0xE2 0x9D 0x8E #E0.6 [1] (❎) cross mark button
|
||||
| 0xE2 0x9D 0x93..0x95 #E0.6 [3] (❓..❕) red question mark..whi...
|
||||
| 0xE2 0x9D 0x97 #E0.6 [1] (❗) red exclamation mark
|
||||
| 0xE2 0x9D 0xA3 #E1.0 [1] (❣️) heart exclamation
|
||||
| 0xE2 0x9D 0xA4 #E0.6 [1] (❤️) red heart
|
||||
| 0xE2 0x9D 0xA5..0xA7 #E0.0 [3] (❥..❧) ROTATED HEAVY BLACK HE...
|
||||
| 0xE2 0x9E 0x95..0x97 #E0.6 [3] (➕..➗) plus..divide
|
||||
| 0xE2 0x9E 0xA1 #E0.6 [1] (➡️) right arrow
|
||||
| 0xE2 0x9E 0xB0 #E0.6 [1] (➰) curly loop
|
||||
| 0xE2 0x9E 0xBF #E1.0 [1] (➿) double curly loop
|
||||
| 0xE2 0xA4 0xB4..0xB5 #E0.6 [2] (⤴️..⤵️) right arrow curving ...
|
||||
| 0xE2 0xAC 0x85..0x87 #E0.6 [3] (⬅️..⬇️) left arrow..down arrow
|
||||
| 0xE2 0xAC 0x9B..0x9C #E0.6 [2] (⬛..⬜) black large square..wh...
|
||||
| 0xE2 0xAD 0x90 #E0.6 [1] (⭐) star
|
||||
| 0xE2 0xAD 0x95 #E0.6 [1] (⭕) hollow red circle
|
||||
| 0xE3 0x80 0xB0 #E0.6 [1] (〰️) wavy dash
|
||||
| 0xE3 0x80 0xBD #E0.6 [1] (〽️) part alternation mark
|
||||
| 0xE3 0x8A 0x97 #E0.6 [1] (㊗️) Japanese “congratulat...
|
||||
| 0xE3 0x8A 0x99 #E0.6 [1] (㊙️) Japanese “secret” button
|
||||
| 0xF0 0x9F 0x80 0x80..0x83 #E0.0 [4] (🀀..🀃) MAHJONG TILE EAST W...
|
||||
| 0xF0 0x9F 0x80 0x84 #E0.6 [1] (🀄) mahjong red dragon
|
||||
| 0xF0 0x9F 0x80 0x85..0xFF #E0.0 [202] (🀅..🃎) MAHJONG TILE ...
|
||||
| 0xF0 0x9F 0x81..0x82 0x00..0xFF #
|
||||
| 0xF0 0x9F 0x83 0x00..0x8E #
|
||||
| 0xF0 0x9F 0x83 0x8F #E0.6 [1] (🃏) joker
|
||||
| 0xF0 0x9F 0x83 0x90..0xBF #E0.0 [48] (..) <reserved-1F0D0>..<...
|
||||
| 0xF0 0x9F 0x84 0x8D..0x8F #E0.0 [3] (🄍..🄏) CIRCLED ZERO WITH S...
|
||||
| 0xF0 0x9F 0x84 0xAF #E0.0 [1] (🄯) COPYLEFT SYMBOL
|
||||
| 0xF0 0x9F 0x85 0xAC..0xAF #E0.0 [4] (🅬..🅯) RAISED MR SIGN..CIR...
|
||||
| 0xF0 0x9F 0x85 0xB0..0xB1 #E0.6 [2] (🅰️..🅱️) A button (blood t...
|
||||
| 0xF0 0x9F 0x85 0xBE..0xBF #E0.6 [2] (🅾️..🅿️) O button (blood t...
|
||||
| 0xF0 0x9F 0x86 0x8E #E0.6 [1] (🆎) AB button (blood type)
|
||||
| 0xF0 0x9F 0x86 0x91..0x9A #E0.6 [10] (🆑..🆚) CL button..VS button
|
||||
| 0xF0 0x9F 0x86 0xAD..0xFF #E0.0 [57] (🆭..) MASK WORK SYMBOL..<...
|
||||
| 0xF0 0x9F 0x87 0x00..0xA5 #
|
||||
| 0xF0 0x9F 0x88 0x81..0x82 #E0.6 [2] (🈁..🈂️) Japanese “here” bu...
|
||||
| 0xF0 0x9F 0x88 0x83..0x8F #E0.0 [13] (..) <reserved-1F203>..<...
|
||||
| 0xF0 0x9F 0x88 0x9A #E0.6 [1] (🈚) Japanese “free of char...
|
||||
| 0xF0 0x9F 0x88 0xAF #E0.6 [1] (🈯) Japanese “reserved” bu...
|
||||
| 0xF0 0x9F 0x88 0xB2..0xBA #E0.6 [9] (🈲..🈺) Japanese “prohibite...
|
||||
| 0xF0 0x9F 0x88 0xBC..0xBF #E0.0 [4] (..) <reserved-1F23C>..<...
|
||||
| 0xF0 0x9F 0x89 0x89..0x8F #E0.0 [7] (..) <reserved-1F249>..<...
|
||||
| 0xF0 0x9F 0x89 0x90..0x91 #E0.6 [2] (🉐..🉑) Japanese “bargain” ...
|
||||
| 0xF0 0x9F 0x89 0x92..0xFF #E0.0 [174] (..) <reserved-1F2...
|
||||
| 0xF0 0x9F 0x8A..0x8A 0x00..0xFF #
|
||||
| 0xF0 0x9F 0x8B 0x00..0xBF #
|
||||
| 0xF0 0x9F 0x8C 0x80..0x8C #E0.6 [13] (🌀..🌌) cyclone..milky way
|
||||
| 0xF0 0x9F 0x8C 0x8D..0x8E #E0.7 [2] (🌍..🌎) globe showing Europ...
|
||||
| 0xF0 0x9F 0x8C 0x8F #E0.6 [1] (🌏) globe showing Asia-Aus...
|
||||
| 0xF0 0x9F 0x8C 0x90 #E1.0 [1] (🌐) globe with meridians
|
||||
| 0xF0 0x9F 0x8C 0x91 #E0.6 [1] (🌑) new moon
|
||||
| 0xF0 0x9F 0x8C 0x92 #E1.0 [1] (🌒) waxing crescent moon
|
||||
| 0xF0 0x9F 0x8C 0x93..0x95 #E0.6 [3] (🌓..🌕) first quarter moon....
|
||||
| 0xF0 0x9F 0x8C 0x96..0x98 #E1.0 [3] (🌖..🌘) waning gibbous moon...
|
||||
| 0xF0 0x9F 0x8C 0x99 #E0.6 [1] (🌙) crescent moon
|
||||
| 0xF0 0x9F 0x8C 0x9A #E1.0 [1] (🌚) new moon face
|
||||
| 0xF0 0x9F 0x8C 0x9B #E0.6 [1] (🌛) first quarter moon face
|
||||
| 0xF0 0x9F 0x8C 0x9C #E0.7 [1] (🌜) last quarter moon face
|
||||
| 0xF0 0x9F 0x8C 0x9D..0x9E #E1.0 [2] (🌝..🌞) full moon face..sun...
|
||||
| 0xF0 0x9F 0x8C 0x9F..0xA0 #E0.6 [2] (🌟..🌠) glowing star..shoot...
|
||||
| 0xF0 0x9F 0x8C 0xA1 #E0.7 [1] (🌡️) thermometer
|
||||
| 0xF0 0x9F 0x8C 0xA2..0xA3 #E0.0 [2] (🌢..🌣) BLACK DROPLET..WHIT...
|
||||
| 0xF0 0x9F 0x8C 0xA4..0xAC #E0.7 [9] (🌤️..🌬️) sun behind small ...
|
||||
| 0xF0 0x9F 0x8C 0xAD..0xAF #E1.0 [3] (🌭..🌯) hot dog..burrito
|
||||
| 0xF0 0x9F 0x8C 0xB0..0xB1 #E0.6 [2] (🌰..🌱) chestnut..seedling
|
||||
| 0xF0 0x9F 0x8C 0xB2..0xB3 #E1.0 [2] (🌲..🌳) evergreen tree..dec...
|
||||
| 0xF0 0x9F 0x8C 0xB4..0xB5 #E0.6 [2] (🌴..🌵) palm tree..cactus
|
||||
| 0xF0 0x9F 0x8C 0xB6 #E0.7 [1] (🌶️) hot pepper
|
||||
| 0xF0 0x9F 0x8C 0xB7..0xFF #E0.6 [20] (🌷..🍊) tulip..tangerine
|
||||
| 0xF0 0x9F 0x8D 0x00..0x8A #
|
||||
| 0xF0 0x9F 0x8D 0x8B #E1.0 [1] (🍋) lemon
|
||||
| 0xF0 0x9F 0x8D 0x8C..0x8F #E0.6 [4] (🍌..🍏) banana..green apple
|
||||
| 0xF0 0x9F 0x8D 0x90 #E1.0 [1] (🍐) pear
|
||||
| 0xF0 0x9F 0x8D 0x91..0xBB #E0.6 [43] (🍑..🍻) peach..clinking bee...
|
||||
| 0xF0 0x9F 0x8D 0xBC #E1.0 [1] (🍼) baby bottle
|
||||
| 0xF0 0x9F 0x8D 0xBD #E0.7 [1] (🍽️) fork and knife with p...
|
||||
| 0xF0 0x9F 0x8D 0xBE..0xBF #E1.0 [2] (🍾..🍿) bottle with popping...
|
||||
| 0xF0 0x9F 0x8E 0x80..0x93 #E0.6 [20] (🎀..🎓) ribbon..graduation cap
|
||||
| 0xF0 0x9F 0x8E 0x94..0x95 #E0.0 [2] (🎔..🎕) HEART WITH TIP ON T...
|
||||
| 0xF0 0x9F 0x8E 0x96..0x97 #E0.7 [2] (🎖️..🎗️) military medal..r...
|
||||
| 0xF0 0x9F 0x8E 0x98 #E0.0 [1] (🎘) MUSICAL KEYBOARD WITH ...
|
||||
| 0xF0 0x9F 0x8E 0x99..0x9B #E0.7 [3] (🎙️..🎛️) studio microphone...
|
||||
| 0xF0 0x9F 0x8E 0x9C..0x9D #E0.0 [2] (🎜..🎝) BEAMED ASCENDING MU...
|
||||
| 0xF0 0x9F 0x8E 0x9E..0x9F #E0.7 [2] (🎞️..🎟️) film frames..admi...
|
||||
| 0xF0 0x9F 0x8E 0xA0..0xFF #E0.6 [37] (🎠..🏄) carousel horse..per...
|
||||
| 0xF0 0x9F 0x8F 0x00..0x84 #
|
||||
| 0xF0 0x9F 0x8F 0x85 #E1.0 [1] (🏅) sports medal
|
||||
| 0xF0 0x9F 0x8F 0x86 #E0.6 [1] (🏆) trophy
|
||||
| 0xF0 0x9F 0x8F 0x87 #E1.0 [1] (🏇) horse racing
|
||||
| 0xF0 0x9F 0x8F 0x88 #E0.6 [1] (🏈) american football
|
||||
| 0xF0 0x9F 0x8F 0x89 #E1.0 [1] (🏉) rugby football
|
||||
| 0xF0 0x9F 0x8F 0x8A #E0.6 [1] (🏊) person swimming
|
||||
| 0xF0 0x9F 0x8F 0x8B..0x8E #E0.7 [4] (🏋️..🏎️) person lifting we...
|
||||
| 0xF0 0x9F 0x8F 0x8F..0x93 #E1.0 [5] (🏏..🏓) cricket game..ping ...
|
||||
| 0xF0 0x9F 0x8F 0x94..0x9F #E0.7 [12] (🏔️..🏟️) snow-capped mount...
|
||||
| 0xF0 0x9F 0x8F 0xA0..0xA3 #E0.6 [4] (🏠..🏣) house..Japanese pos...
|
||||
| 0xF0 0x9F 0x8F 0xA4 #E1.0 [1] (🏤) post office
|
||||
| 0xF0 0x9F 0x8F 0xA5..0xB0 #E0.6 [12] (🏥..🏰) hospital..castle
|
||||
| 0xF0 0x9F 0x8F 0xB1..0xB2 #E0.0 [2] (🏱..🏲) WHITE PENNANT..BLAC...
|
||||
| 0xF0 0x9F 0x8F 0xB3 #E0.7 [1] (🏳️) white flag
|
||||
| 0xF0 0x9F 0x8F 0xB4 #E1.0 [1] (🏴) black flag
|
||||
| 0xF0 0x9F 0x8F 0xB5 #E0.7 [1] (🏵️) rosette
|
||||
| 0xF0 0x9F 0x8F 0xB6 #E0.0 [1] (🏶) BLACK ROSETTE
|
||||
| 0xF0 0x9F 0x8F 0xB7 #E0.7 [1] (🏷️) label
|
||||
| 0xF0 0x9F 0x8F 0xB8..0xBA #E1.0 [3] (🏸..🏺) badminton..amphora
|
||||
| 0xF0 0x9F 0x90 0x80..0x87 #E1.0 [8] (🐀..🐇) rat..rabbit
|
||||
| 0xF0 0x9F 0x90 0x88 #E0.7 [1] (🐈) cat
|
||||
| 0xF0 0x9F 0x90 0x89..0x8B #E1.0 [3] (🐉..🐋) dragon..whale
|
||||
| 0xF0 0x9F 0x90 0x8C..0x8E #E0.6 [3] (🐌..🐎) snail..horse
|
||||
| 0xF0 0x9F 0x90 0x8F..0x90 #E1.0 [2] (🐏..🐐) ram..goat
|
||||
| 0xF0 0x9F 0x90 0x91..0x92 #E0.6 [2] (🐑..🐒) ewe..monkey
|
||||
| 0xF0 0x9F 0x90 0x93 #E1.0 [1] (🐓) rooster
|
||||
| 0xF0 0x9F 0x90 0x94 #E0.6 [1] (🐔) chicken
|
||||
| 0xF0 0x9F 0x90 0x95 #E0.7 [1] (🐕) dog
|
||||
| 0xF0 0x9F 0x90 0x96 #E1.0 [1] (🐖) pig
|
||||
| 0xF0 0x9F 0x90 0x97..0xA9 #E0.6 [19] (🐗..🐩) boar..poodle
|
||||
| 0xF0 0x9F 0x90 0xAA #E1.0 [1] (🐪) camel
|
||||
| 0xF0 0x9F 0x90 0xAB..0xBE #E0.6 [20] (🐫..🐾) two-hump camel..paw...
|
||||
| 0xF0 0x9F 0x90 0xBF #E0.7 [1] (🐿️) chipmunk
|
||||
| 0xF0 0x9F 0x91 0x80 #E0.6 [1] (👀) eyes
|
||||
| 0xF0 0x9F 0x91 0x81 #E0.7 [1] (👁️) eye
|
||||
| 0xF0 0x9F 0x91 0x82..0xA4 #E0.6 [35] (👂..👤) ear..bust in silhou...
|
||||
| 0xF0 0x9F 0x91 0xA5 #E1.0 [1] (👥) busts in silhouette
|
||||
| 0xF0 0x9F 0x91 0xA6..0xAB #E0.6 [6] (👦..👫) boy..woman and man ...
|
||||
| 0xF0 0x9F 0x91 0xAC..0xAD #E1.0 [2] (👬..👭) men holding hands.....
|
||||
| 0xF0 0x9F 0x91 0xAE..0xFF #E0.6 [63] (👮..💬) police officer..spe...
|
||||
| 0xF0 0x9F 0x92 0x00..0xAC #
|
||||
| 0xF0 0x9F 0x92 0xAD #E1.0 [1] (💭) thought balloon
|
||||
| 0xF0 0x9F 0x92 0xAE..0xB5 #E0.6 [8] (💮..💵) white flower..dolla...
|
||||
| 0xF0 0x9F 0x92 0xB6..0xB7 #E1.0 [2] (💶..💷) euro banknote..poun...
|
||||
| 0xF0 0x9F 0x92 0xB8..0xFF #E0.6 [52] (💸..📫) money with wings..c...
|
||||
| 0xF0 0x9F 0x93 0x00..0xAB #
|
||||
| 0xF0 0x9F 0x93 0xAC..0xAD #E0.7 [2] (📬..📭) open mailbox with r...
|
||||
| 0xF0 0x9F 0x93 0xAE #E0.6 [1] (📮) postbox
|
||||
| 0xF0 0x9F 0x93 0xAF #E1.0 [1] (📯) postal horn
|
||||
| 0xF0 0x9F 0x93 0xB0..0xB4 #E0.6 [5] (📰..📴) newspaper..mobile p...
|
||||
| 0xF0 0x9F 0x93 0xB5 #E1.0 [1] (📵) no mobile phones
|
||||
| 0xF0 0x9F 0x93 0xB6..0xB7 #E0.6 [2] (📶..📷) antenna bars..camera
|
||||
| 0xF0 0x9F 0x93 0xB8 #E1.0 [1] (📸) camera with flash
|
||||
| 0xF0 0x9F 0x93 0xB9..0xBC #E0.6 [4] (📹..📼) video camera..video...
|
||||
| 0xF0 0x9F 0x93 0xBD #E0.7 [1] (📽️) film projector
|
||||
| 0xF0 0x9F 0x93 0xBE #E0.0 [1] (📾) PORTABLE STEREO
|
||||
| 0xF0 0x9F 0x93 0xBF..0xFF #E1.0 [4] (📿..🔂) prayer beads..repea...
|
||||
| 0xF0 0x9F 0x94 0x00..0x82 #
|
||||
| 0xF0 0x9F 0x94 0x83 #E0.6 [1] (🔃) clockwise vertical arrows
|
||||
| 0xF0 0x9F 0x94 0x84..0x87 #E1.0 [4] (🔄..🔇) counterclockwise ar...
|
||||
| 0xF0 0x9F 0x94 0x88 #E0.7 [1] (🔈) speaker low volume
|
||||
| 0xF0 0x9F 0x94 0x89 #E1.0 [1] (🔉) speaker medium volume
|
||||
| 0xF0 0x9F 0x94 0x8A..0x94 #E0.6 [11] (🔊..🔔) speaker high volume...
|
||||
| 0xF0 0x9F 0x94 0x95 #E1.0 [1] (🔕) bell with slash
|
||||
| 0xF0 0x9F 0x94 0x96..0xAB #E0.6 [22] (🔖..🔫) bookmark..water pistol
|
||||
| 0xF0 0x9F 0x94 0xAC..0xAD #E1.0 [2] (🔬..🔭) microscope..telescope
|
||||
| 0xF0 0x9F 0x94 0xAE..0xBD #E0.6 [16] (🔮..🔽) crystal ball..downw...
|
||||
| 0xF0 0x9F 0x95 0x86..0x88 #E0.0 [3] (🕆..🕈) WHITE LATIN CROSS.....
|
||||
| 0xF0 0x9F 0x95 0x89..0x8A #E0.7 [2] (🕉️..🕊️) om..dove
|
||||
| 0xF0 0x9F 0x95 0x8B..0x8E #E1.0 [4] (🕋..🕎) kaaba..menorah
|
||||
| 0xF0 0x9F 0x95 0x8F #E0.0 [1] (🕏) BOWL OF HYGIEIA
|
||||
| 0xF0 0x9F 0x95 0x90..0x9B #E0.6 [12] (🕐..🕛) one o’clock..twelve...
|
||||
| 0xF0 0x9F 0x95 0x9C..0xA7 #E0.7 [12] (🕜..🕧) one-thirty..twelve-...
|
||||
| 0xF0 0x9F 0x95 0xA8..0xAE #E0.0 [7] (🕨..🕮) RIGHT SPEAKER..BOOK
|
||||
| 0xF0 0x9F 0x95 0xAF..0xB0 #E0.7 [2] (🕯️..🕰️) candle..mantelpie...
|
||||
| 0xF0 0x9F 0x95 0xB1..0xB2 #E0.0 [2] (🕱..🕲) BLACK SKULL AND CRO...
|
||||
| 0xF0 0x9F 0x95 0xB3..0xB9 #E0.7 [7] (🕳️..🕹️) hole..joystick
|
||||
| 0xF0 0x9F 0x95 0xBA #E3.0 [1] (🕺) man dancing
|
||||
| 0xF0 0x9F 0x95 0xBB..0xFF #E0.0 [12] (🕻..🖆) LEFT HAND TELEPHONE...
|
||||
| 0xF0 0x9F 0x96 0x00..0x86 #
|
||||
| 0xF0 0x9F 0x96 0x87 #E0.7 [1] (🖇️) linked paperclips
|
||||
| 0xF0 0x9F 0x96 0x88..0x89 #E0.0 [2] (🖈..🖉) BLACK PUSHPIN..LOWE...
|
||||
| 0xF0 0x9F 0x96 0x8A..0x8D #E0.7 [4] (🖊️..🖍️) pen..crayon
|
||||
| 0xF0 0x9F 0x96 0x8E..0x8F #E0.0 [2] (🖎..🖏) LEFT WRITING HAND.....
|
||||
| 0xF0 0x9F 0x96 0x90 #E0.7 [1] (🖐️) hand with fingers spl...
|
||||
| 0xF0 0x9F 0x96 0x91..0x94 #E0.0 [4] (🖑..🖔) REVERSED RAISED HAN...
|
||||
| 0xF0 0x9F 0x96 0x95..0x96 #E1.0 [2] (🖕..🖖) middle finger..vulc...
|
||||
| 0xF0 0x9F 0x96 0x97..0xA3 #E0.0 [13] (🖗..🖣) WHITE DOWN POINTING...
|
||||
| 0xF0 0x9F 0x96 0xA4 #E3.0 [1] (🖤) black heart
|
||||
| 0xF0 0x9F 0x96 0xA5 #E0.7 [1] (🖥️) desktop computer
|
||||
| 0xF0 0x9F 0x96 0xA6..0xA7 #E0.0 [2] (🖦..🖧) KEYBOARD AND MOUSE....
|
||||
| 0xF0 0x9F 0x96 0xA8 #E0.7 [1] (🖨️) printer
|
||||
| 0xF0 0x9F 0x96 0xA9..0xB0 #E0.0 [8] (🖩..🖰) POCKET CALCULATOR.....
|
||||
| 0xF0 0x9F 0x96 0xB1..0xB2 #E0.7 [2] (🖱️..🖲️) computer mouse..t...
|
||||
| 0xF0 0x9F 0x96 0xB3..0xBB #E0.0 [9] (🖳..🖻) OLD PERSONAL COMPUT...
|
||||
| 0xF0 0x9F 0x96 0xBC #E0.7 [1] (🖼️) framed picture
|
||||
| 0xF0 0x9F 0x96 0xBD..0xFF #E0.0 [5] (🖽..🗁) FRAME WITH TILES..O...
|
||||
| 0xF0 0x9F 0x97 0x00..0x81 #
|
||||
| 0xF0 0x9F 0x97 0x82..0x84 #E0.7 [3] (🗂️..🗄️) card index divide...
|
||||
| 0xF0 0x9F 0x97 0x85..0x90 #E0.0 [12] (🗅..🗐) EMPTY NOTE..PAGES
|
||||
| 0xF0 0x9F 0x97 0x91..0x93 #E0.7 [3] (🗑️..🗓️) wastebasket..spir...
|
||||
| 0xF0 0x9F 0x97 0x94..0x9B #E0.0 [8] (🗔..🗛) DESKTOP WINDOW..DEC...
|
||||
| 0xF0 0x9F 0x97 0x9C..0x9E #E0.7 [3] (🗜️..🗞️) clamp..rolled-up ...
|
||||
| 0xF0 0x9F 0x97 0x9F..0xA0 #E0.0 [2] (🗟..🗠) PAGE WITH CIRCLED T...
|
||||
| 0xF0 0x9F 0x97 0xA1 #E0.7 [1] (🗡️) dagger
|
||||
| 0xF0 0x9F 0x97 0xA2 #E0.0 [1] (🗢) LIPS
|
||||
| 0xF0 0x9F 0x97 0xA3 #E0.7 [1] (🗣️) speaking head
|
||||
| 0xF0 0x9F 0x97 0xA4..0xA7 #E0.0 [4] (🗤..🗧) THREE RAYS ABOVE..T...
|
||||
| 0xF0 0x9F 0x97 0xA8 #E2.0 [1] (🗨️) left speech bubble
|
||||
| 0xF0 0x9F 0x97 0xA9..0xAE #E0.0 [6] (🗩..🗮) RIGHT SPEECH BUBBLE...
|
||||
| 0xF0 0x9F 0x97 0xAF #E0.7 [1] (🗯️) right anger bubble
|
||||
| 0xF0 0x9F 0x97 0xB0..0xB2 #E0.0 [3] (🗰..🗲) MOOD BUBBLE..LIGHTN...
|
||||
| 0xF0 0x9F 0x97 0xB3 #E0.7 [1] (🗳️) ballot box with ballot
|
||||
| 0xF0 0x9F 0x97 0xB4..0xB9 #E0.0 [6] (🗴..🗹) BALLOT SCRIPT X..BA...
|
||||
| 0xF0 0x9F 0x97 0xBA #E0.7 [1] (🗺️) world map
|
||||
| 0xF0 0x9F 0x97 0xBB..0xBF #E0.6 [5] (🗻..🗿) mount fuji..moai
|
||||
| 0xF0 0x9F 0x98 0x80 #E1.0 [1] (😀) grinning face
|
||||
| 0xF0 0x9F 0x98 0x81..0x86 #E0.6 [6] (😁..😆) beaming face with s...
|
||||
| 0xF0 0x9F 0x98 0x87..0x88 #E1.0 [2] (😇..😈) smiling face with h...
|
||||
| 0xF0 0x9F 0x98 0x89..0x8D #E0.6 [5] (😉..😍) winking face..smili...
|
||||
| 0xF0 0x9F 0x98 0x8E #E1.0 [1] (😎) smiling face with sung...
|
||||
| 0xF0 0x9F 0x98 0x8F #E0.6 [1] (😏) smirking face
|
||||
| 0xF0 0x9F 0x98 0x90 #E0.7 [1] (😐) neutral face
|
||||
| 0xF0 0x9F 0x98 0x91 #E1.0 [1] (😑) expressionless face
|
||||
| 0xF0 0x9F 0x98 0x92..0x94 #E0.6 [3] (😒..😔) unamused face..pens...
|
||||
| 0xF0 0x9F 0x98 0x95 #E1.0 [1] (😕) confused face
|
||||
| 0xF0 0x9F 0x98 0x96 #E0.6 [1] (😖) confounded face
|
||||
| 0xF0 0x9F 0x98 0x97 #E1.0 [1] (😗) kissing face
|
||||
| 0xF0 0x9F 0x98 0x98 #E0.6 [1] (😘) face blowing a kiss
|
||||
| 0xF0 0x9F 0x98 0x99 #E1.0 [1] (😙) kissing face with smil...
|
||||
| 0xF0 0x9F 0x98 0x9A #E0.6 [1] (😚) kissing face with clos...
|
||||
| 0xF0 0x9F 0x98 0x9B #E1.0 [1] (😛) face with tongue
|
||||
| 0xF0 0x9F 0x98 0x9C..0x9E #E0.6 [3] (😜..😞) winking face with t...
|
||||
| 0xF0 0x9F 0x98 0x9F #E1.0 [1] (😟) worried face
|
||||
| 0xF0 0x9F 0x98 0xA0..0xA5 #E0.6 [6] (😠..😥) angry face..sad but...
|
||||
| 0xF0 0x9F 0x98 0xA6..0xA7 #E1.0 [2] (😦..😧) frowning face with ...
|
||||
| 0xF0 0x9F 0x98 0xA8..0xAB #E0.6 [4] (😨..😫) fearful face..tired...
|
||||
| 0xF0 0x9F 0x98 0xAC #E1.0 [1] (😬) grimacing face
|
||||
| 0xF0 0x9F 0x98 0xAD #E0.6 [1] (😭) loudly crying face
|
||||
| 0xF0 0x9F 0x98 0xAE..0xAF #E1.0 [2] (😮..😯) face with open mout...
|
||||
| 0xF0 0x9F 0x98 0xB0..0xB3 #E0.6 [4] (😰..😳) anxious face with s...
|
||||
| 0xF0 0x9F 0x98 0xB4 #E1.0 [1] (😴) sleeping face
|
||||
| 0xF0 0x9F 0x98 0xB5 #E0.6 [1] (😵) face with crossed-out ...
|
||||
| 0xF0 0x9F 0x98 0xB6 #E1.0 [1] (😶) face without mouth
|
||||
| 0xF0 0x9F 0x98 0xB7..0xFF #E0.6 [10] (😷..🙀) face with medical m...
|
||||
| 0xF0 0x9F 0x99 0x00..0x80 #
|
||||
| 0xF0 0x9F 0x99 0x81..0x84 #E1.0 [4] (🙁..🙄) slightly frowning f...
|
||||
| 0xF0 0x9F 0x99 0x85..0x8F #E0.6 [11] (🙅..🙏) person gesturing NO...
|
||||
| 0xF0 0x9F 0x9A 0x80 #E0.6 [1] (🚀) rocket
|
||||
| 0xF0 0x9F 0x9A 0x81..0x82 #E1.0 [2] (🚁..🚂) helicopter..locomotive
|
||||
| 0xF0 0x9F 0x9A 0x83..0x85 #E0.6 [3] (🚃..🚅) railway car..bullet...
|
||||
| 0xF0 0x9F 0x9A 0x86 #E1.0 [1] (🚆) train
|
||||
| 0xF0 0x9F 0x9A 0x87 #E0.6 [1] (🚇) metro
|
||||
| 0xF0 0x9F 0x9A 0x88 #E1.0 [1] (🚈) light rail
|
||||
| 0xF0 0x9F 0x9A 0x89 #E0.6 [1] (🚉) station
|
||||
| 0xF0 0x9F 0x9A 0x8A..0x8B #E1.0 [2] (🚊..🚋) tram..tram car
|
||||
| 0xF0 0x9F 0x9A 0x8C #E0.6 [1] (🚌) bus
|
||||
| 0xF0 0x9F 0x9A 0x8D #E0.7 [1] (🚍) oncoming bus
|
||||
| 0xF0 0x9F 0x9A 0x8E #E1.0 [1] (🚎) trolleybus
|
||||
| 0xF0 0x9F 0x9A 0x8F #E0.6 [1] (🚏) bus stop
|
||||
| 0xF0 0x9F 0x9A 0x90 #E1.0 [1] (🚐) minibus
|
||||
| 0xF0 0x9F 0x9A 0x91..0x93 #E0.6 [3] (🚑..🚓) ambulance..police car
|
||||
| 0xF0 0x9F 0x9A 0x94 #E0.7 [1] (🚔) oncoming police car
|
||||
| 0xF0 0x9F 0x9A 0x95 #E0.6 [1] (🚕) taxi
|
||||
| 0xF0 0x9F 0x9A 0x96 #E1.0 [1] (🚖) oncoming taxi
|
||||
| 0xF0 0x9F 0x9A 0x97 #E0.6 [1] (🚗) automobile
|
||||
| 0xF0 0x9F 0x9A 0x98 #E0.7 [1] (🚘) oncoming automobile
|
||||
| 0xF0 0x9F 0x9A 0x99..0x9A #E0.6 [2] (🚙..🚚) sport utility vehic...
|
||||
| 0xF0 0x9F 0x9A 0x9B..0xA1 #E1.0 [7] (🚛..🚡) articulated lorry.....
|
||||
| 0xF0 0x9F 0x9A 0xA2 #E0.6 [1] (🚢) ship
|
||||
| 0xF0 0x9F 0x9A 0xA3 #E1.0 [1] (🚣) person rowing boat
|
||||
| 0xF0 0x9F 0x9A 0xA4..0xA5 #E0.6 [2] (🚤..🚥) speedboat..horizont...
|
||||
| 0xF0 0x9F 0x9A 0xA6 #E1.0 [1] (🚦) vertical traffic light
|
||||
| 0xF0 0x9F 0x9A 0xA7..0xAD #E0.6 [7] (🚧..🚭) construction..no sm...
|
||||
| 0xF0 0x9F 0x9A 0xAE..0xB1 #E1.0 [4] (🚮..🚱) litter in bin sign....
|
||||
| 0xF0 0x9F 0x9A 0xB2 #E0.6 [1] (🚲) bicycle
|
||||
| 0xF0 0x9F 0x9A 0xB3..0xB5 #E1.0 [3] (🚳..🚵) no bicycles..person...
|
||||
| 0xF0 0x9F 0x9A 0xB6 #E0.6 [1] (🚶) person walking
|
||||
| 0xF0 0x9F 0x9A 0xB7..0xB8 #E1.0 [2] (🚷..🚸) no pedestrians..chi...
|
||||
| 0xF0 0x9F 0x9A 0xB9..0xBE #E0.6 [6] (🚹..🚾) men’s room..water c...
|
||||
| 0xF0 0x9F 0x9A 0xBF #E1.0 [1] (🚿) shower
|
||||
| 0xF0 0x9F 0x9B 0x80 #E0.6 [1] (🛀) person taking bath
|
||||
| 0xF0 0x9F 0x9B 0x81..0x85 #E1.0 [5] (🛁..🛅) bathtub..left luggage
|
||||
| 0xF0 0x9F 0x9B 0x86..0x8A #E0.0 [5] (🛆..🛊) TRIANGLE WITH ROUND...
|
||||
| 0xF0 0x9F 0x9B 0x8B #E0.7 [1] (🛋️) couch and lamp
|
||||
| 0xF0 0x9F 0x9B 0x8C #E1.0 [1] (🛌) person in bed
|
||||
| 0xF0 0x9F 0x9B 0x8D..0x8F #E0.7 [3] (🛍️..🛏️) shopping bags..bed
|
||||
| 0xF0 0x9F 0x9B 0x90 #E1.0 [1] (🛐) place of worship
|
||||
| 0xF0 0x9F 0x9B 0x91..0x92 #E3.0 [2] (🛑..🛒) stop sign..shopping...
|
||||
| 0xF0 0x9F 0x9B 0x93..0x94 #E0.0 [2] (🛓..🛔) STUPA..PAGODA
|
||||
| 0xF0 0x9F 0x9B 0x95 #E12.0 [1] (🛕) hindu temple
|
||||
| 0xF0 0x9F 0x9B 0x96..0x97 #E13.0 [2] (🛖..🛗) hut..elevator
|
||||
| 0xF0 0x9F 0x9B 0x98..0x9B #E0.0 [4] (..) <reserved-1F6D8>..<...
|
||||
| 0xF0 0x9F 0x9B 0x9C #E15.0 [1] (🛜) wireless
|
||||
| 0xF0 0x9F 0x9B 0x9D..0x9F #E14.0 [3] (🛝..🛟) playground slide..r...
|
||||
| 0xF0 0x9F 0x9B 0xA0..0xA5 #E0.7 [6] (🛠️..🛥️) hammer and wrench...
|
||||
| 0xF0 0x9F 0x9B 0xA6..0xA8 #E0.0 [3] (🛦..🛨) UP-POINTING MILITAR...
|
||||
| 0xF0 0x9F 0x9B 0xA9 #E0.7 [1] (🛩️) small airplane
|
||||
| 0xF0 0x9F 0x9B 0xAA #E0.0 [1] (🛪) NORTHEAST-POINTING AIR...
|
||||
| 0xF0 0x9F 0x9B 0xAB..0xAC #E1.0 [2] (🛫..🛬) airplane departure....
|
||||
| 0xF0 0x9F 0x9B 0xAD..0xAF #E0.0 [3] (..) <reserved-1F6ED>..<...
|
||||
| 0xF0 0x9F 0x9B 0xB0 #E0.7 [1] (🛰️) satellite
|
||||
| 0xF0 0x9F 0x9B 0xB1..0xB2 #E0.0 [2] (🛱..🛲) ONCOMING FIRE ENGIN...
|
||||
| 0xF0 0x9F 0x9B 0xB3 #E0.7 [1] (🛳️) passenger ship
|
||||
| 0xF0 0x9F 0x9B 0xB4..0xB6 #E3.0 [3] (🛴..🛶) kick scooter..canoe
|
||||
| 0xF0 0x9F 0x9B 0xB7..0xB8 #E5.0 [2] (🛷..🛸) sled..flying saucer
|
||||
| 0xF0 0x9F 0x9B 0xB9 #E11.0 [1] (🛹) skateboard
|
||||
| 0xF0 0x9F 0x9B 0xBA #E12.0 [1] (🛺) auto rickshaw
|
||||
| 0xF0 0x9F 0x9B 0xBB..0xBC #E13.0 [2] (🛻..🛼) pickup truck..rolle...
|
||||
| 0xF0 0x9F 0x9B 0xBD..0xBF #E0.0 [3] (..) <reserved-1F6FD>..<...
|
||||
| 0xF0 0x9F 0x9D 0xB4..0xBF #E0.0 [12] (🝴..🝿) LOT OF FORTUNE..ORCUS
|
||||
| 0xF0 0x9F 0x9F 0x95..0x9F #E0.0 [11] (🟕..) CIRCLED TRIANGLE..<...
|
||||
| 0xF0 0x9F 0x9F 0xA0..0xAB #E12.0 [12] (🟠..🟫) orange circle..brow...
|
||||
| 0xF0 0x9F 0x9F 0xAC..0xAF #E0.0 [4] (..) <reserved-1F7EC>..<...
|
||||
| 0xF0 0x9F 0x9F 0xB0 #E14.0 [1] (🟰) heavy equals sign
|
||||
| 0xF0 0x9F 0x9F 0xB1..0xBF #E0.0 [15] (..) <reserved-1F7F1>..<...
|
||||
| 0xF0 0x9F 0xA0 0x8C..0x8F #E0.0 [4] (..) <reserved-1F80C>..<...
|
||||
| 0xF0 0x9F 0xA1 0x88..0x8F #E0.0 [8] (..) <reserved-1F848>..<...
|
||||
| 0xF0 0x9F 0xA1 0x9A..0x9F #E0.0 [6] (..) <reserved-1F85A>..<...
|
||||
| 0xF0 0x9F 0xA2 0x88..0x8F #E0.0 [8] (..) <reserved-1F888>..<...
|
||||
| 0xF0 0x9F 0xA2 0xAE..0xFF #E0.0 [82] (..) <reserved-1F8AE>..<...
|
||||
| 0xF0 0x9F 0xA3 0x00..0xBF #
|
||||
| 0xF0 0x9F 0xA4 0x8C #E13.0 [1] (🤌) pinched fingers
|
||||
| 0xF0 0x9F 0xA4 0x8D..0x8F #E12.0 [3] (🤍..🤏) white heart..pinchi...
|
||||
| 0xF0 0x9F 0xA4 0x90..0x98 #E1.0 [9] (🤐..🤘) zipper-mouth face.....
|
||||
| 0xF0 0x9F 0xA4 0x99..0x9E #E3.0 [6] (🤙..🤞) call me hand..cross...
|
||||
| 0xF0 0x9F 0xA4 0x9F #E5.0 [1] (🤟) love-you gesture
|
||||
| 0xF0 0x9F 0xA4 0xA0..0xA7 #E3.0 [8] (🤠..🤧) cowboy hat face..sn...
|
||||
| 0xF0 0x9F 0xA4 0xA8..0xAF #E5.0 [8] (🤨..🤯) face with raised ey...
|
||||
| 0xF0 0x9F 0xA4 0xB0 #E3.0 [1] (🤰) pregnant woman
|
||||
| 0xF0 0x9F 0xA4 0xB1..0xB2 #E5.0 [2] (🤱..🤲) breast-feeding..pal...
|
||||
| 0xF0 0x9F 0xA4 0xB3..0xBA #E3.0 [8] (🤳..🤺) selfie..person fencing
|
||||
| 0xF0 0x9F 0xA4 0xBC..0xBE #E3.0 [3] (🤼..🤾) people wrestling..p...
|
||||
| 0xF0 0x9F 0xA4 0xBF #E12.0 [1] (🤿) diving mask
|
||||
| 0xF0 0x9F 0xA5 0x80..0x85 #E3.0 [6] (🥀..🥅) wilted flower..goal...
|
||||
| 0xF0 0x9F 0xA5 0x87..0x8B #E3.0 [5] (🥇..🥋) 1st place medal..ma...
|
||||
| 0xF0 0x9F 0xA5 0x8C #E5.0 [1] (🥌) curling stone
|
||||
| 0xF0 0x9F 0xA5 0x8D..0x8F #E11.0 [3] (🥍..🥏) lacrosse..flying disc
|
||||
| 0xF0 0x9F 0xA5 0x90..0x9E #E3.0 [15] (🥐..🥞) croissant..pancakes
|
||||
| 0xF0 0x9F 0xA5 0x9F..0xAB #E5.0 [13] (🥟..🥫) dumpling..canned food
|
||||
| 0xF0 0x9F 0xA5 0xAC..0xB0 #E11.0 [5] (🥬..🥰) leafy green..smilin...
|
||||
| 0xF0 0x9F 0xA5 0xB1 #E12.0 [1] (🥱) yawning face
|
||||
| 0xF0 0x9F 0xA5 0xB2 #E13.0 [1] (🥲) smiling face with tear
|
||||
| 0xF0 0x9F 0xA5 0xB3..0xB6 #E11.0 [4] (🥳..🥶) partying face..cold...
|
||||
| 0xF0 0x9F 0xA5 0xB7..0xB8 #E13.0 [2] (🥷..🥸) ninja..disguised face
|
||||
| 0xF0 0x9F 0xA5 0xB9 #E14.0 [1] (🥹) face holding back tears
|
||||
| 0xF0 0x9F 0xA5 0xBA #E11.0 [1] (🥺) pleading face
|
||||
| 0xF0 0x9F 0xA5 0xBB #E12.0 [1] (🥻) sari
|
||||
| 0xF0 0x9F 0xA5 0xBC..0xBF #E11.0 [4] (🥼..🥿) lab coat..flat shoe
|
||||
| 0xF0 0x9F 0xA6 0x80..0x84 #E1.0 [5] (🦀..🦄) crab..unicorn
|
||||
| 0xF0 0x9F 0xA6 0x85..0x91 #E3.0 [13] (🦅..🦑) eagle..squid
|
||||
| 0xF0 0x9F 0xA6 0x92..0x97 #E5.0 [6] (🦒..🦗) giraffe..cricket
|
||||
| 0xF0 0x9F 0xA6 0x98..0xA2 #E11.0 [11] (🦘..🦢) kangaroo..swan
|
||||
| 0xF0 0x9F 0xA6 0xA3..0xA4 #E13.0 [2] (🦣..🦤) mammoth..dodo
|
||||
| 0xF0 0x9F 0xA6 0xA5..0xAA #E12.0 [6] (🦥..🦪) sloth..oyster
|
||||
| 0xF0 0x9F 0xA6 0xAB..0xAD #E13.0 [3] (🦫..🦭) beaver..seal
|
||||
| 0xF0 0x9F 0xA6 0xAE..0xAF #E12.0 [2] (🦮..🦯) guide dog..white cane
|
||||
| 0xF0 0x9F 0xA6 0xB0..0xB9 #E11.0 [10] (🦰..🦹) red hair..supervillain
|
||||
| 0xF0 0x9F 0xA6 0xBA..0xBF #E12.0 [6] (🦺..🦿) safety vest..mechan...
|
||||
| 0xF0 0x9F 0xA7 0x80 #E1.0 [1] (🧀) cheese wedge
|
||||
| 0xF0 0x9F 0xA7 0x81..0x82 #E11.0 [2] (🧁..🧂) cupcake..salt
|
||||
| 0xF0 0x9F 0xA7 0x83..0x8A #E12.0 [8] (🧃..🧊) beverage box..ice
|
||||
| 0xF0 0x9F 0xA7 0x8B #E13.0 [1] (🧋) bubble tea
|
||||
| 0xF0 0x9F 0xA7 0x8C #E14.0 [1] (🧌) troll
|
||||
| 0xF0 0x9F 0xA7 0x8D..0x8F #E12.0 [3] (🧍..🧏) person standing..de...
|
||||
| 0xF0 0x9F 0xA7 0x90..0xA6 #E5.0 [23] (🧐..🧦) face with monocle.....
|
||||
| 0xF0 0x9F 0xA7 0xA7..0xBF #E11.0 [25] (🧧..🧿) red envelope..nazar...
|
||||
| 0xF0 0x9F 0xA8 0x80..0xFF #E0.0 [112] (🨀..) NEUTRAL CHESS KING....
|
||||
| 0xF0 0x9F 0xA9 0x00..0xAF #
|
||||
| 0xF0 0x9F 0xA9 0xB0..0xB3 #E12.0 [4] (🩰..🩳) ballet shoes..shorts
|
||||
| 0xF0 0x9F 0xA9 0xB4 #E13.0 [1] (🩴) thong sandal
|
||||
| 0xF0 0x9F 0xA9 0xB5..0xB7 #E15.0 [3] (🩵..🩷) light blue heart..p...
|
||||
| 0xF0 0x9F 0xA9 0xB8..0xBA #E12.0 [3] (🩸..🩺) drop of blood..stet...
|
||||
| 0xF0 0x9F 0xA9 0xBB..0xBC #E14.0 [2] (🩻..🩼) x-ray..crutch
|
||||
| 0xF0 0x9F 0xA9 0xBD..0xBF #E0.0 [3] (..) <reserved-1FA7D>..<...
|
||||
| 0xF0 0x9F 0xAA 0x80..0x82 #E12.0 [3] (🪀..🪂) yo-yo..parachute
|
||||
| 0xF0 0x9F 0xAA 0x83..0x86 #E13.0 [4] (🪃..🪆) boomerang..nesting ...
|
||||
| 0xF0 0x9F 0xAA 0x87..0x88 #E15.0 [2] (🪇..🪈) maracas..flute
|
||||
| 0xF0 0x9F 0xAA 0x89..0x8F #E0.0 [7] (..) <reserved-1FA89>..<...
|
||||
| 0xF0 0x9F 0xAA 0x90..0x95 #E12.0 [6] (🪐..🪕) ringed planet..banjo
|
||||
| 0xF0 0x9F 0xAA 0x96..0xA8 #E13.0 [19] (🪖..🪨) military helmet..rock
|
||||
| 0xF0 0x9F 0xAA 0xA9..0xAC #E14.0 [4] (🪩..🪬) mirror ball..hamsa
|
||||
| 0xF0 0x9F 0xAA 0xAD..0xAF #E15.0 [3] (🪭..🪯) folding hand fan..k...
|
||||
| 0xF0 0x9F 0xAA 0xB0..0xB6 #E13.0 [7] (🪰..🪶) fly..feather
|
||||
| 0xF0 0x9F 0xAA 0xB7..0xBA #E14.0 [4] (🪷..🪺) lotus..nest with eggs
|
||||
| 0xF0 0x9F 0xAA 0xBB..0xBD #E15.0 [3] (🪻..🪽) hyacinth..wing
|
||||
| 0xF0 0x9F 0xAA 0xBE #E0.0 [1] () <reserved-1FABE>
|
||||
| 0xF0 0x9F 0xAA 0xBF #E15.0 [1] (🪿) goose
|
||||
| 0xF0 0x9F 0xAB 0x80..0x82 #E13.0 [3] (🫀..🫂) anatomical heart..p...
|
||||
| 0xF0 0x9F 0xAB 0x83..0x85 #E14.0 [3] (🫃..🫅) pregnant man..perso...
|
||||
| 0xF0 0x9F 0xAB 0x86..0x8D #E0.0 [8] (..) <reserved-1FAC6>..<...
|
||||
| 0xF0 0x9F 0xAB 0x8E..0x8F #E15.0 [2] (🫎..🫏) moose..donkey
|
||||
| 0xF0 0x9F 0xAB 0x90..0x96 #E13.0 [7] (🫐..🫖) blueberries..teapot
|
||||
| 0xF0 0x9F 0xAB 0x97..0x99 #E14.0 [3] (🫗..🫙) pouring liquid..jar
|
||||
| 0xF0 0x9F 0xAB 0x9A..0x9B #E15.0 [2] (🫚..🫛) ginger root..pea pod
|
||||
| 0xF0 0x9F 0xAB 0x9C..0x9F #E0.0 [4] (..) <reserved-1FADC>..<...
|
||||
| 0xF0 0x9F 0xAB 0xA0..0xA7 #E14.0 [8] (🫠..🫧) melting face..bubbles
|
||||
| 0xF0 0x9F 0xAB 0xA8 #E15.0 [1] (🫨) shaking face
|
||||
| 0xF0 0x9F 0xAB 0xA9..0xAF #E0.0 [7] (..) <reserved-1FAE9>..<...
|
||||
| 0xF0 0x9F 0xAB 0xB0..0xB6 #E14.0 [7] (🫰..🫶) hand with index fin...
|
||||
| 0xF0 0x9F 0xAB 0xB7..0xB8 #E15.0 [2] (🫷..🫸) leftwards pushing h...
|
||||
| 0xF0 0x9F 0xAB 0xB9..0xBF #E0.0 [7] (..) <reserved-1FAF9>..<...
|
||||
| 0xF0 0x9F 0xB0 0x80..0xFF #E0.0[1022] (..) <reserved-1FC...
|
||||
| 0xF0 0x9F 0xB1..0xBE 0x00..0xFF #
|
||||
| 0xF0 0x9F 0xBF 0x00..0xBD #
|
||||
;
|
||||
|
||||
}%%
|
||||
8
vendor/github.com/apparentlymart/go-textseg/v15/textseg/generate.go
generated
vendored
8
vendor/github.com/apparentlymart/go-textseg/v15/textseg/generate.go
generated
vendored
@@ -1,8 +0,0 @@
|
||||
package textseg
|
||||
|
||||
//go:generate go run make_tables.go -output tables.go
|
||||
//go:generate go run make_test_tables.go -output tables_test.go
|
||||
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/15.0.0/ucd/auxiliary/GraphemeBreakProperty.txt -m GraphemeCluster -p "Prepend,CR,LF,Control,Extend,Regional_Indicator,SpacingMark,L,V,T,LV,LVT,ZWJ" -o grapheme_clusters_table.rl
|
||||
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt -m Emoji -p "Extended_Pictographic" -o emoji_table.rl
|
||||
//go:generate ragel -Z grapheme_clusters.rl
|
||||
//go:generate gofmt -w grapheme_clusters.go
|
||||
4349
vendor/github.com/apparentlymart/go-textseg/v15/textseg/grapheme_clusters.go
generated
vendored
4349
vendor/github.com/apparentlymart/go-textseg/v15/textseg/grapheme_clusters.go
generated
vendored
File diff suppressed because it is too large
Load Diff
133
vendor/github.com/apparentlymart/go-textseg/v15/textseg/grapheme_clusters.rl
generated
vendored
133
vendor/github.com/apparentlymart/go-textseg/v15/textseg/grapheme_clusters.rl
generated
vendored
@@ -1,133 +0,0 @@
|
||||
package textseg
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// Generated from grapheme_clusters.rl. DO NOT EDIT
|
||||
%%{
|
||||
# (except you are actually in grapheme_clusters.rl here, so edit away!)
|
||||
|
||||
machine graphclust;
|
||||
write data;
|
||||
}%%
|
||||
|
||||
var Error = errors.New("invalid UTF8 text")
|
||||
|
||||
// ScanGraphemeClusters is a split function for bufio.Scanner that splits
|
||||
// on grapheme cluster boundaries.
|
||||
func ScanGraphemeClusters(data []byte, atEOF bool) (int, []byte, error) {
|
||||
if len(data) == 0 {
|
||||
return 0, nil, nil
|
||||
}
|
||||
|
||||
// Ragel state
|
||||
cs := 0 // Current State
|
||||
p := 0 // "Pointer" into data
|
||||
pe := len(data) // End-of-data "pointer"
|
||||
ts := 0
|
||||
te := 0
|
||||
act := 0
|
||||
eof := pe
|
||||
|
||||
// Make Go compiler happy
|
||||
_ = ts
|
||||
_ = te
|
||||
_ = act
|
||||
_ = eof
|
||||
|
||||
startPos := 0
|
||||
endPos := 0
|
||||
|
||||
%%{
|
||||
include GraphemeCluster "grapheme_clusters_table.rl";
|
||||
include Emoji "emoji_table.rl";
|
||||
|
||||
action start {
|
||||
startPos = p
|
||||
}
|
||||
|
||||
action end {
|
||||
endPos = p
|
||||
}
|
||||
|
||||
action emit {
|
||||
return endPos+1, data[startPos:endPos+1], nil
|
||||
}
|
||||
|
||||
ZWJGlue = ZWJ (Extended_Pictographic Extend*)?;
|
||||
AnyExtender = Extend | ZWJGlue | SpacingMark;
|
||||
Extension = AnyExtender*;
|
||||
ReplacementChar = (0xEF 0xBF 0xBD);
|
||||
|
||||
CRLFSeq = CR LF;
|
||||
ControlSeq = Control | ReplacementChar;
|
||||
HangulSeq = (
|
||||
L+ (((LV? V+ | LVT) T*)?|LV?) |
|
||||
LV V* T* |
|
||||
V+ T* |
|
||||
LVT T* |
|
||||
T+
|
||||
) Extension;
|
||||
EmojiSeq = Extended_Pictographic Extend* Extension;
|
||||
ZWJSeq = ZWJ (ZWJ | Extend | SpacingMark)*;
|
||||
EmojiFlagSeq = Regional_Indicator Regional_Indicator? Extension;
|
||||
|
||||
UTF8Cont = 0x80 .. 0xBF;
|
||||
AnyUTF8 = (
|
||||
0x00..0x7F |
|
||||
0xC0..0xDF . UTF8Cont |
|
||||
0xE0..0xEF . UTF8Cont . UTF8Cont |
|
||||
0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
|
||||
);
|
||||
|
||||
# OtherSeq is any character that isn't at the start of one of the extended sequences above, followed by extension
|
||||
OtherSeq = (AnyUTF8 - (CR|LF|Control|ReplacementChar|L|LV|V|LVT|T|Extended_Pictographic|ZWJ|Regional_Indicator|Prepend)) (Extend | ZWJ | SpacingMark)*;
|
||||
|
||||
# PrependSeq is prepend followed by any of the other patterns above, except control characters which explicitly break
|
||||
PrependSeq = Prepend+ (HangulSeq|EmojiSeq|ZWJSeq|EmojiFlagSeq|OtherSeq)?;
|
||||
|
||||
CRLFTok = CRLFSeq >start @end;
|
||||
ControlTok = ControlSeq >start @end;
|
||||
HangulTok = HangulSeq >start @end;
|
||||
EmojiTok = EmojiSeq >start @end;
|
||||
ZWJTok = ZWJSeq >start @end;
|
||||
EmojiFlagTok = EmojiFlagSeq >start @end;
|
||||
OtherTok = OtherSeq >start @end;
|
||||
PrependTok = PrependSeq >start @end;
|
||||
|
||||
main := |*
|
||||
CRLFTok => emit;
|
||||
ControlTok => emit;
|
||||
HangulTok => emit;
|
||||
EmojiTok => emit;
|
||||
ZWJTok => emit;
|
||||
EmojiFlagTok => emit;
|
||||
PrependTok => emit;
|
||||
OtherTok => emit;
|
||||
|
||||
# any single valid UTF-8 character would also be valid per spec,
|
||||
# but we'll handle that separately after the loop so we can deal
|
||||
# with requesting more bytes if we're not at EOF.
|
||||
*|;
|
||||
|
||||
write init;
|
||||
write exec;
|
||||
}%%
|
||||
|
||||
// If we fall out here then we were unable to complete a sequence.
|
||||
// If we weren't able to complete a sequence then either we've
|
||||
// reached the end of a partial buffer (so there's more data to come)
|
||||
// or we have an isolated symbol that would normally be part of a
|
||||
// grapheme cluster but has appeared in isolation here.
|
||||
|
||||
if !atEOF {
|
||||
// Request more
|
||||
return 0, nil, nil
|
||||
}
|
||||
|
||||
// Just take the first UTF-8 sequence and return that.
|
||||
_, seqLen := utf8.DecodeRune(data)
|
||||
return seqLen, data[:seqLen], nil
|
||||
}
|
||||
1637
vendor/github.com/apparentlymart/go-textseg/v15/textseg/grapheme_clusters_table.rl
generated
vendored
1637
vendor/github.com/apparentlymart/go-textseg/v15/textseg/grapheme_clusters_table.rl
generated
vendored
File diff suppressed because it is too large
Load Diff
6120
vendor/github.com/apparentlymart/go-textseg/v15/textseg/tables.go
generated
vendored
6120
vendor/github.com/apparentlymart/go-textseg/v15/textseg/tables.go
generated
vendored
File diff suppressed because it is too large
Load Diff
335
vendor/github.com/apparentlymart/go-textseg/v15/textseg/unicode2ragel.rb
generated
vendored
335
vendor/github.com/apparentlymart/go-textseg/v15/textseg/unicode2ragel.rb
generated
vendored
@@ -1,335 +0,0 @@
|
||||
#!/usr/bin/env ruby
|
||||
#
|
||||
# This scripted has been updated to accept more command-line arguments:
|
||||
#
|
||||
# -u, --url URL to process
|
||||
# -m, --machine Machine name
|
||||
# -p, --properties Properties to add to the machine
|
||||
# -o, --output Write output to file
|
||||
#
|
||||
# Updated by: Marty Schoch <marty.schoch@gmail.com>
|
||||
#
|
||||
# This script uses the unicode spec to generate a Ragel state machine
|
||||
# that recognizes unicode alphanumeric characters. It generates 5
|
||||
# character classes: uupper, ulower, ualpha, udigit, and ualnum.
|
||||
# Currently supported encodings are UTF-8 [default] and UCS-4.
|
||||
#
|
||||
# Usage: unicode2ragel.rb [options]
|
||||
# -e, --encoding [ucs4 | utf8] Data encoding
|
||||
# -h, --help Show this message
|
||||
#
|
||||
# This script was originally written as part of the Ferret search
|
||||
# engine library.
|
||||
#
|
||||
# Author: Rakan El-Khalil <rakan@well.com>
|
||||
|
||||
require 'optparse'
|
||||
require 'open-uri'
|
||||
|
||||
ENCODINGS = [ :utf8, :ucs4 ]
|
||||
ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" }
|
||||
DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt"
|
||||
DEFAULT_MACHINE_NAME= "WChar"
|
||||
|
||||
###
|
||||
# Display vars & default option
|
||||
|
||||
TOTAL_WIDTH = 80
|
||||
RANGE_WIDTH = 23
|
||||
@encoding = :utf8
|
||||
@chart_url = DEFAULT_CHART_URL
|
||||
machine_name = DEFAULT_MACHINE_NAME
|
||||
properties = []
|
||||
@output = $stdout
|
||||
|
||||
###
|
||||
# Option parsing
|
||||
|
||||
cli_opts = OptionParser.new do |opts|
|
||||
opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o|
|
||||
@encoding = o.downcase.to_sym
|
||||
end
|
||||
opts.on("-h", "--help", "Show this message") do
|
||||
puts opts
|
||||
exit
|
||||
end
|
||||
opts.on("-u", "--url URL", "URL to process") do |o|
|
||||
@chart_url = o
|
||||
end
|
||||
opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o|
|
||||
machine_name = o
|
||||
end
|
||||
opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o|
|
||||
properties = o
|
||||
end
|
||||
opts.on("-o", "--output FILE", "output file") do |o|
|
||||
@output = File.new(o, "w+")
|
||||
end
|
||||
end
|
||||
|
||||
cli_opts.parse(ARGV)
|
||||
unless ENCODINGS.member? @encoding
|
||||
puts "Invalid encoding: #{@encoding}"
|
||||
puts cli_opts
|
||||
exit
|
||||
end
|
||||
|
||||
##
|
||||
# Downloads the document at url and yields every alpha line's hex
|
||||
# range and description.
|
||||
|
||||
def each_alpha( url, property )
|
||||
URI.open( url ) do |file|
|
||||
file.each_line do |line|
|
||||
next if line =~ /^#/;
|
||||
next if line !~ /; #{property} *#/;
|
||||
|
||||
range, description = line.split(/;/)
|
||||
range.strip!
|
||||
description.gsub!(/.*#/, '').strip!
|
||||
|
||||
if range =~ /\.\./
|
||||
start, stop = range.split '..'
|
||||
else start = stop = range
|
||||
end
|
||||
|
||||
yield start.hex .. stop.hex, description
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
###
|
||||
# Formats to hex at minimum width
|
||||
|
||||
def to_hex( n )
|
||||
r = "%0X" % n
|
||||
r = "0#{r}" unless (r.length % 2).zero?
|
||||
r
|
||||
end
|
||||
|
||||
###
|
||||
# UCS4 is just a straight hex conversion of the unicode codepoint.
|
||||
|
||||
def to_ucs4( range )
|
||||
rangestr = "0x" + to_hex(range.begin)
|
||||
rangestr << "..0x" + to_hex(range.end) if range.begin != range.end
|
||||
[ rangestr ]
|
||||
end
|
||||
|
||||
##
|
||||
# 0x00 - 0x7f -> 0zzzzzzz[7]
|
||||
# 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6]
|
||||
# 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6]
|
||||
# 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6]
|
||||
|
||||
UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff]
|
||||
|
||||
def to_utf8_enc( n )
|
||||
r = 0
|
||||
if n <= 0x7f
|
||||
r = n
|
||||
elsif n <= 0x7ff
|
||||
y = 0xc0 | (n >> 6)
|
||||
z = 0x80 | (n & 0x3f)
|
||||
r = y << 8 | z
|
||||
elsif n <= 0xffff
|
||||
x = 0xe0 | (n >> 12)
|
||||
y = 0x80 | (n >> 6) & 0x3f
|
||||
z = 0x80 | n & 0x3f
|
||||
r = x << 16 | y << 8 | z
|
||||
elsif n <= 0x10ffff
|
||||
w = 0xf0 | (n >> 18)
|
||||
x = 0x80 | (n >> 12) & 0x3f
|
||||
y = 0x80 | (n >> 6) & 0x3f
|
||||
z = 0x80 | n & 0x3f
|
||||
r = w << 24 | x << 16 | y << 8 | z
|
||||
end
|
||||
|
||||
to_hex(r)
|
||||
end
|
||||
|
||||
def from_utf8_enc( n )
|
||||
n = n.hex
|
||||
r = 0
|
||||
if n <= 0x7f
|
||||
r = n
|
||||
elsif n <= 0xdfff
|
||||
y = (n >> 8) & 0x1f
|
||||
z = n & 0x3f
|
||||
r = y << 6 | z
|
||||
elsif n <= 0xefffff
|
||||
x = (n >> 16) & 0x0f
|
||||
y = (n >> 8) & 0x3f
|
||||
z = n & 0x3f
|
||||
r = x << 10 | y << 6 | z
|
||||
elsif n <= 0xf7ffffff
|
||||
w = (n >> 24) & 0x07
|
||||
x = (n >> 16) & 0x3f
|
||||
y = (n >> 8) & 0x3f
|
||||
z = n & 0x3f
|
||||
r = w << 18 | x << 12 | y << 6 | z
|
||||
end
|
||||
r
|
||||
end
|
||||
|
||||
###
|
||||
# Given a range, splits it up into ranges that can be continuously
|
||||
# encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff]
|
||||
# This is not strictly needed since the current [5.1] unicode standard
|
||||
# doesn't have ranges that straddle utf8 boundaries. This is included
|
||||
# for completeness as there is no telling if that will ever change.
|
||||
|
||||
def utf8_ranges( range )
|
||||
ranges = []
|
||||
UTF8_BOUNDARIES.each do |max|
|
||||
if range.begin <= max
|
||||
if range.end <= max
|
||||
ranges << range
|
||||
return ranges
|
||||
end
|
||||
|
||||
ranges << (range.begin .. max)
|
||||
range = (max + 1) .. range.end
|
||||
end
|
||||
end
|
||||
ranges
|
||||
end
|
||||
|
||||
def build_range( start, stop )
|
||||
size = start.size/2
|
||||
left = size - 1
|
||||
return [""] if size < 1
|
||||
|
||||
a = start[0..1]
|
||||
b = stop[0..1]
|
||||
|
||||
###
|
||||
# Shared prefix
|
||||
|
||||
if a == b
|
||||
return build_range(start[2..-1], stop[2..-1]).map do |elt|
|
||||
"0x#{a} " + elt
|
||||
end
|
||||
end
|
||||
|
||||
###
|
||||
# Unshared prefix, end of run
|
||||
|
||||
return ["0x#{a}..0x#{b} "] if left.zero?
|
||||
|
||||
###
|
||||
# Unshared prefix, not end of run
|
||||
# Range can be 0x123456..0x56789A
|
||||
# Which is equivalent to:
|
||||
# 0x123456 .. 0x12FFFF
|
||||
# 0x130000 .. 0x55FFFF
|
||||
# 0x560000 .. 0x56789A
|
||||
|
||||
ret = []
|
||||
ret << build_range(start, a + "FF" * left)
|
||||
|
||||
###
|
||||
# Only generate middle range if need be.
|
||||
|
||||
if a.hex+1 != b.hex
|
||||
max = to_hex(b.hex - 1)
|
||||
max = "FF" if b == "FF"
|
||||
ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left
|
||||
end
|
||||
|
||||
###
|
||||
# Don't generate last range if it is covered by first range
|
||||
|
||||
ret << build_range(b + "00" * left, stop) unless b == "FF"
|
||||
ret.flatten!
|
||||
end
|
||||
|
||||
def to_utf8( range )
|
||||
utf8_ranges( range ).map do |r|
|
||||
begin_enc = to_utf8_enc(r.begin)
|
||||
end_enc = to_utf8_enc(r.end)
|
||||
build_range begin_enc, end_enc
|
||||
end.flatten!
|
||||
end
|
||||
|
||||
##
|
||||
# Perform a 3-way comparison of the number of codepoints advertised by
|
||||
# the unicode spec for the given range, the originally parsed range,
|
||||
# and the resulting utf8 encoded range.
|
||||
|
||||
def count_codepoints( code )
|
||||
code.split(' ').inject(1) do |acc, elt|
|
||||
if elt =~ /0x(.+)\.\.0x(.+)/
|
||||
if @encoding == :utf8
|
||||
acc * (from_utf8_enc($2) - from_utf8_enc($1) + 1)
|
||||
else
|
||||
acc * ($2.hex - $1.hex + 1)
|
||||
end
|
||||
else
|
||||
acc
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def is_valid?( range, desc, codes )
|
||||
spec_count = 1
|
||||
spec_count = $1.to_i if desc =~ /\[(\d+)\]/
|
||||
range_count = range.end - range.begin + 1
|
||||
|
||||
sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) }
|
||||
sum == spec_count and sum == range_count
|
||||
end
|
||||
|
||||
##
|
||||
# Generate the state maching to stdout
|
||||
|
||||
def generate_machine( name, property )
|
||||
pipe = " "
|
||||
@output.puts " #{name} = "
|
||||
each_alpha( @chart_url, property ) do |range, desc|
|
||||
|
||||
codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range)
|
||||
|
||||
#raise "Invalid encoding of range #{range}: #{codes.inspect}" unless
|
||||
# is_valid? range, desc, codes
|
||||
|
||||
range_width = codes.map { |a| a.size }.max
|
||||
range_width = RANGE_WIDTH if range_width < RANGE_WIDTH
|
||||
|
||||
desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11
|
||||
desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH
|
||||
|
||||
if desc.size > desc_width
|
||||
desc = desc[0..desc_width - 4] + "..."
|
||||
end
|
||||
|
||||
codes.each_with_index do |r, idx|
|
||||
desc = "" unless idx.zero?
|
||||
code = "%-#{range_width}s" % r
|
||||
@output.puts " #{pipe} #{code} ##{desc}"
|
||||
pipe = "|"
|
||||
end
|
||||
end
|
||||
@output.puts " ;"
|
||||
@output.puts ""
|
||||
end
|
||||
|
||||
@output.puts <<EOF
|
||||
# The following Ragel file was autogenerated with #{$0}
|
||||
# from: #{@chart_url}
|
||||
#
|
||||
# It defines #{properties}.
|
||||
#
|
||||
# To use this, make sure that your alphtype is set to #{ALPHTYPES[@encoding]},
|
||||
# and that your input is in #{@encoding}.
|
||||
|
||||
%%{
|
||||
machine #{machine_name};
|
||||
|
||||
EOF
|
||||
|
||||
properties.each { |x| generate_machine( x, x ) }
|
||||
|
||||
@output.puts <<EOF
|
||||
}%%
|
||||
EOF
|
||||
19
vendor/github.com/apparentlymart/go-textseg/v15/textseg/utf8_seqs.go
generated
vendored
19
vendor/github.com/apparentlymart/go-textseg/v15/textseg/utf8_seqs.go
generated
vendored
@@ -1,19 +0,0 @@
|
||||
package textseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// ScanGraphemeClusters is a split function for bufio.Scanner that splits
|
||||
// on UTF8 sequence boundaries.
|
||||
//
|
||||
// This is included largely for completeness, since this behavior is already
|
||||
// built in to Go when ranging over a string.
|
||||
func ScanUTF8Sequences(data []byte, atEOF bool) (int, []byte, error) {
|
||||
if len(data) == 0 {
|
||||
return 0, nil, nil
|
||||
}
|
||||
r, seqLen := utf8.DecodeRune(data)
|
||||
if r == utf8.RuneError && !atEOF {
|
||||
return 0, nil, nil
|
||||
}
|
||||
return seqLen, data[:seqLen], nil
|
||||
}
|
||||
13
vendor/github.com/araddon/dateparse/.travis.yml
generated
vendored
13
vendor/github.com/araddon/dateparse/.travis.yml
generated
vendored
@@ -1,13 +0,0 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.13.x
|
||||
|
||||
before_install:
|
||||
- go get -t -v ./...
|
||||
|
||||
script:
|
||||
- go test -race -coverprofile=coverage.txt -covermode=atomic
|
||||
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
||||
21
vendor/github.com/araddon/dateparse/LICENSE
generated
vendored
21
vendor/github.com/araddon/dateparse/LICENSE
generated
vendored
@@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015-2017 Aaron Raddon
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
323
vendor/github.com/araddon/dateparse/README.md
generated
vendored
323
vendor/github.com/araddon/dateparse/README.md
generated
vendored
@@ -1,323 +0,0 @@
|
||||
Go Date Parser
|
||||
---------------------------
|
||||
|
||||
Parse many date strings without knowing format in advance. Uses a scanner to read bytes and use a state machine to find format. Much faster than shotgun based parse methods. See [bench_test.go](https://github.com/araddon/dateparse/blob/master/bench_test.go) for performance comparison.
|
||||
|
||||
|
||||
[](https://codecov.io/gh/araddon/dateparse)
|
||||
[](http://godoc.org/github.com/araddon/dateparse)
|
||||
[](https://travis-ci.org/araddon/dateparse)
|
||||
[](https://goreportcard.com/report/araddon/dateparse)
|
||||
|
||||
**MM/DD/YYYY VS DD/MM/YYYY** Right now this uses mm/dd/yyyy WHEN ambiguous if this is not desired behavior, use `ParseStrict` which will fail on ambiguous date strings.
|
||||
|
||||
**Timezones** The location your server is configured affects the results! See example or https://play.golang.org/p/IDHRalIyXh and last paragraph here https://golang.org/pkg/time/#Parse.
|
||||
|
||||
|
||||
```go
|
||||
|
||||
// Normal parse. Equivalent Timezone rules as time.Parse()
|
||||
t, err := dateparse.ParseAny("3/1/2014")
|
||||
|
||||
// Parse Strict, error on ambigous mm/dd vs dd/mm dates
|
||||
t, err := dateparse.ParseStrict("3/1/2014")
|
||||
> returns error
|
||||
|
||||
// Return a string that represents the layout to parse the given date-time.
|
||||
layout, err := dateparse.ParseFormat("May 8, 2009 5:57:51 PM")
|
||||
> "Jan 2, 2006 3:04:05 PM"
|
||||
|
||||
```
|
||||
|
||||
cli tool for testing dateformats
|
||||
----------------------------------
|
||||
|
||||
[Date Parse CLI](https://github.com/araddon/dateparse/blob/master/dateparse)
|
||||
|
||||
|
||||
Extended example
|
||||
-------------------
|
||||
|
||||
https://github.com/araddon/dateparse/blob/master/example/main.go
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/scylladb/termtables"
|
||||
"github.com/araddon/dateparse"
|
||||
)
|
||||
|
||||
var examples = []string{
|
||||
"May 8, 2009 5:57:51 PM",
|
||||
"oct 7, 1970",
|
||||
"oct 7, '70",
|
||||
"oct. 7, 1970",
|
||||
"oct. 7, 70",
|
||||
"Mon Jan 2 15:04:05 2006",
|
||||
"Mon Jan 2 15:04:05 MST 2006",
|
||||
"Mon Jan 02 15:04:05 -0700 2006",
|
||||
"Monday, 02-Jan-06 15:04:05 MST",
|
||||
"Mon, 02 Jan 2006 15:04:05 MST",
|
||||
"Tue, 11 Jul 2017 16:28:13 +0200 (CEST)",
|
||||
"Mon, 02 Jan 2006 15:04:05 -0700",
|
||||
"Mon 30 Sep 2018 09:09:09 PM UTC",
|
||||
"Mon Aug 10 15:44:11 UTC+0100 2015",
|
||||
"Thu, 4 Jan 2018 17:53:36 +0000",
|
||||
"Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)",
|
||||
"Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00)",
|
||||
"September 17, 2012 10:09am",
|
||||
"September 17, 2012 at 10:09am PST-08",
|
||||
"September 17, 2012, 10:10:09",
|
||||
"October 7, 1970",
|
||||
"October 7th, 1970",
|
||||
"12 Feb 2006, 19:17",
|
||||
"12 Feb 2006 19:17",
|
||||
"14 May 2019 19:11:40.164",
|
||||
"7 oct 70",
|
||||
"7 oct 1970",
|
||||
"03 February 2013",
|
||||
"1 July 2013",
|
||||
"2013-Feb-03",
|
||||
// dd/Mon/yyy alpha Months
|
||||
"06/Jan/2008:15:04:05 -0700",
|
||||
"06/Jan/2008 15:04:05 -0700",
|
||||
// mm/dd/yy
|
||||
"3/31/2014",
|
||||
"03/31/2014",
|
||||
"08/21/71",
|
||||
"8/1/71",
|
||||
"4/8/2014 22:05",
|
||||
"04/08/2014 22:05",
|
||||
"4/8/14 22:05",
|
||||
"04/2/2014 03:00:51",
|
||||
"8/8/1965 12:00:00 AM",
|
||||
"8/8/1965 01:00:01 PM",
|
||||
"8/8/1965 01:00 PM",
|
||||
"8/8/1965 1:00 PM",
|
||||
"8/8/1965 12:00 AM",
|
||||
"4/02/2014 03:00:51",
|
||||
"03/19/2012 10:11:59",
|
||||
"03/19/2012 10:11:59.3186369",
|
||||
// yyyy/mm/dd
|
||||
"2014/3/31",
|
||||
"2014/03/31",
|
||||
"2014/4/8 22:05",
|
||||
"2014/04/08 22:05",
|
||||
"2014/04/2 03:00:51",
|
||||
"2014/4/02 03:00:51",
|
||||
"2012/03/19 10:11:59",
|
||||
"2012/03/19 10:11:59.3186369",
|
||||
// yyyy:mm:dd
|
||||
"2014:3:31",
|
||||
"2014:03:31",
|
||||
"2014:4:8 22:05",
|
||||
"2014:04:08 22:05",
|
||||
"2014:04:2 03:00:51",
|
||||
"2014:4:02 03:00:51",
|
||||
"2012:03:19 10:11:59",
|
||||
"2012:03:19 10:11:59.3186369",
|
||||
// Chinese
|
||||
"2014年04月08日",
|
||||
// yyyy-mm-ddThh
|
||||
"2006-01-02T15:04:05+0000",
|
||||
"2009-08-12T22:15:09-07:00",
|
||||
"2009-08-12T22:15:09",
|
||||
"2009-08-12T22:15:09.988",
|
||||
"2009-08-12T22:15:09Z",
|
||||
"2017-07-19T03:21:51:897+0100",
|
||||
"2019-05-29T08:41-04", // no seconds, 2 digit TZ offset
|
||||
// yyyy-mm-dd hh:mm:ss
|
||||
"2014-04-26 17:24:37.3186369",
|
||||
"2012-08-03 18:31:59.257000000",
|
||||
"2014-04-26 17:24:37.123",
|
||||
"2013-04-01 22:43",
|
||||
"2013-04-01 22:43:22",
|
||||
"2014-12-16 06:20:00 UTC",
|
||||
"2014-12-16 06:20:00 GMT",
|
||||
"2014-04-26 05:24:37 PM",
|
||||
"2014-04-26 13:13:43 +0800",
|
||||
"2014-04-26 13:13:43 +0800 +08",
|
||||
"2014-04-26 13:13:44 +09:00",
|
||||
"2012-08-03 18:31:59.257000000 +0000 UTC",
|
||||
"2015-09-30 18:48:56.35272715 +0000 UTC",
|
||||
"2015-02-18 00:12:00 +0000 GMT",
|
||||
"2015-02-18 00:12:00 +0000 UTC",
|
||||
"2015-02-08 03:02:00 +0300 MSK m=+0.000000001",
|
||||
"2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001",
|
||||
"2017-07-19 03:21:51+00:00",
|
||||
"2014-04-26",
|
||||
"2014-04",
|
||||
"2014",
|
||||
"2014-05-11 08:20:13,787",
|
||||
// yyyy-mm-dd-07:00
|
||||
"2020-07-20+08:00",
|
||||
// mm.dd.yy
|
||||
"3.31.2014",
|
||||
"03.31.2014",
|
||||
"08.21.71",
|
||||
"2014.03",
|
||||
"2014.03.30",
|
||||
// yyyymmdd and similar
|
||||
"20140601",
|
||||
"20140722105203",
|
||||
// yymmdd hh:mm:yy mysql log
|
||||
// 080313 05:21:55 mysqld started
|
||||
"171113 14:14:20",
|
||||
// unix seconds, ms, micro, nano
|
||||
"1332151919",
|
||||
"1384216367189",
|
||||
"1384216367111222",
|
||||
"1384216367111222333",
|
||||
}
|
||||
|
||||
var (
|
||||
timezone = ""
|
||||
)
|
||||
|
||||
func main() {
|
||||
flag.StringVar(&timezone, "timezone", "UTC", "Timezone aka `America/Los_Angeles` formatted time-zone")
|
||||
flag.Parse()
|
||||
|
||||
if timezone != "" {
|
||||
// NOTE: This is very, very important to understand
|
||||
// time-parsing in go
|
||||
loc, err := time.LoadLocation(timezone)
|
||||
if err != nil {
|
||||
panic(err.Error())
|
||||
}
|
||||
time.Local = loc
|
||||
}
|
||||
|
||||
table := termtables.CreateTable()
|
||||
|
||||
table.AddHeaders("Input", "Parsed, and Output as %v")
|
||||
for _, dateExample := range examples {
|
||||
t, err := dateparse.ParseLocal(dateExample)
|
||||
if err != nil {
|
||||
panic(err.Error())
|
||||
}
|
||||
table.AddRow(dateExample, fmt.Sprintf("%v", t))
|
||||
}
|
||||
fmt.Println(table.Render())
|
||||
}
|
||||
|
||||
/*
|
||||
+-------------------------------------------------------+-----------------------------------------+
|
||||
| Input | Parsed, and Output as %v |
|
||||
+-------------------------------------------------------+-----------------------------------------+
|
||||
| May 8, 2009 5:57:51 PM | 2009-05-08 17:57:51 +0000 UTC |
|
||||
| oct 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| oct 7, '70 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| oct. 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| oct. 7, 70 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| Mon Jan 2 15:04:05 2006 | 2006-01-02 15:04:05 +0000 UTC |
|
||||
| Mon Jan 2 15:04:05 MST 2006 | 2006-01-02 15:04:05 +0000 MST |
|
||||
| Mon Jan 02 15:04:05 -0700 2006 | 2006-01-02 15:04:05 -0700 -0700 |
|
||||
| Monday, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST |
|
||||
| Mon, 02 Jan 2006 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST |
|
||||
| Tue, 11 Jul 2017 16:28:13 +0200 (CEST) | 2017-07-11 16:28:13 +0200 +0200 |
|
||||
| Mon, 02 Jan 2006 15:04:05 -0700 | 2006-01-02 15:04:05 -0700 -0700 |
|
||||
| Mon 30 Sep 2018 09:09:09 PM UTC | 2018-09-30 21:09:09 +0000 UTC |
|
||||
| Mon Aug 10 15:44:11 UTC+0100 2015 | 2015-08-10 15:44:11 +0000 UTC |
|
||||
| Thu, 4 Jan 2018 17:53:36 +0000 | 2018-01-04 17:53:36 +0000 UTC |
|
||||
| Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) | 2015-07-03 18:04:07 +0100 GMT |
|
||||
| Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00) | 2021-01-03 00:12:23 +0800 +0800 |
|
||||
| September 17, 2012 10:09am | 2012-09-17 10:09:00 +0000 UTC |
|
||||
| September 17, 2012 at 10:09am PST-08 | 2012-09-17 10:09:00 -0800 PST |
|
||||
| September 17, 2012, 10:10:09 | 2012-09-17 10:10:09 +0000 UTC |
|
||||
| October 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| October 7th, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| 12 Feb 2006, 19:17 | 2006-02-12 19:17:00 +0000 UTC |
|
||||
| 12 Feb 2006 19:17 | 2006-02-12 19:17:00 +0000 UTC |
|
||||
| 14 May 2019 19:11:40.164 | 2019-05-14 19:11:40.164 +0000 UTC |
|
||||
| 7 oct 70 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| 7 oct 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| 03 February 2013 | 2013-02-03 00:00:00 +0000 UTC |
|
||||
| 1 July 2013 | 2013-07-01 00:00:00 +0000 UTC |
|
||||
| 2013-Feb-03 | 2013-02-03 00:00:00 +0000 UTC |
|
||||
| 06/Jan/2008:15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
|
||||
| 06/Jan/2008 15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
|
||||
| 3/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 03/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 08/21/71 | 1971-08-21 00:00:00 +0000 UTC |
|
||||
| 8/1/71 | 1971-08-01 00:00:00 +0000 UTC |
|
||||
| 4/8/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 04/08/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 4/8/14 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 04/2/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 8/8/1965 12:00:00 AM | 1965-08-08 00:00:00 +0000 UTC |
|
||||
| 8/8/1965 01:00:01 PM | 1965-08-08 13:00:01 +0000 UTC |
|
||||
| 8/8/1965 01:00 PM | 1965-08-08 13:00:00 +0000 UTC |
|
||||
| 8/8/1965 1:00 PM | 1965-08-08 13:00:00 +0000 UTC |
|
||||
| 8/8/1965 12:00 AM | 1965-08-08 00:00:00 +0000 UTC |
|
||||
| 4/02/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 03/19/2012 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
|
||||
| 03/19/2012 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
|
||||
| 2014/3/31 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 2014/03/31 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 2014/4/8 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 2014/04/08 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 2014/04/2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 2014/4/02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 2012/03/19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
|
||||
| 2012/03/19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
|
||||
| 2014:3:31 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 2014:03:31 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 2014:4:8 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 2014:04:08 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 2014:04:2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 2014:4:02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 2012:03:19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
|
||||
| 2012:03:19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
|
||||
| 2014年04月08日 | 2014-04-08 00:00:00 +0000 UTC |
|
||||
| 2006-01-02T15:04:05+0000 | 2006-01-02 15:04:05 +0000 UTC |
|
||||
| 2009-08-12T22:15:09-07:00 | 2009-08-12 22:15:09 -0700 -0700 |
|
||||
| 2009-08-12T22:15:09 | 2009-08-12 22:15:09 +0000 UTC |
|
||||
| 2009-08-12T22:15:09.988 | 2009-08-12 22:15:09.988 +0000 UTC |
|
||||
| 2009-08-12T22:15:09Z | 2009-08-12 22:15:09 +0000 UTC |
|
||||
| 2017-07-19T03:21:51:897+0100 | 2017-07-19 03:21:51.897 +0100 +0100 |
|
||||
| 2019-05-29T08:41-04 | 2019-05-29 08:41:00 -0400 -0400 |
|
||||
| 2014-04-26 17:24:37.3186369 | 2014-04-26 17:24:37.3186369 +0000 UTC |
|
||||
| 2012-08-03 18:31:59.257000000 | 2012-08-03 18:31:59.257 +0000 UTC |
|
||||
| 2014-04-26 17:24:37.123 | 2014-04-26 17:24:37.123 +0000 UTC |
|
||||
| 2013-04-01 22:43 | 2013-04-01 22:43:00 +0000 UTC |
|
||||
| 2013-04-01 22:43:22 | 2013-04-01 22:43:22 +0000 UTC |
|
||||
| 2014-12-16 06:20:00 UTC | 2014-12-16 06:20:00 +0000 UTC |
|
||||
| 2014-12-16 06:20:00 GMT | 2014-12-16 06:20:00 +0000 UTC |
|
||||
| 2014-04-26 05:24:37 PM | 2014-04-26 17:24:37 +0000 UTC |
|
||||
| 2014-04-26 13:13:43 +0800 | 2014-04-26 13:13:43 +0800 +0800 |
|
||||
| 2014-04-26 13:13:43 +0800 +08 | 2014-04-26 13:13:43 +0800 +0800 |
|
||||
| 2014-04-26 13:13:44 +09:00 | 2014-04-26 13:13:44 +0900 +0900 |
|
||||
| 2012-08-03 18:31:59.257000000 +0000 UTC | 2012-08-03 18:31:59.257 +0000 UTC |
|
||||
| 2015-09-30 18:48:56.35272715 +0000 UTC | 2015-09-30 18:48:56.35272715 +0000 UTC |
|
||||
| 2015-02-18 00:12:00 +0000 GMT | 2015-02-18 00:12:00 +0000 UTC |
|
||||
| 2015-02-18 00:12:00 +0000 UTC | 2015-02-18 00:12:00 +0000 UTC |
|
||||
| 2015-02-08 03:02:00 +0300 MSK m=+0.000000001 | 2015-02-08 03:02:00 +0300 +0300 |
|
||||
| 2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001 | 2015-02-08 03:02:00.001 +0300 +0300 |
|
||||
| 2017-07-19 03:21:51+00:00 | 2017-07-19 03:21:51 +0000 UTC |
|
||||
| 2014-04-26 | 2014-04-26 00:00:00 +0000 UTC |
|
||||
| 2014-04 | 2014-04-01 00:00:00 +0000 UTC |
|
||||
| 2014 | 2014-01-01 00:00:00 +0000 UTC |
|
||||
| 2014-05-11 08:20:13,787 | 2014-05-11 08:20:13.787 +0000 UTC |
|
||||
| 2020-07-20+08:00 | 2020-07-20 00:00:00 +0800 +0800 |
|
||||
| 3.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 03.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 08.21.71 | 1971-08-21 00:00:00 +0000 UTC |
|
||||
| 2014.03 | 2014-03-01 00:00:00 +0000 UTC |
|
||||
| 2014.03.30 | 2014-03-30 00:00:00 +0000 UTC |
|
||||
| 20140601 | 2014-06-01 00:00:00 +0000 UTC |
|
||||
| 20140722105203 | 2014-07-22 10:52:03 +0000 UTC |
|
||||
| 171113 14:14:20 | 2017-11-13 14:14:20 +0000 UTC |
|
||||
| 1332151919 | 2012-03-19 10:11:59 +0000 UTC |
|
||||
| 1384216367189 | 2013-11-12 00:32:47.189 +0000 UTC |
|
||||
| 1384216367111222 | 2013-11-12 00:32:47.111222 +0000 UTC |
|
||||
| 1384216367111222333 | 2013-11-12 00:32:47.111222333 +0000 UTC |
|
||||
+-------------------------------------------------------+-----------------------------------------+
|
||||
*/
|
||||
|
||||
```
|
||||
2189
vendor/github.com/araddon/dateparse/parseany.go
generated
vendored
2189
vendor/github.com/araddon/dateparse/parseany.go
generated
vendored
File diff suppressed because it is too large
Load Diff
23
vendor/github.com/glycerine/blake2b/README
generated
vendored
23
vendor/github.com/glycerine/blake2b/README
generated
vendored
@@ -1,23 +0,0 @@
|
||||
Go implementation of BLAKE2b collision-resistant cryptographic hash function
|
||||
created by Jean-Philippe Aumasson, Samuel Neves, Zooko Wilcox-O'Hearn, and
|
||||
Christian Winnerlein (https://blake2.net).
|
||||
|
||||
INSTALLATION
|
||||
|
||||
$ go get github.com/dchest/blake2b
|
||||
|
||||
|
||||
DOCUMENTATION
|
||||
|
||||
See http://godoc.org/github.com/dchest/blake2b
|
||||
|
||||
|
||||
PUBLIC DOMAIN DEDICATION
|
||||
|
||||
Written in 2012 by Dmitry Chestnykh.
|
||||
|
||||
To the extent possible under law, the author have dedicated all copyright
|
||||
and related and neighboring rights to this software to the public domain
|
||||
worldwide. This software is distributed without any warranty.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
299
vendor/github.com/glycerine/blake2b/blake2b.go
generated
vendored
299
vendor/github.com/glycerine/blake2b/blake2b.go
generated
vendored
@@ -1,299 +0,0 @@
|
||||
// Written in 2012 by Dmitry Chestnykh.
|
||||
//
|
||||
// To the extent possible under law, the author have dedicated all copyright
|
||||
// and related and neighboring rights to this software to the public domain
|
||||
// worldwide. This software is distributed without any warranty.
|
||||
// http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
// Package blake2b implements BLAKE2b cryptographic hash function.
|
||||
package blake2b
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"hash"
|
||||
)
|
||||
|
||||
const (
|
||||
BlockSize = 128 // block size of algorithm
|
||||
Size = 64 // maximum digest size
|
||||
SaltSize = 16 // maximum salt size
|
||||
PersonSize = 16 // maximum personalization string size
|
||||
KeySize = 64 // maximum size of key
|
||||
)
|
||||
|
||||
type digest struct {
|
||||
h [8]uint64 // current chain value
|
||||
t [2]uint64 // message bytes counter
|
||||
f [2]uint64 // finalization flags
|
||||
x [BlockSize]byte // buffer for data not yet compressed
|
||||
nx int // number of bytes in buffer
|
||||
|
||||
ih [8]uint64 // initial chain value (after config)
|
||||
paddedKey [BlockSize]byte // copy of key, padded with zeros
|
||||
isKeyed bool // indicates whether hash was keyed
|
||||
size uint8 // digest size in bytes
|
||||
isLastNode bool // indicates processing of the last node in tree hashing
|
||||
}
|
||||
|
||||
// Initialization values.
|
||||
var iv = [8]uint64{
|
||||
0x6a09e667f3bcc908, 0xbb67ae8584caa73b,
|
||||
0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1,
|
||||
0x510e527fade682d1, 0x9b05688c2b3e6c1f,
|
||||
0x1f83d9abfb41bd6b, 0x5be0cd19137e2179,
|
||||
}
|
||||
|
||||
// Config is used to configure hash function parameters and keying.
|
||||
// All parameters are optional.
|
||||
type Config struct {
|
||||
Size uint8 // digest size (if zero, default size of 64 bytes is used)
|
||||
Key []byte // key for prefix-MAC
|
||||
Salt []byte // salt (if < 16 bytes, padded with zeros)
|
||||
Person []byte // personalization (if < 16 bytes, padded with zeros)
|
||||
Tree *Tree // parameters for tree hashing
|
||||
}
|
||||
|
||||
// Tree represents parameters for tree hashing.
|
||||
type Tree struct {
|
||||
Fanout uint8 // fanout
|
||||
MaxDepth uint8 // maximal depth
|
||||
LeafSize uint32 // leaf maximal byte length (0 for unlimited)
|
||||
NodeOffset uint64 // node offset (0 for first, leftmost or leaf)
|
||||
NodeDepth uint8 // node depth (0 for leaves)
|
||||
InnerHashSize uint8 // inner hash byte length
|
||||
IsLastNode bool // indicates processing of the last node of layer
|
||||
}
|
||||
|
||||
var (
|
||||
defaultConfig = &Config{Size: Size}
|
||||
config256 = &Config{Size: 32}
|
||||
)
|
||||
|
||||
func verifyConfig(c *Config) error {
|
||||
if c.Size > Size {
|
||||
return errors.New("digest size is too large")
|
||||
}
|
||||
if len(c.Key) > KeySize {
|
||||
return errors.New("key is too large")
|
||||
}
|
||||
if len(c.Salt) > SaltSize {
|
||||
// Smaller salt is okay: it will be padded with zeros.
|
||||
return errors.New("salt is too large")
|
||||
}
|
||||
if len(c.Person) > PersonSize {
|
||||
// Smaller personalization is okay: it will be padded with zeros.
|
||||
return errors.New("personalization is too large")
|
||||
}
|
||||
if c.Tree != nil {
|
||||
if c.Tree.Fanout == 1 {
|
||||
return errors.New("fanout of 1 is not allowed in tree mode")
|
||||
}
|
||||
if c.Tree.MaxDepth < 2 {
|
||||
return errors.New("incorrect tree depth")
|
||||
}
|
||||
if c.Tree.InnerHashSize < 1 || c.Tree.InnerHashSize > Size {
|
||||
return errors.New("incorrect tree inner hash size")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// New returns a new hash.Hash configured with the given Config.
|
||||
// Config can be nil, in which case the default one is used, calculating 64-byte digest.
|
||||
// Returns non-nil error if Config contains invalid parameters.
|
||||
func New(c *Config) (hash.Hash, error) {
|
||||
if c == nil {
|
||||
c = defaultConfig
|
||||
} else {
|
||||
if c.Size == 0 {
|
||||
// Set default size if it's zero.
|
||||
c.Size = Size
|
||||
}
|
||||
if err := verifyConfig(c); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
d := new(digest)
|
||||
d.initialize(c)
|
||||
return d, nil
|
||||
}
|
||||
|
||||
// initialize initializes digest with the given
|
||||
// config, which must be non-nil and verified.
|
||||
func (d *digest) initialize(c *Config) {
|
||||
// Create parameter block.
|
||||
var p [BlockSize]byte
|
||||
p[0] = c.Size
|
||||
p[1] = uint8(len(c.Key))
|
||||
if c.Salt != nil {
|
||||
copy(p[32:], c.Salt)
|
||||
}
|
||||
if c.Person != nil {
|
||||
copy(p[48:], c.Person)
|
||||
}
|
||||
if c.Tree != nil {
|
||||
p[2] = c.Tree.Fanout
|
||||
p[3] = c.Tree.MaxDepth
|
||||
binary.LittleEndian.PutUint32(p[4:], c.Tree.LeafSize)
|
||||
binary.LittleEndian.PutUint64(p[8:], c.Tree.NodeOffset)
|
||||
p[16] = c.Tree.NodeDepth
|
||||
p[17] = c.Tree.InnerHashSize
|
||||
} else {
|
||||
p[2] = 1
|
||||
p[3] = 1
|
||||
}
|
||||
// Initialize.
|
||||
d.size = c.Size
|
||||
for i := 0; i < 8; i++ {
|
||||
d.h[i] = iv[i] ^ binary.LittleEndian.Uint64(p[i*8:])
|
||||
}
|
||||
if c.Tree != nil && c.Tree.IsLastNode {
|
||||
d.isLastNode = true
|
||||
}
|
||||
// Process key.
|
||||
if c.Key != nil {
|
||||
copy(d.paddedKey[:], c.Key)
|
||||
d.Write(d.paddedKey[:])
|
||||
d.isKeyed = true
|
||||
}
|
||||
// Save a copy of initialized state.
|
||||
copy(d.ih[:], d.h[:])
|
||||
}
|
||||
|
||||
// New512 returns a new hash.Hash computing the BLAKE2b 64-byte checksum.
|
||||
func New512() hash.Hash {
|
||||
d := new(digest)
|
||||
d.initialize(defaultConfig)
|
||||
return d
|
||||
}
|
||||
|
||||
// New256 returns a new hash.Hash computing the BLAKE2b 32-byte checksum.
|
||||
func New256() hash.Hash {
|
||||
d := new(digest)
|
||||
d.initialize(config256)
|
||||
return d
|
||||
}
|
||||
|
||||
// NewMAC returns a new hash.Hash computing BLAKE2b prefix-
|
||||
// Message Authentication Code of the given size in bytes
|
||||
// (up to 64) with the given key (up to 64 bytes in length).
|
||||
func NewMAC(outBytes uint8, key []byte) hash.Hash {
|
||||
d, err := New(&Config{Size: outBytes, Key: key})
|
||||
if err != nil {
|
||||
panic(err.Error())
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// Reset resets the state of digest to the initial state
|
||||
// after configuration and keying.
|
||||
func (d *digest) Reset() {
|
||||
copy(d.h[:], d.ih[:])
|
||||
d.t[0] = 0
|
||||
d.t[1] = 0
|
||||
d.f[0] = 0
|
||||
d.f[1] = 0
|
||||
d.nx = 0
|
||||
if d.isKeyed {
|
||||
d.Write(d.paddedKey[:])
|
||||
}
|
||||
}
|
||||
|
||||
// Size returns the digest size in bytes.
|
||||
func (d *digest) Size() int { return int(d.size) }
|
||||
|
||||
// BlockSize returns the algorithm block size in bytes.
|
||||
func (d *digest) BlockSize() int { return BlockSize }
|
||||
|
||||
func (d *digest) Write(p []byte) (nn int, err error) {
|
||||
nn = len(p)
|
||||
left := BlockSize - d.nx
|
||||
if len(p) > left {
|
||||
// Process buffer.
|
||||
copy(d.x[d.nx:], p[:left])
|
||||
p = p[left:]
|
||||
blocks(d, d.x[:])
|
||||
d.nx = 0
|
||||
}
|
||||
// Process full blocks except for the last one.
|
||||
if len(p) > BlockSize {
|
||||
n := len(p) &^ (BlockSize - 1)
|
||||
if n == len(p) {
|
||||
n -= BlockSize
|
||||
}
|
||||
blocks(d, p[:n])
|
||||
p = p[n:]
|
||||
}
|
||||
// Fill buffer.
|
||||
d.nx += copy(d.x[d.nx:], p)
|
||||
return
|
||||
}
|
||||
|
||||
// Sum returns the calculated checksum.
|
||||
func (d0 *digest) Sum(in []byte) []byte {
|
||||
// Make a copy of d0 so that caller can keep writing and summing.
|
||||
d := *d0
|
||||
hash := d.checkSum()
|
||||
return append(in, hash[:d.size]...)
|
||||
}
|
||||
|
||||
func (d *digest) checkSum() [Size]byte {
|
||||
// Do not create unnecessary copies of the key.
|
||||
if d.isKeyed {
|
||||
for i := 0; i < len(d.paddedKey); i++ {
|
||||
d.paddedKey[i] = 0
|
||||
}
|
||||
}
|
||||
|
||||
dec := BlockSize - uint64(d.nx)
|
||||
if d.t[0] < dec {
|
||||
d.t[1]--
|
||||
}
|
||||
d.t[0] -= dec
|
||||
|
||||
// Pad buffer with zeros.
|
||||
for i := d.nx; i < len(d.x); i++ {
|
||||
d.x[i] = 0
|
||||
}
|
||||
// Set last block flag.
|
||||
d.f[0] = 0xffffffffffffffff
|
||||
if d.isLastNode {
|
||||
d.f[1] = 0xffffffffffffffff
|
||||
}
|
||||
// Compress last block.
|
||||
blocks(d, d.x[:])
|
||||
|
||||
var out [Size]byte
|
||||
j := 0
|
||||
for _, s := range d.h[:(d.size-1)/8+1] {
|
||||
out[j+0] = byte(s >> 0)
|
||||
out[j+1] = byte(s >> 8)
|
||||
out[j+2] = byte(s >> 16)
|
||||
out[j+3] = byte(s >> 24)
|
||||
out[j+4] = byte(s >> 32)
|
||||
out[j+5] = byte(s >> 40)
|
||||
out[j+6] = byte(s >> 48)
|
||||
out[j+7] = byte(s >> 56)
|
||||
j += 8
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// Sum512 returns a 64-byte BLAKE2b hash of data.
|
||||
func Sum512(data []byte) [64]byte {
|
||||
var d digest
|
||||
d.initialize(defaultConfig)
|
||||
d.Write(data)
|
||||
return d.checkSum()
|
||||
}
|
||||
|
||||
// Sum256 returns a 32-byte BLAKE2b hash of data.
|
||||
func Sum256(data []byte) (out [32]byte) {
|
||||
var d digest
|
||||
d.initialize(config256)
|
||||
d.Write(data)
|
||||
sum := d.checkSum()
|
||||
copy(out[:], sum[:32])
|
||||
return
|
||||
}
|
||||
1420
vendor/github.com/glycerine/blake2b/block.go
generated
vendored
1420
vendor/github.com/glycerine/blake2b/block.go
generated
vendored
File diff suppressed because it is too large
Load Diff
25
vendor/github.com/glycerine/greenpack/LICENSE
generated
vendored
25
vendor/github.com/glycerine/greenpack/LICENSE
generated
vendored
@@ -1,25 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Portions Copyright (c) 2016 Jason E. Aten
|
||||
Portions Copyright (c) 2014 Philip Hofer
|
||||
Portions Copyright (c) 2009 The Go Authors (license at http://golang.org) where indicated
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom
|
||||
the Software is furnished to do so, subject to the
|
||||
following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
||||
OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
24
vendor/github.com/glycerine/greenpack/msgp/advise_linux.go
generated
vendored
24
vendor/github.com/glycerine/greenpack/msgp/advise_linux.go
generated
vendored
@@ -1,24 +0,0 @@
|
||||
// +build linux,!appengine
|
||||
|
||||
package msgp
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
func adviseRead(mem []byte) {
|
||||
syscall.Madvise(mem, syscall.MADV_SEQUENTIAL|syscall.MADV_WILLNEED)
|
||||
}
|
||||
|
||||
func adviseWrite(mem []byte) {
|
||||
syscall.Madvise(mem, syscall.MADV_SEQUENTIAL)
|
||||
}
|
||||
|
||||
func fallocate(f *os.File, sz int64) error {
|
||||
err := syscall.Fallocate(int(f.Fd()), 0, 0, sz)
|
||||
if err == syscall.ENOTSUP {
|
||||
return f.Truncate(sz)
|
||||
}
|
||||
return err
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user