mirror of https://codeberg.org/scip/tablizer.git, synced 2025-12-16 20:20:57 +01:00

Merge branch 'main' into feature/yank
.github/workflows/ci.yaml (vendored, 2 lines changed)
@@ -5,8 +5,6 @@ jobs:
strategy:
matrix:
version: ['1.22']
# windows-latest removed, see:
# https://github.com/rogpeppe/go-internal/issues/284
os: [ubuntu-latest, macos-latest, windows-latest]
name: Build
runs-on: ${{ matrix.os }}
.github/workflows/release.yaml (vendored, 63 lines changed)
@@ -1,8 +1,8 @@
name: build-and-test
name: build-release
on:
push:
tags:
- "*"
- "v*.*.*"

jobs:
release:
@@ -10,10 +10,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4

- name: Set up Go
uses: actions/setup-go@v1
uses: actions/setup-go@v5
with:
go-version: 1.22.11

@@ -30,3 +30,58 @@ jobs:
tag: ${{ github.ref_name }}
file: ./releases/*
file_glob: true

- name: Build Changelog
id: github_release
uses: mikepenz/release-changelog-builder-action@v5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
mode: "PR"
configurationJson: |
{
"template": "#{{CHANGELOG}}\n\n**Full Changelog**: #{{RELEASE_DIFF}}",
"pr_template": "- #{{TITLE}} (##{{NUMBER}}) by #{{AUTHOR}}\n#{{BODY}}",
"empty_template": "- no changes",
"categories": [
{
"title": "## New Features",
"labels": ["add", "feature"]
},
{
"title": "## Bug Fixes",
"labels": ["fix", "bug", "revert"]
},
{
"title": "## Documentation Enhancements",
"labels": ["doc"]
},
{
"title": "## Refactoring Efforts",
"labels": ["refactor"]
},
{
"title": "## Miscellaneus Changes",
"labels": []
}
],
"ignore_labels": [
"duplicate", "good first issue", "help wanted", "invalid", "question", "wontfix"
],
"label_extractor": [
{
"pattern": "(.) (.+)",
"target": "$1"
},
{
"pattern": "(.) (.+)",
"target": "$1",
"on_property": "title"
}
]
}

- name: Create Release
uses: softprops/action-gh-release@v2
with:
body: ${{steps.github_release.outputs.changelog}}
@@ -22,7 +22,7 @@ Operational Flags:
-s, --separator string Custom field separator
-k, --sort-by int Sort by column (default: 1)
-z, --fuzzy Use fuzzy search [experimental]
-F, --filter field=reg Filter given field with regex, can be used multiple times
-F, --filter field[!]=reg Filter given field with regex, can be used multiple times
-T, --transpose-columns string Transpose the speficied columns (separated by ,)
-R, --regex-transposer /from/to/ Apply /search/replace/ regexp to fields given in -T

@@ -1,5 +1,5 @@
/*
Copyright © 2022-2024 Thomas von Dein
Copyright © 2022-2025 Thomas von Dein

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -52,6 +52,17 @@ type Transposer struct {
Replace string
}

type Pattern struct {
Pattern string
PatternRe *regexp.Regexp
Negate bool
}

type Filter struct {
Regex *regexp.Regexp
Negate bool
}

// internal config
type Config struct {
Debug bool
@@ -64,8 +75,7 @@ type Config struct {
Separator string
OutputMode int
InvertMatch bool
Pattern string
PatternR *regexp.Regexp
Patterns []*Pattern
UseFuzzySearch bool
UseHighlight bool

@@ -97,7 +107,7 @@ type Config struct {

// used for field filtering
Rawfilters []string
Filters map[string]*regexp.Regexp
Filters map[string]Filter //map[string]*regexp.Regexp

// -r <file>
InputFile string
@@ -267,12 +277,20 @@ func (conf *Config) PrepareModeFlags(flag Modeflag) {
}

func (conf *Config) PrepareFilters() error {
conf.Filters = make(map[string]*regexp.Regexp, len(conf.Rawfilters))
conf.Filters = make(map[string]Filter, len(conf.Rawfilters))

for _, filter := range conf.Rawfilters {
parts := strings.Split(filter, "=")
for _, rawfilter := range conf.Rawfilters {
filter := Filter{}

parts := strings.Split(rawfilter, "!=")
if len(parts) != MAXPARTS {
return errors.New("filter field and value must be separated by =")
parts = strings.Split(rawfilter, "=")

if len(parts) != MAXPARTS {
return errors.New("filter field and value must be separated by '=' or '!='")
}
} else {
filter.Negate = true
}

reg, err := regexp.Compile(parts[1])
@@ -281,7 +299,8 @@ func (conf *Config) PrepareFilters() error {
parts[0], err)
}

conf.Filters[strings.ToLower(strings.ToLower(parts[0]))] = reg
filter.Regex = reg
conf.Filters[strings.ToLower(parts[0])] = filter
}

return nil
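As an aside, the new handling above splits each raw filter on "!=" first and only falls back to "=" when that yields no field/value pair. A minimal standalone sketch of that idea, with illustrative names (parseFilter is not a function from the repository, and it uses strings.SplitN rather than the MAXPARTS check shown in the diff):

package main

import (
	"errors"
	"fmt"
	"regexp"
	"strings"
)

// parseFilter turns "field=regexp" or "field!=regexp" into a compiled
// regexp plus a negate flag, lower-casing the field name as the diff does.
func parseFilter(spec string) (field string, re *regexp.Regexp, negate bool, err error) {
	parts := strings.SplitN(spec, "!=", 2)
	if len(parts) != 2 {
		parts = strings.SplitN(spec, "=", 2)
		if len(parts) != 2 {
			return "", nil, false, errors.New("filter must look like field=regexp or field!=regexp")
		}
	} else {
		negate = true
	}

	re, err = regexp.Compile(parts[1])
	return strings.ToLower(parts[0]), re, negate, err
}

func main() {
	field, re, negate, _ := parseFilter("ONE!=asd")
	fmt.Println(field, re.MatchString("asd"), negate) // one true true
}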
@@ -335,15 +354,37 @@ func (conf *Config) ApplyDefaults() {
}
}

func (conf *Config) PreparePattern(pattern string) error {
PatternR, err := regexp.Compile(pattern)
func (conf *Config) PreparePattern(patterns []*Pattern) error {
// regex checks if a pattern looks like /$pattern/[i!]
flagre := regexp.MustCompile(`^/(.*)/([i!]*)$`)

if err != nil {
return fmt.Errorf("regexp pattern %s is invalid: %w", conf.Pattern, err)
for _, pattern := range patterns {
matches := flagre.FindAllStringSubmatch(pattern.Pattern, -1)

// we have a regex with flags
for _, match := range matches {
pattern.Pattern = match[1] // the inner part is our actual pattern
flags := match[2] // the flags

for _, flag := range flags {
switch flag {
case 'i':
pattern.Pattern = `(?i)` + pattern.Pattern
case '!':
pattern.Negate = true
}
}
}

PatternRe, err := regexp.Compile(pattern.Pattern)
if err != nil {
return fmt.Errorf("regexp pattern %s is invalid: %w", pattern.Pattern, err)
}

pattern.PatternRe = PatternRe
}

conf.PatternR = PatternR
conf.Pattern = pattern
conf.Patterns = patterns

return nil
}

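For reference, a self-contained sketch of the /pattern/[i!] flag syntax introduced by PreparePattern above; compilePattern and its types are illustrative stand-ins, not identifiers from the repository:

package main

import (
	"fmt"
	"regexp"
)

// flagRe mirrors the `^/(.*)/([i!]*)$` check from the diff above.
var flagRe = regexp.MustCompile(`^/(.*)/([i!]*)$`)

type compiled struct {
	re     *regexp.Regexp
	negate bool
}

// compilePattern strips the surrounding slashes, applies the flags
// ('i' becomes Go's (?i) prefix, '!' marks the pattern as negated)
// and compiles the remaining expression.
func compilePattern(spec string) (compiled, error) {
	c := compiled{}

	if m := flagRe.FindStringSubmatch(spec); m != nil {
		spec = m[1]
		for _, flag := range m[2] {
			switch flag {
			case 'i':
				spec = `(?i)` + spec
			case '!':
				c.negate = true
			}
		}
	}

	re, err := regexp.Compile(spec)
	if err != nil {
		return c, fmt.Errorf("regexp pattern %s is invalid: %w", spec, err)
	}

	c.re = re
	return c, nil
}

func main() {
	c, _ := compilePattern("/imperium/!i")
	fmt.Println(c.re.MatchString("IMPERIUM"), c.negate) // true true
}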
@@ -79,20 +79,55 @@ func TestPrepareSortFlags(t *testing.T) {

func TestPreparePattern(t *testing.T) {
var tests = []struct {
pattern string
wanterr bool
patterns []*Pattern
name string
wanterr bool
wanticase bool
wantneg bool
}{
{"[A-Z]+", false},
{"[a-z", true},
{
[]*Pattern{{Pattern: "[A-Z]+"}},
"simple",
false,
false,
false,
},
{
[]*Pattern{{Pattern: "[a-z"}},
"regfail",
true,
false,
false,
},
{
[]*Pattern{{Pattern: "/[A-Z]+/i"}},
"icase",
false,
true,
false,
},
{
[]*Pattern{{Pattern: "/[A-Z]+/!"}},
"negate",
false,
false,
true,
},
{
[]*Pattern{{Pattern: "/[A-Z]+/!i"}},
"negicase",
false,
true,
true,
},
}

for _, testdata := range tests {
testname := fmt.Sprintf("PreparePattern-pattern-%s-wanterr-%t",
testdata.pattern, testdata.wanterr)
testname := fmt.Sprintf("PreparePattern-pattern-%s-wanterr-%t", testdata.name, testdata.wanterr)
t.Run(testname, func(t *testing.T) {
conf := Config{}

err := conf.PreparePattern(testdata.pattern)
err := conf.PreparePattern(testdata.patterns)

if err != nil {
if !testdata.wanterr {

@@ -1,5 +1,5 @@
/*
Copyright © 2022-2024 Thomas von Dein
Copyright © 2022-2025 Thomas von Dein

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -192,7 +192,7 @@ func Execute() {

// filters
rootCmd.PersistentFlags().StringArrayVarP(&conf.Rawfilters,
"filter", "F", nil, "Filter by field (field=regexp)")
"filter", "F", nil, "Filter by field (field=regexp || field!=regexp)")
rootCmd.PersistentFlags().StringArrayVarP(&conf.Transposers,
"regex-transposer", "R", nil, "apply /search/replace/ regexp to fields given in -T")

@@ -6,7 +6,7 @@ NAME

SYNOPSIS
Usage:
tablizer [regex] [file, ...] [flags]
tablizer [regex,...] [file, ...] [flags]

Operational Flags:
-c, --columns string Only show the speficied columns (separated by ,)
@@ -17,7 +17,7 @@ SYNOPSIS
-s, --separator string Custom field separator
-k, --sort-by int|name Sort by column (default: 1)
-z, --fuzzy Use fuzzy search [experimental]
-F, --filter field=reg Filter given field with regex, can be used multiple times
-F, --filter field[!]=reg Filter given field with regex, can be used multiple times
-T, --transpose-columns string Transpose the speficied columns (separated by ,)
-R, --regex-transposer /from/to/ Apply /search/replace/ regexp to fields given in -T

@@ -132,30 +132,43 @@ DESCRIPTION
for the developer.

PATTERNS AND FILTERING
You can reduce the rows being displayed by using a regular expression
pattern. The regexp is PCRE compatible, refer to the syntax cheat sheet
here: <https://github.com/google/re2/wiki/Syntax>. If you want to read a
more comprehensive documentation about the topic and have perl installed
you can read it with:
You can reduce the rows being displayed by using one or more regular
expression patterns. The regexp language being used is the one of
GOLANG, refer to the syntax cheat sheet here:
<https://pkg.go.dev/regexp/syntax>.

If you want to read a more comprehensive documentation about the topic
and have perl installed you can read it with:

perldoc perlre

Or read it online: <https://perldoc.perl.org/perlre>.
Or read it online: <https://perldoc.perl.org/perlre>. But please note
that the GO regexp engine does NOT support all perl regex terms,
especially look-ahead and look-behind.

A note on modifiers: the regexp engine used in tablizer uses another
modifier syntax:
If you want to supply flags to a regex, then surround it with slashes
and append the flag. The following flags are supported:

(?MODIFIER)

The most important modifiers are:

"i" ignore case "m" multiline mode "s" single line mode
i => case insensitive
! => negative match

Example for a case insensitive search:

kubectl get pods -A | tablizer "(?i)account"
kubectl get pods -A | tablizer "/account/i"

You can use the experimental fuzzy search feature by providing the
If you use the "!" flag, then the regex match will be negated, that is,
if a line in the input matches the given regex, but "!" is supplied,
tablizer will NOT include it in the output.

For example, here we want to get all lines matching "foo" but not "bar":

cat table | tablizer foo '/bar/!'

This would match a line "foo zorro" but not "foo bar".

The flags can also be combined.

You can also use the experimental fuzzy search feature by providing the
option -z, in which case the pattern is regarded as a fuzzy search term,
not a regexp.

@@ -170,6 +183,10 @@ DESCRIPTION
If you specify more than one filter, both filters have to match (AND
operation).

These field filters can also be negated:

fieldname!=regexp

If the option -v is specified, the filtering is inverted.

COLUMNS
@@ -406,7 +423,7 @@ AUTHORS
var usage = `

Usage:
tablizer [regex] [file, ...] [flags]
tablizer [regex,...] [file, ...] [flags]

Operational Flags:
-c, --columns string Only show the speficied columns (separated by ,)
@@ -417,7 +434,7 @@ Operational Flags:
-s, --separator string Custom field separator
-k, --sort-by int|name Sort by column (default: 1)
-z, --fuzzy Use fuzzy search [experimental]
-F, --filter field=reg Filter given field with regex, can be used multiple times
-F, --filter field[!]=reg Filter given field with regex, can be used multiple times
-T, --transpose-columns string Transpose the speficied columns (separated by ,)
-R, --regex-transposer /from/to/ Apply /search/replace/ regexp to fields given in -T

@@ -1,5 +1,5 @@
/*
Copyright © 2022-2024 Thomas von Dein
Copyright © 2022-2025 Thomas von Dein

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -27,15 +27,46 @@ import (
)

/*
* [!]Match a line, use fuzzy search for normal pattern strings and
* regexp otherwise.
*/
* [!]Match a line, use fuzzy search for normal pattern strings and
* regexp otherwise.

'foo bar' foo, /bar/! => false => line contains foo and not (not bar)
'foo nix' foo, /bar/! => ture => line contains foo and (not bar)
'foo bar' foo, /bar/ => true => line contains both foo and bar
'foo nix' foo, /bar/ => false => line does not contain bar
'foo bar' foo, /nix/ => false => line does not contain nix
*/
func matchPattern(conf cfg.Config, line string) bool {
if conf.UseFuzzySearch {
return fuzzy.MatchFold(conf.Pattern, line)
if len(conf.Patterns) == 0 {
// any line always matches ""
return true
}

return conf.PatternR.MatchString(line)
if conf.UseFuzzySearch {
// fuzzy search only considers the 1st pattern
return fuzzy.MatchFold(conf.Patterns[0].Pattern, line)
}

var match int

//fmt.Printf("<%s>\n", line)
for _, re := range conf.Patterns {
patmatch := re.PatternRe.MatchString(line)
if re.Negate {
// toggle the meaning of match
patmatch = !patmatch
}

if patmatch {
match++
}

//fmt.Printf("patmatch: %t, match: %d, pattern: %s, negate: %t\n", patmatch, match, re.Pattern, re.Negate)
}

// fmt.Printf("result: %t\n", match == len(conf.Patterns))
//fmt.Println()
return match == len(conf.Patterns)
}

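The truth table in the comment boils down to: every pattern must hold, where a negated pattern holds when its regexp does not match. A hedged sketch of just that combination step, reusing the illustrative compiled type from the earlier sketch (not the project's cfg.Pattern):

// lineMatches applies AND semantics across all patterns; an empty list
// matches every line, and a '!' pattern counts as a match when its
// regexp does NOT match the line.
func lineMatches(line string, patterns []compiled) bool {
	for _, p := range patterns {
		ok := p.re.MatchString(line)
		if p.negate {
			ok = !ok
		}
		if !ok {
			return false
		}
	}
	return true
}

With the patterns foo and /bar/!, "foo zorro" is kept and "foo bar" is dropped, matching the table above.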
/*
@@ -55,15 +86,19 @@ func FilterByFields(conf cfg.Config, data *Tabdata) (*Tabdata, bool, error) {
keep := true

for idx, header := range data.headers {
if !Exists(conf.Filters, strings.ToLower(header)) {
lcheader := strings.ToLower(header)
if !Exists(conf.Filters, lcheader) {
// do not filter by unspecified field
continue
}

if !conf.Filters[strings.ToLower(header)].MatchString(row[idx]) {
// there IS a filter, but it doesn't match
keep = false
match := conf.Filters[lcheader].Regex.MatchString(row[idx])
if conf.Filters[lcheader].Negate {
match = !match
}

if !match {
keep = false
break
}
}
@@ -123,8 +158,11 @@ func Exists[K comparable, V any](m map[K]V, v K) bool {
return false
}

/*
* Filters the whole input lines, returns filtered lines
*/
func FilterByPattern(conf cfg.Config, input io.Reader) (io.Reader, error) {
if conf.Pattern == "" {
if len(conf.Patterns) == 0 {
return input, nil
}

@@ -136,7 +174,7 @@ func FilterByPattern(conf cfg.Config, input io.Reader) (io.Reader, error) {
line := strings.TrimSpace(scanner.Text())
if hadFirst {
// don't match 1st line, it's the header
if conf.Pattern != "" && matchPattern(conf, line) == conf.InvertMatch {
if matchPattern(conf, line) == conf.InvertMatch {
// by default -v is false, so if a line does NOT
// match the pattern, we will ignore it. However,
// if the user specified -v, the matching is inverted,

@@ -1,5 +1,5 @@
/*
Copyright © 2024 Thomas von Dein
Copyright © 2024-2025 Thomas von Dein

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -27,21 +27,21 @@ import (

func TestMatchPattern(t *testing.T) {
var input = []struct {
name string
fuzzy bool
pattern string
line string
name string
fuzzy bool
patterns []*cfg.Pattern
line string
}{
{
name: "normal",
pattern: "haus",
line: "hausparty",
name: "normal",
patterns: []*cfg.Pattern{{Pattern: "haus"}},
line: "hausparty",
},
{
name: "fuzzy",
pattern: "hpt",
line: "haus-party-termin",
fuzzy: true,
name: "fuzzy",
patterns: []*cfg.Pattern{{Pattern: "hpt"}},
line: "haus-party-termin",
fuzzy: true,
},
}

@@ -55,7 +55,7 @@ func TestMatchPattern(t *testing.T) {
conf.UseFuzzySearch = true
}

err := conf.PreparePattern(inputdata.pattern)
err := conf.PreparePattern(inputdata.patterns)
if err != nil {
t.Errorf("PreparePattern returned error: %s", err)
}
@@ -98,6 +98,20 @@ func TestFilterByFields(t *testing.T) {
},
},

{
name: "one-field-negative",
filter: []string{"one!=asd"},
expect: Tabdata{
headers: []string{
"ONE", "TWO", "THREE",
},
entries: [][]string{
{"19191", "EDD 1", "x"},
{"8d8", "AN 1", "y"},
},
},
},

{
name: "one-field-inverted",
filter: []string{"one=19"},

@@ -301,12 +301,20 @@ func colorizeData(conf cfg.Config, output string) string {

return colorized

case len(conf.Pattern) > 0 && !conf.NoColor && color.IsConsole(os.Stdout):
r := regexp.MustCompile("(" + conf.Pattern + ")")
case len(conf.Patterns) > 0 && !conf.NoColor && color.IsConsole(os.Stdout):
out := output

return r.ReplaceAllStringFunc(output, func(in string) string {
return conf.ColorStyle.Sprint(in)
})
for _, re := range conf.Patterns {
if !re.Negate {
r := regexp.MustCompile("(" + re.Pattern + ")")

out = r.ReplaceAllStringFunc(out, func(in string) string {
return conf.ColorStyle.Sprint(in)
})
}
}

return out

default:
return output

lib/io.go (20 lines changed)
@@ -29,13 +29,13 @@ import (
const RWRR = 0755

func ProcessFiles(conf *cfg.Config, args []string) error {
fd, pattern, err := determineIO(conf, args)
fd, patterns, err := determineIO(conf, args)

if err != nil {
return err
}

if err := conf.PreparePattern(pattern); err != nil {
if err := conf.PreparePattern(patterns); err != nil {
return err
}

@@ -63,9 +63,9 @@ func ProcessFiles(conf *cfg.Config, args []string) error {
return nil
}

func determineIO(conf *cfg.Config, args []string) (io.Reader, string, error) {
func determineIO(conf *cfg.Config, args []string) (io.Reader, []*cfg.Pattern, error) {
var filehandle io.Reader
var pattern string
var patterns []*cfg.Pattern
var haveio bool

switch {
@@ -76,7 +76,7 @@ func determineIO(conf *cfg.Config, args []string) (io.Reader, string, error) {
fd, err := os.OpenFile(conf.InputFile, os.O_RDONLY, RWRR)

if err != nil {
return nil, "", fmt.Errorf("failed to read input file %s: %w", conf.InputFile, err)
return nil, nil, fmt.Errorf("failed to read input file %s: %w", conf.InputFile, err)
}

filehandle = fd
@@ -93,13 +93,15 @@ func determineIO(conf *cfg.Config, args []string) (io.Reader, string, error) {
}

if len(args) > 0 {
pattern = args[0]
conf.Pattern = args[0]
patterns = make([]*cfg.Pattern, len(args))
for i, arg := range args {
patterns[i] = &cfg.Pattern{Pattern: arg}
}
}

if !haveio {
return nil, "", errors.New("no file specified and nothing to read on stdin")
return nil, nil, errors.New("no file specified and nothing to read on stdin")
}

return filehandle, pattern, nil
return filehandle, patterns, nil
}

@@ -137,7 +137,7 @@ func parseTabular(conf cfg.Config, input io.Reader) (Tabdata, error) {
}
} else {
// data processing
if conf.Pattern != "" && matchPattern(conf, line) == conf.InvertMatch {
if matchPattern(conf, line) == conf.InvertMatch {
// by default -v is false, so if a line does NOT
// match the pattern, we will ignore it. However,
// if the user specified -v, the matching is inverted,

@@ -83,36 +83,42 @@ func TestParser(t *testing.T) {

func TestParserPatternmatching(t *testing.T) {
var tests = []struct {
entries [][]string
pattern string
invert bool
want bool
name string
entries [][]string
patterns []*cfg.Pattern
invert bool
want bool
}{
{
name: "match",
entries: [][]string{
{"asd", "igig", "cxxxncnc"},
},
pattern: "ig",
invert: false,
patterns: []*cfg.Pattern{{Pattern: "ig"}},
invert: false,
},
{
name: "invert",
entries: [][]string{
{"19191", "EDD 1", "X"},
},
pattern: "ig",
invert: true,
patterns: []*cfg.Pattern{{Pattern: "ig"}},
invert: true,
},
}

for _, inputdata := range input {
for _, testdata := range tests {
testname := fmt.Sprintf("parse-%s-with-pattern-%s-inverted-%t",
inputdata.name, testdata.pattern, testdata.invert)
inputdata.name, testdata.name, testdata.invert)
t.Run(testname, func(t *testing.T) {
conf := cfg.Config{InvertMatch: testdata.invert, Pattern: testdata.pattern,
Separator: inputdata.separator}
conf := cfg.Config{
InvertMatch: testdata.invert,
Patterns: testdata.patterns,
Separator: inputdata.separator,
}

_ = conf.PreparePattern(testdata.pattern)
_ = conf.PreparePattern(testdata.patterns)

readFd := strings.NewReader(strings.TrimSpace(inputdata.text))
gotdata, err := Parse(conf, readFd)
@@ -125,7 +131,7 @@ func TestParserPatternmatching(t *testing.T) {
} else {
if !reflect.DeepEqual(testdata.entries, gotdata.entries) {
t.Errorf("Parser returned invalid data (pattern: %s, invert: %t)\nExp: %+v\nGot: %+v\n",
testdata.pattern, testdata.invert, testdata.entries, gotdata.entries)
testdata.name, testdata.invert, testdata.entries, gotdata.entries)
}
}
})

mkrel.sh (9 lines changed)
@@ -42,8 +42,15 @@ for D in $DIST; do
binfile="releases/${tool}-${os}-${arch}-${version}"
tardir="${tool}-${os}-${arch}-${version}"
tarfile="releases/${tool}-${os}-${arch}-${version}.tar.gz"
pie=""

if test "$D" = "linux/amd64"; then
pie="-buildmode=pie"
fi

set -x
GOOS=${os} GOARCH=${arch} go build -o ${binfile} -ldflags "-X 'github.com/tlinden/tablizer/cfg.VERSION=${version}'"
GOOS=${os} GOARCH=${arch} go build -tags osusergo,netgo -ldflags "-extldflags=-static -w -X 'github.com/tlinden/tablizer/cfg.VERSION=${version}'" --trimpath $pie -o ${binfile}
strip --strip-all ${binfile}
mkdir -p ${tardir}
cp ${binfile} README.md LICENSE ${tardir}/
echo 'tool = tablizer

t/test-multipatterns.txtar (new file, 46 lines)
@@ -0,0 +1,46 @@
# filtering

# a AND b
exec tablizer -r testtable.txt -H -cspecies invasive imperium
stdout 'namak'
! stdout human

# a AND !b
exec tablizer -r testtable.txt -H -cspecies invasive '/imperium/!'
stdout 'human'
! stdout namak

# a AND !b AND c
exec tablizer -r testtable.txt -H -cspecies peaceful '/imperium/!' planetary
stdout 'kenaha'
! stdout 'namak|heduu|riedl'

# case insensitive
exec tablizer -r testtable.txt -H -cspecies '/REGIONAL/i'
stdout namak
! stdout 'human|riedl|heduu|kenaa'

# case insensitive negated
exec tablizer -r testtable.txt -H -cspecies '/REGIONAL/!i'
stdout 'human|riedl|heduu|kenaa'
! stdout namak

# !a AND !b
exec tablizer -r testtable.txt -H -cspecies '/galactic/!' '/planetary/!'
stdout namak
! stdout 'human|riedl|heduu|kenaa'

# same case insensitive
exec tablizer -r testtable.txt -H -cspecies '/GALACTIC/i!' '/PLANETARY/!i'
stdout namak
! stdout 'human|riedl|heduu|kenaa'

# will be automatically created in work dir
-- testtable.txt --
SPECIES TYPE HOME STAGE SPREAD
human invasive earth brink planetary
riedl peaceful keauna civilized pangalactic
namak invasive namak imperium regional
heduu peaceful iu imperium galactic
kenaha peaceful kohi hunter-gatherer planetary

t/testtable5 (new file, 6 lines)
@@ -0,0 +1,6 @@
SPECIES TYPE HOME STAGE
human invasive earth brink
riedl peaceful keauna civilized
namak invasive namak imperium
heduu peaceful iu imperium
kenaha peaceful kohi hunter-gatherer

tablizer.1 (61 lines changed)
@@ -134,6 +134,7 @@
.\"
.IX Title "TABLIZER 1"
.TH TABLIZER 1 "2025-02-23" "1" "User Commands"

.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -144,7 +145,7 @@ tablizer \- Manipulate tabular output of other programs
.IX Header "SYNOPSIS"
.Vb 2
\& Usage:
\& tablizer [regex] [file, ...] [flags]
\& tablizer [regex,...] [file, ...] [flags]
\&
\& Operational Flags:
\& \-c, \-\-columns string Only show the speficied columns (separated by ,)
@@ -155,7 +156,7 @@ tablizer \- Manipulate tabular output of other programs
\& \-s, \-\-separator string Custom field separator
\& \-k, \-\-sort\-by int|name Sort by column (default: 1)
\& \-z, \-\-fuzzy Use fuzzy search [experimental]
\& \-F, \-\-filter field=reg Filter given field with regex, can be used multiple times
\& \-F, \-\-filter field[!]=reg Filter given field with regex, can be used multiple times
\& \-T, \-\-transpose\-columns string Transpose the speficied columns (separated by ,)
\& \-R, \-\-regex\-transposer /from/to/ Apply /search/replace/ regexp to fields given in \-T
\&
@@ -280,38 +281,52 @@ Finally the \fB\-d\fR option enables debugging output which is mostly
useful for the developer.
.SS "\s-1PATTERNS AND FILTERING\s0"
.IX Subsection "PATTERNS AND FILTERING"
You can reduce the rows being displayed by using a regular expression
pattern. The regexp is \s-1PCRE\s0 compatible, refer to the syntax cheat
sheet here: <https://github.com/google/re2/wiki/Syntax>. If you want
to read a more comprehensive documentation about the topic and have
perl installed you can read it with:
You can reduce the rows being displayed by using one or more regular
expression patterns. The regexp language being used is the one of
\&\s-1GOLANG,\s0 refer to the syntax cheat sheet here:
<https://pkg.go.dev/regexp/syntax>.
.PP
If you want to read a more comprehensive documentation about the
topic and have perl installed you can read it with:
.PP
.Vb 1
\& perldoc perlre
.Ve
.PP
Or read it online: <https://perldoc.perl.org/perlre>.
Or read it online: <https://perldoc.perl.org/perlre>. But please note
that the \s-1GO\s0 regexp engine does \s-1NOT\s0 support all perl regex terms,
especially look-ahead and look-behind.
.PP
A note on modifiers: the regexp engine used in tablizer uses another
modifier syntax:
If you want to supply flags to a regex, then surround it with slashes
and append the flag. The following flags are supported:
.PP
.Vb 1
\& (?MODIFIER)
.Vb 2
\& i => case insensitive
\& ! => negative match
.Ve
.PP
The most important modifiers are:
.PP
\&\f(CW\*(C`i\*(C'\fR ignore case
\&\f(CW\*(C`m\*(C'\fR multiline mode
\&\f(CW\*(C`s\*(C'\fR single line mode
.PP
Example for a case insensitive search:
.PP
.Vb 1
\& kubectl get pods \-A | tablizer "(?i)account"
\& kubectl get pods \-A | tablizer "/account/i"
.Ve
.PP
You can use the experimental fuzzy search feature by providing the
If you use the \f(CW\*(C`!\*(C'\fR flag, then the regex match will be negated, that
is, if a line in the input matches the given regex, but \f(CW\*(C`!\*(C'\fR is
supplied, tablizer will \s-1NOT\s0 include it in the output.
.PP
For example, here we want to get all lines matching \*(L"foo\*(R" but not
\&\*(L"bar\*(R":
.PP
.Vb 1
\& cat table | tablizer foo \*(Aq/bar/!\*(Aq
.Ve
.PP
This would match a line \*(L"foo zorro\*(R" but not \*(L"foo bar\*(R".
.PP
The flags can also be combined.
.PP
You can also use the experimental fuzzy search feature by providing the
option \fB\-z\fR, in which case the pattern is regarded as a fuzzy search
term, not a regexp.
.PP
@@ -328,6 +343,12 @@ Fieldnames (== columns headers) are case insensitive.
If you specify more than one filter, both filters have to match (\s-1AND\s0
operation).
.PP
These field filters can also be negated:
.PP
.Vb 1
\& fieldname!=regexp
.Ve
.PP
If the option \fB\-v\fR is specified, the filtering is inverted.
.SS "\s-1COLUMNS\s0"
.IX Subsection "COLUMNS"

tablizer.pod (54 lines changed)
@@ -5,7 +5,7 @@ tablizer - Manipulate tabular output of other programs
=head1 SYNOPSIS

Usage:
tablizer [regex] [file, ...] [flags]
tablizer [regex,...] [file, ...] [flags]

Operational Flags:
-c, --columns string Only show the speficied columns (separated by ,)
@@ -16,7 +16,7 @@ tablizer - Manipulate tabular output of other programs
-s, --separator string Custom field separator
-k, --sort-by int|name Sort by column (default: 1)
-z, --fuzzy Use fuzzy search [experimental]
-F, --filter field=reg Filter given field with regex, can be used multiple times
-F, --filter field[!]=reg Filter given field with regex, can be used multiple times
-T, --transpose-columns string Transpose the speficied columns (separated by ,)
-R, --regex-transposer /from/to/ Apply /search/replace/ regexp to fields given in -T

@@ -144,32 +144,44 @@ useful for the developer.

=head2 PATTERNS AND FILTERING

You can reduce the rows being displayed by using a regular expression
pattern. The regexp is PCRE compatible, refer to the syntax cheat
sheet here: L<https://github.com/google/re2/wiki/Syntax>. If you want
to read a more comprehensive documentation about the topic and have
perl installed you can read it with:
You can reduce the rows being displayed by using one or more regular
expression patterns. The regexp language being used is the one of
GOLANG, refer to the syntax cheat sheet here:
L<https://pkg.go.dev/regexp/syntax>.

If you want to read a more comprehensive documentation about the
topic and have perl installed you can read it with:

perldoc perlre

Or read it online: L<https://perldoc.perl.org/perlre>.
Or read it online: L<https://perldoc.perl.org/perlre>. But please note
that the GO regexp engine does NOT support all perl regex terms,
especially look-ahead and look-behind.

A note on modifiers: the regexp engine used in tablizer uses another
modifier syntax:
If you want to supply flags to a regex, then surround it with slashes
and append the flag. The following flags are supported:

(?MODIFIER)

The most important modifiers are:

C<i> ignore case
C<m> multiline mode
C<s> single line mode
i => case insensitive
! => negative match

Example for a case insensitive search:

kubectl get pods -A | tablizer "(?i)account"
kubectl get pods -A | tablizer "/account/i"

You can use the experimental fuzzy search feature by providing the
If you use the C<!> flag, then the regex match will be negated, that
is, if a line in the input matches the given regex, but C<!> is
supplied, tablizer will NOT include it in the output.

For example, here we want to get all lines matching "foo" but not
"bar":

cat table | tablizer foo '/bar/!'

This would match a line "foo zorro" but not "foo bar".

The flags can also be combined.

You can also use the experimental fuzzy search feature by providing the
option B<-z>, in which case the pattern is regarded as a fuzzy search
term, not a regexp.

@@ -184,6 +196,10 @@ Fieldnames (== columns headers) are case insensitive.
If you specify more than one filter, both filters have to match (AND
operation).

These field filters can also be negated:

fieldname!=regexp

If the option B<-v> is specified, the filtering is inverted.