Mirror of https://codeberg.org/scip/tablizer.git, synced 2025-12-17 04:30:56 +01:00

Merge branch 'main' into feature/yank

.github/workflows/ci.yaml (vendored): 2 lines changed

@@ -5,8 +5,6 @@ jobs:
     strategy:
       matrix:
         version: ['1.22']
-        # windows-latest removed, see:
-        # https://github.com/rogpeppe/go-internal/issues/284
         os: [ubuntu-latest, macos-latest, windows-latest]
     name: Build
     runs-on: ${{ matrix.os }}

.github/workflows/release.yaml (vendored): 63 lines changed

@@ -1,8 +1,8 @@
-name: build-and-test
+name: build-release
 on:
   push:
     tags:
-      - "*"
+      - "v*.*.*"
 
 jobs:
   release:
@@ -10,10 +10,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
 
       - name: Set up Go
-        uses: actions/setup-go@v1
+        uses: actions/setup-go@v5
         with:
           go-version: 1.22.11
 
@@ -30,3 +30,58 @@ jobs:
           tag: ${{ github.ref_name }}
           file: ./releases/*
           file_glob: true
+
+      - name: Build Changelog
+        id: github_release
+        uses: mikepenz/release-changelog-builder-action@v5
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          mode: "PR"
+          configurationJson: |
+            {
+              "template": "#{{CHANGELOG}}\n\n**Full Changelog**: #{{RELEASE_DIFF}}",
+              "pr_template": "- #{{TITLE}} (##{{NUMBER}}) by #{{AUTHOR}}\n#{{BODY}}",
+              "empty_template": "- no changes",
+              "categories": [
+                {
+                  "title": "## New Features",
+                  "labels": ["add", "feature"]
+                },
+                {
+                  "title": "## Bug Fixes",
+                  "labels": ["fix", "bug", "revert"]
+                },
+                {
+                  "title": "## Documentation Enhancements",
+                  "labels": ["doc"]
+                },
+                {
+                  "title": "## Refactoring Efforts",
+                  "labels": ["refactor"]
+                },
+                {
+                  "title": "## Miscellaneus Changes",
+                  "labels": []
+                }
+              ],
+              "ignore_labels": [
+                "duplicate", "good first issue", "help wanted", "invalid", "question", "wontfix"
+              ],
+              "label_extractor": [
+                {
+                  "pattern": "(.) (.+)",
+                  "target": "$1"
+                },
+                {
+                  "pattern": "(.) (.+)",
+                  "target": "$1",
+                  "on_property": "title"
+                }
+              ]
+            }
+
+      - name: Create Release
+        uses: softprops/action-gh-release@v2
+        with:
+          body: ${{steps.github_release.outputs.changelog}}
@@ -22,7 +22,7 @@ Operational Flags:
   -s, --separator string             Custom field separator
   -k, --sort-by int                  Sort by column (default: 1)
   -z, --fuzzy                        Use fuzzy search [experimental]
-  -F, --filter field=reg             Filter given field with regex, can be used multiple times
+  -F, --filter field[!]=reg          Filter given field with regex, can be used multiple times
   -T, --transpose-columns string     Transpose the speficied columns (separated by ,)
   -R, --regex-transposer /from/to/   Apply /search/replace/ regexp to fields given in -T
 
@@ -1,5 +1,5 @@
 /*
-Copyright © 2022-2024 Thomas von Dein
+Copyright © 2022-2025 Thomas von Dein
 
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
@@ -52,6 +52,17 @@ type Transposer struct {
     Replace string
 }
 
+type Pattern struct {
+    Pattern   string
+    PatternRe *regexp.Regexp
+    Negate    bool
+}
+
+type Filter struct {
+    Regex  *regexp.Regexp
+    Negate bool
+}
+
 // internal config
 type Config struct {
     Debug bool
@@ -64,8 +75,7 @@ type Config struct {
     Separator      string
     OutputMode     int
     InvertMatch    bool
-    Pattern        string
-    PatternR       *regexp.Regexp
+    Patterns       []*Pattern
     UseFuzzySearch bool
     UseHighlight   bool
 
@@ -97,7 +107,7 @@ type Config struct {
 
     // used for field filtering
     Rawfilters []string
-    Filters    map[string]*regexp.Regexp
+    Filters    map[string]Filter //map[string]*regexp.Regexp
 
     // -r <file>
    InputFile string
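
To make the shape of the new configuration concrete, here is a minimal, self-contained sketch (not part of this commit) of what the two structs above end up holding once PreparePattern and PrepareFilters have run; the inputs "/error/i!" and "status!=Running" are invented for illustration.

// Standalone illustration only; the struct definitions mirror the hunk above.
package main

import (
	"fmt"
	"regexp"
)

type Pattern struct {
	Pattern   string
	PatternRe *regexp.Regexp
	Negate    bool
}

type Filter struct {
	Regex  *regexp.Regexp
	Negate bool
}

func main() {
	// roughly what PreparePattern yields for the CLI argument "/error/i!"
	p := Pattern{Pattern: `(?i)error`, PatternRe: regexp.MustCompile(`(?i)error`), Negate: true}

	// roughly what PrepareFilters yields for -F 'status!=Running'
	filters := map[string]Filter{"status": {Regex: regexp.MustCompile(`Running`), Negate: true}}

	fmt.Printf("%+v\n%+v\n", p, filters)
}
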
@@ -267,12 +277,20 @@ func (conf *Config) PrepareModeFlags(flag Modeflag) {
     }
 }
 
 func (conf *Config) PrepareFilters() error {
-    conf.Filters = make(map[string]*regexp.Regexp, len(conf.Rawfilters))
+    conf.Filters = make(map[string]Filter, len(conf.Rawfilters))
 
-    for _, filter := range conf.Rawfilters {
-        parts := strings.Split(filter, "=")
+    for _, rawfilter := range conf.Rawfilters {
+        filter := Filter{}
+
+        parts := strings.Split(rawfilter, "!=")
         if len(parts) != MAXPARTS {
-            return errors.New("filter field and value must be separated by =")
+            parts = strings.Split(rawfilter, "=")
+
+            if len(parts) != MAXPARTS {
+                return errors.New("filter field and value must be separated by '=' or '!='")
+            }
+        } else {
+            filter.Negate = true
         }
 
         reg, err := regexp.Compile(parts[1])
@@ -281,7 +299,8 @@ func (conf *Config) PrepareFilters() error {
             parts[0], err)
         }
 
-        conf.Filters[strings.ToLower(strings.ToLower(parts[0]))] = reg
+        filter.Regex = reg
+        conf.Filters[strings.ToLower(parts[0])] = filter
     }
 
     return nil
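
The split order matters here: "!=" is tried before "=", and negation is recorded only when the "!=" split succeeds. A standalone sketch of just that decision (not from the commit), assuming MAXPARTS is 2 as the use of parts[0] and parts[1] implies:

package main

import (
	"fmt"
	"strings"
)

const MAXPARTS = 2 // assumed value; the real code compares against the same constant

func splitFilter(rawfilter string) (field, expr string, negate, ok bool) {
	parts := strings.Split(rawfilter, "!=")
	if len(parts) != MAXPARTS {
		// no "!=" present, fall back to a plain "=" filter
		parts = strings.Split(rawfilter, "=")
		if len(parts) != MAXPARTS {
			return "", "", false, false // neither separator present
		}
	} else {
		negate = true
	}

	return parts[0], parts[1], negate, true
}

func main() {
	for _, raw := range []string{"one=asd", "one!=asd", "broken"} {
		field, expr, negate, ok := splitFilter(raw)
		fmt.Println(raw, "->", field, expr, negate, ok)
	}
}
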
@@ -335,15 +354,37 @@ func (conf *Config) ApplyDefaults() {
     }
 }
 
-func (conf *Config) PreparePattern(pattern string) error {
-    PatternR, err := regexp.Compile(pattern)
-
-    if err != nil {
-        return fmt.Errorf("regexp pattern %s is invalid: %w", conf.Pattern, err)
+func (conf *Config) PreparePattern(patterns []*Pattern) error {
+    // regex checks if a pattern looks like /$pattern/[i!]
+    flagre := regexp.MustCompile(`^/(.*)/([i!]*)$`)
+
+    for _, pattern := range patterns {
+        matches := flagre.FindAllStringSubmatch(pattern.Pattern, -1)
+
+        // we have a regex with flags
+        for _, match := range matches {
+            pattern.Pattern = match[1] // the inner part is our actual pattern
+            flags := match[2]          // the flags
+
+            for _, flag := range flags {
+                switch flag {
+                case 'i':
+                    pattern.Pattern = `(?i)` + pattern.Pattern
+                case '!':
+                    pattern.Negate = true
+                }
+            }
+        }
+
+        PatternRe, err := regexp.Compile(pattern.Pattern)
+        if err != nil {
+            return fmt.Errorf("regexp pattern %s is invalid: %w", pattern.Pattern, err)
+        }
+
+        pattern.PatternRe = PatternRe
     }
 
-    conf.PatternR = PatternR
-    conf.Pattern = pattern
+    conf.Patterns = patterns
 
     return nil
 }
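
The flag handling can be exercised on its own; this sketch (not part of the commit) runs the same `^/(.*)/([i!]*)$` expression over a few sample arguments, including the plain-regexp case that the expression deliberately leaves untouched:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// same expression the new PreparePattern uses to detect /pattern/flags arguments
	flagre := regexp.MustCompile(`^/(.*)/([i!]*)$`)

	for _, arg := range []string{"[A-Z]+", "/account/i", "/bar/!", "/GALACTIC/i!"} {
		pattern, negate := arg, false

		for _, match := range flagre.FindAllStringSubmatch(arg, -1) {
			pattern = match[1] // inner part is the actual pattern
			for _, flag := range match[2] {
				switch flag {
				case 'i':
					pattern = `(?i)` + pattern
				case '!':
					negate = true
				}
			}
		}

		fmt.Printf("%-16s -> pattern %-16q negate %t\n", arg, pattern, negate)
	}
}
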
@@ -79,20 +79,55 @@ func TestPrepareSortFlags(t *testing.T) {
 
 func TestPreparePattern(t *testing.T) {
     var tests = []struct {
-        pattern string
+        patterns  []*Pattern
+        name      string
         wanterr   bool
+        wanticase bool
+        wantneg   bool
     }{
-        {"[A-Z]+", false},
-        {"[a-z", true},
+        {
+            []*Pattern{{Pattern: "[A-Z]+"}},
+            "simple",
+            false,
+            false,
+            false,
+        },
+        {
+            []*Pattern{{Pattern: "[a-z"}},
+            "regfail",
+            true,
+            false,
+            false,
+        },
+        {
+            []*Pattern{{Pattern: "/[A-Z]+/i"}},
+            "icase",
+            false,
+            true,
+            false,
+        },
+        {
+            []*Pattern{{Pattern: "/[A-Z]+/!"}},
+            "negate",
+            false,
+            false,
+            true,
+        },
+        {
+            []*Pattern{{Pattern: "/[A-Z]+/!i"}},
+            "negicase",
+            false,
+            true,
+            true,
+        },
     }
 
     for _, testdata := range tests {
-        testname := fmt.Sprintf("PreparePattern-pattern-%s-wanterr-%t",
-            testdata.pattern, testdata.wanterr)
+        testname := fmt.Sprintf("PreparePattern-pattern-%s-wanterr-%t", testdata.name, testdata.wanterr)
+
         t.Run(testname, func(t *testing.T) {
             conf := Config{}
 
-            err := conf.PreparePattern(testdata.pattern)
+            err := conf.PreparePattern(testdata.patterns)
 
             if err != nil {
                 if !testdata.wanterr {
@@ -1,5 +1,5 @@
 /*
-Copyright © 2022-2024 Thomas von Dein
+Copyright © 2022-2025 Thomas von Dein
 
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
@@ -192,7 +192,7 @@ func Execute() {
 
     // filters
     rootCmd.PersistentFlags().StringArrayVarP(&conf.Rawfilters,
-        "filter", "F", nil, "Filter by field (field=regexp)")
+        "filter", "F", nil, "Filter by field (field=regexp || field!=regexp)")
     rootCmd.PersistentFlags().StringArrayVarP(&conf.Transposers,
         "regex-transposer", "R", nil, "apply /search/replace/ regexp to fields given in -T")
 
@@ -6,7 +6,7 @@ NAME
 
 SYNOPSIS
     Usage:
-      tablizer [regex] [file, ...] [flags]
+      tablizer [regex,...] [file, ...] [flags]
 
     Operational Flags:
       -c, --columns string               Only show the speficied columns (separated by ,)
@@ -17,7 +17,7 @@ SYNOPSIS
       -s, --separator string             Custom field separator
       -k, --sort-by int|name             Sort by column (default: 1)
       -z, --fuzzy                        Use fuzzy search [experimental]
-      -F, --filter field=reg             Filter given field with regex, can be used multiple times
+      -F, --filter field[!]=reg          Filter given field with regex, can be used multiple times
       -T, --transpose-columns string     Transpose the speficied columns (separated by ,)
       -R, --regex-transposer /from/to/   Apply /search/replace/ regexp to fields given in -T
 
@@ -132,30 +132,43 @@ DESCRIPTION
     for the developer.
 
 PATTERNS AND FILTERING
-    You can reduce the rows being displayed by using a regular expression
-    pattern. The regexp is PCRE compatible, refer to the syntax cheat sheet
-    here: <https://github.com/google/re2/wiki/Syntax>. If you want to read a
-    more comprehensive documentation about the topic and have perl installed
-    you can read it with:
+    You can reduce the rows being displayed by using one or more regular
+    expression patterns. The regexp language being used is the one of
+    GOLANG, refer to the syntax cheat sheet here:
+    <https://pkg.go.dev/regexp/syntax>.
+
+    If you want to read a more comprehensive documentation about the topic
+    and have perl installed you can read it with:
 
         perldoc perlre
 
-    Or read it online: <https://perldoc.perl.org/perlre>.
+    Or read it online: <https://perldoc.perl.org/perlre>. But please note
+    that the GO regexp engine does NOT support all perl regex terms,
+    especially look-ahead and look-behind.
 
-    A note on modifiers: the regexp engine used in tablizer uses another
-    modifier syntax:
+    If you want to supply flags to a regex, then surround it with slashes
+    and append the flag. The following flags are supported:
 
-        (?MODIFIER)
-
-    The most important modifiers are:
-
-        "i" ignore case "m" multiline mode "s" single line mode
+        i => case insensitive
+        ! => negative match
 
     Example for a case insensitive search:
 
-        kubectl get pods -A | tablizer "(?i)account"
+        kubectl get pods -A | tablizer "/account/i"
 
-    You can use the experimental fuzzy search feature by providing the
+    If you use the "!" flag, then the regex match will be negated, that is,
+    if a line in the input matches the given regex, but "!" is supplied,
+    tablizer will NOT include it in the output.
+
+    For example, here we want to get all lines matching "foo" but not "bar":
+
+        cat table | tablizer foo '/bar/!'
+
+    This would match a line "foo zorro" but not "foo bar".
+
+    The flags can also be combined.
+
+    You can also use the experimental fuzzy search feature by providing the
     option -z, in which case the pattern is regarded as a fuzzy search term,
     not a regexp.
 
@@ -170,6 +183,10 @@ DESCRIPTION
     If you specify more than one filter, both filters have to match (AND
     operation).
 
+    These field filters can also be negated:
+
+        fieldname!=regexp
+
     If the option -v is specified, the filtering is inverted.
 
 COLUMNS
@@ -406,7 +423,7 @@ AUTHORS
 var usage = `
 
 Usage:
-  tablizer [regex] [file, ...] [flags]
+  tablizer [regex,...] [file, ...] [flags]
 
 Operational Flags:
   -c, --columns string               Only show the speficied columns (separated by ,)
@@ -417,7 +434,7 @@ Operational Flags:
   -s, --separator string             Custom field separator
   -k, --sort-by int|name             Sort by column (default: 1)
   -z, --fuzzy                        Use fuzzy search [experimental]
-  -F, --filter field=reg             Filter given field with regex, can be used multiple times
+  -F, --filter field[!]=reg          Filter given field with regex, can be used multiple times
   -T, --transpose-columns string     Transpose the speficied columns (separated by ,)
   -R, --regex-transposer /from/to/   Apply /search/replace/ regexp to fields given in -T
 
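
The new wording's claim that the Go engine rejects look-ahead and look-behind is easy to check; a tiny sketch, not part of this commit:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Perl-style look-ahead is not supported by Go's RE2-based regexp package
	_, err := regexp.Compile(`foo(?=bar)`)
	fmt.Println(err) // prints a parse error about unsupported Perl syntax
}
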
@@ -1,5 +1,5 @@
 /*
-Copyright © 2022-2024 Thomas von Dein
+Copyright © 2022-2025 Thomas von Dein
 
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
@@ -27,15 +27,46 @@ import (
 )
 
 /*
  * [!]Match a line, use fuzzy search for normal pattern strings and
  * regexp otherwise.
- */
+
+   'foo bar'  foo, /bar/!  => false => line contains foo and not (not bar)
+   'foo nix'  foo, /bar/!  => ture  => line contains foo and (not bar)
+   'foo bar'  foo, /bar/   => true  => line contains both foo and bar
+   'foo nix'  foo, /bar/   => false => line does not contain bar
+   'foo bar'  foo, /nix/   => false => line does not contain nix
+*/
 func matchPattern(conf cfg.Config, line string) bool {
-    if conf.UseFuzzySearch {
-        return fuzzy.MatchFold(conf.Pattern, line)
+    if len(conf.Patterns) == 0 {
+        // any line always matches ""
+        return true
     }
 
-    return conf.PatternR.MatchString(line)
+    if conf.UseFuzzySearch {
+        // fuzzy search only considers the 1st pattern
+        return fuzzy.MatchFold(conf.Patterns[0].Pattern, line)
+    }
+
+    var match int
+
+    //fmt.Printf("<%s>\n", line)
+    for _, re := range conf.Patterns {
+        patmatch := re.PatternRe.MatchString(line)
+        if re.Negate {
+            // toggle the meaning of match
+            patmatch = !patmatch
+        }
+
+        if patmatch {
+            match++
+        }
+
+        //fmt.Printf("patmatch: %t, match: %d, pattern: %s, negate: %t\n", patmatch, match, re.Pattern, re.Negate)
+    }
+
+    // fmt.Printf("result: %t\n", match == len(conf.Patterns))
+    //fmt.Println()
+    return match == len(conf.Patterns)
 }
 
 /*
@@ -55,15 +86,19 @@ func FilterByFields(conf cfg.Config, data *Tabdata) (*Tabdata, bool, error) {
         keep := true
 
         for idx, header := range data.headers {
-            if !Exists(conf.Filters, strings.ToLower(header)) {
+            lcheader := strings.ToLower(header)
+
+            if !Exists(conf.Filters, lcheader) {
                 // do not filter by unspecified field
                 continue
             }
 
-            if !conf.Filters[strings.ToLower(header)].MatchString(row[idx]) {
-                // there IS a filter, but it doesn't match
-                keep = false
+            match := conf.Filters[lcheader].Regex.MatchString(row[idx])
+            if conf.Filters[lcheader].Negate {
+                match = !match
+            }
+
+            if !match {
+                keep = false
                 break
             }
         }
@@ -123,8 +158,11 @@ func Exists[K comparable, V any](m map[K]V, v K) bool {
     return false
 }
 
+/*
+ * Filters the whole input lines, returns filtered lines
+ */
 func FilterByPattern(conf cfg.Config, input io.Reader) (io.Reader, error) {
-    if conf.Pattern == "" {
+    if len(conf.Patterns) == 0 {
         return input, nil
     }
 
@@ -136,7 +174,7 @@ func FilterByPattern(conf cfg.Config, input io.Reader) (io.Reader, error) {
         line := strings.TrimSpace(scanner.Text())
         if hadFirst {
             // don't match 1st line, it's the header
-            if conf.Pattern != "" && matchPattern(conf, line) == conf.InvertMatch {
+            if matchPattern(conf, line) == conf.InvertMatch {
                 // by default -v is false, so if a line does NOT
                 // match the pattern, we will ignore it. However,
                 // if the user specified -v, the matching is inverted,
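
A compact sketch (not from the commit) of the AND-over-patterns rule the new matchPattern implements, reusing the "foo zorro" / "foo bar" example from the documentation hunks above:

package main

import (
	"fmt"
	"regexp"
)

type pattern struct {
	re     *regexp.Regexp
	negate bool
}

// every pattern must match (and every negated pattern must NOT match) for a line to pass
func matchAll(line string, patterns []pattern) bool {
	matches := 0
	for _, p := range patterns {
		m := p.re.MatchString(line)
		if p.negate {
			m = !m
		}
		if m {
			matches++
		}
	}
	return matches == len(patterns)
}

func main() {
	patterns := []pattern{
		{re: regexp.MustCompile("foo")},
		{re: regexp.MustCompile("bar"), negate: true}, // the '/bar/!' case
	}

	fmt.Println(matchAll("foo zorro", patterns)) // true
	fmt.Println(matchAll("foo bar", patterns))   // false
}
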
@@ -1,5 +1,5 @@
 /*
-Copyright © 2024 Thomas von Dein
+Copyright © 2024-2025 Thomas von Dein
 
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
@@ -29,17 +29,17 @@ func TestMatchPattern(t *testing.T) {
     var input = []struct {
         name    string
         fuzzy   bool
-        pattern string
+        patterns []*cfg.Pattern
         line    string
     }{
         {
             name:    "normal",
-            pattern: "haus",
+            patterns: []*cfg.Pattern{{Pattern: "haus"}},
             line:    "hausparty",
         },
         {
             name:    "fuzzy",
-            pattern: "hpt",
+            patterns: []*cfg.Pattern{{Pattern: "hpt"}},
             line:    "haus-party-termin",
             fuzzy:   true,
         },
@@ -55,7 +55,7 @@ func TestMatchPattern(t *testing.T) {
                 conf.UseFuzzySearch = true
             }
 
-            err := conf.PreparePattern(inputdata.pattern)
+            err := conf.PreparePattern(inputdata.patterns)
             if err != nil {
                 t.Errorf("PreparePattern returned error: %s", err)
             }
@@ -98,6 +98,20 @@ func TestFilterByFields(t *testing.T) {
             },
         },
 
+        {
+            name:   "one-field-negative",
+            filter: []string{"one!=asd"},
+            expect: Tabdata{
+                headers: []string{
+                    "ONE", "TWO", "THREE",
+                },
+                entries: [][]string{
+                    {"19191", "EDD 1", "x"},
+                    {"8d8", "AN 1", "y"},
+                },
+            },
+        },
+
         {
             name:   "one-field-inverted",
             filter: []string{"one=19"},
@@ -301,12 +301,20 @@ func colorizeData(conf cfg.Config, output string) string {
 
         return colorized
 
-    case len(conf.Pattern) > 0 && !conf.NoColor && color.IsConsole(os.Stdout):
-        r := regexp.MustCompile("(" + conf.Pattern + ")")
+    case len(conf.Patterns) > 0 && !conf.NoColor && color.IsConsole(os.Stdout):
+        out := output
 
-        return r.ReplaceAllStringFunc(output, func(in string) string {
-            return conf.ColorStyle.Sprint(in)
-        })
+        for _, re := range conf.Patterns {
+            if !re.Negate {
+                r := regexp.MustCompile("(" + re.Pattern + ")")
+
+                out = r.ReplaceAllStringFunc(out, func(in string) string {
+                    return conf.ColorStyle.Sprint(in)
+                })
+            }
+        }
+
+        return out
 
     default:
         return output

lib/io.go: 20 lines changed

@@ -29,13 +29,13 @@ import (
 const RWRR = 0755
 
 func ProcessFiles(conf *cfg.Config, args []string) error {
-    fd, pattern, err := determineIO(conf, args)
+    fd, patterns, err := determineIO(conf, args)
 
     if err != nil {
         return err
     }
 
-    if err := conf.PreparePattern(pattern); err != nil {
+    if err := conf.PreparePattern(patterns); err != nil {
         return err
     }
 
@@ -63,9 +63,9 @@ func ProcessFiles(conf *cfg.Config, args []string) error {
     return nil
 }
 
-func determineIO(conf *cfg.Config, args []string) (io.Reader, string, error) {
+func determineIO(conf *cfg.Config, args []string) (io.Reader, []*cfg.Pattern, error) {
     var filehandle io.Reader
-    var pattern string
+    var patterns []*cfg.Pattern
     var haveio bool
 
     switch {
@@ -76,7 +76,7 @@ func determineIO(conf *cfg.Config, args []string) (io.Reader, string, error) {
         fd, err := os.OpenFile(conf.InputFile, os.O_RDONLY, RWRR)
 
         if err != nil {
-            return nil, "", fmt.Errorf("failed to read input file %s: %w", conf.InputFile, err)
+            return nil, nil, fmt.Errorf("failed to read input file %s: %w", conf.InputFile, err)
         }
 
         filehandle = fd
@@ -93,13 +93,15 @@ func determineIO(conf *cfg.Config, args []string) (io.Reader, string, error) {
     }
 
     if len(args) > 0 {
-        pattern = args[0]
-        conf.Pattern = args[0]
+        patterns = make([]*cfg.Pattern, len(args))
+        for i, arg := range args {
+            patterns[i] = &cfg.Pattern{Pattern: arg}
+        }
     }
 
     if !haveio {
-        return nil, "", errors.New("no file specified and nothing to read on stdin")
+        return nil, nil, errors.New("no file specified and nothing to read on stdin")
     }
 
-    return filehandle, pattern, nil
+    return filehandle, patterns, nil
 }
@@ -137,7 +137,7 @@ func parseTabular(conf cfg.Config, input io.Reader) (Tabdata, error) {
         }
     } else {
         // data processing
-        if conf.Pattern != "" && matchPattern(conf, line) == conf.InvertMatch {
+        if matchPattern(conf, line) == conf.InvertMatch {
             // by default -v is false, so if a line does NOT
             // match the pattern, we will ignore it. However,
             // if the user specified -v, the matching is inverted,
@@ -83,23 +83,26 @@ func TestParser(t *testing.T) {
 
 func TestParserPatternmatching(t *testing.T) {
     var tests = []struct {
+        name     string
         entries  [][]string
-        pattern  string
+        patterns []*cfg.Pattern
         invert   bool
         want     bool
     }{
         {
+            name: "match",
             entries: [][]string{
                 {"asd", "igig", "cxxxncnc"},
             },
-            pattern: "ig",
+            patterns: []*cfg.Pattern{{Pattern: "ig"}},
             invert:  false,
         },
         {
+            name: "invert",
             entries: [][]string{
                 {"19191", "EDD 1", "X"},
             },
-            pattern: "ig",
+            patterns: []*cfg.Pattern{{Pattern: "ig"}},
             invert:  true,
         },
     }
@@ -107,12 +110,15 @@ func TestParserPatternmatching(t *testing.T) {
     for _, inputdata := range input {
         for _, testdata := range tests {
             testname := fmt.Sprintf("parse-%s-with-pattern-%s-inverted-%t",
-                inputdata.name, testdata.pattern, testdata.invert)
+                inputdata.name, testdata.name, testdata.invert)
             t.Run(testname, func(t *testing.T) {
-                conf := cfg.Config{InvertMatch: testdata.invert, Pattern: testdata.pattern,
-                    Separator: inputdata.separator}
+                conf := cfg.Config{
+                    InvertMatch: testdata.invert,
+                    Patterns:    testdata.patterns,
+                    Separator:   inputdata.separator,
+                }
 
-                _ = conf.PreparePattern(testdata.pattern)
+                _ = conf.PreparePattern(testdata.patterns)
 
                 readFd := strings.NewReader(strings.TrimSpace(inputdata.text))
                 gotdata, err := Parse(conf, readFd)
@@ -125,7 +131,7 @@ func TestParserPatternmatching(t *testing.T) {
             } else {
                 if !reflect.DeepEqual(testdata.entries, gotdata.entries) {
                     t.Errorf("Parser returned invalid data (pattern: %s, invert: %t)\nExp: %+v\nGot: %+v\n",
-                        testdata.pattern, testdata.invert, testdata.entries, gotdata.entries)
+                        testdata.name, testdata.invert, testdata.entries, gotdata.entries)
                 }
             }
         })

mkrel.sh: 9 lines changed

@@ -42,8 +42,15 @@ for D in $DIST; do
     binfile="releases/${tool}-${os}-${arch}-${version}"
     tardir="${tool}-${os}-${arch}-${version}"
     tarfile="releases/${tool}-${os}-${arch}-${version}.tar.gz"
+    pie=""
+
+    if test "$D" = "linux/amd64"; then
+        pie="-buildmode=pie"
+    fi
+
     set -x
-    GOOS=${os} GOARCH=${arch} go build -o ${binfile} -ldflags "-X 'github.com/tlinden/tablizer/cfg.VERSION=${version}'"
+    GOOS=${os} GOARCH=${arch} go build -tags osusergo,netgo -ldflags "-extldflags=-static -w -X 'github.com/tlinden/tablizer/cfg.VERSION=${version}'" --trimpath $pie -o ${binfile}
+    strip --strip-all ${binfile}
     mkdir -p ${tardir}
     cp ${binfile} README.md LICENSE ${tardir}/
     echo 'tool = tablizer

t/test-multipatterns.txtar (new file): 46 lines

@@ -0,0 +1,46 @@
+# filtering
+
+# a AND b
+exec tablizer -r testtable.txt -H -cspecies invasive imperium
+stdout 'namak'
+! stdout human
+
+# a AND !b
+exec tablizer -r testtable.txt -H -cspecies invasive '/imperium/!'
+stdout 'human'
+! stdout namak
+
+# a AND !b AND c
+exec tablizer -r testtable.txt -H -cspecies peaceful '/imperium/!' planetary
+stdout 'kenaha'
+! stdout 'namak|heduu|riedl'
+
+# case insensitive
+exec tablizer -r testtable.txt -H -cspecies '/REGIONAL/i'
+stdout namak
+! stdout 'human|riedl|heduu|kenaa'
+
+# case insensitive negated
+exec tablizer -r testtable.txt -H -cspecies '/REGIONAL/!i'
+stdout 'human|riedl|heduu|kenaa'
+! stdout namak
+
+# !a AND !b
+exec tablizer -r testtable.txt -H -cspecies '/galactic/!' '/planetary/!'
+stdout namak
+! stdout 'human|riedl|heduu|kenaa'
+
+# same case insensitive
+exec tablizer -r testtable.txt -H -cspecies '/GALACTIC/i!' '/PLANETARY/!i'
+stdout namak
+! stdout 'human|riedl|heduu|kenaa'
+
+# will be automatically created in work dir
+-- testtable.txt --
+SPECIES    TYPE        HOME     STAGE              SPREAD
+human      invasive    earth    brink              planetary
+riedl      peaceful    keauna   civilized          pangalactic
+namak      invasive    namak    imperium           regional
+heduu      peaceful    iu       imperium           galactic
+kenaha     peaceful    kohi     hunter-gatherer    planetary
+

t/testtable5 (new file): 6 lines

@@ -0,0 +1,6 @@
+SPECIES    TYPE        HOME     STAGE
+human      invasive    earth    brink
+riedl      peaceful    keauna   civilized
+namak      invasive    namak    imperium
+heduu      peaceful    iu       imperium
+kenaha     peaceful    kohi     hunter-gatherer

tablizer.1: 61 lines changed

@@ -134,6 +134,7 @@
 .\"
 .IX Title "TABLIZER 1"
 .TH TABLIZER 1 "2025-02-23" "1" "User Commands"
+
 .\" For nroff, turn off justification. Always turn off hyphenation; it makes
 .\" way too many mistakes in technical documents.
 .if n .ad l
@@ -144,7 +145,7 @@ tablizer \- Manipulate tabular output of other programs
 .IX Header "SYNOPSIS"
 .Vb 2
 \& Usage:
-\&   tablizer [regex] [file, ...] [flags]
+\&   tablizer [regex,...] [file, ...] [flags]
 \&
 \& Operational Flags:
 \&   \-c, \-\-columns string               Only show the speficied columns (separated by ,)
@@ -155,7 +156,7 @@ tablizer \- Manipulate tabular output of other programs
 \&   \-s, \-\-separator string             Custom field separator
 \&   \-k, \-\-sort\-by int|name             Sort by column (default: 1)
 \&   \-z, \-\-fuzzy                        Use fuzzy search [experimental]
-\&   \-F, \-\-filter field=reg             Filter given field with regex, can be used multiple times
+\&   \-F, \-\-filter field[!]=reg          Filter given field with regex, can be used multiple times
 \&   \-T, \-\-transpose\-columns string     Transpose the speficied columns (separated by ,)
 \&   \-R, \-\-regex\-transposer /from/to/   Apply /search/replace/ regexp to fields given in \-T
 \&
@@ -280,38 +281,52 @@ Finally the \fB\-d\fR option enables debugging output which is mostly
 useful for the developer.
 .SS "\s-1PATTERNS AND FILTERING\s0"
 .IX Subsection "PATTERNS AND FILTERING"
-You can reduce the rows being displayed by using a regular expression
-pattern. The regexp is \s-1PCRE\s0 compatible, refer to the syntax cheat
-sheet here: <https://github.com/google/re2/wiki/Syntax>. If you want
-to read a more comprehensive documentation about the topic and have
-perl installed you can read it with:
+You can reduce the rows being displayed by using one or more regular
+expression patterns. The regexp language being used is the one of
+\&\s-1GOLANG,\s0 refer to the syntax cheat sheet here:
+<https://pkg.go.dev/regexp/syntax>.
+.PP
+If you want to read a more comprehensive documentation about the
+topic and have perl installed you can read it with:
 .PP
 .Vb 1
 \& perldoc perlre
 .Ve
 .PP
-Or read it online: <https://perldoc.perl.org/perlre>.
+Or read it online: <https://perldoc.perl.org/perlre>. But please note
+that the \s-1GO\s0 regexp engine does \s-1NOT\s0 support all perl regex terms,
+especially look-ahead and look-behind.
 .PP
-A note on modifiers: the regexp engine used in tablizer uses another
-modifier syntax:
+If you want to supply flags to a regex, then surround it with slashes
+and append the flag. The following flags are supported:
 .PP
-.Vb 1
-\& (?MODIFIER)
+.Vb 2
+\& i => case insensitive
+\& ! => negative match
 .Ve
 .PP
-The most important modifiers are:
-.PP
-\&\f(CW\*(C`i\*(C'\fR ignore case
-\&\f(CW\*(C`m\*(C'\fR multiline mode
-\&\f(CW\*(C`s\*(C'\fR single line mode
-.PP
 Example for a case insensitive search:
 .PP
 .Vb 1
-\& kubectl get pods \-A | tablizer "(?i)account"
+\& kubectl get pods \-A | tablizer "/account/i"
 .Ve
 .PP
-You can use the experimental fuzzy search feature by providing the
+If you use the \f(CW\*(C`!\*(C'\fR flag, then the regex match will be negated, that
+is, if a line in the input matches the given regex, but \f(CW\*(C`!\*(C'\fR is
+supplied, tablizer will \s-1NOT\s0 include it in the output.
+.PP
+For example, here we want to get all lines matching \*(L"foo\*(R" but not
+\&\*(L"bar\*(R":
+.PP
+.Vb 1
+\& cat table | tablizer foo \*(Aq/bar/!\*(Aq
+.Ve
+.PP
+This would match a line \*(L"foo zorro\*(R" but not \*(L"foo bar\*(R".
+.PP
+The flags can also be combined.
+.PP
+You can also use the experimental fuzzy search feature by providing the
 option \fB\-z\fR, in which case the pattern is regarded as a fuzzy search
 term, not a regexp.
 .PP
@@ -328,6 +343,12 @@ Fieldnames (== columns headers) are case insensitive.
 If you specify more than one filter, both filters have to match (\s-1AND\s0
 operation).
 .PP
+These field filters can also be negated:
+.PP
+.Vb 1
+\& fieldname!=regexp
+.Ve
+.PP
 If the option \fB\-v\fR is specified, the filtering is inverted.
 .SS "\s-1COLUMNS\s0"
 .IX Subsection "COLUMNS"

tablizer.pod: 54 lines changed

@@ -5,7 +5,7 @@ tablizer - Manipulate tabular output of other programs
 =head1 SYNOPSIS
 
     Usage:
-      tablizer [regex] [file, ...] [flags]
+      tablizer [regex,...] [file, ...] [flags]
 
     Operational Flags:
       -c, --columns string               Only show the speficied columns (separated by ,)
@@ -16,7 +16,7 @@ tablizer - Manipulate tabular output of other programs
       -s, --separator string             Custom field separator
       -k, --sort-by int|name             Sort by column (default: 1)
      -z, --fuzzy                        Use fuzzy search [experimental]
-      -F, --filter field=reg             Filter given field with regex, can be used multiple times
+      -F, --filter field[!]=reg          Filter given field with regex, can be used multiple times
       -T, --transpose-columns string     Transpose the speficied columns (separated by ,)
       -R, --regex-transposer /from/to/   Apply /search/replace/ regexp to fields given in -T
 
@@ -144,32 +144,44 @@ useful for the developer.
 
 =head2 PATTERNS AND FILTERING
 
-You can reduce the rows being displayed by using a regular expression
-pattern. The regexp is PCRE compatible, refer to the syntax cheat
-sheet here: L<https://github.com/google/re2/wiki/Syntax>. If you want
-to read a more comprehensive documentation about the topic and have
-perl installed you can read it with:
+You can reduce the rows being displayed by using one or more regular
+expression patterns. The regexp language being used is the one of
+GOLANG, refer to the syntax cheat sheet here:
+L<https://pkg.go.dev/regexp/syntax>.
+
+If you want to read a more comprehensive documentation about the
+topic and have perl installed you can read it with:
 
     perldoc perlre
 
-Or read it online: L<https://perldoc.perl.org/perlre>.
+Or read it online: L<https://perldoc.perl.org/perlre>. But please note
+that the GO regexp engine does NOT support all perl regex terms,
+especially look-ahead and look-behind.
 
-A note on modifiers: the regexp engine used in tablizer uses another
-modifier syntax:
+If you want to supply flags to a regex, then surround it with slashes
+and append the flag. The following flags are supported:
 
-    (?MODIFIER)
-
-The most important modifiers are:
-
-    C<i> ignore case
-    C<m> multiline mode
-    C<s> single line mode
+    i => case insensitive
+    ! => negative match
 
 Example for a case insensitive search:
 
-    kubectl get pods -A | tablizer "(?i)account"
+    kubectl get pods -A | tablizer "/account/i"
 
-You can use the experimental fuzzy search feature by providing the
+If you use the C<!> flag, then the regex match will be negated, that
+is, if a line in the input matches the given regex, but C<!> is
+supplied, tablizer will NOT include it in the output.
+
+For example, here we want to get all lines matching "foo" but not
+"bar":
+
+    cat table | tablizer foo '/bar/!'
+
+This would match a line "foo zorro" but not "foo bar".
+
+The flags can also be combined.
+
+You can also use the experimental fuzzy search feature by providing the
 option B<-z>, in which case the pattern is regarded as a fuzzy search
 term, not a regexp.
 
@@ -184,6 +196,10 @@ Fieldnames (== columns headers) are case insensitive.
 If you specify more than one filter, both filters have to match (AND
 operation).
 
+These field filters can also be negated:
+
+    fieldname!=regexp
+
 If the option B<-v> is specified, the filtering is inverted.
 