Merge remote-tracking branch 'origin/development'

2024-05-07 18:29:01 +02:00
22 changed files with 1013 additions and 543 deletions

View File

@@ -91,3 +91,11 @@ show-versions: buildlocal
goupdate:
go get -t -u=patch ./...
lint:
golangci-lint run
# keep til ireturn
lint-full:
golangci-lint run --enable-all --exclude-use-default --disable exhaustivestruct,exhaustruct,depguard,interfacer,deadcode,golint,structcheck,scopelint,varcheck,ifshort,maligned,nosnakecase,godot,funlen,gofumpt,cyclop,noctx,gochecknoglobals,paralleltest,forbidigo,gci,godox,goimports,ireturn,stylecheck,testpackage,mirror,nestif,revive,goerr113,gomnd
gocritic check -enableAll *.go

View File

@@ -70,6 +70,17 @@ NAME(1) READY(2) STATUS(3) RESTARTS(4) AGE(5)
repldepl-7bcd8d5b64-q2bf4 1/1 Running 1 (69m ago) 5h26m
```
Sometimes a filter regex is too broad and you wish to filter only on a
particular column. This is possible using `-F`:
```
% kubectl get pods | tablizer -n -Fname=2
NAME READY STATUS RESTARTS AGE
repldepl-7bcd8d5b64-q2bf4 1/1 Running 1 (69m ago) 5h26m
```
Here we filtered the `NAME` column for `2`, which would otherwise have
matched on all rows.
There are more output modes like org-mode (orgtbl) and markdown.
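For example, to render the same table as a Markdown or org-mode table
instead of the default ASCII layout (a sketch; the flags are taken from the
option list, the input is the same as above):
```
% kubectl get pods | tablizer -M
% kubectl get pods | tablizer -O
```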
## Demo

View File

@@ -1,5 +1,5 @@
/*
Copyright © 2022 Thomas von Dein
Copyright © 2022-2024 Thomas von Dein
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -22,6 +22,7 @@ import (
"log"
"os"
"regexp"
"strings"
"github.com/glycerine/zygomys/zygo"
"github.com/gookit/color"
@@ -29,15 +30,16 @@ import (
)
const DefaultSeparator string = `(\s\s+|\t)`
const Version string = "v1.1.0"
const Version string = "v1.2.0"
const MAXPARTS = 2
var DefaultLoadPath string = os.Getenv("HOME") + "/.config/tablizer/lisp"
var DefaultConfigfile string = os.Getenv("HOME") + "/.config/tablizer/config"
var DefaultLoadPath = os.Getenv("HOME") + "/.config/tablizer/lisp"
var DefaultConfigfile = os.Getenv("HOME") + "/.config/tablizer/config"
var VERSION string // maintained by -x
// public config, set via config file or using defaults
type Configuration struct {
type Settings struct {
FG string `hcl:"FG"`
BG string `hcl:"BG"`
HighlightFG string `hcl:"HighlightFG"`
@@ -88,7 +90,11 @@ type Config struct {
// config file, optional
Configfile string
Configuration Configuration
Settings Settings
// used for field filtering
Rawfilters []string
Filters map[string]*regexp.Regexp
}
// maps outputmode short flags to output mode, ie. -O => -o orgtbl
@@ -110,7 +116,7 @@ const (
Shell
Yaml
CSV
Ascii
ASCII
)
// various sort types
@@ -124,7 +130,7 @@ type Sortmode struct {
var ValidHooks []string
// default color schemes
func (c *Config) Colors() map[color.Level]map[string]color.Color {
func (conf *Config) Colors() map[color.Level]map[string]color.Color {
colors := map[color.Level]map[string]color.Color{
color.Level16: {
"bg": color.BgGreen, "fg": color.FgWhite,
@@ -142,89 +148,86 @@ func (c *Config) Colors() map[color.Level]map[string]color.Color {
},
}
if len(c.Configuration.BG) > 0 {
colors[color.Level16]["bg"] = ColorStringToBGColor(c.Configuration.BG)
colors[color.Level256]["bg"] = ColorStringToBGColor(c.Configuration.BG)
colors[color.LevelRgb]["bg"] = ColorStringToBGColor(c.Configuration.BG)
if len(conf.Settings.BG) > 0 {
colors[color.Level16]["bg"] = ColorStringToBGColor(conf.Settings.BG)
colors[color.Level256]["bg"] = ColorStringToBGColor(conf.Settings.BG)
colors[color.LevelRgb]["bg"] = ColorStringToBGColor(conf.Settings.BG)
}
if len(c.Configuration.FG) > 0 {
colors[color.Level16]["fg"] = ColorStringToColor(c.Configuration.FG)
colors[color.Level256]["fg"] = ColorStringToColor(c.Configuration.FG)
colors[color.LevelRgb]["fg"] = ColorStringToColor(c.Configuration.FG)
if len(conf.Settings.FG) > 0 {
colors[color.Level16]["fg"] = ColorStringToColor(conf.Settings.FG)
colors[color.Level256]["fg"] = ColorStringToColor(conf.Settings.FG)
colors[color.LevelRgb]["fg"] = ColorStringToColor(conf.Settings.FG)
}
if len(c.Configuration.HighlightBG) > 0 {
colors[color.Level16]["hlbg"] = ColorStringToBGColor(c.Configuration.HighlightBG)
colors[color.Level256]["hlbg"] = ColorStringToBGColor(c.Configuration.HighlightBG)
colors[color.LevelRgb]["hlbg"] = ColorStringToBGColor(c.Configuration.HighlightBG)
if len(conf.Settings.HighlightBG) > 0 {
colors[color.Level16]["hlbg"] = ColorStringToBGColor(conf.Settings.HighlightBG)
colors[color.Level256]["hlbg"] = ColorStringToBGColor(conf.Settings.HighlightBG)
colors[color.LevelRgb]["hlbg"] = ColorStringToBGColor(conf.Settings.HighlightBG)
}
if len(c.Configuration.HighlightFG) > 0 {
colors[color.Level16]["hlfg"] = ColorStringToColor(c.Configuration.HighlightFG)
colors[color.Level256]["hlfg"] = ColorStringToColor(c.Configuration.HighlightFG)
colors[color.LevelRgb]["hlfg"] = ColorStringToColor(c.Configuration.HighlightFG)
if len(conf.Settings.HighlightFG) > 0 {
colors[color.Level16]["hlfg"] = ColorStringToColor(conf.Settings.HighlightFG)
colors[color.Level256]["hlfg"] = ColorStringToColor(conf.Settings.HighlightFG)
colors[color.LevelRgb]["hlfg"] = ColorStringToColor(conf.Settings.HighlightFG)
}
if len(c.Configuration.NoHighlightBG) > 0 {
colors[color.Level16]["nohlbg"] = ColorStringToBGColor(c.Configuration.NoHighlightBG)
colors[color.Level256]["nohlbg"] = ColorStringToBGColor(c.Configuration.NoHighlightBG)
colors[color.LevelRgb]["nohlbg"] = ColorStringToBGColor(c.Configuration.NoHighlightBG)
if len(conf.Settings.NoHighlightBG) > 0 {
colors[color.Level16]["nohlbg"] = ColorStringToBGColor(conf.Settings.NoHighlightBG)
colors[color.Level256]["nohlbg"] = ColorStringToBGColor(conf.Settings.NoHighlightBG)
colors[color.LevelRgb]["nohlbg"] = ColorStringToBGColor(conf.Settings.NoHighlightBG)
}
if len(c.Configuration.NoHighlightFG) > 0 {
colors[color.Level16]["nohlfg"] = ColorStringToColor(c.Configuration.NoHighlightFG)
colors[color.Level256]["nohlfg"] = ColorStringToColor(c.Configuration.NoHighlightFG)
colors[color.LevelRgb]["nohlfg"] = ColorStringToColor(c.Configuration.NoHighlightFG)
if len(conf.Settings.NoHighlightFG) > 0 {
colors[color.Level16]["nohlfg"] = ColorStringToColor(conf.Settings.NoHighlightFG)
colors[color.Level256]["nohlfg"] = ColorStringToColor(conf.Settings.NoHighlightFG)
colors[color.LevelRgb]["nohlfg"] = ColorStringToColor(conf.Settings.NoHighlightFG)
}
if len(c.Configuration.HighlightHdrBG) > 0 {
colors[color.Level16]["hdrbg"] = ColorStringToBGColor(c.Configuration.HighlightHdrBG)
colors[color.Level256]["hdrbg"] = ColorStringToBGColor(c.Configuration.HighlightHdrBG)
colors[color.LevelRgb]["hdrbg"] = ColorStringToBGColor(c.Configuration.HighlightHdrBG)
if len(conf.Settings.HighlightHdrBG) > 0 {
colors[color.Level16]["hdrbg"] = ColorStringToBGColor(conf.Settings.HighlightHdrBG)
colors[color.Level256]["hdrbg"] = ColorStringToBGColor(conf.Settings.HighlightHdrBG)
colors[color.LevelRgb]["hdrbg"] = ColorStringToBGColor(conf.Settings.HighlightHdrBG)
}
if len(c.Configuration.HighlightHdrFG) > 0 {
colors[color.Level16]["hdrfg"] = ColorStringToColor(c.Configuration.HighlightHdrFG)
colors[color.Level256]["hdrfg"] = ColorStringToColor(c.Configuration.HighlightHdrFG)
colors[color.LevelRgb]["hdrfg"] = ColorStringToColor(c.Configuration.HighlightHdrFG)
if len(conf.Settings.HighlightHdrFG) > 0 {
colors[color.Level16]["hdrfg"] = ColorStringToColor(conf.Settings.HighlightHdrFG)
colors[color.Level256]["hdrfg"] = ColorStringToColor(conf.Settings.HighlightHdrFG)
colors[color.LevelRgb]["hdrfg"] = ColorStringToColor(conf.Settings.HighlightHdrFG)
}
return colors
}
// find supported color mode, modifies config based on constants
func (c *Config) DetermineColormode() {
func (conf *Config) DetermineColormode() {
if !isTerminal(os.Stdout) {
color.Disable()
} else {
level := color.TermColorLevel()
colors := c.Colors()
colors := conf.Colors()
c.ColorStyle = color.New(colors[level]["bg"], colors[level]["fg"])
c.HighlightStyle = color.New(colors[level]["hlbg"], colors[level]["hlfg"])
c.NoHighlightStyle = color.New(colors[level]["nohlbg"], colors[level]["nohlfg"])
c.HighlightHdrStyle = color.New(colors[level]["hdrbg"], colors[level]["hdrfg"])
conf.ColorStyle = color.New(colors[level]["bg"], colors[level]["fg"])
conf.HighlightStyle = color.New(colors[level]["hlbg"], colors[level]["hlfg"])
conf.NoHighlightStyle = color.New(colors[level]["nohlbg"], colors[level]["nohlfg"])
conf.HighlightHdrStyle = color.New(colors[level]["hdrbg"], colors[level]["hdrfg"])
}
}
// Return true if current terminal is interactive
func isTerminal(f *os.File) bool {
o, _ := f.Stat()
if (o.Mode() & os.ModeCharDevice) == os.ModeCharDevice {
return true
} else {
return false
}
return (o.Mode() & os.ModeCharDevice) == os.ModeCharDevice
}
// main program version
// generated version string, used by -v contains lib.Version on
//
// main branch, and lib.Version-$branch-$lastcommit-$date on
//
// development branch
func Getversion() string {
// main program version
// generated version string, used by -v contains lib.Version on
// main branch, and lib.Version-$branch-$lastcommit-$date on
// development branch
return fmt.Sprintf("This is tablizer version %s", VERSION)
}
@@ -256,66 +259,94 @@ func (conf *Config) PrepareModeFlags(flag Modeflag) {
case flag.C:
conf.OutputMode = CSV
default:
conf.OutputMode = Ascii
conf.OutputMode = ASCII
}
}
func (c *Config) CheckEnv() {
func (conf *Config) PrepareFilters() error {
conf.Filters = make(map[string]*regexp.Regexp, len(conf.Rawfilters))
for _, filter := range conf.Rawfilters {
parts := strings.Split(filter, "=")
if len(parts) != MAXPARTS {
return errors.New("filter field and value must be separated by =")
}
reg, err := regexp.Compile(parts[1])
if err != nil {
return fmt.Errorf("failed to compile filter regex for field %s: %w",
parts[0], err)
}
conf.Filters[strings.ToLower(parts[0])] = reg
}
return nil
}
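A hedged usage sketch (not part of this diff) of how raw -F values become the compiled Filters map; the example filter values and the matched strings are assumptions:
```
package main

import (
	"fmt"
	"log"

	"github.com/tlinden/tablizer/cfg"
)

func main() {
	// two raw filters as they would arrive from repeated -F flags
	conf := cfg.Config{Rawfilters: []string{"NAME=repl", "STATUS=Run"}}

	if err := conf.PrepareFilters(); err != nil {
		log.Fatalf("invalid filter: %s", err)
	}

	// keys are lowercased field names, values are compiled regexps
	fmt.Println(conf.Filters["name"].MatchString("repldepl-7bcd8d5b64-q2bf4")) // true
	fmt.Println(conf.Filters["status"].MatchString("Pending"))                // false
}
```
The tests in lib/filter_test.go further down exercise the same path via FilterByFields.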
func (conf *Config) CheckEnv() {
// check for environment vars, command line flags have precedence,
// NO_COLOR is being checked by the color module itself.
if !c.NoNumbering {
if !conf.NoNumbering {
_, set := os.LookupEnv("T_NO_HEADER_NUMBERING")
if set {
c.NoNumbering = true
conf.NoNumbering = true
}
}
if len(c.Columns) == 0 {
if len(conf.Columns) == 0 {
cols := os.Getenv("T_COLUMNS")
if len(cols) > 1 {
c.Columns = cols
conf.Columns = cols
}
}
}
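A hedged shell sketch of these environment fallbacks (the variable names come from the code above; the input file is hypothetical):
```
T_NO_HEADER_NUMBERING=1 T_COLUMNS=1,3 tablizer data.txt
```
As the comment notes, command line flags still take precedence over the variables.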
func (c *Config) ApplyDefaults() {
func (conf *Config) ApplyDefaults() {
// mode specific defaults
if c.OutputMode == Yaml || c.OutputMode == CSV {
c.NoNumbering = true
if conf.OutputMode == Yaml || conf.OutputMode == CSV {
conf.NoNumbering = true
}
ValidHooks = []string{"filter", "process", "transpose", "append"}
}
func (c *Config) PreparePattern(pattern string) error {
func (conf *Config) PreparePattern(pattern string) error {
PatternR, err := regexp.Compile(pattern)
if err != nil {
return errors.Unwrap(fmt.Errorf("Regexp pattern %s is invalid: %w", c.Pattern, err))
return fmt.Errorf("regexp pattern %s is invalid: %w", conf.Pattern, err)
}
c.PatternR = PatternR
c.Pattern = pattern
conf.PatternR = PatternR
conf.Pattern = pattern
return nil
}
func (c *Config) ParseConfigfile() error {
if path, err := os.Stat(c.Configfile); !os.IsNotExist(err) {
if !path.IsDir() {
configstring, err := os.ReadFile(path.Name())
if err != nil {
return err
}
// Parse config file. Ignore if the file doesn't exist but return an
// error if it exists but fails to read or parse
func (conf *Config) ParseConfigfile() error {
path, err := os.Stat(conf.Configfile)
err = hclsimple.Decode(
path.Name(), []byte(configstring),
nil, &c.Configuration,
)
if err != nil {
log.Fatalf("Failed to load configuration: %s", err)
}
}
if os.IsNotExist(err) || path.IsDir() {
// ignore non-existent or dirs
return nil
}
configstring, err := os.ReadFile(path.Name())
if err != nil {
return fmt.Errorf("failed to read config file %s: %w", path.Name(), err)
}
err = hclsimple.Decode(
path.Name(),
configstring,
nil,
&conf.Settings)
if err != nil {
return fmt.Errorf("failed to load configuration file %s: %w",
path.Name(), err)
}
return nil
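A hedged sketch of a config file this function could load (HCL attribute syntax; the attribute names follow the hcl struct tags of the Settings struct shown above, the color values are assumptions and depend on what ColorStringToColor/ColorStringToBGColor accept):
```
FG          = "white"
BG          = "blue"
HighlightFG = "black"
HighlightBG = "cyan"
```
Depending on the remaining struct tags (not shown in this hunk), further attributes such as NoHighlightFG or HighlightHdrBG may be required as well.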

View File

@@ -34,18 +34,18 @@ func TestPrepareModeFlags(t *testing.T) {
{Modeflag{O: true}, Orgtbl},
{Modeflag{Y: true}, Yaml},
{Modeflag{M: true}, Markdown},
{Modeflag{}, Ascii},
{Modeflag{}, ASCII},
}
// FIXME: use a map for easier printing
for _, tt := range tests {
testname := fmt.Sprintf("PrepareModeFlags-expect-%d", tt.expect)
for _, testdata := range tests {
testname := fmt.Sprintf("PrepareModeFlags-expect-%d", testdata.expect)
t.Run(testname, func(t *testing.T) {
c := Config{}
conf := Config{}
c.PrepareModeFlags(tt.flag)
if c.OutputMode != tt.expect {
t.Errorf("got: %d, expect: %d", c.OutputMode, tt.expect)
conf.PrepareModeFlags(testdata.flag)
if conf.OutputMode != testdata.expect {
t.Errorf("got: %d, expect: %d", conf.OutputMode, testdata.expect)
}
})
}
@@ -63,15 +63,15 @@ func TestPrepareSortFlags(t *testing.T) {
{Sortmode{}, "string"},
}
for _, tt := range tests {
testname := fmt.Sprintf("PrepareSortFlags-expect-%s", tt.expect)
for _, testdata := range tests {
testname := fmt.Sprintf("PrepareSortFlags-expect-%s", testdata.expect)
t.Run(testname, func(t *testing.T) {
c := Config{}
conf := Config{}
c.PrepareSortFlags(tt.flag)
conf.PrepareSortFlags(testdata.flag)
if c.SortMode != tt.expect {
t.Errorf("got: %s, expect: %s", c.SortMode, tt.expect)
if conf.SortMode != testdata.expect {
t.Errorf("got: %s, expect: %s", conf.SortMode, testdata.expect)
}
})
}
@@ -86,15 +86,16 @@ func TestPreparePattern(t *testing.T) {
{"[a-z", true},
}
for _, tt := range tests {
testname := fmt.Sprintf("PreparePattern-pattern-%s-wanterr-%t", tt.pattern, tt.wanterr)
for _, testdata := range tests {
testname := fmt.Sprintf("PreparePattern-pattern-%s-wanterr-%t",
testdata.pattern, testdata.wanterr)
t.Run(testname, func(t *testing.T) {
c := Config{}
conf := Config{}
err := c.PreparePattern(tt.pattern)
err := conf.PreparePattern(testdata.pattern)
if err != nil {
if !tt.wanterr {
if !testdata.wanterr {
t.Errorf("PreparePattern returned error: %s", err)
}
}

View File

@@ -1,5 +1,5 @@
/*
Copyright © 2022 Thomas von Dein
Copyright © 2022-2024 Thomas von Dein
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -33,11 +33,12 @@ import (
func man() {
man := exec.Command("less", "-")
var b bytes.Buffer
b.Write([]byte(manpage))
var buffer bytes.Buffer
buffer.Write([]byte(manpage))
man.Stdout = os.Stdout
man.Stdin = &b
man.Stdin = &buffer
man.Stderr = os.Stderr
err := man.Run()
@@ -58,7 +59,7 @@ func completion(cmd *cobra.Command, mode string) error {
case "powershell":
return cmd.Root().GenPowerShellCompletionWithDesc(os.Stdout)
default:
return errors.New("Invalid shell parameter! Valid ones: bash|zsh|fish|powershell")
return errors.New("invalid shell parameter! Valid ones: bash|zsh|fish|powershell")
}
}
@@ -79,11 +80,13 @@ func Execute() {
RunE: func(cmd *cobra.Command, args []string) error {
if ShowVersion {
fmt.Println(cfg.Getversion())
return nil
}
if ShowManual {
man()
return nil
}
@@ -100,6 +103,11 @@ func Execute() {
conf.CheckEnv()
conf.PrepareModeFlags(modeflag)
conf.PrepareSortFlags(sortmode)
if err = conf.PrepareFilters(); err != nil {
return err
}
conf.DetermineColormode()
conf.ApplyDefaults()
@@ -115,41 +123,75 @@ func Execute() {
}
// options
rootCmd.PersistentFlags().BoolVarP(&conf.Debug, "debug", "d", false, "Enable debugging")
rootCmd.PersistentFlags().BoolVarP(&conf.NoNumbering, "no-numbering", "n", false, "Disable header numbering")
rootCmd.PersistentFlags().BoolVarP(&conf.NoHeaders, "no-headers", "H", false, "Disable header display")
rootCmd.PersistentFlags().BoolVarP(&conf.NoColor, "no-color", "N", false, "Disable pattern highlighting")
rootCmd.PersistentFlags().BoolVarP(&ShowVersion, "version", "V", false, "Print program version")
rootCmd.PersistentFlags().BoolVarP(&conf.InvertMatch, "invert-match", "v", false, "select non-matching rows")
rootCmd.PersistentFlags().BoolVarP(&ShowManual, "man", "m", false, "Display manual page")
rootCmd.PersistentFlags().BoolVarP(&conf.UseFuzzySearch, "fuzzy", "z", false, "Use fuzzy searching")
rootCmd.PersistentFlags().BoolVarP(&conf.UseHighlight, "highlight-lines", "L", false, "Use alternating background colors")
rootCmd.PersistentFlags().StringVarP(&ShowCompletion, "completion", "", "", "Display completion code")
rootCmd.PersistentFlags().StringVarP(&conf.Separator, "separator", "s", cfg.DefaultSeparator, "Custom field separator")
rootCmd.PersistentFlags().StringVarP(&conf.Columns, "columns", "c", "", "Only show the speficied columns (separated by ,)")
rootCmd.PersistentFlags().BoolVarP(&conf.Debug, "debug", "d", false,
"Enable debugging")
rootCmd.PersistentFlags().BoolVarP(&conf.NoNumbering, "no-numbering", "n", false,
"Disable header numbering")
rootCmd.PersistentFlags().BoolVarP(&conf.NoHeaders, "no-headers", "H", false,
"Disable header display")
rootCmd.PersistentFlags().BoolVarP(&conf.NoColor, "no-color", "N", false,
"Disable pattern highlighting")
rootCmd.PersistentFlags().BoolVarP(&ShowVersion, "version", "V", false,
"Print program version")
rootCmd.PersistentFlags().BoolVarP(&conf.InvertMatch, "invert-match", "v", false,
"select non-matching rows")
rootCmd.PersistentFlags().BoolVarP(&ShowManual, "man", "m", false,
"Display manual page")
rootCmd.PersistentFlags().BoolVarP(&conf.UseFuzzySearch, "fuzzy", "z", false,
"Use fuzzy searching")
rootCmd.PersistentFlags().BoolVarP(&conf.UseHighlight, "highlight-lines", "L", false,
"Use alternating background colors")
rootCmd.PersistentFlags().StringVarP(&ShowCompletion, "completion", "", "",
"Display completion code")
rootCmd.PersistentFlags().StringVarP(&conf.Separator, "separator", "s", cfg.DefaultSeparator,
"Custom field separator")
rootCmd.PersistentFlags().StringVarP(&conf.Columns, "columns", "c", "",
"Only show the speficied columns (separated by ,)")
// sort options
rootCmd.PersistentFlags().IntVarP(&conf.SortByColumn, "sort-by", "k", 0, "Sort by column (default: 1)")
rootCmd.PersistentFlags().IntVarP(&conf.SortByColumn, "sort-by", "k", 0,
"Sort by column (default: 1)")
// sort mode, only 1 allowed
rootCmd.PersistentFlags().BoolVarP(&conf.SortDescending, "sort-desc", "D", false, "Sort in descending order (default: ascending)")
rootCmd.PersistentFlags().BoolVarP(&sortmode.Numeric, "sort-numeric", "i", false, "sort according to string numerical value")
rootCmd.PersistentFlags().BoolVarP(&sortmode.Time, "sort-time", "t", false, "sort according to time string")
rootCmd.PersistentFlags().BoolVarP(&sortmode.Age, "sort-age", "a", false, "sort according to age (duration) string")
rootCmd.MarkFlagsMutuallyExclusive("sort-numeric", "sort-time", "sort-age")
rootCmd.PersistentFlags().BoolVarP(&conf.SortDescending, "sort-desc", "D", false,
"Sort in descending order (default: ascending)")
rootCmd.PersistentFlags().BoolVarP(&sortmode.Numeric, "sort-numeric", "i", false,
"sort according to string numerical value")
rootCmd.PersistentFlags().BoolVarP(&sortmode.Time, "sort-time", "t", false,
"sort according to time string")
rootCmd.PersistentFlags().BoolVarP(&sortmode.Age, "sort-age", "a", false,
"sort according to age (duration) string")
rootCmd.MarkFlagsMutuallyExclusive("sort-numeric", "sort-time",
"sort-age")
// output flags, only 1 allowed
rootCmd.PersistentFlags().BoolVarP(&modeflag.X, "extended", "X", false, "Enable extended output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.M, "markdown", "M", false, "Enable markdown table output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.O, "orgtbl", "O", false, "Enable org-mode table output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.S, "shell", "S", false, "Enable shell mode output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.Y, "yaml", "Y", false, "Enable yaml output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.C, "csv", "C", false, "Enable CSV output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.A, "ascii", "A", false, "Enable ASCII output (default)")
rootCmd.MarkFlagsMutuallyExclusive("extended", "markdown", "orgtbl", "shell", "yaml", "csv")
rootCmd.PersistentFlags().BoolVarP(&modeflag.X, "extended", "X", false,
"Enable extended output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.M, "markdown", "M", false,
"Enable markdown table output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.O, "orgtbl", "O", false,
"Enable org-mode table output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.S, "shell", "S", false,
"Enable shell mode output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.Y, "yaml", "Y", false,
"Enable yaml output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.C, "csv", "C", false,
"Enable CSV output")
rootCmd.PersistentFlags().BoolVarP(&modeflag.A, "ascii", "A", false,
"Enable ASCII output (default)")
rootCmd.MarkFlagsMutuallyExclusive("extended", "markdown", "orgtbl",
"shell", "yaml", "csv")
// lisp options
rootCmd.PersistentFlags().StringVarP(&conf.LispLoadPath, "load-path", "l", cfg.DefaultLoadPath, "Load path for lisp plugins (expects *.zy files)")
rootCmd.PersistentFlags().StringVarP(&conf.LispLoadPath, "load-path", "l", cfg.DefaultLoadPath,
"Load path for lisp plugins (expects *.zy files)")
// config file
rootCmd.PersistentFlags().StringVarP(&conf.Configfile, "config", "f", cfg.DefaultConfigfile,
"config file (default: ~/.config/tablizer/config)")
// filters
rootCmd.PersistentFlags().StringArrayVarP(&conf.Rawfilters, "filter", "F", nil, "Filter by field (field=regexp)")
// config file
rootCmd.PersistentFlags().StringVarP(&conf.Configfile, "config", "f", cfg.DefaultConfigfile, "config file (default: ~/.config/tablizer/config)")

View File

@@ -16,7 +16,8 @@ SYNOPSIS
-H, --no-headers Disable headers display
-s, --separator string Custom field separator
-k, --sort-by int Sort by column (default: 1)
-z, --fuzzy Use fuzzy seach [experimental]
-z, --fuzzy Use fuzzy search [experimental]
-F, --filter field=reg Filter given field with regex, can be used multiple times
Output Flags (mutually exclusive):
-X, --extended Enable extended output
@@ -117,7 +118,7 @@ DESCRIPTION
Finally the -d option enables debugging output which is mostly useful
for the developer.
PATTERNS
PATTERNS AND FILTERING
You can reduce the rows being displayed by using a regular expression
pattern. The regexp is PCRE compatible, refer to the syntax cheat sheet
here: <https://github.com/google/re2/wiki/Syntax>. If you want to read a
@@ -141,9 +142,22 @@ DESCRIPTION
kubectl get pods -A | tablizer "(?i)account"
You can use the experimental fuzzy seach feature by providing the option
-z, in which case the pattern is regarded as a fuzzy search term, not a
regexp.
You can use the experimental fuzzy search feature by providing the
option -z, in which case the pattern is regarded as a fuzzy search term,
not a regexp.
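For example (assuming the same kubectl input as above), the following
fuzzy pattern would match rows containing account:
kubectl get pods -A | tablizer -z acct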
Sometimes you want to filter by one or more columns. You can do that
using the -F option. The option can be specified multiple times and has
the following format:
fieldname=regexp
Fieldnames (i.e. the column headers) are case-insensitive.
If you specify more than one filter, all of them have to match (AND
operation).
If the option -v is specified, the filtering is inverted.
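For example (assuming the kubectl output shown earlier), keep only rows
whose NAME matches repl and whose STATUS matches Running:
kubectl get pods | tablizer -F name=repl -F status=Running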
COLUMNS
The parameter -c can be used to specify which columns to display. By
@@ -298,7 +312,7 @@ LICENSE
This software is licensed under the GNU GENERAL PUBLIC LICENSE version
3.
Copyright (c) 2023 by Thomas von Dein
Copyright (c) 2022-2024 by Thomas von Dein
This software uses the following GO modules:
@@ -339,7 +353,8 @@ Operational Flags:
-H, --no-headers Disable headers display
-s, --separator string Custom field separator
-k, --sort-by int Sort by column (default: 1)
-z, --fuzzy Use fuzzy seach [experimental]
-z, --fuzzy Use fuzzy search [experimental]
-F, --filter field=reg Filter given field with regex, can be used multiple times
Output Flags (mutually exclusive):
-X, --extended Enable extended output

View File

@@ -1,5 +1,5 @@
/*
Copyright © 2022 Thomas von Dein
Copyright © 2022-2024 Thomas von Dein
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -24,3 +24,13 @@ type Tabdata struct {
headers []string // [ "ID", "NAME", ...]
entries [][]string
}
func (data *Tabdata) CloneEmpty() Tabdata {
newdata := Tabdata{
maxwidthHeader: data.maxwidthHeader,
columns: data.columns,
headers: data.headers,
}
return newdata
}

lib/filter.go (new file, +130 lines)
View File

@@ -0,0 +1,130 @@
/*
Copyright © 2022-2024 Thomas von Dein
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package lib
import (
"bufio"
"fmt"
"io"
"strings"
"github.com/lithammer/fuzzysearch/fuzzy"
"github.com/tlinden/tablizer/cfg"
)
/*
* [!]Match a line, use fuzzy search for normal pattern strings and
* regexp otherwise.
*/
func matchPattern(conf cfg.Config, line string) bool {
if conf.UseFuzzySearch {
return fuzzy.MatchFold(conf.Pattern, line)
}
return conf.PatternR.MatchString(line)
}
/*
* Filter parsed data by fields. The filter is positive: a row is kept
* only if every specified field filter matches it (AND semantics),
* otherwise it is excluded. With -v (InvertMatch) the decision is inverted.
*/
func FilterByFields(conf cfg.Config, data Tabdata) (Tabdata, bool, error) {
if len(conf.Filters) == 0 {
// no filters, no checking
return Tabdata{}, false, nil
}
newdata := data.CloneEmpty()
for _, row := range data.entries {
keep := true
for idx, header := range data.headers {
if !Exists(conf.Filters, strings.ToLower(header)) {
// do not filter by unspecified field
continue
}
if !conf.Filters[strings.ToLower(header)].MatchString(row[idx]) {
// there IS a filter, but it doesn't match
keep = false
break
}
}
if keep == !conf.InvertMatch {
// also apply -v
newdata.entries = append(newdata.entries, row)
}
}
return newdata, true, nil
}
/* generic map.Exists(key) */
func Exists[K comparable, V any](m map[K]V, v K) bool {
if _, ok := m[v]; ok {
return true
}
return false
}
func FilterByPattern(conf cfg.Config, input io.Reader) (io.Reader, error) {
if conf.Pattern == "" {
return input, nil
}
scanner := bufio.NewScanner(input)
lines := []string{}
hadFirst := false
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if hadFirst {
// don't match 1st line, it's the header
if conf.Pattern != "" && matchPattern(conf, line) == conf.InvertMatch {
// by default -v is false, so if a line does NOT
// match the pattern, we will ignore it. However,
// if the user specified -v, the matching is inverted,
// so we ignore all lines, which DO match.
continue
}
// apply user defined lisp filters, if any
accept, err := RunFilterHooks(conf, line)
if err != nil {
return input, fmt.Errorf("failed to apply filter hook: %w", err)
}
if !accept {
// IF there are filter hook[s] and IF one of them
// returns false on the current line, reject it
continue
}
}
lines = append(lines, line)
hadFirst = true
}
return strings.NewReader(strings.Join(lines, "\n")), nil
}

lib/filter_test.go (new file, +162 lines)
View File

@@ -0,0 +1,162 @@
/*
Copyright © 2024 Thomas von Dein
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package lib
import (
"fmt"
"reflect"
"testing"
"github.com/tlinden/tablizer/cfg"
)
func TestMatchPattern(t *testing.T) {
var input = []struct {
name string
fuzzy bool
pattern string
line string
}{
{
name: "normal",
pattern: "haus",
line: "hausparty",
},
{
name: "fuzzy",
pattern: "hpt",
line: "haus-party-termin",
fuzzy: true,
},
}
for _, inputdata := range input {
testname := fmt.Sprintf("match-pattern-%s", inputdata.name)
t.Run(testname, func(t *testing.T) {
conf := cfg.Config{}
if inputdata.fuzzy {
conf.UseFuzzySearch = true
}
err := conf.PreparePattern(inputdata.pattern)
if err != nil {
t.Errorf("PreparePattern returned error: %s", err)
}
if !matchPattern(conf, inputdata.line) {
t.Errorf("matchPattern() did not match\nExp: true\nGot: false\n")
}
})
}
}
func TestFilterByFields(t *testing.T) {
data := Tabdata{
headers: []string{
"ONE", "TWO", "THREE",
},
entries: [][]string{
{"asd", "igig", "cxxxncnc"},
{"19191", "EDD 1", "x"},
{"8d8", "AN 1", "y"},
},
}
var input = []struct {
name string
filter []string
expect Tabdata
invert bool
}{
{
name: "one-field",
filter: []string{"one=19"},
expect: Tabdata{
headers: []string{
"ONE", "TWO", "THREE",
},
entries: [][]string{
{"19191", "EDD 1", "x"},
},
},
},
{
name: "one-field-inverted",
filter: []string{"one=19"},
invert: true,
expect: Tabdata{
headers: []string{
"ONE", "TWO", "THREE",
},
entries: [][]string{
{"asd", "igig", "cxxxncnc"},
{"8d8", "AN 1", "y"},
},
},
},
{
name: "many-fields",
filter: []string{"one=19", "two=DD"},
expect: Tabdata{
headers: []string{
"ONE", "TWO", "THREE",
},
entries: [][]string{
{"19191", "EDD 1", "x"},
},
},
},
{
name: "many-fields-inverted",
filter: []string{"one=19", "two=DD"},
invert: true,
expect: Tabdata{
headers: []string{
"ONE", "TWO", "THREE",
},
entries: [][]string{
{"asd", "igig", "cxxxncnc"},
{"8d8", "AN 1", "y"},
},
},
},
}
for _, inputdata := range input {
testname := fmt.Sprintf("filter-by-fields-%s", inputdata.name)
t.Run(testname, func(t *testing.T) {
conf := cfg.Config{Rawfilters: inputdata.filter, InvertMatch: inputdata.invert}
err := conf.PrepareFilters()
if err != nil {
t.Errorf("PrepareFilters returned error: %s", err)
}
data, _, _ := FilterByFields(conf, data)
if !reflect.DeepEqual(data, inputdata.expect) {
t.Errorf("Filtered data does not match expected data:\ngot: %+v\nexp: %+v", data, inputdata.expect)
}
})
}
}

View File

@@ -36,79 +36,87 @@ func contains(s []int, e int) bool {
return true
}
}
return false
}
// parse columns list given with -c; modifies config.UseColumns based
// on an optional regexp, if one is given
func PrepareColumns(c *cfg.Config, data *Tabdata) error {
if len(c.Columns) > 0 {
for _, use := range strings.Split(c.Columns, ",") {
if len(use) == 0 {
msg := fmt.Sprintf("Could not parse columns list %s: empty column", c.Columns)
func PrepareColumns(conf *cfg.Config, data *Tabdata) error {
if conf.Columns == "" {
return nil
}
for _, use := range strings.Split(conf.Columns, ",") {
if len(use) == 0 {
return fmt.Errorf("could not parse columns list %s: empty column", conf.Columns)
}
usenum, err := strconv.Atoi(use)
if err != nil {
// might be a regexp
colPattern, err := regexp.Compile(use)
if err != nil {
msg := fmt.Sprintf("Could not parse columns list %s: %v", conf.Columns, err)
return errors.New(msg)
}
usenum, err := strconv.Atoi(use)
if err != nil {
// might be a regexp
colPattern, err := regexp.Compile(use)
if err != nil {
msg := fmt.Sprintf("Could not parse columns list %s: %v", c.Columns, err)
return errors.New(msg)
// find matching header fields
for i, head := range data.headers {
if colPattern.MatchString(head) {
conf.UseColumns = append(conf.UseColumns, i+1)
}
// find matching header fields
for i, head := range data.headers {
if colPattern.MatchString(head) {
c.UseColumns = append(c.UseColumns, i+1)
}
}
} else {
// we digress from go best practises here, because if
// a colum spec is not a number, we process them above
// inside the err handler for atoi(). so only add the
// number, if it's really just a number.
c.UseColumns = append(c.UseColumns, usenum)
}
} else {
// we digress from go best practices here, because if
// a column spec is not a number, we process them above
// inside the err handler for atoi(). so only add the
// number, if it's really just a number.
conf.UseColumns = append(conf.UseColumns, usenum)
}
// deduplicate: put all values into a map (value gets map key)
// thereby removing duplicates, extract keys into new slice
// and sort it
imap := make(map[int]int, len(c.UseColumns))
for _, i := range c.UseColumns {
imap[i] = 0
}
c.UseColumns = nil
for k := range imap {
c.UseColumns = append(c.UseColumns, k)
}
sort.Ints(c.UseColumns)
}
// deduplicate: put all values into a map (value gets map key)
// thereby removing duplicates, extract keys into new slice
// and sort it
imap := make(map[int]int, len(conf.UseColumns))
for _, i := range conf.UseColumns {
imap[i] = 0
}
conf.UseColumns = nil
for k := range imap {
conf.UseColumns = append(conf.UseColumns, k)
}
sort.Ints(conf.UseColumns)
return nil
}
// prepare headers: add numbers to headers
func numberizeAndReduceHeaders(c cfg.Config, data *Tabdata) {
func numberizeAndReduceHeaders(conf cfg.Config, data *Tabdata) {
numberedHeaders := []string{}
maxwidth := 0 // start from scratch, so we only look at displayed column widths
for i, head := range data.headers {
headlen := 0
if len(c.Columns) > 0 {
for idx, head := range data.headers {
var headlen int
if len(conf.Columns) > 0 {
// -c specified
if !contains(c.UseColumns, i+1) {
if !contains(conf.UseColumns, idx+1) {
// ignore this one
continue
}
}
if c.NoNumbering {
if conf.NoNumbering {
numberedHeaders = append(numberedHeaders, head)
headlen = len(head)
} else {
numhead := fmt.Sprintf("%s(%d)", head, i+1)
numhead := fmt.Sprintf("%s(%d)", head, idx+1)
headlen = len(numhead)
numberedHeaders = append(numberedHeaders, numhead)
}
@@ -117,44 +125,54 @@ func numberizeAndReduceHeaders(c cfg.Config, data *Tabdata) {
maxwidth = headlen
}
}
data.headers = numberedHeaders
if data.maxwidthHeader != maxwidth && maxwidth > 0 {
data.maxwidthHeader = maxwidth
}
}
// exclude columns, if any
func reduceColumns(c cfg.Config, data *Tabdata) {
if len(c.Columns) > 0 {
func reduceColumns(conf cfg.Config, data *Tabdata) {
if len(conf.Columns) > 0 {
reducedEntries := [][]string{}
var reducedEntry []string
for _, entry := range data.entries {
reducedEntry = nil
for i, value := range entry {
if !contains(c.UseColumns, i+1) {
if !contains(conf.UseColumns, i+1) {
continue
}
reducedEntry = append(reducedEntry, value)
}
reducedEntries = append(reducedEntries, reducedEntry)
}
data.entries = reducedEntries
}
}
// FIXME: remove this when we only use Tablewriter and strip in ParseFile()!
func trimRow(row []string) []string {
// FIXME: remove this when we only use Tablewriter and strip in ParseFile()!
var fixedrow []string
for _, cell := range row {
fixedrow = append(fixedrow, strings.TrimSpace(cell))
var fixedrow = make([]string, len(row))
for idx, cell := range row {
fixedrow[idx] = strings.TrimSpace(cell)
}
return fixedrow
}
func colorizeData(c cfg.Config, output string) string {
if c.UseHighlight && color.IsConsole(os.Stdout) {
// FIXME: refactor this beast!
func colorizeData(conf cfg.Config, output string) string {
switch {
case conf.UseHighlight && color.IsConsole(os.Stdout):
highlight := true
colorized := ""
first := true
@@ -167,30 +185,34 @@ func colorizeData(c cfg.Config, output string) string {
// in pprint mode. This doesn't matter as long as
// we don't use colorization. But with colors the
// missing spaces can be seen.
if c.OutputMode == cfg.Ascii {
line = line + " "
if conf.OutputMode == cfg.ASCII {
line += " "
}
line = c.HighlightHdrStyle.Sprint(line)
line = conf.HighlightHdrStyle.Sprint(line)
first = false
} else {
line = c.HighlightStyle.Sprint(line)
line = conf.HighlightStyle.Sprint(line)
}
} else {
line = c.NoHighlightStyle.Sprint(line)
line = conf.NoHighlightStyle.Sprint(line)
}
highlight = !highlight
colorized += line + "\n"
}
return colorized
} else if len(c.Pattern) > 0 && !c.NoColor && color.IsConsole(os.Stdout) {
r := regexp.MustCompile("(" + c.Pattern + ")")
case len(conf.Pattern) > 0 && !conf.NoColor && color.IsConsole(os.Stdout):
r := regexp.MustCompile("(" + conf.Pattern + ")")
return r.ReplaceAllStringFunc(output, func(in string) string {
return c.ColorStyle.Sprint(in)
return conf.ColorStyle.Sprint(in)
})
} else {
default:
return output
}
}

View File

@@ -19,9 +19,10 @@ package lib
import (
"fmt"
"github.com/tlinden/tablizer/cfg"
"reflect"
"testing"
"github.com/tlinden/tablizer/cfg"
)
func TestContains(t *testing.T) {
@@ -71,18 +72,18 @@ func TestPrepareColumns(t *testing.T) {
{"[a-z,4,5", []int{4, 5}, true}, // invalid regexp
}
for _, tt := range tests {
testname := fmt.Sprintf("PrepareColumns-%s-%t", tt.input, tt.wanterror)
for _, testdata := range tests {
testname := fmt.Sprintf("PrepareColumns-%s-%t", testdata.input, testdata.wanterror)
t.Run(testname, func(t *testing.T) {
c := cfg.Config{Columns: tt.input}
err := PrepareColumns(&c, &data)
conf := cfg.Config{Columns: testdata.input}
err := PrepareColumns(&conf, &data)
if err != nil {
if !tt.wanterror {
if !testdata.wanterror {
t.Errorf("got error: %v", err)
}
} else {
if !reflect.DeepEqual(c.UseColumns, tt.exp) {
t.Errorf("got: %v, expected: %v", c.UseColumns, tt.exp)
if !reflect.DeepEqual(conf.UseColumns, testdata.exp) {
t.Errorf("got: %v, expected: %v", conf.UseColumns, testdata.exp)
}
}
})
@@ -114,14 +115,16 @@ func TestReduceColumns(t *testing.T) {
input := [][]string{{"a", "b", "c"}}
for _, tt := range tests {
testname := fmt.Sprintf("reduce-columns-by-%+v", tt.columns)
for _, testdata := range tests {
testname := fmt.Sprintf("reduce-columns-by-%+v", testdata.columns)
t.Run(testname, func(t *testing.T) {
c := cfg.Config{Columns: "x", UseColumns: tt.columns}
c := cfg.Config{Columns: "x", UseColumns: testdata.columns}
data := Tabdata{entries: input}
reduceColumns(c, &data)
if !reflect.DeepEqual(data.entries, tt.expect) {
t.Errorf("reduceColumns returned invalid data:\ngot: %+v\nexp: %+v", data.entries, tt.expect)
if !reflect.DeepEqual(data.entries, testdata.expect) {
t.Errorf("reduceColumns returned invalid data:\ngot: %+v\nexp: %+v",
data.entries, testdata.expect)
}
})
}
@@ -142,15 +145,17 @@ func TestNumberizeHeaders(t *testing.T) {
{[]string{"ONE", "TWO"}, []int{1, 2}, true},
}
for _, tt := range tests {
testname := fmt.Sprintf("numberize-headers-columns-%+v-nonum-%t", tt.columns, tt.nonum)
for _, testdata := range tests {
testname := fmt.Sprintf("numberize-headers-columns-%+v-nonum-%t",
testdata.columns, testdata.nonum)
t.Run(testname, func(t *testing.T) {
c := cfg.Config{Columns: "x", UseColumns: tt.columns, NoNumbering: tt.nonum}
conf := cfg.Config{Columns: "x", UseColumns: testdata.columns, NoNumbering: testdata.nonum}
usedata := data
numberizeAndReduceHeaders(c, &usedata)
if !reflect.DeepEqual(usedata.headers, tt.expect) {
numberizeAndReduceHeaders(conf, &usedata)
if !reflect.DeepEqual(usedata.headers, testdata.expect) {
t.Errorf("numberizeAndReduceHeaders returned invalid data:\ngot: %+v\nexp: %+v",
usedata.headers, tt.expect)
usedata.headers, testdata.expect)
}
})
}

View File

@@ -19,91 +19,96 @@ package lib
import (
"errors"
"github.com/tlinden/tablizer/cfg"
"fmt"
"io"
"os"
"github.com/tlinden/tablizer/cfg"
)
func ProcessFiles(c *cfg.Config, args []string) error {
fds, pattern, err := determineIO(c, args)
const RWRR = 0755
func ProcessFiles(conf *cfg.Config, args []string) error {
fds, pattern, err := determineIO(conf, args)
if err != nil {
return err
}
if err := c.PreparePattern(pattern); err != nil {
if err := conf.PreparePattern(pattern); err != nil {
return err
}
for _, fd := range fds {
data, err := Parse(*c, fd)
data, err := Parse(*conf, fd)
if err != nil {
return err
}
err = PrepareColumns(c, &data)
err = PrepareColumns(conf, &data)
if err != nil {
return err
}
printData(os.Stdout, *c, &data)
printData(os.Stdout, *conf, &data)
}
return nil
}
func determineIO(c *cfg.Config, args []string) ([]io.Reader, string, error) {
func determineIO(conf *cfg.Config, args []string) ([]io.Reader, string, error) {
var filehandles []io.Reader
var pattern string
var fds []io.Reader
var haveio bool
stat, _ := os.Stdin.Stat()
if (stat.Mode() & os.ModeCharDevice) == 0 {
// we're reading from STDIN, which takes precedence over file args
fds = append(fds, os.Stdin)
filehandles = append(filehandles, os.Stdin)
if len(args) > 0 {
// ignore any args > 1
pattern = args[0]
c.Pattern = args[0] // used for colorization by printData()
conf.Pattern = args[0] // used for colorization by printData()
}
haveio = true
} else {
if len(args) > 0 {
// threre were args left, take a look
if args[0] == "-" {
// in traditional unix programs a dash denotes STDIN (forced)
fds = append(fds, os.Stdin)
haveio = true
} else {
if _, err := os.Stat(args[0]); err != nil {
// first one is not a file, consider it as regexp and
// shift arg list
pattern = args[0]
c.Pattern = args[0] // used for colorization by printData()
args = args[1:]
}
} else if len(args) > 0 {
// there were args left, take a look
if args[0] == "-" {
// in traditional unix programs a dash denotes STDIN (forced)
filehandles = append(filehandles, os.Stdin)
haveio = true
} else {
if _, err := os.Stat(args[0]); err != nil {
// first one is not a file, consider it as regexp and
// shift arg list
pattern = args[0]
conf.Pattern = args[0] // used for colorization by printData()
args = args[1:]
}
if len(args) > 0 {
// consider any other args as files
for _, file := range args {
if len(args) > 0 {
// consider any other args as files
for _, file := range args {
filehandle, err := os.OpenFile(file, os.O_RDONLY, RWRR)
fd, err := os.OpenFile(file, os.O_RDONLY, 0755)
if err != nil {
return nil, "", err
}
fds = append(fds, fd)
haveio = true
if err != nil {
return nil, "", fmt.Errorf("failed to read input file %s: %w", file, err)
}
filehandles = append(filehandles, filehandle)
haveio = true
}
}
}
}
if !haveio {
return nil, "", errors.New("No file specified and nothing to read on stdin!")
return nil, "", errors.New("no file specified and nothing to read on stdin")
}
return fds, pattern, nil
return filehandles, pattern, nil
}

View File

@@ -45,26 +45,29 @@ func AddHook(env *zygo.Zlisp, name string, args []zygo.Sexp) (zygo.Sexp, error)
return zygo.SexpNull, errors.New("argument of %add-hook should be: %hook-name %your-function")
}
switch t := args[0].(type) {
switch sexptype := args[0].(type) {
case *zygo.SexpSymbol:
if !HookExists(t.Name()) {
return zygo.SexpNull, errors.New("Unknown hook " + t.Name())
if !HookExists(sexptype.Name()) {
return zygo.SexpNull, errors.New("Unknown hook " + sexptype.Name())
}
hookname = t.Name()
hookname = sexptype.Name()
default:
return zygo.SexpNull, errors.New("hook name must be a symbol!")
return zygo.SexpNull, errors.New("hook name must be a symbol ")
}
switch t := args[1].(type) {
switch sexptype := args[1].(type) {
case *zygo.SexpSymbol:
_, exists := Hooks[hookname]
if !exists {
Hooks[hookname] = []*zygo.SexpSymbol{t}
Hooks[hookname] = []*zygo.SexpSymbol{sexptype}
} else {
Hooks[hookname] = append(Hooks[hookname], t)
Hooks[hookname] = append(Hooks[hookname], sexptype)
}
default:
return zygo.SexpNull, errors.New("hook function must be a symbol!")
return zygo.SexpNull, errors.New("hook function must be a symbol ")
}
return zygo.SexpNull, nil
@@ -86,11 +89,11 @@ func HookExists(key string) bool {
/*
* Basic sanity checks and load lisp file
*/
func LoadFile(env *zygo.Zlisp, path string) error {
func LoadAndEvalFile(env *zygo.Zlisp, path string) error {
if strings.HasSuffix(path, `.zy`) {
code, err := os.ReadFile(path)
if err != nil {
return err
return fmt.Errorf("failed to read lisp file %s: %w", path, err)
}
// FIXME: check what res (_ here) could be and mean
@@ -106,32 +109,40 @@ func LoadFile(env *zygo.Zlisp, path string) error {
/*
* Setup lisp interpreter environment
*/
func SetupLisp(c *cfg.Config) error {
func SetupLisp(conf *cfg.Config) error {
// iterate over load-path and evaluate all *.zy files there, if any
// we ignore if load-path does not exist, which is the default anyway
path, err := os.Stat(conf.LispLoadPath)
if os.IsNotExist(err) {
return nil
}
// init global hooks
Hooks = make(map[string][]*zygo.SexpSymbol)
// init sandbox
env := zygo.NewZlispSandbox()
env.AddFunction("addhook", AddHook)
// iterate over load-path and evaluate all *.zy files there, if any
// we ignore if load-path does not exist, which is the default anyway
if path, err := os.Stat(c.LispLoadPath); !os.IsNotExist(err) {
if !path.IsDir() {
err := LoadFile(env, c.LispLoadPath)
if err != nil {
return err
}
} else {
dir, err := os.ReadDir(c.LispLoadPath)
if err != nil {
return err
}
if !path.IsDir() {
// load single lisp file
err = LoadAndEvalFile(env, conf.LispLoadPath)
if err != nil {
return err
}
} else {
// load all lisp file in load dir
dir, err := os.ReadDir(conf.LispLoadPath)
if err != nil {
return fmt.Errorf("failed to read lisp dir %s: %w",
conf.LispLoadPath, err)
}
for _, entry := range dir {
if !entry.IsDir() {
err := LoadFile(env, c.LispLoadPath+"/"+entry.Name())
if err != nil {
return err
}
for _, entry := range dir {
if !entry.IsDir() {
err := LoadAndEvalFile(env, conf.LispLoadPath+"/"+entry.Name())
if err != nil {
return err
}
}
}
@@ -139,7 +150,8 @@ func SetupLisp(c *cfg.Config) error {
RegisterLib(env)
c.Lisp = env
conf.Lisp = env
return nil
}
@@ -155,20 +167,22 @@ returning false wins, that is if each function returns true the line
will be kept, if at least one of them returns false, it will be
skipped.
*/
func RunFilterHooks(c cfg.Config, line string) (bool, error) {
func RunFilterHooks(conf cfg.Config, line string) (bool, error) {
for _, hook := range Hooks["filter"] {
var result bool
c.Lisp.Clear()
res, err := c.Lisp.EvalString(fmt.Sprintf("(%s `%s`)", hook.Name(), line))
conf.Lisp.Clear()
res, err := conf.Lisp.EvalString(fmt.Sprintf("(%s `%s`)", hook.Name(), line))
if err != nil {
return false, err
return false, fmt.Errorf("failed to evaluate hook loader: %w", err)
}
switch t := res.(type) {
switch sexptype := res.(type) {
case *zygo.SexpBool:
result = t.Val
result = sexptype.Val
default:
return false, errors.New("filter hook shall return BOOL!")
return false, fmt.Errorf("filter hook shall return bool")
}
if !result {
@@ -197,8 +211,9 @@ The somewhat complicated code is being caused by the fact, that we
need to convert our internal structure to a lisp variable and vice
versa afterwards.
*/
func RunProcessHooks(c cfg.Config, data Tabdata) (Tabdata, bool, error) {
func RunProcessHooks(conf cfg.Config, data Tabdata) (Tabdata, bool, error) {
var userdata Tabdata
lisplist := []zygo.Sexp{}
if len(Hooks["process"]) == 0 {
@@ -216,7 +231,7 @@ func RunProcessHooks(c cfg.Config, data Tabdata) (Tabdata, bool, error) {
for idx, cell := range row {
err := entry.HashSet(&zygo.SexpStr{S: data.headers[idx]}, &zygo.SexpStr{S: cell})
if err != nil {
return userdata, false, err
return userdata, false, fmt.Errorf("failed to convert to lisp data: %w", err)
}
}
@@ -224,36 +239,38 @@ func RunProcessHooks(c cfg.Config, data Tabdata) (Tabdata, bool, error) {
}
// we need to add it to the env so that the function can use the struct directly
c.Lisp.AddGlobal("data", &zygo.SexpArray{Val: lisplist, Env: c.Lisp})
conf.Lisp.AddGlobal("data", &zygo.SexpArray{Val: lisplist, Env: conf.Lisp})
// execute the actual hook
hook := Hooks["process"][0]
var result bool
c.Lisp.Clear()
res, err := c.Lisp.EvalString(fmt.Sprintf("(%s data)", hook.Name()))
conf.Lisp.Clear()
var result bool
res, err := conf.Lisp.EvalString(fmt.Sprintf("(%s data)", hook.Name()))
if err != nil {
return userdata, false, err
return userdata, false, fmt.Errorf("failed to eval lisp loader: %w", err)
}
// we expect (bool, array(hash)) as return from the function
switch t := res.(type) {
switch sexptype := res.(type) {
case *zygo.SexpPair:
switch th := t.Head.(type) {
switch th := sexptype.Head.(type) {
case *zygo.SexpBool:
result = th.Val
default:
return userdata, false, errors.New("Expect (bool, array(hash)) as return value!")
return userdata, false, errors.New("xpect (bool, array(hash)) as return value")
}
switch tt := t.Tail.(type) {
switch sexptailtype := sexptype.Tail.(type) {
case *zygo.SexpArray:
lisplist = tt.Val
lisplist = sexptailtype.Val
default:
return userdata, false, errors.New("Expect (bool, array(hash)) as return value!")
return userdata, false, errors.New("expect (bool, array(hash)) as return value ")
}
default:
return userdata, false, errors.New("filter hook shall return array of hashes!")
return userdata, false, errors.New("filter hook shall return array of hashes ")
}
if !result {
@@ -268,20 +285,23 @@ func RunProcessHooks(c cfg.Config, data Tabdata) (Tabdata, bool, error) {
switch hash := item.(type) {
case *zygo.SexpHash:
for _, header := range data.headers {
entry, err := hash.HashGetDefault(c.Lisp, &zygo.SexpStr{S: header}, &zygo.SexpStr{S: ""})
entry, err := hash.HashGetDefault(
conf.Lisp,
&zygo.SexpStr{S: header},
&zygo.SexpStr{S: ""})
if err != nil {
return userdata, false, err
return userdata, false, fmt.Errorf("failed to get lisp hash entry: %w", err)
}
switch t := entry.(type) {
switch sexptype := entry.(type) {
case *zygo.SexpStr:
row = append(row, t.S)
row = append(row, sexptype.S)
default:
return userdata, false, errors.New("Hash values should be string!")
return userdata, false, errors.New("hsh values should be string ")
}
}
default:
return userdata, false, errors.New("Returned array should contain hashes!")
return userdata, false, errors.New("rturned array should contain hashes ")
}
userdata.entries = append(userdata.entries, row)

View File

@@ -19,6 +19,7 @@ package lib
import (
"errors"
"fmt"
"regexp"
"strconv"
@@ -31,29 +32,29 @@ func Splice2SexpList(list []string) zygo.Sexp {
for _, item := range list {
slist = append(slist, &zygo.SexpStr{S: item})
}
return zygo.MakeList(slist)
}
func StringReSplit(env *zygo.Zlisp, name string, args []zygo.Sexp) (zygo.Sexp, error) {
if len(args) < 2 {
return zygo.SexpNull, errors.New("expecting 2 arguments!")
return zygo.SexpNull, errors.New("expecting 2 arguments")
}
var separator string
var input string
var separator, input string
switch t := args[0].(type) {
case *zygo.SexpStr:
input = t.S
default:
return zygo.SexpNull, errors.New("second argument must be a string!")
return zygo.SexpNull, errors.New("second argument must be a string")
}
switch t := args[1].(type) {
case *zygo.SexpStr:
separator = t.S
default:
return zygo.SexpNull, errors.New("first argument must be a string!")
return zygo.SexpNull, errors.New("first argument must be a string")
}
sep := regexp.MustCompile(separator)
@@ -67,12 +68,15 @@ func String2Int(env *zygo.Zlisp, name string, args []zygo.Sexp) (zygo.Sexp, erro
switch t := args[0].(type) {
case *zygo.SexpStr:
num, err := strconv.Atoi(t.S)
if err != nil {
return zygo.SexpNull, err
return zygo.SexpNull, fmt.Errorf("failed to convert string to number: %w", err)
}
number = num
default:
return zygo.SexpNull, errors.New("argument must be a string!")
return zygo.SexpNull, errors.New("argument must be a string")
}
return &zygo.SexpInt{Val: int64(number)}, nil

View File

@@ -1,5 +1,5 @@
/*
Copyright © 2022 Thomas von Dein
Copyright © 2022-2024 Thomas von Dein
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -20,91 +20,44 @@ package lib
import (
"bufio"
"encoding/csv"
"errors"
"fmt"
"io"
"regexp"
"strings"
"github.com/alecthomas/repr"
"github.com/lithammer/fuzzysearch/fuzzy"
"github.com/tlinden/tablizer/cfg"
)
/*
* [!]Match a line, use fuzzy search for normal pattern strings and
* regexp otherwise.
*/
func matchPattern(c cfg.Config, line string) bool {
if len(c.Pattern) > 0 {
if c.UseFuzzySearch {
return fuzzy.MatchFold(c.Pattern, line)
} else {
return c.PatternR.MatchString(line)
}
}
return true
}
/*
Parser switch
*/
func Parse(c cfg.Config, input io.Reader) (Tabdata, error) {
if len(c.Separator) == 1 {
return parseCSV(c, input)
func Parse(conf cfg.Config, input io.Reader) (Tabdata, error) {
if len(conf.Separator) == 1 {
return parseCSV(conf, input)
}
return parseTabular(c, input)
return parseTabular(conf, input)
}
/*
Parse CSV input.
*/
func parseCSV(c cfg.Config, input io.Reader) (Tabdata, error) {
var content io.Reader = input
func parseCSV(conf cfg.Config, input io.Reader) (Tabdata, error) {
data := Tabdata{}
if len(c.Pattern) > 0 {
scanner := bufio.NewScanner(input)
lines := []string{}
hadFirst := false
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if hadFirst {
// don't match 1st line, it's the header
if matchPattern(c, line) == c.InvertMatch {
// by default -v is false, so if a line does NOT
// match the pattern, we will ignore it. However,
// if the user specified -v, the matching is inverted,
// so we ignore all lines, which DO match.
continue
}
// apply user defined lisp filters, if any
accept, err := RunFilterHooks(c, line)
if err != nil {
return data, errors.Unwrap(fmt.Errorf("Failed to apply filter hook: %w", err))
}
if !accept {
// IF there are filter hook[s] and IF one of them
// returns false on the current line, reject it
continue
}
}
lines = append(lines, line)
hadFirst = true
}
content = strings.NewReader(strings.Join(lines, "\n"))
// apply pattern, if any
content, err := FilterByPattern(conf, input)
if err != nil {
return data, err
}
csvreader := csv.NewReader(content)
csvreader.Comma = rune(c.Separator[0])
csvreader.Comma = rune(conf.Separator[0])
records, err := csvreader.ReadAll()
if err != nil {
return data, errors.Unwrap(fmt.Errorf("Could not parse CSV input: %w", err))
return data, fmt.Errorf("could not parse CSV input: %w", err)
}
if len(records) >= 1 {
@@ -125,10 +78,11 @@ func parseCSV(c cfg.Config, input io.Reader) (Tabdata, error) {
}
// apply user defined lisp process hooks, if any
userdata, changed, err := RunProcessHooks(c, data)
userdata, changed, err := RunProcessHooks(conf, data)
if err != nil {
return data, errors.Unwrap(fmt.Errorf("Failed to apply filter hook: %w", err))
return data, fmt.Errorf("failed to apply filter hook: %w", err)
}
if changed {
data = userdata
}
@@ -139,13 +93,13 @@ func parseCSV(c cfg.Config, input io.Reader) (Tabdata, error) {
/*
Parse tabular input.
*/
func parseTabular(c cfg.Config, input io.Reader) (Tabdata, error) {
func parseTabular(conf cfg.Config, input io.Reader) (Tabdata, error) {
data := Tabdata{}
var scanner *bufio.Scanner
hadFirst := false
separate := regexp.MustCompile(c.Separator)
separate := regexp.MustCompile(conf.Separator)
scanner = bufio.NewScanner(input)
@@ -162,10 +116,6 @@ func parseTabular(c cfg.Config, input io.Reader) (Tabdata, error) {
// process all header fields
for _, part := range parts {
// if Debug {
// fmt.Printf("Part: <%s>\n", string(line[beg:part[0]]))
//}
// register widest header field
headerlen := len(part)
if headerlen > data.maxwidthHeader {
@@ -180,7 +130,7 @@ func parseTabular(c cfg.Config, input io.Reader) (Tabdata, error) {
}
} else {
// data processing
if matchPattern(c, line) == c.InvertMatch {
if conf.Pattern != "" && matchPattern(conf, line) == conf.InvertMatch {
// by default -v is false, so if a line does NOT
// match the pattern, we will ignore it. However,
// if the user specified -v, the matching is inverted,
@@ -189,9 +139,9 @@ func parseTabular(c cfg.Config, input io.Reader) (Tabdata, error) {
}
// apply user defined lisp filters, if any
accept, err := RunFilterHooks(c, line)
accept, err := RunFilterHooks(conf, line)
if err != nil {
return data, errors.Unwrap(fmt.Errorf("Failed to apply filter hook: %w", err))
return data, fmt.Errorf("failed to apply filter hook: %w", err)
}
if !accept {
@@ -221,19 +171,30 @@ func parseTabular(c cfg.Config, input io.Reader) (Tabdata, error) {
}
if scanner.Err() != nil {
return data, errors.Unwrap(fmt.Errorf("Failed to read from io.Reader: %w", scanner.Err()))
return data, fmt.Errorf("failed to read from io.Reader: %w", scanner.Err())
}
// filter by field filters, if any
filtereddata, changed, err := FilterByFields(conf, data)
if err != nil {
return data, fmt.Errorf("failed to filter fields: %w", err)
}
if changed {
data = filtereddata
}
// apply user defined lisp process hooks, if any
userdata, changed, err := RunProcessHooks(c, data)
userdata, changed, err := RunProcessHooks(conf, data)
if err != nil {
return data, errors.Unwrap(fmt.Errorf("Failed to apply filter hook: %w", err))
return data, fmt.Errorf("failed to apply filter hook: %w", err)
}
if changed {
data = userdata
}
if c.Debug {
if conf.Debug {
repr.Print(data)
}

View File

@@ -62,12 +62,12 @@ func TestParser(t *testing.T) {
},
}
for _, in := range input {
testname := fmt.Sprintf("parse-%s", in.name)
for _, testdata := range input {
testname := fmt.Sprintf("parse-%s", testdata.name)
t.Run(testname, func(t *testing.T) {
readFd := strings.NewReader(strings.TrimSpace(in.text))
c := cfg.Config{Separator: in.separator}
gotdata, err := Parse(c, readFd)
readFd := strings.NewReader(strings.TrimSpace(testdata.text))
conf := cfg.Config{Separator: testdata.separator}
gotdata, err := Parse(conf, readFd)
if err != nil {
t.Errorf("Parser returned error: %s\nData processed so far: %+v", err, gotdata)
@@ -104,28 +104,28 @@ func TestParserPatternmatching(t *testing.T) {
},
}
for _, in := range input {
for _, tt := range tests {
for _, inputdata := range input {
for _, testdata := range tests {
testname := fmt.Sprintf("parse-%s-with-pattern-%s-inverted-%t",
in.name, tt.pattern, tt.invert)
inputdata.name, testdata.pattern, testdata.invert)
t.Run(testname, func(t *testing.T) {
c := cfg.Config{InvertMatch: tt.invert, Pattern: tt.pattern,
Separator: in.separator}
conf := cfg.Config{InvertMatch: testdata.invert, Pattern: testdata.pattern,
Separator: inputdata.separator}
_ = c.PreparePattern(tt.pattern)
_ = conf.PreparePattern(testdata.pattern)
readFd := strings.NewReader(strings.TrimSpace(in.text))
gotdata, err := Parse(c, readFd)
readFd := strings.NewReader(strings.TrimSpace(inputdata.text))
gotdata, err := Parse(conf, readFd)
if err != nil {
if !tt.want {
if !testdata.want {
t.Errorf("Parser returned error: %s\nData processed so far: %+v",
err, gotdata)
}
} else {
if !reflect.DeepEqual(tt.entries, gotdata.entries) {
if !reflect.DeepEqual(testdata.entries, gotdata.entries) {
t.Errorf("Parser returned invalid data (pattern: %s, invert: %t)\nExp: %+v\nGot: %+v\n",
tt.pattern, tt.invert, tt.entries, gotdata.entries)
testdata.pattern, testdata.invert, testdata.entries, gotdata.entries)
}
}
})
@@ -152,8 +152,8 @@ asd igig
19191 EDD 1 X`
readFd := strings.NewReader(strings.TrimSpace(table))
c := cfg.Config{Separator: cfg.DefaultSeparator}
gotdata, err := Parse(c, readFd)
conf := cfg.Config{Separator: cfg.DefaultSeparator}
gotdata, err := Parse(conf, readFd)
if err != nil {
t.Errorf("Parser returned error: %s\nData processed so far: %+v", err, gotdata)
@@ -161,6 +161,6 @@ asd igig
if !reflect.DeepEqual(data, gotdata) {
t.Errorf("Parser returned invalid data, Regex: %s\nExp: %+v\nGot: %+v\n",
c.Separator, data, gotdata)
conf.Separator, data, gotdata)
}
}

View File

@@ -32,51 +32,48 @@ import (
"gopkg.in/yaml.v3"
)
func printData(w io.Writer, c cfg.Config, data *Tabdata) {
// some output preparations:
func printData(writer io.Writer, conf cfg.Config, data *Tabdata) {
// add numbers to headers and remove those we're not interested in
numberizeAndReduceHeaders(c, data)
numberizeAndReduceHeaders(conf, data)
// remove unwanted columns, if any
reduceColumns(c, data)
reduceColumns(conf, data)
// sort the data
sortTable(c, data)
sortTable(conf, data)
switch c.OutputMode {
switch conf.OutputMode {
case cfg.Extended:
printExtendedData(w, c, data)
case cfg.Ascii:
printAsciiData(w, c, data)
printExtendedData(writer, conf, data)
case cfg.ASCII:
printASCIIData(writer, conf, data)
case cfg.Orgtbl:
printOrgmodeData(w, c, data)
printOrgmodeData(writer, conf, data)
case cfg.Markdown:
printMarkdownData(w, c, data)
printMarkdownData(writer, conf, data)
case cfg.Shell:
printShellData(w, c, data)
printShellData(writer, data)
case cfg.Yaml:
printYamlData(w, c, data)
printYamlData(writer, data)
case cfg.CSV:
printCSVData(w, c, data)
printCSVData(writer, data)
default:
printAsciiData(w, c, data)
printASCIIData(writer, conf, data)
}
}
func output(w io.Writer, str string) {
fmt.Fprint(w, str)
func output(writer io.Writer, str string) {
fmt.Fprint(writer, str)
}
/*
Emacs org-mode compatible table (also orgtbl-mode)
*/
func printOrgmodeData(w io.Writer, c cfg.Config, data *Tabdata) {
func printOrgmodeData(writer io.Writer, conf cfg.Config, data *Tabdata) {
tableString := &strings.Builder{}
table := tablewriter.NewWriter(tableString)
if !c.NoHeaders {
if !conf.NoHeaders {
table.SetHeader(data.headers)
}
@@ -100,8 +97,8 @@ func printOrgmodeData(w io.Writer, c cfg.Config, data *Tabdata) {
leftR := regexp.MustCompile(`(?m)^\\+`)
rightR := regexp.MustCompile(`\\+(?m)$`)
output(w, color.Sprint(
colorizeData(c,
output(writer, color.Sprint(
colorizeData(conf,
rightR.ReplaceAllString(
leftR.ReplaceAllString(tableString.String(), "|"), "|"))))
}
@@ -109,11 +106,11 @@ func printOrgmodeData(w io.Writer, c cfg.Config, data *Tabdata) {
/*
Markdown table
*/
func printMarkdownData(w io.Writer, c cfg.Config, data *Tabdata) {
func printMarkdownData(writer io.Writer, conf cfg.Config, data *Tabdata) {
tableString := &strings.Builder{}
table := tablewriter.NewWriter(tableString)
if !c.NoHeaders {
if !conf.NoHeaders {
table.SetHeader(data.headers)
}
@@ -125,19 +122,20 @@ func printMarkdownData(w io.Writer, c cfg.Config, data *Tabdata) {
table.SetCenterSeparator("|")
table.Render()
output(w, color.Sprint(colorizeData(c, tableString.String())))
output(writer, color.Sprint(colorizeData(conf, tableString.String())))
}
/*
Simple ASCII table without any borders etc, just like the input we expect
*/
func printAsciiData(w io.Writer, c cfg.Config, data *Tabdata) {
func printASCIIData(writer io.Writer, conf cfg.Config, data *Tabdata) {
tableString := &strings.Builder{}
table := tablewriter.NewWriter(tableString)
if !c.NoHeaders {
if !conf.NoHeaders {
table.SetHeader(data.headers)
}
table.AppendBulk(data.entries)
table.SetAutoWrapText(false)
@@ -151,7 +149,7 @@ func printAsciiData(w io.Writer, c cfg.Config, data *Tabdata) {
table.SetBorder(false)
table.SetNoWhiteSpace(true)
if !c.UseHighlight {
if !conf.UseHighlight {
// the tabs destroy the highlighting
table.SetTablePadding("\t") // pad with tabs
} else {
@@ -159,16 +157,17 @@ func printAsciiData(w io.Writer, c cfg.Config, data *Tabdata) {
}
table.Render()
output(w, color.Sprint(colorizeData(c, tableString.String())))
output(writer, color.Sprint(colorizeData(conf, tableString.String())))
}
/*
We simulate the \x command of psql (the PostgreSQL client)
*/
func printExtendedData(w io.Writer, c cfg.Config, data *Tabdata) {
func printExtendedData(writer io.Writer, conf cfg.Config, data *Tabdata) {
// needed for data output
format := fmt.Sprintf("%%%ds: %%s\n", data.maxwidthHeader)
out := ""
if len(data.entries) > 0 {
for _, entry := range data.entries {
for i, value := range entry {
@@ -179,67 +178,71 @@ func printExtendedData(w io.Writer, c cfg.Config, data *Tabdata) {
}
}
output(w, colorizeData(c, out))
output(writer, colorizeData(conf, out))
}
/*
Shell output, ready to be eval'd. Just like FreeBSD stat(1)
*/
func printShellData(w io.Writer, c cfg.Config, data *Tabdata) {
func printShellData(writer io.Writer, data *Tabdata) {
out := ""
if len(data.entries) > 0 {
for _, entry := range data.entries {
shentries := []string{}
for i, value := range entry {
for idx, value := range entry {
shentries = append(shentries, fmt.Sprintf("%s=\"%s\"",
data.headers[i], value))
data.headers[idx], value))
}
out += fmt.Sprint(strings.Join(shentries, " ")) + "\n"
out += strings.Join(shentries, " ") + "\n"
}
}
// no colorization here
output(w, out)
output(writer, out)
}
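To make the "ready to be eval'd" remark concrete (illustrative values, not output taken from the repository): every row is printed as space-separated `HEADER="value"` pairs on one line, which a POSIX shell can evaluate to set variables.

```
// Illustration of the emitted shape: KEY="value" pairs joined by spaces,
// one line per row.
pairs := []string{`NAME="web-1"`, `STATUS="Running"`}
fmt.Println(strings.Join(pairs, " ")) // NAME="web-1" STATUS="Running"
```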
func printYamlData(w io.Writer, c cfg.Config, data *Tabdata) {
type D struct {
func printYamlData(writer io.Writer, data *Tabdata) {
type Data struct {
Entries []map[string]interface{} `yaml:"entries"`
}
d := D{}
yamlout := Data{}
for _, entry := range data.entries {
ml := map[string]interface{}{}
yamldata := map[string]interface{}{}
for i, entry := range entry {
for idx, entry := range entry {
style := yaml.TaggedStyle
_, err := strconv.Atoi(entry)
if err != nil {
style = yaml.DoubleQuotedStyle
}
ml[strings.ToLower(data.headers[i])] =
yamldata[strings.ToLower(data.headers[idx])] =
&yaml.Node{
Kind: yaml.ScalarNode,
Style: style,
Value: entry}
}
d.Entries = append(d.Entries, ml)
yamlout.Entries = append(yamlout.Entries, yamldata)
}
yamlstr, err := yaml.Marshal(&d)
yamlstr, err := yaml.Marshal(&yamlout)
if err != nil {
log.Fatal(err)
}
output(w, string(yamlstr))
output(writer, string(yamlstr))
}
func printCSVData(w io.Writer, c cfg.Config, data *Tabdata) {
csvout := csv.NewWriter(w)
func printCSVData(writer io.Writer, data *Tabdata) {
csvout := csv.NewWriter(writer)
if err := csvout.Write(data.headers); err != nil {
log.Fatalln("error writing record to csv:", err)

View File

@@ -20,10 +20,10 @@ package lib
import (
"bytes"
"fmt"
//"github.com/alecthomas/repr"
"github.com/tlinden/tablizer/cfg"
"strings"
"testing"
"github.com/tlinden/tablizer/cfg"
)
func newData() Tabdata {
@@ -73,7 +73,7 @@ var tests = []struct {
}{
// --------------------- Default settings mode tests ``
{
mode: cfg.Ascii,
mode: cfg.ASCII,
name: "default",
expect: `
NAME(1) DURATION(2) COUNT(3) WHEN(4)
@@ -250,39 +250,39 @@ DURATION(2) WHEN(4)
}
func TestPrinter(t *testing.T) {
for _, tt := range tests {
for _, testdata := range tests {
testname := fmt.Sprintf("print-sortcol-%d-desc-%t-sortby-%s-mode-%d-usecolumns-%s",
tt.column, tt.desc, tt.sortby, tt.mode, tt.usecolstr)
testdata.column, testdata.desc, testdata.sortby, testdata.mode, testdata.usecolstr)
t.Run(testname, func(t *testing.T) {
// replaces os.Stdout, but we ignore it
var w bytes.Buffer
var writer bytes.Buffer
// cmd flags
c := cfg.Config{
SortByColumn: tt.column,
SortDescending: tt.desc,
SortMode: tt.sortby,
OutputMode: tt.mode,
NoNumbering: tt.nonum,
UseColumns: tt.usecol,
conf := cfg.Config{
SortByColumn: testdata.column,
SortDescending: testdata.desc,
SortMode: testdata.sortby,
OutputMode: testdata.mode,
NoNumbering: testdata.nonum,
UseColumns: testdata.usecol,
NoColor: true,
}
c.ApplyDefaults()
conf.ApplyDefaults()
// the test checks the len!
if len(tt.usecol) > 0 {
c.Columns = "yes"
if len(testdata.usecol) > 0 {
conf.Columns = "yes"
} else {
c.Columns = ""
conf.Columns = ""
}
testdata := newData()
exp := strings.TrimSpace(tt.expect)
data := newData()
exp := strings.TrimSpace(testdata.expect)
printData(&w, c, &testdata)
printData(&writer, conf, &data)
got := strings.TrimSpace(w.String())
got := strings.TrimSpace(writer.String())
if got != exp {
t.Errorf("not rendered correctly:\n+++ got:\n%s\n+++ want:\n%s",

View File

@@ -18,21 +18,22 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
package lib
import (
"github.com/araddon/dateparse"
"github.com/tlinden/tablizer/cfg"
"regexp"
"sort"
"strconv"
"github.com/araddon/dateparse"
"github.com/tlinden/tablizer/cfg"
)
func sortTable(c cfg.Config, data *Tabdata) {
if c.SortByColumn <= 0 {
func sortTable(conf cfg.Config, data *Tabdata) {
if conf.SortByColumn <= 0 {
// no sorting wanted
return
}
// slightly modified here to match internal array indices
col := c.SortByColumn
col := conf.SortByColumn
col-- // ui starts counting by 1, but use 0 internally
@@ -48,38 +49,42 @@ func sortTable(c cfg.Config, data *Tabdata) {
// actual sorting
sort.SliceStable(data.entries, func(i, j int) bool {
return compare(&c, data.entries[i][col], data.entries[j][col])
return compare(&conf, data.entries[i][col], data.entries[j][col])
})
}
// config is not modified here, but it would be inefficient to copy it every loop
func compare(c *cfg.Config, a string, b string) bool {
func compare(conf *cfg.Config, left string, right string) bool {
var comp bool
switch c.SortMode {
switch conf.SortMode {
case "numeric":
left, err := strconv.Atoi(a)
left, err := strconv.Atoi(left)
if err != nil {
left = 0
}
right, err := strconv.Atoi(b)
right, err := strconv.Atoi(right)
if err != nil {
right = 0
}
comp = left < right
case "duration":
left := duration2int(a)
right := duration2int(b)
left := duration2int(left)
right := duration2int(right)
comp = left < right
case "time":
left, _ := dateparse.ParseAny(a)
right, _ := dateparse.ParseAny(b)
left, _ := dateparse.ParseAny(left)
right, _ := dateparse.ParseAny(right)
comp = left.Unix() < right.Unix()
default:
comp = a < b
comp = left < right
}
if c.SortDescending {
if conf.SortDescending {
comp = !comp
}
@@ -87,15 +92,15 @@ func compare(c *cfg.Config, a string, b string) bool {
}
/*
We could use time.ParseDuration(), but this doesn't support days.
We could use time.ParseDuration(), but this doesn't support days.
We could also use github.com/xhit/go-str2duration/v2, which does
the job, but it's just another dependency, just for this little
gem. And we don't need a time.Time value. And int is good enough
for duration comparision.
We could also use github.com/xhit/go-str2duration/v2, which does
the job, but it's just another dependency, just for this little
gem. And we don't need a time.Time value. And int is good enough
for duration comparison.
Convert a durartion into an integer. Valid time units are "s",
"m", "h" and "d".
Convert a duration into an integer. Valid time units are "s",
"m", "h" and "d".
*/
func duration2int(duration string) int {
re := regexp.MustCompile(`(\d+)([dhms])`)
@@ -103,16 +108,17 @@ func duration2int(duration string) int {
for _, match := range re.FindAllStringSubmatch(duration, -1) {
if len(match) == 3 {
v, _ := strconv.Atoi(match[1])
durationvalue, _ := strconv.Atoi(match[1])
switch match[2][0] {
case 'd':
seconds += v * 86400
seconds += durationvalue * 86400
case 'h':
seconds += v * 3600
seconds += durationvalue * 3600
case 'm':
seconds += v * 60
seconds += durationvalue * 60
case 's':
seconds += v
seconds += durationvalue
}
}
}
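A quick worked example of the conversion above (a sketch assuming `duration2int` and `fmt` are in scope; not part of the repository's tests):

```
// Illustration only: each (\d+)([dhms]) match adds its value in seconds.
fmt.Println(duration2int("1d2h30m")) // 95400 (86400 + 7200 + 1800)
fmt.Println(duration2int("90s"))     // 90
fmt.Println(duration2int("oops"))    // 0, non-matching input is ignored
```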

View File

@@ -19,8 +19,9 @@ package lib
import (
"fmt"
"github.com/tlinden/tablizer/cfg"
"testing"
"github.com/tlinden/tablizer/cfg"
)
func TestDuration2Seconds(t *testing.T) {
@@ -36,12 +37,12 @@ func TestDuration2Seconds(t *testing.T) {
{"19t77X what?4s", 4},
}
for _, tt := range tests {
testname := fmt.Sprintf("duration-%s", tt.dur)
for _, testdata := range tests {
testname := fmt.Sprintf("duration-%s", testdata.dur)
t.Run(testname, func(t *testing.T) {
seconds := duration2int(tt.dur)
if seconds != tt.expect {
t.Errorf("got %d, want %d", seconds, tt.expect)
seconds := duration2int(testdata.dur)
if seconds != testdata.expect {
t.Errorf("got %d, want %d", seconds, testdata.expect)
}
})
}
@@ -66,13 +67,15 @@ func TestCompare(t *testing.T) {
{"time", "12/24/2022", "1/1/1970", true, true},
}
for _, tt := range tests {
testname := fmt.Sprintf("compare-mode-%s-a-%s-b-%s-desc-%t", tt.mode, tt.a, tt.b, tt.desc)
for _, testdata := range tests {
testname := fmt.Sprintf("compare-mode-%s-a-%s-b-%s-desc-%t",
testdata.mode, testdata.a, testdata.b, testdata.desc)
t.Run(testname, func(t *testing.T) {
c := cfg.Config{SortMode: tt.mode, SortDescending: tt.desc}
got := compare(&c, tt.a, tt.b)
if got != tt.want {
t.Errorf("got %t, want %t", got, tt.want)
c := cfg.Config{SortMode: testdata.mode, SortDescending: testdata.desc}
got := compare(&c, testdata.a, testdata.b)
if got != testdata.want {
t.Errorf("got %t, want %t", got, testdata.want)
}
})
}

View File

@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "TABLIZER 1"
.TH TABLIZER 1 "2023-11-22" "1" "User Commands"
.TH TABLIZER 1 "2024-05-07" "1" "User Commands"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -154,7 +154,8 @@ tablizer \- Manipulate tabular output of other programs
\& \-H, \-\-no\-headers Disable headers display
\& \-s, \-\-separator string Custom field separator
\& \-k, \-\-sort\-by int Sort by column (default: 1)
\& \-z, \-\-fuzzy Use fuzzy seach [experimental]
\& \-z, \-\-fuzzy Use fuzzy search [experimental]
\& \-F, \-\-filter field=reg Filter given field with regex, can be used multiple times
\&
\& Output Flags (mutually exclusive):
\& \-X, \-\-extended Enable extended output
@@ -264,8 +265,8 @@ Sorts timestamps.
.PP
Finally the \fB\-d\fR option enables debugging output which is mostly
useful for the developer.
.SS "\s-1PATTERNS\s0"
.IX Subsection "PATTERNS"
.SS "\s-1PATTERNS AND FILTERING\s0"
.IX Subsection "PATTERNS AND FILTERING"
You can reduce the rows being displayed by using a regular expression
pattern. The regexp is \s-1PCRE\s0 compatible, refer to the syntax cheat
sheet here: <https://github.com/google/re2/wiki/Syntax>. If you want
@@ -297,9 +298,24 @@ Example for a case insensitive search:
\& kubectl get pods \-A | tablizer "(?i)account"
.Ve
.PP
You can use the experimental fuzzy seach feature by providing the
You can use the experimental fuzzy search feature by providing the
option \fB\-z\fR, in which case the pattern is regarded as a fuzzy search
term, not a regexp.
.PP
Sometimes you want to filter by one or more columns. You can do that
using the \fB\-F\fR option. The option can be specified multiple times and
has the following format:
.PP
.Vb 1
\& fieldname=regexp
.Ve
.PP
Fieldnames (== column headers) are case insensitive.
.PP
If you specify more than one filter, all of them have to match (\s-1AND\s0
operation).
.PP
If the option \fB\-v\fR is specified, the filtering is inverted.
.SS "\s-1COLUMNS\s0"
.IX Subsection "COLUMNS"
The parameter \fB\-c\fR can be used to specify which columns to
@@ -487,7 +503,7 @@ or to submit a patch, please open an issue on github:
.IX Header "LICENSE"
This software is licensed under the \s-1GNU GENERAL PUBLIC LICENSE\s0 version 3.
.PP
Copyright (c) 2023 by Thomas von Dein
Copyright (c) 2022\-2024 by Thomas von Dein
.PP
This software uses the following \s-1GO\s0 modules:
.IP "repr (https://github.com/alecthomas/repr)" 4

View File

@@ -15,7 +15,8 @@ tablizer - Manipulate tabular output of other programs
-H, --no-headers Disable headers display
-s, --separator string Custom field separator
-k, --sort-by int Sort by column (default: 1)
-z, --fuzzy Use fuzzy seach [experimental]
-z, --fuzzy Use fuzzy search [experimental]
-F, --filter field=reg Filter given field with regex, can be used multiple times
Output Flags (mutually exclusive):
-X, --extended Enable extended output
@@ -128,7 +129,7 @@ Sorts timestamps.
Finally the B<-d> option enables debugging output which is mostly
useful for the developer.
=head2 PATTERNS
=head2 PATTERNS AND FILTERING
You can reduce the rows being displayed by using a regular expression
pattern. The regexp is PCRE compatible, refer to the syntax cheat
@@ -155,10 +156,24 @@ Example for a case insensitive search:
kubectl get pods -A | tablizer "(?i)account"
You can use the experimental fuzzy seach feature by providing the
You can use the experimental fuzzy search feature by providing the
option B<-z>, in which case the pattern is regarded as a fuzzy search
term, not a regexp.
Sometimes you want to filter by one or more columns. You can do that
using the B<-F> option. The option can be specified multiple times and
has the following format:
fieldname=regexp
Fieldnames (== column headers) are case insensitive.
If you specify more than one filter, all of them have to match (AND
operation).
If the option B<-v> is specified, the filtering is inverted.
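As an illustration of the `fieldname=regexp` format described above (a sketch, not the actual option parsing; it assumes the usual `fmt`, `regexp` and `strings` imports): splitting each filter argument on the first `=` and compiling the right-hand side yields a map from lower-cased field name to pattern.

```
// Sketch only: turn raw "fieldname=regexp" arguments into a map of
// lower-cased field name -> compiled regexp.
func compileFilters(raw []string) (map[string]*regexp.Regexp, error) {
	filters := map[string]*regexp.Regexp{}

	for _, arg := range raw {
		parts := strings.SplitN(arg, "=", 2)
		if len(parts) != 2 {
			return nil, fmt.Errorf("invalid filter %q, expected fieldname=regexp", arg)
		}

		re, err := regexp.Compile(parts[1])
		if err != nil {
			return nil, fmt.Errorf("invalid filter regexp %q: %w", parts[1], err)
		}

		filters[strings.ToLower(parts[0])] = re // field names are case insensitive
	}

	return filters, nil
}
```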
=head2 COLUMNS
The parameter B<-c> can be used to specify which columns to
@@ -336,7 +351,7 @@ L<https://github.com/TLINDEN/tablizer/issues>.
This software is licensed under the GNU GENERAL PUBLIC LICENSE version 3.
Copyright (c) 2023 by Thomas von Dein
Copyright (c) 2022-2024 by Thomas von Dein
This software uses the following GO modules: