finalized conversion to protobuf:

- fixed import+export
- generalized file options
- always store keys as lowercase
- fixed+enhanced docs
- fixed tests
2024-12-30 12:12:02 +01:00
parent bb5c268ca8
commit 1eb5efae0c
15 changed files with 128 additions and 152 deletions

View File

@@ -76,10 +76,14 @@ anydb set foo bar -t note,important
 anydb list -t important
 # beside tags filtering you can also use regexps for searching
+# note, by default the list command only searches through keys
 anydb list '[a-z]+\d'
+# do a full text search
+anydb list '[a-z]+\d' -s
 # anydb also supports a wide output
-anydb list -o wide
+anydb list -m wide
 KEY   TAGS        SIZE   AGE              VALUE
 blah  important   4 B    7 seconds ago    haha
 foo               3 B    15 seconds ago   bar
@@ -90,13 +94,13 @@ anydb ls -l
 anydb /
 # other outputs are possible as well
-anydb list -o json
+anydb list -m json
 # you can backup your database
 anydb export -o backup.json
 # and import it somewhere else
-anydb import -r backup.json
+anydb import -i backup.json
 # you can encrypt entries. anydb asks for a passphrase
 # and will do the same when you retrieve the key using the

anydb.1
View File

@@ -133,7 +133,7 @@
.\" ======================================================================== .\" ========================================================================
.\" .\"
.IX Title "ANYDB 1" .IX Title "ANYDB 1"
.TH ANYDB 1 "2024-12-29" "1" "User Commands" .TH ANYDB 1 "2024-12-30" "1" "User Commands"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents. .\" way too many mistakes in technical documents.
.if n .ad l .if n .ad l
@@ -356,18 +356,18 @@ Usage:
 .PP
 .Vb 2
 \& Usage:
-\&   anydb list [<filter\-regex>] [\-t <tag>] [\-m <mode>] [\-n \-N] [\-T <tpl>] [\-i] [flags]
+\&   anydb list [<filter\-regex> | \-t <tag> ] [\-m <mode>] [\-nNif] [\-T <tpl>] [flags]
 \&
 \& Aliases:
-\&   list, /, ls
+\&   list, ls, /, find, search
 \&
 \& Flags:
 \&   \-i, \-\-case\-insensitive   filter case insensitive
 \&   \-h, \-\-help               help for list
-\&   \-m, \-\-mode string        output format (table|wide|json|template),
-\&                            wide is a verbose table. (default \*(Aqtable\*(Aq)
+\&   \-m, \-\-mode string        output format (table|wide|json|template), wide is a verbose table. (default \*(Aqtable\*(Aq)
 \&   \-n, \-\-no\-headers         omit headers in tables
 \&   \-N, \-\-no\-human           do not translate to human readable values
+\&   \-s, \-\-search\-fulltext    perform a full text search
 \&   \-t, \-\-tags stringArray   tags, multiple allowed
 \&   \-T, \-\-template string    go template for \*(Aq\-m template\*(Aq
 \&   \-l, \-\-wide\-output        output mode: wide

View File

@@ -206,18 +206,18 @@ The B<list> subcommand displays a list of all database entries.
 Usage:
   Usage:
-    anydb list [<filter-regex>] [-t <tag>] [-m <mode>] [-n -N] [-T <tpl>] [-i] [flags]
+    anydb list [<filter-regex> | -t <tag> ] [-m <mode>] [-nNif] [-T <tpl>] [flags]
   Aliases:
-    list, /, ls
+    list, ls, /, find, search
   Flags:
     -i, --case-insensitive   filter case insensitive
     -h, --help               help for list
-    -m, --mode string        output format (table|wide|json|template),
-                             wide is a verbose table. (default 'table')
+    -m, --mode string        output format (table|wide|json|template), wide is a verbose table. (default 'table')
     -n, --no-headers         omit headers in tables
     -N, --no-human           do not translate to human readable values
+    -s, --search-fulltext    perform a full text search
     -t, --tags stringArray   tags, multiple allowed
     -T, --template string    go template for '-m template'
     -l, --wide-output        output mode: wide
@@ -254,6 +254,10 @@ features.
 If you want to search case insensitive, add the option C<-i>.
 
+By default anydb only searches through the keys. If you want to search
+through the values as well, then use the C<-s> option, which enables
+full-text search.
+
 You can - as with the B<get> command - use other output modes. The
 default mode is "table". The "wide" mode is, as already mentioned, a
 more detailed table. Also supported is "json" mode and "template"
@@ -323,7 +327,7 @@ the B<export> subcommand.
 Usage:
   Usage:
-    anydb export [-o <json filename>] [flags]
+    anydb export -o <json filename> [flags]
   Aliases:
     export, dump, backup
@@ -332,12 +336,12 @@ Usage:
     -h, --help            help for export
     -o, --output string   output to file
 
 The database dump is a JSON representation of the whole database and
-will be printed to STDOUT by default. Redirect it to a file or use the
-C<-o> option:
+will be printed to the file specified with the C<-o> option. If you
+specify "-" as the filename, it will be written to STDOUT.
 
-    anydb export > dump.json
     anydb export -o dump.json
+    anydb export -o - > dump.json
 
 Please note, that encrypted values will not be decrypted. This might
 change in a future version of anydb.
@@ -350,7 +354,7 @@ dump.
 Usage:
   Usage:
-    anydb import [<json file>] [flags]
+    anydb import -i <json file> [flags]
   Aliases:
     import, restore
@@ -360,12 +364,13 @@ Usage:
     -h, --help               help for import
     -t, --tags stringArray   tags, multiple allowed
 
-By default the C<import> subcommand reads the JSON contents from
-STDIN. You might pipe the dump into it or use the option C<-r>:
+The C<import> subcommand reads the JSON contents from
+the file specified with the C<-i> option. If you specify "-" as the
+filename, it will be read from STDIN.
 
-    anydb import < dump.json
-    anydb import -r dump.json
-    cat dump.json | anydb import
+    anydb import -i - < dump.json
+    anydb import -i dump.json
+    cat dump.json | anydb import -i -
 
 If there is already a database, it will be saved by appending a
 timestamp and a new database with the contents of the dump will be

View File

@@ -35,16 +35,18 @@ type DbAttr struct {
 	Binary bool
 }
 
+// check if value is to be read from a file or stdin, setup preview
+// text according to flags, lowercase key
 func (attr *DbAttr) ParseKV() error {
+	attr.Key = strings.ToLower(attr.Args[0])
+
 	switch len(attr.Args) {
 	case 1:
 		// 1 arg = key + read from file or stdin
-		attr.Key = attr.Args[0]
 		if attr.File == "" {
 			attr.File = "-"
 		}
 	case 2:
-		attr.Key = attr.Args[0]
 		attr.Val = []byte(attr.Args[1])
 
 		if attr.Args[1] == "-" {
@@ -58,9 +60,12 @@ func (attr *DbAttr) ParseKV() error {
 		}
 	}
 
-	if attr.Binary {
+	switch {
+	case attr.Binary:
+		attr.Preview = "<binary-content>"
+	case attr.Encrypted:
 		attr.Preview = "<encrypted-content>"
-	} else {
+	default:
 		if len(attr.Val) > MaxValueWidth {
 			attr.Preview = string(attr.Val)[0:MaxValueWidth] + "..."
@@ -74,9 +79,6 @@ func (attr *DbAttr) ParseKV() error {
 			attr.Preview = string(attr.Val)
 		}
 	}
 
-	if attr.Encrypted {
-		attr.Preview = "<encrypted-content>"
-	}
 	return nil
 }
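
Taken together, the three hunks above reorganize ParseKV: the key is lowercased once up front, and the preview text is chosen in a single switch instead of two separate if blocks. A minimal sketch of the resulting behaviour (hypothetical helper names; MaxValueWidth and the DbAttr fields are assumed from the diff, not copied verbatim from the project):

    package app

    import "strings"

    const MaxValueWidth = 60 // assumed value; the real constant is defined elsewhere

    type DbAttr struct {
        Args      []string
        Key       string
        Val       []byte
        Preview   string
        Binary    bool
        Encrypted bool
    }

    // normalizeKey mirrors the new behaviour: keys are always stored lowercase.
    func normalizeKey(arg string) string {
        return strings.ToLower(arg)
    }

    // preview mirrors the new switch: binary beats encrypted beats plain text,
    // and long plain-text values are truncated with an ellipsis.
    func preview(attr *DbAttr) string {
        switch {
        case attr.Binary:
            return "<binary-content>"
        case attr.Encrypted:
            return "<encrypted-content>"
        default:
            if len(attr.Val) > MaxValueWidth {
                return string(attr.Val)[0:MaxValueWidth] + "..."
            }
            return string(attr.Val)
        }
    }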

View File

@@ -85,7 +85,7 @@ func (db *DB) Close() error {
 	return db.DB.Close()
 }
 
-func (db *DB) List(attr *DbAttr) (DbEntries, error) {
+func (db *DB) List(attr *DbAttr, fulltext bool) (DbEntries, error) {
 	if err := db.Open(); err != nil {
 		return nil, err
 	}
@@ -99,7 +99,6 @@ func (db *DB) List(attr *DbAttr) (DbEntries, error) {
 	}
 
 	err := db.DB.View(func(tx *bolt.Tx) error {
 		root := tx.Bucket([]byte(db.Bucket))
 		if root == nil {
 			return nil
@@ -110,12 +109,19 @@ func (db *DB) List(attr *DbAttr) (DbEntries, error) {
 			return nil
 		}
 
+		databucket := root.Bucket([]byte("data"))
+		if databucket == nil {
+			return fmt.Errorf("failed to retrieve data sub bucket")
+		}
+
 		err := bucket.ForEach(func(key, pbentry []byte) error {
 			var entry DbEntry
 			if err := proto.Unmarshal(pbentry, &entry); err != nil {
 				return fmt.Errorf("failed to unmarshal from protobuf: %w", err)
 			}
 
+			entry.Value = databucket.Get([]byte(entry.Key)) // empty is ok
+
 			var include bool
 
 			switch {
@@ -124,6 +130,12 @@ func (db *DB) List(attr *DbAttr) (DbEntries, error) {
 				filter.MatchString(strings.Join(entry.Tags, " ")) {
 					include = true
 				}
+
+				if !entry.Binary && !include && fulltext {
+					if filter.MatchString(string(entry.Value)) {
+						include = true
+					}
+				}
 			case len(attr.Tags) > 0:
 				for _, search := range attr.Tags {
 					for _, tag := range entry.Tags {
@@ -261,35 +273,48 @@ func (db *DB) Get(attr *DbAttr) (*DbEntry, error) {
 	entry := DbEntry{}
 
 	err := db.DB.View(func(tx *bolt.Tx) error {
+		// root bucket
 		root := tx.Bucket([]byte(db.Bucket))
 		if root == nil {
 			return nil
 		}
 
+		// get meta sub bucket
 		bucket := root.Bucket([]byte("meta"))
 		if bucket == nil {
 			return nil
 		}
 
+		// retrieve meta data
 		pbentry := bucket.Get([]byte(attr.Key))
 		if pbentry == nil {
 			return fmt.Errorf("no such key: %s", attr.Key)
 		}
 
+		// put into struct
 		if err := proto.Unmarshal(pbentry, &entry); err != nil {
 			return fmt.Errorf("failed to unmarshal from protobuf: %w", err)
 		}
 
+		// get data sub bucket
 		databucket := root.Bucket([]byte("data"))
 		if databucket == nil {
 			return fmt.Errorf("failed to retrieve data sub bucket")
 		}
 
-		entry.Value = databucket.Get([]byte(attr.Key))
-		if len(entry.Value) == 0 {
+		// retrieve actual data value
+		value := databucket.Get([]byte(attr.Key))
+		if len(value) == 0 {
 			return fmt.Errorf("no such key: %s", attr.Key)
 		}
 
+		// we need to make a copy of it, otherwise we'll get an
+		// "unexpected fault address" error
+		vc := make([]byte, len(value))
+		copy(vc, value)
+		entry.Value = vc
+
 		return nil
 	})
@@ -446,78 +471,50 @@ func (db *DB) Info() (*DbInfo, error) {
 	return info, err
 }
 
-func (db *DB) Find(attr *DbAttr) (DbEntries, error) {
+func (db *DB) Getall(attr *DbAttr) (DbEntries, error) {
 	if err := db.Open(); err != nil {
 		return nil, err
 	}
 	defer db.Close()
 
 	var entries DbEntries
 
-	var filter *regexp.Regexp
-	if len(attr.Args) > 0 {
-		filter = regexp.MustCompile(attr.Args[0])
-	}
-
 	err := db.DB.View(func(tx *bolt.Tx) error {
+		// root bucket
 		root := tx.Bucket([]byte(db.Bucket))
 		if root == nil {
 			return nil
 		}
 
+		// get meta sub bucket
 		bucket := root.Bucket([]byte("meta"))
 		if bucket == nil {
 			return nil
 		}
 
+		// get data sub bucket
 		databucket := root.Bucket([]byte("data"))
 		if databucket == nil {
 			return fmt.Errorf("failed to retrieve data sub bucket")
 		}
 
+		// iterate over all db entries in meta sub bucket
 		err := bucket.ForEach(func(key, pbentry []byte) error {
 			var entry DbEntry
 			if err := proto.Unmarshal(pbentry, &entry); err != nil {
 				return fmt.Errorf("failed to unmarshal from protobuf: %w", err)
 			}
 
-			entry.Value = databucket.Get([]byte(entry.Key))
+			// retrieve the value from the data sub bucket
+			value := databucket.Get([]byte(entry.Key))
 
-			var include bool
-			switch {
-			case filter != nil:
-				if filter.MatchString(entry.Key) ||
-					filter.MatchString(strings.Join(entry.Tags, " ")) {
-					include = true
-				}
-				if !entry.Binary && !include {
-					if filter.MatchString(string(entry.Value)) {
-						include = true
-					}
-				}
-			case len(attr.Tags) > 0:
-				for _, search := range attr.Tags {
-					for _, tag := range entry.Tags {
-						if tag == search {
-							include = true
-							break
-						}
-					}
-					if include {
-						break
-					}
-				}
-			default:
-				include = true
-			}
-			if include {
-				entries = append(entries, entry)
-			}
+			// we need to make a copy of it, otherwise we'll get an
+			// "unexpected fault address" error
+			vc := make([]byte, len(value))
+			copy(vc, value)
+
+			entry.Value = vc
+			entries = append(entries, entry)
 
 			return nil
 		})
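
The "make a copy" comments in Get and Getall exist because of how bbolt manages memory: a slice returned by Bucket.Get points into the database's memory-mapped pages and is only valid while the read transaction is open, so handing it out directly can crash later with the "unexpected fault address" error mentioned above. A small, self-contained sketch of the pattern with go.etcd.io/bbolt (illustrative bucket and key names, not the project's wrapper code):

    package main

    import (
        "fmt"
        "log"

        bolt "go.etcd.io/bbolt"
    )

    func main() {
        db, err := bolt.Open("example.db", 0600, nil)
        if err != nil {
            log.Fatal(err)
        }
        defer db.Close()

        var value []byte

        err = db.View(func(tx *bolt.Tx) error {
            bucket := tx.Bucket([]byte("data"))
            if bucket == nil {
                return fmt.Errorf("no data bucket")
            }

            v := bucket.Get([]byte("foo"))

            // v is backed by bbolt's mmap and only valid inside this
            // transaction, so copy it before the closure returns.
            value = make([]byte, len(v))
            copy(value, v)

            return nil
        })
        if err != nil {
            log.Fatal(err)
        }

        fmt.Printf("%s\n", value)
    }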

View File

@@ -42,6 +42,7 @@ type Config struct {
 	NoHumanize      bool
 	Encrypt         bool // one entry
 	CaseInsensitive bool
+	Fulltext        bool
 	Listen          string
 
 	Buckets map[string]BucketConfig // config file only

View File

@@ -187,18 +187,18 @@ SUBCOMMANDS
 Usage:
   Usage:
-    anydb list [<filter-regex>] [-t <tag>] [-m <mode>] [-n -N] [-T <tpl>] [-i] [flags]
+    anydb list [<filter-regex> | -t <tag> ] [-m <mode>] [-nNif] [-T <tpl>] [flags]
   Aliases:
-    list, /, ls
+    list, ls, /, find, search
   Flags:
     -i, --case-insensitive   filter case insensitive
     -h, --help               help for list
-    -m, --mode string        output format (table|wide|json|template),
-                             wide is a verbose table. (default 'table')
+    -m, --mode string        output format (table|wide|json|template), wide is a verbose table. (default 'table')
     -n, --no-headers         omit headers in tables
     -N, --no-human           do not translate to human readable values
+    -s, --search-fulltext    perform a full text search
     -t, --tags stringArray   tags, multiple allowed
     -T, --template string    go template for '-m template'
     -l, --wide-output        output mode: wide

View File

@@ -182,7 +182,7 @@ func List(conf *cfg.Config) *cobra.Command {
 	)
 
 	var cmd = &cobra.Command{
-		Use:   "list [<filter-regex>] [-m <mode>] [-n -N] [-T <tpl>] [-i]",
+		Use:   "list [<filter-regex> | -t <tag> ] [-m <mode>] [-nNif] [-T <tpl>]",
 		Short: "List database contents",
 		Long:  `List database contents`,
 		RunE: func(cmd *cobra.Command, args []string) error {
@@ -206,59 +206,7 @@ func List(conf *cfg.Config) *cobra.Command {
 				conf.Mode = "wide"
 			}
 
-			entries, err := conf.DB.List(&attr)
-			if err != nil {
-				return err
-			}
-
-			return output.List(os.Stdout, conf, entries)
-		},
-	}
-
-	cmd.PersistentFlags().StringVarP(&conf.Mode, "mode", "m", "", "output format (table|wide|json|template), wide is a verbose table. (default 'table')")
-	cmd.PersistentFlags().StringVarP(&conf.Template, "template", "T", "", "go template for '-m template'")
-	cmd.PersistentFlags().BoolVarP(&wide, "wide-output", "l", false, "output mode: wide")
-	cmd.PersistentFlags().BoolVarP(&conf.NoHeaders, "no-headers", "n", false, "omit headers in tables")
-	cmd.PersistentFlags().BoolVarP(&conf.NoHumanize, "no-human", "N", false, "do not translate to human readable values")
-	cmd.PersistentFlags().BoolVarP(&conf.CaseInsensitive, "case-insensitive", "i", false, "filter case insensitive")
-
-	cmd.Aliases = append(cmd.Aliases, "ls")
-
-	return cmd
-}
-
-func Find(conf *cfg.Config) *cobra.Command {
-	var (
-		attr app.DbAttr
-		wide bool
-	)
-
-	var cmd = &cobra.Command{
-		Use:   "find <filter-regex> | -t <tag> [-m <mode>] [-n -N] [-T <tpl>] [-i]",
-		Short: "Find database contents",
-		Long:  `Find database contents`,
-		RunE: func(cmd *cobra.Command, args []string) error {
-			// errors at this stage do not cause the usage to be shown
-			cmd.SilenceUsage = true
-
-			if len(args) > 0 {
-				if conf.CaseInsensitive {
-					attr.Args = []string{"(?i)" + args[0]}
-				} else {
-					attr.Args = args
-				}
-			}
-
-			// turn comma list into slice, if needed
-			if len(attr.Tags) == 1 && strings.Contains(attr.Tags[0], ",") {
-				attr.Tags = strings.Split(attr.Tags[0], ",")
-			}
-
-			if wide {
-				conf.Mode = "wide"
-			}
-
-			entries, err := conf.DB.Find(&attr)
+			entries, err := conf.DB.List(&attr, conf.Fulltext)
 			if err != nil {
 				return err
 			}
@@ -273,10 +221,12 @@ func Find(conf *cfg.Config) *cobra.Command {
 	cmd.PersistentFlags().BoolVarP(&conf.NoHeaders, "no-headers", "n", false, "omit headers in tables")
 	cmd.PersistentFlags().BoolVarP(&conf.NoHumanize, "no-human", "N", false, "do not translate to human readable values")
 	cmd.PersistentFlags().BoolVarP(&conf.CaseInsensitive, "case-insensitive", "i", false, "filter case insensitive")
+	cmd.PersistentFlags().BoolVarP(&conf.Fulltext, "search-fulltext", "s", false, "perform a full text search")
 	cmd.PersistentFlags().StringArrayVarP(&attr.Tags, "tags", "t", nil, "tags, multiple allowed")
 
+	cmd.Aliases = append(cmd.Aliases, "ls")
 	cmd.Aliases = append(cmd.Aliases, "/")
-	cmd.Aliases = append(cmd.Aliases, "f")
+	cmd.Aliases = append(cmd.Aliases, "find")
 	cmd.Aliases = append(cmd.Aliases, "search")
 
 	return cmd
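
With Find gone, a single cobra command now carries the old find behaviour: the -s/--search-fulltext flag toggles value matching and the former find/search aliases hang off list. A condensed, self-contained sketch of that wiring (hypothetical "anydb-demo" root command; flag names and aliases follow the diff, the rest is illustrative):

    package main

    import (
        "fmt"
        "os"

        "github.com/spf13/cobra"
    )

    func main() {
        var fulltext bool
        var tags []string

        list := &cobra.Command{
            Use:     "list [<filter-regex> | -t <tag>]",
            Short:   "List database contents",
            Aliases: []string{"ls", "/", "find", "search"},
            RunE: func(cmd *cobra.Command, args []string) error {
                // errors past this point should not print the usage text
                cmd.SilenceUsage = true
                fmt.Printf("fulltext=%v tags=%v args=%v\n", fulltext, tags, args)
                return nil
            },
        }

        list.PersistentFlags().BoolVarP(&fulltext, "search-fulltext", "s", false,
            "perform a full text search")
        list.PersistentFlags().StringArrayVarP(&tags, "tags", "t", nil,
            "tags, multiple allowed")

        root := &cobra.Command{Use: "anydb-demo"}
        root.AddCommand(list)

        if err := root.Execute(); err != nil {
            os.Exit(1)
        }
    }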

View File

@@ -37,16 +37,16 @@ func Export(conf *cfg.Config) *cobra.Command {
 	)
 
 	var cmd = &cobra.Command{
-		Use:   "export [-o <json filename>]",
-		Short: "Export database to json",
-		Long:  `Export database to json`,
+		Use:   "export -o <json filename>",
+		Short: "Export database to json file",
+		Long:  `Export database to json file`,
 		RunE: func(cmd *cobra.Command, args []string) error {
 			// errors at this stage do not cause the usage to be shown
 			cmd.SilenceUsage = true
 
 			conf.Mode = "json"
 
-			entries, err := conf.DB.List(&attr)
+			entries, err := conf.DB.Getall(&attr)
 			if err != nil {
 				return err
 			}
@@ -55,7 +55,8 @@ func Export(conf *cfg.Config) *cobra.Command {
 		},
 	}
 
-	cmd.PersistentFlags().StringVarP(&attr.File, "output", "o", "", "output to file")
+	cmd.PersistentFlags().StringVarP(&attr.File, "output-file", "o", "", "filename or - for STDOUT")
+	cmd.MarkPersistentFlagRequired("output-file")
 
 	cmd.Aliases = append(cmd.Aliases, "dump")
 	cmd.Aliases = append(cmd.Aliases, "backup")
@@ -69,7 +70,7 @@ func Import(conf *cfg.Config) *cobra.Command {
 	)
 
 	var cmd = &cobra.Command{
-		Use:   "import [<json file>]",
+		Use:   "import -i <json file>",
 		Short: "Import database dump",
 		Long:  `Import database dump`,
 		RunE: func(cmd *cobra.Command, args []string) error {
@@ -86,7 +87,8 @@ func Import(conf *cfg.Config) *cobra.Command {
 		},
 	}
 
-	cmd.PersistentFlags().StringVarP(&attr.File, "file", "r", "", "Filename or - for STDIN")
+	cmd.PersistentFlags().StringVarP(&attr.File, "import-file", "i", "", "filename or - for STDIN")
+	cmd.MarkPersistentFlagRequired("import-file")
 
 	cmd.PersistentFlags().StringArrayVarP(&attr.Tags, "tags", "t", nil, "tags, multiple allowed")
 
 	cmd.Aliases = append(cmd.Aliases, "restore")

View File

@@ -122,7 +122,6 @@ func Execute() {
 	// CRUD
 	rootCmd.AddCommand(Set(&conf))
 	rootCmd.AddCommand(List(&conf))
-	rootCmd.AddCommand(Find(&conf))
 	rootCmd.AddCommand(Get(&conf))
 	rootCmd.AddCommand(Del(&conf))

View File

@@ -75,7 +75,6 @@ func WriteFile(writer io.Writer, conf *cfg.Config, attr *app.DbAttr, entry *app.
 	if attr.File == "-" {
 		fileHandle = os.Stdout
 	} else {
 		fd, err := os.OpenFile(attr.File, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0755)
 		if err != nil {
 			return fmt.Errorf("failed to open file %s for writing: %w", attr.File, err)
@@ -85,10 +84,10 @@ func WriteFile(writer io.Writer, conf *cfg.Config, attr *app.DbAttr, entry *app.
 		fileHandle = fd
 	}
 
-	if entry.Binary {
-		// binary file content
-		_, err = fileHandle.Write(entry.Value)
+	// actually write file content
+	_, err = fileHandle.Write(entry.Value)
 
+	if !entry.Binary {
 		if entry.Value[entry.Size-1] != '\n' {
 			// always add a terminal newline
 			_, err = fileHandle.Write([]byte{'\n'})
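
The rewritten block above fixes the newline handling: the value is always written, and the terminating newline is appended for text content only. A self-contained sketch of the same pattern ("-" means STDOUT; writeValue is an illustrative stand-in, not the project's WriteFile):

    package main

    import (
        "fmt"
        "os"
    )

    // writeValue writes to STDOUT when file is "-", otherwise it creates or
    // truncates the file, and appends a newline to non-binary content.
    func writeValue(file string, value []byte, binary bool) error {
        out := os.Stdout

        if file != "-" {
            fd, err := os.OpenFile(file, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0755)
            if err != nil {
                return fmt.Errorf("failed to open file %s for writing: %w", file, err)
            }
            defer fd.Close()
            out = fd
        }

        if _, err := out.Write(value); err != nil {
            return err
        }

        if !binary && len(value) > 0 && value[len(value)-1] != '\n' {
            // always terminate text output with a newline
            _, err := out.Write([]byte{'\n'})
            return err
        }

        return nil
    }

    func main() {
        if err := writeValue("-", []byte("alpha"), false); err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
    }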

View File

@@ -54,7 +54,7 @@ func RestList(c *fiber.Ctx, conf *cfg.Config) error {
 	}
 
 	// get list
-	entries, err := conf.DB.List(attr)
+	entries, err := conf.DB.List(attr, false)
 	if err != nil {
 		return JsonStatus(c, fiber.StatusForbidden,
 			"Unable to list keys: "+err.Error())

View File

@@ -32,3 +32,20 @@ stdout 50
 # look if it's inside the db
 exec anydb -f test.db ls
 stdout datum.*binary-content
+
+# do the same thing with text content, start with a new text entry
+exec anydb -f test.db set feed alpha
+
+# which we write to a file
+exec anydb -f test.db get feed -o out2.txt
+exists out2.txt
+
+# check if it's filled (5 bytes + newline)
+exec ls -l out2.txt
+stdout 6
+
+# compare content
+exec cat out2.txt
+stdout alpha

View File

@@ -23,10 +23,10 @@ exec anydb -f test.db export -o backup.json
 stdout 'database contents exported to backup.json'
 
 # import into new db
-exec anydb -f new.db import -r backup.json
+exec anydb -f new.db import -i backup.json
 stdout 'imported.*entries'
 
 # check contents
-exec anydb -f new.db list
+exec anydb -f new.db list bar -s
 stdout foo.*bar

View File

@@ -37,12 +37,12 @@ exec anydb -f test.db list -t flower
 ! stdout bar
 
 # list with filter
-exec anydb -f test.db list b.r
+exec anydb -f test.db list b.r -s
 stdout bar
 
 # list with -i filter
-exec anydb -f test.db list -i mucha
-stdout MUCHA
+exec anydb -f test.db list -is mucha
+stdout mucha
 
 # get single entry
 exec anydb -f test.db get color