mirror of https://codeberg.org/scip/anydb.git (synced 2025-12-16 20:10:59 +01:00)

finalized conversion to protobuf:

- fixed import+export
- generalized file options
- always store keys as lowercase
- fixed+enhanced docs
- fixed tests

README.md (10 lines changed)
@@ -76,10 +76,14 @@ anydb set foo bar -t note,important
anydb list -t important

# beside tags filtering you can also use regexps for searching
# note, by default the list command only searches through keys
anydb list '[a-z]+\d'

# do a full text search
anydb list '[a-z]+\d' -s

# anydb also supports a wide output
anydb list -o wide
anydb list -m wide
KEY    TAGS       SIZE   AGE              VALUE
blah   important  4 B    7 seconds ago    haha
foo               3 B    15 seconds ago   bar
@@ -90,13 +94,13 @@ anydb ls -l
anydb /

# other outputs are possible as well
anydb list -o json
anydb list -m json

# you can backup your database
anydb export -o backup.json

# and import it somewhere else
anydb import -r backup.json
anydb import -i backup.json

# you can encrypt entries. anydb asks for a passphrase
# and will do the same when you retrieve the key using the
anydb.1 (10 lines changed)
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ANYDB 1"
.TH ANYDB 1 "2024-12-29" "1" "User Commands"
.TH ANYDB 1 "2024-12-30" "1" "User Commands"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -356,18 +356,18 @@ Usage:
.PP
.Vb 2
\& Usage:
\&   anydb list [<filter\-regex>] [\-t <tag>] [\-m <mode>] [\-n \-N] [\-T <tpl>] [\-i] [flags]
\&   anydb list [<filter\-regex> | \-t <tag> ] [\-m <mode>] [\-nNif] [\-T <tpl>] [flags]
\&
\& Aliases:
\&   list, /, ls
\&   list, ls, /, find, search
\&
\& Flags:
\&   \-i, \-\-case\-insensitive   filter case insensitive
\&   \-h, \-\-help               help for list
\&   \-m, \-\-mode string        output format (table|wide|json|template),
\&                            wide is a verbose table. (default \*(Aqtable\*(Aq)
\&   \-m, \-\-mode string        output format (table|wide|json|template), wide is a verbose table. (default \*(Aqtable\*(Aq)
\&   \-n, \-\-no\-headers         omit headers in tables
\&   \-N, \-\-no\-human           do not translate to human readable values
\&   \-s, \-\-search\-fulltext    perform a full text search
\&   \-t, \-\-tags stringArray   tags, multiple allowed
\&   \-T, \-\-template string    go template for \*(Aq\-m template\*(Aq
\&   \-l, \-\-wide\-output        output mode: wide
anydb.pod (35 lines changed)
@@ -206,18 +206,18 @@ The B<list> subcommand displays a list of all database entries.
Usage:

    Usage:
      anydb list [<filter-regex>] [-t <tag>] [-m <mode>] [-n -N] [-T <tpl>] [-i] [flags]
      anydb list [<filter-regex> | -t <tag> ] [-m <mode>] [-nNif] [-T <tpl>] [flags]

    Aliases:
      list, /, ls
      list, ls, /, find, search

    Flags:
      -i, --case-insensitive   filter case insensitive
      -h, --help               help for list
      -m, --mode string        output format (table|wide|json|template),
                               wide is a verbose table. (default 'table')
      -m, --mode string        output format (table|wide|json|template), wide is a verbose table. (default 'table')
      -n, --no-headers         omit headers in tables
      -N, --no-human           do not translate to human readable values
      -s, --search-fulltext    perform a full text search
      -t, --tags stringArray   tags, multiple allowed
      -T, --template string    go template for '-m template'
      -l, --wide-output        output mode: wide
@@ -254,6 +254,10 @@ features.

If you want to search case insensitive, add the option C<-i>.

By default anydb only searches through the keys. If you want to search
through the values as well, then use the C<-s> option, which enables
full-text search.

You can - as with the B<get> command - use other output modes. The
default mode is "table". The "wide" mode is, as already mentioned, a
more detailed table. Also supported is "json" mode and "template"
@@ -323,7 +327,7 @@ the B<export> subcommand.
Usage:

    Usage:
      anydb export [-o <json filename>] [flags]
      anydb export -o <json filename> [flags]

    Aliases:
      export, dump, backup
@@ -332,12 +336,12 @@ Usage:
      -h, --help            help for export
      -o, --output string   output to file

The database dump is a JSON representation of the whole database and
will be printed to STDOUT by default. Redirect it to a file or use the
C<-o> option:
The database dump is a JSON representation of the whole database and
will be printed to the file specified with the C<-o> option. If you
specify "-" as the filename, it will be written to STDOUT.

    anydb export > dump.json
    anydb export -o dump.json
    anydb export -o - > dump.json

Please note that encrypted values will not be decrypted. This might
change in a future version of anydb.
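The "-o -" convention from the export examples above boils down to choosing os.Stdout instead of opening a file. A minimal sketch of that choice, with illustrative names rather than anydb's actual export code:

package main

import (
    "fmt"
    "os"
)

// openOutput picks the export destination: os.Stdout when the
// filename is "-", otherwise a newly created file (illustrative
// helper, not anydb's real API).
func openOutput(filename string) (*os.File, error) {
    if filename == "-" {
        return os.Stdout, nil
    }
    return os.Create(filename)
}

func main() {
    out, err := openOutput("-") // "-" means: write the dump to STDOUT
    if err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
    if out != os.Stdout {
        defer out.Close() // only close real files, never os.Stdout
    }

    fmt.Fprintln(out, `{"entries": []}`) // stands in for the JSON dump
}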
@@ -350,7 +354,7 @@ dump.
Usage:

    Usage:
      anydb import [<json file>] [flags]
      anydb import -i <json file> [flags]

    Aliases:
      import, restore
@@ -360,12 +364,13 @@ Usage:
      -h, --help               help for import
      -t, --tags stringArray   tags, multiple allowed

By default the C<import> subcommand reads the JSON contents from
STDIN. You might pipe the dump into it or use the option C<-r>:
The C<import> subcommand reads the JSON contents from
the file specified with the C<-i> option. If you specify "-" as the
filename, it will be read from STDIN.

    anydb import < dump.json
    anydb import -r dump.json
    cat dump.json | anydb import
    anydb import -i - < dump.json
    anydb import -i dump.json
    cat dump.json | anydb import -i -

If there is already a database, it will be saved by appending a
timestamp and a new database with the contents of the dump will be
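The last paragraph describes that an existing database is preserved by renaming it with a timestamp suffix before the dump is loaded. A rough sketch of that behavior, assuming a simple "<dbfile>.<timestamp>" naming scheme (the actual suffix format used by anydb may differ):

package main

import (
    "fmt"
    "os"
    "time"
)

// backupExisting moves an existing database file out of the way by
// appending a timestamp, so the import can start from a fresh file.
func backupExisting(dbfile string) error {
    if _, err := os.Stat(dbfile); os.IsNotExist(err) {
        return nil // nothing to back up
    }

    backup := fmt.Sprintf("%s.%s", dbfile, time.Now().Format("20060102-150405"))
    return os.Rename(dbfile, backup)
}

func main() {
    if err := backupExisting("test.db"); err != nil {
        fmt.Fprintln(os.Stderr, "backup failed:", err)
        os.Exit(1)
    }
}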
app/attr.go (16 lines changed)
@@ -35,16 +35,18 @@ type DbAttr struct {
    Binary   bool
}

// check if value is to be read from a file or stdin, setup preview
// text according to flags, lowercase key
func (attr *DbAttr) ParseKV() error {
    attr.Key = strings.ToLower(attr.Args[0])

    switch len(attr.Args) {
    case 1:
        // 1 arg = key + read from file or stdin
        attr.Key = attr.Args[0]
        if attr.File == "" {
            attr.File = "-"
        }
    case 2:
        attr.Key = attr.Args[0]
        attr.Val = []byte(attr.Args[1])

        if attr.Args[1] == "-" {
@@ -58,9 +60,12 @@ func (attr *DbAttr) ParseKV() error {
        }
    }

    if attr.Binary {
    switch {
    case attr.Binary:
        attr.Preview = "<binary-content>"
    case attr.Encrypted:
        attr.Preview = "<encrypted-content>"
    } else {
    default:
        if len(attr.Val) > MaxValueWidth {
            attr.Preview = string(attr.Val)[0:MaxValueWidth] + "..."
@@ -74,9 +79,6 @@ func (attr *DbAttr) ParseKV() error {
            attr.Preview = string(attr.Val)
        }
    }
    if attr.Encrypted {
        attr.Preview = "<encrypted-content>"
    }

    return nil
}
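The if/else chain around the preview text is replaced by a single switch, so binary content takes precedence over encrypted content, and only plain text is truncated. A condensed, self-contained sketch of that precedence (MaxValueWidth is the constant named in the diff; its value here is made up for the demo):

package main

import "fmt"

const MaxValueWidth = 10 // demo value only; anydb defines its own constant

// preview mirrors the switch introduced in ParseKV: binary wins over
// encrypted, and long plain text gets truncated with an ellipsis.
func preview(val []byte, binary, encrypted bool) string {
    switch {
    case binary:
        return "<binary-content>"
    case encrypted:
        return "<encrypted-content>"
    default:
        if len(val) > MaxValueWidth {
            return string(val[:MaxValueWidth]) + "..."
        }
        return string(val)
    }
}

func main() {
    fmt.Println(preview([]byte("short"), false, false))              // short
    fmt.Println(preview([]byte("quite a long value"), false, false)) // quite a lo...
    fmt.Println(preview([]byte{0xff, 0xfe}, true, true))             // <binary-content>
}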
app/db.go (87 lines changed)
@@ -85,7 +85,7 @@ func (db *DB) Close() error {
    return db.DB.Close()
}

func (db *DB) List(attr *DbAttr) (DbEntries, error) {
func (db *DB) List(attr *DbAttr, fulltext bool) (DbEntries, error) {
    if err := db.Open(); err != nil {
        return nil, err
    }
@@ -99,7 +99,6 @@ func (db *DB) List(attr *DbAttr) (DbEntries, error) {
    }

    err := db.DB.View(func(tx *bolt.Tx) error {

        root := tx.Bucket([]byte(db.Bucket))
        if root == nil {
            return nil
@@ -110,12 +109,19 @@ func (db *DB) List(attr *DbAttr) (DbEntries, error) {
            return nil
        }

        databucket := root.Bucket([]byte("data"))
        if databucket == nil {
            return fmt.Errorf("failed to retrieve data sub bucket")
        }

        err := bucket.ForEach(func(key, pbentry []byte) error {
            var entry DbEntry
            if err := proto.Unmarshal(pbentry, &entry); err != nil {
                return fmt.Errorf("failed to unmarshal from protobuf: %w", err)
            }

            entry.Value = databucket.Get([]byte(entry.Key)) // empty is ok

            var include bool

            switch {
@@ -124,6 +130,12 @@ func (db *DB) List(attr *DbAttr) (DbEntries, error) {
                filter.MatchString(strings.Join(entry.Tags, " ")) {
                    include = true
                }

                if !entry.Binary && !include && fulltext {
                    if filter.MatchString(string(entry.Value)) {
                        include = true
                    }
                }
            case len(attr.Tags) > 0:
                for _, search := range attr.Tags {
                    for _, tag := range entry.Tags {
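Reduced to its core, the decision the new fulltext parameter adds is: the regexp always runs against key and tags, and against the value only when full-text search is requested and the entry is not binary. A simplified sketch, not the exact List implementation:

package main

import (
    "fmt"
    "regexp"
    "strings"
)

// matches reports whether an entry should be listed: the regexp is
// always applied to key and tags, and to the value only when fulltext
// is requested and the entry is not binary.
func matches(filter *regexp.Regexp, key string, tags []string, value []byte, binary, fulltext bool) bool {
    if filter.MatchString(key) || filter.MatchString(strings.Join(tags, " ")) {
        return true
    }
    if fulltext && !binary {
        return filter.MatchString(string(value))
    }
    return false
}

func main() {
    re := regexp.MustCompile(`b.r`)
    fmt.Println(matches(re, "foo", nil, []byte("bar"), false, false)) // false: keys and tags only
    fmt.Println(matches(re, "foo", nil, []byte("bar"), false, true))  // true: -s also searches values
}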
@@ -261,35 +273,48 @@ func (db *DB) Get(attr *DbAttr) (*DbEntry, error) {
    entry := DbEntry{}

    err := db.DB.View(func(tx *bolt.Tx) error {
        // root bucket
        root := tx.Bucket([]byte(db.Bucket))
        if root == nil {
            return nil
        }

        // get meta sub bucket
        bucket := root.Bucket([]byte("meta"))
        if bucket == nil {
            return nil
        }

        // retrieve meta data
        pbentry := bucket.Get([]byte(attr.Key))
        if pbentry == nil {
            return fmt.Errorf("no such key: %s", attr.Key)
        }

        // put into struct
        if err := proto.Unmarshal(pbentry, &entry); err != nil {
            return fmt.Errorf("failed to unmarshal from protobuf: %w", err)
        }

        // get data sub bucket
        databucket := root.Bucket([]byte("data"))
        if databucket == nil {
            return fmt.Errorf("failed to retrieve data sub bucket")
        }

        entry.Value = databucket.Get([]byte(attr.Key))
        if len(entry.Value) == 0 {
        // retrieve actual data value
        value := databucket.Get([]byte(attr.Key))
        if len(value) == 0 {
            return fmt.Errorf("no such key: %s", attr.Key)
        }

        // we need to make a copy of it, otherwise we'll get an
        // "unexpected fault address" error
        vc := make([]byte, len(value))
        copy(vc, value)

        entry.Value = vc

        return nil
    })
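The copy is necessary because bbolt returns slices that point into the memory-mapped database file; they are only valid while the transaction is open, and keeping a reference after View returns can crash exactly like this. A minimal standalone sketch of the safe pattern (file and bucket names are illustrative):

package main

import (
    "fmt"
    "log"

    bolt "go.etcd.io/bbolt"
)

func main() {
    db, err := bolt.Open("example.db", 0600, nil)
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    var value []byte

    err = db.View(func(tx *bolt.Tx) error {
        bucket := tx.Bucket([]byte("data")) // illustrative bucket name
        if bucket == nil {
            return fmt.Errorf("no data bucket")
        }

        raw := bucket.Get([]byte("foo"))
        if raw == nil {
            return fmt.Errorf("no such key: foo")
        }

        // raw points into bbolt's mmap and must not outlive the
        // transaction, so copy it before returning.
        value = make([]byte, len(raw))
        copy(value, raw)

        return nil
    })
    if err != nil {
        log.Fatal(err)
    }

    fmt.Printf("%s\n", value) // safe: value is our own copy
}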
@@ -446,78 +471,50 @@ func (db *DB) Info() (*DbInfo, error) {
    return info, err
}

func (db *DB) Find(attr *DbAttr) (DbEntries, error) {
func (db *DB) Getall(attr *DbAttr) (DbEntries, error) {
    if err := db.Open(); err != nil {
        return nil, err
    }
    defer db.Close()

    var entries DbEntries
    var filter *regexp.Regexp

    if len(attr.Args) > 0 {
        filter = regexp.MustCompile(attr.Args[0])
    }

    err := db.DB.View(func(tx *bolt.Tx) error {

        // root bucket
        root := tx.Bucket([]byte(db.Bucket))
        if root == nil {
            return nil
        }

        // get meta sub bucket
        bucket := root.Bucket([]byte("meta"))
        if bucket == nil {
            return nil
        }

        // get data sub bucket
        databucket := root.Bucket([]byte("data"))
        if databucket == nil {
            return fmt.Errorf("failed to retrieve data sub bucket")
        }

        // iterate over all db entries in meta sub bucket
        err := bucket.ForEach(func(key, pbentry []byte) error {
            var entry DbEntry
            if err := proto.Unmarshal(pbentry, &entry); err != nil {
                return fmt.Errorf("failed to unmarshal from protobuf: %w", err)
            }

            entry.Value = databucket.Get([]byte(entry.Key))
            // retrieve the value from the data sub bucket
            value := databucket.Get([]byte(entry.Key))

            var include bool
            // we need to make a copy of it, otherwise we'll get an
            // "unexpected fault address" error
            vc := make([]byte, len(value))
            copy(vc, value)

            switch {
            case filter != nil:
                if filter.MatchString(entry.Key) ||
                    filter.MatchString(strings.Join(entry.Tags, " ")) {
                    include = true
                }

                if !entry.Binary && !include {
                    if filter.MatchString(string(entry.Value)) {
                        include = true
                    }
                }
            case len(attr.Tags) > 0:
                for _, search := range attr.Tags {
                    for _, tag := range entry.Tags {
                        if tag == search {
                            include = true
                            break
                        }
                    }

                    if include {
                        break
                    }
                }
            default:
                include = true
            }

            if include {
                entries = append(entries, entry)
            }
            entry.Value = vc
            entries = append(entries, entry)

            return nil
        })
@@ -42,6 +42,7 @@ type Config struct {
    NoHumanize      bool
    Encrypt         bool // one entry
    CaseInsensitive bool
    Fulltext        bool
    Listen          string
    Buckets         map[string]BucketConfig // config file only
@@ -187,18 +187,18 @@ SUBCOMMANDS
Usage:

    Usage:
      anydb list [<filter-regex>] [-t <tag>] [-m <mode>] [-n -N] [-T <tpl>] [-i] [flags]
      anydb list [<filter-regex> | -t <tag> ] [-m <mode>] [-nNif] [-T <tpl>] [flags]

    Aliases:
      list, /, ls
      list, ls, /, find, search

    Flags:
      -i, --case-insensitive   filter case insensitive
      -h, --help               help for list
      -m, --mode string        output format (table|wide|json|template),
                               wide is a verbose table. (default 'table')
      -m, --mode string        output format (table|wide|json|template), wide is a verbose table. (default 'table')
      -n, --no-headers         omit headers in tables
      -N, --no-human           do not translate to human readable values
      -s, --search-fulltext    perform a full text search
      -t, --tags stringArray   tags, multiple allowed
      -T, --template string    go template for '-m template'
      -l, --wide-output        output mode: wide
cmd/crud.go (60 lines changed)
@@ -182,7 +182,7 @@ func List(conf *cfg.Config) *cobra.Command {
    )

    var cmd = &cobra.Command{
        Use:   "list [<filter-regex>] [-m <mode>] [-n -N] [-T <tpl>] [-i]",
        Use:   "list [<filter-regex> | -t <tag> ] [-m <mode>] [-nNif] [-T <tpl>]",
        Short: "List database contents",
        Long:  `List database contents`,
        RunE: func(cmd *cobra.Command, args []string) error {
@@ -206,59 +206,7 @@ func List(conf *cfg.Config) *cobra.Command {
                conf.Mode = "wide"
            }

            entries, err := conf.DB.List(&attr)
            if err != nil {
                return err
            }

            return output.List(os.Stdout, conf, entries)
        },
    }

    cmd.PersistentFlags().StringVarP(&conf.Mode, "mode", "m", "", "output format (table|wide|json|template), wide is a verbose table. (default 'table')")
    cmd.PersistentFlags().StringVarP(&conf.Template, "template", "T", "", "go template for '-m template'")
    cmd.PersistentFlags().BoolVarP(&wide, "wide-output", "l", false, "output mode: wide")
    cmd.PersistentFlags().BoolVarP(&conf.NoHeaders, "no-headers", "n", false, "omit headers in tables")
    cmd.PersistentFlags().BoolVarP(&conf.NoHumanize, "no-human", "N", false, "do not translate to human readable values")
    cmd.PersistentFlags().BoolVarP(&conf.CaseInsensitive, "case-insensitive", "i", false, "filter case insensitive")

    cmd.Aliases = append(cmd.Aliases, "ls")

    return cmd
}
func Find(conf *cfg.Config) *cobra.Command {
    var (
        attr app.DbAttr
        wide bool
    )

    var cmd = &cobra.Command{
        Use:   "find <filter-regex> | -t <tag> [-m <mode>] [-n -N] [-T <tpl>] [-i]",
        Short: "Find database contents",
        Long:  `Find database contents`,
        RunE: func(cmd *cobra.Command, args []string) error {
            // errors at this stage do not cause the usage to be shown
            cmd.SilenceUsage = true

            if len(args) > 0 {
                if conf.CaseInsensitive {
                    attr.Args = []string{"(?i)" + args[0]}
                } else {
                    attr.Args = args
                }
            }

            // turn comma list into slice, if needed
            if len(attr.Tags) == 1 && strings.Contains(attr.Tags[0], ",") {
                attr.Tags = strings.Split(attr.Tags[0], ",")
            }

            if wide {
                conf.Mode = "wide"
            }

            entries, err := conf.DB.Find(&attr)
            entries, err := conf.DB.List(&attr, conf.Fulltext)
            if err != nil {
                return err
            }
@@ -273,10 +221,12 @@ func Find(conf *cfg.Config) *cobra.Command {
    cmd.PersistentFlags().BoolVarP(&conf.NoHeaders, "no-headers", "n", false, "omit headers in tables")
    cmd.PersistentFlags().BoolVarP(&conf.NoHumanize, "no-human", "N", false, "do not translate to human readable values")
    cmd.PersistentFlags().BoolVarP(&conf.CaseInsensitive, "case-insensitive", "i", false, "filter case insensitive")
    cmd.PersistentFlags().BoolVarP(&conf.Fulltext, "search-fulltext", "s", false, "perform a full text search")
    cmd.PersistentFlags().StringArrayVarP(&attr.Tags, "tags", "t", nil, "tags, multiple allowed")

    cmd.Aliases = append(cmd.Aliases, "ls")
    cmd.Aliases = append(cmd.Aliases, "/")
    cmd.Aliases = append(cmd.Aliases, "f")
    cmd.Aliases = append(cmd.Aliases, "find")
    cmd.Aliases = append(cmd.Aliases, "search")

    return cmd
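The -i flag works by prepending Go's inline regexp flag "(?i)" to the user-supplied pattern, as shown above. A tiny illustration of that trick:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    pattern := "mucha"

    // Prepending "(?i)" switches the whole expression to
    // case-insensitive matching, which is what the -i flag does above.
    re := regexp.MustCompile("(?i)" + pattern)

    fmt.Println(re.MatchString("MUCHA")) // true
    fmt.Println(re.MatchString("mucha")) // true

    // Without the prefix the same pattern is case sensitive.
    fmt.Println(regexp.MustCompile(pattern).MatchString("MUCHA")) // false
}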
cmd/extra.go (16 lines changed)
@@ -37,16 +37,16 @@ func Export(conf *cfg.Config) *cobra.Command {
    )

    var cmd = &cobra.Command{
        Use:   "export [-o <json filename>]",
        Short: "Export database to json",
        Long:  `Export database to json`,
        Use:   "export -o <json filename>",
        Short: "Export database to json file",
        Long:  `Export database to json file`,
        RunE: func(cmd *cobra.Command, args []string) error {
            // errors at this stage do not cause the usage to be shown
            cmd.SilenceUsage = true

            conf.Mode = "json"

            entries, err := conf.DB.List(&attr)
            entries, err := conf.DB.Getall(&attr)
            if err != nil {
                return err
            }
@@ -55,7 +55,8 @@ func Export(conf *cfg.Config) *cobra.Command {
        },
    }

    cmd.PersistentFlags().StringVarP(&attr.File, "output", "o", "", "output to file")
    cmd.PersistentFlags().StringVarP(&attr.File, "output-file", "o", "", "filename or - for STDIN")
    cmd.MarkPersistentFlagRequired("output-file")

    cmd.Aliases = append(cmd.Aliases, "dump")
    cmd.Aliases = append(cmd.Aliases, "backup")
@@ -69,7 +70,7 @@ func Import(conf *cfg.Config) *cobra.Command {
    )

    var cmd = &cobra.Command{
        Use:   "import [<json file>]",
        Use:   "import -i <json file>",
        Short: "Import database dump",
        Long:  `Import database dump`,
        RunE: func(cmd *cobra.Command, args []string) error {
@@ -86,7 +87,8 @@ func Import(conf *cfg.Config) *cobra.Command {
        },
    }

    cmd.PersistentFlags().StringVarP(&attr.File, "file", "r", "", "Filename or - for STDIN")
    cmd.PersistentFlags().StringVarP(&attr.File, "import-file", "i", "", "filename or - for STDIN")
    cmd.MarkPersistentFlagRequired("import-file")
    cmd.PersistentFlags().StringArrayVarP(&attr.Tags, "tags", "t", nil, "tags, multiple allowed")

    cmd.Aliases = append(cmd.Aliases, "restore")
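With -o and -i now mandatory, both commands mark their file flag as required so cobra itself rejects an invocation that omits it. A self-contained sketch of that pattern (the command and flag names here are illustrative, not anydb's):

package main

import (
    "fmt"
    "os"

    "github.com/spf13/cobra"
)

func main() {
    var outfile string

    cmd := &cobra.Command{
        Use: "demo -o <file>",
        RunE: func(cmd *cobra.Command, args []string) error {
            fmt.Println("writing to", outfile)
            return nil
        },
    }

    cmd.PersistentFlags().StringVarP(&outfile, "output-file", "o", "", "filename or - for STDOUT")

    // cobra now fails with an error if -o/--output-file is missing.
    if err := cmd.MarkPersistentFlagRequired("output-file"); err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }

    if err := cmd.Execute(); err != nil {
        os.Exit(1)
    }
}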
@@ -122,7 +122,6 @@ func Execute() {
    // CRUD
    rootCmd.AddCommand(Set(&conf))
    rootCmd.AddCommand(List(&conf))
    rootCmd.AddCommand(Find(&conf))
    rootCmd.AddCommand(Get(&conf))
    rootCmd.AddCommand(Del(&conf))
@@ -75,7 +75,6 @@ func WriteFile(writer io.Writer, conf *cfg.Config, attr *app.DbAttr, entry *app.
    if attr.File == "-" {
        fileHandle = os.Stdout
    } else {

        fd, err := os.OpenFile(attr.File, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0755)
        if err != nil {
            return fmt.Errorf("failed to open file %s for writing: %w", attr.File, err)
@@ -85,10 +84,10 @@ func WriteFile(writer io.Writer, conf *cfg.Config, attr *app.DbAttr, entry *app.
        fileHandle = fd
    }

    if entry.Binary {
        // binary file content
        _, err = fileHandle.Write(entry.Value)
    // actually write file content
    _, err = fileHandle.Write(entry.Value)

    if !entry.Binary {
        if entry.Value[entry.Size-1] != '\n' {
            // always add a terminal newline
            _, err = fileHandle.Write([]byte{'\n'})
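The rewritten WriteFile writes the stored value unconditionally and appends a trailing newline only for non-binary content that does not already end with one. A small sketch of that rule (the io.Writer stands in for the opened file handle):

package main

import (
    "bytes"
    "fmt"
    "io"
    "os"
)

// writeValue writes val to w and, for text content, makes sure the
// output ends with a trailing newline.
func writeValue(w io.Writer, val []byte, binary bool) error {
    if _, err := w.Write(val); err != nil {
        return err
    }

    if !binary && len(val) > 0 && val[len(val)-1] != '\n' {
        _, err := w.Write([]byte{'\n'})
        return err
    }

    return nil
}

func main() {
    var buf bytes.Buffer
    if err := writeValue(&buf, []byte("alpha"), false); err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
    fmt.Printf("%d bytes written\n", buf.Len()) // 6: "alpha" plus the added newline
}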
@@ -54,7 +54,7 @@ func RestList(c *fiber.Ctx, conf *cfg.Config) error {
    }

    // get list
    entries, err := conf.DB.List(attr)
    entries, err := conf.DB.List(attr, false)
    if err != nil {
        return JsonStatus(c, fiber.StatusForbidden,
            "Unable to list keys: "+err.Error())
@@ -32,3 +32,20 @@ stdout 50
# look if it's inside the db
exec anydb -f test.db ls
stdout datum.*binary-content

# do the same thing with text content, start with a new text entry
exec anydb -f test.db set feed alpha

# which we write to a file
exec anydb -f test.db get feed -o out2.txt
exists out2.txt

# check if it's filled (5 bytes + newline)
exec ls -l out2.txt
stdout 6

# compare content
exec cat out2.txt
stdout alpha
@@ -23,10 +23,10 @@ exec anydb -f test.db export -o backup.json
stdout 'database contents exported to backup.json'

# import into new db
exec anydb -f new.db import -r backup.json
exec anydb -f new.db import -i backup.json
stdout 'imported.*entries'

# check contents
exec anydb -f new.db list
exec anydb -f new.db list bar -s
stdout foo.*bar
@@ -37,12 +37,12 @@ exec anydb -f test.db list -t flower
! stdout bar

# list with filter
exec anydb -f test.db list b.r
exec anydb -f test.db list b.r -s
stdout bar

# list with -i filter
exec anydb -f test.db list -i mucha
stdout MUCHA
exec anydb -f test.db list -is mucha
stdout mucha

# get single entry
exec anydb -f test.db get color