mirror of https://codeberg.org/scip/Data-Interactive-Inspect.git (synced 2025-12-16 20:21:02 +01:00)
moved to codeberg
@@ -1,23 +0,0 @@
matrix:
  include:
    # - image: perl:5.22.4-stretch
    # - image: perl:5.36.0-slim-bullseye
    # - image: perl:5.38.0-slim-bookworm
    # - image: perl:5.40.0-slim-bookworm
    # - image: perl:5.42.0-slim-bookworm
    - image: perl:5.43.5-slim-bookworm

steps:
  test:
    when:
      event: [push]
    image: ${image}
    commands:
      - apt-get update -y
      - apt-get install -y gcc
      - cpanm -n YAML
      - cpanm -n File::Temp
      - cpanm -n Term::ReadLine
      - perl Makefile.PL
      - make
      - make test
@@ -1,54 +0,0 @@
#!/bin/bash

# This is my own simple codeberg generic releaser. It takes the
# binaries to be uploaded as arguments and takes every other arg from
# the environment. Works on tags or normal commits (push); tags must start with v.


set -e

die() {
  echo "$*"
  exit 1
}

if test -z "$DEPLOY_TOKEN"; then
  die "token DEPLOY_TOKEN not set"
fi

git fetch --all

# determine current tag or commit hash
version="$CI_COMMIT_TAG"
previous=""
log=""
if test -z "$version"; then
  version="${CI_COMMIT_SHA:0:6}"
  log=$(git log -1 --oneline)
else
  previous=$(git tag -l | grep -E "^v" | tac | grep -A1 "$version" | tail -1)
  log=$(git log -1 --oneline "${previous}..${version}" | sed 's|^|- |g')
fi

# release body
printf "# Changes\n\n %s\n" "$log" > body.txt

# create the release
https --ignore-stdin --check-status -b -A bearer -a "$DEPLOY_TOKEN" POST \
  "https://codeberg.org/api/v1/repos/${CI_REPO_OWNER}/${CI_REPO_NAME}/releases" \
  tag_name="$version" name="Release $version" body=@body.txt > release.json

# we need the id to upload files
ID=$(jq -r .id < release.json)

if test -z "$ID"; then
  cat release.json
  die "failed to create release"
fi

# actually upload
for file in "$@"; do
  https --ignore-stdin --check-status -A bearer -a "$DEPLOY_TOKEN" -f POST \
    "https://codeberg.org/api/v1/repos/${CI_REPO_OWNER}/${CI_REPO_NAME}/releases/$ID/assets" \
    "name=${file}" "attachment@${file}"
done
@@ -1,23 +0,0 @@
# build release

steps:
  compile:
    when:
      event: [tag]
    image: perl:5.43.5-slim-bookworm
    commands:
      - perl Makefile.PL
      - make
      - make dist

  release:
    image: alpine:latest
    when:
      event: [tag]
    environment:
      DEPLOY_TOKEN:
        from_secret: DEPLOY_TOKEN
    commands:
      - apk update
      - apk add --no-cache bash httpie jq git
      - .woodpecker/release.sh ${CI_REPO_NAME}-$CI_COMMIT_TAG.tar.gz
Changelog (34 lines)
@@ -1,34 +0,0 @@
0.07
    added |more support to the list and show commands.

0.06
    fixed a crash which happened when the 'list' command was
    executed inside an array; we now divert to 'show' in such
    a case.

0.05
    signal value matches in search with quotes

0.04
    fixed drop/append/pop/shift commands to work inside
    arrays as well.

    fixed the quit command (it literally called 'exit')

0.03
    re-factored error handling a little

    added 'search' command (alias: /<regex>)

    added [de]serialize methods, which can be overwritten;
    by default we use YAML for serialization, but this
    can be changed.

0.02
    interactive command errors now lead to an abort of
    inspect() if reading from STDIN.

    fixed "get struct"

0.01
    initial commit
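The 0.03 entry above notes that the [de]serialize hooks can be overridden. A minimal sketch of what that looks like, assuming the constructor accepts serialize and deserialize callbacks the way sample/bin/inspect.pl further down in this commit passes them; the toy structure and the Data::Dumper round-trip are only for illustration:

    use strict;
    use warnings;
    use Data::Dumper;
    use Data::Interactive::Inspect;

    my $shell = Data::Interactive::Inspect->new(
      struct    => { answer => 42 },        # toy structure, made up for this example
      serialize => sub {                    # structure -> text handed to the editor
        my $text = Dumper(shift);
        $text =~ s/^\s*\$\w+\s*=\s*//;      # drop Data::Dumper's leading '$VAR1 ='
        return $text;
      },
      deserialize => sub {                  # edited text -> structure
        my $edited = shift;
        my $struct;
        eval "\$struct = $edited";          # same eval trick as the sample script below
        return $struct;
      },
    );

    my $modified = $shell->inspect();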
Inspect.pm (1164 lines): file diff suppressed because it is too large
MANIFEST (16 lines)
@@ -1,16 +0,0 @@
MANIFEST
Makefile.PL
Inspect.pm
README
Changelog
META.yml                    Module meta-data (added by MakeMaker)
META.json                   Module meta-data (added by MakeMaker)
t/run.t
sample/bin/inspect.pl
sample/formats/sample.csv
sample/formats/sample.yml
sample/formats/sample.ini
sample/formats/sample.xml
sample/formats/sample.json
sample/formats/sample.conf
sample/formats/sample.perl
Makefile.PL (32 lines)
@@ -1,32 +0,0 @@
#
# Makefile.PL - build file for Data::Interactive::Inspect
#
# Copyright (c) 2007-2014 T. v.Dein <tom |AT| cpan.org>.
# All Rights Reserved. Std. disclaimer applies.
# Artistic License, same as perl itself. Have fun.
#

use ExtUtils::MakeMaker;

WriteMakefile(
  NAME         => 'Data::Interactive::Inspect',
  VERSION_FROM => 'Inspect.pm',
  ABSTRACT     => 'Inspect and manipulate perl data structures interactively',
  LICENSE      => 'perl',
  AUTHOR       => 'Thomas v.Dein <tlinden@cpan.org>',
  clean        => { FILES => '*~ */*~' },
  PREREQ_PM    => {
    'YAML'           => 0,
    'File::Temp'     => 0,
    'Term::ReadLine' => 0,
  },
  dist         => { COMPRESS => 'gzip -9f', SUFFIX => 'gz', },
  test         => { TESTS => 't/*.t' },
  'META_MERGE' => {
    resources => {
      repository => 'https://codeberg.org/scip/Data-Interactive-Inspect',
    },
  },

);
README (55 lines)
@@ -1,55 +0,0 @@
NAME
    Data::Interactive::Inspect - Inspect and manipulate perl data structures interactively

SYNOPSIS
     use Data::Interactive::Inspect;
     my $data = foo(); # get a hash ref from somewhere

     # new shell object, the simple way
     my $shell = Data::Interactive::Inspect->new($data);

     # or
     my $shell = Data::Interactive::Inspect->new(
       struct   => $data,
       name     => 'verkehrswege',
       begin    => sub { .. },
       commit   => sub { .. },
       rollback => sub { .. },
       editor   => 'emacs',
       more     => 'less'
     );

     $data = $shell->inspect(); # opens a shell and returns modified hash ref on quit

DESCRIPTION
    This module provides an interactive shell which can be used to inspect
    and modify a perl data structure.

INSTALLATION

    to install, type:
     perl Makefile.PL
     make
     make test
     make install

    to read the complete documentation, type:
     perldoc Data::Interactive::Inspect

AUTHOR
    T.v.Dein <tlinden@cpan.org>

BUGS
    Report bugs to
    http://rt.cpan.org/NoAuth/ReportBug.html?Queue=Data::Interactive::Inspect

COPYRIGHT
    Copyright (c) 2015-2017 by T.v.Dein <tlinden@cpan.org>. All rights
    reserved.

LICENSE
    This program is free software; you can redistribute it and/or modify it
    under the same terms as Perl itself.

VERSION
    This is the manual page for Data::Interactive::Inspect Version 0.07.
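To make the SYNOPSIS above concrete, here is a small self-contained sketch; the data and the hook bodies are invented for illustration, and only the constructor arguments shown in the README are assumed:

    #!/usr/bin/perl
    use strict;
    use warnings;
    use Data::Interactive::Inspect;

    # toy structure to browse interactively (made up for this example)
    my $data = {
      roads => [ 'A1', 'B27' ],
      meta  => { maintainer => 'tom', revision => 3 },
    };

    my $shell = Data::Interactive::Inspect->new(
      struct   => $data,
      name     => 'verkehrswege',
      begin    => sub { print "begin called\n" },     # placeholder hook bodies
      commit   => sub { print "commit called\n" },
      rollback => sub { print "rollback called\n" },
    );

    # opens the interactive shell; returns the (possibly modified) structure on quit
    $data = $shell->inspect();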
README.md (new file, 3 lines)
@@ -0,0 +1,3 @@
> [!CAUTION]
> This software is now being maintained on [Codeberg](https://codeberg.org/scip/Data-Interactive-Inspect/).

@@ -1,142 +0,0 @@
#!/usr/bin/perl -w

# Copyright (c) 2015-2017 T.v.Dein <tlinden |AT| cpan.org>. All
# Rights Reserved. Std. disclaimer applies. Artistic License, same as
# perl itself. Have fun.

# This script can be used to interactively browse perl data
# structures, which it reads from STDIN or a file. You can use it, for
# instance, by printing some data structure in your application using
# Data::Dumper and piping this output into this script's input.

# However, if the argument is a file and has a known suffix, the
# script automatically converts it into a perl data structure and
# drops you into an interactive shell on that. That way you can
# interactively browse XML or YAML files. Supported suffixes are: xml,
# json, csv, yml, ini, conf.

# If the data structure evaluates, you'll be dropped into an interactive
# prompt. Enter '?' to get help.

# The script also demonstrates how to use different serializers.

use Data::Interactive::Inspect;
use Data::Dumper;
use YAML; # needs to be installed anyway
use strict;


sub usage {
  print STDERR qq(
Usage: $0 <file|-h>

Reads a perl data structure from <file>. If <file> is -, read from
STDIN. Evaluates it and starts an interactive Data::Interactive::Inspect
shell, which can be used to analyze the data.
);
  exit 1;
}


my $arg = shift;
my $perl = 1;
my ($code);

if (! $arg) {
  usage;
}

if ($arg ne '-' && ! -e $arg) {
  print STDERR "$arg not found or not readable!\n";
  usage;
}

if ($arg eq '-') {
  loaddumper(join '', <>);
}
else {
  if ($arg =~ /\.xml$/i) {
    eval { require XML::Simple; };
    die "Sorry, XML::Simple is not installed, XML not supported!\n" if($@);
    my $xml = XML::Simple->new;
    $code = $xml->XMLin($arg);
    $perl = 0;
  }
  elsif ($arg =~ /\.(yaml|yml)$/i) {
    $code = YAML::LoadFile($arg);
    $perl = 0;
  }
  elsif ($arg =~ /\.ini$/i) {
    eval { require Config::INI::Reader; };
    die "Sorry, Config::INI is not installed, INI not supported!\n" if($@);
    $code = Config::INI::Reader->read_file($arg);
    $perl = 0;
  }
  elsif ($arg =~ /\.conf$/i) {
    eval { require Config::General; };
    die "Sorry, Config::General is not installed, CONF not supported!\n" if($@);
    %{$code} = Config::General::ParseConfig(-ConfigFile => $arg, -InterPolateVars => 1, -UTF8 => 1);
    $perl = 0;
  }
  elsif ($arg =~ /\.json$/i) {
    eval { require JSON; };
    die "Sorry, JSON is not installed, JSON not supported!\n" if($@);
    my $json = JSON->new->utf8();
    $code = $json->decode(slurp($arg));
  }
  elsif ($arg =~ /\.csv$/i) {
    eval { require Text::CSV::Slurp; };
    die "Sorry, Text::CSV::Slurp is not installed, CSV not supported!\n" if($@);
    $code = Text::CSV::Slurp->load(file => $arg);
  }
  else {
    loaddumper(slurp($arg));
  }
}

if ($@) {
  print STDERR "Parser or Eval error: $@!\n";
  exit 1;
}
else {
  if ($perl) {
    Data::Interactive::Inspect->new(
      struct    => $code,
      serialize => sub {
        my $db = shift;
        my $c = Dumper($db);
        $c =~ s/^\s*\$[a-zA-Z0-9_]*\s*=\s*/ /;
        return $c;
      },
      deserialize => sub {
        my $code = shift;
        $code = "\$code = $code";
        eval $code;
        return $code;
      },
    )->inspect;
  }
  else {
    # no perl struct, stay with default
    Data::Interactive::Inspect->new(struct => $code)->inspect;
  }
}


sub slurp {
  my $arg = shift;
  open CODE, "<$arg" or die "Could not open data file $arg: $!\n";
  my $code = join '', <CODE>;
  close CODE;
  return $code;
}

sub loaddumper {
  my $dump = shift;
  $dump =~ s/^\s*\$[a-zA-Z0-9_]*\s*=\s*/\$code = /;
  eval $dump; # fills $code declared above
}
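For illustration, two hypothetical invocations of the script described above, using the sample files listed in the MANIFEST earlier in this commit (exact paths depend on where the distribution is unpacked):

    perl sample/bin/inspect.pl sample/formats/sample.yml
    perl -MData::Dumper -e 'print Dumper(\%INC)' | perl sample/bin/inspect.pl -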
@@ -1,45 +0,0 @@
<cops>
  <officer randall>
    name stein
    age  25
  </officer>
  <officer gordon>
    name bird
    age  31
  </officer>
</cops>
domain nix.to
domain b0fh.org
domain foo.bar
message <<EOF
yes. we are not here. you
can reach us somewhere in
outerspace.
EOF
nocomment <<EOF
Comments in a here-doc should not be treated as comments.
/* So this should appear in the output */
EOF
command = ssh -f -g orpheus.0x49.org \
          -l azrael -L:34777samir.okir.da.ru:22 \
          -L:31773:shane.sol1.rocket.de:22 \
          'exec sleep 99999990'
user   = tom
passwd = sakkra
<db>
  host = blah.blubber
</db>

<beta>
  user1 hans
</beta>

<beta>
  user2 max
</beta>

quoted = "this one contains whitespace at the end "

quotedwithquotes = " holy crap, it contains \"masked quotes\" and 'single quotes' "

@@ -1 +0,0 @@
policyID,statecode,county,eq_site_limit,hu_site_limit,fl_site_limit,fr_site_limit,tiv_2011,tiv_2012,eq_site_deductible,hu_site_deductible,fl_site_deductible,fr_site_deductible,point_latitude,point_longitude,line,construction,point_granularity
@@ -1,263 +0,0 @@

[Settings]

;======================================================================

; Set detailed log for additional debugging info

DetailedLog=1

RunStatus=1

StatusPort=6090

StatusRefresh=10

Archive=1

; Sets the location of the MV_FTP log file

LogFile=/opt/ecs/mvuser/MV_IPTel/log/MV_IPTel.log

;======================================================================

Version=0.9 Build 4 Created July 11 2004 14:00

ServerName=Unknown


[FTP]

;======================================================================

; set the FTP server active

RunFTP=1

; defines the FTP control port

FTPPort=21

; defines the FTP data port

FTPDataPort=20


FTPDir=/opt/ecs/mvuser/MV_IPTel/data/FTPdata

; FTP Timeout (secs)

FTP_TimeOut=5

; Enable SuperUser

EnableSU=1

; set the SuperUser Name

SUUserName=mvuser

; set the SuperUser Password

SUPassword=Avaya

;

;======================================================================

[FTPS]

;======================================================================

; set the FTPS server active

RunFTPS=0

; defines the FTP control port

FTPPort=990

; defines the FTP data port

FTPDataPort=889

;======================================================================


[TFTP]

;======================================================================

; set the Trivial FTP server active

RunTrivialFTP=1

; defines the Trivial FTP port

TrivialFTPPort=69


TFTPDir=/opt/ecs/mvuser/MV_IPTel/data/TFTPdata

;======================================================================

[HTTP]

;======================================================================

; set the HTTP download server active

RunHTTP=1

; defines the HTTP download port

HTTPPort=81

; Sets the location of the HTTP data directory for downloads

HTTPDir=/opt/ecs/mvuser/MV_IPTel/data/HTTPdata

;======================================================================

[HTTPS]

;======================================================================

; set the HTTPS download server active

RunHTTPS=0

; defines the HTTPS download port

HTTPSPort=411

; Sets the location of the HTTPS data directory for downloads

HTTPSDir=/opt/ecs/mvuser/MV_IPTel/data/HTTPSdata

; Sets the location of the CertFile

CertFile=/opt/ecs/mvuser/MV_IPTel/certs/IPTelcert.pem

; Sets the location of the KeyFile

KeyFile=/opt/ecs/mvuser/MV_IPTel/certs/IPTelkey.pem

; Use Client Authorization

ClientAuth=0

; narrow config for Avaya IPTel (TLSV1 using RSA_NULL_SHA)

IPTel=0

; sets the SSL variants if not Avaya IPtel (IPTel=0)

SSLV2=0

SSLV3=0

TLSV1=1

UseProxy=0

ProxyAddr=simon.avaya.com

ProxyPort=9000

;======================================================================

[BACKUP_SERVERS]

;======================================================================

FileServer=0

RequestUpdates=0

RequestBackup=0

; Enable use of the Primary file server

UsePrimarySvr=0

; Primary file server IP address ( or resolvable DNS)

PrimaryIP=192.168.0.13

; Enable use of the Secondary file server

UseSecondarySvr=0

; Secondary file server IP address ( or resolvable DNS)

SecondaryIP=192.168.0.10

; Sets the update interval for Backups & updates ; 1 = min; 2

UpdateInterval=2

;Send FTP backup to the customer sever

CustomFTP=1

; FTP backup directory customer sever

CustomFTPDir=home/mvuser/backup

; FTP backup directory user login name

CustomFTPUName=tom

; FTP backup directory user password

CustomFTPPwd=jerry

; Enable CDR Backup - enable=1 on both File Server & Client

CDRBackup=0

; Enable BCMS Backup - enable=1 on both File Server & Client

BCMSBackup=0

; Retain CDR / BCMS copy data for x days ( Receiver always + 1

RetainDays=7.0

;======================================================================


[SNMP]

;================================================================

;

; Validate FTP store with SNMP check

UseSNMP=1

; In case the SNMPGET syntax changes you can redefine the commands

; Uncomment the relevant line to override the internal command

;the syntax is "Command + IPADDR + ExtObj + Awk

; the IPADRR is derived from the connection

; Note there are relavant spaces at the start/end of the component

;Command=/usr/bin/snmpget

;Params= -v2c -cpublic

;ExtObject=.1.3.6.1.4.1.6889.2.69.1.4.9.0

;TypeObject=.1.3.6.1.4.1.6889.2.69.1.1.2.0

;Awk=| awk -F \" '' {print $2 } ''

;================================================================
@@ -1,49 +0,0 @@
{
   "abstract" : "Inspect and manipulate perl data structures interactively",
   "author" : [
      "Thomas v.Dein <tlinden@cpan.org>"
   ],
   "dynamic_config" : 0,
   "generated_by" : "ExtUtils::MakeMaker version 7.0401, CPAN::Meta::Converter version 2.150005",
   "license" : [
      "perl_5"
   ],
   "meta-spec" : {
      "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec",
      "version" : "2"
   },
   "name" : "Data-Interactive-Inspect",
   "no_index" : {
      "directory" : [
         "t",
         "inc"
      ]
   },
   "prereqs" : {
      "build" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0"
         }
      },
      "configure" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0"
         }
      },
      "runtime" : {
         "requires" : {
            "File::Temp" : "0",
            "Term::ReadLine" : "0",
            "YAML" : "0"
         }
      }
   },
   "release_status" : "stable",
   "resources" : {
      "repository" : {
         "url" : "https://codeberg.org/scip/Data-Interactive-Inspect"
      }
   },
   "version" : 0.06,
   "x_serialization_backend" : "JSON::PP version 2.27300"
}
@@ -1,29 +0,0 @@
$VAR1 = {
  'food' => {
    'Berry-Berry Belgian Waffles' => {
      'price' => '$8.95',
      'calories' => '900',
      'description' => 'Light Belgian waffles covered with an assortment of fresh berries and whipped cream'
    },
    'Homestyle Breakfast' => {
      'price' => '$6.95',
      'calories' => '950',
      'description' => 'Two eggs, bacon or sausage, toast, and our ever-popular hash browns'
    },
    'Belgian Waffles' => {
      'calories' => '650',
      'price' => '$5.95',
      'description' => 'Two of our famous Belgian Waffles with plenty of real maple syrup'
    },
    'French Toast' => {
      'price' => '$4.50',
      'calories' => '600',
      'description' => 'Thick slices made from our homemade sourdough bread'
    },
    'Strawberry Belgian Waffles' => {
      'price' => '$7.95',
      'calories' => '900',
      'description' => 'Light Belgian waffles covered with strawberries and whipped cream'
    }
  }
};
@@ -1,33 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<breakfast_menu>
  <food>
    <name>Belgian Waffles</name>
    <price>$5.95</price>
    <description>Two of our famous Belgian Waffles with plenty of real maple syrup</description>
    <calories>650</calories>
  </food>
  <food>
    <name>Strawberry Belgian Waffles</name>
    <price>$7.95</price>
    <description>Light Belgian waffles covered with strawberries and whipped cream</description>
    <calories>900</calories>
  </food>
  <food>
    <name>Berry-Berry Belgian Waffles</name>
    <price>$8.95</price>
    <description>Light Belgian waffles covered with an assortment of fresh berries and whipped cream</description>
    <calories>900</calories>
  </food>
  <food>
    <name>French Toast</name>
    <price>$4.50</price>
    <description>Thick slices made from our homemade sourdough bread</description>
    <calories>600</calories>
  </food>
  <food>
    <name>Homestyle Breakfast</name>
    <price>$6.95</price>
    <description>Two eggs, bacon or sausage, toast, and our ever-popular hash browns</description>
    <calories>950</calories>
  </food>
</breakfast_menu>
@@ -1,116 +0,0 @@
---
cloud:
  # Cloud Name: The cloud name must not contain spaces or special
  # characters. The name is used for the OpenStack region name. The
  # default value for the ICOS Hybrid cloud is RegionTwo.
  name: RegionTwo
  # Cloud Description
  description: ICOS Hybrid - Controller + N Compute Topology - x86 KVM
  # Cloud Administrator (admin) User's Password. For the ICOS Hybrid
  # cloud, the cloud administrator user's password is contained in the
  # password JSON file. It can be overridden here if needed.
  password: ~
  # Cloud Password JSON File. This is required for the ICOS Hybrid cloud.
  # Copy the example password file for the ICOS Hybrid cloud located in
  # the ICM chef-repo to the deployment folder, rename it, and set the
  # password values in the file for the on premise admin user and services
  # required for the ICOS Hybrid cloud. Enter the fully qualified path and
  # file name of that password file here.
  password_file: YOUR_PASSWORD_FILE
  # Cloud Database Service Type: db2, mariadb or mysql
  database_service_type: db2
  # Cloud Messaging Service Type: rabbitmq or qpid
  messaging_service_type: rabbitmq
  # (Optional) Cloud SSL certificate chain file. This is not required.
  # If not specified, the cacert_file will be created automatically,
  # and placed on the Chef server. Only specify this value if you are
  # providing your own cloud SSL certificate chain file, and controller
  # SSL certificate and private key files for FIPS compliance. If specified,
  # it must include all certificate chains required in the hybrid environment
  # including any needed in the off-premise region.
  # cacert_file: YOUR_CLOUD_CACERT_FILE_LOCATION
  # Cloud Features: The cloud features to be enabled or disabled. The
  # icos_hybrid_cloud, and fips_compliance features are enabled, by default. The
  # self_service_portal feature must be disabled for the ICOS Hybrid cloud.
  features:
    self_service_portal: disabled
    platform_resource_scheduler: enabled
    icos_hybrid_cloud: enabled
    fips_compliance: enabled
  # Cloud Topology: References the node name(s) for each role
  # within the cloud's topology. A self_service_portal_node_name
  # should not be specified for the ICOS Hybrid cloud.
  topology:
    database_node_name: controller
    controller_node_name: controller
    self_service_portal_node_name: ~
    kvm_compute_node_names: kvm_compute

# ================================================================
# Environment Information
# ================================================================
environment:
  base: example-ibm-os-single-controller-n-compute
  default_attributes:
    # (Optional) Add Default Environment Attributes

  override_attributes:
    # (Optional) Add Override Environment Attributes
    ntp.servers: [0.pool.ntp.org, 1.pool.ntp.org, 2.pool.ntp.org, 3.pool.ntp.org]

# ================================================================
# Hybrid Cloud Information.
# ================================================================
hybrid:
  # The Keystone Identity service endpoint host. Enter either the host FQDN or
  # it's IP address. This value is not validated. Please ensure it is correct.
  # The value is defaulted to 192.168.101.10 for the ICOS Hybrid cloud.
  keystone_endpoint_host: 192.168.101.10
  # The Identity service admin tenant name. The value is defaulted to
  # on-prem-admin for the ICOS Hybrid cloud.
  admin_tenant_name: on-prem-admin
  # The Identity service admin user name. The value is defaulted to
  # admin-on-prem for the ICOS Hybrid cloud.
  admin_user: admin-on-prem
  # The SSL certificate chain file for the ICOS region. Enter the local
  # location on the Chef server of the ICOS SSL certificate chain file as a
  # fully qualified path and file name. This value is required unless both the
  # on-premise region, and the ICOS system have trusted, commercially signed
  # SSL certificates. This value is ignored if the optional cacert_file value
  # is specified.
  off_prem_certificate_chain_file: YOUR_OFF_PREM_CERTIFICATE_CHAIN_FILE_LOCAL_LOCATION

# ================================================================
# Node Information
# ================================================================
nodes:
  - name: controller
    description: Cloud controller node
    fqdn: YOUR_CONTROLLER_NODE_FQDN
    password: ~
    identity_file: ~
    nics:
      management_network: eth0
      data_network: eth1
    # (Optional) Node Attribute JSON File
    attribute_file: ~
    # (Optional) Controller SSL certificate and private key files.
    # These values are not required. If not specified, the cert_file and
    # key_file will be created automatically, and placed on the Chef
    # server. Only specify these values if you are providing your own cloud
    # SSL certificate chain file, and controller SSL certificate and
    # private key files for FIPS compliance.
    # cert_file: YOUR_CONTROLLER_CERTIFICATE_FILE_LOCATION
    # key_file: YOUR_CONTROLLER_PRIVATE_KEY_FILE_LOCATION
  - name: kvm_compute
    description: Cloud KVM compute node
    fqdn: YOUR_KVM_COMPUTE_NODE_FQDN
    password: ~
    identity_file: ~
    nics:
      management_network: eth0
      data_network: eth1
    # (Optional) Node Attribute JSON File
    attribute_file: ~
    # Copy the kvm_compute node section above for additional
    # KVM compute nodes in your cloud.
t/run.t (51 lines)
@@ -1,51 +0,0 @@
# -*-perl-*-

use Test::More tests => 10;
#use Test::More qw(no_plan);

require_ok( 'Data::Interactive::Inspect' );

my $cfg = {
  'v27' => '10',
  'v28' => 'ten',

  'AoA' => [ 1, 2, 3, 4 ],

  'AoH' => {
    'Homer' => { user => 'homer', uid => 100 },
    'Bart'  => { user => 'bart',  uid => 101 },
    'Lisa'  => { user => 'lisa',  uid => 102 },
  },
};

my $shell = new_ok('Data::Interactive::Inspect', [ $cfg ]);
ok($shell, "Data::Interactive::Inspect->new() returns an obj");

my $orig;
foreach my $k (keys %{$cfg}) {
  $orig->{$k} = $cfg->{$k};
}


my $m1 = $shell->inspect("set v27 888\n");
isnt($orig->{v27}, $m1->{v27}, "hash modified");

my $m2 = $shell->inspect("set GY { nom => 400 }");
is_deeply($m2->{GY}, { nom => 400 }, "add a sub hash");

my $m3 = $shell->inspect("pop AoA");
is_deeply($m3->{AoA}, [1,2,3], "remove last element of array");

my $m4 = $shell->inspect("shift AoA");
is_deeply($m4->{AoA}, [2,3], "remove 1st element of array");

my $m5 = $shell->inspect("append AoA 9");
is_deeply($m5->{AoA}, [2,3,9], "append to array");

my $m6 = $shell->inspect("drop v28");
isnt($orig->{v28}, $m6->{v28}, "delete a key");

my $m7 = $shell->inspect("enter AoH\nenter Bart\nset uid 0\n");
is_deeply($m7->{AoH}->{Bart}, { user => 'bart', uid => 0 }, "browse and modify deeply");

done_testing();
@@ -1,25 +0,0 @@
#!/usr/bin/perl

use lib qw(blib/lib);
use Data::Interactive::Inspect;
use Data::Dumper;

my $s = {
  h => [1,2,3,4,5],
  users => [
    { login => 'max', age => 12 },
    { login => 'leo', age => 23 },
  ],
  any => {
    fear => {
      settings => {
        height => 89,
        mode   => 'normal',
        looks  => [ 3,5,6 ],
      }
    },
  }
};

my $shell = Data::Interactive::Inspect->new($s);
my $x = $shell->inspect();
#print Dumper($x);