nv-data-kvntree
v1.0.1
Published
nv-data-kvntree
================
- simple util to write map/set/array/dict
- support nested key
- support hash AND ref
Downloads
4
Readme
nv-data-kvntree
- simple util to write map/set/array/dict
- support nested key
- support hash AND ref
install
- npm install nv-data-kvntree
usage
const {
load_from_src,
load_from_obj
} = require("nv-data-kvntree");
Map: <...>
Set: (...)
Array: [...]
Dict/Object: {...}
Colon: : OR =
Comma: , OR ; //optional
Hash: # //for-multi-key-support
Ref: & //for-circular
example
load_from_src
var src =`
<
[ #aryk
10 ,
20 ,
30
[400 500n 600]
] : /*this is a k-node*/
{
"a" :
"A" ,
b"b"b :
"B"
} , /*this is a v-node*/
"c" :
(
0x40 ,
50 ,
60 ,
<{}:[70;80;90]>,
&aryk, //circular of #aryk
70
) //this is a set
>
`
var rt = load_from_src(src)
var mp = rt.to_obj()
/*
> mp
Map(2) {
[
10,
20,
30,
[ 400, 500n, 600 ],
'long-long-string',
'long-long-string'
] => { a: 'A', bbb: 'B' },
'c' => Set(6) {
64,
50,
60,
Map(1) { {} => [Array] },
[ 10, 20, 30, [Array], 'long-long-string', 'long-long-string' ],
70
}
}
>
*/
var arr = Array.from(mp.entries())
> arr[0][0]
[
10,
20,
30,
[ 400, 500n, 600 ],
'long-long-string',
'long-long-string'
]
> ref = Array.from(arr[1][1])[4]
[
10,
20,
30,
[ 400, 500n, 600 ],
'long-long-string',
'long-long-string'
]
> arr[0][0] === ref
true
/*
> rt.$sdfs_.filter(r=>!r.is_n()).map(r=>r.dtype_)
[
'map', 'ary', 'int',
'int', 'int', 'ary',
'int', 'bigint', 'int',
'str', 'ref', 'normal_dict',
'str', 'str', 'str',
'str', 'str', 'set',
'int', 'int', 'int',
'map', 'normal_dict', 'ary',
'int', 'int', 'int',
'ref', 'int'
]
> rt.$sdfs_.filter(r=>!r.is_n()).map(r=>r.stype_)
[
'root', 'map_k', 'ary_v',
'ary_v', 'ary_v', 'ary_v',
'ary_v', 'ary_v', 'ary_v',
'ary_v', 'ary_v', 'map_v',
'dict_k', 'dict_v', 'dict_k',
'dict_v', 'map_k', 'map_v',
'set_v', 'set_v', 'set_v',
'set_v', 'map_k', 'map_v',
'ary_v', 'ary_v', 'ary_v',
'set_v', 'set_v'
]
>
*/
rt.show()
/*
>
<
[ #aryk
10 ,
20 ,
30 ,
[
400 ,
500 ,
600
] ,
"long-long-string" #str ,
&str
] :
{
"a" :
"A" ,
"bbb" :
"B"
} ,
"c" :
(
64 ,
50 ,
60 ,
<
{
} :
[
70 ,
80 ,
90
]
> ,
&aryk ,
70
)
>
*/
rt.stringify()
/*
>
'<[ #aryk 10,20,30,[400,500,600],"long-long-string" #str ,&str]:{"a":"A","bbb":"B"},"c":(64,50,60,<{}:[70,80,90]>,&aryk,70)>'
> var s = rt.stringify()
undefined
> var nrt = load_from_src(s)
undefined
> nrt
V [1 %67f7b439:1% ] {}
> nrt.to_obj()
Map(2) {
[
10,
20,
30,
[ 400, 500, 600 ],
'long-long-string',
'long-long-string'
] => { a: 'A', bbb: 'B' },
'c' => Set(6) {
64,
50,
60,
Map(1) { {} => [Array] },
[ 10, 20, 30, [Array], 'long-long-string', 'long-long-string' ],
70
}
}
>
*/
load_from_obj
circular
var mp = new Map()
var arr = [10,20,30]
var dict = {a:'A',b:'B'}
mp.set(arr,dict)
mp.set('c',new Set([40,50,60,70]))
mp.set(dict,arr)
/*
Map(3) {
[ 10, 20, 30 ] => { a: 'A', b: 'B' },
'c' => Set(4) { 40, 50, 60, 70 },
{ a: 'A', b: 'B' } => [ 10, 20, 30 ]
}
>
*/
var rt = load_from_obj(mp)
/*
> rt.show()
<
[ #___bljzcdoc@ptr
10 ,
20 ,
30
] :
{ #___u_veqrs4@ptr
"a" :
"A" ,
"b" :
"B"
} ,
"c" :
(
40 ,
50 ,
60 ,
70
) ,
&___u_veqrs4@ptr :
&___bljzcdoc@ptr
>
*/
var arr = Array.from(rt.to_obj());
arr[0][0] === arr[2][1]
arr[0][1] === arr[2][0]
/*
> arr[0][0] === arr[2][1]
true
> arr[0][1] === arr[2][0]
true
>
*/
keys and values
var mp = new Map()
var arr = [10,20,30]
var dict = {a:'A',b:'B'}
mp.set(arr,dict)
mp.set('c',new Set([40,50,60,70]))
/*
> mp
Map(2) {
[ 10, 20, 30 ] => { a: 'A', b: 'B' },
'c' => Set(4) { 40, 50, 60, 70 }
}
>
*/
var rt = load_from_obj(mp)
rt.show();
rt.stringify();
rt.to_obj()
/*
> rt.show();
<
[
10 ,
20 ,
30
] :
{
"a" :
"A" ,
"b" :
"B"
} ,
"c" :
(
40 ,
50 ,
60 ,
70
)
>
undefined
>
> rt.stringify();
'<[10,20,30]:{"a":"A","b":"B"},"c":(40,50,60,70)>'
>
> rt.to_obj()
Map(2) {
[ 10, 20, 30 ] => { a: 'A', b: 'B' },
'c' => Set(4) { 40, 50, 60, 70 }
}
>
*/
rt.keys_.map(k=>k.to_obj())
rt.values_.map(v=>v.to_obj())
rt.entries_.map(e=>[e[0].to_obj(),e[1].to_obj()])
/*
> rt.keys_.map(k=>k.to_obj())
[ [ 10, 20, 30 ], 'c' ]
> rt.values_.map(v=>v.to_obj())
[ { a: 'A', b: 'B' }, Set(4) { 40, 50, 60, 70 } ]
> rt.entries_.map(e=>[e[0].to_obj(),e[1].to_obj()])
[
[ [ 10, 20, 30 ], { a: 'A', b: 'B' } ],
[ 'c', Set(4) { 40, 50, 60, 70 } ]
]
>
*/
clone and erase
var nrt = rt.clone();
nrt.to_obj();
nrt.$erase_r()
/*
> var nrt = rt.clone();
undefined
> nrt.to_obj();
Map(2) {
[ 10, 20, 30 ] => { a: 'A', b: 'B' },
'c' => Set(4) { 40, 50, 60, 70 }
}
*/
flat
rt.flat()
rt.flatv()
rt.flatk()
/*
> rt.flat()
[
10, 20, 30, 'a', 'A',
'b', 'B', 'c', 40, 50,
60, 70
]
> rt.flatv()
[
10, 20, 30, 'A',
'B', 40, 50, 60,
70
]
> rt.flatk()
[ 'a', 'b', 'c' ]
>
*/
swap
rt.keys_[0].swapkv()
rt.to_obj()
rt.keys_[1].swapkv()
rt.to_obj()
/*
> rt.keys_[0].swapkv()
V [65 %78d6b01c:65% ] {}
> rt.to_obj()
Map(2) {
{ a: 'A', b: 'B' } => [ 10, 20, 30 ],
'c' => Set(4) { 40, 50, 60, 70 }
}
> rt.keys_[1].swapkv()
V [74 %78d6b01c:74% ] {}
> rt.to_obj()
Map(2) {
{ a: 'A', b: 'B' } => [ 10, 20, 30 ],
Set(4) { 40, 50, 60, 70 } => 'c'
}
>
*/
METHODS
kv-node
rt.append_n
rt.clone
rt.dtype_
rt.entries_
rt.flat
rt.flatk
rt.flatv
rt.hashes_
rt.is_ptr
rt.is_val
rt.keys_
rt.parent_
rt.prepend_n
rt.ptr_
rt.show
rt.stringify
rt.to_obj
rt.unref
rt.val_
rt.values_
rt.is_k
rt.is_n
rt.is_v
rt.key_
rt.stype_
rt.swapkv
n-node
rt.$fstch_.addk
rt.$fstch_.addv
rt.$fstch_.delk
rt.$fstch_.delv
rt.$fstch_.insert_after
rt.$fstch_.insert_before
rt.$fstch_.is_empty
rt.$fstch_.is_full
rt.$fstch_.is_k
rt.$fstch_.is_n
rt.$fstch_.is_v
rt.$fstch_.k_
rt.$fstch_.show
rt.$fstch_.stringify
rt.$fstch_.swapkv
rt.$fstch_.v_
APIS
- load_from_obj(o,forest,max_size=100000,rtrn_forest=false);
- deepcopy(o,forest,max_size=100000,rtrn_forest=false);
- load_from_src(src,forest,max_size=100000,rtrn_forest=false);
RESTRICTION
- it is slow if the source is larger than 2 MB
- fast enough for configuration files
LICENSE
- ISC