diff --git a/Cargo.lock b/Cargo.lock index 8a5bcdb9e0..48049a63ce 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -49,7 +49,7 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite", - "rand 0.8.5", + "rand", "sha1", "smallvec", "tokio", @@ -169,7 +169,7 @@ dependencies = [ "serde_urlencoded", "smallvec", "socket2", - "time 0.3.20", + "time", "url", ] @@ -242,7 +242,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom 0.2.15", + "getrandom", "once_cell", "version_check", "zerocopy", @@ -330,7 +330,7 @@ dependencies = [ "ffi-support", "log", "once_cell", - "rand 0.8.5", + "rand", "regex", "rmp-serde", "serde", @@ -354,7 +354,7 @@ dependencies = [ "num-traits", "once_cell", "openssl", - "rand 0.8.5", + "rand", "serde", "sha2", ] @@ -373,7 +373,7 @@ dependencies = [ "num-traits", "once_cell", "openssl", - "rand 0.8.5", + "rand", "serde", "sha2", ] @@ -386,7 +386,7 @@ dependencies = [ "bitvec", "log", "once_cell", - "rand 0.8.5", + "rand", "regex", "serde", "serde_json", @@ -522,7 +522,7 @@ dependencies = [ "display_as_json", "env_logger 0.11.5", "log", - "rand 0.8.5", + "rand", "reqwest 0.12.8", "serde", "serde_derive", @@ -538,7 +538,7 @@ dependencies = [ "aries_vcx_anoncreds", "aries_vcx_ledger", "aries_vcx_wallet", - "async-channel 2.3.1", + "async-channel", "async-trait", "backtrace", "base64 0.22.1", @@ -561,7 +561,7 @@ dependencies = [ "num-bigint", "pretty_assertions", "public_key", - "rand 0.8.5", + "rand", "regex", "serde", "serde_derive", @@ -572,7 +572,7 @@ dependencies = [ "strum_macros", "test_utils", "thiserror", - "time 0.3.20", + "time", "tokio", "url", "uuid", @@ -593,7 +593,7 @@ dependencies = [ "serde", "serde_json", "thiserror", - "time 0.3.20", + "time", "uuid", ] @@ -614,7 +614,7 @@ dependencies = [ "serde", "serde_json", "thiserror", - "time 0.3.20", + "time", "tokio", ] @@ -629,7 +629,7 @@ dependencies = [ "bs58", "log", "public_key", - "rand 0.8.5", + "rand", "serde", "serde_json", "thiserror", @@ -712,7 +712,7 @@ dependencies = [ "k256", "p256", "p384", - "rand 0.8.5", + "rand", "serde", "serde-json-core", "sha2", @@ -753,27 +753,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "async-attributes" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - [[package]] name = "async-channel" version = "2.3.1" @@ -786,53 +765,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "async-executor" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" -dependencies = [ - "async-task", - "concurrent-queue", - "fastrand", - "futures-lite", - "slab", -] - -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.3.1", - "async-executor", - "async-io", - "async-lock", - "blocking", - 
"futures-lite", - "once_cell", -] - -[[package]] -name = "async-io" -version = "2.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444b0228950ee6501b3568d3c93bf1176a1fdbc3b758dcd9475046d30f4dc7e8" -dependencies = [ - "async-lock", - "cfg-if", - "concurrent-queue", - "futures-io", - "futures-lite", - "parking", - "polling", - "rustix", - "slab", - "tracing", - "windows-sys 0.59.0", -] - [[package]] name = "async-lock" version = "3.4.0" @@ -844,33 +776,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "async-std" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" -dependencies = [ - "async-attributes", - "async-channel 1.9.0", - "async-global-executor", - "async-io", - "async-lock", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - [[package]] name = "async-stream" version = "0.3.6" @@ -893,12 +798,6 @@ dependencies = [ "syn 2.0.79", ] -[[package]] -name = "async-task" -version = "4.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" - [[package]] name = "async-trait" version = "0.1.83" @@ -1132,19 +1031,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "blocking" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" -dependencies = [ - "async-channel 2.3.1", - "async-task", - "futures-io", - "futures-lite", - "piper", -] - [[package]] name = "bls12_381" version = "0.8.0" @@ -1153,7 +1039,7 @@ checksum = "d7bc6d6292be3a19e6379786dac800f551e5865a5bb51ebbe3064ab80433f403" dependencies = [ "ff", "group", - "rand_core 0.6.4", + "rand_core", "subtle", "zeroize", ] @@ -1442,16 +1328,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "console_error_panic_hook" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" -dependencies = [ - "cfg-if", - "wasm-bindgen", -] - [[package]] name = "const-oid" version = "0.9.6" @@ -1471,7 +1347,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" dependencies = [ "percent-encoding", - "time 0.3.20", + "time", "version_check", ] @@ -1598,7 +1474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "subtle", "zeroize", ] @@ -1610,7 +1486,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "typenum", ] @@ -1688,7 +1564,7 @@ dependencies = [ "log", "num", "owning_ref", - "time 0.3.20", + "time", "unicode-segmentation", "unicode-width", "xi-unicode", @@ -1721,38 +1597,14 @@ dependencies = [ "syn 2.0.79", ] -[[package]] -name = "darling" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0d706e75d87e35569db781a9b5e2416cff1236a47ed380831f959382ccd5f858" -dependencies = [ - "darling_core 0.10.2", - "darling_macro 0.10.2", -] - [[package]] name = "darling" version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ - "darling_core 0.20.10", - "darling_macro 0.20.10", -] - -[[package]] -name = "darling_core" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c960ae2da4de88a91b2d920c2a7233b400bc33cb28453a2987822d8392519b" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.9.3", - "syn 1.0.109", + "darling_core", + "darling_macro", ] [[package]] @@ -1769,24 +1621,13 @@ dependencies = [ "syn 2.0.79", ] -[[package]] -name = "darling_macro" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72" -dependencies = [ - "darling_core 0.10.2", - "quote", - "syn 1.0.109", -] - [[package]] name = "darling_macro" version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ - "darling_core 0.20.10", + "darling_core", "quote", "syn 2.0.79", ] @@ -1843,7 +1684,7 @@ version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7431fa049613920234f22c47fdc33e6cf3ee83067091ea4277a3f8c4587aae38" dependencies = [ - "darling 0.20.10", + "darling", "proc-macro2", "quote", "syn 2.0.79", @@ -2037,27 +1878,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "dirs" -version = "5.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys 0.48.0", -] - [[package]] name = "display_as_json" version = "0.1.0" @@ -2144,7 +1964,7 @@ dependencies = [ "generic-array", "group", "hkdf", - "rand_core 0.6.4", + "rand_core", "sec1", "subtle", "zeroize", @@ -2194,7 +2014,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59c3b24c345d8c314966bdc1832f6c2635bfcce8e7cf363bd115987bba2ee242" dependencies = [ - "darling 0.20.10", + "darling", "proc-macro2", "quote", "syn 2.0.79", @@ -2327,28 +2147,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "failure" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" -dependencies = [ - "backtrace", - "failure_derive", -] - -[[package]] -name = "failure_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "synstructure", -] - [[package]] name = "fastrand" version = "2.1.1" @@ -2361,7 +2159,7 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ - 
"rand_core 0.6.4", + "rand_core", "subtle", ] @@ -2503,7 +2301,6 @@ dependencies = [ "futures-core", "futures-task", "futures-util", - "num_cpus", ] [[package]] @@ -2597,19 +2394,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - [[package]] name = "getrandom" version = "0.2.15" @@ -2618,7 +2402,7 @@ checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", ] [[package]] @@ -2648,7 +2432,7 @@ dependencies = [ "num-integer", "num-traits", "once_cell", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -2657,18 +2441,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" -[[package]] -name = "gloo-timers" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "goblin" version = "0.6.1" @@ -2687,7 +2459,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -3084,23 +2856,6 @@ dependencies = [ "hashbrown 0.15.0", ] -[[package]] -name = "indy-api-types" -version = "0.1.0" -dependencies = [ - "bs58", - "futures", - "libc", - "log", - "openssl", - "serde", - "serde_derive", - "serde_json", - "sqlx", - "thiserror", - "ursa", -] - [[package]] name = "indy-blssignatures" version = "0.1.0" @@ -3108,7 +2863,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ea4f5684155eae13be942fc61ff57706254adc0c6e99316c98805a0985200db" dependencies = [ "amcl", - "rand 0.8.5", + "rand", "serde", "sha2", "sha3", @@ -3124,7 +2879,7 @@ dependencies = [ "indy-data-types 0.7.0", "log", "once_cell", - "rand 0.8.5", + "rand", "regex", "serde", "serde_json", @@ -3143,7 +2898,7 @@ dependencies = [ "ed25519-dalek", "hex", "once_cell", - "rand 0.8.5", + "rand", "regex", "serde", "serde_json", @@ -3165,7 +2920,7 @@ dependencies = [ "ed25519-dalek", "hex", "once_cell", - "rand 0.8.5", + "rand", "regex", "serde", "serde_json", @@ -3184,28 +2939,7 @@ dependencies = [ "serde", "serde_json", "thiserror", - "time 0.3.20", -] - -[[package]] -name = "indy-utils" -version = "0.1.0" -dependencies = [ - "base64 0.21.7", - "dirs", - "failure", - "indy-api-types", - "lazy_static", - "libc", - "log", - "openssl", - "rand 0.8.5", - "rmp-serde", - "serde", - "serde_derive", - "serde_json", - "sodiumoxide", - "zeroize", + "time", ] [[package]] @@ -3228,7 +2962,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-utils", - "rand 0.8.5", + "rand", "regex", "rmp-serde", "serde", @@ -3237,7 +2971,7 @@ dependencies = [ "sha3", "sled", "thiserror", - "time 0.3.20", + "time", "url", "zmq", ] @@ -3253,28 +2987,6 @@ dependencies = [ "url", ] -[[package]] -name = "indy-wallet" -version = "0.1.0" -dependencies = [ - "async-std", - "async-trait", - "bs58", - "byteorder", - "futures", - "indy-api-types", - "indy-utils", - "libc", - 
"log", - "lru", - "rmp-serde", - "serde", - "serde_derive", - "serde_json", - "sqlx", - "zeroize", -] - [[package]] name = "inout" version = "0.1.3" @@ -3294,12 +3006,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "int_traits" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b33c9a5c599d67d051c4dc25eb1b6b4ef715d1763c20c85c688717a1734f204e" - [[package]] name = "ipnet" version = "2.10.0" @@ -3386,15 +3092,6 @@ dependencies = [ "cpufeatures", ] -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - [[package]] name = "language-tags" version = "0.3.2" @@ -3422,26 +3119,6 @@ version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" -[[package]] -name = "libredox" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags 2.6.0", - "libc", -] - -[[package]] -name = "libsodium-sys" -version = "0.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcbd1beeed8d44caa8a669ebaa697c313976e242c03cc9fb23d88bf1656f5542" -dependencies = [ - "libc", - "pkg-config", -] - [[package]] name = "libsqlite3-sys" version = "0.27.0" @@ -3453,32 +3130,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "libvdrtools" -version = "0.8.6" -dependencies = [ - "async-std", - "async-trait", - "bs58", - "dirs", - "failure", - "futures", - "hex", - "indy-api-types", - "indy-utils", - "indy-wallet", - "lazy_static", - "libc", - "log", - "log-derive", - "regex", - "serde", - "serde_derive", - "serde_json", - "ursa", - "zeroize", -] - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -3517,21 +3168,6 @@ name = "log" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" -dependencies = [ - "value-bag", -] - -[[package]] -name = "log-derive" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a42526bb432bcd1b43571d5f163984effa25409a29f1a3242a54d0577d55bcf" -dependencies = [ - "darling 0.10.2", - "proc-macro2", - "quote", - "syn 1.0.109", -] [[package]] name = "lru" @@ -3627,7 +3263,7 @@ dependencies = [ name = "messages_macros" version = "0.1.0" dependencies = [ - "darling 0.20.10", + "darling", "proc-macro2", "quote", "shared", @@ -3684,7 +3320,7 @@ checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.48.0", ] @@ -3697,7 +3333,7 @@ dependencies = [ "hermit-abi 0.3.9", "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.52.0", ] @@ -3797,7 +3433,7 @@ checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", - "rand 0.8.5", + "rand", ] [[package]] @@ -3812,7 +3448,7 @@ dependencies = [ "num-integer", "num-iter", "num-traits", - "rand 0.8.5", + "rand", "smallvec", "zeroize", ] @@ -3866,16 +3502,6 @@ dependencies = [ "libm", ] -[[package]] -name = "num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi 0.3.9", - "libc", -] - [[package]] name = "num_threads" version = "0.1.7" @@ -3953,12 +3579,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - [[package]] name = "os_str_bytes" version = "6.6.1" @@ -4059,7 +3679,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" dependencies = [ "base64ct", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -4124,17 +3744,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "piper" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" -dependencies = [ - "atomic-waker", - "fastrand", - "futures-io", -] - [[package]] name = "pkcs1" version = "0.7.5" @@ -4168,21 +3777,6 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" -[[package]] -name = "polling" -version = "3.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2790cd301dec6cd3b7a025e4815cf825724a51c98dccfe6a3e55f05ffb6511" -dependencies = [ - "cfg-if", - "concurrent-queue", - "hermit-abi 0.4.0", - "pin-project-lite", - "rustix", - "tracing", - "windows-sys 0.59.0", -] - [[package]] name = "poly1305" version = "0.8.0" @@ -4328,19 +3922,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - [[package]] name = "rand" version = "0.8.5" @@ -4348,18 +3929,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", + "rand_chacha", + "rand_core", ] [[package]] @@ -4369,16 +3940,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", + "rand_core", ] [[package]] @@ -4387,16 +3949,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - 
"getrandom 0.2.15", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", + "getrandom", ] [[package]] @@ -4417,17 +3970,6 @@ dependencies = [ "bitflags 2.6.0", ] -[[package]] -name = "redox_users" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" -dependencies = [ - "getrandom 0.2.15", - "libredox", - "thiserror", -] - [[package]] name = "regex" version = "1.11.0" @@ -4562,7 +4104,7 @@ checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom", "libc", "spin", "untrusted", @@ -4604,7 +4146,7 @@ dependencies = [ "num-traits", "pkcs1", "pkcs8", - "rand_core 0.6.4", + "rand_core", "signature", "spki", "subtle", @@ -4991,7 +4533,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -5048,17 +4590,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "sodiumoxide" -version = "0.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5cb2f14f9a51352ad65e59257a0a9459d5a36a3615f3d53a974c82fdaaa00a" -dependencies = [ - "libc", - "libsodium-sys", - "serde", -] - [[package]] name = "spin" version = "0.9.8" @@ -5213,7 +4744,7 @@ dependencies = [ "memchr", "once_cell", "percent-encoding", - "rand 0.8.5", + "rand", "rsa", "serde", "sha1", @@ -5253,7 +4784,7 @@ dependencies = [ "md-5", "memchr", "once_cell", - "rand 0.8.5", + "rand", "serde", "serde_json", "sha2", @@ -5312,12 +4843,6 @@ dependencies = [ "unicode-properties", ] -[[package]] -name = "strsim" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6446ced80d6c486436db5c078dde11a9f73d42b57fb273121e160b84f63d894c" - [[package]] name = "strsim" version = "0.10.0" @@ -5392,18 +4917,6 @@ dependencies = [ "futures-core", ] -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "unicode-xid", -] - [[package]] name = "system-configuration" version = "0.5.1" @@ -5507,7 +5020,7 @@ dependencies = [ "lazy_static", "log", "public_key", - "rand 0.8.5", + "rand", "serde_json", "thiserror", "uuid", @@ -5539,17 +5052,6 @@ dependencies = [ "syn 2.0.79", ] -[[package]] -name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - [[package]] name = "time" version = "0.3.20" @@ -5865,12 +5367,6 @@ version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - [[package]] name = "unicode_categories" version = "0.1.1" @@ 
-6072,29 +5568,6 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" -[[package]] -name = "ursa" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8760a62e18e4d3e3f599e15c09a9f9567fd9d4a90594d45166162be8d232e63b" -dependencies = [ - "amcl", - "console_error_panic_hook", - "failure", - "hex", - "int_traits", - "js-sys", - "lazy_static", - "log", - "openssl", - "rand 0.7.3", - "serde", - "serde_json", - "time 0.1.45", - "wasm-bindgen", - "zeroize", -] - [[package]] name = "utf8parse" version = "0.2.2" @@ -6107,16 +5580,10 @@ version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" dependencies = [ - "getrandom 0.2.15", + "getrandom", "serde", ] -[[package]] -name = "value-bag" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" - [[package]] name = "vcpkg" version = "0.2.15" @@ -6138,18 +5605,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -6170,8 +5625,6 @@ checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" dependencies = [ "cfg-if", "once_cell", - "serde", - "serde_json", "wasm-bindgen-macro", ] @@ -6510,7 +5963,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" dependencies = [ "curve25519-dalek", - "rand_core 0.6.4", + "rand_core", "zeroize", ] diff --git a/Cargo.toml b/Cargo.toml index eae8408f3f..715a48e0e6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,6 @@ resolver = "2" members = [ "aries/aries_vcx", - "aries/misc/legacy/libvdrtools", "aries/messages_macros", "aries/messages", "aries/misc/shared", diff --git a/aries/agents/mediator/Cargo.toml b/aries/agents/mediator/Cargo.toml index 92aa550752..7358766c3a 100644 --- a/aries/agents/mediator/Cargo.toml +++ b/aries/agents/mediator/Cargo.toml @@ -27,7 +27,7 @@ messages = { path = "../../messages" } reqwest = { version = "0.12.5", features = ["json"] } serde = "1.0.188" serde_json = "1.0.106" -sqlx = "0.7" +sqlx = { version = "0.7", features = ["mysql"] } thiserror = "1.0.49" tokio = { version = "1", features = ["rt-multi-thread", "macros"] } tower-http = { version = "0.5.2", features = ["catch-panic"] } diff --git a/aries/misc/legacy/libvdrtools/Cargo.toml b/aries/misc/legacy/libvdrtools/Cargo.toml deleted file mode 100644 index ee932f628e..0000000000 --- a/aries/misc/legacy/libvdrtools/Cargo.toml +++ /dev/null @@ -1,50 +0,0 @@ -[package] -name = "libvdrtools" -version = "0.8.6" -authors = ["Evernym"] -edition = "2018" - -description = "A library that facilitates building standards compliant and interoperable solutions for self-sovereign identity by abstracting the operations for interacting with a verifiable data registry as defined by Hyperledger Aries." 
-license = "Apache-2.0" - -[lib] -name = "vdrtools" -path = "src/lib.rs" - -[features] -default = ["base58_bs58", "pair_amcl", "local_nodes_pool", "revocation_tests"] -base58_bs58 = ["bs58"] -pair_amcl = ["ursa"] -local_nodes_pool = [] -revocation_tests = [] -force_full_interaction_tests = [] -sodium_static = [] -only_high_cases = [] -mysql_storage = [] - -# Causes the build to fail on all warnings -fatal_warnings = [] - -[dependencies] -async-std = { version = "1", features = ["attributes"] } -async-trait = "0.1" -failure = { version = "0.1", features = ["backtrace"] } -hex = "0.4" -libc = "0.2" -log = "0.4" -log-derive = "0.4" -bs58 = { version = "0.5", optional = true } -serde = "1" -serde_json = "1" -serde_derive = "1" -lazy_static = "1" -zeroize = "1" -regex = "1" -indy-api-types = { path = "indy-api-types" } -indy-utils = { path = "indy-utils" } -indy-wallet = { path = "indy-wallet" } -futures = { version = "0.3", default-features = false, features = [ "executor", "alloc", "thread-pool" ] } -ursa = { version = "0.3.7", optional = true, default-features = false, features = ["cl_native"] } - -[dev-dependencies] -dirs = "5" diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/Cargo.toml b/aries/misc/legacy/libvdrtools/indy-api-types/Cargo.toml deleted file mode 100644 index 1cde54b95c..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "indy-api-types" -version = "0.1.0" -authors = ["Hyperledger Indy Contributors "] -edition = "2018" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -thiserror = "1" -futures = { version = "0.3", default-features = false, features = ["std"] } -log = { version = "0.4", features = ["std"] } -libc = "0.2" -serde = "1" -serde_json = "1" -serde_derive = "1" -openssl = { version = "0.10" } -bs58 = "0.5" -sqlx = { version = "0.7", features = [ "sqlite", "mysql", "runtime-tokio-rustls" ] } -ursa = { version = "0.3.7", default-features = false, features = ["wasm"] } \ No newline at end of file diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/mod.rs b/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/mod.rs deleted file mode 100644 index 2fff25cab2..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod wallet; diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/wallet/mod.rs b/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/wallet/mod.rs deleted file mode 100644 index 7d1fa5271f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/wallet/mod.rs +++ /dev/null @@ -1,101 +0,0 @@ -use std::{collections::HashMap, fmt}; - -use serde_json::value::Value; - -#[derive(Debug, Serialize, Deserialize, Clone, Default)] -pub struct Config { - pub id: String, - pub storage_type: Option, - pub storage_config: Option, - pub cache: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub enum CachingAlgorithm { - #[serde(rename = "lru")] - LRU, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct CacheConfig { - #[serde(default = "default_cache_size")] - pub size: usize, - pub entities: Vec, - #[serde(default = "default_caching_algorithm")] - pub algorithm: CachingAlgorithm, -} - -pub const DEFAULT_CACHE_SIZE: usize = 10; - -fn default_cache_size() -> usize { - DEFAULT_CACHE_SIZE -} - -fn default_caching_algorithm() -> CachingAlgorithm { - CachingAlgorithm::LRU -} - -#[derive(Debug, 
Serialize, Deserialize, Clone)] -pub struct Credentials { - pub key: String, - #[serde(default = "default_key_derivation_method")] - pub key_derivation_method: KeyDerivationMethod, - - pub rekey: Option, - #[serde(default = "default_key_derivation_method")] - pub rekey_derivation_method: KeyDerivationMethod, - - pub storage_credentials: Option, -} - -#[allow(non_camel_case_types)] -#[derive(Debug, Serialize, Deserialize, Clone)] -pub enum KeyDerivationMethod { - RAW, - ARGON2I_MOD, - ARGON2I_INT, -} - -pub fn default_key_derivation_method() -> KeyDerivationMethod { - KeyDerivationMethod::ARGON2I_MOD -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ExportConfig { - pub key: String, - pub path: String, - #[serde(default = "default_key_derivation_method")] - pub key_derivation_method: KeyDerivationMethod, -} - -#[derive(Debug, Deserialize)] -pub struct KeyConfig { - pub seed: Option, -} - -#[derive(Serialize, Deserialize)] -pub struct IndyRecord { - // Wallet record type - #[serde(rename = "type")] - pub type_: String, - // Wallet record id - pub id: String, - // Wallet record value - pub value: String, - // Wallet record tags - pub tags: HashMap, -} - -impl fmt::Debug for IndyRecord { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Record") - .field("type_", &self.type_) - .field("id", &self.id) - // Censor the value - .field("value", &"******".to_string()) - .field("tags", &self.tags) - .finish() - } -} - -pub type Tags = HashMap; diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/src/errors.rs b/aries/misc/legacy/libvdrtools/indy-api-types/src/errors.rs deleted file mode 100644 index cfa8677fe0..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/src/errors.rs +++ /dev/null @@ -1,639 +0,0 @@ -use std::{ - cell, - cell::RefCell, - error::Error, - ffi::{CString, NulError}, - fmt, io, ptr, - sync::Arc, -}; - -use libc::c_char; -use log; -#[cfg(feature = "casting_errors_wallet")] -use sqlx; -use thiserror::Error as ThisError; -use ursa::errors::{UrsaCryptoError, UrsaCryptoErrorKind}; -#[cfg(feature = "casting_errors_misc")] -use ursa::errors::{UrsaCryptoError, UrsaCryptoErrorKind}; - -use crate::ErrorCode; - -pub mod prelude { - pub use super::{ - err_msg, get_current_error_c_json, set_current_error, IndyError, IndyErrorExt, - IndyErrorKind, IndyResult, IndyResultExt, - }; -} - -#[derive(Copy, Clone, Eq, PartialEq, Debug, ThisError)] -pub enum IndyErrorKind { - // Common errors - #[error("Invalid library state")] - InvalidState, - #[error("Invalid structure")] - InvalidStructure, - #[error("Invalid parameter {0}")] - InvalidParam(u32), - #[error("IO error")] - IOError, - // Anoncreds errors - #[error("Duplicated master secret")] - MasterSecretDuplicateName, - #[error("Proof rejected")] - ProofRejected, - #[error("Revocation registry is full")] - RevocationRegistryFull, - #[error("Invalid revocation id")] - InvalidUserRevocId, - #[error("Credential revoked")] - CredentialRevoked, - #[error("Credential definition already exists")] - CredDefAlreadyExists, - // Ledger errors - #[error("No consensus")] - NoConsensus, - #[error("Invalid transaction")] - InvalidTransaction, - #[error("Item not found on ledger")] - LedgerItemNotFound, - // Pool errors - #[error("Pool not created")] - PoolNotCreated, - #[error("Invalid pool handle")] - InvalidPoolHandle, - #[error("Pool work terminated")] - PoolTerminated, - #[error("Pool timeout")] - PoolTimeout, - #[error("Pool ledger config already exists")] - PoolConfigAlreadyExists, - #[error("Pool 
Genesis Transactions are not compatible with Protocol version")] - PoolIncompatibleProtocolVersion, - // Crypto errors - #[error("Unknown crypto")] - UnknownCrypto, - // Wallet errors - #[error("Invalid wallet handle was passed")] - InvalidWalletHandle, - #[error("Unknown wallet storage type")] - UnknownWalletStorageType, - #[error("Wallet storage type already registered")] - WalletStorageTypeAlreadyRegistered, - #[error("Wallet with this name already exists")] - WalletAlreadyExists, - #[error("Wallet not found")] - WalletNotFound, - #[error("Wallet already opened")] - WalletAlreadyOpened, - #[error("Wallet security error")] - WalletAccessFailed, - #[error("Wallet encoding error")] - WalletEncodingError, - #[error("Wallet storage error occurred")] - WalletStorageError, - #[error("Wallet encryption error")] - WalletEncryptionError, - #[error("Wallet item not found")] - WalletItemNotFound, - #[error("Wallet item already exists")] - WalletItemAlreadyExists, - #[error("Wallet query error")] - WalletQueryError, - // DID errors - #[error("DID already exists")] - DIDAlreadyExists, - // Payments errors - #[error("Unknown payment method type")] - UnknownPaymentMethodType, - #[error("No method were scraped from inputs/outputs or more than one were scraped")] - IncompatiblePaymentMethods, - #[error("Payment insufficient funds on inputs")] - PaymentInsufficientFunds, - #[error("Payment Source does not exist")] - PaymentSourceDoesNotExist, - #[error("Payment operation not supported")] - PaymentOperationNotSupported, - #[error("Payment extra funds")] - PaymentExtraFunds, - #[error("The transaction is not allowed to a requester")] - TransactionNotAllowed, - #[error("Query account does not exist")] - QueryAccountDoesNotExist, - - #[error("Invalid VDR handle")] - InvalidVDRHandle, - #[error("Failed to get ledger for VDR Namespace")] - InvalidVDRNamespace, - #[error("Registered Ledger type does not match to the network of id")] - IncompatibleLedger, -} - -#[derive(Debug, Clone, ThisError)] -pub struct IndyError { - // FIXME: We have to use Arc as for now we clone messages in pool service - // FIXME: In theory we can avoid sync by refactoring of pool service - #[source] - kind: IndyErrorKind, - msg: Arc, -} - -impl fmt::Display for IndyError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - writeln!(f, "Error: {}", self.kind())?; - - if let Some(src) = self.kind.source() { - writeln!(f, " Caused by: {}", src)?; - } - - Ok(()) - } -} - -impl IndyError { - pub fn from_msg(kind: IndyErrorKind, msg: D) -> IndyError - where - D: fmt::Display + Send + Sync + 'static, - { - IndyError { - kind, - msg: Arc::new(msg.to_string()), - } - } - - pub fn kind(&self) -> IndyErrorKind { - self.kind - } - - pub fn extend(self, msg: D) -> IndyError - where - D: fmt::Display + fmt::Debug + Send + Sync + 'static, - { - IndyError { - kind: self.kind, - msg: Arc::new(format!("{}\n Caused by: {msg}", self.msg)), - } - } - - pub fn map(self, kind: IndyErrorKind, msg: D) -> IndyError - where - D: fmt::Display + Send + Sync + 'static, - { - IndyError { - kind, - msg: Arc::new(format!("{}\n Caused by: {msg}", self.msg)), - } - } -} - -pub fn err_msg(kind: IndyErrorKind, msg: D) -> IndyError -where - D: fmt::Display + fmt::Debug + Send + Sync + 'static, -{ - IndyError::from_msg(kind, msg) -} - -impl From for IndyError { - fn from(kind: IndyErrorKind) -> IndyError { - IndyError { - kind, - msg: Arc::new(String::new()), - } - } -} - -impl From for IndyError { - fn from(err: io::Error) -> Self { - IndyError { - kind: 
IndyErrorKind::IOError, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: cell::BorrowError) -> Self { - IndyError { - kind: IndyErrorKind::InvalidState, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: cell::BorrowMutError) -> Self { - IndyError { - kind: IndyErrorKind::InvalidState, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: futures::channel::oneshot::Canceled) -> Self { - IndyError { - kind: IndyErrorKind::InvalidState, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: log::SetLoggerError) -> IndyError { - IndyError { - kind: IndyErrorKind::InvalidState, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: UrsaCryptoError) -> Self { - match err.kind() { - UrsaCryptoErrorKind::InvalidState => { - IndyError::from_msg(IndyErrorKind::InvalidState, err) - } - UrsaCryptoErrorKind::InvalidStructure => { - IndyError::from_msg(IndyErrorKind::InvalidStructure, err) - } - UrsaCryptoErrorKind::IOError => IndyError::from_msg(IndyErrorKind::IOError, err), - UrsaCryptoErrorKind::InvalidRevocationAccumulatorIndex => { - IndyError::from_msg(IndyErrorKind::InvalidUserRevocId, err) - } - UrsaCryptoErrorKind::RevocationAccumulatorIsFull => { - IndyError::from_msg(IndyErrorKind::RevocationRegistryFull, err) - } - UrsaCryptoErrorKind::ProofRejected => { - IndyError::from_msg(IndyErrorKind::ProofRejected, err) - } - UrsaCryptoErrorKind::CredentialRevoked => { - IndyError::from_msg(IndyErrorKind::CredentialRevoked, err) - } - UrsaCryptoErrorKind::InvalidParam(_) => { - IndyError::from_msg(IndyErrorKind::InvalidStructure, err) - } - } - } -} - -impl From for IndyError { - fn from(_err: bs58::decode::Error) -> Self { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "The base58 input contained a character not part of the base58 alphabet", - ) - } -} - -impl From for IndyError { - fn from(err: openssl::error::ErrorStack) -> IndyError { - // TODO: FIXME: Analyze ErrorStack and split invalid structure errors from other errors - err.to_indy(IndyErrorKind::InvalidState, "Internal OpenSSL error") - } -} - -impl From for IndyError { - fn from(err: sqlx::Error) -> IndyError { - match &err { - sqlx::Error::RowNotFound => { - err.to_indy(IndyErrorKind::WalletItemNotFound, "Item not found") - } - sqlx::Error::Database(e) => match e.code() { - Some(code) => match code.as_ref() { - // Constraint unuque - sqlite (2067) - "2067" => err.to_indy( - IndyErrorKind::WalletItemAlreadyExists, - "Wallet item already exists", - ), - // Integrity constraint violation (23000) - "23000" => err.to_indy( - IndyErrorKind::WalletItemAlreadyExists, - "Wallet item already exists", - ), - _ => err.to_indy(IndyErrorKind::InvalidState, "Unexpected database error"), - }, - None => err.to_indy(IndyErrorKind::InvalidState, "Unexpected database error"), - }, - sqlx::Error::Io(_) => err.to_indy( - IndyErrorKind::IOError, - "IO error during access sqlite database", - ), - sqlx::Error::Tls(_) => err.to_indy( - IndyErrorKind::IOError, - "IO error during access sqlite database", - ), - _ => err.to_indy(IndyErrorKind::InvalidState, "Unexpected database error"), - } - } -} - -impl From for IndyError { - fn from(err: NulError) -> IndyError { - err.to_indy( - IndyErrorKind::InvalidState, - "Null symbols in payments strings", - ) // TODO: Review kind - } -} - -impl From> for ErrorCode { - fn from(r: Result) -> ErrorCode { - match r { - Ok(_) => 
ErrorCode::Success, - Err(err) => err.into(), - } - } -} - -impl From for ErrorCode { - fn from(err: IndyError) -> ErrorCode { - set_current_error(&err); - err.kind().into() - } -} - -impl From for ErrorCode { - fn from(code: IndyErrorKind) -> ErrorCode { - match code { - IndyErrorKind::InvalidState => ErrorCode::CommonInvalidState, - IndyErrorKind::InvalidStructure => ErrorCode::CommonInvalidStructure, - IndyErrorKind::InvalidParam(num) => match num { - 1 => ErrorCode::CommonInvalidParam1, - 2 => ErrorCode::CommonInvalidParam2, - 3 => ErrorCode::CommonInvalidParam3, - 4 => ErrorCode::CommonInvalidParam4, - 5 => ErrorCode::CommonInvalidParam5, - 6 => ErrorCode::CommonInvalidParam6, - 7 => ErrorCode::CommonInvalidParam7, - 8 => ErrorCode::CommonInvalidParam8, - 9 => ErrorCode::CommonInvalidParam9, - 10 => ErrorCode::CommonInvalidParam10, - 11 => ErrorCode::CommonInvalidParam11, - 12 => ErrorCode::CommonInvalidParam12, - 13 => ErrorCode::CommonInvalidParam13, - 14 => ErrorCode::CommonInvalidParam14, - 15 => ErrorCode::CommonInvalidParam15, - 16 => ErrorCode::CommonInvalidParam16, - 17 => ErrorCode::CommonInvalidParam17, - 18 => ErrorCode::CommonInvalidParam18, - 19 => ErrorCode::CommonInvalidParam19, - 20 => ErrorCode::CommonInvalidParam20, - 21 => ErrorCode::CommonInvalidParam21, - 22 => ErrorCode::CommonInvalidParam22, - 23 => ErrorCode::CommonInvalidParam23, - 24 => ErrorCode::CommonInvalidParam24, - 25 => ErrorCode::CommonInvalidParam25, - 26 => ErrorCode::CommonInvalidParam26, - 27 => ErrorCode::CommonInvalidParam27, - _ => ErrorCode::CommonInvalidState, - }, - IndyErrorKind::IOError => ErrorCode::CommonIOError, - IndyErrorKind::MasterSecretDuplicateName => { - ErrorCode::AnoncredsMasterSecretDuplicateNameError - } - IndyErrorKind::ProofRejected => ErrorCode::AnoncredsProofRejected, - IndyErrorKind::RevocationRegistryFull => { - ErrorCode::AnoncredsRevocationRegistryFullError - } - IndyErrorKind::InvalidUserRevocId => ErrorCode::AnoncredsInvalidUserRevocId, - IndyErrorKind::CredentialRevoked => ErrorCode::AnoncredsCredentialRevoked, - IndyErrorKind::CredDefAlreadyExists => ErrorCode::AnoncredsCredDefAlreadyExistsError, - IndyErrorKind::NoConsensus => ErrorCode::LedgerNoConsensusError, - IndyErrorKind::InvalidTransaction => ErrorCode::LedgerInvalidTransaction, - IndyErrorKind::LedgerItemNotFound => ErrorCode::LedgerNotFound, - IndyErrorKind::PoolNotCreated => ErrorCode::PoolLedgerNotCreatedError, - IndyErrorKind::InvalidPoolHandle => ErrorCode::PoolLedgerInvalidPoolHandle, - IndyErrorKind::PoolTerminated => ErrorCode::PoolLedgerTerminated, - IndyErrorKind::PoolTimeout => ErrorCode::PoolLedgerTimeout, - IndyErrorKind::PoolConfigAlreadyExists => ErrorCode::PoolLedgerConfigAlreadyExistsError, - IndyErrorKind::PoolIncompatibleProtocolVersion => { - ErrorCode::PoolIncompatibleProtocolVersion - } - IndyErrorKind::UnknownCrypto => ErrorCode::UnknownCryptoTypeError, - IndyErrorKind::InvalidWalletHandle => ErrorCode::WalletInvalidHandle, - IndyErrorKind::UnknownWalletStorageType => ErrorCode::WalletUnknownTypeError, - IndyErrorKind::WalletStorageTypeAlreadyRegistered => { - ErrorCode::WalletTypeAlreadyRegisteredError - } - IndyErrorKind::WalletAlreadyExists => ErrorCode::WalletAlreadyExistsError, - IndyErrorKind::WalletNotFound => ErrorCode::WalletNotFoundError, - IndyErrorKind::WalletAlreadyOpened => ErrorCode::WalletAlreadyOpenedError, - IndyErrorKind::WalletAccessFailed => ErrorCode::WalletAccessFailed, - IndyErrorKind::WalletEncodingError => ErrorCode::WalletDecodingError, - 
IndyErrorKind::WalletStorageError => ErrorCode::WalletStorageError, - IndyErrorKind::WalletEncryptionError => ErrorCode::WalletEncryptionError, - IndyErrorKind::WalletItemNotFound => ErrorCode::WalletItemNotFound, - IndyErrorKind::WalletItemAlreadyExists => ErrorCode::WalletItemAlreadyExists, - IndyErrorKind::WalletQueryError => ErrorCode::WalletQueryError, - IndyErrorKind::DIDAlreadyExists => ErrorCode::DidAlreadyExistsError, - IndyErrorKind::UnknownPaymentMethodType => ErrorCode::PaymentUnknownMethodError, - IndyErrorKind::IncompatiblePaymentMethods => ErrorCode::PaymentIncompatibleMethodsError, - IndyErrorKind::PaymentInsufficientFunds => ErrorCode::PaymentInsufficientFundsError, - IndyErrorKind::PaymentSourceDoesNotExist => ErrorCode::PaymentSourceDoesNotExistError, - IndyErrorKind::PaymentOperationNotSupported => { - ErrorCode::PaymentOperationNotSupportedError - } - IndyErrorKind::PaymentExtraFunds => ErrorCode::PaymentExtraFundsError, - IndyErrorKind::TransactionNotAllowed => ErrorCode::TransactionNotAllowedError, - IndyErrorKind::QueryAccountDoesNotExist => ErrorCode::QueryAccountDoesNotexistError, - IndyErrorKind::InvalidVDRHandle => ErrorCode::InvalidVDRHandle, - IndyErrorKind::InvalidVDRNamespace => ErrorCode::InvalidVDRNamespace, - IndyErrorKind::IncompatibleLedger => ErrorCode::IncompatibleLedger, - } - } -} - -impl From for IndyResult<()> { - fn from(err: ErrorCode) -> IndyResult<()> { - if err == ErrorCode::Success { - Ok(()) - } else { - Err(err.into()) - } - } -} - -impl From for IndyError { - fn from(err: ErrorCode) -> IndyError { - err_msg(err.into(), "Plugin returned error".to_string()) - } -} - -impl From for IndyErrorKind { - fn from(err: ErrorCode) -> IndyErrorKind { - match err { - ErrorCode::CommonInvalidState => IndyErrorKind::InvalidState, - ErrorCode::CommonInvalidStructure => IndyErrorKind::InvalidStructure, - ErrorCode::CommonInvalidParam1 => IndyErrorKind::InvalidParam(1), - ErrorCode::CommonInvalidParam2 => IndyErrorKind::InvalidParam(2), - ErrorCode::CommonInvalidParam3 => IndyErrorKind::InvalidParam(3), - ErrorCode::CommonInvalidParam4 => IndyErrorKind::InvalidParam(4), - ErrorCode::CommonInvalidParam5 => IndyErrorKind::InvalidParam(5), - ErrorCode::CommonInvalidParam6 => IndyErrorKind::InvalidParam(6), - ErrorCode::CommonInvalidParam7 => IndyErrorKind::InvalidParam(7), - ErrorCode::CommonInvalidParam8 => IndyErrorKind::InvalidParam(8), - ErrorCode::CommonInvalidParam9 => IndyErrorKind::InvalidParam(9), - ErrorCode::CommonInvalidParam10 => IndyErrorKind::InvalidParam(10), - ErrorCode::CommonInvalidParam11 => IndyErrorKind::InvalidParam(11), - ErrorCode::CommonInvalidParam12 => IndyErrorKind::InvalidParam(12), - ErrorCode::CommonInvalidParam13 => IndyErrorKind::InvalidParam(13), - ErrorCode::CommonInvalidParam14 => IndyErrorKind::InvalidParam(14), - ErrorCode::CommonInvalidParam15 => IndyErrorKind::InvalidParam(15), - ErrorCode::CommonInvalidParam16 => IndyErrorKind::InvalidParam(16), - ErrorCode::CommonInvalidParam17 => IndyErrorKind::InvalidParam(17), - ErrorCode::CommonInvalidParam18 => IndyErrorKind::InvalidParam(18), - ErrorCode::CommonInvalidParam19 => IndyErrorKind::InvalidParam(19), - ErrorCode::CommonInvalidParam20 => IndyErrorKind::InvalidParam(20), - ErrorCode::CommonInvalidParam21 => IndyErrorKind::InvalidParam(21), - ErrorCode::CommonInvalidParam22 => IndyErrorKind::InvalidParam(22), - ErrorCode::CommonInvalidParam23 => IndyErrorKind::InvalidParam(23), - ErrorCode::CommonInvalidParam24 => IndyErrorKind::InvalidParam(24), - 
ErrorCode::CommonInvalidParam25 => IndyErrorKind::InvalidParam(25), - ErrorCode::CommonInvalidParam26 => IndyErrorKind::InvalidParam(26), - ErrorCode::CommonInvalidParam27 => IndyErrorKind::InvalidParam(27), - ErrorCode::CommonIOError => IndyErrorKind::IOError, - ErrorCode::AnoncredsMasterSecretDuplicateNameError => { - IndyErrorKind::MasterSecretDuplicateName - } - ErrorCode::AnoncredsProofRejected => IndyErrorKind::ProofRejected, - ErrorCode::AnoncredsRevocationRegistryFullError => { - IndyErrorKind::RevocationRegistryFull - } - ErrorCode::AnoncredsInvalidUserRevocId => IndyErrorKind::InvalidUserRevocId, - ErrorCode::AnoncredsCredentialRevoked => IndyErrorKind::CredentialRevoked, - ErrorCode::AnoncredsCredDefAlreadyExistsError => IndyErrorKind::CredDefAlreadyExists, - ErrorCode::LedgerNoConsensusError => IndyErrorKind::NoConsensus, - ErrorCode::LedgerInvalidTransaction => IndyErrorKind::InvalidTransaction, - ErrorCode::LedgerNotFound => IndyErrorKind::LedgerItemNotFound, - ErrorCode::PoolLedgerNotCreatedError => IndyErrorKind::PoolNotCreated, - ErrorCode::PoolLedgerInvalidPoolHandle => IndyErrorKind::InvalidPoolHandle, - ErrorCode::PoolLedgerTerminated => IndyErrorKind::PoolTerminated, - ErrorCode::PoolLedgerTimeout => IndyErrorKind::PoolTimeout, - ErrorCode::PoolLedgerConfigAlreadyExistsError => IndyErrorKind::PoolConfigAlreadyExists, - ErrorCode::PoolIncompatibleProtocolVersion => { - IndyErrorKind::PoolIncompatibleProtocolVersion - } - ErrorCode::UnknownCryptoTypeError => IndyErrorKind::UnknownCrypto, - ErrorCode::WalletInvalidHandle => IndyErrorKind::InvalidWalletHandle, - ErrorCode::WalletUnknownTypeError => IndyErrorKind::UnknownWalletStorageType, - ErrorCode::WalletTypeAlreadyRegisteredError => { - IndyErrorKind::WalletStorageTypeAlreadyRegistered - } - ErrorCode::WalletAlreadyExistsError => IndyErrorKind::WalletAlreadyExists, - ErrorCode::WalletNotFoundError => IndyErrorKind::WalletNotFound, - ErrorCode::WalletAlreadyOpenedError => IndyErrorKind::WalletAlreadyOpened, - ErrorCode::WalletAccessFailed => IndyErrorKind::WalletAccessFailed, - ErrorCode::WalletDecodingError => IndyErrorKind::WalletEncodingError, - ErrorCode::WalletStorageError => IndyErrorKind::WalletStorageError, - ErrorCode::WalletEncryptionError => IndyErrorKind::WalletEncryptionError, - ErrorCode::WalletItemNotFound => IndyErrorKind::WalletItemNotFound, - ErrorCode::WalletItemAlreadyExists => IndyErrorKind::WalletItemAlreadyExists, - ErrorCode::WalletQueryError => IndyErrorKind::WalletQueryError, - ErrorCode::DidAlreadyExistsError => IndyErrorKind::DIDAlreadyExists, - ErrorCode::PaymentUnknownMethodError => IndyErrorKind::UnknownPaymentMethodType, - ErrorCode::PaymentIncompatibleMethodsError => IndyErrorKind::IncompatiblePaymentMethods, - ErrorCode::PaymentInsufficientFundsError => IndyErrorKind::PaymentInsufficientFunds, - ErrorCode::PaymentSourceDoesNotExistError => IndyErrorKind::PaymentSourceDoesNotExist, - ErrorCode::PaymentOperationNotSupportedError => { - IndyErrorKind::PaymentOperationNotSupported - } - ErrorCode::PaymentExtraFundsError => IndyErrorKind::PaymentExtraFunds, - ErrorCode::TransactionNotAllowedError => IndyErrorKind::TransactionNotAllowed, - ErrorCode::InvalidVDRHandle => IndyErrorKind::InvalidVDRHandle, - ErrorCode::InvalidVDRNamespace => IndyErrorKind::InvalidVDRNamespace, - ErrorCode::IncompatibleLedger => IndyErrorKind::IncompatibleLedger, - _code => IndyErrorKind::InvalidState, - } - } -} - -pub type IndyResult = Result; - -/// Extension methods for `Result`. 
-pub trait IndyResultExt { - fn to_indy(self, kind: IndyErrorKind, msg: D) -> IndyResult - where - D: fmt::Display + Send + Sync + 'static; -} - -impl IndyResultExt for Result -where - E: fmt::Display, -{ - fn to_indy(self, kind: IndyErrorKind, msg: D) -> IndyResult - where - D: fmt::Display + Send + Sync + 'static, - { - self.map_err(|err| err.to_indy(kind, msg)) - } -} - -/// Extension methods for `Error`. -pub trait IndyErrorExt { - fn to_indy(self, kind: IndyErrorKind, msg: D) -> IndyError - where - D: fmt::Display + Send + Sync + 'static; -} - -impl IndyErrorExt for E -where - E: fmt::Display, -{ - fn to_indy(self, kind: IndyErrorKind, msg: D) -> IndyError - where - D: fmt::Display + Send + Sync + 'static, - { - IndyError::from_msg(kind, format!("{msg}\n Caused by: {self}")) - } -} - -thread_local! { - pub static CURRENT_ERROR_C_JSON: RefCell> = const { RefCell::new(None) }; -} - -pub fn set_current_error(err: &IndyError) { - CURRENT_ERROR_C_JSON - .try_with(|error| { - let error_json = json!({ - "message": err.to_string(), - "backtrace": err.source().map(|bt| bt.to_string()) - }) - .to_string(); - error.replace(Some(string_to_cstring(error_json))); - }) - .map_err(|err| error!("Thread local variable access failed with: {:?}", err)) - .ok(); -} - -/// Get details for last occurred error. -/// -/// This function should be called in two places to handle both cases of error occurrence: -/// 1) synchronous - in the same application thread -/// 2) asynchronous - inside of function callback -/// -/// NOTE: Error is stored until the next one occurs in the same execution thread or until -/// asynchronous callback finished. Returning pointer has the same lifetime. -/// -/// #Params -/// * `error_json_p` - Reference that will contain error details (if any error has occurred before) -/// in the format: -/// { -/// "backtrace": Optional - error backtrace. 
-/// Collecting of backtrace can be enabled by: -/// 1) setting environment variable `RUST_BACKTRACE=1` -/// 2) calling `indy_set_runtime_config` API function with `collect_backtrace: true` -/// "message": str - human-readable error description -/// } -pub fn get_current_error_c_json() -> *const c_char { - let mut value = ptr::null(); - - CURRENT_ERROR_C_JSON - .try_with(|err| err.borrow().as_ref().map(|err| value = err.as_ptr())) - .map_err(|err| error!("Thread local variable access failed with: {:?}", err)) - .ok(); - - value -} - -pub fn string_to_cstring(s: String) -> CString { - CString::new(s).unwrap() -} diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/src/lib.rs b/aries/misc/legacy/libvdrtools/indy-api-types/src/lib.rs deleted file mode 100644 index 8e9f98c1b0..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/src/lib.rs +++ /dev/null @@ -1,280 +0,0 @@ -#[macro_use] -extern crate log; - -extern crate serde; - -#[macro_use] -extern crate serde_derive; - -#[macro_use] -extern crate serde_json; - -pub type IndyHandle = i32; - -#[repr(transparent)] -#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone, Serialize, Deserialize)] -pub struct WalletHandle(pub i32); -pub const INVALID_WALLET_HANDLE: WalletHandle = WalletHandle(0); - -impl From for WalletHandle { - fn from(value: i32) -> Self { - Self(value) - } -} - -pub type CallbackHandle = i32; - -pub type CommandHandle = i32; -pub const INVALID_COMMAND_HANDLE: CommandHandle = 0; - -pub type StorageHandle = i32; - -pub type VdrHandle = i32; -pub const INVALID_VDR_HANDLE: VdrHandle = 0; - -#[repr(transparent)] -#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone, Serialize, Deserialize)] -pub struct SearchHandle(pub i32); -pub const INVALID_SEARCH_HANDLE: SearchHandle = SearchHandle(0); - -/* -pub type SearchHandle = i32; -pub const INVALID_SEARCH_HANDLE : SearchHandle = 0; -*/ - -pub mod domain; - -pub mod errors; -pub use errors::IndyError; - -#[derive(Debug, PartialEq, Eq, Copy, Clone)] -#[repr(i32)] -pub enum ErrorCode { - Success = 0, - - // Common errors - - // Caller passed invalid value as param 1 (null, invalid json and etc..) - CommonInvalidParam1 = 100, - - // Caller passed invalid value as param 2 (null, invalid json and etc..) - CommonInvalidParam2 = 101, - - // Caller passed invalid value as param 3 (null, invalid json and etc..) - CommonInvalidParam3 = 102, - - // Caller passed invalid value as param 4 (null, invalid json and etc..) - CommonInvalidParam4 = 103, - - // Caller passed invalid value as param 5 (null, invalid json and etc..) - CommonInvalidParam5 = 104, - - // Caller passed invalid value as param 6 (null, invalid json and etc..) - CommonInvalidParam6 = 105, - - // Caller passed invalid value as param 7 (null, invalid json and etc..) - CommonInvalidParam7 = 106, - - // Caller passed invalid value as param 8 (null, invalid json and etc..) - CommonInvalidParam8 = 107, - - // Caller passed invalid value as param 9 (null, invalid json and etc..) - CommonInvalidParam9 = 108, - - // Caller passed invalid value as param 10 (null, invalid json and etc..) - CommonInvalidParam10 = 109, - - // Caller passed invalid value as param 11 (null, invalid json and etc..) - CommonInvalidParam11 = 110, - - // Caller passed invalid value as param 12 (null, invalid json and etc..) - CommonInvalidParam12 = 111, - - // Invalid library state was detected in runtime. It signals library bug - CommonInvalidState = 112, - - // Object (json, config, key, credential and etc...) 
passed by library caller has invalid - // structure - CommonInvalidStructure = 113, - - // IO Error - CommonIOError = 114, - - // Caller passed invalid value as param 13 (null, invalid json and etc..) - CommonInvalidParam13 = 115, - - // Caller passed invalid value as param 14 (null, invalid json and etc..) - CommonInvalidParam14 = 116, - - // Caller passed invalid value as param 15 (null, invalid json and etc..) - CommonInvalidParam15 = 117, - - // Caller passed invalid value as param 16 (null, invalid json and etc..) - CommonInvalidParam16 = 118, - - // Caller passed invalid value as param 17 (null, invalid json and etc..) - CommonInvalidParam17 = 119, - - // Caller passed invalid value as param 18 (null, invalid json and etc..) - CommonInvalidParam18 = 120, - - // Caller passed invalid value as param 19 (null, invalid json and etc..) - CommonInvalidParam19 = 121, - - // Caller passed invalid value as param 20 (null, invalid json and etc..) - CommonInvalidParam20 = 122, - - // Caller passed invalid value as param 21 (null, invalid json and etc..) - CommonInvalidParam21 = 123, - - // Caller passed invalid value as param 22 (null, invalid json and etc..) - CommonInvalidParam22 = 124, - - // Caller passed invalid value as param 23 (null, invalid json and etc..) - CommonInvalidParam23 = 125, - - // Caller passed invalid value as param 24 (null, invalid json and etc..) - CommonInvalidParam24 = 126, - - // Caller passed invalid value as param 25 (null, invalid json and etc..) - CommonInvalidParam25 = 127, - - // Caller passed invalid value as param 26 (null, invalid json and etc..) - CommonInvalidParam26 = 128, - - // Caller passed invalid value as param 27 (null, invalid json and etc..) - CommonInvalidParam27 = 129, - - // Wallet errors - // Caller passed invalid wallet handle - WalletInvalidHandle = 200, - - // Unknown type of wallet was passed on create_wallet - WalletUnknownTypeError = 201, - - // Attempt to register already existing wallet type - WalletTypeAlreadyRegisteredError = 202, - - // Attempt to create wallet with name used for another exists wallet - WalletAlreadyExistsError = 203, - - // Requested entity id isn't present in wallet - WalletNotFoundError = 204, - - // Trying to use wallet with pool that has different name - WalletIncompatiblePoolError = 205, - - // Trying to open wallet that was opened already - WalletAlreadyOpenedError = 206, - - // Attempt to open encrypted wallet with invalid credentials - WalletAccessFailed = 207, - - // Input provided to wallet operations is considered not valid - WalletInputError = 208, - - // Decoding of wallet data during input/output failed - WalletDecodingError = 209, - - // Storage error occurred during wallet operation - WalletStorageError = 210, - - // Error during encryption-related operations - WalletEncryptionError = 211, - - // Requested wallet item not found - WalletItemNotFound = 212, - - // Returned if wallet's add_record operation is used with record name that already exists - WalletItemAlreadyExists = 213, - - // Returned if provided wallet query is invalid - WalletQueryError = 214, - - // Ledger errors - // Trying to open pool ledger that wasn't created before - PoolLedgerNotCreatedError = 300, - - // Caller passed invalid pool ledger handle - PoolLedgerInvalidPoolHandle = 301, - - // Pool ledger terminated - PoolLedgerTerminated = 302, - - // No consensus during ledger operation - LedgerNoConsensusError = 303, - - // Attempt to parse invalid transaction response - LedgerInvalidTransaction = 304, - - // Attempt to send 
transaction without the necessary privileges - LedgerSecurityError = 305, - - // Attempt to create pool ledger config with name used for another existing pool - PoolLedgerConfigAlreadyExistsError = 306, - - // Timeout for action - PoolLedgerTimeout = 307, - - // Attempt to open Pool for witch Genesis Transactions are not compatible with set Protocol - // version. Call pool.indy_set_protocol_version to set correct Protocol version. - PoolIncompatibleProtocolVersion = 308, - - // Item not found on ledger. - LedgerNotFound = 309, - - // Revocation registry is full and creation of new registry is necessary - AnoncredsRevocationRegistryFullError = 400, - - AnoncredsInvalidUserRevocId = 401, - - // Attempt to generate master secret with duplicated name - AnoncredsMasterSecretDuplicateNameError = 404, - - AnoncredsProofRejected = 405, - - AnoncredsCredentialRevoked = 406, - - // Attempt to create credential definition with duplicated id - AnoncredsCredDefAlreadyExistsError = 407, - - // Crypto errors - // Unknown format of DID entity keys - UnknownCryptoTypeError = 500, - - // Attempt to create duplicate did - DidAlreadyExistsError = 600, - - // Unknown payment method was given - PaymentUnknownMethodError = 700, - - //No method were scraped from inputs/outputs or more than one were scraped - PaymentIncompatibleMethodsError = 701, - - // Insufficient funds on inputs - PaymentInsufficientFundsError = 702, - - // No such source on a ledger - PaymentSourceDoesNotExistError = 703, - - // Operation is not supported for payment method - PaymentOperationNotSupportedError = 704, - - // Extra funds on inputs - PaymentExtraFundsError = 705, - - // The transaction is not allowed to a requester - TransactionNotAllowedError = 706, - - // Query Account does not exist in the pool - QueryAccountDoesNotexistError = 808, - - // Caller passed invalid wallet handle - InvalidVDRHandle = 810, - - // Unable to get register Ledger for specified namespace and VDR - InvalidVDRNamespace = 811, - - // Registered Ledger type does not match to the network of id - IncompatibleLedger = 812, -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/Cargo.toml b/aries/misc/legacy/libvdrtools/indy-utils/Cargo.toml deleted file mode 100644 index a502e61f0e..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/Cargo.toml +++ /dev/null @@ -1,38 +0,0 @@ -[package] -name = "indy-utils" -version = "0.1.0" -authors = ["Hyperledger Indy Contributors "] -edition = "2018" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[features] -default = ["base64_rust_base64", "ed25519_sign_sodium", "ed25519_box_sodium", "sealedbox_sodium", "base64_rust_base64", "xsalsa20_sodium", "chacha20poly1305_ietf_sodium", "hash_openssl", "pwhash_argon2i13_sodium", "hmacsha256_sodium", "randombytes_sodium"] -base64_rust_base64 = [] -ed25519_sign_sodium = [] -ed25519_box_sodium = [] -sealedbox_sodium = [] -xsalsa20_sodium = [] -chacha20poly1305_ietf_sodium = [] -pwhash_argon2i13_sodium = [] -hmacsha256_sodium = [] -hash_openssl = [] -randombytes_sodium = [] - -[dependencies] -base64 = {version = "0.21.2"} -dirs = "5" -failure = "0.1" -indy-api-types = { path = "../indy-api-types" } -lazy_static = "1" -libc = "0.2" -log = "0.4" -openssl = { version = "0.10" } -serde = "1" -serde_json = "1" -serde_derive = "1" -sodiumoxide = {version = "0.0.16"} # touching this makes things go boom -zeroize = "1" - -[dev-dependencies] -rmp-serde = "1" -rand = "0.8" diff --git 
a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs deleted file mode 100644 index 204d91fc60..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs +++ /dev/null @@ -1,78 +0,0 @@ -use base64::{ - alphabet, - engine::{general_purpose, DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig}, - Engine, -}; -use indy_api_types::errors::prelude::*; - -/// Default general purpose configuration, but padding decode mode of 'indifferent' (will decode -/// either) -const ANY_PADDING: GeneralPurposeConfig = - GeneralPurposeConfig::new().with_decode_padding_mode(DecodePaddingMode::Indifferent); -/// Standard Base64 URL Safe decoding and encoding, with indifference for padding mode when decoding -const URL_SAFE_ANY_PADDING: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, ANY_PADDING); - -pub fn encode(doc: &[u8]) -> String { - general_purpose::STANDARD.encode(doc) -} - -pub fn decode(doc: &str) -> Result, IndyError> { - general_purpose::STANDARD - .decode(doc) - .map_err(|e| e.to_indy(IndyErrorKind::InvalidStructure, "Invalid base64 sequence")) -} - -pub fn encode_urlsafe(doc: &[u8]) -> String { - general_purpose::URL_SAFE.encode(doc) -} - -pub fn decode_urlsafe(doc: &str) -> Result, IndyError> { - URL_SAFE_ANY_PADDING.decode(doc).map_err(|e| { - e.to_indy( - IndyErrorKind::InvalidStructure, - "Invalid base64URL_SAFE sequence", - ) - }) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn encode_works() { - let result = encode(&[1, 2, 3]); - assert_eq!("AQID", &result); - } - - #[test] - fn decode_works() { - let result = decode("AQID"); - - assert!(result.is_ok(), "Got error"); - assert_eq!(&[1, 2, 3], &result.unwrap()[..]); - } - - #[test] - fn encode_urlsafe_works() { - let result = encode_urlsafe(&[1, 2, 3]); - assert_eq!("AQID", &result); - } - - #[test] - fn decode_urlsafe_works() { - let result = decode_urlsafe("AQID"); - - assert!(result.is_ok(), "Got error"); - assert_eq!(&[1, 2, 3], &result.unwrap()[..]); - } - - #[test] - fn decode_urlsafe_works_with_or_without_padding() { - let result = decode_urlsafe("YWJjZA=="); - assert_eq!(vec![97, 98, 99, 100], result.unwrap()); - - let result = decode_urlsafe("YWJjZA"); - assert_eq!(vec![97, 98, 99, 100], result.unwrap()); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs deleted file mode 100644 index 5b913ec5da..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs +++ /dev/null @@ -1,428 +0,0 @@ -extern crate sodiumoxide; - -use std::{ - cmp, io, - io::{Read, Write}, -}; - -use indy_api_types::{domain::wallet::KeyDerivationMethod, errors::prelude::*}; - -use self::sodiumoxide::{crypto::aead::chacha20poly1305_ietf, utils}; -use super::pwhash_argon2i13; - -pub const KEYBYTES: usize = chacha20poly1305_ietf::KEYBYTES; -pub const NONCEBYTES: usize = chacha20poly1305_ietf::NONCEBYTES; -pub const TAGBYTES: usize = chacha20poly1305_ietf::TAGBYTES; - -sodium_type!(Key, chacha20poly1305_ietf::Key, KEYBYTES); -sodium_type!(Nonce, chacha20poly1305_ietf::Nonce, NONCEBYTES); -sodium_type!(Tag, chacha20poly1305_ietf::Tag, TAGBYTES); - -impl Nonce { - pub fn increment(&mut self) { - utils::increment_le(&mut (self.0).0); - } -} - -pub fn gen_key() -> Key { - Key(chacha20poly1305_ietf::gen_key()) -} - -pub fn derive_key( - 
passphrase: &str, - salt: &pwhash_argon2i13::Salt, - key_derivation_method: &KeyDerivationMethod, -) -> Result { - let mut key_bytes = [0u8; chacha20poly1305_ietf::KEYBYTES]; - - pwhash_argon2i13::pwhash( - &mut key_bytes, - passphrase.as_bytes(), - salt, - key_derivation_method, - ) - .map_err(|err| err.extend("Can't derive key"))?; - - Ok(Key::new(key_bytes)) -} - -pub fn gen_nonce() -> Nonce { - Nonce(chacha20poly1305_ietf::gen_nonce()) -} - -pub fn gen_nonce_and_encrypt(data: &[u8], key: &Key) -> (Vec, Nonce) { - let nonce = gen_nonce(); - - let encrypted_data = chacha20poly1305_ietf::seal(data, None, &nonce.0, &key.0); - - (encrypted_data, nonce) -} - -pub fn gen_nonce_and_encrypt_detached(data: &[u8], aad: &[u8], key: &Key) -> (Vec, Nonce, Tag) { - let nonce = gen_nonce(); - - let mut plain = data.to_vec(); - let tag = - chacha20poly1305_ietf::seal_detached(plain.as_mut_slice(), Some(aad), &nonce.0, &key.0); - - (plain.to_vec(), nonce, Tag(tag)) -} - -pub fn decrypt_detached( - data: &[u8], - key: &Key, - nonce: &Nonce, - tag: &Tag, - ad: Option<&[u8]>, -) -> Result, IndyError> { - let mut plain = data.to_vec(); - chacha20poly1305_ietf::open_detached(plain.as_mut_slice(), ad, &tag.0, &nonce.0, &key.0) - .map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to decrypt data: {:?}", - ) - }) - .map(|()| plain) -} - -pub fn encrypt(data: &[u8], key: &Key, nonce: &Nonce) -> Vec { - chacha20poly1305_ietf::seal(data, None, &nonce.0, &key.0) -} - -pub fn decrypt(data: &[u8], key: &Key, nonce: &Nonce) -> Result, IndyError> { - chacha20poly1305_ietf::open(data, None, &nonce.0, &key.0).map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to open sodium chacha20poly1305_ietf", - ) - }) -} - -pub struct Writer { - buffer: Vec, - chunk_size: usize, - key: Key, - nonce: Nonce, - inner: W, -} - -impl Writer { - pub fn new(inner: W, key: Key, nonce: Nonce, chunk_size: usize) -> Self { - Writer { - buffer: Vec::new(), - chunk_size, - key, - nonce, - inner, - } - } - - #[allow(unused)] - pub fn into_inner(self) -> W { - self.inner - } -} - -impl Write for Writer { - fn write(&mut self, buf: &[u8]) -> io::Result { - self.buffer.write_all(buf)?; // TODO: Small optimizations are possible - - let mut chunk_start = 0; - - while self.buffer.len() >= chunk_start + self.chunk_size { - let chunk = &self.buffer[chunk_start..chunk_start + self.chunk_size]; - self.inner - .write_all(&encrypt(chunk, &self.key, &self.nonce))?; - self.nonce.increment(); - chunk_start += self.chunk_size; - } - - if chunk_start > 0 { - self.buffer.drain(..chunk_start); - } - - Ok(buf.len()) - } - - fn flush(&mut self) -> io::Result<()> { - if !self.buffer.is_empty() { - self.inner - .write_all(&encrypt(&self.buffer, &self.key, &self.nonce))?; - self.nonce.increment(); - } - - self.buffer.flush() - } -} - -pub struct Reader { - rest_buffer: Vec, - chunk_buffer: Vec, - key: Key, - nonce: Nonce, - inner: R, -} - -impl Reader { - pub fn new(inner: R, key: Key, nonce: Nonce, chunk_size: usize) -> Self { - Reader { - rest_buffer: Vec::new(), - chunk_buffer: vec![0; chunk_size + TAGBYTES], - key, - nonce, - inner, - } - } - - #[allow(unused)] - pub fn into_inner(self) -> R { - self.inner - } - - fn _read_chunk(&mut self) -> io::Result { - let mut read = 0; - - while read < self.chunk_buffer.len() { - match self.inner.read(&mut self.chunk_buffer[read..]) { - Ok(0) => break, - Ok(n) => read += n, - Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue, - Err(e) => return 
Err(e), - } - } - - if read == 0 { - Err(io::Error::new( - io::ErrorKind::UnexpectedEof, - "No more crypto chucks to consume", - )) - } else { - Ok(read) - } - } -} - -impl Read for Reader { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - let mut pos = 0; - - // Consume from rest buffer - if !self.rest_buffer.is_empty() { - let to_copy = cmp::min(self.rest_buffer.len(), buf.len() - pos); - buf[pos..pos + to_copy].copy_from_slice(&self.rest_buffer[..to_copy]); - pos += to_copy; - self.rest_buffer.drain(..to_copy); - } - - // Consume from chunks - while pos < buf.len() { - let chunk_size = self._read_chunk()?; - - let chunk = decrypt(&self.chunk_buffer[..chunk_size], &self.key, &self.nonce).map_err( - |_| io::Error::new(io::ErrorKind::InvalidData, "Invalid data in crypto chunk"), - )?; - - self.nonce.increment(); - - let to_copy = cmp::min(chunk.len(), buf.len() - pos); - buf[pos..pos + to_copy].copy_from_slice(&chunk[..to_copy]); - pos += to_copy; - - // Save rest in rest buffer - if pos == buf.len() && to_copy < chunk.len() { - self.rest_buffer.extend(&chunk[to_copy..]); - } - } - - Ok(buf.len()) - } -} - -#[cfg(test)] -mod tests { - extern crate rmp_serde; - - use super::*; - use crate::crypto::randombytes::randombytes; - - #[test] - fn derivation_argon2i_mod_produces_expected_result() { - let passphrase = "passphrase"; - let salt_bytes: [u8; 32] = [ - 24, 62, 35, 31, 123, 241, 94, 24, 192, 110, 199, 143, 173, 20, 23, 102, 184, 99, 221, - 64, 247, 230, 11, 253, 10, 7, 80, 236, 185, 249, 110, 187, - ]; - let key_bytes: [u8; 32] = [ - 148, 89, 76, 239, 127, 103, 13, 86, 84, 217, 216, 13, 223, 141, 225, 41, 223, 126, 145, - 138, 174, 31, 142, 199, 81, 12, 40, 201, 67, 8, 6, 251, - ]; - - let res = derive_key( - passphrase, - &pwhash_argon2i13::Salt::from_slice(&salt_bytes).unwrap(), - &KeyDerivationMethod::ARGON2I_MOD, - ) - .unwrap(); - - assert_eq!(res, Key::new(key_bytes)) - } - - #[test] - fn derivation_argon2i_int_produces_expected_result() { - let passphrase = "passphrase"; - let salt_bytes: [u8; 32] = [ - 24, 62, 35, 31, 123, 241, 94, 24, 192, 110, 199, 143, 173, 20, 23, 102, 184, 99, 221, - 64, 247, 230, 11, 253, 10, 7, 80, 236, 185, 249, 110, 187, - ]; - let key_bytes: [u8; 32] = [ - 247, 55, 177, 252, 244, 130, 218, 129, 113, 206, 72, 44, 29, 68, 134, 215, 249, 233, - 131, 199, 38, 87, 69, 217, 156, 217, 10, 160, 30, 148, 80, 160, - ]; - - let res = derive_key( - passphrase, - &pwhash_argon2i13::Salt::from_slice(&salt_bytes).unwrap(), - &KeyDerivationMethod::ARGON2I_INT, - ) - .unwrap(); - - assert_eq!(res, Key::new(key_bytes)) - } - - #[test] - fn gen_nonce_and_encrypt_decrypt_works() { - let data = randombytes(100); - let key = gen_key(); - - let (c, nonce) = gen_nonce_and_encrypt(&data, &key); - let u = decrypt(&c, &key, &nonce).unwrap(); - - assert_eq!(data, u); - } - - #[test] - pub fn gen_nonce_and_encrypt_detached_decrypt_detached_works() { - let data = randombytes(100); - let key = gen_key(); - // AAD allows the sender to tie extra (protocol) data to the encryption. 
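// (The AAD bytes are covered by the Poly1305 authentication tag but are not themselves encrypted; `decrypt_detached` hands them to `open_detached`, which rejects the ciphertext if the receiver supplies different AAD.)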
Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = b"some protocol data input to the encryption"; - - let (c, nonce, tag) = gen_nonce_and_encrypt_detached(&data, aad, &key); - let u = decrypt_detached(&c, &key, &nonce, &tag, Some(aad)).unwrap(); - assert_eq!(data, u); - } - - #[test] - fn encrypt_decrypt_works_for_nonce() { - let data = randombytes(16); - - let key = gen_key(); - let nonce = gen_nonce(); - let c = encrypt(&data, &key, &nonce); - let u = decrypt(&c, &key, &nonce).unwrap(); - - assert_eq!(data, u) - } - - #[test] - fn nonce_serialize_deserialize_works() { - let nonce = gen_nonce(); - let serialized = rmp_serde::to_vec(&nonce).unwrap(); - let deserialized: Nonce = rmp_serde::from_slice(&serialized).unwrap(); - - assert_eq!(serialized.len(), NONCEBYTES + 2); - assert_eq!(nonce, deserialized) - } - - #[test] - fn key_serialize_deserialize_works() { - let key = gen_key(); - let serialized = rmp_serde::to_vec(&key).unwrap(); - let deserialized: Key = rmp_serde::from_slice(&serialized).unwrap(); - - assert_eq!(serialized.len(), KEYBYTES + 2); - assert_eq!(key, deserialized) - } - - #[test] - fn writer_reader_works_for_less_than_one_chunk() { - let plain = randombytes(7); - let key = gen_key(); - let nonce = gen_nonce(); - - let mut writer = Writer::new(Vec::::new(), key.clone(), nonce.clone(), 10); - writer.write_all(&plain).unwrap(); - writer.flush().unwrap(); - - let encrypted = writer.into_inner(); - assert_eq!(encrypted.len(), 7 + TAGBYTES); - - let mut decrypted = vec![0u8; 7]; - let mut reader = Reader::new(&encrypted[..], key, nonce, 10); - reader.read_exact(&mut decrypted).unwrap(); - - assert_eq!(plain, decrypted); - } - - #[test] - fn writer_reader_works_for_exact_one_chunk() { - let plain = randombytes(10); - let key = gen_key(); - let nonce = gen_nonce(); - - let mut writer = Writer::new(Vec::::new(), key.clone(), nonce.clone(), 10); - writer.write_all(&plain).unwrap(); - writer.flush().unwrap(); - - let encrypted = writer.into_inner(); - assert_eq!(encrypted.len(), 10 + TAGBYTES); - - let mut decrypted = vec![0u8; 10]; - let mut reader = Reader::new(&encrypted[..], key, nonce, 10); - reader.read_exact(&mut decrypted).unwrap(); - - assert_eq!(plain, decrypted); - } - - #[test] - fn writer_reader_works_for_one_to_two_chunks() { - let plain = randombytes(13); - let key = gen_key(); - let nonce = gen_nonce(); - - let mut writer = Writer::new(Vec::::new(), key.clone(), nonce.clone(), 10); - writer.write_all(&plain).unwrap(); - writer.flush().unwrap(); - - let encrypted = writer.into_inner(); - assert_eq!(encrypted.len(), 13 + 2 * TAGBYTES); - - let mut decrypted = vec![0u8; 13]; - let mut reader = Reader::new(&encrypted[..], key, nonce, 10); - reader.read_exact(&mut decrypted).unwrap(); - - assert_eq!(plain, decrypted); - } - - #[test] - fn writer_reader_works_for_exact_two_chunks() { - let plain = randombytes(20); - let key = gen_key(); - let nonce = gen_nonce(); - - let mut writer = Writer::new(Vec::::new(), key.clone(), nonce.clone(), 10); - writer.write_all(&plain).unwrap(); - writer.flush().unwrap(); - - let encrypted = writer.into_inner(); - assert_eq!(encrypted.len(), 20 + 2 * TAGBYTES); - - let mut decrypted = vec![0u8; 20]; - let mut reader = Reader::new(&encrypted[..], key, nonce, 10); - reader.read_exact(&mut decrypted).unwrap(); - - assert_eq!(plain, decrypted); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs 
b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs deleted file mode 100644 index d51f75d92f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs +++ /dev/null @@ -1,73 +0,0 @@ -extern crate sodiumoxide; - -use indy_api_types::errors::prelude::*; - -use self::sodiumoxide::crypto::box_; - -pub const NONCEBYTES: usize = box_::curve25519xsalsa20poly1305::NONCEBYTES; -pub const PUBLICKEYBYTES: usize = box_::curve25519xsalsa20poly1305::PUBLICKEYBYTES; -pub const SECRETKEYBYTES: usize = box_::curve25519xsalsa20poly1305::SECRETKEYBYTES; - -sodium_type!(Nonce, box_::Nonce, NONCEBYTES); -sodium_type!(PublicKey, box_::PublicKey, PUBLICKEYBYTES); -sodium_type!(SecretKey, box_::SecretKey, SECRETKEYBYTES); - -pub fn encrypt( - secret_key: &SecretKey, - public_key: &PublicKey, - doc: &[u8], - nonce: &Nonce, -) -> Result, IndyError> { - Ok(box_::seal(doc, &nonce.0, &public_key.0, &secret_key.0)) -} - -pub fn decrypt( - secret_key: &SecretKey, - public_key: &PublicKey, - doc: &[u8], - nonce: &Nonce, -) -> Result, IndyError> { - box_::open(doc, &nonce.0, &public_key.0, &secret_key.0).map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to open sodium _box", - ) - }) -} - -pub fn gen_nonce() -> Nonce { - Nonce(box_::gen_nonce()) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::crypto::{ed25519_sign, randombytes::randombytes}; - - #[test] - fn encrypt_decrypt_works() { - let text = randombytes(16); - let nonce = gen_nonce(); - let seed = ed25519_sign::Seed::from_slice(&randombytes(32)).unwrap(); - - let (alice_ver_key, alice_sign_key) = - ed25519_sign::create_key_pair_for_signature(Some(&seed)).unwrap(); - let alice_pk = ed25519_sign::vk_to_curve25519(&alice_ver_key).unwrap(); - let alice_sk = ed25519_sign::sk_to_curve25519(&alice_sign_key).unwrap(); - - let (bob_ver_key, bob_sign_key) = - ed25519_sign::create_key_pair_for_signature(Some(&seed)).unwrap(); - let bob_pk = ed25519_sign::vk_to_curve25519(&bob_ver_key).unwrap(); - let bob_sk = ed25519_sign::sk_to_curve25519(&bob_sign_key).unwrap(); - - let bob_encrypted_text = encrypt(&bob_sk, &alice_pk, &text, &nonce).unwrap(); - let bob_decrypt_result = decrypt(&alice_sk, &bob_pk, &bob_encrypted_text, &nonce); - assert!(bob_decrypt_result.is_ok()); - assert_eq!(text, bob_decrypt_result.unwrap()); - - let alice_encrypted_text = encrypt(&alice_sk, &bob_pk, &text, &nonce).unwrap(); - let alice_decrypted_text = decrypt(&bob_sk, &alice_pk, &alice_encrypted_text, &nonce); - assert!(alice_decrypted_text.is_ok()); - assert_eq!(text, alice_decrypted_text.unwrap()); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs deleted file mode 100644 index c89f8abeab..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs +++ /dev/null @@ -1,123 +0,0 @@ -use indy_api_types::errors::prelude::*; -use libc::c_int; -use sodiumoxide::crypto::{box_, sign}; - -use super::{ed25519_box, randombytes::randombytes}; - -pub const SEEDBYTES: usize = sign::SEEDBYTES; -pub const SIG_PUBLICKEYBYTES: usize = sign::PUBLICKEYBYTES; -pub const ENC_PUBLICKEYBYTES: usize = box_::PUBLICKEYBYTES; -pub const SIG_SECRETKEYBYTES: usize = sign::SECRETKEYBYTES; -pub const ENC_SECRETKEYBYTES: usize = box_::SECRETKEYBYTES; -pub const SIGNATUREBYTES: usize = sign::SIGNATUREBYTES; - -sodium_type!(Seed, sign::Seed, SEEDBYTES); -sodium_type!(PublicKey, 
sign::PublicKey, SIG_PUBLICKEYBYTES); -sodium_type!(SecretKey, sign::SecretKey, SIG_SECRETKEYBYTES); -sodium_type!(Signature, sign::Signature, SIGNATUREBYTES); - -extern "C" { - // TODO: fix hack: - // this functions isn't included to sodiumoxide rust wrappers, - // temporary local binding is used to call libsodium-sys function - pub fn crypto_sign_ed25519_pk_to_curve25519( - curve25519_pk: *mut [u8; ENC_PUBLICKEYBYTES], - ed25519_pk: *const [u8; SIG_PUBLICKEYBYTES], - ) -> c_int; - pub fn crypto_sign_ed25519_sk_to_curve25519( - curve25519_sk: *mut [u8; ENC_SECRETKEYBYTES], - ed25519_sk: *const [u8; SIG_SECRETKEYBYTES], - ) -> c_int; -} - -pub fn create_key_pair_for_signature( - seed: Option<&Seed>, -) -> Result<(PublicKey, SecretKey), IndyError> { - let (public_key, secret_key) = sign::keypair_from_seed( - &seed - .unwrap_or(&Seed::from_slice(&randombytes(SEEDBYTES)).unwrap()) - .0, - ); - - Ok((PublicKey(public_key), SecretKey(secret_key))) -} - -pub fn sign(secret_key: &SecretKey, doc: &[u8]) -> Result { - Ok(Signature(sign::sign_detached(doc, &secret_key.0))) -} - -pub fn verify( - public_key: &PublicKey, - doc: &[u8], - signature: &Signature, -) -> Result { - Ok(sign::verify_detached(&signature.0, doc, &public_key.0)) -} - -pub fn sk_to_curve25519(sk: &SecretKey) -> Result { - let mut to: [u8; ENC_SECRETKEYBYTES] = [0; ENC_SECRETKEYBYTES]; - unsafe { - crypto_sign_ed25519_sk_to_curve25519(&mut to, &(sk.0).0); - } - ed25519_box::SecretKey::from_slice(&to) -} - -pub fn vk_to_curve25519(pk: &PublicKey) -> Result { - let mut to: [u8; ENC_PUBLICKEYBYTES] = [0; ENC_PUBLICKEYBYTES]; - unsafe { - crypto_sign_ed25519_pk_to_curve25519(&mut to, &(pk.0).0); - } - ed25519_box::PublicKey::from_slice(&to) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::crypto::ed25519_box; - - #[test] - fn signin_verify_works() { - let seed = Seed::from_slice(&randombytes(SEEDBYTES)).unwrap(); - let text = randombytes(16); - - let (public_key, secret_key) = create_key_pair_for_signature(Some(&seed)).unwrap(); - let alice_signed_text = sign(&secret_key, &text).unwrap(); - let verified = verify(&public_key, &text, &alice_signed_text).unwrap(); - - assert!(verified); - } - - #[test] - fn pk_to_curve25519_works() { - let pk = vec![ - 236, 191, 114, 144, 108, 87, 211, 244, 148, 23, 20, 175, 122, 6, 159, 254, 85, 99, 145, - 152, 178, 133, 230, 236, 192, 69, 35, 136, 141, 194, 243, 134, - ]; - let pk = PublicKey::from_slice(&pk).unwrap(); - let pkc_test = vk_to_curve25519(&pk).unwrap(); - let pkc_exp = vec![ - 8, 45, 124, 147, 248, 201, 112, 171, 11, 51, 29, 248, 34, 127, 197, 241, 60, 158, 84, - 47, 4, 176, 238, 166, 110, 39, 207, 58, 127, 110, 76, 42, - ]; - let pkc_exp = ed25519_box::PublicKey::from_slice(&pkc_exp).unwrap(); - assert_eq!(pkc_exp, pkc_test); - } - - #[test] - fn sk_to_curve25519_works() { - let sk = vec![ - 78, 67, 205, 99, 150, 131, 75, 110, 56, 154, 76, 61, 27, 142, 36, 141, 44, 223, 122, - 199, 14, 230, 12, 163, 4, 255, 94, 230, 21, 242, 97, 200, 236, 191, 114, 144, 108, 87, - 211, 244, 148, 23, 20, 175, 122, 6, 159, 254, 85, 99, 145, 152, 178, 133, 230, 236, - 192, 69, 35, 136, 141, 194, 243, 134, - ]; - let sk = SecretKey::from_slice(&sk).unwrap(); - let skc_test = sk_to_curve25519(&sk).unwrap(); - let skc_exp = vec![ - 144, 112, 64, 101, 69, 167, 61, 44, 220, 148, 58, 187, 108, 73, 11, 247, 130, 161, 158, - 40, 100, 1, 40, 27, 76, 148, 209, 240, 195, 35, 153, 121, - ]; - let skc_exp = ed25519_box::SecretKey::from_slice(&skc_exp).unwrap(); - assert_eq!(skc_exp, skc_test); - } -} diff 
--git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hash/openssl.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hash/openssl.rs deleted file mode 100644 index 237ac7e01f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hash/openssl.rs +++ /dev/null @@ -1,84 +0,0 @@ -extern crate openssl; - -use indy_api_types::errors::prelude::*; - -use self::openssl::hash::{Hasher, MessageDigest}; - -pub const HASHBYTES: usize = 32; - -// these bytes are the same as openssl_hash(MessageDigest::sha256(), &[]) so we do not have to -// actually call the hash function -pub const EMPTY_HASH_BYTES: [u8; HASHBYTES] = [ - 227, 176, 196, 66, 152, 252, 28, 20, 154, 251, 244, 200, 153, 111, 185, 36, 39, 174, 65, 228, - 100, 155, 147, 76, 164, 149, 153, 27, 120, 82, 184, 85, -]; - -pub fn hash(input: &[u8]) -> Result, IndyError> { - let mut hasher = Hash::new_context()?; - hasher.update(input)?; - Ok(hasher.finish().map(|b| b.to_vec())?) -} - -pub struct Hash {} - -impl Hash { - pub fn new_context() -> Result { - Ok(Hasher::new(MessageDigest::sha256())?) - } - - pub fn hash_leaf(leaf: &T) -> Result, IndyError> - where - T: Hashable, - { - let mut ctx = Hash::new_context()?; - ctx.update(&[0x00])?; - leaf.update_context(&mut ctx)?; - Ok(ctx.finish().map(|b| b.to_vec())?) - } - - pub fn hash_nodes(left: &T, right: &T) -> Result, IndyError> - where - T: Hashable, - { - let mut ctx = Hash::new_context()?; - ctx.update(&[0x01])?; - left.update_context(&mut ctx)?; - right.update_context(&mut ctx)?; - Ok(ctx.finish().map(|b| b.to_vec())?) - } -} - -/// The type of values stored in a `MerkleTree` must implement -/// this trait, in order for them to be able to be fed -/// to a Ring `Context` when computing the hash of a leaf. -/// -/// A default instance for types that already implements -/// `AsRef<[u8]>` is provided. -/// -/// ## Example -/// -/// Here is an example of how to implement `Hashable` for a type -/// that does not (or cannot) implement `AsRef<[u8]>`: -/// -/// ```ignore -/// impl Hashable for PublicKey { -/// fn update_context(&self, context: &mut Hasher) -> Result<(), CommonError> { -/// let bytes: Vec = self.to_bytes(); -/// Ok(context.update(&bytes)?) -/// } -/// } -/// ``` -pub trait Hashable { - /// Update the given `context` with `self`. - /// - /// See `openssl::hash::Hasher::update` for more information. 
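/// (In this module the hashing context is an OpenSSL SHA-256 `Hasher`; `hash_leaf` and `hash_nodes` above prefix their input with a 0x00 or 0x01 domain-separation byte before feeding it in.)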
- fn update_context(&self, context: &mut Hasher) -> Result<(), IndyError>; -} - -impl> Hashable for T { - fn update_context(&self, context: &mut Hasher) -> Result<(), IndyError> { - context - .update(self.as_ref()) - .to_indy(IndyErrorKind::InvalidState, "Internal OpenSSL error") - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hmacsha256/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hmacsha256/sodium.rs deleted file mode 100644 index ec9dbe1e31..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hmacsha256/sodium.rs +++ /dev/null @@ -1,17 +0,0 @@ -extern crate sodiumoxide; - -use self::sodiumoxide::crypto::auth::hmacsha256; - -pub const KEYBYTES: usize = hmacsha256::KEYBYTES; -pub const TAGBYTES: usize = hmacsha256::TAGBYTES; - -sodium_type!(Key, hmacsha256::Key, KEYBYTES); -sodium_type!(Tag, hmacsha256::Tag, TAGBYTES); - -pub fn gen_key() -> Key { - Key(hmacsha256::gen_key()) -} - -pub fn authenticate(data: &[u8], key: &Key) -> Tag { - Tag(hmacsha256::authenticate(data, &key.0)) -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/mod.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/mod.rs deleted file mode 100644 index d8f1ecc87b..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/mod.rs +++ /dev/null @@ -1,44 +0,0 @@ -#[macro_use] -pub mod sodium_type; - -#[cfg(feature = "base64_rust_base64")] -#[path = "base64/rust_base64.rs"] -pub mod base64; - -#[cfg(feature = "chacha20poly1305_ietf_sodium")] -#[path = "chacha20poly1305_ietf/sodium.rs"] -pub mod chacha20poly1305_ietf; - -#[cfg(feature = "hash_openssl")] -#[path = "hash/openssl.rs"] -pub mod hash; - -#[cfg(feature = "hmacsha256_sodium")] -#[path = "hmacsha256/sodium.rs"] -pub mod hmacsha256; - -#[cfg(feature = "pwhash_argon2i13_sodium")] -#[path = "pwhash_argon2i13/sodium.rs"] -pub mod pwhash_argon2i13; - -#[cfg(feature = "randombytes_sodium")] -#[path = "randombytes/sodium.rs"] -pub mod randombytes; - -#[cfg(feature = "sealedbox_sodium")] -#[path = "sealedbox/sodium.rs"] -pub mod sealedbox; - -#[allow(dead_code)] /* FIXME Do we really need this module? 
*/ -#[cfg(feature = "xsalsa20_sodium")] -#[path = "xsalsa20/sodium.rs"] -pub mod xsalsa20; - -#[cfg(feature = "ed25519_sign_sodium")] -#[path = "ed25519_sign/sodium.rs"] -pub mod ed25519_sign; - -#[cfg(feature = "ed25519_box_sodium")] -#[path = "ed25519_box/sodium.rs"] -// TODO: The name is misleading as the operations do not happen over ed25519 curve -pub mod ed25519_box; diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs deleted file mode 100644 index b629c34c1f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs +++ /dev/null @@ -1,133 +0,0 @@ -extern crate serde; -extern crate sodiumoxide; - -use indy_api_types::{domain::wallet::KeyDerivationMethod, errors::prelude::*}; -use libc::{c_int, c_ulonglong, size_t}; - -use self::sodiumoxide::crypto::pwhash; - -pub const SALTBYTES: usize = pwhash::SALTBYTES; - -sodium_type!(Salt, pwhash::Salt, SALTBYTES); - -pub fn gen_salt() -> Salt { - Salt(pwhash::gen_salt()) -} - -pub fn pwhash<'a>( - key: &'a mut [u8], - passwd: &[u8], - salt: &Salt, - key_derivation_method: &KeyDerivationMethod, -) -> Result<&'a [u8], IndyError> { - let (opslimit, memlimit) = unsafe { - match key_derivation_method { - KeyDerivationMethod::ARGON2I_MOD => ( - crypto_pwhash_argon2i_opslimit_moderate(), - crypto_pwhash_argon2i_memlimit_moderate(), - ), - KeyDerivationMethod::ARGON2I_INT => ( - crypto_pwhash_argon2i_opslimit_interactive(), - crypto_pwhash_argon2i_memlimit_interactive(), - ), - KeyDerivationMethod::RAW => { - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "RAW key derivation method is not acceptable", - )) - } - } - }; - - let alg = unsafe { crypto_pwhash_alg_argon2i13() }; - - let res = unsafe { - crypto_pwhash( - key.as_mut_ptr(), - key.len() as c_ulonglong, - passwd.as_ptr(), - passwd.len() as c_ulonglong, - (salt.0).0.as_ptr(), - opslimit as c_ulonglong, - memlimit, - alg, - ) - }; - - if res == 0 { - Ok(key) - } else { - Err(IndyError::from_msg( - IndyErrorKind::InvalidState, - "Sodium pwhash failed", - )) - } -} - -extern "C" { - fn crypto_pwhash_alg_argon2i13() -> c_int; - fn crypto_pwhash_argon2i_opslimit_moderate() -> size_t; - fn crypto_pwhash_argon2i_memlimit_moderate() -> size_t; - fn crypto_pwhash_argon2i_opslimit_interactive() -> size_t; - fn crypto_pwhash_argon2i_memlimit_interactive() -> size_t; - - fn crypto_pwhash( - out: *mut u8, - outlen: c_ulonglong, - passwd: *const u8, - passwdlen: c_ulonglong, - salt: *const u8, // SODIUM_CRYPTO_PWHASH_SALTBYTES - opslimit: c_ulonglong, - memlimit: size_t, - alg: c_int, - ) -> c_int; -} - -#[cfg(test)] -mod tests { - use rmp_serde; - - use super::*; - - #[test] - fn get_salt_works() { - let salt = gen_salt(); - assert_eq!(salt[..].len(), SALTBYTES) - } - - #[test] - fn salt_serialize_deserialize_works() { - let salt = gen_salt(); - let serialized = rmp_serde::to_vec(&salt).unwrap(); - let deserialized: Salt = rmp_serde::from_slice(&serialized).unwrap(); - - assert_eq!(serialized.len(), SALTBYTES + 2); - assert_eq!(salt, deserialized) - } - - #[test] - fn pwhash_works() { - let passwd = b"Correct Horse Battery Staple"; - let mut key = [0u8; 64]; - - let salt = gen_salt(); - let _key = pwhash(&mut key, passwd, &salt, &KeyDerivationMethod::ARGON2I_MOD).unwrap(); - } - - #[test] - fn pwhash_works_for_interactive_method() { - let passwd = b"Correct Horse Battery Staple"; - - let salt = gen_salt(); - - let mut key = [0u8; 
64]; - let key_moderate = - pwhash(&mut key, passwd, &salt, &KeyDerivationMethod::ARGON2I_MOD).unwrap(); - - let mut key = [0u8; 64]; - let key_interactive = - pwhash(&mut key, passwd, &salt, &KeyDerivationMethod::ARGON2I_INT).unwrap(); - - assert_ne!(key_moderate, key_interactive); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/randombytes/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/randombytes/sodium.rs deleted file mode 100644 index ef70300668..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/randombytes/sodium.rs +++ /dev/null @@ -1,66 +0,0 @@ -use indy_api_types::errors::prelude::*; -use libc::size_t; -use zeroize::Zeroize; - -pub const SEEDBYTES: usize = 32; // randombytes_seedbytes - -#[derive(Zeroize)] -#[zeroize(drop)] -pub struct Seed([u8; SEEDBYTES]); - -impl Seed { - pub fn from_slice(bytes: &[u8]) -> Result { - if bytes.len() != SEEDBYTES { - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - format!( - "Invalid seed length, expected: {:}, provided: {}", - SEEDBYTES, - bytes.len() - ), - )); - } - - let mut seed = Seed([0; SEEDBYTES]); - - for (ni, &bsi) in seed.0.iter_mut().zip(bytes.iter()) { - *ni = bsi - } - - Ok(seed) - } -} - -pub fn randombytes(size: usize) -> Vec { - sodiumoxide::randombytes::randombytes(size) -} - -pub fn randombytes_deterministic(size: usize, seed: &Seed) -> Vec { - let mut out = vec![0u8; size]; - unsafe { randombytes_buf_deterministic(out.as_mut_ptr(), size, &seed.0) }; - out -} - -extern "C" { - fn randombytes_buf_deterministic(out: *mut u8, size: size_t, seed: *const [u8; SEEDBYTES]); -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn randombytes_deterministic_works() { - let seed = Seed::from_slice(&[ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, - 3, 4, 5, - ]) - .unwrap(); - let res = randombytes_deterministic(32, &seed); - let expected_bytes = vec![ - 7, 183, 0, 143, 100, 203, 87, 27, 32, 132, 126, 172, 180, 123, 39, 26, 18, 243, 64, 60, - 92, 43, 111, 227, 54, 129, 201, 185, 53, 73, 93, 93, - ]; - assert_eq!(expected_bytes, res); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs deleted file mode 100644 index 0f00f525fb..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs +++ /dev/null @@ -1,45 +0,0 @@ -extern crate sodiumoxide; - -use indy_api_types::errors::prelude::*; - -use self::sodiumoxide::crypto::sealedbox; -use super::ed25519_box; - -pub fn encrypt(pk: &ed25519_box::PublicKey, doc: &[u8]) -> Result, IndyError> { - Ok(sealedbox::seal(doc, &pk.0)) -} - -pub fn decrypt( - pk: &ed25519_box::PublicKey, - sk: &ed25519_box::SecretKey, - doc: &[u8], -) -> Result, IndyError> { - sealedbox::open(doc, &pk.0, &sk.0).map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to open sodium sealedbox", - ) - }) -} - -#[cfg(test)] -mod tests { - use self::sodiumoxide::crypto::box_; - use super::*; - use crate::crypto::{ - ed25519_box::{PublicKey, SecretKey}, - randombytes::randombytes, - }; - - #[test] - fn encrypt_decrypt_works() { - let (pk, sk) = box_::gen_keypair(); - let (pk, sk) = (PublicKey(pk), SecretKey(sk)); - let doc = randombytes(16); - - let encrypted_data = encrypt(&pk, &doc).unwrap(); - let decrypt_result = decrypt(&pk, &sk, &encrypted_data).unwrap(); - - assert_eq!(doc, decrypt_result); - } -} diff --git 
a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sodium_type.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sodium_type.rs deleted file mode 100644 index 2b6ef226f7..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sodium_type.rs +++ /dev/null @@ -1,94 +0,0 @@ -// This macro allows to wrap Sodimoxide type to libvdrtools type keeping the same behaviour -#[macro_export] -macro_rules! sodium_type (($newtype:ident, $sodiumtype:path, $len:ident) => ( - pub struct $newtype(pub(super) $sodiumtype); - - impl $newtype { - - #[allow(dead_code)] - pub fn new(bytes: [u8; $len]) -> $newtype { - $newtype($sodiumtype(bytes)) - } - - #[allow(dead_code)] - pub fn from_slice(bs: &[u8]) -> Result<$newtype, indy_api_types::errors::IndyError> { - let inner = <$sodiumtype>::from_slice(bs) - .ok_or(indy_api_types::errors::err_msg(indy_api_types::errors::IndyErrorKind::InvalidStructure, format!("Invalid bytes for {:?}", stringify!($newtype))))?; - - Ok($newtype(inner)) - } - } - - impl Clone for $newtype { - fn clone(&self) -> $newtype { - $newtype(self.0.clone()) - } - } - - impl ::std::fmt::Debug for $newtype { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { - self.0.fmt(f) - } - } - - impl ::std::cmp::PartialEq for $newtype { - fn eq(&self, other: &$newtype) -> bool { - self.0.eq(&other.0) - } - } - - impl ::std::cmp::Eq for $newtype {} - - impl ::serde::Serialize for $newtype { - fn serialize(&self, serializer: S) -> Result where S: ::serde::Serializer - { - serializer.serialize_bytes(&self.0[..]) - } - } - - impl<'de> ::serde::Deserialize<'de> for $newtype { - fn deserialize(deserializer: D) -> Result<$newtype, D::Error> where D: ::serde::Deserializer<'de> - { - <$sodiumtype>::deserialize(deserializer).map($newtype) - } - } - - impl ::std::ops::Index<::std::ops::Range> for $newtype { - type Output = [u8]; - - fn index(&self, _index: ::std::ops::Range) -> &[u8] { - self.0.index(_index) - } - } - - impl ::std::ops::Index<::std::ops::RangeTo> for $newtype { - type Output = [u8]; - - fn index(&self, _index: ::std::ops::RangeTo) -> &[u8] { - self.0.index(_index) - } - } - - impl ::std::ops::Index<::std::ops::RangeFrom> for $newtype { - type Output = [u8]; - - fn index(&self, _index: ::std::ops::RangeFrom) -> &[u8] { - self.0.index(_index) - } - } - - impl ::std::ops::Index<::std::ops::RangeFull> for $newtype { - type Output = [u8]; - - fn index(&self, _index: ::std::ops::RangeFull) -> &[u8] { - self.0.index(_index) - } - } - - impl AsRef<[u8]> for $newtype { - #[inline] - fn as_ref(&self) -> &[u8] { - &self[..] 
- } - } -)); diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs deleted file mode 100644 index 64cb09cb6a..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs +++ /dev/null @@ -1,72 +0,0 @@ -extern crate sodiumoxide; - -use indy_api_types::errors::prelude::*; - -use self::sodiumoxide::crypto::{secretbox, secretbox::xsalsa20poly1305}; - -pub const KEYBYTES: usize = xsalsa20poly1305::KEYBYTES; -pub const NONCEBYTES: usize = xsalsa20poly1305::NONCEBYTES; -pub const MACBYTES: usize = xsalsa20poly1305::MACBYTES; - -sodium_type!(Key, xsalsa20poly1305::Key, KEYBYTES); -sodium_type!(Nonce, xsalsa20poly1305::Nonce, NONCEBYTES); -sodium_type!(Tag, xsalsa20poly1305::Tag, MACBYTES); - -pub fn create_key() -> Key { - Key(secretbox::gen_key()) -} - -pub fn gen_nonce() -> Nonce { - Nonce(secretbox::gen_nonce()) -} - -pub fn encrypt(key: &Key, nonce: &Nonce, doc: &[u8]) -> Vec { - secretbox::seal(doc, &nonce.0, &key.0) -} - -pub fn decrypt(key: &Key, nonce: &Nonce, doc: &[u8]) -> Result, IndyError> { - secretbox::open(doc, &nonce.0, &key.0).map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to open sodium secretbox", - ) - }) -} - -pub fn encrypt_detached(key: &Key, nonce: &Nonce, doc: &[u8]) -> (Vec, Tag) { - let mut cipher = doc.to_vec(); - let tag = secretbox::seal_detached(cipher.as_mut_slice(), &nonce.0, &key.0); - - (cipher, Tag(tag)) -} - -pub fn decrypt_detached( - key: &Key, - nonce: &Nonce, - tag: &Tag, - doc: &[u8], -) -> Result, IndyError> { - let mut plain = doc.to_vec(); - secretbox::open_detached(plain.as_mut_slice(), &tag.0, &nonce.0, &key.0) - .map_err(|_| IndyError::from_msg(IndyErrorKind::InvalidStructure, "Unable to decrypt data")) - .map(|_| plain) -} - -#[cfg(test)] -mod tests { - use self::sodiumoxide::randombytes; - use super::*; - - #[test] - fn encrypt_decrypt_works() { - let nonce = gen_nonce(); - let key = create_key(); - let data = randombytes::randombytes(16); - - let encrypted_data = encrypt(&key, &nonce, &data); - let decrypt_result = decrypt(&key, &nonce, &encrypted_data); - - assert!(decrypt_result.is_ok()); - assert_eq!(data, decrypt_result.unwrap()); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/environment.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/environment.rs deleted file mode 100755 index e15e5d5a0a..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/environment.rs +++ /dev/null @@ -1,142 +0,0 @@ -use std::{env, path::PathBuf}; - -pub fn indy_home_path() -> PathBuf { - // TODO: FIXME: Provide better handling for the unknown home path case!!! 
- let mut path = dirs::home_dir().unwrap_or_else(|| PathBuf::from("/home/indy")); - let mut indy_client_dir = ".indy_client"; - - if cfg!(target_os = "ios") { - indy_client_dir = "Documents/.indy_client"; - } - - path.push(indy_client_dir); - - if cfg!(target_os = "android") { - path = android_indy_client_dir_path(); - } - - path -} - -pub fn android_indy_client_dir_path() -> PathBuf { - let external_storage = env::var("EXTERNAL_STORAGE"); - let android_dir: String; - - match external_storage { - Ok(val) => android_dir = val + "/.indy_client", - Err(err) => { - panic!("Failed to find external storage path {:?}", err) - } - } - - PathBuf::from(android_dir) -} - -pub fn wallet_home_path() -> PathBuf { - let mut path = indy_home_path(); - path.push("wallet"); - path -} - -pub fn pool_home_path() -> PathBuf { - let mut path = indy_home_path(); - path.push("pool"); - path -} - -pub fn pool_path(pool_name: &str) -> PathBuf { - let mut path = pool_home_path(); - path.push(pool_name); - path -} - -pub fn tmp_path() -> PathBuf { - let mut path = env::temp_dir(); - path.push("indy_client"); - path -} - -pub fn tmp_file_path(file_name: &str) -> PathBuf { - let mut path = tmp_path(); - path.push(file_name); - path -} - -pub fn test_pool_ip() -> String { - env::var("TEST_POOL_IP").unwrap_or("127.0.0.1".to_string()) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn indy_home_path_works() { - let path = indy_home_path(); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains(".indy_client")); - } - - #[test] - fn indy_home_path_works_twice() { - indy_home_path(); - indy_home_path(); - } - - #[test] - fn wallet_home_path_works() { - let path = wallet_home_path(); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains(".indy_client")); - assert!(path.to_string_lossy().contains("wallet")); - } - - #[test] - fn pool_home_path_works() { - let path = pool_home_path(); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains(".indy_client")); - assert!(path.to_string_lossy().contains("pool")); - } - - #[test] - fn pool_path_works() { - let path = pool_path("pool1"); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains(".indy_client")); - assert!(path.to_string_lossy().contains("pool1")); - } - - #[test] - fn tmp_path_works() { - let path = tmp_path(); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains("indy_client")); - } - - #[test] - fn tmp_file_path_works() { - let path = tmp_file_path("test.txt"); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains("indy_client")); - assert!(path.to_string_lossy().contains("test.txt")); - } - - #[test] - fn test_pool_ip_works() { - let pool_ip = test_pool_ip(); - assert!(!pool_ip.is_empty()); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/lib.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/lib.rs deleted file mode 100644 index 92e2aaccf2..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/lib.rs +++ /dev/null @@ -1,44 +0,0 @@ -// allow all clippy warnings, given this is legacy to be removed soon -#![allow(clippy::all)] -#[macro_use] -extern crate serde_json; - -#[cfg(debug_assertions)] -#[macro_export] -macro_rules! secret { - ($val:expr) => {{ - $val - }}; -} - -#[cfg(not(debug_assertions))] -#[macro_export] -macro_rules! 
secret { - ($val:expr) => {{ - "_" - }}; -} - -#[macro_use] -pub mod crypto; -pub mod environment; -pub mod sequence; -pub mod wql; - -use indy_api_types::{CommandHandle, SearchHandle, VdrHandle, WalletHandle}; - -pub fn next_wallet_handle() -> WalletHandle { - WalletHandle(sequence::get_next_id()) -} - -pub fn next_command_handle() -> CommandHandle { - sequence::get_next_id() -} - -pub fn next_search_handle() -> SearchHandle { - SearchHandle(sequence::get_next_id()) -} - -pub fn next_vdr_handle() -> VdrHandle { - sequence::get_next_id() -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/sequence.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/sequence.rs deleted file mode 100644 index 5032b8ecad..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/sequence.rs +++ /dev/null @@ -1,11 +0,0 @@ -use std::sync::atomic::{AtomicUsize, Ordering}; - -use lazy_static::lazy_static; - -lazy_static! { - static ref IDS_COUNTER: AtomicUsize = AtomicUsize::new(1); -} - -pub fn get_next_id() -> i32 { - (IDS_COUNTER.fetch_add(1, Ordering::SeqCst) + 1) as i32 -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/wql.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/wql.rs deleted file mode 100644 index d9cabdd8d6..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/wql.rs +++ /dev/null @@ -1,2791 +0,0 @@ -use std::string; - -use serde::{ - de, - ser::{Serialize, Serializer}, - Deserialize, Deserializer, -}; -use serde_json::{self, Value}; - -#[derive(Debug, Hash, Clone, PartialEq, Eq)] -pub enum Query { - And(Vec), - Or(Vec), - Not(Box), - Eq(String, String), - Neq(String, String), - Gt(String, String), - Gte(String, String), - Lt(String, String), - Lte(String, String), - Like(String, String), - In(String, Vec), -} - -impl Serialize for Query { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.to_value().serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for Query { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let v = Value::deserialize(deserializer)?; - - match v { - serde_json::Value::Object(map) => parse_query(map).map_err(de::Error::missing_field), - serde_json::Value::Array(array) => { - // cast old restrictions format to wql - let mut res: Vec = Vec::new(); - for sub_query in array { - let sub_query: serde_json::Map = sub_query - .as_object() - .ok_or_else(|| de::Error::custom("Restriction is invalid"))? 
- .clone() - .into_iter() - .filter(|(_, v)| !v.is_null()) - .collect(); - - if !sub_query.is_empty() { - res.push(serde_json::Value::Object(sub_query)); - } - } - - let mut map = serde_json::Map::new(); - map.insert("$or".to_string(), serde_json::Value::Array(res)); - - parse_query(map).map_err(de::Error::custom) - } - _ => Err(de::Error::missing_field( - "Restriction must be either object or array", - )), - } - } -} - -impl Query { - pub fn optimise(self) -> Option { - match self { - Query::Not(boxed_operator) => { - if let Query::Not(nested_operator) = *boxed_operator { - Some(*nested_operator) - } else { - Some(Query::Not(boxed_operator)) - } - } - Query::And(suboperators) if suboperators.is_empty() => None, - Query::And(mut suboperators) if suboperators.len() == 1 => { - suboperators.remove(0).optimise() - } - Query::And(suboperators) => { - let mut suboperators: Vec = suboperators - .into_iter() - .flat_map(|operator| operator.optimise()) - .collect(); - - match suboperators.len() { - 0 => None, - 1 => Some(suboperators.remove(0)), - _ => Some(Query::And(suboperators)), - } - } - Query::Or(suboperators) if suboperators.is_empty() => None, - Query::Or(mut suboperators) if suboperators.len() == 1 => { - suboperators.remove(0).optimise() - } - Query::Or(suboperators) => { - let mut suboperators: Vec = suboperators - .into_iter() - .flat_map(|operator| operator.optimise()) - .collect(); - - match suboperators.len() { - 0 => None, - 1 => Some(suboperators.remove(0)), - _ => Some(Query::Or(suboperators)), - } - } - Query::In(key, mut targets) if targets.len() == 1 => { - Some(Query::Eq(key, targets.remove(0))) - } - Query::In(key, targets) => Some(Query::In(key, targets)), - _ => Some(self), - } - } - - fn to_value(&self) -> serde_json::Value { - match *self { - Query::Eq(ref tag_name, ref tag_value) => json!({ tag_name: tag_value }), - Query::Neq(ref tag_name, ref tag_value) => json!({tag_name: {"$neq": tag_value}}), - Query::Gt(ref tag_name, ref tag_value) => json!({tag_name: {"$gt": tag_value}}), - Query::Gte(ref tag_name, ref tag_value) => json!({tag_name: {"$gte": tag_value}}), - Query::Lt(ref tag_name, ref tag_value) => json!({tag_name: {"$lt": tag_value}}), - Query::Lte(ref tag_name, ref tag_value) => json!({tag_name: {"$lte": tag_value}}), - Query::Like(ref tag_name, ref tag_value) => json!({tag_name: {"$like": tag_value}}), - Query::In(ref tag_name, ref tag_values) => json!({tag_name: {"$in": tag_values}}), - Query::And(ref operators) => { - if !operators.is_empty() { - json!({ - "$and": operators.iter().map(|q: &Query| q.to_value()).collect::>() - }) - } else { - json!({}) - } - } - Query::Or(ref operators) => { - if !operators.is_empty() { - json!({ - "$or": operators.iter().map(|q: &Query| q.to_value()).collect::>() - }) - } else { - json!({}) - } - } - Query::Not(ref stmt) => json!({"$not": stmt.to_value()}), - } - } -} - -impl Default for Query { - fn default() -> Self { - Query::And(Vec::new()) - } -} - -impl string::ToString for Query { - fn to_string(&self) -> String { - self.to_value().to_string() - } -} - -fn parse_query(map: serde_json::Map) -> Result { - let mut operators: Vec = Vec::new(); - - for (key, value) in map { - if let Some(operator_) = parse_operator(key, value)? 
{ - operators.push(operator_); - } - } - - let query = if operators.len() == 1 { - operators.remove(0) - } else { - Query::And(operators) - }; - - Ok(query) -} - -fn parse_operator(key: String, value: serde_json::Value) -> Result, &'static str> { - match (key.as_str(), value) { - ("$and", serde_json::Value::Array(values)) if values.is_empty() => Ok(None), - ("$and", serde_json::Value::Array(values)) => { - let operators: Vec = parse_list_operators(values)?; - Ok(Some(Query::And(operators))) - } - ("$and", _) => Err("$and must be array of JSON objects"), - ("$or", serde_json::Value::Array(values)) if values.is_empty() => Ok(None), - ("$or", serde_json::Value::Array(values)) => { - let operators: Vec = parse_list_operators(values)?; - Ok(Some(Query::Or(operators))) - } - ("$or", _) => Err("$or must be array of JSON objects"), - ("$not", serde_json::Value::Object(map)) => { - let operator = parse_query(map)?; - Ok(Some(Query::Not(Box::new(operator)))) - } - ("$not", _) => Err("$not must be JSON object"), - (_, serde_json::Value::String(value)) => Ok(Some(Query::Eq(key, value))), - (_, serde_json::Value::Object(map)) => { - if map.len() == 1 { - let (operator_name, value) = map.into_iter().next().unwrap(); - parse_single_operator(operator_name, key, value).map(Some) - } else { - Err("value must be JSON object of length 1") - } - } - (_, _) => Err("Unsupported value"), - } -} - -fn parse_list_operators(operators: Vec) -> Result, &'static str> { - let mut out_operators: Vec = Vec::with_capacity(operators.len()); - - for value in operators.into_iter() { - if let serde_json::Value::Object(map) = value { - let suboperator = parse_query(map)?; - out_operators.push(suboperator); - } else { - return Err("operator must be array of JSON objects"); - } - } - - Ok(out_operators) -} - -fn parse_single_operator( - operator_name: String, - key: String, - value: serde_json::Value, -) -> Result { - match (&*operator_name, value) { - ("$neq", serde_json::Value::String(value_)) => Ok(Query::Neq(key, value_)), - ("$neq", _) => Err("$neq must be used with string"), - ("$gt", serde_json::Value::String(value_)) => Ok(Query::Gt(key, value_)), - ("$gt", _) => Err("$gt must be used with string"), - ("$gte", serde_json::Value::String(value_)) => Ok(Query::Gte(key, value_)), - ("$gte", _) => Err("$gte must be used with string"), - ("$lt", serde_json::Value::String(value_)) => Ok(Query::Lt(key, value_)), - ("$lt", _) => Err("$lt must be used with string"), - ("$lte", serde_json::Value::String(value_)) => Ok(Query::Lte(key, value_)), - ("$lte", _) => Err("$lte must be used with string"), - ("$like", serde_json::Value::String(value_)) => Ok(Query::Like(key, value_)), - ("$like", _) => Err("$like must be used with string"), - ("$in", serde_json::Value::Array(values)) => { - let mut target_values: Vec = Vec::with_capacity(values.len()); - - for v in values.into_iter() { - if let serde_json::Value::String(s) = v { - target_values.push(s); - } else { - return Err("$in must be used with array of strings"); - } - } - - Ok(Query::In(key, target_values)) - } - ("$in", _) => Err("$in must be used with array of strings"), - (_, _) => Err("Unknown operator"), - } -} - -#[cfg(test)] -mod tests { - use rand::{distributions::Alphanumeric, thread_rng, Rng}; - - use super::*; - - fn _random_string(len: usize) -> String { - thread_rng() - .sample_iter(&Alphanumeric) - .take(len) - .map(char::from) - .collect() - } - - /// parse - #[test] - fn test_simple_operator_empty_json_parse() { - let json = "{}"; - - let query: Query = 
::serde_json::from_str(json).unwrap(); - - let expected = Query::And(vec![]); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_explicit_empty_and_parse() { - let json = r#"{"$and":[]}"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - let expected = Query::And(vec![]); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_empty_or_parse() { - let json = r#"{"$or":[]}"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - let expected = Query::And(vec![]); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_empty_not_parse() { - let json = r#"{"$not":{}}"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - let expected = Query::Not(Box::new(Query::And(vec![]))); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_eq_plaintext_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":"{}"}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Eq(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$neq":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Neq(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$gt":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Gt(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$gte":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Gte(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$lt":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Lt(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_lte_plaintext_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$lte":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Lte(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$like":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Like(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_in_plaintext_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$in":["{}"]}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::In(name1, vec![value1]); - - assert_eq!(query, expected); - } - - #[test] - 
fn test_simple_operator_in_plaintexts_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let value2 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"{}":{{"$in":["{}","{}","{}"]}}}}"#, - name1, value1, value2, value3 - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::In(name1, vec![value1, value2, value3]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":"{}"}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Eq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$neq":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Neq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$gt":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Gt(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$gte":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Gte(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$lt":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Lt(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$lte":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Lte(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$like":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Like(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$in":["{}"]}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::In(name1, vec![value1])]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_not_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = 
format!(r#"{{"$and":[{{"$not":{{"{}":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Not(Box::new(Query::Eq(name1, value1)))]); - - assert_eq!(query, expected); - } - - #[test] - #[ignore] // order - fn test_short_and_with_multiple_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"{}":"{}","{}":"{}","{}":"{}"}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Eq(name1, value1), - Query::Eq(name2, value2), - Query::Eq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":"{}"}},{{"{}":"{}"}},{{"{}":"{}"}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Eq(name1, value1), - Query::Eq(name2, value2), - Query::Eq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Neq(name1, value1), - Query::Neq(name2, value2), - Query::Neq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Gt(name1, value1), - Query::Gt(name2, value2), - Query::Gt(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Gte(name1, value1), - Query::Gte(name2, value2), - Query::Gte(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_lt_parse() { - let name1 = 
_random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Lt(name1, value1), - Query::Lt(name2, value2), - Query::Lt(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Lte(name1, value1), - Query::Lte(name2, value2), - Query::Lte(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Like(name1, value1), - Query::Like(name2, value2), - Query::Like(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::In(name1, vec![value1]), - Query::In(name2, vec![value2]), - Query::In(name3, vec![value3]), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_not_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Not(Box::new(Query::Eq(name1, value1))), - Query::Not(Box::new(Query::Eq(name2, value2))), - Query::Not(Box::new(Query::Eq(name3, value3))), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_mixed_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); 
- let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8a = _random_string(10); - let value8b = _random_string(10); - let name9 = _random_string(10); - let value9 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":"{}"}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$in":["{}","{}"]}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8a, - value8b, - name9, - value9, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Eq(name1, value1), - Query::Neq(name2, value2), - Query::Gt(name3, value3), - Query::Gte(name4, value4), - Query::Lt(name5, value5), - Query::Lte(name6, value6), - Query::Like(name7, value7), - Query::In(name8, vec![value8a, value8b]), - Query::Not(Box::new(Query::Eq(name9, value9))), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":"{}"}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Eq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$neq":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Neq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$gt":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Gt(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$gte":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Gte(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$lt":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Lt(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$lte":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Lte(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn 
test_or_with_one_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$like":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Like(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$in":["{}"]}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::In(name1, vec![value1])]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_not_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"$not":{{"{}":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Not(Box::new(Query::Eq(name1, value1)))]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":"{}"}},{{"{}":"{}"}},{{"{}":"{}"}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Eq(name1, value1), - Query::Eq(name2, value2), - Query::Eq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Neq(name1, value1), - Query::Neq(name2, value2), - Query::Neq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Gt(name1, value1), - Query::Gt(name2, value2), - Query::Gt(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = 
Query::Or(vec![ - Query::Gte(name1, value1), - Query::Gte(name2, value2), - Query::Gte(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Lt(name1, value1), - Query::Lt(name2, value2), - Query::Lt(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Lte(name1, value1), - Query::Lte(name2, value2), - Query::Lte(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Like(name1, value1), - Query::Like(name2, value2), - Query::Like(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::In(name1, vec![value1]), - Query::In(name2, vec![value2]), - Query::In(name3, vec![value3]), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_not_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Not(Box::new(Query::Eq(name1, value1))), - Query::Not(Box::new(Query::Eq(name2, value2))), - Query::Not(Box::new(Query::Eq(name3, value3))), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn 
test_or_with_multiple_mixed_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8a = _random_string(10); - let value8b = _random_string(10); - let name9 = _random_string(10); - let value9 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":"{}"}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$in":["{}","{}"]}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8a, - value8b, - name9, - value9, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Eq(name1, value1), - Query::Neq(name2, value2), - Query::Gt(name3, value3), - Query::Gte(name4, value4), - Query::Lt(name5, value5), - Query::Lte(name6, value6), - Query::Like(name7, value7), - Query::In(name8, vec![value8a, value8b]), - Query::Not(Box::new(Query::Eq(name9, value9))), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Eq(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$neq":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Neq(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$gt":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Gt(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$gte":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Gte(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$lt":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Lt(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = 
format!(r#"{{"$not":{{"{}":{{"$lte":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Lte(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$like":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Like(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$in":["{}"]}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::In(name1, vec![value1]))); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_or_not_complex_case_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8 = _random_string(10); - - let json = format!( - r#"{{"$not":{{"$and":[{{"{}":"{}"}},{{"$or":[{{"{}":{{"$gt":"{}"}}}},{{"$not":{{"{}":{{"$lte":"{}"}}}}}},{{"$and":[{{"{}":{{"$lt":"{}"}}}},{{"$not":{{"{}":{{"$gte":"{}"}}}}}}]}}]}},{{"$not":{{"{}":{{"$like":"{}"}}}}}},{{"$and":[{{"{}":"{}"}},{{"$not":{{"{}":{{"$neq":"{}"}}}}}}]}}]}}}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::And(vec![ - Query::Eq(name1, value1), - Query::Or(vec![ - Query::Gt(name2, value2), - Query::Not(Box::new(Query::Lte(name3, value3))), - Query::And(vec![ - Query::Lt(name4, value4), - Query::Not(Box::new(Query::Gte(name5, value5))), - ]), - ]), - Query::Not(Box::new(Query::Like(name6, value6))), - Query::And(vec![ - Query::Eq(name7, value7), - Query::Not(Box::new(Query::Neq(name8, value8))), - ]), - ]))); - - assert_eq!(query, expected); - } - - /// to string - #[test] - fn test_simple_operator_empty_and_to_string() { - let query = Query::And(vec![]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = "{}"; - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_empty_or_to_string() { - let query = Query::Or(vec![]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = "{}"; - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_empty_not_to_string() { - let query = Query::Not(Box::new(Query::And(vec![]))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = r#"{"$not":{}}"#; - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Eq(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let 
expected = format!(r#"{{"{}":"{}"}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Neq(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$neq":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_gt_plaintext_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Gt(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$gt":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Gte(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$gte":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Lt(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$lt":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Lte(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$lte":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Like(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$like":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::In(name1.clone(), vec![value1.clone()]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$in":["{}"]}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_in_multimply_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let value2 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::In( - name1.clone(), - vec![value1.clone(), value2.clone(), value3.clone()], - ); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"{}":{{"$in":["{}","{}","{}"]}}}}"#, - name1, value1, value2, value3 - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Eq(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":"{}"}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_neq_to_string() { - let name1 = _random_string(10); - let value1 = 
_random_string(10); - - let query = Query::And(vec![Query::Neq(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$neq":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Gt(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$gt":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Gte(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$gte":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Lt(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$lt":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Lte(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$lte":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Like(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$like":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::In(name1.clone(), vec![value1.clone()])]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$in":["{}"]}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_not_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Not(Box::new(Query::Eq( - name1.clone(), - value1.clone(), - )))]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"$not":{{"{}":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Eq(name2.clone(), value2.clone()), - Query::Eq(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":"{}"}},{{"{}":"{}"}},{{"{}":"{}"}}]}}"#, - 
name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Neq(name1.clone(), value1.clone()), - Query::Neq(name2.clone(), value2.clone()), - Query::Neq(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Gt(name1.clone(), value1.clone()), - Query::Gt(name2.clone(), value2.clone()), - Query::Gt(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Gte(name1.clone(), value1.clone()), - Query::Gte(name2.clone(), value2.clone()), - Query::Gte(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Lt(name1.clone(), value1.clone()), - Query::Lt(name2.clone(), value2.clone()), - Query::Lt(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Lte(name1.clone(), value1.clone()), - Query::Lte(name2.clone(), value2.clone()), - Query::Lte(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}}]}}"#, - name1, value1, name2, 
value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Like(name1.clone(), value1.clone()), - Query::Like(name2.clone(), value2.clone()), - Query::Like(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::In(name1.clone(), vec![value1.clone()]), - Query::In(name2.clone(), vec![value2.clone()]), - Query::In(name3.clone(), vec![value3.clone()]), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_not_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Not(Box::new(Query::Eq(name1.clone(), value1.clone()))), - Query::Not(Box::new(Query::Eq(name2.clone(), value2.clone()))), - Query::Not(Box::new(Query::Eq(name3.clone(), value3.clone()))), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_mixed_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8a = _random_string(10); - let value8b = _random_string(10); - let name9 = _random_string(10); - let value9 = _random_string(10); - - let query = Query::And(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Neq(name2.clone(), value2.clone()), - Query::Gt(name3.clone(), value3.clone()), - Query::Gte(name4.clone(), value4.clone()), - Query::Lt(name5.clone(), value5.clone()), - Query::Lte(name6.clone(), value6.clone()), - Query::Like(name7.clone(), value7.clone()), - Query::In(name8.clone(), vec![value8a.clone(), value8b.clone()]), - Query::Not(Box::new(Query::Eq(name9.clone(), value9.clone()))), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let 
expected = format!( - r#"{{"$and":[{{"{}":"{}"}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$in":["{}","{}"]}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8a, - value8b, - name9, - value9, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Eq(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":"{}"}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Neq(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$neq":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Gt(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$gt":"{}"}}}}]}}"#, name1, value1); - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Gte(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$gte":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Lt(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$lt":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Lte(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$lte":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Like(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$like":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::In(name1.clone(), vec![value1.clone()])]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$in":["{}"]}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_not_eq_to_string() { - let name1 = 
_random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Not(Box::new(Query::Eq( - name1.clone(), - value1.clone(), - )))]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"$not":{{"{}":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Eq(name2.clone(), value2.clone()), - Query::Eq(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":"{}"}},{{"{}":"{}"}},{{"{}":"{}"}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Neq(name1.clone(), value1.clone()), - Query::Neq(name2.clone(), value2.clone()), - Query::Neq(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Gt(name1.clone(), value1.clone()), - Query::Gt(name2.clone(), value2.clone()), - Query::Gt(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Gte(name1.clone(), value1.clone()), - Query::Gte(name2.clone(), value2.clone()), - Query::Gte(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Lt(name1.clone(), value1.clone()), - Query::Lt(name2.clone(), value2.clone()), - Query::Lt(name3.clone(), value3.clone()), - ]); - - 
let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Lte(name1.clone(), value1.clone()), - Query::Lte(name2.clone(), value2.clone()), - Query::Lte(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Like(name1.clone(), value1.clone()), - Query::Like(name2.clone(), value2.clone()), - Query::Like(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::In(name1.clone(), vec![value1.clone()]), - Query::In(name2.clone(), vec![value2.clone()]), - Query::In(name3.clone(), vec![value3.clone()]), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_not_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Not(Box::new(Query::Eq(name1.clone(), value1.clone()))), - Query::Not(Box::new(Query::Eq(name2.clone(), value2.clone()))), - Query::Not(Box::new(Query::Eq(name3.clone(), value3.clone()))), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_mixed_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = 
_random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8a = _random_string(10); - let value8b = _random_string(10); - let name9 = _random_string(10); - let value9 = _random_string(10); - - let query = Query::Or(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Neq(name2.clone(), value2.clone()), - Query::Gt(name3.clone(), value3.clone()), - Query::Gte(name4.clone(), value4.clone()), - Query::Lt(name5.clone(), value5.clone()), - Query::Lte(name6.clone(), value6.clone()), - Query::Like(name7.clone(), value7.clone()), - Query::In(name8.clone(), vec![value8a.clone(), value8b.clone()]), - Query::Not(Box::new(Query::Eq(name9.clone(), value9.clone()))), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":"{}"}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$in":["{}","{}"]}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8a, - value8b, - name9, - value9, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Eq(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Neq(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$neq":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Gt(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$gt":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Gte(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$gte":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Lt(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$lt":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Lte(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = 
format!(r#"{{"$not":{{"{}":{{"$lte":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Like(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$like":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::In(name1.clone(), vec![value1.clone()]))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$in":["{}"]}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_or_not_complex_case_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8 = _random_string(10); - - let query = Query::Not(Box::new(Query::And(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Or(vec![ - Query::Gt(name2.clone(), value2.clone()), - Query::Not(Box::new(Query::Lte(name3.clone(), value3.clone()))), - Query::And(vec![ - Query::Lt(name4.clone(), value4.clone()), - Query::Not(Box::new(Query::Gte(name5.clone(), value5.clone()))), - ]), - ]), - Query::Not(Box::new(Query::Like(name6.clone(), value6.clone()))), - Query::And(vec![ - Query::Eq(name7.clone(), value7.clone()), - Query::Not(Box::new(Query::Neq(name8.clone(), value8.clone()))), - ]), - ]))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$not":{{"$and":[{{"{}":"{}"}},{{"$or":[{{"{}":{{"$gt":"{}"}}}},{{"$not":{{"{}":{{"$lte":"{}"}}}}}},{{"$and":[{{"{}":{{"$lt":"{}"}}}},{{"$not":{{"{}":{{"$gte":"{}"}}}}}}]}}]}},{{"$not":{{"{}":{{"$like":"{}"}}}}}},{{"$and":[{{"{}":"{}"}},{{"$not":{{"{}":{{"$neq":"{}"}}}}}}]}}]}}}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_old_format() { - let name1 = _random_string(10); - let name2 = _random_string(10); - let value1 = _random_string(10); - let value2 = _random_string(10); - - let json = format!( - r#"[{{"{}":"{}"}}, {{"{}":"{}"}}]"#, - name1, value1, name2, value2 - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Eq(name1, value1), Query::Eq(name2, value2)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_old_format_empty() { - let json = r#"[]"#.to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![]); - - assert_eq!(query, expected); - } - - #[test] - fn test_old_format_with_nulls() { - let name1 = _random_string(10); - let name2 = _random_string(10); - let value1 = _random_string(10); - - let json = json!(vec![ - json!({ name1.clone(): value1 }), - json!({ name2: 
serde_json::Value::Null }) - ]) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Eq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_optimise_and() { - let json = r#"{}"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_or() { - let json = r#"[]"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_single_nested_and() { - let json = json!({ - "$and": [ - { - "$and": [] - } - ] - }) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_several_nested_and() { - let json = json!({ - "$and": [ - { - "$and": [] - }, - { - "$and": [] - } - ] - }) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_single_nested_or() { - let json = json!({ - "$and": [ - { - "$or": [] - } - ] - }) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_several_nested_or() { - let json = json!({ - "$and": [ - { - "$or": [] - }, - { - "$or": [] - } - ] - }) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - assert_eq!(query.optimise(), None); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/Cargo.toml b/aries/misc/legacy/libvdrtools/indy-wallet/Cargo.toml deleted file mode 100644 index 88d5bd99ad..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -name = "indy-wallet" -version = "0.1.0" -authors = ["Hyperledger Indy Contributors "] -edition = "2018" - -[features] -default = [] -benchmark = [] -mysql = [] - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -async-trait = "0.1" -byteorder = "1" -futures = { version = "0.3", default-features = false, features = [ "alloc" ] } -indy-api-types = { path = "../indy-api-types" } -indy-utils = { path = "../indy-utils" } -libc = "0.2" -log = "0.4" -rmp-serde = "1" -bs58 = "0.5" -serde = "1" -serde_json = "1" -serde_derive = "1" -sqlx = { version = "0.7", features = [ "sqlite", "mysql", "runtime-tokio-rustls" ] } -zeroize = "1" -lru = "0.12" - -[dev-dependencies] -async-std = "1" diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/lru.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/lru.rs deleted file mode 100644 index fe50b13e4c..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/lru.rs +++ /dev/null @@ -1,50 +0,0 @@ -use std::num::NonZeroUsize; - -use lru::LruCache as InnerCache; - -use crate::cache::{ - wallet_cache::{WalletCacheKey, WalletCacheValue}, - Cache, -}; - -pub struct LruCache { - inner: InnerCache, -} - -impl LruCache { - pub fn new(size: NonZeroUsize) -> LruCache { - LruCache { - inner: InnerCache::new(size), - } - } -} - -impl Cache for LruCache { - fn put(&mut self, key: WalletCacheKey, value: WalletCacheValue) -> Option { - self.inner.put(key, value) - } - - fn get(&mut self, key: &WalletCacheKey) -> Option<&WalletCacheValue> { - self.inner.get(key) - } - - fn get_mut(&mut self, key: &WalletCacheKey) -> Option<&mut WalletCacheValue> { - self.inner.get_mut(key) - } - - fn pop(&mut self, 
key: &WalletCacheKey) -> Option { - self.inner.pop(key) - } - - fn peek(&self, key: &WalletCacheKey) -> Option<&WalletCacheValue> { - self.inner.peek(key) - } - - fn len(&self) -> usize { - self.inner.len() - } - - fn cap(&self) -> usize { - self.inner.cap().into() - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/mod.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/mod.rs deleted file mode 100644 index f01402ac65..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/mod.rs +++ /dev/null @@ -1,17 +0,0 @@ -mod lru; -pub mod wallet_cache; - -use crate::cache::wallet_cache::{WalletCacheKey, WalletCacheValue}; - -pub trait Cache { - fn put(&mut self, key: WalletCacheKey, value: WalletCacheValue) -> Option; - fn get(&mut self, key: &WalletCacheKey) -> Option<&WalletCacheValue>; - fn get_mut(&mut self, key: &WalletCacheKey) -> Option<&mut WalletCacheValue>; - fn pop(&mut self, key: &WalletCacheKey) -> Option; - #[allow(dead_code)] - fn peek(&self, key: &WalletCacheKey) -> Option<&WalletCacheValue>; - #[allow(dead_code)] - fn len(&self) -> usize; - #[allow(dead_code)] - fn cap(&self) -> usize; -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/wallet_cache.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/wallet_cache.rs deleted file mode 100644 index 2bf91dad4e..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/wallet_cache.rs +++ /dev/null @@ -1,338 +0,0 @@ -use std::{ - collections::{HashMap, HashSet}, - iter::FromIterator, - num::NonZeroUsize, - sync::{ - atomic::{AtomicUsize, Ordering}, - Mutex, RwLock, - }, -}; - -use indy_api_types::domain::wallet::{CacheConfig, CachingAlgorithm}; - -use crate::{ - cache::{lru::LruCache, Cache}, - storage::{ - StorageRecord, Tag, - Tag::{Encrypted, PlainText}, - TagName, - TagName::{OfEncrypted, OfPlain}, - }, - wallet::EncryptedValue, - RecordOptions, -}; - -#[derive(PartialEq, Eq, Hash)] -pub struct WalletCacheKey { - type_: Vec, - id: Vec, -} - -pub struct WalletCacheValue { - value: EncryptedValue, - tags: Vec, -} - -pub struct WalletCache { - cache: Option>>, - cache_entities: HashSet, -} - -impl WalletCache { - pub fn new(config: Option) -> Self { - match config { - Some(cache_config) if cache_config.size > 0 && !cache_config.entities.is_empty() => { - let cache = match cache_config.algorithm { - CachingAlgorithm::LRU => { - LruCache::new(NonZeroUsize::new(cache_config.size).unwrap()) - } - }; - WalletCache { - cache: Some(Mutex::new(Box::new(cache))), - cache_entities: HashSet::from_iter(cache_config.entities.iter().cloned()), - } - } - _ => { - WalletCache { - // no cache - cache: None, - cache_entities: HashSet::new(), - } - } - } - } - - pub fn is_type_cacheable(&self, type_: &str) -> bool { - self.cache.is_some() && self.cache_entities.contains(&type_.to_owned()) - } - - pub fn add( - &self, - type_: &str, - etype: &[u8], - eid: &[u8], - evalue: &EncryptedValue, - etags: &[Tag], - ) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let value = WalletCacheValue { - value: evalue.to_owned(), - tags: etags.to_owned(), - }; - let _ = protected_cache.lock().unwrap().put(key, value); - } - } - } - - pub async fn add_tags(&self, type_: &str, etype: &[u8], eid: &[u8], etags: &[Tag]) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: 
etype.to_owned(), - id: eid.to_owned(), - }; - let _ = protected_cache - .lock() - .unwrap() //await - .get_mut(&key) - .map(|v| v.tags.append(&mut etags.to_owned())); - } - } - } - - pub async fn update_tags(&self, type_: &str, etype: &[u8], eid: &[u8], etags: &[Tag]) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let _ = protected_cache - .lock() - .unwrap() //await - .get_mut(&key) - .map(|v| v.tags = etags.to_vec()); - } - } - } - - pub async fn delete_tags(&self, type_: &str, etype: &[u8], eid: &[u8], etag_names: &[TagName]) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let mut enc_tag_names = HashSet::new(); - let mut plain_tag_names = HashSet::new(); - for x in etag_names { - match x { - OfEncrypted(value) => enc_tag_names.insert(value), - OfPlain(value) => plain_tag_names.insert(value), - }; - } - let _ = protected_cache - .lock() - .unwrap() //await - .get_mut(&key) - .map(|v| { - v.tags.retain(|el| match el { - Encrypted(tag_name, _) => !enc_tag_names.contains(tag_name), - PlainText(tag_name, _) => !plain_tag_names.contains(tag_name), - }); - }); - } - } - } - - pub async fn update(&self, type_: &str, etype: &[u8], eid: &[u8], evalue: &EncryptedValue) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let _ = protected_cache - .lock() - .unwrap() // await - .get_mut(&key) - .map(|v| v.value = evalue.to_owned()); - } - } - } - - pub async fn get( - &self, - type_: &str, - etype: &[u8], - eid: &[u8], - options: &RecordOptions, - ) -> Option { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - protected_cache - .lock() - .unwrap() //await - .get(&key) - .map(|v| StorageRecord { - id: eid.to_owned(), - value: if options.retrieve_value { - Some(v.value.clone()) - } else { - None - }, - type_: if options.retrieve_type { - Some(etype.to_owned()) - } else { - None - }, - tags: if options.retrieve_tags { - Some(v.tags.clone()) - } else { - None - }, - }) - } else { - None - } - } else { - None - } - } - - pub async fn delete(&self, type_: &str, etype: &[u8], eid: &[u8]) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let _ = protected_cache - .lock() - .unwrap() //await - .pop(&key); - } - } - } -} - -#[derive(Default, Debug)] -pub struct WalletCacheHitData { - pub hit: AtomicUsize, - pub miss: AtomicUsize, - pub not_cached: AtomicUsize, -} - -impl WalletCacheHitData { - fn inc(var: &AtomicUsize, increment: usize) -> usize { - var.fetch_add(increment, Ordering::Relaxed) - } - - fn get(var: &AtomicUsize) -> usize { - var.load(Ordering::Relaxed) - } - - pub fn inc_hit(&self) -> usize { - WalletCacheHitData::inc(&self.hit, 1) - } - - pub fn inc_miss(&self) -> usize { - WalletCacheHitData::inc(&self.miss, 1) - } - - pub fn inc_not_cached(&self) -> usize { - WalletCacheHitData::inc(&self.not_cached, 1) - } - - pub fn get_hit(&self) -> usize { - WalletCacheHitData::get(&self.hit) - } - - pub fn 
get_miss(&self) -> usize { - WalletCacheHitData::get(&self.miss) - } - - pub fn get_not_cached(&self) -> usize { - WalletCacheHitData::get(&self.not_cached) - } -} - -impl Clone for WalletCacheHitData { - fn clone(&self) -> Self { - WalletCacheHitData { - hit: AtomicUsize::from(self.get_hit()), - miss: AtomicUsize::from(self.get_miss()), - not_cached: AtomicUsize::from(self.get_not_cached()), - } - } - - fn clone_from(&mut self, source: &Self) { - *self.hit.get_mut() = source.get_hit(); - *self.miss.get_mut() = source.get_miss(); - *self.not_cached.get_mut() = source.get_not_cached(); - } -} - -pub struct WalletCacheHitMetrics { - pub data: RwLock>, -} - -impl WalletCacheHitMetrics { - pub fn new() -> Self { - WalletCacheHitMetrics { - data: RwLock::new(HashMap::new()), - } - } - - pub async fn inc_cache_hit(&self, type_: &str) -> usize { - self.update_data(type_, |x| x.inc_hit()).await - } - - pub async fn inc_cache_miss(&self, type_: &str) -> usize { - self.update_data(type_, |x| x.inc_miss()).await - } - - pub async fn inc_not_cached(&self, type_: &str) -> usize { - self.update_data(type_, |x| x.inc_not_cached()).await - } - - async fn update_data(&self, type_: &str, f: fn(&WalletCacheHitData) -> usize) -> usize { - let read_guard = self.data.read().unwrap(); //await; - match read_guard.get(type_) { - Some(x) => f(x), - None => { - drop(read_guard); - let mut write_guard = self.data.write().unwrap(); //await; - // check if data is inserted in the mean time until write lock is acquired. - match write_guard.get(type_) { - Some(x) => f(x), - None => { - // we are now holding exclusive access, so insert the item in map. - let d = Default::default(); - let result = f(&d); - write_guard.insert(type_.to_string(), d); - result - } - } - } - } - } - - #[allow(dead_code)] - pub async fn get_data_for_type(&self, type_: &str) -> Option { - self.data.read().unwrap().get(type_).cloned() - } - - pub fn get_data(&self) -> HashMap { - self.data.read().unwrap().clone() - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/encryption.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/encryption.rs deleted file mode 100644 index d3df984a1a..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/encryption.rs +++ /dev/null @@ -1,505 +0,0 @@ -use std::{collections::HashMap, str}; - -use indy_api_types::{domain::wallet::KeyDerivationMethod, errors::prelude::*}; -use indy_utils::crypto::{chacha20poly1305_ietf, hmacsha256, pwhash_argon2i13}; -use serde::{Deserialize, Serialize}; - -use crate::{ - storage::{StorageRecord, Tag, TagName}, - Keys, Metadata, WalletRecord, -}; - -pub(super) fn master_key_salt_from_slice(slice: &[u8]) -> IndyResult { - let salt = pwhash_argon2i13::Salt::from_slice(slice) - .to_indy(IndyErrorKind::WalletAccessFailed, "Invalid master key salt")?; - - Ok(salt) -} - -//TODO memzero for passphrase -#[derive(Debug, Serialize, Deserialize, Clone)] -pub enum KeyDerivationData { - Raw(String), - Argon2iMod(String, pwhash_argon2i13::Salt), - Argon2iInt(String, pwhash_argon2i13::Salt), -} - -impl KeyDerivationData { - pub fn from_passphrase_with_new_salt( - passphrase: &str, - derivation_method: &KeyDerivationMethod, - ) -> Self { - let salt = pwhash_argon2i13::gen_salt(); - let passphrase = passphrase.to_owned(); - match *derivation_method { - KeyDerivationMethod::ARGON2I_INT => KeyDerivationData::Argon2iInt(passphrase, salt), - KeyDerivationMethod::ARGON2I_MOD => KeyDerivationData::Argon2iMod(passphrase, salt), - KeyDerivationMethod::RAW => 
KeyDerivationData::Raw(passphrase), - } - } - - pub(super) fn from_passphrase_and_metadata( - passphrase: &str, - metadata: &Metadata, - derivation_method: &KeyDerivationMethod, - ) -> IndyResult { - let passphrase = passphrase.to_owned(); - - let data = match (derivation_method, metadata) { - (KeyDerivationMethod::RAW, &Metadata::MetadataRaw(_)) => { - KeyDerivationData::Raw(passphrase) - } - (KeyDerivationMethod::ARGON2I_INT, Metadata::MetadataArgon(metadata)) => { - let master_key_salt = master_key_salt_from_slice(&metadata.master_key_salt)?; - KeyDerivationData::Argon2iInt(passphrase, master_key_salt) - } - (KeyDerivationMethod::ARGON2I_MOD, Metadata::MetadataArgon(metadata)) => { - let master_key_salt = master_key_salt_from_slice(&metadata.master_key_salt)?; - KeyDerivationData::Argon2iMod(passphrase, master_key_salt) - } - _ => { - return Err(err_msg( - IndyErrorKind::WalletAccessFailed, - "Invalid combination of KeyDerivationMethod and Metadata", - )) - } - }; - - Ok(data) - } - - pub fn calc_master_key(&self) -> IndyResult { - match self { - KeyDerivationData::Raw(passphrase) => _raw_master_key(passphrase), - KeyDerivationData::Argon2iInt(passphrase, salt) => { - _derive_master_key(passphrase, salt, &KeyDerivationMethod::ARGON2I_INT) - } - KeyDerivationData::Argon2iMod(passphrase, salt) => { - _derive_master_key(passphrase, salt, &KeyDerivationMethod::ARGON2I_MOD) - } - } - } -} - -fn _derive_master_key( - passphrase: &str, - salt: &pwhash_argon2i13::Salt, - key_derivation_method: &KeyDerivationMethod, -) -> IndyResult { - let key = chacha20poly1305_ietf::derive_key(passphrase, salt, key_derivation_method)?; - Ok(key) -} - -fn _raw_master_key(passphrase: &str) -> IndyResult { - let bytes = bs58::decode(passphrase).into_vec()?; - - chacha20poly1305_ietf::Key::from_slice(&bytes).map_err(|err| err.extend("Invalid mastery key")) -} - -pub(super) fn encrypt_tag_names( - tag_names: &[&str], - tag_name_key: &chacha20poly1305_ietf::Key, - tags_hmac_key: &hmacsha256::Key, -) -> Vec { - tag_names - .iter() - .map(|tag_name| { - if tag_name.starts_with('~') { - TagName::OfPlain(encrypt_as_searchable( - &tag_name.as_bytes()[1..], - tag_name_key, - tags_hmac_key, - )) - } else { - TagName::OfEncrypted(encrypt_as_searchable( - tag_name.as_bytes(), - tag_name_key, - tags_hmac_key, - )) - } - }) - .collect::>() -} - -pub(super) fn encrypt_tags( - tags: &HashMap, - tag_name_key: &chacha20poly1305_ietf::Key, - tag_value_key: &chacha20poly1305_ietf::Key, - tags_hmac_key: &hmacsha256::Key, -) -> Vec { - tags.iter() - .map(|(tag_name, tag_value)| { - if tag_name.starts_with('~') { - // '~' character on start is skipped. 
- Tag::PlainText( - encrypt_as_searchable(&tag_name.as_bytes()[1..], tag_name_key, tags_hmac_key), - tag_value.to_string(), - ) - } else { - Tag::Encrypted( - encrypt_as_searchable(tag_name.as_bytes(), tag_name_key, tags_hmac_key), - encrypt_as_searchable(tag_value.as_bytes(), tag_value_key, tags_hmac_key), - ) - } - }) - .collect::>() -} - -pub(super) fn encrypt_as_searchable( - data: &[u8], - key: &chacha20poly1305_ietf::Key, - hmac_key: &hmacsha256::Key, -) -> Vec { - let tag = hmacsha256::authenticate(data, hmac_key); - let nonce = chacha20poly1305_ietf::Nonce::from_slice(&tag[..chacha20poly1305_ietf::NONCEBYTES]) - .unwrap(); // We can safely unwrap here - let ct = chacha20poly1305_ietf::encrypt(data, key, &nonce); - - let mut result: Vec = Default::default(); - result.extend_from_slice(&nonce[..]); - result.extend_from_slice(&ct); - result -} - -pub(super) fn encrypt_as_not_searchable(data: &[u8], key: &chacha20poly1305_ietf::Key) -> Vec { - let (ct, nonce) = chacha20poly1305_ietf::gen_nonce_and_encrypt(data, key); - - let mut result: Vec = Default::default(); - result.extend_from_slice(&nonce[..]); - result.extend_from_slice(&ct); - result -} - -pub(super) fn decrypt( - data: &[u8], - key: &chacha20poly1305_ietf::Key, - nonce: &chacha20poly1305_ietf::Nonce, -) -> IndyResult> { - let res = chacha20poly1305_ietf::decrypt(data, key, nonce)?; - Ok(res) -} - -pub(super) fn decrypt_merged( - joined_data: &[u8], - key: &chacha20poly1305_ietf::Key, -) -> IndyResult> { - let nonce = - chacha20poly1305_ietf::Nonce::from_slice(&joined_data[..chacha20poly1305_ietf::NONCEBYTES]) - .unwrap(); // We can safety unwrap here - let data = &joined_data[chacha20poly1305_ietf::NONCEBYTES..]; - let res = decrypt(data, key, &nonce)?; - Ok(res) -} - -pub(super) fn decrypt_tags( - etags: &Option>, - tag_name_key: &chacha20poly1305_ietf::Key, - tag_value_key: &chacha20poly1305_ietf::Key, -) -> IndyResult>> { - match *etags { - None => Ok(None), - Some(ref etags) => { - let mut tags: HashMap = HashMap::new(); - - for etag in etags { - let (name, value) = match *etag { - Tag::PlainText(ref ename, ref value) => { - let name = match decrypt_merged(ename, tag_name_key) { - Err(err) => { - return Err(err.to_indy( - IndyErrorKind::WalletEncryptionError, - "Unable to decrypt tag name", - )) - } - Ok(tag_name_bytes) => format!( - "~{}", - str::from_utf8(&tag_name_bytes).to_indy( - IndyErrorKind::WalletEncryptionError, - "Plaintext Tag name is invalid utf8" - )? - ), - }; - (name, value.clone()) - } - Tag::Encrypted(ref ename, ref evalue) => { - let name = String::from_utf8(decrypt_merged(ename, tag_name_key)?) - .to_indy( - IndyErrorKind::WalletEncryptionError, - "Tag name is invalid utf8", - )?; - let value = String::from_utf8(decrypt_merged(evalue, tag_value_key)?) 
- .to_indy( - IndyErrorKind::WalletEncryptionError, - "Tag value is invalid utf8", - )?; - (name, value) - } - }; - tags.insert(name, value); - } - - Ok(Some(tags)) - } - } -} - -pub(super) fn decrypt_storage_record( - record: &StorageRecord, - keys: &Keys, -) -> IndyResult { - let decrypted_name = decrypt_merged(&record.id, &keys.name_key)?; - - let decrypted_name = String::from_utf8(decrypted_name).to_indy( - IndyErrorKind::WalletEncryptionError, - "Record is invalid utf8", - )?; - - let decrypted_value = match record.value { - Some(ref value) => Some(value.decrypt(&keys.value_key)?), - None => None, - }; - - let decrypted_type = match record.type_ { - Some(ref type_) => { - let decrypted_type = decrypt_merged(type_, &keys.type_key)?; - Some(String::from_utf8(decrypted_type).to_indy( - IndyErrorKind::WalletEncryptionError, - "Record type is invalid utf8", - )?) - } - None => None, - }; - - let decrypted_tags = decrypt_tags(&record.tags, &keys.tag_name_key, &keys.tag_value_key)?; - Ok(WalletRecord::new( - decrypted_name, - decrypted_type, - decrypted_value, - decrypted_tags, - )) -} - -// #[cfg(test)] -// mod tests { -// use crate::wallet::EncryptedValue; -// use crate::wallet::Keys; -// use indy_utils::crypto::hmacsha256; - -// use super::*; - -// #[test] -// fn test_encrypt_decrypt_searchable() { -// let key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = "test_data"; - -// let encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let decrypted_data = decrypt_merged(&encrypted_data, &key).unwrap(); - -// assert_eq!(&decrypted_data[..], data.as_bytes()); -// } - -// #[test] -// fn test_encrypt_decrypt_searchable_returns_error_if_wrong_key() { -// let key = chacha20poly1305_ietf::gen_key(); -// let key2 = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = "test_data"; - -// let encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let res = decrypt_merged(&encrypted_data, &key2); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_searchable_returns_error_if_nonce_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = "test_data"; - -// let mut encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let byte_value = encrypted_data[3]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[3] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_searchable_returns_error_if_data_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = "12345678901234567890123456789012345678901234567890"; - -// let mut encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let index = encrypted_data.len() - 1; -// let byte_value = encrypted_data[index]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[index] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_searchable_returns_error_if_tag_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = 
"12345678901234567890123456789012345678901234567890"; - -// let mut encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let byte_value = encrypted_data[chacha20poly1305_ietf::NONCEBYTES + 1]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[chacha20poly1305_ietf::NONCEBYTES + 1] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable() { -// let key = chacha20poly1305_ietf::gen_key(); -// let data = "test_data"; - -// let encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let decrypted_data = decrypt_merged(&encrypted_data, &key).unwrap(); - -// assert_eq!(&decrypted_data[..], data.as_bytes()); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable_returns_error_if_wrong_key() { -// let key = chacha20poly1305_ietf::gen_key(); -// let key2 = chacha20poly1305_ietf::gen_key(); -// let data = "test_data"; - -// let encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let res = decrypt_merged(&encrypted_data, &key2); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable_returns_error_if_nonce_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let data = "test_data"; - -// let mut encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let byte_value = encrypted_data[3]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[3] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable_returns_error_if_data_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let data = "12345678901234567890123456789012345678901234567890"; - -// let mut encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let index = encrypted_data.len() - 1; -// let byte_value = encrypted_data[index]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[index] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable_returns_error_if_tag_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let data = "12345678901234567890123456789012345678901234567890"; - -// let mut encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let byte_value = encrypted_data[chacha20poly1305_ietf::NONCEBYTES + 1]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[chacha20poly1305_ietf::NONCEBYTES + 1] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_tags() { -// let tags = serde_json::from_str(r#"{"tag1":"value1", "tag2":"value2", -// "~tag3":"value3"}"#).unwrap(); - -// let tag_name_key = chacha20poly1305_ietf::gen_key(); -// let tag_value_key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); - -// let c = encrypt_tags(&tags, &tag_name_key, &tag_value_key, &hmac_key); -// let u = decrypt_tags(&Some(c), &tag_name_key, &tag_value_key).unwrap().unwrap(); -// assert_eq!(tags, u); -// } - -// #[test] -// fn 
test_decrypt_tags_works_for_none() { -// let tag_name_key = chacha20poly1305_ietf::gen_key(); -// let tag_value_key = chacha20poly1305_ietf::gen_key(); - -// let u = decrypt_tags(&None, &tag_name_key, &tag_value_key).unwrap(); -// assert!(u.is_none()); -// } - -// #[test] -// fn test_decrypt_storage_record_works() { -// let keys = Keys::new(); -// let name = "test_name"; -// let value = "test_value"; -// let encrypted_value = EncryptedValue::encrypt(value, &keys.value_key); -// let type_ = "test_type"; -// let encrypted_name = encrypt_as_searchable(name.as_bytes(), &keys.name_key, -// &keys.item_hmac_key); let encrypted_type = encrypt_as_searchable(type_.as_bytes(), -// &keys.type_key, &keys.item_hmac_key); let mut tags = HashMap::new(); -// tags.insert("tag_name_1".to_string(), "tag_value_1".to_string()); -// tags.insert("~tag_name_2".to_string(), "tag_value_2".to_string()); -// let encrypted_tags = encrypt_tags(&tags, &keys.tag_name_key, &keys.tag_value_key, -// &keys.tags_hmac_key); - -// let storage_record = StorageRecord { -// id: encrypted_name, -// value: Some(encrypted_value), -// type_: Some(encrypted_type), -// tags: Some(encrypted_tags), -// }; -// let decrypted_wallet_record = decrypt_storage_record(&storage_record, &keys).unwrap(); - -// assert_eq!(&decrypted_wallet_record.id, name); -// assert_eq!(&decrypted_wallet_record.value.unwrap(), value); -// assert_eq!(&decrypted_wallet_record.type_.unwrap(), type_); -// assert_eq!(&decrypted_wallet_record.tags.unwrap(), &tags); -// } - -// #[test] -// fn test_decrypt_storage_record_fails_if_wrong_keys() { -// let keys = Keys::new(); -// let keys2 = Keys::new(); -// let name = "test_name"; -// let value = "test_value"; -// let encrypted_value = EncryptedValue::encrypt(value, &keys.value_key); -// let type_ = "test_type"; -// let encrypted_name = encrypt_as_searchable(name.as_bytes(), &keys.name_key, -// &keys.item_hmac_key); let encrypted_type = encrypt_as_searchable(type_.as_bytes(), -// &keys.type_key, &keys.item_hmac_key); let mut tags = HashMap::new(); -// tags.insert("tag_name_1".to_string(), "tag_value_1".to_string()); -// tags.insert("~tag_name_2".to_string(), "tag_value_2".to_string()); -// let encrypted_tags = encrypt_tags(&tags, &keys.tag_name_key, &keys.tag_value_key, -// &keys.tags_hmac_key); - -// let storage_record = StorageRecord { -// id: encrypted_name, -// value: Some(encrypted_value), -// type_: Some(encrypted_type), -// tags: Some(encrypted_tags), -// }; -// let res = decrypt_storage_record(&storage_record, &keys2); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } -// } diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/export_import.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/export_import.rs deleted file mode 100644 index a8e646f161..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/export_import.rs +++ /dev/null @@ -1,329 +0,0 @@ -use std::{ - io, - io::{BufReader, BufWriter, Read, Write}, - sync::Arc, - time::{SystemTime, UNIX_EPOCH}, -}; - -use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; -use indy_api_types::{ - domain::wallet::{IndyRecord, KeyDerivationMethod}, - errors::prelude::*, -}; -use indy_utils::crypto::{ - chacha20poly1305_ietf, - hash::{hash, HASHBYTES}, - pwhash_argon2i13, -}; -use serde::{Deserialize, Serialize}; - -use crate::{encryption::KeyDerivationData, Wallet, WalletRecord}; - -const CHUNK_SIZE: usize = 1024; - -#[derive(Debug, Serialize, Deserialize)] -pub enum EncryptionMethod { - // **ChaCha20-Poly1305-IETF** cypher in blocks per 
chunk_size bytes - ChaCha20Poly1305IETF { - // pwhash_argon2i13::Salt as bytes. Random salt used for deriving of key from passphrase - salt: Vec, - // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each - // chunk to be sure in export file consistency - nonce: Vec, - // size of encrypted chunk - chunk_size: usize, - }, - // **ChaCha20-Poly1305-IETF interactive key derivation** cypher in blocks per chunk_size bytes - ChaCha20Poly1305IETFInteractive { - // pwhash_argon2i13::Salt as bytes. Random salt used for deriving of key from passphrase - salt: Vec, - // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each - // chunk to be sure in export file consistency - nonce: Vec, - // size of encrypted chunk - chunk_size: usize, - }, - // **ChaCha20-Poly1305-IETF raw key** cypher in blocks per chunk_size bytes - ChaCha20Poly1305IETFRaw { - // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each - // chunk to be sure in export file consistency - nonce: Vec, - // size of encrypted chunk - chunk_size: usize, - }, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct Header { - // Method of encryption for encrypted stream - pub encryption_method: EncryptionMethod, - // Export time in seconds from UNIX Epoch - pub time: u64, - // Version of header - pub version: u32, -} - -// Note that we use externally tagged enum serialization and header will be represented as: -// -// { -// "encryption_method": { -// "ChaCha20Poly1305IETF": { -// "salt": .., -// "nonce": .., -// "chunk_size": .., -// }, -// }, -// "time": .., -// "version": .., -// } - -pub(super) async fn export_continue( - wallet: Arc, - writer: &mut (dyn Write + Send + Sync), - version: u32, - key: chacha20poly1305_ietf::Key, - key_data: &KeyDerivationData, -) -> IndyResult<()> { - let nonce = chacha20poly1305_ietf::gen_nonce(); - let chunk_size = CHUNK_SIZE; - - let encryption_method = match key_data { - KeyDerivationData::Argon2iMod(_, salt) => EncryptionMethod::ChaCha20Poly1305IETF { - salt: salt[..].to_vec(), - nonce: nonce[..].to_vec(), - chunk_size, - }, - KeyDerivationData::Argon2iInt(_, salt) => { - EncryptionMethod::ChaCha20Poly1305IETFInteractive { - salt: salt[..].to_vec(), - nonce: nonce[..].to_vec(), - chunk_size, - } - } - KeyDerivationData::Raw(_) => EncryptionMethod::ChaCha20Poly1305IETFRaw { - nonce: nonce[..].to_vec(), - chunk_size, - }, - }; - - let header = Header { - encryption_method, - time: SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_secs(), - version, - }; - - let header = rmp_serde::to_vec(&header).to_indy( - IndyErrorKind::InvalidState, - "Can't serialize wallet export file header", - )?; - - // Write plain - let mut writer = BufWriter::new(writer); - writer.write_u32::(header.len() as u32)?; - writer.write_all(&header)?; - - // Write ecnrypted - let mut writer = chacha20poly1305_ietf::Writer::new(writer, key, nonce, chunk_size); - - writer.write_all(&hash(&header)?)?; - - let mut records = wallet.get_all().await?; - - while let Some(WalletRecord { - type_, - id, - value, - tags, - }) = records.next().await? 
- { - let record = IndyRecord { - type_: type_.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "No type fetched for exported record", - ) - })?, - id, - value: value.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "No value fetched for exported record", - ) - })?, - tags: tags.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "No tags fetched for exported record", - ) - })?, - }; - - let record = rmp_serde::to_vec(&record) - .to_indy(IndyErrorKind::InvalidState, "Can't serialize record")?; - - writer.write_u32::(record.len() as u32)?; - writer.write_all(&record)?; - } - - writer.write_u32::(0)?; // END message - writer.flush()?; - Ok(()) -} - -#[allow(clippy::type_complexity)] -pub(super) fn preparse_file_to_import( - reader: T, - passphrase: &str, -) -> IndyResult<( - BufReader, - KeyDerivationData, - chacha20poly1305_ietf::Nonce, - usize, - Vec, -)> -where - T: Read, -{ - // Reads plain - let mut reader = BufReader::new(reader); - - let header_len = reader.read_u32::().map_err(_map_io_err)? as usize; - - if header_len == 0 { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "Invalid header length", - )); - } - - let mut header_bytes = vec![0u8; header_len]; - reader.read_exact(&mut header_bytes).map_err(_map_io_err)?; - - let header: Header = rmp_serde::from_slice(&header_bytes) - .to_indy(IndyErrorKind::InvalidStructure, "Header is malformed json")?; - - if header.version != 0 { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "Unsupported version", - )); - } - - let key_derivation_method = match header.encryption_method { - EncryptionMethod::ChaCha20Poly1305IETF { .. } => KeyDerivationMethod::ARGON2I_MOD, - EncryptionMethod::ChaCha20Poly1305IETFInteractive { .. } => { - KeyDerivationMethod::ARGON2I_INT - } - EncryptionMethod::ChaCha20Poly1305IETFRaw { .. 
} => KeyDerivationMethod::RAW, - }; - - let (import_key_derivation_data, nonce, chunk_size) = match header.encryption_method { - EncryptionMethod::ChaCha20Poly1305IETF { - salt, - nonce, - chunk_size, - } - | EncryptionMethod::ChaCha20Poly1305IETFInteractive { - salt, - nonce, - chunk_size, - } => { - let salt = pwhash_argon2i13::Salt::from_slice(&salt) - .to_indy(IndyErrorKind::InvalidStructure, "Invalid salt")?; - - let nonce = chacha20poly1305_ietf::Nonce::from_slice(&nonce) - .to_indy(IndyErrorKind::InvalidStructure, "Invalid nonce")?; - - let passphrase = passphrase.to_owned(); - - let key_data = match key_derivation_method { - KeyDerivationMethod::ARGON2I_INT => KeyDerivationData::Argon2iInt(passphrase, salt), - KeyDerivationMethod::ARGON2I_MOD => KeyDerivationData::Argon2iMod(passphrase, salt), - _ => unimplemented!("FIXME"), //FIXME - }; - - (key_data, nonce, chunk_size) - } - EncryptionMethod::ChaCha20Poly1305IETFRaw { nonce, chunk_size } => { - let nonce = chacha20poly1305_ietf::Nonce::from_slice(&nonce) - .to_indy(IndyErrorKind::InvalidStructure, "Invalid nonce")?; - - let key_data = KeyDerivationData::Raw(passphrase.to_owned()); - - (key_data, nonce, chunk_size) - } - }; - - Ok(( - reader, - import_key_derivation_data, - nonce, - chunk_size, - header_bytes, - )) -} - -pub(super) async fn finish_import( - wallet: &Wallet, - reader: BufReader, - key: chacha20poly1305_ietf::Key, - nonce: chacha20poly1305_ietf::Nonce, - chunk_size: usize, - header_bytes: Vec, -) -> IndyResult<()> -where - T: Read, -{ - // Reads encrypted - let mut reader = chacha20poly1305_ietf::Reader::new(reader, key, nonce, chunk_size); - - let mut header_hash = vec![0u8; HASHBYTES]; - reader.read_exact(&mut header_hash).map_err(_map_io_err)?; - - if hash(&header_bytes)? != header_hash { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "Invalid header hash", - )); - } - - loop { - let record_len = reader.read_u32::().map_err(_map_io_err)? 
as usize; - - if record_len == 0 { - break; - } - - let mut record = vec![0u8; record_len]; - reader.read_exact(&mut record).map_err(_map_io_err)?; - - let record: IndyRecord = rmp_serde::from_slice(&record).to_indy( - IndyErrorKind::InvalidStructure, - "Record is malformed msgpack", - )?; - - wallet - .add(&record.type_, &record.id, &record.value, &record.tags, true) - .await?; - } - - Ok(()) -} - -fn _map_io_err(e: io::Error) -> IndyError { - match e { - ref e - if e.kind() == io::ErrorKind::UnexpectedEof - || e.kind() == io::ErrorKind::InvalidData => - { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid export file format", - ) - } - e => e.to_indy(IndyErrorKind::IOError, "Can't read export file"), - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/iterator.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/iterator.rs deleted file mode 100644 index 81125d769d..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/iterator.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::sync::Arc; - -use indy_api_types::errors::IndyError; - -use super::{ - encryption::decrypt_storage_record, storage::StorageIterator, wallet::Keys, WalletRecord, -}; - -pub struct WalletIterator { - storage_iterator: Box, - keys: Arc, -} - -impl WalletIterator { - pub fn new(storage_iter: Box, keys: Arc) -> Self { - WalletIterator { - storage_iterator: storage_iter, - keys, - } - } - - pub async fn next(&mut self) -> Result, IndyError> { - let next_storage_entity = self.storage_iterator.next().await?; - - if let Some(next_storage_entity) = next_storage_entity { - Ok(Some(decrypt_storage_record( - &next_storage_entity, - &self.keys, - )?)) - } else { - Ok(None) - } - } - - pub fn get_total_count(&self) -> Result, IndyError> { - self.storage_iterator.get_total_count() - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/language.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/language.rs deleted file mode 100644 index 6b6b41a0a8..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/language.rs +++ /dev/null @@ -1,152 +0,0 @@ -use std::string; - -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::base64; - -#[derive(Debug, PartialEq, Eq, Hash, Clone)] -pub enum TagName { - EncryptedTagName(Vec), - PlainTagName(Vec), -} - -impl TagName { - pub fn from(s: String) -> IndyResult { - if s.is_empty() || s.starts_with('~') && s.len() == 1 { - return Err(err_msg( - IndyErrorKind::WalletQueryError, - "Tag name must not be empty", - )); - } - - if s.starts_with('~') { - Ok(TagName::PlainTagName(s.into_bytes()[1..].to_vec())) - } else { - Ok(TagName::EncryptedTagName(s.into_bytes())) - } - } -} - -impl string::ToString for TagName { - fn to_string(&self) -> String { - match *self { - TagName::EncryptedTagName(ref v) => format!(r#""{}""#, base64::encode(v)), - TagName::PlainTagName(ref v) => format!(r#""~{}""#, base64::encode(v)), - } - } -} - -#[derive(Debug, PartialEq, Hash, Eq, Clone)] -pub enum TargetValue { - Unencrypted(String), - Encrypted(Vec), -} - -impl From for TargetValue { - fn from(s: String) -> TargetValue { - TargetValue::Unencrypted(s) - } -} - -impl string::ToString for TargetValue { - fn to_string(&self) -> String { - match *self { - TargetValue::Unencrypted(ref s) => format!(r#""{}""#, s), - TargetValue::Encrypted(ref v) => format!(r#""{}""#, base64::encode(v)), - } - } -} - -#[derive(Debug, Hash, Clone)] -pub enum Operator { - And(Vec), - Or(Vec), - Not(Box), - Eq(TagName, TargetValue), - Neq(TagName, TargetValue), - Gt(TagName, TargetValue), - 
Gte(TagName, TargetValue), - Lt(TagName, TargetValue), - Lte(TagName, TargetValue), - Like(TagName, TargetValue), - In(TagName, Vec), -} - -impl string::ToString for Operator { - fn to_string(&self) -> String { - match *self { - Operator::Eq(ref tag_name, ref tag_value) => { - format!(r#"{{{}:{}}}"#, tag_name.to_string(), tag_value.to_string()) - } - Operator::Neq(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$neq":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Gt(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$gt":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Gte(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$gte":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Lt(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$lt":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Lte(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$lte":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Like(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$like":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::In(ref tag_name, ref tag_values) => { - format!( - r#"{{{}:{{"$in":[{}]}}}}"#, - tag_name.to_string(), - tag_values - .iter() - .map(|v| v.to_string()) - .collect::>() - .join(",") - ) - } - Operator::And(ref operators) => { - if !operators.is_empty() { - format!( - r#"{{"$and":[{}]}}"#, - operators - .iter() - .map(|o: &Operator| { o.to_string() }) - .collect::>() - .join(",") - ) - } else { - "{}".to_string() - } - } - Operator::Or(ref operators) => { - if !operators.is_empty() { - format!( - r#"{{"$or":[{}]}}"#, - operators - .iter() - .map(|o: &Operator| { o.to_string() }) - .collect::>() - .join(",") - ) - } else { - "{}".to_string() - } - } - Operator::Not(ref stmt) => format!(r#"{{"$not":{}}}"#, stmt.to_string()), - } - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/lib.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/lib.rs deleted file mode 100644 index 48df31dac3..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/lib.rs +++ /dev/null @@ -1,1206 +0,0 @@ -// allow all clippy warnings, given this is legacy to be removed soon -#![allow(clippy::all)] -use std::{ - collections::{HashMap, HashSet}, - fmt, fs, - io::BufReader, - path::PathBuf, - sync::{Arc, Mutex}, - unimplemented, -}; - -use indy_api_types::{ - domain::wallet::{CacheConfig, Config, Credentials, ExportConfig, Tags}, - errors::prelude::*, - WalletHandle, -}; -use indy_utils::{ - crypto::chacha20poly1305_ietf::{self, Key as MasterKey}, - secret, -}; -use iterator::WalletIterator; -use log::trace; -use serde::{Deserialize, Serialize}; -use serde_json::Value as SValue; - -pub use crate::encryption::KeyDerivationData; -use crate::{ - cache::wallet_cache::{WalletCache, WalletCacheHitData, WalletCacheHitMetrics}, - export_import::{export_continue, finish_import, preparse_file_to_import}, - storage::{ - default::SQLiteStorageType, mysql::MySqlStorageType, WalletStorage, WalletStorageType, - }, - wallet::{Keys, Wallet}, -}; - -mod encryption; -pub mod iterator; -mod query_encryption; -mod storage; - -// TODO: Remove query language out of wallet module -pub mod language; - -mod cache; -mod export_import; -mod wallet; - -#[allow(clippy::type_complexity)] -pub struct WalletService { - storage_types: Mutex>>, - wallets: Mutex>>, - wallet_ids: Mutex>, - pending_for_open: Mutex< - HashMap< - WalletHandle, - ( - String, /* id */ - 
Box, - Metadata, - Option, - ), - >, - >, - pending_for_import: Mutex< - HashMap< - WalletHandle, - ( - BufReader<::std::fs::File>, - chacha20poly1305_ietf::Nonce, - usize, - Vec, - KeyDerivationData, - ), - >, - >, - cache_hit_metrics: WalletCacheHitMetrics, -} - -#[allow(clippy::new_without_default)] -impl WalletService { - pub fn new() -> WalletService { - let storage_types = { - let s1: Arc = Arc::new(SQLiteStorageType::new()); - let s2: Arc = Arc::new(MySqlStorageType::new()); - - Mutex::new(HashMap::from([ - ("default".to_string(), s1), - ("mysql".to_string(), s2), - ])) - }; - - WalletService { - storage_types, - wallets: Mutex::new(HashMap::new()), - wallet_ids: Mutex::new(HashSet::new()), - pending_for_open: Mutex::new(HashMap::new()), - pending_for_import: Mutex::new(HashMap::new()), - cache_hit_metrics: WalletCacheHitMetrics::new(), - } - } - - pub async fn create_wallet( - &self, - config: &Config, - credentials: &Credentials, - key: (&KeyDerivationData, &MasterKey), - ) -> IndyResult<()> { - self._create_wallet(config, credentials, key).await?; - Ok(()) - } - - async fn _create_wallet( - &self, - config: &Config, - credentials: &Credentials, - (key_data, master_key): (&KeyDerivationData, &MasterKey), - ) -> IndyResult { - trace!( - "create_wallet >>> config: {:?}, credentials: {:?}", - config, - secret!(credentials) - ); - - let keys = Keys::new(); - let metadata = self._prepare_metadata(master_key, key_data, &keys)?; - - let (storage_type, storage_config, storage_credentials) = - self._get_config_and_cred_for_storage(config, credentials)?; - - storage_type - .create_storage( - &config.id, - storage_config.as_deref(), - storage_credentials.as_deref(), - &metadata, - ) - .await?; - - Ok(keys) - } - - pub async fn delete_wallet_prepare( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult<(Metadata, KeyDerivationData)> { - trace!( - "delete_wallet >>> config: {:?}, credentials: {:?}", - config, - secret!(credentials) - ); - - if self - .wallet_ids - .lock() - .unwrap() - .contains(&WalletService::_get_wallet_id(config)) - { - return Err(err_msg( - IndyErrorKind::InvalidState, - format!( - "Wallet has to be closed before deleting: {:?}", - WalletService::_get_wallet_id(config) - ), - )); - } - - // check credentials and close connection before deleting wallet - - let (_, metadata, key_derivation_data) = self - ._open_storage_and_fetch_metadata(config, credentials) - .await?; - - Ok((metadata, key_derivation_data)) - } - - pub async fn delete_wallet_continue( - &self, - config: &Config, - credentials: &Credentials, - metadata: &Metadata, - master_key: &MasterKey, - ) -> IndyResult<()> { - trace!( - "delete_wallet >>> config: {:?}, credentials: {:?}", - config, - secret!(credentials) - ); - - { - self._restore_keys(metadata, master_key)?; - } - - let (storage_type, storage_config, storage_credentials) = - self._get_config_and_cred_for_storage(config, credentials)?; - - storage_type - .delete_storage( - &config.id, - storage_config.as_deref(), - storage_credentials.as_deref(), - ) - .await?; - - trace!("delete_wallet <<<"); - Ok(()) - } - - pub async fn open_wallet_prepare( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult<(WalletHandle, KeyDerivationData, Option)> { - trace!( - "open_wallet >>> config: {:?}, credentials: {:?}", - config, - secret!(&credentials) - ); - - self._is_id_from_config_not_used(config)?; - - let (storage, metadata, key_derivation_data) = self - ._open_storage_and_fetch_metadata(config, credentials) - .await?; - 
- let wallet_handle = indy_utils::next_wallet_handle(); - - let rekey_data: Option = credentials.rekey.as_ref().map(|rekey| { - KeyDerivationData::from_passphrase_with_new_salt( - rekey, - &credentials.rekey_derivation_method, - ) - }); - - self.pending_for_open.lock().unwrap().insert( - wallet_handle, - ( - WalletService::_get_wallet_id(config), - storage, - metadata, - rekey_data.clone(), - ), - ); - - Ok((wallet_handle, key_derivation_data, rekey_data)) - } - - pub async fn open_wallet_continue( - &self, - wallet_handle: WalletHandle, - master_key: (&MasterKey, Option<&MasterKey>), - cache_config: Option, - ) -> IndyResult { - let (id, storage, metadata, rekey_data) = self - .pending_for_open - .lock() - .unwrap() - .remove(&wallet_handle) - .ok_or_else(|| err_msg(IndyErrorKind::InvalidState, "Open data not found"))?; - - let (master_key, rekey) = master_key; - let keys = self._restore_keys(&metadata, master_key)?; - - // Rotate master key - if let (Some(rekey), Some(rekey_data)) = (rekey, rekey_data) { - let metadata = self._prepare_metadata(rekey, &rekey_data, &keys)?; - storage.set_storage_metadata(&metadata).await?; - } - - let wallet = Wallet::new( - id.clone(), - storage, - Arc::new(keys), - WalletCache::new(cache_config), - ); - - self.wallets - .lock() - .unwrap() - .insert(wallet_handle, Arc::new(wallet)); - - self.wallet_ids.lock().unwrap().insert(id.to_string()); - - trace!("open_wallet <<< res: {:?}", wallet_handle); - - Ok(wallet_handle) - } - - async fn _open_storage_and_fetch_metadata( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult<(Box, Metadata, KeyDerivationData)> { - let storage = self._open_storage(config, credentials).await?; - - let metadata: Metadata = { - let metadata = storage.get_storage_metadata().await?; - - serde_json::from_slice(&metadata) - .to_indy(IndyErrorKind::InvalidState, "Cannot deserialize metadata")? 
- }; - - let key_derivation_data = KeyDerivationData::from_passphrase_and_metadata( - &credentials.key, - &metadata, - &credentials.key_derivation_method, - )?; - - Ok((storage, metadata, key_derivation_data)) - } - - pub async fn close_wallet(&self, handle: WalletHandle) -> IndyResult<()> { - trace!("close_wallet >>> handle: {:?}", handle); - - let wallet = self.wallets.lock().unwrap().remove(&handle); - - let wallet = if let Some(wallet) = wallet { - wallet - } else { - return Err(err_msg( - IndyErrorKind::InvalidWalletHandle, - "Unknown wallet handle", - )); - }; - - self.wallet_ids.lock().unwrap().remove(wallet.get_id()); - - trace!("close_wallet <<<"); - - Ok(()) - } - - fn _map_wallet_storage_error(err: IndyError, type_: &str, name: &str) -> IndyError { - match err.kind() { - IndyErrorKind::WalletItemAlreadyExists => err_msg( - IndyErrorKind::WalletItemAlreadyExists, - format!( - "Wallet item already exists with type: {}, id: {}", - type_, name - ), - ), - IndyErrorKind::WalletItemNotFound => err_msg( - IndyErrorKind::WalletItemNotFound, - format!("Wallet item not found with type: {}, id: {}", type_, name), - ), - _ => err, - } - } - - pub async fn add_record( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - value: &str, - tags: &Tags, - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .add(type_, name, value, tags, true) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn add_indy_record( - &self, - wallet_handle: WalletHandle, - name: &str, - value: &str, - tags: &Tags, - ) -> IndyResult<()> - where - T: Sized, - { - self.add_record( - wallet_handle, - &self.add_prefix(short_type_name::()), - name, - value, - tags, - ) - .await?; - - Ok(()) - } - - pub async fn add_indy_object( - &self, - wallet_handle: WalletHandle, - name: &str, - object: &T, - tags: &Tags, - ) -> IndyResult - where - T: ::serde::Serialize + Sized, - { - let object_json = serde_json::to_string(object).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot serialize {:?}", short_type_name::()), - )?; - - self.add_indy_record::(wallet_handle, name, &object_json, tags) - .await?; - - Ok(object_json) - } - - pub async fn update_record_value( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - value: &str, - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .update(type_, name, value) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn update_indy_object( - &self, - wallet_handle: WalletHandle, - name: &str, - object: &T, - ) -> IndyResult - where - T: ::serde::Serialize + Sized, - { - let type_ = short_type_name::(); - - let wallet = self.get_wallet(wallet_handle).await?; - - let object_json = serde_json::to_string(object).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot serialize {:?}", type_), - )?; - - wallet - .update(&self.add_prefix(type_), name, &object_json) - .await?; - - Ok(object_json) - } - - pub async fn add_record_tags( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - tags: &Tags, - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .add_tags(type_, name, tags) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn update_record_tags( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - tags: &Tags, - ) -> IndyResult<()> { - let wallet = 
self.get_wallet(wallet_handle).await?; - wallet - .update_tags(type_, name, tags) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn delete_record_tags( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - tag_names: &[&str], - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .delete_tags(type_, name, tag_names) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn delete_record( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .delete(type_, name) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn delete_indy_record( - &self, - wallet_handle: WalletHandle, - name: &str, - ) -> IndyResult<()> - where - T: Sized, - { - self.delete_record( - wallet_handle, - &self.add_prefix(short_type_name::()), - name, - ) - .await?; - - Ok(()) - } - - pub async fn get_record( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - options_json: &str, - ) -> IndyResult { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .get(type_, name, options_json, &self.cache_hit_metrics) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn get_indy_record( - &self, - wallet_handle: WalletHandle, - name: &str, - options_json: &str, - ) -> IndyResult - where - T: Sized, - { - self.get_record( - wallet_handle, - &self.add_prefix(short_type_name::()), - name, - options_json, - ) - .await - } - - pub async fn get_indy_record_value( - &self, - wallet_handle: WalletHandle, - name: &str, - options_json: &str, - ) -> IndyResult - where - T: Sized, - { - let type_ = short_type_name::(); - - let record = self - .get_record(wallet_handle, &self.add_prefix(type_), name, options_json) - .await?; - - let record_value = record - .get_value() - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - format!("{} not found for id: {:?}", type_, name), - ) - })? - .to_string(); - - Ok(record_value) - } - - // Dirty hack. json must live longer then result T - pub async fn get_indy_object( - &self, - wallet_handle: WalletHandle, - name: &str, - options_json: &str, - ) -> IndyResult - where - T: ::serde::de::DeserializeOwned + Sized, - { - let record_value = self - .get_indy_record_value::(wallet_handle, name, options_json) - .await?; - - serde_json::from_str(&record_value).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot deserialize {:?}", short_type_name::()), - ) - } - - // Dirty hack. 
json must live longer then result T - pub async fn get_indy_opt_object( - &self, - wallet_handle: WalletHandle, - name: &str, - options_json: &str, - ) -> IndyResult> - where - T: ::serde::de::DeserializeOwned + Sized, - { - match self - .get_indy_object::(wallet_handle, name, options_json) - .await - { - Ok(res) => Ok(Some(res)), - Err(ref err) if err.kind() == IndyErrorKind::WalletItemNotFound => Ok(None), - Err(err) => Err(err), - } - } - - pub async fn search_records( - &self, - wallet_handle: WalletHandle, - type_: &str, - query_json: &str, - options_json: &str, - ) -> IndyResult { - let wallet = self.get_wallet(wallet_handle).await?; - - Ok(WalletSearch { - iter: wallet.search(type_, query_json, Some(options_json)).await?, - }) - } - - pub async fn search_indy_records( - &self, - wallet_handle: WalletHandle, - query_json: &str, - options_json: &str, - ) -> IndyResult - where - T: Sized, - { - self.search_records( - wallet_handle, - &self.add_prefix(short_type_name::()), - query_json, - options_json, - ) - .await - } - - #[allow(dead_code)] // TODO: Should we implement getting all records or delete everywhere? - pub fn search_all_records(&self, _wallet_handle: WalletHandle) -> IndyResult { - // match self.wallets.lock().await.get(&wallet_handle) { - // Some(wallet) => wallet.search_all_records(), - // None => Err(IndyError::InvalidHandle(wallet_handle.to_string())) - // } - unimplemented!() - } - - pub async fn upsert_indy_object( - &self, - wallet_handle: WalletHandle, - name: &str, - object: &T, - ) -> IndyResult - where - T: ::serde::Serialize + Sized, - { - if self.record_exists::(wallet_handle, name).await? { - self.update_indy_object::(wallet_handle, name, object) - .await - } else { - self.add_indy_object::(wallet_handle, name, object, &HashMap::new()) - .await - } - } - - pub async fn record_exists( - &self, - wallet_handle: WalletHandle, - name: &str, - ) -> IndyResult - where - T: Sized, - { - match self - .get_record( - wallet_handle, - &self.add_prefix(short_type_name::()), - name, - &RecordOptions::id(), - ) - .await - { - Ok(_) => Ok(true), - Err(ref err) if err.kind() == IndyErrorKind::WalletItemNotFound => Ok(false), - Err(err) => Err(err), - } - } - - pub async fn check(&self, handle: WalletHandle) -> IndyResult<()> { - self.get_wallet(handle).await?; - Ok(()) - } - - pub async fn get_all(&self, handle: WalletHandle) -> IndyResult { - let wallet = self.get_wallet(handle).await?; - wallet.get_all().await - } - - pub async fn export_wallet( - &self, - wallet_handle: WalletHandle, - export_config: &ExportConfig, - version: u32, - key: (&KeyDerivationData, &MasterKey), - ) -> IndyResult<()> { - trace!( - "export_wallet >>> wallet_handle: {:?}, export_config: {:?}, version: {:?}", - wallet_handle, - secret!(export_config), - version - ); - - if version != 0 { - return Err(err_msg(IndyErrorKind::InvalidState, "Unsupported version")); - } - - let (key_data, key) = key; - - let wallet = self.get_wallet(wallet_handle).await?; - - let path = PathBuf::from(&export_config.path); - - if let Some(parent_path) = path.parent() { - fs::DirBuilder::new().recursive(true).create(parent_path)?; - } - - let mut export_file = fs::OpenOptions::new() - .write(true) - .create_new(true) - .open(export_config.path.clone())?; - - let res = export_continue(wallet, &mut export_file, version, key.clone(), key_data).await; - - trace!("export_wallet <<<"); - res - } - - pub async fn import_wallet_prepare( - &self, - config: &Config, - credentials: &Credentials, - export_config: &ExportConfig, - ) -> 
IndyResult<(WalletHandle, KeyDerivationData, KeyDerivationData)> { - trace!( - "import_wallet_prepare >>> config: {:?}, credentials: {:?}, export_config: {:?}", - config, - secret!(export_config), - secret!(export_config) - ); - - let exported_file_to_import = fs::OpenOptions::new() - .read(true) - .open(&export_config.path)?; - - let (reader, import_key_derivation_data, nonce, chunk_size, header_bytes) = - preparse_file_to_import(exported_file_to_import, &export_config.key)?; - let key_data = KeyDerivationData::from_passphrase_with_new_salt( - &credentials.key, - &credentials.key_derivation_method, - ); - - let wallet_handle = indy_utils::next_wallet_handle(); - - let stashed_key_data = key_data.clone(); - - self.pending_for_import.lock().unwrap().insert( - wallet_handle, - (reader, nonce, chunk_size, header_bytes, stashed_key_data), - ); - - Ok((wallet_handle, key_data, import_key_derivation_data)) - } - - pub async fn import_wallet_continue( - &self, - wallet_handle: WalletHandle, - config: &Config, - credentials: &Credentials, - key: (MasterKey, MasterKey), - ) -> IndyResult<()> { - let (reader, nonce, chunk_size, header_bytes, key_data) = self - .pending_for_import - .lock() - .unwrap() - .remove(&wallet_handle) - .unwrap(); - - let (import_key, master_key) = key; - - let keys = self - ._create_wallet(config, credentials, (&key_data, &master_key)) - .await?; - - self._is_id_from_config_not_used(config)?; - - let storage = self._open_storage(config, credentials).await?; - let metadata = storage.get_storage_metadata().await?; - - let res = { - let wallet = Wallet::new( - WalletService::_get_wallet_id(config), - storage, - Arc::new(keys), - WalletCache::new(None), - ); - - finish_import(&wallet, reader, import_key, nonce, chunk_size, header_bytes).await - }; - - if res.is_err() { - let metadata: Metadata = serde_json::from_slice(&metadata) - .to_indy(IndyErrorKind::InvalidState, "Cannot deserialize metadata")?; - - self.delete_wallet_continue(config, credentials, &metadata, &master_key) - .await?; - } - - // self.close_wallet(wallet_handle)?; - - trace!("import_wallet <<<"); - res - } - - pub fn get_wallets_count(&self) -> usize { - self.wallets.lock().unwrap().len() - } - - pub fn get_wallet_ids_count(&self) -> usize { - self.wallet_ids.lock().unwrap().len() - } - - pub fn get_pending_for_import_count(&self) -> usize { - self.pending_for_import.lock().unwrap().len() - } - - pub fn get_pending_for_open_count(&self) -> usize { - self.pending_for_open.lock().unwrap().len() - } - - pub async fn get_wallet_cache_hit_metrics_data(&self) -> HashMap { - self.cache_hit_metrics.get_data() - } - - #[allow(clippy::type_complexity)] - fn _get_config_and_cred_for_storage( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult<(Arc, Option, Option)> { - let storage_type = { - let storage_type = config.storage_type.as_deref().unwrap_or("default"); - - self.storage_types - .lock() - .unwrap() - .get(storage_type) - .ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownWalletStorageType, - "Unknown wallet storage type", - ) - })? 
- .clone() - }; - - let storage_config = config.storage_config.as_ref().map(SValue::to_string); - - let storage_credentials = credentials - .storage_credentials - .as_ref() - .map(SValue::to_string); - - Ok((storage_type, storage_config, storage_credentials)) - } - - fn _is_id_from_config_not_used(&self, config: &Config) -> IndyResult<()> { - let id = WalletService::_get_wallet_id(config); - if self.wallet_ids.lock().unwrap().contains(&id) { - return Err(err_msg( - IndyErrorKind::WalletAlreadyOpened, - format!( - "Wallet {} already opened", - WalletService::_get_wallet_id(config) - ), - )); - } - - Ok(()) - } - - fn _get_wallet_id(config: &Config) -> String { - let wallet_path = config - .storage_config - .as_ref() - .and_then(|storage_config| storage_config["path"].as_str()) - .unwrap_or(""); - - format!("{}{}", config.id, wallet_path) - } - - async fn _open_storage( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult> { - let (storage_type, storage_config, storage_credentials) = - self._get_config_and_cred_for_storage(config, credentials)?; - - let storage = storage_type - .open_storage( - &config.id, - storage_config.as_deref(), - storage_credentials.as_deref(), - ) - .await?; - - Ok(storage) - } - - fn _prepare_metadata( - &self, - master_key: &chacha20poly1305_ietf::Key, - key_data: &KeyDerivationData, - keys: &Keys, - ) -> IndyResult> { - let encrypted_keys = keys.serialize_encrypted(master_key)?; - - let metadata = match key_data { - KeyDerivationData::Raw(_) => Metadata::MetadataRaw(MetadataRaw { - keys: encrypted_keys, - }), - KeyDerivationData::Argon2iInt(_, salt) | KeyDerivationData::Argon2iMod(_, salt) => { - Metadata::MetadataArgon(MetadataArgon { - keys: encrypted_keys, - master_key_salt: salt[..].to_vec(), - }) - } - }; - - let res = serde_json::to_vec(&metadata).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize wallet metadata", - )?; - - Ok(res) - } - - fn _restore_keys(&self, metadata: &Metadata, master_key: &MasterKey) -> IndyResult { - let metadata_keys = metadata.get_keys(); - - let res = Keys::deserialize_encrypted(metadata_keys, master_key).map_err(|err| { - err.map( - IndyErrorKind::WalletAccessFailed, - "Invalid master key provided", - ) - })?; - - Ok(res) - } - - pub const PREFIX: &'static str = "Indy"; - - pub fn add_prefix(&self, type_: &str) -> String { - format!("{}::{}", WalletService::PREFIX, type_) - } - - async fn get_wallet(&self, wallet_handle: WalletHandle) -> IndyResult> { - let wallets = self.wallets.lock().unwrap(); //await; - let w = wallets.get(&wallet_handle); - if let Some(w) = w { - Ok(w.clone()) - } else { - Err(err_msg( - IndyErrorKind::InvalidWalletHandle, - "Unknown wallet handle", - )) - } - } -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(untagged)] -pub enum Metadata { - MetadataArgon(MetadataArgon), - MetadataRaw(MetadataRaw), -} - -impl Metadata { - pub fn get_keys(&self) -> &Vec { - match *self { - Metadata::MetadataArgon(ref metadata) => &metadata.keys, - Metadata::MetadataRaw(ref metadata) => &metadata.keys, - } - } -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct MetadataArgon { - pub keys: Vec, - pub master_key_salt: Vec, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct MetadataRaw { - pub keys: Vec, -} - -#[derive(Clone, Serialize, Deserialize, PartialEq, Eq)] -pub struct WalletRecord { - #[serde(rename = "type")] - type_: Option, - id: String, - value: Option, - tags: Option, -} - -impl fmt::Debug for WalletRecord { - fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("WalletRecord") - .field("type_", &self.type_) - .field("id", &self.id) - .field("value", &self.value.as_ref().map(|_| "******")) - .field("tags", &self.tags) - .finish() - } -} - -impl Ord for WalletRecord { - fn cmp(&self, other: &Self) -> ::std::cmp::Ordering { - (&self.type_, &self.id).cmp(&(&other.type_, &other.id)) - } -} - -impl PartialOrd for WalletRecord { - fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { - Some(self.cmp(other)) - } -} - -impl WalletRecord { - pub fn new( - name: String, - type_: Option, - value: Option, - tags: Option, - ) -> WalletRecord { - WalletRecord { - id: name, - type_, - value, - tags, - } - } - - pub fn get_id(&self) -> &str { - self.id.as_str() - } - - #[allow(dead_code)] - pub fn get_type(&self) -> Option<&str> { - self.type_.as_deref() - } - - pub fn get_value(&self) -> Option<&str> { - self.value.as_deref() - } - - #[allow(dead_code)] - pub fn get_tags(&self) -> Option<&Tags> { - self.tags.as_ref() - } -} - -fn default_true() -> bool { - true -} - -fn default_false() -> bool { - false -} - -#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct RecordOptions { - #[serde(default = "default_false")] - retrieve_type: bool, - #[serde(default = "default_true")] - retrieve_value: bool, - #[serde(default = "default_false")] - retrieve_tags: bool, -} - -impl RecordOptions { - pub fn id() -> String { - let options = RecordOptions { - retrieve_type: false, - retrieve_value: false, - retrieve_tags: false, - }; - - serde_json::to_string(&options).unwrap() - } - - pub fn id_value() -> String { - let options = RecordOptions { - retrieve_type: false, - retrieve_value: true, - retrieve_tags: false, - }; - - serde_json::to_string(&options).unwrap() - } - - pub fn id_value_tags() -> String { - let options = RecordOptions { - retrieve_type: false, - retrieve_value: true, - retrieve_tags: true, - }; - - serde_json::to_string(&options).unwrap() - } -} - -impl Default for RecordOptions { - fn default() -> RecordOptions { - RecordOptions { - retrieve_type: false, - retrieve_value: true, - retrieve_tags: false, - } - } -} - -pub struct WalletSearch { - iter: iterator::WalletIterator, -} - -impl WalletSearch { - pub fn get_total_count(&self) -> IndyResult> { - self.iter.get_total_count() - } - - pub async fn fetch_next_record(&mut self) -> IndyResult> { - self.iter.next().await - } -} - -#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct SearchOptions { - #[serde(default = "default_true")] - retrieve_records: bool, - #[serde(default = "default_false")] - retrieve_total_count: bool, - #[serde(default = "default_false")] - retrieve_type: bool, - #[serde(default = "default_true")] - retrieve_value: bool, - #[serde(default = "default_false")] - retrieve_tags: bool, -} - -impl SearchOptions { - pub fn id_value() -> String { - let options = SearchOptions { - retrieve_records: true, - retrieve_total_count: true, - retrieve_type: true, - retrieve_value: true, - retrieve_tags: false, - }; - - serde_json::to_string(&options).unwrap() - } -} - -impl Default for SearchOptions { - fn default() -> SearchOptions { - SearchOptions { - retrieve_records: true, - retrieve_total_count: false, - retrieve_type: false, - retrieve_value: true, - retrieve_tags: false, - } - } -} - -fn short_type_name() -> &'static str { - let type_name = std::any::type_name::(); - type_name.rsplit("::").next().unwrap_or(type_name) -} diff 
--git a/aries/misc/legacy/libvdrtools/indy-wallet/src/query_encryption.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/query_encryption.rs deleted file mode 100644 index bf94a545c2..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/query_encryption.rs +++ /dev/null @@ -1,120 +0,0 @@ -use indy_api_types::errors::prelude::*; -use indy_utils::wql::Query; - -use super::{ - encryption::encrypt_as_searchable, - language::{Operator, TagName, TargetValue}, - wallet::Keys, -}; - -// Performs encryption of WQL query -// WQL query is provided as top-level Operator -pub(super) fn encrypt_query(query: Query, keys: &Keys) -> IndyResult { - transform(query, keys) -} - -fn transform(query: Query, keys: &Keys) -> IndyResult { - match query { - Query::Eq(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Eq(encrypted_name, encrypted_value)) - } - Query::Neq(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Neq(encrypted_name, encrypted_value)) - } - Query::Gt(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Gt(encrypted_name, encrypted_value)) - } - Query::Gte(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Gte(encrypted_name, encrypted_value)) - } - Query::Lt(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Lt(encrypted_name, encrypted_value)) - } - Query::Lte(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Lte(encrypted_name, encrypted_value)) - } - Query::Like(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Like(encrypted_name, encrypted_value)) - } - Query::In(name, values) => { - let ename = TagName::from(name.clone())?; - let ename = match ename { - TagName::EncryptedTagName(ref name) => { - let encrypted_name = - encrypt_as_searchable(&name[..], &keys.tag_name_key, &keys.tags_hmac_key); - TagName::EncryptedTagName(encrypted_name) - } - TagName::PlainTagName(ref name) => { - let encrypted_name = - encrypt_as_searchable(&name[..], &keys.tag_name_key, &keys.tags_hmac_key); - TagName::PlainTagName(encrypted_name) - } - }; - let mut encrypted_values: Vec = Vec::with_capacity(values.len()); - - for value in values { - encrypted_values.push(encrypt_name_value(name.clone(), value, keys)?.1); - } - Ok(Operator::In(ename, encrypted_values)) - } - Query::And(operators) => Ok(Operator::And(transform_list_operators(operators, keys)?)), - Query::Or(operators) => Ok(Operator::Or(transform_list_operators(operators, keys)?)), - Query::Not(boxed_operator) => { - Ok(Operator::Not(Box::new(transform(*boxed_operator, keys)?))) - } - } -} - -fn transform_list_operators(operators: Vec, keys: &Keys) -> IndyResult> { - let mut transformed = Vec::with_capacity(operators.len()); - - for operator in operators { - let transformed_operator = transform(operator, keys)?; - transformed.push(transformed_operator); - } - - Ok(transformed) -} - -// Encrypts a single tag name, tag value pair. 
-// If the tag name is EncryptedTagName enum variant, encrypts both the tag name and the tag value -// If the tag name is PlainTagName enum variant, encrypts only the tag name -fn encrypt_name_value( - name: String, - value: String, - keys: &Keys, -) -> IndyResult<(TagName, TargetValue)> { - let name = TagName::from(name)?; - let value = TargetValue::from(value); - match (name, value) { - (TagName::EncryptedTagName(ref name), TargetValue::Unencrypted(ref s)) => { - let encrypted_tag_name = - encrypt_as_searchable(&name[..], &keys.tag_name_key, &keys.tags_hmac_key); - let encrypted_tag_value = - encrypt_as_searchable(s.as_bytes(), &keys.tag_value_key, &keys.tags_hmac_key); - Ok(( - TagName::EncryptedTagName(encrypted_tag_name), - TargetValue::Encrypted(encrypted_tag_value), - )) - } - (TagName::PlainTagName(ref name), TargetValue::Unencrypted(ref s)) => { - let encrypted_tag_name = - encrypt_as_searchable(&name[..], &keys.tag_name_key, &keys.tags_hmac_key); - Ok(( - TagName::PlainTagName(encrypted_tag_name), - TargetValue::Unencrypted(s.clone()), - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Reached invalid combination of tag name and value while encrypting query", - )), - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/mod.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/mod.rs deleted file mode 100644 index d32790f65b..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/mod.rs +++ /dev/null @@ -1,939 +0,0 @@ -use std::{ - collections::{HashMap, VecDeque}, - fs, -}; - -use async_trait::async_trait; -use indy_api_types::errors::prelude::*; -use indy_utils::environment; -use log::LevelFilter; -use serde::Deserialize; -use sqlx::{ - sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions}, - ConnectOptions, SqlitePool, -}; - -use crate::{ - language, - storage::{StorageIterator, StorageRecord, Tag, TagName, WalletStorage, WalletStorageType}, - wallet::EncryptedValue, - RecordOptions, SearchOptions, -}; - -mod query; - -const _SQLITE_DB: &str = "sqlite.db"; - -struct SQLiteStorageIterator { - records: Option>, - total_count: Option, -} - -impl SQLiteStorageIterator { - fn new( - records: Option>, - total_count: Option, - ) -> IndyResult { - Ok(SQLiteStorageIterator { - records, - total_count, - }) - } -} - -#[async_trait] -impl StorageIterator for SQLiteStorageIterator { - async fn next(&mut self) -> IndyResult> { - if let Some(ref mut records) = self.records { - Ok(records.pop_front()) - } else { - Ok(None) - } - } - - fn get_total_count(&self) -> IndyResult> { - Ok(self.total_count.to_owned()) - } -} - -#[derive(Deserialize, Debug)] -struct Config { - path: Option, -} - -#[derive(Debug)] -struct SQLiteStorage { - pool: SqlitePool, -} - -pub struct SQLiteStorageType {} - -impl SQLiteStorageType { - pub fn new() -> SQLiteStorageType { - SQLiteStorageType {} - } - - fn _db_path(id: &str, config: Option<&Config>) -> std::path::PathBuf { - let mut path = match config { - Some(Config { - path: Some(ref path), - }) => std::path::PathBuf::from(path), - _ => environment::wallet_home_path(), - }; - - path.push(id); - path.push(_SQLITE_DB); - path - } -} - -#[async_trait] -impl WalletStorage for SQLiteStorage { - /// - /// Tries to fetch values and/or tags from the storage. - /// Returns Result with StorageEntity object which holds requested data in case of success or - /// Result with IndyError in case of failure. 
- /// - /// - /// # Arguments - /// - /// * `type_` - type_ of the item in storage - /// * `id` - id of the item in storage - /// * `options` - JSon containing what needs to be fetched. - /// Example: {"retrieveValue": true, "retrieveTags": true} - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `StorageEntity` - Contains name, optional value and optional tags - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemNotFound` - Item is not found in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn get(&self, type_: &[u8], id: &[u8], options: &str) -> IndyResult { - let options: RecordOptions = serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "RecordOptions is malformed json", - )?; - - let mut conn = self.pool.acquire().await?; - - let (item_id, value, key): (i64, Vec, Vec) = - sqlx::query_as("SELECT id, value, key FROM items where type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .fetch_one(&mut *conn) - .await?; - - let value = if options.retrieve_value { - Some(EncryptedValue::new(value, key)) - } else { - None - }; - - let type_ = if options.retrieve_type { - Some(type_.to_vec()) - } else { - None - }; - - let tags = if options.retrieve_tags { - let mut tags = Vec::new(); - - tags.extend( - sqlx::query_as::<_, (Vec, String)>( - "SELECT name, value from tags_plaintext where item_id = ?", - ) - .bind(item_id) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| Tag::PlainText(r.0, r.1)), - ); - - tags.extend( - sqlx::query_as::<_, (Vec, Vec)>( - "SELECT name, value from tags_encrypted where item_id = ?", - ) - .bind(item_id) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| Tag::Encrypted(r.0, r.1)), - ); - - Some(tags) - } else { - None - }; - - Ok(StorageRecord::new(id.to_vec(), value, type_, tags)) - } - - /// - /// inserts value and tags into storage. - /// Returns Result with () on success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type of the item in storage - /// * `id` - id of the item in storage - /// * `value` - value of the item in storage - /// * `value_key` - key used to encrypt the value - /// * `tags` - tags assigned to the value - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` class of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemAlreadyExists` - Item is already present in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn add( - &self, - type_: &[u8], - id: &[u8], - value: &EncryptedValue, - tags: &[Tag], - ) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let id = sqlx::query("INSERT INTO items (type, name, value, key) VALUES (?1, ?2, ?3, ?4)") - .bind(type_) - .bind(id) - .bind(&value.data) - .bind(&value.key) - .execute(&mut *tx) - .await? - .last_insert_rowid(); - - for tag in tags { - match *tag { - Tag::Encrypted(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT INTO tags_encrypted (item_id, name, value) VALUES (?1, ?2, ?3)", - ) - .bind(id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? 
- } - Tag::PlainText(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT INTO tags_plaintext (item_id, name, value) VALUES (?1, ?2, ?3)", - ) - .bind(id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - }; - } - - tx.commit().await?; - Ok(()) - } - - async fn update(&self, type_: &[u8], id: &[u8], value: &EncryptedValue) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let row_updated = - sqlx::query("UPDATE items SET value = ?1, key = ?2 WHERE type = ?3 AND name = ?4") - .bind(&value.data) - .bind(&value.key) - .bind(type_) - .bind(id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - async fn add_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let (item_id,): (i64,) = - sqlx::query_as("SELECT id FROM items WHERE type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .fetch_one(&mut *tx) - .await?; - - for tag in tags { - match *tag { - Tag::Encrypted(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT OR REPLACE INTO tags_encrypted (item_id, name, value) VALUES (?1, \ - ?2, ?3)", - ) - .bind(item_id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - Tag::PlainText(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT OR REPLACE INTO tags_plaintext (item_id, name, value) VALUES (?1, \ - ?2, ?3)", - ) - .bind(item_id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - }; - } - - tx.commit().await?; - Ok(()) - } - - async fn update_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let (item_id,): (i64,) = - sqlx::query_as("SELECT id FROM items WHERE type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .fetch_one(&mut *tx) - .await?; - - sqlx::query("DELETE FROM tags_encrypted WHERE item_id = ?1") - .bind(item_id) - .execute(&mut *tx) - .await?; - - sqlx::query("DELETE FROM tags_plaintext WHERE item_id = ?1") - .bind(item_id) - .execute(&mut *tx) - .await?; - - for tag in tags { - match *tag { - Tag::Encrypted(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT INTO tags_encrypted (item_id, name, value) VALUES (?1, ?2, ?3)", - ) - .bind(item_id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - Tag::PlainText(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT INTO tags_plaintext (item_id, name, value) VALUES (?1, ?2, ?3)", - ) - .bind(item_id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - }; - } - - tx.commit().await?; - - Ok(()) - } - - async fn delete_tags(&self, type_: &[u8], id: &[u8], tag_names: &[TagName]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let (item_id,): (i64,) = - sqlx::query_as("SELECT id FROM items WHERE type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .fetch_one(&mut *tx) - .await?; - - for tag_name in tag_names { - match *tag_name { - TagName::OfEncrypted(ref tag_name) => { - sqlx::query("DELETE FROM tags_encrypted WHERE item_id = ?1 AND name = ?2") - .bind(item_id) - .bind(tag_name) - .execute(&mut *tx) - .await? 
- } - TagName::OfPlain(ref tag_name) => { - sqlx::query("DELETE FROM tags_plaintext WHERE item_id = ?1 AND name = ?2") - .bind(item_id) - .bind(tag_name) - .execute(&mut *tx) - .await? - } - }; - } - - tx.commit().await?; - Ok(()) - } - - /// - /// deletes value and tags into storage. - /// Returns Result with () on success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type of the item in storage - /// * `id` - id of the item in storage - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemNotFound` - Item is not found in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn delete(&self, type_: &[u8], id: &[u8]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let rows_affected = sqlx::query("DELETE FROM items where type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match rows_affected { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to delete not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row deleted. Seems wallet structure is inconsistent", - )), - } - } - - async fn get_storage_metadata(&self) -> IndyResult> { - let mut conn = self.pool.acquire().await?; - - let (metadata,): (Vec,) = sqlx::query_as::<_, (Vec,)>("SELECT value FROM metadata") - .fetch_one(&mut *conn) - .await?; - - Ok(metadata) - } - - async fn set_storage_metadata(&self, metadata: &[u8]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - sqlx::query("UPDATE metadata SET value = ?1") - .bind(metadata) - .execute(&mut *tx) - .await?; - - tx.commit().await?; - Ok(()) - } - - async fn get_all(&self) -> IndyResult> { - let mut conn = self.pool.acquire().await?; - let mut tags: Vec<(i64, Tag)> = Vec::new(); - - tags.extend( - sqlx::query_as::<_, (i64, Vec, String)>( - "SELECT item_id, name, value from tags_plaintext", - ) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| (r.0, Tag::PlainText(r.1, r.2))), - ); - - tags.extend( - sqlx::query_as::<_, (i64, Vec, Vec)>( - "SELECT item_id, name, value from tags_encrypted", - ) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| (r.0, Tag::Encrypted(r.1, r.2))), - ); - - let mut mtags = HashMap::new(); - - for (k, v) in tags { - mtags.entry(k).or_insert_with(Vec::new).push(v) - } - - let records: VecDeque<_> = sqlx::query_as::<_, (i64, Vec, Vec, Vec, Vec)>( - "SELECT id, name, value, key, type FROM items", - ) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| { - StorageRecord::new( - r.1, - Some(EncryptedValue::new(r.2, r.3)), - Some(r.4), - mtags.remove(&r.0).or_else(|| Some(Vec::new())), - ) - }) - .collect(); - - let total_count = records.len(); - - Ok(Box::new(SQLiteStorageIterator::new( - Some(records), - Some(total_count), - )?)) - } - - async fn search( - &self, - type_: &[u8], - query: &language::Operator, - options: Option<&str>, - ) -> IndyResult> { - let options = if let Some(options) = options { - serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "Search options is malformed json", - )? 
- } else { - SearchOptions::default() - }; - - let mut conn = self.pool.acquire().await?; - - let records = if options.retrieve_records { - let (query, args) = query::wql_to_sql(type_, query, None)?; - - // "SELECT i.id, i.name, i.value, i.key, i.type FROM items as i WHERE i.type = ?" - - let mut query = - sqlx::query_as::, Vec, Vec, Vec)>(&query); - - for arg in args.iter() { - query = match arg { - query::ToSQL::ByteSlice(a) => query.bind(a), - query::ToSQL::CharSlice(a) => query.bind(a), - } - } - - let mut records = query.fetch_all(&mut *conn).await?; - - let mut mtags = if options.retrieve_tags && !records.is_empty() { - let mut tags: Vec<(i64, Tag)> = Vec::new(); - - let in_binings = std::iter::repeat("?") - .take(records.len()) - .collect::>() - .join(","); - - let query = format!( - r#" - SELECT item_id, name, value - FROM tags_plaintext - WHERE item_id IN ({}) - "#, - in_binings - ); - - let mut query = sqlx::query_as::, String)>(&query); - - for record in records.iter() { - query = query.bind(record.0); - } - - tags.extend( - query - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| (r.0, Tag::PlainText(r.1, r.2))), - ); - - let query = format!( - r#" - SELECT item_id, name, value - FROM tags_encrypted - WHERE item_id IN ({}) - "#, - in_binings - ); - - let mut query = sqlx::query_as::, Vec)>(&query); - - for record in records.iter() { - query = query.bind(record.0); - } - - tags.extend( - query - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| (r.0, Tag::Encrypted(r.1, r.2))), - ); - - let mut mtags = HashMap::new(); - - for (k, v) in tags { - mtags.entry(k).or_insert_with(Vec::new).push(v) - } - - mtags - } else { - HashMap::new() - }; - - let records = records - .drain(..) - .map(|r| { - StorageRecord::new( - r.1, - if options.retrieve_value { - Some(EncryptedValue::new(r.2, r.3)) - } else { - None - }, - if options.retrieve_type { - Some(r.4) - } else { - None - }, - if options.retrieve_tags { - mtags.remove(&r.0).or_else(|| Some(Vec::new())) - } else { - None - }, - ) - }) - .collect(); - - Some(records) - } else { - None - }; - - let total_count = if options.retrieve_total_count { - let (query, args) = query::wql_to_sql_count(type_, query)?; - - let mut query = sqlx::query_as::(&query); - - for arg in args.iter() { - query = match arg { - query::ToSQL::ByteSlice(a) => query.bind(a), - query::ToSQL::CharSlice(a) => query.bind(a), - } - } - - let (total_count,) = query.fetch_one(&mut *conn).await?; - Some(total_count as usize) - } else { - None - }; - - Ok(Box::new(SQLiteStorageIterator::new(records, total_count)?)) - } - - fn close(&mut self) -> IndyResult<()> { - Ok(()) - } -} - -#[async_trait] -impl WalletStorageType for SQLiteStorageType { - /// - /// Deletes the SQLite database file with the provided id from the path specified in the - /// config file. 
- /// - /// # Arguments - /// - /// * `id` - id of the SQLite DB file - /// * `storage_config` - config containing the location of SQLite DB files - /// * `storage_credentials` - DB credentials - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::NotFound` - File with the provided id not found - /// * `IOError(..)` - Deletion of the file form the file-system failed - async fn delete_storage( - &self, - id: &str, - config: Option<&str>, - _credentials: Option<&str>, - ) -> IndyResult<()> { - let config = config - .map(serde_json::from_str::) - .map_or(Ok(None), |v| v.map(Some)) - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")?; - - let db_file_path = SQLiteStorageType::_db_path(id, config.as_ref()); - - if !db_file_path.exists() { - return Err(err_msg( - IndyErrorKind::WalletNotFound, - format!("Wallet storage file isn't found: {:?}", db_file_path), - )); - } - - std::fs::remove_dir_all(db_file_path.parent().unwrap())?; - Ok(()) - } - - /// - /// Creates the SQLite DB file with the provided name in the path specified in the config file, - /// and initializes the encryption keys needed for encryption and decryption of data. - /// - /// # Arguments - /// - /// * `id` - name of the SQLite DB file - /// * `config` - config containing the location of SQLite DB files - /// * `credentials` - DB credentials - /// * `metadata` - encryption keys that need to be stored in the newly created DB - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `AlreadyExists` - File with a given name already exists on the path - /// * `IOError("IO error during storage operation:...")` - Connection to the DB failed - /// * `IOError("Error occurred while creating wallet file:..)"` - Creation of schema failed - /// * `IOError("Error occurred while inserting the keys...")` - Insertion of keys failed - /// * `IOError(..)` - Deletion of the file form the file-system failed - async fn create_storage( - &self, - id: &str, - config: Option<&str>, - _credentials: Option<&str>, - metadata: &[u8], - ) -> IndyResult<()> { - let config = config - .map(serde_json::from_str::) - .map_or(Ok(None), |v| v.map(Some)) - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")?; - - let db_path = SQLiteStorageType::_db_path(id, config.as_ref()); - - if db_path.exists() { - return Err(err_msg( - IndyErrorKind::WalletAlreadyExists, - format!("Wallet database file already exists: {:?}", db_path), - )); - } - - fs::DirBuilder::new() - .recursive(true) - .create(db_path.parent().unwrap())?; - - let mut conn = SqliteConnectOptions::default() - .filename(db_path.as_path()) - .create_if_missing(true) - .journal_mode(SqliteJournalMode::Wal) - .log_statements(LevelFilter::Debug) - .connect() - .await?; - - let res = sqlx::query( - r#" - PRAGMA locking_mode=EXCLUSIVE; - PRAGMA foreign_keys=ON; - - BEGIN EXCLUSIVE TRANSACTION; - - /*** Keys Table ***/ - - CREATE TABLE metadata ( - id INTEGER NOT NULL, - value NOT NULL, - PRIMARY KEY(id) - ); - - /*** Items Table ***/ - - CREATE TABLE items( - id INTEGER NOT NULL, - type NOT NULL, - name NOT NULL, - value NOT NULL, - key NOT NULL, - PRIMARY KEY(id) - ); - - CREATE UNIQUE INDEX ux_items_type_name ON items(type, name); - - 
/*** Encrypted Tags Table ***/ - - CREATE TABLE tags_encrypted( - name NOT NULL, - value NOT NULL, - item_id INTEGER NOT NULL, - PRIMARY KEY(name, item_id), - FOREIGN KEY(item_id) - REFERENCES items(id) - ON DELETE CASCADE - ON UPDATE CASCADE - ); - - CREATE INDEX ix_tags_encrypted_name ON tags_encrypted(name); - CREATE INDEX ix_tags_encrypted_value ON tags_encrypted(value); - CREATE INDEX ix_tags_encrypted_item_id ON tags_encrypted(item_id); - - /*** PlainText Tags Table ***/ - - CREATE TABLE tags_plaintext( - name NOT NULL, - value NOT NULL, - item_id INTEGER NOT NULL, - PRIMARY KEY(name, item_id), - FOREIGN KEY(item_id) - REFERENCES items(id) - ON DELETE CASCADE - ON UPDATE CASCADE - ); - - CREATE INDEX ix_tags_plaintext_name ON tags_plaintext(name); - CREATE INDEX ix_tags_plaintext_value ON tags_plaintext(value); - CREATE INDEX ix_tags_plaintext_item_id ON tags_plaintext(item_id); - - /*** Insert metadata ***/ - INSERT INTO metadata(value) VALUES (?1); - - COMMIT; - "#, - ) - .persistent(false) - .bind(metadata) - .execute(&mut conn) - .await; - - // TODO: I am not sure force cleanup here is a good idea. - if let Err(err) = res { - std::fs::remove_file(db_path)?; - Err(err)?; - } - - Ok(()) - } - - /// - /// Establishes a connection to the SQLite DB with the provided id located in the path - /// specified in the config. In case of a successful onection returns a Storage object - /// embedding the connection and the encryption keys that will be used for encryption and - /// decryption operations. - /// - /// - /// # Arguments - /// - /// * `id` - id of the SQLite DB file - /// * `config` - config containing the location of SQLite DB files - /// * `credentials` - DB credentials - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `(Box, Vec)` - Tuple of `SQLiteStorage` and `encryption keys` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::NotFound` - File with the provided id not found - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn open_storage( - &self, - id: &str, - config: Option<&str>, - _credentials: Option<&str>, - ) -> IndyResult> { - let config: Option = config - .map(serde_json::from_str) - .map_or(Ok(None), |v| v.map(Some)) - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")?; - - let db_path = SQLiteStorageType::_db_path(id, config.as_ref()); - - if !db_path.exists() { - return Err(err_msg( - IndyErrorKind::WalletNotFound, - "No wallet database exists", - )); - } - - let connect_options = SqliteConnectOptions::new() - .filename(db_path.as_path()) - .journal_mode(SqliteJournalMode::Wal) - .disable_statement_logging(); - - Ok(Box::new(SQLiteStorage { - pool: SqlitePoolOptions::default() - .min_connections(1) - .max_connections(1) - .max_lifetime(None) - .connect_with(connect_options) - .await?, - })) - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/query.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/query.rs deleted file mode 100644 index fd85ebfde4..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/query.rs +++ /dev/null @@ -1,376 +0,0 @@ -use std::convert::From; - -use indy_api_types::errors::prelude::*; - -use crate::language::{Operator, TagName, TargetValue}; - -#[derive(Debug)] -pub(crate) enum ToSQL<'a> { - ByteSlice(&'a [u8]), - CharSlice(&'a str), -} - -impl<'a> From<&'a Vec> for ToSQL<'a> 
{ - fn from(item: &'a Vec) -> Self { - ToSQL::ByteSlice(item.as_slice()) - } -} - -impl<'a> From<&'a [u8]> for ToSQL<'a> { - fn from(item: &'a [u8]) -> Self { - ToSQL::ByteSlice(item) - } -} - -impl<'a> From<&'a str> for ToSQL<'a> { - fn from(item: &'a str) -> Self { - ToSQL::CharSlice(item) - } -} - -impl<'a> From<&'a String> for ToSQL<'a> { - fn from(item: &'a String) -> Self { - ToSQL::CharSlice(item.as_str()) - } -} - -// Translates Wallet Query Language to SQL -// WQL input is provided as a reference to a top level Operator -// Result is a tuple of query string and query arguments -pub(crate) fn wql_to_sql<'a>( - class: &'a [u8], - op: &'a Operator, - _options: Option<&str>, -) -> Result<(String, Vec>), IndyError> { - let mut arguments: Vec> = Vec::new(); - arguments.push(class.into()); - - let clause_string = operator_to_sql(op, &mut arguments)?; - - const BASE: &str = - "SELECT i.id, i.name, i.value, i.key, i.type FROM items as i WHERE i.type = ?"; - if !clause_string.is_empty() { - let mut query_string = String::with_capacity(BASE.len() + 5 + clause_string.len()); - query_string.push_str(BASE); - query_string.push_str(" AND "); - query_string.push_str(&clause_string); - Ok((query_string, arguments)) - } else { - Ok((BASE.to_string(), arguments)) - } -} - -pub(crate) fn wql_to_sql_count<'a>( - class: &'a [u8], - op: &'a Operator, -) -> Result<(String, Vec>), IndyError> { - let mut arguments: Vec> = Vec::new(); - arguments.push(class.into()); - - let clause_string = operator_to_sql(op, &mut arguments)?; - let mut query_string = "SELECT count(*) FROM items as i WHERE i.type = ?".to_string(); - - if !clause_string.is_empty() { - query_string.push_str(" AND "); - query_string.push_str(&clause_string); - } - - Ok((query_string, arguments)) -} - -fn operator_to_sql<'a>(op: &'a Operator, arguments: &mut Vec>) -> IndyResult { - match *op { - Operator::Eq(ref tag_name, ref target_value) => { - eq_to_sql(tag_name, target_value, arguments) - } - Operator::Neq(ref tag_name, ref target_value) => { - neq_to_sql(tag_name, target_value, arguments) - } - Operator::Gt(ref tag_name, ref target_value) => { - gt_to_sql(tag_name, target_value, arguments) - } - Operator::Gte(ref tag_name, ref target_value) => { - gte_to_sql(tag_name, target_value, arguments) - } - Operator::Lt(ref tag_name, ref target_value) => { - lt_to_sql(tag_name, target_value, arguments) - } - Operator::Lte(ref tag_name, ref target_value) => { - lte_to_sql(tag_name, target_value, arguments) - } - Operator::Like(ref tag_name, ref target_value) => { - like_to_sql(tag_name, target_value, arguments) - } - Operator::In(ref tag_name, ref target_values) => { - in_to_sql(tag_name, target_values, arguments) - } - Operator::And(ref suboperators) => and_to_sql(suboperators, arguments), - Operator::Or(ref suboperators) => or_to_sql(suboperators, arguments), - Operator::Not(ref suboperator) => not_to_sql(suboperator, arguments), - } -} - -fn eq_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? 
AND value = ?))" - .to_string(), - ) - } - ( - TagName::EncryptedTagName(ref queried_name), - TargetValue::Encrypted(ref queried_value), - ) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_encrypted WHERE name = ? AND value = ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for equality operator", - )), - } -} - -fn neq_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value != ?))" - .to_string(), - ) - } - ( - TagName::EncryptedTagName(ref queried_name), - TargetValue::Encrypted(ref queried_value), - ) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_encrypted WHERE name = ? AND value != ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for inequality operator", - )), - } -} - -fn gt_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value > ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $gt operator", - )), - } -} - -fn gte_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value >= ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $gte operator", - )), - } -} - -fn lt_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value < ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lt operator", - )), - } -} - -fn lte_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? 
AND value <= ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lte operator", - )), - } -} - -fn like_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value LIKE ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $like operator", - )), - } -} - -fn in_to_sql<'a>( - name: &'a TagName, - values: &'a [TargetValue], - arguments: &mut Vec>, -) -> IndyResult { - let mut in_string = String::new(); - match *name { - TagName::PlainTagName(ref queried_name) => { - in_string.push_str( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value IN (", - ); - arguments.push(queried_name.into()); - - for (index, value) in values.iter().enumerate() { - if let TargetValue::Unencrypted(ref target) = *value { - in_string.push('?'); - arguments.push(target.into()); - if index < values.len() - 1 { - in_string.push(','); - } - } else { - return Err(err_msg( - IndyErrorKind::WalletQueryError, - "Encrypted tag value in $in for nonencrypted tag name", - )); - } - } - - Ok(in_string + ")))") - } - TagName::EncryptedTagName(ref queried_name) => { - in_string.push_str( - "(i.id in (SELECT item_id FROM tags_encrypted WHERE name = ? AND value IN (", - ); - arguments.push(queried_name.into()); - let index_before_last = values.len() - 2; - - for (index, value) in values.iter().enumerate() { - if let TargetValue::Encrypted(ref target) = *value { - in_string.push('?'); - arguments.push(target.into()); - if index <= index_before_last { - in_string.push(','); - } - } else { - return Err(err_msg( - IndyErrorKind::WalletQueryError, - "Unencrypted tag value in $in for encrypted tag name", - )); - } - } - - Ok(in_string + ")))") - } - } -} - -fn and_to_sql<'a>( - suboperators: &'a [Operator], - arguments: &mut Vec>, -) -> IndyResult { - join_operators(suboperators, " AND ", arguments) -} - -fn or_to_sql<'a>( - suboperators: &'a [Operator], - arguments: &mut Vec>, -) -> IndyResult { - join_operators(suboperators, " OR ", arguments) -} - -fn not_to_sql<'a>(suboperator: &'a Operator, arguments: &mut Vec>) -> IndyResult { - let suboperator_string = operator_to_sql(suboperator, arguments)?; - Ok("NOT (".to_string() + &suboperator_string + ")") -} - -fn join_operators<'a>( - operators: &'a [Operator], - join_str: &str, - arguments: &mut Vec>, -) -> IndyResult { - let mut s = String::new(); - if !operators.is_empty() { - s.push('('); - for (index, operator) in operators.iter().enumerate() { - let operator_string = operator_to_sql(operator, arguments)?; - s.push_str(&operator_string); - if index < operators.len() - 1 { - s.push_str(join_str); - } - } - s.push(')'); - } - Ok(s) -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mod.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mod.rs deleted file mode 100644 index 315dd0334b..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mod.rs +++ /dev/null @@ -1,112 +0,0 @@ -use async_trait::async_trait; -use indy_api_types::errors::prelude::*; - -use crate::{language, wallet::EncryptedValue}; - -pub mod default; -pub mod mysql; - -#[derive(Clone, 
Debug, Ord, PartialOrd, Eq, PartialEq)] -pub enum Tag { - Encrypted(Vec, Vec), - PlainText(Vec, String), -} - -#[derive(Debug)] -pub enum TagName { - OfEncrypted(Vec), - OfPlain(Vec), -} - -#[derive(Clone, Debug)] -pub struct StorageRecord { - pub id: Vec, - pub value: Option, - pub type_: Option>, - pub tags: Option>, -} - -impl StorageRecord { - fn new( - id: Vec, - value: Option, - type_: Option>, - tags: Option>, - ) -> Self { - Self { - id, - value, - type_, - tags, - } - } -} - -#[async_trait] -pub trait StorageIterator: Send + Sync { - async fn next(&mut self) -> Result, IndyError>; - fn get_total_count(&self) -> Result, IndyError>; -} - -#[async_trait] -pub trait WalletStorage: Send + Sync { - async fn get(&self, type_: &[u8], id: &[u8], options: &str) - -> Result; - async fn add( - &self, - type_: &[u8], - id: &[u8], - value: &EncryptedValue, - tags: &[Tag], - ) -> Result<(), IndyError>; - async fn update( - &self, - type_: &[u8], - id: &[u8], - value: &EncryptedValue, - ) -> Result<(), IndyError>; - async fn add_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> Result<(), IndyError>; - async fn update_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> Result<(), IndyError>; - async fn delete_tags( - &self, - type_: &[u8], - id: &[u8], - tag_names: &[TagName], - ) -> Result<(), IndyError>; - async fn delete(&self, type_: &[u8], id: &[u8]) -> Result<(), IndyError>; - async fn get_storage_metadata(&self) -> Result, IndyError>; - async fn set_storage_metadata(&self, metadata: &[u8]) -> Result<(), IndyError>; - async fn get_all(&self) -> Result, IndyError>; - - // TODO: - async fn search( - &self, - type_: &[u8], - query: &language::Operator, - options: Option<&str>, - ) -> Result, IndyError>; - fn close(&mut self) -> Result<(), IndyError>; -} - -#[async_trait] -pub trait WalletStorageType: Send + Sync { - async fn create_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - metadata: &[u8], - ) -> Result<(), IndyError>; - async fn open_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - ) -> Result, IndyError>; - async fn delete_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - ) -> Result<(), IndyError>; -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/mod.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/mod.rs deleted file mode 100644 index f479e8fd1f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/mod.rs +++ /dev/null @@ -1,2246 +0,0 @@ -use std::{ - collections::{HashMap, VecDeque}, - iter::Iterator, -}; - -use async_trait::async_trait; -use futures::lock::Mutex; -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::base64; -use log::LevelFilter; -use query::{wql_to_sql, wql_to_sql_count}; -use serde::Deserialize; -use sqlx::{ - mysql::{MySqlConnectOptions, MySqlPoolOptions, MySqlRow}, - ConnectOptions, MySqlPool, Row, -}; - -use crate::{ - language, - storage::{StorageIterator, StorageRecord, Tag, TagName, WalletStorage, WalletStorageType}, - wallet::EncryptedValue, - RecordOptions, SearchOptions, -}; - -mod query; - -struct MySQLStorageIterator { - records: Option>>, - total_count: Option, -} - -impl MySQLStorageIterator { - fn new( - records: Option>>, - total_count: Option, - ) -> IndyResult { - Ok(MySQLStorageIterator { - records, - total_count, - }) - } -} - -#[async_trait] -impl StorageIterator for MySQLStorageIterator { - async fn next(&mut self) -> IndyResult> { - // TODO: 
Optimize!!! - if let Some(ref mut records) = self.records { - if let Some(record) = records.pop_front() { - return Ok(Some(record?)); - } else { - Ok(None) - } - } else { - Ok(None) - } - } - - fn get_total_count(&self) -> IndyResult> { - Ok(self.total_count.to_owned()) - } -} - -#[derive(Deserialize, Debug, Clone)] -struct Config { - pub read_host: String, - pub write_host: String, - pub port: u16, - pub db_name: String, - #[serde(default = "default_connection_limit")] - pub connection_limit: u32, -} - -fn default_connection_limit() -> u32 { - 100 -} - -#[derive(Deserialize, Clone)] -pub struct Credentials { - pub user: String, - pub pass: String, -} - -#[derive(Debug)] -struct MySqlStorage { - wallet_id: i64, - read_pool: MySqlPool, - write_pool: MySqlPool, -} - -pub struct MySqlStorageType { - connections: Mutex>, -} - -impl MySqlStorageType { - pub fn new() -> MySqlStorageType { - MySqlStorageType { - connections: Mutex::new(HashMap::new()), - } - } - - async fn _connect( - &self, - read_only: bool, - config: Config, - credentials: Credentials, - ) -> IndyResult { - let host_addr = if read_only { - &config.read_host - } else { - &config.write_host - }; - - let connection_string = format!( - "{}:{}@{}:{}/{}", - credentials.user, credentials.pass, host_addr, config.port, config.db_name - ); - - let mut connref = self.connections.lock().await; - - if let Some(connection) = connref.get(&connection_string) { - return Ok(connection.clone()); - } - - let my_sql_connect_options = MySqlConnectOptions::new() - .host(host_addr) - .database(&config.db_name) - .username(&credentials.user) - .password(&credentials.pass) - .log_statements(LevelFilter::Debug); - - let connection = MySqlPoolOptions::default() - .max_connections(config.connection_limit) - .test_before_acquire(false) - .connect_with(my_sql_connect_options) - .await?; - - connref.insert(connection_string, connection.clone()); - Ok(connection) - } -} - -#[async_trait] -impl WalletStorage for MySqlStorage { - /// - /// Tries to fetch values and/or tags from the storage. - /// Returns Result with StorageEntity object which holds requested data in case of success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type_ of the item in storage - /// * `id` - id of the item in storage - /// * `options` - JSon containing what needs to be fetched. - /// Example: {"retrieveValue": true, "retrieveTags": true} - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `StorageEntity` - Contains name, optional value and optional tags - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemNotFound` - Item is not found in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn get(&self, type_: &[u8], id: &[u8], options: &str) -> IndyResult { - let options: RecordOptions = serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "RecordOptions is malformed json", - )?; - - let mut conn = self.read_pool.acquire().await?; - - let (value, tags): (Option>, Option) = sqlx::query_as(&format!( - r#" - SELECT {}, {} - FROM items - WHERE - wallet_id = ? - AND type = ? - AND name = ? 
- "#, - if options.retrieve_value { - "value" - } else { - "NULL" - }, - if options.retrieve_tags { - "tags" - } else { - "NULL" - }, - )) - .bind(self.wallet_id) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .fetch_one(&mut *conn) - .await?; - - let value = if let Some(value) = value { - Some(EncryptedValue::from_bytes(&value)?) - } else { - None - }; - - let type_ = if options.retrieve_type { - Some(type_.to_vec()) - } else { - None - }; - - let tags = if let Some(tags) = tags { - Some(_tags_from_json(tags)?) - } else { - None - }; - - Ok(StorageRecord::new(id.to_vec(), value, type_, tags)) - } - - /// - /// inserts value and tags into storage. - /// Returns Result with () on success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type of the item in storage - /// * `id` - id of the item in storage - /// * `value` - value of the item in storage - /// * `value_key` - key used to encrypt the value - /// * `tags` - tags assigned to the value - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` class of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemAlreadyExists` - Item is already present in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn add( - &self, - type_: &[u8], - id: &[u8], - value: &EncryptedValue, - tags: &[Tag], - ) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - sqlx::query( - r#" - INSERT INTO items (type, name, value, tags, wallet_id) - VALUE (?, ?, ?, ?, ?) - "#, - ) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(&value.to_bytes()) - .bind(&_tags_to_json(tags)?) - .bind(self.wallet_id) - .execute(&mut *tx) - .await?; - - tx.commit().await?; - Ok(()) - } - - async fn update(&self, type_: &[u8], id: &[u8], value: &EncryptedValue) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - let row_updated = sqlx::query( - r#" - UPDATE items - SET value = ? - WHERE type = ? - AND name = ? - AND wallet_id = ? - "#, - ) - .bind(&value.to_bytes()) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - async fn add_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> IndyResult<()> { - if tags.is_empty() { - // FIXME: Think about checking item exists - return Ok(()); - } - - let tag_paths = _tags_to_plain(tags) - .into_iter() - .map(|(tag, val)| format!(r#"'$."{}"', "{}""#, tag, val)) - .collect::>() - .join(","); - - let mut tx = self.write_pool.begin().await?; - - let row_updated = sqlx::query(&format!( - r#" - UPDATE items - SET tags = JSON_SET(tags, {}) - WHERE type = ? - AND name = ? - AND wallet_id = ? - "#, - tag_paths - )) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? 
- .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - async fn update_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - let row_updated = sqlx::query( - r#" - UPDATE items - SET tags = ? - WHERE type = ? - AND name = ? - AND wallet_id = ? - "#, - ) - .bind(&_tags_to_json(tags)?) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - async fn delete_tags(&self, type_: &[u8], id: &[u8], tag_names: &[TagName]) -> IndyResult<()> { - if tag_names.is_empty() { - // FIXME: Think about checking item exists - return Ok(()); - } - - let mut tx = self.write_pool.begin().await?; - - let tag_name_paths = _tag_names_to_plain(tag_names) - .into_iter() - .map(|tag_name| format!(r#"'$."{}"'"#, tag_name)) - .collect::>() - .join(","); - - let row_updated = sqlx::query(&format!( - r#" - UPDATE items - SET tags = JSON_REMOVE(tags, {}) - WHERE type = ? - AND name = ? - AND wallet_id = ? - "#, - tag_name_paths - )) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - /// - /// deletes value and tags into storage. - /// Returns Result with () on success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type of the item in storage - /// * `id` - id of the item in storage - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemNotFound` - Item is not found in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn delete(&self, type_: &[u8], id: &[u8]) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - let rows_affected = sqlx::query( - r#" - DELETE FROM items - WHERE type = ? - AND name = ? - AND wallet_id = ?"#, - ) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match rows_affected { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to delete not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row deleted. 
Seems wallet structure is inconsistent", - )), - } - } - - async fn get_storage_metadata(&self) -> IndyResult> { - let mut conn = self.read_pool.acquire().await?; - - let (metadata,): (String,) = sqlx::query_as::<_, (String,)>( - r#" - SELECT metadata - FROM wallets - WHERE id = ? - "#, - ) - .bind(self.wallet_id) - .fetch_one(&mut *conn) - .await?; - - base64::decode(&metadata) - } - - async fn set_storage_metadata(&self, metadata: &[u8]) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - sqlx::query( - r#" - UPDATE wallets - SET metadata = ? - WHERE id = ? - "#, - ) - .bind(base64::encode(metadata)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await?; - - tx.commit().await?; - Ok(()) - } - - async fn get_all(&self) -> IndyResult> { - let records: VecDeque<_> = sqlx::query( - r#" - SELECT type, name, value, tags - FROM items - WHERE wallet_id = ? - "#, - ) - .bind(self.wallet_id) - .map(|r: MySqlRow| -> IndyResult { - let type_: String = r.get(0); - let id: String = r.get(1); - let value: Vec = r.get(2); - let tags: serde_json::Value = r.get(3); - - let res = StorageRecord::new( - base64::decode(&id)?, - Some(EncryptedValue::from_bytes(&value)?), - Some(base64::decode(&type_)?), - Some(_tags_from_json(tags)?), - ); - - Ok(res) - }) - .fetch_all(&self.read_pool) - .await? - .into_iter() - .collect(); - - let total_len = records.len(); - - // FIXME: Fetch total count - Ok(Box::new(MySQLStorageIterator::new( - Some(records), - Some(total_len), - )?)) - } - - async fn search( - &self, - type_: &[u8], - query: &language::Operator, - options: Option<&str>, - ) -> IndyResult> { - let options = if let Some(options) = options { - serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "Search options is malformed json", - )? - } else { - SearchOptions::default() - }; - - let mut conn = self.read_pool.acquire().await?; - - let total_count = if options.retrieve_total_count { - let (query, args) = wql_to_sql_count(self.wallet_id, type_, query)?; - let mut query = sqlx::query_as::(&query); - - for arg in args.iter() { - query = if arg.is_i64() { - query.bind(arg.as_i64().unwrap()) - } else if arg.is_string() { - query.bind(arg.as_str().unwrap()) - } else { - return Err(err_msg( - IndyErrorKind::InvalidState, - "Unexpected sql parameter type.", - )); - } - } - - let (total_count,) = query.fetch_one(&mut *conn).await?; - Some(total_count as usize) - } else { - None - }; - - let records = if options.retrieve_records { - let (query, args) = wql_to_sql(self.wallet_id, type_, query, &options)?; - - let mut query = sqlx::query::(&query); - - for arg in args.iter() { - query = if arg.is_i64() { - query.bind(arg.as_i64().unwrap()) - } else if arg.is_string() { - query.bind(arg.as_str().unwrap()) - } else { - return Err(err_msg( - IndyErrorKind::InvalidState, - "Unexpected sql parameter type.", - )); - } - } - - let records: VecDeque<_> = query - .map(|r: MySqlRow| -> IndyResult { - let type_ = if options.retrieve_type { - let type_: String = r.get(0); - Some(base64::decode(&type_)?) - } else { - None - }; - - let id = { - let id: String = r.get(1); - base64::decode(&id)? - }; - - let value = if options.retrieve_value { - let value: Vec = r.get(2); - Some(EncryptedValue::from_bytes(&value)?) - } else { - None - }; - - let tags = if options.retrieve_tags { - let tags: serde_json::Value = r.get(3); - Some(_tags_from_json(tags)?) - } else { - None - }; - - let res = StorageRecord::new(id, value, type_, tags); - - Ok(res) - }) - .fetch_all(&self.read_pool) - .await? 
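// Editor's sketch (not part of the original diff): search() above walks the
// serde_json::Value arguments produced by wql_to_sql and binds each one either as
// an i64 or as a string, rejecting anything else. The same dispatch without sqlx,
// collecting into a hypothetical BindParam enum instead of a query builder:
use serde_json::Value;

enum BindParam {
    Int(i64),
    Text(String),
}

fn to_bind_params(args: &[Value]) -> Result<Vec<BindParam>, String> {
    args.iter()
        .map(|arg| {
            if let Some(n) = arg.as_i64() {
                Ok(BindParam::Int(n))
            } else if let Some(s) = arg.as_str() {
                Ok(BindParam::Text(s.to_owned()))
            } else {
                Err("Unexpected sql parameter type.".to_string())
            }
        })
        .collect()
}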
- .into_iter() - .collect(); - - Some(records) - } else { - None - }; - - Ok(Box::new(MySQLStorageIterator::new(records, total_count)?)) - } - - fn close(&mut self) -> IndyResult<()> { - Ok(()) - } -} - -#[async_trait] -impl WalletStorageType for MySqlStorageType { - /// - /// Deletes the MySql database file with the provided id from the path specified in the - /// config file. - /// - /// # Arguments - /// - /// * `id` - id of the MySql DB file - /// * `storage_config` - config containing the location of MySql DB files - /// * `storage_credentials` - DB credentials - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::NotFound` - File with the provided id not found - /// * `IOError(..)` - Deletion of the file form the file-system failed - async fn delete_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - ) -> IndyResult<()> { - let config = config - .map(serde_json::from_str::) - .transpose() - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")? - .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent config json", - ))?; - - let credentials = credentials - .map(serde_json::from_str::) - .transpose() - .to_indy( - IndyErrorKind::InvalidStructure, - "Malformed credentials json", - )? - .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent credentials json", - ))?; - - let mut tx = self - ._connect(false, config, credentials) - .await? - .begin() - .await?; - - let res = sqlx::query( - r#" - DELETE FROM wallets - WHERE name = ? - "#, - ) - .bind(id) - .execute(&mut *tx) - .await; - - let rows_affected = res?.rows_affected(); - - match rows_affected { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletNotFound, - "Item to delete not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row deleted. Seems wallet structure is inconsistent", - )), - } - } - - /// - /// Creates the MySql DB file with the provided name in the path specified in the config file, - /// and initializes the encryption keys needed for encryption and decryption of data. - /// - /// # Arguments - /// - /// * `id` - name of the MySql DB file - /// * `config` - config containing the location of MySql DB files - /// * `credentials` - DB credentials - /// * `metadata` - encryption keys that need to be stored in the newly created DB - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `AlreadyExists` - File with a given name already exists on the path - /// * `IOError("IO error during storage operation:...")` - Connection to the DB failed - /// * `IOError("Error occurred while creating wallet file:..)"` - Creation of schema failed - /// * `IOError("Error occurred while inserting the keys...")` - Insertion of keys failed - /// * `IOError(..)` - Deletion of the file form the file-system failed - async fn create_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - metadata: &[u8], - ) -> IndyResult<()> { - let mut config = config - .map(serde_json::from_str::) - .transpose() - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")? 
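// Editor's sketch (not part of the original diff): create/open/delete_storage all
// parse their Option<&str> config and credentials the same way: map + transpose
// turns Option<Result<_>> into Result<Option<_>>, and ok_or rejects an absent
// value. A std + serde_json illustration with a String error in place of IndyError:
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct DemoCredentials {
    user: String,
    pass: String,
}

fn parse_required(credentials: Option<&str>) -> Result<DemoCredentials, String> {
    credentials
        .map(serde_json::from_str::<DemoCredentials>)
        .transpose()
        .map_err(|e| format!("Malformed credentials json: {e}"))?
        .ok_or_else(|| "Absent credentials json".to_string())
}

fn main() {
    assert!(parse_required(None).is_err());
    let creds = parse_required(Some(r#"{"user":"root","pass":"pass@word1"}"#)).unwrap();
    assert_eq!(creds.user, "root");
}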
- .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent config json", - ))?; - - let credentials = credentials - .map(serde_json::from_str::) - .transpose() - .to_indy( - IndyErrorKind::InvalidStructure, - "Malformed credentials json", - )? - .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent credentials json", - ))?; - - let my_sql_connect_options = MySqlConnectOptions::new() - .host(&config.write_host) - .username(&credentials.user) - .password(&credentials.pass) - .log_statements(LevelFilter::Debug); - - let mut pool = MySqlPoolOptions::default() - .max_connections(1) - .test_before_acquire(false) - .connect_with(my_sql_connect_options) - .await?; - - let mut con = pool.acquire().await?; - - // Basic SQL injection prevention - // since we cannot bind the database identifier - config.db_name = config.db_name.replace('`', "``"); - - sqlx::query(&format!( - "CREATE DATABASE IF NOT EXISTS `{}`;", - config.db_name - )) - .execute(&mut *con) - .await?; - - // Replace the previous single use pool - // with the actual one, get a connection - // and create the required tables - pool = self._connect(false, config, credentials).await?; - con = pool.acquire().await?; - - sqlx::query( - r#" - CREATE TABLE IF NOT EXISTS `items` ( - `wallet_id` int NOT NULL, - `type` varchar(256) NOT NULL, - `name` varchar(256) NOT NULL, - `value` blob NOT NULL, - `tags` varchar(256) DEFAULT NULL, - PRIMARY KEY (`wallet_id`, `type`, `name`) - );"#, - ) - .execute(&mut *con) - .await?; - - sqlx::query( - r#" - CREATE TABLE IF NOT EXISTS `wallets` ( - `id` int NOT NULL AUTO_INCREMENT, - `name` varchar(64) NOT NULL, - `metadata` varchar(4096) DEFAULT NULL, - PRIMARY KEY (`id`) - );"#, - ) - .execute(&mut *con) - .await?; - - let mut tx = pool.begin().await?; - - let res = sqlx::query( - r#" - INSERT INTO wallets (name, metadata) - VALUES (?, ?) - "#, - ) - .bind(id) - .bind(base64::encode(metadata)) - .execute(&mut *tx) - .await; - - match res { - Err(sqlx::Error::Database(e)) if e.code().is_some() && e.code().unwrap() == "23000" => { - return Err(err_msg( - IndyErrorKind::WalletAlreadyExists, - "Wallet already exists", - )) - } - e => e?, - }; - - // FIXME: return wallet already exists on 1062 error code from MySQL - - tx.commit().await?; - Ok(()) - } - - /// - /// Establishes a connection to the MySql DB with the provided id located in the path - /// specified in the config. In case of a successful onection returns a Storage object - /// embedding the connection and the encryption keys that will be used for encryption and - /// decryption operations. - /// - /// - /// # Arguments - /// - /// * `id` - id of the MySql DB file - /// * `config` - config containing the location of MySql DB files - /// * `credentials` - DB credentials - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `(Box, Vec)` - Tuple of `MySqlStorage` and `encryption keys` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::NotFound` - File with the provided id not found - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn open_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - ) -> IndyResult> { - let config = config - .map(serde_json::from_str::) - .transpose() - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")? 
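// Editor's sketch (not part of the original diff): the database name cannot be
// bound as a ? parameter in CREATE DATABASE, so create_storage above escapes it by
// doubling backticks before splicing it into the statement. The same escaping in
// isolation:
fn escape_mysql_identifier(raw: &str) -> String {
    // Doubling the backtick neutralises it inside a `...`-quoted identifier.
    raw.replace('`', "``")
}

fn main() {
    let db_name = escape_mysql_identifier("indy` ; DROP TABLE wallets; --");
    let stmt = format!("CREATE DATABASE IF NOT EXISTS `{db_name}`;");
    // The injected backtick is doubled, so the whole value stays inside the
    // quoted identifier instead of terminating it.
    assert!(stmt.contains("indy`` ; DROP TABLE wallets; --"));
    println!("{stmt}");
}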
- .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent config json", - ))?; - - let credentials = credentials - .map(serde_json::from_str::) - .transpose() - .to_indy( - IndyErrorKind::InvalidStructure, - "Malformed credentials json", - )? - .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent credentials json", - ))?; - - let read_pool = self - ._connect(true, config.clone(), credentials.clone()) - .await?; - let write_pool = self._connect(false, config, credentials).await?; - - let res = sqlx::query_as::<_, (i64,)>( - r#" - SELECT id FROM wallets - WHERE name = ? - "#, - ) - .bind(id) - .fetch_one(&read_pool) - .await; - - let (wallet_id,) = match res { - Err(sqlx::Error::RowNotFound) => { - return Err(err_msg(IndyErrorKind::WalletNotFound, "Wallet not found")); - } - e => e?, - }; - - Ok(Box::new(MySqlStorage { - read_pool, - write_pool, - wallet_id, - })) - } -} - -#[cfg(test)] -#[allow(clippy::all)] -mod tests { - use indy_utils::environment; - - use super::{super::Tag, *}; - - #[allow(unused_macros)] - macro_rules! assert_kind { - ($kind:expr, $var:expr) => { - match $var { - Err(e) => assert_eq!($kind, e.kind()), - _ => assert!(false, "Result expected to be error"), - } - }; - } - - #[async_std::test] - #[cfg(feature = "benchmark")] - async fn mysql_storage_sync_send() { - use std::{sync::Arc, time::SystemTime}; - - use futures::{channel::oneshot, executor::ThreadPool, future::join_all}; - - let count = 1000; - let executor = ThreadPool::new().expect("Failed to new ThreadPool"); - let storage_type = Arc::new(Box::new(MySqlStorageType::new())); - - let waiters: Vec<_> = (0..count) - .into_iter() - .map(|id| { - let st = storage_type.clone(); - let (tx, rx) = oneshot::channel::>(); - - let future = async move { - let res = st - .delete_storage( - &format!("mysql_storage_sync_send_{}", id), - _config(), - _credentials(), - ) - .await; - - tx.send(res).unwrap(); - }; - - executor.spawn_ok(future); - rx - }) - .collect(); - - join_all(waiters).await; - println!("------------> 1 {:?}", SystemTime::now()); - - let waiters: Vec<_> = (0..count) - .into_iter() - .map(|id| { - let st = storage_type.clone(); - let (tx, rx) = oneshot::channel::>(); - - let future = async move { - let res = st - .create_storage( - &format!("mysql_storage_sync_send_{}", id), - _config(), - _credentials(), - &_metadata(), - ) - .await; - - tx.send(res).unwrap(); - }; - - executor.spawn_ok(future); - rx - }) - .collect(); - - join_all(waiters).await; - - println!("------------> 3 {:?}", SystemTime::now()); - - let waiters: Vec<_> = (0..count) - .into_iter() - .map(|id| { - let st = storage_type.clone(); - let (tx, rx) = oneshot::channel::>(); - - let future = async move { - let res = st - .delete_storage( - &format!("mysql_storage_sync_send_{}", id), - _config(), - _credentials(), - ) - .await; - - tx.send(res).unwrap(); - }; - - executor.spawn_ok(future); - rx - }) - .collect(); - - join_all(waiters).await; - - println!("------------> 5 {:?}", SystemTime::now()); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_create_works() { - _cleanup("mysql_storage_type_create_works").await; - - let storage_type = MySqlStorageType::new(); - - storage_type - .create_storage( - "mysql_storage_type_create_works", - _config(), - _credentials(), - &_metadata(), - ) - .await - .unwrap(); - - _cleanup("mysql_storage_type_create_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_create_works_for_twice() { - 
_cleanup("mysql_storage_type_create_works_for_twice").await; - - let storage_type = MySqlStorageType::new(); - storage_type - .create_storage( - "mysql_storage_type_create_works_for_twice", - _config(), - _credentials(), - &_metadata(), - ) - .await - .unwrap(); - - let res = storage_type - .create_storage( - "mysql_storage_type_create_works_for_twice", - _config(), - _credentials(), - &_metadata(), - ) - .await; - - assert_kind!(IndyErrorKind::WalletAlreadyExists, res); - - storage_type - .delete_storage( - "mysql_storage_type_create_works_for_twice", - _config(), - _credentials(), - ) - .await - .unwrap(); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_get_storage_metadata_works() { - _cleanup("mysql_storage_get_storage_metadata_works").await; - - { - let storage = _storage("mysql_storage_get_storage_metadata_works").await; - let metadata = storage.get_storage_metadata().await.unwrap(); - - assert_eq!(metadata, _metadata()); - } - - _cleanup("mysql_storage_get_storage_metadata_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_delete_works() { - _cleanup("mysql_storage_type_delete_works").await; - - let storage_type = MySqlStorageType::new(); - storage_type - .create_storage( - "mysql_storage_type_delete_works", - _config(), - _credentials(), - &_metadata(), - ) - .await - .unwrap(); - - storage_type - .delete_storage("mysql_storage_type_delete_works", _config(), _credentials()) - .await - .unwrap(); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_delete_works_for_non_existing() { - _cleanup("mysql_storage_type_delete_works_for_non_existing").await; - - let storage_type = MySqlStorageType::new(); - - storage_type - .create_storage( - "mysql_storage_type_delete_works_for_non_existing", - _config(), - _credentials(), - &_metadata(), - ) - .await - .unwrap(); - - let res = storage_type - .delete_storage("unknown", _config(), _credentials()) - .await; - assert_kind!(IndyErrorKind::WalletNotFound, res); - - storage_type - .delete_storage( - "mysql_storage_type_delete_works_for_non_existing", - _config(), - _credentials(), - ) - .await - .unwrap(); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_open_works() { - _cleanup("mysql_storage_type_open_works").await; - _storage("mysql_storage_type_open_works").await; - _cleanup("mysql_storage_type_open_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_open_works_for_not_created() { - _cleanup("mysql_storage_type_open_works_for_not_created").await; - - let storage_type = MySqlStorageType::new(); - - let res = storage_type - .open_storage("unknown", _config(), _credentials()) - .await; - - assert_kind!(IndyErrorKind::WalletNotFound, res); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_works_for_is_802() { - _cleanup("mysql_storage_add_works_for_is_802").await; - - { - let storage = _storage("mysql_storage_add_works_for_is_802").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.add(&_type1(), &_id1(), &_value1(), &_tags()).await; - assert_kind!(IndyErrorKind::WalletItemAlreadyExists, res); - - let res = storage.add(&_type1(), &_id1(), &_value1(), &_tags()).await; - assert_kind!(IndyErrorKind::WalletItemAlreadyExists, res); - } - - _cleanup("mysql_storage_add_works_for_is_802").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn 
mysql_storage_set_get_works() { - _cleanup("mysql_storage_set_get_works").await; - - { - let storage = _storage("mysql_storage_set_get_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - } - - _cleanup("mysql_storage_set_get_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_set_get_works_for_twice() { - _cleanup("mysql_storage_set_get_works_for_twice").await; - - { - let storage = _storage("mysql_storage_set_get_works_for_twice").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.add(&_type1(), &_id1(), &_value2(), &_tags()).await; - assert_kind!(IndyErrorKind::WalletItemAlreadyExists, res); - } - - _cleanup("mysql_storage_set_get_works_for_twice").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_set_get_works_for_reopen() { - _cleanup("mysql_storage_set_get_works_for_reopen").await; - - _storage("mysql_storage_set_get_works_for_reopen") - .await - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = MySqlStorageType::new() - .open_storage( - "mysql_storage_set_get_works_for_reopen", - _config(), - _credentials(), - ) - .await - .unwrap() - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - - _cleanup("mysql_storage_set_get_works_for_reopen").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_get_works_for_wrong_key() { - _cleanup("mysql_storage_get_works_for_wrong_key").await; - - { - let storage = _storage("mysql_storage_get_works_for_wrong_key").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage - .get( - &_type1(), - &_id2(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await; - - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_get_works_for_wrong_key").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_works() { - _cleanup("mysql_storage_delete_works").await; - - { - let storage = _storage("mysql_storage_delete_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - - storage.delete(&_type1(), &_id1()).await.unwrap(); - - let res = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await; - - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_works_for_non_existing() { - _cleanup("mysql_storage_delete_works_for_non_existing").await; - - { - let storage = 
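// Editor's sketch (not part of the original diff): the tests pass retrieval options
// as camelCase JSON ({"retrieveType": ..., "retrieveValue": ..., "retrieveTags": ...})
// while get() reads snake_case fields (options.retrieve_value, etc.). A guess at the
// corresponding serde mapping, assuming a camelCase rename rule; DemoRecordOptions
// is illustrative, not the crate's actual RecordOptions definition.
use serde::Deserialize;

#[derive(Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase", default)]
struct DemoRecordOptions {
    retrieve_type: bool,
    retrieve_value: bool,
    retrieve_tags: bool,
}

fn main() {
    let opts: DemoRecordOptions = serde_json::from_str(
        r#"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"#,
    )
    .expect("options json should parse");
    assert!(!opts.retrieve_type && opts.retrieve_value && opts.retrieve_tags);
}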
_storage("mysql_storage_delete_works_for_non_existing").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.delete(&_type1(), &_id2()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_works_for_non_existing").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_returns_error_item_not_found_if_no_such_type() { - _cleanup("mysql_storage_delete_returns_error_item_not_found_if_no_such_type").await; - - { - let storage = - _storage("mysql_storage_delete_returns_error_item_not_found_if_no_such_type").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.delete(&_type2(), &_id2()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_returns_error_item_not_found_if_no_such_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_get_all_works() { - _cleanup("mysql_storage_get_all_works").await; - - { - let storage = _storage("mysql_storage_get_all_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - storage - .add(&_type2(), &_id2(), &_value2(), &_tags()) - .await - .unwrap(); - - let mut storage_iterator = storage.get_all().await.unwrap(); - - let record = storage_iterator.next().await.unwrap().unwrap(); - assert_eq!(record.type_.unwrap(), _type1()); - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - - let record = storage_iterator.next().await.unwrap().unwrap(); - assert_eq!(record.type_.unwrap(), _type2()); - assert_eq!(record.value.unwrap(), _value2()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - - let record = storage_iterator.next().await.unwrap(); - assert!(record.is_none()); - } - - _cleanup("mysql_storage_get_all_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_get_all_works_for_empty() { - _cleanup("mysql_storage_get_all_works_for_empty").await; - - { - let storage = _storage("mysql_storage_get_all_works_for_empty").await; - let mut storage_iterator = storage.get_all().await.unwrap(); - - let record = storage_iterator.next().await.unwrap(); - assert!(record.is_none()); - } - - _cleanup("mysql_storage_get_all_works_for_empty").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_works() { - _cleanup("mysql_storage_update_works").await; - - { - let storage = _storage("mysql_storage_update_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - storage - .update(&_type1(), &_id1(), &_value2()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value2()); - } - - _cleanup("mysql_storage_update_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_works_for_non_existing_id() { - _cleanup("mysql_storage_update_works_for_non_existing_id").await; - - { - let storage = 
_storage("mysql_storage_update_works_for_non_existing_id").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let res = storage.update(&_type1(), &_id2(), &_value2()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_update_works_for_non_existing_id").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_works_for_non_existing_type() { - _cleanup("mysql_storage_update_works_for_non_existing_type").await; - - { - let storage = _storage("mysql_storage_update_works_for_non_existing_type").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let res = storage.update(&_type2(), &_id1(), &_value2()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_update_works_for_non_existing_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_tags_works() { - _cleanup("mysql_storage_add_tags_works").await; - - { - let storage = _storage("mysql_storage_add_tags_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - storage - .add_tags(&_type1(), &_id1(), &_new_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let expected_tags = { - let mut tags = _tags(); - tags.extend(_new_tags()); - _sort(tags) - }; - - assert_eq!(_sort(record.tags.unwrap()), expected_tags); - } - - _cleanup("mysql_storage_add_tags_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_tags_works_for_non_existing_id() { - _cleanup("mysql_storage_add_tags_works_for_non_existing_id").await; - - { - let storage = _storage("mysql_storage_add_tags_works_for_non_existing_id").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.add_tags(&_type1(), &_id2(), &_new_tags()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_add_tags_works_for_non_existing_id").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_tags_works_for_non_existing_type() { - _cleanup("mysql_storage_add_tags_works_for_non_existing_type").await; - - { - let storage = _storage("mysql_storage_add_tags_works_for_non_existing_type").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.add_tags(&_type2(), &_id1(), &_new_tags()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_add_tags_works_for_non_existing_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_tags_works_for_already_existing() { - _cleanup("mysql_storage_add_tags_works_for_already_existing").await; - - { - let storage = 
_storage("mysql_storage_add_tags_works_for_already_existing").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let tags_with_existing = { - let mut tags = _tags(); - tags.extend(_new_tags()); - tags - }; - - storage - .add_tags(&_type1(), &_id1(), &tags_with_existing) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let expected_tags = { - let mut tags = _tags(); - tags.extend(_new_tags()); - _sort(tags) - }; - - assert_eq!(_sort(record.tags.unwrap()), expected_tags); - } - - _cleanup("mysql_storage_add_tags_works_for_already_existing").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_tags_works() { - _cleanup("mysql_storage_update_tags_works").await; - - { - let storage = _storage("mysql_storage_update_tags_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - storage - .update_tags(&_type1(), &_id1(), &_new_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_new_tags())); - } - - _cleanup("mysql_storage_update_tags_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_tags_works_for_non_existing_id() { - _cleanup("mysql_storage_update_tags_works_for_non_existing_id").await; - - { - let storage = _storage("mysql_storage_update_tags_works_for_non_existing_id").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.update_tags(&_type1(), &_id2(), &_new_tags()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_update_tags_works_for_non_existing_id").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_tags_works_for_non_existing_type() { - _cleanup("mysql_storage_update_tags_works_for_non_existing_type").await; - - { - let storage = _storage("mysql_storage_update_tags_works_for_non_existing_type").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.update_tags(&_type1(), &_id2(), &_new_tags()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_update_tags_works_for_non_existing_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_tags_works_for_already_existing() { - _cleanup("mysql_storage_update_tags_works_for_already_existing").await; - { - let storage = _storage("mysql_storage_update_tags_works_for_already_existing").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let tags_with_existing = { - let mut tags = _tags(); - tags.extend(_new_tags()); - tags - }; - - storage - .update_tags(&_type1(), &_id1(), &tags_with_existing) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let expected_tags = { - let mut tags = _tags(); - tags.extend(_new_tags()); - _sort(tags) - }; - 
- assert_eq!(_sort(record.tags.unwrap()), expected_tags); - } - _cleanup("mysql_storage_update_tags_works_for_already_existing").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_tags_works() { - _cleanup("mysql_storage_delete_tags_works").await; - - { - let storage = _storage("mysql_storage_delete_tags_works").await; - - let tag_name1 = vec![0, 0, 0]; - let tag_name2 = vec![1, 1, 1]; - let tag_name3 = vec![2, 2, 2]; - let tag1 = Tag::Encrypted(tag_name1.clone(), vec![0, 0, 0]); - let tag2 = Tag::PlainText(tag_name2.clone(), "tag_value_2".to_string()); - let tag3 = Tag::Encrypted(tag_name3.clone(), vec![2, 2, 2]); - let tags = vec![tag1.clone(), tag2.clone(), tag3.clone()]; - - storage - .add(&_type1(), &_id1(), &_value1(), &tags) - .await - .unwrap(); - - let tag_names = vec![ - TagName::OfEncrypted(tag_name1.clone()), - TagName::OfPlain(tag_name2.clone()), - ]; - - storage - .delete_tags(&_type1(), &_id1(), &tag_names) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.tags.unwrap(), vec![tag3]); - } - - _cleanup("mysql_storage_delete_tags_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_tags_works_for_non_existing_type() { - _cleanup("mysql_storage_delete_tags_works_for_non_existing_type").await; - - { - let storage = _storage("mysql_storage_delete_tags_works_for_non_existing_type").await; - - let tag_name1 = vec![0, 0, 0]; - let tag_name2 = vec![1, 1, 1]; - let tag_name3 = vec![2, 2, 2]; - let tag1 = Tag::Encrypted(tag_name1.clone(), vec![0, 0, 0]); - let tag2 = Tag::PlainText(tag_name2.clone(), "tag_value_2".to_string()); - let tag3 = Tag::Encrypted(tag_name3.clone(), vec![2, 2, 2]); - let tags = vec![tag1.clone(), tag2.clone(), tag3.clone()]; - - storage - .add(&_type1(), &_id1(), &_value1(), &tags) - .await - .unwrap(); - - let tag_names = vec![ - TagName::OfEncrypted(tag_name1.clone()), - TagName::OfPlain(tag_name2.clone()), - ]; - - let res = storage.delete_tags(&_type2(), &_id1(), &tag_names).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_tags_works_for_non_existing_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_tags_works_for_non_existing_id() { - _cleanup("mysql_storage_delete_tags_works_for_non_existing_id").await; - - { - let storage = _storage("mysql_storage_delete_tags_works_for_non_existing_id").await; - - let tag_name1 = vec![0, 0, 0]; - let tag_name2 = vec![1, 1, 1]; - let tag_name3 = vec![2, 2, 2]; - let tag1 = Tag::Encrypted(tag_name1.clone(), vec![0, 0, 0]); - let tag2 = Tag::PlainText(tag_name2.clone(), "tag_value_2".to_string()); - let tag3 = Tag::Encrypted(tag_name3.clone(), vec![2, 2, 2]); - let tags = vec![tag1.clone(), tag2.clone(), tag3.clone()]; - - storage - .add(&_type1(), &_id1(), &_value1(), &tags) - .await - .unwrap(); - - let tag_names = vec![ - TagName::OfEncrypted(tag_name1.clone()), - TagName::OfPlain(tag_name2.clone()), - ]; - - let res = storage.delete_tags(&_type1(), &_id2(), &tag_names).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_tags_works_for_non_existing_id").await; - } - - fn _config() -> Option<&'static str> { - Some( - r#" - { - "read_host": "127.0.0.1", - "write_host": "127.0.0.1", - "port": 3306, - "db_name": "indy" - } - "#, - 
) - } - - fn _credentials() -> Option<&'static str> { - Some( - r#" - { - "user": "root", - "pass": "pass@word1" - } - "#, - ) - } - - async fn _cleanup(name: &str) { - MySqlStorageType::new() - .delete_storage(name, _config(), _credentials()) - .await - .ok(); - } - - async fn _storage(name: &str) -> Box { - let storage_type = MySqlStorageType::new(); - - storage_type - .create_storage(name, _config(), _credentials(), &_metadata()) - .await - .unwrap(); - - storage_type - .open_storage(name, _config(), _credentials()) - .await - .unwrap() - } - - fn _metadata() -> Vec { - vec![ - 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, - 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, - 3, 4, 5, 6, 7, 8, - ] - } - - fn _type(i: u8) -> Vec { - vec![i, 1 + i, 2 + i] - } - - fn _type1() -> Vec { - _type(1) - } - - fn _type2() -> Vec { - _type(2) - } - - fn _id(i: u8) -> Vec { - vec![3 + i, 4 + i, 5 + i] - } - - fn _id1() -> Vec { - _id(1) - } - - fn _id2() -> Vec { - _id(2) - } - - fn _value(i: u8) -> EncryptedValue { - EncryptedValue { - data: vec![6 + i, 7 + i, 8 + i], - key: vec![ - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - ], - } - } - - fn _value1() -> EncryptedValue { - _value(1) - } - - fn _value2() -> EncryptedValue { - _value(2) - } - - fn _tags() -> Vec { - vec![ - Tag::Encrypted(vec![1, 5, 8], vec![3, 5, 6]), - Tag::PlainText(vec![1, 5, 8, 1], "Plain value".to_string()), - ] - } - - fn _new_tags() -> Vec { - vec![ - Tag::Encrypted(vec![1, 1, 1], vec![2, 2, 2]), - Tag::PlainText(vec![1, 1, 1], String::from("tag_value_3")), - ] - } - - fn _sort(mut v: Vec) -> Vec { - v.sort(); - v - } - - fn _custom_path(name: &str) -> String { - let mut path = environment::tmp_path(); - path.push(name); - path.to_str().unwrap().to_owned() - } -} - -// FIXME: copy/paste -fn _tags_to_plain(tags: &[Tag]) -> HashMap { - let mut map = HashMap::with_capacity(tags.len()); - - for tag in tags { - match *tag { - Tag::Encrypted(ref name, ref value) => { - map.insert(base64::encode(name), base64::encode(value)) - } - Tag::PlainText(ref name, ref value) => { - map.insert(format!("~{}", &base64::encode(name)), value.to_string()) - } - }; - } - - map -} - -// FIXME: copy/paste -fn _tags_to_json(tags: &[Tag]) -> IndyResult { - serde_json::to_string(&_tags_to_plain(tags)).to_indy( - IndyErrorKind::InvalidState, - "Unable to serialize tags as json", - ) -} - -// FIXME: copy/paste -fn _tags_from_json(json: serde_json::Value) -> IndyResult> { - let string_tags: HashMap = serde_json::from_value(json).to_indy( - IndyErrorKind::InvalidState, - "Unable to deserialize tags from json", - )?; - - let mut tags = Vec::with_capacity(string_tags.len()); - - for (k, v) in string_tags { - if k.starts_with('~') { - let mut key = k; - key.remove(0); - tags.push(Tag::PlainText( - base64::decode(&key).to_indy( - IndyErrorKind::InvalidState, - "Unable to decode tag key from base64", - )?, - v, - )); - } else { - tags.push(Tag::Encrypted( - 
base64::decode(&k).to_indy( - IndyErrorKind::InvalidState, - "Unable to decode tag key from base64", - )?, - base64::decode(&v).to_indy( - IndyErrorKind::InvalidState, - "Unable to decode tag value from base64", - )?, - )); - } - } - Ok(tags) -} - -// FIXME: copy/paste -fn _tag_names_to_plain(tag_names: &[TagName]) -> Vec { - tag_names - .iter() - .map(|tag_name| match *tag_name { - TagName::OfEncrypted(ref tag_name) => base64::encode(tag_name), - TagName::OfPlain(ref tag_name) => format!("~{}", base64::encode(tag_name)), - }) - .collect() -} - -// FIXME: copy/paste -fn _tag_names_to_json(tag_names: &[TagName]) -> IndyResult { - serde_json::to_string(&_tag_names_to_plain(tag_names)).to_indy( - IndyErrorKind::InvalidState, - "Unable to serialize tag names as json", - ) -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/query.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/query.rs deleted file mode 100644 index d54f4348a0..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/query.rs +++ /dev/null @@ -1,315 +0,0 @@ -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::base64; -use serde_json::Value; - -use crate::{ - language::{Operator, TagName, TargetValue}, - SearchOptions, -}; - -pub fn wql_to_sql( - wallet_id: i64, - type_: &[u8], - wql: &Operator, - options: &SearchOptions, -) -> IndyResult<(String, Vec)> { - let mut arguments: Vec = Vec::new(); - - let query_condition = match operator_to_sql(wql, &mut arguments) { - Ok(query_condition) => query_condition, - Err(err) => return Err(err), - }; - - let query_string = format!( - "SELECT {}, name, {}, {} FROM items WHERE {} type = ? AND wallet_id = ?", - if options.retrieve_type { - "type" - } else { - "NULL" - }, - if options.retrieve_value { - "value" - } else { - "NULL" - }, - if options.retrieve_tags { - "tags" - } else { - "NULL" - }, - if !query_condition.is_empty() { - query_condition + " AND" - } else { - "".to_string() - } - ); - - arguments.push(base64::encode(type_).into()); - arguments.push(wallet_id.into()); - - Ok((query_string, arguments)) -} - -pub fn wql_to_sql_count( - wallet_id: i64, - type_: &[u8], - wql: &Operator, -) -> IndyResult<(String, Vec)> { - let mut arguments: Vec = Vec::new(); - - let query_condition = match operator_to_sql(wql, &mut arguments) { - Ok(query_condition) => query_condition, - Err(err) => return Err(err), - }; - - let query_string = format!( - "SELECT count(*) FROM items i WHERE {} i.type = ? 
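// Editor's sketch (not part of the original diff): _tags_to_plain/_tags_from_json
// above store encrypted tag names under their base64 form as-is and prefix
// plaintext tag names with "~" so the two kinds can be told apart when reading the
// JSON back. A std-only illustration of that naming convention, with hex standing
// in for the base64 helper from indy_utils:
#[derive(Debug, PartialEq)]
enum DemoTag {
    Encrypted(Vec<u8>, Vec<u8>),
    PlainText(Vec<u8>, String),
}

fn hex(bytes: &[u8]) -> String {
    bytes.iter().map(|b| format!("{b:02x}")).collect()
}

fn stored_key(tag: &DemoTag) -> String {
    match tag {
        // Encrypted names are stored under their encoded form unchanged.
        DemoTag::Encrypted(name, _) => hex(name),
        // Plaintext names get a "~" marker so readers know the value is not encrypted.
        DemoTag::PlainText(name, _) => format!("~{}", hex(name)),
    }
}

fn is_plaintext(stored: &str) -> bool {
    stored.starts_with('~')
}

fn main() {
    let enc = DemoTag::Encrypted(vec![1, 5, 8], vec![3, 5, 6]);
    let plain = DemoTag::PlainText(vec![1, 5, 8, 1], "Plain value".to_string());
    assert!(!is_plaintext(&stored_key(&enc)));
    assert!(is_plaintext(&stored_key(&plain)));
}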
AND i.wallet_id = ?", - if !query_condition.is_empty() { - query_condition + " AND" - } else { - "".to_string() - } - ); - - arguments.push(base64::encode(type_).into()); - arguments.push(wallet_id.into()); - - Ok((query_string, arguments)) -} - -fn operator_to_sql(op: &Operator, arguments: &mut Vec) -> IndyResult { - match *op { - Operator::Eq(ref tag_name, ref target_value) => { - Ok(eq_to_sql(tag_name, target_value, arguments)) - } - Operator::Neq(ref tag_name, ref target_value) => { - Ok(neq_to_sql(tag_name, target_value, arguments)) - } - Operator::Gt(ref tag_name, ref target_value) => { - gt_to_sql(tag_name, target_value, arguments) - } - Operator::Gte(ref tag_name, ref target_value) => { - gte_to_sql(tag_name, target_value, arguments) - } - Operator::Lt(ref tag_name, ref target_value) => { - lt_to_sql(tag_name, target_value, arguments) - } - Operator::Lte(ref tag_name, ref target_value) => { - lte_to_sql(tag_name, target_value, arguments) - } - Operator::Like(ref tag_name, ref target_value) => { - like_to_sql(tag_name, target_value, arguments) - } - Operator::In(ref tag_name, ref target_values) => { - Ok(in_to_sql(tag_name, target_values, arguments)) - } - Operator::And(ref suboperators) => and_to_sql(suboperators, arguments), - Operator::Or(ref suboperators) => or_to_sql(suboperators, arguments), - Operator::Not(ref suboperator) => not_to_sql(suboperator, arguments), - } -} - -fn eq_to_sql(tag_name: &TagName, tag_value: &TargetValue, arguments: &mut Vec) -> String { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - - arguments.push(tag_value.to_plain().into()); - format!("(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) = ?)", tag_path) -} - -fn neq_to_sql(tag_name: &TagName, tag_value: &TargetValue, arguments: &mut Vec) -> String { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - - arguments.push(tag_value.to_plain().into()); - format!("(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) != ?)", tag_path) -} - -fn gt_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) > ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $gt operator", - )), - } -} - -fn gte_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) >= ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $gt operator", - )), - } -} - -fn lt_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) < ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lt operator", - )), - } -} - -fn 
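// Editor's sketch (not part of the original diff): each WQL operator above becomes
// a parameterised fragment over JSON_EXTRACT on the tags column, pushing the
// comparison value into the argument list rather than splicing it into the SQL.
// A std-only sketch of the $eq case; tag_name_plain is assumed to already be the
// encoded ("plain") form produced by ToPlain:
fn eq_fragment(tag_name_plain: &str, value_plain: &str, args: &mut Vec<String>) -> String {
    let tag_path = format!(r#"'$."{}"'"#, tag_name_plain);
    args.push(value_plain.to_owned()); // bound as a ? parameter, never interpolated
    format!("(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) = ?)", tag_path)
}

fn main() {
    let mut args = Vec::new();
    let sql = eq_fragment("~dGFn", "blue", &mut args);
    assert_eq!(sql, r#"(JSON_UNQUOTE(JSON_EXTRACT(tags, '$."~dGFn"')) = ?)"#);
    assert_eq!(args, vec!["blue".to_string()]);
}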
lte_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) <= ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lt operator", - )), - } -} - -fn like_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) LIKE ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lt operator", - )), - } -} - -fn in_to_sql(tag_name: &TagName, tag_values: &[TargetValue], arguments: &mut Vec) -> String { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - let mut in_string = format!("JSON_UNQUOTE(JSON_EXTRACT(tags, {})) IN (", tag_path); - - for (index, tag_value) in tag_values.iter().enumerate() { - in_string.push('?'); - if index < tag_values.len() - 1 { - in_string.push(','); - } else { - in_string.push(')'); - } - - arguments.push(tag_value.to_plain().into()); - } - - in_string -} - -fn and_to_sql(suboperators: &[Operator], arguments: &mut Vec) -> IndyResult { - join_operators(suboperators, " AND ", arguments) -} - -fn or_to_sql(suboperators: &[Operator], arguments: &mut Vec) -> IndyResult { - join_operators(suboperators, " OR ", arguments) -} - -fn not_to_sql(suboperator: &Operator, arguments: &mut Vec) -> IndyResult { - let suboperator_string = operator_to_sql(suboperator, arguments)?; - Ok("NOT (".to_string() + &suboperator_string + ")") -} - -fn join_operators( - operators: &[Operator], - join_str: &str, - arguments: &mut Vec, -) -> IndyResult { - let mut s = String::new(); - - if !operators.is_empty() { - s.push('('); - for (index, operator) in operators.iter().enumerate() { - let operator_string = operator_to_sql(operator, arguments)?; - - s.push_str(&operator_string); - - if index < operators.len() - 1 { - s.push_str(join_str); - } - } - - s.push(')'); - } - - Ok(s) -} - -// FIXME: It is quite smilar for to_string method of tag and value, but for some reason -// to_string uses "". It is added to avoid potential damage as i have no time -// for investigation. 
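// Editor's sketch (not part of the original diff): $and/$or/$not above are built by
// joining already-rendered sub-fragments with " AND " / " OR " and wrapping them in
// parentheses, while an empty operator list renders nothing. A std-only version of
// that joining step over plain string fragments:
fn join_fragments(fragments: &[String], join_str: &str) -> String {
    if fragments.is_empty() {
        return String::new();
    }
    format!("({})", fragments.join(join_str))
}

fn negate(fragment: &str) -> String {
    format!("NOT ({})", fragment)
}

fn main() {
    let subs = vec!["(a = ?)".to_string(), "(b = ?)".to_string()];
    assert_eq!(join_fragments(&subs, " AND "), "((a = ?) AND (b = ?))");
    assert_eq!(negate("(a = ?)"), "NOT ((a = ?))");
    assert_eq!(join_fragments(&[], " OR "), ""); // empty operator list renders nothing
}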
-trait ToPlain { - fn to_plain(&self) -> String; -} - -impl ToPlain for TagName { - fn to_plain(&self) -> String { - match *self { - TagName::EncryptedTagName(ref v) => base64::encode(v), - TagName::PlainTagName(ref v) => format!("~{}", base64::encode(v)), - } - } -} - -impl ToPlain for TargetValue { - fn to_plain(&self) -> String { - match *self { - TargetValue::Unencrypted(ref s) => s.to_owned(), - TargetValue::Encrypted(ref v) => base64::encode(v), - } - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/wallet.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/wallet.rs deleted file mode 100644 index 3f11b26b8a..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/wallet.rs +++ /dev/null @@ -1,487 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use futures::future::join; -use indy_api_types::errors::prelude::*; -use indy_utils::{ - crypto::{chacha20poly1305_ietf, hmacsha256}, - wql::Query, -}; -use serde::{Deserialize, Serialize}; -use zeroize::Zeroize; - -use crate::{ - cache::wallet_cache::{WalletCache, WalletCacheHitMetrics}, - encryption::*, - iterator::WalletIterator, - query_encryption::encrypt_query, - storage, - storage::StorageRecord, - RecordOptions, WalletRecord, -}; - -#[derive(Serialize, Deserialize)] -pub struct Keys { - pub type_key: chacha20poly1305_ietf::Key, - pub name_key: chacha20poly1305_ietf::Key, - pub value_key: chacha20poly1305_ietf::Key, - pub item_hmac_key: hmacsha256::Key, - pub tag_name_key: chacha20poly1305_ietf::Key, - pub tag_value_key: chacha20poly1305_ietf::Key, - pub tags_hmac_key: hmacsha256::Key, -} - -#[allow(clippy::new_without_default)] -impl Keys { - pub fn new() -> Keys { - Keys { - type_key: chacha20poly1305_ietf::gen_key(), - name_key: chacha20poly1305_ietf::gen_key(), - value_key: chacha20poly1305_ietf::gen_key(), - item_hmac_key: hmacsha256::gen_key(), - tag_name_key: chacha20poly1305_ietf::gen_key(), - tag_value_key: chacha20poly1305_ietf::gen_key(), - tags_hmac_key: hmacsha256::gen_key(), - } - } - - pub fn serialize_encrypted( - &self, - master_key: &chacha20poly1305_ietf::Key, - ) -> IndyResult> { - let mut serialized = rmp_serde::to_vec(self) - .to_indy(IndyErrorKind::InvalidState, "Unable to serialize keys")?; - - let encrypted = encrypt_as_not_searchable(&serialized, master_key); - - serialized.zeroize(); - Ok(encrypted) - } - - pub fn deserialize_encrypted( - bytes: &[u8], - master_key: &chacha20poly1305_ietf::Key, - ) -> IndyResult { - let mut decrypted = decrypt_merged(bytes, master_key)?; - - let keys: Keys = rmp_serde::from_slice(&decrypted) - .to_indy(IndyErrorKind::InvalidState, "Invalid bytes for Key")?; - - decrypted.zeroize(); - Ok(keys) - } -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct EncryptedValue { - pub data: Vec, - pub key: Vec, -} - -#[allow(dead_code)] -const ENCRYPTED_KEY_LEN: usize = chacha20poly1305_ietf::TAGBYTES - + chacha20poly1305_ietf::NONCEBYTES - + chacha20poly1305_ietf::KEYBYTES; - -impl EncryptedValue { - pub fn new(data: Vec, key: Vec) -> Self { - Self { data, key } - } - - pub fn encrypt(data: &str, key: &chacha20poly1305_ietf::Key) -> Self { - let value_key = chacha20poly1305_ietf::gen_key(); - EncryptedValue::new( - encrypt_as_not_searchable(data.as_bytes(), &value_key), - encrypt_as_not_searchable(&value_key[..], key), - ) - } - - pub fn decrypt(&self, key: &chacha20poly1305_ietf::Key) -> IndyResult { - let mut value_key_bytes = decrypt_merged(&self.key, key)?; - - let value_key = chacha20poly1305_ietf::Key::from_slice(&value_key_bytes) - .map_err(|err| 
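// Editor's sketch (not part of the original diff): serialize_encrypted and decrypt
// above wipe intermediate plaintext buffers with zeroize once the data has been
// re-encrypted or decoded. A minimal illustration of that discipline, assuming the
// zeroize crate; the XOR "cipher" is a deliberately fake stand-in for
// encrypt_as_not_searchable and is NOT real cryptography.
use zeroize::Zeroize;

fn wrap_secret(mut plaintext_key: Vec<u8>) -> Vec<u8> {
    // Stand-in transformation only; the real code uses ChaCha20-Poly1305.
    let ciphertext: Vec<u8> = plaintext_key.iter().map(|b| b ^ 0xAA).collect();
    // Wipe the sensitive intermediate bytes before the buffer is dropped.
    plaintext_key.zeroize();
    ciphertext
}

fn main() {
    let wrapped = wrap_secret(vec![1, 2, 3, 4]);
    assert_eq!(wrapped.len(), 4);
}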
err.extend("Invalid value key"))?; // FIXME: review kind - - value_key_bytes.zeroize(); - - let res = String::from_utf8(decrypt_merged(&self.data, &value_key)?).to_indy( - IndyErrorKind::InvalidState, - "Invalid UTF8 string inside of value", - )?; - - Ok(res) - } - - #[allow(dead_code)] - pub fn to_bytes(&self) -> Vec { - let mut result = self.key.clone(); - result.extend_from_slice(self.data.as_slice()); - result - } - - #[allow(dead_code)] - pub fn from_bytes(joined_data: &[u8]) -> IndyResult { - // value_key is stored as NONCE || CYPHERTEXT. Lenth of CYPHERTHEXT is length of DATA + - // length of TAG. - if joined_data.len() < ENCRYPTED_KEY_LEN { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "Unable to split value_key from value: value too short", - )); // FIXME: review kind - } - - let value_key = joined_data[..ENCRYPTED_KEY_LEN].to_owned(); - let value = joined_data[ENCRYPTED_KEY_LEN..].to_owned(); - Ok(EncryptedValue { - data: value, - key: value_key, - }) - } -} - -pub(super) struct Wallet { - id: String, - storage: Box, - keys: Arc, - cache: WalletCache, -} - -impl Wallet { - pub fn new( - id: String, - storage: Box, - keys: Arc, - cache: WalletCache, - ) -> Wallet { - Wallet { - id, - storage, - keys, - cache, - } - } - - pub async fn add( - &self, - type_: &str, - name: &str, - value: &str, - tags: &HashMap, - cache_record: bool, - ) -> IndyResult<()> { - let etype = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let ename = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let evalue = EncryptedValue::encrypt(value, &self.keys.value_key); - - let etags = encrypt_tags( - tags, - &self.keys.tag_name_key, - &self.keys.tag_value_key, - &self.keys.tags_hmac_key, - ); - - self.storage.add(&etype, &ename, &evalue, &etags).await?; - if cache_record { - self.cache.add(type_, &etype, &ename, &evalue, &etags); - } - - Ok(()) - } - - pub async fn add_tags( - &self, - type_: &str, - name: &str, - tags: &HashMap, - ) -> IndyResult<()> { - let encrypted_type = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let encrypted_name = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let encrypted_tags = encrypt_tags( - tags, - &self.keys.tag_name_key, - &self.keys.tag_value_key, - &self.keys.tags_hmac_key, - ); - - self.storage - .add_tags(&encrypted_type, &encrypted_name, &encrypted_tags) - .await?; - self.cache - .add_tags(type_, &encrypted_type, &encrypted_name, &encrypted_tags) - .await; - - Ok(()) - } - - pub async fn update_tags( - &self, - type_: &str, - name: &str, - tags: &HashMap, - ) -> IndyResult<()> { - let encrypted_type = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let encrypted_name = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let encrypted_tags = encrypt_tags( - tags, - &self.keys.tag_name_key, - &self.keys.tag_value_key, - &self.keys.tags_hmac_key, - ); - - self.storage - .update_tags(&encrypted_type, &encrypted_name, &encrypted_tags) - .await?; - self.cache - .update_tags(type_, &encrypted_type, &encrypted_name, &encrypted_tags) - .await; - - Ok(()) - } - - pub async fn delete_tags(&self, type_: &str, name: &str, tag_names: &[&str]) -> IndyResult<()> { - let encrypted_type = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - 
&self.keys.item_hmac_key, - ); - - let encrypted_name = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let encrypted_tag_names = - encrypt_tag_names(tag_names, &self.keys.tag_name_key, &self.keys.tags_hmac_key); - - self.storage - .delete_tags(&encrypted_type, &encrypted_name, &encrypted_tag_names[..]) - .await?; - self.cache - .delete_tags( - type_, - &encrypted_type, - &encrypted_name, - &encrypted_tag_names[..], - ) - .await; - - Ok(()) - } - - pub async fn update(&self, type_: &str, name: &str, new_value: &str) -> IndyResult<()> { - let encrypted_type = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let encrypted_name = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let encrypted_value = EncryptedValue::encrypt(new_value, &self.keys.value_key); - - self.storage - .update(&encrypted_type, &encrypted_name, &encrypted_value) - .await?; - self.cache - .update(type_, &encrypted_type, &encrypted_name, &encrypted_value) - .await; - - Ok(()) - } - - pub async fn get( - &self, - type_: &str, - name: &str, - options: &str, - cache_hit_metrics: &WalletCacheHitMetrics, - ) -> IndyResult { - let etype = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let ename = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let result = if self.cache.is_type_cacheable(type_) { - let record_options: RecordOptions = serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "RecordOptions is malformed json", - )?; - - match self.cache.get(type_, &etype, &ename, &record_options).await { - Some(result) => { - cache_hit_metrics.inc_cache_hit(type_).await; - result - } - None => { - // no item in cache, lets retrieve it and put it in cache. - let metrics_fut = cache_hit_metrics.inc_cache_miss(type_); - let full_options = RecordOptions { - retrieve_type: record_options.retrieve_type, - retrieve_value: true, - retrieve_tags: true, - }; - - let full_options = serde_json::to_string(&full_options).unwrap(); - - let storage_fut = self.storage.get(&etype, &ename, &full_options); - // run these two futures in parallel. - let full_result = join(storage_fut, metrics_fut).await.0?; - - // save to cache only if valid data is returned (this should be always true). - if let (Some(evalue), Some(etags)) = (&full_result.value, &full_result.tags) { - self.cache.add(type_, &etype, &ename, evalue, etags); - } - StorageRecord { - id: full_result.id, - type_: if record_options.retrieve_type { - Some(etype) - } else { - None - }, - value: if record_options.retrieve_value { - full_result.value - } else { - None - }, - tags: if record_options.retrieve_tags { - full_result.tags - } else { - None - }, - } - } - } - } else { - let metrics_fut = cache_hit_metrics.inc_not_cached(type_); - let storage_fut = self.storage.get(&etype, &ename, options); - // run these two futures in parallel. - join(storage_fut, metrics_fut).await.0? 
- }; - - let value = match result.value { - None => None, - Some(encrypted_value) => Some(encrypted_value.decrypt(&self.keys.value_key)?), - }; - - let tags = decrypt_tags( - &result.tags, - &self.keys.tag_name_key, - &self.keys.tag_value_key, - )?; - - Ok(WalletRecord::new( - String::from(name), - result.type_.map(|_| type_.to_string()), - value, - tags, - )) - } - - pub async fn delete(&self, type_: &str, name: &str) -> IndyResult<()> { - let etype = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let ename = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - self.storage.delete(&etype, &ename).await?; - self.cache.delete(type_, &etype, &ename).await; - - Ok(()) - } - - pub async fn search( - &self, - type_: &str, - query: &str, - options: Option<&str>, - ) -> IndyResult { - let parsed_query: Query = ::serde_json::from_str::(query) - .map_err(|err| IndyError::from_msg(IndyErrorKind::WalletQueryError, err))? - .optimise() - .unwrap_or_default(); - - let encrypted_query = encrypt_query(parsed_query, &self.keys)?; - - let encrypted_type_ = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let storage_iterator = self - .storage - .search(&encrypted_type_, &encrypted_query, options) - .await?; - - let wallet_iterator = WalletIterator::new(storage_iterator, Arc::clone(&self.keys)); - - Ok(wallet_iterator) - } - - fn close(&mut self) -> IndyResult<()> { - self.storage.close() - } - - pub async fn get_all(&self) -> IndyResult { - let all_items = self.storage.get_all().await?; - Ok(WalletIterator::new(all_items, self.keys.clone())) - } - - pub fn get_id(&self) -> &str { - &self.id - } -} - -impl Drop for Wallet { - fn drop(&mut self) { - self.close().unwrap(); //FIXME pass the error to the API cb - } -} diff --git a/aries/misc/legacy/libvdrtools/src/controllers/crypto.rs b/aries/misc/legacy/libvdrtools/src/controllers/crypto.rs deleted file mode 100644 index 2cd94972ae..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/crypto.rs +++ /dev/null @@ -1,594 +0,0 @@ -use std::{collections::HashMap, str, sync::Arc}; - -use indy_api_types::{errors::prelude::*, WalletHandle}; -use indy_utils::crypto::{base64, chacha20poly1305_ietf}; -use indy_wallet::RecordOptions; - -use crate::{ - domain::crypto::{ - key::{Key, KeyInfo}, - pack::*, - }, - services::{CryptoService, WalletService}, -}; - -pub const PROTECTED_HEADER_ENC: &str = "xchacha20poly1305_ietf"; -pub const PROTECTED_HEADER_TYP: &str = "JWM/1.0"; -pub const PROTECTED_HEADER_ALG_AUTH: &str = "Authcrypt"; -pub const PROTECTED_HEADER_ALG_ANON: &str = "Anoncrypt"; - -pub struct CryptoController { - wallet_service: Arc, - crypto_service: Arc, -} - -impl CryptoController { - pub(crate) fn new( - wallet_service: Arc, - crypto_service: Arc, - ) -> CryptoController { - CryptoController { - wallet_service, - crypto_service, - } - } - - /// Creates keys pair and stores in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// key_json: Key information as json. Example: - /// { - /// "seed": string, (optional) Seed that allows deterministic key creation (if not set - /// random one will be created). Can be UTF-8, base64 or hex - /// string. "crypto_type": string, // Optional (if not set then ed25519 curve is used); - /// Currently only 'ed25519' value is supported for this field. 
} - /// - /// #Returns - /// verkey: Ver key of generated key pair, also used as key identifier - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn create_key( - &self, - wallet_handle: WalletHandle, - key_info: &KeyInfo, - ) -> IndyResult { - debug!( - "create_key >>> wallet_handle: {:?}, key_info: {:?}", - wallet_handle, - secret!(key_info) - ); - - let key = self.crypto_service.create_key(key_info).await?; - - self.wallet_service - .add_indy_object(wallet_handle, &key.verkey, &key, &HashMap::new()) - .await?; - - let res = key.verkey.to_string(); - debug!("create_key <<< res: {:?}", res); - Ok(res) - } - - /// Signs a message with a key. - /// - /// Note to use DID keys with this function you can call indy_key_for_did to get key id (verkey) - /// for specific DID. - /// - /// #Params - - /// wallet_handle: wallet handler (created by open_wallet). - /// signer_vk: id (verkey) of message signer. The key must be created by calling indy_create_key - /// or indy_create_and_store_my_did message_raw: a pointer to first byte of message to be - /// signed message_len: a message length - /// - /// #Returns - /// a signature string - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn crypto_sign( - &self, - wallet_handle: WalletHandle, - my_vk: &str, - msg: &[u8], - ) -> IndyResult> { - trace!( - "crypto_sign >>> wallet_handle: {:?}, sender_vk: {:?}, msg: {:?}", - wallet_handle, - my_vk, - msg - ); - - self.crypto_service.validate_key(my_vk).await?; - - let key: Key = self - .wallet_service - .get_indy_object(wallet_handle, my_vk, &RecordOptions::id_value()) - .await?; - - let res = self.crypto_service.sign(&key, msg).await?; - - trace!("crypto_sign <<< res: {:?}", res); - - Ok(res) - } - - /// Verify a signature with a verkey. - /// - /// Note to use DID keys with this function you can call indy_key_for_did to get key id (verkey) - /// for specific DID. - /// - /// #Params - - /// signer_vk: verkey of the message signer - /// message_raw: a pointer to first byte of message that has been signed - /// message_len: a message length - /// signature_raw: a pointer to first byte of signature to be verified - /// signature_len: a signature length - /// - /// #Returns - /// valid: true - if signature is valid, false - otherwise - /// - /// #Errors - /// Common* - /// Wallet* - /// Ledger* - /// Crypto* - pub async fn crypto_verify( - &self, - their_vk: &str, - msg: &[u8], - signature: &[u8], - ) -> IndyResult { - trace!( - "crypto_verify >>> their_vk: {:?}, msg: {:?}, signature: {:?}", - their_vk, - msg, - signature - ); - - self.crypto_service.validate_key(their_vk).await?; - - let res = self.crypto_service.verify(their_vk, msg, signature).await?; - - trace!("crypto_verify <<< res: {:?}", res); - - Ok(res) - } - - /// Packs a message by encrypting the message and serializes it in a JWE-like format - /// (Experimental) - /// - /// Note to use DID keys with this function you can call indy_key_for_did to get key id (verkey) - /// for specific DID. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). - /// message: a pointer to the first byte of the message to be packed - /// message_len: the length of the message - /// receivers: a string in the format of a json list which will contain the list of receiver's - /// keys the message is being encrypted for. 
- /// Example: - /// "[, ]" - /// sender: the sender's verkey as a string When null pointer is used in this parameter, - /// anoncrypt is used - /// - /// #Returns - /// a JWE using authcrypt alg is defined below: - /// { - /// "protected": "b64URLencoded({ - /// "enc": "xsalsa20poly1305", - /// "typ": "JWM/1.0", - /// "alg": "Authcrypt", - /// "recipients": [ - /// { - /// "encrypted_key": base64URLencode(libsodium.crypto_box(my_key, their_vk, cek, - /// cek_iv)) "header": { - /// "kid": "base58encode(recipient_verkey)", - /// "sender" : base64URLencode(libsodium.crypto_box_seal(their_vk, - /// base58encode(sender_vk)), "iv" : base64URLencode(cek_iv) - /// } - /// }, - /// ], - /// })", - /// "iv": , - /// "ciphertext": b64URLencode(encrypt_detached({'@type'...}, protected_value_encoded, iv, - /// cek), "tag": - /// } - /// - /// Alternative example in using anoncrypt alg is defined below: - /// { - /// "protected": "b64URLencoded({ - /// "enc": "xsalsa20poly1305", - /// "typ": "JWM/1.0", - /// "alg": "Anoncrypt", - /// "recipients": [ - /// { - /// "encrypted_key": base64URLencode(libsodium.crypto_box_seal(their_vk, cek)), - /// "header": { - /// "kid": base58encode(recipient_verkey), - /// } - /// }, - /// ], - /// })", - /// "iv": b64URLencode(iv), - /// "ciphertext": b64URLencode(encrypt_detached({'@type'...}, protected_value_encoded, iv, - /// cek), "tag": b64URLencode(tag) - /// } - /// - /// - /// #Errors - /// Common* - /// Wallet* - /// Ledger* - /// Crypto* - // TODO: Refactor pack to be more modular to version changes or crypto_scheme changes - // this match statement is super messy, but the easiest way to comply with current architecture - pub async fn pack_msg( - &self, - message: Vec, - receiver_list: Vec, - sender_vk: Option, - wallet_handle: WalletHandle, - ) -> IndyResult> { - //break early and error out if no receivers keys are provided - if receiver_list.is_empty() { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "No receiver keys found".to_string(), - )); - } - - //generate content encryption key that will encrypt `message` - let cek = chacha20poly1305_ietf::gen_key(); - - let base64_protected = if let Some(sender_vk) = sender_vk { - self.crypto_service.validate_key(&sender_vk).await?; - - //returns authcrypted pack_message format. See Wire message format HIPE for details - self._prepare_protected_authcrypt(&cek, receiver_list, &sender_vk, wallet_handle) - .await? - } else { - //returns anoncrypted pack_message format. See Wire message format HIPE for details - self._prepare_protected_anoncrypt(&cek, receiver_list) - .await? 
- }; - - // Use AEAD to encrypt `message` with "protected" data as "associated data" - let (ciphertext, iv, tag) = - self.crypto_service - .encrypt_plaintext(message, &base64_protected, &cek); - - self._format_pack_message(&base64_protected, &ciphertext, &iv, &tag) - } - - async fn _prepare_protected_anoncrypt( - &self, - cek: &chacha20poly1305_ietf::Key, - receiver_list: Vec, - ) -> IndyResult { - let mut encrypted_recipients_struct: Vec = - Vec::with_capacity(receiver_list.len()); - - for their_vk in receiver_list { - //encrypt sender verkey - let enc_cek = self - .crypto_service - .crypto_box_seal(&their_vk, &cek[..]) - .await?; - - //create recipient struct and push to encrypted list - encrypted_recipients_struct.push(Recipient { - encrypted_key: base64::encode_urlsafe(enc_cek.as_slice()), - header: Header { - kid: their_vk, - sender: None, - iv: None, - }, - }); - } // end for-loop - - self._base64_encode_protected(encrypted_recipients_struct, false) - } - - async fn _prepare_protected_authcrypt( - &self, - cek: &chacha20poly1305_ietf::Key, - receiver_list: Vec, - sender_vk: &str, - wallet_handle: WalletHandle, - ) -> IndyResult { - let mut encrypted_recipients_struct: Vec = vec![]; - - //get my_key from my wallet - let my_key = self - .wallet_service - .get_indy_object(wallet_handle, sender_vk, &RecordOptions::id_value()) - .await?; - - //encrypt cek for recipient - for their_vk in receiver_list { - let (enc_cek, iv) = self - .crypto_service - .crypto_box(&my_key, &their_vk, &cek[..]) - .await?; - - let enc_sender = self - .crypto_service - .crypto_box_seal(&their_vk, sender_vk.as_bytes()) - .await?; - - //create recipient struct and push to encrypted list - encrypted_recipients_struct.push(Recipient { - encrypted_key: base64::encode_urlsafe(enc_cek.as_slice()), - header: Header { - kid: their_vk, - sender: Some(base64::encode_urlsafe(enc_sender.as_slice())), - iv: Some(base64::encode_urlsafe(iv.as_slice())), - }, - }); - } // end for-loop - - self._base64_encode_protected(encrypted_recipients_struct, true) - } - - fn _base64_encode_protected( - &self, - encrypted_recipients_struct: Vec, - alg_is_authcrypt: bool, - ) -> IndyResult { - let alg_val = if alg_is_authcrypt { - String::from(PROTECTED_HEADER_ALG_AUTH) - } else { - String::from(PROTECTED_HEADER_ALG_ANON) - }; - - //structure protected and base64URL encode it - let protected_struct = Protected { - enc: PROTECTED_HEADER_ENC.to_string(), - typ: PROTECTED_HEADER_TYP.to_string(), - alg: alg_val, - recipients: encrypted_recipients_struct, - }; - let protected_encoded = serde_json::to_string(&protected_struct).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to serialize protected field {}", err), - ) - })?; - - Ok(base64::encode_urlsafe(protected_encoded.as_bytes())) - } - - fn _format_pack_message( - &self, - base64_protected: &str, - ciphertext: &str, - iv: &str, - tag: &str, - ) -> IndyResult> { - //serialize pack message and return as vector of bytes - let jwe_struct = JWE { - protected: base64_protected.to_string(), - iv: iv.to_string(), - ciphertext: ciphertext.to_string(), - tag: tag.to_string(), - }; - - serde_json::to_vec(&jwe_struct).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to serialize JWE {}", err), - ) - }) - } - - /// Unpacks a JWE-like formatted message outputted by indy_pack_message (Experimental) - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). 
- /// jwe_data: a pointer to the first byte of the JWE to be unpacked - /// jwe_len: the length of the JWE message in bytes - /// - /// #Returns - /// if authcrypt was used to pack the message returns this json structure: - /// { - /// message: , - /// sender_verkey: , - /// recipient_verkey: - /// } - /// - /// OR - /// - /// if anoncrypt was used to pack the message returns this json structure: - /// { - /// message: , - /// recipient_verkey: - /// } - /// - /// - /// #Errors - /// Common* - /// Wallet* - /// Ledger* - /// Crypto* - pub async fn unpack_msg( - &self, - jwe_struct: JWE, - wallet_handle: WalletHandle, - ) -> IndyResult> { - //decode protected data - let protected_decoded_vec = base64::decode_urlsafe(&jwe_struct.protected)?; - let protected_decoded_str = String::from_utf8(protected_decoded_vec).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to utf8 encode data {}", err), - ) - })?; - //convert protected_data_str to struct - let protected_struct: Protected = - serde_json::from_str(&protected_decoded_str).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to deserialize protected data {}", err), - ) - })?; - - //extract recipient that matches a key in the wallet - let (recipient, is_auth_recipient) = self - ._find_correct_recipient(protected_struct, wallet_handle) - .await?; - - //get cek and sender data - let (sender_verkey_option, cek) = if is_auth_recipient { - self._unpack_cek_authcrypt(recipient.clone(), wallet_handle) - .await - } else { - self._unpack_cek_anoncrypt(recipient.clone(), wallet_handle) - .await - }?; //close cek and sender_data match statement - - //decrypt message - let message = self.crypto_service.decrypt_ciphertext( - &jwe_struct.ciphertext, - &jwe_struct.protected, - &jwe_struct.iv, - &jwe_struct.tag, - &cek, - )?; - - //serialize and return decrypted message - let res = UnpackMessage { - message, - sender_verkey: sender_verkey_option, - recipient_verkey: recipient.header.kid, - }; - - serde_json::to_vec(&res).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to serialize message {}", err), - ) - }) - } - - async fn _find_correct_recipient( - &self, - protected_struct: Protected, - wallet_handle: WalletHandle, - ) -> IndyResult<(Recipient, bool)> { - for recipient in protected_struct.recipients { - let my_key_res = self - .wallet_service - .get_indy_object::( - wallet_handle, - &recipient.header.kid, - &RecordOptions::id_value(), - ) - .await; - - if my_key_res.is_ok() { - return Ok((recipient.clone(), recipient.header.sender.is_some())); - } - } - Err(IndyError::from(IndyErrorKind::WalletItemNotFound)) - } - - async fn _unpack_cek_authcrypt( - &self, - recipient: Recipient, - wallet_handle: WalletHandle, - ) -> IndyResult<(Option, chacha20poly1305_ietf::Key)> { - let encrypted_key_vec = base64::decode_urlsafe(&recipient.encrypted_key)?; - let iv = base64::decode_urlsafe(&recipient.header.iv.unwrap())?; - let enc_sender_vk = base64::decode_urlsafe(&recipient.header.sender.unwrap())?; - - //get my private key - let my_key = self - .wallet_service - .get_indy_object( - wallet_handle, - &recipient.header.kid, - &RecordOptions::id_value(), - ) - .await?; - - //decrypt sender_vk - let sender_vk_vec = self - .crypto_service - .crypto_box_seal_open(&my_key, enc_sender_vk.as_slice()) - .await?; - let sender_vk = String::from_utf8(sender_vk_vec).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to utf-8 encode sender_vk {}", err), - ) - })?; 
- - //decrypt cek - let cek_as_vec = self - .crypto_service - .crypto_box_open( - &my_key, - &sender_vk, - encrypted_key_vec.as_slice(), - iv.as_slice(), - ) - .await?; - - //convert cek to chacha Key struct - let cek: chacha20poly1305_ietf::Key = - chacha20poly1305_ietf::Key::from_slice(&cek_as_vec[..]).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decrypt cek {}", err), - ) - })?; - - Ok((Some(sender_vk), cek)) - } - - async fn _unpack_cek_anoncrypt( - &self, - recipient: Recipient, - wallet_handle: WalletHandle, - ) -> IndyResult<(Option, chacha20poly1305_ietf::Key)> { - let encrypted_key_vec = base64::decode_urlsafe(&recipient.encrypted_key)?; - - //get my private key - let my_key: Key = self - .wallet_service - .get_indy_object( - wallet_handle, - &recipient.header.kid, - &RecordOptions::id_value(), - ) - .await?; - - //decrypt cek - let cek_as_vec = self - .crypto_service - .crypto_box_seal_open(&my_key, encrypted_key_vec.as_slice()) - .await?; - - //convert cek to chacha Key struct - let cek: chacha20poly1305_ietf::Key = - chacha20poly1305_ietf::Key::from_slice(&cek_as_vec[..]).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decrypt cek {}", err), - ) - })?; - - Ok((None, cek)) - } -} diff --git a/aries/misc/legacy/libvdrtools/src/controllers/did.rs b/aries/misc/legacy/libvdrtools/src/controllers/did.rs deleted file mode 100644 index 95905df4fd..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/did.rs +++ /dev/null @@ -1,678 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use indy_api_types::{errors::prelude::*, WalletHandle}; -use indy_wallet::{RecordOptions, SearchOptions, WalletService}; - -use crate::{ - domain::crypto::{ - did::{ - Did, DidMetadata, DidValue, DidWithMeta, MyDidInfo, TemporaryDid, TheirDid, - TheirDidInfo, - }, - key::KeyInfo, - }, - services::CryptoService, - utils::crypto::base58::{DecodeBase58, ToBase58}, -}; - -pub struct DidController { - wallet_service: Arc, - crypto_service: Arc, -} - -impl DidController { - pub(crate) fn new( - wallet_service: Arc, - crypto_service: Arc, - ) -> DidController { - DidController { - wallet_service, - crypto_service, - } - } - - /// Creates keys (signing and encryption keys) for a new - /// DID (owned by the caller of the library). - /// Identity's DID must be either explicitly provided, or taken as the first 16 bit of verkey. - /// Saves the Identity DID with keys in a secured Wallet, so that it can be used to sign - /// and encrypt transactions. - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// did_info: Identity information as json. See domain::crypto::did::MyDidInfo - /// Example: - /// { - /// "did": string, (optional; - /// if not provided and cid param is false then the first 16 bit of the verkey will - /// be used as a new DID; if not provided and cid is true then the full verkey - /// will be used as a new DID; if provided, then keys will be replaced - key - /// rotation use case) "seed": string, (optional) Seed that allows deterministic did - /// creation (if not set random one will be created). Can be - /// UTF-8, base64 or hex string. "crypto_type": string, (optional; if not set then - /// ed25519 curve is used; currently only 'ed25519' value is supported for - /// this field) "cid": bool, (optional; if not set then false is used;) - /// "ledger_type": string, (optional) type of the ledger to create fully qualified did. 
- /// "method_name": string, (optional) method name to create fully qualified did. - /// } - /// - /// #Returns - /// did: DID generated and stored in the wallet - /// verkey: The DIDs verification key - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn create_and_store_my_did( - &self, - wallet_handle: WalletHandle, - my_did_info: MyDidInfo, - ) -> IndyResult<(String, String)> { - trace!( - "create_and_store_my_did > wallet_handle {:?} my_did_info_json {:?}", - wallet_handle, - secret!(&my_did_info) - ); - - let (did, key) = self.crypto_service.create_my_did(&my_did_info).await?; - - if let Ok(current_did) = self._wallet_get_my_did(wallet_handle, &did.did).await { - if did.verkey == current_did.verkey { - let res = Ok((did.did.0, did.verkey)); - trace!("create_and_store_my_did < already exists {:?}", res); - return res; - } else { - Err(err_msg( - IndyErrorKind::DIDAlreadyExists, - format!( - "DID \"{}\" already exists but with different Verkey. You should specify \ - Seed used for initial generation", - did.did.0 - ), - ))?; - } - } - - self.wallet_service - .add_indy_object(wallet_handle, &did.did.0, &did, &HashMap::new()) - .await?; - - let _ = self - .wallet_service - .add_indy_object(wallet_handle, &key.verkey, &key, &HashMap::new()) - .await - .ok(); - - let res = Ok((did.did.0, did.verkey)); - trace!("create_and_store_my_did < {:?}", res); - res - } - - /// Generated temporary keys (signing and encryption keys) for an existing - /// DID (owned by the caller of the library). - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// did: target did to rotate keys. - /// key_info: key information as json. Example: - /// { - /// "seed": string, (optional) Seed that allows deterministic key creation (if not set - /// random one will be created). Can be UTF-8, base64 or hex - /// string. "crypto_type": string, (optional; if not set then ed25519 curve is used; - /// currently only 'ed25519' value is supported for this field) - /// } - /// - /// #Returns - /// verkey: The DIDs verification key - /// - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn replace_keys_start( - &self, - wallet_handle: WalletHandle, - key_info: KeyInfo, - my_did: DidValue, - ) -> IndyResult { - trace!( - "replace_keys_start > wallet_handle {:?} key_info_json {:?} my_did {:?}", - wallet_handle, - secret!(&key_info), - my_did - ); - - self.crypto_service.validate_did(&my_did)?; - - let my_did = self._wallet_get_my_did(wallet_handle, &my_did).await?; - - let temporary_key = self.crypto_service.create_key(&key_info).await?; - - let my_temporary_did = TemporaryDid { - did: my_did.did, - verkey: temporary_key.verkey.clone(), - }; - - self.wallet_service - .add_indy_object( - wallet_handle, - &temporary_key.verkey, - &temporary_key, - &HashMap::new(), - ) - .await?; - - self.wallet_service - .add_indy_object( - wallet_handle, - &my_temporary_did.did.0, - &my_temporary_did, - &HashMap::new(), - ) - .await?; - - let res = Ok(my_temporary_did.verkey); - trace!("replace_keys_start < {:?}", res); - res - } - - /// Apply temporary keys as main for an existing DID (owned by the caller of the library). - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). 
- - /// did: DID stored in the wallet - /// - /// #Returns - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn replace_keys_apply( - &self, - wallet_handle: WalletHandle, - my_did: DidValue, - ) -> IndyResult<()> { - trace!( - "replace_keys_apply > wallet_handle {:?} my_did {:?}", - wallet_handle, - my_did - ); - - self.crypto_service.validate_did(&my_did)?; - - let my_did = self._wallet_get_my_did(wallet_handle, &my_did).await?; - - let my_temporary_did: TemporaryDid = self - .wallet_service - .get_indy_object(wallet_handle, &my_did.did.0, &RecordOptions::id_value()) - .await?; - - let my_did = Did::from(my_temporary_did); - - self.wallet_service - .update_indy_object(wallet_handle, &my_did.did.0, &my_did) - .await?; - - self.wallet_service - .delete_indy_record::(wallet_handle, &my_did.did.0) - .await?; - - let res = Ok(()); - trace!("replace_keys_apply < {:?}", res); - res - } - - /// Saves their DID for a pairwise connection in a secured Wallet, - /// so that it can be used to verify transaction. - /// Updates DID associated verkey in case DID already exists in the Wallet. - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// identity_json: Identity information as json. Example: - /// { - /// "did": string, (required) - /// "verkey": string - /// - optional is case of adding a new DID, and DID is cryptonym: did == verkey, - /// - mandatory in case of updating an existing DID - /// } - /// - /// #Returns - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn store_their_did( - &self, - wallet_handle: WalletHandle, - their_did_info: TheirDidInfo, - ) -> IndyResult<()> { - trace!( - "store_their_did > wallet_handle {:?} their_did_info {:?}", - wallet_handle, - their_did_info - ); - - let their_did = self - .crypto_service - .create_their_did(&their_did_info) - .await?; - - self.wallet_service - .upsert_indy_object(wallet_handle, &their_did.did.0, &their_did) - .await?; - - let res = Ok(()); - trace!("store_their_did < {:?}", res); - res - } - - /// Retrieves the information about the giving DID in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did - The DID to retrieve information. - /// - /// #Returns - /// did_with_meta: { - /// "did": string - DID stored in the wallet, - /// "verkey": string - The DIDs transport key (ver key, key id), - /// "tempVerkey": string - Temporary DIDs transport key (ver key, key id), exist only during - /// the rotation of the keys. After rotation is done, it becomes a - /// new verkey. 
"metadata": string - The meta information stored with the DID - /// } - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn get_my_did_with_meta( - &self, - wallet_handle: WalletHandle, - my_did: DidValue, - ) -> IndyResult { - trace!( - "get_my_did_with_meta > wallet_handle {:?} my_did {:?}", - wallet_handle, - my_did - ); - - let did = self - .wallet_service - .get_indy_object::(wallet_handle, &my_did.0, &RecordOptions::id_value()) - .await?; - - let metadata = self - .wallet_service - .get_indy_opt_object::( - wallet_handle, - &did.did.0, - &RecordOptions::id_value(), - ) - .await?; - - let temp_verkey = self - .wallet_service - .get_indy_opt_object::( - wallet_handle, - &did.did.0, - &RecordOptions::id_value(), - ) - .await?; - - let did_with_meta = DidWithMeta { - did: did.did, - verkey: did.verkey, - temp_verkey: temp_verkey.map(|tv| tv.verkey), - metadata: metadata.map(|m| m.value), - }; - - let did_with_meta = serde_json::to_string(&did_with_meta) - .to_indy(IndyErrorKind::InvalidState, "Can't serialize DID")?; - - let res = Ok(did_with_meta); - trace!("get_my_did_with_meta < {:?}", res); - res - } - - /// Retrieves the information about all DIDs stored in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// - /// #Returns - /// dids: [{ - /// "did": string - DID stored in the wallet, - /// "verkey": string - The DIDs transport key (ver key, key id)., - /// "metadata": string - The meta information stored with the DID - /// }] - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn list_my_dids_with_meta(&self, wallet_handle: WalletHandle) -> IndyResult { - trace!("list_my_dids_with_meta > wallet_handle {:?}", wallet_handle); - - let mut did_search = self - .wallet_service - .search_indy_records::(wallet_handle, "{}", &SearchOptions::id_value()) - .await?; - - let mut metadata_search = self - .wallet_service - .search_indy_records::(wallet_handle, "{}", &SearchOptions::id_value()) - .await?; - - let mut temporarydid_search = self - .wallet_service - .search_indy_records::(wallet_handle, "{}", &SearchOptions::id_value()) - .await?; - - let mut dids: Vec = Vec::new(); - - let mut metadata_map: HashMap = HashMap::new(); - let mut temporarydid_map: HashMap = HashMap::new(); - - while let Some(record) = metadata_search.fetch_next_record().await? { - let did_id = record.get_id(); - - let tup: DidMetadata = record - .get_value() - .ok_or(err_msg( - IndyErrorKind::InvalidState, - "No value for DID record", - )) - .and_then(|tags_json| { - serde_json::from_str(tags_json).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot deserialize Did {:?}", did_id), - ) - })?; - - metadata_map.insert(String::from(did_id), tup.value); - } - - while let Some(record) = temporarydid_search.fetch_next_record().await? { - let did_id = record.get_id(); - - let did: TemporaryDid = record - .get_value() - .ok_or(err_msg( - IndyErrorKind::InvalidState, - "No value for DID record", - )) - .and_then(|tags_json| { - serde_json::from_str(tags_json).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot deserialize Did {:?}", did_id), - ) - })?; - - temporarydid_map.insert(did.did.0, did.verkey); - } - - while let Some(did_record) = did_search.fetch_next_record().await? 
{ - let did_id = did_record.get_id(); - - let did: Did = did_record - .get_value() - .ok_or_else(|| err_msg(IndyErrorKind::InvalidState, "No value for DID record")) - .and_then(|tags_json| { - serde_json::from_str(tags_json).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot deserialize Did {:?}", did_id), - ) - })?; - - let temp_verkey = temporarydid_map.remove(&did.did.0); - let metadata = metadata_map.remove(&did.did.0); - - let did_with_meta = DidWithMeta { - did: did.did, - verkey: did.verkey, - temp_verkey, - metadata, - }; - - dids.push(did_with_meta); - } - - let dids = serde_json::to_string(&dids) - .to_indy(IndyErrorKind::InvalidState, "Can't serialize DIDs list")?; - - let res = Ok(dids); - trace!("list_my_dids_with_meta < {:?}", res); - res - } - - /// Returns ver key (key id) for the given DID. - /// - /// "indy_key_for_local_did" call looks data stored in the local wallet only and skips freshness - /// checking. - /// - /// Note if you want to get fresh data from the ledger you can use "indy_key_for_did" call - /// instead. - /// - /// Note that "indy_create_and_store_my_did" makes similar wallet record as "indy_create_key". - /// As result we can use returned ver key in all generic crypto and messaging functions. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did - The DID to resolve key. - /// - /// #Returns - /// key - The DIDs ver key (key id). - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn key_for_local_did( - &self, - wallet_handle: WalletHandle, - did: DidValue, - ) -> IndyResult { - trace!( - "key_for_local_did > wallet_handle {:?} did {:?}", - wallet_handle, - did - ); - - self.crypto_service.validate_did(&did)?; - - // Look to my did - let my_did = match self._wallet_get_my_did(wallet_handle, &did).await { - Ok(my_did) => Some(my_did), - Err(err) if err.kind() == IndyErrorKind::WalletItemNotFound => None, - Err(err) => Err(err)?, - }; - - if let Some(my_did) = my_did { - let res = Ok(my_did.verkey); - trace!("key_for_local_did < my {:?}", res); - return res; - } - - // look to their did - let their_did = self._wallet_get_their_did(wallet_handle, &did).await?; - - let res = Ok(their_did.verkey); - trace!("key_for_local_did < {:?}", res); - res - } - - /// Saves/replaces the meta information for the giving DID in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did - the DID to store metadata. - /// metadata - the meta information that will be store with the DID. - /// - /// #Returns - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn set_did_metadata( - &self, - wallet_handle: WalletHandle, - did: DidValue, - metadata: String, - ) -> IndyResult<()> { - trace!( - "set_did_metadata > wallet_handle {:?} did {:?} metadata {:?}", - wallet_handle, - did, - metadata - ); - - self.crypto_service.validate_did(&did)?; - - let metadata = DidMetadata { value: metadata }; - - self.wallet_service - .upsert_indy_object(wallet_handle, &did.0, &metadata) - .await?; - - let res = Ok(()); - trace!("set_did_metadata < {:?}", res); - res - } - - /// Retrieves the meta information for the giving DID in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did - The DID to retrieve metadata. - /// - /// #Returns - /// metadata - The meta information stored with the DID; Can be null if no metadata was saved - /// for this DID. 
- /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn get_did_metadata( - &self, - wallet_handle: WalletHandle, - did: DidValue, - ) -> IndyResult { - trace!( - "get_did_metadata > wallet_handle {:?} did {:?}", - wallet_handle, - did - ); - - self.crypto_service.validate_did(&did)?; - - let metadata = self - .wallet_service - .get_indy_object::(wallet_handle, &did.0, &RecordOptions::id_value()) - .await?; - - let res = Ok(metadata.value); - trace!("get_did_metadata < {:?}", res); - res - } - - /// Retrieves abbreviated verkey if it is possible otherwise return full verkey. - /// - /// #Params - - /// did: DID. - /// full_verkey: The DIDs verification key, - /// - /// #Returns - /// verkey: The DIDs verification key in either abbreviated or full form - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn abbreviate_verkey(&self, did: DidValue, verkey: String) -> IndyResult { - trace!("abbreviate_verkey > did {:?} verkey {:?}", did, verkey); - - self.crypto_service.validate_did(&did)?; - self.crypto_service.validate_key(&verkey).await?; - - if !did.is_abbreviatable() { - let res = Ok(verkey); - trace!("abbreviate_verkey < not abbreviatable {:?}", res); - return res; - } - - let did = &did.to_unqualified().0.decode_base58()?; - let dverkey = &verkey.decode_base58()?; - - let (first_part, second_part) = dverkey.split_at(16); - - let res = if first_part.eq(did.as_slice()) { - format!("~{}", second_part.to_base58()) - } else { - verkey - }; - - let res = Ok(res); - trace!("abbreviate_verkey < {:?}", res); - res - } - - async fn _update_dependent_entity_reference( - &self, - wallet_handle: WalletHandle, - id: &str, - new_id: &str, - ) -> IndyResult<()> - where - T: ::serde::Serialize + ::serde::de::DeserializeOwned + Sized, - { - if let Ok(record) = self - .wallet_service - .get_indy_record_value::(wallet_handle, id, "{}") - .await - { - self.wallet_service - .delete_indy_record::(wallet_handle, id) - .await?; - self.wallet_service - .add_indy_record::(wallet_handle, new_id, &record, &HashMap::new()) - .await?; - } - - Ok(()) - } - - async fn _wallet_get_my_did( - &self, - wallet_handle: WalletHandle, - my_did: &DidValue, - ) -> IndyResult { - self.wallet_service - .get_indy_object(wallet_handle, &my_did.0, &RecordOptions::id_value()) - .await - } - - async fn _wallet_get_their_did( - &self, - wallet_handle: WalletHandle, - their_did: &DidValue, - ) -> IndyResult { - self.wallet_service - .get_indy_object(wallet_handle, &their_did.0, &RecordOptions::id_value()) - .await - } -} diff --git a/aries/misc/legacy/libvdrtools/src/controllers/mod.rs b/aries/misc/legacy/libvdrtools/src/controllers/mod.rs deleted file mode 100644 index 960f6d8d9d..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ -mod crypto; -pub(crate) mod did; -mod non_secrets; -mod wallet; - -pub(crate) use crypto::CryptoController; -pub(crate) use did::DidController; -pub(crate) use non_secrets::NonSecretsController; -pub(crate) use wallet::WalletController; diff --git a/aries/misc/legacy/libvdrtools/src/controllers/non_secrets.rs b/aries/misc/legacy/libvdrtools/src/controllers/non_secrets.rs deleted file mode 100644 index 156bdfadf9..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/non_secrets.rs +++ /dev/null @@ -1,504 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use futures::lock::Mutex; -use indy_api_types::{domain::wallet::Tags, errors::prelude::*, SearchHandle, WalletHandle}; -use 
indy_utils::next_search_handle; -use indy_wallet::{RecordOptions, SearchOptions, WalletRecord, WalletSearch, WalletService}; - -pub struct NonSecretsController { - wallet_service: Arc, - searches: Mutex>>>, -} - -impl NonSecretsController { - pub(crate) fn new(wallet_service: Arc) -> NonSecretsController { - NonSecretsController { - wallet_service, - searches: Mutex::new(HashMap::new()), - } - } - - /// Create a new non-secret record in the wallet - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// value: the value of record - /// tags_json: (optional) the record tags used for search and storing meta information as json: - /// { - /// "tagName1": , // string tag (will be stored encrypted) - /// "tagName2": , // string tag (will be stored encrypted) - /// "~tagName3": , // string tag (will be stored un-encrypted) - /// "~tagName4": , // string tag (will be stored un-encrypted) - /// } - /// Note that null means no tags - /// If tag name starts with "~" the tag will be stored un-encrypted that will allow - /// usage of this tag in complex search queries (comparison, predicates) - /// Encrypted tags can be searched only for exact matching - // TODO: change to String -> &str - pub async fn add_record( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - value: String, - tags: Option, - ) -> IndyResult<()> { - trace!( - "add_record > wallet_handle {:?} type_ {:?} id {:?} value {:?} tags {:?}", - wallet_handle, - type_, - id, - value, - tags - ); - - self._check_type(&type_)?; - - self.wallet_service - .add_record( - wallet_handle, - &type_, - &id, - &value, - &tags.unwrap_or_default(), - ) - .await?; - - let res = Ok(()); - trace!("add_record < {:?}", res); - res - } - - /// Update a non-secret wallet record value - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// value: the new value of record - pub async fn update_record_value( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - value: String, - ) -> IndyResult<()> { - trace!( - "update_record_value > wallet_handle {:?} type_ {:?} id {:?} value {:?}", - wallet_handle, - type_, - id, - value - ); - - self._check_type(&type_)?; - - self.wallet_service - .update_record_value(wallet_handle, &type_, &id, &value) - .await?; - - let res = Ok(()); - trace!("update_record_value < {:?}", res); - res - } - - /// Update a non-secret wallet record tags - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// tags_json: the record tags used for search and storing meta information as json: - /// { - /// "tagName1": , // string tag (will be stored encrypted) - /// "tagName2": , // string tag (will be stored encrypted) - /// "~tagName3": , // string tag (will be stored un-encrypted) - /// "~tagName4": , // string tag (will be stored un-encrypted) - /// } - /// If tag name starts with "~" the tag will be stored un-encrypted that will allow - /// usage of this tag in complex search queries (comparison, predicates) - /// Encrypted tags can be searched only for exact matching - pub async fn update_record_tags( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - tags: Tags, - ) -> IndyResult<()> { - trace!( - 
"update_record_tags > wallet_handle {:?} type_ {:?} id {:?} tags {:?}", - wallet_handle, - type_, - id, - tags - ); - - self._check_type(&type_)?; - - self.wallet_service - .update_record_tags(wallet_handle, &type_, &id, &tags) - .await?; - - let res = Ok(()); - trace!("update_record_tags < {:?}", res); - res - } - - /// Add new tags to the wallet record - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// tags_json: the record tags used for search and storing meta information as json: - /// { - /// "tagName1": , // string tag (will be stored encrypted) - /// "tagName2": , // string tag (will be stored encrypted) - /// "~tagName3": , // string tag (will be stored un-encrypted) - /// "~tagName4": , // string tag (will be stored un-encrypted) - /// } - /// If tag name starts with "~" the tag will be stored un-encrypted that will allow - /// usage of this tag in complex search queries (comparison, predicates) - /// Encrypted tags can be searched only for exact matching - /// Note if some from provided tags already assigned to the record than - /// corresponding tags values will be replaced - pub async fn add_record_tags( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - tags: Tags, - ) -> IndyResult<()> { - trace!( - "add_record_tags > wallet_handle {:?} type_ {:?} id {:?} tags {:?}", - wallet_handle, - type_, - id, - tags - ); - - self._check_type(&type_)?; - - self.wallet_service - .add_record_tags(wallet_handle, &type_, &id, &tags) - .await?; - - let res = Ok(()); - trace!("add_record_tags < {:?}", tags); - res - } - - /// Delete tags from the wallet record - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// tag_names_json: the list of tag names to remove from the record as json array: - /// ["tagName1", "tagName2", ...] 
- pub async fn delete_record_tags( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - tag_names_json: String, - ) -> IndyResult<()> { - trace!( - "delete_record_tags > wallet_handle {:?} type_ {:?} id {:?} tag_names_json {:?}", - wallet_handle, - type_, - id, - tag_names_json - ); - - self._check_type(&type_)?; - - let tag_names: Vec<&str> = serde_json::from_str(&tag_names_json).to_indy( - IndyErrorKind::InvalidStructure, - "Cannot deserialize tag names", - )?; - - self.wallet_service - .delete_record_tags(wallet_handle, &type_, &id, &tag_names) - .await?; - - let res = Ok(()); - trace!("delete_record_tags < {:?}", res); - res - } - - /// Delete an existing wallet record in the wallet - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: record type - /// id: the id of record - pub async fn delete_record( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - ) -> IndyResult<()> { - trace!( - "delete_record > wallet_handle {:?} type_ {:?} id {:?}", - wallet_handle, - type_, - id - ); - - self._check_type(&type_)?; - - self.wallet_service - .delete_record(wallet_handle, &type_, &id) - .await?; - - let res = Ok(()); - trace!("delete_record < {:?}", res); - res - } - - /// Get an wallet record by id - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// options_json: //TODO: FIXME: Think about replacing by bitmask - /// { - /// retrieveType: (optional, false by default) Retrieve record type, - /// retrieveValue: (optional, true by default) Retrieve record value, - /// retrieveTags: (optional, false by default) Retrieve record tags - /// } - /// #Returns - /// wallet record json: - /// { - /// id: "Some id", - /// type: "Some type", // present only if retrieveType set to true - /// value: "Some value", // present only if retrieveValue set to true - /// tags: , // present only if retrieveTags set to true - /// } - pub async fn get_record( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - options_json: String, - ) -> IndyResult { - trace!( - "get_record > wallet_handle {:?} type_ {:?} id {:?} options_json {:?}", - wallet_handle, - type_, - id, - options_json - ); - - self._check_type(&type_)?; - - serde_json::from_str::(&options_json).to_indy( - IndyErrorKind::InvalidStructure, - "Cannot deserialize options", - )?; - - let record = self - .wallet_service - .get_record(wallet_handle, &type_, &id, &options_json) - .await?; - - let record = serde_json::to_string(&record).to_indy( - IndyErrorKind::InvalidStructure, - "Cannot serialize WalletRecord", - )?; - - let res = Ok(record); - trace!("get_record < {:?}", res); - res - } - - /// Search for wallet records. - /// - /// Note instead of immediately returning of fetched records - /// this call returns wallet_search_handle that can be used later - /// to fetch records by small batches (with indy_fetch_wallet_search_next_records). 
- /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// query_json: MongoDB style query to wallet record tags: - /// { - /// "tagName": "tagValue", - /// $or: { - /// "tagName2": { $regex: 'pattern' }, - /// "tagName3": { $gte: '123' }, - /// }, - /// } - /// options_json: //TODO: FIXME: Think about replacing by bitmask - /// { - /// retrieveRecords: (optional, true by default) If false only "counts" will be calculated, - /// retrieveTotalCount: (optional, false by default) Calculate total count, - /// retrieveType: (optional, false by default) Retrieve record type, - /// retrieveValue: (optional, true by default) Retrieve record value, - /// retrieveTags: (optional, false by default) Retrieve record tags, - /// } - /// #Returns - /// search_handle: Wallet search handle that can be used later - /// to fetch records by small batches (with indy_fetch_wallet_search_next_records) - pub async fn open_search( - &self, - wallet_handle: WalletHandle, - type_: String, - query_json: String, - options_json: String, - ) -> IndyResult { - trace!( - "open_search > wallet_handle {:?} type_ {:?} query_json {:?} options_json {:?}", - wallet_handle, - type_, - query_json, - options_json - ); - - self._check_type(&type_)?; - - serde_json::from_str::(&options_json).to_indy( - IndyErrorKind::InvalidStructure, - "Cannot deserialize options", - )?; - - let search = self - .wallet_service - .search_records(wallet_handle, &type_, &query_json, &options_json) - .await?; - - let search_handle = next_search_handle(); - - self.searches - .lock() - .await - .insert(search_handle, Arc::new(Mutex::new(search))); - - let res = Ok(search_handle); - trace!("open_search < {:?}", search_handle); - res - } - - /// Fetch next records for wallet search. - /// - /// Not if there are no records this call returns WalletNoRecords error. - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet) - /// wallet_search_handle: wallet search handle (created by indy_open_wallet_search) - /// count: Count of records to fetch - /// - /// #Returns - /// wallet records json: - /// { - /// totalCount: , // present only if retrieveTotalCount set to true - /// records: [{ // present only if retrieveRecords set to true - /// id: "Some id", - /// type: "Some type", // present only if retrieveType set to true - /// value: "Some value", // present only if retrieveValue set to true - /// tags: , // present only if retrieveTags set to true - /// }], - /// } - pub async fn fetch_search_next_records( - &self, - wallet_handle: WalletHandle, - wallet_search_handle: SearchHandle, - count: usize, - ) -> IndyResult { - trace!( - "fetch_search_next_records > wallet_handle {:?} wallet_search_handle {:?} count {:?}", - wallet_handle, - wallet_search_handle, - count - ); - - let search_mut = { - self.searches - .lock() - .await - .get(&wallet_search_handle) - .ok_or_else(|| { - err_msg(IndyErrorKind::InvalidWalletHandle, "Unknown search handle") - })? - .clone() - }; - - let mut search = search_mut.lock().await; - - let mut records: Vec = Vec::new(); - - for _ in 0..count { - match search.fetch_next_record().await? 
{ - Some(record) => records.push(record), - None => break, - } - } - - let search_result = SearchRecords { - total_count: search.get_total_count()?, - records: if records.is_empty() { - None - } else { - Some(records) - }, - }; - - let search_result = serde_json::to_string(&search_result).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize SearchRecords", - )?; - - let res = Ok(search_result); - trace!("fetch_search_next_records < {:?}", res); - res - } - - /// Close wallet search (make search handle invalid) - /// - /// #Params - /// wallet_search_handle: wallet search handle - pub async fn close_search(&self, wallet_search_handle: SearchHandle) -> IndyResult<()> { - trace!( - "close_search > wallet_search_handle {:?}", - wallet_search_handle - ); - - self.searches - .lock() - .await - .remove(&wallet_search_handle) - .ok_or_else(|| err_msg(IndyErrorKind::InvalidWalletHandle, "Unknown search handle"))?; - - let res = Ok(()); - trace!("close_search < {:?}", res); - res - } - - fn _check_type(&self, _type: &str) -> IndyResult<()> { - // if type_.starts_with(WalletService::PREFIX) { - // Err(err_msg( - // IndyErrorKind::WalletAccessFailed, - // format!("Record of type \"{}\" is not available for fetching", type_), - // ))?; - // } - - Ok(()) - } -} - -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct SearchRecords { - pub total_count: Option, - pub records: Option>, -} diff --git a/aries/misc/legacy/libvdrtools/src/controllers/wallet.rs b/aries/misc/legacy/libvdrtools/src/controllers/wallet.rs deleted file mode 100644 index bc4eaf37ef..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/wallet.rs +++ /dev/null @@ -1,436 +0,0 @@ -use std::sync::Arc; - -// use async_std::task::spawn_blocking; -use indy_api_types::{ - domain::wallet::{Config, Credentials, ExportConfig, KeyConfig}, - errors::prelude::*, - WalletHandle, -}; -use indy_utils::crypto::{ - chacha20poly1305_ietf, chacha20poly1305_ietf::Key as MasterKey, randombytes, -}; -use indy_wallet::{iterator::WalletIterator, KeyDerivationData, WalletService}; - -use crate::{services::CryptoService, utils::crypto::base58::ToBase58}; - -pub struct WalletController { - wallet_service: Arc, - crypto_service: Arc, -} - -impl WalletController { - pub(crate) fn new( - wallet_service: Arc, - crypto_service: Arc, - ) -> WalletController { - WalletController { - wallet_service, - crypto_service, - } - } - - /// Create a new secure wallet. - /// - /// #Params - /// config: Wallet configuration json. - /// { - /// "id": string, Identifier of the wallet. - /// Configured storage uses this identifier to lookup exact wallet data placement. - /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. - /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage - /// call. "storage_config": optional, Storage configuration json. Storage type - /// defines set of supported keys. Can be optional if storage supports - /// default configuration. For 'default' storage type configuration is: - /// { - /// "path": optional, Path to the directory with wallet files. - /// Defaults to $HOME/.indy_client/wallet. - /// Wallet will be stored in the file {path}/{id}/sqlite.db - /// } - /// } - /// credentials: Wallet credentials json - /// { - /// "key": string, Key or passphrase used for wallet key derivation. 
- /// Look to key_derivation_method param for information about supported key - /// derivation methods. "storage_credentials": optional Credentials for wallet - /// storage. Storage type defines set of supported keys. Can be - /// optional if storage supports default configuration. For - /// 'default' storage type should be empty. "key_derivation_method": optional - /// Algorithm to use for wallet key derivation: ARGON2I_MOD - - /// derive secured wallet master key (used by default) ARGON2I_INT - /// - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call } - /// - /// #Returns - /// err: Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn create(&self, config: Config, credentials: Credentials) -> IndyResult<()> { - trace!( - "_create > config: {:?} credentials: {:?}", - &config, - secret!(&credentials) - ); - - let key_data = KeyDerivationData::from_passphrase_with_new_salt( - &credentials.key, - &credentials.key_derivation_method, - ); - - let key = Self::_derive_key(&key_data).await?; - - let res = self - .wallet_service - .create_wallet(&config, &credentials, (&key_data, &key)) - .await; - - trace!("create < {:?}", res); - res - } - - /// Open the wallet. - /// - /// Wallet must be previously created with indy_create_wallet method. - /// - /// #Params - /// config: Wallet configuration json. - /// { - /// "id": string, Identifier of the wallet. - /// Configured storage uses this identifier to lookup exact wallet data placement. - /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. - /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with - /// indy_register_wallet_storage call. "storage_config": optional, Storage - /// configuration json. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. For - /// 'default' storage type configuration is: { - /// "path": optional, Path to the directory with wallet files. - /// Defaults to $HOME/.indy_client/wallet. - /// Wallet will be stored in the file {path}/{id}/sqlite.db - /// } - /// "cache": optional, Cache configuration json. If omitted the cache is disabled - /// (default). { - /// "size": optional, Number of items in cache, - /// "entities": List, Types of items being cached. eg. ["vdrtools::Did", - /// "vdrtools::Key"] "algorithm" optional, cache algorithm, defaults to - /// lru, which is the only one supported for now. } - /// } - /// credentials: Wallet credentials json - /// { - /// "key": string, Key or passphrase used for wallet key derivation. - /// Look to key_derivation_method param for information about supported key - /// derivation methods. "rekey": optional, If present than wallet master key - /// will be rotated to a new one. "storage_credentials": optional Credentials - /// for wallet storage. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type should be empty. "key_derivation_method": - /// optional Algorithm to use for wallet key derivation: - /// ARGON2I_MOD - derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). 
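// Illustrative sketch, not from the original libvdrtools sources: a hedged example of
// the `config` and `credentials` JSON documented above for create/open. Only fields
// named in the doc comments appear; the id, path and key values are placeholders.
fn wallet_config_and_credentials_sketch() {
    use serde_json::json;

    let config = json!({
        "id": "my-wallet",
        "storage_type": "default",
        "storage_config": {
            // The wallet ends up in {path}/{id}/sqlite.db per the docs.
            "path": "/home/user/.indy_client/wallet"
        }
    });

    let credentials = json!({
        "key": "my wallet passphrase",
        // ARGON2I_MOD (default), ARGON2I_INT, or RAW per the docs.
        "key_derivation_method": "ARGON2I_MOD"
    });

    println!("{}\n{}", config, credentials);
}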
- /// RAW keys can be generated with indy_generate_wallet_key call - /// "rekey_derivation_method": optional Algorithm to use for wallet rekey - /// derivation: ARGON2I_MOD - derive secured wallet master rekey - /// (used by default) ARGON2I_INT - derive secured wallet master - /// rekey (less secured but faster) RAW - raw wallet rekey master - /// provided (skip derivation). RAW keys can be generated - /// with indy_generate_wallet_key call } - /// - /// #Returns - /// err: Error code - /// handle: Handle to opened wallet to use in methods that require wallet access. - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn open(&self, config: Config, credentials: Credentials) -> IndyResult { - trace!( - "open > config: {:?} credentials: {:?}", - &config, - secret!(&credentials) - ); - // TODO: try to refactor to avoid usage of continue methods - - let (wallet_handle, key_derivation_data, rekey_data) = self - .wallet_service - .open_wallet_prepare(&config, &credentials) - .await?; - - let key = Self::_derive_key(&key_derivation_data).await?; - - let rekey = if let Some(rekey_data) = rekey_data { - Some(Self::_derive_key(&rekey_data).await?) - } else { - None - }; - - let res = self - .wallet_service - .open_wallet_continue(wallet_handle, (&key, rekey.as_ref()), config.cache) - .await; - - trace!("open < res: {:?}", res); - - res - } - - /// Closes opened wallet and frees allocated resources. - /// - /// #Params - /// wallet_handle: wallet handle returned by indy_open_wallet. - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn close(&self, wallet_handle: WalletHandle) -> IndyResult<()> { - trace!("close > handle: {:?}", wallet_handle); - - self.wallet_service.close_wallet(wallet_handle).await?; - - trace!("close < res: ()"); - Ok(()) - } - - /// Deletes created wallet. - /// - /// #Params - /// config: Wallet configuration json. - /// { - /// "id": string, Identifier of the wallet. - /// Configured storage uses this identifier to lookup exact wallet data placement. - /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. - /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage - /// call. "storage_config": optional, Storage configuration json. Storage type - /// defines set of supported keys. Can be optional if storage supports - /// default configuration. For 'default' storage type configuration is: - /// { - /// "path": optional, Path to the directory with wallet files. - /// Defaults to $HOME/.indy_client/wallet. - /// Wallet will be stored in the file {path}/{id}/sqlite.db - /// } - /// } - /// credentials: Wallet credentials json - /// { - /// "key": string, Key or passphrase used for wallet key derivation. - /// Look to key_derivation_method param for information about supported key - /// derivation methods. "storage_credentials": optional Credentials for wallet - /// storage. Storage type defines set of supported keys. Can be - /// optional if storage supports default configuration. For - /// 'default' storage type should be empty. "key_derivation_method": optional - /// Algorithm to use for wallet key derivation: ARGON2I_MOD - - /// derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). 
- /// RAW keys can be generated with indy_generate_wallet_key call } - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn delete(&self, config: Config, credentials: Credentials) -> IndyResult<()> { - trace!( - "delete > config: {:?} credentials: {:?}", - &config, - secret!(&credentials) - ); - // TODO: try to refactor to avoid usage of continue methods - - let (metadata, key_derivation_data) = self - .wallet_service - .delete_wallet_prepare(&config, &credentials) - .await?; - - let key = Self::_derive_key(&key_derivation_data).await?; - - let res = self - .wallet_service - .delete_wallet_continue(&config, &credentials, &metadata, &key) - .await; - - trace!("delete < {:?}", res); - res - } - - /// Exports opened wallet - /// - /// #Params: - /// wallet_handle: wallet handle returned by indy_open_wallet - /// export_config: JSON containing settings for input operation. - /// { - /// "path": , Path of the file that contains exported wallet content - /// "key": , Key or passphrase used for wallet export key derivation. - /// Look to key_derivation_method param for information about supported key - /// derivation methods. "key_derivation_method": optional Algorithm to use for - /// wallet export key derivation: ARGON2I_MOD - derive secured - /// export key (used by default) ARGON2I_INT - derive secured - /// export key (less secured but faster) RAW - raw export key - /// provided (skip derivation). RAW keys can be generated - /// with indy_generate_wallet_key call } - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn export( - &self, - wallet_handle: WalletHandle, - export_config: ExportConfig, - ) -> IndyResult<()> { - trace!( - "export > handle: {:?} export_config: {:?}", - wallet_handle, - secret!(&export_config) - ); - - let key_data = KeyDerivationData::from_passphrase_with_new_salt( - &export_config.key, - &export_config.key_derivation_method, - ); - - let key = Self::_derive_key(&key_data).await?; - - let res = self - .wallet_service - .export_wallet(wallet_handle, &export_config, 0, (&key_data, &key)) - .await; - - trace!("export < {:?}", res); - res - } - - /// Creates a new secure wallet and then imports its content - /// according to fields provided in import_config - /// This can be seen as an indy_create_wallet call with additional content import - /// - /// #Params - /// config: Wallet configuration json. - /// { - /// "id": string, Identifier of the wallet. - /// Configured storage uses this identifier to lookup exact wallet data placement. - /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. - /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage - /// call. "storage_config": optional, Storage configuration json. Storage type - /// defines set of supported keys. Can be optional if storage supports - /// default configuration. For 'default' storage type configuration is: - /// { - /// "path": optional, Path to the directory with wallet files. - /// Defaults to $HOME/.indy_client/wallet. - /// Wallet will be stored in the file {path}/{id}/sqlite.db - /// } - /// } - /// credentials: Wallet credentials json - /// { - /// "key": string, Key or passphrase used for wallet key derivation. - /// Look to key_derivation_method param for information about supported key - /// derivation methods. "storage_credentials": optional Credentials for wallet - /// storage. 
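// Illustrative sketch, not from the original libvdrtools sources: the export settings
// JSON (ExportConfig) described above; path and key are placeholders. Per the import
// doc, the same "path"/"key" pair is what import_config carries, with "key" being the
// key the wallet was exported with.
fn wallet_export_config_sketch() {
    use serde_json::json;

    let export_config = json!({
        "path": "/tmp/wallet-backup",
        "key": "export passphrase",
        "key_derivation_method": "ARGON2I_MOD"
    });

    println!("{}", export_config);
}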
Storage type defines set of supported keys. Can be - /// optional if storage supports default configuration. For - /// 'default' storage type should be empty. "key_derivation_method": optional - /// Algorithm to use for wallet key derivation: ARGON2I_MOD - - /// derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call } - /// import_config: Import settings json. - /// { - /// "path": , path of the file that contains exported wallet content - /// "key": , key used for export of the wallet - /// } - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn import( - &self, - config: Config, - credentials: Credentials, - import_config: ExportConfig, - ) -> IndyResult<()> { - trace!( - "import > config: {:?} credentials: {:?} import_config: {:?}", - &config, - secret!(&credentials), - secret!(&import_config) - ); - // TODO: try to refactor to avoid usage of continue methods - - let (wallet_handle, key_data, import_key_data) = self - .wallet_service - .import_wallet_prepare(&config, &credentials, &import_config) - .await?; - - let import_key = Self::_derive_key(&import_key_data).await?; - let key = Self::_derive_key(&key_data).await?; - - let res = self - .wallet_service - .import_wallet_continue(wallet_handle, &config, &credentials, (import_key, key)) - .await; - - trace!("import < {:?}", res); - - res - } - - pub async fn get_all(&self, handle: WalletHandle) -> IndyResult { - self.wallet_service.get_all(handle).await - } - - /// Generate wallet master key. - /// Returned key is compatible with "RAW" key derivation method. - /// It allows to avoid expensive key derivation for use cases when wallet keys can be stored in - /// a secure enclave. - /// - /// #Params - /// config: (optional) key configuration json. - /// { - /// "seed": string, (optional) Seed that allows deterministic key creation (if not set random - /// one will be created). Can be UTF-8, base64 or hex string. - /// } - /// - /// #Returns - /// err: Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub fn generate_key(&self, config: Option) -> IndyResult { - trace!("generate_key > config: {:?}", secret!(&config)); - - let seed = config.as_ref().and_then(|config| config.seed.as_deref()); - - let key = match self.crypto_service.convert_seed(seed)? 
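// Illustrative sketch, not from the original libvdrtools sources: the optional key
// config accepted by generate_key, and how the returned base58 key is meant to be used
// as a RAW wallet key per the doc comment above. Seed and key values are placeholders.
fn raw_wallet_key_sketch() {
    use serde_json::json;

    // Omitting "seed" makes generate_key produce a random key instead.
    let key_config = json!({ "seed": "0123456789abcdef0123456789abcdef" });

    // Suppose generate_key returned this base58 string (placeholder value):
    let generated_key = "6nxtSiXFvBd593Y2DCed2dYvRY1PGK9WMtxCBjLzKgbw";

    // The raw key is then used directly, skipping derivation entirely:
    let credentials = json!({
        "key": generated_key,
        "key_derivation_method": "RAW"
    });

    println!("{}\n{}", key_config, credentials);
}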
{ - Some(seed) => randombytes::randombytes_deterministic( - chacha20poly1305_ietf::KEYBYTES, - &randombytes::Seed::from_slice(&seed[..])?, - ), - None => randombytes::randombytes(chacha20poly1305_ietf::KEYBYTES), - }; - - let res = key[..].to_base58(); - - trace!("generate_key < res: {:?}", res); - Ok(res) - } - - async fn _derive_key(key_data: &KeyDerivationData) -> IndyResult { - key_data.calc_master_key() - // let res = spawn_blocking(move || key_data.calc_master_key()).await?; - // Ok(res) - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential.rs deleted file mode 100644 index 9080a82f24..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential.rs +++ /dev/null @@ -1,56 +0,0 @@ -use std::collections::HashMap; - -use ursa::cl::{CredentialSignature, RevocationRegistry, SignatureCorrectnessProof, Witness}; - -use super::{ - credential_definition::CredentialDefinitionId, - revocation_registry_definition::RevocationRegistryId, schema::SchemaId, -}; - -#[derive(Debug, Deserialize, Serialize)] -pub struct Credential { - pub schema_id: SchemaId, - pub cred_def_id: CredentialDefinitionId, - pub rev_reg_id: Option, - pub values: CredentialValues, - pub signature: CredentialSignature, - pub signature_correctness_proof: SignatureCorrectnessProof, - pub rev_reg: Option, - pub witness: Option, -} - -impl Credential { - pub const QUALIFIABLE_TAGS: [&'static str; 5] = [ - "issuer_did", - "cred_def_id", - "schema_id", - "schema_issuer_did", - "rev_reg_id", - ]; - pub const EXTRA_TAG_SUFFIX: &'static str = "_short"; - - pub fn add_extra_tag_suffix(tag: &str) -> String { - format!("{}{}", tag, Self::EXTRA_TAG_SUFFIX) - } -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -pub struct CredentialInfo { - pub referent: String, - pub attrs: ShortCredentialValues, - pub schema_id: SchemaId, - pub cred_def_id: CredentialDefinitionId, - pub rev_reg_id: Option, - pub cred_rev_id: Option, -} - -pub type ShortCredentialValues = HashMap; - -#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)] -pub struct CredentialValues(pub HashMap); - -#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)] -pub struct AttributeValues { - pub raw: String, - pub encoded: String, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_definition.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_definition.rs deleted file mode 100644 index 8a765e782e..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_definition.rs +++ /dev/null @@ -1,437 +0,0 @@ -use std::collections::HashMap; - -use indy_api_types::{ - errors::{IndyErrorKind, IndyResult}, - IndyError, -}; -use ursa::cl::{ - CredentialKeyCorrectnessProof, CredentialPrimaryPublicKey, CredentialPrivateKey, - CredentialRevocationPublicKey, -}; - -use super::{ - super::{ - anoncreds::{schema::SchemaId, DELIMITER}, - crypto::did::DidValue, - }, - indy_identifiers, -}; -use crate::utils::qualifier; - -pub const CL_SIGNATURE_TYPE: &str = "CL"; - -#[derive(Deserialize, Debug, Serialize, PartialEq, Clone)] -pub enum SignatureType { - CL, -} - -impl SignatureType { - pub fn to_str(&self) -> &'static str { - match *self { - SignatureType::CL => CL_SIGNATURE_TYPE, - } - } -} - -#[derive(Debug, Serialize, Deserialize, Clone, Default)] -pub struct CredentialDefinitionConfig { - #[serde(default)] - pub support_revocation: bool, -} - -#[derive(Debug, Serialize, Deserialize)] -pub 
struct CredentialDefinitionData { - pub primary: CredentialPrimaryPublicKey, - #[serde(skip_serializing_if = "Option::is_none")] - pub revocation: Option, -} - -#[derive(Deserialize, Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct CredentialDefinitionV1 { - pub id: CredentialDefinitionId, - pub schema_id: SchemaId, - #[serde(rename = "type")] - pub signature_type: SignatureType, - pub tag: String, - pub value: CredentialDefinitionData, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum CredentialDefinition { - #[serde(rename = "1.0")] - CredentialDefinitionV1(CredentialDefinitionV1), -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct TemporaryCredentialDefinition { - pub cred_def: CredentialDefinition, - pub cred_def_priv_key: CredentialDefinitionPrivateKey, - pub cred_def_correctness_proof: CredentialDefinitionCorrectnessProof, -} - -impl CredentialDefinition { - pub fn to_unqualified(self) -> CredentialDefinition { - match self { - CredentialDefinition::CredentialDefinitionV1(cred_def) => { - CredentialDefinition::CredentialDefinitionV1(CredentialDefinitionV1 { - id: cred_def.id.to_unqualified(), - schema_id: cred_def.schema_id.to_unqualified(), - signature_type: cred_def.signature_type, - tag: cred_def.tag, - value: cred_def.value, - }) - } - } - } -} - -impl From for CredentialDefinitionV1 { - fn from(cred_def: CredentialDefinition) -> Self { - match cred_def { - CredentialDefinition::CredentialDefinitionV1(cred_def) => cred_def, - } - } -} - -pub type CredentialDefinitions = HashMap; - -pub fn cred_defs_map_to_cred_defs_v1_map( - cred_defs: CredentialDefinitions, -) -> HashMap { - cred_defs - .into_iter() - .map(|(cred_def_id, cred_def)| (cred_def_id, CredentialDefinitionV1::from(cred_def))) - .collect() -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct CredentialDefinitionPrivateKey { - pub value: CredentialPrivateKey, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct CredentialDefinitionCorrectnessProof { - pub value: CredentialKeyCorrectnessProof, -} - -qualifiable_type!(CredentialDefinitionId); - -impl CredentialDefinitionId { - pub const PREFIX: &'static str = "/anoncreds/v0/CLAIM_DEF/"; - pub const MARKER: &'static str = "3"; - - pub fn new( - did: &DidValue, - schema_id: &SchemaId, - signature_type: &str, - tag: &str, - ) -> IndyResult { - match did.get_method() { - Some(method) if method.starts_with("indy") => Ok(CredentialDefinitionId(format!( - "{}{}{}/{}", - did.0, - Self::PREFIX, - &schema_id.0, - tag - ))), - Some(_method) => Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unsupported DID method", - )), - None => { - let tag = if tag.is_empty() { - "".to_owned() - } else { - format!("{}{}", DELIMITER, tag) - }; - - let id = CredentialDefinitionId(format!( - "{}{}{}{}{}{}{}{}", - did.0, - DELIMITER, - Self::MARKER, - DELIMITER, - signature_type, - DELIMITER, - schema_id.0, - tag - )); - - Ok(id) - } - } - } - - pub fn parts(&self) -> Option<(DidValue, String, SchemaId, String)> { - trace!("CredentialDefinitionId::parts >> self.0 {}", self.0); - if let Some((did, seq_no, tag)) = - indy_identifiers::try_parse_indy_creddef_id(self.0.as_str()) - { - trace!("{:?} {:?} {:?}", did, seq_no, tag); - return Some(( - DidValue(did), - CL_SIGNATURE_TYPE.to_owned(), - SchemaId(seq_no), - tag, - )); - } - - let parts = self.0.split_terminator(DELIMITER).collect::>(); - - if parts.len() == 4 { - // Th7MpTaRZVRYnPiabds81Y:3:CL:1 - let did = parts[0].to_string(); - let signature_type = 
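// Illustrative sketch, not from the original libvdrtools sources: the two credential
// definition id layouts produced by CredentialDefinitionId::new above, shown as plain
// strings (values taken from the tests further down in this file).
fn cred_def_id_layouts_sketch() {
    // did:indy DIDs get the path-style form: <did>/anoncreds/v0/CLAIM_DEF/<schema ref>/<tag>
    let qualified = "did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag";
    // Unqualified DIDs get the legacy colon-delimited form: <did>:3:<sig type>:<schema ref>[:<tag>]
    let legacy = "NcYxiDXkpYi6ov5FcYDi1e:3:CL:1:tag";

    // The legacy form is what the 4- and 5-part branches of parts() below reconstruct.
    let fields: Vec<&str> = legacy.split(':').collect();
    assert_eq!(fields, ["NcYxiDXkpYi6ov5FcYDi1e", "3", "CL", "1", "tag"]);
    assert!(qualified.contains("/anoncreds/v0/CLAIM_DEF/"));
}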
parts[2].to_string(); - let schema_id = parts[3].to_string(); - let tag = String::new(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 5 { - // Th7MpTaRZVRYnPiabds81Y:3:CL:1:tag - let did = parts[0].to_string(); - let signature_type = parts[2].to_string(); - let schema_id = parts[3].to_string(); - let tag = parts[4].to_string(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 7 { - // NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0 - let did = parts[0].to_string(); - let signature_type = parts[2].to_string(); - let schema_id = parts[3..7].join(DELIMITER); - let tag = String::new(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 8 { - // NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag - let did = parts[0].to_string(); - let signature_type = parts[2].to_string(); - let schema_id = parts[3..7].join(DELIMITER); - let tag = parts[7].to_string(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 9 { - // creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:3:tag - warn!("Deprecated format of FQ CredDef ID is used (creddef: suffix)"); - let did = parts[2..5].join(DELIMITER); - let signature_type = parts[6].to_string(); - let schema_id = parts[7].to_string(); - let tag = parts[8].to_string(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 16 { - // creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov: - // NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag - warn!("Deprecated format of FQ CredDef ID is used (creddef: suffix)"); - let did = parts[2..5].join(DELIMITER); - let signature_type = parts[6].to_string(); - let schema_id = parts[7..15].join(DELIMITER); - let tag = parts[15].to_string(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - None - } - - pub fn issuer_did(&self) -> Option { - self.parts().map(|(did, _, _, _)| did) - } - - pub fn qualify(&self, method: &str) -> IndyResult { - match self.parts() { - Some((did, signature_type, schema_id, tag)) => CredentialDefinitionId::new( - &did.qualify(method), - &schema_id.qualify(method)?, - &signature_type, - &tag, - ), - None => Ok(self.clone()), - } - } - - pub fn to_unqualified(&self) -> CredentialDefinitionId { - match self.parts() { - Some((did, signature_type, schema_id, tag)) => CredentialDefinitionId::new( - &did.to_unqualified(), - &schema_id.to_unqualified(), - &signature_type, - &tag, - ) - .expect("Can't create unqualified CredentialDefinitionId"), - None => self.clone(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn _did() -> DidValue { - DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _signature_type() -> String { - "CL".to_string() - } - - fn _tag() -> String { - "tag".to_string() - } - - fn _did_qualified() -> DidValue { - DidValue("did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _schema_id_seq_no() -> SchemaId { - SchemaId("1".to_string()) - } - - fn _schema_id_unqualified() -> SchemaId { - SchemaId("NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0".to_string()) - } - - fn _schema_id_qualified() -> SchemaId { - SchemaId( - "did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/SCHEMA/gvt/1.0".to_string(), - ) - } - - fn _cred_def_id_unqualified() -> CredentialDefinitionId { - CredentialDefinitionId( - 
"NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag".to_string(), - ) - } - - fn _cred_def_id_unqualified_with_schema_as_seq_no() -> CredentialDefinitionId { - CredentialDefinitionId("NcYxiDXkpYi6ov5FcYDi1e:3:CL:1:tag".to_string()) - } - - fn _cred_def_id_unqualified_with_schema_as_seq_no_without_tag() -> CredentialDefinitionId { - CredentialDefinitionId("NcYxiDXkpYi6ov5FcYDi1e:3:CL:1".to_string()) - } - - fn _cred_def_id_unqualified_without_tag() -> CredentialDefinitionId { - CredentialDefinitionId( - "NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0".to_string(), - ) - } - - fn _cred_def_id_qualified_with_schema_as_seq_no() -> CredentialDefinitionId { - CredentialDefinitionId( - "did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag" - .to_string(), - ) - } - - mod to_unqualified { - use super::*; - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified() { - assert_eq!( - _cred_def_id_unqualified(), - _cred_def_id_unqualified().to_unqualified() - ); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_without_tag() { - assert_eq!( - _cred_def_id_unqualified_without_tag(), - _cred_def_id_unqualified_without_tag().to_unqualified() - ); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_without_tag_with_schema_as_seq_no() { - assert_eq!( - _cred_def_id_unqualified_with_schema_as_seq_no(), - _cred_def_id_unqualified_with_schema_as_seq_no().to_unqualified() - ); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_without_tag_with_schema_as_seq_no_without_tag( - ) { - assert_eq!( - _cred_def_id_unqualified_with_schema_as_seq_no_without_tag(), - _cred_def_id_unqualified_with_schema_as_seq_no_without_tag().to_unqualified() - ); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_qualified_with_schema_as_seq_no() { - assert_eq!( - _cred_def_id_unqualified_with_schema_as_seq_no(), - _cred_def_id_qualified_with_schema_as_seq_no().to_unqualified() - ); - } - } - - mod parts { - use super::*; - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified() { - let (did, signature_type, schema_id, tag) = _cred_def_id_unqualified().parts().unwrap(); - assert_eq!(_did(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_unqualified(), schema_id); - assert_eq!(_tag(), tag); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_without_tag() { - let (did, signature_type, schema_id, tag) = - _cred_def_id_unqualified_without_tag().parts().unwrap(); - assert_eq!(_did(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_unqualified(), schema_id); - assert_eq!(String::new(), tag); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_with_schema_as_seq() { - let (did, signature_type, schema_id, tag) = - _cred_def_id_unqualified_with_schema_as_seq_no() - .parts() - .unwrap(); - assert_eq!(_did(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_seq_no(), schema_id); - assert_eq!(_tag(), tag); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_with_schema_as_seq_without_tag() { - let (did, signature_type, schema_id, tag) = - _cred_def_id_unqualified_with_schema_as_seq_no_without_tag() - .parts() - .unwrap(); - assert_eq!(_did(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_seq_no(), schema_id); - assert_eq!(String::new(), tag); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_qualified_with_schema_as_seq() { - let (did, signature_type, schema_id, tag) = 
- _cred_def_id_qualified_with_schema_as_seq_no() - .parts() - .unwrap(); - assert_eq!(_did_qualified(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_seq_no(), schema_id); - assert_eq!(_tag(), tag); - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_offer.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_offer.rs deleted file mode 100644 index d980b5d52c..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_offer.rs +++ /dev/null @@ -1,30 +0,0 @@ -use ursa::cl::{CredentialKeyCorrectnessProof, Nonce}; - -use super::{credential_definition::CredentialDefinitionId, schema::SchemaId}; - -#[derive(Debug, Deserialize, Serialize)] -pub struct CredentialOffer { - pub schema_id: SchemaId, - pub cred_def_id: CredentialDefinitionId, - pub key_correctness_proof: CredentialKeyCorrectnessProof, - pub nonce: Nonce, - #[serde(skip_serializing_if = "Option::is_none")] - pub method_name: Option, -} - -impl CredentialOffer { - pub fn to_unqualified(self) -> CredentialOffer { - let method_name = if self.cred_def_id.is_fully_qualified() { - self.cred_def_id.get_method() - } else { - None - }; - CredentialOffer { - method_name, - schema_id: self.schema_id.to_unqualified(), - cred_def_id: self.cred_def_id.to_unqualified(), - key_correctness_proof: self.key_correctness_proof, - nonce: self.nonce, - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_request.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_request.rs deleted file mode 100644 index a85416094d..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_request.rs +++ /dev/null @@ -1,34 +0,0 @@ -use ursa::cl::{ - BlindedCredentialSecrets, BlindedCredentialSecretsCorrectnessProof, - CredentialSecretsBlindingFactors, Nonce, -}; - -use super::{super::crypto::did::DidValue, credential_definition::CredentialDefinitionId}; - -#[derive(Debug, Serialize, Deserialize)] -pub struct CredentialRequest { - pub prover_did: DidValue, - pub cred_def_id: CredentialDefinitionId, - pub blinded_ms: BlindedCredentialSecrets, - pub blinded_ms_correctness_proof: BlindedCredentialSecretsCorrectnessProof, - pub nonce: Nonce, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct CredentialRequestMetadata { - pub master_secret_blinding_data: CredentialSecretsBlindingFactors, - pub nonce: Nonce, - pub master_secret_name: String, -} - -impl CredentialRequest { - pub fn to_unqualified(self) -> CredentialRequest { - CredentialRequest { - prover_did: self.prover_did.to_unqualified(), - cred_def_id: self.cred_def_id.to_unqualified(), - blinded_ms: self.blinded_ms, - blinded_ms_correctness_proof: self.blinded_ms_correctness_proof, - nonce: self.nonce, - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/indy_identifiers.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/indy_identifiers.rs deleted file mode 100644 index 9b5fcba751..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/indy_identifiers.rs +++ /dev/null @@ -1,155 +0,0 @@ -use lazy_static::lazy_static; -use regex::Regex; - -use super::{ - super::crypto::did::DidValue, credential_definition::CredentialDefinitionId, - revocation_registry_definition::CL_ACCUM, schema::SchemaId, -}; - -const NAMESPACE_RE: &str = r"[a-z][a-z0-9_:-]*"; -const DID_RE: &str = r"[1-9A-HJ-NP-Za-km-z]*"; //base58 -const SCHEMA_TYPE: &str = super::schema::SchemaId::PREFIX; -const SCHEMA_NAME_RE: &str = r"[^/]*"; -const 
SCHEMA_VER_RE: &str = r"[^/]*"; -const SCHEMA_SEQ_NO_RE: &str = r"[0-9]*"; - -lazy_static! { - static ref SCHEMA_RE: String = format!( - "(did:indy(:{NAMESPACE_RE})?:{DID_RE}){SCHEMA_TYPE}({SCHEMA_NAME_RE})/({SCHEMA_VER_RE})" - ); - static ref SCHEMA_REF_RE: String = format!("({SCHEMA_SEQ_NO_RE}|{})", *SCHEMA_RE); -} -const CREDDEF_TYPE: &str = super::credential_definition::CredentialDefinitionId::PREFIX; -const CREDDEF_TAG_RE: &str = r".*"; - -pub fn try_parse_indy_schema_id(id: &str) -> Option<(String, String, String)> { - let id_re = format!("^{}$", *SCHEMA_RE); - let id_re = Regex::new(id_re.as_str()).unwrap(); - if let Some(captures) = id_re.captures(id) { - trace!("try_parse_indy_schema_id: captures {:?}", captures); - if let (Some(did), Some(name), Some(ver)) = - (captures.get(1), captures.get(3), captures.get(4)) - { - return Some(( - did.as_str().to_owned(), - name.as_str().to_owned(), - ver.as_str().to_owned(), - )); - } - } - None -} - -pub fn try_parse_indy_creddef_id(id: &str) -> Option<(String, String, String)> { - let schema_ref_re = &*SCHEMA_REF_RE; - let id_re = format!( - "^(did:indy(:{NAMESPACE_RE})?:{DID_RE}){CREDDEF_TYPE}({schema_ref_re})/({CREDDEF_TAG_RE})$" - ); - let id_re = Regex::new(id_re.as_str()).unwrap(); - - if let Some(captures) = id_re.captures(id) { - trace!("try_parse_indy_creddef_id: captures {:?}", captures); - if let (Some(did), Some(seq_no), Some(tag)) = - (captures.get(1), captures.get(3), captures.get(9)) - { - return Some(( - did.as_str().to_owned(), - seq_no.as_str().to_owned(), - tag.as_str().to_owned(), - )); - } - } - - None -} - -pub fn try_parse_indy_rev_reg( - id: &str, -) -> Option<(DidValue, CredentialDefinitionId, String, String)> { - let creddef_name_re = r"[^/]*"; - let tag_re = r"[^/]*"; - let schema_ref_re = &*SCHEMA_REF_RE; - let id_re = format!( - "^(did:indy(:{NAMESPACE_RE})?:{DID_RE})/anoncreds/v0/REV_REG_DEF/{schema_ref_re}/\ - ({creddef_name_re})/({tag_re})$" - ); - let id_re = Regex::new(id_re.as_str()).unwrap(); - - if let Some(captures) = id_re.captures(id) { - trace!("try_parse_indy_rev_reg: captures {:?}", captures); - if let (Some(did), Some(schema_id), Some(creddef_name), Some(tag)) = ( - captures.get(1), - captures.get(3), - captures.get(8), - captures.get(9), - ) { - let did = DidValue(did.as_str().to_owned()); - let schema_id = SchemaId(schema_id.as_str().to_owned()); - let creddef_id = CredentialDefinitionId::new( - &did, - &schema_id, - super::credential_definition::CL_SIGNATURE_TYPE, - creddef_name.as_str(), - ) - .ok()?; - return Some(( - did, - creddef_id, - CL_ACCUM.to_owned(), - tag.as_str().to_owned(), - )); - } - } - - None -} - -#[test] -fn test_try_parse_valid_indy_creddefid_works() { - let (did, schema_seq_no, tag) = - try_parse_indy_creddef_id("did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag") - .unwrap(); - assert_eq!(did, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); - assert_eq!(schema_seq_no, "1".to_owned()); - assert_eq!(tag, "tag".to_owned()); - - let (did, schema_ref, tag) = try_parse_indy_creddef_id( - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/\ - anoncreds/v0/SCHEMA/gvt/1.0/tag", - ) - .unwrap(); - assert_eq!(did, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); - assert_eq!( - schema_ref, - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0".to_owned() - ); - assert_eq!(tag, "tag".to_owned()); -} - -#[test] -fn test_try_parse_valid_indy_revreg_works() { - let (did, creddef, _, tag) = try_parse_indy_rev_reg( - 
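// Illustrative sketch, not from the original libvdrtools sources: the did:indy
// revocation registry definition id layout matched by try_parse_indy_rev_reg above,
// pulled apart with a plain split. This only works for the seq-no schema-reference
// form; a fully qualified schema reference contains further '/' segments, which is
// why the original code builds a regex from SCHEMA_REF_RE instead.
fn indy_rev_reg_id_layout_sketch() {
    let id = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/1/creddef_name/TAG1";

    let (did, rest) = id.split_once("/anoncreds/v0/REV_REG_DEF/").unwrap();
    let parts: Vec<&str> = rest.split('/').collect();

    assert_eq!(did, "did:indy:NcYxiDXkpYi6ov5FcYDi1e");
    // schema reference / cred def tag / rev reg tag
    assert_eq!(parts, ["1", "creddef_name", "TAG1"]);
}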
"did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/1/creddef_name/TAG1", - ) - .unwrap(); - assert_eq!(did.0, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); - assert_eq!( - creddef.0, - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/creddef_name".to_owned() - ); - assert_eq!(tag, "TAG1".to_owned()); - - let (did, creddef, _, tag) = try_parse_indy_rev_reg( - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/\ - anoncreds/v0/SCHEMA/gvt/1.0/creddef_name/TAG1", - ) - .unwrap(); - assert_eq!(did.0, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); - assert_eq!( - creddef.0, - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/\ - anoncreds/v0/SCHEMA/gvt/1.0/creddef_name" - .to_owned() - ); - assert_eq!(tag, "TAG1".to_owned()); -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/master_secret.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/master_secret.rs deleted file mode 100644 index 0b6b30c9c4..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/master_secret.rs +++ /dev/null @@ -1,6 +0,0 @@ -use ursa::cl::MasterSecret as CryptoMasterSecret; - -#[derive(Debug, Deserialize, Serialize)] -pub struct MasterSecret { - pub value: CryptoMasterSecret, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/mod.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/mod.rs deleted file mode 100644 index 77fd6297b3..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/mod.rs +++ /dev/null @@ -1,15 +0,0 @@ -pub mod credential; -pub mod credential_definition; -pub mod credential_offer; -pub mod credential_request; -pub mod indy_identifiers; -pub mod master_secret; -pub mod proof; -pub mod proof_request; -pub mod requested_credential; -pub mod revocation_registry; -pub mod revocation_registry_definition; -pub mod revocation_registry_delta; -pub mod schema; - -pub const DELIMITER: &str = ":"; diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof.rs deleted file mode 100644 index f66e0cce0a..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof.rs +++ /dev/null @@ -1,86 +0,0 @@ -use std::collections::HashMap; - -use ursa::cl::Proof as CryptoProof; - -use super::{ - credential_definition::CredentialDefinitionId, - revocation_registry_definition::RevocationRegistryId, schema::SchemaId, -}; - -#[derive(Debug, Serialize, Deserialize)] -pub struct Proof { - pub proof: CryptoProof, - pub requested_proof: RequestedProof, - pub identifiers: Vec, -} - -#[derive(Debug, Serialize, Deserialize, Default)] -pub struct RequestedProof { - pub revealed_attrs: HashMap, - #[serde(skip_serializing_if = "HashMap::is_empty")] - #[serde(default)] - pub revealed_attr_groups: HashMap, - #[serde(default)] - pub self_attested_attrs: HashMap, - #[serde(default)] - pub unrevealed_attrs: HashMap, - #[serde(default)] - pub predicates: HashMap, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct SubProofReferent { - pub sub_proof_index: u32, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct RevealedAttributeInfo { - pub sub_proof_index: u32, - pub raw: String, - pub encoded: String, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct RevealedAttributeGroupInfo { - pub sub_proof_index: u32, - pub values: HashMap, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct AttributeValue { - pub raw: String, - pub encoded: String, -} - -#[derive(Debug, Clone, 
Deserialize, Serialize, PartialEq, Eq, Hash)] -pub struct Identifier { - pub schema_id: SchemaId, - pub cred_def_id: CredentialDefinitionId, - pub rev_reg_id: Option, - pub timestamp: Option, -} - -#[cfg(test)] -mod tests { - use serde_json::json; - - use super::*; - - #[test] - fn deserialize_requested_proof_with_empty_revealed_attr_groups() { - let mut req_proof_old: RequestedProof = Default::default(); - req_proof_old.revealed_attrs.insert( - "attr1".to_string(), - RevealedAttributeInfo { - sub_proof_index: 0, - raw: "123".to_string(), - encoded: "123".to_string(), - }, - ); - let json = json!(req_proof_old).to_string(); - debug!("{}", json); - - let req_proof: RequestedProof = serde_json::from_str(&json).unwrap(); - assert!(req_proof.revealed_attr_groups.is_empty()) - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof_request.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof_request.rs deleted file mode 100644 index f3b28ad616..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof_request.rs +++ /dev/null @@ -1,476 +0,0 @@ -use std::{collections::HashMap, fmt}; - -use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer}; -use serde_json::{json, Value}; -use ursa::cl::Nonce; - -use super::{ - super::crypto::did::DidValue, credential::Credential, - credential_definition::CredentialDefinitionId, - revocation_registry_definition::RevocationRegistryId, schema::SchemaId, -}; -use crate::utils::{qualifier, wql::Query}; - -#[derive(Debug, Deserialize, Serialize)] -pub struct ProofRequestPayload { - pub nonce: Nonce, - pub name: String, - pub version: String, - #[serde(default)] - pub requested_attributes: HashMap, - #[serde(default)] - pub requested_predicates: HashMap, - pub non_revoked: Option, -} - -#[derive(Debug)] -pub enum ProofRequest { - ProofRequestV1(ProofRequestPayload), - ProofRequestV2(ProofRequestPayload), -} - -#[derive(Debug, Eq, PartialEq, Clone)] -pub enum ProofRequestsVersion { - V1, - V2, -} - -impl ProofRequest { - pub fn value(&self) -> &ProofRequestPayload { - match self { - ProofRequest::ProofRequestV1(proof_req) => proof_req, - ProofRequest::ProofRequestV2(proof_req) => proof_req, - } - } - - pub fn version(&self) -> ProofRequestsVersion { - match self { - ProofRequest::ProofRequestV1(_) => ProofRequestsVersion::V1, - ProofRequest::ProofRequestV2(_) => ProofRequestsVersion::V2, - } - } -} - -impl<'de> Deserialize<'de> for ProofRequest { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - #[derive(Deserialize)] - struct Helper { - ver: Option, - nonce: String, - } - - let v = Value::deserialize(deserializer)?; - - let helper = Helper::deserialize(&v).map_err(de::Error::custom)?; - let nonce_cleaned = helper.nonce.replace([' ', '_'], ""); - - let proof_req = match helper.ver { - Some(version) => match version.as_ref() { - "1.0" => { - let proof_request = - ProofRequestPayload::deserialize(v).map_err(de::Error::custom)?; - ProofRequest::ProofRequestV1(proof_request) - } - "2.0" => { - let proof_request = - ProofRequestPayload::deserialize(v).map_err(de::Error::custom)?; - ProofRequest::ProofRequestV2(proof_request) - } - _ => return Err(de::Error::unknown_variant(&version, &["2.0"])), - }, - None => { - let proof_request = - ProofRequestPayload::deserialize(v).map_err(de::Error::custom)?; - ProofRequest::ProofRequestV1(proof_request) - } - }; - let nonce_parsed = match &proof_req { - ProofRequest::ProofRequestV1(payload) => { - 
payload.nonce.to_dec().map_err(de::Error::custom)? - } - ProofRequest::ProofRequestV2(payload) => { - payload.nonce.to_dec().map_err(de::Error::custom)? - } - }; - if nonce_cleaned != nonce_parsed { - Err(de::Error::custom(format!( - "Invalid nonce provided: {}", - nonce_cleaned - ))) - } else { - Ok(proof_req) - } - } -} - -impl Serialize for ProofRequest { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let value = match self { - ProofRequest::ProofRequestV1(proof_req) => { - let mut value = ::serde_json::to_value(proof_req).map_err(ser::Error::custom)?; - value - .as_object_mut() - .unwrap() - .insert("ver".into(), json!("1.0")); - value - } - ProofRequest::ProofRequestV2(proof_req) => { - let mut value = ::serde_json::to_value(proof_req).map_err(ser::Error::custom)?; - value - .as_object_mut() - .unwrap() - .insert("ver".into(), json!("2.0")); - value - } - }; - - value.serialize(serializer) - } -} - -pub type ProofRequestExtraQuery = HashMap; - -#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Hash)] -pub struct NonRevocedInterval { - pub from: Option, - pub to: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -pub struct AttributeInfo { - #[serde(skip_serializing_if = "Option::is_none")] - pub name: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub names: Option>, - pub restrictions: Option, - pub non_revoked: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -pub struct PredicateInfo { - pub name: String, - pub p_type: PredicateTypes, - pub p_value: i32, - pub restrictions: Option, - pub non_revoked: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -pub enum PredicateTypes { - #[serde(rename = ">=")] - GE, - #[serde(rename = "<=")] - LE, - #[serde(rename = ">")] - GT, - #[serde(rename = "<")] - LT, -} - -impl fmt::Display for PredicateTypes { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - PredicateTypes::GE => write!(f, "GE"), - PredicateTypes::GT => write!(f, "GT"), - PredicateTypes::LE => write!(f, "LE"), - PredicateTypes::LT => write!(f, "LT"), - } - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct RequestedAttributeInfo { - pub attr_referent: String, - pub attr_info: AttributeInfo, - pub revealed: bool, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct RequestedPredicateInfo { - pub predicate_referent: String, - pub predicate_info: PredicateInfo, -} - -impl ProofRequest { - pub fn to_unqualified(self) -> ProofRequest { - let convert = |proof_request: &mut ProofRequestPayload| { - for (_, requested_attribute) in proof_request.requested_attributes.iter_mut() { - requested_attribute.restrictions = requested_attribute - .restrictions - .as_mut() - .map(|ref mut restrictions| _convert_query_to_unqualified(restrictions)); - } - for (_, requested_predicate) in proof_request.requested_predicates.iter_mut() { - requested_predicate.restrictions = requested_predicate - .restrictions - .as_mut() - .map(|ref mut restrictions| _convert_query_to_unqualified(restrictions)); - } - }; - - match self { - ProofRequest::ProofRequestV2(mut proof_request) => { - convert(&mut proof_request); - ProofRequest::ProofRequestV2(proof_request) - } - ProofRequest::ProofRequestV1(mut proof_request) => { - convert(&mut proof_request); - ProofRequest::ProofRequestV1(proof_request) - } - } - } -} - -fn _convert_query_to_unqualified(query: &Query) -> Query { - match query { - Query::Eq(tag_name, ref tag_value) => 
Query::Eq( - tag_name.to_string(), - _convert_value_to_unqualified(tag_name, tag_value), - ), - Query::Neq(ref tag_name, ref tag_value) => Query::Neq( - tag_name.to_string(), - _convert_value_to_unqualified(tag_name, tag_value), - ), - Query::In(ref tag_name, ref tag_values) => Query::In( - tag_name.to_string(), - tag_values - .iter() - .map(|tag_value| _convert_value_to_unqualified(tag_name, tag_value)) - .collect::>(), - ), - Query::And(ref queries) => Query::And( - queries - .iter() - .map(_convert_query_to_unqualified) - .collect::>(), - ), - Query::Or(ref queries) => Query::Or( - queries - .iter() - .map(_convert_query_to_unqualified) - .collect::>(), - ), - Query::Not(ref query) => _convert_query_to_unqualified(query), - query => query.clone(), - } -} - -fn _convert_value_to_unqualified(tag_name: &str, tag_value: &str) -> String { - match tag_name { - "issuer_did" | "schema_issuer_did" => DidValue(tag_value.to_string()).to_unqualified().0, - "schema_id" => SchemaId(tag_value.to_string()).to_unqualified().0, - "cred_def_id" => { - CredentialDefinitionId(tag_value.to_string()) - .to_unqualified() - .0 - } - "rev_reg_id" => { - RevocationRegistryId(tag_value.to_string()) - .to_unqualified() - .0 - } - _ => tag_value.to_string(), - } -} - -fn _process_operator(restriction_op: &Query, version: &ProofRequestsVersion) -> Result<(), String> { - match restriction_op { - Query::Eq(ref tag_name, ref tag_value) - | Query::Neq(ref tag_name, ref tag_value) - | Query::Gt(ref tag_name, ref tag_value) - | Query::Gte(ref tag_name, ref tag_value) - | Query::Lt(ref tag_name, ref tag_value) - | Query::Lte(ref tag_name, ref tag_value) - | Query::Like(ref tag_name, ref tag_value) => { - _check_restriction(tag_name, tag_value, version) - } - Query::In(ref tag_name, ref tag_values) => { - tag_values - .iter() - .map(|tag_value| _check_restriction(tag_name, tag_value, version)) - .collect::, String>>()?; - Ok(()) - } - Query::And(ref operators) | Query::Or(ref operators) => { - operators - .iter() - .map(|operator| _process_operator(operator, version)) - .collect::, String>>()?; - Ok(()) - } - Query::Not(ref operator) => _process_operator(operator, version), - } -} - -fn _check_restriction( - tag_name: &str, - tag_value: &str, - version: &ProofRequestsVersion, -) -> Result<(), String> { - if *version == ProofRequestsVersion::V1 - && Credential::QUALIFIABLE_TAGS.contains(&tag_name) - && qualifier::is_fully_qualified(tag_value) - { - return Err( - "Proof Request validation failed: fully qualified identifiers can not be used for \ - Proof Request of the first version. Please, set \"ver\":\"2.0\" to use fully \ - qualified identifiers." 
- .to_string(), - ); - } - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - - mod invalid_nonce { - use super::*; - - #[test] - fn proof_request_valid_nonce() { - let proof_req_json = json!({ - "nonce": "123456", - "name": "name", - "version": "2.0", - "requested_attributes": {}, - "requested_predicates": {}, - }) - .to_string(); - - let proof_req: ProofRequest = serde_json::from_str(&proof_req_json).unwrap(); - let payload = match proof_req { - ProofRequest::ProofRequestV1(p) => p, - ProofRequest::ProofRequestV2(p) => p, - }; - - assert_eq!(payload.nonce.to_dec().unwrap(), "123456"); - } - - #[test] - fn proof_request_invalid_nonce() { - let proof_req_json = json!({ - "nonce": "123abc", - "name": "name", - "version": "2.0", - "requested_attributes": {}, - "requested_predicates": {}, - }) - .to_string(); - - serde_json::from_str::(&proof_req_json).unwrap_err(); - } - } - - mod to_unqualified { - use super::*; - - const DID_QUALIFIED: &str = "did:indy:NcYxiDXkpYi6ov5FcYDi1e"; - const DID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e"; - const SCHEMA_ID_QUALIFIED: &str = - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0"; - const SCHEMA_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0"; - const CRED_DEF_ID_QUALIFIED: &str = - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag"; - const CRED_DEF_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:3:CL:1:tag"; - const REV_REG_ID_QUALIFIED: &str = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/\ - REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/\ - v0/SCHEMA/gvt/1.0/tag/TAG_1"; - const REV_REG_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:\ - CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:CL_ACCUM:\ - TAG_1"; - - #[test] - fn proof_request_to_unqualified() { - let mut requested_attributes: HashMap = HashMap::new(); - requested_attributes.insert( - "attr1_referent".to_string(), - AttributeInfo { - name: Some("name".to_string()), - names: None, - restrictions: Some(Query::And(vec![ - Query::Eq("issuer_did".to_string(), DID_QUALIFIED.to_string()), - Query::Eq("schema_id".to_string(), SCHEMA_ID_QUALIFIED.to_string()), - Query::Eq("cred_def_id".to_string(), CRED_DEF_ID_QUALIFIED.to_string()), - ])), - non_revoked: None, - }, - ); - - let mut requested_predicates: HashMap = HashMap::new(); - requested_predicates.insert( - "predicate1_referent".to_string(), - PredicateInfo { - name: "age".to_string(), - p_type: PredicateTypes::GE, - p_value: 0, - restrictions: Some(Query::And(vec![ - Query::Eq("schema_issuer_did".to_string(), DID_QUALIFIED.to_string()), - Query::Eq("rev_reg_id".to_string(), REV_REG_ID_QUALIFIED.to_string()), - ])), - non_revoked: None, - }, - ); - - let proof_request = ProofRequest::ProofRequestV2(ProofRequestPayload { - nonce: Nonce::new().unwrap(), - name: "proof_request_to_unqualified".to_string(), - version: "1.0".to_string(), - requested_attributes, - requested_predicates, - non_revoked: None, - }); - - let mut expected_requested_attributes: HashMap = HashMap::new(); - expected_requested_attributes.insert( - "attr1_referent".to_string(), - AttributeInfo { - name: Some("name".to_string()), - names: None, - restrictions: Some(Query::And(vec![ - Query::Eq("issuer_did".to_string(), DID_UNQUALIFIED.to_string()), - Query::Eq("schema_id".to_string(), SCHEMA_ID_UNQUALIFIED.to_string()), - Query::Eq( - "cred_def_id".to_string(), - CRED_DEF_ID_UNQUALIFIED.to_string(), - ), - ])), - non_revoked: None, - }, - ); - - let mut expected_requested_predicates: HashMap = HashMap::new(); - 
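// Illustrative sketch, not from the original test code: the proof-request JSON shape
// accepted by the Deserialize impl above. "ver" selects the payload version (absent
// means 1.0), and per _check_restriction fully qualified identifiers inside
// restrictions are only allowed together with "ver": "2.0". The restriction here is a
// single WQL equality clause; identifier values reuse this test's constants.
fn proof_request_json_sketch() {
    use serde_json::json;

    let proof_request = json!({
        "ver": "2.0",
        "nonce": "123456",
        "name": "proof_request_sketch",
        "version": "1.0",
        "requested_attributes": {
            "attr1_referent": {
                "name": "name",
                "restrictions": {
                    "cred_def_id": "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag"
                }
            }
        },
        "requested_predicates": {
            "predicate1_referent": { "name": "age", "p_type": ">=", "p_value": 0 }
        }
    });

    println!("{}", proof_request);
}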
expected_requested_predicates.insert( - "predicate1_referent".to_string(), - PredicateInfo { - name: "age".to_string(), - p_type: PredicateTypes::GE, - p_value: 0, - restrictions: Some(Query::And(vec![ - Query::Eq("schema_issuer_did".to_string(), DID_UNQUALIFIED.to_string()), - Query::Eq("rev_reg_id".to_string(), REV_REG_ID_UNQUALIFIED.to_string()), - ])), - non_revoked: None, - }, - ); - - let proof_request = proof_request.to_unqualified(); - assert_eq!( - expected_requested_attributes, - proof_request.value().requested_attributes - ); - assert_eq!( - expected_requested_predicates, - proof_request.value().requested_predicates - ); - assert_eq!(ProofRequestsVersion::V2, proof_request.version()); - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/requested_credential.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/requested_credential.rs deleted file mode 100644 index 2f2ee0df53..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/requested_credential.rs +++ /dev/null @@ -1,21 +0,0 @@ -use std::collections::HashMap; - -#[derive(Debug, Deserialize, Serialize)] -pub struct RequestedCredentials { - pub self_attested_attributes: HashMap, - pub requested_attributes: HashMap, - pub requested_predicates: HashMap, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct RequestedAttribute { - pub cred_id: String, - pub timestamp: Option, - pub revealed: bool, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Hash, Clone)] -pub struct ProvingCredentialKey { - pub cred_id: String, - pub timestamp: Option, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry.rs deleted file mode 100644 index f9a1042dd9..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry.rs +++ /dev/null @@ -1,42 +0,0 @@ -use std::collections::HashMap; - -use ursa::cl::RevocationRegistry as CryptoRevocationRegistry; - -use super::revocation_registry_definition::RevocationRegistryId; - -#[derive(Debug, Serialize, Deserialize)] -pub struct RevocationRegistryV1 { - pub value: CryptoRevocationRegistry, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum RevocationRegistry { - #[serde(rename = "1.0")] - RevocationRegistryV1(RevocationRegistryV1), -} - -impl From for RevocationRegistryV1 { - fn from(rev_reg: RevocationRegistry) -> Self { - match rev_reg { - RevocationRegistry::RevocationRegistryV1(rev_reg) => rev_reg, - } - } -} - -pub type RevocationRegistries = HashMap>; - -pub fn rev_regs_map_to_rev_regs_local_map( - rev_regs: RevocationRegistries, -) -> HashMap> { - rev_regs - .into_iter() - .map(|(rev_reg_id, rev_reg_to_timespams)| { - let val = rev_reg_to_timespams - .into_iter() - .map(|(timestamp, rev_reg)| (timestamp, RevocationRegistryV1::from(rev_reg))) - .collect(); - (rev_reg_id, val) - }) - .collect() -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs deleted file mode 100644 index 003d0af9f6..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs +++ /dev/null @@ -1,323 +0,0 @@ -use std::collections::{HashMap, HashSet}; - -use indy_api_types::errors::{err_msg, IndyErrorKind, IndyResult}; -use lazy_static::lazy_static; -use regex::Regex; -use ursa::cl::{RevocationKeyPrivate, RevocationKeyPublic}; - -use super::{ - 
super::crypto::did::DidValue, credential_definition::CredentialDefinitionId, indy_identifiers, - DELIMITER, -}; -use crate::utils::qualifier; - -pub const CL_ACCUM: &str = "CL_ACCUM"; -pub const REV_REG_DEG_MARKER: &str = "4"; - -lazy_static! { - static ref QUALIFIED_REV_REG_ID: Regex = Regex::new( - "(^revreg:(?P[a-z0-9]+):)?(?P.+):4:(?P.+):(?P.+):\ - (?P.+)$" - ) - .unwrap(); -} - -#[derive(Deserialize, Debug, Serialize)] -pub struct RevocationRegistryConfig { - pub issuance_type: Option, - pub max_cred_num: Option, -} - -#[allow(non_camel_case_types)] -#[derive(Deserialize, Debug, Serialize, PartialEq, Clone)] -pub enum IssuanceType { - ISSUANCE_BY_DEFAULT, - ISSUANCE_ON_DEMAND, -} - -impl IssuanceType { - pub fn to_bool(&self) -> bool { - self.clone() == IssuanceType::ISSUANCE_BY_DEFAULT - } -} - -#[allow(non_camel_case_types)] -#[derive(Deserialize, Debug, Serialize, PartialEq)] -pub enum RegistryType { - CL_ACCUM, -} - -impl RegistryType { - pub fn to_str(&self) -> &'static str { - match *self { - RegistryType::CL_ACCUM => CL_ACCUM, - } - } -} - -#[derive(Deserialize, Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct RevocationRegistryDefinitionValue { - pub issuance_type: IssuanceType, - pub max_cred_num: u32, - pub public_keys: RevocationRegistryDefinitionValuePublicKeys, - pub tails_hash: String, - pub tails_location: String, -} - -#[derive(Deserialize, Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct RevocationRegistryDefinitionValuePublicKeys { - pub accum_key: RevocationKeyPublic, -} - -#[derive(Deserialize, Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct RevocationRegistryDefinitionV1 { - pub id: RevocationRegistryId, - pub revoc_def_type: RegistryType, - pub tag: String, - pub cred_def_id: CredentialDefinitionId, - pub value: RevocationRegistryDefinitionValue, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum RevocationRegistryDefinition { - #[serde(rename = "1.0")] - RevocationRegistryDefinitionV1(RevocationRegistryDefinitionV1), -} - -impl RevocationRegistryDefinition { - pub fn to_unqualified(self) -> RevocationRegistryDefinition { - match self { - RevocationRegistryDefinition::RevocationRegistryDefinitionV1(rev_ref_def) => { - RevocationRegistryDefinition::RevocationRegistryDefinitionV1( - RevocationRegistryDefinitionV1 { - id: rev_ref_def.id.to_unqualified(), - revoc_def_type: rev_ref_def.revoc_def_type, - tag: rev_ref_def.tag, - cred_def_id: rev_ref_def.cred_def_id.to_unqualified(), - value: rev_ref_def.value, - }, - ) - } - } - } -} - -impl From for RevocationRegistryDefinitionV1 { - fn from(rev_reg_def: RevocationRegistryDefinition) -> Self { - match rev_reg_def { - RevocationRegistryDefinition::RevocationRegistryDefinitionV1(rev_reg_def) => { - rev_reg_def - } - } - } -} - -pub type RevocationRegistryDefinitions = - HashMap; - -pub fn rev_reg_defs_map_to_rev_reg_defs_v1_map( - rev_reg_defs: RevocationRegistryDefinitions, -) -> HashMap { - rev_reg_defs - .into_iter() - .map(|(rev_reg_id, rev_reg_def)| { - ( - rev_reg_id, - RevocationRegistryDefinitionV1::from(rev_reg_def), - ) - }) - .collect() -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct RevocationRegistryDefinitionPrivate { - pub value: RevocationKeyPrivate, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct RevocationRegistryInfo { - pub id: RevocationRegistryId, - pub curr_id: u32, - pub used_ids: HashSet, -} - -qualifiable_type!(RevocationRegistryId); - -impl RevocationRegistryId { - pub const PREFIX: 
&'static str = "/anoncreds/v0/REV_REG_DEF/"; - - pub fn new( - did: &DidValue, - cred_def_id: &CredentialDefinitionId, - rev_reg_type: &str, - tag: &str, - ) -> IndyResult { - match did.get_method() { - Some(method) if method.starts_with("indy") => { - if let Some((_issuer_did, _cl_type, schema_id, creddef_tag)) = cred_def_id.parts() { - Ok(RevocationRegistryId( - did.0.to_owned() - + "/anoncreds/v0/REV_REG_DEF/" - + &schema_id.0 - + "/" - + &creddef_tag - + "/" - + tag, - )) - } else { - Err(err_msg( - IndyErrorKind::InvalidStructure, - "Can't parse Indy CredDef to construct RevReg ID", - )) - } - } - None => Ok(RevocationRegistryId(format!( - "{}{}{}{}{}{}{}{}{}", - did.0, - DELIMITER, - REV_REG_DEG_MARKER, - DELIMITER, - cred_def_id.0, - DELIMITER, - rev_reg_type, - DELIMITER, - tag - ))), - Some(method) => Err(err_msg( - IndyErrorKind::InvalidStructure, - format!("Unsupported DID method {} for RevReg ID", method), - )), - } - } - - pub fn parts(&self) -> Option<(DidValue, CredentialDefinitionId, String, String)> { - trace!("RevocationRegistryId::parts >> self.0 {}", self.0); - if let Some(parts) = indy_identifiers::try_parse_indy_rev_reg(self.0.as_str()) { - trace!( - "RevocationRegistryId::parts: parsed Indy RevReg {:?}", - parts - ); - return Some(parts); - } - - QUALIFIED_REV_REG_ID.captures(&self.0).map(|caps| { - ( - DidValue(caps["did"].to_string()), - CredentialDefinitionId(caps["cred_def_id"].to_string()), - caps["rev_reg_type"].to_string(), - caps["tag"].to_string(), - ) - }) - } - - pub fn to_unqualified(&self) -> RevocationRegistryId { - match self.parts() { - Some((did, cred_def_id, rev_reg_type, tag)) => RevocationRegistryId::new( - &did.to_unqualified(), - &cred_def_id.to_unqualified(), - &rev_reg_type, - &tag, - ) - .expect("Can't create unqualified RevocationRegistryId"), - None => self.clone(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn _did() -> DidValue { - DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _rev_reg_type() -> String { - "CL_ACCUM".to_string() - } - - fn _tag() -> String { - "TAG_1".to_string() - } - - fn _did_qualified() -> DidValue { - DidValue("did:sov:NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _cred_def_id_unqualified() -> CredentialDefinitionId { - CredentialDefinitionId( - "NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag".to_string(), - ) - } - - fn _cred_def_id_qualified() -> CredentialDefinitionId { - CredentialDefinitionId( - "creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov:\ - NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag" - .to_string(), - ) - } - - fn _rev_reg_id_unqualified() -> RevocationRegistryId { - RevocationRegistryId( - "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.\ - 0:tag:CL_ACCUM:TAG_1" - .to_string(), - ) - } - - fn _rev_reg_id_qualified() -> RevocationRegistryId { - RevocationRegistryId( - "revreg:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:4:creddef:sov:did:sov:\ - NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:\ - CL_ACCUM:TAG_1" - .to_string(), - ) - } - - mod to_unqualified { - use super::*; - - #[test] - fn test_rev_reg_id_parts_for_id_as_unqualified() { - assert_eq!( - _rev_reg_id_unqualified(), - _rev_reg_id_unqualified().to_unqualified() - ); - } - - #[test] - fn test_rev_reg_id_parts_for_id_as_qualified() { - assert_eq!( - _rev_reg_id_unqualified(), - _rev_reg_id_qualified().to_unqualified() - ); - } - } - - mod parts { - use super::*; - - #[test] - fn 
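RevocationRegistryId::new above joins the DID, the "4" revocation registry marker, the credential definition id, the registry type and the tag with ':' when the DID is unqualified. A minimal standalone sketch of that format (hypothetical helper, not the deleted crate's API); the expected value matches the _rev_reg_id_unqualified fixture in the deleted tests.

fn unqualified_rev_reg_id(did: &str, cred_def_id: &str, rev_reg_type: &str, tag: &str) -> String {
    const DELIMITER: &str = ":";
    const REV_REG_MARKER: &str = "4"; // the "4" ledger marker for revocation registry definitions
    [did, REV_REG_MARKER, cred_def_id, rev_reg_type, tag].join(DELIMITER)
}

fn main() {
    let id = unqualified_rev_reg_id(
        "NcYxiDXkpYi6ov5FcYDi1e",
        "NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag",
        "CL_ACCUM",
        "TAG_1",
    );
    assert_eq!(
        id,
        "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:CL_ACCUM:TAG_1"
    );
}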
test_rev_reg_id_parts_for_id_as_unqualified() { - let (did, cred_def_id, rev_reg_type, tag) = _rev_reg_id_unqualified().parts().unwrap(); - assert_eq!(_did(), did); - assert_eq!(_cred_def_id_unqualified(), cred_def_id); - assert_eq!(_rev_reg_type(), rev_reg_type); - assert_eq!(_tag(), tag); - } - - #[test] - fn test_rev_reg_id_parts_for_id_as_qualified() { - let (did, cred_def_id, rev_reg_type, tag) = _rev_reg_id_qualified().parts().unwrap(); - assert_eq!(_did_qualified(), did); - assert_eq!(_cred_def_id_qualified(), cred_def_id); - assert_eq!(_rev_reg_type(), rev_reg_type); - assert_eq!(_tag(), tag); - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs deleted file mode 100644 index f316d2cc6c..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs +++ /dev/null @@ -1,22 +0,0 @@ -use ursa::cl::RevocationRegistryDelta as RegistryDelta; - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct RevocationRegistryDeltaV1 { - pub value: RegistryDelta, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum RevocationRegistryDelta { - #[serde(rename = "1.0")] - RevocationRegistryDeltaV1(RevocationRegistryDeltaV1), -} - -impl From for RevocationRegistryDeltaV1 { - fn from(rev_reg_delta: RevocationRegistryDelta) -> Self { - match rev_reg_delta { - RevocationRegistryDelta::RevocationRegistryDeltaV1(rev_reg_delta) => rev_reg_delta, - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/schema.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/schema.rs deleted file mode 100644 index d2823b1f0d..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/schema.rs +++ /dev/null @@ -1,255 +0,0 @@ -use std::collections::{HashMap, HashSet}; - -use indy_api_types::{ - errors::{IndyErrorKind, IndyResult}, - IndyError, -}; - -use super::{super::crypto::did::DidValue, indy_identifiers, DELIMITER}; -use crate::utils::qualifier; - -pub const MAX_ATTRIBUTES_COUNT: usize = 125; - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct SchemaV1 { - pub id: SchemaId, - pub name: String, - pub version: String, - #[serde(rename = "attrNames")] - pub attr_names: AttributeNames, - pub seq_no: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum Schema { - #[serde(rename = "1.0")] - SchemaV1(SchemaV1), -} - -impl Schema { - pub fn to_unqualified(self) -> Schema { - match self { - Schema::SchemaV1(schema) => Schema::SchemaV1(SchemaV1 { - id: schema.id.to_unqualified(), - name: schema.name, - version: schema.version, - attr_names: schema.attr_names, - seq_no: schema.seq_no, - }), - } - } -} - -impl From for SchemaV1 { - fn from(schema: Schema) -> Self { - match schema { - Schema::SchemaV1(schema) => schema, - } - } -} - -pub type Schemas = HashMap; - -pub fn schemas_map_to_schemas_v1_map(schemas: Schemas) -> HashMap { - schemas - .into_iter() - .map(|(schema_id, schema)| (schema_id, SchemaV1::from(schema))) - .collect() -} - -#[derive(Serialize, Deserialize, Debug, Clone, Default)] -pub struct AttributeNames(pub HashSet); - -impl AttributeNames { - pub fn new() -> Self { - AttributeNames(HashSet::new()) - } -} - -impl From> for AttributeNames { - fn from(attrs: HashSet) -> Self { - AttributeNames(attrs) - } -} - -impl From for HashSet { - fn from(value: AttributeNames) -> 
HashSet { - value.0 - } -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] -pub struct SchemaId(pub String); - -impl SchemaId { - pub const PREFIX: &'static str = "/anoncreds/v0/SCHEMA/"; - - pub fn get_method(&self) -> Option { - qualifier::method(&self.0) - } - - pub fn new(did: &DidValue, name: &str, version: &str) -> IndyResult { - const MARKER: &str = "2"; - match did.get_method() { - Some(method) if method.starts_with("indy") => Ok(SchemaId(format!( - "{}{}{}/{}", - did.0, - Self::PREFIX, - name, - version - ))), - Some(_method) => Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unsupported DID method", - )), - None => Ok(SchemaId(format!( - "{}:{}:{}:{}", - did.0, MARKER, name, version - ))), - } - } - - pub fn parts(&self) -> Option<(DidValue, String, String)> { - trace!("SchemaId::parts >> {:?}", self.0); - if let Some((did, name, ver)) = indy_identifiers::try_parse_indy_schema_id(&self.0) { - return Some((DidValue(did), name, ver)); - } - - let parts = self.0.split_terminator(DELIMITER).collect::>(); - - if parts.len() == 1 { - // 1 - return None; - } - - if parts.len() == 4 { - // NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0 - let did = parts[0].to_string(); - let name = parts[2].to_string(); - let version = parts[3].to_string(); - return Some((DidValue(did), name, version)); - } - - if parts.len() == 8 { - // schema:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0 - let did = parts[2..5].join(DELIMITER); - let name = parts[6].to_string(); - let version = parts[7].to_string(); - return Some((DidValue(did), name, version)); - } - - None - } - - pub fn qualify(&self, method: &str) -> IndyResult { - match self.parts() { - Some((did, name, version)) => SchemaId::new(&did.qualify(method), &name, &version), - None => Ok(self.clone()), - } - } - - pub fn to_unqualified(&self) -> SchemaId { - trace!("SchemaId::to_unqualified >> {}", &self.0); - match self.parts() { - Some((did, name, version)) => { - trace!( - "SchemaId::to_unqualified: parts {:?}", - (&did, &name, &version) - ); - SchemaId::new(&did.to_unqualified(), &name, &version) - .expect("Can't create unqualified SchemaId") - } - None => self.clone(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn _did() -> DidValue { - DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _did_qualified() -> DidValue { - DidValue("did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _schema_id_seq_no() -> SchemaId { - SchemaId("1".to_string()) - } - - fn _schema_id_unqualified() -> SchemaId { - SchemaId("NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0".to_string()) - } - - fn _schema_id_qualified() -> SchemaId { - SchemaId( - "did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0" - .to_string(), - ) - } - - fn _schema_id_invalid() -> SchemaId { - SchemaId("NcYxiDXkpYi6ov5FcYDi1e:2".to_string()) - } - - mod to_unqualified { - use super::*; - - #[test] - fn test_schema_id_unqualify_for_id_as_seq_no() { - assert_eq!(_schema_id_seq_no(), _schema_id_seq_no().to_unqualified()); - } - - #[test] - fn test_schema_id_parts_for_id_as_unqualified() { - assert_eq!( - _schema_id_unqualified(), - _schema_id_unqualified().to_unqualified() - ); - } - - #[test] - fn test_schema_id_parts_for_id_as_qualified() { - assert_eq!( - _schema_id_unqualified(), - _schema_id_qualified().to_unqualified() - ); - } - - #[test] - fn test_schema_id_parts_for_invalid_unqualified() { - assert_eq!(_schema_id_invalid(), _schema_id_invalid().to_unqualified()); - } - } - - mod parts { - use super::*; - - #[test] - fn 
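SchemaId::parts above recognises the unqualified layout "<did>:2:<name>:<version>", four ':'-separated parts with the schema marker "2" in second position. A standalone sketch of parsing that layout (hypothetical helper, slightly stricter than the deleted code because it checks the marker explicitly).

fn parse_unqualified_schema_id(id: &str) -> Option<(String, String, String)> {
    let parts: Vec<&str> = id.split(':').collect();
    if parts.len() == 4 && parts[1] == "2" {
        // did, name, version
        Some((parts[0].to_string(), parts[2].to_string(), parts[3].to_string()))
    } else {
        None
    }
}

fn main() {
    let (did, name, version) =
        parse_unqualified_schema_id("NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0").unwrap();
    assert_eq!(
        (did.as_str(), name.as_str(), version.as_str()),
        ("NcYxiDXkpYi6ov5FcYDi1e", "gvt", "1.0")
    );
    // A bare ledger sequence number such as "1" carries no structure and yields None.
    assert!(parse_unqualified_schema_id("1").is_none());
}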
test_schema_id_parts_for_id_as_seq_no() { - assert!(_schema_id_seq_no().parts().is_none()); - } - - #[test] - fn test_schema_id_parts_for_id_as_unqualified() { - let (did, _, _) = _schema_id_unqualified().parts().unwrap(); - assert_eq!(_did(), did); - } - - #[test] - fn test_schema_id_parts_for_id_as_qualified() { - let (did, _, _) = _schema_id_qualified().parts().unwrap(); - assert_eq!(_did_qualified(), did); - } - - #[test] - fn test_schema_id_parts_for_invalid_unqualified() { - assert!(_schema_id_invalid().parts().is_none()); - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/cache.rs b/aries/misc/legacy/libvdrtools/src/domain/cache.rs deleted file mode 100644 index deea3c64be..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/cache.rs +++ /dev/null @@ -1,15 +0,0 @@ -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct PurgeOptions { - pub max_age: Option, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct GetCacheOptions { - pub no_cache: Option, // Skip usage of cache, - pub no_update: Option, // Use only cached data, do not try to update. - pub no_store: Option, // Skip storing fresh data if updated - pub min_fresh: Option, /* Return cached data if not older than this many seconds. -1 - * means do not check age. */ -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/crypto/did.rs b/aries/misc/legacy/libvdrtools/src/domain/crypto/did.rs deleted file mode 100644 index 9c48c7cf08..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/crypto/did.rs +++ /dev/null @@ -1,128 +0,0 @@ -use indy_api_types::errors::{IndyError, IndyErrorKind, IndyResult}; - -use crate::utils::qualifier; - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] -pub struct DidMethod(pub String); - -#[derive(Serialize, Deserialize, Clone, Debug, Default)] -pub struct MyDidInfo { - pub did: Option, - pub seed: Option, - pub crypto_type: Option, - pub cid: Option, - pub method_name: Option, - pub ledger_type: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct TheirDidInfo { - pub did: DidValue, - pub verkey: Option, -} - -impl TheirDidInfo { - pub fn new(did: DidValue, verkey: Option) -> TheirDidInfo { - TheirDidInfo { did, verkey } - } -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct Did { - pub did: DidValue, - pub verkey: String, -} - -impl Did { - pub fn new(did: DidValue, verkey: String) -> Did { - Did { did, verkey } - } -} - -qualifiable_type!(DidValue); - -impl DidValue { - pub const PREFIX: &'static str = "did"; - - pub fn new(did: &str, ledger_type: Option<&str>, method: Option<&str>) -> IndyResult { - match (ledger_type, method) { - (Some(ledger_type_), Some(method_)) => { - Ok(DidValue(did.to_string()).set_ledger_and_method(ledger_type_, method_)) - } - (None, Some(method_)) => Ok(DidValue(did.to_string()).set_method(method_)), - (None, None) => Ok(DidValue(did.to_string())), - (Some(_), None) => Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Ledger type can not be specified if DID method is undefined", - )), - } - } - - pub fn to_short(&self) -> ShortDidValue { - ShortDidValue(self.to_unqualified().0) - } - - pub fn qualify(&self, method: &str) -> DidValue { - self.set_method(method) - } - - pub fn to_unqualified(&self) -> DidValue { - DidValue(qualifier::to_unqualified(&self.0)) - } - - pub fn is_abbreviatable(&self) -> bool { - match self.get_method() { - Some(ref method) if method.starts_with("sov") || 
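DidValue::qualify and to_unqualified above add or strip a "did:<method>:" style prefix; the real implementation goes through the qualifier regex, which also understands ledger and namespace segments. A simplified standalone sketch of the round trip (hypothetical helpers, not the deleted crate's API).

fn qualify_did(unqualified: &str, method: &str) -> String {
    format!("did:{}:{}", method, unqualified)
}

fn unqualify_did(did: &str) -> &str {
    if did.starts_with("did:") {
        // Keep only the trailing identifier segment.
        did.rsplit(':').next().unwrap_or(did)
    } else {
        did
    }
}

fn main() {
    let qualified = qualify_did("NcYxiDXkpYi6ov5FcYDi1e", "sov");
    assert_eq!(qualified, "did:sov:NcYxiDXkpYi6ov5FcYDi1e");
    assert_eq!(unqualify_did(&qualified), "NcYxiDXkpYi6ov5FcYDi1e");
    assert_eq!(unqualify_did("NcYxiDXkpYi6ov5FcYDi1e"), "NcYxiDXkpYi6ov5FcYDi1e");
}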
method.starts_with("indy") => true, - Some(_) => false, - None => true, - } - } -} - -qualifiable_type!(ShortDidValue); - -impl ShortDidValue { - pub const PREFIX: &'static str = "did"; - - pub fn qualify(&self, method: Option) -> DidValue { - match method { - Some(method_) => DidValue(self.set_method(&method_).0), - None => DidValue(self.0.to_string()), - } - } -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct DidMetadata { - pub value: String, -} - -#[derive(Serialize, Clone, Debug)] -#[serde(rename_all = "camelCase")] -pub struct DidWithMeta { - pub did: DidValue, - pub verkey: String, - pub temp_verkey: Option, - pub metadata: Option, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct TheirDid { - pub did: DidValue, - pub verkey: String, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct TemporaryDid { - pub did: DidValue, - pub verkey: String, -} - -impl From for Did { - fn from(temp_did: TemporaryDid) -> Self { - Did { - did: temp_did.did, - verkey: temp_did.verkey, - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/crypto/key.rs b/aries/misc/legacy/libvdrtools/src/domain/crypto/key.rs deleted file mode 100644 index 050b96e448..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/crypto/key.rs +++ /dev/null @@ -1,38 +0,0 @@ -extern crate zeroize; - -use self::zeroize::Zeroize; - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct Key { - pub verkey: String, - pub signkey: String, -} - -impl Key { - pub fn new(verkey: String, signkey: String) -> Key { - Key { verkey, signkey } - } -} - -impl Zeroize for Key { - fn zeroize(&mut self) { - self.signkey.zeroize(); - } -} - -impl Drop for Key { - fn drop(&mut self) { - self.signkey.zeroize(); - } -} - -#[derive(Serialize, Deserialize, Debug, Default)] -pub struct KeyInfo { - pub seed: Option, - pub crypto_type: Option, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct KeyMetadata { - pub value: String, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/crypto/mod.rs b/aries/misc/legacy/libvdrtools/src/domain/crypto/mod.rs deleted file mode 100644 index b529c85c74..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/crypto/mod.rs +++ /dev/null @@ -1,11 +0,0 @@ -pub mod did; -pub mod key; -pub mod pack; - -#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Clone)] -pub enum CryptoTypes { - #[serde(rename = "ed25519")] - Ed25519, - #[serde(rename = "secp256k1")] - Secp256k1, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/crypto/pack.rs b/aries/misc/legacy/libvdrtools/src/domain/crypto/pack.rs deleted file mode 100644 index 195c8b1c98..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/crypto/pack.rs +++ /dev/null @@ -1,40 +0,0 @@ -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct JWE { - pub protected: String, - pub iv: String, - pub ciphertext: String, - pub tag: String, -} - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct Recipient { - pub encrypted_key: String, - pub header: Header, -} - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct Header { - pub kid: String, - #[serde(default)] - #[serde(skip_serializing_if = "Option::is_none")] - pub iv: Option, - #[serde(default)] - #[serde(skip_serializing_if = "Option::is_none")] - pub sender: Option, -} - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct Protected { - pub enc: String, - pub typ: String, - pub alg: String, - pub recipients: Vec, -} - -#[derive(Serialize, Deserialize, Debug, Clone, 
Eq, PartialEq)] -pub struct UnpackMessage { - pub message: String, - pub recipient_verkey: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub sender_verkey: Option, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/mod.rs b/aries/misc/legacy/libvdrtools/src/domain/mod.rs deleted file mode 100644 index 1386661e11..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod anoncreds; -pub mod cache; -pub mod crypto; - -#[derive(Debug, Serialize, Deserialize)] -pub struct IndyConfig { - pub crypto_thread_pool_size: Option, - pub collect_backtrace: Option, -} diff --git a/aries/misc/legacy/libvdrtools/src/lib.rs b/aries/misc/legacy/libvdrtools/src/lib.rs deleted file mode 100644 index 006e3b81f4..0000000000 --- a/aries/misc/legacy/libvdrtools/src/lib.rs +++ /dev/null @@ -1,109 +0,0 @@ -#[macro_use] -extern crate log; - -#[macro_use] -extern crate serde_derive; - -#[macro_use] -extern crate indy_utils; - -pub use indy_api_types as types; -pub use indy_wallet; - -#[macro_use] -mod utils; - -#[macro_use] -mod controllers; -pub mod domain; -mod services; - -use std::sync::Arc; - -pub use domain::{ - anoncreds::{ - credential::{AttributeValues, Credential, CredentialValues}, - credential_definition::{ - CredentialDefinition, CredentialDefinitionCorrectnessProof, CredentialDefinitionData, - CredentialDefinitionId, CredentialDefinitionPrivateKey, CredentialDefinitionV1, - SignatureType, - }, - credential_offer::CredentialOffer, - credential_request::{CredentialRequest, CredentialRequestMetadata}, - master_secret::MasterSecret, - revocation_registry::{RevocationRegistry, RevocationRegistryV1}, - revocation_registry_definition::{ - IssuanceType, RegistryType, RevocationRegistryConfig, RevocationRegistryDefinition, - RevocationRegistryDefinitionPrivate, RevocationRegistryDefinitionV1, - RevocationRegistryDefinitionValue, RevocationRegistryDefinitionValuePublicKeys, - RevocationRegistryId, RevocationRegistryInfo, - }, - revocation_registry_delta::{RevocationRegistryDelta, RevocationRegistryDeltaV1}, - schema::{AttributeNames, Schema, SchemaId, SchemaV1}, - }, - crypto::{ - did::{DidMethod, DidValue, MyDidInfo}, - key::KeyInfo, - pack::JWE, - }, -}; -pub use indy_api_types::{ - CommandHandle, IndyError, SearchHandle, WalletHandle, INVALID_COMMAND_HANDLE, - INVALID_SEARCH_HANDLE, INVALID_WALLET_HANDLE, -}; -pub use indy_wallet::WalletRecord; -use lazy_static::lazy_static; - -use crate::{ - controllers::{CryptoController, DidController, NonSecretsController, WalletController}, - services::{CryptoService, WalletService}, -}; - -// Global (lazy inited) instance of Locator -lazy_static! 
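The pack format above is a JSON envelope: a protected header, IV, ciphertext and tag, plus per-recipient encrypted keys. A sketch of round-tripping that shape with serde_json, using a standalone copy of the JWE fields rather than the deleted crate's types; the field values are made-up base64url examples.

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Jwe {
    protected: String,
    iv: String,
    ciphertext: String,
    tag: String,
}

fn main() -> serde_json::Result<()> {
    let jwe = Jwe {
        protected: "eyJlbmMiOiJ4Y2hhY2hhMjBwb2x5MTMwNV9pZXRmIn0".to_string(),
        iv: "aXYtZXhhbXBsZQ".to_string(),
        ciphertext: "Y2lwaGVydGV4dC1leGFtcGxl".to_string(),
        tag: "dGFnLWV4YW1wbGU".to_string(),
    };
    let json = serde_json::to_string(&jwe)?;
    let parsed: Jwe = serde_json::from_str(&json)?;
    assert_eq!(jwe, parsed);
    Ok(())
}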
{ - static ref LOCATOR: Locator = Locator::new(); -} - -pub struct Locator { - pub crypto_controller: CryptoController, - pub did_controller: DidController, - pub wallet_controller: WalletController, - pub non_secret_controller: NonSecretsController, -} - -impl Locator { - pub fn instance() -> &'static Locator { - &LOCATOR - } - - fn new() -> Locator { - info!("new >"); - - let crypto_service = Arc::new(CryptoService::new()); - let wallet_service = Arc::new(WalletService::new()); - - let crypto_controller = - CryptoController::new(wallet_service.clone(), crypto_service.clone()); - - let did_controller = DidController::new(wallet_service.clone(), crypto_service.clone()); - - let wallet_controller = WalletController::new(wallet_service.clone(), crypto_service); - let non_secret_controller = NonSecretsController::new(wallet_service); - - let res = Locator { - crypto_controller, - did_controller, - wallet_controller, - non_secret_controller, - }; - - info!("new <"); - res - } -} - -impl Drop for Locator { - fn drop(&mut self) { - info!(target: "Locator", "drop <>"); - } -} diff --git a/aries/misc/legacy/libvdrtools/src/services/crypto/ed25519.rs b/aries/misc/legacy/libvdrtools/src/services/crypto/ed25519.rs deleted file mode 100644 index 00c3ecbb96..0000000000 --- a/aries/misc/legacy/libvdrtools/src/services/crypto/ed25519.rs +++ /dev/null @@ -1,98 +0,0 @@ -use indy_api_types::errors::IndyError; -use indy_utils::crypto::{ed25519_box, ed25519_sign, sealedbox}; - -use super::CryptoType; - -pub struct ED25519CryptoType {} - -impl ED25519CryptoType { - pub fn new() -> ED25519CryptoType { - ED25519CryptoType {} - } -} - -impl CryptoType for ED25519CryptoType { - fn crypto_box( - &self, - sk: &ed25519_sign::SecretKey, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - nonce: &ed25519_box::Nonce, - ) -> Result, IndyError> { - ed25519_box::encrypt( - &ed25519_sign::sk_to_curve25519(sk)?, - &ed25519_sign::vk_to_curve25519(vk)?, - doc, - nonce, - ) - } - - fn crypto_box_open( - &self, - sk: &ed25519_sign::SecretKey, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - nonce: &ed25519_box::Nonce, - ) -> Result, IndyError> { - ed25519_box::decrypt( - &ed25519_sign::sk_to_curve25519(sk)?, - &ed25519_sign::vk_to_curve25519(vk)?, - doc, - nonce, - ) - } - - fn gen_nonce(&self) -> ed25519_box::Nonce { - ed25519_box::gen_nonce() - } - - fn create_key( - &self, - seed: Option<&ed25519_sign::Seed>, - ) -> Result<(ed25519_sign::PublicKey, ed25519_sign::SecretKey), IndyError> { - ed25519_sign::create_key_pair_for_signature(seed) - } - - fn sign( - &self, - sk: &ed25519_sign::SecretKey, - doc: &[u8], - ) -> Result { - ed25519_sign::sign(sk, doc) - } - - fn verify( - &self, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - signature: &ed25519_sign::Signature, - ) -> Result { - ed25519_sign::verify(vk, doc, signature) - } - - fn crypto_box_seal( - &self, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - ) -> Result, IndyError> { - sealedbox::encrypt(&ed25519_sign::vk_to_curve25519(vk)?, doc) - } - - fn crypto_box_seal_open( - &self, - vk: &ed25519_sign::PublicKey, - sk: &ed25519_sign::SecretKey, - doc: &[u8], - ) -> Result, IndyError> { - sealedbox::decrypt( - &ed25519_sign::vk_to_curve25519(vk)?, - &ed25519_sign::sk_to_curve25519(sk)?, - doc, - ) - } - - fn validate_key(&self, _vk: &ed25519_sign::PublicKey) -> Result<(), IndyError> { - // TODO: FIXME: Validate key - Ok(()) - } -} diff --git a/aries/misc/legacy/libvdrtools/src/services/crypto/mod.rs b/aries/misc/legacy/libvdrtools/src/services/crypto/mod.rs deleted file mode 
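The Locator above wires controllers to shared services exactly once behind a lazily initialised global. A minimal sketch of that pattern with lazy_static and Arc, using hypothetical service and controller types rather than the deleted ones.

use std::sync::Arc;

use lazy_static::lazy_static;

struct WalletService;

struct DidController {
    _wallet: Arc<WalletService>,
}

struct Locator {
    did_controller: DidController,
}

impl Locator {
    fn new() -> Locator {
        let wallet_service = Arc::new(WalletService);
        Locator {
            did_controller: DidController {
                _wallet: wallet_service,
            },
        }
    }

    fn instance() -> &'static Locator {
        &LOCATOR
    }
}

lazy_static! {
    static ref LOCATOR: Locator = Locator::new();
}

fn main() {
    // The first call constructs the singleton; later calls reuse the same instance.
    let locator = Locator::instance();
    let _controller = &locator.did_controller;
}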
100644 index af71363f61..0000000000 --- a/aries/misc/legacy/libvdrtools/src/services/crypto/mod.rs +++ /dev/null @@ -1,1183 +0,0 @@ -mod ed25519; - -use std::{collections::HashMap, str}; - -use async_std::sync::RwLock; -use ed25519::ED25519CryptoType; -use hex::FromHex; -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::{ - base64, chacha20poly1305_ietf, chacha20poly1305_ietf::gen_nonce_and_encrypt_detached, - ed25519_box, ed25519_sign, -}; - -use crate::{ - domain::crypto::{ - did::{Did, DidValue, MyDidInfo, TheirDid, TheirDidInfo}, - key::{Key, KeyInfo}, - }, - utils::crypto::{ - base58::{DecodeBase58, ToBase58}, - verkey_builder::{build_full_verkey, split_verkey, verkey_get_cryptoname}, - }, -}; - -const DEFAULT_CRYPTO_TYPE: &str = "ed25519"; - -//TODO fix this crypto trait so it matches the functions below -//TODO create a second crypto trait for additional functions -trait CryptoType: Send + Sync { - fn crypto_box( - &self, - sk: &ed25519_sign::SecretKey, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - nonce: &ed25519_box::Nonce, - ) -> IndyResult>; - - fn crypto_box_open( - &self, - sk: &ed25519_sign::SecretKey, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - nonce: &ed25519_box::Nonce, - ) -> IndyResult>; - - fn gen_nonce(&self) -> ed25519_box::Nonce; - - fn create_key( - &self, - seed: Option<&ed25519_sign::Seed>, - ) -> IndyResult<(ed25519_sign::PublicKey, ed25519_sign::SecretKey)>; - - fn validate_key(&self, _vk: &ed25519_sign::PublicKey) -> IndyResult<()>; - - fn sign(&self, sk: &ed25519_sign::SecretKey, doc: &[u8]) - -> IndyResult; - - fn verify( - &self, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - signature: &ed25519_sign::Signature, - ) -> IndyResult; - - fn crypto_box_seal(&self, vk: &ed25519_sign::PublicKey, doc: &[u8]) -> IndyResult>; - - fn crypto_box_seal_open( - &self, - vk: &ed25519_sign::PublicKey, - sk: &ed25519_sign::SecretKey, - doc: &[u8], - ) -> IndyResult>; -} - -pub struct CryptoService { - crypto_types: RwLock>>, -} - -impl CryptoService { - pub(crate) fn new() -> CryptoService { - let crypto_types = { - let mut types = HashMap::<&'static str, Box>::new(); - types.insert(DEFAULT_CRYPTO_TYPE, Box::new(ED25519CryptoType::new())); - RwLock::new(types) - }; - - CryptoService { crypto_types } - } - - pub(crate) fn defualt_crypto_type() -> &'static str { - DEFAULT_CRYPTO_TYPE - } - - pub(crate) async fn create_key(&self, key_info: &KeyInfo) -> IndyResult { - trace!("create_key > key_info {:?}", secret!(key_info)); - - let crypto_type_name = key_info - .crypto_type - .as_deref() - .unwrap_or(DEFAULT_CRYPTO_TYPE); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!("KeyInfo contains unknown crypto: {}", crypto_type_name), - ) - })?; - - let seed = self.convert_seed(key_info.seed.as_ref().map(String::as_ref))?; - let (vk, sk) = crypto_type.create_key(seed.as_ref())?; - let mut vk = vk[..].to_base58(); - let sk = sk[..].to_base58(); - - if !crypto_type_name.eq(DEFAULT_CRYPTO_TYPE) { - // Use suffix with crypto type name to store crypto type inside of vk - vk = format!("{}:{}", vk, crypto_type_name); - } - - let key = Key::new(vk, sk); - - let res = Ok(key); - trace!("create_key < {:?}", res); - res - } - - pub(crate) async fn create_my_did(&self, my_did_info: &MyDidInfo) -> IndyResult<(Did, Key)> { - trace!("create_my_did > my_did_info {:?}", secret!(my_did_info)); - - let crypto_type_name = my_did_info - .crypto_type - 
.as_deref() - .unwrap_or(DEFAULT_CRYPTO_TYPE); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!("MyDidInfo contains unknown crypto: {}", crypto_type_name), - ) - })?; - - let seed = self.convert_seed(my_did_info.seed.as_ref().map(String::as_ref))?; - let (vk, sk) = crypto_type.create_key(seed.as_ref())?; - let did = match my_did_info.did { - Some(ref did) => did.clone(), - _ if my_did_info.cid == Some(true) => DidValue::new( - &vk[..].to_vec().to_base58(), - my_did_info.ledger_type.as_deref(), - my_did_info - .method_name - .as_ref() - .map(|method| method.0.as_str()), - )?, - _ => DidValue::new( - &vk[0..16].to_vec().to_base58(), - my_did_info.ledger_type.as_deref(), - my_did_info - .method_name - .as_ref() - .map(|method| method.0.as_str()), - )?, - }; - - let mut vk = vk[..].to_base58(); - let sk = sk[..].to_base58(); - - if !crypto_type_name.eq(DEFAULT_CRYPTO_TYPE) { - // Use suffix with crypto type name to store crypto type inside of vk - vk = format!("{}:{}", vk, crypto_type_name); - } - - let did = (Did::new(did, vk.clone()), Key::new(vk, sk)); - - let res = Ok(did); - trace!("create_my_did < {:?}", res); - res - } - - pub(crate) async fn create_their_did( - &self, - their_did_info: &TheirDidInfo, - ) -> IndyResult { - trace!("create_their_did > their_did_info {:?}", their_did_info); - - // Check did is correct Base58 - self.validate_did(&their_did_info.did)?; - - let verkey = build_full_verkey( - &their_did_info.did.to_unqualified().0, - their_did_info.verkey.as_deref(), - )?; - - self.validate_key(&verkey).await?; - - let did = TheirDid { - did: their_did_info.did.clone(), - verkey, - }; - - let res = Ok(did); - trace!("create_their_did < {:?}", res); - res - } - - pub(crate) async fn sign(&self, my_key: &Key, doc: &[u8]) -> IndyResult> { - trace!("sign > my_key {:?} doc {:?}", my_key, doc); - - let crypto_type_name = verkey_get_cryptoname(&my_key.verkey); - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to sign message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let my_sk = ed25519_sign::SecretKey::from_slice( - my_key.signkey.as_str().decode_base58()?.as_slice(), - )?; - - let signature = crypto_type.sign(&my_sk, doc)?[..].to_vec(); - - let res = Ok(signature); - trace!("sign < {:?}", res); - res - } - - pub(crate) async fn verify( - &self, - their_vk: &str, - msg: &[u8], - signature: &[u8], - ) -> IndyResult { - trace!( - "verify > their_vk {:?} msg {:?} signature {:?}", - their_vk, - msg, - signature - ); - - let (their_vk, crypto_type_name) = split_verkey(their_vk); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to verify message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let their_vk = ed25519_sign::PublicKey::from_slice(&their_vk.decode_base58()?)?; - let signature = ed25519_sign::Signature::from_slice(signature)?; - - let valid = crypto_type.verify(&their_vk, msg, &signature)?; - - let res = Ok(valid); - trace!("verify < {:?}", res); - res - } - - pub(crate) async fn crypto_box( - &self, - my_key: &Key, - their_vk: &str, - doc: &[u8], - ) -> IndyResult<(Vec, Vec)> { - trace!( - "crypto_box > my_key {:?} their_vk {:?} doc 
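create_my_did above derives the unqualified DID from the verkey bytes: the full 32-byte key when cid is requested, otherwise only the first 16 bytes, base58 encoded. A standalone sketch with the bs58 crate (hypothetical helper name, placeholder key bytes).

fn did_from_verkey(verkey: &[u8; 32], cid: bool) -> String {
    // Full key for cid DIDs, 16-byte prefix otherwise.
    let bytes: &[u8] = if cid { &verkey[..] } else { &verkey[..16] };
    bs58::encode(bytes).into_string()
}

fn main() {
    let verkey = [7u8; 32]; // placeholder key material, not a real ed25519 key
    println!("did (16-byte prefix) = {}", did_from_verkey(&verkey, false));
    println!("did (full key, cid)  = {}", did_from_verkey(&verkey, true));
}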
{:?}", - my_key, - their_vk, - doc - ); - - let crypto_type_name = verkey_get_cryptoname(&my_key.verkey); - - let (their_vk, their_crypto_type_name) = split_verkey(their_vk); - - if !crypto_type_name.eq(their_crypto_type_name) { - // TODO: FIXME: Use dedicated error code - return Err(err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "My key crypto type is incompatible with their key crypto type: {} {}", - crypto_type_name, their_crypto_type_name - ), - )); - } - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to crypto_box message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let my_sk = ed25519_sign::SecretKey::from_slice( - my_key.signkey.as_str().decode_base58()?.as_slice(), - )?; - - let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.decode_base58()?.as_slice())?; - let nonce = crypto_type.gen_nonce(); - - let encrypted_doc = crypto_type.crypto_box(&my_sk, &their_vk, doc, &nonce)?; - let nonce = nonce[..].to_vec(); - - let res = Ok((encrypted_doc, nonce)); - trace!("crypto_box < {:?}", res); - res - } - - pub(crate) async fn crypto_box_open( - &self, - my_key: &Key, - their_vk: &str, - doc: &[u8], - nonce: &[u8], - ) -> IndyResult> { - trace!( - "crypto_box_open > my_key {:?} their_vk {:?} doc {:?} nonce {:?}", - my_key, - their_vk, - doc, - nonce - ); - - let crypto_type_name = verkey_get_cryptoname(&my_key.verkey); - let (their_vk, their_crypto_type_name) = split_verkey(their_vk); - - if !crypto_type_name.eq(their_crypto_type_name) { - // TODO: FIXME: Use dedicated error code - return Err(err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "My key crypto type is incompatible with their key crypto type: {} {}", - crypto_type_name, their_crypto_type_name - ), - )); - } - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to crypto_box_open message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let my_sk = - ed25519_sign::SecretKey::from_slice(my_key.signkey.decode_base58()?.as_slice())?; - let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.decode_base58()?.as_slice())?; - let nonce = ed25519_box::Nonce::from_slice(nonce)?; - - let decrypted_doc = crypto_type.crypto_box_open(&my_sk, &their_vk, doc, &nonce)?; - - let res = Ok(decrypted_doc); - trace!("crypto_box_open < {:?}", res); - res - } - - pub(crate) async fn crypto_box_seal(&self, their_vk: &str, doc: &[u8]) -> IndyResult> { - trace!("crypto_box_seal > their_vk {:?} doc {:?}", their_vk, doc); - - let (their_vk, crypto_type_name) = split_verkey(their_vk); - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to encrypt sealed message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.decode_base58()?.as_slice())?; - let encrypted_doc = crypto_type.crypto_box_seal(&their_vk, doc)?; - - let res = Ok(encrypted_doc); - trace!("crypto_box_seal < {:?}", res); - res - } - - pub(crate) async fn crypto_box_seal_open( - &self, - my_key: &Key, - doc: &[u8], - ) -> IndyResult> { - trace!("crypto_box_seal_open > my_key {:?} doc {:?}", my_key, doc); - - let (my_vk, crypto_type_name) = 
split_verkey(&my_key.verkey); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to crypto_box_open sealed message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let my_vk = ed25519_sign::PublicKey::from_slice(my_vk.decode_base58()?.as_slice())?; - - let my_sk = ed25519_sign::SecretKey::from_slice( - my_key.signkey.as_str().decode_base58()?.as_slice(), - )?; - - let decrypted_doc = crypto_type.crypto_box_seal_open(&my_vk, &my_sk, doc)?; - - let res = Ok(decrypted_doc); - trace!("crypto_box_seal_open < {:?}", res); - res - } - - pub(crate) fn convert_seed( - &self, - seed: Option<&str>, - ) -> IndyResult> { - trace!("convert_seed > seed {:?}", secret!(seed)); - - if seed.is_none() { - trace!("convert_seed <<< res: None"); - return Ok(None); - } - - let seed = seed.unwrap(); - - let bytes = if seed.as_bytes().len() == ed25519_sign::SEEDBYTES { - // is acceptable seed length - seed.as_bytes().to_vec() - } else if seed.ends_with('=') { - // is base64 string - let decoded = base64::decode(seed).to_indy( - IndyErrorKind::InvalidStructure, - "Can't deserialize Seed from Base64 string", - )?; - if decoded.len() == ed25519_sign::SEEDBYTES { - decoded - } else { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - format!( - "Trying to use invalid base64 encoded `seed`. The number of bytes must be \ - {} ", - ed25519_sign::SEEDBYTES - ), - )); - } - } else if seed.as_bytes().len() == ed25519_sign::SEEDBYTES * 2 { - // is hex string - Vec::from_hex(seed).to_indy(IndyErrorKind::InvalidStructure, "Seed is invalid hex")? - } else { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - format!( - "Trying to use invalid `seed`. It can be either {} bytes string or base64 \ - string or {} bytes HEX string", - ed25519_sign::SEEDBYTES, - ed25519_sign::SEEDBYTES * 2 - ), - )); - }; - - let seed = ed25519_sign::Seed::from_slice(bytes.as_slice())?; - - let res = Ok(Some(seed)); - trace!("convert_seed < {:?}", secret!(&res)); - res - } - - pub(crate) async fn validate_key(&self, vk: &str) -> IndyResult<()> { - trace!("validate_key > vk {:?}", vk); - - let (vk, crypto_type_name) = split_verkey(vk); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to use key with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - if let Some(vk) = vk.strip_prefix('~') { - let _ = vk.decode_base58()?; // TODO: proper validate abbreviated verkey - } else { - let vk = ed25519_sign::PublicKey::from_slice(vk.decode_base58()?.as_slice())?; - crypto_type.validate_key(&vk)?; - }; - - let res = Ok(()); - trace!("validate_key < {:?}", res); - res - } - - pub(crate) fn validate_did(&self, did: &DidValue) -> IndyResult<()> { - trace!("validate_did > did {:?}", did); - // Useful method, huh? 
- // Soon some state did validation will be put here - - let res = Ok(()); - trace!("validate_did < {:?}", res); - res - } - - pub(crate) fn encrypt_plaintext( - &self, - plaintext: Vec, - aad: &str, - cek: &chacha20poly1305_ietf::Key, - ) -> (String, String, String) { - //encrypt message with aad - let (ciphertext, iv, tag) = - gen_nonce_and_encrypt_detached(plaintext.as_slice(), aad.as_bytes(), cek); - - //base64 url encode data - let iv_encoded = base64::encode_urlsafe(&iv[..]); - let ciphertext_encoded = base64::encode_urlsafe(ciphertext.as_slice()); - let tag_encoded = base64::encode_urlsafe(&tag[..]); - - (ciphertext_encoded, iv_encoded, tag_encoded) - } - - /* ciphertext helper functions */ - pub(crate) fn decrypt_ciphertext( - &self, - ciphertext: &str, - aad: &str, - iv: &str, - tag: &str, - cek: &chacha20poly1305_ietf::Key, - ) -> Result { - //convert ciphertext to bytes - let ciphertext_as_vec = base64::decode_urlsafe(ciphertext).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decode ciphertext {}", err), - ) - })?; - - let ciphertext_as_bytes = ciphertext_as_vec.as_ref(); - - //convert IV from &str to &Nonce - let nonce_as_vec = base64::decode_urlsafe(iv).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decode IV {}", err), - ) - })?; - - let nonce_as_slice = nonce_as_vec.as_slice(); - - let nonce = chacha20poly1305_ietf::Nonce::from_slice(nonce_as_slice).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to convert IV to Nonce type {}", err), - ) - })?; - - //convert tag from &str to &Tag - let tag_as_vec = base64::decode_urlsafe(tag).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decode tag {}", err), - ) - })?; - let tag_as_slice = tag_as_vec.as_slice(); - let tag = chacha20poly1305_ietf::Tag::from_slice(tag_as_slice).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to convert tag to Tag type {}", err), - ) - })?; - - //decrypt message - let plaintext_bytes = chacha20poly1305_ietf::decrypt_detached( - ciphertext_as_bytes, - cek, - &nonce, - &tag, - Some(aad.as_bytes()), - ) - .map_err(|err| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!("Failed to decrypt ciphertext {}", err), - ) - })?; - - //convert message to readable (UTF-8) string - String::from_utf8(plaintext_bytes).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to convert message to UTF-8 {}", err), - ) - }) - } -} - -#[cfg(test)] -mod tests { - use indy_utils::crypto::chacha20poly1305_ietf::gen_key; - - use super::*; - use crate::domain::crypto::did::MyDidInfo; - - #[async_std::test] - async fn create_my_did_with_works_for_empty_info() { - let service = CryptoService::new(); - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let my_did = service.create_my_did(&did_info).await; - assert!(my_did.is_ok()); - } - - #[async_std::test] - async fn create_my_did_works_for_passed_did() { - let service = CryptoService::new(); - - let did = DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()); - let did_info = MyDidInfo { - did: Some(did.clone()), - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (my_did, _) = service.create_my_did(&did_info).await.unwrap(); - assert_eq!(did, my_did.did); - } - - #[async_std::test] - async fn create_my_did_not_works_for_invalid_crypto_type() { - let service = 
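encrypt_plaintext and decrypt_ciphertext above bind the AAD to a ChaCha20-Poly1305 encryption through indy_utils' chacha20poly1305_ietf bindings. A minimal sketch of the same AEAD-with-AAD behaviour, assuming the RustCrypto chacha20poly1305 crate is available (that crate is not a stated dependency here); decryption fails unless the identical AAD is supplied.

use chacha20poly1305::{
    aead::{Aead, AeadCore, KeyInit, OsRng, Payload},
    ChaCha20Poly1305,
};

fn main() {
    // Fresh random key and nonce; the deleted code instead uses the wallet's
    // content-encryption key and a generated IV, then base64url-encodes the parts.
    let key = ChaCha20Poly1305::generate_key(&mut OsRng);
    let cipher = ChaCha20Poly1305::new(&key);
    let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng);

    let aad = b"some protocol data input to the encryption";
    let ciphertext = cipher
        .encrypt(&nonce, Payload { msg: b"Hello World", aad })
        .expect("encryption failed");

    // Decryption only succeeds when the same AAD is presented.
    let plaintext = cipher
        .decrypt(&nonce, Payload { msg: &ciphertext, aad })
        .expect("decryption failed");
    assert_eq!(plaintext, b"Hello World".to_vec());
}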
CryptoService::new(); - - let did = DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()); - let crypto_type = Some("type".to_string()); - - let did_info = MyDidInfo { - did: Some(did), - cid: None, - seed: None, - crypto_type, - method_name: None, - ledger_type: None, - }; - - assert!(service.create_my_did(&did_info).await.is_err()); - } - - #[async_std::test] - async fn create_my_did_works_for_seed() { - let service = CryptoService::new(); - - let did = DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()); - let seed = Some("00000000000000000000000000000My1".to_string()); - - let did_info_with_seed = MyDidInfo { - did: Some(did.clone()), - cid: None, - seed, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let did_info_without_seed = MyDidInfo { - did: Some(did.clone()), - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (did_with_seed, _) = service.create_my_did(&did_info_with_seed).await.unwrap(); - let (did_without_seed, _) = service.create_my_did(&did_info_without_seed).await.unwrap(); - - assert_ne!(did_with_seed.verkey, did_without_seed.verkey) - } - - #[async_std::test] - async fn create_their_did_works_without_verkey() { - let service = CryptoService::new(); - let did = DidValue("CnEDk9HrMnmiHXEV1WFgbVCRteYnPqsJwrTdcZaNhFVW".to_string()); - - let their_did_info = TheirDidInfo::new(did.clone(), None); - let their_did = service.create_their_did(&their_did_info).await.unwrap(); - - assert_eq!(did, their_did.did); - assert_eq!(did.0, their_did.verkey); - } - - #[async_std::test] - async fn create_their_did_works_for_full_verkey() { - let service = CryptoService::new(); - let did = DidValue("8wZcEriaNLNKtteJvx7f8i".to_string()); - let verkey = "5L2HBnzbu6Auh2pkDRbFt5f4prvgE2LzknkuYLsKkacp"; - - let their_did_info = TheirDidInfo::new(did.clone(), Some(verkey.to_string())); - let their_did = service.create_their_did(&their_did_info).await.unwrap(); - - assert_eq!(did, their_did.did); - assert_eq!(verkey, their_did.verkey); - } - - #[async_std::test] - async fn create_their_did_works_for_abbreviated_verkey() { - let service = CryptoService::new(); - let did = DidValue("8wZcEriaNLNKtteJvx7f8i".to_string()); - - let their_did_info = - TheirDidInfo::new(did.clone(), Some("~NcYxiDXkpYi6ov5FcYDi1e".to_string())); - - let their_did = service.create_their_did(&their_did_info).await.unwrap(); - - assert_eq!(did, their_did.did); - - assert_eq!( - "5L2HBnzbu6Auh2pkDRbFt5f4prvgE2LzknkuYLsKkacp", - their_did.verkey - ); - } - - #[async_std::test] - async fn sign_works() { - let service = CryptoService::new(); - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let message = r#"message"#; - let (_, my_key) = service.create_my_did(&did_info).await.unwrap(); - let sig = service.sign(&my_key, message.as_bytes()).await; - - assert!(sig.is_ok()); - } - - #[async_std::test] - async fn sign_works_for_invalid_signkey() { - let service = CryptoService::new(); - let message = r#"message"#; - let my_key = Key::new( - "8wZcEriaNLNKtteJvx7f8i".to_string(), - "5L2HBnzbu6Auh2pkDRbFt5f4prvgE2LzknkuYLsKkacp".to_string(), - ); - assert!(service.sign(&my_key, message.as_bytes()).await.is_err()); - } - - #[async_std::test] - async fn sign_verify_works() { - let service = CryptoService::new(); - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let message = r#"message"#; - let (my_did, 
my_key) = service.create_my_did(&did_info).await.unwrap(); - let signature = service.sign(&my_key, message.as_bytes()).await.unwrap(); - - let valid = service - .verify(&my_did.verkey, message.as_bytes(), &signature) - .await - .unwrap(); - - assert!(valid); - } - - #[async_std::test] - async fn sign_verify_works_for_verkey_contained_crypto_type() { - let service = CryptoService::new(); - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let message = r#"message"#; - let (my_did, my_key) = service.create_my_did(&did_info).await.unwrap(); - let signature = service.sign(&my_key, message.as_bytes()).await.unwrap(); - let verkey = my_did.verkey + ":ed25519"; - let valid = service - .verify(&verkey, message.as_bytes(), &signature) - .await - .unwrap(); - assert!(valid); - } - - #[async_std::test] - async fn sign_verify_works_for_verkey_contained_invalid_crypto_type() { - let service = CryptoService::new(); - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let message = r#"message"#; - let (my_did, my_key) = service.create_my_did(&did_info).await.unwrap(); - let signature = service.sign(&my_key, message.as_bytes()).await.unwrap(); - let verkey = format!("crypto_type:{}", my_did.verkey); - - assert!(service - .verify(&verkey, message.as_bytes(), &signature) - .await - .is_err()); - } - - #[async_std::test] - async fn verify_not_works_for_invalid_verkey() { - let service = CryptoService::new(); - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let message = r#"message"#; - let (_, my_key) = service.create_my_did(&did_info).await.unwrap(); - let signature = service.sign(&my_key, message.as_bytes()).await.unwrap(); - let verkey = "AnnxV4t3LUHKZaxVQDWoVaG44NrGmeDYMA4Gz6C2tCZd"; - - let valid = service - .verify(verkey, message.as_bytes(), &signature) - .await - .unwrap(); - - assert!(!valid); - } - - #[async_std::test] - async fn crypto_box_works() { - let service = CryptoService::new(); - let msg = "some message"; - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (_, my_key) = service.create_my_did(&did_info).await.unwrap(); - let (their_did, _) = service.create_my_did(&did_info.clone()).await.unwrap(); - let their_did = Did::new(their_did.did, their_did.verkey); - - let encrypted_message = service - .crypto_box(&my_key, &their_did.verkey, msg.as_bytes()) - .await; - - assert!(encrypted_message.is_ok()); - } - - #[async_std::test] - async fn crypto_box_and_crypto_box_open_works() { - let service = CryptoService::new(); - - let msg = "some message"; - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (my_did, my_key) = service.create_my_did(&did_info).await.unwrap(); - - let my_key_for_encrypt = my_key.clone(); - - let their_did_for_decrypt = Did::new(my_did.did, my_did.verkey); - - let (their_did, their_key) = service.create_my_did(&did_info.clone()).await.unwrap(); - - let my_key_for_decrypt = their_key.clone(); - - let their_did_for_encrypt = Did::new(their_did.did, their_did.verkey); - - let (encrypted_message, noce) = service - .crypto_box( - &my_key_for_encrypt, - &their_did_for_encrypt.verkey, - msg.as_bytes(), - ) - .await - .unwrap(); - 
- let decrypted_message = service - .crypto_box_open( - &my_key_for_decrypt, - &their_did_for_decrypt.verkey, - &encrypted_message, - &noce, - ) - .await - .unwrap(); - - assert_eq!(msg.as_bytes().to_vec(), decrypted_message); - } - - #[async_std::test] - async fn crypto_box_and_crypto_box_open_works_for_verkey_contained_crypto_type() { - let service = CryptoService::new(); - - let msg = "some message"; - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (my_did, my_key) = service.create_my_did(&did_info).await.unwrap(); - - let my_key_for_encrypt = my_key.clone(); - - let their_did_for_decrypt = Did::new(my_did.did, my_did.verkey); - - let (their_did, their_key) = service.create_my_did(&did_info.clone()).await.unwrap(); - let my_key_for_decrypt = their_key.clone(); - - let their_did_for_encrypt = Did::new(their_did.did, their_did.verkey); - - let (encrypted_message, noce) = service - .crypto_box( - &my_key_for_encrypt, - &their_did_for_encrypt.verkey, - msg.as_bytes(), - ) - .await - .unwrap(); - - let verkey = their_did_for_decrypt.verkey + ":ed25519"; - - let decrypted_message = service - .crypto_box_open(&my_key_for_decrypt, &verkey, &encrypted_message, &noce) - .await - .unwrap(); - - assert_eq!(msg.as_bytes().to_vec(), decrypted_message); - } - - #[async_std::test] - async fn crypto_box_seal_works() { - let service = CryptoService::new(); - let msg = "some message"; - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let (did, _) = service.create_my_did(&did_info.clone()).await.unwrap(); - let did = Did::new(did.did, did.verkey); - let encrypted_message = service.crypto_box_seal(&did.verkey, msg.as_bytes()).await; - assert!(encrypted_message.is_ok()); - } - - #[async_std::test] - async fn crypto_box_seal_and_crypto_box_seal_open_works() { - let service = CryptoService::new(); - let msg = "some message".as_bytes(); - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (did, key) = service.create_my_did(&did_info.clone()).await.unwrap(); - let encrypt_did = Did::new(did.did.clone(), did.verkey.clone()); - - let encrypted_message = service - .crypto_box_seal(&encrypt_did.verkey, msg) - .await - .unwrap(); - - let decrypted_message = service - .crypto_box_seal_open(&key, &encrypted_message) - .await - .unwrap(); - - assert_eq!(msg, decrypted_message.as_slice()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_and_decrypt_ciphertext_works() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. 
Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, iv_encoded, tag) = - service.encrypt_plaintext(plaintext.clone(), aad, &cek); - - let expected_plaintext = service - .decrypt_ciphertext(&expected_ciphertext, aad, &iv_encoded, &tag, &cek) - .unwrap(); - - assert_eq!(expected_plaintext.as_bytes().to_vec(), plaintext); - } - - #[async_std::test] - async fn test_encrypt_plaintext_decrypt_ciphertext_empty_string_works() { - let service: CryptoService = CryptoService::new(); - let plaintext = "".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, iv_encoded, tag) = - service.encrypt_plaintext(plaintext.clone(), aad, &cek); - - let expected_plaintext = service - .decrypt_ciphertext(&expected_ciphertext, aad, &iv_encoded, &tag, &cek) - .unwrap(); - - assert_eq!(expected_plaintext.as_bytes().to_vec(), plaintext); - } - - #[async_std::test] - async fn test_encrypt_plaintext_decrypt_ciphertext_bad_iv_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, _, tag) = service.encrypt_plaintext(plaintext, aad, &cek); - - //convert values to base64 encoded strings - let bad_iv_input = "invalid_iv"; - - let expected_error = - service.decrypt_ciphertext(&expected_ciphertext, bad_iv_input, &tag, aad, &cek); - - assert!(expected_error.is_err()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_decrypt_ciphertext_bad_ciphertext_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (_, iv_encoded, tag) = service.encrypt_plaintext(plaintext, aad, &cek); - - let bad_ciphertext = base64::encode_urlsafe("bad_ciphertext".as_bytes()); - - let expected_error = - service.decrypt_ciphertext(&bad_ciphertext, &iv_encoded, &tag, aad, &cek); - - assert!(expected_error.is_err()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_and_decrypt_ciphertext_wrong_cek_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. 
Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = chacha20poly1305_ietf::gen_key(); - - let (expected_ciphertext, iv_encoded, tag) = - service.encrypt_plaintext(plaintext, aad, &cek); - - let bad_cek = gen_key(); - - let expected_error = - service.decrypt_ciphertext(&expected_ciphertext, &iv_encoded, &tag, aad, &bad_cek); - - assert!(expected_error.is_err()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_and_decrypt_ciphertext_bad_tag_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, iv_encoded, _) = service.encrypt_plaintext(plaintext, aad, &cek); - - let bad_tag = "bad_tag".to_string(); - - let expected_error = - service.decrypt_ciphertext(&expected_ciphertext, &iv_encoded, &bad_tag, aad, &cek); - assert!(expected_error.is_err()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_and_decrypt_ciphertext_bad_aad_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, iv_encoded, tag) = - service.encrypt_plaintext(plaintext, aad, &cek); - - let bad_aad = "bad aad"; - - let expected_error = - service.decrypt_ciphertext(&expected_ciphertext, &iv_encoded, &tag, bad_aad, &cek); - assert!(expected_error.is_err()); - } -} diff --git a/aries/misc/legacy/libvdrtools/src/services/mod.rs b/aries/misc/legacy/libvdrtools/src/services/mod.rs deleted file mode 100644 index 83962a3493..0000000000 --- a/aries/misc/legacy/libvdrtools/src/services/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod crypto; -mod wallet; - -pub use crypto::CryptoService; -pub(crate) use wallet::WalletService; diff --git a/aries/misc/legacy/libvdrtools/src/services/wallet.rs b/aries/misc/legacy/libvdrtools/src/services/wallet.rs deleted file mode 100644 index 6505049d65..0000000000 --- a/aries/misc/legacy/libvdrtools/src/services/wallet.rs +++ /dev/null @@ -1 +0,0 @@ -pub(crate) use indy_wallet::WalletService; diff --git a/aries/misc/legacy/libvdrtools/src/utils/crypto/base58.rs b/aries/misc/legacy/libvdrtools/src/utils/crypto/base58.rs deleted file mode 100644 index 1f4948543a..0000000000 --- a/aries/misc/legacy/libvdrtools/src/utils/crypto/base58.rs +++ /dev/null @@ -1,26 +0,0 @@ -use bs58::{decode, decode::Result, encode}; - -pub trait ToBase58 { - fn to_base58(&self) -> String; -} -pub trait DecodeBase58 { - fn decode_base58(self) -> Result>; -} - -impl ToBase58 for [u8] { - fn to_base58(&self) -> String { - encode(self).into_string() - } -} - -impl DecodeBase58 for &[u8] { - fn decode_base58(self) -> Result> { - decode(self).into_vec() - } -} - -impl DecodeBase58 for &str { - fn decode_base58(self) -> Result> { - decode(self.as_bytes()).into_vec() - } -} diff --git a/aries/misc/legacy/libvdrtools/src/utils/crypto/mod.rs b/aries/misc/legacy/libvdrtools/src/utils/crypto/mod.rs deleted file mode 100644 index df7b29719e..0000000000 --- 
diff --git a/aries/misc/legacy/libvdrtools/src/utils/crypto/mod.rs b/aries/misc/legacy/libvdrtools/src/utils/crypto/mod.rs
deleted file mode 100644
index df7b29719e..0000000000
--- a/aries/misc/legacy/libvdrtools/src/utils/crypto/mod.rs
+++ /dev/null
@@ -1,2 +0,0 @@
-pub mod base58;
-pub mod verkey_builder;
diff --git a/aries/misc/legacy/libvdrtools/src/utils/crypto/verkey_builder.rs b/aries/misc/legacy/libvdrtools/src/utils/crypto/verkey_builder.rs
deleted file mode 100644
index b0b6fb2126..0000000000
--- a/aries/misc/legacy/libvdrtools/src/utils/crypto/verkey_builder.rs
+++ /dev/null
@@ -1,132 +0,0 @@
-use indy_api_types::errors::prelude::*;
-
-use crate::{
-    services::CryptoService,
-    utils::crypto::base58::{DecodeBase58, ToBase58},
-};
-
-pub fn build_full_verkey(dest: &str, verkey: Option<&str>) -> Result<String, IndyError> {
-    if let Some(verkey) = verkey {
-        let (verkey, crypto_type) = if verkey.contains(':') {
-            let splits: Vec<&str> = verkey.split(':').collect();
-            (splits[0], Some(splits[1]))
-        } else {
-            (verkey, None)
-        };
-
-        let verkey = if let Some(verkey) = verkey.strip_prefix('~') {
-            let mut result = dest.decode_base58()?;
-            let mut end = verkey.decode_base58()?;
-            result.append(&mut end);
-            result.to_base58()
-        } else {
-            verkey.to_owned()
-        };
-
-        let verkey = if let Some(crypto_type) = crypto_type {
-            format!("{}:{}", verkey, crypto_type)
-        } else {
-            verkey
-        };
-
-        Ok(verkey)
-    } else {
-        // Cryptonym
-        Ok(dest.to_owned())
-    }
-}
-
-pub fn split_verkey(verkey: &str) -> (&str, &str) {
-    let position = verkey.find(':');
-    match position {
-        Some(p) => {
-            let cryptoname = if p + 1 < verkey.len() {
-                verkey[p + 1..].as_ref()
-            } else {
-                CryptoService::defualt_crypto_type()
-            };
-            let v = if p > 0 { verkey[..p].as_ref() } else { "" };
-            (v, cryptoname)
-        }
-        None => (verkey, CryptoService::defualt_crypto_type()),
-    }
-}
-
-pub fn verkey_get_cryptoname(verkey: &str) -> &str {
-    let position = verkey.find(':');
-    match position {
-        Some(p) => {
-            if p + 1 < verkey.len() {
-                verkey[p + 1..].as_ref()
-            } else {
-                CryptoService::defualt_crypto_type()
-            }
-        }
-        None => CryptoService::defualt_crypto_type(),
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn split_verkey_empty() {
-        assert_eq!(split_verkey(""), ("", CryptoService::defualt_crypto_type()))
-    }
-
-    #[test]
-    fn split_verkey_single_colon() {
-        assert_eq!(
-            split_verkey(":"),
-            ("", CryptoService::defualt_crypto_type())
-        )
-    }
-
-    #[test]
-    fn split_verkey_ends_with_colon() {
-        assert_eq!(
-            split_verkey("foo:"),
-            ("foo", CryptoService::defualt_crypto_type())
-        )
-    }
-
-    #[test]
-    fn split_verkey_starts_with_colon() {
-        assert_eq!(split_verkey(":bar"), ("", "bar"))
-    }
-
-    #[test]
-    fn split_verkey_works() {
-        assert_eq!(split_verkey("foo:bar:baz"), ("foo", "bar:baz"))
-    }
-
-    #[test]
-    fn verkey_get_cryptoname_empty() {
-        assert_eq!(
-            verkey_get_cryptoname(""),
-            CryptoService::defualt_crypto_type()
-        )
-    }
-
-    #[test]
-    fn verkey_get_cryptoname_single_colon() {
-        assert_eq!(
-            verkey_get_cryptoname(":"),
-            CryptoService::defualt_crypto_type()
-        )
-    }
-
-    #[test]
-    fn verkey_get_cryptoname_ends_with_colon() {
-        assert_eq!(
-            verkey_get_cryptoname("foo:"),
-            CryptoService::defualt_crypto_type()
-        )
-    }
-
-    #[test]
-    fn verkey_get_cryptoname_works() {
-        assert_eq!(verkey_get_cryptoname("foo:bar"), "bar")
-    }
-}
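Reviewer note: build_full_verkey implemented one rule: a verkey starting with '~' is abbreviated, and the full key is the base58 encoding of decode(did) concatenated with decode(abbreviated part), with an optional ":crypto_type" suffix carried through unchanged. A standalone sketch of that rule using bs58 directly follows; the function name, the split_once-based parsing (which keeps everything after the first ':' as the crypto type) and the error type are simplifying assumptions of this sketch, not the removed signature.

// Sketch only: the abbreviated-verkey expansion rule, assuming bs58.
fn expand_verkey(dest: &str, verkey: &str) -> Result<String, bs58::decode::Error> {
    // Split off an optional ":crypto_type" suffix.
    let (key, crypto_type) = match verkey.split_once(':') {
        Some((k, t)) => (k, Some(t)),
        None => (verkey, None),
    };

    let expanded = match key.strip_prefix('~') {
        Some(abbreviated) => {
            // Abbreviated verkey: full key bytes = DID bytes || abbreviated key bytes.
            let mut bytes = bs58::decode(dest).into_vec()?;
            bytes.extend(bs58::decode(abbreviated).into_vec()?);
            bs58::encode(bytes).into_string()
        }
        None => key.to_owned(),
    };

    Ok(match crypto_type {
        Some(t) => format!("{}:{}", expanded, t),
        None => expanded,
    })
}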
diff --git a/aries/misc/legacy/libvdrtools/src/utils/mod.rs b/aries/misc/legacy/libvdrtools/src/utils/mod.rs
deleted file mode 100755
index c5c38aba13..0000000000
--- a/aries/misc/legacy/libvdrtools/src/utils/mod.rs
+++ /dev/null
@@ -1,4 +0,0 @@
-pub mod crypto;
-pub use indy_utils::wql;
-#[macro_use]
-pub mod qualifier;
diff --git a/aries/misc/legacy/libvdrtools/src/utils/qualifier.rs b/aries/misc/legacy/libvdrtools/src/utils/qualifier.rs
deleted file mode 100644
index 6da00b58b8..0000000000
--- a/aries/misc/legacy/libvdrtools/src/utils/qualifier.rs
+++ /dev/null
@@ -1,94 +0,0 @@
-use lazy_static::lazy_static;
-use regex::Regex;
-
-lazy_static! {
-    pub static ref REGEX: Regex =
-        Regex::new("^[a-z0-9]+(:(indy|cheqd))?(:[a-z0-9:]+)?:(.*)$").unwrap();
-}
-
-pub fn qualify(entity: &str, prefix: &str, method: &str) -> String {
-    format!("{}:{}:{}", prefix, method, entity)
-}
-
-pub fn qualify_with_ledger(entity: &str, prefix: &str, method: &str, ledger_type: &str) -> String {
-    format!("{}:{}:{}:{}", prefix, method, ledger_type, entity)
-}
-
-pub fn to_unqualified(entity: &str) -> String {
-    trace!("qualifier::to_unqualified >> {}", entity);
-    match REGEX.captures(entity) {
-        None => entity.to_string(),
-        Some(caps) => {
-            trace!("qualifier::to_unqualified: parts {:?}", caps);
-            caps.get(4)
-                .map(|m| m.as_str().to_string())
-                .unwrap_or(entity.to_string())
-        }
-    }
-}
-
-pub fn method(entity: &str) -> Option<String> {
-    match REGEX.captures(entity) {
-        None => None,
-        Some(caps) => {
-            trace!("qualifier::method: caps {:?}", caps);
-            match (caps.get(2), caps.get(3)) {
-                (Some(type_), Some(subnet)) => Some(type_.as_str().to_owned() + subnet.as_str()),
-                (Some(type_), None) => Some(type_.as_str().to_owned()),
-                _ => {
-                    warn!(
-                        "Unrecognized FQ method for {}, parsed items are (where 2nd is method \
-                         type, and 3rd is sub-method (namespace, ledger, type, etc){:?}",
-                        entity, caps
-                    );
-                    None
-                }
-            }
-        }
-    }
-}
-
-pub fn is_fully_qualified(entity: &str) -> bool {
-    REGEX.is_match(entity)
-}
-
-macro_rules! qualifiable_type (($newtype:ident) => (
-
-    #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
-    pub struct $newtype(pub String);
-
-    impl $newtype {
-
-        #[allow(dead_code)]
-        pub fn get_method(&self) -> Option<String> {
-            qualifier::method(&self.0)
-        }
-
-        #[allow(dead_code)]
-        pub fn set_method(&self, method: &str) -> $newtype {
-            $newtype(qualifier::qualify(&self.0, $newtype::PREFIX, &method))
-        }
-
-        #[allow(dead_code)]
-        pub fn set_ledger_and_method(&self, ledger_type: &str, method: &str) -> $newtype {
-            $newtype(qualifier::qualify_with_ledger(&self.0, $newtype::PREFIX, method, ledger_type))
-        }
-
-        #[allow(dead_code)]
-        pub fn is_fully_qualified(&self) -> bool {
-            self.0.contains($newtype::PREFIX) && qualifier::is_fully_qualified(&self.0)
-        }
-    }
-
-    impl From<&str> for $newtype {
-        fn from(value: &str) -> Self {
-            Self(value.to_owned())
-        }
-    }
-
-    impl From<&String> for $newtype {
-        fn from(value: &String) -> Self {
-            Self(value.clone())
-        }
-    }
-));
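Reviewer note: the removed qualifier module wrapped a single regex: capture group 2 holds the (indy|cheqd) method, group 3 an optional sub-ledger segment, and group 4 the unqualified entity, which is what to_unqualified returned. A small self-contained sketch of that parsing with the regex crate; the sample DID value below is illustrative only.

// Sketch only: what is_fully_qualified / method / to_unqualified extracted.
use regex::Regex;

fn main() {
    let re = Regex::new("^[a-z0-9]+(:(indy|cheqd))?(:[a-z0-9:]+)?:(.*)$").unwrap();

    // A fully qualified identifier of the form prefix:method[:sub_ledger]:entity.
    let did = "did:indy:sovrin:NcYxiDXkpYi6ov5FcYDi1e";
    assert!(re.is_match(did)); // what is_fully_qualified() checked

    let caps = re.captures(did).unwrap();
    // Groups 2 and 3 are what method() concatenated ("indy" + ":sovrin");
    // group 4 is what to_unqualified() returned.
    assert_eq!(&caps[2], "indy");
    assert_eq!(&caps[3], ":sovrin");
    assert_eq!(&caps[4], "NcYxiDXkpYi6ov5FcYDi1e");
}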