diff --git a/.DS_Store b/.DS_Store index 532e02ff..2c3a358a 100755 Binary files a/.DS_Store and b/.DS_Store differ diff --git a/Cargo.lock b/Cargo.lock index 0482a167..af9efc0d 100755 --- a/Cargo.lock +++ b/Cargo.lock @@ -55,8 +55,8 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f823d037a7ec6ea2197046bafd4ae150e6bc36f9ca347404f46a46823fa84f2" dependencies = [ - "approx", - "num-complex", + "approx 0.3.2", + "num-complex 0.2.4", "num-traits", ] @@ -99,6 +99,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "approx" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + [[package]] name = "arboard" version = "2.1.1" @@ -140,6 +149,15 @@ dependencies = [ "system-deps", ] +[[package]] +name = "atomic-polyfill" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ff7eb3f316534d83a8a2c3d1674ace8a5a71198eba31e2e2b597833f699b28" +dependencies = [ + "critical-section", +] + [[package]] name = "atomic_refcell" version = "0.1.8" @@ -226,7 +244,7 @@ checksum = "5fe233b960f12f8007e3db2d136e3cb1c291bfd7396e384ee76025fc1a3932b4" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.103", ] [[package]] @@ -339,15 +357,15 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.23" +version = "0.4.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" +checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b" dependencies = [ "iana-time-zone", "js-sys", "num-integer", "num-traits", - "time", + "time 0.1.44", "wasm-bindgen", "winapi", ] @@ -506,6 +524,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "critical-section" +version = "1.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6548a0ad5d2549e111e1f6a11a6c2e2d00ce6a3dafe22948d67c2b443f775e52" + [[package]] name = "crossbeam-channel" version = "0.5.0" @@ -611,7 +635,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn", + "syn 1.0.103", ] [[package]] @@ -628,7 +652,7 @@ checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.103", ] [[package]] @@ -652,7 +676,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn", + "syn 1.0.103", ] [[package]] @@ -663,7 +687,7 @@ checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core", "quote", - "syn", + "syn 1.0.103", ] [[package]] @@ -951,7 +975,7 @@ checksum = "c8469d0d40519bc608ec6863f1cc88f3f1deee15913f2f3b3e573d81ed38cccc" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.103", ] [[package]] @@ -1302,12 +1326,34 @@ dependencies = [ "crunchy", ] +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +[[package]] +name = "heapless" +version = "0.7.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db04bc24a18b9ea980628ecf00e6c0264f3c1426dac36c00cb49b6fbad8b0743" +dependencies = [ + "atomic-polyfill", + "hash32", + "rustc_version 0.4.0", + "spin", + "stable_deref_trait", +] + [[package]] name = "heck" version = "0.4.0" @@ -1691,6 +1737,15 @@ dependencies = [ "rawpointer", ] +[[package]] +name = "matrixmultiply" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"add85d4dd35074e6fedc608f8c8f513a3548619a9024b751949ef0e8e45a4d84" +dependencies = [ + "rawpointer", +] + [[package]] name = "memchr" version = "2.5.0" @@ -1755,6 +1810,15 @@ dependencies = [ "adler 1.0.2", ] +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler 1.0.2", +] + [[package]] name = "mio" version = "0.8.5" @@ -1773,7 +1837,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aad9dfe950c057b1bfe9c1f2aa51583a8468ef2a5baba2ebbe06d775efeb7729" dependencies = [ - "time", + "time 0.1.44", "winapi", ] @@ -1784,16 +1848,72 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aaa9fddbc34c8c35dd2108515587b8ce0cab396f17977b8c738568e4edb521a2" dependencies = [ "alga", - "approx", + "approx 0.3.2", "generic-array", - "matrixmultiply", - "num-complex", + "matrixmultiply 0.2.4", + "num-complex 0.2.4", "num-rational 0.2.4", "num-traits", "rand 0.6.5", "typenum", ] +[[package]] +name = "nalgebra" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d506eb7e08d6329505faa8a3a00a5dcc6de9f76e0c77e4b75763ae3c770831ff" +dependencies = [ + "approx 0.5.1", + "matrixmultiply 0.3.2", + "nalgebra-macros 0.1.0", + "num-complex 0.4.3", + "num-rational 0.4.1", + "num-traits", + "rand 0.8.5", + "rand_distr", + "simba 0.6.0", + "typenum", +] + +[[package]] +name = "nalgebra" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d68d47bba83f9e2006d117a9a33af1524e655516b8919caac694427a6fb1e511" +dependencies = [ + "approx 0.5.1", + "matrixmultiply 0.3.2", + "nalgebra-macros 0.2.0", + "num-complex 0.4.3", + "num-rational 0.4.1", + "num-traits", + "simba 0.8.0", + "typenum", +] + +[[package]] +name = "nalgebra-macros" +version = "0.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "01fcc0b8149b4632adc89ac3b7b31a12fb6099a0317a4eb2ebff574ef7de7218" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.103", +] + +[[package]] +name = "nalgebra-macros" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d232c68884c0c99810a5a4d333ef7e47689cfd0edc85efc9e54e1e6bf5212766" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.103", +] + [[package]] name = "nanorand" version = "0.7.0" @@ -1895,7 +2015,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn", + "syn 1.0.103", ] [[package]] @@ -1967,6 +2087,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-complex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +dependencies = [ + "num-traits", +] + [[package]] name = "num-integer" version = "0.1.44" @@ -2037,7 +2166,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn", + "syn 1.0.103", ] [[package]] @@ -2098,7 +2227,7 @@ checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.103", ] [[package]] @@ -2235,7 +2364,7 @@ checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.103", ] [[package]] @@ -2293,45 +2422,22 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.47" +version = "1.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" +checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.21" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" dependencies = [ "proc-macro2", ] -[[package]] -name = "rand" -version = "0.3.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" -dependencies = [ - "libc", - "rand 0.4.6", -] - -[[package]] -name = "rand" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" -dependencies = [ - "fuchsia-cprng", - "libc", - "rand_core 0.3.1", - "rdrand", - "winapi", -] - [[package]] name = "rand" version = "0.6.5" @@ -2365,6 +2471,17 @@ dependencies = [ "rand_pcg 0.2.1", ] +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + [[package]] name = "rand_chacha" version = "0.1.1" @@ -2385,6 +2502,16 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + [[package]] name = "rand_core" version = "0.3.1" @@ -2409,13 +2536,23 @@ dependencies = [ "getrandom 0.1.16", ] +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.8", +] + [[package]] name = "rand_distr" -version = "0.2.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2" +checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31" dependencies = [ - "rand 0.7.3", + "num-traits", + "rand 0.8.5", ] [[package]] @@ -2524,21 +2661,19 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" [[package]] name = "rayon" -version = "1.5.3" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" +checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" dependencies = [ - "autocfg 1.0.1", - "crossbeam-deque", "either", "rayon-core", ] [[package]] name = "rayon-core" -version = "1.9.3" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" +checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -2658,12 +2793,24 @@ dependencies = [ [[package]] name = "rstar" -version = "0.7.1" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0650eaaa56cbd1726fd671150fce8ac6ed9d9a25d1624430d7ee9d196052f6b6" +checksum = "b40f1bfe5acdab44bc63e6699c28b74f75ec43afb59f3eda01e145aff86a25fa" dependencies = [ + "heapless", "num-traits", - "pdqselect", + "smallvec", +] + +[[package]] +name = "rstar" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f39465655a1e3d8ae79c6d9e007f4953bfc5d55297602df9dc38f9ae9f1359a" +dependencies = [ + "heapless", + "num-traits", + "smallvec", ] [[package]] @@ -2672,7 +2819,16 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" dependencies = [ - "semver", + "semver 0.11.0", +] + +[[package]] +name = 
"rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver 1.0.17", ] [[package]] @@ -2690,6 +2846,15 @@ dependencies = [ "bytemuck", ] +[[package]] +name = "safe_arch" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "794821e4ccb0d9f979512f9c1973480123f9bd62a90d74ab0f9426fcf8f4a529" +dependencies = [ + "bytemuck", +] + [[package]] name = "same-file" version = "1.0.6" @@ -2777,6 +2942,12 @@ dependencies = [ "semver-parser", ] +[[package]] +name = "semver" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" + [[package]] name = "semver-parser" version = "0.10.2" @@ -2788,29 +2959,29 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.123" +version = "1.0.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d5161132722baa40d802cc70b15262b98258453e85e5d1d365c757c73869ae" +checksum = "771d4d9c4163ee138805e12c710dd365e4f44be8be0503cb1bb9eb989425d9c9" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.123" +version = "1.0.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9391c295d64fc0abb2c556bad848f33cb8296276b1ad2677d1ae1ace4f258f31" +checksum = "e801c1712f48475582b7696ac71e0ca34ebb30e09338425384269d9717c62cad" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.4", ] [[package]] name = "serde_json" -version = "1.0.87" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45" +checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea" dependencies = [ "itoa", "ryu", @@ -2860,6 +3031,32 @@ dependencies = [ "libc", ] 
+[[package]] +name = "simba" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0b7840f121a46d63066ee7a99fc81dcabbc6105e437cae43528cea199b5a05f" +dependencies = [ + "approx 0.5.1", + "num-complex 0.4.3", + "num-traits", + "paste", + "wide", +] + +[[package]] +name = "simba" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50582927ed6f77e4ac020c057f37a268fc6aebc29225050365aacbb9deeeddc4" +dependencies = [ + "approx 0.5.1", + "num-complex 0.4.3", + "num-traits", + "paste", + "wide", +] + [[package]] name = "slab" version = "0.4.7" @@ -2932,13 +3129,23 @@ dependencies = [ "lock_api", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "statrs" -version = "0.9.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d8c8660e3867d1a0578cbf7fd9532f1368b7460bd00b080e2d4669618a9bec7" +checksum = "2d08e5e1748192713cc281da8b16924fb46be7b0c2431854eadc785823e5696e" dependencies = [ - "rand 0.3.23", + "approx 0.5.1", + "lazy_static", + "nalgebra 0.29.0", + "num-traits", + "rand 0.8.5", ] [[package]] @@ -2964,6 +3171,17 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "syn" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c622ae390c9302e214c31013517c2061ecb2699935882c60a9b37f82f8625ae" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "system-deps" version = "6.0.3" @@ -3017,7 +3235,7 @@ checksum = "060d69a0afe7796bf42e9e2ff91f5ee691fb15c53d38b4b62a9a53eb23164745" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.103", ] [[package]] @@ -3051,6 +3269,32 @@ dependencies = [ "winapi", ] +[[package]] +name = "time" +version = "0.3.20" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890" +dependencies = [ + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" + +[[package]] +name = "time-macros" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd80a657e71da814b8e5d60d3374fc6d35045062245d80224748ae522dd76f36" +dependencies = [ + "time-core", +] + [[package]] name = "tiny-skia" version = "0.7.0" @@ -3062,7 +3306,7 @@ dependencies = [ "bytemuck", "cfg-if", "png", - "safe_arch", + "safe_arch 0.5.2", "tiny-skia-path", ] @@ -3190,9 +3434,9 @@ checksum = "375812fa44dab6df41c195cd2f7fecb488f6c09fbaafb62807488cefab642bff" [[package]] name = "typenum" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" [[package]] name = "ucd-trie" @@ -3328,7 +3572,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn", + "syn 1.0.103", "wasm-bindgen-shared", ] @@ -3362,7 +3606,7 @@ checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.103", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -3504,21 +3748,21 @@ checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb" [[package]] name = "whitebox_common" -version = "2.0.0" +version = "2.3.0" dependencies = [ "byteorder", - "nalgebra", + "nalgebra 0.32.2", "num-traits", "rand 0.7.3", - "rstar", - "rustc_version", + "rstar 0.10.0", + "rustc_version 0.3.3", "serde", "serde_json", ] [[package]] name = "whitebox_lidar" -version = "2.0.0" 
+version = "2.3.0" dependencies = [ "brotli", "byteorder", @@ -3532,13 +3776,15 @@ dependencies = [ [[package]] name = "whitebox_plugins" -version = "2.0.0" +version = "2.3.0" dependencies = [ "fasteval", "kd-tree 0.4.1", - "nalgebra", + "kdtree", + "nalgebra 0.18.1", "num_cpus", "rand 0.7.3", + "rstar 0.9.3", "tsp-rs", "typenum", "whitebox_common", @@ -3549,7 +3795,7 @@ dependencies = [ [[package]] name = "whitebox_raster" -version = "2.0.0" +version = "2.3.0" dependencies = [ "byteorder", "chrono", @@ -3562,7 +3808,7 @@ dependencies = [ [[package]] name = "whitebox_runner" -version = "0.1.0" +version = "2.0.0" dependencies = [ "anyhow", "case", @@ -3581,23 +3827,24 @@ dependencies = [ [[package]] name = "whitebox_tools" -version = "2.2.0" +version = "2.3.0" dependencies = [ "byteorder", "chrono", "kd-tree 0.5.1", "kdtree", - "miniz_oxide 0.3.7", - "nalgebra", + "miniz_oxide 0.7.1", + "nalgebra 0.32.2", "num_cpus", - "rand 0.7.3", + "rand 0.8.5", "rand_distr", "rayon", - "rstar", + "rstar 0.10.0", "serde", "serde_derive", "serde_json", "statrs", + "time 0.3.20", "typenum", "whitebox_common", "whitebox_lidar", @@ -3614,6 +3861,16 @@ dependencies = [ "whitebox_common", ] +[[package]] +name = "wide" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b689b6c49d6549434bf944e6b0f39238cf63693cb7a147e9d887507fffa3b223" +dependencies = [ + "bytemuck", + "safe_arch 0.6.0", +] + [[package]] name = "widestring" version = "0.5.1" @@ -3902,7 +4159,7 @@ dependencies = [ "flate2", "msdos_time", "podio", - "time", + "time 0.1.44", ] [[package]] @@ -3916,7 +4173,7 @@ dependencies = [ "crc32fast", "flate2", "thiserror", - "time", + "time 0.1.44", ] [[package]] diff --git a/WhiteboxTools_darwin_m_series.zip b/WhiteboxTools_darwin_m_series.zip new file mode 100644 index 00000000..22b50e02 Binary files /dev/null and b/WhiteboxTools_darwin_m_series.zip differ diff --git a/doc_img/IndividualTreeDetection.png 
b/doc_img/IndividualTreeDetection.png new file mode 100644 index 00000000..1a17d5d6 Binary files /dev/null and b/doc_img/IndividualTreeDetection.png differ diff --git a/doc_img/NormalizeLidar.png b/doc_img/NormalizeLidar.png new file mode 100644 index 00000000..5869cd4f Binary files /dev/null and b/doc_img/NormalizeLidar.png differ diff --git a/doc_img/WBRunner.png b/doc_img/WBRunner.png old mode 100755 new mode 100644 index d1d32bee..8436b8f7 Binary files a/doc_img/WBRunner.png and b/doc_img/WBRunner.png differ diff --git a/doc_img/WbRunner_light_mode.png b/doc_img/WbRunner_light_mode.png new file mode 100644 index 00000000..6e630bc5 Binary files /dev/null and b/doc_img/WbRunner_light_mode.png differ diff --git a/doc_img/WhiteboxToolsLogo.png b/doc_img/WhiteboxToolsLogo.png new file mode 100644 index 00000000..94c1bba4 Binary files /dev/null and b/doc_img/WhiteboxToolsLogo.png differ diff --git a/readme.txt b/readme.txt index 150f019a..582fd58c 100755 --- a/readme.txt +++ b/readme.txt @@ -56,19 +56,23 @@ for more details. * Release Notes: * ****************** -Version 2.3.0 (XX-XX-202X) -- Added the new Whitebox Runner v2.0. This is a whole new program with many advancements over the - previous version of the WbRunner. It is now written in pure Rust (compared with the old Python - TkInter app) using the egui user-interface library. It has a more modern feel, is cross-platform, - and has no dependencies (including Python). You can now open multiple tools simultaneously. -- Added the IndividualTreeDetection tool for intentifying points in a LiDAR point cloud that are associated - with the tops of individual trees. -- Added the NormalizeLidar tool for normalizing LiDAR point clouds, i.e., converting their z-values - from elevation to height-above-ground. +Version 2.3.0 (30-03-2023) +- Added the new Whitebox Runner v2.0. This version of WbRunner is an entirely new application with many + advancements over the previous version of the WbRunner. 
It is now written in pure Rust (compared with + the old Python TkInter app) using the egui user-interface library. It has a more modern feel, is + cross-platform, and has no dependencies (including Python). You can now open multiple tools simultaneously. +- WbRunner is now the preferred way for installing the Whitebox Toolset Extension (WTE). - Added the LaunchWbRunner and InstallWbExtension tools so that the Whitebox Runner will be more accessible from other Whitebox frontends. This way users will always have a good fall-back if the frontend is not up-to-date with the WBT backend, since WbRunner is always current with the installed - version of WBT. Also, the WbRunner is the preferred way now for installing Whitebox extensions. + version of WBT. +- Added the IndividualTreeDetection tool for identifying points in a LiDAR point cloud that are associated + with the tops of individual trees. +- Added the NormalizeLidar tool for normalizing LiDAR point clouds, i.e., converting their z-values + from elevation to height-above-ground. +- Natural Resources Canada (NRCan) has sponsored the open-sourcing of two tools contained in the extension, + VectorStreamNetworkAnalysis and RepairStreamVectorTopology. These two tools are now available in the + WbT Open Core. - Fixed a bug with the LidarShift tool. The tool was calculating the shift in transformed coordinates incorrectly leading to very strange outputs. - The MultiscaleTopographicPositionImage tool now takes an optional hillshade image as input. 
diff --git a/versioning_info.json b/versioning_info.json index e37bde78..3f538993 100644 --- a/versioning_info.json +++ b/versioning_info.json @@ -1,5 +1,5 @@ { - "wbtVersion": "2.2.0", + "wbtVersion": "2.3.0", "gteTools": [ "AccumulationCurvature", "AssessRoute", @@ -49,7 +49,6 @@ "RecreatePassLines", "RemoveFieldEdgePoints", "RemoveRasterPolygonHoles", - "RepairStreamVectorTopology", "RingCurvature", "RiverCenterlines", "Rotor", @@ -64,7 +63,6 @@ "SvmRegression", "TopographicPositionAnimation", "Unsphericity", - "VectorStreamNetworkAnalysis", "VerticalExcessCurvature", "YieldFilter", "YieldMap", @@ -85,7 +83,6 @@ "LowPointsOnHeadwaterDivides", "MultiscaleCurvatures", "Openness", - "RepairStreamVectorTopology", "RingCurvature", "RiverCenterlines", "Rotor", @@ -96,7 +93,6 @@ "SmoothVegetationResidual", "TopographicPositionAnimation", "Unsphericity", - "VectorStreamNetworkAnalysis", "VerticalExcessCurvature" ], "lidarTools": [ diff --git a/whitebox-common/Cargo.toml b/whitebox-common/Cargo.toml index ae8cf69f..dbbfcc39 100755 --- a/whitebox-common/Cargo.toml +++ b/whitebox-common/Cargo.toml @@ -1,17 +1,17 @@ [package] name = "whitebox_common" -version = "2.0.0" +version = "2.3.0" authors = ["John Lindsay "] edition = "2021" [dependencies] -byteorder = "^1.3.1" -nalgebra = "0.18.0" +byteorder = "^1.4.3" +nalgebra = "0.32.2" num-traits = "0.2.14" rand = { version = "0.7", features = ["small_rng"] } -rstar = "0.7.1" -serde = { version = "1.0.123", features = ["derive"] } -serde_json = "1.0.64" +rstar = "0.10.0" +serde = { version = "1.0.158", features = ["derive"] } +serde_json = "1.0.94" [build-dependencies] rustc_version = "0.3.3" \ No newline at end of file diff --git a/whitebox-common/src/structures/radial_basis_function.rs b/whitebox-common/src/structures/radial_basis_function.rs index d3a0ec35..54071b96 100755 --- a/whitebox-common/src/structures/radial_basis_function.rs +++ b/whitebox-common/src/structures/radial_basis_function.rs @@ -1,7 +1,11 @@ //! 
Based on rbf-interp, a library for multidimensional interpolation. //! by Raph Levien (raphlinus) //! https://github.com/linebender/rbf-interp/blob/master/src/lib.rs -use nalgebra::{DMatrix, DVector, SVD}; +use nalgebra::{ + DMatrix, + DVector, + SVD +}; #[derive(Clone, Copy)] pub enum Basis { diff --git a/whitebox-common/src/structures/rectangle_with_data.rs b/whitebox-common/src/structures/rectangle_with_data.rs index de23c246..ce040f67 100755 --- a/whitebox-common/src/structures/rectangle_with_data.rs +++ b/whitebox-common/src/structures/rectangle_with_data.rs @@ -60,39 +60,39 @@ impl PointDistance for RectangleWithData { } } -#[cfg(test)] -mod test { - use super::RectangleWithData; - use crate::rstar::{PointDistance, RTree}; +// #[cfg(test)] +// mod test { +// use super::RectangleWithData; +// use crate::rstar::{PointDistance, RTree}; - #[test] - fn rectangle_distance() { - let rectangle = RectangleWithData::new(1, [0.5, 0.5], [1.0, 2.0]); - let small_val = 0.00001; - assert!((rectangle.distance_2(&[0.5, 0.5]) - 0.0) < small_val); - assert!((rectangle.distance_2(&[0.0, 0.5]) - 0.5 * 0.5) < small_val); - assert!((rectangle.distance_2(&[0.5, 1.0]) - 0.0) < small_val); - assert!((rectangle.distance_2(&[0.0, 0.0]) - 0.5) < small_val); - assert!((rectangle.distance_2(&[0.0, 1.0]) - 0.5 * 0.5) < small_val); - assert!((rectangle.distance_2(&[1.0, 3.0]) - 1.0) < small_val); - assert!((rectangle.distance_2(&[1.0, 1.0]) - 0.0) < small_val); - } +// #[test] +// fn rectangle_distance() { +// let rectangle = RectangleWithData::new(1, [0.5, 0.5], [1.0, 2.0]); +// let small_val = 0.00001; +// assert!((rectangle.distance_2(&[0.5, 0.5]) - 0.0) < small_val); +// assert!((rectangle.distance_2(&[0.0, 0.5]) - 0.5 * 0.5) < small_val); +// assert!((rectangle.distance_2(&[0.5, 1.0]) - 0.0) < small_val); +// assert!((rectangle.distance_2(&[0.0, 0.0]) - 0.5) < small_val); +// assert!((rectangle.distance_2(&[0.0, 1.0]) - 0.5 * 0.5) < small_val); +// assert!((rectangle.distance_2(&[1.0, 
3.0]) - 1.0) < small_val); +// assert!((rectangle.distance_2(&[1.0, 1.0]) - 0.0) < small_val); +// } - #[test] - fn rectangle_locate_all_at_point() { - let tree = RTree::bulk_load(vec![ - RectangleWithData::new(1, [0.0, 0.0], [2.0, 2.0]), - RectangleWithData::new(2, [1.0, 1.0], [3.0, 3.0]), - RectangleWithData::new(3, [2.5, 2.5], [4.0, 4.0]), - ]); +// #[test] +// fn rectangle_locate_all_at_point() { +// let tree = RTree::bulk_load(vec![ +// RectangleWithData::new(1, [0.0, 0.0], [2.0, 2.0]), +// RectangleWithData::new(2, [1.0, 1.0], [3.0, 3.0]), +// RectangleWithData::new(3, [2.5, 2.5], [4.0, 4.0]), +// ]); - assert_eq!(tree.locate_all_at_point(&[1.5, 1.5]).count(), 2); - assert_eq!(tree.locate_all_at_point(&[0.0, 0.0]).count(), 1); - assert_eq!(tree.locate_all_at_point(&[-1., 0.0]).count(), 0); - assert_eq!(tree.locate_all_at_point(&[2.6, 2.6]).count(), 2); +// assert_eq!(tree.locate_all_at_point(&[1.5, 1.5]).count(), 2); +// assert_eq!(tree.locate_all_at_point(&[0.0, 0.0]).count(), 1); +// assert_eq!(tree.locate_all_at_point(&[-1., 0.0]).count(), 0); +// assert_eq!(tree.locate_all_at_point(&[2.6, 2.6]).count(), 2); - let ret = tree.locate_all_at_point(&[1.5, 1.5]).collect::>(); - assert_eq!(ret[0].data, 2); - assert_eq!(ret[1].data, 1); - } -} +// let ret = tree.locate_all_at_point(&[1.5, 1.5]).collect::>(); +// assert_eq!(ret[0].data, 2); +// assert_eq!(ret[1].data, 1); +// } +// } diff --git a/whitebox-lidar/Cargo.toml b/whitebox-lidar/Cargo.toml index b5048192..d1186f09 100755 --- a/whitebox-lidar/Cargo.toml +++ b/whitebox-lidar/Cargo.toml @@ -1,11 +1,11 @@ [package] name = "whitebox_lidar" -version = "2.0.0" +version = "2.3.0" authors = ["John Lindsay "] edition = "2021" [dependencies] -byteorder = "^1.3.1" +byteorder = "^1.4.3" chrono = "0.4.21" las = { version = "0.8.0", features = ["laz"] } miniz_oxide = "0.3.6" diff --git a/whitebox-plugins/Cargo.toml b/whitebox-plugins/Cargo.toml index e0b4e228..7f037226 100755 --- a/whitebox-plugins/Cargo.toml +++ 
b/whitebox-plugins/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "whitebox_plugins" -version = "2.0.0" +version = "2.3.0" authors = ["John Lindsay "] edition = "2021" @@ -68,6 +68,10 @@ path = "src/quinn_flow_accumulation/main.rs" name = "raster_calculator" path = "src/raster_calculator/main.rs" +[[bin]] +name = "repair_stream_vector_topology" +path = "src/repair_stream_vector_topology/main.rs" + [[bin]] name = "rho8_flow_accumulation" path = "src/rho8_flow_accumulation/main.rs" @@ -80,12 +84,18 @@ path = "src/split_vector_lines/main.rs" name = "travelling_salesman_problem" path = "src/travelling_salesman_problem/main.rs" +[[bin]] +name = "vector_stream_network_analysis" +path = "src/vector_stream_network_analysis/main.rs" + [dependencies] fasteval = "0.2.4" kd-tree = { version = "0.4.1", features = ["rayon"] } +kdtree = "0.6.0" nalgebra = "0.18.0" num_cpus = "1.13.0" rand = { version = "0.7", features = ["small_rng"] } +rstar = "0.9.3" tsp-rs = "0.1.0" typenum = "1.15.0" whitebox_common = { path = "../whitebox-common" } diff --git a/whitebox-plugins/src/individual_tree_detection/main.rs b/whitebox-plugins/src/individual_tree_detection/main.rs index 1e997df4..4336504c 100644 --- a/whitebox-plugins/src/individual_tree_detection/main.rs +++ b/whitebox-plugins/src/individual_tree_detection/main.rs @@ -24,7 +24,7 @@ use std::thread; /// This tool can be used to identify points in a LiDAR point cloud that are associated with the tops of individual trees. The /// tool takes a LiDAR point cloud as an input (`input_lidar`) and it is best if the input file has been normalized using the -/// `lidar_tophat_transform` function, such that points record height above the ground surface. Note that the `input` +/// `NormalizeLidar` or `LidarTophatTransform` tools, such that points record height above the ground surface. 
Note that the `input` /// parameter is optional and if left unspecified the tool will search for all valid LiDAR (*.las, *.laz, *.zlidar) files /// contained within the current working directory. This 'batch mode' operation is common among many of the LiDAR processing /// tools. Output vectors are saved to disc automatically for each processed LiDAR file when operating in batch mode. @@ -46,8 +46,10 @@ use std::thread; /// simply set the `only_use_veg` parameter to True. This parameter should only be set to True when you know that the /// input file contains point classifications, otherwise the tool may generate an empty output vector file. /// +/// ![](../../doc_img/IndividualTreeDetection.png) +/// /// # See Also -/// `LidarTophatTransform` +/// `NormalizeLidar`, `LidarTophatTransform` fn main() { let args: Vec = env::args().collect(); diff --git a/whitebox-plugins/src/normalize_lidar/main.rs b/whitebox-plugins/src/normalize_lidar/main.rs index 5ff6e844..6e1b5824 100644 --- a/whitebox-plugins/src/normalize_lidar/main.rs +++ b/whitebox-plugins/src/normalize_lidar/main.rs @@ -43,6 +43,8 @@ use std::thread; /// results in some cases, the `NormalizeLidar` tool likely works better under more rugged topography and in areas with /// extensive building coverage, and provides greater control over the definition of the ground surface. /// +/// ![](../../doc_img/NormalizeLidar.png) +/// /// # See Also /// `LidarTophatTransform`, `IndividualTreeDetection`, `LidarGroundPointFilter`, `ClassifyLidar` fn main() { diff --git a/whitebox-plugins/src/repair_stream_vector_topology/main.rs b/whitebox-plugins/src/repair_stream_vector_topology/main.rs new file mode 100644 index 00000000..72c8ff4e --- /dev/null +++ b/whitebox-plugins/src/repair_stream_vector_topology/main.rs @@ -0,0 +1,868 @@ +/* +Authors: Prof. 
John Lindsay
+Created: 03/08/2021 (originally in Whitebox Toolset Extension)
+Last Modified: 23/03/2023
+License: MIT
+*/
+
+use rstar::primitives::{GeomWithData, Line};
+use rstar::RTree;
+use std::env;
+use std::f64;
+use std::io::{Error, ErrorKind};
+use std::ops::Index;
+use std::path;
+use std::str;
+use std::time::Instant;
+use whitebox_common::structures::{
+    LineSegment,
+    Point2D
+};
+use whitebox_common::utils::{
+    get_formatted_elapsed_time,
+    wrapped_print
+};
+use whitebox_vector::{
+    AttributeField,
+    FieldData,
+    FieldDataType,
+    Shapefile,
+    ShapefileGeometry,
+    ShapeType
+};
+const EPSILON: f64 = std::f64::EPSILON;
+
+/// This tool can be used to resolve many of the topological errors and inconsistencies associated with
+/// manually digitized vector stream networks, i.e. hydrography data. A properly structured stream network
+/// should consist of a series of stream segments that connect a channel head to a downstream confluence,
+/// or an upstream confluence to a downstream confluence/outlet. This tool will join vector arcs that
+/// connect at arbitrary, non-confluence points along stream segments. It also splits an arc where
+/// a tributary stream connects at a mid-point, thereby creating a proper confluence where two upstream
+/// tributaries converge into a downstream segment. The tool also handles non-connecting tributaries
+/// caused by dangling arcs, i.e. overshoots and undershoots.
+///
+/// ![](../../doc_img/RepairStreamVectorTopology.png)
+///
+/// The user must specify the name of the input vector stream network (`--input`) and the output file
+/// (`--output`). Additionally, a distance threshold for snapping dangling arcs (`--snap`) must be
+/// specified. This distance is in the input layer's x-y units. The tool works best on projected input
+/// data, however, if the input are in geographic coordinates (latitude and longitude), then specifying a
+/// small valued snap distance is advisable.
Notice that the attributes of the input layer will not be
+/// carried over to the output file because there is not a one-for-one feature correspondence between the
+/// two files due to the joins and splits of stream segments. Instead the output attribute table will
+/// only contain a feature ID (FID) entry.
+///
+/// > Note: this tool should be used to pre-process vector streams that are input to the
+/// > `VectorStreamNetworkAnalysis` tool.
+///
+/// # See Also
+/// `VectorStreamNetworkAnalysis`, `FixDanglingArcs`
+fn main() {
+    let args: Vec<String> = env::args().collect();
+
+    if args[1].trim() == "run" {
+        match run(&args) {
+            Ok(_) => {}
+            Err(e) => panic!("{:?}", e),
+        }
+    }
+
+    if args.len() <= 1 || args[1].trim() == "help" {
+        // print help
+        help();
+    }
+
+    if args[1].trim() == "version" {
+        // print version information
+        version();
+    }
+}
+
+fn help() {
+    let mut ext = "";
+    if cfg!(target_os = "windows") {
+        ext = ".exe";
+    }
+
+    let exe_name = &format!("repair_stream_vector_topology{}", ext);
+    let sep: String = path::MAIN_SEPARATOR.to_string();
+    let s = r#"
+    This tool resolves topological errors and inconsistencies associated with digitized vector streams.
+
+    The following commands are recognized:
+    help       Prints help information.
+    run        Runs the tool.
+    version    Prints the tool version information.
+
+    The following flags can be used with the 'run' command:
+    -i, --input      Name of the input lines vector file.
+    -o, --output     Name of the output lines vector file.
+    --snap, --dist   Snap distance, in xy units (metres).
+
+    Input/output file names can be fully qualified, or can rely on the
+    working directory contained in the WhiteboxTools settings.json file.
+
+    Example Usage:
+    >> .*EXE_NAME run -i=streams.shp -o=streams_fixed.shp --snap=2.0
+
+    Note: Use of this tool requires a valid license. To obtain a license,
+    contact Whitebox Geospatial Inc. (support@whiteboxgeo.com).
+ "# + .replace("*", &sep) + .replace("EXE_NAME", exe_name); + println!("{}", s); +} + +fn version() { + const VERSION: Option<&'static str> = option_env!("CARGO_PKG_VERSION"); + println!( + "repair_stream_vector_topology v{} by Dr. John B. Lindsay (c) 2023.", + VERSION.unwrap_or("Unknown version") + ); +} + +fn get_tool_name() -> String { + String::from("RepairStreamVectorTopology") // This should be camel case and is a reference to the tool name. +} + +fn run(args: &Vec) -> Result<(), std::io::Error> { + let tool_name = get_tool_name(); + + let sep: String = path::MAIN_SEPARATOR.to_string(); + + // Read in the environment variables and get the necessary values + let configurations = whitebox_common::configs::get_configs()?; + let mut working_directory = configurations.working_directory.clone(); + if !working_directory.is_empty() && !working_directory.ends_with(&sep) { + working_directory += &sep; + } + + // read the arguments + let mut input_file = String::new(); + let mut output_file: String = String::new(); + let mut snap_dist = 1.0; + if args.len() <= 1 { + return Err(Error::new( + ErrorKind::InvalidInput, + "Tool run with too few parameters.", + )); + } + for i in 0..args.len() { + let mut arg = args[i].replace("\"", ""); + arg = arg.replace("\'", ""); + let cmd = arg.split("="); // in case an equals sign was used + let vec = cmd.collect::>(); + let mut keyval = false; + if vec.len() > 1 { + keyval = true; + } + let flag_val = vec[0].to_lowercase().replace("--", "-"); + if flag_val == "-i" || flag_val == "-input" { + input_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-o" || flag_val == "-output" { + output_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-snap" || flag_val == "-dist" { + snap_dist = if keyval { + vec[1] + .to_string() + .parse::() + .expect(&format!("Error parsing {}", flag_val)) + } else { + args[i + 1] + .to_string() + 
.parse::() + .expect(&format!("Error parsing {}", flag_val)) + }; + } + } + + if configurations.verbose_mode { + let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28); + // 28 = length of the 'Powered by' by statement. + println!("{}", "*".repeat(welcome_len)); + println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len())); + println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28)); + println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23)); + println!("{}", "*".repeat(welcome_len)); + } + + let mut progress: usize; + let mut old_progress: usize = 1; + + let start = Instant::now(); + + if !input_file.contains(&sep) && !input_file.contains("/") { + input_file = format!("{}{}", working_directory, input_file); + } + + if !output_file.contains(&sep) && !output_file.contains("/") { + output_file = format!("{}{}", working_directory, output_file); + } + + if snap_dist <= 0f64 { + if configurations.verbose_mode { + wrapped_print("Error: The snap distance must be greater than 0.0.", 50); + } + } + + let input = Shapefile::read(&input_file)?; + + // Make sure the input vector file is of polyline type + if input.header.shape_type.base_shape_type() != ShapeType::PolyLine { + return Err(Error::new( + ErrorKind::InvalidInput, + "The vector data must be of PolyLine base shape type.", + )); + } + + // Read each line segment into an rtree. 
+ type Location = GeomWithData<[f64; 2], usize>; + let mut line_segments = vec![]; + let mut end_nodes = vec![]; + let (mut part_start, mut part_end): (usize, usize); + let mut fid = 0usize; // fid is unique to each part in the vector + let mut segment_num = 0usize; + let mut polylines = vec![]; + for record_num in 0..input.num_records { + let record = input.get_record(record_num); + for part in 0..record.num_parts as usize { + part_start = record.parts[part] as usize; + part_end = if part < record.num_parts as usize - 1 { + record.parts[part + 1] as usize - 1 + } else { + record.num_points as usize - 1 + }; + + polylines.push( + Polyline::new( + &record.points[part_start..=part_end], + fid + ) + ); + + // segment_num = 0; + // for i in part_start+1..=part_end { + // line_segments.push( + // LineWithData::new( + // (fid, segment_num), + // // fid, + // [record.points[i-1].x, record.points[i-1].y], + // [record.points[i].x, record.points[i].y] + // ) + // ); + // segment_num += 1; + // } + + end_nodes.push(Location::new( + [record.points[part_start].x, record.points[part_start].y], + fid + )); + + end_nodes.push(Location::new( + [record.points[part_end].x, record.points[part_end].y], + fid + )); + + fid += 1; + } + + if configurations.verbose_mode { + progress = + (100.0_f64 * (record_num + 1) as f64 / input.num_records as f64) as usize; + if progress != old_progress { + println!("Reading vector: {}%", progress); + old_progress = progress; + } + } + } + + let mut num_polylines = polylines.len(); // will be updated after the joins. + + + + + // Find all of the segments that can be joined because they link at non-confluences. 
+ let endnode_tree = RTree::bulk_load(end_nodes); + let precision = EPSILON * 10f64; + let mut p1: Point2D; + let mut connections = vec![[num_polylines, num_polylines]; num_polylines]; + let mut connected_polyline: usize; + let mut num_neighbours: usize; + for fid in 0..num_polylines { + // fid = polylines[poly_id].id1; + p1 = polylines[fid].get_first_node(); + let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision); + + connected_polyline = num_polylines; + num_neighbours = 0; + for p in ret { + if p.data != fid { + connected_polyline = p.data; + num_neighbours += 1; + } + } + if num_neighbours == 1 { + connections[fid][0] = connected_polyline; + } + + p1 = polylines[fid].get_last_node(); + let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision); + + connected_polyline = num_polylines; + num_neighbours = 0; + for p in ret { + if p.data != fid { + connected_polyline = p.data; + num_neighbours += 1; + } + } + if num_neighbours == 1 { + connections[fid][1] = connected_polyline; + } + + if configurations.verbose_mode { + progress = + (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize; + if progress != old_progress { + println!("Looking for joins in arcs: {}%", progress); + old_progress = progress; + } + } + } + + // now perform the actual joins + let mut marked_for_deletion = vec![false; num_polylines]; + for fid in 0..num_polylines { + // We're looking for segments where one end is joined and the other end is not. These are + // valid starting segements for chains of joined segments. 
+ // if fid == 21414 || fid == 16471 || fid == 3703 || fid == 3683 { + // println!("{} {} {} {} {}", fid, connections[fid][0], connections[fid][1], marked_for_deletion[fid], num_polylines); + // } + if !marked_for_deletion[fid] { + let is_joined_at_start = connections[fid][0] < num_polylines && connections[fid][1] == num_polylines; + let mut is_joined_at_end = connections[fid][0] == num_polylines && connections[fid][1] < num_polylines; + // if fid == 21414 || fid == 16471 || fid == 3703 || fid == 3683 { + // println!("{} {} {} {} {}", fid, is_joined_at_start, connections[fid][0], is_joined_at_end, connections[fid][1]); + // } + if is_joined_at_start || is_joined_at_end { + // let flag_high = fid == 3683; + marked_for_deletion[fid] = true; + // It's a start to a connected chain. + let mut pl = Polyline::new_empty(fid); + if is_joined_at_end { + pl.vertices.extend_from_slice(&polylines[fid].vertices.clone()); + } else { + let mut rev = polylines[fid].vertices.clone(); + rev.reverse(); + pl.vertices.extend_from_slice(&rev); + } + // let mut current_fid = if connections[fid][0] < num_polylines { + // connections[fid][0] + // } else { + // connections[fid][1] + // }; + let mut current_fid = fid; + loop { + // if flag_high { + // let t1 = if connections[current_fid][0] < num_polylines { + // marked_for_deletion[connections[current_fid][0]] + // } else { + // true + // }; + + // let t2 = if connections[current_fid][1] < num_polylines { + // marked_for_deletion[connections[current_fid][1]] + // } else { + // true + // }; + + // println!("{} {} {} {} {}", current_fid, connections[current_fid][0], t1, connections[current_fid][1], t2); + // } + marked_for_deletion[current_fid] = true; + // is_joined_at_end = false; + current_fid = if connections[current_fid][0] < num_polylines && !marked_for_deletion[connections[current_fid][0]] { + connections[current_fid][0] + } else if connections[current_fid][1] < num_polylines && !marked_for_deletion[connections[current_fid][1]] { + // 
is_joined_at_end = true; + connections[current_fid][1] + } else { + break; + }; + + // which way is it joined? + is_joined_at_end = false; + if pl.get_last_node().distance(&polylines[current_fid].get_first_node()) <= precision { + is_joined_at_end = true; + } + + if is_joined_at_end { + pl.vertices.extend_from_slice(&polylines[current_fid].vertices.clone()); + } else { + let mut rev = polylines[current_fid].vertices.clone(); + rev.reverse(); + pl.vertices.extend_from_slice(&rev); + } + } + + polylines.push(pl); + } + } + } + + for i in (0..num_polylines).rev() { + if marked_for_deletion[i] { + polylines.remove(i); + } + } + + num_polylines = polylines.len(); + + // remove any zero-length segments. + for fid in 0..num_polylines { + for i in (1..polylines[fid].len()).rev() { + if polylines[fid][i].distance(&polylines[fid][i-1]) <= precision { + polylines[fid].vertices.remove(i); + } + } + } + + + + + + + + + + end_nodes = vec![]; + for fid in 0..num_polylines { + polylines[fid].id = fid; + + segment_num = 0; + for i in 1..polylines[fid].vertices.len() { + line_segments.push( + GeomWithData::new( + Line::new( + [polylines[fid].vertices[i-1].x, polylines[fid].vertices[i-1].y], + [polylines[fid].vertices[i].x, polylines[fid].vertices[i].y] + ), + (fid, segment_num) + ) + ); + segment_num += 1; + } + + p1 = polylines[fid].get_first_node(); + end_nodes.push(Location::new( + [p1.x, p1.y], + fid + )); + + p1 = polylines[fid].get_last_node(); + end_nodes.push(Location::new( + [p1.x, p1.y], + fid + )); + + if configurations.verbose_mode { + progress = + (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize; + if progress != old_progress { + println!("Looking for dangling arcs: {}%", progress); + old_progress = progress; + } + } + } + + + + + let endnode_tree = RTree::bulk_load(end_nodes); + let line_segments_tree = RTree::bulk_load(line_segments); + let snap_dist_sq = snap_dist * snap_dist; + // let mut points: Vec; + let mut num_vertices: usize; + let mut min_dist: 
f64; + let mut dist: f64; + let mut point = Point2D::new(0f64, 0f64); // just to satisfy the need to initialize. + let mut p2: Point2D; + let mut line_seg: LineSegment; + let mut line_seg2: LineSegment = LineSegment::new(Point2D::new(0f64, 0f64), Point2D::new(0f64, 0f64)); + let mut joined_feature: usize = 0; + for poly_id in 0..polylines.len() { + fid = polylines[poly_id].id; + p1 = polylines[fid].get_first_node(); + let ret = line_segments_tree.locate_within_distance([p1.x, p1.y], snap_dist_sq); + // See if any of the line segments within the snap distance are from a different polyline. + // If so, find the nearest point. + min_dist = f64::INFINITY; + for line in ret { + if line.data.0 != fid { + let geom = line.geom(); + let p = geom.nearest_point(&[p1.x, p1.y]); + p2 = Point2D::new(p[0], p[1]); + dist = p1.distance(&p2); + if dist < min_dist { + min_dist = dist; + point = p2; + segment_num = line.data.1; + joined_feature = line.data.0; + line_seg2 = LineSegment::new( + Point2D::new(geom.from[0], geom.from[1]), + Point2D::new(geom.to[0], geom.to[1]) + ); + } + } + } + + // how many endnodes is this endnode in contact with? This is for y-junctions + let ret_endnodes = endnode_tree.locate_within_distance([p1.x, p1.y], precision); + num_neighbours = 0; + for p in ret_endnodes { + if p.data != fid { + num_neighbours += 1; + } + } + + if (min_dist.is_finite() && min_dist > precision) || (min_dist <= precision && num_neighbours == 0) { + // Is it an undershoot or an overshoot? + // if it is an overshoot, then the nearest point will have a distance of zero with + // the current line segment too. That is, it will be coincident. + line_seg = LineSegment::new(p1, polylines[fid][1]); + + if (line_seg.dist_to_segment(point) - min_dist).abs() <= precision { + // It's an undershoot, add the point to the start of the polyline. + polylines[fid].insert(0, point); + // all the split indices will be one less than they should be now that we've + // inserted a vertex at the start. 
+ polylines[fid].splits_offset_by_one = true; + polylines[joined_feature].insert_split_point(segment_num, point); + // points.push(point); + } else { // It's an overshoot. + point = match line_seg.get_intersection(&line_seg2) { + Some(ls) => ls.p1, + None => point // do nothing + }; + if polylines[fid][1].distance(&point) > precision { + polylines[fid].insert(0, point); + polylines[fid].remove(1); + } + polylines[joined_feature].insert_split_point(segment_num, point); + } + } + + p1 = polylines[fid].get_last_node(); + let ret = line_segments_tree.locate_within_distance([p1.x, p1.y], snap_dist_sq); + min_dist = f64::INFINITY; + for line in ret { + if line.data.0 != fid { + let geom = line.geom(); + let p = geom.nearest_point(&[p1.x, p1.y]); + p2 = Point2D::new(p[0], p[1]); + dist = p1.distance(&p2); + if dist < min_dist { + min_dist = dist; + point = p2; + segment_num = line.data.1; + joined_feature = line.data.0; + line_seg2 = LineSegment::new( + Point2D::new(geom.from[0], geom.from[1]), + Point2D::new(geom.to[0], geom.to[1]) + ); + } + } + } + + // how many endnodes is this endnode in contact with? This is for y-junctions + let ret_endnodes = endnode_tree.locate_within_distance([p1.x, p1.y], precision); + num_neighbours = 0; + for p in ret_endnodes { + if p.data != fid { + num_neighbours += 1; + } + } + + if (min_dist.is_finite() && min_dist > precision) || (min_dist <= precision && num_neighbours == 0) { + // if min_dist.is_finite() && min_dist >= precision { + // Is it an undershoot or an overshoot? + // if it is an overshoot, then the nearest point will have a distance of zero with + // the current line segment too. That is, it will be coincident. + line_seg = LineSegment::new(polylines[fid][polylines[fid].len()-2], p1); + if (line_seg.dist_to_segment(point) - min_dist).abs() <= precision { + // It's an undershoot, add the line end point. 
+ // points.push(record.points[part_end].clone()); + polylines[fid].push(point); + polylines[joined_feature].insert_split_point(segment_num, point); + } else { // It's an overshoot + num_vertices = polylines[fid].len(); + polylines[fid].remove(num_vertices-1); + + point = match line_seg.get_intersection(&line_seg2) { + Some(ls) => ls.p1, + None => point // do nothing + }; + polylines[fid].push(point); + polylines[joined_feature].insert_split_point(segment_num, point); + } + } + + if configurations.verbose_mode { + progress = + (100.0_f64 * (poly_id + 1) as f64 / polylines.len() as f64) as usize; + if progress != old_progress { + println!("Looking for dangling arcs: {}%", progress); + old_progress = progress; + } + } + } + + // Deal with the splits. + let mut polylines2 = vec![]; + for poly_id in 0..polylines.len() { + if polylines[poly_id].split_points.len() == 0 { + polylines2.push(polylines[poly_id].clone()); + } else { + let splits = polylines[poly_id].split(); + for pl in splits { + polylines2.push(pl.clone()); + } + } + } + + + // remove any zero-length segments. + for fid in 0..polylines2.len() { + for i in (1..polylines2[fid].len()).rev() { + if polylines2[fid][i].distance(&polylines2[fid][i-1]) <= precision { + polylines2[fid].vertices.remove(i); + } + } + } + + + + // Find segments that have a gap at their endnodes and can be joined. 
+ + + // create output file + let mut output = Shapefile::initialize_using_file(&output_file, &input, ShapeType::PolyLine, false)?; + + // add the attributes + // let in_atts = input.attributes.get_fields(); + + // output.attributes.add_fields(&in_atts); + output.attributes.add_field( + &AttributeField::new( + "FID", + FieldDataType::Int, + 7u8, + 0u8 + ) + ); + + let mut sfg: ShapefileGeometry; + // let mut record_num: usize; + for poly_id in 0..polylines2.len() { + sfg = ShapefileGeometry::new(ShapeType::PolyLine); + sfg.add_part(&polylines2[poly_id].vertices); + output.add_record(sfg); + + // record_num = polylines2[poly_id].id2; + // let att_data = input.attributes.get_record(record_num); + // output.attributes.add_record(att_data.clone(), false); + output.attributes.add_record(vec![FieldData::Int((poly_id + 1) as i32)], false); + + if configurations.verbose_mode { + progress = + (100.0_f64 * (poly_id + 1) as f64 / polylines2.len() as f64) as usize; + if progress != old_progress { + println!("Looking for dangling arcs: {}%", progress); + old_progress = progress; + } + } + } + + + if configurations.verbose_mode { + println!("Saving data...") + }; + let _ = match output.write() { + Ok(_) => { + if configurations.verbose_mode { + println!("Output file written") + } + } + Err(e) => return Err(e), + }; + + let elapsed_time = get_formatted_elapsed_time(start); + + if configurations.verbose_mode { + println!( + "\n{}", + &format!("Elapsed Time (Including I/O): {}", elapsed_time) + ); + } + + + Ok(()) +} + +#[derive(Default, Clone, Debug)] +struct Polyline { + vertices: Vec, + id: usize, + pub split_points: Vec<(usize, Point2D, f64)>, + splits_offset_by_one: bool, +} + +impl Index for Polyline { + type Output = Point2D; + + fn index<'a>(&'a self, index: usize) -> &'a Point2D { + &self.vertices[index] + } +} + +impl Polyline { + // Creates a new Polyline from vertices + fn new(vertices: &[Point2D], id: usize) -> Self { + Polyline { + vertices: vertices.clone().to_vec(), 
+ id, + split_points: vec![], + splits_offset_by_one: false, + } + } + + // Creates a new empty Polyline + fn new_empty(id: usize) -> Polyline { + Polyline { + vertices: vec![], + id, + split_points: vec![], + splits_offset_by_one: false, + } + } + + // returns the number of vertices + fn len(&self) -> usize { + self.vertices.len() + } + + // Inserts a point vertex at the end of the line. + fn push(&mut self, v: Point2D) { + self.vertices.push(v); + } + + // Inserts a point vertex at a specific index. + fn insert(&mut self, index: usize, v: Point2D) { + if index <= self.len() { + self.vertices.insert(index, v); + } + } + + // Removes a point vertex at a specified index. + fn remove(&mut self, index: usize) { + if index <= self.len() { + self.vertices.remove(index); + } + } + + fn insert_split_point(&mut self, position: usize, point: Point2D) { + if position < self.len() - 1 { // position >= 0 && + self.split_points.push((position, point, 0f64)); + } + } + + fn split(&mut self) -> Vec { + // if there is an offset value it is because a vertex was added to the start of the polyline + if self.splits_offset_by_one { + for split in 0..self.split_points.len() { + self.split_points[split].0 += 1; + } + } + + // make sure there are no duplicate splits + for split in (1..self.split_points.len()).rev() { + if self.split_points[split].0 == self.split_points[split - 1].0 && + self.split_points[split].1 == self.split_points[split - 1].1 { + self.split_points.remove(split); + } + } + + // calculate cumulative segment distances at the start of the segment + let mut segment_distances = Vec::with_capacity(self.len()); + segment_distances.push(0f64); + for i in 1..self.len() { + segment_distances.push(segment_distances[i-1] + self[i-1].distance(&self[i])); + } + + // now calculate the cumulative distance from the start of the polyline of the split points. 
+ let mut dist: f64; + for split in 0..self.split_points.len() { + dist = segment_distances[self.split_points[split].0] + self[self.split_points[split].0].distance(&self.split_points[split].1); + self.split_points[split].2 = dist; + } + + // This is a problem because we also need to sort the points by distance. + self.split_points + .sort_by(|a, b| a.2.partial_cmp(&b.2).unwrap()); + + + // perform the split + let mut ret: Vec = Vec::with_capacity(self.split_points.len() + 1); + if self.split_points.len() > 0 { + let mut current_split = 0; + let mut line = Polyline::new_empty(self.id); + for node in 0..self.len() { + if current_split == self.split_points.len() || node < self.split_points[current_split].0 { + line.push(self[node]); + } else { + line.push(self[node]); + line.push(self.split_points[current_split].1); + ret.push(line); + line = Polyline::new_empty(self.id); + line.push(self.split_points[current_split].1); + + // current_split += 1; + + // Deal with segments with multiple splits. 
+ let former_node = self.split_points[current_split].0; + loop { + current_split += 1; + + if current_split < self.split_points.len() && former_node == self.split_points[current_split].0 { + line.push(self.split_points[current_split].1); + ret.push(line); + line = Polyline::new_empty(self.id); + line.push(self.split_points[current_split].1); + } else { + break; + } + } + } + } + ret.push(line); + } + + ret + } + + fn get_first_node(&self) -> Point2D { + self[0] + } + + fn get_last_node(&self) -> Point2D { + self[self.vertices.len() - 1] + } +} \ No newline at end of file diff --git a/whitebox-plugins/src/repair_stream_vector_topology/repair_stream_vector_topology.json b/whitebox-plugins/src/repair_stream_vector_topology/repair_stream_vector_topology.json new file mode 100755 index 00000000..33f1c64c --- /dev/null +++ b/whitebox-plugins/src/repair_stream_vector_topology/repair_stream_vector_topology.json @@ -0,0 +1,35 @@ +{ + "tool_name": "RepairStreamVectorTopology", + "exe": "repair_stream_vector_topology", + "short_description": "This tool resolves topological errors and inconsistencies associated with digitized vector streams.", + "help": "This tool resolves topological errors and inconsistencies associated with digitized vector streams.", + "toolbox": "Stream Network Analysis", + "license": "MIT", + "example": ">> .*EXE_NAME -r=RepairStreamVectorTopology --input=streams.shp --output=streams_fixed.shp --snap=2.0", + "parameters": [ + { + "name": "Input Vector Lines", + "flags": ["-i", "--input"], + "description": "Name of the input lines vector file.", + "parameter_type": {"ExistingFile":{"Vector":"Line"}}, + "default_value": null, + "optional": false + }, + { + "name": "Output Lines", + "flags": ["-o", "--output"], + "description": "Name of the output lines vector file.", + "parameter_type": {"NewFile":{"Vector":"Line"}}, + "default_value": null, + "optional": false + }, + { + "name": "Snap Distance", + "flags": ["--snap", "--dist"], + "description": "Snap 
distance, in xy units (metres).",
+      "parameter_type": "Float",
+      "default_value": "",
+      "optional": false
+    }
+  ]
+}
\ No newline at end of file
diff --git a/whitebox-plugins/src/vector_stream_network_analysis/main.rs b/whitebox-plugins/src/vector_stream_network_analysis/main.rs
new file mode 100644
index 00000000..57c890ed
--- /dev/null
+++ b/whitebox-plugins/src/vector_stream_network_analysis/main.rs
@@ -0,0 +1,1218 @@
+/*
+Authors: Prof. John Lindsay
+Created: 28/07/2021 (originally in Whitebox Toolset Extension)
+Last Modified: 23/03/2023
+License: MIT
+*/
+
+use std::cmp::Ordering;
+use std::collections::BinaryHeap;
+use kdtree::distance::squared_euclidean;
+use kdtree::KdTree;
+use std::io::{Error, ErrorKind};
+use std::{env, path, str};
+use std::time::Instant;
+use std::sync::Arc;
+const EPSILON: f64 = std::f64::EPSILON;
+use whitebox_common::utils::{get_formatted_elapsed_time};
+use whitebox_common::structures::Point2D;
+use whitebox_raster::*;
+use whitebox_vector::*;
+
+/// This tool performs common stream network analysis operations on an input vector stream file (`--streams`).
+/// The network indices produced by this analysis are contained within the output vector's (`--output`)
+/// attribute table. The following table shows each of the network indices that are calculated.
+///
+/// | Index Name | Description |
+/// | :- | :- |
+/// | OUTLET | Unique outlet identifying value, used as basin identifier |
+/// | TRIB_ID | Unique tributary identifying value |
+/// | DIST2MOUTH | Distance to outlet (i.e., mouth node) |
+/// | DS_NODES | Number of downstream nodes |
+/// | TUCL | Total upstream channel length; the channel equivalent to catchment area |
+/// | MAXUPSDIST | Maximum upstream distance |
+/// | HORTON | Horton stream order |
+/// | STRAHLER | Strahler stream order |
+/// | SHREVE | Shreve stream magnitude |
+/// | HACK | Hack stream order |
+/// | MAINSTREAM | Boolean value indicating whether link is the main stream trunk of its basin |
+/// | MIN_ELEV | Minimum link elevation (from DEM) |
+/// | MAX_ELEV | Maximum link elevation (from DEM) |
+/// | IS_OUTLET | Boolean value indicating whether link is an outlet link |
+///
+/// In addition to the input and output files, the user must also specify the name of an input DEM file
+/// (`--dem`), the maximum ridge-cutting height, in DEM z units (`--cutting_height`), and the snap distance
+/// used for identifying any topological errors in the stream file (`--snap`). The main function of the
+/// input DEM is to distinguish between outlet and headwater links in the network, which
+/// can be differentiated by their elevations during the priority-flood operation used in the algorithm
+/// (see Lindsay et al. 2019). The maximum ridge-cutting height parameter is useful for preventing
+/// erroneous stream capture in the headwaters when channel heads are very near (within the snap distance),
+/// which is usually very rare. The snap distance parameter is used to deal with certain common topological
+/// errors. However, it is advisable that the input streams file be pre-processed prior to analysis.
+///
+/// > Note: The input streams file for this tool should be pre-processed using the `RepairStreamVectorTopology`
+/// > tool. **This is an important step**.
+/// +/// OUTLET: +/// ![](../../doc_img/StreamVectorAnalysis1.png) +/// +/// HORTON: +/// ![](../../doc_img/StreamVectorAnalysis2.png) +/// +/// SHREVE: +/// ![](../../doc_img/StreamVectorAnalysis4.png) +/// +/// TRIB_ID: +/// ![](../../doc_img/StreamVectorAnalysis3.png) +/// +///Many of the network indices output by this tool for vector streams have raster equivalents in WhiteboxTools. +/// For example, see the `StrahlerStreamOrder`, `ShreveStreamMagnitude` tools. +/// +/// # Reference +/// Lindsay, JB, Yang, W, Hornby, DD. 2019. Drainage network analysis and structuring of topologically +/// noisy vector stream data. ISPRS International Journal of Geo-Information. 8(9), 422; DOI: +/// 10.3390/ijgi8090422 +/// +/// # See Also +/// `RepairStreamVectorTopology`, `StrahlerStreamOrder`, `ShreveStreamMagnitude` +fn main() { + let args: Vec = env::args().collect(); + + if args[1].trim() == "run" { + match run(&args) { + Ok(_) => {} + Err(e) => panic!("{:?}", e), + } + } + + if args.len() <= 1 || args[1].trim() == "help" { + // print help + help(); + } + + if args[1].trim() == "version" { + // print version information + version(); + } +} + +fn help() { + let mut ext = ""; + if cfg!(target_os = "windows") { + ext = ".exe"; + } + + let exe_name = &format!("vector_stream_network_analysis{}", ext); + let sep: String = path::MAIN_SEPARATOR.to_string(); + let s = r#" + vector_stream_network_analysis Help + + This tool can be used to + + The following commands are recognized: + help Prints help information. + run Runs the tool. + version Prints the tool version information. + + The following flags can be used with the 'run' command: + --streams Name of the input streams vector. + --dem Name of the input DEM raster file. + -o, --output Name of the output lines shapefile. + --cutting_height Maximum ridge-cutting height (z units). + --snap Snap distance, in xy units (metres). 
+ + Input/output file names can be fully qualified, or can rely on the + working directory contained in the WhiteboxTools settings.json file. + + Example Usage: + >> .*EXE_NAME run --streams=rivers.shp --dem=DEM.tif -o=network_analysis.shp --cutting_height=10.0 --snap=1.0 + + Note: Use of this tool requires a valid license. To obtain a license, + contact Whitebox Geospatial Inc. (support@whiteboxgeo.com). + "# + .replace("*", &sep) + .replace("EXE_NAME", exe_name); + println!("{}", s); +} + +fn version() { + const VERSION: Option<&'static str> = option_env!("CARGO_PKG_VERSION"); + println!( + "vector_stream_network_analysis v{} by Dr. John B. Lindsay (c) 2023.", + VERSION.unwrap_or("Unknown version") + ); +} + +fn get_tool_name() -> String { + String::from("VectorStreamNetworkAnalysis") // This should be camel case and is a reference to the tool name. +} + +fn run(args: &Vec) -> Result<(), std::io::Error> { + let tool_name = get_tool_name(); + + let sep: String = path::MAIN_SEPARATOR.to_string(); + + // Read in the environment variables and get the necessary values + let configurations = whitebox_common::configs::get_configs()?; + let mut working_directory = configurations.working_directory.clone(); + if !working_directory.is_empty() && !working_directory.ends_with(&sep) { + working_directory += &sep; + } + + let mut streams_file: String = "".to_string(); + let mut dem_file: String = "".to_string(); + let mut output_file: String = "".to_string(); + let mut max_ridge_cutting_height = 10.0; + let mut snap_distance = 0.001; + + // read the arguments + if args.len() == 0 { + return Err(Error::new( + ErrorKind::InvalidInput, + "Tool run with no parameters.", + )); + } + for i in 0..args.len() { + let mut arg = args[i].replace("\"", ""); + arg = arg.replace("\'", ""); + let cmd = arg.split("="); // in case an equals sign was used + let vec = cmd.collect::>(); + let mut keyval = false; + if vec.len() > 1 { + keyval = true; + } + let flag_val = 
vec[0].to_lowercase().replace("--", "-"); + if flag_val == "-streams" { + streams_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-dem" { + dem_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-o" || flag_val == "-output" { + output_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-cutting_height" { + max_ridge_cutting_height = if keyval { + vec[1] + .to_string() + .parse::() + .expect(&format!("Error parsing {}", flag_val)) + } else { + args[i + 1] + .to_string() + .parse::() + .expect(&format!("Error parsing {}", flag_val)) + }; + } else if flag_val == "-snap" { + snap_distance = if keyval { + vec[1] + .to_string() + .parse::() + .expect(&format!("Error parsing {}", flag_val)) + } else { + args[i + 1] + .to_string() + .parse::() + .expect(&format!("Error parsing {}", flag_val)) + }; + } + } + + if configurations.verbose_mode { + let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28); + // 28 = length of the 'Powered by' by statement. 
+ println!("{}", "*".repeat(welcome_len)); + println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len())); + println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28)); + println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23)); + println!("{}", "*".repeat(welcome_len)); + } + + let mut progress: usize; + let mut old_progress: usize = 1; + + let start = Instant::now(); + + let snap_distance = snap_distance * snap_distance; + + let precision = EPSILON * 10f64; + + + if !streams_file.contains(&sep) && !streams_file.contains("/") { + streams_file = format!("{}{}", working_directory, streams_file); + } + if !dem_file.contains(&sep) && !dem_file.contains("/") { + dem_file = format!("{}{}", working_directory, dem_file); + } + if !output_file.contains(&sep) && !output_file.contains("/") { + output_file = format!("{}{}", working_directory, output_file); + } + + // Read in the DEM file + let dem = Arc::new(Raster::new(&dem_file, "r")?); + // let rows = dem.configs.rows as isize; + // let columns = dem.configs.columns as isize; + let nodata = dem.configs.nodata; + + let mut dist_multiplier = 1.0; + if dem.is_in_geographic_coordinates() { + // calculate a new z-conversion factor + let mut mid_lat = (dem.configs.north - dem.configs.south) / 2.0; + if mid_lat <= 90.0 && mid_lat >= -90.0 { + mid_lat = mid_lat.to_radians(); + // z_factor = 1.0 / (111320.0 * mid_lat.cos()); + let a = 6378137.0; + let b = 6356752.314; + let e2 = (a * a - b * b) / (a * a); + let num = std::f64::consts::PI * a * mid_lat.cos(); + let denum = 180.0 * ((1.0 - e2 * mid_lat.sin() * mid_lat.sin())).sqrt(); + let long_deg_dist = num / denum; + let lat_deg_dist = 111132.954 - 559.822 * (2.0f64 * mid_lat).cos() + 1.175 * (4.0f64 * mid_lat).cos(); + dist_multiplier = (long_deg_dist + lat_deg_dist) / 2.0; + } + } + + let input = Shapefile::read(&streams_file)?; + + // Make sure the input vector file is of polygon type + if 
input.header.shape_type.base_shape_type() != ShapeType::PolyLine { + return Err(Error::new( + ErrorKind::InvalidInput, + "The input vector data must be of PolyLine base shape type.", + )); + } + + + // create output file + let mut output = Shapefile::initialize_using_file(&output_file, &input, ShapeType::PolyLine, false)?; + + // add the attributes + + let fields_vec: Vec = vec![ + AttributeField::new( + "FID", + FieldDataType::Int, + 7u8, + 0u8 + ), + AttributeField::new( + "TUCL", + FieldDataType::Real, + 10u8, + 4u8 + ), + AttributeField::new( + "MAXUPSDIST", + FieldDataType::Real, + 10u8, + 4u8 + ), + AttributeField::new( + "MIN_ELEV", + FieldDataType::Real, + 10u8, + 4u8 + ), + AttributeField::new( + "MAX_ELEV", + FieldDataType::Real, + 10u8, + 4u8 + ), + AttributeField::new( + "OUTLET", + FieldDataType::Int, + 7u8, + 0u8 + ), + AttributeField::new( + "HORTON", + FieldDataType::Int, + 7u8, + 0u8 + ), + AttributeField::new( + "STRAHLER", + FieldDataType::Int, + 7u8, + 0u8 + ), + AttributeField::new( + "SHREVE", + FieldDataType::Int, + 7u8, + 0u8 + ), + AttributeField::new( + "HACK", + FieldDataType::Int, + 7u8, + 0u8 + ), + AttributeField::new( + "DIST2MOUTH", + FieldDataType::Real, + 10u8, + 4u8 + ), + AttributeField::new( + "DS_NODES", + FieldDataType::Int, + 7u8, + 0u8 + ), + AttributeField::new( + "IS_OUTLET", + FieldDataType::Int, + 1u8, + 0u8 + ), + AttributeField::new( + "DS_LINK_ID", + FieldDataType::Int, + 7u8, + 0u8 + ), + AttributeField::new( + "MAINSTEM", + FieldDataType::Int, + 1u8, + 0u8 + ), + AttributeField::new( + "TRIB_ID", + FieldDataType::Int, + 7u8, + 0u8 + ) + ]; + + + + // let in_atts = input.attributes.clone(); + // let mut parent_fid_att = 999; + // for i in 0..in_atts.fields.len() { + // let field = in_atts.get_field(i); + // if field.name == "FID" { + // parent_fid_att = i; + // } else { + // fields_vec.push(field.clone()); + // } + // } + + output.attributes.add_fields(&fields_vec); + + let mut output_confluences = 
Shapefile::initialize_using_file(&output_file.replace(".shp", "_confluences.shp"), &input, ShapeType::Point, false)?; + output_confluences + .attributes + .add_field(&AttributeField::new("FID", FieldDataType::Int, 6u8, 0u8)); + + + let mut output_channel_heads = Shapefile::initialize_using_file(&output_file.replace(".shp", "_channelHeads.shp"), &input, ShapeType::Point, false)?; + output_channel_heads + .attributes + .add_field(&AttributeField::new("FID", FieldDataType::Int, 6u8, 0u8)); + + let mut output_outlets = Shapefile::initialize_using_file(&output_file.replace(".shp", "_outlets.shp"), &input, ShapeType::Point, false)?; + output_outlets + .attributes + .add_field(&AttributeField::new("FID", FieldDataType::Int, 6u8, 0u8)); + + // First enter the line end-nodes into a kd-tree + let num_features = input.num_records; + // let mut count = 0; + let (mut part_start, mut part_end): (usize, usize); + let mut outlet_num: usize; + let mut total_num_parts = 0; + let mut is_beyond_edge_line: bool; + // let mut is_interior: bool; + // let mut flag: bool; + let (mut row, mut col): (isize, isize); + let (mut z, mut z1, mut z2): (f64, f64, f64); + let mut length: f64; + + if configurations.verbose_mode { + println!("Pre-processing..."); + } + + // count the number of parts + for record_num in 0..input.num_records { + let record = input.get_record(record_num); + total_num_parts += record.num_parts as usize; + } + + let mut crosses_nodata = vec![false; total_num_parts]; + let mut link_mag = vec![0f64; total_num_parts]; + let mut is_beyond_edge = vec![false; total_num_parts]; + + let mut link_key_points = Vec::with_capacity(total_num_parts); + + let mut link_lengths = vec![0f64; total_num_parts]; + let mut outlet_nums = vec![0; total_num_parts]; + let mut num_downstream_nodes = vec![0; total_num_parts]; + let mut points_tree = KdTree::with_capacity(2, 64); + + let mut link_min_elev = vec![f64::INFINITY; total_num_parts]; + let mut link_max_elev = vec![f64::NEG_INFINITY; 
total_num_parts]; + let mut downstream_link = vec![-99; total_num_parts]; + + let (mut x, mut x1, mut x2, mut y, mut y1, mut y2): (f64, f64, f64, f64, f64, f64); + + // Read the end-nodes into the KD-tree. + let mut feature_num = 0; + for rec_num in 0..input.num_records { + let record = input.get_record(rec_num); + for part in 0..record.num_parts as usize { + part_start = record.parts[part] as usize; + part_end = if part < record.num_parts as usize - 1 { + record.parts[part + 1] as usize - 1 + } else { + record.num_points as usize - 1 + }; + + // Is this line off the edge of the DEM or within an area of nodata? + is_beyond_edge_line = true; + + z1 = 0f64; + z2 = 0f64; + for i in part_start..=part_end { + row = dem.get_row_from_y(record.points[i].y); + col = dem.get_column_from_x(record.points[i].x); + z = dem.get_value(row, col); + if i == part_start { z1 = z; } + if i == part_end { z2 = z; } + + if z != nodata { + is_beyond_edge_line = false; + if z < link_min_elev[feature_num] { link_min_elev[feature_num] = z} + if z > link_max_elev[feature_num] { link_max_elev[feature_num] = z} + } else { + crosses_nodata[feature_num] = true; + } + } + + if is_beyond_edge_line { + is_beyond_edge[feature_num] = true; + } else { + // calculate the length of this line + length = 0.0; + for i in part_start+1..=part_end { + length += dist_multiplier * record.points[i].distance(&record.points[i-1]); // Math.sqrt((points[i][0] - points[i - 1][0]) * (points[i][0] - points[i - 1][0]) + (points[i][1] - points[i - 1][1]) * (points[i][1] - points[i - 1][1])) + } + link_lengths[feature_num] = length; + } + + x1 = record.points[part_start].x; + y1 = record.points[part_start].y; + points_tree.add([x1, y1], feature_num).unwrap(); + + x2 = record.points[part_end].x; + y2 = record.points[part_end].y; + points_tree.add([x2, y2], feature_num).unwrap(); + + link_key_points.push(StreamLinkKeyPoints::new(x1, y1, z1, x2, y2, z2)); + + feature_num += 1; + } + + if configurations.verbose_mode { + 
progress = + (100.0_f64 * (rec_num + 1) as f64 / num_features as f64) as usize; + if progress != old_progress { + println!("Building search tree: {}%", progress); + old_progress = progress; + } + } + } + + + /* + * Now we must find y-junctions. This occurs where + * a stream link's end node intersects with another + * stream link but not at one of its end-nodes. Instead, + * it touches one of its intermediate nodes. We will + * perform a NN search at the location of all + * intermediate nodes and wherever one is within the + * search distance of an end-node (already in the kd-tree) + * then it will be added to the kd-tree as well. + */ + // let mut num_y_junctions = 0; + feature_num = 0; + for rec_num in 0..input.num_records { + let record = input.get_record(rec_num); + for part in 0..record.num_parts as usize { + part_start = record.parts[part] as usize; + part_end = if part < record.num_parts as usize - 1 { + record.parts[part + 1] as usize - 1 + } else { + record.num_points as usize - 1 + }; + for i in part_start+1..part_end { + let ret = points_tree.within(&[record.points[i].x, record.points[i].y], precision, &squared_euclidean).unwrap(); + + if ret.len() > 0 { + // add it to the tree + points_tree.add([record.points[i].x, record.points[i].y], feature_num).unwrap(); + // num_y_junctions += 1; + + link_key_points[feature_num].add_intermediate_point(record.points[i].x, record.points[i].y); + + output_confluences.add_point_record(record.points[i].x, record.points[i].y); + output_confluences.attributes.add_record(vec![FieldData::Int(2i32)], false); + } + } + + feature_num += 1; + } + + if configurations.verbose_mode { + progress = + (100.0_f64 * (rec_num + 1) as f64 / num_features as f64) as usize; + if progress != old_progress { + println!("Building search tree: {}%", progress); + old_progress = progress; + } + } + } + + /* + * Exterior links can be identified + * as lines that either do not connect to another + * or that have at least one end-node with a NoData + 
* elevation value. Exterior links include both + * channel heads (first-order stream) and outlet links. + * Add each of these to a priority queue. + */ + + let mut queue = BinaryHeap::with_capacity(total_num_parts); + + // let mut is_channel_head = vec![false; total_num_parts]; + let mut is_exterior_link = vec![false; total_num_parts]; + let mut is_exterior: bool; + let mut is_outlet_link = vec![false; total_num_parts]; + let mut id: usize; + let mut j: usize; + for i in 0..total_num_parts { + if !is_beyond_edge[i] { + z = f64::INFINITY; + /* + * To be an exterior link, it must have + * at least one end that either isn't connected + * to any other link, has one link end that + * is nodata in the DEM, or + */ + is_exterior = false; + x = link_key_points[i].end_point1.x; + y = link_key_points[i].end_point1.y; + let ret = points_tree.within(&[x, y], precision, &squared_euclidean).unwrap(); + + j = 0; + for n in 0..ret.len() { + id = *ret[n].1; + if id != i && !is_beyond_edge[id] { + j += 1; + if link_min_elev[id] < z { z = link_min_elev[id]; } + } + } + + if j == 0 { + is_exterior = true; + } + + x = link_key_points[i].end_point2.x; + y = link_key_points[i].end_point2.y; + let ret = points_tree.within(&[x, y], precision, &squared_euclidean).unwrap(); + + j = 0; + for n in 0..ret.len() { + id = *ret[n].1; + if id != i && !is_beyond_edge[id] { + j += 1; + if link_min_elev[id] < z { z = link_min_elev[id]; } + } + } + + if j == 0 { + is_exterior = true; + } + + if is_exterior || crosses_nodata[i] { + is_exterior_link[i] = true; + if link_min_elev[i] <= z || crosses_nodata[i] { + + z = link_min_elev[i]; + queue.push(StreamLink{ index: i, min: z + max_ridge_cutting_height }); + } + } + } + + if configurations.verbose_mode { + progress = + (100.0_f64 * (i + 1) as f64 / total_num_parts as f64) as usize; + if progress != old_progress { + println!("Finding starting points: {}%", progress); + old_progress = progress; + } + } + } + + // perform the priority-flood operation + // 
let mut num_snapped_outlets = 0; + let mut sl: StreamLink; + let mut have_visited = vec![false; total_num_parts]; + let mut have_entered_queue = vec![false; total_num_parts]; + let mut num_infowing_links = vec![0; total_num_parts]; + let mut dist_to_outlet = vec![0f64; total_num_parts]; + let mut trib_num = vec![0; total_num_parts]; + let mut link: isize; + let mut current_max_outlet_num = 0; + let mut dsn: isize; + let mut is_confluence: bool; + let mut num_links: isize; + // let mut total_num_links: isize; + let mut num_links_visited = 0; + let mut end_point: Point2D; + + while !queue.is_empty() { + sl = queue.pop().expect("Error during pop operation."); + link = sl.index as isize; + if !have_visited[link as usize] { + have_visited[link as usize] = true; + have_entered_queue[link as usize] = true; + + dist_to_outlet[link as usize] += link_lengths[link as usize]; + + // What is the downstream link? + dsn = downstream_link[link as usize]; + + // What outlet number does the DSN belong to? + if dsn >= 0 { + outlet_num = outlet_nums[dsn as usize]; + } else { + // which end point is the downstream outlet node? + end_point = link_key_points[link as usize].end_point1; + + x = end_point.x; + y = end_point.y; + + let ret = points_tree.within(&[x, y], precision, &squared_euclidean).unwrap(); + num_links = 0; + for n in 0..ret.len() { + id = *ret[n].1; + if !is_beyond_edge[id] && !have_visited[id] && !is_outlet_link[id] { + num_links += 1; + } + } + + if num_links > 0 { + // end point 2 is the downstream node + x = link_key_points[link as usize].end_point2.x; + y = link_key_points[link as usize].end_point2.y; + } else { + // how many linking nodes are at end point 2? 
+ end_point = link_key_points[link as usize].end_point2; + + x = end_point.x; + y = end_point.y; + let ret = points_tree.within(&[x, y], precision, &squared_euclidean).unwrap(); + num_links = 0; + for n in 0..ret.len() { + id = *ret[n].1; + if !is_beyond_edge[id] && !have_visited[id] && !is_outlet_link[id] { + num_links += 1; + } + } + + if num_links > 0 { + // end point 1 is the downstream node + x = link_key_points[link as usize].end_point1.x; + y = link_key_points[link as usize].end_point1.y; + } else { // it's a single channel stream, which end is lower? + if link_key_points[link as usize].z1 < link_key_points[link as usize].z2 || + (link_key_points[link as usize].z1 == nodata && link_key_points[link as usize].z2 != nodata) { + x = link_key_points[link as usize].end_point1.x; + y = link_key_points[link as usize].end_point1.y; + } else { + x = link_key_points[link as usize].end_point2.x; + y = link_key_points[link as usize].end_point2.y; + } + } + } + + if !crosses_nodata[link as usize] { + /* This is a dangling stream. First let's make + * sure that there isn't a link end node from + * a previously discovered outlet nearby that + * we could connect to this outlet point + */ + let ret = points_tree.nearest(&[x, y], 3, &squared_euclidean).unwrap(); + let mut snapped_neighbour = -1isize; + for n in 0..ret.len() { + id = *ret[n].1; + if !is_beyond_edge[id] && have_visited[id] && is_exterior_link[id] && id as isize != link { + // Check to see if the distance is less than the specified + // snap distance. 
+ if ret[n].0 < snap_distance { + snapped_neighbour = id as isize; + break; + } + } + } + + if snapped_neighbour >= 0 { + // we found a neighbour to snap to + dsn = snapped_neighbour; + outlet_num = outlet_nums[dsn as usize]; + outlet_nums[link as usize] = outlet_num; + downstream_link[link as usize] = dsn; + num_infowing_links[dsn as usize] += 1; + num_downstream_nodes[link as usize] = num_downstream_nodes[dsn as usize] + 1; + dist_to_outlet[link as usize] += dist_to_outlet[dsn as usize]; + // num_snapped_outlets += 1; + } else { + // it is a true outlet + + // There isn't a DSN and we need a new outlet number + current_max_outlet_num += 1; + outlet_num = current_max_outlet_num; + outlet_nums[link as usize] = outlet_num; + is_outlet_link[link as usize] = true; + + // pointOfInterest = new whitebox.geospatialfiles.shapefile.Point(x, y); + // rowData = new Object[1]; + // rowData[0] = new Double(outletNum); + // outputOutlets.addRecord(pointOfInterest, rowData); + output_outlets.add_point_record(x, y); + output_outlets.attributes.add_record(vec![FieldData::Int(outlet_num as i32)], false); + } + } else { + // There isn't a DSN and we need a new outlet number + current_max_outlet_num += 1; + outlet_num = current_max_outlet_num; + outlet_nums[link as usize] = outlet_num; + is_outlet_link[link as usize] = true; + + // point_of_interest = new whitebox.geospatialfiles.shapefile.Point(x, y); + // rowData = new Object[1]; + // rowData[0] = new Double(outletNum); + // outputOutlets.addRecord(pointOfInterest, rowData); + output_outlets.add_point_record(x, y); + output_outlets.attributes.add_record(vec![FieldData::Int(outlet_num as i32)], false); + } + } + + for pt in link_key_points[link as usize].get_all_points() { // (XYPoint pt : linkKeyPoints[link].getAllPoints()) { + x = pt.x; + y = pt.y; + let ret = points_tree.within(&[x, y], precision, &squared_euclidean).unwrap(); + num_links = 0; + for n in 0..ret.len() { + id = *ret[n].1; + if !is_beyond_edge[id] && 
!have_entered_queue[id] { + num_links += 1; + } + } + + is_confluence = if num_links > 1 { true } else { false }; + if is_confluence { + // pointOfInterest = new whitebox.geospatialfiles.shapefile.Point(x, y); + // rowData = new Object[1]; + // rowData[0] = new Double(1); + // outputConfluences.addRecord(pointOfInterest, rowData); + output_confluences.add_point_record(x, y); + output_confluences.attributes.add_record(vec![FieldData::Int(1i32)], false); + } + for n in 0..ret.len() { + id = *ret[n].1; + if !is_beyond_edge[id] && !have_entered_queue[id] { + // add the link to the queue + z = link_min_elev[id]; + queue.push(StreamLink{ index: id, min: z }); + + have_entered_queue[id] = true; + + // update the DSN for this link + downstream_link[id] = link; + if is_confluence { + num_downstream_nodes[id] = num_downstream_nodes[link as usize] + 1; + } else { + num_downstream_nodes[id] = num_downstream_nodes[link as usize]; + } + + dist_to_outlet[id] += dist_to_outlet[link as usize]; + + outlet_nums[id] = outlet_num; + + num_infowing_links[link as usize] += 1; + } + } + } + + num_links_visited += 1; + if configurations.verbose_mode { + progress = + (100.0_f64 * (num_links_visited + 1) as f64 / total_num_parts as f64) as usize; + if progress != old_progress { + println!("Priority-Flood Operation: {}%", progress); + old_progress = progress; + } + } + } + } + + // calculate the link mag variables + let mut strahler_order = vec![0usize; total_num_parts]; + let mut shreve_order = vec![0usize; total_num_parts]; + let mut max_upstream_length = vec![0f64; total_num_parts]; + let mut stack = vec![]; + let mut found_downstream_end: bool; + for i in 0..total_num_parts { + if num_infowing_links[i] == 0 && !is_beyond_edge[i] { + stack.push(i); + strahler_order[i] = 1; + shreve_order[i] = 1; + + // this is a headwater, find which end is the channel head + found_downstream_end = false; + dsn = downstream_link[i]; + end_point = link_key_points[i].end_point1; + x = end_point.x; + y = 
end_point.y; + let ret = points_tree.within(&[x, y], precision, &squared_euclidean).unwrap(); + for j in 0..ret.len() { + id = *ret[j].1; + if id as isize == dsn { + found_downstream_end = true; + } + } + + if !found_downstream_end { + // pointOfInterest = new whitebox.geospatialfiles.shapefile.Point(x, y); + // rowData = new Object[1]; + // rowData[0] = new Double(1); + // outputChannelHeads.addRecord(pointOfInterest, rowData); + output_channel_heads.add_point_record(x, y); + output_channel_heads.attributes.add_record(vec![FieldData::Int(1i32)], false); + } else { + end_point = link_key_points[i].end_point2; + x = end_point.x; + y = end_point.y; + // pointOfInterest = new whitebox.geospatialfiles.shapefile.Point(x, y); + // rowData = new Object[1]; + // rowData[0] = new Double(1); + // outputChannelHeads.addRecord(pointOfInterest, rowData); + output_channel_heads.add_point_record(x, y); + output_channel_heads.attributes.add_record(vec![FieldData::Int(1i32)], false); + } + } + } + + let mut count = 0; + while !stack.is_empty() { + let i = stack.pop().expect("Error during pop operation."); + link_mag[i] += link_lengths[i]; + dsn = downstream_link[i]; + if dsn >= 0isize { + // pass this downstream + link_mag[dsn as usize] += link_mag[i]; + num_infowing_links[dsn as usize] -= 1; + if num_infowing_links[dsn as usize] == 0 { + stack.push(dsn as usize); + } + + if strahler_order[dsn as usize] == strahler_order[i] { + strahler_order[dsn as usize] += 1; + } else if strahler_order[i] > strahler_order[dsn as usize] { + strahler_order[dsn as usize] = strahler_order[i]; + } + + if max_upstream_length[i] + link_lengths[i] > max_upstream_length[dsn as usize] { + max_upstream_length[dsn as usize] = max_upstream_length[i] + link_lengths[i]; + } + + shreve_order[dsn as usize] += shreve_order[i]; + } + count += 1; + if configurations.verbose_mode { + progress = + (100.0_f64 * (count + 1) as f64 / total_num_parts as f64) as usize; + if progress != old_progress { + 
println!("Accumulation operations: {}%", progress); + old_progress = progress; + } + } + } + + // perform the outlet-to-head ops like finding the main stem + // and assign tributary numbers + let mut is_main_stem = vec![false; total_num_parts]; + let mut horton_order = vec![0usize; total_num_parts]; + let mut hack_order = vec![0usize; total_num_parts]; + stack = vec![]; + let mut current_trib_num = 0; + + for i in 0..total_num_parts { + if is_outlet_link[i] { + is_main_stem[i] = true; + horton_order[i] = strahler_order[i]; + hack_order[i] = 1; + stack.push(i); + current_trib_num += 1; + trib_num[i] = current_trib_num; + } + + if configurations.verbose_mode { + progress = + (100.0_f64 * (i + 1) as f64 / total_num_parts as f64) as usize; + if progress != old_progress { + println!("Assigning tributary IDs: {}%", progress); + old_progress = progress; + } + } + } + + + let mut neighbour_list = vec![]; + count = 0; + while !stack.is_empty() { + let i = stack.pop().expect("Error during pop operation."); + neighbour_list.clear(); + let mut max_tucl = 0f64; + let mut max_tucl_link = -1isize; + for pt in link_key_points[i].get_all_points() { // (XYPoint pt : linkKeyPoints[i].getAllPoints()) { + x = pt.x; + y = pt.y; + let ret = points_tree.within(&[x, y], precision, &squared_euclidean).unwrap(); + // num_links = 0; + for j in 0..ret.len() { + id = *ret[j].1; + if downstream_link[id] == i as isize { + neighbour_list.push(id); + if link_mag[id] > max_tucl { + max_tucl = link_mag[id]; + max_tucl_link = id as isize; + } + } + } + } + if max_tucl_link >= 0 { + //isMainStem[maxTUCLlink] = true; + for q in 0..neighbour_list.len() { + let n = neighbour_list[q]; + // add it to the stack + stack.push(n); + if n as isize != max_tucl_link { + current_trib_num += 1; + trib_num[n] = current_trib_num; + horton_order[n] = strahler_order[n]; + hack_order[n] = hack_order[i] + 1; + } else { + trib_num[n] = trib_num[i]; + horton_order[n] = horton_order[i]; + hack_order[n] = hack_order[i]; + if 
is_main_stem[downstream_link[n] as usize] { + is_main_stem[n] = true; + } + } + } + } + + count += 1; + if configurations.verbose_mode { + progress = + (100.0_f64 * (count + 1) as f64 / total_num_parts as f64) as usize; + if progress != old_progress { + println!("Assigning tributary IDs: {}%", progress); + old_progress = progress; + } + } + } + + // Output the data into the attribute table. + feature_num = 0; + count = 0; + let mut att_data: Vec; + let mut fid = 1; + for rec_num in 0..input.num_records { + let record = input.get_record(rec_num); + // num_points = record.points.len(); + for part in 0..record.num_parts as usize { + part_start = record.parts[part] as usize; + part_end = if part < record.num_parts as usize - 1 { + record.parts[part + 1] as usize - 1 + } else { + record.num_points as usize - 1 + }; + if !is_beyond_edge[feature_num as usize] { + let mut points: Vec = vec![]; + for i in part_start..=part_end { + points.push(record.points[i].clone()); + } + let mut sfg = ShapefileGeometry::new(ShapeType::PolyLine); + sfg.add_part(&points); + output.add_record(sfg); + + att_data = Vec::with_capacity(fields_vec.len()); + att_data.push(FieldData::Int(fid as i32)); + att_data.push(FieldData::Real(link_mag[feature_num])); + att_data.push(FieldData::Real(max_upstream_length[feature_num])); + att_data.push(FieldData::Real(link_min_elev[feature_num])); + att_data.push(FieldData::Real(link_max_elev[feature_num])); + att_data.push(FieldData::Int(outlet_nums[feature_num] as i32)); + att_data.push(FieldData::Int(horton_order[feature_num] as i32)); + att_data.push(FieldData::Int(strahler_order[feature_num] as i32)); + att_data.push(FieldData::Int(shreve_order[feature_num] as i32)); + att_data.push(FieldData::Int(hack_order[feature_num] as i32)); + att_data.push(FieldData::Real(dist_to_outlet[feature_num])); + att_data.push(FieldData::Int(num_downstream_nodes[feature_num] as i32)); + if is_outlet_link[feature_num] { + att_data.push(FieldData::Int(1)); + } else { + 
att_data.push(FieldData::Int(0)); + } + att_data.push(FieldData::Int(downstream_link[feature_num] as i32)); + if is_main_stem[feature_num] { + att_data.push(FieldData::Int(1)); + } else { + att_data.push(FieldData::Int(0)); + } + att_data.push(FieldData::Int(trib_num[feature_num] as i32)); + + // output.attributes.add_record( + // vec![FieldData::Int(fid as i32)], + // false, + // ); + + output.attributes.add_record(att_data.clone(), false); + fid += 1; + } + feature_num += 1; + } + + count += 1; + if configurations.verbose_mode { + progress = + (100.0_f64 * (count + 1) as f64 / total_num_parts as f64) as usize; + if progress != old_progress { + println!("Writing data: {}%", progress); + old_progress = progress; + } + } + } + + if configurations.verbose_mode { + println!("Saving data...") + }; + let _ = match output.write() { + Ok(_) => { + if configurations.verbose_mode { + println!("Output stream file written") + } + } + Err(e) => return Err(e), + }; + + let _ = match output_confluences.write() { + Ok(_) => { + if configurations.verbose_mode { + println!("Output confluences file written") + } + } + Err(e) => return Err(e), + }; + + let _ = match output_outlets.write() { + Ok(_) => { + if configurations.verbose_mode { + println!("Output outlets file written") + } + } + Err(e) => return Err(e), + }; + + let _ = match output_channel_heads.write() { + Ok(_) => { + if configurations.verbose_mode { + println!("Output channel heads file written") + } + } + Err(e) => return Err(e), + }; + + + let elapsed_time = get_formatted_elapsed_time(start); + + if configurations.verbose_mode { + println!( + "{}", + &format!("Elapsed Time (including I/O): {}", elapsed_time) + ); + } + + Ok(()) +} + + +struct StreamLinkKeyPoints { + pub end_point1: Point2D, + pub end_point2: Point2D, + pub z1: f64, + pub z2: f64, + pub intermediate_points: Vec, +} + +impl StreamLinkKeyPoints { + fn new(x1: f64, y1: f64, z1: f64, x2: f64, y2: f64, z2: f64) -> StreamLinkKeyPoints { + StreamLinkKeyPoints 
{ + end_point1: Point2D::new(x1, y1), + z1: z1, + end_point2: Point2D::new(x2, y2), + z2: z2, + intermediate_points: vec![], + } + } + + fn add_intermediate_point(&mut self, x: f64, y: f64) { + self.intermediate_points.push(Point2D::new(x, y)); + } + + fn get_all_points(&self) -> Vec { + let mut points = vec![]; + points.push(self.end_point1); + points.push(self.end_point2); + for p in &self.intermediate_points { + points.push(p.clone()); + } + + points + } +} + +#[derive(PartialEq, Debug)] +struct StreamLink { + index: usize, + min: f64, +} + +impl Eq for StreamLink {} + +impl PartialOrd for StreamLink { + fn partial_cmp(&self, other: &Self) -> Option { + other.min.partial_cmp(&self.min) + } +} + +impl Ord for StreamLink { + fn cmp(&self, other: &Self) -> Ordering { + self.partial_cmp(other).unwrap() + } +} \ No newline at end of file diff --git a/whitebox-plugins/src/vector_stream_network_analysis/vector_stream_network_analysis.json b/whitebox-plugins/src/vector_stream_network_analysis/vector_stream_network_analysis.json new file mode 100755 index 00000000..983ca0c2 --- /dev/null +++ b/whitebox-plugins/src/vector_stream_network_analysis/vector_stream_network_analysis.json @@ -0,0 +1,51 @@ +{ + "tool_name": "VectorStreamNetworkAnalysis", + "exe": "vector_stream_network_analysis", + "short_description": "This tool performs common stream network analysis operations on an input vector stream file.", + "help": "This tool performs common stream network analysis operations on an input vector stream file.", + "toolbox": "Stream Network Analysis", + "license": "MIT", + "example": ">> .*EXE_NAME -r=VectorStreamNetworkAnalysis --streams=rivers.shp --dem=DEM.tif -o=network_analysis.shp --cutting_height=10.0 --snap=1.0", + "parameters": [ + { + "name": "Input Streams Vector", + "flags": ["--streams"], + "description": "Name of the input streams vector file.", + "parameter_type": {"ExistingFile":{"Vector":"Line"}}, + "default_value": null, + "optional": false + }, + { + 
"name": "Input DEM Raster", + "flags": ["--dem"], + "description": "Name of the input DEM raster file.", + "parameter_type": {"ExistingFile":"Raster"}, + "default_value": null, + "optional": false + }, + { + "name": "Output Lines", + "flags": ["-o", "--output"], + "description": "Name of the output lines shapefile.", + "parameter_type": {"NewFile":{"Vector":"Line"}}, + "default_value": null, + "optional": false + }, + { + "name": "Maximum Ridge-cutting Height (z units)", + "flags": ["--cutting_height"], + "description": "Maximum ridge-cutting height (z units).", + "parameter_type": "Float", + "default_value": "10.0", + "optional": true + }, + { + "name": "Snap Distance", + "flags": ["--snap"], + "description": "Snap distance, in xy units (metres).", + "parameter_type": "Float", + "default_value": "0.1", + "optional": true + } + ] +} \ No newline at end of file diff --git a/whitebox-raster/Cargo.toml b/whitebox-raster/Cargo.toml index b72cd98c..745a0bcf 100755 --- a/whitebox-raster/Cargo.toml +++ b/whitebox-raster/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "whitebox_raster" -version = "2.0.0" +version = "2.3.0" authors = ["John Lindsay "] edition = "2021" [dependencies] byteorder = "^1.3.1" -chrono = "0.4.15" +chrono = "0.4.21" lzw = "0.10.0" miniz_oxide = "0.3.6" num_cpus = "1.14.0" diff --git a/whitebox-runner/Cargo.toml b/whitebox-runner/Cargo.toml index 7e8b231c..05c10e7d 100644 --- a/whitebox-runner/Cargo.toml +++ b/whitebox-runner/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "whitebox_runner" -version = "0.1.0" +version = "2.0.0" authors = ["John Lindsay "] edition = "2021" diff --git a/whitebox-tools-app/Cargo.toml b/whitebox-tools-app/Cargo.toml index 48449e09..947629d5 100755 --- a/whitebox-tools-app/Cargo.toml +++ b/whitebox-tools-app/Cargo.toml @@ -1,26 +1,27 @@ [package] name = "whitebox_tools" -version = "2.2.0" +version = "2.3.0" authors = ["John Lindsay "] edition = "2021" [dependencies] -byteorder = "^1.3.1" -chrono = "0.4.15" +byteorder = "^1.4.3" 
+chrono = "0.4.24" kdtree = "0.6.0" kd-tree = "0.5.1" -miniz_oxide = "0.3.6" -nalgebra = "0.18.0" +miniz_oxide = "0.7.1" +nalgebra = "0.32.2" num_cpus = "1.6.2" -rand = { version = "0.7", features = ["small_rng"] } -rand_distr = "0.2.1" -rayon = "1.3.1" -rstar = "0.7.1" -serde = "1.0.94" -serde_derive = "1.0.94" -serde_json = "1.0.62" -statrs = "0.9.0" -typenum = "1.15.0" +rand = { version = "0.8.5", features = ["small_rng"] } +rand_distr = "0.4.3" +rayon = "1.7.0" +rstar = "0.10.0" +serde = "1.0.158" +serde_derive = "1.0.158" +serde_json = "1.0.94" +statrs = "0.16.0" +time = { version = "0.3.20", features = ["macros"] } +typenum = "1.16.0" whitebox_common = { path = "../whitebox-common" } whitebox_lidar = { path = "../whitebox-lidar" } whitebox_raster = { path = "../whitebox-raster" } diff --git a/whitebox-tools-app/src/main.rs b/whitebox-tools-app/src/main.rs index c414f719..9cdddd4c 100755 --- a/whitebox-tools-app/src/main.rs +++ b/whitebox-tools-app/src/main.rs @@ -2,7 +2,7 @@ This code is part of the WhiteboxTools geospatial analysis library. Authors: Dr. 
John Lindsay Created: 21/06/2017 -Last Modified: 30/01/2022 +Last Modified: 23/03/2023 License: MIT */ @@ -435,7 +435,7 @@ Example Usage: fn license() { let license_text = "WhiteboxTools License -Copyright 2017-2022 John Lindsay +Copyright 2017-2023 John Lindsay Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, diff --git a/whitebox-tools-app/src/tools/hydro_analysis/rho8_pointer.rs b/whitebox-tools-app/src/tools/hydro_analysis/rho8_pointer.rs index f9b94831..d65fe11a 100755 --- a/whitebox-tools-app/src/tools/hydro_analysis/rho8_pointer.rs +++ b/whitebox-tools-app/src/tools/hydro_analysis/rho8_pointer.rs @@ -269,7 +269,7 @@ impl WhiteboxTool for Rho8Pointer { if z_n != nodata { slope = match i { 1 | 3 | 5 | 7 => z - z_n, - _ => (z - z_n) / (2f64 - rng.gen_range(0f64, 1f64)), //between.ind_sample(&mut rng)), + _ => (z - z_n) / (2f64 - rng.gen_range(0f64..1f64)), //between.ind_sample(&mut rng)), }; if slope > max_slope && slope > 0f64 { max_slope = slope; diff --git a/whitebox-tools-app/src/tools/image_analysis/k_means_clustering.rs b/whitebox-tools-app/src/tools/image_analysis/k_means_clustering.rs index 5e0990a8..c53cd492 100755 --- a/whitebox-tools-app/src/tools/image_analysis/k_means_clustering.rs +++ b/whitebox-tools-app/src/tools/image_analysis/k_means_clustering.rs @@ -418,8 +418,8 @@ impl WhiteboxTool for KMeansClustering { // initialize the class centres randomly let mut rng = thread_rng(); for a in 0..num_classes { - let row = rng.gen_range(0, rows); // Range::new(0, rows).ind_sample(&mut rng); - let col = rng.gen_range(0, columns); // Range::new(0, columns).ind_sample(&mut rng); + let row = rng.gen_range(0..rows); // Range::new(0, rows).ind_sample(&mut rng); + let col = rng.gen_range(0..columns); // Range::new(0, columns).ind_sample(&mut rng); for i in 0..num_files { //let between = Range::new(minimum[i], 
maximum[i]); // class_centres[a][i] = between.ind_sample(&mut rng); @@ -639,7 +639,7 @@ impl WhiteboxTool for KMeansClustering { let mut attempt = 1; let mut found_large_class = false; while !found_large_class && attempt < chances { - let val = rng.gen_range(0, num_classes); // between.ind_sample(&mut rng); + let val = rng.gen_range(0..num_classes); // between.ind_sample(&mut rng); if class_n[val] > class_min_size[val] { large_class = val; class_min_size[val] += min_class_size; @@ -651,7 +651,7 @@ impl WhiteboxTool for KMeansClustering { for i in 0..num_files { // let between = Range::new(class_min[large_class][i], class_max[large_class][i]); class_centres[a][i] = - rng.gen_range(class_min[large_class][i], class_max[large_class][i]); + rng.gen_range(class_min[large_class][i]..class_max[large_class][i]); //between.ind_sample(&mut rng); } } diff --git a/whitebox-tools-app/src/tools/image_analysis/modified_k_means_clustering.rs b/whitebox-tools-app/src/tools/image_analysis/modified_k_means_clustering.rs index a30ea00d..332a98d1 100755 --- a/whitebox-tools-app/src/tools/image_analysis/modified_k_means_clustering.rs +++ b/whitebox-tools-app/src/tools/image_analysis/modified_k_means_clustering.rs @@ -435,8 +435,8 @@ impl WhiteboxTool for ModifiedKMeansClustering { // initialize the class centres randomly let mut rng = thread_rng(); for a in 0..num_classes { - let row = rng.gen_range(0, rows); // Range::new(0, rows).ind_sample(&mut rng); - let col = rng.gen_range(0, columns); // Range::new(0, columns).ind_sample(&mut rng); + let row = rng.gen_range(0..rows); // Range::new(0, rows).ind_sample(&mut rng); + let col = rng.gen_range(0..columns); // Range::new(0, columns).ind_sample(&mut rng); for i in 0..num_files { //let between = Range::new(minimum[i], maximum[i]); // class_centres[a][i] = between.ind_sample(&mut rng); diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/attribute_correlation_neighbourhood_analysis.rs 
b/whitebox-tools-app/src/tools/math_stat_analysis/attribute_correlation_neighbourhood_analysis.rs index 43470e9a..8814cb23 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/attribute_correlation_neighbourhood_analysis.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/attribute_correlation_neighbourhood_analysis.rs @@ -10,7 +10,7 @@ use crate::tools::*; use whitebox_vector::*; use kdtree::distance::squared_euclidean; use kdtree::KdTree; -use statrs::distribution::{StudentsT, Univariate}; +use statrs::distribution::{ContinuousCDF, StudentsT}; use std::cmp::Ordering::Equal; use std::env; use std::f64; diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/image_autocorrelation.rs b/whitebox-tools-app/src/tools/math_stat_analysis/image_autocorrelation.rs index d8cd58c8..3eb83dfd 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/image_autocorrelation.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/image_autocorrelation.rs @@ -6,7 +6,7 @@ Last Modified: 12/10/2018 License: MIT */ -use self::statrs::distribution::{Normal, Univariate}; +use self::statrs::distribution::{ContinuousCDF, Normal}; use whitebox_raster::*; use crate::tools::*; use num_cpus; diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/image_correlation_neighbourhood_analysis.rs b/whitebox-tools-app/src/tools/math_stat_analysis/image_correlation_neighbourhood_analysis.rs index 1a018994..06981784 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/image_correlation_neighbourhood_analysis.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/image_correlation_neighbourhood_analysis.rs @@ -8,7 +8,7 @@ License: MIT use whitebox_raster::*; use crate::tools::*; -use statrs::distribution::{StudentsT, Univariate}; +use statrs::distribution::{ContinuousCDF, StudentsT}; use std::cmp::Ordering::Equal; use std::env; use std::f64; diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/image_regression.rs 
b/whitebox-tools-app/src/tools/math_stat_analysis/image_regression.rs index f3dddbb2..3f7d8f3e 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/image_regression.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/image_regression.rs @@ -6,7 +6,7 @@ Last Modified: 18/10/2019 License: MIT */ -use self::statrs::distribution::{FisherSnedecor, StudentsT, Univariate}; +use self::statrs::distribution::{ContinuousCDF, FisherSnedecor, StudentsT}; use whitebox_raster::*; use whitebox_common::rendering::Scattergram; use crate::tools::*; @@ -757,8 +757,8 @@ impl WhiteboxTool for ImageRegression { let mut sample_num = 0usize; let (mut x, mut y): (f64, f64); while sample_num < num_samples { - let row = rng.gen_range(0, rows as isize); - let col = rng.gen_range(0, columns as isize); + let row = rng.gen_range(0..rows as isize); + let col = rng.gen_range(0..columns as isize); x = input1.get_value(row, col); y = input2.get_value(row, col); if x != nodata1 && y != nodata2 { diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/ks_normality_test.rs b/whitebox-tools-app/src/tools/math_stat_analysis/ks_normality_test.rs index c8478378..5c7e2bf4 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/ks_normality_test.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/ks_normality_test.rs @@ -297,8 +297,8 @@ impl WhiteboxTool for KsTestForNormality { let mut sample_cells = Vec::with_capacity(num_samples); let mut sample_num = 0usize; while sample_num < num_samples { - row = rng.gen_range(0, rows as isize); // row_rng.ind_sample(&mut rng); - col = rng.gen_range(0, columns as isize); // col_rng.ind_sample(&mut rng); + row = rng.gen_range(0..rows as isize); // row_rng.ind_sample(&mut rng); + col = rng.gen_range(0..columns as isize); // col_rng.ind_sample(&mut rng); z = input.get_value(row, col); if z != nodata { bin_num = ((z - min_value) / bin_size).floor() as usize; diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/paired_sample_t_test.rs 
b/whitebox-tools-app/src/tools/math_stat_analysis/paired_sample_t_test.rs index e1362a19..583de0ad 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/paired_sample_t_test.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/paired_sample_t_test.rs @@ -350,8 +350,8 @@ impl WhiteboxTool for PairedSampleTTest { let (mut row, mut col): (isize, isize); let mut sample_num = 0usize; while sample_num < num_samples { - row = rng.gen_range(0, rows as isize); - col = rng.gen_range(0, columns as isize); + row = rng.gen_range(0..rows as isize); + col = rng.gen_range(0..columns as isize); z1 = input1.get_value(row, col); z2 = input2.get_value(row, col); if z1 != nodata1 && z2 != nodata2 { diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/random_sample.rs b/whitebox-tools-app/src/tools/math_stat_analysis/random_sample.rs index 9311c655..4cf12f4d 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/random_sample.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/random_sample.rs @@ -231,8 +231,8 @@ impl WhiteboxTool for RandomSample { let mut sample_num = 0usize; let mut num_tries = 0usize; while sample_num < num_samples { - let row = rng.gen_range(0, rows as isize); //row_rng.ind_sample(&mut rng); - let col = rng.gen_range(0, columns as isize); //col_rng.ind_sample(&mut rng); + let row = rng.gen_range(0..rows as isize); //row_rng.ind_sample(&mut rng); + let col = rng.gen_range(0..columns as isize); //col_rng.ind_sample(&mut rng); if output.get_value(row, col) == 0f64 && input.get_value(row, col) != nodata { sample_num += 1; output.set_value(row, col, sample_num as f64); diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/turning_bands.rs b/whitebox-tools-app/src/tools/math_stat_analysis/turning_bands.rs index 356ee67f..81022b83 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/turning_bands.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/turning_bands.rs @@ -319,28 +319,28 @@ impl WhiteboxTool for 
TurningBandsSimulation { // assign the spatially autocorrelated data line an equation of a transect of the grid // first, pick two points on different edges of the grid at random. // Edges are as follows 0 = left, 1 = top, 2 = right, and 3 = bottom - let edge1 = rng.gen_range(0, 4); //between.ind_sample(&mut rng); + let edge1 = rng.gen_range(0..4); //between.ind_sample(&mut rng); let mut edge2 = edge1; while edge2 == edge1 { - edge2 = rng.gen_range(0, 4); //between.ind_sample(&mut rng); + edge2 = rng.gen_range(0..4); //between.ind_sample(&mut rng); } match edge1 { 0 => { pnt1x = 0f64; - pnt1y = rng2.gen_range(0, rows as isize) as f64; //between_rows.ind_sample(&mut rng); + pnt1y = rng2.gen_range(0..rows as isize) as f64; //between_rows.ind_sample(&mut rng); } 1 => { - pnt1x = rng2.gen_range(0, columns as isize) as f64; //between_cols.ind_sample(&mut rng); + pnt1x = rng2.gen_range(0..columns as isize) as f64; //between_cols.ind_sample(&mut rng); pnt1y = 0f64; } 2 => { pnt1x = (columns - 1) as f64; - pnt1y = rng2.gen_range(0, rows as isize) as f64; //between_rows.ind_sample(&mut rng); + pnt1y = rng2.gen_range(0..rows as isize) as f64; //between_rows.ind_sample(&mut rng); } _ => { // 3 - pnt1x = rng2.gen_range(0, columns as isize) as f64; //between_cols.ind_sample(&mut rng); + pnt1x = rng2.gen_range(0..columns as isize) as f64; //between_cols.ind_sample(&mut rng); pnt1y = (rows - 1) as f64; } } @@ -348,19 +348,19 @@ impl WhiteboxTool for TurningBandsSimulation { match edge2 { 0 => { pnt2x = 0f64; - pnt2y = rng2.gen_range(0, rows as isize) as f64; //between_rows.ind_sample(&mut rng); + pnt2y = rng2.gen_range(0..rows as isize) as f64; //between_rows.ind_sample(&mut rng); } 1 => { - pnt2x = rng2.gen_range(0, columns as isize) as f64; //between_cols.ind_sample(&mut rng); + pnt2x = rng2.gen_range(0..columns as isize) as f64; //between_cols.ind_sample(&mut rng); pnt2y = 0f64; } 2 => { pnt2x = (columns - 1) as f64; - pnt2y = rng2.gen_range(0, rows as isize) as f64; 
//between_rows.ind_sample(&mut rng); + pnt2y = rng2.gen_range(0..rows as isize) as f64; //between_rows.ind_sample(&mut rng); } _ => { // 3 - pnt2x = rng2.gen_range(0, columns as isize) as f64; //between_cols.ind_sample(&mut rng); + pnt2x = rng2.gen_range(0..columns as isize) as f64; //between_cols.ind_sample(&mut rng); pnt2y = (rows - 1) as f64; } } @@ -370,19 +370,19 @@ impl WhiteboxTool for TurningBandsSimulation { match edge2 { 0 => { pnt2x = 0f64; - pnt2y = rng2.gen_range(0, rows as isize) as f64; //between_rows.ind_sample(&mut rng); + pnt2y = rng2.gen_range(0..rows as isize) as f64; //between_rows.ind_sample(&mut rng); } 1 => { - pnt2x = rng2.gen_range(0, columns as isize) as f64; //between_cols.ind_sample(&mut rng); + pnt2x = rng2.gen_range(0..columns as isize) as f64; //between_cols.ind_sample(&mut rng); pnt2y = 0f64; } 2 => { pnt2x = (columns - 1) as f64; - pnt2y = rng2.gen_range(0, rows as isize) as f64; //between_rows.ind_sample(&mut rng); + pnt2y = rng2.gen_range(0..rows as isize) as f64; //between_rows.ind_sample(&mut rng); } _ => { // 3 - pnt2x = rng2.gen_range(0, columns as isize) as f64; //between_cols.ind_sample(&mut rng); + pnt2x = rng2.gen_range(0..columns as isize) as f64; //between_cols.ind_sample(&mut rng); pnt2y = (rows - 1) as f64; } } diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/two_sample_ks_test.rs b/whitebox-tools-app/src/tools/math_stat_analysis/two_sample_ks_test.rs index 055fc745..01616256 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/two_sample_ks_test.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/two_sample_ks_test.rs @@ -300,8 +300,8 @@ impl WhiteboxTool for TwoSampleKsTest { let (mut row, mut col): (isize, isize); let mut sample_num = 0usize; while sample_num < num_samples { - row = rng.gen_range(0, rows as isize); - col = rng.gen_range(0, columns as isize); + row = rng.gen_range(0..rows as isize); + col = rng.gen_range(0..columns as isize); z1 = input1.get_value(row, col); if z1 != nodata1 
{ data1.push(z1); @@ -320,8 +320,8 @@ impl WhiteboxTool for TwoSampleKsTest { sample_num = 0usize; while sample_num < num_samples { - row = rng.gen_range(0, rows as isize); - col = rng.gen_range(0, columns as isize); + row = rng.gen_range(0..rows as isize); + col = rng.gen_range(0..columns as isize); z2 = input2.get_value(row, col); if z2 != nodata2 { data2.push(z2); diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/wilcoxon_signed_rank_test.rs b/whitebox-tools-app/src/tools/math_stat_analysis/wilcoxon_signed_rank_test.rs index d3a5254a..086881bc 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/wilcoxon_signed_rank_test.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/wilcoxon_signed_rank_test.rs @@ -6,7 +6,7 @@ Last Modified: 25/10/2019 License: MIT */ -use self::statrs::distribution::{Normal, Univariate}; +use self::statrs::distribution::{ContinuousCDF, Normal}; use whitebox_raster::*; use whitebox_common::rendering::html::*; use whitebox_common::rendering::LineGraph; @@ -350,8 +350,8 @@ impl WhiteboxTool for WilcoxonSignedRankTest { let (mut row, mut col): (isize, isize); let mut sample_num = 0usize; while sample_num < num_samples { - row = rng.gen_range(0, rows as isize); - col = rng.gen_range(0, columns as isize); + row = rng.gen_range(0..rows as isize); + col = rng.gen_range(0..columns as isize); z1 = input1.get_value(row, col); z2 = input2.get_value(row, col); if z1 != nodata1 && z2 != nodata2 { diff --git a/whitebox-tools-app/src/tools/mod.rs b/whitebox-tools-app/src/tools/mod.rs index a49eea68..1738ffbf 100755 --- a/whitebox-tools-app/src/tools/mod.rs +++ b/whitebox-tools-app/src/tools/mod.rs @@ -1327,7 +1327,6 @@ impl ToolManager { "recreatepasslines", "registerlicense", "removefieldedgepoints", - "repairstreamvectortopology", "ringcurvature", "rotor", "shadowanimation", @@ -1341,7 +1340,6 @@ impl ToolManager { "svmregression", "topographicpositionanimation", "unsphericity", - "vectorstreamnetworkanalysis", 
"verticalexcesscurvature", "yieldfilter", "yieldmap", @@ -1482,7 +1480,6 @@ Example usage: match self.get_tool(tool_name.as_ref()) { Some(tool) => println!("{}", tool.get_tool_parameters()), None => { - // println!("I'm here {}", tool_name); let plugin_list = self.get_plugin_list()?; if plugin_list.contains_key(&tool_name.to_lowercase()) { let plugin_data = plugin_list.get(&tool_name.to_lowercase()).expect(&format!("Unrecognized plugin name {}.", tool_name)); diff --git a/whitebox-tools-app/src/tools/terrain_analysis/time_in_daylight.rs b/whitebox-tools-app/src/tools/terrain_analysis/time_in_daylight.rs index d1416ee1..e2502ae0 100755 --- a/whitebox-tools-app/src/tools/terrain_analysis/time_in_daylight.rs +++ b/whitebox-tools-app/src/tools/terrain_analysis/time_in_daylight.rs @@ -9,8 +9,7 @@ License: MIT use whitebox_raster::Raster; use whitebox_common::structures::Array2D; use crate::tools::*; -use chrono::prelude::*; -use chrono::{Date, FixedOffset, NaiveTime, TimeZone}; +use time::{Date, macros::time, OffsetDateTime, Time, UtcOffset}; use num_cpus; use rayon::prelude::*; use std::env; @@ -253,8 +252,10 @@ impl WhiteboxTool for TimeInDaylight { let mut utc_offset = 0f64; let mut start_day = 1u32; let mut end_day = 365u32; - let mut start_time = NaiveTime::from_hms_opt(0, 0, 0).unwrap(); // midnight - let mut end_time = NaiveTime::from_hms_opt(23, 59, 59).unwrap(); // the second before midnight + // let mut start_time = NaiveTime::from_hms_opt(0, 0, 0).unwrap(); // midnight + // let mut end_time = NaiveTime::from_hms_opt(23, 59, 59).unwrap(); // the second before midnight + let mut start_time = time!(0:0:0); + let mut end_time = time!(23:59:59); if args.len() == 0 { return Err(Error::new( @@ -413,7 +414,8 @@ impl WhiteboxTool for TimeInDaylight { 0i32 }; if hr >= 0 && hr < 24 && min >= 0 && min < 60 && sec >= 0 && sec < 60 { - start_time = NaiveTime::from_hms_opt(hr as u32, min as u32, sec as u32).unwrap(); + // start_time = NaiveTime::from_hms_opt(hr as u32, min 
as u32, sec as u32).unwrap(); + start_time = Time::from_hms(hr as u8, min as u8, sec as u8).unwrap(); } else { panic!("Invalid start time."); } @@ -447,7 +449,8 @@ impl WhiteboxTool for TimeInDaylight { 0i32 }; if hr >= 0 && hr < 24 && min >= 0 && min < 60 && sec >= 0 && sec < 60 { - end_time = NaiveTime::from_hms_opt(hr as u32, min as u32, sec as u32).unwrap(); + // end_time = NaiveTime::from_hms_opt(hr as u32, min as u32, sec as u32).unwrap(); + end_time = Time::from_hms(hr as u8, min as u8, sec as u8).unwrap(); } else { panic!("Invalid end time."); } @@ -567,8 +570,10 @@ impl WhiteboxTool for TimeInDaylight { let mut azimuth = 0f32; // altitudes_and_durations key: altitude, duration, time (as NaiveTime), day (as ordinal) + // let mut altitudes_and_durations = + // vec![(0f32, 0f64, NaiveTime::from_hms_opt(0, 0, 0).unwrap(), 0u32); 365]; let mut altitudes_and_durations = - vec![(0f32, 0f64, NaiveTime::from_hms_opt(0, 0, 0).unwrap(), 0u32); 365]; + vec![(0f32, 0f64, Time::from_hms(0, 0, 0).unwrap(), 0u32); 365]; let mut horizon_angle: Array2D = Array2D::new(rows, columns, 0f32, nodata_f32).expect("Error creating Array2D"); let mut total_daylight = 0f64; @@ -582,7 +587,7 @@ impl WhiteboxTool for TimeInDaylight { almanac[day - 1].data[bin].altitude as f32, almanac[day - 1].data[bin].duration, almanac[day - 1].data[bin].time, - almanac[day - 1].date.ordinal(), + almanac[day - 1].date.ordinal() as u32, ); if altitudes_and_durations[day - 1].3 >= start_day && altitudes_and_durations[day - 1].3 <= end_day @@ -904,26 +909,42 @@ fn generate_almanac( az_interval: f64, seconds_interval: usize, ) -> Vec { - let hour_sec = 3600f64; + // let hour_sec = 3600f64; let mut almanac = vec![]; let mut num_days = 0; // let doy = 1; //233; - for doy in 1..=366 { - let midnight = if utc_offset < 0f64 { - FixedOffset::west_opt((utc_offset.abs() * hour_sec) as i32).unwrap() - .yo(2020, doy as u32) - .and_hms(0, 0, 0) - } else { - FixedOffset::east_opt((utc_offset * hour_sec) as 
i32).unwrap() - .yo(2020, doy as u32) - .and_hms(0, 0, 0) - }; + for doy in 1..366 { + // let midnight = if utc_offset < 0f64 { + // FixedOffset::west_opt((utc_offset.abs() * hour_sec) as i32).unwrap() + // .yo(2020, doy as u32) + // .and_hms_opt(0, 0, 0).unwrap() + // // .from_local_datatime( + // // NaiveDate::from_yo_opt(2023, doy).unwrap().and_hms_opt(0, 0, 0).unwrap() + // // ) + + // } else { + // FixedOffset::east_opt((utc_offset * hour_sec) as i32).unwrap() + // .yo(2020, doy as u32) + // .and_hms_opt(0, 0, 0).unwrap() + // // .from_local_datatime( + // // NaiveDate::from_yo_opt(2023, doy).unwrap().and_hms_opt(0, 0, 0).unwrap() + // // ) + // }; + + let midnight = Date::from_ordinal_date(2023, doy as u16) + .unwrap() + .with_hms(0, 0, 0) + .unwrap() + .assume_offset( + UtcOffset::from_hms(utc_offset as i8, 0, 0) + .unwrap() + ); let mut diff: f64; let mut sunrise = false; let num_bins = (360.0f64 / az_interval).ceil() as usize; almanac.push(Day { - date: midnight.date(), + date: midnight, sunrise: PositionTime::default(), sunset: PositionTime::default(), data: vec![PositionTime::default(); num_bins], @@ -932,16 +953,31 @@ fn generate_almanac( for hr in 0..24 { for minute in 0..60 { for sec in (0..=45).step_by(seconds_interval) { - let dt = if utc_offset < 0f64 { - FixedOffset::west_opt((utc_offset.abs() * hour_sec) as i32).unwrap() - .yo(2020, doy as u32) - .and_hms_opt(hr, minute, sec).unwrap() - } else { - FixedOffset::east_opt((utc_offset * hour_sec) as i32).unwrap() - .yo(2020, doy as u32) - .and_hms_opt(hr, minute, sec).unwrap() - }; - let unixtime = dt.timestamp() * 1000 + dt.timestamp_subsec_millis() as i64; + // let dt = if utc_offset < 0f64 { + // FixedOffset::west_opt((utc_offset.abs() * hour_sec) as i32).unwrap() + // // .yo(2020, doy as u32) + // // .and_hms_opt(hr, minute, sec).unwrap() + // .from_local_datetime( + // &NaiveDateTime::new(NaiveDate::from_yo_opt(2023, doy).unwrap(), NaiveTime::from_hms_opt(hr, minute, sec).unwrap()) + // ) + // } 
else { + // FixedOffset::east_opt((utc_offset * hour_sec) as i32).unwrap() + // // .yo(2020, doy as u32) + // // .and_hms_opt(hr, minute, sec).unwrap() + // .from_local_datetime( + // &NaiveDateTime::new(NaiveDate::from_yo_opt(2023, doy).unwrap(), NaiveTime::from_hms_opt(hr, minute, sec).unwrap()) + // ) + // }; + let dt = Date::from_ordinal_date(2023, doy as u16) + .unwrap() + .with_hms(hr as u8, minute as u8, sec as u8) + .unwrap() + .assume_offset( + UtcOffset::from_hms(utc_offset as i8, 0, 0) + .unwrap() + ); + // let unixtime = dt.timestamp() * 1000 + dt.timestamp_subsec_millis() as i64; + let unixtime = (dt.unix_timestamp_nanos() / 1000) as i64; let pos = pos(unixtime, latitude, longitude); let az_actual = pos.azimuth.to_degrees(); let alt = pos.altitude.to_degrees(); @@ -987,7 +1023,8 @@ fn generate_almanac( } pub struct Day { - date: Date, + // date: Date, + date: OffsetDateTime, sunrise: PositionTime, sunset: PositionTime, data: Vec, @@ -1010,7 +1047,7 @@ pub struct PositionTime { azimuth: f64, // in degrees actual_azimuth: f64, // in degrees; because we are finding the closest time/position to the target azimuth, this won't be the same as azimuth, with proximity determined by the temporal resolution altitude: f64, // in degrees - time: NaiveTime, + time: Time, diff: f64, // only used for the approximation of azimuth duration: f64, // in seconds } @@ -1021,7 +1058,7 @@ impl PositionTime { azimuth: 0f64, actual_azimuth: 0f64, altitude: 0f64, - time: NaiveTime::from_hms_opt(0, 0, 0).unwrap(), + time: Time::from_hms(0, 0, 0).unwrap(), diff: 360f64, duration: 0f64, } diff --git a/whitebox-vector/Cargo.toml b/whitebox-vector/Cargo.toml index 87e02992..9e70aecb 100755 --- a/whitebox-vector/Cargo.toml +++ b/whitebox-vector/Cargo.toml @@ -6,5 +6,5 @@ edition = "2021" [dependencies] byteorder = "^1.3.1" -chrono = "0.4.15" +chrono = "0.4.21" whitebox_common = { path = "../whitebox-common" } \ No newline at end of file diff --git a/whitebox_tools.py 
b/whitebox_tools.py index 49aa628d..02f67912 100755 --- a/whitebox_tools.py +++ b/whitebox_tools.py @@ -725,6 +725,7 @@ def activate_license(self): + ############## @@ -906,7 +907,7 @@ def merge_vectors(self, inputs, output, callback=None): return self.run_tool('merge_vectors', args, callback) # returns 1 if error def modify_no_data_value(self, i, new_value="-32768.0", callback=None): - """Converts nodata values in a raster to zero. + """Modifies nodata values in a raster. Keyword arguments: @@ -1068,7 +1069,7 @@ def remove_raster_polygon_holes(self, i, output, threshold=3, use_diagonals=True return self.run_tool('remove_raster_polygon_holes', args, callback) # returns 1 if error def set_nodata_value(self, i, output, back_value=0.0, callback=None): - """Assign a specified value in an input image to the NoData value. + """Assign the NoData value for an input image. Keyword arguments: @@ -1478,7 +1479,7 @@ def find_lowest_or_highest_points(self, i, output, out_type="lowest", callback=N return self.run_tool('find_lowest_or_highest_points', args, callback) # returns 1 if error def heat_map(self, i, output, weight_field=None, bandwidth="", kernel="quartic", cell_size="", base=None, callback=None): - """This tool calculates a heat map, or kernel density estimation (KDE), for an input point set. + """Calculates a heat map, or kernel density estimation (KDE), for an input point set. Keyword arguments: @@ -1882,7 +1883,7 @@ def smooth_vectors(self, i, output, filter=3, callback=None): return self.run_tool('smooth_vectors', args, callback) # returns 1 if error def split_vector_lines(self, i, output, length=None, callback=None): - """This tool can be used to split a vector line coverage into even-lengthed segments. + """Used to split a vector line coverage into even-lengthed segments. 
Keyword arguments: @@ -1922,7 +1923,7 @@ def tin_gridding(self, i, output, field=None, use_z=False, resolution=None, base return self.run_tool('tin_gridding', args, callback) # returns 1 if error def travelling_salesman_problem(self, i, output, duration=60, callback=None): - """This tool finds approximate solutions to travelling salesman problems, the goal of which is to identify the shortest route connecting a set of locations. + """Finds approximate solutions to travelling salesman problems, the goal of which is to identify the shortest route connecting a set of locations. Keyword arguments: @@ -2105,7 +2106,7 @@ def clip(self, i, clip, output, callback=None): args.append("--output='{}'".format(output)) return self.run_tool('clip', args, callback) # returns 1 if error - def clip_raster_to_polygon(self, i, polygons, output, maintain_dimensions=True, callback=None): + def clip_raster_to_polygon(self, i, polygons, output, maintain_dimensions=False, callback=None): """Clips a raster to a vector polygon. Keyword arguments: @@ -2895,6 +2896,28 @@ def curvedness(self, dem, output, log=False, zfactor=1.0, callback=None): args.append("--zfactor={}".format(zfactor)) return self.run_tool('curvedness', args, callback) # returns 1 if error + def dem_void_filling(self, dem, fill, output, mean_plane_dist=20, edge_treatment="use DEM", weight_value=2.0, callback=None): + """This tool can be used to fill the void areas of a DEM using another fill DEM data set. + + Keyword arguments: + + dem -- Name of the input raster DEM file, containing the void areas. + fill -- Name of the input fill DEM file, containing the values used to fill the void areas in the other DEM. + output -- Name of the output void-filled DEM file. + mean_plane_dist -- Distance to void edge at which the mean-plane value is used as an offset, measured in grid cells. + edge_treatment -- How should void-edge cells be treated? Options include 'use DEM' (default), 'use Fill', 'average'. 
+ weight_value -- Weight value used for IDW interpolation (default is 2.0). + callback -- Custom function for handling tool text outputs. + """ + args = [] + args.append("--dem='{}'".format(dem)) + args.append("--fill='{}'".format(fill)) + args.append("--output='{}'".format(output)) + args.append("--mean_plane_dist={}".format(mean_plane_dist)) + args.append("--edge_treatment={}".format(edge_treatment)) + args.append("--weight_value={}".format(weight_value)) + return self.run_tool('dem_void_filling', args, callback) # returns 1 if error + def dev_from_mean_elev(self, dem, output, filterx=11, filtery=11, callback=None): """Calculates deviation from mean elevation. @@ -3102,7 +3125,7 @@ def embankment_mapping(self, dem, road_vec, output, search_dist=2.5, min_road_wi return self.run_tool('embankment_mapping', args, callback) # returns 1 if error def exposure_towards_wind_flux(self, dem, output, azimuth="", max_dist="", zfactor="", callback=None): - """This tool evaluates hydrologic connectivity within a DEM. + """Evaluates hydrologic connectivity within a DEM. Keyword arguments: @@ -3218,7 +3241,7 @@ def gaussian_curvature(self, dem, output, log=False, zfactor=None, callback=None return self.run_tool('gaussian_curvature', args, callback) # returns 1 if error def gaussian_scale_space(self, dem, output, output_zscore, output_scale, points=None, sigma=0.5, step=0.5, num_steps=10, lsp="Slope", z_factor=None, callback=None): - """This tool uses the fast Gaussian approximation algorithm to produce scaled land-surface parameter measurements from an input DEM. + """Uses the fast Gaussian approximation algorithm to produce scaled land-surface parameter measurements from an input DEM. 
Keyword arguments: @@ -3418,7 +3441,7 @@ def local_hypsometric_analysis(self, i, out_mag, out_scale, min_scale=4, step=1, return self.run_tool('local_hypsometric_analysis', args, callback) # returns 1 if error def local_quadratic_regression(self, dem, output, filter=3, callback=None): - """This tool is an implementation of the constrained quadratic regression algorithm using a flexible window size described in Wood (1996). + """An implementation of the constrained quadratic regression algorithm using a flexible window size described in Wood (1996). Keyword arguments: @@ -3693,6 +3716,36 @@ def multidirectional_hillshade(self, dem, output, altitude=45.0, zfactor=None, f if full_mode: args.append("--full_mode") return self.run_tool('multidirectional_hillshade', args, callback) # returns 1 if error + def multiscale_curvatures(self, dem, out_mag, curv_type="ProfileCurv", out_scale=None, min_scale=0, step=1, num_steps=1, step_nonlinearity=1.0, log=True, standardize=False, callback=None): + """This tool calculates several multiscale curvatures and curvature-based indices from an input DEM. + + Keyword arguments: + + dem -- Name of the input raster DEM file. + curv_type -- Curvature type. + out_mag -- Output raster magnitude file. + out_scale -- Output raster scale file. + min_scale -- Minimum search neighbourhood radius in grid cells. + step -- Step size as any positive non-zero integer. + num_steps -- Number of steps. + step_nonlinearity -- Step nonlinearity factor (1.0-2.0 is typical). + log -- Display output values using a log-scale. + standardize -- Should each scale be standardized to z-scores?. + callback -- Custom function for handling tool text outputs. 
+ """ + args = [] + args.append("--dem='{}'".format(dem)) + args.append("--curv_type={}".format(curv_type)) + args.append("--out_mag='{}'".format(out_mag)) + if out_scale is not None: args.append("--out_scale='{}'".format(out_scale)) + args.append("--min_scale={}".format(min_scale)) + args.append("--step={}".format(step)) + args.append("--num_steps={}".format(num_steps)) + args.append("--step_nonlinearity={}".format(step_nonlinearity)) + if log: args.append("--log") + if standardize: args.append("--standardize") + return self.run_tool('multiscale_curvatures', args, callback) # returns 1 if error + def multiscale_elevation_percentile(self, dem, out_mag, out_scale, sig_digits=3, min_scale=4, step=1, num_steps=10, step_nonlinearity=1.0, callback=None): """Calculates surface roughness over a range of spatial scales. @@ -4357,6 +4410,40 @@ def time_in_daylight(self, dem, output, lat, long, az_fraction=10.0, max_dist=10 args.append("--end_time={}".format(end_time)) return self.run_tool('time_in_daylight', args, callback) # returns 1 if error + def topo_render(self, dem, output, palette="soft", rev_palette=False, az=315.0, alt=30.0, background_hgt_offset=10.0, polygon=None, background_clr="[255, 255, 255]", attenuation=0.6, ambient_light=0.2, z_factor=1.0, callback=None): + """This tool creates a pseudo-3D rendering from an input DEM, for the purpose of effective topographic visualization. + + Keyword arguments: + + dem -- Name of the input digital elevation model (DEM) raster file. + output -- Name of the output raster file. + palette -- Palette name; options are 'atlas', 'high_relief', 'arid', 'soft', 'earthtones', 'muted', 'light_quant', 'purple', 'viridi', 'gn_yl', 'pi_y_g', 'bl_yl_rd', 'deep', 'imhof', and 'white'. + rev_palette -- Reverse the palette?. + az -- Light source azimuth direction (degrees, 0-360). + alt -- Light source altitude (degrees, 0-90). + background_hgt_offset -- Offset height of background, in z-units. 
+ polygon -- Clipping polygon vector file (optional). + background_clr -- Background red-green-blue (RGB) or red-green-blue-alpha (RGBA) colour, e.g. '[255, 255, 245]', '[255, 255, 245, 200]'. + attenuation -- Attenuation parameter. Range is 0-4. Zero means no attenuation. + ambient_light -- Ambient light parameter. Range is 0.0-0.7. Zero means no ambient light. + z_factor -- Elevation multiplier, or a vertical exageration. + callback -- Custom function for handling tool text outputs. + """ + args = [] + args.append("--dem='{}'".format(dem)) + args.append("--output='{}'".format(output)) + args.append("--palette={}".format(palette)) + if rev_palette: args.append("--rev_palette") + args.append("--az={}".format(az)) + args.append("--alt={}".format(alt)) + args.append("--background_hgt_offset={}".format(background_hgt_offset)) + if polygon is not None: args.append("--polygon='{}'".format(polygon)) + args.append("--background_clr={}".format(background_clr)) + args.append("--attenuation={}".format(attenuation)) + args.append("--ambient_light={}".format(ambient_light)) + args.append("--z_factor={}".format(z_factor)) + return self.run_tool('topo_render', args, callback) # returns 1 if error + def topographic_position_animation(self, i, output, palette="bl_yl_rd", min_scale=1, num_steps=100, step_nonlinearity=1.5, height=600, delay=250, label="", dev_max=False, callback=None): """This tool creates an animated GIF of multi-scale local topographic position (elevation deviation). @@ -4812,7 +4899,7 @@ def downslope_flowpath_length(self, d8_pntr, output, watersheds=None, weights=No return self.run_tool('downslope_flowpath_length', args, callback) # returns 1 if error def edge_contamination(self, dem, output, flow_type="mfd", zfactor="", callback=None): - """This tool identifies grid cells within an input DEM that may be impacted by edge contamination for hydrological applications. 
+ """Identifies grid cells within an input DEM that may be impacted by edge contamination for hydrological applications. Keyword arguments: @@ -5250,7 +5337,7 @@ def max_upslope_flowpath_length(self, dem, output, callback=None): return self.run_tool('max_upslope_flowpath_length', args, callback) # returns 1 if error def max_upslope_value(self, dem, values, output, callback=None): - """This tool calculates the maximum upslope value from an input values raster along flowpaths. + """Calculates the maximum upslope value from an input values raster along flowpaths. Keyword arguments: @@ -5304,7 +5391,7 @@ def num_inflowing_neighbours(self, dem, output, callback=None): return self.run_tool('num_inflowing_neighbours', args, callback) # returns 1 if error def qin_flow_accumulation(self, dem, output, out_type="specific contributing area", exponent=10.0, max_slope=45.0, threshold=None, log=False, clip=False, callback=None): - """This tool calculates Qin et al. (2007) flow accumulation. + """Calculates Qin et al. (2007) flow accumulation. Keyword arguments: @@ -5330,7 +5417,7 @@ def qin_flow_accumulation(self, dem, output, out_type="specific contributing are return self.run_tool('qin_flow_accumulation', args, callback) # returns 1 if error def quinn_flow_accumulation(self, dem, output, out_type="specific contributing area", exponent=1.0, threshold=None, log=False, clip=False, callback=None): - """This tool calculates Quinn et al. (1995) flow accumulation. + """Calculates Quinn et al. (1995) flow accumulation. Keyword arguments: @@ -5374,7 +5461,7 @@ def raise_walls(self, i, dem, output, breach=None, height=100.0, callback=None): return self.run_tool('raise_walls', args, callback) # returns 1 if error def rho8_flow_accumulation(self, i, output, out_type="specific contributing area", log=False, clip=False, pntr=False, esri_pntr=False, callback=None): - """This tool calculates Fairfield and Leymarie (1991) flow accumulation. 
+ """Calculates Fairfield and Leymarie (1991) flow accumulation. Keyword arguments: @@ -7237,7 +7324,7 @@ def height_above_ground(self, i=None, output=None, callback=None): Keyword arguments: i -- Input LiDAR file (including extension). - output -- Output raster file (including extension). + output -- Output lidar file (including extension). callback -- Custom function for handling tool text outputs. """ args = [] @@ -7245,6 +7332,30 @@ def height_above_ground(self, i=None, output=None, callback=None): if output is not None: args.append("--output='{}'".format(output)) return self.run_tool('height_above_ground', args, callback) # returns 1 if error + def individual_tree_detection(self, i=None, output=None, min_search_radius=1.0, min_height=0.0, max_search_radius="", max_height="", only_use_veg=False, callback=None): + """Identifies points in a LiDAR point cloud that are associated with the tops of individual trees. + + Keyword arguments: + + i -- Name of the input LiDAR file. + output -- Name of the output vector points file. + min_search_radius -- Minimum search radius (m). + min_height -- Minimum height (m). + max_search_radius -- Maximum search radius (m). + max_height -- Maximum height (m). + only_use_veg -- Only use veg. class points?. + callback -- Custom function for handling tool text outputs. + """ + args = [] + if i is not None: args.append("--input='{}'".format(i)) + if output is not None: args.append("--output='{}'".format(output)) + args.append("--min_search_radius={}".format(min_search_radius)) + args.append("--min_height={}".format(min_height)) + args.append("--max_search_radius={}".format(max_search_radius)) + args.append("--max_height={}".format(max_height)) + if only_use_veg: args.append("--only_use_veg") + return self.run_tool('individual_tree_detection', args, callback) # returns 1 if error + def las_to_ascii(self, inputs, callback=None): """Converts one or more LAS files into ASCII text files. 
@@ -7395,7 +7506,7 @@ def lidar_colourize(self, in_lidar, in_image, output, callback=None): args.append("--output='{}'".format(output)) return self.run_tool('lidar_colourize', args, callback) # returns 1 if error - def lidar_contour(self, i=None, output=None, interval=10.0, smooth=5, parameter="elevation", returns="all", exclude_cls=None, minz=None, maxz=None, max_triangle_edge_length=None, callback=None): + def lidar_contour(self, i=None, output=None, interval=10.0, base=0.0, smooth=5, parameter="elevation", returns="all", exclude_cls=None, minz=None, maxz=None, max_triangle_edge_length=None, callback=None): """This tool creates a vector contour coverage from an input LiDAR point file. Keyword arguments: @@ -7403,6 +7514,7 @@ def lidar_contour(self, i=None, output=None, interval=10.0, smooth=5, parameter= i -- Name of the input LiDAR points. output -- Name of the output vector lines file. interval -- Contour interval. + base -- Base contour. smooth -- Smoothing filter size (in num. points), e.g. 3, 5, 7, 9, 11. parameter -- Interpolation parameter; options are 'elevation' (default), 'intensity', 'user_data'. returns -- Point return types to include; options are 'all' (default), 'last', 'first'. @@ -7416,6 +7528,7 @@ def lidar_contour(self, i=None, output=None, interval=10.0, smooth=5, parameter= if i is not None: args.append("--input='{}'".format(i)) if output is not None: args.append("--output='{}'".format(output)) args.append("--interval={}".format(interval)) + args.append("--base={}".format(base)) args.append("--smooth={}".format(smooth)) args.append("--parameter={}".format(parameter)) args.append("--returns={}".format(returns)) @@ -7664,7 +7777,7 @@ def lidar_nearest_neighbour_gridding(self, i=None, output=None, parameter="eleva i -- Input LiDAR file (including extension). output -- Output raster file (including extension). 
- parameter -- Interpolation parameter; options are 'elevation' (default), 'intensity', 'class', 'return_number', 'number_of_returns', 'scan angle', 'rgb', 'user data'. + parameter -- Interpolation parameter; options are 'elevation' (default), 'intensity', 'class', 'return_number', 'number_of_returns', 'scan angle', 'rgb', 'user data', 'time'. returns -- Point return types to include; options are 'all' (default), 'last', 'first'. resolution -- Output raster's grid resolution. radius -- Search Radius. @@ -8157,6 +8270,22 @@ def normal_vectors(self, i, output, radius=1.0, callback=None): args.append("--radius={}".format(radius)) return self.run_tool('normal_vectors', args, callback) # returns 1 if error + def normalize_lidar(self, i, output, dtm, callback=None): + """Normalizes a LiDAR point cloud. + + Keyword arguments: + + i -- Name of the input LiDAR file. + output -- Name of the output LiDAR file. + dtm -- Name of the input digital terrain model (DTM) raster file. + callback -- Custom function for handling tool text outputs. + """ + args = [] + args.append("--input='{}'".format(i)) + args.append("--output='{}'".format(output)) + args.append("--dtm='{}'".format(dtm)) + return self.run_tool('normalize_lidar', args, callback) # returns 1 if error + def recover_flightline_info(self, i, output, max_time_diff=5.0, pt_src_id=False, user_data=False, rgb=False, callback=None): """Associates LiDAR points by their flightlines. @@ -8740,7 +8869,7 @@ def attribute_histogram(self, i, field, output, callback=None): Keyword arguments: - i -- Input raster file. + i -- Input vector file. field -- Input field name in attribute table. output -- Output HTML file (default name will be based on input file if unspecified). callback -- Custom function for handling tool text outputs. 
@@ -8786,7 +8915,7 @@ def ceil(self, i, output, callback=None): return self.run_tool('ceil', args, callback) # returns 1 if error def conditional_evaluation(self, i, output, statement="", true=None, false=None, callback=None): - """This tool performs a conditional evaluation (if-then-else) operation on a raster. + """Performs a conditional evaluation (if-then-else) operation on a raster. Keyword arguments: @@ -9260,7 +9389,7 @@ def list_unique_values(self, i, field, output, callback=None): Keyword arguments: - i -- Input raster file. + i -- Input vector file. field -- Input field name in attribute table. output -- Output HTML file (default name will be based on input file if unspecified). callback -- Custom function for handling tool text outputs. @@ -9271,6 +9400,18 @@ def list_unique_values(self, i, field, output, callback=None): args.append("--output='{}'".format(output)) return self.run_tool('list_unique_values', args, callback) # returns 1 if error + def list_unique_values_raster(self, i, callback=None): + """Lists the unique values contained in a field within a vector's attribute table. + + Keyword arguments: + + i -- Input vector file. + callback -- Custom function for handling tool text outputs. + """ + args = [] + args.append("--input='{}'".format(i)) + return self.run_tool('list_unique_values_raster', args, callback) # returns 1 if error + def ln(self, i, output, callback=None): """Returns the natural logarithm of values in a raster. @@ -9522,7 +9663,7 @@ def random_sample(self, base, output, num_samples=1000, callback=None): return self.run_tool('random_sample', args, callback) # returns 1 if error def raster_calculator(self, output, statement="", callback=None): - """This tool performs a complex mathematical operations on one or more input raster images on a cell-to-cell basis. + """Performs a complex mathematical operations on one or more input raster images on a cell-to-cell basis. Keyword arguments: