diff --git a/Cargo.lock b/Cargo.lock index 704419af..e2ba64c8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,21 @@ dependencies = [ "memchr", ] +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anes" version = "0.1.6" @@ -75,9 +90,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" [[package]] name = "approx" @@ -242,6 +257,20 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" +[[package]] +name = "chrono" +version = "0.4.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-link", +] + [[package]] name = "ciborium" version = "0.2.2" @@ -300,7 +329,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -336,6 +365,12 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + [[package]] name = "cpufeatures" version = "0.2.17" @@ -366,7 +401,7 @@ dependencies = [ "clap", "criterion-plot", "is-terminal", - "itertools", + "itertools 0.10.5", "num-traits", "once_cell", "oorandom", @@ -387,7 +422,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", - "itertools", + "itertools 0.10.5", ] [[package]] @@ -529,7 +564,7 @@ checksum = "7e8671d54058979a37a26f3511fbf8d198ba1aa35ffb202c42587d918d77213a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -556,6 +591,7 @@ dependencies = [ "numpy", "pyo3", "pyo3-log", + "pyo3-stub-gen", "rand_xoshiro", "serde", "serde_json", @@ -710,6 +746,12 @@ dependencies = [ "log", ] +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + [[package]] name = "erased-serde" version = "0.4.5" @@ -853,6 +895,12 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + [[package]] name = "heck" version = "0.5.0" @@ -871,6 +919,30 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +[[package]] +name = "iana-time-zone" +version = "0.1.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +dependencies = [ + "android_system_properties", + 
"core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + [[package]] name = "indexmap" version = "1.9.3" @@ -878,7 +950,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", ] [[package]] @@ -898,9 +980,9 @@ dependencies = [ [[package]] name = "inventory" -version = "0.3.19" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b12ebb6799019b044deaf431eadfe23245b259bba5a2c0796acec3943a3cdb" +checksum = "ab08d7cd2c5897f2c949e5383ea7c7db03fb19130ffcfbf7eda795137ae3cb83" dependencies = [ "rustversion", ] @@ -931,6 +1013,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.14" @@ -1110,9 +1201,15 @@ dependencies = [ [[package]] name = "log" -version = "0.4.25" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + +[[package]] +name = "maplit" +version = "1.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "matrixmultiply" @@ -1225,8 +1322,8 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af5a8477ac96877b5bd1fd67e0c28736c12943aba24eda92b127e036b0c8f400" dependencies = [ - "indexmap", - "itertools", + "indexmap 1.9.3", + "itertools 0.10.5", "ndarray", "noisy_float", "num-integer", @@ -1360,9 +1457,9 @@ dependencies = [ [[package]] name = "numpy" -version = "0.22.1" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edb929bc0da91a4d85ed6c0a84deaa53d411abfb387fc271124f91bf6b89f14e" +checksum = "a7cfbf3f0feededcaa4d289fe3079b03659e85c5b5a177f4ba6fb01ab4fb3e39" dependencies = [ "libc", "ndarray", @@ -1370,6 +1467,7 @@ dependencies = [ "num-integer", "num-traits", "pyo3", + "pyo3-build-config", "rustc-hash", ] @@ -1451,7 +1549,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -1528,9 +1626,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -1550,9 +1648,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.22.6" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f402062616ab18202ae8319da13fa4279883a2b8a9d9f83f20dbade813ce1884" +checksum = "e5203598f366b11a02b13aa20cab591229ff0a89fd121a308a5df751d5fc9219" dependencies = [ "cfg-if", "indoc", @@ -1568,9 +1666,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.22.6" +version = "0.24.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b14b5775b5ff446dd1056212d778012cbe8a0fbffd368029fd9e25b514479c38" +checksum = "99636d423fa2ca130fa5acde3059308006d46f98caac629418e53f7ebb1e9999" dependencies = [ "once_cell", "target-lexicon", @@ -1578,9 +1676,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.22.6" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ab5bcf04a2cdcbb50c7d6105de943f543f9ed92af55818fd17b660390fc8636" +checksum = "78f9cf92ba9c409279bc3305b5409d90db2d2c22392d443a87df3a1adad59e33" dependencies = [ "libc", "pyo3-build-config", @@ -1588,9 +1686,9 @@ dependencies = [ [[package]] name = "pyo3-log" -version = "0.11.0" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ac84e6eec1159bc2a575c9ae6723baa6ee9d45873e9bebad1e3ad7e8d28a443" +checksum = "7079e412e909af5d6be7c04a7f29f6a2837a080410e1c529c9dee2c367383db4" dependencies = [ "arc-swap", "log", @@ -1599,34 +1697,65 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.22.6" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fd24d897903a9e6d80b968368a34e1525aeb719d568dba8b3d4bfa5dc67d453" +checksum = "0b999cb1a6ce21f9a6b147dcf1be9ffedf02e0043aec74dc390f3007047cecd9" dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "pyo3-macros-backend" -version = "0.22.6" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c011a03ba1e50152b4b394b479826cad97e7a21eb52df179cd91ac411cbfbe" +checksum = "822ece1c7e1012745607d5cf0bcb2874769f0f7cb34c4cde03b9358eb9ef911a" dependencies = [ "heck", "proc-macro2", "pyo3-build-config", "quote", - "syn 2.0.98", + "syn 2.0.101", +] + +[[package]] +name = "pyo3-stub-gen" +version = "0.8.2" +source = 
"git+https://github.com/zao111222333/pyo3-stub-gen.git#a87049d1246364ceef0f66273a14b239f39867f9" +dependencies = [ + "anyhow", + "chrono", + "indexmap 2.9.0", + "inventory", + "itertools 0.13.0", + "log", + "maplit", + "num-complex", + "numpy", + "pyo3", + "pyo3-stub-gen-derive", + "serde", + "toml", +] + +[[package]] +name = "pyo3-stub-gen-derive" +version = "0.8.2" +source = "git+https://github.com/zao111222333/pyo3-stub-gen.git#a87049d1246364ceef0f66273a14b239f39867f9" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.101", ] [[package]] name = "quote" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] @@ -1758,9 +1887,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rustc-hash" -version = "1.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustversion" @@ -1806,22 +1935,22 @@ checksum = "b07779b9b918cc05650cb30f404d4d7835d26df37c235eded8a6832e2fb82cca" [[package]] name = "serde" -version = "1.0.217" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.217" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +checksum = 
"5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -1836,6 +1965,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +dependencies = [ + "serde", +] + [[package]] name = "serial_test" version = "3.2.0" @@ -1858,7 +1996,7 @@ checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -1970,9 +2108,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.98" +version = "2.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" +checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" dependencies = [ "proc-macro2", "quote", @@ -1987,9 +2125,9 @@ checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" [[package]] name = "target-lexicon" -version = "0.12.16" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" +checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a" [[package]] name = "term" @@ -2028,7 +2166,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -2039,7 +2177,7 @@ checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -2093,6 +2231,47 @@ dependencies = [ "serde_json", ] +[[package]] +name = "toml" +version = "0.8.21" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "900f6c86a685850b1bc9f6223b20125115ee3f31e01207d81655bbcc0aea9231" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10558ed0bd2a1562e630926a2d1f0b98c827da99fabd3fe20920a59642504485" +dependencies = [ + "indexmap 2.9.0", + "serde", + "serde_spanned", + "toml_datetime", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28391a4201ba7eb1984cfeb6862c0b3ea2cfe23332298967c749dddc0d6cd976" + [[package]] name = "typeid" version = "1.0.2" @@ -2126,7 +2305,7 @@ checksum = "d9d30226ac9cbd2d1ff775f74e8febdab985dab14fb14aa2582c29a92d5555dc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -2197,7 +2376,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", "wasm-bindgen-shared", ] @@ -2219,7 +2398,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -2284,6 +2463,65 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-core" +version = "0.61.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" +dependencies = [ + "windows-implement", + 
"windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", +] + +[[package]] +name = "windows-link" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" + +[[package]] +name = "windows-result" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-sys" version = "0.59.0" @@ -2357,6 +2595,15 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "winnow" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6cb8234a863ea0e8cd7284fcdd4f145233eb00fee02bbdd9861aec44e6477bc5" +dependencies = [ + "memchr", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -2375,7 +2622,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + 
"syn 2.0.101", ] [[package]] diff --git a/python/Cargo.toml b/python/Cargo.toml index 9028793a..3054d983 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -11,7 +11,7 @@ readme = "README.md" [lib] name = "egobox" -crate-type = ["cdylib"] +crate-type = ["cdylib", "rlib"] [features] default = [] @@ -31,9 +31,11 @@ egobox-ego = { version = "0.28.1", path = "../crates/ego", features = [ "persistent", ] } -pyo3 = { version = "0.22", features = ["extension-module"] } -pyo3-log = "0.11" -numpy = "0.22.1" +pyo3 = "0.24" +pyo3-log = "0.12" +pyo3-stub-gen = { git = "https://github.com/zao111222333/pyo3-stub-gen.git", features = ["numpy"] } +# pyo3-stub-gen = { version = "0.8", features = ["numpy"] } +numpy = "0.24.0" linfa.workspace = true ndarray.workspace = true diff --git a/python/egobox/egobox.pyi b/python/egobox/egobox.pyi index 062570a5..af9c2948 100644 --- a/python/egobox/egobox.pyi +++ b/python/egobox/egobox.pyi @@ -1,6 +1,7 @@ # This file is automatically generated by pyo3_stub_gen # ruff: noqa: E501, F401 +import builtins import numpy import numpy.typing import typing @@ -12,25 +13,14 @@ class CorrelationSpec: class Egor: r""" Optimizer constructor - - fun: array[n, nx]) -> array[n, ny] - the function to be minimized - fun(x) = [obj(x), cstr_1(x), ... cstr_k(x)] where - obj is the objective function [n, nx] -> [n, 1] - cstr_i is the ith constraint function [n, nx] -> [n, 1] - an k the number of constraints (n_cstr) - hence ny = 1 (obj) + k (cstrs) - cstr functions are expected be negative (<=0) at the optimum. 
- This constraints will be approximated using surrogates, so - if constraints are cheap to evaluate better to pass them through run(fcstrs=[...]) - n_cstr (int): the number of constraints which will be approximated by surrogates (see `fun` argument) - + cstr_tol (list(n_cstr + n_fcstr,)): List of tolerances for constraints to be satisfied (cstr < tol), list size should be equal to n_cstr + n_fctrs where n_cstr is the `n_cstr` argument and `n_fcstr` the number of constraints passed as functions. + When None, tolerances default to DEFAULT_CSTR_TOL=1e-4. xspecs (list(XSpec)) where XSpec(xtype=FLOAT|INT|ORD|ENUM, xlimits=[] or tags=[strings]): Specifications of the nx components of the input x (eg. len(xspecs) == nx) @@ -70,10 +60,14 @@ class Egor: infill_strategy (InfillStrategy enum) Infill criteria to decide best next promising point. Can be either InfillStrategy.EI, InfillStrategy.WB2 or InfillStrategy.WB2S. - + + cstr_infill (bool) + Activate constrained infill criterion where the product of probability of feasibility of constraints + used as a factor of the infill criterion specified via infill_strategy + cstr_strategy (ConstraintStrategy enum) Constraint management either use the mean value or upper bound - Can be either ConstraintStrategy.MV (default) or ConstraintStrategy.UTB. + Can be either ConstraintStrategy.MeanValue or ConstraintStrategy.UpperTrustedBound. q_infill_strategy (QInfillStrategy enum) Parallel infill criteria (aka qEI) to get virtual next promising points in order to allow @@ -81,16 +75,16 @@ class Egor: Can be either QInfillStrategy.KB (Kriging Believer), QInfillStrategy.KBLB (KB Lower Bound), QInfillStrategy.KBUB (KB Upper Bound), QInfillStrategy.CLMIN (Constant Liar Minimum) - + q_points (int > 0): Number of points to be evaluated to allow parallel evaluation of the function under optimization. 
- + q_optmod (int >= 1) Number of iterations between two surrogate models true training (hypermarameters optimization) - otherwise previous hyperparameters are re-used only when computing q_points to be evaluated in parallel. - The default value is 1 meaning surrogates are properly trained for each q points determination. + otherwise previous hyperparameters are re-used only when computing q_points to be evaluated in parallel. + The default value is 1 meaning surrogates are properly trained for each q points determination. The value is used as a modulo of iteration number * q_points to trigger true training. - This is used to decrease the number of training at the expense of surrogate accuracy. + This is used to decrease the number of training at the expense of surrogate accuracy. infill_optimizer (InfillOptimizer enum) Internal optimizer used to optimize infill criteria. @@ -102,14 +96,14 @@ class Egor: trego (bool) When true, TREGO algorithm is used, otherwise classic EGO algorithm is used. - + coego_n_coop (int >= 0) Number of cooperative components groups which will be used by the CoEGO algorithm. Better to have n_coop a divider of nx or if not with a remainder as large as possible. - The CoEGO algorithm is used to tackle high-dimensional problems turning it in a set of + The CoEGO algorithm is used to tackle high-dimensional problems turning it in a set of partial optimizations using only nx / n_coop components at a time. The default value is 0 meaning that the CoEGO algorithm is not used. - + n_clusters (int) Number of clusters used by the mixture of surrogate experts (default is 1). When set to 0, the number of cluster is determined automatically and refreshed every @@ -117,7 +111,7 @@ class Egor: but it is counted anyway). When set to negative number -n, the number of clusters is determined automatically in [1, n] this is used to limit the number of trials hence the execution time. - + target (float) Known optimum used as stopping criterion. 
@@ -139,32 +133,32 @@ class Egor: seed (int >= 0) Random generator seed to allow computation reproducibility. """ - def __new__(cls,xspecs,n_cstr = ...,cstr_tol = ...,n_start = ...,n_doe = ...,doe = ...,regr_spec = ...,corr_spec = ...,infill_strategy = ...,q_points = ...,q_infill_strategy = ...,q_optmod = ...,infill_optimizer = ...,kpls_dim = ...,trego = ...,n_clusters = ...,target = ...,outdir = ...,warm_start = ...,hot_start = ...,seed = ...): ... - def minimize(self, fun,max_iters = ..., fcstrs = ...) -> OptimResult: + def __new__(cls, xspecs:typing.Any, n_cstr:builtins.int=0, cstr_tol:typing.Optional[typing.Sequence[builtins.float]]=None, n_start:builtins.int=20, n_doe:builtins.int=0, doe:typing.Optional[numpy.typing.NDArray[numpy.float64]]=None, regr_spec:builtins.int=1, corr_spec:builtins.int=1, infill_strategy:InfillStrategy=InfillStrategy.WB2, cstr_infill:builtins.bool=False, cstr_strategy:ConstraintStrategy=ConstraintStrategy.MC, q_points:builtins.int=1, q_infill_strategy:QInfillStrategy=QInfillStrategy.KB, infill_optimizer:InfillOptimizer=InfillOptimizer.COBYLA, kpls_dim:typing.Optional[builtins.int]=None, trego:builtins.bool=False, coego_n_coop:builtins.int=0, n_clusters:builtins.int=1, q_optmod:builtins.int=1, target:builtins.float=-inf, outdir:typing.Optional[builtins.str]=None, warm_start:builtins.bool=False, hot_start:typing.Optional[builtins.int]=None, seed:typing.Optional[builtins.int]=None) -> Egor: ... + def minimize(self, fun:typing.Any, fcstrs:typing.Sequence[typing.Any]=[], max_iters:builtins.int=20) -> OptimResult: r""" This function finds the minimum of a given function `fun` # Parameters - + fun: array[n, nx]) -> array[n, ny] the function to be minimized fun(x) = [obj(x), cstr_1(x), ... 
cstr_k(x)] where - obj is the objective function [n, nx] -> [n, 1] - cstr_i is the ith constraint function [n, nx] -> [n, 1] - an k the number of constraints (n_cstr) - hence ny = 1 (obj) + k (cstrs) + obj is the objective function [n, nx] -> [n, 1] + cstr_i is the ith constraint function [n, nx] -> [n, 1] + an k the number of constraints (n_cstr) + hence ny = 1 (obj) + k (cstrs) cstr functions are expected be negative (<=0) at the optimum. This constraints will be approximated using surrogates, so if constraints are cheap to evaluate better to pass them through run(fcstrs=[...]) - + max_iters: - the iteration budget, number of fun calls is n_doe + q_points * max_iters. - - fcstrs: + the iteration budget, number of fun calls is `n_doe + q_points * max_iters`. + + fcstrs: list of constraints functions defined as g(x, return_grad): (ndarray[nx], bool) -> float or ndarray[nx,] If the given `return_grad` boolean is `False` the function has to return the constraint float value to be made negative by the optimizer (which drives the input array `x`). - Otherwise the function has to return the gradient (ndarray[nx,]) of the constraint funtion + Otherwise the function has to return the gradient (ndarray[nx,]) of the constraint function wrt the `nx` components of `x`. # Returns @@ -172,9 +166,7 @@ class Egor: x_opt (array[1, nx]): x value where fun is at its minimum subject to constraints y_opt (array[1, nx]): fun(x_opt) """ - ... - - def suggest(self, x_doe,y_doe) -> numpy.typing.NDArray[numpy.float64]: + def suggest(self, x_doe:numpy.typing.NDArray[numpy.float64], y_doe:numpy.typing.NDArray[numpy.float64]) -> numpy.typing.NDArray[numpy.float64]: r""" This function gives the next best location where to evaluate the function under optimization wrt to previous evaluations. @@ -188,12 +180,11 @@ class Egor: # Returns (array[1, nx]): suggested location where to evaluate objective and constraints """ - ... 
- - def get_result_index(self, y_doe) -> int: + def get_result_index(self, y_doe:numpy.typing.NDArray[numpy.float64]) -> builtins.int: r""" This function gives the best evaluation index given the outputs of the function (objective wrt constraints) under minimization. + Caveat: This function does not take into account function constraints values # Parameters y_doe (array[ns, 1 + n_cstr]): ns values of objective and constraints @@ -201,12 +192,11 @@ class Egor: # Returns index in y_doe of the best evaluation """ - ... - - def get_result(self, x_doe,y_doe) -> OptimResult: + def get_result(self, x_doe:numpy.typing.NDArray[numpy.float64], y_doe:numpy.typing.NDArray[numpy.float64]) -> OptimResult: r""" This function gives the best result given inputs and outputs of the function (objective wrt constraints) under minimization. + Caveat: This function does not take into account function constraints values # Parameters x_doe (array[ns, nx]): ns samples where function has been evaluated @@ -217,22 +207,23 @@ class Egor: x_opt (array[1, nx]): x value where fun is at its minimum subject to constraints y_opt (array[1, nx]): fun(x_opt) """ - ... - class ExpectedOptimum: - val: float - tol: float + val: builtins.float + tol: builtins.float + def __new__(cls, value:builtins.float, tolerance:builtins.float=1e-06) -> ExpectedOptimum: ... class GpMix: r""" Gaussian processes mixture builder - n_clusters (int >= 0) - Number of clusters used by the mixture of surrogate experts. + n_clusters (int) + Number of clusters used by the mixture of surrogate experts (default is 1). When set to 0, the number of cluster is determined automatically and refreshed every 10-points addition (should say 'tentative addition' because addition may fail for some points - but failures are counted anyway). + but it is counted anyway). + When set to negative number -n, the number of clusters is determined automatically in [1, n] + this is used to limit the number of trials hence the execution time. 
regr_spec (RegressionSpec flags, an int in [1, 7]): Specification of regression models used in mixture. @@ -271,7 +262,7 @@ class GpMix: seed (int >= 0) Random generator seed to allow computation reproducibility. """ - def __new__(cls,n_clusters = ...,regr_spec = ...,corr_spec = ...,recombination = ...,theta_init = ...,theta_bounds = ...,kpls_dim = ...,n_start = ...,seed = ...): ... + def __new__(cls, n_clusters:builtins.int=1, regr_spec:builtins.int=1, corr_spec:builtins.int=1, recombination:Recombination=Recombination.HARD, theta_init:typing.Optional[typing.Sequence[builtins.float]]=None, theta_bounds:typing.Optional[typing.Sequence[typing.Sequence[builtins.float]]]=None, kpls_dim:typing.Optional[builtins.int]=None, n_start:builtins.int=10, seed:typing.Optional[builtins.int]=None) -> GpMix: ... def fit(self, xt:numpy.typing.NDArray[numpy.float64], yt:numpy.typing.NDArray[numpy.float64]) -> Gpx: r""" Fit the parameters of the model using the training dataset to build a trained model @@ -281,37 +272,29 @@ class GpMix: yt (array[nsamples, 1]): output samples Returns Gpx object - the fitted Gaussian process mixture + the fitted Gaussian process mixture """ - ... - class Gpx: r""" A trained Gaussian processes mixture """ @staticmethod - def builder(n_clusters = ...,regr_spec = ...,corr_spec = ...,recombination = ...,theta_init = ...,theta_bounds = ...,kpls_dim = ...,n_start = ...,seed = ...) -> GpMix: + def builder(n_clusters:builtins.int=1, regr_spec:builtins.int=1, corr_spec:builtins.int=1, recombination:Recombination=Recombination.SMOOTH, theta_init:typing.Optional[typing.Sequence[builtins.float]]=None, theta_bounds:typing.Optional[typing.Sequence[typing.Sequence[builtins.float]]]=None, kpls_dim:typing.Optional[builtins.int]=None, n_start:builtins.int=10, seed:typing.Optional[builtins.int]=None) -> GpMix: r""" Get Gaussian processes mixture builder aka `GpMix` See `GpMix` constructor """ - ... 
- - def __repr__(self) -> str: + def __repr__(self) -> builtins.str: r""" Returns the String representation from serde json serializer """ - ... - - def __str__(self) -> str: + def __str__(self) -> builtins.str: r""" Returns a String informal representation """ - ... - - def save(self, filename:str) -> bool: + def save(self, filename:builtins.str) -> builtins.bool: r""" Save Gaussian processes mixture in a file. If the filename has .json JSON human readable format is used @@ -323,10 +306,8 @@ class Gpx: Returns True if save succeeds otherwise False """ - ... - @staticmethod - def load(filename:str) -> Gpx: + def load(filename:builtins.str) -> Gpx: r""" Load Gaussian processes mixture from file. @@ -334,8 +315,6 @@ class Gpx: filename (string) json filepath generated by saving a trained Gaussian processes mixture """ - ... - def predict(self, x:numpy.typing.NDArray[numpy.float64]) -> numpy.typing.NDArray[numpy.float64]: r""" Predict output values at nsamples points. @@ -347,8 +326,6 @@ class Gpx: Returns the output values at nsamples x points (array[nsamples, 1]) """ - ... - def predict_var(self, x:numpy.typing.NDArray[numpy.float64]) -> numpy.typing.NDArray[numpy.float64]: r""" Predict variances at nsample points. @@ -360,8 +337,6 @@ class Gpx: Returns the variances of the output values at nsamples input points (array[nsamples, 1]) """ - ... - def predict_gradients(self, x:numpy.typing.NDArray[numpy.float64]) -> numpy.typing.NDArray[numpy.float64]: r""" Predict surrogate output derivatives at nsamples points. @@ -374,8 +349,6 @@ class Gpx: the output derivatives at nsamples x points (array[nsamples, nx]) wrt inputs The ith column is the partial derivative value wrt to the ith component of x at the given samples. """ - ... - def predict_var_gradients(self, x:numpy.typing.NDArray[numpy.float64]) -> numpy.typing.NDArray[numpy.float64]: r""" Predict variance derivatives at nsamples points. 
@@ -388,9 +361,7 @@ class Gpx: the variance derivatives at nsamples x points (array[nsamples, nx]) wrt inputs The ith column is the partial derivative value wrt to the ith component of x at the given samples. """ - ... - - def sample(self, x:numpy.typing.NDArray[numpy.float64], n_traj:int) -> numpy.typing.NDArray[numpy.float64]: + def sample(self, x:numpy.typing.NDArray[numpy.float64], n_traj:builtins.int) -> numpy.typing.NDArray[numpy.float64]: r""" Sample gaussian process trajectories. @@ -402,17 +373,13 @@ class Gpx: Returns the trajectories as an array[nsamples, n_traj] """ - ... - - def dims(self) -> tuple[int, int]: + def dims(self) -> tuple[builtins.int, builtins.int]: r""" Get the input and output dimensions of the surrogate Returns the couple (nx, ny) """ - ... - def training_data(self) -> tuple[numpy.typing.NDArray[numpy.float64], numpy.typing.NDArray[numpy.float64]]: r""" Get the nt training data points used to fit the surrogate @@ -420,8 +387,6 @@ class Gpx: Returns the couple (ndarray[nt, nx], ndarray[nt,]) """ - ... - def thetas(self) -> numpy.typing.NDArray[numpy.float64]: r""" Get optimized thetas hyperparameters (ie once GP experts are fitted) @@ -429,8 +394,6 @@ class Gpx: Returns thetas as an array[n_clusters, nx or kpls_dim] """ - ... - def variances(self) -> numpy.typing.NDArray[numpy.float64]: r""" Get GP expert variance (ie posterior GP variance) @@ -438,8 +401,6 @@ class Gpx: Returns variances as an array[n_clusters] """ - ... - def likelihoods(self) -> numpy.typing.NDArray[numpy.float64]: r""" Get reduced likelihood values gotten when fitting the GP experts @@ -449,8 +410,6 @@ class Gpx: Returns likelihood as an array[n_clusters] """ - ... - class OptimResult: x_opt: numpy.typing.NDArray[numpy.float64] @@ -465,13 +424,11 @@ class SparseGpMix: r""" Sparse Gaussian processes mixture builder - n_clusters (int) - Number of clusters used by the mixture of surrogate experts (default is 1). 
+ n_clusters (int >= 0) + Number of clusters used by the mixture of surrogate experts. When set to 0, the number of cluster is determined automatically and refreshed every 10-points addition (should say 'tentative addition' because addition may fail for some points - but it is counted anyway). - When set to negative number -n, the number of clusters is determined automatically in [1, n] - this is used to limit the number of trials hence the execution time. + but failures are counted anyway). corr_spec (CorrelationSpec flags, an int in [1, 15]): Specification of correlation models used in mixture. @@ -500,7 +457,7 @@ class SparseGpMix: seed (int >= 0) Random generator seed to allow computation reproducibility. """ - def __new__(cls,corr_spec = ...,theta_init = ...,theta_bounds = ...,kpls_dim = ...,n_start = ...,nz = ...,z = ...,method = ...,seed = ...): ... + def __new__(cls, corr_spec:builtins.int=1, theta_init:typing.Optional[typing.Sequence[builtins.float]]=None, theta_bounds:typing.Optional[typing.Sequence[typing.Sequence[builtins.float]]]=None, kpls_dim:typing.Optional[builtins.int]=None, n_start:builtins.int=10, nz:typing.Optional[builtins.int]=None, z:typing.Optional[numpy.typing.NDArray[numpy.float64]]=None, method:SparseMethod=SparseMethod.FITC, seed:typing.Optional[builtins.int]=None) -> SparseGpMix: ... def fit(self, xt:numpy.typing.NDArray[numpy.float64], yt:numpy.typing.NDArray[numpy.float64]) -> SparseGpx: r""" Fit the parameters of the model using the training dataset to build a trained model @@ -510,37 +467,29 @@ class SparseGpMix: yt (array[nsamples, 1]): output samples Returns Sgp object - the fitted Gaussian process mixture + the fitted Gaussian process mixture """ - ... - class SparseGpx: r""" A trained Gaussian processes mixture """ @staticmethod - def builder(corr_spec = ...,theta_init = ...,theta_bounds = ...,kpls_dim = ...,n_start = ...,nz = ...,z = ...,method = ...,seed = ...) 
-> SparseGpMix: + def builder(corr_spec:builtins.int=1, theta_init:typing.Optional[typing.Sequence[builtins.float]]=None, theta_bounds:typing.Optional[typing.Sequence[typing.Sequence[builtins.float]]]=None, kpls_dim:typing.Optional[builtins.int]=None, n_start:builtins.int=10, nz:typing.Optional[builtins.int]=None, z:typing.Optional[numpy.typing.NDArray[numpy.float64]]=None, method:SparseMethod=SparseMethod.FITC, seed:typing.Optional[builtins.int]=None) -> SparseGpMix: r""" Get Gaussian processes mixture builder aka `GpSparse` See `GpSparse` constructor """ - ... - - def __repr__(self) -> str: + def __repr__(self) -> builtins.str: r""" Returns the String representation from serde json serializer """ - ... - - def __str__(self) -> str: + def __str__(self) -> builtins.str: r""" Returns a String informal representation """ - ... - - def save(self, filename:str) -> bool: + def save(self, filename:builtins.str) -> builtins.bool: r""" Save Gaussian processes mixture in a file. If the filename has .json JSON human readable format is used @@ -552,10 +501,8 @@ class SparseGpx: Returns True if save succeeds otherwise False """ - ... - @staticmethod - def load(filename:str) -> SparseGpx: + def load(filename:builtins.str) -> SparseGpx: r""" Load Gaussian processes mixture from a json file. @@ -563,8 +510,6 @@ class SparseGpx: filename (string) json filepath generated by saving a trained Gaussian processes mixture """ - ... - def predict(self, x:numpy.typing.NDArray[numpy.float64]) -> numpy.typing.NDArray[numpy.float64]: r""" Predict output values at nsamples points. @@ -576,8 +521,6 @@ class SparseGpx: Returns the output values at nsamples x points (array[nsamples]) """ - ... - def predict_var(self, x:numpy.typing.NDArray[numpy.float64]) -> numpy.typing.NDArray[numpy.float64]: r""" Predict variances at nsample points. @@ -589,8 +532,6 @@ class SparseGpx: Returns the variances of the output values at nsamples input points (array[nsamples, 1]) """ - ... 
- def predict_gradients(self, x:numpy.typing.NDArray[numpy.float64]) -> numpy.typing.NDArray[numpy.float64]: r""" Predict surrogate output derivatives at nsamples points. @@ -606,8 +547,6 @@ class SparseGpx: the output derivatives at nsamples x points (array[nsamples, nx]) wrt inputs The ith column is the partial derivative value wrt to the ith component of x at the given samples. """ - ... - def predict_var_gradients(self, x:numpy.typing.NDArray[numpy.float64]) -> numpy.typing.NDArray[numpy.float64]: r""" Predict variance derivatives at nsamples points. @@ -623,9 +562,7 @@ class SparseGpx: the variance derivatives at nsamples x points (array[nsamples, nx]) wrt inputs The ith column is the partial derivative value wrt to the ith component of x at the given samples. """ - ... - - def sample(self, x:numpy.typing.NDArray[numpy.float64], n_traj:int) -> numpy.typing.NDArray[numpy.float64]: + def sample(self, x:numpy.typing.NDArray[numpy.float64], n_traj:builtins.int) -> numpy.typing.NDArray[numpy.float64]: r""" Sample gaussian process trajectories. @@ -637,8 +574,6 @@ class SparseGpx: Returns the trajectories as an array[nsamples, n_traj] """ - ... - def thetas(self) -> numpy.typing.NDArray[numpy.float64]: r""" Get optimized thetas hyperparameters (ie once GP experts are fitted) @@ -646,8 +581,6 @@ class SparseGpx: Returns thetas as an array[n_clusters, nx or kpls_dim] """ - ... - def variances(self) -> numpy.typing.NDArray[numpy.float64]: r""" Get GP expert variance (ie posterior GP variance) @@ -655,8 +588,6 @@ class SparseGpx: Returns variances as an array[n_clusters] """ - ... - def likelihoods(self) -> numpy.typing.NDArray[numpy.float64]: r""" Get reduced likelihood values gotten when fitting the GP experts @@ -666,58 +597,65 @@ class SparseGpx: Returns likelihood as an array[n_clusters] """ - ... - class XSpec: xtype: XType - xlimits: list[float] - tags: list[str] - def __new__(cls,xtype,xlimits = ...,tags = ...): ... 
+ xlimits: builtins.list[builtins.float] + tags: builtins.list[builtins.str] + def __new__(cls, xtype:XType, xlimits:typing.Sequence[builtins.float]=[], tags:typing.Sequence[builtins.str]=[]) -> XSpec: ... + +class ConstraintStrategy(Enum): + MC = auto() + UTB = auto() class InfillOptimizer(Enum): - Cobyla = auto() - Slsqp = auto() + COBYLA = auto() + SLSQP = auto() class InfillStrategy(Enum): - Ei = auto() - Wb2 = auto() - Wb2s = auto() - -class ConstraintStrategy(Enum): - Mv = auto() - Utb = auto() + EI = auto() + WB2 = auto() + WB2S = auto() class QInfillStrategy(Enum): - Kb = auto() - Kblb = auto() - Kbub = auto() - Clmin = auto() + KB = auto() + KBLB = auto() + KBUB = auto() + CLMIN = auto() class Recombination(Enum): - Hard = auto() - Smooth = auto() + HARD = auto() + r""" + prediction is taken from the expert with highest responsibility + resulting in a model with discontinuities + """ + SMOOTH = auto() + r""" + Prediction is a combination of experts' predictions wrt their responsibilities, + an optional heaviside factor might be used to control steepness of the change between + experts regions. + """ class Sampling(Enum): - Lhs = auto() - FullFactorial = auto() - Random = auto() - LhsClassic = auto() - LhsCentered = auto() - LhsMaximin = auto() - LhsCenteredMaximin = auto() + LHS = auto() + FULL_FACTORIAL = auto() + RANDOM = auto() + LHS_CLASSIC = auto() + LHS_CENTERED = auto() + LHS_MAXIMIN = auto() + LHS_CENTERED_MAXIMIN = auto() class SparseMethod(Enum): - Fitc = auto() - Vfe = auto() + FITC = auto() + VFE = auto() class XType(Enum): - Float = auto() - Int = auto() - Ord = auto() - Enum = auto() + FLOAT = auto() + INT = auto() + ORD = auto() + ENUM = auto() -def lhs(xspecs,n_samples,seed = ...) 
-> numpy.typing.NDArray[numpy.float64]: +def lhs(xspecs:typing.Any, n_samples:builtins.int, seed:typing.Optional[builtins.int]=None) -> numpy.typing.NDArray[numpy.float64]: r""" Samples generation using optimized Latin Hypercube Sampling @@ -729,9 +667,8 @@ def lhs(xspecs,n_samples,seed = ...) -> numpy.typing.NDArray[numpy.float64]: # Returns ndarray of shape (n_samples, n_variables) """ - ... -def sampling(method,xspecs,n_samples,seed = ...) -> numpy.typing.NDArray[numpy.float64]: +def sampling(method:Sampling, xspecs:typing.Any, n_samples:builtins.int, seed:typing.Optional[builtins.int]=None) -> numpy.typing.NDArray[numpy.float64]: r""" Samples generation using given method @@ -744,9 +681,8 @@ def sampling(method,xspecs,n_samples,seed = ...) -> numpy.typing.NDArray[numpy.f # Returns ndarray of shape (n_samples, n_variables) """ - ... -def to_specs(xlimits:typing.Sequence[typing.Sequence[float]]) -> typing.Any: +def to_specs(xlimits:typing.Sequence[typing.Sequence[builtins.float]]) -> typing.Any: r""" Utility function converting `xlimits` float data list specifying bounds of x components to x specified as a list of XType.Float types [egobox.XType] @@ -757,5 +693,4 @@ def to_specs(xlimits:typing.Sequence[typing.Sequence[float]]) -> typing.Any: # Returns xtypes: nx-size list of XSpec(XType(FLOAT), [lower_bound, upper_bounds]) where `nx` is the dimension of x """ - ... 
diff --git a/python/pyproject.toml b/python/pyproject.toml index a053bf43..047f0563 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -21,6 +21,8 @@ build-backend = "maturin" requires = ["maturin>=1.0, <2.0"] [tool.maturin] +python-source = "egobox" +module-name = "egobox" features = ["pyo3/extension-module"] # Optional usage of BLAS backend # cargo-extra-args = "--features linfa/intel-mkl-static" diff --git a/python/src/bin/stub_gen.rs b/python/src/bin/stub_gen.rs new file mode 100644 index 00000000..6b8b8e0f --- /dev/null +++ b/python/src/bin/stub_gen.rs @@ -0,0 +1,8 @@ +use pyo3_stub_gen::Result; + +fn main() -> Result<()> { + env_logger::Builder::from_env(env_logger::Env::default().filter_or("RUST_LOG", "info")).init(); + let stub = egobox::stub_info()?; + stub.generate()?; + Ok(()) +} diff --git a/python/src/egor.rs b/python/src/egor.rs index fb6facc1..69aaf963 100644 --- a/python/src/egor.rs +++ b/python/src/egor.rs @@ -20,6 +20,7 @@ use ndarray::{concatenate, Array1, Array2, ArrayView2, Axis}; use numpy::{IntoPyArray, PyArray1, PyArray2, PyArrayMethods, PyReadonlyArray2, ToPyArray}; use pyo3::exceptions::PyValueError; use pyo3::prelude::*; +use pyo3_stub_gen::derive::{gen_stub_pyclass, gen_stub_pyfunction, gen_stub_pymethods}; /// Utility function converting `xlimits` float data list specifying bounds of x components /// to x specified as a list of XType.Float types [egobox.XType] @@ -29,17 +30,18 @@ use pyo3::prelude::*; /// /// # Returns /// xtypes: nx-size list of XSpec(XType(FLOAT), [lower_bound, upper_bounds]) where `nx` is the dimension of x +#[gen_stub_pyfunction] #[pyfunction] -pub(crate) fn to_specs(py: Python, xlimits: Vec>) -> PyResult { +pub(crate) fn to_specs(py: Python, xlimits: Vec>) -> PyResult> { if xlimits.is_empty() || xlimits[0].is_empty() { let err = "Error: xspecs argument cannot be empty"; return Err(PyValueError::new_err(err.to_string())); } - Ok(xlimits + xlimits .iter() .map(|xlimit| XSpec::new(XType::Float, 
xlimit.clone(), vec![])) .collect::>() - .into_py(py)) + .into_pyobject(py) } /// Optimizer constructor @@ -163,6 +165,7 @@ pub(crate) fn to_specs(py: Python, xlimits: Vec>) -> PyResult /// seed (int >= 0) /// Random generator seed to allow computation reproducibility. /// +#[gen_stub_pyclass] #[pyclass] pub(crate) struct Egor { pub xspecs: PyObject, @@ -191,6 +194,7 @@ pub(crate) struct Egor { pub seed: Option, } +#[gen_stub_pyclass] #[pyclass] pub(crate) struct OptimResult { #[pyo3(get)] @@ -203,6 +207,7 @@ pub(crate) struct OptimResult { y_doe: Py>, } +#[gen_stub_pymethods] #[pymethods] impl Egor { #[new] @@ -336,7 +341,7 @@ impl Egor { ) -> PyResult { let obj = |x: &ArrayView2| -> Array2 { Python::with_gil(|py| { - let args = (x.to_owned().into_pyarray_bound(py),); + let args = (x.to_owned().into_pyarray(py),); let res = fun.bind(py).call1(args).unwrap(); let pyarray = res.downcast_into::>().unwrap(); pyarray.to_owned_array() @@ -350,12 +355,12 @@ impl Egor { let cstr = |x: &[f64], g: Option<&mut [f64]>, _u: &mut InfillObjData| -> f64 { Python::with_gil(|py| { if let Some(g) = g { - let args = (Array1::from(x.to_vec()).into_pyarray_bound(py), true); + let args = (Array1::from(x.to_vec()).into_pyarray(py), true); let grad = cstr.bind(py).call1(args).unwrap(); let grad = grad.downcast_into::>().unwrap().readonly(); g.copy_from_slice(grad.as_slice().unwrap()) } - let args = (Array1::from(x.to_vec()).into_pyarray_bound(py), false); + let args = (Array1::from(x.to_vec()).into_pyarray(py), false); let res = cstr.bind(py).call1(args).unwrap().extract().unwrap(); res }) @@ -378,10 +383,10 @@ impl Egor { .run() .expect("Egor should optimize the objective function") }); - let x_opt = res.x_opt.into_pyarray_bound(py).to_owned(); - let y_opt = res.y_opt.into_pyarray_bound(py).to_owned(); - let x_doe = res.x_doe.into_pyarray_bound(py).to_owned(); - let y_doe = res.y_doe.into_pyarray_bound(py).to_owned(); + let x_opt = res.x_opt.into_pyarray(py).to_owned(); + let y_opt = 
res.y_opt.into_pyarray(py).to_owned(); + let x_doe = res.x_doe.into_pyarray(py).to_owned(); + let y_doe = res.y_doe.into_pyarray(py).to_owned(); Ok(OptimResult { x_opt: x_opt.into(), y_opt: y_opt.into(), @@ -419,7 +424,7 @@ impl Egor { .min_within_mixint_space(&xtypes); let x_suggested = py.allow_threads(|| mixintegor.suggest(&x_doe, &y_doe)); - x_suggested.to_pyarray_bound(py).into() + x_suggested.to_pyarray(py).into() } /// This function gives the best evaluation index given the outputs @@ -467,10 +472,10 @@ impl Egor { let n_fcstrs = 0; let c_doe = Array2::zeros((y_doe.ncols(), n_fcstrs)); let idx = find_best_result_index(&y_doe, &c_doe, &self.cstr_tol(n_fcstrs)); - let x_opt = x_doe.row(idx).to_pyarray_bound(py).into(); - let y_opt = y_doe.row(idx).to_pyarray_bound(py).into(); - let x_doe = x_doe.to_pyarray_bound(py).into(); - let y_doe = y_doe.to_pyarray_bound(py).into(); + let x_opt = x_doe.row(idx).to_pyarray(py).into(); + let y_opt = y_doe.row(idx).to_pyarray(py).into(); + let x_doe = x_doe.to_pyarray(py).into(); + let y_doe = y_doe.to_pyarray(py).into(); OptimResult { x_opt, y_opt, diff --git a/python/src/gp_mix.rs b/python/src/gp_mix.rs index 0a4c9889..afe5d598 100644 --- a/python/src/gp_mix.rs +++ b/python/src/gp_mix.rs @@ -22,6 +22,7 @@ use ndarray::{array, Array1, Array2, Axis, Ix1, Ix2, Zip}; use ndarray_rand::rand::SeedableRng; use numpy::{IntoPyArray, PyArray1, PyArray2, PyReadonlyArray2, PyReadonlyArrayDyn}; use pyo3::prelude::*; +use pyo3_stub_gen::derive::{gen_stub_pyclass, gen_stub_pymethods}; use rand_xoshiro::Xoshiro256Plus; /// Gaussian processes mixture builder @@ -71,6 +72,7 @@ use rand_xoshiro::Xoshiro256Plus; /// seed (int >= 0) /// Random generator seed to allow computation reproducibility. 
/// +#[gen_stub_pyclass] #[pyclass] pub(crate) struct GpMix { pub n_clusters: NbClusters, @@ -84,6 +86,7 @@ pub(crate) struct GpMix { pub seed: Option, } +#[gen_stub_pymethods] #[pymethods] impl GpMix { #[new] @@ -226,9 +229,11 @@ impl GpMix { } /// A trained Gaussian processes mixture +#[gen_stub_pyclass] #[pyclass] pub(crate) struct Gpx(Box); +#[gen_stub_pymethods] #[pymethods] impl Gpx { /// Get Gaussian processes mixture builder aka `GpMix` @@ -328,7 +333,7 @@ impl Gpx { .predict(&x.as_array()) .unwrap() .insert_axis(Axis(1)) - .into_pyarray_bound(py) + .into_pyarray(py) } /// Predict variances at nsample points. @@ -345,10 +350,7 @@ impl Gpx { py: Python<'py>, x: PyReadonlyArray2, ) -> Bound<'py, PyArray2> { - self.0 - .predict_var(&x.as_array()) - .unwrap() - .into_pyarray_bound(py) + self.0.predict_var(&x.as_array()).unwrap().into_pyarray(py) } /// Predict surrogate output derivatives at nsamples points. @@ -369,7 +371,7 @@ impl Gpx { self.0 .predict_gradients(&x.as_array()) .unwrap() - .into_pyarray_bound(py) + .into_pyarray(py) } /// Predict variance derivatives at nsamples points. @@ -390,7 +392,7 @@ impl Gpx { self.0 .predict_var_gradients(&x.as_array()) .unwrap() - .into_pyarray_bound(py) + .into_pyarray(py) } /// Sample gaussian process trajectories. 
@@ -412,7 +414,7 @@ impl Gpx { self.0 .sample(&x.as_array(), n_traj) .unwrap() - .into_pyarray_bound(py) + .into_pyarray(py) } /// Get the input and output dimensions of the surrogate @@ -435,8 +437,8 @@ impl Gpx { ) -> (Bound<'py, PyArray2>, Bound<'py, PyArray1>) { let (xdata, ydata) = self.0.training_data(); ( - xdata.to_owned().into_pyarray_bound(py), - ydata.to_owned().into_pyarray_bound(py), + xdata.to_owned().into_pyarray(py), + ydata.to_owned().into_pyarray(py), ) } @@ -452,7 +454,7 @@ impl Gpx { Zip::from(thetas.rows_mut()) .and(experts) .for_each(|mut theta, expert| theta.assign(expert.theta())); - thetas.into_pyarray_bound(py) + thetas.into_pyarray(py) } /// Get GP expert variance (ie posterior GP variance) @@ -466,7 +468,7 @@ impl Gpx { Zip::from(&mut variances) .and(experts) .for_each(|var, expert| *var = expert.variance()); - variances.into_pyarray_bound(py) + variances.into_pyarray(py) } /// Get reduced likelihood values gotten when fitting the GP experts @@ -482,6 +484,6 @@ impl Gpx { Zip::from(&mut likelihoods) .and(experts) .for_each(|lkh, expert| *lkh = expert.likelihood()); - likelihoods.into_pyarray_bound(py) + likelihoods.into_pyarray(py) } } diff --git a/python/src/lib.rs b/python/src/lib.rs index 61edcca6..846dd1a7 100644 --- a/python/src/lib.rs +++ b/python/src/lib.rs @@ -56,3 +56,5 @@ fn egobox(_py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> { Ok(()) } + +pyo3_stub_gen::define_stub_info_gatherer!(stub_info); diff --git a/python/src/sampling.rs b/python/src/sampling.rs index 0be59a43..e17827e4 100644 --- a/python/src/sampling.rs +++ b/python/src/sampling.rs @@ -3,7 +3,9 @@ use egobox_doe::{LhsKind, SamplingMethod}; use egobox_ego::gpmix::mixint::MixintContext; use numpy::{IntoPyArray, PyArray2}; use pyo3::prelude::*; +use pyo3_stub_gen::derive::{gen_stub_pyclass_enum, gen_stub_pyfunction}; +#[gen_stub_pyclass_enum] #[pyclass(eq, eq_int, rename_all = "SCREAMING_SNAKE_CASE")] #[derive(Debug, Clone, Copy, PartialEq)] pub enum Sampling { 
@@ -27,6 +29,7 @@ pub enum Sampling { /// # Returns /// ndarray of shape (n_samples, n_variables) /// +#[gen_stub_pyfunction] #[pyfunction] #[pyo3(signature = (method, xspecs, n_samples, seed=None))] pub fn sampling( @@ -76,7 +79,7 @@ pub fn sampling( } } .sample(n_samples); - doe.into_pyarray_bound(py) + doe.into_pyarray(py) } /// Samples generation using optimized Latin Hypercube Sampling @@ -89,6 +92,7 @@ pub fn sampling( /// # Returns /// ndarray of shape (n_samples, n_variables) /// +#[gen_stub_pyfunction] #[pyfunction] #[pyo3(signature = (xspecs, n_samples, seed=None))] pub(crate) fn lhs( diff --git a/python/src/sparse_gp_mix.rs b/python/src/sparse_gp_mix.rs index 43191599..e4af66cf 100644 --- a/python/src/sparse_gp_mix.rs +++ b/python/src/sparse_gp_mix.rs @@ -21,6 +21,7 @@ use ndarray::{array, Array1, Array2, Axis, Ix1, Ix2, Zip}; use ndarray_rand::rand::SeedableRng; use numpy::{IntoPyArray, PyArray1, PyArray2, PyReadonlyArray2}; use pyo3::prelude::*; +use pyo3_stub_gen::derive::{gen_stub_pyclass, gen_stub_pymethods}; use rand_xoshiro::Xoshiro256Plus; /// Sparse Gaussian processes mixture builder @@ -58,6 +59,7 @@ use rand_xoshiro::Xoshiro256Plus; /// seed (int >= 0) /// Random generator seed to allow computation reproducibility. 
/// +#[gen_stub_pyclass] #[pyclass] pub(crate) struct SparseGpMix { pub correlation_spec: CorrelationSpec, @@ -71,6 +73,7 @@ pub(crate) struct SparseGpMix { pub seed: Option, } +#[gen_stub_pymethods] #[pymethods] impl SparseGpMix { #[new] @@ -216,9 +219,11 @@ impl SparseGpMix { } /// A trained Gaussian processes mixture +#[gen_stub_pyclass] #[pyclass] pub(crate) struct SparseGpx(Box); +#[gen_stub_pymethods] #[pymethods] impl SparseGpx { /// Get Gaussian processes mixture builder aka `GpSparse` @@ -314,10 +319,7 @@ impl SparseGpx { /// the output values at nsamples x points (array[nsamples]) /// fn predict<'py>(&self, py: Python<'py>, x: PyReadonlyArray2) -> Bound<'py, PyArray1> { - self.0 - .predict(&x.as_array()) - .unwrap() - .into_pyarray_bound(py) + self.0.predict(&x.as_array()).unwrap().into_pyarray(py) } /// Predict variances at nsample points. @@ -337,7 +339,7 @@ impl SparseGpx { self.0 .predict_var(&x.as_array().to_owned()) .unwrap() - .into_pyarray_bound(py) + .into_pyarray(py) } /// Predict surrogate output derivatives at nsamples points. @@ -361,7 +363,7 @@ impl SparseGpx { self.0 .predict_gradients(&x.as_array()) .unwrap() - .into_pyarray_bound(py) + .into_pyarray(py) } /// Predict variance derivatives at nsamples points. @@ -385,7 +387,7 @@ impl SparseGpx { self.0 .predict_var_gradients(&x.as_array()) .unwrap() - .into_pyarray_bound(py) + .into_pyarray(py) } /// Sample gaussian process trajectories. 
@@ -407,7 +409,7 @@ impl SparseGpx { self.0 .sample(&x.as_array(), n_traj) .unwrap() - .into_pyarray_bound(py) + .into_pyarray(py) } /// Get optimized thetas hyperparameters (ie once GP experts are fitted) @@ -422,7 +424,7 @@ impl SparseGpx { Zip::from(thetas.rows_mut()) .and(experts) .for_each(|mut theta, expert| theta.assign(expert.theta())); - thetas.into_pyarray_bound(py) + thetas.into_pyarray(py) } /// Get GP expert variance (ie posterior GP variance) @@ -436,7 +438,7 @@ impl SparseGpx { Zip::from(&mut variances) .and(experts) .for_each(|var, expert| *var = expert.variance()); - variances.into_pyarray_bound(py) + variances.into_pyarray(py) } /// Get reduced likelihood values gotten when fitting the GP experts @@ -452,6 +454,6 @@ impl SparseGpx { Zip::from(&mut likelihoods) .and(experts) .for_each(|lkh, expert| *lkh = expert.likelihood()); - likelihoods.into_pyarray_bound(py) + likelihoods.into_pyarray(py) } } diff --git a/python/src/types.rs b/python/src/types.rs index d0c879c0..93af9e20 100644 --- a/python/src/types.rs +++ b/python/src/types.rs @@ -1,5 +1,7 @@ use pyo3::prelude::*; +use pyo3_stub_gen::derive::{gen_stub_pyclass, gen_stub_pyclass_enum, gen_stub_pymethods}; +#[gen_stub_pyclass_enum] #[pyclass(eq, eq_int, rename_all = "UPPERCASE")] #[derive(Debug, Clone, PartialEq)] pub enum Recombination { @@ -12,10 +14,12 @@ pub enum Recombination { Smooth = 1, } +#[gen_stub_pyclass] #[pyclass] #[derive(Clone)] pub(crate) struct RegressionSpec(pub(crate) u8); +#[gen_stub_pymethods] #[pymethods] impl RegressionSpec { #[classattr] @@ -28,10 +32,12 @@ impl RegressionSpec { pub(crate) const QUADRATIC: u8 = egobox_moe::RegressionSpec::QUADRATIC.bits(); } +#[gen_stub_pyclass] #[pyclass] #[derive(Clone)] pub(crate) struct CorrelationSpec(pub(crate) u8); +#[gen_stub_pymethods] #[pymethods] impl CorrelationSpec { #[classattr] @@ -48,6 +54,7 @@ impl CorrelationSpec { pub(crate) const MATERN52: u8 = egobox_moe::CorrelationSpec::MATERN52.bits(); } +#[gen_stub_pyclass_enum] 
#[pyclass(eq, eq_int, rename_all = "UPPERCASE")] #[derive(Debug, Clone, Copy, PartialEq)] pub(crate) enum InfillStrategy { @@ -56,6 +63,7 @@ pub(crate) enum InfillStrategy { Wb2s = 3, } +#[gen_stub_pyclass_enum] #[pyclass(eq, eq_int, rename_all = "UPPERCASE")] #[derive(Debug, Clone, Copy, PartialEq)] pub(crate) enum ConstraintStrategy { @@ -63,6 +71,7 @@ pub(crate) enum ConstraintStrategy { Utb = 2, } +#[gen_stub_pyclass_enum] #[pyclass(eq, eq_int, rename_all = "UPPERCASE")] #[derive(Debug, Clone, Copy, PartialEq)] pub(crate) enum QInfillStrategy { @@ -72,6 +81,7 @@ pub(crate) enum QInfillStrategy { Clmin = 4, } +#[gen_stub_pyclass_enum] #[pyclass(eq, eq_int, rename_all = "UPPERCASE")] #[derive(Debug, Clone, Copy, PartialEq)] pub(crate) enum InfillOptimizer { @@ -79,6 +89,7 @@ pub(crate) enum InfillOptimizer { Slsqp = 2, } +#[gen_stub_pyclass] #[pyclass] #[derive(Clone, Copy)] pub(crate) struct ExpectedOptimum { @@ -88,6 +99,7 @@ pub(crate) struct ExpectedOptimum { pub(crate) tol: f64, } +#[gen_stub_pymethods] #[pymethods] impl ExpectedOptimum { #[new] @@ -100,6 +112,7 @@ impl ExpectedOptimum { } } +#[gen_stub_pyclass_enum] #[pyclass(eq, eq_int, rename_all = "UPPERCASE")] #[derive(Clone, Copy, Debug, PartialEq)] pub(crate) enum XType { @@ -109,6 +122,7 @@ pub(crate) enum XType { Enum = 4, } +#[gen_stub_pyclass] #[pyclass] #[derive(FromPyObject, Debug)] pub(crate) struct XSpec { @@ -120,6 +134,7 @@ pub(crate) struct XSpec { pub(crate) tags: Vec, } +#[gen_stub_pymethods] #[pymethods] impl XSpec { #[new] @@ -133,6 +148,7 @@ impl XSpec { } } +#[gen_stub_pyclass_enum] #[pyclass(eq, eq_int, rename_all = "UPPERCASE")] #[derive(Debug, Clone, Copy, PartialEq)] pub(crate) enum SparseMethod {