diff --git a/.editorconfig b/.editorconfig index a071a989..94b66ac2 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,26 +1,40 @@ - -root = true - -[*] -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true - -[!src/llvm-project] -indent_style = space -indent_size = 4 - -[*.rs] -max_line_length = 100 - -[*.md] -# double whitespace at end of line -# denotes a line break in Markdown -trim_trailing_whitespace = false - -[*.yml] -indent_size = 2 - -[Makefile] -indent_style = tab +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file +root = true + +[*] +charset = utf-8 +end_of_line = crlf +indent_style = space +indent_size = 4 +insert_final_newline = false +quote_type = double +trim_trailing_whitespace = false + +[*.{ts,tsx,js,jsx,cjs,mjs}] +indent_size = 2 +quote_type = single + +[*.{json,yaml,yml,toml}] +indent_size = 2 + +[*{toml}] +end_of_line = lf +indent_size = 2 + +[*.md] +insert_final_newline = true +trim_trailing_whitespace = true + +[*.rs] +indent_size = 4 + +[*.{html,htm,xml}] +indent_size = 2 + +[*.{css,scss,sass,less}] +indent_size = 2 +trim_trailing_whitespace = true +insert_final_newline = true + diff --git a/.github/dependabot.yml b/.github/dependabot.yml index b122d1b5..a0033ceb 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,17 +5,7 @@ updates: schedule: interval: monthly directories: - - / - - /concision - - /core - - /data - - /derive - - /init - - /macros - - /neural - - /tensor - - /utils - - /ext + - /* - /models/* - package-ecosystem: devcontainers directory: / diff --git a/.github/workflows/cargo-bench.yml b/.github/workflows/cargo-bench.yml deleted file mode 100644 index 915db1e3..00000000 --- a/.github/workflows/cargo-bench.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Benchmark - -concurrency: - cancel-in-progress: false - group: ${{ github.workflow }}-${{ github.ref }} - -env: - CARGO_TERM_COLOR: always - RUST_BACKTRACE: full - -on: - 
repository_dispatch: - types: [ cargo-bench, benchmark ] - workflow_dispatch: - -permissions: - contents: write - checks: write - -jobs: - benchmark: - runs-on: ubuntu-latest - outputs: - digest: ${{ steps.artifacts.outputs.artifact-digest }} - id: ${{ steps.artifacts.outputs.artifact-id }} - url: ${{ steps.artifacts.outputs.artifact-url }} - strategy: - fail-fast: false - matrix: - target: [ x86_64-unknown-linux-gnu ] - steps: - - - name: Checkout - uses: actions/checkout@v4 - - - name: Setup Rust - uses: actions-rust-lang/setup-rust-toolchain@v1 - with: - cache-key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - target: ${{ matrix.target }} - - - name: Benchmark the workspace - run: cargo bench --locked --verbose --workspace --target ${{ matrix.target }} --features full -- - - - name: Upload the benchmarks - id: artifacts - uses: actions/upload-artifact@v4 - with: - name: Benchmark Report (${{ github.event.repository.name }}) - if-no-files-found: error - overwrite: true - path: target/criterion/ diff --git a/.github/workflows/cargo-clippy.yml b/.github/workflows/cargo-clippy.yml index ebcad617..244030d5 100644 --- a/.github/workflows/cargo-clippy.yml +++ b/.github/workflows/cargo-clippy.yml @@ -6,24 +6,15 @@ concurrency: on: pull_request: - branches: - - main - - $default-branch - types: - - opened - - reopened - - synchronize + branches: [main, master] + types: [edited, opened, ready_for_review, reopened] push: - branches: - - main - - $default-branch - tags: - - v*.*.* - - "*-nightly" + branches: [main, master] + tags: [v*.*.*, "*-nightly"] release: - types: [ created, edited ] + types: [created, edited] repository_dispatch: - types: [ clippy, cargo-clippy ] + types: [clippy, cargo-clippy] workflow_dispatch: jobs: @@ -35,30 +26,24 @@ jobs: security-events: write statuses: write steps: - - - name: Checkout - uses: actions/checkout@v4 - - - name: Setup Rust + - name: Checkout + uses: actions/checkout@v5 + - name: Setup Rust uses: 
actions-rust-lang/setup-rust-toolchain@v1 with: cache-key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} components: clippy, rustfmt toolchain: nightly override: true - - - name: Setup the for sarif output + - name: Setup the for sarif output run: cargo install clippy-sarif sarif-fmt - - - name: Run Clippy - run: - cargo clippy + - name: Run Clippy + run: cargo clippy --all-features --workspace --message-format=json | clippy-sarif | tee rust-clippy-results.sarif | sarif-fmt - - - name: Upload analysis - uses: github/codeql-action/upload-sarif@v3 + - name: Upload analysis + uses: github/codeql-action/upload-sarif@v4 continue-on-error: true with: sarif_file: rust-clippy-results.sarif diff --git a/.github/workflows/cargo-publish.yml b/.github/workflows/cargo-publish.yml index 7592aea4..f956b7fb 100644 --- a/.github/workflows/cargo-publish.yml +++ b/.github/workflows/cargo-publish.yml @@ -6,25 +6,26 @@ concurrency: on: repository_dispatch: - types: [ deploy, publish, cargo-publish, crates-io ] + types: [deploy, publish, cargo-publish, crates-io] workflow_dispatch: inputs: publish: default: true - description: 'Publish the crate(s) to crates.io?' + description: "Publish the crate(s) to crates.io?" 
type: boolean +env: + CARGO_TERM_COLOR: always + RUST_BACKTRACE: full + jobs: crates-io: + environment: crates-io runs-on: ubuntu-latest env: CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} - CARGO_TERM_COLOR: always - RUST_BACKTRACE: full - environment: - name: crates-io outputs: - url: ${{ steps.results.outputs.url }} + url: ${{ steps.results.outputs.url }} permissions: contents: read deployments: write @@ -34,7 +35,9 @@ jobs: max-parallel: 1 matrix: package: + - concision-traits - concision-init + - concision-params - concision-utils - concision-core - concision-data @@ -46,23 +49,23 @@ jobs: # non-sdk packages - concision-kan - concision-s4 + - concision-snn - concision-transformer - concision-models steps: - - - name: Checkout - uses: actions/checkout@v4 - - - name: Setup Rust + - name: Checkout + uses: actions/checkout@v5 + with: + fetch-depth: 0 + ref: ${{ github.ref }} + token: ${{ secrets.GITHUB_TOKEN }} + - name: Setup Rust uses: actions-rust-lang/setup-rust-toolchain@v1 with: cache-key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - - - name: Publish (${{ matrix.package }}) + - name: Publish (${{ matrix.package }}) id: publish run: cargo publish --locked --package ${{ matrix.package }} - - - name: Set output(s) + - name: Set output(s) id: results - run: - echo "url=https://crates.io/crates/${{ matrix.package }}" >> "$GITHUB_OUTPUT" + run: echo "url=https://crates.io/crates/${{ matrix.package }}" >> "$GITHUB_OUTPUT" diff --git a/.github/workflows/cleanup.yml b/.github/workflows/cleanup.yml index 338cc169..f442853c 100644 --- a/.github/workflows/cleanup.yml +++ b/.github/workflows/cleanup.yml @@ -2,17 +2,16 @@ name: Cleanup on: pull_request: - types: - - closed + types: [closed] jobs: - cache_cleanup: + pr_cache_cleanup: + name: Cleanup PR Cache(s) runs-on: ubuntu-latest permissions: actions: write steps: - - - name: Cleanup + - name: Cleanup run: | echo "Fetching list of cache keys" cacheKeysForPR=$(gh cache list --ref $BRANCH 
--limit 100 --json id --jq '.[].id') diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml index 9a7667d8..419b8c3a 100644 --- a/.github/workflows/nix.yml +++ b/.github/workflows/nix.yml @@ -6,24 +6,15 @@ concurrency: on: pull_request: - branches: - - main - - $default-branch - types: - - opened - - reopened - - synchronize + branches: [main, master] + types: [edited, opened, ready_for_review, reopened] push: - branches: - - main - - $default-branch - tags: - - v*.*.* - - "*-nightly" + branches: [main, master] + tags: [v*.*.*, "*-nightly"] release: - types: [ created, edited ] + types: [created, edited] repository_dispatch: - types: [ nix, nix-build ] + types: [nix, nix-build] workflow_dispatch: permissions: @@ -34,17 +25,13 @@ jobs: continue-on-error: true runs-on: ubuntu-latest steps: - - - uses: actions/checkout@v4 - - - uses: cachix/install-nix-action@v31 - with: - github_access_token: ${{ secrets.GITHUB_TOKEN }} - - - name: Build - id: build - run: nix build - - - name: Check the flake - id: check - run: nix flake check + - uses: actions/checkout@v5 + - uses: cachix/install-nix-action@v31 + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + - name: Build + id: build + run: nix build + - name: Check the flake + id: check + run: nix flake check diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a6426452..7cb2b6f2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,20 +2,19 @@ name: Release on: release: - types: - - published + types: [published] repository_dispatch: - types: [ release ] + types: [release] workflow_dispatch: inputs: draft: default: false - description: 'Create a draft release' + description: "Create a draft release" required: true type: boolean prerelease: default: false - description: 'Create a prerelease' + description: "Create a prerelease" required: true type: boolean @@ -23,6 +22,10 @@ permissions: contents: write discussions: write +env: + IS_DRAFT: ${{ 
github.event.inputs.draft || github.event.release.draft || false }} + IS_PRERELEASE: ${{ github.event.inputs.prerelease || github.event.release.prerelease || false }} + jobs: publish: environment: @@ -30,33 +33,19 @@ jobs: url: https://crates.io/crates/concision runs-on: ubuntu-latest steps: - - - name: Checkout - uses: actions/checkout@v4 + - name: Checkout + uses: actions/checkout@v5 with: fetch-depth: 0 ref: ${{ github.ref }} repository: ${{ github.repository }} - - - name: Publish to crates.io + - name: Publish to crates.io uses: peter-evans/repository-dispatch@v3 with: event-type: cargo-publish client-payload: '{"ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}' token: ${{ github.token }} - release: - continue-on-error: true - needs: publish - env: - IS_PRERELEASE: ${{ github.event.inputs.prerelease || false }} - IS_DRAFT: ${{ github.event.inputs.draft || false }} - runs-on: ubuntu-latest - steps: - - - name: Checkout - uses: actions/checkout@v4 - - - name: Create release + - name: Create release uses: softprops/action-gh-release@v2 with: append_body: false diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 2b51fba2..0537e405 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -10,27 +10,18 @@ env: on: pull_request: - branches: - - main - - $default-branch - types: - - opened - - reopened - - synchronize + branches: [main, master] + types: [edited, opened, ready_for_review, reopened, synchronize] push: - branches: - - main - - $default-branch - tags: - - v*.*.* - - "*-nightly" + branches: [main, master] + tags: [v*.*.*, "*-nightly"] repository_dispatch: - types: [ rust ] + types: [rust] workflow_dispatch: inputs: benchmark: default: false - description: 'Run benchmarks' + description: "Run benchmarks" required: true type: boolean @@ -40,69 +31,68 @@ jobs: strategy: fail-fast: false matrix: - target: [ x86_64-unknown-linux-gnu ] + target: [x86_64-unknown-linux-gnu] steps: - - - name: Checkout - uses: 
actions/checkout@v4 - - - name: Setup Rust + - name: Checkout + uses: actions/checkout@v5 + - name: Setup Rust uses: actions-rust-lang/setup-rust-toolchain@v1 with: cache-key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} target: ${{ matrix.target }} - - - name: Build the workspace + - name: Build the workspace run: cargo build --release --locked --workspace --features full --target ${{ matrix.target }} benchmark: if: ${{ inputs.benchmark || github.event_name == 'push' }} runs-on: ubuntu-latest outputs: - digest: ${{ steps.cargo-bench.outputs.digest }} - id: ${{ steps.cargo-bench.outputs.id }} - url: ${{ steps.cargo-bench.outputs.url }} + digest: ${{ steps.artifacts.outputs.artifact-digest }} + id: ${{ steps.artifacts.outputs.artifact-id }} + url: ${{ steps.artifacts.outputs.artifact-url }} permissions: actions: read contents: write + strategy: + fail-fast: false + matrix: + target: [x86_64-unknown-linux-gnu] steps: - - - name: Checkout - uses: actions/checkout@v4 + - name: Checkout + uses: actions/checkout@v5 + - name: Setup Rust + uses: actions-rust-lang/setup-rust-toolchain@v1 with: - fetch-depth: 0 - ref: ${{ github.ref }} - repository: ${{ github.repository }} - - - name: Benchmark the workspace - id: cargo-bench - uses: peter-evans/repository-dispatch@v3 + cache-key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + target: ${{ matrix.target }} + - name: Benchmark the workspace + run: cargo bench --locked --verbose --workspace --target ${{ matrix.target }} --features full + - name: Upload the benchmarks + id: artifacts + uses: actions/upload-artifact@v5 with: - event-type: cargo-bench - client-payload: '{"ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}' - token: ${{ github.token }} + name: Benchmark Report (${{ github.event.repository.name }}) + if-no-files-found: error + overwrite: true + path: target/criterion/ test: needs: build runs-on: ubuntu-latest strategy: fail-fast: false matrix: - features: [ full, default ] - target: [ 
x86_64-unknown-linux-gnu ] + features: [full, default] + target: [x86_64-unknown-linux-gnu] steps: - - - name: Checkout - uses: actions/checkout@v4 - - - name: Setup Rust + - name: Checkout + uses: actions/checkout@v5 + - name: Setup Rust uses: actions-rust-lang/setup-rust-toolchain@v1 with: cache-key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} target: ${{ matrix.target }} - - - name: Test (${{ matrix.features }}) + - name: Test (${{ matrix.features }}) if: matrix.features != 'default' && matrix.features != 'all' run: cargo test -r --locked --workspace --target ${{ matrix.target }} --features ${{ matrix.features }} - - - name: Test (default) + - name: Test (default) if: matrix.features == 'default' run: cargo test -r --locked --workspace --target ${{ matrix.target }} diff --git a/Cargo.lock b/Cargo.lock index 7bc16da6..35a970d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,36 +2,15 @@ # It is not intended for manual editing. version = 4 -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" - [[package]] name = "aho-corasick" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -49,15 
+28,15 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstyle" -version = "1.0.11" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "approx" @@ -69,16 +48,10 @@ dependencies = [ ] [[package]] -name = "arrayref" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" - -[[package]] -name = "arrayvec" -version = "0.7.6" +name = "atomic-waker" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" @@ -86,65 +59,17 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" -[[package]] -name = "backtrace" -version = "0.3.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets", -] - [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" -[[package]] -name 
= "bincode" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36eaf5d7b090263e8150820482d5d93cd964a81e4019913c972f4edcc6edb740" -dependencies = [ - "bincode_derive", - "serde", - "unty", -] - -[[package]] -name = "bincode_derive" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf95709a440f45e986983918d0e8a1f30a9b1df04918fc828670606804ac3c09" -dependencies = [ - "virtue", -] - [[package]] name = "bitflags" -version = "2.9.1" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" - -[[package]] -name = "blake3" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if", - "constant_time_eq", -] +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "bumpalo" @@ -154,9 +79,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "bytes" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "cast" @@ -175,31 +100,29 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.27" +version = "1.2.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d487aa071b5f64da6f19a3e848e3578944b726ee5a4854b82172f02aa876bfdc" +checksum = "cd405d82c84ff7f35739f175f67d8b9fb7687a0e84ccdc78bd3568839827cf07" dependencies = [ + "find-msvc-tools", "shlex", ] [[package]] name = "cfg-if" -version = "1.0.1" +version = "1.0.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "chrono" -version = "0.4.41" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", - "js-sys", "num-traits", "serde", - "wasm-bindgen", "windows-link", ] @@ -232,18 +155,18 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.40" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f" +checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.5.40" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e" +checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" dependencies = [ "anstyle", "clap_lex", @@ -251,13 +174,13 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" [[package]] name = "concision" -version = "0.2.7" +version = "0.2.8" dependencies = [ "anyhow", "approx", @@ -277,26 +200,24 @@ dependencies = [ [[package]] name = "concision-core" -version = "0.2.7" +version = "0.2.8" dependencies = [ - "anyhow", "approx", "concision-init", + "concision-params", + "concision-traits", "concision-utils", 
"getrandom", "lazy_static", "ndarray", - "ndtensor", "num", "num-complex", "num-traits", "paste", - "rand 0.9.1", + "rand 0.9.2", "rand_distr", "rayon", "rustfft", - "scsys", - "scsys-derive", "serde", "serde_derive", "serde_json", @@ -304,13 +225,13 @@ dependencies = [ "strum", "thiserror", "tracing", + "variants", ] [[package]] name = "concision-data" -version = "0.2.7" +version = "0.2.8" dependencies = [ - "anyhow", "approx", "concision-core", "ndarray", @@ -319,16 +240,16 @@ dependencies = [ "num-traits", "rayon", "reqwest", - "scsys", "serde", "serde_json", "thiserror", "tracing", + "variants", ] [[package]] name = "concision-derive" -version = "0.2.7" +version = "0.2.8" dependencies = [ "proc-macro2", "quote", @@ -337,7 +258,7 @@ dependencies = [ [[package]] name = "concision-ext" -version = "0.2.7" +version = "0.2.8" dependencies = [ "anyhow", "approx", @@ -349,19 +270,18 @@ dependencies = [ "num-traits", "rayon", "rustfft", - "scsys", "serde", "serde_derive", "serde_json", "tracing", "tracing-subscriber", + "variants", ] [[package]] name = "concision-init" -version = "0.2.7" +version = "0.2.8" dependencies = [ - "anyhow", "approx", "getrandom", "lazy_static", @@ -370,7 +290,7 @@ dependencies = [ "num-complex", "num-traits", "paste", - "rand 0.9.1", + "rand 0.9.2", "rand_distr", "serde", "serde_derive", @@ -383,7 +303,7 @@ dependencies = [ [[package]] name = "concision-kan" -version = "0.2.7" +version = "0.2.8" dependencies = [ "anyhow", "approx", @@ -395,7 +315,7 @@ dependencies = [ [[package]] name = "concision-macros" -version = "0.2.7" +version = "0.2.8" dependencies = [ "proc-macro2", "quote", @@ -404,7 +324,7 @@ dependencies = [ [[package]] name = "concision-models" -version = "0.2.7" +version = "0.2.8" dependencies = [ "anyhow", "approx", @@ -416,19 +336,20 @@ dependencies = [ "lazy_static", "ndarray", "num-traits", - "scsys", "tracing", "tracing-subscriber", + "variants", ] [[package]] name = "concision-neural" -version = "0.2.7" +version = "0.2.8" 
dependencies = [ "anyhow", "approx", "concision-core", "concision-data", + "concision-params", "either", "getrandom", "lazy_static", @@ -437,12 +358,10 @@ dependencies = [ "num-complex", "num-traits", "paste", - "rand 0.9.1", + "rand 0.9.2", "rand_distr", "rayon", "rustfft", - "scsys", - "scsys-derive", "serde", "serde_derive", "serde_json", @@ -450,23 +369,78 @@ dependencies = [ "strum", "thiserror", "tracing", + "variants", +] + +[[package]] +name = "concision-params" +version = "0.2.8" +dependencies = [ + "approx", + "concision-init", + "concision-traits", + "getrandom", + "lazy_static", + "ndarray", + "num-complex", + "num-traits", + "rand 0.9.2", + "rand_distr", + "rayon", + "serde", + "serde_derive", + "serde_json", + "thiserror", + "variants", ] [[package]] name = "concision-s4" -version = "0.2.7" +version = "0.2.8" +dependencies = [ + "anyhow", + "approx", + "concision", + "ndarray", + "num-traits", + "tracing", +] + +[[package]] +name = "concision-snn" +version = "0.2.8" dependencies = [ "anyhow", "approx", "concision", "ndarray", "num-traits", + "serde", + "serde_derive", + "serde_json", "tracing", ] +[[package]] +name = "concision-traits" +version = "0.2.8" +dependencies = [ + "approx", + "getrandom", + "lazy_static", + "ndarray", + "num-complex", + "num-traits", + "paste", + "rand 0.9.2", + "rand_distr", + "thiserror", + "variants", +] + [[package]] name = "concision-transformer" -version = "0.2.7" +version = "0.2.8" dependencies = [ "anyhow", "approx", @@ -478,7 +452,7 @@ dependencies = [ [[package]] name = "concision-utils" -version = "0.2.7" +version = "0.2.8" dependencies = [ "anyhow", "approx", @@ -489,12 +463,10 @@ dependencies = [ "num-complex", "num-traits", "paste", - "rand 0.9.1", + "rand 0.9.2", "rand_distr", "rayon", "rustfft", - "scsys", - "scsys-derive", "serde", "serde_derive", "serde_json", @@ -502,26 +474,9 @@ dependencies = [ "strum", "thiserror", "tracing", + "variants", ] -[[package]] -name = "config" -version = "0.15.11" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "595aae20e65c3be792d05818e8c63025294ac3cb7e200f11459063a352a6ef80" -dependencies = [ - "pathdiff", - "serde", - "serde_json", - "winnow", -] - -[[package]] -name = "constant_time_eq" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" - [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -530,16 +485,16 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "criterion" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679" +checksum = "e1c047a62b0cc3e145fa84415a3191f628e980b194c2755aa12300a4e6cbd928" dependencies = [ "anes", "cast", "ciborium", "clap", "criterion-plot", - "itertools 0.13.0", + "itertools", "num-traits", "oorandom", "plotters", @@ -553,12 +508,12 @@ dependencies = [ [[package]] name = "criterion-plot" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +checksum = "9b1bcc0dc7dfae599d84ad0b1a55f80cde8af3725da8313b528da95ef783e338" dependencies = [ "cast", - "itertools 0.10.5", + "itertools", ] [[package]] @@ -592,16 +547,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array 0.14.7", - "typenum", -] - [[package]] name = "darling" version = "0.20.11" @@ -639,21 +584,12 @@ dependencies = [ [[package]] name = "deranged" -version = 
"0.4.0" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" dependencies = [ "powerfmt", - "serde", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "crypto-common", + "serde_core", ] [[package]] @@ -679,6 +615,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" +[[package]] +name = "find-msvc-tools" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" + [[package]] name = "fnv" version = "1.0.7" @@ -687,9 +629,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -727,54 +669,29 @@ dependencies = [ "pin-utils", ] -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "generic-array" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8c8444bc9d71b935156cc0ccab7f622180808af7867b1daae6547d773591703" -dependencies = [ - "serde", - "typenum", -] - [[package]] name = 
"getrandom" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "js-sys", "libc", "r-efi", - "wasi 0.14.2+wasi-0.2.4", + "wasip2", "wasm-bindgen", ] -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - [[package]] name = "half" -version = "2.6.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" dependencies = [ "cfg-if", "crunchy", + "zerocopy", ] [[package]] @@ -785,9 +702,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" -version = "0.15.4" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" [[package]] name = "heck" @@ -803,12 +720,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "http" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ "bytes", - "fnv", "itoa", ] @@ -843,18 +759,20 @@ checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "hyper" -version = "1.6.0" +version = "1.8.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", + "futures-core", "http", "http-body", "httparse", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", @@ -862,9 +780,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.14" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb" +checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56" dependencies = [ "base64", "bytes", @@ -886,9 +804,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.63" +version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -910,9 +828,9 @@ dependencies = [ [[package]] name = "icu_collections" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ "displaydoc", "potential_utf", @@ -923,9 +841,9 @@ dependencies = [ [[package]] name = "icu_locale_core" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ "displaydoc", "litemap", @@ -936,11 +854,10 @@ dependencies = [ 
[[package]] name = "icu_normalizer" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ - "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", @@ -951,42 +868,38 @@ dependencies = [ [[package]] name = "icu_normalizer_data" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99" dependencies = [ - "displaydoc", "icu_collections", "icu_locale_core", "icu_properties_data", "icu_provider", - "potential_utf", "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" +checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899" [[package]] name = "icu_provider" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" dependencies = [ "displaydoc", "icu_locale_core", - "stable_deref_trait", - "tinystr", "writeable", "yoke", "zerofrom", @@ -1002,9 +915,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" 
[[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -1034,24 +947,14 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.10.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" dependencies = [ "equivalent", - "hashbrown 0.15.4", + "hashbrown 0.16.1", "serde", -] - -[[package]] -name = "io-uring" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013" -dependencies = [ - "bitflags", - "cfg-if", - "libc", + "serde_core", ] [[package]] @@ -1062,23 +965,14 @@ checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "iri-string" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" dependencies = [ "memchr", "serde", ] -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.13.0" @@ -1096,9 +990,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" dependencies = [ "once_cell", "wasm-bindgen", @@ -1112,9 +1006,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.174" +version = "0.2.177" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" +checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" [[package]] name = "libm" @@ -1124,23 +1018,23 @@ checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "litemap" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "log" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" [[package]] name = "matchers" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" dependencies = [ - "regex-automata 0.1.10", + "regex-automata", ] [[package]] @@ -1155,35 +1049,26 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.5" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" - -[[package]] -name = "miniz_oxide" -version = "0.8.9" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" -dependencies = [ - "adler2", -] +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "mio" -version = "1.0.4" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" dependencies = [ "libc", - "wasi 0.11.1+wasi-snapshot-preview1", - "windows-sys 0.59.0", + "wasi", + "windows-sys 0.61.2", ] [[package]] name = "ndarray" -version = "0.16.1" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "882ed72dce9365842bf196bdeedf5055305f11fc8c03dee7bb0194a6cad34841" +checksum = "0c7c9125e8f6f10c9da3aad044cc918cf8784fa34de857b1aa68038eb05a50a9" dependencies = [ "approx", "cblas-sys", @@ -1199,42 +1084,13 @@ dependencies = [ "serde", ] -[[package]] -name = "ndtensor" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea8ab4dd20883ddf58cdbade1810f8840d0d420f33e5143b407ec7c3102ae2ec" -dependencies = [ - "anyhow", - "approx", - "getrandom", - "ndarray", - "num", - "num-complex", - "num-traits", - "paste", - "rand 0.9.1", - "rand_distr", - "rayon", - "scsys", - "scsys-derive", - "serde", - "serde_derive", - "serde_json", - "smart-default", - "strum", - "thiserror", - "tracing", -] - [[package]] name = "nu-ansi-term" -version = "0.46.0" +version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "overload", - "winapi", + "windows-sys 0.61.2", ] [[package]] @@ -1322,15 +1178,6 @@ dependencies = [ "libm", ] -[[package]] -name = "object" 
-version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" -dependencies = [ - "memchr", -] - [[package]] name = "once_cell" version = "1.21.3" @@ -1343,29 +1190,17 @@ version = "11.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - [[package]] name = "paste" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "pathdiff" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" - [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pin-project-lite" @@ -1424,9 +1259,9 @@ dependencies = [ [[package]] name = "potential_utf" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" dependencies = [ "zerovec", ] @@ -1457,18 +1292,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.95" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +checksum = 
"5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.40" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] @@ -1490,9 +1325,9 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ "rand_chacha", "rand_core 0.9.3", @@ -1532,7 +1367,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8615d50dcf34fa31f7ab52692afec947c4dd0ab803cc87cb3b0b4570ff7463" dependencies = [ "num-traits", - "rand 0.9.1", + "rand 0.9.2", "serde", "serde_with", ] @@ -1545,9 +1380,9 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" [[package]] name = "rayon" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" dependencies = [ "either", "rayon-core", @@ -1556,9 +1391,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -1567,53 +1402,38 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.1" +version = "1.12.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", + "regex-automata", + "regex-syntax", ] [[package]] name = "regex-automata" -version = "0.1.10" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", -] - -[[package]] -name = "regex-automata" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.5", + "regex-syntax", ] [[package]] name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - -[[package]] -name = "regex-syntax" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" -version = "0.12.22" +version = "0.12.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" +checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" dependencies = [ "base64", "bytes", @@ -1641,17 +1461,11 @@ dependencies = [ "web-sys", ] -[[package]] -name = "rustc-demangle" -version = "0.1.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" - [[package]] name = "rustfft" -version = "6.4.0" +version = "6.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6f140db74548f7c9d7cce60912c9ac414e74df5e718dc947d514b051b42f3f4" +checksum = "21db5f9893e91f41798c88680037dba611ca6674703c1a18601b01a72c8adb89" dependencies = [ "num-complex", "num-integer", @@ -1663,9 +1477,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" @@ -1683,142 +1497,29 @@ dependencies = [ ] [[package]] -name = "scsys" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00fffd67a93106e087d1863df313f6f9d2d65b415bf6e684c8fb33c5b0c5cf36" -dependencies = [ - "scsys-config", - "scsys-core", - "scsys-crypto", - "scsys-derive", - "scsys-traits", - "scsys-util", -] - -[[package]] -name = "scsys-config" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76a2cac05dfdbdefee1b2c9da644ec51c862fa37bef6a6611f2c89fb2f15973e" -dependencies = [ - "anyhow", - "config", - "scsys-core", - "serde", - "serde_derive", - "serde_json", - "smart-default", - "strum", - "thiserror", - "tracing", - "tracing-subscriber", - "url", -] - -[[package]] -name = "scsys-core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dccf5713c694fb005f4ba8bd8987cdf05da2494d1e6fe8ad8e6a48ff2564f2b9" -dependencies = [ - "anyhow", - "chrono", - "getrandom", - "num-traits", - "paste", - "rand 0.9.1", - "rand_distr", - "serde", - "serde_derive", - "serde_json", - "smart-default", - "strum", - "thiserror", - "time", - "tracing", - "uuid", - "wasm-bindgen", -] - 
-[[package]] -name = "scsys-crypto" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ea33a001aaeca8617c3622d79256c1a33065af3ac55f37db9df46229ca64a1d" -dependencies = [ - "anyhow", - "bincode", - "blake3", - "chrono", - "digest", - "generic-array 1.2.0", - "getrandom", - "paste", - "rand 0.9.1", - "rand_distr", - "scsys-core", - "serde", - "serde_derive", - "serde_json", - "smart-default", - "strum", - "thiserror", - "time", - "tracing", - "typenum", - "uuid", -] - -[[package]] -name = "scsys-derive" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8fb965e4c5f680530848ae90393d30a51c678fb9aba6261ccc1a2fcd487e7a0" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "scsys-traits" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64cfc23268b9d518c55fab9899512bbceb513f0fd692f886d6fa3b1e88d05e98" -dependencies = [ - "num-traits", -] - -[[package]] -name = "scsys-util" -version = "0.3.1" +name = "serde" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57dccdae8b4079446f05f2c2978ba316a76ab4958e20d4ee3cd2e5166258f7a2" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ - "num-traits", - "rand 0.9.1", - "serde", + "serde_core", "serde_derive", - "serde_json", - "strum", - "tracing", ] [[package]] -name = "serde" -version = "1.0.219" +name = "serde_core" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", @@ -1827,14 +1528,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] @@ -1859,7 +1561,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.10.0", + "indexmap 2.12.1", "serde", "serde_derive", "serde_json", @@ -1894,12 +1596,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" -[[package]] -name = "slab" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" - [[package]] name = "smallvec" version = "1.15.1" @@ -1919,19 +1615,19 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.10" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.60.2", ] [[package]] name = "stable_deref_trait" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "strength_reduce" @@ -1947,31 +1643,30 @@ checksum = 
"7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" -version = "0.27.1" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.27.1" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" dependencies = [ "heck", "proc-macro2", "quote", - "rustversion", "syn", ] [[package]] name = "syn" -version = "2.0.104" +version = "2.0.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" dependencies = [ "proc-macro2", "quote", @@ -2000,18 +1695,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", @@ -2029,13 +1724,12 @@ dependencies = [ [[package]] name = "time" -version = "0.3.41" +version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", - "js-sys", "num-conv", "powerfmt", "serde", @@ -2045,15 +1739,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" -version = "0.2.22" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", @@ -2061,9 +1755,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", @@ -2081,18 +1775,15 @@ dependencies = [ [[package]] name = "tokio" -version = "1.46.0" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1140bb80481756a8cbe10541f37433b459c5aa1e727b4c020fbfebdc25bf3ec4" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", - "io-uring", "libc", "mio", "pin-project-lite", - "slab", "socket2", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -2112,9 +1803,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +checksum = "9cf146f99d442e8e68e585f5d798ccd3cad9a7835b917e09728880a862706456" dependencies = [ "bitflags", "bytes", @@ -2174,15 +1865,14 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.19" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" dependencies = [ "matchers", "nu-ansi-term", "once_cell", - "regex", - "serde", + "regex-automata", "sharded-slab", "thread_local", "tracing", @@ -2205,29 +1895,17 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" -[[package]] -name = "typenum" -version = "1.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" - [[package]] name = "unicode-ident" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" - -[[package]] -name = "unty" -version = "0.0.4" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", @@ -2242,28 +1920,38 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] -name = "uuid" -version = "1.17.0" +name = "variants" +version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" +checksum = "c3d2f539cc5b8d5442f7558e2981adabd7462d6f318df8a5a12860a7242587e3" dependencies = [ - "getrandom", - "js-sys", - "serde", - "wasm-bindgen", + "anyhow", + "thiserror", + "variants-derive", + "variants-macros", ] [[package]] -name = "version_check" -version = "0.9.5" +name = "variants-derive" +version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +checksum = "def932f16e8fac8b72d08953f57f081006c7006fac129d2aff1e4bebbbcfe9ab" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] [[package]] -name = "virtue" -version = "0.0.18" +name = "variants-macros" +version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" +checksum = "c2bd5a945ea1267baa1c77d70e626e8c0268818d0edc1f1671f11def7e6708b4" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] [[package]] name = "walkdir" @@ -2291,45 +1979,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasi" -version = "0.14.2+wasi-0.2.4" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ - "wit-bindgen-rt", + "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = "0.4.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" dependencies = [ "cfg-if", "js-sys", @@ -2340,9 +2015,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2350,22 +2025,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" dependencies = [ + "bumpalo", "proc-macro2", "quote", "syn", - "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = 
"76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" dependencies = [ "unicode-ident", ] @@ -2383,50 +2058,28 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" dependencies = [ "js-sys", "wasm-bindgen", ] -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - [[package]] name = "windows-core" -version = "0.61.2" +version = "0.62.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", @@ -2437,9 +2090,9 @@ dependencies = [ [[package]] name = "windows-implement" 
-version = "0.60.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", @@ -2448,9 +2101,9 @@ dependencies = [ [[package]] name = "windows-interface" -version = "0.59.1" +version = "0.59.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", @@ -2459,52 +2112,53 @@ dependencies = [ [[package]] name = "windows-link" -version = "0.1.3" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-result" -version = "0.3.4" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" -version = "0.4.2" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ "windows-link", ] [[package]] name = "windows-sys" -version = "0.52.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ 
"windows-targets", ] [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-targets", + "windows-link", ] [[package]] name = "windows-targets" -version = "0.52.6" +version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ + "windows-link", "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", @@ -2517,83 +2171,70 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" [[package]] name = "windows_i686_gnu" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +checksum = 
"fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" [[package]] name = "windows_i686_msvc" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = "windows_x86_64_gnu" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = "windows_x86_64_msvc" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] -name = "winnow" -version = "0.7.11" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd" -dependencies = [ - "memchr", -] - -[[package]] -name = "wit-bindgen-rt" -version = "0.39.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" -dependencies = [ - "bitflags", -] +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "writeable" -version = "0.6.1" +version = "0.6.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "yoke" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = [ - "serde", "stable_deref_trait", "yoke-derive", "zerofrom", @@ -2601,9 +2242,9 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", @@ -2613,18 +2254,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.26" +version = "0.8.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +checksum = "4ea879c944afe8a2b25fef16bb4ba234f47c694565e97383b36f3a878219065c" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.26" +version = "0.8.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +checksum = "cf955aa904d6040f70dc8e9384444cb1030aed272ba3cb09bbc4ab9e7c1f34f5" dependencies = [ "proc-macro2", "quote", @@ -2654,9 +2295,9 @@ dependencies = [ [[package]] name = "zerotrie" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" 
dependencies = [ "displaydoc", "yoke", @@ -2665,9 +2306,9 @@ dependencies = [ [[package]] name = "zerovec" -version = "0.11.2" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", @@ -2676,9 +2317,9 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index fce34071..0029123c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,72 +1,59 @@ [workspace] -default-members = [ - "concision" -] +default-members = ["concision"] members = [ - "concision", - "core", - "data", - "derive", - "init", - "macros", - "neural", - "utils", - "ext", - "models/*", + "concision", + "core", + "data", + "derive", + "ext", + "init", + "macros", + "neural", + "params", + "traits", + "utils", + "models/*", ] resolver = "3" [workspace.package] -authors = [ - "FL03 (https://github.com/FL03)", - "Scattered-Systems (https://github.com/scattered-systems)" -] -categories = [ - "algorithms", - "mathematics", - "science" -] +authors = ["FL03 (https://github.com/FL03)", "Scattered-Systems (https://github.com/scattered-systems)"] +categories = ["algorithms", "mathematics", "science"] description = "Concision is a toolkit for designing machine-learning models in Rust." 
edition = "2024" homepage = "https://github.com/FL03/concision/wiki" -keywords = [ - "data-science", - "machine-learning", - "scsys", - "toolkit" -] +keywords = ["data-science", "machine-learning", "scsys", "toolkit"] license = "Apache-2.0" readme = "README.md" repository = "https://github.com/FL03/concision.git" rust-version = "1.85.0" -version = "0.2.7" +version = "0.2.8" [workspace.dependencies] -concision = { default-features = false, path = "concision", version = "0.2.7" } -concision-core = { default-features = false, path = "core", version = "0.2.7" } -concision-data = { default-features = false, path = "data", version = "0.2.7" } -concision-derive = { default-features = false, path = "derive", version = "0.2.7" } -concision-init = { default-features = false, path = "init", version = "0.2.7" } -concision-macros = { default-features = false, path = "macros", version = "0.2.7" } -concision-neural = { default-features = false, path = "neural", version = "0.2.7" } -concision-utils = { default-features = false, path = "utils", version = "0.2.7" } +concision = { default-features = false, path = "concision", version = "0.2.8" } + +concision-core = { default-features = false, path = "core", version = "0.2.8" } +concision-data = { default-features = false, path = "data", version = "0.2.8" } +concision-derive = { default-features = false, path = "derive", version = "0.2.8" } +concision-init = { default-features = false, path = "init", version = "0.2.8" } +concision-macros = { default-features = false, path = "macros", version = "0.2.8" } +concision-neural = { default-features = false, path = "neural", version = "0.2.8" } +concision-params = { default-features = false, path = "params", version = "0.2.8" } +concision-traits = { default-features = false, path = "traits", version = "0.2.8" } +concision-utils = { default-features = false, path = "utils", version = "0.2.8" } # extras -concision-ext = { default-features = false, path = "ext", version = "0.2.7" } 
+concision-ext = { default-features = false, path = "ext", version = "0.2.8" } # models -concision-models = { default-features = false, path = "macros", version = "0.2.7" } -concision-kan = { default-features = false, path = "models/kan", version = "0.2.7" } -concision-s4 = { default-features = false, path = "models/s4", version = "0.2.7" } -concision-transformer = { default-features = false, path = "models/transformer", version = "0.2.7" } +concision-kan = { default-features = false, path = "models/kan", version = "0.2.8" } +concision-models = { default-features = false, path = "models/models", version = "0.2.8" } +concision-s4 = { default-features = false, path = "models/s4", version = "0.2.8" } +concision-snn = { default-features = false, path = "models/snn", version = "0.2.8" } +concision-transformer = { default-features = false, path = "models/transformer", version = "0.2.8" } # custom -ndtensor = { default-features = false, version = "0.1.1" } -scsys = { default-features = false, features = ["derive"], version = "0.3.1" } -scsys-derive = { default-features = false, version = "0.3.1" } -# async -futures = { default-features = false, version = "0.3" } -tokio = { default-features = false, version = "1" } +variants = { default-features = false, features = ["derive"], version = "0.0.1" } # benchmarking -criterion = { version = "0.6" } +criterion = { version = "0.7" } # concurrency & parallelism crossbeam = { default-features = false, version = "0.8" } rayon = { default-features = false, version = "1" } @@ -76,8 +63,8 @@ serde_derive = { default-features = false, version = "1" } serde_json = { default-features = false, version = "1" } # math approx = { version = "0.5" } -ndarray = { default-features = false, version = "0.16" } -ndarray-linalg = { default-features = false, version = "0.17" } +ndarray = { default-features = false, version = "0.17" } +ndarray-linalg = { default-features = false, version = "0.18" } ndarray-stats = "0.6" num = { default-features = 
false, version = "0.4" } num-complex = { default-features = false, version = "0.4" } diff --git a/concision/Cargo.toml b/concision/Cargo.toml index ca106362..6f053c2a 100644 --- a/concision/Cargo.toml +++ b/concision/Cargo.toml @@ -14,27 +14,52 @@ repository.workspace = true rust-version.workspace = true version.workspace = true -[package.metadata.docs.rs] -all-features = false -doc-scrape-examples = true -features = ["full"] -rustc-args = ["--cfg", "docsrs"] -version = "v{{version}}" - -[package.metadata.release] -no-dev-version = true -tag-name = "{{version}}" - [lib] -crate-type = [ - "cdylib", - "rlib" -] +crate-type = ["cdylib", "rlib"] + bench = true doc = true doctest = true test = true +# ************* [Benchmarks] ************* +[[bench]] +harness = false +name = "default" +path = "benches/default.rs" +required-features = ["std"] + +[[bench]] +harness = false +name = "params" +path = "benches/params.rs" +required-features = [ + "neural", + "approx", + "rand", + "std", + "tracing", +] + +# ************* [Examples] ************* +[[example]] +name = "basic" +required-features = [ + "approx", + "rand", + "std", + "tracing", +] + +# ************* [Unit Tests] ************* +[[test]] +name = "default" + +[[test]] +name = "simple" +path = "tests/simple/main.rs" +required-features = ["approx", "default", "neural", "rand"] + [dependencies] concision-core = { workspace = true } concision-data = { optional = true, workspace = true } @@ -43,7 +68,7 @@ concision-macros = { optional = true, workspace = true } concision-neural = { optional = true, workspace = true } [dev-dependencies] -anyhow = { workspace = true } +anyhow = { features = ["std"], workspace = true } approx = { workspace = true } criterion = { features = ["plotters"], workspace = true } lazy_static = { workspace = true } @@ -55,28 +80,27 @@ tracing-subscriber = { workspace = true } [features] default = [ - "data", - "neural", - "std", - "utils", + "data", + "neural", + "std", + "utils", ] full = [ - 
"default", - "anyhow", - "approx", - "data", - "derive", - "macros", - "rand", - "serde", - "tracing" + "default", + "approx", + "data", + "derive", + "macros", + "rand", + "serde", + "tracing" ] nightly = [ - "concision-core/nightly", - "concision-data?/nightly", - "concision-neural?/nightly", + "concision-core/nightly", + "concision-data?/nightly", + "concision-neural?/nightly", ] # ************* [FF:Features] ************* @@ -88,106 +112,95 @@ data = ["dep:concision-data"] init = ["concision-core/init"] -neural = [ - "dep:concision-neural", - "alloc", -] +neural = ["dep:concision-neural", "alloc"] -utils = [ - "concision-core/utils" -] +utils = ["concision-core/utils"] # ************* [FF:Environments] ************* std = [ - "alloc", - "concision-core/std", - "concision-data?/std", - "concision-neural?/std", + "alloc", + "concision-core/std", + "concision-data?/std", + "concision-neural?/std", ] wasi = [ - "concision-core/wasi", - "concision-data?/wasi", - "concision-neural?/wasi" + "concision-core/wasi", + "concision-data?/wasi", + "concision-neural?/wasi" ] wasm = [ - "concision-core/wasm", - "concision-data?/wasm", - "concision-neural?/wasm" + "concision-core/wasm", + "concision-data?/wasm", + "concision-neural?/wasm" ] # ************* [FF:Dependencies] ************* alloc = [ - "concision-core/alloc", - "concision-data?/alloc", - "concision-neural?/alloc", -] - -anyhow = [ - "concision-core/anyhow", - "concision-data?/anyhow", - "concision-neural?/anyhow", + "concision-core/alloc", + "concision-data?/alloc", + "concision-neural?/alloc", ] approx = [ - "concision-core/approx", - "concision-data?/approx", - "concision-neural?/approx" + "concision-core/approx", + "concision-data?/approx", + "concision-neural?/approx" ] complex = [ - "concision-core/complex", - "concision-data?/complex", - "concision-neural?/complex" + "concision-core/complex", + "concision-data?/complex", + "concision-neural?/complex" ] json = [ - "concision-core/json", - 
"concision-data?/json", - "concision-neural?/json" + "concision-core/json", + "concision-data?/json", + "concision-neural?/json" ] rand = [ - "concision-core/rand", - "concision-data?/rand", - "concision-neural?/rand" + "concision-core/rand", + "concision-data?/rand", + "concision-neural?/rand" ] rng = [ - "concision-core/rng", - "concision-data?/rng", - "concision-neural?/rng" + "concision-core/rng", + "concision-data?/rng", + "concision-neural?/rng" ] rayon = [ - "concision-core/rayon", - "concision-data?/rayon", - "concision-neural?/rayon" + "concision-core/rayon", + "concision-data?/rayon", + "concision-neural?/rayon" ] rustfft = [ - "concision-core/rustfft", - "concision-neural?/rustfft" + "concision-core/rustfft", + "concision-neural?/rustfft" ] serde = [ - "concision-core/serde", - "concision-data?/serde", - "concision-neural?/serde" + "concision-core/serde", + "concision-data?/serde", + "concision-neural?/serde" ] tracing = [ - "concision-core/tracing", - "concision-data?/tracing", - "concision-neural?/tracing" + "concision-core/tracing", + "concision-data?/tracing", + "concision-neural?/tracing" ] # ********* [FF] Blas ********* blas = [ - "concision-core/blas", - "concision-data?/blas", - "concision-neural?/blas" + "concision-core/blas", + "concision-data?/blas", + "concision-neural?/blas" ] intel-mkl-system = ["blas"] @@ -202,40 +215,14 @@ openblas-system = ["blas"] openblas-static = ["blas"] -# ************* [Benchmarks] ************* -[[bench]] -harness = false -name = "default" -path = "benches/default.rs" -required-features = ["std"] - -[[bench]] -harness = false -name = "params" -path = "benches/params.rs" -required-features = [ - "neural", - "approx", - "rand", - "std", - "tracing", -] - -# ************* [Examples] ************* -[[example]] -name = "basic" -required-features = [ - "approx", - "rand", - "std", - "tracing", -] - -# ************* [Unit Tests] ************* -[[test]] -name = "default" +# ********* [Metadata] ********* 
+[package.metadata.docs.rs] +all-features = false +doc-scrape-examples = true +features = ["full"] +rustc-args = ["--cfg", "docsrs"] +version = "v{{version}}" -[[test]] -name = "simple" -path = "tests/simple/main.rs" -required-features = ["approx", "default", "neural", "rand"] \ No newline at end of file +[package.metadata.release] +no-dev-version = true +tag-name = "{{version}}" diff --git a/concision/examples/basic.rs b/concision/examples/basic.rs index f2acc4c8..15e54089 100644 --- a/concision/examples/basic.rs +++ b/concision/examples/basic.rs @@ -33,7 +33,7 @@ fn main() -> anyhow::Result<()> { assert_eq!(params.bias().shape(), &[n]); tracing::info!("Randomized parameters: {params:?}"); - let y = params.forward(&inputs)?; + let y = params.forward(&inputs).expect("forward pass failed"); assert_eq!(y.shape(), &[n]); tracing::info!("Forward pass: {y:?}"); diff --git a/concision/tests/simple/main.rs b/concision/tests/simple/main.rs index 74783f12..cb5754f3 100644 --- a/concision/tests/simple/main.rs +++ b/concision/tests/simple/main.rs @@ -47,7 +47,7 @@ fn test_simple_model() -> anyhow::Result<()> { let expected = Array1::from_elem(model.layout().output(), 0.5); // forward the input through the model - let output = model.predict(&input)?; + let output = model.predict(&input).expect("prediction failed"); // verify the output shape assert_eq!(output.dim(), (features.output())); // compare the results to what we expected diff --git a/concision/tests/simple/model.rs b/concision/tests/simple/model.rs index 923c526f..ace1d77d 100644 --- a/concision/tests/simple/model.rs +++ b/concision/tests/simple/model.rs @@ -2,7 +2,7 @@ appellation: model authors: @FL03 */ -use cnc::nn::{DeepModelParams, Model, ModelError, ModelFeatures, StandardModelConfig, Train}; +use cnc::nn::{DeepModelParams, Model, ModelFeatures, NeuralError, StandardModelConfig, Train}; use cnc::{Forward, Norm, Params, ReLU, Sigmoid}; use ndarray::prelude::*; @@ -65,7 +65,7 @@ where { type Output = Array; - fn 
forward(&self, input: &ArrayBase) -> cnc::Result { + fn forward(&self, input: &ArrayBase) -> Option { let mut output = self .params() .input() @@ -79,7 +79,7 @@ where .params() .output() .forward_then(&output, |y| y.sigmoid())?; - Ok(y) + Some(y) } } @@ -99,12 +99,12 @@ where &mut self, input: &ArrayBase, target: &ArrayBase, - ) -> Result { + ) -> Result { if input.len() != self.layout().input() { - return Err(ModelError::InvalidInputShape); + return Err(NeuralError::InvalidInputShape); } if target.len() != self.layout().output() { - return Err(ModelError::InvalidOutputShape); + return Err(NeuralError::InvalidOutputShape); } // get the learning rate from the model's configuration let lr = self @@ -121,15 +121,28 @@ where let mut activations = Vec::new(); activations.push(input.to_owned()); - let mut output = self.params().input().forward(&input)?.relu(); + let mut output = self + .params() + .input() + .forward(&input) + .expect("Failed to complete the forward pass for the input layer") + .relu(); activations.push(output.to_owned()); // collect the activations of the hidden for layer in self.params().hidden() { - output = layer.forward(&output)?.relu(); + output = layer + .forward(&output) + .expect("failed to complete the forward pass for the hidden layer") + .relu(); activations.push(output.to_owned()); } - output = self.params().output().forward(&output)?.sigmoid(); + output = self + .params() + .output() + .forward(&output) + .expect("Output layer failed to forward propagate") + .sigmoid(); activations.push(output.to_owned()); // Calculate output layer error @@ -143,7 +156,8 @@ where // Update output weights self.params_mut() .output_mut() - .backward(activations.last().unwrap(), &delta, lr)?; + .backward(activations.last().unwrap(), &delta, lr) + .expect("Output failed training..."); let num_hidden = self.layout().layers(); // Iterate through hidden layers in reverse order @@ -159,7 +173,9 @@ where }; // Normalize delta to prevent exploding gradients delta /= 
delta.l2_norm(); - self.params_mut().hidden_mut()[i].backward(&activations[i + 1], &delta, lr)?; + self.params_mut().hidden_mut()[i] + .backward(&activations[i + 1], &delta, lr) + .expect("Hidden failed training..."); } /* Backpropagate to the input layer @@ -172,7 +188,8 @@ where delta /= delta.l2_norm(); // Normalize the delta to prevent exploding gradients self.params_mut() .input_mut() - .backward(&activations[1], &delta, lr)?; + .backward(&activations[1], &delta, lr) + .expect("failed to backpropagate input layer during training..."); Ok(loss) } @@ -200,15 +217,15 @@ where &mut self, input: &ArrayBase, target: &ArrayBase, - ) -> Result { + ) -> Result { if input.nrows() == 0 || target.nrows() == 0 { - return Err(ModelError::InvalidBatchSize); + return Err(NeuralError::InvalidBatchSize); } if input.ncols() != self.layout().input() { - return Err(ModelError::InvalidInputShape); + return Err(NeuralError::InvalidInputShape); } if target.ncols() != self.layout().output() || target.nrows() != input.nrows() { - return Err(ModelError::InvalidOutputShape); + return Err(NeuralError::InvalidOutputShape); } let batch_size = input.nrows(); let mut loss = A::zero(); diff --git a/core/Cargo.toml b/core/Cargo.toml index d1fd4840..f46649a4 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -25,10 +25,7 @@ no-dev-version = true tag-name = "{{version}}" [lib] -crate-type = [ - "cdylib", - "rlib", -] +crate-type = ["cdylib", "rlib"] bench = false doc = true doctest = true @@ -36,12 +33,12 @@ test = true [dependencies] # local -concision-init = { optional = true, workspace = true } +concision-init = { workspace = true } +concision-params = { workspace = true } +concision-traits = { workspace = true } concision-utils = { optional = true, workspace = true } # custom -ndtensor = { workspace = true } -scsys = { workspace = true } -scsys-derive = { workspace = true } +variants = { workspace = true } # concurrency & parallelism rayon = { optional = true, workspace = true } # data & 
serialization @@ -49,7 +46,6 @@ serde = { features = ["derive"], optional = true, workspace = true } serde_derive = { optional = true, workspace = true } serde_json = { optional = true, workspace = true } # error-handling -anyhow = { optional = true, workspace = true } thiserror = { workspace = true } # logging tracing = { optional = true, workspace = true } @@ -73,183 +69,162 @@ rand_distr = { optional = true, workspace = true } lazy_static = { workspace = true } [features] -default = [ - "std", - "utils", -] +default = ["std", "utils"] full = [ - "default", - "anyhow", - "approx", - "complex", - "init", - "json", - "rand", - "serde", - "tracing", + "default", + "approx", + "complex", + "init", + "json", + "rand", + "serde", + "signal", + "tracing", ] nightly = [ - "concision-init?/nightly", - "ndtensor/nightly", - "concision-utils?/nightly", + "concision-init/nightly", + "concision-params/nightly", + "concision-traits/nightly", + "concision-utils?/nightly", ] # ************* [FF:Features] ************* init = [ - "cnc_init", - "rand", + "rand", ] -json = [ - "alloc", - "serde", - "serde_json", - "scsys/json", - "ndtensor/json", -] +json = ["alloc", "serde", "serde_json"] signal =[ - "complex", - "concision-utils?/signal", - "rustfft", + "complex", + "concision-utils?/signal", + "rustfft", ] -utils = [ - "cnc_utils", -] +utils = ["concision_utils"] -cnc_init = ["dep:concision-init"] - -cnc_utils = ["dep:concision-utils"] +concision_utils = ["dep:concision-utils"] # ************* [FF:Dependencies] ************* std = [ - "alloc", - "anyhow?/std", - "concision-init?/std", - "concision-utils?/std", - "ndarray/std", - "ndtensor/std", - "num/std", - "num-complex?/std", - "num-traits/std", - "rand?/std", - "rand?/std_rng", - "serde/std", - "scsys/std", - "strum/std", - "thiserror/std", - "tracing?/std", + "alloc", + "concision-init/std", + "concision-params/std", + "concision-traits/std", + "concision-utils?/std", + "ndarray/std", + "num/std", + "num-complex?/std", + 
"num-traits/std", + "rand?/std", + "rand?/std_rng", + "serde/std", + "strum/std", + "thiserror/std", + "tracing?/std", + "variants/std", ] wasi = [ - "concision-init?/wasi", - "concision-utils?/wasi", - "ndtensor/wasi", - "scsys/wasi", + "concision-init/wasi", + "concision-params/wasi", + "concision-traits/wasi", + "concision-utils?/wasi", ] wasm = [ - "getrandom?/wasm_js", - "concision-init?/wasm", - "concision-utils?/wasm", - "ndtensor/wasm", - "scsys/wasm", + "getrandom?/wasm_js", + "concision-init/wasm", + "concision-params/wasm", + "concision-traits/wasm", + "concision-utils?/wasm", ] # ************* [FF:Dependencies] ************* alloc = [ - "concision-init?/alloc", - "concision-utils?/alloc", - "ndtensor/alloc", - "num/alloc", - "serde?/alloc", - "serde_json?/alloc", - "scsys/alloc", -] - -anyhow = [ - "dep:anyhow", - "ndtensor/anyhow", - "scsys/anyhow", + "concision-init/alloc", + "concision-params/alloc", + "concision-traits/alloc", + "concision-utils?/alloc", + "num/alloc", + "serde?/alloc", + "serde_json?/alloc", + "variants/alloc", ] approx = [ - "dep:approx", - "concision-init?/approx", - "concision-utils?/approx", - "ndarray/approx", - "ndtensor/approx", + "dep:approx", + "concision-init/approx", + "concision-params/approx", + "concision-utils?/approx", + "ndarray/approx", ] blas = [ - "concision-init?/blas", - "concision-utils?/blas", - "ndarray/blas", - "ndtensor/blas", + "concision-init/blas", + "concision-params/blas", + "concision-utils?/blas", + "ndarray/blas", ] complex = [ - "dep:num-complex", - "concision-init?/complex", - "concision-utils?/complex", - "ndtensor/complex", + "dep:num-complex", + "concision-init/complex", + "concision-params/complex", + "concision-utils?/complex", ] rand = [ - "dep:rand", - "dep:rand_distr", - "concision-init?/rand", - "concision-utils?/rand", - "ndtensor/rand", - "num/rand", - "num-complex?/rand", - "rng", - "scsys/rand", + "dep:rand", + "dep:rand_distr", + "concision-init/rand", + "concision-params/rand", + 
"concision-traits/rand", + "concision-utils?/rand", + "num/rand", + "num-complex?/rand", + "rng", ] rayon = [ - "dep:rayon", - "concision-utils?/rayon", - "ndarray/rayon", - "ndtensor/rayon", + "dep:rayon", + "concision-params/rayon", + "concision-utils?/rayon", + "ndarray/rayon", ] rng = [ - "dep:getrandom", - "concision-init?/rng", - "concision-utils?/rng", - "ndtensor/rng", - "rand?/small_rng", - "rand?/thread_rng", - "scsys/rng", + "dep:getrandom", + "concision-init/rng", + "concision-params/rng", + "concision-traits/rng", + "concision-utils?/rng", + "rand?/small_rng", + "rand?/thread_rng", ] rustfft = ["dep:rustfft"] serde = [ - "concision-init?/serde", - "concision-utils?/serde", - "dep:serde", - "dep:serde_derive", - "ndarray/serde", - "ndtensor/serde", - "num/serde", - "num-complex?/serde", - "rand?/serde", - "rand_distr?/serde", - "scsys/serde", + "dep:serde", + "dep:serde_derive", + "concision-init/serde", + "concision-params/serde", + "concision-utils?/serde", + "ndarray/serde", + "num/serde", + "num-complex?/serde", + "rand?/serde", + "rand_distr?/serde", ] serde_json = ["dep:serde_json"] tracing = [ - "concision-init?/tracing", - "concision-utils?/tracing", - "dep:tracing", - "ndtensor/tracing", - "scsys/tracing", + "concision-init/tracing", + "concision-utils?/tracing", + "dep:tracing", ] # ************* [Unit Tests] ************* diff --git a/core/src/activate/impls/impl_binary.rs b/core/src/activate/impls/impl_binary.rs index 9602f4e4..16421cd8 100644 --- a/core/src/activate/impls/impl_binary.rs +++ b/core/src/activate/impls/impl_binary.rs @@ -33,7 +33,7 @@ impl_heavyside!( f32, f64, i8, i16, i32, i64, i128, isize, u8, u16, u32, u64, u128, usize, ); -impl Heavyside for ArrayBase +impl Heavyside for ArrayBase where A: Clone + Heavyside, D: Dimension, @@ -50,7 +50,7 @@ where } } -impl Heavyside for &ArrayBase +impl Heavyside for &ArrayBase where A: Clone + Heavyside, D: Dimension, diff --git a/core/src/activate/impls/impl_nonlinear.rs 
b/core/src/activate/impls/impl_nonlinear.rs index f10d4805..a6e793c7 100644 --- a/core/src/activate/impls/impl_nonlinear.rs +++ b/core/src/activate/impls/impl_nonlinear.rs @@ -7,7 +7,7 @@ use crate::activate::{ReLU, Sigmoid, Softmax, Tanh, utils::sigmoid_derivative}; use ndarray::{Array, ArrayBase, Data, Dimension, ScalarOperand}; use num_traits::{Float, One, Zero}; -impl ReLU for ArrayBase +impl ReLU for ArrayBase where A: Copy + PartialOrd + Zero + One, S: Data, @@ -24,7 +24,7 @@ where } } -impl Sigmoid for ArrayBase +impl Sigmoid for ArrayBase where A: ScalarOperand + Float, S: Data, @@ -44,7 +44,7 @@ where } } -impl Softmax for ArrayBase +impl Softmax for ArrayBase where A: ScalarOperand + Float, S: Data, @@ -67,7 +67,7 @@ where } } -impl Tanh for ArrayBase +impl Tanh for ArrayBase where A: ScalarOperand + Float, S: Data, diff --git a/core/src/activate/utils/non_linear.rs b/core/src/activate/utils/non_linear.rs index f9374282..3af4249c 100644 --- a/core/src/activate/utils/non_linear.rs +++ b/core/src/activate/utils/non_linear.rs @@ -51,7 +51,7 @@ where /// ```math /// f(x_i) = \frac{e^{x_i}}{\sum_j e^{x_j}} /// ``` -pub fn softmax(args: &ArrayBase) -> Array +pub fn softmax(args: &ArrayBase) -> Array where A: Float + ScalarOperand, D: Dimension, @@ -65,7 +65,7 @@ where /// ```math /// f(x_i) = \frac{e^{x_i}}{\sum_j e^{x_j}} /// ``` -pub fn softmax_axis(args: &ArrayBase, axis: usize) -> Array +pub fn softmax_axis(args: &ArrayBase, axis: usize) -> Array where A: Float + ScalarOperand, D: RemoveAxis, diff --git a/core/src/error.rs b/core/src/error.rs index e8d411e3..bd0fdde9 100644 --- a/core/src/error.rs +++ b/core/src/error.rs @@ -23,18 +23,14 @@ pub enum Error { #[error(transparent)] PadError(#[from] crate::ops::pad::error::PadError), #[error(transparent)] - ParamError(#[from] crate::params::error::ParamsError), + TraitError(#[from] concision_traits::Error), #[error(transparent)] - #[cfg(feature = "cnc_init")] - InitError(#[from] 
concision_init::error::InitError), + ParamError(#[from] concision_params::ParamsError), #[error(transparent)] - TensorError(#[from] ndtensor::error::TensorError), + InitError(#[from] concision_init::InitError), #[error(transparent)] - #[cfg(feature = "cnc_utils")] + #[cfg(feature = "concision_utils")] UtilityError(#[from] concision_utils::error::UtilityError), - #[cfg(feature = "anyhow")] - #[error(transparent)] - AnyError(#[from] anyhow::Error), #[cfg(feature = "alloc")] #[error(transparent)] BoxError(#[from] Box), @@ -76,6 +72,7 @@ impl Error { Self::Unknown(error.to_string()) } } + #[cfg(feature = "alloc")] impl From for Error { fn from(value: String) -> Self { diff --git a/core/src/lib.rs b/core/src/lib.rs index c8e37a85..a3e01201 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -40,7 +40,7 @@ #![crate_type = "lib"] #[cfg(not(all(feature = "std", feature = "alloc")))] -compile_error! { +compile_error! { "At least one of the 'std' or 'alloc' features must be enabled." } @@ -54,27 +54,33 @@ pub use rand; #[doc(no_inline)] pub use rand_distr; +#[doc(inline)] +pub use concision_traits as traits; + /// this module establishes generic random initialization routines for models, params, and /// tensors. 
#[doc(inline)] -#[cfg(feature = "cnc_init")] pub use concision_init as init; +/// The [`params`] module works to provide a generic structure for handling weights and biases +#[doc(inline)] +pub use concision_params as params; /// this module implements various utilities useful for developing machine learning models #[doc(inline)] -#[cfg(feature = "cnc_utils")] +#[cfg(feature = "concision_utils")] pub use concision_utils as utils; -/// An n-dimensional tensor -pub use ndtensor as tensor; -#[cfg(feature = "cnc_init")] -pub use self::init::prelude::*; -#[cfg(feature = "cnc_utils")] +#[cfg(feature = "concision_utils")] pub use self::utils::prelude::*; #[doc(inline)] pub use self::{ - activate::prelude::*, error::*, loss::prelude::*, ops::prelude::*, params::prelude::*, - tensor::prelude::*, traits::*, + activate::prelude::*, + error::*, + init::{Init, InitInplace, Initialize}, + loss::prelude::*, + ops::prelude::*, + params::prelude::*, + traits::prelude::*, }; #[macro_use] @@ -88,9 +94,6 @@ pub mod activate; pub mod error; /// this module focuses on the loss functions used in training neural networks. pub mod loss; -/// this module provides the [`ParamsBase`] type for the library, which is used to define the -/// parameters of a neural network. -pub mod params; pub mod ops { //! This module provides the core operations for tensors, including filling, padding, @@ -98,74 +101,24 @@ pub mod ops { #[doc(inline)] pub use self::prelude::*; - pub mod fill; pub mod mask; - pub mod norm; pub mod pad; - pub mod reshape; pub(crate) mod prelude { - #[doc(inline)] - pub use super::fill::*; #[doc(inline)] pub use super::mask::*; #[doc(inline)] - pub use super::norm::*; - #[doc(inline)] pub use super::pad::*; - #[doc(inline)] - pub use super::reshape::*; - } -} - -pub mod traits { - //! This module provides the core traits for the library, such as [`Backward`] and - //! 
[`Forward`] - #[doc(inline)] - pub use self::prelude::*; - - mod apply; - mod clip; - mod codex; - mod convert; - mod gradient; - mod like; - mod propagation; - mod shape; - mod store; - mod wnb; - - mod prelude { - #[doc(inline)] - pub use super::apply::*; - #[doc(inline)] - pub use super::clip::*; - #[doc(inline)] - pub use super::codex::*; - #[doc(inline)] - pub use super::convert::*; - #[doc(inline)] - pub use super::gradient::*; - #[doc(inline)] - pub use super::like::*; - #[doc(inline)] - pub use super::propagation::*; - #[doc(inline)] - pub use super::shape::*; - #[doc(inline)] - pub use super::store::*; - #[doc(inline)] - pub use super::wnb::*; } } #[doc(hidden)] pub mod prelude { - #[cfg(feature = "cnc_init")] pub use concision_init::prelude::*; - #[cfg(feature = "cnc_utils")] + pub use concision_params::prelude::*; + pub use concision_traits::prelude::*; + #[cfg(feature = "concision_utils")] pub use concision_utils::prelude::*; - pub use ndtensor::prelude::*; #[doc(no_inline)] pub use crate::activate::prelude::*; @@ -173,8 +126,4 @@ pub mod prelude { pub use crate::loss::prelude::*; #[doc(no_inline)] pub use crate::ops::prelude::*; - #[doc(no_inline)] - pub use crate::params::prelude::*; - #[doc(no_inline)] - pub use crate::traits::*; } diff --git a/core/src/loss/traits/entropy.rs b/core/src/loss/traits/entropy.rs index 72959006..0d40da4f 100644 --- a/core/src/loss/traits/entropy.rs +++ b/core/src/loss/traits/entropy.rs @@ -17,7 +17,7 @@ pub trait CrossEntropy { use ndarray::{ArrayBase, Data, Dimension, ScalarOperand}; use num_traits::{Float, FromPrimitive}; -impl CrossEntropy for ArrayBase +impl CrossEntropy for ArrayBase where A: Float + FromPrimitive + ScalarOperand, D: Dimension, diff --git a/core/src/loss/traits/standard.rs b/core/src/loss/traits/standard.rs index ca970006..0b922fe7 100644 --- a/core/src/loss/traits/standard.rs +++ b/core/src/loss/traits/standard.rs @@ -33,7 +33,7 @@ pub trait MeanSquaredError { use ndarray::{ArrayBase, Data, 
Dimension, ScalarOperand}; use num_traits::{Float, FromPrimitive}; -impl MeanAbsoluteError for ArrayBase +impl MeanAbsoluteError for ArrayBase where A: Float + FromPrimitive + ScalarOperand, D: Dimension, @@ -46,7 +46,7 @@ where } } -impl MeanSquaredError for ArrayBase +impl MeanSquaredError for ArrayBase where A: Float + FromPrimitive + ScalarOperand, D: Dimension, diff --git a/core/src/ops/mask/dropout.rs b/core/src/ops/mask/dropout.rs index 9dafd661..62d3417a 100644 --- a/core/src/ops/mask/dropout.rs +++ b/core/src/ops/mask/dropout.rs @@ -6,7 +6,7 @@ pub trait DropOut { } #[cfg(feature = "init")] -impl DropOut for ndarray::ArrayBase +impl DropOut for ndarray::ArrayBase where A: num_traits::Num + ndarray::ScalarOperand, D: ndarray::Dimension, @@ -15,7 +15,7 @@ where type Output = ndarray::Array; fn dropout(&self, p: f64) -> Self::Output { - pub use crate::init::Initialize; + pub use concision_init::Initialize; use ndarray::Array; let dim = self.dim(); // Create a mask of the same shape as the input array diff --git a/core/src/ops/norm.rs b/core/src/ops/norm.rs deleted file mode 100644 index 46ed66ae..00000000 --- a/core/src/ops/norm.rs +++ /dev/null @@ -1,14 +0,0 @@ -/* - appellation: norm - authors: @FL03 -*/ -//! 
this module implements various normalization operations for tensors -#[doc(inline)] -pub use self::prelude::*; - -mod l_norm; - -mod prelude { - #[doc(inline)] - pub use super::l_norm::*; -} diff --git a/core/src/ops/pad.rs b/core/src/ops/pad.rs index b6be6e0e..6564b9ab 100644 --- a/core/src/ops/pad.rs +++ b/core/src/ops/pad.rs @@ -32,7 +32,7 @@ pub struct Padding { use ndarray::{Array, ArrayBase, DataOwned, Dimension}; use num::traits::{FromPrimitive, Num}; -impl Pad for ArrayBase +impl Pad for ArrayBase where A: Copy + FromPrimitive + Num, D: Dimension, diff --git a/core/src/ops/pad/mode.rs b/core/src/ops/pad/mode.rs index 8d8e43a8..7dd96abd 100644 --- a/core/src/ops/pad/mode.rs +++ b/core/src/ops/pad/mode.rs @@ -14,7 +14,7 @@ use num::Zero; Ord, PartialEq, PartialOrd, - scsys_derive::VariantConstructors, + variants::VariantConstructors, strum::AsRefStr, strum::Display, strum::EnumCount, diff --git a/core/src/ops/pad/utils.rs b/core/src/ops/pad/utils.rs index e2f04955..75ad6234 100644 --- a/core/src/ops/pad/utils.rs +++ b/core/src/ops/pad/utils.rs @@ -19,7 +19,7 @@ fn reader(nb_dim: usize, pad: &[[usize; 2]]) -> Result, PadError } pub fn pad( - data: &ArrayBase, + data: &ArrayBase, pad: &[[usize; 2]], mode: PadMode, ) -> Result, PadError> @@ -40,7 +40,7 @@ where } pub fn pad_to( - data: &ArrayBase, + data: &ArrayBase, pad: &[[usize; 2]], mode: PadMode, output: &mut Array, diff --git a/core/tests/params.rs b/core/tests/params.rs index 52b6b516..fc591f97 100644 --- a/core/tests/params.rs +++ b/core/tests/params.rs @@ -2,39 +2,11 @@ Appellation: params Contrib: @FL03 */ -extern crate concision_core as cnc; -use cnc::params::Params; +use concision_params::Params; use approx::assert_abs_diff_eq; use ndarray::prelude::*; -#[test] -fn test_params_ones() { - // weights retain the given shape (d_in, d_out) - // bias retains the shape (d_out,) - let ones = Params::::ones((3, 4)); - assert_eq!(ones.dim(), (3, 4)); - assert_eq!(ones.bias().dim(), 4); - assert!( - ones.iter() 
- .all(|(w, b)| w.iter().all(|&wi| wi == 1.0) && b == &1.0) - ); -} - -#[test] -fn test_params_zeros() { - // weights retain the given shape (d_in, d_out) - // bias retains the shape (d_out,) - let zeros = Params::::zeros((3, 4)); - assert_eq!(zeros.dim(), (3, 4)); - assert_eq!(zeros.bias().dim(), 4); - assert!( - zeros - .iter() - .all(|(w, b)| w.iter().all(|&wi| wi == 0.0) && b == &0.0) - ); -} - #[test] fn test_params_forward() { let params = Params::::ones((3, 4)); @@ -46,22 +18,3 @@ fn test_params_forward() { // where W = ones(3, 4) and b = ones(4) assert_abs_diff_eq!(output, array![7.0, 7.0, 7.0, 7.0], epsilon = 1e-3); } - -#[test] -#[cfg(feature = "init")] -fn test_params_init() { - use cnc::init::Initialize; - - let lecun = Params::::lecun_normal((3, 4)); - assert_eq!(lecun.dim(), (3, 4)); - - let glorot_norm = Params::::glorot_normal((3, 4)); - assert_eq!(glorot_norm.dim(), (3, 4)); - assert_ne!(lecun, glorot_norm); - let glorot_uniform = Params::::glorot_uniform((3, 4)).expect("glorot_uniform failed"); - assert_eq!(glorot_uniform.dim(), (3, 4)); - assert_ne!(lecun, glorot_uniform); - assert_ne!(glorot_norm, glorot_uniform); - let truncnorm = Params::::truncnorm((3, 4), 0.0, 1.0).expect("truncnorm failed"); - assert_eq!(truncnorm.dim(), (3, 4)); -} diff --git a/data/Cargo.toml b/data/Cargo.toml index dfbdc3d3..6fb68f9c 100644 --- a/data/Cargo.toml +++ b/data/Cargo.toml @@ -2,7 +2,6 @@ build = "build.rs" description = "this crate provides additional tools for working with datasets" name = "concision-data" - authors.workspace = true categories.workspace = true edition.workspace = true @@ -14,33 +13,26 @@ repository.workspace = true rust-version.workspace = true version.workspace = true -[package.metadata.docs.rs] -all-features = false -doc-scrape-examples = true -features = ["full"] -rustc-args = ["--cfg", "docsrs"] -version = "v{{version}}" - -[package.metadata.release] -no-dev-version = true -tag-name = "{{version}}" - [lib] -crate-type = [ - "cdylib", - 
"rlib", -] +crate-type = ["cdylib","rlib"] bench = false doc = true doctest = true test = true +# ************* [Unit Tests] ************* +[[test]] +name = "default" + +[[test]] +name = "loader" +required-features = ["loader"] + [dependencies] concision-core = { workspace = true } # custom -scsys = { workspace = true } +variants = { workspace = true } # error handling -anyhow = { optional = true, workspace = true } thiserror = { workspace = true } # mathematics approx = { optional = true, workspace = true } @@ -65,7 +57,6 @@ default = [ full = [ "default", - "anyhow", "approx", "complex", "json", @@ -91,33 +82,26 @@ std = [ "ndarray/std", "num/std", "num-complex?/std", - "scsys/std", "serde?/std", "serde_json?/std", "tracing?/std", + "variants/std", ] wasi = [ "concision-core/wasi", - "scsys/wasi", ] wasm = [ "concision-core/wasm", - "scsys/wasm", ] # ************* [FF:Dependencies] ************* alloc = [ "concision-core/alloc", "num/alloc", - "serde?/alloc" -] - -anyhow = [ - "dep:anyhow", - "concision-core/anyhow", - "scsys/anyhow", + "serde?/alloc", + "variants/alloc", ] approx = [ @@ -142,7 +126,6 @@ json = [ "serde_json", "concision-core/json", "reqwest?/json", - "scsys/json", ] rayon = [ @@ -153,12 +136,10 @@ rayon = [ rand = [ "concision-core/rand", "rng", - "scsys/rand", ] rng = [ "concision-core/rng", - "scsys/rng", ] reqwest = ["dep:reqwest"] @@ -169,7 +150,6 @@ serde = [ "ndarray/serde", "num/serde", "num-complex?/serde", - "scsys/serde", ] serde_json = ["dep:serde_json"] @@ -177,13 +157,16 @@ serde_json = ["dep:serde_json"] tracing = [ "concision-core/tracing", "dep:tracing", - "scsys/tracing", ] -# ************* [Unit Tests] ************* -[[test]] -name = "default" +# ********* [Metadata] ********* +[package.metadata.docs.rs] +all-features = false +doc-scrape-examples = true +features = ["full"] +rustc-args = ["--cfg", "docsrs"] +version = "v{{version}}" -[[test]] -name = "loader" -required-features = ["loader"] \ No newline at end of file 
+[package.metadata.release] +no-dev-version = true +tag-name = "{{version}}" diff --git a/ext/Cargo.toml b/ext/Cargo.toml index 4fa73751..279468b0 100644 --- a/ext/Cargo.toml +++ b/ext/Cargo.toml @@ -25,11 +25,8 @@ no-dev-version = true tag-name = "{{version}}" [lib] -crate-type = [ - "cdylib", - "rlib", -] bench = false +crate-type = ["cdylib", "rlib"] doc = true doctest = true test = true @@ -38,13 +35,11 @@ test = true # local concision = { features = ["neural"], workspace = true } # custom -scsys = { workspace = true } +variants = { workspace = true } # concurrency & parallelism rayon = { optional = true, workspace = true } # data-structures ndarray = { workspace = true } -# error handling -anyhow = { optional = true, workspace = true } # mathematics approx = { optional = true, workspace = true } num = { workspace = true } @@ -60,138 +55,95 @@ tracing = { optional = true, workspace = true } [dev-dependencies] anyhow = { features = ["std"], workspace = true } -lazy_static ={ workspace = true } +lazy_static = { workspace = true } tracing-subscriber = { features = ["std"], workspace = true } [features] -default = [ - "attention", - "std", -] +default = ["attention", "std"] full = [ - "default", - "anyhow", - "complex", - "rand", - "json", - "serde", - "tracing" + "complex", + "default", + "json", + "rand", + "serde", + "tracing" ] -nightly = [ - "concision/nightly", -] +nightly = ["concision/nightly"] # ************* [FF:Features] ************* attention = [] signal = [ - "complex", - "rustfft" + "complex", "rustfft" ] json = [ - "alloc", - "serde", - "serde_json", + "alloc", "serde", "serde_json" ] init = [ - "concision/init", - "rand" + "concision/init", "rand" ] # ************* [FF:Environments] ************* std = [ - "alloc", - "anyhow?/std", - "approx?/std", - "concision/std", - "ndarray/std", - "num/std", - "num-complex?/std", - "num-traits/std", - "scsys/std", - "serde?/std", - "serde_json?/std", - "tracing?/std", + "alloc", + "approx?/std", + 
"concision/std", + "ndarray/std", + "num-complex?/std", + "num-traits/std", + "num/std", + "serde?/std", + "serde_json?/std", + "tracing?/std", + "variants/std" ] -wasi = [ - "concision/wasi", - "scsys/wasi", -] +wasi = ["concision/wasi"] wasm = [ - "concision/wasm", - "scsys/wasm", - "rayon?/web_spin_lock", + "concision/wasm", + "rayon?/web_spin_lock" ] # ************* [FF:Dependencies] ************* alloc = [ - "concision/alloc", - "num/alloc", - "scsys/alloc", - "serde?/alloc", - "serde_json?/alloc", -] - -anyhow = [ - "dep:anyhow", - "concision/anyhow", - "scsys/anyhow", + "concision/alloc", + "num/alloc", + "serde?/alloc", + "serde_json?/alloc", + "variants/alloc" ] -approx = [ - "dep:approx", - "concision/approx", - "ndarray/approx", -] +approx = ["concision/approx", "dep:approx", "ndarray/approx"] -blas = [ - "concision/blas", - "ndarray/blas", -] +blas = ["concision/blas", "ndarray/blas"] complex = ["dep:num-complex"] -rand = [ - "concision/rand", - "num/rand", - "num-complex?/rand", -] +rand = ["concision/rand", "num-complex?/rand", "num/rand"] -rayon = [ - "concision/rayon", - "ndarray/rayon", -] +rayon = ["concision/rayon", "ndarray/rayon"] -rng = [ - "concision/rng", - "scsys/rng", -] +rng = ["concision/rng"] rustfft = ["dep:rustfft"] serde = [ - "dep:serde", - "dep:serde_derive", - "concision/serde", - "ndarray/serde", - "num/serde", - "num-complex?/serde", - "scsys/serde", + "concision/serde", + "dep:serde", + "dep:serde_derive", + "ndarray/serde", + "num-complex?/serde", + "num/serde" ] serde_json = ["dep:serde_json"] -tracing = [ - "dep:tracing", - "concision/tracing", - "scsys/tracing", -] +tracing = ["concision/tracing", "dep:tracing"] # ************* [Unit Tests] ************* [[test]] name = "attention" -required-features = ["approx", "attention", "default", "rand"] \ No newline at end of file +required-features = ["approx", "attention", "default", "rand"] diff --git a/ext/src/attention/qkv.rs b/ext/src/attention/qkv.rs index f25425e1..0d6a502b 
100644 --- a/ext/src/attention/qkv.rs +++ b/ext/src/attention/qkv.rs @@ -129,11 +129,11 @@ where { type Output = Z; - fn forward(&self, input: &X) -> cnc::Result { + fn forward(&self, input: &X) -> Option { let query = input.dot(&self.query); let key = input.dot(&self.key); let value = input.dot(&self.value); let output = query + key + value; - Ok(output) + Some(output) } } diff --git a/init/Cargo.toml b/init/Cargo.toml index 896eac0d..7075650f 100644 --- a/init/Cargo.toml +++ b/init/Cargo.toml @@ -25,11 +25,8 @@ no-dev-version = true tag-name = "{{version}}" [lib] -crate-type = [ - "cdylib", - "rlib", -] bench = false +crate-type = ["cdylib", "rlib"] doc = true doctest = true test = true @@ -40,7 +37,6 @@ serde = { features = ["derive"], optional = true, workspace = true } serde_derive = { optional = true, workspace = true } serde_json = { optional = true, workspace = true } # error-handling -anyhow = { optional = true, workspace = true } thiserror = { workspace = true } # logging tracing = { optional = true, workspace = true } @@ -63,80 +59,51 @@ rand_distr = { optional = true, workspace = true } lazy_static = { workspace = true } [features] -default = [ - "rand", - "std", -] +default = ["rand", "std"] -full = [ - "approx", - "complex", - "default", - "serde", - "tracing", -] +full = ["approx", "complex", "default", "serde", "tracing"] nightly = [] - # ************* [FF:Dependencies] ************* std = [ - "alloc", - "anyhow?/std", - "ndarray/std", - "num/std", - "num-complex?/std", - "num-traits/std", - "rand/std", - "rand/std_rng", - "serde/std", - "strum/std", - "thiserror/std", - "tracing?/std", + "alloc", + "ndarray/std", + "num-complex?/std", + "num-traits/std", + "num/std", + "rand/std", + "rand/std_rng", + "serde/std", + "strum/std", + "thiserror/std", + "tracing?/std" ] wasi = [] -wasm = [ - "getrandom?/wasm_js", -] +wasm = ["getrandom?/wasm_js"] # ************* [FF:Dependencies] ************* -alloc = [ - "num/alloc", - "serde?/alloc", -] +alloc = 
["num/alloc", "serde?/alloc"] -approx = [ - "dep:approx", - "ndarray/approx", -] +approx = ["dep:approx", "ndarray/approx"] blas = ["ndarray/blas"] complex = ["dep:num-complex"] -rand = [ - "dep:rand", - "dep:rand_distr", - "num/rand", - "num-complex?/rand", - "rng", -] +rand = ["dep:rand", "dep:rand_distr", "num-complex?/rand", "num/rand", "rng"] -rng = [ - "dep:getrandom", - "rand?/small_rng", - "rand?/thread_rng", -] +rng = ["dep:getrandom", "rand?/small_rng", "rand?/thread_rng"] serde = [ - "dep:serde", - "dep:serde_derive", - "ndarray/serde", - "num/serde", - "num-complex?/serde", - "rand?/serde", - "rand_distr?/serde", + "dep:serde", + "dep:serde_derive", + "ndarray/serde", + "num-complex?/serde", + "num/serde", + "rand?/serde", + "rand_distr?/serde" ] tracing = ["dep:tracing"] diff --git a/models/kan/Cargo.toml b/models/kan/Cargo.toml index 9aae7f8e..9793ea29 100644 --- a/models/kan/Cargo.toml +++ b/models/kan/Cargo.toml @@ -25,11 +25,8 @@ no-dev-version = true tag-name = "{{version}}" [lib] -crate-type = [ - "cdylib", - "rlib", -] bench = false +crate-type = ["cdylib", "rlib"] doc = true doctest = true test = true @@ -38,7 +35,7 @@ test = true # sdk concision = { features = ["neural"], workspace = true } # error -anyhow = { optional = true, workspace = true } +anyhow = { workspace = true } # mathematics approx = { optional = true, workspace = true } ndarray = { workspace = true } @@ -47,63 +44,26 @@ num-traits = { workspace = true } tracing = { optional = true, workspace = true } [features] -default = [ - "std", -] +default = ["std"] -full = [ - "default", - "rand", - "serde", - "tracing" -] +full = ["default", "rand", "serde", "tracing"] # ************* [FF:Environments] ************* -std = [ - "concision/std", - "ndarray/std", - "num-traits/std", - "tracing/std", -] +std = ["concision/std", "ndarray/std", "num-traits/std", "tracing/std"] -wasi = [ - "concision/wasi", -] +wasi = ["concision/wasi"] -wasm = [ - "concision/wasm", -] +wasm = 
["concision/wasm"] # ************* [FF:Dependencies] ************* -anyhow = [ - "dep:anyhow", - "concision/anyhow", -] -approx = [ - "dep:approx", - "concision/approx", - "ndarray/approx", -] - -blas = [ - "concision/blas", - "ndarray/blas" -] - -rand = [ - "concision/rand", -] - -rayon = [ - "concision/rayon", - "ndarray/rayon" -] - -serde = [ - "concision/serde", -] - -tracing = [ - "dep:tracing", - "concision/tracing", -] +approx = ["concision/approx", "dep:approx", "ndarray/approx"] + +blas = ["concision/blas", "ndarray/blas"] + +rand = ["concision/rand"] + +rayon = ["concision/rayon", "ndarray/rayon"] + +serde = ["concision/serde"] + +tracing = ["concision/tracing", "dep:tracing"] diff --git a/models/models/Cargo.toml b/models/models/Cargo.toml index a40f62c6..71eb27ea 100644 --- a/models/models/Cargo.toml +++ b/models/models/Cargo.toml @@ -14,27 +14,23 @@ repository.workspace = true rust-version.workspace = true version.workspace = true -[package.metadata.docs.rs] -all-features = false -doc-scrape-examples = true -features = ["full"] -rustc-args = ["--cfg", "docsrs"] -version = "v{{version}}" - -[package.metadata.release] -no-dev-version = true -tag-name = "{{version}}" - [lib] -crate-type = [ - "cdylib", - "rlib", -] +crate-type = ["cdylib", "rlib"] bench = false doc = true doctest = true test = true +# ************* [Examples] ************* +[[example]] +name = "simple" +required-features = ["approx", "rand", "tracing"] + +# ************* [Unit Tests] ************* +[[test]] +name = "simple" +required-features = ["approx", "rand"] + [dependencies] concision-kan = { optional = true, workspace = true } concision-s4 = { optional = true, workspace = true } @@ -43,9 +39,7 @@ concision-transformer = { optional = true, workspace = true } concision = { features = ["neural"], workspace = true } concision-ext = { workspace = true } # custom -scsys = { workspace = true } -# error handling -anyhow = { optional = true, workspace = true } +variants = { workspace = true 
} # mathematics approx = { optional = true, workspace = true } ndarray = { workspace = true } @@ -58,25 +52,22 @@ anyhow = { features = ["std"], workspace = true } lazy_static ={ workspace = true } tracing-subscriber = { features = ["std"], workspace = true } +# ********* [Features] ********* [features] -default = [ - "simple", - "std", -] +default = ["std"] full = [ - "anyhow", - "default", - "models", - "rand", - "serde", - "tracing" + "default", + "models", + "rand", + "serde", + "tracing" ] -# ************* [FF:Features] ************* +# ************* [FF:Flags] ************* models = [ - "simple", - "transformer" + "simple", + "transformer" ] simple = [] @@ -89,99 +80,79 @@ transformer = ["dep:concision-transformer"] # ************* [FF:Environments] ************* std = [ - "concision/std", - "concision-ext/std", - "concision-kan?/std", - "concision-s4?/std", - "concision-transformer?/std", - "scsys/std", - "ndarray/std", - "num-traits/std", - "tracing?/std", + "concision/std", + "concision-ext/std", + "concision-kan?/std", + "concision-s4?/std", + "concision-transformer?/std", + "ndarray/std", + "num-traits/std", + "tracing?/std", ] # ************* [FF:Dependencies] ************* -anyhow = [ - "dep:anyhow", - "concision/anyhow", - "concision-ext/anyhow", - "concision-kan?/anyhow", - "concision-s4?/anyhow", - "concision-transformer?/anyhow", - "scsys/anyhow", -] - approx = [ - "dep:approx", - "concision/approx", - "concision-ext/approx", - "concision-kan?/approx", - "concision-s4?/approx", - "concision-transformer?/approx", - "ndarray/approx", + "dep:approx", + "concision/approx", + "concision-ext/approx", + "concision-kan?/approx", + "concision-s4?/approx", + "concision-transformer?/approx", + "ndarray/approx", ] blas = [ - "concision/blas", - "concision-ext/blas", - "concision-kan?/blas", - "concision-s4?/blas", - "concision-transformer?/blas", - "ndarray/blas", + "concision/blas", + "concision-ext/blas", + "concision-kan?/blas", + "concision-s4?/blas", + 
"concision-transformer?/blas", + "ndarray/blas", ] rand = [ - "concision/rand", - "concision-ext/rand", - "concision-kan?/rand", - "concision-s4?/rand", - "concision-transformer?/rand", + "concision/rand", + "concision-ext/rand", + "concision-kan?/rand", + "concision-s4?/rand", + "concision-transformer?/rand", ] rayon = [ - "concision/rayon", - "concision-ext/rayon", - "concision-kan?/rayon", - "concision-s4?/rayon", - "concision-transformer?/rayon", - "ndarray/rayon", + "concision/rayon", + "concision-ext/rayon", + "concision-kan?/rayon", + "concision-s4?/rayon", + "concision-transformer?/rayon", + "ndarray/rayon", ] serde = [ - "concision/serde", - "concision-ext/rng", - "concision-kan?/serde", - "concision-s4?/serde", - "concision-transformer?/serde", - "ndarray/serde", - "scsys/serde", + "concision/serde", + "concision-ext/rng", + "concision-kan?/serde", + "concision-s4?/serde", + "concision-transformer?/serde", + "ndarray/serde", ] tracing = [ - "dep:tracing", - "concision/tracing", - "concision-ext/tracing", - "concision-kan?/tracing", - "concision-s4?/tracing", - "concision-transformer?/tracing", - "scsys/tracing", + "dep:tracing", + "concision/tracing", + "concision-ext/tracing", + "concision-kan?/tracing", + "concision-s4?/tracing", + "concision-transformer?/tracing", ] -# ************* [Examples] ************* -[[example]] -name = "simple" -required-features = [ - "anyhow", - "approx", - "rand", - "tracing", -] +# ********* [Metadata] ********* +[package.metadata.docs.rs] +all-features = false +doc-scrape-examples = true +features = ["full"] +rustc-args = ["--cfg", "docsrs"] +version = "v{{version}}" -# ************* [FF:Dependencies] ************* -[[test]] -name = "simple" -required-features = [ - "anyhow", - "approx", - "rand", -] +[package.metadata.release] +no-dev-version = true +tag-name = "{{version}}" \ No newline at end of file diff --git a/models/models/src/simple.rs b/models/models/src/simple.rs index 7d3fb234..ee9d005b 100644 --- 
a/models/models/src/simple.rs +++ b/models/models/src/simple.rs @@ -2,7 +2,7 @@ Appellation: simple Contrib: @FL03 */ -use cnc::nn::{DeepModelParams, Model, ModelError, ModelFeatures, StandardModelConfig, Train}; +use cnc::nn::{DeepModelParams, Model, ModelFeatures, NeuralError, StandardModelConfig, Train}; use cnc::{Forward, Norm, Params, ReLU, Sigmoid}; use ndarray::prelude::*; @@ -125,7 +125,7 @@ where { type Output = Array; - fn forward(&self, input: &ArrayBase) -> cnc::Result { + fn forward(&self, input: &ArrayBase) -> Option { let mut output = self .params() .input() @@ -139,7 +139,7 @@ where .params() .output() .forward_then(&output, |y| y.sigmoid())?; - Ok(y) + Some(y) } } @@ -153,23 +153,18 @@ where #[cfg_attr( feature = "tracing", - tracing::instrument( - skip(self, input, target), - level = "trace", - name = "backward", - target = "model", - ) + tracing::instrument(skip(self, input, target), level = "trace", target = "model",) )] fn train( &mut self, input: &ArrayBase, target: &ArrayBase, - ) -> Result { - if input.len() != self.features().input() { - return Err(ModelError::InvalidInputShape); + ) -> Result { + if input.len() != self.layout().input() { + return Err(NeuralError::InvalidInputShape); } - if target.len() != self.features().output() { - return Err(ModelError::InvalidOutputShape); + if target.len() != self.layout().output() { + return Err(NeuralError::InvalidOutputShape); } // get the learning rate from the model's configuration let lr = self @@ -186,15 +181,28 @@ where let mut activations = Vec::new(); activations.push(input.to_owned()); - let mut output = self.params().input().forward(&input)?.relu(); + let mut output = self + .params() + .input() + .forward(&input) + .expect("Failed to complete the forward pass for the input layer") + .relu(); activations.push(output.to_owned()); // collect the activations of the hidden for layer in self.params().hidden() { - output = layer.forward(&output)?.relu(); + output = layer + .forward(&output) + 
.expect("failed to complete the forward pass for the hidden layer") + .relu(); activations.push(output.to_owned()); } - output = self.params().output().forward(&output)?.sigmoid(); + output = self + .params() + .output() + .forward(&output) + .expect("Output layer failed to forward propagate") + .sigmoid(); activations.push(output.to_owned()); // Calculate output layer error @@ -208,9 +216,10 @@ where // Update output weights self.params_mut() .output_mut() - .backward(activations.last().unwrap(), &delta, lr)?; + .backward(activations.last().unwrap(), &delta, lr) + .expect("Output failed training..."); - let num_hidden = self.features().layers(); + let num_hidden = self.layout().layers(); // Iterate through hidden layers in reverse order for i in (0..num_hidden).rev() { // Calculate error for this layer @@ -224,7 +233,9 @@ where }; // Normalize delta to prevent exploding gradients delta /= delta.l2_norm(); - self.params_mut().hidden_mut()[i].backward(&activations[i + 1], &delta, lr)?; + self.params_mut().hidden_mut()[i] + .backward(&activations[i + 1], &delta, lr) + .expect("Hidden failed training..."); } /* Backpropagate to the input layer @@ -237,7 +248,8 @@ where delta /= delta.l2_norm(); // Normalize the delta to prevent exploding gradients self.params_mut() .input_mut() - .backward(&activations[1], &delta, lr)?; + .backward(&activations[1], &delta, lr) + .expect("failed to backpropagate input layer during training..."); Ok(loss) } @@ -265,15 +277,15 @@ where &mut self, input: &ArrayBase, target: &ArrayBase, - ) -> Result { + ) -> Result { if input.nrows() == 0 || target.nrows() == 0 { - return Err(ModelError::InvalidBatchSize); + return Err(NeuralError::InvalidBatchSize); } if input.ncols() != self.features().input() { - return Err(ModelError::InvalidInputShape); + return Err(NeuralError::InvalidInputShape); } if target.ncols() != self.features().output() || target.nrows() != input.nrows() { - return Err(ModelError::InvalidOutputShape); + return 
Err(NeuralError::InvalidOutputShape); } let mut loss = A::zero(); diff --git a/models/s4/Cargo.toml b/models/s4/Cargo.toml index 9bef6f98..40a42d3c 100644 --- a/models/s4/Cargo.toml +++ b/models/s4/Cargo.toml @@ -24,20 +24,14 @@ version = "v{{version}}" no-dev-version = true tag-name = "{{version}}" [lib] -crate-type = [ - "cdylib", - "rlib", -] bench = false +crate-type = ["cdylib", "rlib"] doc = true doctest = true test = true [dependencies] -# sdk concision = { features = ["neural"], workspace = true } -# error -anyhow = { optional = true, workspace = true } # mathematics approx = { optional = true, workspace = true } ndarray = { workspace = true } @@ -45,64 +39,30 @@ num-traits = { workspace = true } # logging tracing = { optional = true, workspace = true } +[dev-dependencies] +anyhow = { features = ["std"], workspace = true } + [features] -default = [ - "std", -] +default = ["std"] -full = [ - "default", - "rand", - "serde", - "tracing" -] +full = ["default", "rand", "serde", "tracing"] # ************* [FF:Environments] ************* -std = [ - "concision/std", - "ndarray/std", - "num-traits/std", - "tracing/std", -] +std = ["concision/std", "ndarray/std", "num-traits/std", "tracing/std"] -wasi = [ - "concision/wasi", -] +wasi = ["concision/wasi"] -wasm = [ - "concision/wasm", -] +wasm = ["concision/wasm"] # ************* [FF:Dependencies] ************* -anyhow = [ - "dep:anyhow", - "concision/anyhow", -] -approx = [ - "dep:approx", - "concision/approx", - "ndarray/approx", -] - -blas = [ - "concision/blas", - "ndarray/blas" -] - -rand = [ - "concision/rand", -] - -rayon = [ - "concision/rayon", - "ndarray/rayon" -] - -serde = [ - "concision/serde", -] - -tracing = [ - "dep:tracing", - "concision/tracing", -] +approx = ["concision/approx", "dep:approx", "ndarray/approx"] + +blas = ["concision/blas", "ndarray/blas"] + +rand = ["concision/rand"] + +rayon = ["concision/rayon", "ndarray/rayon"] + +serde = ["concision/serde"] + +tracing = ["concision/tracing", 
"dep:tracing"] diff --git a/models/snn/Cargo.toml b/models/snn/Cargo.toml new file mode 100644 index 00000000..f5d2d96e --- /dev/null +++ b/models/snn/Cargo.toml @@ -0,0 +1,111 @@ +[package] +build = "build.rs" +description = "Synaptic Neural Networks for the Concision Machine Learning Framework" +name = "concision-snn" + +authors.workspace = true +categories.workspace = true +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +readme.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[lib] +bench = false +crate-type = ["cdylib", "rlib"] +doc = true +doctest = true +test = true + +# ********* [Unit Tests] ********* +[[test]] +name = "neurons" +required-features = ["default"] + +[dependencies] +concision = { features = ["neural"], workspace = true } +# mathematics +approx = { optional = true, workspace = true } +ndarray = { workspace = true } +num-traits = { workspace = true } +# serialization +serde = { optional = true, workspace = true } +serde_derive = { optional = true, workspace = true } +serde_json = { optional = true, workspace = true } +# logging +tracing = { optional = true, workspace = true } + +[dev-dependencies] +anyhow = { features = ["std"], workspace = true } + +[features] +default = ["std"] + +full = [ + "default", + "json", + "rand", + "serde", + "tracing" +] + +json = ["alloc", "serde", "serde_json"] + +# ************* [FF:Environments] ************* +std = [ + "alloc", + "concision/std", + "ndarray/std", + "num-traits/std", + "serde?/std", + "serde_json?/std", + "tracing/std" + ] + +wasi = ["concision/wasi"] + +wasm = ["concision/wasm"] + +# ************* [FF:Dependencies] ************* +alloc = [ + "concision/alloc", + "serde?/alloc", + "serde_json?/alloc", +] + +approx = [ + "concision/approx", + "dep:approx", + "ndarray/approx" +] + +blas = ["concision/blas", "ndarray/blas"] + +rand = ["concision/rand"] + +rayon = ["concision/rayon", 
"ndarray/rayon"] + +serde = [ + "dep:serde", + "dep:serde_derive", + "concision/serde", +] + +serde_json = ["dep:serde_json"] + +tracing = ["concision/tracing", "dep:tracing"] + +# ********* [Metadata] ********* +[package.metadata.docs.rs] +all-features = false +features = ["full"] +rustc-args = ["--cfg", "docsrs"] +version = "v{{version}}" + +[package.metadata.release] +no-dev-version = true +tag-name = "{{version}}" diff --git a/models/snn/build.rs b/models/snn/build.rs new file mode 100644 index 00000000..940a4ce4 --- /dev/null +++ b/models/snn/build.rs @@ -0,0 +1,8 @@ +/* + Appellation: build + Contrib: FL03 +*/ + +fn main() { + println!("cargo::rustc-check-cfg=cfg(no_std)"); +} diff --git a/models/snn/src/lib.rs b/models/snn/src/lib.rs new file mode 100644 index 00000000..e1449010 --- /dev/null +++ b/models/snn/src/lib.rs @@ -0,0 +1,51 @@ +/* + appellation: concision-snn + authors: @FL03 +*/ +//! Spiking neural networks (SNNs) for the [`concision`](https://crates.io/crates/concision) machine learning framework. +//! +//! ## References +//! +//! - [Deep Learning in Spiking Neural Networks](https://arxiv.org/abs/1804.08150) +//! +#![crate_type = "lib"] +#![cfg_attr(not(feature = "std"), no_std)] +#![allow(clippy::module_inception)] + +#[cfg(feature = "alloc")] +extern crate alloc; +extern crate concision as cnc; + +#[cfg(not(any(feature = "std", feature = "alloc")))] +compiler_error! { + "Either feature \"std\" or feature \"alloc\" must be enabled." +} + +#[doc(inline)] +pub use self::{model::*, neuron::*, types::*}; + +pub mod model; +pub mod neuron; + +pub mod types { + //! 
Types for spiking neural networks + #[doc(inline)] + pub use self::prelude::*; + + mod event; + mod result; + + pub(crate) mod prelude { + pub use super::event::*; + pub use super::result::*; + } +} + +pub mod prelude { + #[doc(inline)] + pub use crate::model::*; + #[doc(inline)] + pub use crate::neuron::*; + #[doc(inline)] + pub use crate::types::*; +} diff --git a/models/snn/src/model.rs b/models/snn/src/model.rs new file mode 100644 index 00000000..4b03fc48 --- /dev/null +++ b/models/snn/src/model.rs @@ -0,0 +1,120 @@ +/* + appellation: model + authors: @FL03 +*/ + +use cnc::nn::{DeepModelParams, Model, ModelFeatures, StandardModelConfig}; +#[cfg(feature = "rand")] +use cnc::rand_distr; + +use num_traits::{Float, FromPrimitive}; + +#[derive(Clone, Debug)] +pub struct SpikingNeuralNetwork { + pub config: StandardModelConfig, + pub features: ModelFeatures, + pub params: DeepModelParams, +} + +impl SpikingNeuralNetwork +where + T: Float + FromPrimitive, +{ + pub fn new(config: StandardModelConfig, features: ModelFeatures) -> Self + where + T: Clone + Default, + { + let params = DeepModelParams::default(features); + SpikingNeuralNetwork { + config, + features, + params, + } + } + #[cfg(feature = "rand")] + pub fn init(self) -> Self + where + T: Float + FromPrimitive, + rand_distr::StandardNormal: rand_distr::Distribution, + { + let params = DeepModelParams::glorot_normal(self.features()); + SpikingNeuralNetwork { params, ..self } + } + /// returns a reference to the model configuration + pub const fn config(&self) -> &StandardModelConfig { + &self.config + } + /// returns a mutable reference to the model configuration + pub const fn config_mut(&mut self) -> &mut StandardModelConfig { + &mut self.config + } + /// returns the model features + pub const fn features(&self) -> ModelFeatures { + self.features + } + /// returns a mutable reference to the model features + pub const fn features_mut(&mut self) -> &mut ModelFeatures { + &mut self.features + } + /// returns a 
reference to the model parameters + pub const fn params(&self) -> &DeepModelParams { + &self.params + } + /// returns a mutable reference to the model parameters + pub const fn params_mut(&mut self) -> &mut DeepModelParams { + &mut self.params + } + /// set the current configuration and return a mutable reference to the model + pub fn set_config(&mut self, config: StandardModelConfig) -> &mut Self { + self.config = config; + self + } + /// set the current features and return a mutable reference to the model + pub fn set_features(&mut self, features: ModelFeatures) -> &mut Self { + self.features = features; + self + } + /// set the current parameters and return a mutable reference to the model + pub fn set_params(&mut self, params: DeepModelParams) -> &mut Self { + self.params = params; + self + } + /// consumes the current instance to create another with the given configuration + pub fn with_config(self, config: StandardModelConfig) -> Self { + Self { config, ..self } + } + /// consumes the current instance to create another with the given features + pub fn with_features(self, features: ModelFeatures) -> Self { + Self { features, ..self } + } + /// consumes the current instance to create another with the given parameters + pub fn with_params(self, params: DeepModelParams) -> Self { + Self { params, ..self } + } +} + +impl Model for SpikingNeuralNetwork { + type Config = StandardModelConfig; + + type Layout = ModelFeatures; + + fn config(&self) -> &StandardModelConfig { + &self.config + } + + fn config_mut(&mut self) -> &mut StandardModelConfig { + &mut self.config + } + + fn layout(&self) -> ModelFeatures { + self.features + } + + fn params(&self) -> &DeepModelParams { + &self.params + } + + fn params_mut(&mut self) -> &mut DeepModelParams { + &mut self.params + } +} diff --git a/models/snn/src/neuron.rs b/models/snn/src/neuron.rs new file mode 100644 index 00000000..141a839a --- /dev/null +++ b/models/snn/src/neuron.rs @@ -0,0 +1,284 @@ +/* + Appellation: neuron + 
Created At: 2025.11.25:09:33:30 + Contrib: @FL03 +*/ +//! Single spiking neuron (LIF + adaptation + exponential synapse) example in pure Rust. +//! +//! Model (forward-Euler integration; units are arbitrary but consistent): +//! tau_m * dv/dt = -(v - v_rest) + R*(I_ext + I_syn) - w +//! tau_w * dw/dt = -w +//! tau_s * ds/dt = -s (+ instantaneous increments when presynaptic spikes arrive) +//! +//! Spike: when v >= v_thresh -> spike emitted, v <- v_reset, w += b +//! I_syn = s +//! +//! The implementation is conservative with allocations and idiomatic Rust. +use super::types::{StepResult, SynapticEvent}; + +/// Leaky Integrate-and-Fire neuron with an adaptation term and exponential synaptic current. +/// +/// All fields are public for convenience in research workflows; in production you may want to +/// expose read-only getters and safe setters only. +#[derive(Clone)] +pub struct SpikingNeuron { + // ---- Parameters ---- + /// Membrane time constant `tau_m` (ms) + pub tau_m: f64, + /// Membrane resistance `R` (MΩ or arbitrary) + pub resistance: f64, + /// Resting potential `v_rest` (mV) + pub v_rest: f64, + /// Threshold potential `v_thresh` (mV) + pub v_thresh: f64, + /// Reset potential after spike `v_reset` (mV) + pub v_reset: f64, + + /// Adaptation time constant `tau_w` (ms) + pub tau_w: f64, + /// Adaptation increment added on spike `b` (same units as w/current) + pub b: f64, + + /// Synaptic time constant `tau_s` (ms) + pub tau_s: f64, + + // ---- State variables ---- + /// Membrane potential `v` + pub v: f64, + /// Adaptation variable `w` + pub w: f64, + /// Synaptic variable `s` representing total synaptic current + pub s: f64, + + // ---- Optional numerical safeguards ---- + /// Minimum allowed dt for integration (ms) + pub min_dt: f64, +} + +impl SpikingNeuron { + /// Create a new neuron with common default parameters (units: ms and mV-like). 
+ /// + /// Many fields are set to common neuroscience-like defaults but these are research parameters + /// and should be tuned for your experiments. + pub fn new_default() -> Self { + let tau_m = 20.0; // ms + let resistance = 1.0; // arbitrary + let v_rest = -65.0; // mV + let v_thresh = -50.0; // mV + let v_reset = -65.0; // mV + let tau_w = 200.0; // ms (slow adaptation) + let b = 0.5; // adaptation increment + let tau_s = 5.0; // ms (fast synapse) + Self { + tau_m, + resistance, + v_rest, + v_thresh, + v_reset, + tau_w, + b, + tau_s, + v: v_rest, + w: 0.0, + s: 0.0, + min_dt: 1e-6, + } + } + + /// Create a neuron with explicit parameters and initial state. + pub fn new( + tau_m: f64, + resistance: f64, + v_rest: f64, + v_thresh: f64, + v_reset: f64, + tau_w: f64, + b: f64, + tau_s: f64, + initial_v: Option, + ) -> Self { + let v0 = initial_v.unwrap_or(v_rest); + Self { + tau_m, + resistance, + v_rest, + v_thresh, + v_reset, + tau_w, + b, + tau_s, + v: v0, + w: 0.0, + s: 0.0, + min_dt: 1e-6, + } + } + + /// Reset state variables (keeps parameters). + pub fn reset(&mut self) { + self.v = self.v_rest; + self.w = 0.0; + self.s = 0.0; + } + + /// Apply a presynaptic spike event to the neuron; this increments the synaptic variable `s` + /// by `weight` instantaneously (models delta spike arrival). + pub fn receive_spike(&mut self, weight: f64) { + self.s += weight; + } + + /// Integrate the neuron state forward by `dt` milliseconds using forward Euler. + /// + /// `i_ext` is an externally injected current (same units as `s`). + /// `dt` must be > 0. 
+ pub fn step(&mut self, dt: f64, i_ext: f64) -> StepResult { + let dt = if dt <= 0.0 { + panic!("dt must be > 0") + } else { + dt.max(self.min_dt) + }; + + // synaptic current is represented by `s` + // ds/dt = -s / tau_s + let ds = -self.s / self.tau_s; + let s_next = self.s + dt * ds; + + // total synaptic current for this step (use current s, or average between s and s_next) + // we use s for explicit Euler consistency. + let i_syn = self.s; + + // membrane dv/dt = (-(v - v_rest) + R*(i_ext + i_syn) - w) / tau_m + let dv = + (-(self.v - self.v_rest) + self.resistance * (i_ext + i_syn) - self.w) / self.tau_m; + let v_next = self.v + dt * dv; + + // adaptation dw/dt = -w / tau_w + let dw = -self.w / self.tau_w; + let w_next = self.w + dt * dw; + + // Commit state tentatively + self.v = v_next; + self.w = w_next; + self.s = s_next; + + // Check for spike (simple threshold crossing) + if self.v >= self.v_thresh { + // spike: apply reset and adaptation increment + self.v = self.v_reset; + self.w += self.b; + StepResult { + spiked: true, + v: self.v, + } + } else { + StepResult { + spiked: false, + v: self.v, + } + } + } + + /// Get current membrane potential + pub fn membrane_potential(&self) -> f64 { + self.v + } + + /// Get current synaptic variable + pub fn synaptic_state(&self) -> f64 { + self.s + } + + /// Get adaptation variable + pub fn adaptation(&self) -> f64 { + self.w + } +} + +impl Default for SpikingNeuron { + fn default() -> Self { + let tau_m = 20.0; // ms + let resistance = 1.0; // arbitrary + let v_rest = -65.0; // mV + let v_thresh = -50.0; // mV + let v_reset = -65.0; // mV + let tau_w = 200.0; // ms (slow adaptation) + let b = 0.5; // adaptation increment + let tau_s = 5.0; // ms (fast synapse) + Self { + tau_m, + resistance, + v_rest, + v_thresh, + v_reset, + tau_w, + b, + tau_s, + v: v_rest, + w: 0.0, + s: 0.0, + min_dt: 1e-6, + } + } +} + +#[allow(dead_code)] +/// Minimal demonstration of neuron usage. 
Simulates a neuron for `t_sim` ms with dt, +/// injects a constant external current `i_ext`, and injects discrete synaptic events at specified times. +fn example() { + // Simulation parameters + let dt = 0.1; // ms + let t_sim = 500.0; // ms + let steps = (t_sim / dt) as usize; + + // Create neuron with defaults + let mut neuron = SpikingNeuron::new_default(); + + // Example external current (constant) + let i_ext = 1.8; // tune to see spiking (units consistent with resistance & s) + + // Example presynaptic spike times (ms) and weights + let presyn_spikes: Vec<(f64, f64)> = + vec![(50.0, 2.0), (100.0, 1.5), (150.0, 2.2), (300.0, 3.0)]; + + // Convert into an index-able event list + let mut events: Vec> = vec![Vec::new(); steps + 1]; + for (t_spike, weight) in presyn_spikes { + let idx = (t_spike / dt).round() as isize; + if idx >= 0 && (idx as usize) < events.len() { + events[idx as usize].push(SynapticEvent { weight }); + } + } + + // Simulation loop + let mut spike_times: Vec = Vec::new(); + for step in 0..steps { + let t = step as f64 * dt; + + // deliver presynaptic events scheduled for this time step + for ev in &events[step] { + neuron.receive_spike(ev.weight); + } + + // step the neuron + let res = neuron.step(dt, i_ext); + + if res.spiked { + spike_times.push(t); + // For debugging: print spike time + println!("Spike at {:.3} ms (v reset = {:.3})", t, neuron.v); + } + + // optionally, record v, w, s for analysis (omitted here for brevity) + let _v = neuron.membrane_potential(); + let _w = neuron.adaptation(); + let _s = neuron.synaptic_state(); + + // small example of printing membrane potential every 50 ms + if step % ((50.0 / dt) as usize) == 0 { + println!("t={:.1} ms, v={:.3} mV, w={:.3}, s={:.3}", t, _v, _w, _s); + } + } + + println!("Total spikes: {}", spike_times.len()); + println!("Spike times: {:?}", spike_times); +} diff --git a/models/snn/src/types/event.rs b/models/snn/src/types/event.rs new file mode 100644 index 00000000..b731ab78 --- 
/dev/null +++ b/models/snn/src/types/event.rs @@ -0,0 +1,21 @@ +/* + Appellation: event + Created At: 2025.11.25:09:25:50 + Contrib: @FL03 +*/ + +/// A simple synaptic event: weight added to synaptic variable `s` when it arrives. + +#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] +#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] +pub struct SynapticEvent { + /// instantaneous weight added to synaptic variable `s`. + pub weight: T, +} + +impl SynapticEvent { + /// Create a new SynapticEvent + pub const fn new(weight: T) -> Self { + Self { weight } + } +} diff --git a/models/snn/src/types/result.rs b/models/snn/src/types/result.rs new file mode 100644 index 00000000..e1a5779b --- /dev/null +++ b/models/snn/src/types/result.rs @@ -0,0 +1,38 @@ +/* + Appellation: result + Created At: 2025.11.25:09:21:16 + Contrib: @FL03 +*/ + +/// Result of a single integration step. +#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] +#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] +pub struct StepResult { + /// Whether the neuron emitted a spike on this step. + pub(crate) spiked: bool, + /// The membrane potential after the step (mV or arbitrary units). 
+ pub(crate) v: T, +} + +impl StepResult { + /// returns a new instance of the `StepResult` + pub const fn new(spiked: bool, v: T) -> Self { + Self { spiked, v } + } + + pub const fn spiked(v: T) -> Self { + Self { spiked: true, v } + } + + pub const fn not_spiked(v: T) -> Self { + Self { spiked: false, v } + } + + pub const fn is_spiked(&self) -> bool { + self.spiked + } + /// returns a reference to the membrane potential (`v`) + pub const fn membrane_potential(&self) -> &T { + &self.v + } +} diff --git a/models/snn/tests/neurons.rs b/models/snn/tests/neurons.rs new file mode 100644 index 00000000..8cce7427 --- /dev/null +++ b/models/snn/tests/neurons.rs @@ -0,0 +1,45 @@ +/* + Appellation: neurons + Created At: 2025.11.25:21:34:30 + Contrib: @FL03 +*/ +use concision_snn::SpikingNeuron; + +#[test] +fn test_snn_neuron_resting_no_input() { + let mut n = SpikingNeuron::new_default(); + let dt = 1.0; + // simulate 100 ms with no input -> should not spike and v near v_rest + for _ in 0..100 { + let res = n.step(dt, 0.0); + assert!(!res.is_spiked()); + } + let v = n.membrane_potential(); + assert!((v - n.v_rest).abs() < 1e-6 || (v - n.v_rest).abs() < 1e-2); +} + +#[test] +fn test_receive_spike_increases_synaptic_state() { + let mut n = SpikingNeuron::new_default(); + let before = n.synaptic_state(); + n.receive_spike(2.5); + assert!(n.synaptic_state() > before); +} + +#[test] +#[ignore = "Need to fix"] +fn test_spiking_with_sufficient_input() { + // params + let dt: f64 = 0.1; + let i_ext: f64 = 5.0; // large i_ext to force spiking + // neuron + let mut n = SpikingNeuron::new_default(); + let mut spiked = false; + let mut steps = 0_usize; + // apply strong constant external current for a while + while !spiked && steps < 1000 { + spiked = n.step(dt, i_ext).is_spiked(); + steps += 1; + } + assert!(spiked, "Neuron did not spike under strong current"); +} diff --git a/models/transformer/Cargo.toml b/models/transformer/Cargo.toml index 4258b35b..cf7797cc 100644 --- 
a/models/transformer/Cargo.toml +++ b/models/transformer/Cargo.toml @@ -25,20 +25,14 @@ no-dev-version = true tag-name = "{{version}}" [lib] -crate-type = [ - "cdylib", - "rlib", -] bench = false +crate-type = ["cdylib", "rlib"] doc = true doctest = true test = true [dependencies] -# sdk concision = { features = ["neural"], workspace = true } -# error -anyhow = { optional = true, workspace = true } # mathematics approx = { optional = true, workspace = true } ndarray = { workspace = true } @@ -46,64 +40,30 @@ num-traits = { workspace = true } # logging tracing = { optional = true, workspace = true } +[dev-dependencies] +anyhow = { features = ["std"], workspace = true } + [features] -default = [ - "std", -] +default = ["std"] -full = [ - "default", - "rand", - "serde", - "tracing" -] +full = ["default", "rand", "serde", "tracing"] # ************* [FF:Environments] ************* -std = [ - "concision/std", - "ndarray/std", - "num-traits/std", - "tracing/std", -] +std = ["concision/std", "ndarray/std", "num-traits/std", "tracing/std"] -wasi = [ - "concision/wasi", -] +wasi = ["concision/wasi"] -wasm = [ - "concision/wasm", -] +wasm = ["concision/wasm"] # ************* [FF:Dependencies] ************* -anyhow = [ - "dep:anyhow", - "concision/anyhow", -] -approx = [ - "dep:approx", - "concision/approx", - "ndarray/approx", -] - -blas = [ - "concision/blas", - "ndarray/blas" -] - -rand = [ - "concision/rand", -] - -rayon = [ - "concision/rayon", - "ndarray/rayon" -] - -serde = [ - "concision/serde", -] - -tracing = [ - "dep:tracing", - "concision/tracing", -] +approx = ["concision/approx", "dep:approx", "ndarray/approx"] + +blas = ["concision/blas", "ndarray/blas"] + +rand = ["concision/rand"] + +rayon = ["concision/rayon", "ndarray/rayon"] + +serde = ["concision/serde"] + +tracing = ["concision/tracing", "dep:tracing"] diff --git a/models/transformer/src/model.rs b/models/transformer/src/model.rs index bf3cf79e..5612527e 100644 --- a/models/transformer/src/model.rs +++ 
b/models/transformer/src/model.rs @@ -1,9 +1,9 @@ /* - Appellation: transformer + Appellation: transformer Contrib: @FL03 */ -use cnc::nn::{DeepModelParams, Model, ModelError, ModelFeatures, StandardModelConfig, Train}; +use cnc::nn::{DeepModelParams, Model, ModelFeatures, NeuralError, StandardModelConfig, Train}; #[cfg(feature = "rand")] use cnc::rand_distr; use cnc::{Forward, Norm, Params, ReLU, Sigmoid}; @@ -128,7 +128,7 @@ where { type Output = V; - fn forward(&self, input: &U) -> cnc::Result { + fn forward(&self, input: &U) -> Option { let mut output = self.params().input().forward_then(&input, |y| y.relu())?; for layer in self.params().hidden() { @@ -139,7 +139,7 @@ where .params() .output() .forward_then(&output, |y| y.sigmoid())?; - Ok(y) + Some(y) } } @@ -164,12 +164,12 @@ where &mut self, input: &ArrayBase, target: &ArrayBase, - ) -> Result { + ) -> Result { if input.len() != self.features().input() { - return Err(ModelError::InvalidInputShape); + return Err(NeuralError::InvalidInputShape); } if target.len() != self.features().output() { - return Err(ModelError::InvalidOutputShape); + return Err(NeuralError::InvalidOutputShape); } // get the learning rate from the model's configuration let lr = self @@ -186,15 +186,28 @@ where let mut activations = Vec::new(); activations.push(input.to_owned()); - let mut output = self.params().input().forward(&input)?.relu(); + let mut output = self + .params() + .input() + .forward(&input) + .expect("Output layer failed to forward propagate during training...") + .relu(); activations.push(output.to_owned()); // collect the activations of the hidden for layer in self.params().hidden() { - output = layer.forward(&output)?.relu(); + output = layer + .forward(&output) + .expect("Hidden layer failed to forward propagate during training...") + .relu(); activations.push(output.to_owned()); } - output = self.params().output().forward(&output)?.sigmoid(); + output = self + .params() + .output() + .forward(&output) + 
.expect("Input layer failed to forward propagate during training...") + .sigmoid(); activations.push(output.to_owned()); // Calculate output layer error @@ -208,7 +221,8 @@ where // Update output weights self.params_mut() .output_mut() - .backward(activations.last().unwrap(), &delta, lr)?; + .backward(activations.last().unwrap(), &delta, lr) + .expect("Backward propagation failed..."); let num_hidden = self.features().layers(); // Iterate through hidden layers in reverse order @@ -224,7 +238,9 @@ where }; // Normalize delta to prevent exploding gradients delta /= delta.l2_norm(); - self.params_mut().hidden_mut()[i].backward(&activations[i + 1], &delta, lr)?; + self.params_mut().hidden_mut()[i] + .backward(&activations[i + 1], &delta, lr) + .expect("Backward propagation failed..."); } /* Backpropagate to the input layer @@ -237,7 +253,8 @@ where delta /= delta.l2_norm(); // Normalize the delta to prevent exploding gradients self.params_mut() .input_mut() - .backward(&activations[1], &delta, lr)?; + .backward(&activations[1], &delta, lr) + .expect("Input layer backward pass failed"); Ok(loss) } @@ -265,15 +282,15 @@ where &mut self, input: &ArrayBase, target: &ArrayBase, - ) -> Result { + ) -> Result { if input.nrows() == 0 || target.nrows() == 0 { - return Err(ModelError::InvalidBatchSize); + return Err(NeuralError::InvalidBatchSize); } if input.ncols() != self.features().input() { - return Err(ModelError::InvalidInputShape); + return Err(NeuralError::InvalidInputShape); } if target.ncols() != self.features().output() || target.nrows() != input.nrows() { - return Err(ModelError::InvalidOutputShape); + return Err(NeuralError::InvalidOutputShape); } let mut loss = A::zero(); diff --git a/neural/Cargo.toml b/neural/Cargo.toml index 8d747e46..deb577ce 100644 --- a/neural/Cargo.toml +++ b/neural/Cargo.toml @@ -14,21 +14,8 @@ repository.workspace = true rust-version.workspace = true version.workspace = true -[package.metadata.docs.rs] -all-features = false -features = 
["full"] -rustc-args = ["--cfg", "docsrs"] -version = "v{{version}}" - -[package.metadata.release] -no-dev-version = true -tag-name = "{{version}}" - [lib] -crate-type = [ - "cdylib", - "rlib" -] +crate-type = ["cdylib", "rlib"] bench = false doc = true doctest = true @@ -36,10 +23,10 @@ test = true [dependencies] concision-core = { workspace = true } +concision-params = { workspace = true } concision-data = { workspace = true } # custom -scsys = { workspace = true } -scsys-derive = { workspace = true } +variants = { workspace = true } # concurrency & parallelism rayon = { optional = true, workspace = true } # data-structures @@ -64,165 +51,163 @@ paste = { workspace = true } smart-default = { workspace = true } strum = { workspace = true } # error handling -anyhow = { optional = true, workspace = true } thiserror = { workspace = true } # logging tracing = { optional = true, workspace = true } [dev-dependencies] +anyhow = { features = ["std"], workspace = true } lazy_static = { workspace = true } [features] -default = [ - "std", -] +default = ["std"] full = [ - "default", - "anyhow", - "approx", - "complex", - "init", - "rand", - "rustfft", - "serde", - "tracing" + "default", + "approx", + "complex", + "init", + "rand", + "rustfft", + "serde", + "tracing" ] nightly = [ - "concision-core/nightly", - "concision-data/nightly", + "concision-core/nightly", + "concision-data/nightly", + "concision-params/nightly", + "variants/nightly", ] # ************* [FF:Features] ************* -init = [ - "concision-core/init" -] +init = ["concision-core/init"] json = [ - "alloc", - "concision-core/json", - "concision-data/json", - "serde_json", + "alloc", + "concision-core/json", + "concision-data/json", + "concision-params/json", + "serde_json", ] # ************* [FF:Environments] ************* std = [ - "anyhow?/std", - "concision-core/std", - "concision-data/std", - "ndarray/std", - "num/std", - "num-traits/std", - "num-complex?/std", - "rand?/std", - "rand?/std_rng", - 
"scsys/std", - "serde?/std", - "serde_json?/std", - "strum/std", - "thiserror/std", - "tracing?/std", + "concision-core/std", + "concision-data/std", + "concision-params/std", + "ndarray/std", + "num/std", + "num-traits/std", + "num-complex?/std", + "rand?/std", + "rand?/std_rng", + "serde?/std", + "serde_json?/std", + "strum/std", + "thiserror/std", + "tracing?/std", + "variants/std", ] wasi = [ - "concision-core/wasi", - "concision-data/wasi", - "scsys/wasi", + "concision-core/wasi", + "concision-data/wasi", + "concision-params/wasi", ] wasm = [ - "concision-core/wasm", - "concision-data/wasm", - "getrandom?/wasm_js", - "rayon?/web_spin_lock", - "scsys/wasm", + "concision-core/wasm", + "concision-data/wasm", + "concision-params/wasm", + "getrandom?/wasm_js", + "rayon?/web_spin_lock", ] # ************* [FF:Dependencies] ************* alloc = [ - "concision-core/alloc", - "concision-data/alloc", - "serde?/alloc", - "serde_json?/alloc", - "num/alloc", - "scsys/alloc", -] - -anyhow = [ - "dep:anyhow", - "concision-core/anyhow", - "concision-data/anyhow", - "scsys/anyhow", + "concision-core/alloc", + "concision-data/alloc", + "serde?/alloc", + "serde_json?/alloc", + "num/alloc", + "variants/alloc", ] approx = [ - "concision-core/approx", - "concision-data/approx", - "dep:approx", - "ndarray/approx", + "concision-core/approx", + "concision-data/approx", + "dep:approx", + "ndarray/approx", ] blas = [ - "concision-core/blas", - "concision-data/blas", - "ndarray/blas" + "concision-core/blas", + "concision-data/blas", + "ndarray/blas" ] -complex = [ - "dep:num-complex", - "concision-core/complex", - "concision-data/complex", +complex = [ + "dep:num-complex", + "concision-core/complex", + "concision-data/complex", ] rand = [ - "dep:rand", - "dep:rand_distr", - "concision-core/rand", - "concision-data/rand", - "num/rand", - "rng", - "scsys/rand", + "dep:rand", + "dep:rand_distr", + "concision-core/rand", + "concision-data/rand", + "num/rand", + "rng", ] rng = [ - 
"concision-core/rng", - "concision-data/rng", - "scsys/rng", + "concision-core/rng", + "concision-data/rng", ] rayon = [ - "concision-core/rayon", - "concision-data/rayon", - "dep:rayon", - "ndarray/rayon" + "concision-core/rayon", + "concision-data/rayon", + "dep:rayon", + "ndarray/rayon" ] serde = [ - "concision-core/serde", - "concision-data/serde", - "dep:serde", - "dep:serde_derive", - "ndarray/serde", - "num/serde", - "scsys/serde", + "concision-core/serde", + "concision-data/serde", + "dep:serde", + "dep:serde_derive", + "ndarray/serde", + "num/serde", ] serde_json = ["dep:serde_json"] tracing = [ - "dep:tracing", - "concision-core/tracing", - "concision-data/tracing", - "scsys/tracing", + "dep:tracing", + "concision-core/tracing", + "concision-data/tracing", ] +# ********* [Metadata] ********* +[package.metadata.docs.rs] +all-features = false +features = ["full"] +rustc-args = ["--cfg", "docsrs"] +version = "v{{version}}" + +[package.metadata.release] +no-dev-version = true +tag-name = "{{version}}" + # ************* [Unit Tests] ************* [[test]] name = "default" [[test]] name = "layers" -required-features = ["alloc", "anyhow"] +required-features = ["alloc"] [[test]] name = "masks" diff --git a/neural/src/config/traits/config.rs b/neural/src/config/traits/config.rs index 6e74c29b..c8f2c136 100644 --- a/neural/src/config/traits/config.rs +++ b/neural/src/config/traits/config.rs @@ -1,79 +1,79 @@ -/* - Appellation: config - Contrib: @FL03 -*/ - -/// The [`RawConfig`] trait defines a basic interface for all _configurations_ used within the -/// framework for neural networks, their layers, and more. -pub trait RawConfig { - type Ctx; -} - -/// The [`NetworkConfig`] trait extends the [`RawConfig`] trait to provide a more robust -/// interface for neural network configurations. 
-pub trait NetworkConfig: RawConfig { - fn get(&self, key: K) -> Option<&T> - where - K: AsRef; - fn get_mut(&mut self, key: K) -> Option<&mut T> - where - K: AsRef; - - fn set(&mut self, key: K, value: T) -> Option - where - K: AsRef; - fn remove(&mut self, key: K) -> Option - where - K: AsRef; - fn contains(&self, key: K) -> bool - where - K: AsRef; - - fn keys(&self) -> Vec; -} - -macro_rules! hyperparam_method { - (@dyn $name:ident: $type:ty) => { - fn $name(&self) -> Option<&$type> where T: 'static { - self.get(stringify!($name)).map(|v| v.downcast_ref::<$type>()).flatten() - } - }; - (@impl $name:ident: $type:ty) => { - fn $name(&self) -> Option<&$type> { - self.get(stringify!($name)) - } - }; - (#[dyn] $($name:ident $type:ty),* $(,)?) => { - $( - hyperparam_method!(@dyn $name: $type); - )* - }; - ($($name:ident $type:ty),* $(,)?) => { - $( - hyperparam_method!(@impl $name: $type); - )* - }; -} - -pub trait TrainingConfiguration: NetworkConfig { - fn epochs(&self) -> usize; - - fn batch_size(&self) -> usize; - - hyperparam_method! { - learning_rate T, - momentum T, - weight_decay T, - dropout T, - decay T, - beta1 T, - beta2 T, - epsilon T, - gradient_clip T, - - } -} - -/* - ************* Implementations ************* -*/ +/* + Appellation: config + Contrib: @FL03 +*/ + +/// The [`RawConfig`] trait defines a basic interface for all _configurations_ used within the +/// framework for neural networks, their layers, and more. +pub trait RawConfig { + type Ctx; +} + +/// The [`NetworkConfig`] trait extends the [`RawConfig`] trait to provide a more robust +/// interface for neural network configurations. 
+pub trait NetworkConfig: RawConfig { + fn get(&self, key: K) -> Option<&T> + where + K: AsRef; + fn get_mut(&mut self, key: K) -> Option<&mut T> + where + K: AsRef; + + fn set(&mut self, key: K, value: T) -> Option + where + K: AsRef; + fn remove(&mut self, key: K) -> Option + where + K: AsRef; + fn contains(&self, key: K) -> bool + where + K: AsRef; + + fn keys(&self) -> Vec; +} + +macro_rules! hyperparam_method { + (@dyn $name:ident: $type:ty) => { + fn $name(&self) -> Option<&$type> where T: 'static { + self.get(stringify!($name)).map(|v| v.downcast_ref::<$type>()).flatten() + } + }; + (@impl $name:ident: $type:ty) => { + fn $name(&self) -> Option<&$type> { + self.get(stringify!($name)) + } + }; + (#[dyn] $($name:ident $type:ty),* $(,)?) => { + $( + hyperparam_method!(@dyn $name: $type); + )* + }; + ($($name:ident $type:ty),* $(,)?) => { + $( + hyperparam_method!(@impl $name: $type); + )* + }; +} + +pub trait TrainingConfiguration: NetworkConfig { + fn epochs(&self) -> usize; + + fn batch_size(&self) -> usize; + + hyperparam_method! 
{ + learning_rate T, + momentum T, + weight_decay T, + dropout T, + decay T, + beta1 T, + beta2 T, + epsilon T, + gradient_clip T, + + } +} + +/* + ************* Implementations ************* +*/ diff --git a/neural/src/config/types/hyper_params.rs b/neural/src/config/types/hyper_params.rs index 597348d6..f07b201f 100644 --- a/neural/src/config/types/hyper_params.rs +++ b/neural/src/config/types/hyper_params.rs @@ -28,7 +28,6 @@ use crate::types::KeyValue; Hash, Ord, PartialOrd, - scsys::VariantConstructors, strum::AsRefStr, strum::Display, strum::EnumCount, @@ -37,6 +36,7 @@ use crate::types::KeyValue; strum::EnumString, strum::VariantArray, strum::VariantNames, + variants::VariantConstructors, ), strum(serialize_all = "snake_case") )] diff --git a/neural/src/error.rs b/neural/src/error.rs index 53f416f2..0906fd70 100644 --- a/neural/src/error.rs +++ b/neural/src/error.rs @@ -10,9 +10,15 @@ #[cfg(feature = "alloc")] use alloc::{boxed::Box, string::String}; +#[allow(deprecated)] +#[deprecated(since = "0.2.8", note = "use `NeuralResult` instead")] +pub type ModelResult = core::result::Result; +#[deprecated(since = "0.2.8", note = "use `NeuralError` instead")] +pub type ModelError = NeuralError; + /// a type alias for a [Result](core::result::Result) configured to use the [`ModelError`] /// implementation as its error type. -pub type ModelResult = core::result::Result; +pub type NeuralResult = core::result::Result; /// The [`ModelError`] type is used to define the various errors encountered by the different /// components of a neural network. It is designed to be comprehensive, covering a wide range of @@ -21,9 +27,9 @@ pub type ModelResult = core::result::Result; /// intended to provide a clear and consistent way to handle errors across the neural network /// components, making it easier to debug and resolve issues that may occur during the development /// and execution of neural network models. 
-#[derive(Debug, scsys::VariantConstructors, thiserror::Error)] +#[derive(Debug, variants::VariantConstructors, thiserror::Error)] #[non_exhaustive] -pub enum ModelError { +pub enum NeuralError { /// The model is not initialized #[error("The model is not initialized")] NotInitialized, @@ -56,30 +62,30 @@ pub enum ModelError { ParameterError(String), } -impl From for concision_core::error::Error { - fn from(err: ModelError) -> Self { +impl From for concision_core::error::Error { + fn from(err: NeuralError) -> Self { match err { - ModelError::CoreError(e) => e, + NeuralError::CoreError(e) => e, _ => concision_core::error::Error::box_error(err), } } } #[cfg(feature = "alloc")] -impl From> for ModelError { +impl From> for NeuralError { fn from(err: Box) -> Self { cnc::Error::BoxError(err).into() } } #[cfg(feature = "alloc")] -impl From for ModelError { +impl From for NeuralError { fn from(err: String) -> Self { cnc::Error::unknown(err).into() } } #[cfg(feature = "alloc")] -impl From<&str> for ModelError { +impl From<&str> for NeuralError { fn from(err: &str) -> Self { cnc::Error::unknown(err).into() } diff --git a/neural/src/layers/layer.rs b/neural/src/layers/layer.rs index cb1bdcc4..af4f80c1 100644 --- a/neural/src/layers/layer.rs +++ b/neural/src/layers/layer.rs @@ -102,10 +102,11 @@ where params: self.params, } } - pub fn forward(&self, input: &X) -> cnc::Result + pub fn forward(&self, input: &X) -> Option where - F: Activator< as Forward>::Output, Output = Y>, - ParamsBase: Forward, + ParamsBase: Forward, + F: Activator< as Forward>::Output, Output = Y>, + A: Clone, X: Clone, Y: Clone, { diff --git a/neural/src/layers/layer/impl_layer.rs b/neural/src/layers/layer/impl_layer.rs index 2591ad36..bcff4504 100644 --- a/neural/src/layers/layer/impl_layer.rs +++ b/neural/src/layers/layer/impl_layer.rs @@ -40,10 +40,13 @@ where { type Output = Y; - fn forward(&self, inputs: &X) -> cnc::Result { - let y = self.params().forward(inputs)?; + fn forward(&self, inputs: &X) -> 
Option { + let y = self + .params() + .forward(inputs) + .expect("Forward pass failed in LayerBase"); - Ok(self.rho().activate(y)) + Some(self.rho().activate(y)) } } diff --git a/neural/src/layers/traits/layers.rs b/neural/src/layers/traits/layers.rs index 5048847a..17f73dba 100644 --- a/neural/src/layers/traits/layers.rs +++ b/neural/src/layers/traits/layers.rs @@ -5,7 +5,6 @@ use super::{Activator, ActivatorGradient}; use cnc::params::ParamsBase; -use cnc::tensor::NdTensor; use cnc::{Backward, Forward}; use ndarray::{Data, Dimension, RawData}; @@ -40,7 +39,7 @@ where *self.params_mut() = params; } /// backward propagate error through the layer - fn backward(&mut self, input: X, error: Y, gamma: Self::Elem) -> cnc::Result + fn backward(&mut self, input: X, error: Y, gamma: Self::Elem) -> Option where S: Data, Self: ActivatorGradient, @@ -53,9 +52,8 @@ where self.params_mut().backward(&input, &delta, gamma) } /// complete a forward pass through the layer - fn forward(&self, input: &X) -> cnc::Result + fn forward(&self, input: &X) -> Option where - Y: NdTensor, ParamsBase: Forward, Self: Activator, { diff --git a/neural/src/layout/features.rs b/neural/src/layout/features.rs index 27843211..dcc4d232 100644 --- a/neural/src/layout/features.rs +++ b/neural/src/layout/features.rs @@ -40,7 +40,16 @@ pub struct ModelFeatures { } impl ModelFeatures { - /// creates a new instance of [`ModelFeatures`] for a deep neurao network, using the given + /// creates a new instance of [`ModelFeatures`] for a neural network with `n` layers. If + /// the number of layers is `<=1` then the [`ModelFormat`] is automatically + /// configured as a _shallow_ neural network. 
+ pub const fn new(input: usize, hidden: usize, output: usize, layers: usize) -> Self { + match layers { + 0 | 1 => Self::shallow(input, hidden, output), + _ => Self::deep(input, hidden, output, layers), + } + } + /// creates a new instance of [`ModelFeatures`] for a deep neural network, using the given /// input, hidden, and output features with the given number of hidden layers pub const fn deep(input: usize, hidden: usize, output: usize, layers: usize) -> Self { Self { diff --git a/neural/src/layout/traits/layout.rs b/neural/src/layout/traits/layout.rs index 7a02c764..eb2c5638 100644 --- a/neural/src/layout/traits/layout.rs +++ b/neural/src/layout/traits/layout.rs @@ -1,82 +1,82 @@ -/* - appellation: layout - authors: @FL03 -*/ - -/// The [`ModelLayout`] trait defines an interface for object capable of representing the -/// _layout_; i.e. the number of input, hidden, and output features of a neural network model -/// containing some number of hidden layers. -pub trait ModelLayout: Copy + core::fmt::Debug { - /// returns a copy of the input features for the model - fn input(&self) -> usize; - /// returns a mutable reference to the input features for the model - fn input_mut(&mut self) -> &mut usize; - /// returns a copy of the hidden features for the model - fn hidden(&self) -> usize; - /// returns a mutable reference to the hidden features for the model - fn hidden_mut(&mut self) -> &mut usize; - /// returns a copy of the number of hidden layers for the model - fn layers(&self) -> usize; - /// returns a mutable reference to the number of hidden layers for the model - fn layers_mut(&mut self) -> &mut usize; - /// returns a copy of the output features for the model - fn output(&self) -> usize; - /// returns a mutable reference to the output features for the model - fn output_mut(&mut self) -> &mut usize; - #[inline] - /// update the number of input features for the model and return a mutable reference to the - /// current layout. 
- fn set_input(&mut self, input: usize) -> &mut Self { - *self.input_mut() = input; - self - } - #[inline] - /// update the number of hidden features for the model and return a mutable reference to - /// the current layout. - fn set_hidden(&mut self, hidden: usize) -> &mut Self { - *self.hidden_mut() = hidden; - self - } - #[inline] - /// update the number of hidden layers for the model and return a mutable reference to - /// the current layout. - fn set_layers(&mut self, layers: usize) -> &mut Self { - *self.layers_mut() = layers; - self - } - #[inline] - /// update the number of output features for the model and return a mutable reference to - /// the current layout. - fn set_output(&mut self, output: usize) -> &mut Self { - *self.output_mut() = output; - self - } - /// the dimension of the input layer; (input, hidden) - fn dim_input(&self) -> (usize, usize) { - (self.input(), self.hidden()) - } - /// the dimension of the hidden layers; (hidden, hidden) - fn dim_hidden(&self) -> (usize, usize) { - (self.hidden(), self.hidden()) - } - /// the dimension of the output layer; (hidden, output) - fn dim_output(&self) -> (usize, usize) { - (self.hidden(), self.output()) - } - /// the total number of parameters in the model - fn size(&self) -> usize { - self.size_input() + self.size_hidden() + self.size_output() - } - /// the total number of input parameters in the model - fn size_input(&self) -> usize { - self.input() * self.hidden() - } - /// the total number of hidden parameters in the model - fn size_hidden(&self) -> usize { - self.hidden() * self.hidden() * self.layers() - } - /// the total number of output parameters in the model - fn size_output(&self) -> usize { - self.hidden() * self.output() - } -} +/* + appellation: layout + authors: @FL03 +*/ + +/// The [`ModelLayout`] trait defines an interface for object capable of representing the +/// _layout_; i.e. 
the number of input, hidden, and output features of a neural network model +/// containing some number of hidden layers. +pub trait ModelLayout: Copy + core::fmt::Debug { + /// returns a copy of the number of input features for the model + fn input(&self) -> usize; + /// returns a mutable reference to number of the input features for the model + fn input_mut(&mut self) -> &mut usize; + /// returns a copy of the number of hidden features for the model + fn hidden(&self) -> usize; + /// returns a mutable reference to the number of hidden features for the model + fn hidden_mut(&mut self) -> &mut usize; + /// returns a copy of the number of hidden layers for the model + fn layers(&self) -> usize; + /// returns a mutable reference to the number of hidden layers for the model + fn layers_mut(&mut self) -> &mut usize; + /// returns a copy of the output features for the model + fn output(&self) -> usize; + /// returns a mutable reference to the output features for the model + fn output_mut(&mut self) -> &mut usize; + #[inline] + /// update the number of input features for the model and return a mutable reference to the + /// current layout. + fn set_input(&mut self, input: usize) -> &mut Self { + *self.input_mut() = input; + self + } + #[inline] + /// update the number of hidden features for the model and return a mutable reference to + /// the current layout. + fn set_hidden(&mut self, hidden: usize) -> &mut Self { + *self.hidden_mut() = hidden; + self + } + #[inline] + /// update the number of hidden layers for the model and return a mutable reference to + /// the current layout. + fn set_layers(&mut self, layers: usize) -> &mut Self { + *self.layers_mut() = layers; + self + } + #[inline] + /// update the number of output features for the model and return a mutable reference to + /// the current layout. 
+ fn set_output(&mut self, output: usize) -> &mut Self { + *self.output_mut() = output; + self + } + /// the dimension of the input layer; (input, hidden) + fn dim_input(&self) -> (usize, usize) { + (self.input(), self.hidden()) + } + /// the dimension of the hidden layers; (hidden, hidden) + fn dim_hidden(&self) -> (usize, usize) { + (self.hidden(), self.hidden()) + } + /// the dimension of the output layer; (hidden, output) + fn dim_output(&self) -> (usize, usize) { + (self.hidden(), self.output()) + } + /// the total number of parameters in the model + fn size(&self) -> usize { + self.size_input() + self.size_hidden() + self.size_output() + } + /// the total number of input parameters in the model + fn size_input(&self) -> usize { + self.input() * self.hidden() + } + /// the total number of hidden parameters in the model + fn size_hidden(&self) -> usize { + self.hidden() * self.hidden() * self.layers() + } + /// the total number of output parameters in the model + fn size_output(&self) -> usize { + self.hidden() * self.output() + } +} diff --git a/neural/src/lib.rs b/neural/src/lib.rs index 7a3ee8d4..b3170da2 100644 --- a/neural/src/lib.rs +++ b/neural/src/lib.rs @@ -1,134 +1,137 @@ -/* - Appellation: concision-neural - Contrib: @FL03 -*/ -//! Various components, implementations, and traits for creating neural networks. The crate -//! builds off of the [`concision_core`] crate, making extensive use of the [`ParamsBase`](cnc::ParamsBase) -//! type to define the parameters of layers within a network. -//! -//! ## Overview -//! -//! Neural networks are a fundamental part of machine learning, and this crate provides a -//! comprehensive set of tools to build, configure, and train neural network models. Listed -//! below are several key components of the crate: -//! -//! - [`Model`]: A trait for defining a neural network model. -//! - [`StandardModelConfig`]: A standard configuration for the models -//! 
- [`ModelFeatures`]: A default implementation of the [`ModelLayout`] trait that -//! sufficiently defines both shallow and deep neural networks. -//! -//! ### _Model Parameters_ -//! -//! Additionally, the crate defines a sequential -//! -//! **Note**: You should stick with the type aliases for the [`ModelParamsBase`] type, as they -//! drastically simplify the type-face of the model parameters. Attempting to generalize over -//! the hidden layers of the model might lead to excessive complexity. That being said, there -//! are provided methods and routines to convert from a shallow to deep model, and vice versa. -//! -//! - [`DeepModelParams`]: An owned representation of the [`ModelParamsBase`] for deep -//! neural networks. -//! - [`ShallowModelParams`]: An owned representation of the [`ModelParamsBase`] for shallow -//! neural networks. -//! -//! ### Traits -//! -//! This crate extends the [`Forward`](cnc::Forward) and [`Backward`](cnc::Backward) traits -//! from the [`core`](cnc) crate to provide additional functionality for neural networks. -//! -//! - [`Predict`]: A more robust implementation of the [`Forward`] trait -//! - [`Train`]: A trait for training a neural network model. -//! -#![cfg_attr(not(feature = "std"), no_std)] -#![allow( - clippy::missing_saftey_doc, - clippy::module_inception, - clippy::needless_doctest_main, - clippy::upper_case_acronyms -)] -// ensure that either `std` or `alloc` feature is enabled -#[cfg(not(any(feature = "std", feature = "alloc")))] -compile_error! { - "At least one of the 'std' or 'alloc' features must be enabled." 
-} - -extern crate concision_core as cnc; - -#[cfg(feature = "alloc")] -extern crate alloc; - -#[doc(inline)] -pub use self::{ - config::prelude::*, - error::*, - layers::{Layer, LayerBase}, - layout::prelude::*, - params::prelude::*, - train::prelude::*, - traits::*, - types::*, -}; - -#[macro_use] -pub(crate) mod macros { - #[macro_use] - pub mod seal; -} - -pub mod config; -pub mod error; -pub mod layers; -pub mod layout; -pub mod params; -pub mod train; - -pub(crate) mod traits { - #[doc(inline)] - pub use self::prelude::*; - - mod hidden; - mod models; - mod predict; - - mod prelude { - #[doc(inline)] - pub use super::hidden::*; - #[doc(inline)] - pub use super::models::*; - #[doc(inline)] - pub use super::predict::*; - } -} - -pub(crate) mod types { - #[doc(inline)] - pub use self::prelude::*; - - mod dropout; - mod key_value; - - mod prelude { - #[doc(inline)] - pub use super::dropout::*; - #[doc(inline)] - pub use super::key_value::*; - } -} - -#[doc(hidden)] -pub mod prelude { - #[doc(no_inline)] - pub use super::config::prelude::*; - #[doc(hidden)] - pub use crate::layers::prelude::*; - #[doc(no_inline)] - pub use crate::layout::prelude::*; - #[doc(no_inline)] - pub use crate::params::prelude::*; - #[doc(no_inline)] - pub use crate::train::prelude::*; - #[doc(no_inline)] - pub use crate::traits::*; - #[doc(no_inline)] - pub use crate::types::*; -} +/* + Appellation: concision-neural + Contrib: @FL03 +*/ +//! Various components, implementations, and traits for creating neural networks. The crate +//! builds off of the [`concision_core`] crate, making extensive use of the [`ParamsBase`](cnc::ParamsBase) +//! type to define the parameters of layers within a network. +//! +//! ## Overview +//! +//! Neural networks are a fundamental part of machine learning, and this crate provides a +//! comprehensive set of tools to build, configure, and train neural network models. Listed +//! below are several key components of the crate: +//! +//! 
- [`Model`]: A trait for defining a neural network model. +//! - [`StandardModelConfig`]: A standard configuration for the models +//! - [`ModelFeatures`]: A default implementation of the [`ModelLayout`] trait that +//! sufficiently defines both shallow and deep neural networks. +//! +//! ### _Model Parameters_ +//! +//! Additionally, the crate defines a sequential +//! +//! **Note**: You should stick with the type aliases for the [`ModelParamsBase`] type, as they +//! drastically simplify the type-face of the model parameters. Attempting to generalize over +//! the hidden layers of the model might lead to excessive complexity. That being said, there +//! are provided methods and routines to convert from a shallow to deep model, and vice versa. +//! +//! - [`DeepModelParams`]: An owned representation of the [`ModelParamsBase`] for deep +//! neural networks. +//! - [`ShallowModelParams`]: An owned representation of the [`ModelParamsBase`] for shallow +//! neural networks. +//! +//! ### Traits +//! +//! This crate extends the [`Forward`](cnc::Forward) and [`Backward`](cnc::Backward) traits +//! from the [`core`](cnc) crate to provide additional functionality for neural networks. +//! +//! - [`Predict`]: A more robust implementation of the [`Forward`] trait +//! - [`Train`]: A trait for training a neural network model. +//! +#![cfg_attr(not(feature = "std"), no_std)] +#![allow( + clippy::missing_saftey_doc, + clippy::module_inception, + clippy::needless_doctest_main, + clippy::upper_case_acronyms +)] +// ensure that either `std` or `alloc` feature is enabled +#[cfg(not(any(feature = "std", feature = "alloc")))] +compile_error! { + "At least one of the 'std' or 'alloc' features must be enabled." 
+} + +extern crate concision_core as cnc; + +#[cfg(feature = "alloc")] +extern crate alloc; + +#[doc(inline)] +pub use self::{ + config::prelude::*, + error::*, + layers::{Layer, LayerBase}, + layout::prelude::*, + params::prelude::*, + train::prelude::*, + traits::*, + types::*, +}; + +#[macro_use] +pub(crate) mod macros { + #[macro_use] + pub mod seal; +} + +pub mod config; +pub mod error; +pub mod layers; +pub mod layout; +pub mod params; +pub mod train; + +pub(crate) mod traits { + #[doc(inline)] + pub use self::prelude::*; + + mod hidden; + mod models; + mod network; + mod predict; + + mod prelude { + #[doc(inline)] + pub use super::hidden::*; + #[doc(inline)] + pub use super::models::*; + #[doc(inline)] + pub use super::network::*; + #[doc(inline)] + pub use super::predict::*; + } +} + +pub(crate) mod types { + #[doc(inline)] + pub use self::prelude::*; + + mod dropout; + mod key_value; + + mod prelude { + #[doc(inline)] + pub use super::dropout::*; + #[doc(inline)] + pub use super::key_value::*; + } +} + +#[doc(hidden)] +pub mod prelude { + #[doc(no_inline)] + pub use super::config::prelude::*; + #[doc(hidden)] + pub use crate::layers::prelude::*; + #[doc(no_inline)] + pub use crate::layout::prelude::*; + #[doc(no_inline)] + pub use crate::params::prelude::*; + #[doc(no_inline)] + pub use crate::train::prelude::*; + #[doc(no_inline)] + pub use crate::traits::*; + #[doc(no_inline)] + pub use crate::types::*; +} diff --git a/neural/src/params/impls/impl_model_params.rs b/neural/src/params/impls/impl_model_params.rs index 84b5f964..8347bf3e 100644 --- a/neural/src/params/impls/impl_model_params.rs +++ b/neural/src/params/impls/impl_model_params.rs @@ -8,7 +8,7 @@ use crate::{DeepModelRepr, RawHidden}; use cnc::params::ParamsBase; use ndarray::{Data, Dimension, RawDataClone}; -impl Clone for ModelParamsBase +impl Clone for ModelParamsBase where D: Dimension, H: RawHidden + Clone, @@ -24,7 +24,7 @@ where } } -impl core::fmt::Debug for ModelParamsBase +impl 
core::fmt::Debug for ModelParamsBase where D: Dimension, H: RawHidden + core::fmt::Debug, @@ -40,7 +40,7 @@ where } } -impl core::fmt::Display for ModelParamsBase +impl core::fmt::Display for ModelParamsBase where D: Dimension, H: RawHidden + core::fmt::Debug, @@ -58,7 +58,7 @@ where } } -impl core::ops::Index for ModelParamsBase +impl core::ops::Index for ModelParamsBase where D: Dimension, S: Data, @@ -78,7 +78,7 @@ where } } -impl core::ops::IndexMut for ModelParamsBase +impl core::ops::IndexMut for ModelParamsBase where D: Dimension, S: Data, diff --git a/neural/src/params/impls/impl_model_params_rand.rs b/neural/src/params/impls/impl_model_params_rand.rs index a533e058..ca9fe26d 100644 --- a/neural/src/params/impls/impl_model_params_rand.rs +++ b/neural/src/params/impls/impl_model_params_rand.rs @@ -14,7 +14,7 @@ use num_traits::{Float, FromPrimitive}; use rand_distr::uniform::{SampleUniform, Uniform}; use rand_distr::{Distribution, StandardNormal}; -impl ShallowParamsBase +impl ShallowParamsBase where S: DataOwned, { @@ -74,7 +74,7 @@ where } } -impl DeepParamsBase +impl DeepParamsBase where S: DataOwned, { diff --git a/neural/src/params/impls/impl_model_params_serde.rs b/neural/src/params/impls/impl_model_params_serde.rs index 0eefa3e0..345271a4 100644 --- a/neural/src/params/impls/impl_model_params_serde.rs +++ b/neural/src/params/impls/impl_model_params_serde.rs @@ -14,16 +14,16 @@ use serde::ser::{Serialize, SerializeStruct, Serializer}; /// serialization and deserialization. 
const FIELDS: [&str; 3] = ["input", "hidden", "output"]; -struct ModelParamsBaseVisitor +struct ModelParamsBaseVisitor::Elem> where D: Dimension, - S: RawData, + S: RawData, H: RawHidden, { - marker: PhantomData<(S, D, H)>, + marker: PhantomData<(S, D, H, A)>, } -impl<'a, A, S, D, H> Visitor<'a> for ModelParamsBaseVisitor +impl<'a, A, S, D, H> Visitor<'a> for ModelParamsBaseVisitor where A: Deserialize<'a>, D: Dimension + Deserialize<'a>, @@ -31,7 +31,7 @@ where H: RawHidden + Deserialize<'a>, ::Smaller: Deserialize<'a>, { - type Value = ModelParamsBase; + type Value = ModelParamsBase; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("The visitor is expecting to receive a `ModelParamsBase` object.") @@ -59,7 +59,7 @@ where } } -impl<'a, A, S, D, H> Deserialize<'a> for ModelParamsBase +impl<'a, A, S, D, H> Deserialize<'a> for ModelParamsBase where A: Deserialize<'a>, D: Dimension + Deserialize<'a>, @@ -81,7 +81,7 @@ where } } -impl Serialize for ModelParamsBase +impl Serialize for ModelParamsBase where A: Serialize, D: Dimension + Serialize, diff --git a/neural/src/params/impls/impl_params_deep.rs b/neural/src/params/impls/impl_params_deep.rs index cc0acd4c..920720f5 100644 --- a/neural/src/params/impls/impl_params_deep.rs +++ b/neural/src/params/impls/impl_params_deep.rs @@ -6,11 +6,11 @@ use crate::{DeepParamsBase, ModelParamsBase}; use crate::ModelFeatures; use crate::traits::DeepModelRepr; -use cnc::params::ParamsBase; +use concision_params::ParamsBase; use ndarray::{Data, DataOwned, Dimension, Ix2, RawData}; use num_traits::{One, Zero}; -impl ModelParamsBase +impl ModelParamsBase where D: Dimension, S: RawData, @@ -26,7 +26,7 @@ where } } -impl DeepParamsBase +impl DeepParamsBase where D: Dimension, S: RawData, @@ -101,7 +101,7 @@ where /// sequentially forwards the input through the model without any activations or other /// complexities in-between. 
not overly usefuly, but it is here for completeness #[inline] - pub fn forward(&self, input: &X) -> cnc::Result + pub fn forward(&self, input: &X) -> Option where A: Clone, S: Data, @@ -118,7 +118,7 @@ where } } -impl DeepParamsBase +impl DeepParamsBase where S: RawData, { diff --git a/neural/src/params/impls/impl_params_shallow.rs b/neural/src/params/impls/impl_params_shallow.rs index 81529bd6..06cb1bf9 100644 --- a/neural/src/params/impls/impl_params_shallow.rs +++ b/neural/src/params/impls/impl_params_shallow.rs @@ -6,13 +6,14 @@ use crate::params::{ModelParamsBase, ShallowParamsBase}; use crate::ModelFeatures; use crate::traits::ShallowModelRepr; -use cnc::{ParamsBase, ReLU, Sigmoid}; +use cnc::{ReLU, Sigmoid}; +use concision_params::ParamsBase; use ndarray::{ Array1, ArrayBase, Data, DataOwned, Dimension, Ix2, RawData, RemoveAxis, ScalarOperand, }; use num_traits::Float; -impl ModelParamsBase +impl ModelParamsBase where D: Dimension, S: RawData, @@ -28,7 +29,7 @@ where } } -impl ShallowParamsBase +impl ShallowParamsBase where S: RawData, D: Dimension, @@ -56,16 +57,16 @@ where size + self.output().count_weight() } /// returns an immutable reference to the hidden weights - pub const fn hidden_weights(&self) -> &ArrayBase { + pub const fn hidden_weights(&self) -> &ArrayBase { self.hidden().weights() } /// returns an mutable reference to the hidden weights - pub const fn hidden_weights_mut(&mut self) -> &mut ArrayBase { + pub const fn hidden_weights_mut(&mut self) -> &mut ArrayBase { self.hidden_mut().weights_mut() } } -impl ShallowParamsBase +impl ShallowParamsBase where S: RawData, { @@ -81,7 +82,7 @@ where } } /// forward input through the controller network - pub fn forward(&self, input: &Array1) -> cnc::Result> + pub fn forward(&self, input: &Array1) -> Option> where A: Float + ScalarOperand, S: Data, @@ -93,11 +94,11 @@ where // forward the input through the output layer; activate using sigmoid output = self.output().forward(&output)?.sigmoid(); - 
Ok(output) + Some(output) } } -impl Default for ShallowParamsBase +impl Default for ShallowParamsBase where S: DataOwned, A: Clone + Default, diff --git a/neural/src/params/model_params.rs b/neural/src/params/model_params.rs index bf6cfa68..641cd60c 100644 --- a/neural/src/params/model_params.rs +++ b/neural/src/params/model_params.rs @@ -21,10 +21,10 @@ use crate::{DeepModelRepr, RawHidden}; /// /// This type also enables us to define a set of common initialization routines and introduce /// other standards for dealing with parameters in a neural network. -pub struct ModelParamsBase +pub struct ModelParamsBase::Elem> where D: Dimension, - S: RawData, + S: RawData, H: RawHidden, { /// the input layer of the model @@ -38,7 +38,7 @@ where /// storage type `S`, the dimension `D`, and the hidden layer type `H`. This implementation /// focuses on providing basic initialization routines and accessors for the various layers /// within the model. -impl ModelParamsBase +impl ModelParamsBase where D: Dimension, S: RawData, @@ -119,35 +119,35 @@ where self.hidden().as_slice() } /// returns an immutable reference to the input bias - pub const fn input_bias(&self) -> &ArrayBase { + pub const fn input_bias(&self) -> &ArrayBase { self.input().bias() } /// returns a mutable reference to the input bias - pub const fn input_bias_mut(&mut self) -> &mut ArrayBase { + pub const fn input_bias_mut(&mut self) -> &mut ArrayBase { self.input_mut().bias_mut() } /// returns an immutable reference to the input weights - pub const fn input_weights(&self) -> &ArrayBase { + pub const fn input_weights(&self) -> &ArrayBase { self.input().weights() } /// returns an mutable reference to the input weights - pub const fn input_weights_mut(&mut self) -> &mut ArrayBase { + pub const fn input_weights_mut(&mut self) -> &mut ArrayBase { self.input_mut().weights_mut() } /// returns an immutable reference to the output bias - pub const fn output_bias(&self) -> &ArrayBase { + pub const fn output_bias(&self) -> 
&ArrayBase { self.output().bias() } /// returns a mutable reference to the output bias - pub const fn output_bias_mut(&mut self) -> &mut ArrayBase { + pub const fn output_bias_mut(&mut self) -> &mut ArrayBase { self.output_mut().bias_mut() } /// returns an immutable reference to the output weights - pub const fn output_weights(&self) -> &ArrayBase { + pub const fn output_weights(&self) -> &ArrayBase { self.output().weights() } /// returns an mutable reference to the output weights - pub const fn output_weights_mut(&mut self) -> &mut ArrayBase { + pub const fn output_weights_mut(&mut self) -> &mut ArrayBase { self.output_mut().weights_mut() } /// returns the number of hidden layers in the model diff --git a/neural/src/params/types/aliases.rs b/neural/src/params/types/aliases.rs index dc38f2f7..94910628 100644 --- a/neural/src/params/types/aliases.rs +++ b/neural/src/params/types/aliases.rs @@ -8,17 +8,17 @@ use ndarray::{Ix2, OwnedRepr}; /// A type alias for an owned representation of the [`ModelParamsBase`] generic of type `A` /// and the dimension `D`. -pub type ModelParams = ModelParamsBase, D, H>; +pub type ModelParams = ModelParamsBase, D, H, A>; /// a type alias for an owned representation of the [`DeepParamsBase`] generic of type `A` and /// the dimension `D`. -pub type DeepModelParams = DeepParamsBase, D>; +pub type DeepModelParams = DeepParamsBase, D, A>; /// a type alias for a _deep_ representation of the [`ModelParamsBase`] using a vector of /// parameters as the hidden layers. -pub type DeepParamsBase = ModelParamsBase>>; +pub type DeepParamsBase = ModelParamsBase>, A>; /// a type alias for an owned representation of the [`DeepParamsBase`] generic of type `A` and /// the dimension `D`. pub type ShallowModelParams = ShallowParamsBase, D>; /// a type alias for a _shallow_ representation of the [`ModelParamsBase`] using a single /// [`ParamsBase`] instance as the hidden layer. 
-pub type ShallowParamsBase = ModelParamsBase>; +pub type ShallowParamsBase = ModelParamsBase, A>; diff --git a/neural/src/train/error.rs b/neural/src/train/error.rs index ed5a0441..08c6ba27 100644 --- a/neural/src/train/error.rs +++ b/neural/src/train/error.rs @@ -5,7 +5,7 @@ pub(crate) type TrainingResult = Result; /// The [`TrainingError`] type enumerates the various errors that can occur during the /// training process. -#[derive(Debug, scsys::VariantConstructors, thiserror::Error)] +#[derive(Debug, thiserror::Error, variants::VariantConstructors)] #[non_exhaustive] pub enum TrainingError { #[error("Invalid Training Data")] diff --git a/neural/src/train/traits/train.rs b/neural/src/train/traits/train.rs index 0bb9d6e8..c7a5e58e 100644 --- a/neural/src/train/traits/train.rs +++ b/neural/src/train/traits/train.rs @@ -4,15 +4,15 @@ */ use crate::train::error::TrainingError; -use crate::error::ModelResult; +use crate::error::NeuralResult; /// This trait defines the training process for the network pub trait Train { type Output; - fn train(&mut self, input: &X, target: &Y) -> ModelResult; + fn train(&mut self, input: &X, target: &Y) -> NeuralResult; - fn train_for(&mut self, input: &X, target: &Y, epochs: usize) -> ModelResult { + fn train_for(&mut self, input: &X, target: &Y, epochs: usize) -> NeuralResult { let mut output = None; for _ in 0..epochs { diff --git a/neural/src/traits/models.rs b/neural/src/traits/models.rs index cc742057..1e98487f 100644 --- a/neural/src/traits/models.rs +++ b/neural/src/traits/models.rs @@ -1,150 +1,146 @@ -/* - appellation: models - authors: @FL03 -*/ -use crate::config::NetworkConfig; -use crate::{DeepModelParams, ModelLayout}; -use crate::{Predict, Train}; -use concision_core::params::Params; -use concision_data::DatasetBase; - -/// The [`Model`] trait defines the core interface for all models; implementors will need to -/// provide the type of configuration used by the model, the type of layout used by the model, -/// and the 
type of parameters used by the model. The crate provides standard, or default, -/// definitions of both the configuration and layout types, however, for -pub trait Model { - /// The type of configuration used for the model - type Config: NetworkConfig; - /// The type of [`ModelLayout`] used by the model for this implementation. - type Layout: ModelLayout; - /// returns an immutable reference to the models configuration; this is typically used to - /// access the models hyperparameters (i.e. learning rate, momentum, etc.) and other - /// related control parameters. - fn config(&self) -> &Self::Config; - /// returns a mutable reference to the models configuration; useful for setting hyperparams - fn config_mut(&mut self) -> &mut Self::Config; - /// returns a copy of the model's current layout (features); a type providing the model - /// with a particular number of features for the various layers of a deep neural network. - /// - /// the layout is used in everything from creation and initialization routines to - /// validating the dimensionality of the model's inputs, outputs, training data, etc. - fn layout(&self) -> Self::Layout; - /// returns an immutable reference to the model parameters - fn params(&self) -> &DeepModelParams; - /// returns a mutable reference to the model's parameters - fn params_mut(&mut self) -> &mut DeepModelParams; - /// propagates the input through the model; each layer is applied in sequence meaning that - /// the output of each previous layer is the input to the next layer. This pattern - /// repeats until the output layer returns the final result. 
- /// - /// By default, the trait simply passes each output from one layer to the next, however, - /// custom models will likely override this method to inject activation methods and other - /// related logic - fn predict(&self, inputs: &U) -> crate::ModelResult - where - Self: Predict, - { - Predict::predict(self, inputs) - } - /// a convience method that trains the model using the provided dataset; this method - /// requires that the model implements the [`Train`] trait and that the dataset - fn train(&mut self, dataset: &DatasetBase) -> crate::ModelResult - where - Self: Train, - { - Train::train(self, dataset.records(), dataset.targets()) - } -} - -pub trait ModelExt: Model { - /// [`replace`](core::mem::replace) the current configuration and returns the old one; - fn replace_config(&mut self, config: Self::Config) -> Self::Config { - core::mem::replace(self.config_mut(), config) - } - /// [`replace`](core::mem::replace) the current model parameters and returns the old one - fn replace_params(&mut self, params: DeepModelParams) -> DeepModelParams { - core::mem::replace(self.params_mut(), params) - } - /// overrides the current configuration and returns a mutable reference to the model - fn set_config(&mut self, config: Self::Config) -> &mut Self { - *self.config_mut() = config; - self - } - /// overrides the current model parameters and returns a mutable reference to the model - fn set_params(&mut self, params: DeepModelParams) -> &mut Self { - *self.params_mut() = params; - self - } - /// returns an immutable reference to the input layer; - #[inline] - fn input_layer(&self) -> &Params { - self.params().input() - } - /// returns a mutable reference to the input layer; - #[inline] - fn input_layer_mut(&mut self) -> &mut Params { - self.params_mut().input_mut() - } - /// returns an immutable reference to the hidden layer(s); - #[inline] - fn hidden_layers(&self) -> &Vec> { - self.params().hidden() - } - /// returns a mutable reference to the hidden layer(s); - 
#[inline] - fn hidden_layers_mut(&mut self) -> &mut Vec> { - self.params_mut().hidden_mut() - } - /// returns an immutable reference to the output layer; - #[inline] - fn output_layer(&self) -> &Params { - self.params().output() - } - /// returns a mutable reference to the output layer; - #[inline] - fn output_layer_mut(&mut self) -> &mut Params { - self.params_mut().output_mut() - } - #[inline] - fn set_input_layer(&mut self, layer: Params) -> &mut Self { - self.params_mut().set_input(layer); - self - } - #[inline] - fn set_hidden_layers(&mut self, layers: Vec>) -> &mut Self { - self.params_mut().set_hidden(layers); - self - } - #[inline] - fn set_output_layer(&mut self, layer: Params) -> &mut Self { - self.params_mut().set_output(layer); - self - } - /// returns a 2-tuple representing the dimensions of the input layer; (input, hidden) - fn input_dim(&self) -> (usize, usize) { - self.layout().dim_input() - } - /// returns a 2-tuple representing the dimensions of the hidden layers; (hidden, hidden) - fn hidden_dim(&self) -> (usize, usize) { - self.layout().dim_hidden() - } - /// returns the total number of hidden layers in the model; - fn hidden_layers_count(&self) -> usize { - self.layout().layers() - } - /// returns a 2-tuple representing the dimensions of the output layer; (hidden, output) - fn output_dim(&self) -> (usize, usize) { - self.layout().dim_output() - } -} - -/// The [`DeepNeuralNetwork`] trait is a specialization of the [`Model`] trait that -/// provides additional functionality for deep neural networks. 
This trait is -pub trait DeepNeuralNetwork: Model {} - -impl ModelExt for M -where - M: Model, - M::Layout: ModelLayout, -{ -} +/* + appellation: models + authors: @FL03 +*/ +use crate::config::NetworkConfig; +use crate::{DeepModelParams, ModelLayout}; +use crate::{Predict, Train}; +use concision_core::params::Params; +use concision_data::DatasetBase; + +/// The [`Model`] trait defines the core interface for all models; implementors will need to +/// provide the type of configuration used by the model, the type of layout used by the model, +/// and the type of parameters used by the model. The crate provides standard, or default, +/// definitions of both the configuration and layout types, however, for +pub trait Model { + /// The type of configuration used for the model + type Config: NetworkConfig; + /// The type of [`ModelLayout`] used by the model for this implementation. + type Layout: ModelLayout; + /// returns an immutable reference to the models configuration; this is typically used to + /// access the models hyperparameters (i.e. learning rate, momentum, etc.) and other + /// related control parameters. + fn config(&self) -> &Self::Config; + /// returns a mutable reference to the models configuration; useful for setting hyperparams + fn config_mut(&mut self) -> &mut Self::Config; + /// returns a copy of the model's current layout (features); a type providing the model + /// with a particular number of features for the various layers of a deep neural network. + /// + /// the layout is used in everything from creation and initialization routines to + /// validating the dimensionality of the model's inputs, outputs, training data, etc. 
+ fn layout(&self) -> Self::Layout; + /// returns an immutable reference to the model parameters + fn params(&self) -> &DeepModelParams; + /// returns a mutable reference to the model's parameters + fn params_mut(&mut self) -> &mut DeepModelParams; + /// propagates the input through the model; each layer is applied in sequence meaning that + /// the output of each previous layer is the input to the next layer. This pattern + /// repeats until the output layer returns the final result. + /// + /// By default, the trait simply passes each output from one layer to the next, however, + /// custom models will likely override this method to inject activation methods and other + /// related logic + fn predict(&self, inputs: &U) -> Option + where + Self: Predict, + { + Predict::predict(self, inputs) + } + /// a convenience method that trains the model using the provided dataset; this method + /// requires that the model implements the [`Train`] trait and that the dataset + fn train(&mut self, dataset: &DatasetBase) -> crate::NeuralResult + where + Self: Train, + { + Train::train(self, dataset.records(), dataset.targets()) + } +} + +pub trait ModelExt: Model { + /// [`replace`](core::mem::replace) the current configuration and returns the old one; + fn replace_config(&mut self, config: Self::Config) -> Self::Config { + core::mem::replace(self.config_mut(), config) + } + /// [`replace`](core::mem::replace) the current model parameters and returns the old one + fn replace_params(&mut self, params: DeepModelParams) -> DeepModelParams { + core::mem::replace(self.params_mut(), params) + } + /// overrides the current configuration and returns a mutable reference to the model + fn set_config(&mut self, config: Self::Config) -> &mut Self { + *self.config_mut() = config; + self + } + /// overrides the current model parameters and returns a mutable reference to the model + fn set_params(&mut self, params: DeepModelParams) -> &mut Self { + *self.params_mut() = params; + self + } + /// 
returns an immutable reference to the input layer; + #[inline] + fn input_layer(&self) -> &Params { + self.params().input() + } + /// returns a mutable reference to the input layer; + #[inline] + fn input_layer_mut(&mut self) -> &mut Params { + self.params_mut().input_mut() + } + /// returns an immutable reference to the hidden layer(s); + #[inline] + fn hidden_layers(&self) -> &Vec> { + self.params().hidden() + } + /// returns a mutable reference to the hidden layer(s); + #[inline] + fn hidden_layers_mut(&mut self) -> &mut Vec> { + self.params_mut().hidden_mut() + } + /// returns an immutable reference to the output layer; + #[inline] + fn output_layer(&self) -> &Params { + self.params().output() + } + /// returns a mutable reference to the output layer; + #[inline] + fn output_layer_mut(&mut self) -> &mut Params { + self.params_mut().output_mut() + } + #[inline] + fn set_input_layer(&mut self, layer: Params) -> &mut Self { + self.params_mut().set_input(layer); + self + } + #[inline] + fn set_hidden_layers(&mut self, layers: Vec>) -> &mut Self { + self.params_mut().set_hidden(layers); + self + } + #[inline] + fn set_output_layer(&mut self, layer: Params) -> &mut Self { + self.params_mut().set_output(layer); + self + } + /// returns a 2-tuple representing the dimensions of the input layer; (input, hidden) + fn input_dim(&self) -> (usize, usize) { + self.layout().dim_input() + } + /// returns a 2-tuple representing the dimensions of the hidden layers; (hidden, hidden) + fn hidden_dim(&self) -> (usize, usize) { + self.layout().dim_hidden() + } + /// returns the total number of hidden layers in the model; + fn hidden_layers_count(&self) -> usize { + self.layout().layers() + } + /// returns a 2-tuple representing the dimensions of the output layer; (hidden, output) + fn output_dim(&self) -> (usize, usize) { + self.layout().dim_output() + } +} + +impl ModelExt for M +where + M: Model, + M::Layout: ModelLayout, +{ +} diff --git a/neural/src/traits/network.rs 
b/neural/src/traits/network.rs new file mode 100644 index 00000000..05aacb12 --- /dev/null +++ b/neural/src/traits/network.rs @@ -0,0 +1,35 @@ +/* + appellation: network + authors: @FL03 +*/ +use super::{DeepModelRepr, RawHidden, ShallowModelRepr}; +use crate::config::NetworkConfig; +use ndarray::{Dimension, RawData}; + +pub trait NeuralNetwork +where + D: Dimension, + S: RawData, +{ + type Config: NetworkConfig; + type Hidden: RawHidden; + + fn config(&self) -> &Self::Config; + fn config_mut(&mut self) -> &mut Self::Config; +} + +pub trait ShallowNeuralNetwork: NeuralNetwork +where + D: Dimension, + S: RawData, + Self::Hidden: ShallowModelRepr, +{ +} + +pub trait DeepNeuralNetwork: NeuralNetwork +where + D: Dimension, + S: RawData, + Self::Hidden: DeepModelRepr, +{ +} diff --git a/neural/src/traits/predict.rs b/neural/src/traits/predict.rs index 57c265ca..bc5b814e 100644 --- a/neural/src/traits/predict.rs +++ b/neural/src/traits/predict.rs @@ -17,7 +17,7 @@ pub trait Predict { private!(); - fn predict(&self, input: &Rhs) -> crate::ModelResult; + fn predict(&self, input: &Rhs) -> Option; } /// The [`PredictWithConfidence`] trait is an extension of the [`Predict`] trait, providing @@ -25,10 +25,7 @@ pub trait Predict { pub trait PredictWithConfidence: Predict { type Confidence; - fn predict_with_confidence( - &self, - input: &Rhs, - ) -> crate::ModelResult<(Self::Output, Self::Confidence)>; + fn predict_with_confidence(&self, input: &Rhs) -> Option<(Self::Output, Self::Confidence)>; } /* @@ -46,8 +43,8 @@ where seal!(); - fn predict(&self, input: &U) -> crate::ModelResult { - self.forward(input).map_err(core::convert::Into::into) + fn predict(&self, input: &U) -> Option { + self.forward(input) } } @@ -59,10 +56,7 @@ where { type Confidence = A; - fn predict_with_confidence( - &self, - input: &U, - ) -> Result<(Self::Output, Self::Confidence), crate::ModelError> { + fn predict_with_confidence(&self, input: &U) -> Option<(Self::Output, Self::Confidence)> { // Get the 
base prediction let prediction = Predict::predict(self, input)?; let shape = prediction.shape(); @@ -83,6 +77,6 @@ where // Confidence: inverse of variance (clipped to avoid division by zero) let confidence = (A::one() + avg_variance).recip(); - Ok((prediction, confidence)) + Some((prediction, confidence)) } } diff --git a/neural/src/types/dropout.rs b/neural/src/types/dropout.rs index 2d848f6b..5fa915d2 100644 --- a/neural/src/types/dropout.rs +++ b/neural/src/types/dropout.rs @@ -39,7 +39,7 @@ where { type Output = ::Output; - fn forward(&self, input: &U) -> cnc::Result { - Ok(input.dropout(self.p)) + fn forward(&self, input: &U) -> Option { + Some(input.dropout(self.p)) } } diff --git a/neural/tests/layers.rs b/neural/tests/layers.rs index 2fbb6262..42172f87 100644 --- a/neural/tests/layers.rs +++ b/neural/tests/layers.rs @@ -2,8 +2,8 @@ appellation: layers authors: @FL03 */ - +use concision_neural::NeuralResult; #[test] -fn test_layer_base() -> anyhow::Result<()> { +fn test_layer_base() -> NeuralResult<()> { Ok(()) } diff --git a/neural/tests/masks.rs b/neural/tests/masks.rs index 1217d1ef..0780bed8 100644 --- a/neural/tests/masks.rs +++ b/neural/tests/masks.rs @@ -2,16 +2,16 @@ extern crate concision_core as cnc; extern crate concision_neural as neural; use cnc::Forward; -use concision_neural::error::ModelError; +use concision_neural::error::NeuralError; use ndarray::prelude::*; use neural::Dropout; #[test] -fn test_dropout() -> Result<(), ModelError> { +fn test_dropout() -> Result<(), NeuralError> { let shape = (512, 2048); let arr = Array2::::ones(shape); let dropout = Dropout::new(0.5); - let out = dropout.forward(&arr)?; + let out = dropout.forward(&arr).expect("Dropout forward pass failed"); assert!(arr.iter().all(|&x| x == 1.0)); assert!(out.iter().any(|x| x == &0f64)); diff --git a/params/Cargo.toml b/params/Cargo.toml new file mode 100644 index 00000000..5538e3f9 --- /dev/null +++ b/params/Cargo.toml @@ -0,0 +1,180 @@ +[package] +build = "build.rs" 
+description = "this crate implements the core modules for the concision framework" +name = "concision-params" + +authors.workspace = true +categories.workspace = true +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +readme.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] +bench = false +doc = true +doctest = true +test = true + +# ************* [Unit Tests] ************* +[[test]] +name = "default" + +[[test]] +name = "params" +required-features = ["std"] + +[dependencies] +concision-init = { optional = true, workspace = true } +concision-traits = { workspace = true } +# custom +variants = { workspace = true } +# concurrency & parallelism +rayon = { optional = true, workspace = true } +# data & serialization +serde = { features = ["derive"], optional = true, workspace = true } +serde_derive = { optional = true, workspace = true } +serde_json = { optional = true, workspace = true } +# error-handling +thiserror = { workspace = true } +# mathematics +approx = { optional = true, workspace = true } +ndarray = { workspace = true } +num-complex = { optional = true, workspace = true } +num-traits = { workspace = true } +# random +getrandom = { default-features = false, optional = true, workspace = true } +rand = { optional = true, workspace = true } +rand_distr = { optional = true, workspace = true } + +[dev-dependencies] +lazy_static = { workspace = true } + +[features] +default = ["std"] + +full = [ + "default", + "approx", + "complex", + "init", + "json", + "rand", + "serde", +] + +nightly = [ + "concision-init?/nightly", + "concision-traits/nightly", +] + +# ************* [FF:Features] ************* +init = [ + "concision_init", + "rand", +] + +json = ["alloc", "serde", "serde_json"] + +concision_init = ["dep:concision-init"] + +# ************* [FF:Dependencies] ************* +std = [ + "alloc", + 
"concision-init?/std", + "concision-traits/std", + "ndarray/std", + "num-complex?/std", + "num-traits/std", + "rand?/std", + "rand?/std_rng", + "serde/std", + "thiserror/std", + "variants/std", +] + +wasi = [ + "concision-init?/wasi", + "concision-traits/wasi", +] + +wasm = [ + "getrandom?/wasm_js", + "concision-init?/wasm", + "concision-traits/wasm", +] +# ************* [FF:Dependencies] ************* +alloc = [ + "concision-init?/alloc", + "concision-traits/alloc", + "serde?/alloc", + "serde_json?/alloc", + "variants/alloc", +] + +approx = [ + "dep:approx", + "concision-init?/approx", + "ndarray/approx", +] + +blas = [ + "concision-init?/blas", + "ndarray/blas", +] + +complex = [ + "dep:num-complex", + "concision-init?/complex", +] + +rand = [ + "dep:rand", + "dep:rand_distr", + "concision-init?/rand", + "concision-traits/rand", + "num-complex?/rand", + "rng", +] + +rayon = [ + "dep:rayon", + "ndarray/rayon", +] + +rng = [ + "dep:getrandom", + "concision-init?/rng", + "concision-traits/rng", + "rand?/small_rng", + "rand?/thread_rng", +] + +serde = [ + "concision-init?/serde", + "dep:serde", + "dep:serde_derive", + "ndarray/serde", + "num-complex?/serde", + "rand?/serde", + "rand_distr?/serde", +] + +serde_json = ["dep:serde_json"] + + +# ********* [Metadata] ********* +[package.metadata.docs.rs] +all-features = false +features = ["full"] +rustc-args = ["--cfg", "docsrs"] +version = "v{{version}}" + +[package.metadata.release] +no-dev-version = true +tag-name = "{{version}}" diff --git a/params/build.rs b/params/build.rs new file mode 100644 index 00000000..940a4ce4 --- /dev/null +++ b/params/build.rs @@ -0,0 +1,8 @@ +/* + Appellation: build + Contrib: FL03 +*/ + +fn main() { + println!("cargo::rustc-check-cfg=cfg(no_std)"); +} diff --git a/core/src/params/error.rs b/params/src/error.rs similarity index 69% rename from core/src/params/error.rs rename to params/src/error.rs index eaa5a099..58547e6f 100644 --- a/core/src/params/error.rs +++ b/params/src/error.rs @@ 
-2,6 +2,11 @@ Appellation: error Contrib: @FL03 */ +//! This module defines error types and handling mechanisms for the `params` crate. +//! + +/// A type alias for a [`Result`](core::result::Result) which uses the [`ParamsError`] type +pub type Result = core::result::Result; /// the [`ParamsError`] enumerates various errors that can occur within the parameters of a /// neural network. @@ -23,4 +28,6 @@ pub enum ParamsError { InvalidParameterType, #[error("Invalid parameter value")] InvalidParameterValue, + #[error(transparent)] + ShapeError(#[from] ndarray::ShapeError), } diff --git a/core/src/params/impls/impl_params.rs b/params/src/impls/impl_params.rs similarity index 79% rename from core/src/params/impls/impl_params.rs rename to params/src/impls/impl_params.rs index 6d003d78..391028af 100644 --- a/core/src/params/impls/impl_params.rs +++ b/params/src/impls/impl_params.rs @@ -43,7 +43,7 @@ where } } -impl core::fmt::Debug for ParamsBase +impl core::fmt::Debug for ParamsBase where D: Dimension, S: Data, @@ -57,7 +57,7 @@ where } } -impl core::fmt::Display for ParamsBase +impl core::fmt::Display for ParamsBase where D: Dimension, S: Data, @@ -73,7 +73,7 @@ where } } -impl Clone for ParamsBase +impl Clone for ParamsBase where D: Dimension, S: ndarray::RawDataClone, @@ -84,7 +84,7 @@ where } } -impl Copy for ParamsBase +impl Copy for ParamsBase where D: Dimension + Copy, ::Smaller: Copy, @@ -93,7 +93,7 @@ where { } -impl PartialEq for ParamsBase +impl PartialEq for ParamsBase where D: Dimension, S: Data, @@ -104,29 +104,29 @@ where } } -impl PartialEq<&ParamsBase> for ParamsBase +impl PartialEq<&ParamsBase> for ParamsBase where D: Dimension, S: Data, A: PartialEq, { - fn eq(&self, other: &&ParamsBase) -> bool { + fn eq(&self, other: &&ParamsBase) -> bool { self.bias() == other.bias() && self.weights() == other.weights() } } -impl PartialEq<&mut ParamsBase> for ParamsBase +impl PartialEq<&mut ParamsBase> for ParamsBase where D: Dimension, S: Data, A: PartialEq, { - 
fn eq(&self, other: &&mut ParamsBase) -> bool { + fn eq(&self, other: &&mut ParamsBase) -> bool { self.bias() == other.bias() && self.weights() == other.weights() } } -impl Eq for ParamsBase +impl Eq for ParamsBase where D: Dimension, S: Data, @@ -134,13 +134,13 @@ where { } -impl IntoIterator for ParamsBase +impl IntoIterator for ParamsBase where D: Dimension, S: RawData, { - type Item = ParamsBase; - type IntoIter = Once>; + type Item = ParamsBase; + type IntoIter = Once>; fn into_iter(self) -> Self::IntoIter { core::iter::once(self) diff --git a/core/src/params/impls/impl_params_deprecated.rs b/params/src/impls/impl_params_deprecated.rs similarity index 85% rename from core/src/params/impls/impl_params_deprecated.rs rename to params/src/impls/impl_params_deprecated.rs index 0a5c19a3..4df62350 100644 --- a/core/src/params/impls/impl_params_deprecated.rs +++ b/params/src/impls/impl_params_deprecated.rs @@ -7,7 +7,7 @@ use crate::params::ParamsBase; use ndarray::{Dimension, RawData}; #[doc(hidden)] -impl ParamsBase +impl ParamsBase where S: RawData, D: Dimension, diff --git a/core/src/params/impls/impl_params_init.rs b/params/src/impls/impl_params_init.rs similarity index 93% rename from core/src/params/impls/impl_params_init.rs rename to params/src/impls/impl_params_init.rs index 2e7ae370..063cda3c 100644 --- a/core/src/params/impls/impl_params_init.rs +++ b/params/src/impls/impl_params_init.rs @@ -4,7 +4,7 @@ */ use crate::params::ParamsBase; -use crate::init::Initialize; +use concision_init::Initialize; use ndarray::{ ArrayBase, Axis, DataOwned, Dimension, RawData, RemoveAxis, ScalarOperand, ShapeBuilder, }; @@ -12,7 +12,7 @@ use num_traits::{Float, FromPrimitive}; use rand::rngs::SmallRng; use rand_distr::Distribution; -impl ParamsBase +impl ParamsBase where A: Float + FromPrimitive + ScalarOperand, D: Dimension, @@ -33,7 +33,7 @@ where } } -impl Initialize for ParamsBase +impl Initialize for ParamsBase where D: RemoveAxis, S: RawData, diff --git 
a/core/src/params/impls/impl_params_iter.rs b/params/src/impls/impl_params_iter.rs similarity index 96% rename from core/src/params/impls/impl_params_iter.rs rename to params/src/impls/impl_params_iter.rs index df2a6551..1531aba0 100644 --- a/core/src/params/impls/impl_params_iter.rs +++ b/params/src/impls/impl_params_iter.rs @@ -4,7 +4,7 @@ */ use crate::params::ParamsBase; -use crate::params::iter::{Iter, IterMut}; +use crate::iter::{Iter, IterMut}; use ndarray::iter as nditer; use ndarray::{Axis, Data, DataMut, Dimension, RawData, RemoveAxis}; @@ -14,7 +14,7 @@ use ndarray::{Axis, Data, DataMut, Dimension, RawData, RemoveAxis}; /// - immutable and mutable iterators over each parameter (weights and bias) respectively; /// - an iterator over the parameters, which zips together an axis iterator over the columns of /// the weights and an iterator over the bias; -impl ParamsBase +impl ParamsBase where S: RawData, D: Dimension, diff --git a/core/src/params/impls/impl_params_ops.rs b/params/src/impls/impl_params_ops.rs similarity index 71% rename from core/src/params/impls/impl_params_ops.rs rename to params/src/impls/impl_params_ops.rs index fbbbc5ab..bd06e02c 100644 --- a/core/src/params/impls/impl_params_ops.rs +++ b/params/src/impls/impl_params_ops.rs @@ -2,14 +2,38 @@ Appellation: impl_ops Contrib: @FL03 */ -use crate::params::{Params, ParamsBase}; -use crate::{ApplyGradient, ApplyGradientExt, Backward, Forward, Norm}; +use crate::{Params, ParamsBase}; +use concision_traits::{ + ApplyGradient, ApplyGradientExt, Backward, Biased, Forward, Norm, Weighted, +}; use ndarray::linalg::Dot; -use ndarray::prelude::*; use ndarray::{ArrayBase, Data, DataMut, Dimension, ScalarOperand}; +use ndarray::{RawData, prelude::*}; use num_traits::{Float, FromPrimitive}; -impl ParamsBase +impl ParamsBase +where + A: Clone, + D: Dimension, + S: Data, +{ + /// perform a single backpropagation step + pub fn backward(&mut self, input: &X, grad: &Y, lr: A) -> Option + where + Self: 
Backward, + { + >::backward(self, input, grad, lr) + } + /// forward propagation + pub fn forward(&self, input: &X) -> Option + where + Self: Forward, + { + >::forward(self, input) + } +} + +impl ParamsBase where A: ScalarOperand + Float + FromPrimitive, D: Dimension, @@ -27,9 +51,10 @@ where let weights = self.weights().l2_norm(); bias + weights } + /// a convenience method used to apply a gradient to the parameters using the given /// learning rate. - pub fn apply_gradient(&mut self, grad: &Delta, lr: A) -> crate::Result + pub fn apply_gradient(&mut self, grad: &Delta, lr: A) -> Option where S: DataMut, Self: ApplyGradient, @@ -37,12 +62,7 @@ where >::apply_gradient(self, grad, lr) } - pub fn apply_gradient_with_decay( - &mut self, - grad: &Grad, - lr: A, - decay: A, - ) -> crate::Result + pub fn apply_gradient_with_decay(&mut self, grad: &Grad, lr: A, decay: A) -> Option where S: DataMut, Self: ApplyGradient, @@ -56,7 +76,7 @@ where lr: A, momentum: A, velocity: &mut V, - ) -> crate::Result + ) -> Option where S: DataMut, Self: ApplyGradientExt, @@ -73,7 +93,7 @@ where decay: A, momentum: A, velocity: &mut V, - ) -> crate::Result + ) -> Option where S: DataMut, Self: ApplyGradientExt, @@ -84,7 +104,35 @@ where } } -impl ApplyGradient, A> for ParamsBase +impl Weighted for ParamsBase +where + S: RawData, + D: Dimension, +{ + fn weights(&self) -> &ArrayBase { + self.weights() + } + + fn weights_mut(&mut self) -> &mut ArrayBase { + self.weights_mut() + } +} + +impl Biased for ParamsBase +where + S: RawData, + D: Dimension, +{ + fn bias(&self) -> &ArrayBase { + self.bias() + } + + fn bias_mut(&mut self) -> &mut ArrayBase { + self.bias_mut() + } +} + +impl ApplyGradient, A> for ParamsBase where A: Float + FromPrimitive + ScalarOperand, S: DataMut, @@ -93,12 +141,12 @@ where { type Output = (); - fn apply_gradient(&mut self, grad: &ParamsBase, lr: A) -> crate::Result { + fn apply_gradient(&mut self, grad: &ParamsBase, lr: A) -> Option { // apply the bias gradient 
self.bias_mut().apply_gradient(grad.bias(), lr)?; // apply the weight gradient self.weights_mut().apply_gradient(grad.weights(), lr)?; - Ok(()) + Some(()) } fn apply_gradient_with_decay( @@ -106,18 +154,18 @@ where grad: &ParamsBase, lr: A, decay: A, - ) -> crate::Result { + ) -> Option { // apply the bias gradient self.bias_mut() .apply_gradient_with_decay(grad.bias(), lr, decay)?; // apply the weight gradient self.weights_mut() .apply_gradient_with_decay(grad.weights(), lr, decay)?; - Ok(()) + Some(()) } } -impl ApplyGradientExt, A> for ParamsBase +impl ApplyGradientExt, A> for ParamsBase where A: Float + FromPrimitive + ScalarOperand, S: DataMut, @@ -132,7 +180,7 @@ where lr: A, momentum: A, velocity: &mut Self::Velocity, - ) -> crate::Result<()> { + ) -> Option<()> { // apply the bias gradient self.bias_mut().apply_gradient_with_momentum( grad.bias(), @@ -147,7 +195,7 @@ where momentum, velocity.weights_mut(), )?; - Ok(()) + Some(()) } fn apply_gradient_with_decay_and_momentum( @@ -157,7 +205,7 @@ where decay: A, momentum: A, velocity: &mut Self::Velocity, - ) -> crate::Result<()> { + ) -> Option<()> { // apply the bias gradient self.bias_mut().apply_gradient_with_decay_and_momentum( grad.bias(), @@ -174,7 +222,7 @@ where momentum, velocity.weights_mut(), )?; - Ok(()) + Some(()) } } @@ -192,7 +240,7 @@ where input: &ArrayBase, delta: &ArrayBase, gamma: Self::Elem, - ) -> crate::Result { + ) -> Option { // compute the weight gradient let weight_delta = delta.t().dot(input); // update the weights and bias @@ -200,11 +248,11 @@ where self.bias_mut() .apply_gradient(&delta.sum_axis(Axis(0)), gamma)?; // return the sum of the squared delta - Ok(delta.pow2().sum()) + Some(delta.pow2().sum()) } } -impl Backward, ArrayBase> for Params +impl Backward, ArrayBase> for Params where A: Float + FromPrimitive + ScalarOperand, S: Data, @@ -215,21 +263,21 @@ where fn backward( &mut self, - input: &ArrayBase, - delta: &ArrayBase, + input: &ArrayBase, + delta: &ArrayBase, gamma: 
Self::Elem, - ) -> crate::Result { + ) -> Option { // compute the weight gradient let weight_delta = input * delta; // update the weights and bias self.weights_mut().apply_gradient(&weight_delta, gamma)?; self.bias_mut().apply_gradient(delta, gamma)?; // return the sum of the squared delta - Ok(delta.pow2().sum()) + Some(delta.pow2().sum()) } } -impl Backward, ArrayBase> for Params +impl Backward, ArrayBase> for Params where A: Float + FromPrimitive + ScalarOperand, S: Data, @@ -240,21 +288,21 @@ where fn backward( &mut self, - input: &ArrayBase, - delta: &ArrayBase, + input: &ArrayBase, + delta: &ArrayBase, gamma: Self::Elem, - ) -> crate::Result { + ) -> Option { // compute the weight gradient let dw = &self.weights * delta.t().dot(input); // update the weights and bias self.weights_mut().apply_gradient(&dw, gamma)?; self.bias_mut().apply_gradient(delta, gamma)?; // return the sum of the squared delta - Ok(delta.pow2().sum()) + Some(delta.pow2().sum()) } } -impl Backward, ArrayBase> for Params +impl Backward, ArrayBase> for Params where A: Float + FromPrimitive + ScalarOperand, S: Data, @@ -265,10 +313,10 @@ where fn backward( &mut self, - input: &ArrayBase, - delta: &ArrayBase, + input: &ArrayBase, + delta: &ArrayBase, gamma: Self::Elem, - ) -> crate::Result { + ) -> Option { // compute the weight gradient let weight_delta = input.dot(&delta.t()); // compute the bias gradient @@ -277,11 +325,13 @@ where self.weights_mut().apply_gradient(&weight_delta, gamma)?; self.bias_mut().apply_gradient(&bias_delta, gamma)?; // return the sum of the squared delta - Ok(delta.pow2().sum()) + let y = input.dot(self.weights()) + self.bias(); + let res = (&y - delta).pow2().sum(); + Some(res) } } -impl Forward for ParamsBase +impl Forward for ParamsBase where A: Clone, D: Dimension, @@ -291,8 +341,7 @@ where { type Output = Z; - fn forward(&self, input: &X) -> crate::Result { - let output = input.dot(&self.weights) + &self.bias; - Ok(output) + fn forward(&self, input: &X) -> 
Option { + Some(input.dot(self.weights()) + self.bias()) } } diff --git a/core/src/params/impls/impl_params_rand.rs b/params/src/impls/impl_params_rand.rs similarity index 95% rename from core/src/params/impls/impl_params_rand.rs rename to params/src/impls/impl_params_rand.rs index 9c81ec33..d61fd71f 100644 --- a/core/src/params/impls/impl_params_rand.rs +++ b/params/src/impls/impl_params_rand.rs @@ -8,7 +8,7 @@ use ndarray::{DataOwned, Dimension, RawData, RemoveAxis, ScalarOperand, ShapeBui use num_traits::{Float, FromPrimitive}; use rand_distr::Distribution; -impl ParamsBase +impl ParamsBase where A: Float + FromPrimitive + ScalarOperand, D: Dimension, diff --git a/core/src/params/impls/impl_params_serde.rs b/params/src/impls/impl_params_serde.rs similarity index 93% rename from core/src/params/impls/impl_params_serde.rs rename to params/src/impls/impl_params_serde.rs index 34421d75..07bce482 100644 --- a/core/src/params/impls/impl_params_serde.rs +++ b/params/src/impls/impl_params_serde.rs @@ -26,7 +26,7 @@ where A: Deserialize<'a>, ::Smaller: Deserialize<'a>, { - type Value = ParamsBase; + type Value = ParamsBase; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("a ParamsBase object") @@ -47,7 +47,7 @@ where } } -impl<'a, A, S, D> Deserialize<'a> for ParamsBase +impl<'a, A, S, D> Deserialize<'a> for ParamsBase where D: Dimension + Deserialize<'a>, S: DataOwned, @@ -68,7 +68,7 @@ where } } -impl Serialize for ParamsBase +impl Serialize for ParamsBase where A: Serialize, D: Dimension + Serialize, diff --git a/core/src/params/iter.rs b/params/src/iter.rs similarity index 100% rename from core/src/params/iter.rs rename to params/src/iter.rs diff --git a/core/src/params/mod.rs b/params/src/lib.rs similarity index 56% rename from core/src/params/mod.rs rename to params/src/lib.rs index 46ef101b..c1636594 100644 --- a/core/src/params/mod.rs +++ b/params/src/lib.rs @@ -10,12 +10,32 @@ //! //! 
The associated types follow suite with the [`ndarray`] crate, each of which defines a //! different style of representation for the parameters. +//! +#![cfg_attr(not(feature = "std"), no_std)] +#![allow( + clippy::missing_safety_doc, + clippy::module_inception, + clippy::needless_doctest_main, + clippy::upper_case_acronyms +)] + +#[cfg(feature = "alloc")] +extern crate alloc; + +#[cfg(all(not(feature = "alloc"), not(feature = "std")))] +compile_error! { + "Either the \"alloc\" or \"std\" feature must be enabled for this crate." +} + #[doc(inline)] -pub use self::{error::ParamsError, params::ParamsBase, types::*}; +pub use self::{error::*, params::ParamsBase, types::*}; + +#[cfg(feature = "init")] +extern crate concision_init as init; -/// this module provides the [`ParamsError`] type for handling various errors within the module +/// Error handling for parameters pub mod error; -/// this module implements various iterators for the [`ParamsBase`] +/// The [`iter`] module implements various iterators for parameters pub mod iter; mod params; @@ -35,7 +55,7 @@ mod impls { } mod types { - //! this module defines various types and type aliases for the `params` module + //! Additional types supporting the params module #[doc(inline)] pub use self::prelude::*; @@ -47,8 +67,9 @@ mod types { } } -pub(crate) mod prelude { - pub use super::error::ParamsError; - pub use super::params::ParamsBase; - pub use super::types::*; +#[doc(hidden)] +pub mod prelude { + pub use crate::error::ParamsError; + pub use crate::params::*; + pub use crate::types::*; } diff --git a/core/src/params/params.rs b/params/src/params.rs similarity index 84% rename from core/src/params/params.rs rename to params/src/params.rs index d3a3eff8..d62977f2 100644 --- a/core/src/params/params.rs +++ b/params/src/params.rs @@ -11,22 +11,22 @@ use ndarray::{ /// model where the bias tensor is always `n-1` dimensions smaller than the `weights` tensor. 
/// Consequently, this constrains the [`ParamsBase`] implementation to only support dimensions /// that can be reduced by one axis (i.e. $`\mbox{rank}(D)>0`$), which is typically the "zero-th" axis. -pub struct ParamsBase +pub struct ParamsBase::Elem> where D: Dimension, - S: RawData, + S: RawData, { - pub(crate) bias: ArrayBase, - pub(crate) weights: ArrayBase, + pub(crate) bias: ArrayBase, + pub(crate) weights: ArrayBase, } -impl ParamsBase +impl ParamsBase where D: Dimension, S: RawData, { /// create a new instance of the [`ParamsBase`] with the given bias and weights - pub const fn new(bias: ArrayBase, weights: ArrayBase) -> Self { + pub const fn new(bias: ArrayBase, weights: ArrayBase) -> Self { Self { bias, weights } } /// returns a new instance of the [`ParamsBase`] using the initialization routine @@ -63,7 +63,7 @@ where Self::new(bias, weights) } /// create a new instance of the [`ParamsBase`] with the given bias used the default weights - pub fn from_bias(shape: Sh, bias: ArrayBase) -> Self + pub fn from_bias(shape: Sh, bias: ArrayBase) -> Self where A: Clone + Default, D: RemoveAxis, @@ -75,7 +75,7 @@ where } /// create a new instance of the [`ParamsBase`] with the given weights used the default /// bias - pub fn from_weights(shape: Sh, weights: ArrayBase) -> Self + pub fn from_weights(shape: Sh, weights: ArrayBase) -> Self where A: Clone + Default, D: RemoveAxis, @@ -132,23 +132,23 @@ where Self::from_elem(shape, A::zero()) } /// returns an immutable reference to the bias - pub const fn bias(&self) -> &ArrayBase { + pub const fn bias(&self) -> &ArrayBase { &self.bias } /// returns a mutable reference to the bias - pub const fn bias_mut(&mut self) -> &mut ArrayBase { + pub const fn bias_mut(&mut self) -> &mut ArrayBase { &mut self.bias } /// returns an immutable reference to the weights - pub const fn weights(&self) -> &ArrayBase { + pub const fn weights(&self) -> &ArrayBase { &self.weights } /// returns a mutable reference to the weights - pub const fn 
weights_mut(&mut self) -> &mut ArrayBase { + pub const fn weights_mut(&mut self) -> &mut ArrayBase { &mut self.weights } /// assign the bias - pub fn assign_bias(&mut self, bias: &ArrayBase) -> &mut Self + pub fn assign_bias(&mut self, bias: &ArrayBase) -> &mut Self where A: Clone, S: DataMut, @@ -157,7 +157,7 @@ where self } /// assign the weights - pub fn assign_weights(&mut self, weights: &ArrayBase) -> &mut Self + pub fn assign_weights(&mut self, weights: &ArrayBase) -> &mut Self where A: Clone, S: DataMut, @@ -166,41 +166,26 @@ where self } /// replace the bias and return the previous state; uses [replace](core::mem::replace) - pub fn replace_bias(&mut self, bias: ArrayBase) -> ArrayBase { + pub fn replace_bias( + &mut self, + bias: ArrayBase, + ) -> ArrayBase { core::mem::replace(&mut self.bias, bias) } /// replace the weights and return the previous state; uses [replace](core::mem::replace) - pub fn replace_weights(&mut self, weights: ArrayBase) -> ArrayBase { + pub fn replace_weights(&mut self, weights: ArrayBase) -> ArrayBase { core::mem::replace(&mut self.weights, weights) } /// set the bias - pub fn set_bias(&mut self, bias: ArrayBase) -> &mut Self { + pub fn set_bias(&mut self, bias: ArrayBase) -> &mut Self { *self.bias_mut() = bias; self } /// set the weights - pub fn set_weights(&mut self, weights: ArrayBase) -> &mut Self { + pub fn set_weights(&mut self, weights: ArrayBase) -> &mut Self { *self.weights_mut() = weights; self } - /// perform a single backpropagation step - pub fn backward(&mut self, input: &X, grad: &Y, lr: A) -> crate::Result - where - A: Clone, - S: Data, - Self: crate::Backward, - { - >::backward(self, input, grad, lr) - } - /// forward propagation - pub fn forward(&self, input: &X) -> crate::Result - where - A: Clone, - S: Data, - Self: crate::Forward, - { - >::forward(self, input) - } /// returns the dimensions of the weights pub fn dim(&self) -> D::Pattern { self.weights().dim() @@ -230,13 +215,19 @@ where 
self.weights().raw_dim() } /// returns the shape of the parameters; uses the shape of the weight tensor - pub fn shape(&self) -> &[usize] { - self.weights().shape() + pub fn shape<'a>(&'a self) -> &'a [usize] + where + A: 'a, + { + self.weights.shape() } /// returns the shape of the bias tensor; the shape should be equivalent to that of the /// weight tensor minus the "zero-th" axis - pub fn shape_bias(&self) -> &[usize] { - self.bias().shape() + pub fn shape_bias(&self) -> &[usize] + where + A: 'static, + { + self.bias.shape() } /// returns the total number of parameters within the layer pub fn size(&self) -> usize { diff --git a/core/src/params/types/aliases.rs b/params/src/types/aliases.rs similarity index 84% rename from core/src/params/types/aliases.rs rename to params/src/types/aliases.rs index e8b33189..fed92869 100644 --- a/core/src/params/types/aliases.rs +++ b/params/src/types/aliases.rs @@ -7,18 +7,18 @@ use crate::params::ParamsBase; use ndarray::{CowRepr, Ix2, OwnedArcRepr, OwnedRepr, RawViewRepr, ViewRepr}; /// A type alias for a [`ParamsBase`] with an owned internal layout -pub type Params = ParamsBase, D>; +pub type Params = ParamsBase, D, A>; /// A type alias for shared parameters -pub type ArcParams = ParamsBase, D>; +pub type ArcParams = ParamsBase, D, A>; /// A type alias for an immutable view of the parameters -pub type ParamsView<'a, A, D = Ix2> = ParamsBase, D>; +pub type ParamsView<'a, A, D = Ix2> = ParamsBase, D, A>; /// A type alias for a mutable view of the parameters -pub type ParamsViewMut<'a, A, D = Ix2> = ParamsBase, D>; +pub type ParamsViewMut<'a, A, D = Ix2> = ParamsBase, D, A>; /// A type alias for a [`ParamsBase`] with a _borrowed_ internal layout -pub type CowParams<'a, A, D = Ix2> = ParamsBase, D>; +pub type CowParams<'a, A, D = Ix2> = ParamsBase, D, A>; /// A type alias for the [`ParamsBase`] whose elements are of type `*const A` using a /// [`RawViewRepr`] layout -pub type RawViewParams = ParamsBase, D>; +pub type 
RawViewParams = ParamsBase, D, A>; /// A type alias for the [`ParamsBase`] whose elements are of type `*mut A` using a /// [`RawViewRepr`] layout -pub type RawMutParams = ParamsBase, D>; +pub type RawMutParams = ParamsBase, D, A>; diff --git a/params/tests/default.rs b/params/tests/default.rs new file mode 100644 index 00000000..233a07af --- /dev/null +++ b/params/tests/default.rs @@ -0,0 +1,17 @@ +/* + Appellation: default + Contrib: FL03 +*/ + +fn add(a: A, b: B) -> C +where + A: core::ops::Add, +{ + a + b +} + +#[test] +fn compiles() { + assert_eq!(add(10, 10), 20); + assert_ne!(add(1, 1), 3); +} diff --git a/params/tests/params.rs b/params/tests/params.rs new file mode 100644 index 00000000..5dba0707 --- /dev/null +++ b/params/tests/params.rs @@ -0,0 +1,52 @@ +/* + Appellation: params + Contrib: @FL03 +*/ +extern crate concision_params as cnc; +use concision_params::Params; + +#[test] +fn test_params_ones() { + // weights retain the given shape (d_in, d_out) + // bias retains the shape (d_out,) + let ones = Params::::ones((3, 4)); + assert_eq!(ones.dim(), (3, 4)); + assert_eq!(ones.bias().dim(), 4); + assert!( + ones.iter() + .all(|(w, b)| w.iter().all(|&wi| wi == 1.0) && b == &1.0) + ); +} + +#[test] +fn test_params_zeros() { + // weights retain the given shape (d_in, d_out) + // bias retains the shape (d_out,) + let zeros = Params::::zeros((3, 4)); + assert_eq!(zeros.dim(), (3, 4)); + assert_eq!(zeros.bias().dim(), 4); + assert!( + zeros + .iter() + .all(|(w, b)| w.iter().all(|&wi| wi == 0.0) && b == &0.0) + ); +} + +#[test] +#[cfg(feature = "init")] +fn test_params_init() { + use concision_init::Initialize; + + let lecun = Params::::lecun_normal((3, 4)); + assert_eq!(lecun.dim(), (3, 4)); + + let glorot_norm = Params::::glorot_normal((3, 4)); + assert_eq!(glorot_norm.dim(), (3, 4)); + assert_ne!(lecun, glorot_norm); + let glorot_uniform = Params::::glorot_uniform((3, 4)).expect("glorot_uniform failed"); + assert_eq!(glorot_uniform.dim(), (3, 4)); + 
assert_ne!(lecun, glorot_uniform); + assert_ne!(glorot_norm, glorot_uniform); + let truncnorm = Params::::truncnorm((3, 4), 0.0, 1.0).expect("truncnorm failed"); + assert_eq!(truncnorm.dim(), (3, 4)); +} diff --git a/traits/Cargo.toml b/traits/Cargo.toml new file mode 100644 index 00000000..9a1687a7 --- /dev/null +++ b/traits/Cargo.toml @@ -0,0 +1,91 @@ +[package] +build = "build.rs" +description = "this crate implements the core modules for the concision framework" +name = "concision-traits" + +authors.workspace = true +categories.workspace = true +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +readme.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[package.metadata.docs.rs] +all-features = false +features = ["full"] +rustc-args = ["--cfg", "docsrs"] +version = "v{{version}}" + +[package.metadata.release] +no-dev-version = true +tag-name = "{{version}}" + +[lib] +bench = false +crate-type = ["cdylib", "rlib"] +doc = true +doctest = true +test = true + +[dependencies] +# custom +variants = { workspace = true } +# error-handling +thiserror = { workspace = true } +# macros & utilities +paste = { workspace = true } +# mathematics +approx = { optional = true, workspace = true } +ndarray = { workspace = true } +num-complex = { optional = true, workspace = true } +num-traits = { workspace = true } +# random +getrandom = { default-features = false, optional = true, workspace = true } +rand = { optional = true, workspace = true } +rand_distr = { optional = true, workspace = true } + +[dev-dependencies] +lazy_static = { workspace = true } + +[features] +default = ["std"] + +full = [ + "default", + "approx", + "complex", + "rand", +] + +nightly = [] + + +# ************* [FF:Dependencies] ************* +std = [ + "alloc", "ndarray/std", + "num-complex?/std", + "num-traits/std", + "rand?/std", + "rand?/std_rng", + "thiserror/std" +] + +wasi = [] + +wasm = 
["getrandom?/wasm_js"] +# ************* [FF:Dependencies] ************* +alloc = [] + +approx = ["dep:approx", "ndarray/approx"] + +blas = ["ndarray/blas"] + +complex = ["dep:num-complex"] + +rand = ["dep:rand", "dep:rand_distr", "num-complex?/rand", "rng"] + +rng = ["dep:getrandom", "rand?/small_rng", "rand?/thread_rng"] diff --git a/traits/build.rs b/traits/build.rs new file mode 100644 index 00000000..940a4ce4 --- /dev/null +++ b/traits/build.rs @@ -0,0 +1,8 @@ +/* + Appellation: build + Contrib: FL03 +*/ + +fn main() { + println!("cargo::rustc-check-cfg=cfg(no_std)"); +} diff --git a/core/src/traits/apply.rs b/traits/src/apply.rs similarity index 88% rename from core/src/traits/apply.rs rename to traits/src/apply.rs index 3bd6cafc..75da1b81 100644 --- a/core/src/traits/apply.rs +++ b/traits/src/apply.rs @@ -61,7 +61,6 @@ pub trait ApplyMut { ************* Implementations ************* */ use ndarray::{Array, ArrayBase, Data, DataMut, Dimension, ScalarOperand}; -use ndtensor::{Tensor, TensorBase}; impl CallInto for T { type Output = T; @@ -98,7 +97,7 @@ where } } -impl Apply for ArrayBase +impl Apply for ArrayBase where A: ScalarOperand, D: Dimension, @@ -114,23 +113,23 @@ where } } -impl Apply for TensorBase -where - A: ScalarOperand, - D: Dimension, - S: Data, -{ - type Cont = Tensor; - - fn apply(&self, f: F) -> Self::Cont - where - F: Fn(A) -> V, - { - self.map(f) - } -} - -impl Apply for &ArrayBase +// impl Apply for TensorBase +// where +// A: ScalarOperand, +// D: Dimension, +// S: Data, +// { +// type Cont = Tensor; + +// fn apply(&self, f: F) -> Self::Cont +// where +// F: Fn(A) -> V, +// { +// self.map(f) +// } +// } + +impl Apply for &ArrayBase where A: ScalarOperand, D: Dimension, @@ -146,7 +145,7 @@ where } } -impl Apply for &mut ArrayBase +impl Apply for &mut ArrayBase where A: ScalarOperand, D: Dimension, @@ -162,7 +161,7 @@ where } } -impl ApplyMut for ArrayBase +impl ApplyMut for ArrayBase where A: ScalarOperand, D: Dimension, @@ -179,7 +178,7 
@@ where } } -impl ApplyMut for &mut ArrayBase +impl ApplyMut for &mut ArrayBase where A: ScalarOperand, D: Dimension, diff --git a/core/src/traits/clip.rs b/traits/src/clip.rs similarity index 96% rename from core/src/traits/clip.rs rename to traits/src/clip.rs index c237833d..8a17a55f 100644 --- a/core/src/traits/clip.rs +++ b/traits/src/clip.rs @@ -38,11 +38,11 @@ pub trait ClipMut { /* ************* Implementations ************* */ -use crate::ops::{L1Norm, L2Norm}; +use crate::norm::{L1Norm, L2Norm}; use ndarray::{ArrayBase, Dimension, ScalarOperand}; use num_traits::Float; -impl Clip for ArrayBase +impl Clip for ArrayBase where A: 'static + Clone + PartialOrd, S: ndarray::Data, @@ -55,7 +55,7 @@ where } } -impl ClipMut for ArrayBase +impl ClipMut for ArrayBase where A: Float + ScalarOperand, S: ndarray::DataMut, diff --git a/core/src/traits/codex.rs b/traits/src/codex.rs similarity index 100% rename from core/src/traits/codex.rs rename to traits/src/codex.rs diff --git a/core/src/traits/convert.rs b/traits/src/convert.rs similarity index 100% rename from core/src/traits/convert.rs rename to traits/src/convert.rs diff --git a/core/src/traits/entropy.rs b/traits/src/entropy.rs similarity index 89% rename from core/src/traits/entropy.rs rename to traits/src/entropy.rs index e59aec4c..3562375c 100644 --- a/core/src/traits/entropy.rs +++ b/traits/src/entropy.rs @@ -29,7 +29,7 @@ pub trait MeanSquaredError { use ndarray::{ArrayBase, Data, Dimension, ScalarOperand}; use num_traits::{Float, FromPrimitive}; -impl CrossEntropy for ArrayBase +impl CrossEntropy for ArrayBase where A: Float + FromPrimitive + ScalarOperand, D: Dimension, @@ -42,7 +42,7 @@ where } } -impl MeanAbsoluteError for ArrayBase +impl MeanAbsoluteError for ArrayBase where A: Float + FromPrimitive + ScalarOperand, D: Dimension, @@ -55,7 +55,7 @@ where } } -impl MeanSquaredError for ArrayBase +impl MeanSquaredError for ArrayBase where A: Float + FromPrimitive + ScalarOperand, D: Dimension, diff --git 
a/traits/src/error.rs b/traits/src/error.rs new file mode 100644 index 00000000..568cc1a7 --- /dev/null +++ b/traits/src/error.rs @@ -0,0 +1,49 @@ +/* + Appellation: error + Contrib: @FL03 +*/ +//! This module implements the core [`Error`] type for the framework and provides a [`Result`] +//! type alias for convenience. +#[cfg(feature = "alloc")] +use alloc::{boxed::Box, string::String}; + +#[allow(dead_code)] +/// a type alias for a [Result](core::result::Result) configured with an [`Error`] as its error +/// type. +pub type Result = core::result::Result; + +/// The [`Error`] type enumerates various errors that can occur within the framework. +#[derive(Debug, thiserror::Error)] +#[non_exhaustive] +pub enum Error { + #[error(transparent)] + FmtError(#[from] core::fmt::Error), + #[cfg(feature = "std")] + #[error(transparent)] + IoError(#[from] std::io::Error), + #[error(transparent)] + ShapeError(#[from] ndarray::ShapeError), + #[error(transparent)] + #[cfg(feature = "rand")] + UniformError(#[from] rand_distr::uniform::Error), + #[cfg(feature = "alloc")] + #[error(transparent)] + BoxError(#[from] Box), + #[cfg(feature = "alloc")] + #[error("Unknown Error: {0}")] + Unknown(String), +} + +#[cfg(feature = "alloc")] +impl From for Error { + fn from(value: String) -> Self { + Self::Unknown(value) + } +} + +#[cfg(feature = "alloc")] +impl From<&str> for Error { + fn from(value: &str) -> Self { + String::from(value).into() + } +} diff --git a/core/src/ops/fill.rs b/traits/src/fill.rs similarity index 87% rename from core/src/ops/fill.rs rename to traits/src/fill.rs index d50d95a5..0c765ef9 100644 --- a/core/src/ops/fill.rs +++ b/traits/src/fill.rs @@ -26,14 +26,14 @@ pub trait IsSquare { ******** implementations ******** */ -impl MaskFill for ArrayBase +impl MaskFill for ArrayBase where A: Clone, D: Dimension, S: DataMut, Self: Clone, { - type Output = ArrayBase; + type Output = ArrayBase; fn masked_fill(&self, mask: &Array, value: A) -> Self::Output { let mut arr = 
self.clone(); @@ -46,10 +46,10 @@ where } } -impl IsSquare for ArrayBase +impl IsSquare for ArrayBase where D: Dimension, - S: RawData, + S: RawData, { fn is_square(&self) -> bool { let first = self.shape().first().unwrap(); diff --git a/core/src/traits/gradient.rs b/traits/src/gradient.rs similarity index 76% rename from core/src/traits/gradient.rs rename to traits/src/gradient.rs index 22360389..1e3d957a 100644 --- a/core/src/traits/gradient.rs +++ b/traits/src/gradient.rs @@ -17,14 +17,9 @@ pub trait Gradient { pub trait ApplyGradient { type Output; - fn apply_gradient(&mut self, grad: &Delta, lr: T) -> crate::Result; + fn apply_gradient(&mut self, grad: &Delta, lr: T) -> Option; - fn apply_gradient_with_decay( - &mut self, - grad: &Delta, - lr: T, - decay: T, - ) -> crate::Result; + fn apply_gradient_with_decay(&mut self, grad: &Delta, lr: T, decay: T) -> Option; } /// This trait extends the [ApplyGradient] trait by allowing for momentum-based optimization @@ -37,7 +32,7 @@ pub trait ApplyGradientExt: ApplyGradient { lr: T, momentum: T, velocity: &mut Self::Velocity, - ) -> crate::Result; + ) -> Option; fn apply_gradient_with_decay_and_momentum( &mut self, @@ -46,17 +41,17 @@ pub trait ApplyGradientExt: ApplyGradient { decay: T, momentum: T, velocity: &mut Self::Velocity, - ) -> crate::Result; + ) -> Option; } /* ************* Implementations ************* */ -use ndarray::{Array, ArrayBase, Data, DataMut, Dimension, ScalarOperand, ShapeError}; +use ndarray::{Array, ArrayBase, Data, DataMut, Dimension, ScalarOperand}; use num_traits::{Float, FromPrimitive}; -impl ApplyGradient, A> for ArrayBase +impl ApplyGradient, A> for ArrayBase where A: Float + FromPrimitive + ScalarOperand, S: DataMut, @@ -65,9 +60,10 @@ where { type Output = (); - fn apply_gradient(&mut self, grad: &ArrayBase, lr: A) -> crate::Result { + fn apply_gradient(&mut self, grad: &ArrayBase, lr: A) -> Option { if self.shape() != grad.shape() { - return 
Err(ShapeError::from_kind(ndarray::ErrorKind::IncompatibleShape).into()); + // return Err(ShapeError::from_kind(ndarray::ErrorKind::IncompatibleShape).into()); + return None; } let batch_size = if !grad.shape().is_empty() { A::from_usize(self.shape()[0]).unwrap() @@ -75,7 +71,7 @@ where A::one() }; self.scaled_add(lr / batch_size, grad); - Ok(()) + Some(()) } fn apply_gradient_with_decay( @@ -83,21 +79,23 @@ where grad: &ArrayBase, lr: A, decay: A, - ) -> crate::Result { + ) -> Option { if self.shape() != grad.shape() { - return Err(ShapeError::from_kind(ndarray::ErrorKind::IncompatibleShape).into()); + // return Err(ShapeError::from_kind(ndarray::ErrorKind::IncompatibleShape).into()); + return None; } let batch_size = if !grad.shape().is_empty() { A::from_usize(self.shape()[0]).unwrap() } else { A::one() }; - self.scaled_add(lr / batch_size, &(grad + &*self * decay)); - Ok(()) + let rhs = grad + &*self * decay; + self.scaled_add(lr / batch_size, &rhs); + Some(()) } } -impl ApplyGradientExt, A> for ArrayBase +impl ApplyGradientExt, A> for ArrayBase where A: Float + FromPrimitive + ScalarOperand, S: DataMut, @@ -112,9 +110,10 @@ where lr: A, momentum: A, velocity: &mut Self::Velocity, - ) -> crate::Result { + ) -> Option { if self.shape() != grad.shape() { - return Err(ShapeError::from_kind(ndarray::ErrorKind::IncompatibleShape).into()); + // return Err(ShapeError::from_kind(ndarray::ErrorKind::IncompatibleShape).into()); + return None; } let batch_size = if !grad.shape().is_empty() { A::from_usize(self.shape()[0]).unwrap() @@ -123,7 +122,7 @@ where }; *velocity = &*velocity * momentum + grad * (A::one() - momentum); self.scaled_add(lr / batch_size, velocity); - Ok(()) + Some(()) } fn apply_gradient_with_decay_and_momentum( @@ -133,11 +132,10 @@ where decay: A, momentum: A, velocity: &mut Self::Velocity, - ) -> crate::Result { + ) -> Option { if self.shape() != grad.shape() { - return Err( - 
ndarray::ShapeError::from_kind(ndarray::ErrorKind::IncompatibleShape).into(), - ); + // return Err(ShapeError::from_kind(ndarray::ErrorKind::IncompatibleShape).into()); + return None; } let batch_size = if !grad.shape().is_empty() { A::from_usize(self.shape()[0]).unwrap() @@ -148,6 +146,6 @@ where let adjusted_grad = grad + &*self * decay; *velocity = &*velocity * momentum + adjusted_grad * (A::one() - momentum); self.scaled_add(lr / batch_size, velocity); - Ok(()) + Some(()) } } diff --git a/traits/src/lib.rs b/traits/src/lib.rs new file mode 100644 index 00000000..e69b0d1f --- /dev/null +++ b/traits/src/lib.rs @@ -0,0 +1,69 @@ +/* + Appellation: concision-traits + Contrib: @FL03 +*/ +//! Traits for the concicion machine learning framework +#![allow( + clippy::missing_safety_doc, + clippy::module_inception, + clippy::needless_doctest_main, + clippy::upper_case_acronyms +)] +#![cfg_attr(not(feature = "std"), no_std)] +#![cfg_attr(feature = "nightly", feature(allocator_api))] +#![crate_type = "lib"] + +#[cfg(not(all(feature = "std", feature = "alloc")))] +compiler_error! { + "At least one of the 'std' or 'alloc' features must be enabled." 
+} + +#[cfg(feature = "alloc")] +extern crate alloc; + +#[macro_use] +pub(crate) mod macros { + #[macro_use] + pub mod seal; +} + +pub mod error; + +mod apply; +mod clip; +mod codex; +mod convert; +mod fill; +mod gradient; +mod like; +mod norm; +mod propagation; +mod reshape; +mod shape; +mod store; +mod tensor_ops; +mod wnb; + +// re-exports +#[doc(inline)] +pub use self::error::*; +#[doc(inline)] +pub use self::prelude::*; + +#[doc(hidden)] +pub mod prelude { + pub use crate::apply::*; + pub use crate::clip::*; + pub use crate::codex::*; + pub use crate::convert::*; + pub use crate::fill::*; + pub use crate::gradient::*; + pub use crate::like::*; + pub use crate::norm::*; + pub use crate::propagation::*; + pub use crate::reshape::*; + pub use crate::shape::*; + pub use crate::store::*; + pub use crate::tensor_ops::*; + pub use crate::wnb::*; +} diff --git a/core/src/traits/like.rs b/traits/src/like.rs similarity index 87% rename from core/src/traits/like.rs rename to traits/src/like.rs index 2d168d7e..5baddb54 100644 --- a/core/src/traits/like.rs +++ b/traits/src/like.rs @@ -3,7 +3,7 @@ Contrib: FL03 */ use ndarray::{ArrayBase, DataOwned, Dimension, Ix2, ShapeBuilder}; -use num::traits::{Num, One, Zero}; +use num_traits::{Num, One, Zero}; pub trait NdLike where @@ -60,13 +60,13 @@ ndlike! 
{ /* ************* Implementations ************* */ -impl NdLike for ArrayBase +impl NdLike for ArrayBase where A: Clone + Num, D: Dimension, S: DataOwned, { - type Output = ArrayBase; + type Output = ArrayBase; fn default_like(&self) -> Self::Output where @@ -90,13 +90,13 @@ where } } -impl ArrayLike for ArrayBase +impl ArrayLike for ArrayBase where A: Clone, D: Dimension, S: ndarray::DataOwned, { - type Output = ArrayBase; + type Output = ArrayBase; fn array_like(&self, shape: Sh, elem: A) -> Self::Output where @@ -110,13 +110,13 @@ where } } -impl FillLike for ArrayBase +impl FillLike for ArrayBase where A: Clone, D: Dimension, S: DataOwned, { - type Output = ArrayBase; + type Output = ArrayBase; fn fill_like(&self, elem: A) -> Self::Output { ArrayBase::from_elem(self.dim(), elem) @@ -126,13 +126,13 @@ where macro_rules! impl_ndlike { ($name:ident::$method:ident.$call:ident: $($p:tt)*) => { - impl $name for ArrayBase + impl $name for ArrayBase where A: $($p)*, D: Dimension, S: DataOwned, { - type Output = ArrayBase; + type Output = ArrayBase; fn $method(&self) -> Self::Output { ArrayBase::$call(self.dim()) diff --git a/traits/src/macros/seal.rs b/traits/src/macros/seal.rs new file mode 100644 index 00000000..05f2bbf6 --- /dev/null +++ b/traits/src/macros/seal.rs @@ -0,0 +1,50 @@ +/* + Appellation: seal + Contrib: FL03 +*/ +//! The public parts of this private module are used to create traits +//! that cannot be implemented outside of our own crate. This way we +//! can feel free to extend those traits without worrying about it +//! being a breaking change for other implementations. +//! +//! ## Usage +//! +//! To define a private trait, you can use the [`private!`] macro, which will define a hidden +//! method `__private__` that can only be implemented within the crate. + +/// If this type is pub but not publicly reachable, third parties +/// can't name it and can't implement traits using it. 
+#[allow(dead_code)] +pub struct Seal; +/// the [`private!`] macro is used to seal a particular trait, defining a hidden method that +/// may only be implemented within the bounds of the crate. +#[allow(unused_macros)] +macro_rules! private { + () => { + /// This trait is private to implement; this method exists to make it + /// impossible to implement outside the crate. + #[doc(hidden)] + fn __private__(&self) -> $crate::macros::seal::Seal; + }; +} +/// the [`seal!`] macro is used to implement a private method on a type, which is used to seal +/// the type so that it cannot be implemented outside of the crate. +#[allow(unused_macros)] +macro_rules! seal { + () => { + fn __private__(&self) -> $crate::macros::seal::Seal { + $crate::macros::seal::Seal + } + }; +} +/// this macros is used to implement a trait for a type, sealing it so that +/// it cannot be implemented outside of the crate. This is most usefuly for creating other +/// macros that can be used to implement some raw, sealed trait on the given _types_. +#[allow(unused_macros)] +macro_rules! sealed { + (impl$(<$($T:ident),*>)? $trait:ident for $name:ident$(<$($V:ident),*>)? $(where $($rest:tt)*)?) => { + impl$(<$($T),*>)? $trait for $name$(<$($V),*>)? $(where $($rest)*)? { + seal!(); + } + }; +} diff --git a/core/src/ops/norm/l_norm.rs b/traits/src/norm.rs similarity index 92% rename from core/src/ops/norm/l_norm.rs rename to traits/src/norm.rs index 93494f31..e4d9da00 100644 --- a/core/src/ops/norm/l_norm.rs +++ b/traits/src/norm.rs @@ -48,7 +48,7 @@ where macro_rules! impl_norm { ($trait:ident::$method:ident($($param:ident: $type:ty),*) => $self:ident$(.$call:ident())*) => { - impl $trait for ArrayBase + impl $trait for ArrayBase where A: Float + ScalarOperand, D: Dimension, @@ -61,7 +61,7 @@ macro_rules! impl_norm { } } - impl<'a, A, S, D> $trait for &'a ArrayBase + impl<'a, A, S, D> $trait for &'a ArrayBase where A: Float + ScalarOperand, D: Dimension, @@ -74,7 +74,7 @@ macro_rules! 
impl_norm { } } - impl<'a, A, S, D> $trait for &'a mut ArrayBase + impl<'a, A, S, D> $trait for &'a mut ArrayBase where A: Float + ScalarOperand, D: Dimension, diff --git a/core/src/traits/propagation.rs b/traits/src/propagation.rs similarity index 70% rename from core/src/traits/propagation.rs rename to traits/src/propagation.rs index d36a6b7f..38a6bbc1 100644 --- a/core/src/traits/propagation.rs +++ b/traits/src/propagation.rs @@ -8,23 +8,18 @@ pub trait Backward { type Elem; type Output; - fn backward( - &mut self, - input: &X, - delta: &Delta, - gamma: Self::Elem, - ) -> crate::Result; + fn backward(&mut self, input: &X, delta: &Delta, gamma: Self::Elem) -> Option; } /// This trait denotes entities capable of performing a single forward step pub trait Forward { type Output; /// a single forward step - fn forward(&self, input: &Rhs) -> crate::Result; + fn forward(&self, input: &Rhs) -> Option; /// this method enables the forward pass to be generically _activated_ using some closure. /// This is useful for isolating the logic of the forward pass from that of the activation /// function and is often used by layers and models. 
- fn forward_then(&self, input: &Rhs, then: F) -> crate::Result + fn forward_then(&self, input: &Rhs, then: F) -> Option where F: FnOnce(Self::Output) -> Self::Output, { @@ -38,7 +33,7 @@ pub trait Forward { use ndarray::linalg::Dot; use ndarray::{ArrayBase, Data, Dimension}; -// impl Backward for ArrayBase +// impl Backward for ArrayBase // where // A: LinalgScalar + FromPrimitive, // D: Dimension, @@ -56,7 +51,7 @@ use ndarray::{ArrayBase, Data, Dimension}; // input: &X, // delta: &Y, // gamma: Self::Elem, -// ) -> crate::Result { +// ) -> Option { // let grad = input.dot(delta); // let next = &self + grad * gamma; // self.assign(&next)?; @@ -65,17 +60,16 @@ use ndarray::{ArrayBase, Data, Dimension}; // } // } -impl Forward for ArrayBase +impl Forward for ArrayBase where A: Clone, D: Dimension, S: Data, - for<'a> X: Dot, Output = Y>, + for<'a> X: Dot, Output = Y>, { type Output = Y; - fn forward(&self, input: &X) -> crate::Result { - let output = input.dot(self); - Ok(output) + fn forward(&self, input: &X) -> Option { + Some(input.dot(self)) } } diff --git a/core/src/ops/reshape.rs b/traits/src/reshape.rs similarity index 93% rename from core/src/ops/reshape.rs rename to traits/src/reshape.rs index 61eb0db7..59021f2f 100644 --- a/core/src/ops/reshape.rs +++ b/traits/src/reshape.rs @@ -39,7 +39,7 @@ where } } -impl Unsqueeze for ArrayBase +impl Unsqueeze for ArrayBase where D: Dimension, S: RawData, @@ -51,7 +51,7 @@ where } } -impl Unsqueeze for &ArrayBase +impl Unsqueeze for &ArrayBase where D: Dimension, S: RawDataClone, diff --git a/core/src/traits/shape.rs b/traits/src/shape.rs similarity index 100% rename from core/src/traits/shape.rs rename to traits/src/shape.rs diff --git a/core/src/traits/store.rs b/traits/src/store.rs similarity index 95% rename from core/src/traits/store.rs rename to traits/src/store.rs index 0f8fd996..ccf11fc4 100644 --- a/core/src/traits/store.rs +++ b/traits/src/store.rs @@ -19,7 +19,7 @@ pub trait Sequential { macro_rules! 
impl_raw_store { (@impl $($name:ident)::*<$T:ident>) => { - impl<$T> $crate::traits::store::RawStore for $($name)::*<$T> { + impl<$T> $crate::store::RawStore for $($name)::*<$T> { type Elem = $T; seal!(); @@ -38,7 +38,7 @@ macro_rules! impl_raw_store { macro_rules! impl_sequential { (@impl $($name:ident)::*<$T:ident>) => { - impl<$T> $crate::traits::store::Sequential for $($name)::*<$T> { + impl<$T> $crate::store::Sequential for $($name)::*<$T> { seal!(); } }; diff --git a/traits/src/tensor_ops.rs b/traits/src/tensor_ops.rs new file mode 100644 index 00000000..e02aec70 --- /dev/null +++ b/traits/src/tensor_ops.rs @@ -0,0 +1,192 @@ +/* + appellation: tensor_ops + authors: @FL03 +*/ + +/// apply an affine transformation to a tensor; +/// affine transformation is defined as `mul * self + add` +pub trait Affine { + type Output; + + fn affine(&self, mul: X, add: Y) -> Self::Output; +} +/// The [`Inverse`] trait generically establishes an interface for computing the inverse of a +/// type, regardless of if its a tensor, scalar, or some other compatible type. +pub trait Inverse { + /// the output, or result, of the inverse operation + type Output; + /// compute the inverse of the current object, producing some [`Output`](Inverse::Output) + fn inverse(&self) -> Self::Output; +} +/// The [`MatMul`] trait defines an interface for matrix multiplication. +pub trait MatMul { + type Output; + + fn matmul(&self, rhs: &Rhs) -> Self::Output; +} +/// The [`MatPow`] trait defines an interface for computing the exponentiation of a matrix. 
+pub trait MatPow { + type Output; + /// raise the tensor to the power of the right-hand side, producing some [`Output`](Matpow::Output) + fn matpow(&self, rhs: Rhs) -> Self::Output; +} + +/// The [`Transpose`] trait generically establishes an interface for transposing a type +pub trait Transpose { + /// the output, or result, of the transposition + type Output; + /// transpose a reference to the current object + fn transpose(&self) -> Self::Output; +} + +/* + ********* Implementations ********* +*/ +use ndarray::linalg::Dot; +use ndarray::{Array, Array2, ArrayBase, Data, Dimension, Ix2, LinalgScalar, ScalarOperand, s}; +use num_traits::{Num, NumAssign}; + +impl Affine for Array +where + A: LinalgScalar + ScalarOperand, + D: Dimension, +{ + type Output = Array; + + fn affine(&self, mul: A, add: A) -> Self::Output { + self * mul + add + } +} + +// #[cfg(not(feature = "blas"))] +impl Inverse for Array +where + T: Copy + NumAssign + ScalarOperand, +{ + type Output = Option; + + fn inverse(&self) -> Self::Output { + let (rows, cols) = self.dim(); + + if !self.is_square() { + return None; // Matrix must be square for inversion + } + + let identity = Array2::eye(rows); + + // Construct an augmented matrix by concatenating the original matrix with an identity matrix + let mut aug = Array2::zeros((rows, 2 * cols)); + aug.slice_mut(s![.., ..cols]).assign(self); + aug.slice_mut(s![.., cols..]).assign(&identity); + + // Perform Gaussian elimination to reduce the left half to the identity matrix + for i in 0..rows { + let pivot = aug[[i, i]]; + + if pivot == T::zero() { + return None; // Matrix is singular + } + + aug.slice_mut(s![i, ..]).mapv_inplace(|x| x / pivot); + + for j in 0..rows { + if i != j { + let am = aug.clone(); + let factor = aug[[j, i]]; + let rhs = am.slice(s![i, ..]); + aug.slice_mut(s![j, ..]) + .zip_mut_with(&rhs, |x, &y| *x -= y * factor); + } + } + } + + // Extract the inverted matrix from the augmented matrix + let inverted = aug.slice(s![.., cols..]); 
+ + Some(inverted.to_owned()) + } +} +// #[cfg(feature = "blas")] +// impl Inverse for Array +// where +// T: Copy + NumAssign + ScalarOperand, +// { +// type Output = Option; + +// fn inverse(&self) -> Self::Output { +// use ndarray_linalg::solve::Inverse; +// self.inv().ok() +// } +// } + +impl MatMul for ArrayBase +where + A: ndarray::LinalgScalar, + D: Dimension, + S: Data, + ArrayBase: Dot, +{ + type Output = Y; + + fn matmul(&self, rhs: &X) -> Self::Output { + >::dot(self, rhs) + } +} + +impl MatMul> for Vec +where + T: Copy + Num, +{ + type Output = T; + + fn matmul(&self, rhs: &Vec) -> T { + self.iter() + .zip(rhs.iter()) + .fold(T::zero(), |acc, (&a, &b)| acc + a * b) + } +} + +impl MatMul<[T; N]> for [T; N] +where + T: Copy + Num, +{ + type Output = T; + + fn matmul(&self, rhs: &[T; N]) -> T { + self.iter() + .zip(rhs.iter()) + .fold(T::zero(), |acc, (&a, &b)| acc + a * b) + } +} +impl MatPow for ArrayBase +where + A: Copy + Num + 'static, + S: Data, + ArrayBase: Clone + Dot, Output = Array>, +{ + type Output = Array; + + fn matpow(&self, rhs: i32) -> Self::Output { + if !self.is_square() { + panic!("Matrix must be square to be raised to a power"); + } + let mut res = Array::eye(self.shape()[0]); + for _ in 0..rhs { + res = res.dot(self); + } + res + } +} + +impl<'a, A, S, D> Transpose for &'a ArrayBase +where + A: 'a, + D: Dimension, + S: Data, +{ + type Output = ndarray::ArrayView<'a, A, D>; + + fn transpose(&self) -> Self::Output { + self.t() + } +} diff --git a/core/src/traits/wnb.rs b/traits/src/wnb.rs similarity index 61% rename from core/src/traits/wnb.rs rename to traits/src/wnb.rs index ad586e4c..5e19f526 100644 --- a/core/src/traits/wnb.rs +++ b/traits/src/wnb.rs @@ -1,16 +1,16 @@ use ndarray::{ArrayBase, Data, DataMut, Dimension, RawData}; -pub trait Weighted +pub trait Weighted::Elem>: Sized where D: Dimension, - S: RawData, + S: RawData, { /// returns the weights of the model - fn weights(&self) -> &ArrayBase; + fn weights(&self) -> 
&ArrayBase; /// returns a mutable reference to the weights of the model - fn weights_mut(&mut self) -> &mut ArrayBase; + fn weights_mut(&mut self) -> &mut ArrayBase; /// assigns the given bias to the current weight - fn assign_weights(&mut self, weights: &ArrayBase) -> &mut Self + fn assign_weights(&mut self, weights: &ArrayBase) -> &mut Self where S: DataMut, S::Elem: Clone, @@ -19,11 +19,11 @@ where self } /// replaces the current weights with the given weights - fn replace_weights(&mut self, weights: ArrayBase) -> ArrayBase { + fn replace_weights(&mut self, weights: ArrayBase) -> ArrayBase { core::mem::replace(self.weights_mut(), weights) } /// sets the weights of the model - fn set_weights(&mut self, weights: ArrayBase) -> &mut Self { + fn set_weights(&mut self, weights: ArrayBase) -> &mut Self { *self.weights_mut() = weights; self } @@ -45,17 +45,17 @@ where } } -pub trait Biased: Weighted +pub trait Biased::Elem>: Weighted where D: Dimension, - S: RawData, + S: RawData, { /// returns the bias of the model - fn bias(&self) -> &ArrayBase; + fn bias(&self) -> &ArrayBase; /// returns a mutable reference to the bias of the model - fn bias_mut(&mut self) -> &mut ArrayBase; + fn bias_mut(&mut self) -> &mut ArrayBase; /// assigns the given bias to the current bias - fn assign_bias(&mut self, bias: &ArrayBase) -> &mut Self + fn assign_bias(&mut self, bias: &ArrayBase) -> &mut Self where S: DataMut, S::Elem: Clone, @@ -64,11 +64,11 @@ where self } /// replaces the current bias with the given bias - fn replace_bias(&mut self, bias: ArrayBase) -> ArrayBase { + fn replace_bias(&mut self, bias: ArrayBase) -> ArrayBase { core::mem::replace(self.bias_mut(), bias) } /// sets the bias of the model - fn set_bias(&mut self, bias: ArrayBase) -> &mut Self { + fn set_bias(&mut self, bias: ArrayBase) -> &mut Self { *self.bias_mut() = bias; self } @@ -93,32 +93,3 @@ where /* ************* Implementations ************* */ -use crate::params::ParamsBase; - -impl Weighted for ParamsBase 
-where - S: RawData, - D: Dimension, -{ - fn weights(&self) -> &ArrayBase { - &self.weights - } - - fn weights_mut(&mut self) -> &mut ArrayBase { - &mut self.weights - } -} - -impl Biased for ParamsBase -where - S: RawData, - D: Dimension, -{ - fn bias(&self) -> &ArrayBase { - &self.bias - } - - fn bias_mut(&mut self) -> &mut ArrayBase { - &mut self.bias - } -} diff --git a/traits/tests/default.rs b/traits/tests/default.rs new file mode 100644 index 00000000..233a07af --- /dev/null +++ b/traits/tests/default.rs @@ -0,0 +1,17 @@ +/* + Appellation: default + Contrib: FL03 +*/ + +fn add(a: A, b: B) -> C +where + A: core::ops::Add, +{ + a + b +} + +#[test] +fn compiles() { + assert_eq!(add(10, 10), 20); + assert_ne!(add(1, 1), 3); +} diff --git a/traits/tests/traits.rs b/traits/tests/traits.rs new file mode 100644 index 00000000..da2943a1 --- /dev/null +++ b/traits/tests/traits.rs @@ -0,0 +1,49 @@ +/* + Appellation: traits + Contrib: FL03 +*/ +use concision_traits::*; +use ndarray::{Array2, array}; + +#[test] +fn test_affine() { + let x = array![[0.0, 1.0], [2.0, 3.0]]; + + let y = x.affine(4.0, -2.0); + assert_eq!(y, array![[-2.0, 2.0], [6.0, 10.0]]); +} + +#[test] +fn test_inverse() { + let a = array![[1.0, 2.0], [3.0, 4.0]]; + let b = array![[1.0, 2.0, 3.0,], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]]; + let exp = array![[-2.0, 1.0], [1.5, -0.5]]; + assert_eq!(Some(exp), a.inverse()); + assert_eq!(None, b.inverse()); +} + +#[test] +fn test_masked_fill() { + let shape = (2, 2); + let mask = array![[true, false], [false, true]]; + let arr = Array2::::from_shape_fn(shape, |(i, j)| (i * shape.1 + j) as f64); + let a = arr.masked_fill(&mask, 0.0); + assert_eq!(a, array![[0.0, 1.0], [2.0, 0.0]]); +} + +#[test] +fn test_matrix_power() { + let x = array![[1.0, 2.0], [3.0, 4.0]]; + assert_eq!(x.matpow(0), Array2::::eye(2)); + assert_eq!(x.matpow(1), x); + assert_eq!(x.matpow(2), x.dot(&x)); +} + +#[test] +fn test_unsqueeze() { + let arr = array![1, 2, 3, 4]; + let a = 
arr.clone().unsqueeze(0); + assert_eq!(a.dim(), (1, 4)); + let b = arr.unsqueeze(1); + assert_eq!(b.dim(), (4, 1)); +} diff --git a/utils/Cargo.toml b/utils/Cargo.toml index 0fc27b00..1f237cbd 100644 --- a/utils/Cargo.toml +++ b/utils/Cargo.toml @@ -14,21 +14,8 @@ repository.workspace = true rust-version.workspace = true version.workspace = true -[package.metadata.docs.rs] -all-features = false -features = ["full"] -rustc-args = ["--cfg", "docsrs"] -version = "v{{version}}" - -[package.metadata.release] -no-dev-version = true -tag-name = "{{version}}" - [lib] -crate-type = [ - "cdylib", - "rlib" -] +crate-type = ["cdylib", "rlib"] bench = false doc = true doctest = true @@ -36,8 +23,7 @@ test = true [dependencies] # custom -scsys = { workspace = true } -scsys-derive = { workspace = true } +variants = { workspace = true } # concurrency & parallelism rayon = { optional = true, workspace = true } # data & serialization @@ -66,135 +52,108 @@ rand = { optional = true, workspace = true } rand_distr = { optional = true, workspace = true } [dev-dependencies] +anyhow = { features = ["std"], workspace = true } lazy_static = { workspace = true } - [features] default = [ - "std", + "std", ] full = [ - "anyhow", - "approx", - "complex", - "default", - "json", - "rand", - "serde", - "tracing", + "approx", + "complex", + "default", + "json", + "rand", + "serde", + "tracing", ] nightly = [] # ************* [FF:Features] ************* signal =[ - "complex", - "rustfft", + "complex", + "rustfft", ] # ************* [FF:Dependencies] ************* std = [ - "alloc", - "anyhow?/std", - "ndarray/std", - "num/std", - "num-complex?/std", - "num-traits/std", - "rand?/std", - "rand?/std_rng", - "serde/std", - "scsys/std", - "strum/std", - "thiserror/std", - "tracing?/std", -] - -wasi = [ - "scsys/wasi", -] + "alloc", + "ndarray/std", + "num/std", + "num-complex?/std", + "num-traits/std", + "rand?/std", + "rand?/std_rng", + "serde/std", + "strum/std", + "thiserror/std", + "tracing?/std", + 
"variants/std", +] + +wasi = [] wasm = [ - "getrandom?/wasm_js", - "scsys/wasm", + "getrandom?/wasm_js", ] # ************* [FF:Dependencies] ************* alloc = [ - "num/alloc", - "serde?/alloc", - "serde_json?/alloc", - "scsys/alloc", -] - -anyhow = [ - "dep:anyhow", - "scsys/anyhow", + "num/alloc", + "serde?/alloc", + "serde_json?/alloc", + "variants/alloc", ] -approx = [ - "dep:approx", - "ndarray/approx", -] +approx = ["dep:approx", "ndarray/approx"] -blas = [ - "ndarray/blas", -] +blas = ["ndarray/blas"] complex = ["dep:num-complex"] json = [ - "alloc", - "scsys/json", - "serde", - "serde_json", + "alloc", + "serde", + "serde_json", ] rand = [ - "dep:rand", - "dep:rand_distr", - "num/rand", - "num-complex?/rand", - "rng", - "scsys/rand", + "dep:rand", + "dep:rand_distr", + "num/rand", + "num-complex?/rand", + "rng", ] -rayon = [ - "dep:rayon", - "ndarray/rayon", -] +rayon = ["dep:rayon", "ndarray/rayon"] rng = [ - "dep:getrandom", - "rand?/small_rng", - "rand?/thread_rng", - "scsys/rng", + "dep:getrandom", + "rand?/small_rng", + "rand?/thread_rng", ] rustfft = ["dep:rustfft"] serde = [ - "dep:serde", - "dep:serde_derive", - "ndarray/serde", - "num/serde", - "num-complex?/serde", - "rand?/serde", - "rand_distr?/serde", - "scsys/serde", + "dep:serde", + "dep:serde_derive", + "ndarray/serde", + "num/serde", + "num-complex?/serde", + "rand?/serde", + "rand_distr?/serde", ] serde_json = ["dep:serde_json"] -tracing = [ - "dep:tracing", - "scsys/tracing", -] - +tracing = ["dep:tracing"] -# ************* [Unit Tests] ************* +# ********* [Unit Tests] ********* [[test]] name = "default" -required-features = [] [[test]] name = "fft" @@ -207,3 +166,14 @@ required-features = ["approx", "std", "complex"] [[test]] name = "traits" required-features = ["approx", "std", "complex"] + +# ********* [Metadata] ********* +[package.metadata.docs.rs] +all-features = false +features = ["full"] +rustc-args = ["--cfg", "docsrs"] +version = "v{{version}}" + 
+[package.metadata.release] +no-dev-version = true +tag-name = "{{version}}" \ No newline at end of file diff --git a/utils/src/signal/fft/types/mode.rs b/utils/src/signal/fft/types/mode.rs index 20f1efe5..89c97059 100644 --- a/utils/src/signal/fft/types/mode.rs +++ b/utils/src/signal/fft/types/mode.rs @@ -52,7 +52,6 @@ toggle! { Ord, PartialEq, PartialOrd, - scsys::VariantConstructors, strum::AsRefStr, strum::Display, strum::EnumCount, @@ -61,6 +60,7 @@ toggle! { strum::EnumString, strum::VariantArray, strum::VariantNames, + variants::VariantConstructors, )] #[cfg_attr( feature = "serde", @@ -85,7 +85,6 @@ pub enum FftMode { Ord, PartialEq, PartialOrd, - scsys::VariantConstructors, strum::AsRefStr, strum::Display, strum::EnumCount, @@ -94,6 +93,7 @@ pub enum FftMode { strum::EnumString, strum::VariantArray, strum::VariantNames, + variants::VariantConstructors, )] #[cfg_attr( feature = "serde", diff --git a/utils/src/signal/fft/types/plan.rs b/utils/src/signal/fft/types/plan.rs index fc54da79..997c3ed8 100644 --- a/utils/src/signal/fft/types/plan.rs +++ b/utils/src/signal/fft/types/plan.rs @@ -2,15 +2,13 @@ Appellation: plan Contrib: FL03 */ -use core::slice; - use crate::signal::fft::fft_permutation; +#[cfg(feature = "alloc")] +use alloc::vec::{self, Vec}; + #[derive(Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] -#[cfg_attr( - feature = "serde", - derive(serde_derive::Deserialize, serde_derive::Serialize) -)] +#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct FftPlan { len: usize, plan: Vec, @@ -38,7 +36,7 @@ impl FftPlan { self.plan().get(index) } - pub fn iter(&self) -> slice::Iter { + pub fn iter<'a>(&'a self) -> core::slice::Iter<'a, usize> { self.plan().iter() } @@ -93,7 +91,7 @@ impl FromIterator for FftPlan { impl IntoIterator for FftPlan { type Item = usize; - type IntoIter = std::vec::IntoIter; + type IntoIter = vec::IntoIter; fn into_iter(self) -> Self::IntoIter { self.plan.into_iter() @@ -102,7 
+100,7 @@ impl IntoIterator for FftPlan { impl<'a> IntoIterator for &'a mut FftPlan { type Item = &'a mut usize; - type IntoIter = slice::IterMut<'a, usize>; + type IntoIter = core::slice::IterMut<'a, usize>; fn into_iter(self) -> Self::IntoIter { self.plan.iter_mut() diff --git a/utils/src/stats/summary.rs b/utils/src/stats/summary.rs index 2e148653..5be21462 100644 --- a/utils/src/stats/summary.rs +++ b/utils/src/stats/summary.rs @@ -147,13 +147,13 @@ where } fn std(&self) -> Self::Output { - let mean = self.mean().unwrap_or_else(A::zero); + let mean = self.mean(); let sum = self.iter().copied().map(|x| (x - mean).pow(2)).sum::(); (sum / self.elems()).sqrt() } fn var(&self) -> Self::Output { - let mean = self.mean().unwrap_or_else(A::zero); + let mean = self.mean(); let sum = self.iter().copied().map(|x| (x - mean).pow(2)).sum::(); sum / self.elems() }