diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index e0a517ab06..ce363420ef 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -44,16 +44,30 @@ ENV PATH=$CARGO_HOME/bin:$PATH COPY rust-toolchain.toml . RUN TOOLCHAIN_VERSION="$(grep channel rust-toolchain.toml | awk '{print $3}' | tr -d '"')" && \ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- \ - --profile minimal \ -y \ --default-toolchain "${TOOLCHAIN_VERSION}" \ --target wasm32-unknown-unknown -# Install wasm-bindgen-cli in the same profile as other components, to sacrifice some performance & disk space to gain -# better build caching -RUN if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \ - RUSTFLAGS="-C target-feature=-crt-static" \ - # Meanwhile if you want to update wasm-bindgen you also need to update version in: - # - packages/wasm-dpp/Cargo.toml - # - packages/wasm-dpp/scripts/build-wasm.sh - cargo install wasm-bindgen-cli@0.2.86 --locked +# Download and install cargo-binstall +ENV BINSTALL_VERSION=1.10.11 +RUN set -ex; \ + if [ "$TARGETARCH" = "amd64" ]; then \ + CARGO_BINSTALL_ARCH="x86_64-unknown-linux-musl"; \ + elif [ "$TARGETARCH" = "arm64" ] || [ "$TARGETARCH" = "aarch64" ]; then \ + CARGO_BINSTALL_ARCH="aarch64-unknown-linux-musl"; \ + else \ + echo "Unsupported architecture: $TARGETARCH"; exit 1; \ + fi; \ + DOWNLOAD_URL="https://github.com/cargo-bins/cargo-binstall/releases/download/v${BINSTALL_VERSION}/cargo-binstall-${CARGO_BINSTALL_ARCH}.tgz"; \ + curl -L --fail --show-error "$DOWNLOAD_URL" -o /tmp/cargo-binstall.tgz; \ + tar -xzf /tmp/cargo-binstall.tgz -C /tmp cargo-binstall; \ + chmod +x /tmp/cargo-binstall; \ + /tmp/cargo-binstall -y --force cargo-binstall; \ + rm /tmp/cargo-binstall; \ + cargo binstall -V + +RUN cargo binstall wasm-bindgen-cli@0.2.86 --locked \ + --no-discover-github-token \ + --disable-telemetry \ + --no-track \ + --no-confirm diff --git a/.dockerignore b/.dockerignore index d93249fec2..91aeb4a4db 100644 --- a/.dockerignore +++ b/.dockerignore @@ -2,6 +2,7 @@ .idea .vscode .nyc_output +.github *.log Dockerfile **/*.spec.ts diff --git a/.github/actions/aws_credentials/action.yaml b/.github/actions/aws_credentials/action.yaml new file mode 100644 index 0000000000..afef5fba5c --- /dev/null +++ b/.github/actions/aws_credentials/action.yaml @@ -0,0 +1,49 @@ +--- +name: "aws_credentials" +description: | + Configure .aws/credentials file with provided access key ID and secret access key. + + This action creates a credentials file in ${HOME}/.aws/credentials with the provided access key ID and secret access key. + It also sets AWS_PROFILE and AWS_SHARED_CREDENTIALS_FILE environment variables to use this profile. + + It can conflict with other actions that define AWS credentials or set AWS_PROFILE env variable. + Explicitly set AWS_PROFILE=sccache and unset AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY in case + of conflicting settings. 
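For orientation, this is roughly the state the new aws_credentials action leaves behind for later steps — a sketch; the profile name and key values here are illustrative, not taken from this diff:

```bash
# ${HOME}/.aws/credentials after the action runs with profile "layers":
#   [layers]
#   aws_access_key_id=AKIAEXAMPLE
#   aws_secret_access_key=example-secret
#
# The action exports these via $GITHUB_ENV, so any subsequent AWS CLI/SDK
# call resolves the named profile from the shared credentials file:
export AWS_PROFILE=layers
export AWS_SHARED_CREDENTIALS_FILE="${HOME}/.aws/credentials"
aws sts get-caller-identity   # authenticates with the profile written above
```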
+inputs:
+  access_key_id:
+    description: Access key ID
+    required: true
+  secret_access_key:
+    description: Secret access key
+    required: true
+  profile:
+    description: AWS profile to use; set AWS_PROFILE env variable to use this profile
+    default: "default"
+
+runs:
+  using: composite
+  steps:
+    - name: Configure AWS credentials
+      shell: bash
+      run: |
+        mkdir -p "${HOME}/.aws"
+        cat >> ${HOME}/.aws/credentials << EOF
+        [${{ inputs.profile }}]
+        aws_access_key_id=${{ inputs.access_key_id }}
+        aws_secret_access_key=${{ inputs.secret_access_key }}
+        EOF
+        chmod -R go-rwx ${HOME}/.aws
+
+    - name: Set env variables
+      shell: bash
+      run: |
+        # Exit on any error
+        set -euo pipefail
+        # Validate AWS_PROFILE is not empty
+        if [ -z "${{ inputs.profile }}" ]; then
+          echo "Error: AWS_PROFILE cannot be empty"
+          exit 1
+        fi
+        # Export variables
+        echo "AWS_PROFILE=${{ inputs.profile }}" >> $GITHUB_ENV
+        echo "AWS_SHARED_CREDENTIALS_FILE=${HOME}/.aws/credentials" >> $GITHUB_ENV
diff --git a/.github/actions/aws_ecr_login/action.yaml b/.github/actions/aws_ecr_login/action.yaml
new file mode 100644
index 0000000000..fc93942222
--- /dev/null
+++ b/.github/actions/aws_ecr_login/action.yaml
@@ -0,0 +1,42 @@
+---
+# Login to AWS ECR
+name: "aws_ecr_login"
+description: "Login to AWS ECR to store Docker images"
+inputs:
+  aws_account_id:
+    description: AWS account ID (AWS_ACCOUNT_ID)
+    required: true
+  aws_access_key_id:
+    description: Access key ID (AWS_ACCESS_KEY_ID)
+    required: true
+  aws_secret_access_key:
+    description: Secret access key (AWS_SECRET_ACCESS_KEY)
+    required: true
+  aws_region:
+    description: AWS region to use (AWS_REGION)
+    required: true
+
+runs:
+  using: composite
+  steps:
+    - name: Configure AWS credentials and bucket region
+      uses: aws-actions/configure-aws-credentials@v4
+      with:
+        aws-access-key-id: ${{ inputs.aws_access_key_id }}
+        aws-secret-access-key: ${{ inputs.aws_secret_access_key }}
+        aws-region: ${{ inputs.aws_region }}
+
+    - name: Login to ECR
+      run: |
+        aws ecr get-login-password \
+          --region ${{ inputs.aws_region }} | docker login --username AWS --password-stdin ${{ inputs.aws_account_id }}.dkr.ecr.${{ inputs.aws_region }}.amazonaws.com
+      shell: bash
+
+    # Unset AWS credentials to avoid conflicts, as we prefer credentials from ~/.aws/credentials to authenticate
+    - name: Unset AWS credentials to avoid conflicts
+      shell: bash
+      run: |
+        echo AWS_DEFAULT_REGION='' >> $GITHUB_ENV
+        echo AWS_REGION='' >> $GITHUB_ENV
+        echo AWS_ACCESS_KEY_ID='' >> $GITHUB_ENV
+        echo AWS_SECRET_ACCESS_KEY='' >> $GITHUB_ENV
diff --git a/.github/actions/docker/action.yaml b/.github/actions/docker/action.yaml
index d41ebf4dcf..459a3fe6c4 100644
--- a/.github/actions/docker/action.yaml
+++ b/.github/actions/docker/action.yaml
@@ -1,6 +1,13 @@
 ---
 name: "Build and push docker image"
-description: "Build and push docker image by digest with Rust caching"
+description: |
+  Build and push docker image by digest with extensive caching.
+
+  This action builds and pushes a Docker image to Docker Hub.
+  It uses caching for Rust dependencies and Docker layers.
+  It also provides sccache settings to the docker builder for caching Rust compilation.
+
+  The layer cache and sccache use the same credentials and S3 bucket, but different prefixes.
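To make the moving parts below easier to follow, here is a hedged sketch of the buildx invocation this action effectively assembles; the target, bucket, region, and manifest names are illustrative, not taken from this diff:

```bash
# Equivalent of the docker/build-push-action step defined below, under assumed values:
docker buildx build . \
  --target drive-abci \
  --platform linux/amd64 \
  --build-arg CARGO_BUILD_PROFILE=dev \
  --build-arg RUSTC_WRAPPER=sccache \
  --build-arg SCCACHE_BUCKET=example-cache-bucket \
  --secret id=AWS,src="${HOME}/.aws/credentials" \
  --cache-from "type=s3,region=us-east-1,bucket=example-cache-bucket,prefix=cache-layers/linux/amd64/,name=drive" \
  --cache-to "type=s3,region=us-east-1,bucket=example-cache-bucket,prefix=cache-layers/linux/amd64/,mode=max,name=drive_sha_abc123" \
  --output "type=image,name=dashpay/drive,push-by-digest=true,name-canonical=true,push=true"
```

The AWS_PROFILE environment variable selects credentials for the S3 layer cache client, while sccache inside the build reads the credentials file mounted through the `AWS` secret.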
inputs: image_name: description: Name of image in Docker Hub, like `drive` @@ -22,27 +29,27 @@ inputs: description: User name to use when pushing images to Docker Hub dockerhub_token: description: Docker Hub token to use - cache_mounts: - description: Load cache mounts cache - default: | - cargo_registry_index - cargo_registry_cache - cargo_git cargo_profile: description: Cargo build profile, i.e release or dev default: dev - bucket: - description: S3 bucket to use for caching, must match runner define in `runs-on` - default: multi-runner-cache-x1xibo9c - region: + cache_bucket: + description: S3 bucket to use for caching (both sccache and layer cache) + required: true + cache_region: description: S3 bucket region required: true - aws_access_key_id: - description: AWS access key ID + cache_endpoint: + description: S3 endpoint to use for caching required: true - aws_secret_access_key: - description: AWS secret access key + cache_access_key_id: + description: Access key ID for s3 cache required: true + cache_secret_access_key: + description: Secret access key for s3 cache + required: true + cache_to_name: + description: "Save cache to name manifest (should be used only on default branch)" + default: "false" outputs: digest: value: ${{ steps.docker_build.outputs.digest }} @@ -65,17 +72,10 @@ runs: install: true driver-opts: env.BUILDKIT_STEP_LOG_MAX_SIZE=10485760 cleanup: false - config-inline: | + buildkitd-config-inline: | [worker.oci] gc = false - - name: Load Docker mount cache - uses: dcginfra/buildkit-cache-dance/inject@s5cmd - if: ${{ inputs.cache_mounts != '' }} - with: - bucket: ${{ inputs.bucket }} - mounts: ${{ inputs.cache_mounts }} - - name: Set Docker tags and labels from image tag id: docker_meta uses: docker/metadata-action@v5 @@ -90,33 +90,109 @@ runs: id: layer_cache_settings with: name: ${{ inputs.image_name }} - bucket: ${{ inputs.bucket }} + region: ${{ inputs.cache_region }} + bucket: ${{ inputs.cache_bucket }} + endpoint: ${{ inputs.cache_endpoint }} + prefix: "cache-layers/${{ inputs.platform }}/" + cache_to_name: ${{ inputs.cache_to_name }} + s3_access_key_id: ${{ inputs.cache_access_key_id }} + s3_secret_access_key: ${{ inputs.cache_secret_access_key }} + + - name: Set HOME variable to github context + shell: bash + run: echo "HOME=$HOME" >> $GITHUB_ENV + + - name: Cargo cache for Docker + uses: actions/cache@v4 + id: cargo-cache + with: + path: | + ${{ env.HOME }}/cargo-cache-registry-index + ${{ env.HOME }}/cargo-cache-registry-cache + ${{ env.HOME }}/cargo-cache-git-db + key: ${{ runner.os }}-cargo-${{ hashFiles('Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Inject cargo cache into docker + uses: reproducible-containers/buildkit-cache-dance@v3.1.2 + with: + cache-map: | + { + "${{ env.HOME }}/cargo-cache-registry-index": { + "target": "/root/.cargo/registry/index", + "id": "cargo_registry_index" + }, + "${{ env.HOME }}/cargo-cache-registry-cache": { + "target": "/root/.cargo/registry/cache", + "id": "cargo_registry_cache" + }, + "${{ env.HOME }}/cargo-cache-git-db": { + "target": "/root/.cargo/git/db", + "id": "cargo_git" + } + } + skip-extraction: ${{ steps.cargo-cache.outputs.cache-hit }} + + - name: Yarn unplugged cache for Docker + uses: actions/cache@v4 + id: yarn-cache + with: + path: ${{ env.HOME }}/yarn-unplugged-cache + key: ${{ inputs.platform }}-yarn-unplugged-${{ hashFiles('yarn.lock') }} + restore-keys: | + ${{ inputs.platform }}-yarn-unplugged- + + - name: Set arch + id: arch + uses: actions/github-script@v6 + with: + 
result-encoding: "string"
+        script: return '${{ inputs.platform }}'.replace('linux/', '');
+
+    - name: Inject yarn cache into docker
+      uses: reproducible-containers/buildkit-cache-dance@v3.1.2
+      with:
+        cache-map: |
+          {
+            "${{ env.HOME }}/yarn-unplugged-cache": {
+              "target": "/tmp/unplugged",
+              "id": "unplugged_${{ steps.arch.outputs.result }}"
+            }
+          }
+        skip-extraction: ${{ steps.yarn-cache.outputs.cache-hit }}
+
+    - name: Configure sccache settings
+      uses: ./.github/actions/sccache
+      id: sccache
+      with:
+        bucket: ${{ inputs.cache_bucket }}
+        region: ${{ inputs.cache_region }}
+        endpoint: ${{ inputs.cache_endpoint }}
+        access_key_id: ${{ inputs.cache_access_key_id }}
+        secret_access_key: ${{ inputs.cache_secret_access_key }}
+        platform: ${{ inputs.platform }}
+        install: false

     - name: Build and push Docker image ${{ inputs.image }}
       id: docker_build
-      uses: docker/build-push-action@v5
+      uses: docker/build-push-action@v6
+      env:
+        # AWS profile to be used by layer cache; sccache settings are passed via build-args
+        AWS_PROFILE: ${{ steps.layer_cache_settings.outputs.aws_profile }}
       with:
         context: .
         builder: ${{ steps.buildx.outputs.name }}
         target: ${{ inputs.target }}
         labels: ${{ steps.docker_meta.outputs.labels }}
+        push: ${{ inputs.push_tags }}
         tags: ${{ inputs.push_tags == 'true' && steps.docker_meta.outputs.tags || '' }}
         platforms: ${{ inputs.platform }}
+        secret-files: |
+          AWS=${{ env.HOME }}/.aws/credentials
         build-args: |
           CARGO_BUILD_PROFILE=${{ inputs.cargo_profile }}
-          RUSTC_WRAPPER=sccache
-          SCCACHE_BUCKET=${{ inputs.bucket }}
-          SCCACHE_REGION=${{ inputs.region }}
-          SCCACHE_S3_KEY_PREFIX=${{ runner.os }}/sccache
-          AWS_ACCESS_KEY_ID=${{ inputs.aws_access_key_id }}
-          AWS_SECRET_ACCESS_KEY=${{ inputs.aws_secret_access_key }}
+          ${{ steps.sccache.outputs.env_vars }}
         cache-from: ${{ steps.layer_cache_settings.outputs.cache_from }}
         cache-to: ${{ steps.layer_cache_settings.outputs.cache_to }}
         outputs: type=image,name=${{ inputs.image_org }}/${{ inputs.image_name }},push-by-digest=${{ inputs.push_tags != 'true' }},name-canonical=true,push=true
-
-    - name: Save Docker mount cache
-      uses: dcginfra/buildkit-cache-dance/extract@s5cmd
-      if: ${{ inputs.cache_mounts != '' }}
-      with:
-        bucket: ${{ inputs.bucket }}
-        mounts: ${{ inputs.cache_mounts }}
diff --git a/.github/actions/librocksdb/action.yaml b/.github/actions/librocksdb/action.yaml
new file mode 100644
index 0000000000..217e2745eb
--- /dev/null
+++ b/.github/actions/librocksdb/action.yaml
@@ -0,0 +1,49 @@
+---
+# This action builds and caches librocksdb. If we find that this solution consumes too much time, we can consider
+# prebuilding librocksdb outside of the pipeline (e.g. in the grovedb release process), publishing it as an artifact, and
+# downloading it in the pipeline.
+name: "librocksdb"
+description: "Build and install librocksdb"
+inputs:
+  version:
+    description: RocksDB version, e.g.
"8.10.2" + required: false + default: "8.10.2" + force: + description: Force rebuild + required: false + default: "false" + +runs: + using: composite + steps: + - name: Cache librocksdb + uses: actions/cache@v4 + id: librocksdb-cache + with: + key: librocksdb/${{ inputs.version }}/${{ runner.os }}/${{ runner.arch }} + path: /opt/rocksdb + + - if: ${{ steps.librocksdb-cache.outputs.cache-hit != 'true' || inputs.force == 'true' }} + shell: bash + name: Build librocksdb + run: | + set -ex + WORKDIR=/tmp/rocksdb-build + mkdir -p ${WORKDIR}/rocksdb + mkdir -p /opt/rocksdb/usr/local/lib/ + pushd ${WORKDIR}/rocksdb + + # building rocksdb + git clone https://github.com/facebook/rocksdb.git -b v${{ inputs.version }} --depth 1 . + make -j$(nproc) static_lib + make DESTDIR=/opt/rocksdb install-static + set +x + + echo Done. + echo Configuration: + echo + echo "ROCKSDB_STATIC='/opt/rocksdb/usr/local/lib/librocksdb.a'" + echo "ROCKSDB_LIB_DIR='/opt/rocksdb/usr/local/lib'" + + popd diff --git a/.github/actions/local-network/action.yaml b/.github/actions/local-network/action.yaml index 3c6c468117..c2c6060ba6 100644 --- a/.github/actions/local-network/action.yaml +++ b/.github/actions/local-network/action.yaml @@ -11,32 +11,27 @@ runs: - name: Setup Node.JS uses: ./.github/actions/nodejs - - name: Restore JS build artifacts - uses: strophy/actions-cache@opendal-update + - name: Download JS build artifacts + uses: actions/download-artifact@v4 with: - bucket: multi-runner-cache-x1xibo9c - root: actions-cache - path: build-js-artifacts-${{ github.sha }}.tar - key: build-js-artifacts/${{ github.sha }} - - - name: Unpack JS build artifacts archive - shell: bash - run: tar -xf build-js-artifacts-${{ github.sha }}.tar + name: js-build-${{ github.sha }} + path: packages - name: Get dashmate fingerprint id: dashmate-fingerprint shell: bash run: echo "sha=$(git log -1 --format="%h" -- packages/dashmate)" >> $GITHUB_OUTPUT - # TODO: Use upload artifacts action instead + - name: Set HOME variable to github context + shell: bash + run: echo "HOME=$HOME" >> $GITHUB_ENV + - name: Restore local network data id: local-network-data - uses: strophy/actions-cache/restore@opendal-update + uses: actions/cache/restore@v4 with: - bucket: multi-runner-cache-x1xibo9c - root: local-network-data path: | - /home/ubuntu/.dashmate + ${{ env.HOME }}/.dashmate **/.env dashmate_volumes_dump key: local-network-volumes/${{ steps.dashmate-fingerprint.outputs.sha }} @@ -68,12 +63,10 @@ runs: if: steps.local-network-data.outputs.cache-hit != 'true' - name: Save local network data - uses: strophy/actions-cache/save@opendal-update + uses: actions/cache/save@v4 with: - bucket: multi-runner-cache-x1xibo9c - root: local-network-data path: | - /home/ubuntu/.dashmate + ${{ env.HOME }}/.dashmate **/.env dashmate_volumes_dump key: local-network-volumes/${{ steps.dashmate-fingerprint.outputs.sha }} @@ -90,9 +83,9 @@ runs: docker tag ${{ inputs.image_org }}/dashmate-helper:$SHA_TAG dashpay/dashmate-helper:$VERSION # Replace DAPI and Drive images with new org and tag in dashmate config - sed -i -E "s/dashpay\/(drive|dapi):[^\"]+/${{ inputs.image_org }}\/\1:${SHA_TAG}/g" /home/ubuntu/.dashmate/config.json + sed -i -E "s/dashpay\/(drive|dapi):[^\"]+/${{ inputs.image_org }}\/\1:${SHA_TAG}/g" ${{ env.HOME }}/.dashmate/config.json - cat /home/ubuntu/.dashmate/config.json + cat ${{ env.HOME }}/.dashmate/config.json - name: Start local network shell: bash diff --git a/.github/actions/nodejs/action.yaml b/.github/actions/nodejs/action.yaml index 
7cb41d5955..8460a19d16 100644 --- a/.github/actions/nodejs/action.yaml +++ b/.github/actions/nodejs/action.yaml @@ -17,11 +17,9 @@ runs: shell: bash run: npm config set audit false - - name: Cache NPM build artifacts (S3 bucket cache) - uses: strophy/actions-cache@opendal-update + - name: Cache NPM build artifacts + uses: actions/cache@v4 with: - bucket: multi-runner-cache-x1xibo9c - root: actions-cache path: | .yarn/unplugged key: ${{ runner.os }}/yarn/unplugged/${{ runner.arch }}/${{ hashFiles('yarn.lock') }} diff --git a/.github/actions/rust/action.yaml b/.github/actions/rust/action.yaml index 8854007013..ff26db3fad 100644 --- a/.github/actions/rust/action.yaml +++ b/.github/actions/rust/action.yaml @@ -17,6 +17,7 @@ inputs: required: false default: "true" +# TODO: Cache deps here to save 1 minute runs: using: composite steps: @@ -30,12 +31,10 @@ runs: fi echo "TOOLCHAIN_VERSION=$TOOLCHAIN_VERSION" >> $GITHUB_ENV - echo "::set-output name=version::$TOOLCHAIN_VERSION" + echo "version=$TOOLCHAIN_VERSION" >> $GITHUB_OUTPUT - # TODO: Move to AMI and build every day - uses: dtolnay/rust-toolchain@master name: Install Rust toolchain - id: install_rust with: toolchain: ${{ steps.rust_toolchain.outputs.version }} target: ${{ inputs.target }} @@ -60,7 +59,6 @@ runs: ;; esac - # TODO: Move to AMI and build every day - name: Check if protoc is installed id: check-protoc shell: bash @@ -84,38 +82,29 @@ runs: echo "PROTOC=${HOME}/.local/bin/protoc" >> $GITHUB_ENV export PATH="${PATH}:${HOME}/.local/bin" - - name: Run sccache-cache - uses: mozilla-actions/sccache-action@v0.0.3 - with: - version: "v0.7.1" # Must be the same as in Dockerfile - if: inputs.cache == 'true' - - - name: Hash ref_name - id: hashed-ref-name + - name: Set HOME variable to github context shell: bash - run: echo "key=$(echo '${{ github.ref_name }}' | sha256sum | cut -d ' ' -f1)" >> $GITHUB_OUTPUT + run: echo "HOME=$HOME" >> $GITHUB_ENV - - name: Cache cargo registry (S3 bucket cache) - uses: strophy/actions-cache@opendal-update + - name: Cache cargo registry + uses: actions/cache@v4 if: inputs.cache == 'true' with: - bucket: multi-runner-cache-x1xibo9c - root: actions-cache path: | - /home/ubuntu/.cargo/registry/index - /home/ubuntu/.cargo/registry/cache - /home/ubuntu/.cargo/git + ${{ env.HOME }}/.cargo/registry/index + ${{ env.HOME }}/.cargo/registry/cache + ${{ env.HOME }}/.cargo/git key: ${{ runner.os }}/cargo/registry/${{ hashFiles('**/Cargo.lock') }} restore-keys: | ${{ runner.os }}/cargo/registry/${{ hashFiles('**/Cargo.lock') }} ${{ runner.os }}/cargo/registry/ - # TODO: Move to AMI and build every day - name: Install clang id: deps-clang shell: bash if: runner.os == 'Linux' run: | sudo apt update -qq - sudo apt install -qq --yes clang llvm + # snappy is required by rust rocksdb + sudo apt install -qq --yes clang llvm libsnappy-dev sudo update-alternatives --set cc /usr/bin/clang diff --git a/.github/actions/s3-layer-cache-settings/action.yaml b/.github/actions/s3-layer-cache-settings/action.yaml index b4073b87c7..4260ee0659 100644 --- a/.github/actions/s3-layer-cache-settings/action.yaml +++ b/.github/actions/s3-layer-cache-settings/action.yaml @@ -1,4 +1,4 @@ -name: 'Get S3 Docker Layer Cache settings' +name: "Get S3 Docker Layer Cache settings" description: | This action generates string with s3-based cache configuration for docker buildx. 
It defines three manifests:
 - name and head ref to hit all builds for this branch with this name
 - just name to hit all builds for this name

+  To correctly use caching, ensure buildx has the AWS_PROFILE environment variable set to the value of the `aws_profile` output.
+
 inputs:
   name:
-    description: 'Cache key name will be used as a prefix for all docker image manifests'
+    description: "Cache key name will be used as a prefix for all docker image manifests"
     required: true
   head_ref:
-    description: 'Head ref for an additional manifest to hit all builds for this head'
+    description: "Head ref for an additional manifest to hit all builds for this head"
     default: ${{ github.ref }}
   region:
     description: S3 region
-    default: eu-west-1
+    required: true
   bucket:
     description: S3 bucket name
-    default: multi-runner-cache-x1xibo9c
+    required: true
+  endpoint:
+    description: S3 endpoint to use for caching
+    required: false
   prefix:
     description: S3 key prefix
-    default: 'cache-layers/'
+    default: "cache-layers/"
+  s3_access_key_id:
+    description: Access key ID for S3 cache
+    required: true
+  s3_secret_access_key:
+    description: Secret access key for S3 cache
+    required: true
   mode:
     description: Cache mode
     default: max
+  cache_to_name:
+    description: "Save cache to name manifest (should be used only on default branch)"
+    default: "false"

 outputs:
   cache_to:
-    description: 'String with s3-based cache configuration for docker buildx cache-to option'
+    description: "String with s3-based cache configuration for docker buildx cache-to option"
     value: ${{ steps.script.outputs.cache_to }}
   cache_from:
-    description: 'String with s3-based cache configuration for docker buildx cache-from option'
+    description: "String with s3-based cache configuration for docker buildx cache-from option"
     value: ${{ steps.script.outputs.cache_from }}
+  aws_profile:
+    description: "AWS profile to use for s3 cache, to set inside AWS_PROFILE env var"
+    value: layers

 runs:
   using: composite
   steps:
+    - name: Configure AWS credentials for s3 layers
+      uses: ./.github/actions/aws_credentials
+      with:
+        access_key_id: ${{ inputs.s3_access_key_id }}
+        secret_access_key: ${{ inputs.s3_secret_access_key }}
+        profile: "layers"
+
     - uses: actions/github-script@v6
       id: script
       with:
         script: |
-          const sanitizedHeadRef = '${{ inputs.head_ref }}'.replace(/[^a-zA-Z0-9]/g, '-');
-
-          const manifestNames = [
-            '${{ inputs.name }}_sha_${{ github.sha }}',
-            `${{ inputs.name }}_tag_${ sanitizedHeadRef }`,
-            '${{ inputs.name }}'
-          ];
-
           const settings = {
             type: 's3',
             region: '${{ inputs.region }}',
             bucket: '${{ inputs.bucket }}',
             prefix: '${{ inputs.prefix }}',
-            name: manifestNames.join(';'),
+            endpoint_url: '${{ inputs.endpoint }}',
           };

           const settingsString = Object.entries(settings)
@@ -62,5 +78,30 @@ runs:
             .map(([key, value]) => `${key}=${value}`)
             .join(',');

-          core.setOutput('cache_from', settingsString);
-          core.setOutput('cache_to', `${settingsString},mode=${{ inputs.mode }}`);
+          const sanitizedHeadRef = '${{ inputs.head_ref }}'.replace(/[^a-zA-Z0-9]/g, '-');
+
+          const shaManifestName = '${{ inputs.name }}_sha_${{ github.sha }}';
+          const headRefManifestName = '${{ inputs.name }}_tag_' + sanitizedHeadRef;
+
+          const cacheFromManifestNames = [
+            shaManifestName,
+            headRefManifestName,
+            '${{ inputs.name }}',
+          ];
+
+          const cacheFrom = cacheFromManifestNames
+            .map((name) => `${settingsString},name=${name}`)
+            .join('\n');
+
+          core.setOutput('cache_from', cacheFrom);
+
+          const cacheToManifestNames = [
+            shaManifestName,
+            headRefManifestName,
+          ];
+
+          if
('${{ inputs.cache_to_name }}' === 'true') { + cacheToManifestNames.push('${{ inputs.name }}'); + } + + core.setOutput('cache_to', `${settingsString},mode=${{ inputs.mode }},name=${cacheToManifestNames.join(';')}`); diff --git a/.github/actions/sccache/action.yaml b/.github/actions/sccache/action.yaml new file mode 100644 index 0000000000..14954b1f20 --- /dev/null +++ b/.github/actions/sccache/action.yaml @@ -0,0 +1,84 @@ +--- +name: "sccache" +description: | + Configure sccache caching. + + This action installs sccache and configures it to use an S3 bucket for caching. + It also sets environment variables to use when building Rust projects. + + It can conflict with other actions that define AWS credentials or set AWS_PROFILE env variable. + Manually set AWS_PROFILE=sccache and unset AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY in case + of conflicting settings. +inputs: + bucket: + description: S3 bucket to use for caching + required: true + region: + description: S3 bucket region + required: true + endpoint: + description: S3 endpoint to use for caching + required: true + access_key_id: + description: S3 endpoint access key ID + required: true + secret_access_key: + description: S3 endpoint secret access key + required: true + platform: + description: "Platform and architecture to use when caching; defaults to linux/amd64" + required: false + default: "linux/amd64" + install: + description: "Install sccache" + default: "true" + version: + description: "sccache version" + default: "0.8.2" + required: false +outputs: + env_vars: + description: "Environment variables set by this action" + value: | + AWS_PROFILE=sccache + CARGO_INCREMENTAL=0 + RUSTC_WRAPPER=sccache + SCCACHE_BUCKET=${{ inputs.bucket }} + SCCACHE_REGION=${{ inputs.region }} + SCCACHE_ENDPOINT=${{ inputs.endpoint }} + SCCACHE_S3_KEY_PREFIX=sccache/${{ inputs.platform }}/ + SCCACHE_VERSION=${{ inputs.version }} + CC="sccache clang" + CXX="sccache clang++" + +# TODO: Cache deps here to save 1 minute +runs: + using: composite + steps: + - name: Install sccache binary + if: ${{ inputs.install == 'true' }} + uses: mozilla-actions/sccache-action@v0.0.6 + with: + version: "v${{ inputs.version }}" + + - name: Configure AWS credentials + uses: ./.github/actions/aws_credentials + with: + access_key_id: ${{ inputs.access_key_id }} + secret_access_key: ${{ inputs.secret_access_key }} + profile: "sccache" + + - name: Configure sccache + shell: bash + run: | + echo "AWS_PROFILE=sccache" >> $GITHUB_ENV + echo "CARGO_INCREMENTAL=0" >> $GITHUB_ENV + echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV + echo "SCCACHE_BUCKET=${{ inputs.bucket }}" >> $GITHUB_ENV + echo "SCCACHE_REGION=${{ inputs.region }}" >> $GITHUB_ENV + echo "SCCACHE_ENDPOINT=${{ inputs.endpoint }}" >> $GITHUB_ENV + echo "SCCACHE_S3_KEY_PREFIX=sccache/${{ inputs.platform }}/" >> $GITHUB_ENV + # "SCCACHE_VERSION" is used inside Docker to install the same version of sccache + echo "SCCACHE_VERSION=${{ inputs.version }}" >> $GITHUB_ENV + echo "CC=sccache clang" >> $GITHUB_ENV + echo "CXX=sccache clang++" >> $GITHUB_ENV diff --git a/.github/package-filters/js-packages.yml b/.github/package-filters/js-packages.yml index d158bb39f6..b42237f81b 100644 --- a/.github/package-filters/js-packages.yml +++ b/.github/package-filters/js-packages.yml @@ -1,3 +1,7 @@ +'@dashevo/wallet-utils-contract': &wallet-utils-contract + - .github/workflows/tests* + - packages/wallet-utils-contract/** + '@dashevo/dashpay-contract': &dashpay-contract - .github/workflows/tests* - packages/dashpay-contract/** @@ 
-25,6 +29,7 @@ - *masternode-reward-shares-contract - *dpns-contract - *withdrawals-contract + - *wallet-utils-contract - packages/rs-platform-serialization/** - packages/rs-platform-serialization-derive/** - packages/rs-platform-value/** diff --git a/.github/package-filters/rs-packages.yml b/.github/package-filters/rs-packages.yml index f50dea216d..c813323e1d 100644 --- a/.github/package-filters/rs-packages.yml +++ b/.github/package-filters/rs-packages.yml @@ -1,3 +1,7 @@ +wallet-utils-contract: &wallet-utils-contract + - .github/workflows/tests* + - packages/wallet-utils-contract/** + dashpay-contract: &dashpay-contract - .github/workflows/tests* - packages/dashpay-contract/** @@ -25,6 +29,7 @@ dpp: &dpp - *masternode-reward-shares-contract - *dpns-contract - *withdrawals-contract + - *wallet-utils-contract - *json-schema-compatibility-validator - packages/rs-platform-serialization/** - packages/rs-platform-serialization-derive/** diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 12f512b720..d8e24ef706 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -7,7 +7,7 @@ on: jobs: build: name: Deploy docs - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Checkout main uses: actions/checkout@v4 diff --git a/.github/workflows/manage-runs.yml b/.github/workflows/manage-runs.yml deleted file mode 100644 index 2c07af5b4f..0000000000 --- a/.github/workflows/manage-runs.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: Manage runs -on: - pull_request: - types: - - closed - -jobs: - cancel-merged-or-closed-pr-runs: - name: Cancel runs for merged or closed PRs - runs-on: ubuntu-22.04 - steps: - - uses: octokit/request-action@v2.x - id: get_active_workflows - with: - route: GET /repos/{owner}/{repo}/actions/runs?status=in_progress&event=pull_request - owner: dashpay - repo: platform - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Extract running workflow ids - id: extract_workflow_ids - run: | - current_branch=${GITHUB_HEAD_REF} - - # loop thru the workflows found & filter out ones that are not on PRs pointing to this branch - workflow_ids=$(echo '${{ steps.get_active_workflows.outputs.data }}' | \ - jq '.workflow_runs | map({id, head_branch})' | \ - jq 'map(select(.head_branch == "'$current_branch'")) | map(.id)' | \ - jq 'join(",")') - - # strip the wrapping quote marks before passing to next step - echo 'WORKFLOW_IDS='$(echo $workflow_ids | tr -d '"') >> $GITHUB_ENV - - - name: Cancel active workflow runs - run: | - for id in ${WORKFLOW_IDS//,/ } - do - echo "Cancelling workflow with id: $id" - - # use curl here as I have no idea how to use a github action in a loop - curl \ - -X POST \ - -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ - https://api.github.com/repos/${{ github.repository }}/actions/runs/$id/cancel - done diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index d475a3eef8..e75151c841 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -10,7 +10,7 @@ on: jobs: pr-title: name: PR title - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - name: Validate conventional PR title uses: amannn/action-semantic-pull-request@v5 diff --git a/.github/workflows/prebuild-devcontainers.yml b/.github/workflows/prebuild-devcontainers.yml index 794fa3d4a5..38a1ecd403 100644 --- a/.github/workflows/prebuild-devcontainers.yml +++ b/.github/workflows/prebuild-devcontainers.yml @@ -7,6 +7,8 @@ on: - '.github/workflows/prebuild-devcontainers.yml' - 
rust-toolchain.toml - Dockerfile + branches: + - master workflow_dispatch: concurrency: @@ -16,8 +18,8 @@ concurrency: jobs: build: name: Build and push devcontainer - runs-on: ["self-hosted", "linux", "x64", "ubuntu-platform"] - timeout-minutes: 60 + runs-on: ubuntu-24.04 + timeout-minutes: 240 steps: - name: Checkout repo uses: actions/checkout@v4 diff --git a/.github/workflows/release-docker-image.yml b/.github/workflows/release-docker-image.yml index 5c56ade272..51b829982c 100644 --- a/.github/workflows/release-docker-image.yml +++ b/.github/workflows/release-docker-image.yml @@ -33,13 +33,13 @@ jobs: build-image: name: Build ${{ matrix.platform }} image runs-on: ${{ matrix.runner }} - timeout-minutes: 25 + timeout-minutes: 180 strategy: matrix: include: - - runner: ["self-hosted", "linux", "x64", "ubuntu-platform"] + - runner: ubuntu-24.04 platform: linux/amd64 - - runner: ["self-hosted", "linux", "arm64", "ubuntu-platform"] + - runner: ubuntu-24.04 platform: linux/arm64 steps: - name: Check out repo @@ -47,12 +47,9 @@ jobs: with: fetch-depth: 0 - - name: Configure AWS credentials and bucket region - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_REGION }} + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + if: ${{ matrix.platform == 'linux/arm64' }} - name: Build and push by digest uses: ./.github/actions/docker @@ -66,9 +63,13 @@ jobs: cargo_profile: ${{ inputs.cargo_profile }} dockerhub_username: ${{ secrets.DOCKERHUB_USERNAME }} dockerhub_token: ${{ secrets.DOCKERHUB_TOKEN }} - region: ${{ secrets.AWS_REGION }} - aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + cache_region: ${{ vars.CACHE_REGION }} + cache_bucket: ${{ vars.CACHE_S3_BUCKET }} + cache_endpoint: ${{ vars.CACHE_S3_ENDPOINT }} + cache_access_key_id: ${{ secrets.CACHE_KEY_ID }} + cache_secret_access_key: ${{ secrets.CACHE_SECRET_KEY }} + # On release, we generate a new "base" image, so we need to save cache to name manifest, like '.../drive' + cache_to_name: ${{ github.event_name == 'release' && 'true' || 'false' }} - name: Export digest run: | @@ -78,10 +79,16 @@ jobs: touch "${{ env.DIGEST_DIR_PATH }}/${digest#sha256:}" ls -lah ${{ env.DIGEST_DIR_PATH }} + - name: Format DIGEST_PLATFORM + run: | + PLATFORM="${{ matrix.platform }}" + # replace slash (/) with dash (-) + echo "DIGEST_PLATFORM=${PLATFORM//\//-}" >> $GITHUB_ENV + - name: Upload digest - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: ${{ env.DIGEST_NAME }} + name: ${{ env.DIGEST_NAME }}-${{ env.DIGEST_PLATFORM }} path: ${{ env.DIGEST_DIR_PATH }}/* if-no-files-found: error retention-days: 1 @@ -89,13 +96,14 @@ jobs: publish-manifest: name: Publish image tags needs: build-image - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Download digests - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: ${{ env.DIGEST_NAME }} + pattern: ${{ env.DIGEST_NAME }}* path: ${{ env.DIGEST_DIR_PATH }} + merge-multiple: true - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -134,6 +142,7 @@ jobs: with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Create manifest list and push working-directory: ${{ env.DIGEST_DIR_PATH }} run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml 
index dba5d592f5..269798a2ff 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -21,7 +21,7 @@ concurrency:
 jobs:
   release-npm:
     name: Release NPM packages
-    runs-on: ["self-hosted", "linux", "arm64", "ubuntu-platform"]
+    runs-on: ubuntu-24.04
     timeout-minutes: 15
     if: github.event_name != 'workflow_dispatch'
     steps:
@@ -33,51 +33,52 @@ jobs:
         env:
           TAG_PREFIX: v

-      - name: Configure AWS credentials and bucket region
-        uses: aws-actions/configure-aws-credentials@v4
+      - uses: softwareforgood/check-artifact-v4-existence@v0
+        id: check-artifact
         with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ secrets.AWS_REGION }}
-
-      - name: Retrieve JS build artifacts
-        uses: strophy/actions-cache@opendal-update
-        id: cache
-        with:
-          bucket: multi-runner-cache-x1xibo9c
-          root: actions-cache
-          path: build-js-artifacts-${{ github.sha }}.tar
-          key: build-js-artifacts/${{ github.sha }}
-
-      - name: Unpack JS build artifacts archive
-        run: tar -xf build-js-artifacts-${{ github.sha }}.tar
-        if: ${{ steps.cache.outputs.cache-hit == 'true' }}
+          name: js-build-${{ github.sha }}

       - name: Login to DockerHub
         uses: docker/login-action@v3
-        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}

       - name: Setup Rust
         uses: ./.github/actions/rust
         with:
           target: wasm32-unknown-unknown
-        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}
+
+      - name: Setup sccache
+        uses: ./.github/actions/sccache
+        with:
+          bucket: ${{ vars.CACHE_S3_BUCKET }}
+          region: ${{ vars.CACHE_REGION }}
+          endpoint: ${{ vars.CACHE_S3_ENDPOINT }}
+          access_key_id: ${{ secrets.CACHE_KEY_ID }}
+          secret_access_key: ${{ secrets.CACHE_SECRET_KEY }}
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}

       - name: Setup Node.JS
         uses: ./.github/actions/nodejs

+      - name: Install Cargo binstall
+        uses: cargo-bins/cargo-binstall@v1.3.1
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}
+
+      - name: Install wasm-bindgen-cli
+        run: cargo binstall wasm-bindgen-cli@0.2.86
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}
+
       - name: Build packages
         run: yarn build
         env:
           CARGO_BUILD_PROFILE: release
-          RUSTC_WRAPPER: sccache
-          SCCACHE_BUCKET: multi-runner-cache-x1xibo9c
-          SCCACHE_REGION: ${{ secrets.AWS_REGION }}
-          SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/wasm/wasm32
-        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}

       - name: Set suffix
         uses: actions/github-script@v6
@@ -113,7 +114,7 @@ jobs:
         run: yarn config set npmAuthToken ${{ secrets.NPM_TOKEN }}

       - name: Publish NPM packages
-        run: yarn workspaces foreach --all --no-private --parallel npm publish --access public --tag ${{ steps.tag.outputs.result }}
+        run: yarn workspaces foreach --all --no-private --parallel npm publish --tolerate-republish --access public --tag ${{ steps.tag.outputs.result }}

       - name: Ignore only already cached artifacts
         run: |
@@ -125,16 +126,25 @@ jobs:
           echo ".idea" >> .gitignore
           echo ".ultra.cache.json" >> .gitignore
           echo "db/*" >> .gitignore
-        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}

       - name: Get modified files
         id: diff
-        run: git ls-files --others --exclude-standard >> artifacts_list.txt
-        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+        run: |
+          echo "files<<EOF" >> $GITHUB_OUTPUT
+          git ls-files --others --exclude-standard >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}

-      - name: Create an archive of built files
-        run: xargs -a artifacts_list.txt tar cvf build-js-artifacts-${{ github.sha }}.tar
-        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+      - name: Upload the archive of built files
+        uses: actions/upload-artifact@v4
+        with:
+          name: js-build-${{ github.sha }}
+          path: ${{ steps.diff.outputs.files }}
+          retention-days: 1
+          if-no-files-found: error
+          include-hidden-files: true
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}

   release-drive-image:
     name: Release Drive image
@@ -208,11 +218,11 @@ jobs:
       matrix:
         include:
           - package_type: tarballs
-            os: ubuntu-22.04
+            os: ubuntu-24.04
           - package_type: win
-            os: ubuntu-22.04
+            os: ubuntu-24.04
           - package_type: deb
-            os: ubuntu-22.04
+            os: ubuntu-24.04
           - package_type: macos
             os: macos-14
     steps:
@@ -221,23 +231,11 @@ jobs:
         with:
           fetch-depth: 0

-      - name: Configure AWS credentials and bucket region
-        uses: aws-actions/configure-aws-credentials@v4
+      - name: Download JS build artifacts
+        uses: actions/download-artifact@v4
         with:
-          aws-region: ${{ secrets.AWS_REGION }}
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-
-      - name: Retrieve JS build artifacts
-        uses: strophy/actions-cache@opendal-update
-        with:
-          bucket: multi-runner-cache-x1xibo9c
-          root: actions-cache
-          path: build-js-artifacts-${{ github.sha }}.tar
-          key: build-js-artifacts/${{ github.sha }}
-
-      - name: Unpack JS build artifacts archive
-        run: tar -xf build-js-artifacts-${{ github.sha }}.tar
+          name: js-build-${{ github.sha }}
+          path: packages

       - name: Install macOS build deps
         if: runner.os == 'macOS'
@@ -284,7 +282,7 @@ jobs:
         run: "${GITHUB_WORKSPACE}/scripts/pack_dashmate.sh ${{ matrix.package_type }}"

       - name: Upload artifacts to action summary
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: github.event_name != 'release'
         with:
           name: dashmate
diff --git a/.github/workflows/tests-build-image.yml b/.github/workflows/tests-build-image.yml
index f8b7c830f4..e14b7c1cf8 100644
--- a/.github/workflows/tests-build-image.yml
+++ b/.github/workflows/tests-build-image.yml
@@ -17,34 +17,35 @@ on:
 jobs:
   build-image:
     name: Build ${{ inputs.name }} image
-    runs-on: ["self-hosted", "linux", "arm64", "ubuntu-platform"]
+    runs-on: ubuntu-24.04
+    timeout-minutes: 30
     steps:
       - name: Check out repo
         uses: actions/checkout@v4
         with:
           fetch-depth: 0

-      - name: Configure AWS credentials and bucket region
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ secrets.AWS_REGION }}
-
       - name: Login to ECR
-        run: aws ecr get-login-password --region ${{ secrets.AWS_REGION }} | docker login --username AWS --password-stdin ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com
-        shell: bash
+        uses: ./.github/actions/aws_ecr_login
+        with:
+          aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws_region: ${{ vars.AWS_REGION }}
+          aws_account_id: ${{ secrets.AWS_ACCOUNT_ID }}

       - name: Build and push by SHA
         uses: ./.github/actions/docker
         with:
           image_name: ${{ inputs.image_name }}
-          image_org: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com
+          image_org: ${{
secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ vars.AWS_REGION }}.amazonaws.com target: ${{ inputs.target }} - platform: linux/arm64 + platform: linux/amd64 push_tags: true dockerhub_username: ${{ secrets.DOCKERHUB_USERNAME }} dockerhub_token: ${{ secrets.DOCKERHUB_TOKEN }} - region: ${{ secrets.AWS_REGION }} - aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + cache_region: ${{ vars.CACHE_REGION }} + cache_bucket: ${{ vars.CACHE_S3_BUCKET }} + cache_endpoint: ${{ vars.CACHE_S3_ENDPOINT }} + cache_access_key_id: ${{ secrets.CACHE_KEY_ID }} + cache_secret_access_key: ${{ secrets.CACHE_SECRET_KEY }} + cache_to_name: ${{ github.event_name == 'push' && 'true' || 'false' }} diff --git a/.github/workflows/tests-build-js.yml b/.github/workflows/tests-build-js.yml index a367af55fc..a36bd91e2c 100644 --- a/.github/workflows/tests-build-js.yml +++ b/.github/workflows/tests-build-js.yml @@ -4,56 +4,58 @@ on: jobs: build-js: name: Build JS - runs-on: ["self-hosted", "linux", "arm64", "ubuntu-platform"] + runs-on: ubuntu-24.04 + timeout-minutes: 10 steps: - - name: Configure AWS credentials and bucket region - uses: aws-actions/configure-aws-credentials@v4 + - uses: softwareforgood/check-artifact-v4-existence@v0 + id: check-artifact with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_REGION }} - - # TODO: Use upload artifacts action instead - - name: Cache build artifacts - uses: strophy/actions-cache@opendal-update - id: cache - with: - bucket: multi-runner-cache-x1xibo9c - root: actions-cache - path: build-js-artifacts-${{ github.sha }}.tar - key: build-js-artifacts/${{ github.sha }} + name: js-build-${{ github.sha }} - name: Check out repo uses: actions/checkout@v4 - if: ${{ steps.cache.outputs.cache-hit != 'true' }} with: fetch-depth: 0 + if: ${{ steps.check-artifact.outputs.exists != 'true' }} - name: Login to DockerHub uses: docker/login-action@v3 - if: ${{ steps.cache.outputs.cache-hit != 'true' }} with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + if: ${{ steps.check-artifact.outputs.exists != 'true' }} - name: Setup Node.JS uses: ./.github/actions/nodejs - if: ${{ steps.cache.outputs.cache-hit != 'true' }} + if: ${{ steps.check-artifact.outputs.exists != 'true' }} - name: Setup Rust uses: ./.github/actions/rust - if: ${{ steps.cache.outputs.cache-hit != 'true' }} with: target: wasm32-unknown-unknown + if: ${{ steps.check-artifact.outputs.exists != 'true' }} + + - name: Setup sccache + uses: ./.github/actions/sccache + with: + bucket: ${{ vars.CACHE_S3_BUCKET }} + region: ${{ vars.CACHE_REGION }} + endpoint: ${{ vars.CACHE_S3_ENDPOINT }} + access_key_id: ${{ secrets.CACHE_KEY_ID }} + secret_access_key: ${{ secrets.CACHE_SECRET_KEY }} + if: ${{ steps.check-artifact.outputs.exists != 'true' }} + + - name: Install Cargo binstall + uses: cargo-bins/cargo-binstall@v1.3.1 + if: ${{ steps.check-artifact.outputs.exists != 'true' }} + + - name: Install wasm-bindgen-cli + run: cargo binstall wasm-bindgen-cli@0.2.86 + if: ${{ steps.check-artifact.outputs.exists != 'true' }} - name: Build JS packages run: yarn build - if: ${{ steps.cache.outputs.cache-hit != 'true' }} - env: - RUSTC_WRAPPER: sccache - SCCACHE_BUCKET: multi-runner-cache-x1xibo9c - SCCACHE_REGION: ${{ secrets.AWS_REGION }} - SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/wasm/wasm32 + if: ${{ steps.check-artifact.outputs.exists != 'true' }} - 
name: Ignore only already cached artifacts
         run: |
@@ -63,15 +65,24 @@ jobs:
           echo "node_modules" >> .gitignore
           echo ".nyc_output" >> .gitignore
           echo ".idea" >> .gitignore
           echo ".ultra.cache.json" >> .gitignore
           echo "db/*" >> .gitignore
-        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}

       - name: Get modified files
         id: diff
-        run: git ls-files --others --exclude-standard >> artifacts_list.txt
-        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+        run: |
+          echo "files<<EOF" >> $GITHUB_OUTPUT
+          git ls-files --others --exclude-standard >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}

-      - name: Create an archive of built files
-        run: xargs -a artifacts_list.txt tar cvf build-js-artifacts-${{ github.sha }}.tar
-        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+      - name: Upload the archive of built files
+        uses: actions/upload-artifact@v4
+        with:
+          name: js-build-${{ github.sha }}
+          path: ${{ steps.diff.outputs.files }}
+          retention-days: 1
+          if-no-files-found: error
+          include-hidden-files: true
+        if: ${{ steps.check-artifact.outputs.exists != 'true' }}
diff --git a/.github/workflows/tests-codeql.yml b/.github/workflows/tests-codeql.yml
index d00a66c8df..9ed7351bf7 100644
--- a/.github/workflows/tests-codeql.yml
+++ b/.github/workflows/tests-codeql.yml
@@ -4,7 +4,8 @@ on:
 jobs:
   codeql:
     name: Run Code QL
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
+    timeout-minutes: 15
     permissions:
       actions: read
       contents: read
@@ -20,26 +21,14 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

-      - name: Configure AWS credentials and bucket region
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ secrets.AWS_REGION }}
-
       - name: Setup Node.JS
         uses: ./.github/actions/nodejs

-      - name: Restore JS build artifacts archive
-        uses: strophy/actions-cache@opendal-update
+      - name: Download JS build artifacts
+        uses: actions/download-artifact@v4
         with:
-          bucket: multi-runner-cache-x1xibo9c
-          root: actions-cache
-          path: build-js-artifacts-${{ github.sha }}.tar
-          key: build-js-artifacts/${{ github.sha }}
-
-      - name: Unpack JS build artifacts archive
-        run: tar -xf build-js-artifacts-${{ github.sha }}.tar
+          name: js-build-${{ github.sha }}
+          path: packages

       - name: Initialize CodeQL
         uses: github/codeql-action/init@v2
diff --git a/.github/workflows/tests-dashmate.yml b/.github/workflows/tests-dashmate.yml
index 369c72e06a..170006c22a 100644
--- a/.github/workflows/tests-dashmate.yml
+++ b/.github/workflows/tests-dashmate.yml
@@ -17,8 +17,7 @@ on:
 jobs:
   dashmate-test:
     name: Run ${{ inputs.name }} tests
-    # TODO: Try with Github Runner, probably it will be the same time
-    runs-on: [ "self-hosted", "linux", "arm64", "ubuntu-platform" ]
+    runs-on: ubuntu-24.04
     timeout-minutes: 15
     steps:
       - name: Check out repo
@@ -32,34 +31,29 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

-      - name: Configure AWS credentials and bucket region
-        uses: aws-actions/configure-aws-credentials@v4
+      - name: Login to ECR
+        uses: ./.github/actions/aws_ecr_login
         with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ secrets.AWS_REGION }}
+          aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws_region: ${{ vars.AWS_REGION }}
+          aws_account_id: ${{ secrets.AWS_ACCOUNT_ID }}

       - name: Setup Node.JS
         uses: ./.github/actions/nodejs

-      - name: Restore JS build artifacts
-        uses: strophy/actions-cache@opendal-update
+      - name: Download JS build artifacts
+        uses: actions/download-artifact@v4
         with:
-          bucket: multi-runner-cache-x1xibo9c
-          root: actions-cache
-          path: build-js-artifacts-${{ github.sha }}.tar
-          key: build-js-artifacts/${{ github.sha }}
-
-      - name: Unpack JS build artifacts archive
-        run: tar -xvf build-js-artifacts-${{ github.sha }}.tar
+          name: js-build-${{ github.sha }}
+          path: packages

       - name: Replace with pre-built images
         run: |
-          set -x
+          set -e

           # Login to ECR
-          DOCKER_HUB_ORG="${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com"
-          aws ecr get-login-password --region ${{ secrets.AWS_REGION }} | docker login --username AWS --password-stdin $DOCKER_HUB_ORG
+          DOCKER_HUB_ORG="${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ vars.AWS_REGION }}.amazonaws.com"

           SHA_TAG=sha-${{ github.sha }}
@@ -84,14 +78,17 @@ jobs:
         run: echo "sha=$(git log -1 --format="%h" -- packages/dashmate)" >> $GITHUB_OUTPUT
         if: inputs.restore_local_network_data == true

-      - name: Use test suite local network data to speed up dashmate local network tests
+      - name: Set HOME variable to github context
+        shell: bash
+        run: echo "HOME=$HOME" >> $GITHUB_ENV
+
+      - name: Restore test suite local network data to speed up dashmate local network tests
         id: local-network-data
-        uses: strophy/actions-cache/restore@opendal-update
+        uses: actions/cache/restore@v4
         with:
-          bucket: multi-runner-cache-x1xibo9c
-          root: local-network-data
           path: |
-            /home/ubuntu/.dashmate
+            ${{ env.HOME }}/.dashmate
+            **/.env
             dashmate_volumes_dump
           key: local-network-volumes/${{ steps.dashmate-fingerprint.outputs.sha }}
         if: inputs.restore_local_network_data == true
@@ -105,7 +102,7 @@ jobs:
         env:
           DEBUG: 1
           DASHMATE_E2E_TESTS_SKIP_IMAGE_BUILD: true
-          DASHMATE_E2E_TESTS_LOCAL_HOMEDIR: /home/ubuntu/.dashmate
+          DASHMATE_E2E_TESTS_LOCAL_HOMEDIR: ${{ env.HOME }}/.dashmate
         if: steps.local-network-data.outputs.cache-hit == 'true'

       - name: Run tests without cache
@@ -113,9 +110,8 @@ jobs:
         env:
           DEBUG: 1
           DASHMATE_E2E_TESTS_SKIP_IMAGE_BUILD: true
         if: steps.local-network-data.outputs.cache-hit != 'true'

       - name: Show Docker logs
         if: ${{ failure() }}
         uses: jwalton/gh-docker-logs@v2
-
diff --git a/.github/workflows/tests-js-package.yml b/.github/workflows/tests-js-package.yml
index ef508ec073..a2bf39a47b 100644
--- a/.github/workflows/tests-js-package.yml
+++ b/.github/workflows/tests-js-package.yml
@@ -17,7 +17,8 @@ on:
 jobs:
   lint:
     name: Linting
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
+    timeout-minutes: 5
     permissions:
       id-token: write
       contents: read
@@ -25,33 +26,22 @@ jobs:
       - name: Check out repo
         uses: actions/checkout@v4

-      - name: Configure AWS credentials and bucket region
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ secrets.AWS_REGION }}
-
       - name: Setup Node.JS
         uses: ./.github/actions/nodejs

-      - name: Restore JS build artifacts
-        uses: strophy/actions-cache@opendal-update
+      - name: Download JS build artifacts
+        uses: actions/download-artifact@v4
         with:
-          bucket: multi-runner-cache-x1xibo9c
-          root: actions-cache
-          path: build-js-artifacts-${{ github.sha }}.tar
-          key:
build-js-artifacts/${{ github.sha }} - - - name: Unpack JS build artifacts archive - run: tar -xf build-js-artifacts-${{ github.sha }}.tar + name: js-build-${{ github.sha }} + path: packages - name: Run ESLinter run: yarn workspace "${{ inputs.package }}" lint test: name: Tests - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 + timeout-minutes: 15 permissions: id-token: write contents: read @@ -62,26 +52,14 @@ jobs: with: fetch-depth: 0 - - name: Configure AWS credentials and bucket region - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-region: ${{ secrets.AWS_REGION }} - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - - name: Setup Node.JS uses: ./.github/actions/nodejs - - name: Restore JS build artifacts - uses: strophy/actions-cache@opendal-update + - name: Download JS build artifacts + uses: actions/download-artifact@v4 with: - bucket: multi-runner-cache-x1xibo9c - root: actions-cache - path: build-js-artifacts-${{ github.sha }}.tar - key: build-js-artifacts/${{ github.sha }} - - - name: Unpack JS build artifacts archive - run: tar -xf build-js-artifacts-${{ github.sha }}.tar + name: js-build-${{ github.sha }} + path: packages - name: Create necessary dotenv files run: | diff --git a/.github/workflows/tests-packges-functional.yml b/.github/workflows/tests-packges-functional.yml index 57f735f0d5..dcd99f7581 100644 --- a/.github/workflows/tests-packges-functional.yml +++ b/.github/workflows/tests-packges-functional.yml @@ -4,11 +4,10 @@ on: jobs: test-functional: name: Run functional tests - runs-on: [ "self-hosted", "linux", "arm64", "ubuntu-platform" ] + runs-on: ubuntu-24.04 timeout-minutes: 15 env: - CHROME_BIN: /usr/bin/brave-browser - ECR_HOST: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com + ECR_HOST: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ vars.AWS_REGION }}.amazonaws.com steps: - name: Check out repo uses: actions/checkout@v4 @@ -21,15 +20,13 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Configure AWS credentials and bucket region - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_REGION }} - - name: Login to ECR - run: aws ecr get-login-password --region ${{ secrets.AWS_REGION }} | docker login --username AWS --password-stdin ${{ env.ECR_HOST }} + uses: ./.github/actions/aws_ecr_login + with: + aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws_region: ${{ vars.AWS_REGION }} + aws_account_id: ${{ secrets.AWS_ACCOUNT_ID }} - name: Start local network uses: ./.github/actions/local-network diff --git a/.github/workflows/tests-rs-package.yml b/.github/workflows/tests-rs-package.yml index e666491ebc..7730485c62 100644 --- a/.github/workflows/tests-rs-package.yml +++ b/.github/workflows/tests-rs-package.yml @@ -5,14 +5,6 @@ on: description: The package name to run tests for type: string required: true - test-runner: - description: Runner for tests. Must be JSON valid string. - type: string - default: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' - lint-runner: - description: Runner for linting. Must be JSON valid string. 
- type: string - default: '"ubuntu-22.04"' check-each-feature: description: If true, try to build each individual feature for this crate type: boolean @@ -21,7 +13,7 @@ on: jobs: lint: name: Linting - runs-on: ${{ fromJSON(inputs.lint-runner) }} + runs-on: ubuntu-24.04 permissions: id-token: write contents: read @@ -30,30 +22,34 @@ jobs: - name: Check out repo uses: actions/checkout@v4 - - name: Configure AWS credentials and bucket region - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_REGION }} - - name: Setup Rust uses: ./.github/actions/rust with: components: clippy + - name: Setup sccache + uses: ./.github/actions/sccache + with: + bucket: ${{ vars.CACHE_S3_BUCKET }} + region: ${{ vars.CACHE_REGION }} + endpoint: ${{ vars.CACHE_S3_ENDPOINT }} + access_key_id: ${{ secrets.CACHE_KEY_ID }} + secret_access_key: ${{ secrets.CACHE_SECRET_KEY }} + + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - uses: clechasseur/rs-clippy-check@v3 with: args: --package ${{ inputs.package }} --all-features --locked -- --no-deps env: - RUSTC_WRAPPER: sccache - SCCACHE_BUCKET: multi-runner-cache-x1xibo9c - SCCACHE_REGION: ${{ secrets.AWS_REGION }} - SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu - + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" + SNAPPY_STATIC: "/usr/lib/x86_64-linux-gnu/libsnappy.a" + SNAPPY_LIB_DIR: "/usr/lib/x86_64-linux-gnu" formatting: name: Formatting - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 timeout-minutes: 5 steps: - name: Check out repo @@ -65,12 +61,15 @@ jobs: components: rustfmt cache: false + # We don't use cache for this step, nothing to cache here + # This step doesn't need librocksdb, so we don't install it + - name: Check formatting run: cargo fmt --check --package=${{ inputs.package }} unused_deps: name: Unused dependencies - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: id-token: write contents: read @@ -79,16 +78,20 @@ jobs: - name: Check out repo uses: actions/checkout@v4 - - name: Configure AWS credentials and bucket region - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-region: ${{ secrets.AWS_REGION }} - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - - name: Setup Rust uses: ./.github/actions/rust + - name: Setup sccache + uses: ./.github/actions/sccache + with: + bucket: ${{ vars.CACHE_S3_BUCKET }} + region: ${{ vars.CACHE_REGION }} + endpoint: ${{ vars.CACHE_S3_ENDPOINT }} + access_key_id: ${{ secrets.CACHE_KEY_ID }} + secret_access_key: ${{ secrets.CACHE_SECRET_KEY }} + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - name: Get crate ${{ inputs.package }} info id: crate_info uses: ./.github/actions/crate_info @@ -98,16 +101,17 @@ jobs: - name: Find unused dependencies uses: lklimek/cargo-machete@feat/workdir env: - RUSTC_WRAPPER: sccache - SCCACHE_BUCKET: multi-runner-cache-x1xibo9c - SCCACHE_REGION: ${{ secrets.AWS_REGION }} - SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" + SNAPPY_STATIC: "/usr/lib/x86_64-linux-gnu/libsnappy.a" + SNAPPY_LIB_DIR: "/usr/lib/x86_64-linux-gnu" with: args: ${{ steps.crate_info.outputs.cargo_manifest_dir }} 
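As shown above, several jobs export the same RocksDB/snappy linker environment; a hedged sketch of reproducing it locally, assuming the librocksdb action has populated /opt/rocksdb and that the crates' build scripts honor these variables (the package name is illustrative):

```bash
# Link against the statically built RocksDB and the system snappy, mirroring the CI env:
export ROCKSDB_STATIC=/opt/rocksdb/usr/local/lib/librocksdb.a
export ROCKSDB_LIB_DIR=/opt/rocksdb/usr/local/lib
export SNAPPY_STATIC=/usr/lib/x86_64-linux-gnu/libsnappy.a
export SNAPPY_LIB_DIR=/usr/lib/x86_64-linux-gnu
cargo test --package=drive --all-features --locked
```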
detect_structure_changes: name: Detect immutable structure changes - runs-on: ubuntu-22.04 + timeout-minutes: 10 + runs-on: ubuntu-24.04 # FIXME: as we use `gh pr view` below, this check can only # run on pull requests. We should find a way to run it # when manual triggers are used. @@ -168,49 +172,60 @@ jobs: test: name: Tests - runs-on: ${{ fromJSON(inputs.test-runner) }} + runs-on: ubuntu-24.04 timeout-minutes: 25 steps: - name: Check out repo uses: actions/checkout@v4 - - name: Configure AWS credentials and bucket region - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-region: ${{ secrets.AWS_REGION }} - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - - name: Setup Rust uses: ./.github/actions/rust + - name: Setup sccache + uses: ./.github/actions/sccache + with: + bucket: ${{ vars.CACHE_S3_BUCKET }} + region: ${{ vars.CACHE_REGION }} + endpoint: ${{ vars.CACHE_S3_ENDPOINT }} + access_key_id: ${{ secrets.CACHE_KEY_ID }} + secret_access_key: ${{ secrets.CACHE_SECRET_KEY }} + + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - name: Run tests run: cargo test --package=${{ inputs.package }} --all-features --locked env: - RUSTC_WRAPPER: sccache - SCCACHE_BUCKET: multi-runner-cache-x1xibo9c - SCCACHE_REGION: ${{ secrets.AWS_REGION }} SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" + SNAPPY_STATIC: "/usr/lib/x86_64-linux-gnu/libsnappy.a" + SNAPPY_LIB_DIR: "/usr/lib/x86_64-linux-gnu" check_each_feature: name: Check each feature - runs-on: ${{ fromJSON(inputs.test-runner) }} + runs-on: ubuntu-24.04 timeout-minutes: 10 if: ${{ inputs.check-each-feature }} steps: - name: Check out repo - uses: actions/checkout@v3 - - - name: Configure AWS credentials and bucket region - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-region: ${{ secrets.AWS_REGION }} - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + uses: actions/checkout@v4 - name: Setup Rust uses: ./.github/actions/rust + - name: Setup sccache + uses: ./.github/actions/sccache + with: + bucket: ${{ vars.CACHE_S3_BUCKET }} + region: ${{ vars.CACHE_REGION }} + endpoint: ${{ vars.CACHE_S3_ENDPOINT }} + access_key_id: ${{ secrets.CACHE_KEY_ID }} + secret_access_key: ${{ secrets.CACHE_SECRET_KEY }} + + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - name: Get crate ${{ runner.arch }} info id: crate_info uses: ./.github/actions/crate_info @@ -219,13 +234,14 @@ jobs: - name: Check each feature in ${{ inputs.package }} env: - RUSTC_WRAPPER: sccache - SCCACHE_BUCKET: multi-runner-cache-x1xibo9c - SCCACHE_REGION: ${{ secrets.AWS_REGION }} - SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" + SNAPPY_STATIC: "/usr/lib/x86_64-linux-gnu/libsnappy.a" + SNAPPY_LIB_DIR: "/usr/lib/x86_64-linux-gnu" run: | echo Verify all features disabled set -ex + features="${{ steps.crate_info.outputs.features }}" fails="" RUSTFLAGS="-D warnings" diff --git a/.github/workflows/tests-test-suite.yml b/.github/workflows/tests-test-suite.yml index f1d0ac1621..698e5ea153 100644 --- a/.github/workflows/tests-test-suite.yml +++ b/.github/workflows/tests-test-suite.yml @@ -22,11 +22,10 @@ on: jobs: test-suite: name: Run 
${{ inputs.name }} - runs-on: [ "self-hosted", "linux", "arm64", "ubuntu-platform" ] + runs-on: ubuntu-24.04 timeout-minutes: 15 env: - CHROME_BIN: /usr/bin/brave-browser - ECR_HOST: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com + ECR_HOST: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ vars.AWS_REGION }}.amazonaws.com steps: - name: Check out repo uses: actions/checkout@v4 @@ -39,15 +38,13 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Configure AWS credentials and bucket region - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_REGION }} - - name: Login to ECR - run: aws ecr get-login-password --region ${{ secrets.AWS_REGION }} | docker login --username AWS --password-stdin ${{ env.ECR_HOST }} + uses: ./.github/actions/aws_ecr_login + with: + aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws_region: ${{ vars.AWS_REGION }} + aws_account_id: ${{ secrets.AWS_ACCOUNT_ID }} - name: Start local network uses: ./.github/actions/local-network @@ -57,8 +54,8 @@ jobs: - name: Run test suite run: yarn workspace @dashevo/platform-test-suite ${{ inputs.command }} env: - BROWSER_TEST_BATCH_INDEX: ${{ inputs.batch_index }} - BROWSER_TEST_BATCH_TOTAL: ${{ inputs.batch_total }} + BROWSER_TEST_BATCH_INDEX: ${{ inputs.batch_index }} + BROWSER_TEST_BATCH_TOTAL: ${{ inputs.batch_total }} - name: Show Docker logs if: ${{ failure() }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c823d0cd06..4cf511cfbb 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -22,13 +22,13 @@ jobs: changes: name: Determine changed packages if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 outputs: js-packages: ${{ steps.filter-js.outputs.changes }} rs-packages: ${{ steps.filter-rs.outputs.changes }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 @@ -83,21 +83,25 @@ jobs: uses: ./.github/workflows/tests-rs-package.yml with: package: ${{ matrix.rs-package }} - # lint-runner: ${{ contains(fromJSON('["drive-abci", "drive"]'), matrix.rs-package) && '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' || '"ubuntu-22.04"' }} - # FIXME: Clippy fails on github hosted runners, most likely due to RAM usage. Using self-hosted runners for now. 
- lint-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' - # Run drive tests on self-hosted 4x - test-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' check-each-feature: ${{ contains(fromJSON('["dash-sdk","rs-dapi-client","dapi-grpc","dpp","drive-abci"]'), matrix.rs-package) }} rs-crates-security: name: Rust crates security audit if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check out repo uses: actions/checkout@v4 + - name: Setup sccache + uses: ./.github/actions/sccache + with: + bucket: ${{ vars.CACHE_S3_BUCKET }} + region: ${{ vars.CACHE_REGION }} + endpoint: ${{ vars.CACHE_S3_ENDPOINT }} + access_key_id: ${{ secrets.CACHE_KEY_ID }} + secret_access_key: ${{ secrets.CACHE_SECRET_KEY }} + - name: Audit crates uses: rustsec/audit-check@v1 with: @@ -122,7 +126,7 @@ jobs: js-deps-versions: name: JS dependency versions check if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check out repo uses: actions/checkout@v4 @@ -141,7 +145,7 @@ jobs: js-npm-security: name: JS NPM security audit if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check out repo uses: actions/checkout@v4 @@ -163,6 +167,7 @@ jobs: secrets: inherit needs: - changes + - build-js - build-images strategy: fail-fast: false diff --git a/.pnp.cjs b/.pnp.cjs index 0a13a010d1..c642620374 100755 --- a/.pnp.cjs +++ b/.pnp.cjs @@ -69,6 +69,10 @@ const RAW_RUNTIME_STATE = "name": "@dashevo/wallet-lib",\ "reference": "workspace:packages/wallet-lib"\ },\ + {\ + "name": "@dashevo/wallet-utils-contract",\ + "reference": "workspace:packages/wallet-utils-contract"\ + },\ {\ "name": "@dashevo/wasm-dpp",\ "reference": "workspace:packages/wasm-dpp"\ @@ -94,6 +98,7 @@ const RAW_RUNTIME_STATE = ["@dashevo/platform", ["workspace:."]],\ ["@dashevo/platform-test-suite", ["workspace:packages/platform-test-suite"]],\ ["@dashevo/wallet-lib", ["workspace:packages/wallet-lib"]],\ + ["@dashevo/wallet-utils-contract", ["workspace:packages/wallet-utils-contract"]],\ ["@dashevo/wasm-dpp", ["workspace:packages/wasm-dpp"]],\ ["@dashevo/withdrawals-contract", ["workspace:packages/withdrawals-contract"]],\ ["dash", ["workspace:packages/js-dash-sdk"]],\ @@ -3086,6 +3091,24 @@ const RAW_RUNTIME_STATE = "linkType": "SOFT"\ }]\ ]],\ + ["@dashevo/wallet-utils-contract", [\ + ["workspace:packages/wallet-utils-contract", {\ + "packageLocation": "./packages/wallet-utils-contract/",\ + "packageDependencies": [\ + ["@dashevo/wallet-utils-contract", "workspace:packages/wallet-utils-contract"],\ + ["@dashevo/wasm-dpp", "workspace:packages/wasm-dpp"],\ + ["chai", "npm:4.3.10"],\ + ["dirty-chai", "virtual:e2d057e7cc143d3cb9bec864f4a2d862441b5a09f81f8e6c46e7a098cbc89e4d07017cc6e2e2142d5704bb55da853cbec2d025ebc0b30e8696c31380c00f2c7d#npm:2.0.1"],\ + ["eslint", "npm:8.53.0"],\ + ["eslint-config-airbnb-base", "virtual:e2d057e7cc143d3cb9bec864f4a2d862441b5a09f81f8e6c46e7a098cbc89e4d07017cc6e2e2142d5704bb55da853cbec2d025ebc0b30e8696c31380c00f2c7d#npm:15.0.0"],\ + ["eslint-plugin-import", 
"virtual:e2d057e7cc143d3cb9bec864f4a2d862441b5a09f81f8e6c46e7a098cbc89e4d07017cc6e2e2142d5704bb55da853cbec2d025ebc0b30e8696c31380c00f2c7d#npm:2.29.0"],\ + ["mocha", "npm:10.2.0"],\ + ["sinon", "npm:17.0.1"],\ + ["sinon-chai", "virtual:e2d057e7cc143d3cb9bec864f4a2d862441b5a09f81f8e6c46e7a098cbc89e4d07017cc6e2e2142d5704bb55da853cbec2d025ebc0b30e8696c31380c00f2c7d#npm:3.7.0"]\ + ],\ + "linkType": "SOFT"\ + }]\ + ]],\ ["@dashevo/wasm-dpp", [\ ["workspace:packages/wasm-dpp", {\ "packageLocation": "./packages/wasm-dpp/",\ @@ -5633,7 +5656,7 @@ const RAW_RUNTIME_STATE = ["chalk", "npm:3.0.0"],\ ["ci-info", "npm:3.8.0"],\ ["clipanion", "virtual:ba24742e5bfaec41d1d0434e6865c2744ffb69a716f18fa84bd7d44084043647838926c96a391f66b9946857565b9825479e97ad07079ef40683084ce389d203#npm:3.2.0"],\ - ["cross-spawn", "npm:7.0.3"],\ + ["cross-spawn", "npm:7.0.5"],\ ["diff", "npm:5.1.0"],\ ["globby", "npm:11.1.0"],\ ["got", "npm:11.8.6"],\ @@ -5742,7 +5765,7 @@ const RAW_RUNTIME_STATE = ["@yarnpkg/parsers", "npm:3.0.0-rc.42"],\ ["chalk", "npm:3.0.0"],\ ["clipanion", "virtual:ba24742e5bfaec41d1d0434e6865c2744ffb69a716f18fa84bd7d44084043647838926c96a391f66b9946857565b9825479e97ad07079ef40683084ce389d203#npm:3.2.0"],\ - ["cross-spawn", "npm:7.0.3"],\ + ["cross-spawn", "npm:7.0.5"],\ ["fast-glob", "npm:3.2.12"],\ ["micromatch", "npm:4.0.7"],\ ["tslib", "npm:2.6.2"]\ @@ -8389,28 +8412,16 @@ const RAW_RUNTIME_STATE = "packageLocation": "./.yarn/cache/cross-env-npm-7.0.3-96d81820f4-e99911f0d3.zip/node_modules/cross-env/",\ "packageDependencies": [\ ["cross-env", "npm:7.0.3"],\ - ["cross-spawn", "npm:7.0.3"]\ + ["cross-spawn", "npm:7.0.5"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["cross-spawn", [\ - ["npm:6.0.5", {\ - "packageLocation": "./.yarn/cache/cross-spawn-npm-6.0.5-2deab6c280-f07e643b48.zip/node_modules/cross-spawn/",\ - "packageDependencies": [\ - ["cross-spawn", "npm:6.0.5"],\ - ["nice-try", "npm:1.0.5"],\ - ["path-key", "npm:2.0.1"],\ - ["semver", "npm:7.5.3"],\ - ["shebang-command", "npm:1.2.0"],\ - ["which", "npm:1.3.1"]\ - ],\ - "linkType": "HARD"\ - }],\ - ["npm:7.0.3", {\ - "packageLocation": "./.yarn/cache/cross-spawn-npm-7.0.3-e4ff3e65b3-e1a13869d2.zip/node_modules/cross-spawn/",\ + ["npm:7.0.5", {\ + "packageLocation": "./.yarn/cache/cross-spawn-npm-7.0.5-cf92562a27-c95062469d.zip/node_modules/cross-spawn/",\ "packageDependencies": [\ - ["cross-spawn", "npm:7.0.3"],\ + ["cross-spawn", "npm:7.0.5"],\ ["path-key", "npm:3.1.1"],\ ["shebang-command", "npm:2.0.0"],\ ["which", "npm:2.0.2"]\ @@ -9692,7 +9703,7 @@ const RAW_RUNTIME_STATE = ["@ungap/structured-clone", "npm:1.2.0"],\ ["ajv", "npm:6.12.6"],\ ["chalk", "npm:4.1.2"],\ - ["cross-spawn", "npm:7.0.3"],\ + ["cross-spawn", "npm:7.0.5"],\ ["debug", "virtual:4b12ba5111caf7e8338099bdbc7cb046a9f8e079a44e74d0c03dca469876e3071ebbe671c5e90ae6b78ae33e22c205fa5ed32169a4aabd1404b13c56d09986e1#npm:4.3.4"],\ ["doctrine", "npm:3.0.0"],\ ["escape-string-regexp", "npm:4.0.0"],\ @@ -10226,7 +10237,7 @@ const RAW_RUNTIME_STATE = "packageLocation": "./.yarn/cache/execa-npm-5.1.1-191347acf5-8ada91f2d7.zip/node_modules/execa/",\ "packageDependencies": [\ ["execa", "npm:5.1.1"],\ - ["cross-spawn", "npm:7.0.3"],\ + ["cross-spawn", "npm:7.0.5"],\ ["get-stream", "npm:6.0.1"],\ ["human-signals", "npm:2.1.0"],\ ["is-stream", "npm:2.0.1"],\ @@ -10637,7 +10648,7 @@ const RAW_RUNTIME_STATE = "packageLocation": "./.yarn/cache/foreground-child-npm-2.0.0-80c976b61e-f36574ad8e.zip/node_modules/foreground-child/",\ "packageDependencies": [\ ["foreground-child", "npm:2.0.0"],\ - ["cross-spawn", 
"npm:7.0.3"],\ + ["cross-spawn", "npm:7.0.5"],\ ["signal-exit", "npm:3.0.7"]\ ],\ "linkType": "HARD"\ @@ -10646,7 +10657,7 @@ const RAW_RUNTIME_STATE = "packageLocation": "./.yarn/cache/foreground-child-npm-3.1.1-77e78ed774-087edd4485.zip/node_modules/foreground-child/",\ "packageDependencies": [\ ["foreground-child", "npm:3.1.1"],\ - ["cross-spawn", "npm:7.0.3"],\ + ["cross-spawn", "npm:7.0.5"],\ ["signal-exit", "npm:4.1.0"]\ ],\ "linkType": "HARD"\ @@ -12425,7 +12436,7 @@ const RAW_RUNTIME_STATE = "packageDependencies": [\ ["istanbul-lib-processinfo", "npm:2.0.3"],\ ["archy", "npm:1.0.0"],\ - ["cross-spawn", "npm:7.0.3"],\ + ["cross-spawn", "npm:7.0.5"],\ ["istanbul-lib-coverage", "npm:3.2.2"],\ ["p-map", "npm:3.0.0"],\ ["rimraf", "npm:3.0.2"],\ @@ -14282,7 +14293,7 @@ const RAW_RUNTIME_STATE = ["log-symbols", "npm:4.1.0"],\ ["minimatch", "npm:5.0.1"],\ ["ms", "npm:2.1.3"],\ - ["nanoid", "npm:3.3.3"],\ + ["nanoid", "npm:3.3.8"],\ ["serialize-javascript", "npm:6.0.0"],\ ["strip-json-comments", "npm:3.1.1"],\ ["supports-color", "npm:8.1.1"],\ @@ -14312,7 +14323,7 @@ const RAW_RUNTIME_STATE = ["log-symbols", "npm:4.1.0"],\ ["minimatch", "npm:4.2.1"],\ ["ms", "npm:2.1.3"],\ - ["nanoid", "npm:3.3.1"],\ + ["nanoid", "npm:3.3.8"],\ ["serialize-javascript", "npm:6.0.0"],\ ["strip-json-comments", "npm:3.1.1"],\ ["supports-color", "npm:8.1.1"],\ @@ -14473,17 +14484,10 @@ const RAW_RUNTIME_STATE = }]\ ]],\ ["nanoid", [\ - ["npm:3.3.1", {\ - "packageLocation": "./.yarn/cache/nanoid-npm-3.3.1-bdd760bee0-306f2cb9e4.zip/node_modules/nanoid/",\ - "packageDependencies": [\ - ["nanoid", "npm:3.3.1"]\ - ],\ - "linkType": "HARD"\ - }],\ - ["npm:3.3.3", {\ - "packageLocation": "./.yarn/cache/nanoid-npm-3.3.3-25d865be84-c703ed58a2.zip/node_modules/nanoid/",\ + ["npm:3.3.8", {\ + "packageLocation": "./.yarn/cache/nanoid-npm-3.3.8-d22226208b-2d1766606c.zip/node_modules/nanoid/",\ "packageDependencies": [\ - ["nanoid", "npm:3.3.3"]\ + ["nanoid", "npm:3.3.8"]\ ],\ "linkType": "HARD"\ }]\ @@ -14549,15 +14553,6 @@ const RAW_RUNTIME_STATE = "linkType": "HARD"\ }]\ ]],\ - ["nice-try", [\ - ["npm:1.0.5", {\ - "packageLocation": "./.yarn/cache/nice-try-npm-1.0.5-963856b16f-0b4af3b5bb.zip/node_modules/nice-try/",\ - "packageDependencies": [\ - ["nice-try", "npm:1.0.5"]\ - ],\ - "linkType": "HARD"\ - }]\ - ]],\ ["nise", [\ ["npm:5.1.5", {\ "packageLocation": "./.yarn/cache/nise-npm-5.1.5-847a2de198-c6afe82b91.zip/node_modules/nise/",\ @@ -15675,7 +15670,7 @@ const RAW_RUNTIME_STATE = "packageDependencies": [\ ["password-prompt", "npm:1.1.2"],\ ["ansi-escapes", "npm:3.2.0"],\ - ["cross-spawn", "npm:6.0.5"]\ + ["cross-spawn", "npm:7.0.5"]\ ],\ "linkType": "HARD"\ }],\ @@ -15684,7 +15679,7 @@ const RAW_RUNTIME_STATE = "packageDependencies": [\ ["password-prompt", "npm:1.1.3"],\ ["ansi-escapes", "npm:4.3.2"],\ - ["cross-spawn", "npm:7.0.3"]\ + ["cross-spawn", "npm:7.0.5"]\ ],\ "linkType": "HARD"\ }]\ @@ -15742,13 +15737,6 @@ const RAW_RUNTIME_STATE = }]\ ]],\ ["path-key", [\ - ["npm:2.0.1", {\ - "packageLocation": "./.yarn/cache/path-key-npm-2.0.1-b1a971833d-6e654864e3.zip/node_modules/path-key/",\ - "packageDependencies": [\ - ["path-key", "npm:2.0.1"]\ - ],\ - "linkType": "HARD"\ - }],\ ["npm:3.1.1", {\ "packageLocation": "./.yarn/cache/path-key-npm-3.1.1-0e66ea8321-55cd7a9dd4.zip/node_modules/path-key/",\ "packageDependencies": [\ @@ -17205,14 +17193,6 @@ const RAW_RUNTIME_STATE = }]\ ]],\ ["shebang-command", [\ - ["npm:1.2.0", {\ - "packageLocation": 
"./.yarn/cache/shebang-command-npm-1.2.0-8990ba5d1d-9eed175030.zip/node_modules/shebang-command/",\ - "packageDependencies": [\ - ["shebang-command", "npm:1.2.0"],\ - ["shebang-regex", "npm:1.0.0"]\ - ],\ - "linkType": "HARD"\ - }],\ ["npm:2.0.0", {\ "packageLocation": "./.yarn/cache/shebang-command-npm-2.0.0-eb2b01921d-6b52fe8727.zip/node_modules/shebang-command/",\ "packageDependencies": [\ @@ -17223,13 +17203,6 @@ const RAW_RUNTIME_STATE = }]\ ]],\ ["shebang-regex", [\ - ["npm:1.0.0", {\ - "packageLocation": "./.yarn/cache/shebang-regex-npm-1.0.0-c3612b74e9-404c5a752c.zip/node_modules/shebang-regex/",\ - "packageDependencies": [\ - ["shebang-regex", "npm:1.0.0"]\ - ],\ - "linkType": "HARD"\ - }],\ ["npm:3.0.0", {\ "packageLocation": "./.yarn/cache/shebang-regex-npm-3.0.0-899a0cd65e-1a2bcae50d.zip/node_modules/shebang-regex/",\ "packageDependencies": [\ @@ -19248,7 +19221,7 @@ const RAW_RUNTIME_STATE = ["ultra-runner", "npm:3.10.5"],\ ["ansi-split", "npm:1.0.1"],\ ["chalk", "npm:4.1.2"],\ - ["cross-spawn", "npm:7.0.3"],\ + ["cross-spawn", "npm:7.0.5"],\ ["fast-glob", "npm:3.2.12"],\ ["globrex", "npm:0.1.2"],\ ["ignore", "npm:5.2.0"],\ diff --git a/.yarn/cache/cross-spawn-npm-6.0.5-2deab6c280-f07e643b48.zip b/.yarn/cache/cross-spawn-npm-6.0.5-2deab6c280-f07e643b48.zip deleted file mode 100644 index c48fb4ce05..0000000000 Binary files a/.yarn/cache/cross-spawn-npm-6.0.5-2deab6c280-f07e643b48.zip and /dev/null differ diff --git a/.yarn/cache/cross-spawn-npm-7.0.3-e4ff3e65b3-e1a13869d2.zip b/.yarn/cache/cross-spawn-npm-7.0.3-e4ff3e65b3-e1a13869d2.zip deleted file mode 100644 index 9aa6dc0d86..0000000000 Binary files a/.yarn/cache/cross-spawn-npm-7.0.3-e4ff3e65b3-e1a13869d2.zip and /dev/null differ diff --git a/.yarn/cache/cross-spawn-npm-7.0.5-cf92562a27-c95062469d.zip b/.yarn/cache/cross-spawn-npm-7.0.5-cf92562a27-c95062469d.zip new file mode 100644 index 0000000000..afb750a5c2 Binary files /dev/null and b/.yarn/cache/cross-spawn-npm-7.0.5-cf92562a27-c95062469d.zip differ diff --git a/.yarn/cache/fsevents-patch-19706e7e35-10.zip b/.yarn/cache/fsevents-patch-19706e7e35-10.zip deleted file mode 100644 index aff1ab12ce..0000000000 Binary files a/.yarn/cache/fsevents-patch-19706e7e35-10.zip and /dev/null differ diff --git a/.yarn/cache/nanoid-npm-3.3.1-bdd760bee0-306f2cb9e4.zip b/.yarn/cache/nanoid-npm-3.3.1-bdd760bee0-306f2cb9e4.zip deleted file mode 100644 index aaa2856372..0000000000 Binary files a/.yarn/cache/nanoid-npm-3.3.1-bdd760bee0-306f2cb9e4.zip and /dev/null differ diff --git a/.yarn/cache/nanoid-npm-3.3.3-25d865be84-c703ed58a2.zip b/.yarn/cache/nanoid-npm-3.3.3-25d865be84-c703ed58a2.zip deleted file mode 100644 index b4130ad872..0000000000 Binary files a/.yarn/cache/nanoid-npm-3.3.3-25d865be84-c703ed58a2.zip and /dev/null differ diff --git a/.yarn/cache/nanoid-npm-3.3.8-d22226208b-2d1766606c.zip b/.yarn/cache/nanoid-npm-3.3.8-d22226208b-2d1766606c.zip new file mode 100644 index 0000000000..ec9e2621c6 Binary files /dev/null and b/.yarn/cache/nanoid-npm-3.3.8-d22226208b-2d1766606c.zip differ diff --git a/.yarn/cache/nice-try-npm-1.0.5-963856b16f-0b4af3b5bb.zip b/.yarn/cache/nice-try-npm-1.0.5-963856b16f-0b4af3b5bb.zip deleted file mode 100644 index e022a139d6..0000000000 Binary files a/.yarn/cache/nice-try-npm-1.0.5-963856b16f-0b4af3b5bb.zip and /dev/null differ diff --git a/.yarn/cache/path-key-npm-2.0.1-b1a971833d-6e654864e3.zip b/.yarn/cache/path-key-npm-2.0.1-b1a971833d-6e654864e3.zip deleted file mode 100644 index a643be7be9..0000000000 Binary files 
a/.yarn/cache/path-key-npm-2.0.1-b1a971833d-6e654864e3.zip and /dev/null differ diff --git a/.yarn/cache/shebang-command-npm-1.2.0-8990ba5d1d-9eed175030.zip b/.yarn/cache/shebang-command-npm-1.2.0-8990ba5d1d-9eed175030.zip deleted file mode 100644 index 9b734d105d..0000000000 Binary files a/.yarn/cache/shebang-command-npm-1.2.0-8990ba5d1d-9eed175030.zip and /dev/null differ diff --git a/.yarn/cache/shebang-regex-npm-1.0.0-c3612b74e9-404c5a752c.zip b/.yarn/cache/shebang-regex-npm-1.0.0-c3612b74e9-404c5a752c.zip deleted file mode 100644 index 607d724c33..0000000000 Binary files a/.yarn/cache/shebang-regex-npm-1.0.0-c3612b74e9-404c5a752c.zip and /dev/null differ diff --git a/.yarnrc.yml b/.yarnrc.yml index c59f803d7d..148512db4c 100644 --- a/.yarnrc.yml +++ b/.yarnrc.yml @@ -17,6 +17,10 @@ npmAuditExcludePackages: - micromatch # TODO: remove when new micromatch will be released https://github.com/advisories/GHSA-952p-6rrq-rcjv - eslint # TODO: Update eslint https://github.com/dashpay/platform/issues/2212 - elliptic # TODO: Remove when elliptic >6.5.7 released + - memdown # TODO: Update leveldb + - levelup # TODO: Update leveldb + - deferred-leveldown # TODO: Update leveldb + - abstract-leveldown # TODO: Update leveldb packageExtensions: "@dashevo/protobufjs@*": diff --git a/CHANGELOG.md b/CHANGELOG.md index 5bc63c80c9..e1422f602c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,151 @@ -### [1.5.1](https://github.com/dashpay/platform/compare/v1.5.0...v1.5.1) (2024-11-04) +### [1.7.0](https://github.com/dashpay/platform/compare/v1.6.2...v1.7.0) (2024-12-13) + + +### ⚠ BREAKING CHANGES + +* **sdk:** `AddressList.available` removed +* **sdk:** you need to use `Waitable` to call `wait_for_response()` +* **sdk:** changed multiple args of functions in state transition broadcast logic +* **sdk:** `From<Uri> for Address` replaced with `TryFrom<Uri> for Address` +* **sdk:** `From<&str> for AddressList` replaced with `FromStr for AddressList` +* **sdk:** `FromIterator<Uri>
for AddressList` replaced with `FromIterator<Address> for AddressList` +* **sdk:** `LowLevelDashCoreClient` now returns `DashCoreError` instead of `ContextProviderError`. +* **sdk:** Added `ContextProviderError::DashCoreError` variant +* **sdk:** `dash_sdk::Error::CoreClientError` now uses `DashCoreError` instead of `dashcore_rpc::Error`. + +### Features + +* **sdk:** ban addresses failed in sdk ([#2351](https://github.com/dashpay/platform/issues/2351)) + + +### Bug Fixes + +* **drive:** security vulnerability in hashbrown ([#2375](https://github.com/dashpay/platform/issues/2375)) +* **sdk:** create channel error due to empty address ([#2317](https://github.com/dashpay/platform/issues/2317)) + + +### Build System + +* explicitly require tonic v1.12.3 ([#2357](https://github.com/dashpay/platform/issues/2357)) + + +### Continuous Integration + +* remove manage runs workflow ([#2325](https://github.com/dashpay/platform/issues/2325)) +* replace actions/upload-artifact@v3 with actions/upload-artifact@v4 ([#2359](https://github.com/dashpay/platform/issues/2359)) + + +### Miscellaneous Chores + +* make protocol version 7 ([#2376](https://github.com/dashpay/platform/issues/2376)) +* **dashmate:** set tenderdash version to 1 ([#2385](https://github.com/dashpay/platform/issues/2385)) +* **dashmate:** update Core to version 22 ([#2384](https://github.com/dashpay/platform/issues/2384)) +* address vulnerability GHSA-mwcw-c2x4-8c55 ([#2382](https://github.com/dashpay/platform/issues/2382)) + + +### Code Refactoring + +* **sdk:** unify state transition processing ([#2338](https://github.com/dashpay/platform/issues/2338)) +* **sdk:** separate dash core client error + +### [1.6.2](https://github.com/dashpay/platform/compare/v1.6.1...v1.6.2) (2024-12-03) +### Bug Fixes + +* **dashmate:** status command fails if drive is not running ([#2364](https://github.com/dashpay/platform/issues/2364)) +* **dashmate:** zero ssl verification passes without being verified ([#2365](https://github.com/dashpay/platform/issues/2365)) + + +### Miscellaneous Chores + +* ignore leveldb deprecation warnings ([#2366](https://github.com/dashpay/platform/issues/2366)) + + +### Build System + +* enable drive image for Ivy Bridge CPU ([#2363](https://github.com/dashpay/platform/issues/2363)) + +### [1.6.1](https://github.com/dashpay/platform/compare/v1.6.0...v1.6.1) (2024-11-29) + +## [1.6.0-dev.2](https://github.com/dashpay/platform/compare/v1.6.0-dev.1...v1.6.0-dev.2) (2024-11-27) + + +### ⚠ BREAKING CHANGES + +* **sdk:** retry broadcast operations (#2337) + + +### Features + +* **sdk:** retry broadcast operations ([#2337](https://github.com/dashpay/platform/issues/2337)) + + +### Reverted + +* **dashmate:** update Core to version 22 + + +### Continuous Integration + +* change s3 cache provider to optimize costs ([#2344](https://github.com/dashpay/platform/issues/2344)) + +## [1.6.0-dev.1](https://github.com/dashpay/platform/compare/v1.5.1...v1.6.0-dev.1) (2024-11-25) + + +### Features + +* integrate wallet contract ([#2345](https://github.com/dashpay/platform/issues/2345)) +* **sdk:** fetch many and return metadata and proof to client ([#2331](https://github.com/dashpay/platform/issues/2331)) +* **sdk:** including grovedb proof bytes when error in proof verification ([#2332](https://github.com/dashpay/platform/issues/2332)) + + +### Bug Fixes + +* **dashmate:** container name is already in use ([#2341](https://github.com/dashpay/platform/issues/2341)) +* **dashmate:** failing doctor with drive metrics enabled ([#2348](https://github.com/dashpay/platform/issues/2348)) +* **dashmate:**
various ZeroSSL cert verification errors ([#2339](https://github.com/dashpay/platform/issues/2339)) +* document query start after ([#2347](https://github.com/dashpay/platform/issues/2347)) +* **drive:** nonce already present in past internal error ([#2343](https://github.com/dashpay/platform/issues/2343)) + + +### Build System + +* adjust docker build context ([#2336](https://github.com/dashpay/platform/issues/2336)) +* use cargo binstall to speed up builds ([#2321](https://github.com/dashpay/platform/issues/2321)) + + +### Miscellaneous Chores + +* **dashmate:** update Core to version 22 +* **sdk:** expose proof verifier errors ([#2333](https://github.com/dashpay/platform/issues/2333)) +* **sdk:** vote poll queries encoding ([#2334](https://github.com/dashpay/platform/issues/2334)) + + +### Continuous Integration + +* improve caching ([#2329](https://github.com/dashpay/platform/issues/2329)) +* prebuilt librocksdb in docker image ([#2318](https://github.com/dashpay/platform/issues/2318)) +* run devcontainers workflow only on push to master ([#2295](https://github.com/dashpay/platform/issues/2295)) +* switch release workflow to github runners ([#2346](https://github.com/dashpay/platform/issues/2346)) +* switch test workflow to github runners ([#2319](https://github.com/dashpay/platform/issues/2319)) +* use prebuilt librocksdb in github actions ([#2316](https://github.com/dashpay/platform/issues/2316)) + + +### Tests + +* hardcoded identity transfers in strategy tests ([#2322](https://github.com/dashpay/platform/issues/2322)) + + +### [1.5.1](https://github.com/dashpay/platform/compare/v1.5.0...v1.5.1) (2024-11-04) + ### Bug Fixes * **drive:** apply batch is not using transaction in `remove_all_votes_given_by_identities` ([#2309](https://github.com/dashpay/platform/issues/2309)) * **drive:** uncommitted state if db transaction fails ([#2305](https://github.com/dashpay/platform/issues/2305)) + ## [1.5.0](https://github.com/dashpay/platform/compare/v1.4.1...v1.5.0) (2024-11-01) ### ⚠ BREAKING CHANGES diff --git a/Cargo.lock b/Cargo.lock index 58ec63a80f..3fae6b8a6e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,19 +4,13 @@ version = 3 [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - [[package]] name = "adler2" version = "2.0.0" @@ -70,9 +64,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.18" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" [[package]] name = "android-tzdata" @@ -97,9 +91,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.15" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -112,49
+106,49 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.4" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" [[package]] name = "arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" dependencies = [ "derive_arbitrary", ] @@ -167,9 +161,9 @@ checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "arrayref" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" [[package]] name = "arrayvec" @@ -196,9 +190,9 @@ dependencies = [ [[package]] name = "async-stream" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" dependencies = [ "async-stream-impl", "futures-core", @@ -207,13 +201,13 @@ dependencies = [ [[package]] name = "async-stream-impl" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -224,7 +218,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -235,9 +229,9 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" 
-version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "axum" @@ -266,7 +260,7 @@ dependencies = [ "serde_json", "serde_path_to_error", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper", "tokio", "tower", "tower-layer", @@ -276,9 +270,9 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" dependencies = [ "async-trait", "bytes", @@ -289,7 +283,7 @@ dependencies = [ "mime", "pin-project-lite", "rustversion", - "sync_wrapper 0.1.2", + "sync_wrapper", "tower-layer", "tower-service", "tracing", @@ -297,21 +291,20 @@ dependencies = [ [[package]] name = "axum-macros" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00c055ee2d014ae5981ce1016374e8213682aa14d9bf40e48ab48b5f3ef20eaa" +checksum = "57d123550fa8d071b7255cb0cc04dc302baa6c8c4a79f55701552684d8399bce" dependencies = [ - "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "backon" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4fa97bb310c33c811334143cf64c5bb2b7b3c06e453db6b095d7061eff8f113" +checksum = "ba5289ec98f68f28dd809fd601059e6aa908bb8f6108620930828283d4ee23d7" dependencies = [ "fastrand", "gloo-timers", @@ -320,17 +313,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.7.4", + "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -401,15 +394,15 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.75", + "syn 2.0.89", "which", ] [[package]] name = "bindgen" -version = "0.69.4" +version = "0.69.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" dependencies = [ "bitflags 2.6.0", "cexpr", @@ -422,7 +415,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -432,7 +425,7 @@ source = "git+https://github.com/dashpay/rs-bip37-bloom-filter?branch=develop#35 dependencies = [ "bitvec", "murmur3", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -452,9 +445,9 @@ checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" [[package]] name = "bitcoin-io" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "340e09e8399c7bd8912f495af6aa58bea0c9214773417ffaa8f6460f93aaee56" +checksum = "0b47c4ab7a93edb0c7198c5535ed9b52b63095f4e9b45279c6736cec4b856baf" [[package]] name = "bitcoin_hashes" @@ -492,15 +485,15 @@ dependencies = [ [[package]] name = "blake3" -version = "1.5.4" +version = "1.5.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7" +checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" dependencies = [ "arrayref", "arrayvec", "cc", "cfg-if", - "constant_time_eq 0.3.0", + "constant_time_eq 0.3.1", ] [[package]] @@ -535,9 +528,9 @@ dependencies = [ [[package]] name = "borsh" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" +checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03" dependencies = [ "borsh-derive", "cfg_aliases", @@ -545,16 +538,15 @@ dependencies = [ [[package]] name = "borsh-derive" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3ef8005764f53cd4dca619f5bf64cafd4664dada50ece25e4d81de54c80cc0b" +checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244" dependencies = [ "once_cell", - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.75", - "syn_derive", + "syn 2.0.89", ] [[package]] @@ -608,9 +600,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" dependencies = [ "serde", ] @@ -644,9 +636,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.1.14" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d2eb3cd3d1bf4529e31c215ee6f93ec5a3d536d9f578f93d9d33ee19562932" +checksum = "f34d93e62b03caf570cccc334cbc6c2fceca82f39211051345108adcba3eebdc" dependencies = [ "jobserver", "libc", @@ -676,7 +668,7 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "check-features" -version = "1.5.1" +version = "1.7.0" dependencies = [ "toml", ] @@ -770,9 +762,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.16" +version = "4.5.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" +checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" dependencies = [ "clap_builder", "clap_derive", @@ -780,9 +772,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.15" +version = "4.5.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" dependencies = [ "anstream", "anstyle", @@ -792,27 +784,27 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.13" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "clap_lex" -version = "0.7.2" +version = "0.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" [[package]] name = "colorchoice" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "colored" @@ -835,9 +827,9 @@ dependencies = [ [[package]] name = "console-api" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ed14aa9c9f927213c6e4f3ef75faaad3406134efe84ba2cb7983431d5f0931" +checksum = "8030735ecb0d128428b64cd379809817e620a40e5001c54465b99ec5feec2857" dependencies = [ "futures-core", "prost", @@ -848,9 +840,9 @@ dependencies = [ [[package]] name = "console-subscriber" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e3a111a37f3333946ebf9da370ba5c5577b18eb342ec683eb488dd21980302" +checksum = "6539aa9c6a4cd31f4b1c040f860a1eac9aa80e7df6b05d506a6e7179936d6a01" dependencies = [ "console-api", "crossbeam-channel", @@ -886,9 +878,9 @@ checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" [[package]] name = "constant_time_eq" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" [[package]] name = "convert_case" @@ -906,6 +898,16 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -914,9 +916,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.13" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51e852e6dc9a5bed1fae92dd2375037bf2b768725bf3be87811edee3249d09ad" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -1040,12 +1042,12 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "dapi-grpc" -version = "1.5.1" +version = "1.7.0" dependencies = [ "dapi-grpc-macros", "futures-core", @@ -1061,12 +1063,12 @@ dependencies = [ [[package]] name = "dapi-grpc-macros" -version = "1.5.1" +version = "1.7.0" dependencies = [ "dapi-grpc", - "heck 0.5.0", + "heck", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -1090,7 +1092,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -1101,12 +1103,12 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "dash-sdk" -version = "1.5.1" +version = "1.7.0" dependencies = [ "arc-swap", "async-trait", @@ -1135,7 +1137,7 @@ dependencies = [ "serde", "serde_json", "test-case", - "thiserror", + 
"thiserror 1.0.69", "tokio", "tokio-test", "tokio-util", @@ -1212,17 +1214,17 @@ dependencies = [ [[package]] name = "dashpay-contract" -version = "1.5.1" +version = "1.7.0" dependencies = [ "platform-value", "platform-version", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] name = "data-contracts" -version = "1.5.1" +version = "1.7.0" dependencies = [ "dashpay-contract", "dpns-contract", @@ -1231,19 +1233,20 @@ dependencies = [ "platform-value", "platform-version", "serde_json", - "thiserror", + "thiserror 1.0.69", + "wallet-utils-contract", "withdrawals-contract", ] [[package]] name = "delegate" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5060bb0febb73fa907273f8a7ed17ab4bf831d585eac835b28ec24a1e2460956" +checksum = "bc2323e10c92e1cf4d86e11538512e6dc03ceb586842970b6332af3d4046a046" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -1268,13 +1271,13 @@ dependencies = [ [[package]] name = "derive_arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" +checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -1287,7 +1290,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -1307,7 +1310,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", "unicode-xid", ] @@ -1336,7 +1339,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -1353,17 +1356,17 @@ checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" [[package]] name = "dpns-contract" -version = "1.5.1" +version = "1.7.0" dependencies = [ "platform-value", "platform-version", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] name = "dpp" -version = "1.5.1" +version = "1.7.0" dependencies = [ "anyhow", "assert_matches", @@ -1407,13 +1410,13 @@ dependencies = [ "sha2", "strum", "test-case", - "thiserror", + "thiserror 1.0.69", "tokio", ] [[package]] name = "drive" -version = "1.5.1" +version = "1.7.0" dependencies = [ "arc-swap", "assert_matches", @@ -1448,13 +1451,13 @@ dependencies = [ "serde_json", "sqlparser", "tempfile", - "thiserror", + "thiserror 1.0.69", "tracing", ] [[package]] name = "drive-abci" -version = "1.5.1" +version = "1.7.0" dependencies = [ "arc-swap", "assert_matches", @@ -1498,7 +1501,7 @@ dependencies = [ "strategy-tests", "tempfile", "tenderdash-abci", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-util", "tracing", @@ -1508,7 +1511,7 @@ dependencies = [ [[package]] name = "drive-proof-verifier" -version = "1.5.1" +version = "1.7.0" dependencies = [ "bincode", "dapi-grpc", @@ -1522,7 +1525,7 @@ dependencies = [ "serde", "serde_json", "tenderdash-abci", - "thiserror", + "thiserror 1.0.69", "tracing", ] @@ -1533,7 +1536,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9c8d6ea916fadcd87e3d1ff4802b696d717c83519b47e76f267ab77e536dd5a" dependencies = [ "ed-derive", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1580,9 +1583,9 @@ checksum = 
"60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "encoding_rs" -version = "0.8.34" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] @@ -1604,7 +1607,7 @@ checksum = "f282cfdfe92516eb26c2af8589c274c7c17681f5ecc03c18255fe741c6aa64eb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -1660,12 +1663,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1708,7 +1711,7 @@ dependencies = [ "proc-macro2", "quote", "syn 1.0.109", - "synstructure", + "synstructure 0.12.6", ] [[package]] @@ -1718,24 +1721,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2" dependencies = [ "bit-set", - "regex-automata 0.4.7", - "regex-syntax 0.8.4", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", ] [[package]] name = "fastrand" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" [[package]] name = "feature-flags-contract" -version = "1.5.1" +version = "1.7.0" dependencies = [ "platform-value", "platform-version", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1762,12 +1765,12 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.32" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c0596c1eac1f9e04ed902702e9878208b336edc9d6fddc8a48387349bab3666" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", - "miniz_oxide 0.8.0", + "miniz_oxide", ] [[package]] @@ -1785,6 +1788,12 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2" + [[package]] name = "foreign-types" version = "0.3.2" @@ -1839,9 +1848,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -1854,9 +1863,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -1864,15 +1873,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -1881,38 +1890,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-macro" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", @@ -1951,9 +1960,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" @@ -2001,7 +2010,7 @@ dependencies = [ "reqwest", "sha2", "tempfile", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-util", "tower-http", @@ -2016,7 +2025,7 @@ checksum = "360f7c8d3b20beafcbf3cde8754bbcfd201ae2a30ec7594a4b9678fd2fa3c7a8" dependencies = [ "integer-encoding", "intmap", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -2029,7 +2038,7 @@ dependencies = [ "hex", "integer-encoding", "intmap", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -2054,7 +2063,7 @@ dependencies = [ "integer-encoding", "num_cpus", "rand", - "thiserror", + "thiserror 1.0.69", "time", ] @@ -2081,7 +2090,7 @@ dependencies = [ "rocksdb", "strum", 
"tempfile", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -2090,7 +2099,7 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4be0c1a1ef97068fe93212e7b6f349e0b44a9fc90063c8c28e110cfb8c2fcb2" dependencies = [ - "thiserror", + "thiserror 1.0.69", "versioned-feature-core 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2111,14 +2120,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "921b9a29facf9d3f0de667cd1da083a34695ede9e7bfacd74bb5bd29f8f7c178" dependencies = [ "serde", - "serde_with 3.9.0", + "serde_with 3.11.0", ] [[package]] name = "h2" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", "bytes", @@ -2160,20 +2169,15 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.5" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ - "ahash 0.8.11", "allocator-api2", + "equivalent", + "foldhash", ] -[[package]] -name = "hashbrown" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" - [[package]] name = "hdrhistogram" version = "7.5.4" @@ -2187,12 +2191,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - [[package]] name = "heck" version = "0.5.0" @@ -2295,9 +2293,9 @@ dependencies = [ [[package]] name = "http-range-header" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a397c49fec283e3d6211adbe480be95aae5f304cfb923e9970e08956d5168a" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" [[package]] name = "http-serde" @@ -2311,9 +2309,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.9.4" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" [[package]] name = "httpdate" @@ -2329,9 +2327,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "1.4.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", @@ -2350,9 +2348,9 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.2" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", "http", @@ -2367,9 +2365,9 @@ dependencies = [ [[package]] name = "hyper-timeout" -version = 
"0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ "hyper", "hyper-util", @@ -2396,9 +2394,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.7" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", @@ -2409,7 +2407,6 @@ dependencies = [ "pin-project-lite", "socket2", "tokio", - "tower", "tower-service", "tracing", ] @@ -2437,6 +2434,124 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -2445,12 +2560,23 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "icu_normalizer", + "icu_properties", ] [[package]] @@ -2471,7 +2597,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", - "hashbrown 0.15.0", + "hashbrown 0.15.2", "serde", ] @@ -2501,9 +2627,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.9.0" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" [[package]] name = "is-terminal" @@ -2560,9 +2686,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jobserver" @@ -2590,19 +2716,19 @@ checksum = "ec9ad60d674508f3ca8f380a928cfe7b096bc729c4e2dbfe3852bc45da3ab30b" dependencies = [ "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] name = "json-schema-compatibility-validator" -version = "1.5.1" +version = "1.7.0" dependencies = [ "assert_matches", "json-patch", "json-schema-compatibility-validator", "once_cell", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -2663,9 +2789,9 @@ checksum = "744a4c881f502e98c2241d2e5f50040ac73b30194d64452bb6260393b53f0dc9" [[package]] name = "libc" -version = "0.2.158" +version = "0.2.166" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "c2ccc108bbc0b1331bd061864e7cd823c0cab660bbe6970e66e2c0614decde36" [[package]] name = "libloading" @@ -2683,7 +2809,7 @@ version = "0.16.0+8.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce3d60bc059831dc1c83903fb45c103f75db65c5a7bf22272764d9cc683e348c" dependencies = [ - "bindgen 0.69.4", + "bindgen 0.69.5", "bzip2-sys", "cc", "glob", @@ -2695,9 +2821,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.19" +version = "1.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc53a7799a7496ebc9fd29f31f7df80e83c9bda5299768af5f9e59eeea74647" +checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" dependencies = [ "cc", "pkg-config", @@ -2710,6 +2836,12 @@ version = 
"0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "litemap" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" + [[package]] name = "lock_api" version = "0.4.12" @@ -2734,18 +2866,18 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "lru" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ee39891760e7d94734f6f63fedc29a2e4a152f836120753a72503f09fcf904" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.14.5", + "hashbrown 0.15.2", ] [[package]] name = "lz4-sys" -version = "1.10.0" +version = "1.11.1+lz4-1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "109de74d5d2353660401699a4174a4ff23fcc649caf553df71933c7fb45ad868" +checksum = "6bd8c0d6c6ed0cd30b3652886bb8711dc4bb01d637a68105a3d5158039b418e6" dependencies = [ "cc", "libc", @@ -2753,12 +2885,12 @@ dependencies = [ [[package]] name = "masternode-reward-shares-contract" -version = "1.5.1" +version = "1.7.0" dependencies = [ "platform-value", "platform-version", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -2784,9 +2916,9 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "metrics" -version = "0.23.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884adb57038347dfbaf2d5065887b6cf4312330dc8e94bc30a1a839bd79d3261" +checksum = "7a7deb012b3b2767169ff203fadb4c6b0b82b947512e5eb9e0b78c2e186ad9e3" dependencies = [ "ahash 0.8.11", "portable-atomic", @@ -2794,9 +2926,9 @@ dependencies = [ [[package]] name = "metrics-exporter-prometheus" -version = "0.15.3" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4f0c8427b39666bf970460908b213ec09b3b350f20c0c2eabcbba51704a08e6" +checksum = "85b6f8152da6d7892ff1b7a1c0fa3f435e92b5918ad67035c3bb432111d9a29b" dependencies = [ "base64 0.22.1", "http-body-util", @@ -2807,22 +2939,21 @@ dependencies = [ "metrics", "metrics-util", "quanta", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", ] [[package]] name = "metrics-util" -version = "0.17.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4259040465c955f9f2f1a4a8a16dc46726169bca0f88e8fb2dbeced487c3e828" +checksum = "15b482df36c13dd1869d73d14d28cd4855fbd6cfc32294bee109908a9f4a4ed7" dependencies = [ "crossbeam-epoch", "crossbeam-utils", - "hashbrown 0.14.5", + "hashbrown 0.15.2", "metrics", - "num_cpus", "quanta", "sketches-ddsketch", ] @@ -2849,15 +2980,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" -[[package]] -name = "miniz_oxide" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" -dependencies = [ - "adler", -] - [[package]] name = "miniz_oxide" version = "0.8.0" @@ -2881,9 +3003,9 @@ dependencies = [ [[package]] name = "mockall" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d4c28b3fb6d753d28c20e826cd46ee611fda1cf3cde03a443a974043247c065a" +checksum = "39a6bfcc6c8c7eed5ee98b9c3e33adc726054389233e201c95dab2d41a3839d2" dependencies = [ "cfg-if", "downcast", @@ -2895,14 +3017,14 @@ dependencies = [ [[package]] name = "mockall_derive" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "341014e7f530314e9a1fdbc7400b244efea7122662c96bfa248c31da5bfb2020" +checksum = "25ca3004c2efe9011bd4e461bd8256445052b9615405b4f7ea43fc8ca5c20898" dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -2924,7 +3046,7 @@ dependencies = [ "rustc_version", "smallvec", "tagptr", - "thiserror", + "thiserror 1.0.69", "triomphe", "uuid", ] @@ -2953,7 +3075,7 @@ dependencies = [ "openssl-probe", "openssl-sys", "schannel", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "tempfile", ] @@ -3037,7 +3159,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -3126,26 +3248,26 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "object" -version = "0.36.3" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "oorandom" @@ -3155,9 +3277,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "openssl" -version = "0.10.66" +version = "0.10.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" dependencies = [ "bitflags 2.6.0", "cfg-if", @@ -3176,7 +3298,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -3187,9 +3309,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.103" +version = "0.9.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" dependencies = [ "cc", "libc", @@ -3205,9 +3327,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parking" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = 
"parking_lot" @@ -3285,29 +3407,29 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.5" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.5" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -3327,13 +3449,13 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "platform-serialization" -version = "1.5.1" +version = "1.7.0" dependencies = [ "bincode", "platform-version", @@ -3341,17 +3463,17 @@ dependencies = [ [[package]] name = "platform-serialization-derive" -version = "1.5.1" +version = "1.7.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", "virtue 0.0.17", ] [[package]] name = "platform-value" -version = "1.5.1" +version = "1.7.0" dependencies = [ "base64 0.22.1", "bincode", @@ -3366,36 +3488,36 @@ dependencies = [ "regex", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "treediff", ] [[package]] name = "platform-value-convertible" -version = "1.5.1" +version = "1.7.0" dependencies = [ "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "platform-version" -version = "1.5.1" +version = "1.7.0" dependencies = [ "bincode", "grovedb-version", "once_cell", - "thiserror", + "thiserror 1.0.69", "versioned-feature-core 1.0.0 (git+https://github.com/dashpay/versioned-feature-core)", ] [[package]] name = "platform-versioning" -version = "1.5.1" +version = "1.7.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -3413,24 +3535,24 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "414cec62c6634ae900ea1c56128dfe87cf63e7caece0852ec76aba307cebadb7" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" [[package]] name = "plotters-svg" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81b30686a7d9c3e010b84284bdd26a29f2138574f52f5eb6f794fc0ad924e705" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" dependencies = [ "plotters-backend", ] [[package]] name = "portable-atomic" -version = "1.7.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" +checksum = 
"280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" [[package]] name = "powerfmt" @@ -3485,12 +3607,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.20" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -3505,50 +3627,27 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" -dependencies = [ - "toml_edit 0.21.1", -] - -[[package]] -name = "proc-macro-error" -version = "1.0.4" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", + "toml_edit 0.22.22", ] [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] [[package]] name = "prost" -version = "0.13.1" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13db3d3fde688c61e2446b4d843bc27a7e8af269a69440c0308021dc92333cc" +checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" dependencies = [ "bytes", "prost-derive", @@ -3556,12 +3655,12 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.13.1" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bb182580f71dd070f88d01ce3de9f4da5021db7115d2e1c3605a754153b77c1" +checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15" dependencies = [ "bytes", - "heck 0.5.0", + "heck", "itertools 0.13.0", "log", "multimap", @@ -3571,28 +3670,28 @@ dependencies = [ "prost", "prost-types", "regex", - "syn 2.0.75", + "syn 2.0.89", "tempfile", ] [[package]] name = "prost-derive" -version = "0.13.1" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18bec9b0adc4eba778b33684b7ba3e7137789434769ee3ce3930463ef904cfca" +checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" dependencies = [ "anyhow", "itertools 0.13.0", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "prost-types" -version = "0.13.1" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cee5168b05f49d4b0ca581206eb14a7b22fafd963efe729ac48eb03266e25cc2" +checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670" dependencies = [ "prost", ] @@ -3679,9 +3778,9 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "11.1.0" +version = "11.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb9ee317cfe3fbd54b36a511efc1edd42e216903c9cd575e686dd68a2ba90d8d" +checksum = "1ab240315c661615f2ee9f0f2cd32d5a7343a84d5ebcccb99d46e6637565e7b0" dependencies = [ "bitflags 2.6.0", ] @@ -3708,23 +3807,23 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.3" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ "bitflags 2.6.0", ] [[package]] name = "regex" -version = "1.10.6" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.7", - "regex-syntax 0.8.4", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", ] [[package]] @@ -3738,13 +3837,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.4", + "regex-syntax 0.8.5", ] [[package]] @@ -3755,9 +3854,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rend" @@ -3809,7 +3908,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper", "system-configuration", "tokio", "tokio-native-tls", @@ -3877,7 +3976,7 @@ dependencies = [ [[package]] name = "rs-dapi-client" -version = "1.5.1" +version = "1.7.0" dependencies = [ "backon", "chrono", @@ -3890,7 +3989,7 @@ dependencies = [ "serde", "serde_json", "sha2", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", ] @@ -3935,18 +4034,18 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" -version = "0.38.34" +version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ "bitflags 2.6.0", "errno", @@ -3957,9 +4056,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.12" +version = "0.23.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" +checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1" dependencies = [ "log", "once_cell", @@ -3972,38 +4071,36 @@ 
dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" dependencies = [ "openssl-probe", - "rustls-pemfile", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 3.0.1", ] [[package]] name = "rustls-pemfile" -version = "2.1.3" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" dependencies = [ - "base64 0.22.1", "rustls-pki-types", ] [[package]] name = "rustls-pki-types" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" [[package]] name = "rustls-webpki" -version = "0.102.6" +version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ "ring", "rustls-pki-types", @@ -4012,9 +4109,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "ryu" @@ -4043,11 +4140,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -4090,7 +4187,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags 2.6.0", - "core-foundation", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8" +dependencies = [ + "bitflags 2.6.0", + "core-foundation 0.10.0", "core-foundation-sys", "libc", "security-framework-sys", @@ -4098,9 +4208,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.11.1" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" dependencies = [ "core-foundation-sys", "libc", @@ -4114,9 +4224,9 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.208" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2" +checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" dependencies = [ "serde_derive", ] @@ -4142,20 +4252,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.208" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" +checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "serde_json" -version = "1.0.126" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3b863381a05ffefbc82571a2d893edf47b27fb0ebedbf582c39640e51abebef" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "indexmap 2.6.0", "itoa", @@ -4182,14 +4292,14 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "serde_spanned" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] @@ -4224,9 +4334,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.9.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cecfa94848272156ea67b2b1a53f20fc7bc638c4a46d2f8abde08f05f4b857" +checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" dependencies = [ "base64 0.22.1", "chrono", @@ -4236,7 +4346,7 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "serde_with_macros 3.9.0", + "serde_with_macros 3.11.0", "time", ] @@ -4249,19 +4359,19 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "serde_with_macros" -version = "3.9.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8fee4991ef4f274617a51ad4af30519438dacb2f56ac773b08a1922ff743350" +checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -4327,13 +4437,13 @@ checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" [[package]] name = "simdutf8" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" [[package]] name = "simple-signer" -version = "1.5.1" +version = "1.7.0" dependencies = [ "base64 0.22.1", "bincode", @@ -4343,9 +4453,9 @@ dependencies = [ [[package]] name = "sketches-ddsketch" -version = "0.2.2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85636c14b73d81f541e525f585c0a2109e6744e1565b5c1668e31c70c10ed65c" +checksum = "c1e9a774a6c28142ac54bb25d25562e6bcf957493a184f15ad4eebccb23e410a" [[package]] name = "slab" @@ -4364,9 +4474,9 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -4397,9 +4507,15 @@ dependencies = [ "log", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "strategy-tests" -version = "1.5.1" +version = "1.7.0" dependencies = [ "bincode", "dpp", @@ -4437,11 +4553,11 @@ version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", "rustversion", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -4472,38 +4588,20 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.75" +version = "2.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9" +checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] -[[package]] -name = "syn_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn 2.0.75", -] - [[package]] name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - -[[package]] -name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] @@ -4520,6 +4618,17 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", +] + [[package]] name = "system-configuration" version = "0.6.1" @@ -4527,7 +4636,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags 2.6.0", - "core-foundation", + "core-foundation 0.9.4", "system-configuration-sys", ] @@ -4555,9 +4664,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.12.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", "fastrand", @@ -4578,7 +4687,7 @@ dependencies = [ "semver", "serde_json", "tenderdash-proto", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-util", "tracing", @@ -4618,7 +4727,7 @@ dependencies = [ "tonic-build", "ureq", "walkdir", - "zip 2.2.0", + "zip 2.2.1", ] [[package]] @@ -4654,7 +4763,7 @@ dependencies = [ 
"cfg-if", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -4665,28 +4774,48 @@ checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", "test-case-core", ] [[package]] name = "thiserror" -version = "1.0.64" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa" +dependencies = [ + "thiserror-impl 2.0.3", ] [[package]] name = "thiserror-impl" -version = "1.0.64" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", ] [[package]] @@ -4730,6 +4859,16 @@ dependencies = [ "time-core", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinytemplate" version = "1.2.1" @@ -4757,9 +4896,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.40.0" +version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" dependencies = [ "backtrace", "bytes", @@ -4782,7 +4921,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -4852,7 +4991,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.20", + "toml_edit 0.22.22", ] [[package]] @@ -4877,26 +5016,15 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" -dependencies = [ - "indexmap 2.6.0", - "toml_datetime", - "winnow 0.5.40", -] - -[[package]] -name = "toml_edit" -version = "0.22.20" +version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ "indexmap 2.6.0", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.18", + "winnow 0.6.20", ] [[package]] @@ -4935,15 +5063,16 @@ dependencies = [ [[package]] name = "tonic-build" -version = "0.12.1" +version = "0.12.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "568392c5a2bd0020723e3f387891176aabafe36fd9fcd074ad309dfa0c8eb964" +checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11" dependencies = [ "prettyplease", "proc-macro2", "prost-build", + "prost-types", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] @@ -5002,9 +5131,9 @@ source = "git+https://github.com/QuantumExplorer/tower?branch=fix/indexMap2OnV04 [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -5014,20 +5143,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -5101,39 +5230,21 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicase" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] - -[[package]] -name = "unicode-bidi" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "unicode-normalization" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" -dependencies = [ - "tinyvec", -] +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-xid" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229730647fbc343e3a80e463c1db7f78f3855d3f3739bee0dda773c9a037c90a" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "untrusted" @@ -5143,31 +5254,46 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "ureq" -version = "2.10.1" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a" +checksum = "b30e6f97efe1fa43535ee241ee76967d3ff6ff3953ebb430d8d55c5393029e7b" dependencies = [ "base64 0.22.1", "flate2", + "litemap", "log", "once_cell", "rustls", "rustls-pki-types", "url", 
"webpki-roots", + "yoke", + "zerofrom", ] [[package]] name = "url" -version = "2.5.2" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", "percent-encoding", ] +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -5176,9 +5302,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "getrandom", "rand", @@ -5235,6 +5361,16 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "wallet-utils-contract" +version = "1.7.0" +dependencies = [ + "platform-value", + "platform-version", + "serde_json", + "thiserror 1.0.69", +] + [[package]] name = "want" version = "0.3.1" @@ -5271,7 +5407,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", "wasm-bindgen-shared", ] @@ -5305,7 +5441,7 @@ checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5318,7 +5454,7 @@ checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93" [[package]] name = "wasm-dpp" -version = "1.5.1" +version = "1.7.0" dependencies = [ "anyhow", "async-trait", @@ -5333,7 +5469,7 @@ dependencies = [ "serde", "serde-wasm-bindgen", "serde_json", - "thiserror", + "thiserror 1.0.69", "wasm-bindgen", "wasm-bindgen-futures", "wasm-logger", @@ -5363,9 +5499,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.3" +version = "0.26.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" dependencies = [ "rustls-pki-types", ] @@ -5611,16 +5747,16 @@ dependencies = [ [[package]] name = "winnow" -version = "0.6.18" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" dependencies = [ "memchr", ] [[package]] name = "withdrawals-contract" -version = "1.5.1" +version = "1.7.0" dependencies = [ "num_enum 0.5.11", "platform-value", @@ -5628,9 +5764,21 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "thiserror", + "thiserror 1.0.69", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "wyz" version = "0.5.1" @@ -5646,6 +5794,30 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" +[[package]] +name = "yoke" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", + "synstructure 0.13.1", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -5664,7 +5836,28 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", +] + +[[package]] +name = "zerofrom" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", + "synstructure 0.13.1", ] [[package]] @@ -5685,7 +5878,29 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.89", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", ] [[package]] @@ -5710,9 +5925,9 @@ dependencies = [ [[package]] name = "zip" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc5e4288ea4057ae23afc69a4472434a87a2495cafce6632fd1c4ec9f5cf3494" +checksum = "99d52293fc86ea7cf13971b3bb81eb21683636e7ae24c729cdaf1b7c4157a352" dependencies = [ "arbitrary", "crc32fast", @@ -5721,7 +5936,7 @@ dependencies = [ "flate2", "indexmap 2.6.0", "memchr", - "thiserror", + "thiserror 2.0.3", "zopfli", ] diff --git a/Cargo.toml b/Cargo.toml index ab03e80fb4..3b7b503758 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ members = [ "packages/simple-signer", "packages/rs-json-schema-compatibility-validator", "packages/check-features", + "packages/wallet-utils-contract" ] [workspace.package] @@ -37,3 +38,4 @@ rust-version = "1.80" tower-service = { git = "https://github.com/QuantumExplorer/tower", branch = "fix/indexMap2OnV0413" } tower-layer = { git = "https://github.com/QuantumExplorer/tower", branch = "fix/indexMap2OnV0413" } tower = { git = "https://github.com/QuantumExplorer/tower", branch = "fix/indexMap2OnV0413" 
} + diff --git a/Dockerfile b/Dockerfile index 5039a062b6..5c8ac67655 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,42 +1,66 @@ -# syntax = docker/dockerfile:1.5 +# syntax = docker/dockerfile:1.7-labs # Docker image for rs-drive-abci # # This image is divided multiple parts: # - deps-base - includes all base dependencies and some libraries # - deps-sccache - deps image with sccache included +# - deps-compilation - deps image with all compilation dependencies - it's either deps-base or deps-sccache +# - deps-rocksdb - build static rocksdb library # - deps - all deps, including wasm-bindgen-cli; built on top of either deps-base or deps-sccache -# - sources - includes full source code +# - build-planner - image used to prepare build plan for rs-drive-abci # - build-* - actual build process of given image # - drive-abci, dashmate-helper, test-suite, dapi - final images # # The following build arguments can be provided using --build-arg: # - CARGO_BUILD_PROFILE - set to `release` to build final binary, without debugging information # - NODE_ENV - node.js environment name to use to build the library -# - RUSTC_WRAPPER - set to `sccache` to enable sccache support and make the following variables available: -# - SCCACHE_GHA_ENABLED, ACTIONS_CACHE_URL, ACTIONS_RUNTIME_TOKEN - store sccache caches inside github actions -# - SCCACHE_MEMCACHED - set to memcache server URI (eg. tcp://172.17.0.1:11211) to enable sccache memcached backend # - ALPINE_VERSION - use different version of Alpine base image; requires also rust:apline... # image to be available # - USERNAME, USER_UID, USER_GID - specification of user used to run the binary +# +# # sccache cache backends +# +# To enable sccache support and make the following variables available: +# 1. For S3 buckets: +# - SCCACHE_BUCKET - S3 bucket name +# - AWS_PROFILE +# - SCCACHE_REGION +# - SCCACHE_S3_KEY_PREFIX +# - SCCACHE_ENDPOINT +# - also, AWS credentials file ($HOME/.aws/credentials) should be provided as a secret file with id=AWS +# 2. For Github Actions: +# - SCCACHE_GHA_ENABLED, ACTIONS_CACHE_URL +# - also, Github Actions token should be provided as a secret file with id=GHA +# 3. For memcached: +# - SCCACHE_MEMCACHED - set to memcache server URI (eg. tcp://172.17.0.1:11211) to enable sccache memcached backend + # # BUILD PROCESS # # 1. All these --mount... are to cache reusable info between runs. # See https://doc.rust-lang.org/cargo/guide/cargo-home.html#caching-the-cargo-home-in-ci -# 2. We add `--config net.git-fetch-with-cli=true` to address ARM build issue, -# see https://github.com/rust-lang/cargo/issues/10781#issuecomment-1441071052 -# 3. Github Actions have shared networking configured, so we need to set a random -# SCCACHE_SERVER_PORT port to avoid conflicts in case of parallel compilation +# 2. Github Actions have shared networking configured, so we need to set a random SCCACHE_SERVER_PORT port to avoid +# conflicts in case of parallel compilation. +# 3. Configuration variables are shared between runs using /root/env file. 
ARG ALPINE_VERSION=3.18 -ARG PROTOC_VERSION=27.3 -ARG RUSTC_WRAPPER + +# deps-${RUSTC_WRAPPER:-base} +# If one of SCCACHE_GHA_ENABLED, SCCACHE_BUCKET, SCCACHE_MEMCACHED is set, then deps-sccache is used, otherwise deps-base +ARG SCCACHE_GHA_ENABLED +ARG SCCACHE_BUCKET +ARG SCCACHE_MEMCACHED + +# Determine if we have sccache enabled; if yes, use deps-sccache, otherwise use deps-base as a dependency image +ARG DEPS_IMAGE=${SCCACHE_GHA_ENABLED}${SCCACHE_BUCKET}${SCCACHE_MEMCACHED} +ARG DEPS_IMAGE=${DEPS_IMAGE:+sccache} +ARG DEPS_IMAGE=deps-${DEPS_IMAGE:-base} # # DEPS: INSTALL AND CACHE DEPENDENCIES # -FROM node:20-alpine${ALPINE_VERSION} as deps-base +FROM node:20-alpine${ALPINE_VERSION} AS deps-base # # Install some dependencies @@ -53,6 +77,7 @@ RUN apk add --no-cache \ linux-headers \ llvm-static llvm-dev \ openssl-dev \ + snappy-static snappy-dev \ perl \ python3 \ unzip \ @@ -60,6 +85,13 @@ RUN apk add --no-cache \ xz \ zeromq-dev +# Configure snappy, dependency of librocksdb-sys +RUN <> /root/env +echo "export SNAPPY_LIB_DIR=/usr/lib" >> /root/env +echo "export SNAPPY_INCLUDE_DIR=/usr/include" >> /root/env +EOS + # Configure Node.js RUN npm config set --global audit false @@ -71,7 +103,6 @@ ARG TARGETARCH WORKDIR /platform -# TODO: It doesn't sharing PATH between stages, so we need "source $HOME/.cargo/env" everywhere COPY rust-toolchain.toml . RUN TOOLCHAIN_VERSION="$(grep channel rust-toolchain.toml | awk '{print $3}' | tr -d '"')" && \ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- \ @@ -80,9 +111,16 @@ RUN TOOLCHAIN_VERSION="$(grep channel rust-toolchain.toml | awk '{print $3}' | t --default-toolchain "${TOOLCHAIN_VERSION}" \ --target wasm32-unknown-unknown +ONBUILD ENV HOME=/root +ONBUILD ENV CARGO_HOME=$HOME/.cargo + +# Configure Rust toolchain +# It doesn't sharing PATH between stages, so we need "source $HOME/.cargo/env" everywhere +RUN echo 'source $HOME/.cargo/env' >> /root/env + # Install protoc - protobuf compiler # The one shipped with Alpine does not work -ARG PROTOC_VERSION +ARG PROTOC_VERSION=27.3 RUN if [[ "$TARGETARCH" == "arm64" ]] ; then export PROTOC_ARCH=aarch_64; else export PROTOC_ARCH=x86_64; fi; \ curl -Ls https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-${PROTOC_ARCH}.zip \ -o /tmp/protoc.zip && \ @@ -93,16 +131,23 @@ RUN if [[ "$TARGETARCH" == "arm64" ]] ; then export PROTOC_ARCH=aarch_64; else e # Switch to clang RUN rm /usr/bin/cc && ln -s /usr/bin/clang /usr/bin/cc -# Select whether we want dev or release -ARG CARGO_BUILD_PROFILE=dev -ENV CARGO_BUILD_PROFILE ${CARGO_BUILD_PROFILE} - ARG NODE_ENV=production -ENV NODE_ENV ${NODE_ENV} +ENV NODE_ENV=${NODE_ENV} + +# +# DEPS-SCCACHE stage +# +# This stage is used to install sccache and configure it. +# Later on, one should source /root/env before building to use sccache. +# +# Note that, due to security concerns, each stage needs to declare variables containing authentication secrets, like +# ACTIONS_RUNTIME_TOKEN, AWS_SECRET_ACCESS_KEY. This is to prevent leaking secrets to the final image. The secrets are +# loaded using docker buildx `--secret` flag and need to be explicitly mounted with `--mount=type=secret,id=SECRET_ID`. 
FROM deps-base AS deps-sccache -ARG SCCHACHE_VERSION=0.7.1 +# SCCACHE_VERSION must be the same as in github actions, to avoid cache incompatibility +ARG SCCHACHE_VERSION=0.8.2 # Install sccache for caching RUN if [[ "$TARGETARCH" == "arm64" ]] ; then export SCC_ARCH=aarch64; else export SCC_ARCH=x86_64; fi; \ @@ -114,76 +159,215 @@ RUN if [[ "$TARGETARCH" == "arm64" ]] ; then export SCC_ARCH=aarch64; else expor # # Configure sccache # -ARG RUSTC_WRAPPER -ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} # Set args below to use Github Actions cache; see https://github.com/mozilla/sccache/blob/main/docs/GHA.md ARG SCCACHE_GHA_ENABLED -ENV SCCACHE_GHA_ENABLED=${SCCACHE_GHA_ENABLED} - ARG ACTIONS_CACHE_URL -ENV ACTIONS_CACHE_URL=${ACTIONS_CACHE_URL} - -ARG ACTIONS_RUNTIME_TOKEN -ENV ACTIONS_RUNTIME_TOKEN=${ACTIONS_RUNTIME_TOKEN} # Alternative solution is to use memcache ARG SCCACHE_MEMCACHED -ENV SCCACHE_MEMCACHED=${SCCACHE_MEMCACHED} # S3 storage ARG SCCACHE_BUCKET -ENV SCCACHE_BUCKET=${SCCACHE_BUCKET} - +ARG AWS_PROFILE ARG SCCACHE_REGION -ENV SCCACHE_REGION=${SCCACHE_REGION} - -# Disable incremental buildings, not supported by sccache -ARG CARGO_INCREMENTAL=false -ENV CARGO_INCREMENTAL=${CARGO_INCREMENTAL} +ARG SCCACHE_S3_KEY_PREFIX +ARG SCCACHE_ENDPOINT + +# Generate sccache configuration variables and save them to /root/env +# +# We only enable one cache at a time. Setting env variables belonging to multiple cache backends may fail the build. +RUN --mount=type=secret,id=AWS <> /root/env + echo "export ACTIONS_CACHE_URL=${ACTIONS_CACHE_URL}" >> /root/env + # ACTIONS_RUNTIME_TOKEN is a secret so we quote it here, and it will be loaded when `source /root/env` is run + echo 'export ACTIONS_RUNTIME_TOKEN="$(cat /run/secrets/GHA)"' >> /root/env + + ### AWS S3 ### + elif [ -n "${SCCACHE_BUCKET}" ]; then + echo "export SCCACHE_BUCKET='${SCCACHE_BUCKET}'" >> /root/env + echo "export SCCACHE_REGION='${SCCACHE_REGION}'" >> /root/env + [ -n "${AWS_PROFILE}" ] && echo "export AWS_PROFILE='${AWS_PROFILE}'" >> /root/env + echo "export SCCACHE_ENDPOINT='${SCCACHE_ENDPOINT}'" >> /root/env + echo "export SCCACHE_S3_KEY_PREFIX='${SCCACHE_S3_KEY_PREFIX}'" >> /root/env + + # Configure AWS credentials + mkdir --mode=0700 -p "$HOME/.aws" + ln -s /run/secrets/AWS "$HOME/.aws/credentials" + echo "export AWS_SHARED_CREDENTIALS_FILE=$HOME/.aws/credentials" >> /root/env + + # Check if AWS credentials file is mounted correctly, eg. 
+        # --mount=type=secret,id=AWS
+        echo '[ -e "${AWS_SHARED_CREDENTIALS_FILE}" ] || {
+            echo "$(id -u): Cannot read ${AWS_SHARED_CREDENTIALS_FILE}; did you use RUN --mount=type=secret,id=AWS ?";
+            exit 1;
+        }' >> /root/env
+
+    ### memcached ###
+    elif [ -n "${SCCACHE_MEMCACHED}" ]; then
+        # memcached
+        echo "export SCCACHE_MEMCACHED='${SCCACHE_MEMCACHED}'" >> /root/env
+    else
+        echo "Error: cannot determine sccache cache backend" >&2
+        exit 1
+    fi
+
+    echo "export SCCACHE_SERVER_PORT=$((RANDOM+1025))" >> /root/env
+
+    # Configure compilers to use sccache
+    echo "export CXX='sccache clang++'" >> /root/env
+    echo "export CC='sccache clang'" >> /root/env
+    echo "export RUSTC_WRAPPER=sccache" >> /root/env
+    # Disable Rust incremental builds, not supported by sccache
+    echo 'export CARGO_INCREMENTAL=0' >> /root/env
+
+    # for debugging, we display what we generated
+    cat /root/env
+EOS

+# Image containing compilation dependencies; used to overcome lack of interpolation in COPY --from
+FROM ${DEPS_IMAGE} AS deps-compilation
+# Stage intentionally left empty

+#
+# BUILD ROCKSDB STATIC LIBRARY
+#

+FROM deps-compilation AS deps-rocksdb

+RUN mkdir -p /tmp/rocksdb
+WORKDIR /tmp/rocksdb

+# Debugging snippet that verifies sccache operation, kept for reference:
+# RUN --mount=type=secret,id=AWS <<EOS
+#     source /root/env
+#     ... > a.c
+#     sccache clang -o a.o -c a.c
+#     cd -
+#     sccache -s
+# EOS

+RUN --mount=type=secret,id=AWS <<EOS
+# (RocksDB build commands elided in the source; they produce a static librocksdb installed under /opt/rocksdb)
+EOS
+
+RUN <<EOS
+echo "export ROCKSDB_LIB_DIR=/opt/rocksdb/usr/local/lib" >> /root/env
+echo "export ROCKSDB_INCLUDE_DIR=/opt/rocksdb/usr/local/include" >> /root/env
+EOS

-ARG AWS_ACCESS_KEY_ID
-ARG AWS_SECRET_ACCESS_KEY

 #
 # DEPS: FULL DEPENDENCIES LIST
 #
-# This is separate from `deps` to use sccache for caching
-FROM deps-${RUSTC_WRAPPER:-base} AS deps
+FROM deps-rocksdb AS deps

-ARG SCCACHE_S3_KEY_PREFIX
-ENV SCCACHE_S3_KEY_PREFIX=${SCCACHE_S3_KEY_PREFIX}/${TARGETARCH}/linux-musl

 WORKDIR /platform

-# Install wasm-bindgen-cli in the same profile as other components, to sacrifice some performance & disk space to gain
-# better build caching
-RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOME}/registry/index \
-    --mount=type=cache,sharing=shared,id=cargo_registry_cache,target=${CARGO_HOME}/registry/cache \
-    --mount=type=cache,sharing=shared,id=cargo_git,target=${CARGO_HOME}/git/db \
-    --mount=type=cache,sharing=shared,id=target_${TARGETARCH},target=/platform/target \
-    export SCCACHE_SERVER_PORT=$((RANDOM+1025)) && \
-    source $HOME/.cargo/env && \
-    if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \
-    RUSTFLAGS="-C target-feature=-crt-static" \
-    CARGO_TARGET_DIR="/platform/target" \
-    # TODO: Build wasm with build.rs
-    # Meanwhile if you want to update wasm-bindgen you also need to update version in:
-    # - packages/wasm-dpp/Cargo.toml
-    # - packages/wasm-dpp/scripts/build-wasm.sh
-    cargo install --profile "$CARGO_BUILD_PROFILE" wasm-bindgen-cli@0.2.86 cargo-chef@0.1.67 --locked
+# Download and install cargo-binstall
+ENV BINSTALL_VERSION=1.10.11
+RUN --mount=type=secret,id=AWS \
+    set -ex; \
+    source /root/env; \
+    if [ "$TARGETARCH" = "amd64" ]; then \
+        CARGO_BINSTALL_ARCH="x86_64-unknown-linux-musl"; \
+    elif [ "$TARGETARCH" = "arm64" ]; then \
+        CARGO_BINSTALL_ARCH="aarch64-unknown-linux-musl"; \
+    else \
+        echo "Unsupported architecture: $TARGETARCH"; exit 1; \
+    fi; \
+    # Construct download URL
+    DOWNLOAD_URL="https://github.com/cargo-bins/cargo-binstall/releases/download/v${BINSTALL_VERSION}/cargo-binstall-${CARGO_BINSTALL_ARCH}.tgz"; \
+    # Download and extract the cargo-binstall binary
+    curl -A "Mozilla/5.0 (X11; Linux
x86_64; rv:60.0) Gecko/20100101 Firefox/81.0" -L --proto '=https' --tlsv1.2 -sSf "$DOWNLOAD_URL" | tar -xvzf -; \ + ./cargo-binstall -y --force cargo-binstall@${BINSTALL_VERSION}; \ + rm ./cargo-binstall; \ + cargo binstall -V + +RUN --mount=type=secret,id=AWS \ + source /root/env; \ + cargo binstall wasm-bindgen-cli@0.2.86 cargo-chef@0.1.67 \ + --locked \ + --no-discover-github-token \ + --disable-telemetry \ + --no-track \ + --no-confirm + + +# Select whether we want dev or release +ONBUILD ARG CARGO_BUILD_PROFILE=dev # # Rust build planner to speed up builds # FROM deps AS build-planner + WORKDIR /platform -COPY . . -RUN source $HOME/.cargo/env && \ - cargo chef prepare --recipe-path recipe.json -# Workaround: as we cache dapi-grpc, its build.rs is not rerun, so we need to touch it -RUN touch /platform/packages/dapi-grpc/build.rs +COPY --parents \ + Cargo.lock \ + Cargo.toml \ + rust-toolchain.toml \ + .cargo \ + packages/dapi-grpc \ + packages/rs-dapi-grpc-macros \ + packages/rs-dpp \ + packages/rs-drive \ + packages/rs-platform-value \ + packages/rs-platform-serialization \ + packages/rs-platform-serialization-derive \ + packages/rs-platform-version \ + packages/rs-platform-versioning \ + packages/rs-platform-value-convertible \ + packages/rs-drive-abci \ + packages/dashpay-contract \ + packages/withdrawals-contract \ + packages/masternode-reward-shares-contract \ + packages/feature-flags-contract \ + packages/dpns-contract \ + packages/wallet-utils-contract \ + packages/data-contracts \ + packages/strategy-tests \ + packages/simple-signer \ + packages/rs-json-schema-compatibility-validator \ + # TODO: We don't need those. Maybe dynamically remove them from workspace or move outside of monorepo? + packages/rs-drive-proof-verifier \ + packages/wasm-dpp \ + packages/rs-dapi-client \ + packages/rs-sdk \ + packages/check-features \ + /platform/ + +RUN --mount=type=secret,id=AWS \ + source /root/env && \ + cargo chef prepare $RELEASE --recipe-path recipe.json # # STAGE: BUILD RS-DRIVE-ABCI @@ -193,29 +377,65 @@ FROM deps AS build-drive-abci SHELL ["/bin/bash", "-o", "pipefail","-e", "-x", "-c"] -ARG SCCACHE_S3_KEY_PREFIX -ENV SCCACHE_S3_KEY_PREFIX=${SCCACHE_S3_KEY_PREFIX}/${TARGETARCH}/linux-musl - WORKDIR /platform -COPY --from=build-planner /platform/recipe.json recipe.json +COPY --from=build-planner --parents /platform/recipe.json /platform/.cargo / # Build dependencies - this is the caching Docker layer! RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOME}/registry/index \ --mount=type=cache,sharing=shared,id=cargo_registry_cache,target=${CARGO_HOME}/registry/cache \ --mount=type=cache,sharing=shared,id=cargo_git,target=${CARGO_HOME}/git/db \ - --mount=type=cache,sharing=shared,id=target_${TARGETARCH},target=/platform/target \ - source $HOME/.cargo/env && \ - export SCCACHE_SERVER_PORT=$((RANDOM+1025)) && \ - if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \ + --mount=type=secret,id=AWS \ + set -ex; \ + source /root/env && \ + if [[ "${CARGO_BUILD_PROFILE}" == "release" ]] ; then \ + mv .cargo/config-release.toml .cargo/config.toml; \ + else \ + export FEATURES_FLAG="--features=console,grovedbg" ; \ + fi && \ cargo chef cook \ --recipe-path recipe.json \ --profile "$CARGO_BUILD_PROFILE" \ --package drive-abci \ + ${FEATURES_FLAG} \ --locked && \ - if [[ "${RUSTC_WRAPPER}" == "sccache" ]] ; then sccache --show-stats; fi - -COPY . . 
+    if [[ -x /usr/bin/sccache ]]; then sccache --show-stats; fi
+
+COPY --parents \
+    Cargo.lock \
+    Cargo.toml \
+    rust-toolchain.toml \
+    .cargo \
+    packages/dapi-grpc \
+    packages/rs-dapi-grpc-macros \
+    packages/rs-dpp \
+    packages/rs-drive \
+    packages/rs-platform-value \
+    packages/rs-platform-serialization \
+    packages/rs-platform-serialization-derive \
+    packages/rs-platform-version \
+    packages/rs-platform-versioning \
+    packages/rs-platform-value-convertible \
+    packages/rs-drive-abci \
+    packages/dashpay-contract \
+    packages/wallet-utils-contract \
+    packages/withdrawals-contract \
+    packages/masternode-reward-shares-contract \
+    packages/feature-flags-contract \
+    packages/dpns-contract \
+    packages/data-contracts \
+    packages/strategy-tests \
+    # These packages are part of the workspace and must be here, otherwise it builds from scratch
+    # See todo below
+    packages/simple-signer \
+    packages/rs-json-schema-compatibility-validator \
+    # TODO: We don't need those. Maybe dynamically remove them from workspace or move outside of monorepo?
+    packages/rs-drive-proof-verifier \
+    packages/wasm-dpp \
+    packages/rs-dapi-client \
+    packages/rs-sdk \
+    packages/check-features \
+    /platform/

 RUN mkdir /artifacts

@@ -223,34 +443,33 @@
 RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOME}/registry/index \
     --mount=type=cache,sharing=shared,id=cargo_registry_cache,target=${CARGO_HOME}/registry/cache \
     --mount=type=cache,sharing=shared,id=cargo_git,target=${CARGO_HOME}/git/db \
-    --mount=type=cache,sharing=shared,id=target_${TARGETARCH},target=/platform/target \
-    source $HOME/.cargo/env && \
-    export SCCACHE_SERVER_PORT=$((RANDOM+1025)) && \
+    --mount=type=secret,id=AWS \
+    set -ex; \
+    source /root/env && \
     if [[ "${CARGO_BUILD_PROFILE}" == "release" ]] ; then \
         mv .cargo/config-release.toml .cargo/config.toml && \
         export OUT_DIRECTORY=release ; \
     else \
         export FEATURES_FLAG="--features=console,grovedbg" ; \
         export OUT_DIRECTORY=debug ; \
     fi && \
-    if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \
+    # Workaround: as we cache dapi-grpc, its build.rs is not rerun, so we need to touch it
+    echo "// $(date) " >> /platform/packages/dapi-grpc/build.rs && \
     cargo build \
         --profile "${CARGO_BUILD_PROFILE}" \
         --package drive-abci \
         ${FEATURES_FLAG} \
         --locked && \
-    cp /platform/target/${OUT_DIRECTORY}/drive-abci /artifacts/ && \
-    if [[ "${RUSTC_WRAPPER}" == "sccache" ]] ; then sccache --show-stats; fi
+    cp target/${OUT_DIRECTORY}/drive-abci /artifacts/ && \
+    if [[ -x /usr/bin/sccache ]]; then sccache --show-stats; fi && \
+    # Remove /platform to reduce layer size
+    rm -rf /platform

 #
 # STAGE: BUILD JAVASCRIPT INTERMEDIATE IMAGE
 #
 FROM deps AS build-js

-ARG SCCACHE_S3_KEY_PREFIX
-ENV SCCACHE_S3_KEY_PREFIX=${SCCACHE_S3_KEY_PREFIX}/wasm/wasm32
-
 WORKDIR /platform

 COPY --from=build-planner /platform/recipe.json recipe.json
@@ -259,34 +478,68 @@ COPY --from=build-planner /platform/recipe.json recipe.json
 RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOME}/registry/index \
     --mount=type=cache,sharing=shared,id=cargo_registry_cache,target=${CARGO_HOME}/registry/cache \
     --mount=type=cache,sharing=shared,id=cargo_git,target=${CARGO_HOME}/git/db \
-    --mount=type=cache,sharing=shared,id=target_${TARGETARCH},target=/platform/target \
-    source $HOME/.cargo/env && \
-    export SCCACHE_SERVER_PORT=$((RANDOM+1025)) && \
-    if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \
+
--mount=type=secret,id=AWS \ + source /root/env && \ cargo chef cook \ --recipe-path recipe.json \ --profile "$CARGO_BUILD_PROFILE" \ --package wasm-dpp \ --target wasm32-unknown-unknown \ --locked && \ - if [[ "${RUSTC_WRAPPER}" == "sccache" ]] ; then sccache --show-stats; fi - -COPY . . + if [[ -x /usr/bin/sccache ]]; then sccache --show-stats; fi + + +# Rust deps +COPY --parents \ + Cargo.lock \ + Cargo.toml \ + rust-toolchain.toml \ + .cargo \ + packages/rs-dpp \ + packages/rs-platform-value \ + packages/rs-platform-serialization \ + packages/rs-platform-serialization-derive \ + packages/rs-platform-version \ + packages/rs-platform-versioning \ + packages/rs-platform-value-convertible \ + packages/rs-json-schema-compatibility-validator \ + # Common + packages/wasm-dpp \ + packages/dashpay-contract \ + packages/withdrawals-contract \ + packages/wallet-utils-contract \ + packages/masternode-reward-shares-contract \ + packages/feature-flags-contract \ + packages/dpns-contract \ + packages/data-contracts \ + packages/dapi-grpc \ + # JS deps + .yarn \ + .pnp* \ + .yarnrc.yml \ + yarn.lock \ + package.json \ + packages/js-grpc-common \ + packages/js-dapi-client \ + packages/wallet-lib \ + packages/js-dash-sdk \ + packages/dash-spv \ + /platform/ RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOME}/registry/index \ --mount=type=cache,sharing=shared,id=cargo_registry_cache,target=${CARGO_HOME}/registry/cache \ --mount=type=cache,sharing=shared,id=cargo_git,target=${CARGO_HOME}/git/db \ - --mount=type=cache,sharing=shared,id=target_wasm,target=/platform/target \ --mount=type=cache,sharing=shared,id=unplugged_${TARGETARCH},target=/tmp/unplugged \ - source $HOME/.cargo/env && \ + --mount=type=secret,id=AWS \ + source /root/env && \ cp -R /tmp/unplugged /platform/.yarn/ && \ yarn install --inline-builds && \ cp -R /platform/.yarn/unplugged /tmp/ && \ - export SCCACHE_SERVER_PORT=$((RANDOM+1025)) && \ - if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \ export SKIP_GRPC_PROTO_BUILD=1 && \ yarn build && \ - if [[ "${RUSTC_WRAPPER}" == "sccache" ]]; then sccache --show-stats; fi + if [[ -x /usr/bin/sccache ]]; then sccache --show-stats; fi && \ + # Remove target directory and rust packages to save space + rm -rf target packages/rs-* # # STAGE: FINAL DRIVE-ABCI IMAGE @@ -306,7 +559,7 @@ RUN mkdir -p /var/log/dash \ ${REJECTIONS_PATH} COPY --from=build-drive-abci /artifacts/drive-abci /usr/bin/drive-abci -COPY --from=build-drive-abci /platform/packages/rs-drive-abci/.env.mainnet /var/lib/dash/rs-drive-abci/.env +COPY packages/rs-drive-abci/.env.mainnet /var/lib/dash/rs-drive-abci/.env # Create a volume VOLUME /var/lib/dash/rs-drive-abci/db @@ -343,6 +596,9 @@ EXPOSE 29090 # FROM build-js AS build-dashmate-helper +# Copy dashmate package +COPY packages/dashmate packages/dashmate + # Install Test Suite specific dependencies using previous # node_modules directory to reuse built binaries RUN yarn workspaces focus --production dashmate @@ -359,6 +615,7 @@ LABEL description="Dashmate Helper Node.JS" WORKDIR /platform +# TODO: Do one COPY with --parents COPY --from=build-dashmate-helper /platform/.yarn /platform/.yarn COPY --from=build-dashmate-helper /platform/package.json /platform/yarn.lock /platform/.yarnrc.yml /platform/.pnp* /platform/ @@ -370,6 +627,7 @@ COPY --from=build-dashmate-helper /platform/packages/js-dapi-client packages/js- COPY --from=build-dashmate-helper /platform/packages/js-grpc-common packages/js-grpc-common COPY 
--from=build-dashmate-helper /platform/packages/dapi-grpc packages/dapi-grpc COPY --from=build-dashmate-helper /platform/packages/dash-spv packages/dash-spv +COPY --from=build-dashmate-helper /platform/packages/wallet-utils-contract packages/wallet-utils-contract COPY --from=build-dashmate-helper /platform/packages/withdrawals-contract packages/withdrawals-contract COPY --from=build-dashmate-helper /platform/packages/masternode-reward-shares-contract packages/masternode-reward-shares-contract COPY --from=build-dashmate-helper /platform/packages/feature-flags-contract packages/feature-flags-contract @@ -387,6 +645,8 @@ ENTRYPOINT ["/platform/packages/dashmate/docker/entrypoint.sh"] # FROM build-js AS build-test-suite +COPY packages/platform-test-suite packages/platform-test-suite + # Install Test Suite specific dependencies using previous # node_modules directory to reuse built binaries RUN yarn workspaces focus --production @dashevo/platform-test-suite @@ -404,38 +664,6 @@ LABEL description="Dash Platform test suite" WORKDIR /platform COPY --from=build-test-suite /platform /platform - - -# Copy yarn and Cargo files -COPY --from=build-test-suite /platform/.yarn /platform/.yarn -COPY --from=build-test-suite /platform/package.json /platform/yarn.lock \ - /platform/.yarnrc.yml /platform/.pnp.* /platform/Cargo.lock /platform/rust-toolchain.toml ./ -# Use Cargo.toml.template instead of Cargo.toml from project root to avoid copying unnecessary Rust packages -COPY --from=build-test-suite /platform/packages/platform-test-suite/Cargo.toml.template ./Cargo.toml - -# Copy only necessary packages from monorepo -COPY --from=build-test-suite /platform/packages/platform-test-suite packages/platform-test-suite -COPY --from=build-test-suite /platform/packages/dashpay-contract packages/dashpay-contract -COPY --from=build-test-suite /platform/packages/wallet-lib packages/wallet-lib -COPY --from=build-test-suite /platform/packages/js-dash-sdk packages/js-dash-sdk -COPY --from=build-test-suite /platform/packages/js-dapi-client packages/js-dapi-client -COPY --from=build-test-suite /platform/packages/js-grpc-common packages/js-grpc-common -COPY --from=build-test-suite /platform/packages/dapi-grpc packages/dapi-grpc -COPY --from=build-test-suite /platform/packages/dash-spv packages/dash-spv -COPY --from=build-test-suite /platform/packages/withdrawals-contract packages/withdrawals-contract -COPY --from=build-test-suite /platform/packages/rs-platform-value packages/rs-platform-value -COPY --from=build-test-suite /platform/packages/masternode-reward-shares-contract packages/masternode-reward-shares-contract -COPY --from=build-test-suite /platform/packages/feature-flags-contract packages/feature-flags-contract -COPY --from=build-test-suite /platform/packages/dpns-contract packages/dpns-contract -COPY --from=build-test-suite /platform/packages/data-contracts packages/data-contracts -COPY --from=build-test-suite /platform/packages/rs-platform-serialization packages/rs-platform-serialization -COPY --from=build-test-suite /platform/packages/rs-platform-serialization-derive packages/rs-platform-serialization-derive -COPY --from=build-test-suite /platform/packages/rs-platform-version packages/rs-platform-version -COPY --from=build-test-suite /platform/packages/rs-platform-versioning packages/rs-platform-versioning -COPY --from=build-test-suite /platform/packages/rs-platform-value-convertible packages/rs-platform-value-convertible -COPY --from=build-test-suite /platform/packages/rs-dpp packages/rs-dpp -COPY 
--from=build-test-suite /platform/packages/wasm-dpp packages/wasm-dpp - COPY --from=build-test-suite /platform/packages/platform-test-suite/.env.example /platform/packages/platform-test-suite/.env EXPOSE 2500 2501 2510 @@ -447,6 +675,8 @@ ENTRYPOINT ["/platform/packages/platform-test-suite/bin/test.sh"] # FROM build-js AS build-dapi +COPY packages/dapi packages/dapi + # Install Test Suite specific dependencies using previous # node_modules directory to reuse built binaries RUN yarn workspaces focus --production @dashevo/dapi @@ -464,6 +694,7 @@ RUN apk add --no-cache zeromq-dev WORKDIR /platform/packages/dapi +# TODO: Do one COPY with --parents COPY --from=build-dapi /platform/.yarn /platform/.yarn COPY --from=build-dapi /platform/package.json /platform/yarn.lock /platform/.yarnrc.yml /platform/.pnp* /platform/ # List of required dependencies. Based on: @@ -472,7 +703,6 @@ COPY --from=build-dapi /platform/packages/dapi /platform/packages/dapi COPY --from=build-dapi /platform/packages/dapi-grpc /platform/packages/dapi-grpc COPY --from=build-dapi /platform/packages/js-grpc-common /platform/packages/js-grpc-common COPY --from=build-dapi /platform/packages/wasm-dpp /platform/packages/wasm-dpp -COPY --from=build-dapi /platform/packages/js-dapi-client /platform/packages/js-dapi-client RUN cp /platform/packages/dapi/.env.example /platform/packages/dapi/.env diff --git a/README.md b/README.md index 3b2bd65ecb..8c5254f33c 100644 --- a/README.md +++ b/README.md @@ -61,7 +61,7 @@ this repository may be used on the following networks: in terminal run `echo 'export PATH="/opt/homebrew/opt/llvm/bin:$PATH"' >> ~/.zshrc` or `echo 'export PATH="/opt/homebrew/opt/llvm/bin:$PATH"' >> ~/.bash_profile` depending on your default shell. You can find your default shell with `echo $SHELL` - Reload your shell with `source ~/.zshrc` or `source ~/.bash_profile` - - `cargo install wasm-bindgen-cli@0.2.85` + - `cargo install wasm-bindgen-cli@0.2.86` - *double-check that wasm-bindgen-cli version above matches wasm-bindgen version in Cargo.lock file* - *Depending on system, additional packages may need to be installed as a prerequisite for wasm-bindgen-cli. If anything is missing, installation will error and prompt what packages are missing (i.e. 
clang, llvm, libssl-dev)* - essential build tools - example for Debian/Ubuntu: `apt install -y build-essential libssl-dev pkg-config clang cmake llvm` diff --git a/package.json b/package.json index d75d8a0ee9..1fb1219cca 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/platform", - "version": "1.5.1", + "version": "1.7.0", "private": true, "scripts": { "setup": "yarn install && yarn run build && yarn run configure", @@ -57,6 +57,7 @@ "packages/feature-flags-contract", "packages/js-dapi-client", "packages/wallet-lib", + "packages/wallet-utils-contract", "packages/js-dash-sdk", "packages/dapi", "packages/dashmate", @@ -92,7 +93,9 @@ "ws": "^8.17.1", "body-parser": "^1.20.3", "path-to-regexp": "^1.9.0", - "cookie": "^0.7.0" + "cookie": "^0.7.0", + "cross-spawn": "^7.0.5", + "nanoid": "^3.3.8" }, "dependencies": { "node-gyp": "^10.0.1" diff --git a/packages/bench-suite/package.json b/packages/bench-suite/package.json index 362166e446..a6aafba96c 100644 --- a/packages/bench-suite/package.json +++ b/packages/bench-suite/package.json @@ -1,7 +1,7 @@ { "name": "@dashevo/bench-suite", "private": true, - "version": "1.5.1", + "version": "1.7.0", "description": "Dash Platform benchmark tool", "scripts": { "bench": "node ./bin/bench.js", diff --git a/packages/check-features/Cargo.toml b/packages/check-features/Cargo.toml index 8f12fca91f..dfb68a8e90 100644 --- a/packages/check-features/Cargo.toml +++ b/packages/check-features/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "check-features" -version = "1.5.1" +version = "1.7.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/packages/dapi-grpc/Cargo.toml b/packages/dapi-grpc/Cargo.toml index 446de30b50..6444245d9e 100644 --- a/packages/dapi-grpc/Cargo.toml +++ b/packages/dapi-grpc/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "dapi-grpc" description = "GRPC client for Dash Platform" -version = "1.5.1" +version = "1.7.0" authors = [ "Samuel Westrich ", "Igor Markin ", @@ -35,7 +35,7 @@ mocks = ["serde", "dep:serde_json"] [dependencies] prost = { version = "0.13" } futures-core = "0.3.30" -tonic = { version = "0.12", features = [ +tonic = { version = "0.12.3", features = [ "codegen", "prost", ], default-features = false } @@ -49,7 +49,7 @@ dapi-grpc-macros = { path = "../rs-dapi-grpc-macros" } platform-version = { path = "../rs-platform-version" } [build-dependencies] -tonic-build = { version = "0.12" } +tonic-build = { version = "0.12.3" } [lib] diff --git a/packages/dapi-grpc/build.rs b/packages/dapi-grpc/build.rs index f70b685fbd..642b614ab9 100644 --- a/packages/dapi-grpc/build.rs +++ b/packages/dapi-grpc/build.rs @@ -289,7 +289,7 @@ impl MappingConfig { create_dir_all(&self.out_dir)?; self.builder - .compile(&[self.protobuf_file], &self.proto_includes) + .compile_protos(&[self.protobuf_file], &self.proto_includes) } } diff --git a/packages/dapi-grpc/package.json b/packages/dapi-grpc/package.json index 7f917c9d00..fb7696339f 100644 --- a/packages/dapi-grpc/package.json +++ b/packages/dapi-grpc/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/dapi-grpc", - "version": "1.5.1", + "version": "1.7.0", "description": "DAPI GRPC definition file and generated clients", "browser": "browser.js", "main": "node.js", diff --git a/packages/dapi/package.json b/packages/dapi/package.json index b49f84fb35..e1175fee93 100644 --- a/packages/dapi/package.json +++ b/packages/dapi/package.json @@ -1,7 +1,7 @@ { "name": "@dashevo/dapi", "private": true, - "version": 
"1.5.1", + "version": "1.7.0", "description": "A decentralized API for the Dash network", "scripts": { "api": "node scripts/api.js", diff --git a/packages/dash-spv/package.json b/packages/dash-spv/package.json index a7b5c17505..1c415d01cd 100644 --- a/packages/dash-spv/package.json +++ b/packages/dash-spv/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/dash-spv", - "version": "2.5.1", + "version": "2.7.0", "description": "Repository containing SPV functions used by @dashevo", "main": "index.js", "scripts": { diff --git a/packages/dashmate/configs/defaults/getBaseConfigFactory.js b/packages/dashmate/configs/defaults/getBaseConfigFactory.js index e26e67ca50..68d5258ff9 100644 --- a/packages/dashmate/configs/defaults/getBaseConfigFactory.js +++ b/packages/dashmate/configs/defaults/getBaseConfigFactory.js @@ -53,7 +53,7 @@ export default function getBaseConfigFactory() { port: 3001, }, docker: { - image: 'dashpay/dashd:21', + image: 'dashpay/dashd:22', commandArgs: [], }, p2p: { @@ -310,7 +310,7 @@ export default function getBaseConfigFactory() { tenderdash: { mode: 'full', docker: { - image: 'dashpay/tenderdash:1.3', + image: 'dashpay/tenderdash:1', }, p2p: { host: '0.0.0.0', diff --git a/packages/dashmate/configs/getConfigFileMigrationsFactory.js b/packages/dashmate/configs/getConfigFileMigrationsFactory.js index 862f3abee3..fd05cab111 100644 --- a/packages/dashmate/configs/getConfigFileMigrationsFactory.js +++ b/packages/dashmate/configs/getConfigFileMigrationsFactory.js @@ -1032,6 +1032,30 @@ export default function getConfigFileMigrationsFactory(homeDir, defaultConfigs) }); return configFile; }, + '1.6.0': (configFile) => { + Object.entries(configFile.configs) + .forEach(([, options]) => { + options.platform.drive.abci.docker.image = 'dashpay/drive:1-dev'; + options.platform.dapi.api.docker.image = 'dashpay/dapi:1-dev'; + }); + return configFile; + }, + '1.6.1': (configFile) => { + Object.entries(configFile.configs) + .forEach(([, options]) => { + options.platform.drive.abci.docker.image = 'dashpay/drive:1'; + options.platform.dapi.api.docker.image = 'dashpay/dapi:1'; + }); + return configFile; + }, + '1.7.0': (configFile) => { + Object.entries(configFile.configs) + .forEach(([, options]) => { + options.core.docker.image = 'dashpay/dashd:22'; + options.platform.drive.tenderdash.docker.image = 'dashpay/tenderdash:1'; + }); + return configFile; + }, }; } diff --git a/packages/dashmate/package.json b/packages/dashmate/package.json index 62a7fe591e..f3d8964911 100644 --- a/packages/dashmate/package.json +++ b/packages/dashmate/package.json @@ -1,6 +1,6 @@ { "name": "dashmate", - "version": "1.5.1", + "version": "1.7.0", "description": "Distribution package for Dash node installation", "scripts": { "lint": "eslint .", diff --git a/packages/dashmate/src/commands/status/masternode.js b/packages/dashmate/src/commands/status/masternode.js index cbc34cd4ca..df4ce999f5 100644 --- a/packages/dashmate/src/commands/status/masternode.js +++ b/packages/dashmate/src/commands/status/masternode.js @@ -22,8 +22,6 @@ export default class MasternodeStatusCommand extends ConfigBaseCommand { /** * @param {Object} args * @param {Object} flags - * @param {DockerCompose} dockerCompose - * @param {createRpcClient} createRpcClient * @param {Config} config * @param getMasternodeScope getMasternodeScope * @return {Promise} @@ -31,8 +29,6 @@ export default class MasternodeStatusCommand extends ConfigBaseCommand { async runWithDependencies( args, flags, - dockerCompose, - createRpcClient, config, getMasternodeScope, ) { 
diff --git a/packages/dashmate/src/listr/tasks/doctor/collectSamplesTaskFactory.js b/packages/dashmate/src/listr/tasks/doctor/collectSamplesTaskFactory.js index 3909edff78..b36def7246 100644 --- a/packages/dashmate/src/listr/tasks/doctor/collectSamplesTaskFactory.js +++ b/packages/dashmate/src/listr/tasks/doctor/collectSamplesTaskFactory.js @@ -296,7 +296,7 @@ export default function collectSamplesTaskFactory( // eslint-disable-next-line no-param-reassign task.output = 'Reading Drive metrics'; - const url = `http://${config.get('platform.drive.abci.rpc.host')}:${config.get('platform.drive.abci.rpc.port')}/metrics`; + const url = `http://${config.get('platform.drive.abci.metrics.host')}:${config.get('platform.drive.abci.metrics.port')}/metrics`; const result = fetchTextOrError(url); diff --git a/packages/dashmate/src/listr/tasks/setup/local/configureCoreTaskFactory.js b/packages/dashmate/src/listr/tasks/setup/local/configureCoreTaskFactory.js index d059e9c860..522509b739 100644 --- a/packages/dashmate/src/listr/tasks/setup/local/configureCoreTaskFactory.js +++ b/packages/dashmate/src/listr/tasks/setup/local/configureCoreTaskFactory.js @@ -2,7 +2,6 @@ import { Listr } from 'listr2'; import { Observable } from 'rxjs'; import DashCoreLib from '@dashevo/dashcore-lib'; import waitForNodesToHaveTheSameHeight from '../../../../core/waitForNodesToHaveTheSameHeight.js'; -import waitForNodesToHaveTheSameSporks from '../../../../core/waitForNodesToHaveTheSameSporks.js'; import { NETWORK_LOCAL, HPMN_COLLATERAL_AMOUNT } from '../../../../constants.js'; @@ -93,9 +92,40 @@ export default function configureCoreTaskFactory( }, }, { - title: 'Activating DIP3', + title: 'Create wallet', + task: async () => { + const disablePrivateKeys = false; + const createBlankWallet = false; + const walletPassphrase = ''; + const avoidReuse = false; + const loadOnStartup = true; + const descriptors = false; + + await ctx.coreService.getRpcClient().createWallet( + 'main', + disablePrivateKeys, + createBlankWallet, + walletPassphrase, + avoidReuse, + descriptors, + loadOnStartup, + ); + }, + }, + { + title: 'Generating funds to use as a collateral for masternodes', + task: () => { + const amount = HPMN_COLLATERAL_AMOUNT * configGroup.length; + return generateToAddressTask( + configGroup.find((c) => c.getName() === 'local_seed'), + amount, + ); + }, + }, + { + title: 'Activating v19 and v20', task: () => new Observable(async (observer) => { - const dip3ActivationHeight = 1000; + const dip3ActivationHeight = 901; const blocksToGenerateInOneStep = 10; let blocksGenerated = 0; @@ -126,37 +156,6 @@ export default function configureCoreTaskFactory( return this; }), }, - { - title: 'Create wallet', - task: async () => { - const disablePrivateKeys = false; - const createBlankWallet = false; - const walletPassphrase = ''; - const avoidReuse = false; - const loadOnStartup = true; - const descriptors = false; - - await ctx.coreService.getRpcClient().createWallet( - 'main', - disablePrivateKeys, - createBlankWallet, - walletPassphrase, - avoidReuse, - descriptors, - loadOnStartup, - ); - }, - }, - { - title: 'Generating funds to use as a collateral for masternodes', - task: () => { - const amount = HPMN_COLLATERAL_AMOUNT * configGroup.length; - return generateToAddressTask( - configGroup.find((c) => c.getName() === 'local_seed'), - amount, - ); - }, - }, { title: 'Register masternodes', task: async () => { @@ -276,51 +275,6 @@ export default function configureCoreTaskFactory( ); }, }, - { - title: 'Wait for nodes to have the same 
sporks', - task: () => waitForNodesToHaveTheSameSporks(ctx.coreServices), - }, - { - title: 'Activating DIP8 to enable ChainLocks', - task: () => new Observable(async (observer) => { - let isDip8Activated = false; - let blockchainInfo; - - let blocksGenerated = 0; - - const blocksToGenerateInOneStep = 10; - - do { - ({ - result: blockchainInfo, - } = await ctx.seedCoreService.getRpcClient().getBlockchainInfo()); - - isDip8Activated = blockchainInfo.softforks.dip0008.active; - - if (isDip8Activated) { - break; - } - - await generateBlocks( - ctx.seedCoreService, - blocksToGenerateInOneStep, - NETWORK_LOCAL, - // eslint-disable-next-line no-loop-func - (blocks) => { - blocksGenerated += blocks; - - observer.next(`${blocksGenerated} blocks generated`); - }, - ); - } while (!isDip8Activated); - - observer.next(`DIP8 has been activated at height ${blockchainInfo.softforks.dip0008.height}`); - - observer.complete(); - - return this; - }), - }, { title: 'Wait for nodes to have the same height', task: () => waitForNodesToHaveTheSameHeight( @@ -347,47 +301,6 @@ export default function configureCoreTaskFactory( title: 'Wait for quorums to be enabled', task: () => enableCoreQuorumsTask(), }, - { - title: 'Activating V20 fork', - task: () => new Observable(async (observer) => { - let isV20Activated = false; - let blockchainInfo; - - let blocksGenerated = 0; - - const blocksToGenerateInOneStep = 10; - - do { - ({ - result: blockchainInfo, - } = await ctx.seedCoreService.getRpcClient().getBlockchainInfo()); - - isV20Activated = blockchainInfo.softforks && blockchainInfo.softforks.v20 - && blockchainInfo.softforks.v20.active; - if (isV20Activated) { - break; - } - - await generateBlocks( - ctx.seedCoreService, - blocksToGenerateInOneStep, - NETWORK_LOCAL, - // eslint-disable-next-line no-loop-func - (blocks) => { - blocksGenerated += blocks; - - observer.next(`${blocksGenerated} blocks generated`); - }, - ); - } while (!isV20Activated); - - observer.next(`V20 fork has been activated at height ${blockchainInfo.softforks.v20.height}`); - - observer.complete(); - - return this; - }), - }, { title: 'Wait for nodes to have the same height', task: () => waitForNodesToHaveTheSameHeight( @@ -396,41 +309,23 @@ export default function configureCoreTaskFactory( ), }, { - title: 'Enable EHF spork', - task: async () => new Observable(async (observer) => { - const seedRpcClient = ctx.seedCoreService.getRpcClient(); - const { - result: initialCoreChainLockedHeight, - } = await seedRpcClient.getBlockCount(); - - await activateCoreSpork( - seedRpcClient, - 'SPORK_24_TEST_EHF', - initialCoreChainLockedHeight, - ); - - let isEhfActivated = false; - let blockchainInfo; + title: 'Activating v21 fork', + task: () => new Observable(async (observer) => { + const dip3ActivationHeight = 1001; + const blocksToGenerateInOneStep = 10; let blocksGenerated = 0; - - const blocksToGenerateInOneStep = 48; + let { + result: currentBlockHeight, + } = await ctx.coreService.getRpcClient().getBlockCount(); do { ({ - result: blockchainInfo, - } = await ctx.seedCoreService.getRpcClient().getBlockchainInfo()); - - isEhfActivated = blockchainInfo.softforks && blockchainInfo.softforks.mn_rr - && blockchainInfo.softforks.mn_rr.active; - if (isEhfActivated) { - break; - } - - await ctx.bumpMockTime(blocksToGenerateInOneStep); + result: currentBlockHeight, + } = await ctx.coreService.getRpcClient().getBlockCount()); await generateBlocks( - ctx.seedCoreService, + ctx.coreService, blocksToGenerateInOneStep, NETWORK_LOCAL, // eslint-disable-next-line 
no-loop-func @@ -440,9 +335,7 @@ export default function configureCoreTaskFactory( observer.next(`${blocksGenerated} blocks generated`); }, ); - } while (!isEhfActivated); - - observer.next(`EHF has been activated at height ${blockchainInfo.softforks.mn_rr.height}`); + } while (dip3ActivationHeight > currentBlockHeight); observer.complete(); diff --git a/packages/dashmate/src/listr/tasks/ssl/VerificationServer.js b/packages/dashmate/src/listr/tasks/ssl/VerificationServer.js index eba5de00e4..5f19b26408 100644 --- a/packages/dashmate/src/listr/tasks/ssl/VerificationServer.js +++ b/packages/dashmate/src/listr/tasks/ssl/VerificationServer.js @@ -3,8 +3,14 @@ import path from 'path'; import dots from 'dot'; import os from 'os'; import { TEMPLATES_DIR } from '../../../constants.js'; +import wait from '../../../util/wait.js'; export default class VerificationServer { + /** + * @param {string} verification url + */ + #validationUrl; + /** * * @param {Docker} docker @@ -35,6 +41,8 @@ export default class VerificationServer { throw new Error('Server is already setup'); } + this.#validationUrl = validationUrl; + this.config = config; dots.templateSettings.strip = false; @@ -97,29 +105,36 @@ export default class VerificationServer { await this.dockerPull(image); - try { - this.container = await this.docker.createContainer(opts); - } catch (e) { - if (e.statusCode === 409) { + let retries = 0; + const MAX_RETRIES = 3; + while (!this.container && retries <= MAX_RETRIES) { + try { + this.container = await this.docker.createContainer(opts); + } catch (e) { + // Throw any other error except container name conflict + if (e.statusCode !== 409) { + throw e; + } + + // Container name is already in use + // Remove container const danglingContainer = await this.docker.getContainer(name); - await danglingContainer.remove({ force: true }); try { await danglingContainer.wait(); } catch (waitError) { - // Skip error if container is already removed - if (e.statusCode !== 404) { - throw e; + // Throw any other error except container not found + if (waitError.statusCode !== 404) { + throw waitError; } - } - // Try to create a container one more type - this.container = await this.docker.createContainer(opts); + // Skip error if container is already removed + } } - throw e; + retries++; } this.startedContainers.addContainer(opts.name); @@ -153,6 +168,31 @@ export default class VerificationServer { this.container = null; } + async waitForServerIsResponding() { + const MAX_WAIT_TIME = 10000; // Maximum wait time in milliseconds + const INTERVAL = 500; // Interval to check in milliseconds + const FETCH_TIMEOUT = 2000; // Timeout for each fetch in ms + const startTime = Date.now(); + + while (Date.now() - startTime < MAX_WAIT_TIME) { + try { + const response = await fetch( + this.#validationUrl, + { signal: AbortSignal.timeout(FETCH_TIMEOUT) }, + ); + if (response.ok) { + return true; + } + } catch (e) { + // Ignore errors and continue retrying + } + + await wait(INTERVAL); + } + + return false; + } + /** * Destroy verification server files * diff --git a/packages/dashmate/src/listr/tasks/ssl/zerossl/obtainZeroSSLCertificateTaskFactory.js b/packages/dashmate/src/listr/tasks/ssl/zerossl/obtainZeroSSLCertificateTaskFactory.js index 71d7b7809b..4636b37069 100644 --- a/packages/dashmate/src/listr/tasks/ssl/zerossl/obtainZeroSSLCertificateTaskFactory.js +++ b/packages/dashmate/src/listr/tasks/ssl/zerossl/obtainZeroSSLCertificateTaskFactory.js @@ -166,7 +166,20 @@ export default function obtainZeroSSLCertificateTaskFactory( { 
title: 'Start verification server', skip: (ctx) => ctx.certificate && !['pending_validation', 'draft'].includes(ctx.certificate.status), - task: async () => verificationServer.start(), + task: async (ctx) => { + await verificationServer.start(); + + const isResponding = await verificationServer.waitForServerIsResponding(); + + if (!isResponding) { + throw new Error(`Verification server is not responding. +Please ensure that port 80 on your public IP address ${ctx.externalIp} is open +for incoming HTTP connections. You may need to configure your firewall to +ensure this port is accessible from the public internet. If you are using +Network Address Translation (NAT), please enable port forwarding for port 80 +and all Dash service ports listed above.`); + } + }, }, { title: 'Verify certificate IP address', @@ -177,16 +190,38 @@ export default function obtainZeroSSLCertificateTaskFactory( try { await verifyDomain(ctx.certificate.id, ctx.apiKey); } catch (e) { + // Error: The given certificate is not ready for domain verification + // Sometimes this error means that certificate is already verified + if (e.code === 2831) { + const certificate = await getCertificate(ctx.apiKey, ctx.certificate.id); + // Just proceed on certificate download if we see it's already issued. + if (certificate.status === 'issued') { + return; + } + } + + // If retry is disabled, throw the error + // or prompt the user to retry if (ctx.noRetry !== true) { + let errorMessage = e.message; + + // Get the error message from details if it exists + if (e.type === 'domain_control_validation_failed' && e.details[ctx.externalIp]) { + const errorDetails = Object.values(e.details[ctx.externalIp])[0]; + if (errorDetails?.error) { + errorMessage = errorDetails.error_info; + } + } + retry = await task.prompt({ type: 'toggle', - header: chalk` An error occurred during verification: {red ${e.message}} + header: chalk` An error occurred during verification: {red ${errorMessage}} - Please ensure that port 80 on your public IP address ${ctx.externalIp} is open - for incoming HTTP connections. You may need to configure your firewall to - ensure this port is accessible from the public internet. If you are using - Network Address Translation (NAT), please enable port forwarding for port 80 - and all Dash service ports listed above.`, + Please ensure that port 80 on your public IP address ${ctx.externalIp} is open + for incoming HTTP connections. You may need to configure your firewall to + ensure this port is accessible from the public internet. 
If you are using + Network Address Translation (NAT), please enable port forwarding for port 80 + and all Dash service ports listed above.`, message: 'Try again?', enabled: 'Yes', disabled: 'No', diff --git a/packages/dashmate/src/status/scopes/platform.js b/packages/dashmate/src/status/scopes/platform.js index c3fe48d8e8..467e8e4ce0 100644 --- a/packages/dashmate/src/status/scopes/platform.js +++ b/packages/dashmate/src/status/scopes/platform.js @@ -1,4 +1,3 @@ -import prettyMs from 'pretty-ms'; import { PortStateEnum } from '../enums/portState.js'; import DockerComposeError from '../../docker/errors/DockerComposeError.js'; import providers from '../providers.js'; @@ -6,6 +5,7 @@ import { DockerStatusEnum } from '../enums/dockerStatus.js'; import { ServiceStatusEnum } from '../enums/serviceStatus.js'; import determineStatus from '../determineStatus.js'; import ContainerIsNotPresentError from '../../docker/errors/ContainerIsNotPresentError.js'; +import ServiceIsNotRunningError from '../../docker/errors/ServiceIsNotRunningError.js'; /** * @returns {getPlatformScopeFactory} @@ -218,7 +218,8 @@ export default function getPlatformScopeFactory( // Throw an error if it's not a Drive issue if (!(e instanceof DockerComposeError && e.dockerComposeExecutionResult - && e.dockerComposeExecutionResult.exitCode !== 0)) { + && e.dockerComposeExecutionResult.exitCode !== 0) + && !(e instanceof ServiceIsNotRunningError)) { throw e; } } @@ -312,11 +313,7 @@ export default function getPlatformScopeFactory( if (mnRRSoftFork.active) { scope.platformActivation = `Activated (at height ${mnRRSoftFork.height})`; } else { - const startTime = mnRRSoftFork.bip9.start_time; - - const diff = (new Date().getTime() - startTime) / 1000; - - scope.platformActivation = `Waiting for activation (approximately in ${prettyMs(diff, { compact: true })})`; + scope.platformActivation = `Waiting for activation on height ${mnRRSoftFork.height}`; } const [tenderdash, drive] = await Promise.all([ diff --git a/packages/dashmate/templates/core/dash.conf.dot b/packages/dashmate/templates/core/dash.conf.dot index cbc0b93373..78b71b095a 100644 --- a/packages/dashmate/templates/core/dash.conf.dot +++ b/packages/dashmate/templates/core/dash.conf.dot @@ -31,7 +31,6 @@ debuglogfile=/var/log/dash/debug.log # JSON RPC server=1 rpcwallet=main -deprecatedrpc=hpmn rpcworkqueue=64 rpcthreads=16 rpcwhitelistdefault=0 @@ -93,6 +92,7 @@ fallbackfee=0.00001 {{?? it.network === 'local'}} regtest=1 [regtest] +testactivationheight=mn_rr@1000 {{? it.core.spork.address}}sporkaddr={{=it.core.spork.address}}{{?}} {{? it.core.spork.privateKey}}sporkkey={{=it.core.spork.privateKey}}{{?}} {{? 
it.core.miner.mediantime}}mocktime={{=it.core.miner.mediantime}}{{?}}
@@ -108,7 +108,7 @@ devnet={{=it.core.devnet.name}}
 powtargetspacing={{=it.core.devnet.powTargetSpacing}}
 minimumdifficultyblocks={{=it.core.devnet.minimumDifficultyBlocks}}
 highsubsidyblocks=500
-highsubsidyfactor=10
+highsubsidyfactor=100
 llmqchainlocks={{=it.core.devnet.llmq.chainLocks}}
 llmqinstantsenddip0024={{=it.core.devnet.llmq.instantSend}}
 llmqplatform={{=it.core.devnet.llmq.platform}}
diff --git a/packages/dashpay-contract/Cargo.toml b/packages/dashpay-contract/Cargo.toml
index 186a17e7aa..cd693a6453 100644
--- a/packages/dashpay-contract/Cargo.toml
+++ b/packages/dashpay-contract/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "dashpay-contract"
 description = "DashPay data contract schema and tools"
-version = "1.5.1"
+version = "1.7.0"
 edition = "2021"
 rust-version.workspace = true
 license = "MIT"
diff --git a/packages/dashpay-contract/package.json b/packages/dashpay-contract/package.json
index 25321078af..6200358290 100644
--- a/packages/dashpay-contract/package.json
+++ b/packages/dashpay-contract/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@dashevo/dashpay-contract",
-  "version": "1.5.1",
+  "version": "1.7.0",
   "description": "Reference contract of the DashPay DPA on Dash Evolution",
   "scripts": {
     "lint": "eslint .",
diff --git a/packages/data-contracts/Cargo.toml b/packages/data-contracts/Cargo.toml
index 4de78dcec7..5f8e3a624e 100644
--- a/packages/data-contracts/Cargo.toml
+++ b/packages/data-contracts/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "data-contracts"
 description = "Dash Platform system data contracts"
-version = "1.5.1"
+version = "1.7.0"
 edition = "2021"
 rust-version.workspace = true
 license = "MIT"
@@ -16,3 +16,4 @@ dpns-contract = { path = "../dpns-contract" }
 dashpay-contract = { path = "../dashpay-contract" }
 feature-flags-contract = { path = "../feature-flags-contract" }
 platform-value = { path = "../rs-platform-value" }
+wallet-utils-contract = { path = "../wallet-utils-contract" }
diff --git a/packages/data-contracts/src/error.rs b/packages/data-contracts/src/error.rs
index b9a5dd5d24..0550873b01 100644
--- a/packages/data-contracts/src/error.rs
+++ b/packages/data-contracts/src/error.rs
@@ -102,3 +102,20 @@
         }
     }
 }
+
+impl From<wallet_utils_contract::Error> for Error {
+    fn from(e: wallet_utils_contract::Error) -> Self {
+        match e {
+            wallet_utils_contract::Error::UnknownVersionMismatch {
+                method,
+                known_versions,
+                received,
+            } => Error::UnknownVersionMismatch {
+                method,
+                known_versions,
+                received,
+            },
+            wallet_utils_contract::Error::InvalidSchemaJson(e) => Error::InvalidSchemaJson(e),
+        }
+    }
+}
diff --git a/packages/data-contracts/src/lib.rs b/packages/data-contracts/src/lib.rs
index f6761d1eb8..65f324137f 100644
--- a/packages/data-contracts/src/lib.rs
+++ b/packages/data-contracts/src/lib.rs
@@ -9,6 +9,7 @@ pub use feature_flags_contract;
 pub use masternode_reward_shares_contract;
 use platform_value::Identifier;
 use platform_version::version::PlatformVersion;
+pub use wallet_utils_contract;
 pub use withdrawals_contract;

 #[repr(u8)]
@@ -19,6 +20,7 @@
 pub enum SystemDataContract {
     FeatureFlags = 2,
     DPNS = 3,
     Dashpay = 4,
+    WalletUtils = 5,
 }

 pub struct DataContractSource {
@@ -37,6 +39,7 @@ impl SystemDataContract {
             SystemDataContract::FeatureFlags => feature_flags_contract::ID_BYTES,
             SystemDataContract::DPNS => dpns_contract::ID_BYTES,
             SystemDataContract::Dashpay => dashpay_contract::ID_BYTES,
+            SystemDataContract::WalletUtils => wallet_utils_contract::ID_BYTES,
         };
         Identifier::new(bytes)
     }
 }
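Registering a new system contract, as done for WalletUtils here, touches the `SystemDataContract` enum, the ID mapping above, and the `DataContractSource` arm in the next hunk. A minimal sketch of how downstream code would resolve the new variant; the accessor names `id()` and `source()`, `PlatformVersion::latest()`, and `Identifier::to_buffer()` are assumptions from context, not shown in this diff:

```rust
use data_contracts::SystemDataContract;
use platform_version::version::PlatformVersion;

fn resolve_wallet_utils() -> Result<(), data_contracts::Error> {
    let contract = SystemDataContract::WalletUtils;

    // Identifier built from wallet_utils_contract::ID_BYTES, as mapped above
    let id = contract.id();

    // Schema and definitions gated by platform version, as in the next hunk
    let source = contract.source(PlatformVersion::latest())?;
    assert_eq!(source.id_bytes, id.to_buffer());

    Ok(())
}
```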
@@ -82,6 +85,13 @@ impl SystemDataContract { definitions: dashpay_contract::load_definitions(platform_version)?, document_schemas: dashpay_contract::load_documents_schemas(platform_version)?, }, + SystemDataContract::WalletUtils => DataContractSource { + id_bytes: wallet_utils_contract::ID_BYTES, + owner_id_bytes: wallet_utils_contract::OWNER_ID_BYTES, + version: platform_version.system_data_contracts.wallet as u32, + definitions: wallet_utils_contract::load_definitions(platform_version)?, + document_schemas: wallet_utils_contract::load_documents_schemas(platform_version)?, + }, }; Ok(data) diff --git a/packages/dpns-contract/Cargo.toml b/packages/dpns-contract/Cargo.toml index e24782cdb2..144be65f29 100644 --- a/packages/dpns-contract/Cargo.toml +++ b/packages/dpns-contract/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "dpns-contract" description = "DPNS data contract schema and tools" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true license = "MIT" diff --git a/packages/dpns-contract/package.json b/packages/dpns-contract/package.json index d86f2c43c5..b2892b39c4 100644 --- a/packages/dpns-contract/package.json +++ b/packages/dpns-contract/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/dpns-contract", - "version": "1.5.1", + "version": "1.7.0", "description": "A contract and helper scripts for DPNS DApp", "scripts": { "lint": "eslint .", diff --git a/packages/feature-flags-contract/Cargo.toml b/packages/feature-flags-contract/Cargo.toml index 2a72038ced..9110fbbc65 100644 --- a/packages/feature-flags-contract/Cargo.toml +++ b/packages/feature-flags-contract/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "feature-flags-contract" description = "Feature flags data contract schema and tools" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true license = "MIT" diff --git a/packages/feature-flags-contract/package.json b/packages/feature-flags-contract/package.json index 3079219f55..f7bb0f3926 100644 --- a/packages/feature-flags-contract/package.json +++ b/packages/feature-flags-contract/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/feature-flags-contract", - "version": "1.5.1", + "version": "1.7.0", "description": "Data Contract to store Dash Platform feature flags", "scripts": { "build": "", diff --git a/packages/js-dapi-client/package.json b/packages/js-dapi-client/package.json index c9568844c9..a5a5ffd8cf 100644 --- a/packages/js-dapi-client/package.json +++ b/packages/js-dapi-client/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/dapi-client", - "version": "1.5.1", + "version": "1.7.0", "description": "Client library used to access Dash DAPI endpoints", "main": "lib/index.js", "contributors": [ diff --git a/packages/js-dash-sdk/package.json b/packages/js-dash-sdk/package.json index 7702c6759a..77c0634b39 100644 --- a/packages/js-dash-sdk/package.json +++ b/packages/js-dash-sdk/package.json @@ -1,6 +1,6 @@ { "name": "dash", - "version": "4.5.1", + "version": "4.7.0", "description": "Dash library for JavaScript/TypeScript ecosystem (Wallet, DAPI, Primitives, BLS, ...)", "main": "build/index.js", "unpkg": "dist/dash.min.js", diff --git a/packages/js-grpc-common/package.json b/packages/js-grpc-common/package.json index c620755f63..9119272974 100644 --- a/packages/js-grpc-common/package.json +++ b/packages/js-grpc-common/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/grpc-common", - "version": "1.5.1", + "version": "1.7.0", "description": "Common GRPC library", "main": "index.js", "scripts": { diff --git 
a/packages/masternode-reward-shares-contract/Cargo.toml b/packages/masternode-reward-shares-contract/Cargo.toml index f2c805e606..317e1d21d6 100644 --- a/packages/masternode-reward-shares-contract/Cargo.toml +++ b/packages/masternode-reward-shares-contract/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "masternode-reward-shares-contract" description = "Masternode reward shares data contract schema and tools" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true license = "MIT" diff --git a/packages/masternode-reward-shares-contract/package.json b/packages/masternode-reward-shares-contract/package.json index 85737a8ecb..cbf2736ca9 100644 --- a/packages/masternode-reward-shares-contract/package.json +++ b/packages/masternode-reward-shares-contract/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/masternode-reward-shares-contract", - "version": "1.5.1", + "version": "1.7.0", "description": "A contract and helper scripts for reward sharing", "scripts": { "lint": "eslint .", diff --git a/packages/platform-test-suite/package.json b/packages/platform-test-suite/package.json index 23f0ba377f..895af1ea58 100644 --- a/packages/platform-test-suite/package.json +++ b/packages/platform-test-suite/package.json @@ -1,7 +1,7 @@ { "name": "@dashevo/platform-test-suite", "private": true, - "version": "1.5.1", + "version": "1.7.0", "description": "Dash Network end-to-end tests", "scripts": { "test": "yarn exec bin/test.sh", diff --git a/packages/rs-dapi-client/Cargo.toml b/packages/rs-dapi-client/Cargo.toml index 847b9403f5..c6f32b443d 100644 --- a/packages/rs-dapi-client/Cargo.toml +++ b/packages/rs-dapi-client/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "rs-dapi-client" -version = "1.5.1" +version = "1.7.0" edition = "2021" [features] @@ -37,5 +37,6 @@ lru = { version = "0.12.3" } serde = { version = "1.0.197", optional = true, features = ["derive"] } serde_json = { version = "1.0.120", optional = true } chrono = { version = "0.4.38", features = ["serde"] } + [dev-dependencies] tokio = { version = "1.40", features = ["macros"] } diff --git a/packages/rs-dapi-client/src/address_list.rs b/packages/rs-dapi-client/src/address_list.rs index 0c21ecc0b1..2f59b22c3b 100644 --- a/packages/rs-dapi-client/src/address_list.rs +++ b/packages/rs-dapi-client/src/address_list.rs @@ -1,12 +1,14 @@ //! Subsystem to manage DAPI nodes. use chrono::Utc; -use dapi_grpc::tonic::codegen::http; use dapi_grpc::tonic::transport::Uri; use rand::{rngs::SmallRng, seq::IteratorRandom, SeedableRng}; -use std::collections::HashSet; +use std::collections::hash_map::Entry; +use std::collections::HashMap; use std::hash::{Hash, Hasher}; +use std::mem; use std::str::FromStr; +use std::sync::{Arc, RwLock}; use std::time::Duration; const DEFAULT_BASE_BAN_PERIOD: Duration = Duration::from_secs(60); @@ -14,54 +16,68 @@ const DEFAULT_BASE_BAN_PERIOD: Duration = Duration::from_secs(60); /// DAPI address. #[derive(Debug, Clone, Eq)] #[cfg_attr(feature = "mocks", derive(serde::Serialize, serde::Deserialize))] -pub struct Address { - ban_count: usize, - banned_until: Option>, - #[cfg_attr(feature = "mocks", serde(with = "http_serde::uri"))] - uri: Uri, -} +pub struct Address(#[cfg_attr(feature = "mocks", serde(with = "http_serde::uri"))] Uri); impl FromStr for Address { type Err = AddressListError; fn from_str(s: &str) -> Result { Uri::from_str(s) - .map(Address::from) - .map_err(AddressListError::from) + .map_err(|e| AddressListError::InvalidAddressUri(e.to_string())) + .map(Address::try_from)? 
} } impl PartialEq for Address { fn eq(&self, other: &Self) -> bool { - self.uri == other.uri + self.0 == other.0 } } impl PartialEq for Address { fn eq(&self, other: &Uri) -> bool { - self.uri == *other + self.0 == *other } } impl Hash for Address { fn hash(&self, state: &mut H) { - self.uri.hash(state); + self.0.hash(state); } } -impl From for Address { - fn from(uri: Uri) -> Self { - Address { - ban_count: 0, - banned_until: None, - uri, +impl TryFrom for Address { + type Error = AddressListError; + + fn try_from(value: Uri) -> Result { + if value.host().is_none() { + return Err(AddressListError::InvalidAddressUri( + "uri must contain host".to_string(), + )); } + + Ok(Address(value)) } } impl Address { + /// Get [Uri] of a node. + pub fn uri(&self) -> &Uri { + &self.0 + } +} + +/// Address status +/// Contains information about the number of bans and the time until the next ban is lifted. +#[derive(Debug, Default, Clone)] +pub struct AddressStatus { + ban_count: usize, + banned_until: Option>, +} + +impl AddressStatus { /// Ban the [Address] so it won't be available through [AddressList::get_live_address] for some time. - fn ban(&mut self, base_ban_period: &Duration) { + pub fn ban(&mut self, base_ban_period: &Duration) { let coefficient = (self.ban_count as f64).exp(); let ban_period = Duration::from_secs_f64(base_ban_period.as_secs_f64() * coefficient); @@ -75,35 +91,27 @@ impl Address { } /// Clears ban record. - fn unban(&mut self) { + pub fn unban(&mut self) { self.ban_count = 0; self.banned_until = None; } - - /// Get [Uri] of a node. - pub fn uri(&self) -> &Uri { - &self.uri - } } /// [AddressList] errors #[derive(Debug, thiserror::Error)] #[cfg_attr(feature = "mocks", derive(serde::Serialize, serde::Deserialize))] pub enum AddressListError { - /// Specified address is not present in the list - #[error("address {0} not found in the list")] - AddressNotFound(#[cfg_attr(feature = "mocks", serde(with = "http_serde::uri"))] Uri), /// A valid uri is required to create an Address #[error("unable parse address: {0}")] #[cfg_attr(feature = "mocks", serde(skip))] - InvalidAddressUri(#[from] http::uri::InvalidUri), + InvalidAddressUri(String), } /// A structure to manage DAPI addresses to select from /// for [DapiRequest](crate::DapiRequest) execution. #[derive(Debug, Clone)] pub struct AddressList { - addresses: HashSet
, + addresses: Arc>>, base_ban_period: Duration, } @@ -115,7 +123,7 @@ impl Default for AddressList { impl std::fmt::Display for Address { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.uri.fmt(f) + self.0.fmt(f) } } @@ -128,83 +136,103 @@ impl AddressList { /// Creates an empty [AddressList] with adjustable base ban time. pub fn with_settings(base_ban_period: Duration) -> Self { AddressList { - addresses: HashSet::new(), + addresses: Arc::new(RwLock::new(HashMap::new())), base_ban_period, } } /// Bans address - pub(crate) fn ban_address(&mut self, address: &Address) -> Result<(), AddressListError> { - if !self.addresses.remove(address) { - return Err(AddressListError::AddressNotFound(address.uri.clone())); - }; + /// Returns false if the address is not in the list. + pub fn ban(&self, address: &Address) -> bool { + let mut guard = self.addresses.write().unwrap(); - let mut banned_address = address.clone(); - banned_address.ban(&self.base_ban_period); + let Some(status) = guard.get_mut(address) else { + return false; + }; - self.addresses.insert(banned_address); + status.ban(&self.base_ban_period); - Ok(()) + true } /// Clears address' ban record - pub(crate) fn unban_address(&mut self, address: &Address) -> Result<(), AddressListError> { - if !self.addresses.remove(address) { - return Err(AddressListError::AddressNotFound(address.uri.clone())); + /// Returns false if the address is not in the list. + pub fn unban(&self, address: &Address) -> bool { + let mut guard = self.addresses.write().unwrap(); + + let Some(status) = guard.get_mut(address) else { + return false; }; - let mut unbanned_address = address.clone(); - unbanned_address.unban(); + status.unban(); + + true + } - self.addresses.insert(unbanned_address); + /// Check if the address is banned. + pub fn is_banned(&self, address: &Address) -> bool { + let guard = self.addresses.read().unwrap(); - Ok(()) + guard + .get(address) + .map(|status| status.is_banned()) + .unwrap_or(false) } /// Adds a node [Address] to [AddressList] /// Returns false if the address is already in the list. pub fn add(&mut self, address: Address) -> bool { - self.addresses.insert(address) + let mut guard = self.addresses.write().unwrap(); + + match guard.entry(address) { + Entry::Occupied(_) => false, + Entry::Vacant(e) => { + e.insert(AddressStatus::default()); + + true + } + } } - // TODO: this is the most simple way to add an address - // however we need to support bulk loading (e.g. providing a network name) - // and also fetch updated from SML. + /// Remove address from the list + /// Returns [AddressStatus] if the address was in the list. + pub fn remove(&mut self, address: &Address) -> Option { + let mut guard = self.addresses.write().unwrap(); + + guard.remove(address) + } + + #[deprecated] + // TODO: Remove in favor of add /// Add a node [Address] to [AddressList] by [Uri]. /// Returns false if the address is already in the list. pub fn add_uri(&mut self, uri: Uri) -> bool { - self.addresses.insert(uri.into()) + self.add(Address::try_from(uri).expect("valid uri")) } /// Randomly select a not banned address. - pub fn get_live_address(&self) -> Option<&Address> { - let mut rng = SmallRng::from_entropy(); + pub fn get_live_address(&self) -> Option
 
     /// Randomly select a not banned address.
-    pub fn get_live_address(&self) -> Option<&Address> {
-        let mut rng = SmallRng::from_entropy();
+    pub fn get_live_address(&self) -> Option<Address> {
+        let guard = self.addresses.read().unwrap();
 
-        self.unbanned().into_iter().choose(&mut rng)
-    }
+        let mut rng = SmallRng::from_entropy();
 
-    /// Get all addresses that are not banned.
-    fn unbanned(&self) -> Vec<&Address> {
         let now = chrono::Utc::now();
 
-        self.addresses
+        guard
             .iter()
-            .filter(|addr| {
-                addr.banned_until
+            .filter(|(_, status)| {
+                status
+                    .banned_until
                     .map(|banned_until| banned_until < now)
                     .unwrap_or(true)
             })
-            .collect()
-    }
-
-    /// Get number of available, not banned addresses.
-    pub fn available(&self) -> usize {
-        self.unbanned().len()
+            .choose(&mut rng)
+            .map(|(addr, _)| addr.clone())
     }
 
     /// Get number of all addresses, both banned and not banned.
     pub fn len(&self) -> usize {
-        self.addresses.len()
+        self.addresses.read().unwrap().len()
     }
 
     /// Check if the list is empty.
@@ -212,38 +240,43 @@ impl AddressList {
     /// Returns false if there is at least one address in the list.
     /// Banned addresses are also counted.
     pub fn is_empty(&self) -> bool {
-        self.addresses.is_empty()
+        self.addresses.read().unwrap().is_empty()
     }
 }
 
-// TODO: Must be changed to FromStr
-impl From<&str> for AddressList {
-    fn from(value: &str) -> Self {
-        let uri_list: Vec<Uri> = value
+impl IntoIterator for AddressList {
+    type Item = (Address, AddressStatus);
+    type IntoIter = std::collections::hash_map::IntoIter<Address, AddressStatus>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        let mut guard = self.addresses.write().unwrap();
+
+        let addresses_map = mem::take(&mut *guard);
+
+        addresses_map.into_iter()
+    }
+}
+
+impl FromStr for AddressList {
+    type Err = AddressListError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let uri_list: Vec<Address> = s
             .split(',')
-            .map(|uri| Uri::from_str(uri).expect("invalid uri"))
-            .collect();
+            .map(Address::from_str)
+            .collect::<Result<_, _>>()?;
 
-        Self::from_iter(uri_list)
+        Ok(Self::from_iter(uri_list))
     }
 }
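Since `AddressList` now implements `FromStr`, a comma-separated list can be parsed with the standard `str::parse`, and each entry is validated along the way (a URI without a host is rejected with `AddressListError::InvalidAddressUri`). A hedged usage sketch, assuming the crate is imported as `rs_dapi_client` and the endpoint addresses are placeholders:

```rust
use rs_dapi_client::AddressList;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Two hypothetical DAPI endpoints; parsing fails on any invalid URI.
    let list: AddressList = "https://127.0.0.1:1443,https://192.0.2.1:1443".parse()?;
    assert_eq!(list.len(), 2);
    Ok(())
}
```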
 
-impl FromIterator<Uri> for AddressList {
-    fn from_iter<T: IntoIterator<Item = Uri>>(iter: T) -> Self {
+impl FromIterator<Address> for AddressList {
+    fn from_iter<T: IntoIterator<Item = Address>>(iter: T) -> Self {
         let mut address_list = Self::new();
         for uri in iter {
-            address_list.add_uri(uri);
+            address_list.add(uri);
         }
 
         address_list
     }
 }
-
-impl IntoIterator for AddressList {
-    type Item = Address;
-    type IntoIter = std::collections::hash_set::IntoIter<Address>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.addresses.into_iter()
-    }
-}
diff --git a/packages/rs-dapi-client/src/dapi_client.rs b/packages/rs-dapi-client/src/dapi_client.rs
index 579c62e015..ebca641f40 100644
--- a/packages/rs-dapi-client/src/dapi_client.rs
+++ b/packages/rs-dapi-client/src/dapi_client.rs
@@ -3,14 +3,15 @@ use backon::{ConstantBuilder, Retryable};
 use dapi_grpc::mock::Mockable;
 use dapi_grpc::tonic::async_trait;
-use std::fmt::Debug;
+use std::fmt::{Debug, Display};
 use std::sync::atomic::AtomicUsize;
-use std::sync::{Arc, RwLock};
+use std::sync::Arc;
 use std::time::Duration;
 use tracing::Instrument;
 
 use crate::address_list::AddressListError;
 use crate::connection_pool::ConnectionPool;
+use crate::request_settings::AppliedRequestSettings;
 use crate::transport::TransportError;
 use crate::{
     transport::{TransportClient, TransportRequest},
@@ -72,7 +73,7 @@ impl Mockable for DapiClientError {
 /// Access point to DAPI.
 #[derive(Debug, Clone)]
 pub struct DapiClient {
-    address_list: Arc<RwLock<AddressList>>,
+    address_list: AddressList,
     settings: RequestSettings,
     pool: ConnectionPool,
     #[cfg(feature = "dump")]
@@ -86,7 +87,7 @@ impl DapiClient {
         let address_count = 3 * address_list.len();
 
         Self {
-            address_list: Arc::new(RwLock::new(address_list)),
+            address_list,
             settings,
             pool: ConnectionPool::new(address_count),
             #[cfg(feature = "dump")]
@@ -95,11 +96,74 @@ impl DapiClient {
     }
 
     /// Return the [DapiClient] address list.
-    pub fn address_list(&self) -> &Arc<RwLock<AddressList>> {
+    pub fn address_list(&self) -> &AddressList {
         &self.address_list
     }
 }
 
+/// Ban address in case of retryable error or unban it
+/// if it was banned, and the request was successful.
+pub fn update_address_ban_status<R, E>(
+    address_list: &AddressList,
+    result: &ExecutionResult<R, E>,
+    applied_settings: &AppliedRequestSettings,
+) where
+    E: CanRetry + Display + Debug,
+{
+    match &result {
+        Ok(response) => {
+            // Unban the address if it was banned and node responded successfully this time
+            if address_list.is_banned(&response.address) {
+                if address_list.unban(&response.address) {
+                    tracing::debug!(address = ?response.address, "unban successfully responded address {}", response.address);
+                } else {
+                    // The address might have already been removed from the list
+                    // by a background process (e.g., an SML update), and that's fine.
+                    tracing::debug!(
+                        address = ?response.address,
+                        "unable to unban address {} because it's not in the list anymore",
+                        response.address
+                    );
+                }
+            }
+        }
+        Err(error) => {
+            if error.can_retry() {
+                if let Some(address) = error.address.as_ref() {
+                    if applied_settings.ban_failed_address {
+                        if address_list.ban(address) {
+                            tracing::warn!(
+                                ?address,
+                                ?error,
+                                "ban address {address} due to error: {error}"
+                            );
+                        } else {
+                            // The address might have already been removed from the list
+                            // by a background process (e.g., an SML update), and that's fine.
+                            tracing::debug!(
+                                ?address,
+                                ?error,
+                                "unable to ban address {address} because it's not in the list anymore"
+                            );
+                        }
+                    } else {
+                        tracing::debug!(
+                            ?error,
+                            ?address,
+                            "we should ban the address {address} due to the error but banning is disabled"
+                        );
+                    }
+                } else {
+                    tracing::debug!(
+                        ?error,
+                        "we should ban an address due to the error but address is absent"
+                    );
+                }
+            }
+        }
+    };
+}
+
 #[async_trait]
 impl DapiRequestExecutor for DapiClient {
     /// Execute the [DapiRequest](crate::DapiRequest).
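Before the `execute` changes below, one note on the ban math carried over from `AddressStatus::ban` above: each successive ban scales the base period by `e^ban_count`. A standalone sketch of the resulting schedule (plain `std` only; the 60-second base is an arbitrary example, not the crate's default):

```rust
use std::time::Duration;

/// Backoff curve implied by `AddressStatus::ban`:
/// ban_period = base_ban_period * e^(ban_count).
fn ban_period(base: Duration, ban_count: usize) -> Duration {
    let coefficient = (ban_count as f64).exp();
    Duration::from_secs_f64(base.as_secs_f64() * coefficient)
}

fn main() {
    let base = Duration::from_secs(60);
    for ban_count in 0..4 {
        // Prints roughly 60s, 163s, 443s, 1205s for bans 0..3.
        println!("ban #{ban_count}: {:?}", ban_period(base, ban_count));
    }
}
```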
@@ -140,18 +204,11 @@ impl DapiRequestExecutor for DapiClient { let retries_counter = Arc::clone(retries_counter_arc_ref); // Try to get an address to initialize transport on: - let address_list = self + let address_result = self .address_list - .read() - .expect("can't get address list for read"); - - let address_result = address_list .get_live_address() - .cloned() .ok_or(DapiClientError::NoAvailableAddresses); - drop(address_list); - let _span = tracing::trace_span!( "execute request", address = ?address_result, @@ -177,7 +234,7 @@ impl DapiRequestExecutor for DapiClient { // `impl Future`, not a `Result` itself. let address = address_result.map_err(|inner| ExecutionError { inner, - retries: retries_counter.load(std::sync::atomic::Ordering::Acquire), + retries: retries_counter.load(std::sync::atomic::Ordering::Relaxed), address: None, })?; @@ -190,72 +247,44 @@ impl DapiRequestExecutor for DapiClient { ) .map_err(|error| ExecutionError { inner: DapiClientError::Transport(error), - retries: retries_counter.load(std::sync::atomic::Ordering::Acquire), + retries: retries_counter.load(std::sync::atomic::Ordering::Relaxed), address: Some(address.clone()), })?; - let response = transport_request + let result = transport_request .execute_transport(&mut transport_client, &applied_settings) .await .map_err(DapiClientError::Transport); - match &response { - Ok(_) => { - // Unban the address if it was banned and node responded successfully this time - if address.is_banned() { - let mut address_list = self - .address_list - .write() - .expect("can't get address list for write"); - - address_list.unban_address(&address).map_err(|error| { - ExecutionError { - inner: DapiClientError::AddressList(error), - retries: retries_counter - .load(std::sync::atomic::Ordering::Acquire), - address: Some(address.clone()), - } - })?; + let retries = retries_counter.load(std::sync::atomic::Ordering::Relaxed); + + let execution_result = result + .map(|inner| { + tracing::trace!(response = ?inner, "received {} response", response_name); + + ExecutionResponse { + inner, + retries, + address: address.clone(), } + }) + .map_err(|inner| { + tracing::debug!(error = ?inner, "received error: {inner}"); - tracing::trace!(?response, "received {} response", response_name); - } - Err(error) => { - if error.can_retry() { - if applied_settings.ban_failed_address { - let mut address_list = self - .address_list - .write() - .expect("can't get address list for write"); - - address_list.ban_address(&address).map_err(|error| { - ExecutionError { - inner: DapiClientError::AddressList(error), - retries: retries_counter - .load(std::sync::atomic::Ordering::Acquire), - address: Some(address.clone()), - } - })?; - } - } else { - tracing::trace!(?error, "received error"); + ExecutionError { + inner, + retries, + address: Some(address.clone()), } - } - }; + }); - let retries = retries_counter.load(std::sync::atomic::Ordering::Acquire); + update_address_ban_status::( + &self.address_list, + &execution_result, + &applied_settings, + ); - response - .map(|inner| ExecutionResponse { - inner, - retries, - address: address.clone(), - }) - .map_err(|inner| ExecutionError { - inner, - retries, - address: Some(address), - }) + execution_result } }; @@ -265,7 +294,7 @@ impl DapiRequestExecutor for DapiClient { .retry(retry_settings) .notify(|error, duration| { let retries_counter = Arc::clone(&retries_counter_arc); - retries_counter.fetch_add(1, std::sync::atomic::Ordering::AcqRel); + retries_counter.fetch_add(1, std::sync::atomic::Ordering::Relaxed); 
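The hunk above also relaxes the retry counter's memory orderings from `Acquire`/`AcqRel` to `Relaxed`. That is sound here because the counter is a standalone tally: no other memory is published through it, and the surrounding control flow already provides the needed synchronization. A minimal sketch of the same idea with plain threads:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread;

fn main() {
    // A counter used only for reporting; it guards no other data,
    // so Relaxed ordering is sufficient for the increments.
    let retries = Arc::new(AtomicUsize::new(0));

    let handles: Vec<_> = (0..4)
        .map(|_| {
            let retries = Arc::clone(&retries);
            thread::spawn(move || {
                retries.fetch_add(1, Ordering::Relaxed);
            })
        })
        .collect();

    for handle in handles {
        handle.join().unwrap();
    }

    // join() synchronizes with each thread, so this load observes all increments.
    assert_eq!(retries.load(Ordering::Relaxed), 4);
}
```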
tracing::warn!( ?error, diff --git a/packages/rs-dapi-client/src/executor.rs b/packages/rs-dapi-client/src/executor.rs index e1b5dca2f9..0afb8f5705 100644 --- a/packages/rs-dapi-client/src/executor.rs +++ b/packages/rs-dapi-client/src/executor.rs @@ -124,6 +124,18 @@ where /// Result of request execution pub type ExecutionResult = Result, ExecutionError>; +impl From> for ExecutionResult { + fn from(response: ExecutionResponse) -> Self { + ExecutionResult::::Ok(response) + } +} + +impl From> for ExecutionResult { + fn from(e: ExecutionError) -> Self { + ExecutionResult::::Err(e) + } +} + impl IntoInner> for ExecutionResult { fn into_inner(self) -> Result { match self { @@ -145,3 +157,64 @@ where } } } + +/// Convert Result to ExecutionResult, taking context from ExecutionResponse. +pub trait WrapToExecutionResult: Sized { + /// Convert self (eg. some [Result]) to [ExecutionResult], taking context information from `W` (eg. ExecutionResponse). + /// + /// This function simplifies processing of results by wrapping them into ExecutionResult. + /// It is useful when you have execution result retrieved in previous step and you want to + /// add it to the result of the current step. + /// + /// Useful when chaining multiple commands and you want to keep track of retries and address. + /// + /// ## Example + /// + /// ```rust + /// use rs_dapi_client::{ExecutionResponse, ExecutionResult, WrapToExecutionResult}; + /// + /// fn some_request() -> ExecutionResult { + /// Ok(ExecutionResponse { + /// inner: 42, + /// retries: 123, + /// address: "http://127.0.0.1".parse().expect("create mock address"), + /// }) + /// } + /// + /// fn next_step() -> Result { + /// Err("next error".to_string()) + /// } + /// + /// let response = some_request().expect("request should succeed"); + /// let result: ExecutionResult = next_step().wrap_to_execution_result(&response); + /// + /// if let ExecutionResult::Err(error) = result { + /// assert_eq!(error.inner, "next error"); + /// assert_eq!(error.retries, 123); + /// } else { + /// panic!("Expected error"); + /// } + /// ``` + fn wrap_to_execution_result(self, result: &W) -> ExecutionResult; +} + +impl WrapToExecutionResult> for Result +where + R: From, + RE: From, +{ + fn wrap_to_execution_result(self, result: &ExecutionResponse) -> ExecutionResult { + match self { + Ok(r) => ExecutionResult::Ok(ExecutionResponse { + inner: r.into(), + retries: result.retries, + address: result.address.clone(), + }), + Err(e) => ExecutionResult::Err(ExecutionError { + inner: e.into(), + retries: result.retries, + address: Some(result.address.clone()), + }), + } + } +} diff --git a/packages/rs-dapi-client/src/lib.rs b/packages/rs-dapi-client/src/lib.rs index 2ce4a9da43..e820a714a0 100644 --- a/packages/rs-dapi-client/src/lib.rs +++ b/packages/rs-dapi-client/src/lib.rs @@ -16,12 +16,14 @@ pub mod transport; pub use address_list::Address; pub use address_list::AddressList; pub use address_list::AddressListError; +pub use address_list::AddressStatus; pub use connection_pool::ConnectionPool; -pub use dapi_client::{DapiClient, DapiClientError}; +pub use dapi_client::{update_address_ban_status, DapiClient, DapiClientError}; #[cfg(feature = "dump")] pub use dump::DumpData; pub use executor::{ DapiRequestExecutor, ExecutionError, ExecutionResponse, ExecutionResult, InnerInto, IntoInner, + WrapToExecutionResult, }; use futures::{future::BoxFuture, FutureExt}; pub use request_settings::RequestSettings; diff --git a/packages/rs-dapi-client/src/request_settings.rs 
b/packages/rs-dapi-client/src/request_settings.rs
index 21a1f69b38..9ad08e8861 100644
--- a/packages/rs-dapi-client/src/request_settings.rs
+++ b/packages/rs-dapi-client/src/request_settings.rs
@@ -19,7 +19,10 @@ const DEFAULT_BAN_FAILED_ADDRESS: bool = true;
 pub struct RequestSettings {
     /// Timeout for establishing a connection.
     pub connect_timeout: Option<Duration>,
-    /// Timeout for a request.
+    /// Timeout for a single request (soft limit).
+    ///
+    /// Note that the total execution time can exceed `(timeout + connect_timeout) * retries`,
+    /// since it also includes internal processing time between retries.
     pub timeout: Option<Duration>,
     /// Number of retries in case of failed requests. If max retries reached, the last error is returned.
     /// 1 means one request and one retry in case of error, etc.
diff --git a/packages/rs-dapi-client/src/transport/grpc.rs b/packages/rs-dapi-client/src/transport/grpc.rs
index fb1f08c842..62a7590406 100644
--- a/packages/rs-dapi-client/src/transport/grpc.rs
+++ b/packages/rs-dapi-client/src/transport/grpc.rs
@@ -44,8 +44,8 @@ impl TransportClient for PlatformGrpcClient {
             .get_or_create(PoolPrefix::Platform, &uri, None, || {
                 match create_channel(uri.clone(), None) {
                     Ok(channel) => Ok(Self::new(channel).into()),
-                    Err(e) => Err(dapi_grpc::tonic::Status::failed_precondition(format!(
-                        "Channel creation failed: {}",
+                    Err(e) => Err(dapi_grpc::tonic::Status::invalid_argument(format!(
+                        "channel creation failed: {}",
                         e
                     ))),
                 }
@@ -65,7 +65,7 @@ impl TransportClient for PlatformGrpcClient {
                 Some(settings),
                 || match create_channel(uri.clone(), Some(settings)) {
                     Ok(channel) => Ok(Self::new(channel).into()),
-                    Err(e) => Err(dapi_grpc::tonic::Status::failed_precondition(format!(
+                    Err(e) => Err(dapi_grpc::tonic::Status::invalid_argument(format!(
                         "Channel creation failed: {}",
                         e
                     ))),
@@ -81,7 +81,7 @@ impl TransportClient for CoreGrpcClient {
             .get_or_create(PoolPrefix::Core, &uri, None, || {
                 match create_channel(uri.clone(), None) {
                     Ok(channel) => Ok(Self::new(channel).into()),
-                    Err(e) => Err(dapi_grpc::tonic::Status::failed_precondition(format!(
+                    Err(e) => Err(dapi_grpc::tonic::Status::invalid_argument(format!(
                         "Channel creation failed: {}",
                         e
                     ))),
@@ -102,7 +102,7 @@ impl TransportClient for CoreGrpcClient {
                 Some(settings),
                 || match create_channel(uri.clone(), Some(settings)) {
                     Ok(channel) => Ok(Self::new(channel).into()),
-                    Err(e) => Err(dapi_grpc::tonic::Status::failed_precondition(format!(
+                    Err(e) => Err(dapi_grpc::tonic::Status::invalid_argument(format!(
                         "Channel creation failed: {}",
                         e
                     ))),
@@ -132,8 +132,38 @@ impl CanRetry for dapi_grpc::tonic::Status {
     }
 }
 
-/// A shortcut to link between gRPC request type, response type, client and its
-/// method in order to represent it in a form of types and data.
+/// Macro to implement the `TransportRequest` trait for a given request type, response type, client type, and settings.
+///
+/// # Parameters
+///
+/// - `$request:ty`: The request type for which the `TransportRequest` trait will be implemented.
+/// - `$response:ty`: The response type returned by the transport request.
+/// - `$client:ty`: The client type used to execute the transport request (e.g. generated by the `tonic` crate).
+/// - `$settings:expr`: The settings to be used for the transport request; these settings will override the client's
+///   default settings, but can still be overridden by arguments to
+///   the [`DapiRequestExecutor::execute`](crate::DapiRequestExecutor::execute) method.
+/// - `$($method:tt)+`: The method of `$client` to be called to execute the request. +/// +/// # Example +/// +/// ```compile_fail +/// impl_transport_request_grpc!( +/// MyRequestType, +/// MyResponseType, +/// MyClientType, +/// my_settings, +/// my_method +/// ); +/// ``` +/// +/// This will generate an implementation of the `TransportRequest` trait for `MyRequestType` +/// that uses `MyClientType` to execute the `my_method` method, with the specified `my_settings`. +/// +/// The generated implementation will: +/// - Define the associated types `Client` and `Response`. +/// - Set the `SETTINGS_OVERRIDES` constant to the provided settings. +/// - Implement the `method_name` function to return the name of the method as a string. +/// - Implement the `execute_transport` function to execute the transport request using the provided client and settings. macro_rules! impl_transport_request_grpc { ($request:ty, $response:ty, $client:ty, $settings:expr, $($method:tt)+) => { impl TransportRequest for $request { diff --git a/packages/rs-dapi-grpc-macros/Cargo.toml b/packages/rs-dapi-grpc-macros/Cargo.toml index b67219c562..90f280a6ed 100644 --- a/packages/rs-dapi-grpc-macros/Cargo.toml +++ b/packages/rs-dapi-grpc-macros/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "dapi-grpc-macros" -version = "1.5.1" +version = "1.7.0" edition = "2021" description = "Macros used by dapi-grpc. Internal use only." diff --git a/packages/rs-dpp/Cargo.toml b/packages/rs-dpp/Cargo.toml index e9c0eed2d8..f4eb5e5710 100644 --- a/packages/rs-dpp/Cargo.toml +++ b/packages/rs-dpp/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dpp" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true authors = [ @@ -29,7 +29,7 @@ dashcore = { git = "https://github.com/dashpay/rust-dashcore", features = [ "signer", "serde", "bls", - "eddsa" + "eddsa", ], default-features = false, tag = "0.32.0" } env_logger = { version = "0.11" } getrandom = { version = "0.2", features = ["js"] } @@ -56,7 +56,7 @@ platform-version = { path = "../rs-platform-version" } platform-versioning = { path = "../rs-platform-versioning" } platform-serialization = { path = "../rs-platform-serialization" } platform-serialization-derive = { path = "../rs-platform-serialization-derive" } -derive_more = { version = "1.0", features = ["from", "display"] } +derive_more = { version = "1.0", features = ["from", "display", "try_into"] } nohash-hasher = "0.2.0" rust_decimal = "1.29.1" rust_decimal_macros = "1.29.1" diff --git a/packages/rs-dpp/src/lib.rs b/packages/rs-dpp/src/lib.rs index a5d4ec177e..168a9ad284 100644 --- a/packages/rs-dpp/src/lib.rs +++ b/packages/rs-dpp/src/lib.rs @@ -1,5 +1,5 @@ #![cfg_attr(docsrs, feature(doc_cfg))] -// Coding conventions +// Coding conventions . 
#![forbid(unsafe_code)] //#![deny(missing_docs)] #![allow(dead_code)] diff --git a/packages/rs-dpp/src/state_transition/proof_result.rs b/packages/rs-dpp/src/state_transition/proof_result.rs index ebff592c8c..012326ac30 100644 --- a/packages/rs-dpp/src/state_transition/proof_result.rs +++ b/packages/rs-dpp/src/state_transition/proof_result.rs @@ -5,7 +5,7 @@ use crate::voting::votes::Vote; use platform_value::Identifier; use std::collections::BTreeMap; -#[derive(Debug)] +#[derive(Debug, strum::Display, derive_more::TryInto)] pub enum StateTransitionProofResult { VerifiedDataContract(DataContract), VerifiedIdentity(Identity), diff --git a/packages/rs-dpp/src/state_transition/state_transitions/identity/identity_credit_transfer_transition/v0/v0_methods.rs b/packages/rs-dpp/src/state_transition/state_transitions/identity/identity_credit_transfer_transition/v0/v0_methods.rs index e04e061f67..cf376b9b83 100644 --- a/packages/rs-dpp/src/state_transition/state_transitions/identity/identity_credit_transfer_transition/v0/v0_methods.rs +++ b/packages/rs-dpp/src/state_transition/state_transitions/identity/identity_credit_transfer_transition/v0/v0_methods.rs @@ -52,21 +52,18 @@ impl IdentityCreditTransferTransitionMethodsV0 for IdentityCreditTransferTransit ); } } - None => { - let key = identity - .get_first_public_key_matching( - Purpose::TRANSFER, - SecurityLevel::full_range().into(), - KeyType::all_key_types().into(), - true, + None => identity + .get_first_public_key_matching( + Purpose::TRANSFER, + SecurityLevel::full_range().into(), + KeyType::all_key_types().into(), + true, + ) + .ok_or_else(|| { + ProtocolError::DesiredKeyWithTypePurposeSecurityLevelMissing( + "no transfer public key".to_string(), ) - .ok_or_else(|| { - ProtocolError::DesiredKeyWithTypePurposeSecurityLevelMissing( - "no transfer public key".to_string(), - ) - })?; - key - } + })?, }; transition.sign_external( diff --git a/packages/rs-drive-abci/Cargo.toml b/packages/rs-drive-abci/Cargo.toml index c8462dc0de..92d0090a28 100644 --- a/packages/rs-drive-abci/Cargo.toml +++ b/packages/rs-drive-abci/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "drive-abci" -version = "1.5.1" +version = "1.7.0" authors = [ "Samuel Westrich ", "Ivan Shumkov ", @@ -58,8 +58,8 @@ file-rotate = { version = "0.7.3" } reopen = { version = "1.0.3" } delegate = { version = "0.13" } regex = { version = "1.8.1" } -metrics = { version = "0.23" } -metrics-exporter-prometheus = { version = "0.15", default-features = false, features = [ +metrics = { version = "0.24" } +metrics-exporter-prometheus = { version = "0.16", default-features = false, features = [ "http-listener", ] } url = { version = "2.3.1" } diff --git a/packages/rs-drive-abci/src/execution/platform_events/protocol_upgrade/perform_events_on_first_block_of_protocol_change/mod.rs b/packages/rs-drive-abci/src/execution/platform_events/protocol_upgrade/perform_events_on_first_block_of_protocol_change/mod.rs index 56c1e17c6a..7a101b4e34 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/protocol_upgrade/perform_events_on_first_block_of_protocol_change/mod.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/protocol_upgrade/perform_events_on_first_block_of_protocol_change/mod.rs @@ -61,7 +61,7 @@ impl Platform { previous_protocol_version, platform_version, ), - None => return Ok(()), + None => Ok(()), Some(version) => Err(Error::Execution(ExecutionError::UnknownVersionMismatch { method: "perform_events_on_first_block_of_protocol_change".to_string(), known_versions: vec![0], diff 
--git a/packages/rs-drive-abci/src/execution/platform_events/protocol_upgrade/perform_events_on_first_block_of_protocol_change/v0/mod.rs b/packages/rs-drive-abci/src/execution/platform_events/protocol_upgrade/perform_events_on_first_block_of_protocol_change/v0/mod.rs index f5cf202665..d36250ea3e 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/protocol_upgrade/perform_events_on_first_block_of_protocol_change/v0/mod.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/protocol_upgrade/perform_events_on_first_block_of_protocol_change/v0/mod.rs @@ -4,6 +4,8 @@ use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::platform_types::platform_state::PlatformState; use dpp::block::block_info::BlockInfo; use dpp::dashcore::hashes::Hash; +use dpp::data_contracts::SystemDataContract; +use dpp::system_data_contracts::load_system_data_contract; use dpp::version::PlatformVersion; use dpp::version::ProtocolVersion; use drive::drive::identity::key::fetch::{ @@ -52,6 +54,35 @@ impl Platform { )?; } + if previous_protocol_version < 6 && platform_version.protocol_version >= 6 { + self.transition_to_version_6(block_info, transaction, platform_version)?; + } + + Ok(()) + } + + /// Initializes the wallet contract that supports mobile wallets with additional + /// functionality + /// + /// This function is called during the transition from protocol version 5 to protocol version 6 + /// and higher to set up the wallet contract in the platform. + fn transition_to_version_6( + &self, + block_info: &BlockInfo, + transaction: &Transaction, + platform_version: &PlatformVersion, + ) -> Result<(), Error> { + let contract = + load_system_data_contract(SystemDataContract::WalletUtils, platform_version)?; + + self.drive.insert_contract( + &contract, + *block_info, + true, + Some(transaction), + platform_version, + )?; + Ok(()) } diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/check_tx_verification/v0/mod.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/check_tx_verification/v0/mod.rs index acd1515749..c27387ebaa 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/check_tx_verification/v0/mod.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/check_tx_verification/v0/mod.rs @@ -15,7 +15,7 @@ use crate::error::execution::ExecutionError; use crate::execution::check_tx::CheckTxLevel; use crate::execution::types::state_transition_execution_context::StateTransitionExecutionContext; use crate::execution::validation::state_transition::common::asset_lock::proof::verify_is_not_spent::AssetLockProofVerifyIsNotSpent; -use crate::execution::validation::state_transition::processor::v0::{StateTransitionIdentityBalanceValidationV0, StateTransitionBasicStructureValidationV0, StateTransitionNonceValidationV0, StateTransitionIdentityBasedSignatureValidationV0, StateTransitionStructureKnownInStateValidationV0, StateTransitionIsAllowedValidationV0}; +use crate::execution::validation::state_transition::processor::v0::{StateTransitionIdentityBalanceValidationV0, StateTransitionBasicStructureValidationV0, StateTransitionNonceValidationV0, StateTransitionIdentityBasedSignatureValidationV0, StateTransitionStructureKnownInStateValidationV0, StateTransitionIsAllowedValidationV0, StateTransitionHasNonceValidationV0}; use crate::execution::validation::state_transition::ValidationMode; pub(super) fn state_transition_to_execution_event_for_check_tx_v0<'a, C: CoreRPCLike>( @@ -45,7 +45,7 @@ pub(super) fn 
state_transition_to_execution_event_for_check_tx_v0<'a, C: CoreRPC } // Only identity top up and identity create do not have nonces validation - if state_transition.has_nonces_validation() { + if state_transition.has_nonce_validation(platform_version)? { let result = state_transition.validate_nonces( &platform.into(), platform.state.last_block_info(), @@ -240,7 +240,7 @@ pub(super) fn state_transition_to_execution_event_for_check_tx_v0<'a, C: CoreRPC ) } } else { - if state_transition.has_nonces_validation() { + if state_transition.has_nonce_validation(platform_version)? { let result = state_transition.validate_nonces( &platform.into(), platform.state.last_block_info(), diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/processor/v0/mod.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/processor/v0/mod.rs index f1210e3c0f..c3f10fa275 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/processor/v0/mod.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/processor/v0/mod.rs @@ -85,7 +85,7 @@ pub(super) fn process_state_transition_v0<'a, C: CoreRPCLike>( }; // Only identity top up and identity create do not have nonces validation - if state_transition.has_nonces_validation() { + if state_transition.has_nonce_validation(platform_version)? { // Validating identity contract nonce, this must happen after validating the signature let result = state_transition.validate_nonces( &platform.into(), @@ -383,12 +383,11 @@ pub(crate) trait StateTransitionNonceValidationV0 { execution_context: &mut StateTransitionExecutionContext, platform_version: &PlatformVersion, ) -> Result; +} - /// True if the state transition validates nonces, either identity nonces or identity contract - /// nonces - fn has_nonces_validation(&self) -> bool { - true - } +pub(crate) trait StateTransitionHasNonceValidationV0 { + /// True if the state transition has nonces validation. + fn has_nonce_validation(&self, platform_version: &PlatformVersion) -> Result; } /// A trait for validating state transitions within a blockchain. 
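The refactor below replaces the blanket `has_nonces_validation` default with a `has_nonce_validation` method that dispatches on a version number carried in `PlatformVersion` and treats unknown versions as an explicit error. A self-contained sketch of that dispatch pattern (the `Kind` enum and error shape are illustrative stand-ins for the crate's types):

```rust
#[derive(Debug)]
enum ExecutionError {
    UnknownVersionMismatch {
        method: String,
        known_versions: Vec<u64>,
        received: u64,
    },
}

#[allow(dead_code)]
enum Kind {
    MasternodeVote,
    IdentityCreate,
    IdentityTopUp,
    // Documents batch, contract create/update, identity update/transfer/withdrawal, ...
    Other,
}

fn has_nonce_validation(kind: &Kind, method_version: u64) -> Result<bool, ExecutionError> {
    match method_version {
        // v0: masternode votes were not listed, so they were not nonce-validated
        0 => Ok(matches!(kind, Kind::Other)),
        // v1: masternode votes validate nonces too; create and top-up still do not
        1 => Ok(!matches!(kind, Kind::IdentityCreate | Kind::IdentityTopUp)),
        received => Err(ExecutionError::UnknownVersionMismatch {
            method: "has_nonce_validation".to_string(),
            known_versions: vec![0, 1],
            received,
        }),
    }
}

fn main() {
    assert!(!has_nonce_validation(&Kind::MasternodeVote, 0).unwrap());
    assert!(has_nonce_validation(&Kind::MasternodeVote, 1).unwrap());
    assert!(has_nonce_validation(&Kind::Other, 2).is_err());
}
```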
@@ -607,17 +606,50 @@ impl StateTransitionNonceValidationV0 for StateTransition { _ => Ok(SimpleConsensusValidationResult::new()), } } +} - fn has_nonces_validation(&self) -> bool { - matches!( - self, - StateTransition::DocumentsBatch(_) - | StateTransition::DataContractCreate(_) - | StateTransition::DataContractUpdate(_) - | StateTransition::IdentityUpdate(_) - | StateTransition::IdentityCreditTransfer(_) - | StateTransition::IdentityCreditWithdrawal(_) - ) +impl StateTransitionHasNonceValidationV0 for StateTransition { + fn has_nonce_validation(&self, platform_version: &PlatformVersion) -> Result { + match platform_version + .drive_abci + .validation_and_processing + .has_nonce_validation + { + 0 => { + let has_nonce_validation = matches!( + self, + StateTransition::DocumentsBatch(_) + | StateTransition::DataContractCreate(_) + | StateTransition::DataContractUpdate(_) + | StateTransition::IdentityUpdate(_) + | StateTransition::IdentityCreditTransfer(_) + | StateTransition::IdentityCreditWithdrawal(_) + ); + + Ok(has_nonce_validation) + } + 1 => { + // Preferably to use match without wildcard arm (_) to avoid missing cases + // in the future when new state transitions are added + let has_nonce_validation = match self { + StateTransition::DocumentsBatch(_) + | StateTransition::DataContractCreate(_) + | StateTransition::DataContractUpdate(_) + | StateTransition::IdentityUpdate(_) + | StateTransition::IdentityCreditTransfer(_) + | StateTransition::IdentityCreditWithdrawal(_) + | StateTransition::MasternodeVote(_) => true, + StateTransition::IdentityCreate(_) | StateTransition::IdentityTopUp(_) => false, + }; + + Ok(has_nonce_validation) + } + version => Err(Error::Execution(ExecutionError::UnknownVersionMismatch { + method: "StateTransition::has_nonce_validation".to_string(), + known_versions: vec![0, 1], + received: version, + })), + } } } diff --git a/packages/rs-drive-abci/src/lib.rs b/packages/rs-drive-abci/src/lib.rs index 5e475d189a..f9a51dcd1b 100644 --- a/packages/rs-drive-abci/src/lib.rs +++ b/packages/rs-drive-abci/src/lib.rs @@ -30,8 +30,10 @@ pub mod rpc; /// Core utilities pub mod core; + /// Metrics subsystem pub mod metrics; + /// Test helpers and fixtures #[cfg(any(feature = "mocks", test))] pub mod test; diff --git a/packages/rs-drive-abci/src/metrics.rs b/packages/rs-drive-abci/src/metrics.rs index 943f0d5aae..ec77b07467 100644 --- a/packages/rs-drive-abci/src/metrics.rs +++ b/packages/rs-drive-abci/src/metrics.rs @@ -11,7 +11,7 @@ use metrics_exporter_prometheus::PrometheusBuilder; /// Default Prometheus port (29090) pub const DEFAULT_PROMETHEUS_PORT: u16 = 29090; - +/// Last block time in seconds const COUNTER_LAST_BLOCK_TIME: &str = "abci_last_block_time_seconds"; const COUNTER_LAST_HEIGHT: &str = "abci_last_finalized_height"; const HISTOGRAM_FINALIZED_ROUND: &str = "abci_finalized_round"; diff --git a/packages/rs-drive-abci/tests/strategy_tests/main.rs b/packages/rs-drive-abci/tests/strategy_tests/main.rs index 2312241cc6..03bb92bc1a 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/main.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/main.rs @@ -2602,7 +2602,10 @@ mod tests { &simple_signer, &mut rng, platform_version, - ); + ) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) + .collect(); let strategy = NetworkStrategy { strategy: Strategy { @@ -3910,7 +3913,7 @@ mod tests { strategy: Strategy { start_contracts: vec![], operations: vec![Operation { - op_type: OperationType::IdentityTransfer, + op_type: 
OperationType::IdentityTransfer(None), frequency: Frequency { times_per_block_range: 1..3, chance_per_block: None, diff --git a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs index 667b846868..bf3235ea78 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs @@ -41,7 +41,7 @@ use drive_abci::rpc::core::MockCoreRPCLike; use rand::prelude::{IteratorRandom, SliceRandom, StdRng}; use rand::Rng; use strategy_tests::Strategy; -use strategy_tests::transitions::{create_state_transitions_for_identities, create_state_transitions_for_identities_and_proofs, instant_asset_lock_proof_fixture, instant_asset_lock_proof_fixture_with_dynamic_range}; +use strategy_tests::transitions::{create_state_transitions_for_identities, create_state_transitions_for_identities_and_proofs, instant_asset_lock_proof_fixture_with_dynamic_range}; use std::borrow::Cow; use std::collections::{BTreeMap, HashMap, HashSet}; use std::ops::RangeInclusive; @@ -404,8 +404,18 @@ impl NetworkStrategy { ); state_transitions.append(&mut new_transitions); } + // Extend the state transitions with the strategy's hard coded start identities + // Filtering out the ones that have no create transition if !self.strategy.start_identities.hard_coded.is_empty() { - state_transitions.extend(self.strategy.start_identities.hard_coded.clone()); + state_transitions.extend( + self.strategy.start_identities.hard_coded.iter().filter_map( + |(identity, transition)| { + transition.as_ref().map(|create_transition| { + (identity.clone(), create_transition.clone()) + }) + }, + ), + ); } } let frequency = &self.strategy.identity_inserts.frequency; @@ -1196,7 +1206,7 @@ impl NetworkStrategy { operations.push(state_transition); } } - OperationType::IdentityTransfer if current_identities.len() > 1 => { + OperationType::IdentityTransfer(_) if current_identities.len() > 1 => { let identities_clone = current_identities.clone(); // Sender is the first in the list, which should be loaded_identity diff --git a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs index e14f8d7b1b..83834520c0 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs @@ -79,13 +79,17 @@ mod tests { simple_signer.add_keys(keys1); - let start_identities = create_state_transitions_for_identities( - vec![identity1], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) + .collect(); let dpns_contract = platform .drive @@ -363,13 +367,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) + .collect(); let 
dpns_contract = platform .drive @@ -635,13 +643,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) + .collect(); let dpns_contract = platform .drive @@ -988,13 +1000,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) + .collect(); let dpns_contract = platform .drive @@ -1353,13 +1369,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) + .collect(); let dpns_contract = platform .drive diff --git a/packages/rs-drive-proof-verifier/Cargo.toml b/packages/rs-drive-proof-verifier/Cargo.toml index 8d783510c5..e19a9ced1b 100644 --- a/packages/rs-drive-proof-verifier/Cargo.toml +++ b/packages/rs-drive-proof-verifier/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "drive-proof-verifier" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true diff --git a/packages/rs-drive-proof-verifier/src/error.rs b/packages/rs-drive-proof-verifier/src/error.rs index 3203eb7317..3fb5825a8c 100644 --- a/packages/rs-drive-proof-verifier/src/error.rs +++ b/packages/rs-drive-proof-verifier/src/error.rs @@ -15,6 +15,15 @@ pub enum Error { #[error("dash drive: {error}")] DriveError { error: String }, + /// GroveDB error, often for issues with proofs + #[error("grovedb: {error}")] + GroveDBError { + proof_bytes: Vec, + height: u64, + time_ms: u64, + error: String, + }, + /// Dash Protocol error #[error("dash protocol: {error}")] ProtocolError { error: String }, diff --git a/packages/rs-drive-proof-verifier/src/proof.rs b/packages/rs-drive-proof-verifier/src/proof.rs index 3685df1779..6399a3e691 100644 --- a/packages/rs-drive-proof-verifier/src/proof.rs +++ b/packages/rs-drive-proof-verifier/src/proof.rs @@ -274,8 +274,14 @@ impl FromProof for Identity { id.into_buffer(), platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -324,8 +330,14 @@ impl 
FromProof for Identity { public_key_hash, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -391,8 +403,14 @@ impl FromProof for IdentityPublicKeys { false, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; let maybe_keys: Option = if let Some(identity) = maybe_identity { @@ -520,8 +538,14 @@ impl FromProof for IdentityNonceFetcher { false, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -577,8 +601,14 @@ impl FromProof for IdentityContractNo false, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -627,8 +657,14 @@ impl FromProof for IdentityBalance { false, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -678,8 +714,14 @@ impl FromProof for IdentityBalances { &identity_ids, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -726,8 +768,14 @@ impl FromProof for IdentityBalan false, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -775,8 +823,14 @@ impl FromProof for DataContract { id.into_buffer(), platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -827,8 +881,14 @@ impl FromProof for DataContracts { ids.as_slice(), platform_version, ) - .map_err(|e| 
Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -896,8 +956,14 @@ impl FromProof for DataContractHistory offset, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -935,17 +1001,17 @@ impl FromProof for StateTransitionPro error: e.to_string(), })?; - let metadata = response.metadata().or(Err(Error::EmptyResponseMetadata))?; + let mtd = response.metadata().or(Err(Error::EmptyResponseMetadata))?; - if metadata.epoch > MAX_EPOCH as u32 { - return Err(drive::error::Error::Proof(ProofError::InvalidMetadata(format!("platform returned an epoch {} that was higher that maximum of a 16 bit integer", metadata.epoch))).into()); + if mtd.epoch > MAX_EPOCH as u32 { + return Err(drive::error::Error::Proof(ProofError::InvalidMetadata(format!("platform returned an epoch {} that was higher that maximum of a 16 bit integer", mtd.epoch))).into()); } let block_info = BlockInfo { - time_ms: metadata.time_ms, - height: metadata.height, - core_height: metadata.core_chain_locked_height, - epoch: (metadata.epoch as u16).try_into()?, + time_ms: mtd.time_ms, + height: mtd.height, + core_height: mtd.core_chain_locked_height, + epoch: (mtd.epoch as u16).try_into()?, }; let contracts_provider_fn = provider.as_contract_lookup_fn(); @@ -957,13 +1023,19 @@ impl FromProof for StateTransitionPro &contracts_provider_fn, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; - verify_tenderdash_proof(proof, metadata, &root_hash, provider)?; + verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; - Ok((Some(result), metadata.clone(), proof.clone())) + Ok((Some(result), mtd.clone(), proof.clone())) } } @@ -1044,8 +1116,14 @@ impl FromProof for ExtendedEpochInfos { ascending, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; let epoch_info = epoch_info @@ -1093,7 +1171,17 @@ impl FromProof for ProtocolVersionUpgrade let mtd = response.metadata().or(Err(Error::EmptyResponseMetadata))?; let (root_hash, objects) = - Drive::verify_upgrade_state(&proof.grovedb_proof, platform_version)?; + Drive::verify_upgrade_state(&proof.grovedb_proof, platform_version).map_err( + |e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), + }, + )?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1145,7 +1233,16 @@ impl FromProof for MasternodeProtoco start_pro_tx_hash, 
try_u32_to_u16(request_v0.count)?, platform_version, - )?; + ) + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), + })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1250,9 +1347,16 @@ where let (root_hash, documents) = request .verify_proof(&proof.grovedb_proof, platform_version) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; + let documents = documents .into_iter() .map(|d| (d.id(), Some(d))) @@ -1333,8 +1437,14 @@ impl FromProof for IdentitiesContrac false, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1375,8 +1485,14 @@ impl FromProof for ContestedResources { let (root_hash, items) = resolved_request .verify_contests_proof(&proof.grovedb_proof, platform_version) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1419,8 +1535,14 @@ impl FromProof for Contenders { let (root_hash, contested_resource_vote_state) = resolved_request .verify_vote_poll_vote_state_proof(&proof.grovedb_proof, platform_version) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1473,8 +1595,14 @@ impl FromProof for Voters { let (root_hash, voters) = resolved_request .verify_vote_poll_votes_proof(&proof.grovedb_proof, platform_version) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1519,8 +1647,14 @@ impl FromProof for ResourceV &contract_provider_fn, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1563,8 +1697,14 @@ impl FromProof for VotePollsGroupedByTim &proof.grovedb_proof, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + 
height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1602,14 +1742,23 @@ impl FromProof for PrefundedSpe let proof = response.proof().or(Err(Error::NoProofInResult))?; + let mtd = response.metadata().or(Err(Error::EmptyResponseMetadata))?; + let (root_hash, balance) = Drive::verify_specialized_balance( &proof.grovedb_proof, balance_id.into_buffer(), false, platform_version, - )?; - - let mtd = response.metadata().or(Err(Error::EmptyResponseMetadata))?; + ) + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), + })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1707,8 +1856,14 @@ impl FromProof for TotalCreditsInPla mtd.core_chain_locked_height, platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1754,8 +1909,14 @@ impl FromProof for Propose ProposerQueryType::ByIds(ids), platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; @@ -1818,8 +1979,14 @@ impl FromProof for Propo ProposerQueryType::ByRange(limit.map(|l| l as u16), formatted_start), platform_version, ) - .map_err(|e| Error::DriveError { - error: e.to_string(), + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), })?; verify_tenderdash_proof(proof, mtd, &root_hash, provider)?; diff --git a/packages/rs-drive-verify-c-binding/Cargo.toml b/packages/rs-drive-verify-c-binding/Cargo.toml index 1f6d9b4f1e..22da440ca7 100644 --- a/packages/rs-drive-verify-c-binding/Cargo.toml +++ b/packages/rs-drive-verify-c-binding/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "rs-drive-verify-c-binding" -version = "1.1.0" +version = "1.6.2" edition = "2021" rust-version.workspace = true diff --git a/packages/rs-drive/Cargo.toml b/packages/rs-drive/Cargo.toml index 337452f7b2..7af925f26c 100644 --- a/packages/rs-drive/Cargo.toml +++ b/packages/rs-drive/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "drive" description = "Dash drive built on top of GroveDB" -version = "1.5.1" +version = "1.7.0" authors = [ "Samuel Westrich ", "Ivan Shumkov ", diff --git a/packages/rs-drive/src/query/mod.rs b/packages/rs-drive/src/query/mod.rs index 7491454a64..f6aa81deb2 100644 --- a/packages/rs-drive/src/query/mod.rs +++ b/packages/rs-drive/src/query/mod.rs @@ -54,7 +54,7 @@ use dpp::document; use dpp::prelude::Identifier; #[cfg(feature = "server")] use { - crate::{drive::Drive, error::Error::GroveDB, fees::op::LowLevelDriveOperation}, + crate::{drive::Drive, fees::op::LowLevelDriveOperation}, dpp::block::block_info::BlockInfo, }; // Crate-local unconditional imports @@ -141,6 +141,26 @@ pub mod 
drive_contested_document_query; /// A query to get the block counts of proposers in an epoch pub mod proposer_block_count_query; +#[cfg(any(feature = "server", feature = "verify"))] +/// Represents a starting point for a query based on a specific document. +/// +/// This struct encapsulates all the necessary details to define the starting +/// conditions for a query, including the document to start from, its type, +/// associated index property, and whether the document itself should be included +/// in the query results. +#[derive(Debug, Clone)] +pub struct StartAtDocument<'a> { + /// The document that serves as the starting point for the query. + pub document: Document, + + /// The type of the document, providing metadata about its schema and structure. + pub document_type: DocumentTypeRef<'a>, + + /// Indicates whether the starting document itself should be included in the query results. + /// - `true`: The document is included in the results. + /// - `false`: The document is excluded, and the query starts from the next matching document. + pub included: bool, +} #[cfg(any(feature = "server", feature = "verify"))] /// Internal clauses struct #[derive(Clone, Debug, PartialEq, Default)] @@ -898,7 +918,7 @@ impl<'a> DriveDocumentQuery<'a> { let (starts_at_document, start_at_path_query) = match &self.start_at { None => Ok((None, None)), Some(starts_at) => { - // First if we have a startAt or or startsAfter we must get the element + // First if we have a startAt or startsAfter we must get the element // from the backing store let (start_at_document_path, start_at_document_key) = @@ -970,7 +990,7 @@ impl<'a> DriveDocumentQuery<'a> { vec![&start_at_path_query, &main_path_query], &platform_version.drive.grove_version, ) - .map_err(GroveDB)?; + .map_err(Error::GroveDB)?; merged.query.limit = limit.map(|a| a.saturating_add(1)); Ok(merged) } else { @@ -1252,13 +1272,16 @@ impl<'a> DriveDocumentQuery<'a> { #[cfg(any(feature = "server", feature = "verify"))] /// Returns a `Query` that either starts at or after the given document ID if given. fn inner_query_from_starts_at_for_id( - starts_at_document: &Option<(Document, DocumentTypeRef, &IndexProperty, bool)>, + starts_at_document: Option<&StartAtDocument>, left_to_right: bool, ) -> Query { // We only need items after the start at document let mut inner_query = Query::new_with_direction(left_to_right); - if let Some((document, _, _, included)) = starts_at_document { + if let Some(StartAtDocument { + document, included, .. + }) = starts_at_document + { let start_at_key = document.id().to_vec(); if *included { inner_query.insert_range_from(start_at_key..) @@ -1313,18 +1336,19 @@ impl<'a> DriveDocumentQuery<'a> { #[cfg(any(feature = "server", feature = "verify"))] /// Returns a `Query` that either starts at or after the given document if given. - // We are passing in starts_at_document 4 parameters - // The document - // The document type (borrowed) - // The index property (borrowed) - // if the element itself should be included. 
ie StartAt vs StartAfter fn inner_query_from_starts_at( - starts_at_document: &Option<(Document, DocumentTypeRef, &IndexProperty, bool)>, + starts_at_document: Option<&StartAtDocument>, + indexed_property: &IndexProperty, left_to_right: bool, platform_version: &PlatformVersion, ) -> Result { let mut inner_query = Query::new_with_direction(left_to_right); - if let Some((document, document_type, indexed_property, included)) = starts_at_document { + if let Some(StartAtDocument { + document, + document_type, + included, + }) = starts_at_document + { // We only need items after the start at document let start_at_key = document.get_raw_for_document_type( indexed_property.name.as_str(), @@ -1357,55 +1381,171 @@ impl<'a> DriveDocumentQuery<'a> { Ok(inner_query) } + #[cfg(any(feature = "server", feature = "verify"))] + fn recursive_create_query( + left_over_index_properties: &[&IndexProperty], + unique: bool, + starts_at_document: Option<&StartAtDocument>, //for key level, included + indexed_property: &IndexProperty, + order_by: Option<&IndexMap>, + platform_version: &PlatformVersion, + ) -> Result, Error> { + match left_over_index_properties.split_first() { + None => Ok(None), + Some((first, left_over)) => { + let left_to_right = if let Some(order_by) = order_by { + order_by + .get(first.name.as_str()) + .map(|order_clause| order_clause.ascending) + .unwrap_or(first.ascending) + } else { + first.ascending + }; + + let mut inner_query = Self::inner_query_from_starts_at( + starts_at_document, + indexed_property, + left_to_right, + platform_version, + )?; + DriveDocumentQuery::recursive_insert_on_query( + &mut inner_query, + left_over, + unique, + starts_at_document, + left_to_right, + order_by, + platform_version, + )?; + Ok(Some(inner_query)) + } + } + } + #[cfg(any(feature = "server", feature = "verify"))] /// Recursively queries as long as there are leftover index properties. + /// The in_start_at_document_sub_path_needing_conditional is interesting. + /// It indicates whether the start at document should be applied as a conditional + /// For example if we have a tree + /// Root + /// ├── model + /// │ ├── sedan + /// │ │ ├── brand_name + /// │ │ │ ├── Honda + /// │ │ │ │ ├── car_type + /// │ │ │ │ │ ├── Accord + /// │ │ │ │ │ │ ├── 0 + /// │ │ │ │ │ │ │ ├── a47d2... + /// │ │ │ │ │ │ │ ├── e19c8... + /// │ │ │ │ │ │ │ └── f1a7b... + /// │ │ │ │ │ └── Civic + /// │ │ │ │ │ ├── 0 + /// │ │ │ │ │ │ ├── b65a7... + /// │ │ │ │ │ │ └── c43de... + /// │ │ │ ├── Toyota + /// │ │ │ │ ├── car_type + /// │ │ │ │ │ ├── Camry + /// │ │ │ │ │ │ ├── 0 + /// │ │ │ │ │ │ │ └── 1a9d2... + /// │ │ │ │ │ └── Corolla + /// │ │ │ │ │ ├── 0 + /// │ │ │ │ │ │ ├── 3f7b4... + /// │ │ │ │ │ │ ├── 4e8fa... + /// │ │ │ │ │ │ └── 9b1c6... + /// │ ├── suv + /// │ │ ├── brand_name + /// │ │ │ ├── Ford* + /// │ │ │ │ ├── car_type* + /// │ │ │ │ │ ├── Escape* + /// │ │ │ │ │ │ ├── 0 + /// │ │ │ │ │ │ │ ├── 102bc... + /// │ │ │ │ │ │ │ ├── 29f8e... <- Set After this document + /// │ │ │ │ │ │ │ └── 6b1a3... + /// │ │ │ │ │ └── Explorer + /// │ │ │ │ │ ├── 0 + /// │ │ │ │ │ │ ├── b2a9d... + /// │ │ │ │ │ │ └── f4d5c... + /// │ │ │ ├── Nissan + /// │ │ │ │ ├── car_type + /// │ │ │ │ │ ├── Rogue + /// │ │ │ │ │ │ ├── 0 + /// │ │ │ │ │ │ │ ├── 5a9c3... + /// │ │ │ │ │ │ │ └── 7e4b9... + /// │ │ │ │ │ └── Murano + /// │ │ │ │ │ ├── 0 + /// │ │ │ │ │ │ ├── 8f6a2... + /// │ │ │ │ │ │ └── 9c7d4... 
+    /// │ ├── truck
+    /// │ │ ├── brand_name
+    /// │ │ │ ├── Ford
+    /// │ │ │ │ ├── car_type
+    /// │ │ │ │ │ ├── F-150
+    /// │ │ │ │ │ │ ├── 0
+    /// │ │ │ │ │ │ │ ├── 72a3b...
+    /// │ │ │ │ │ │ │ └── 94c8e...
+    /// │ │ │ │ │ └── Ranger
+    /// │ │ │ │ │ ├── 0
+    /// │ │ │ │ │ │ ├── 3f4b1...
+    /// │ │ │ │ │ │ ├── 6e7d2...
+    /// │ │ │ │ │ │ └── 8a1f5...
+    /// │ │ │ ├── Toyota
+    /// │ │ │ │ ├── car_type
+    /// │ │ │ │ │ ├── Tundra
+    /// │ │ │ │ │ │ ├── 0
+    /// │ │ │ │ │ │ │ ├── 7c9a4...
+    /// │ │ │ │ │ │ │ └── a5d1e...
+    /// │ │ │ │ │ └── Tacoma
+    /// │ │ │ │ │ ├── 0
+    /// │ │ │ │ │ │ ├── 1e7f4...
+    /// │ │ │ │ │ │ └── 6b9d3...
+    ///
+    /// Let's say we are asking for SUVs after 29f8e;
+    /// here the * denotes the area needing a conditional.
+    /// We need a conditional subquery on Ford to say only things after Ford (with Ford included)
+    /// We need a conditional subquery on Escape to say only things after Escape (with Escape included)
     fn recursive_insert_on_query(
-        query: Option<&mut Query>,
+        query: &mut Query,
         left_over_index_properties: &[&IndexProperty],
         unique: bool,
-        starts_at_document: &Option<(Document, DocumentTypeRef, &IndexProperty, bool)>, //for key level, included
+        starts_at_document: Option<&StartAtDocument>, //for key level, included
         default_left_to_right: bool,
         order_by: Option<&IndexMap<String, OrderClause>>,
         platform_version: &PlatformVersion,
     ) -> Result<Option<Query>, Error> {
         match left_over_index_properties.split_first() {
             None => {
-                if let Some(query) = query {
-                    match unique {
-                        true => {
-                            query.set_subquery_key(vec![0]);
-
-                            // In the case things are NULL we allow to have multiple values
-                            let inner_query = Self::inner_query_from_starts_at_for_id(
-                                starts_at_document,
-                                true, //for ids we always go left to right
-                            );
-                            query.add_conditional_subquery(
-                                QueryItem::Key(b"".to_vec()),
-                                Some(vec![vec![0]]),
-                                Some(inner_query),
-                            );
-                        }
-                        false => {
-                            query.set_subquery_key(vec![0]);
-                            // we just get all by document id order ascending
-                            let full_query = Self::inner_query_from_starts_at_for_id(
-                                &None,
-                                default_left_to_right,
-                            );
-                            query.set_subquery(full_query);
-
-                            let inner_query = Self::inner_query_from_starts_at_for_id(
-                                starts_at_document,
-                                default_left_to_right,
-                            );
-
-                            query.add_conditional_subquery(
-                                QueryItem::Key(b"".to_vec()),
-                                Some(vec![vec![0]]),
-                                Some(inner_query),
-                            );
-                        }
+                match unique {
+                    true => {
+                        query.set_subquery_key(vec![0]);
+
+                        // In the case things are NULL we allow to have multiple values
+                        let inner_query = Self::inner_query_from_starts_at_for_id(
+                            starts_at_document,
+                            true, //for ids we always go left to right
+                        );
+                        query.add_conditional_subquery(
+                            QueryItem::Key(b"".to_vec()),
+                            Some(vec![vec![0]]),
+                            Some(inner_query),
+                        );
+                    }
+                    false => {
+                        query.set_subquery_key(vec![0]);
+                        // we just get all by document id order ascending
+                        let full_query =
+                            Self::inner_query_from_starts_at_for_id(None, default_left_to_right);
+                        query.set_subquery(full_query);
+
+                        let inner_query = Self::inner_query_from_starts_at_for_id(
+                            starts_at_document,
+                            default_left_to_right,
+                        );
+
+                        query.add_conditional_subquery(
+                            QueryItem::Key(b"".to_vec()),
+                            Some(vec![vec![0]]),
+                            Some(inner_query),
+                        );
                     }
                 }
                 Ok(None)
@@ -1420,79 +1560,223 @@ impl<'a> DriveDocumentQuery<'a> {
                     first.ascending
                 };
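+                // From here the query shape depends on whether a start at document
+                // was provided: with one, this level becomes a range starting at
+                // (or after) that document's value for `first`, plus a conditional
+                // subquery on the exact key; without one we keep every key at this
+                // level and simply recurse into the remaining index properties.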
-                match query {
-                    None => {
-                        let mut inner_query = Self::inner_query_from_starts_at(
-                            starts_at_document,
-                            left_to_right,
-                            platform_version,
-                        )?;
-                        DriveDocumentQuery::recursive_insert_on_query(
-                            Some(&mut inner_query),
-                            left_over,
-                            unique,
-                            starts_at_document,
-                            left_to_right,
-                            order_by,
-                            platform_version,
-                        )?;
-                        Ok(Some(inner_query))
-                    }
-                    Some(query) => {
-                        if let Some((document, document_type, _indexed_property, included)) =
-                            starts_at_document
-                        {
-                            let start_at_key = document
-                                .get_raw_for_document_type(
-                                    first.name.as_str(),
-                                    *document_type,
-                                    None,
-                                    platform_version,
-                                )
-                                .ok()
-                                .flatten();
-
-                            // We should always include if we have left_over
-                            let non_conditional_included =
-                                !left_over.is_empty() | *included | start_at_key.is_none();
-
-                            let mut non_conditional_query = Self::inner_query_starts_from_key(
-                                start_at_key,
-                                left_to_right,
-                                non_conditional_included,
-                            );
-
-                            DriveDocumentQuery::recursive_insert_on_query(
-                                Some(&mut non_conditional_query),
-                                left_over,
-                                unique,
-                                starts_at_document,
-                                left_to_right,
-                                order_by,
-                                platform_version,
-                            )?;
+                if let Some(start_at_document_inner) = starts_at_document {
+                    let StartAtDocument {
+                        document,
+                        document_type,
+                        included,
+                    } = start_at_document_inner;
+                    let start_at_key = document
+                        .get_raw_for_document_type(
+                            first.name.as_str(),
+                            *document_type,
+                            None,
+                            platform_version,
+                        )
+                        .ok()
+                        .flatten();
+
+                    // We should always include if we have left_over
+                    let non_conditional_included =
+                        !left_over.is_empty() || *included || start_at_key.is_none();
+
+                    let mut non_conditional_query = Self::inner_query_starts_from_key(
+                        start_at_key.clone(),
+                        left_to_right,
+                        non_conditional_included,
+                    );
+
+                    // We place None here on purpose; this has been well thought out
+                    // and should not change. The reason is that the path of the start
+                    // at document is used only on the conditional subquery and not on
+                    // the main query.
+                    // For example, in the following
+                    // Our query will be with $ownerId == a3f9b81c4d7e6a9f5b1c3e8a2d9c4f7b
+                    // With start after 8f2d5
+                    // We want to get from 2024-11-17T12:45:00Z
+                    // withdrawal
+                    // ├── $ownerId
+                    // │ ├── a3f9b81c4d7e6a9f5b1c3e8a2d9c4f7b
+                    // │ │ ├── $updatedAt
+                    // │ │ │ ├── 2024-11-17T12:45:00Z <- conditional subquery here
+                    // │ │ │ │ ├── status
+                    // │ │ │ │ │ ├── 0
+                    // │ │ │ │ │ │ ├── 7a9f1...
+                    // │ │ │ │ │ │ └── 4b8c3...
+                    // │ │ │ │ │ ├── 1
+                    // │ │ │ │ │ │ ├── 8f2d5... <- start after
+                    // │ │ │ │ │ │ └── 5c1e4...
+                    // │ │ │ │ │ ├── 2
+                    // │ │ │ │ │ │ ├── 2e7a9...
+                    // │ │ │ │ │ │ └── 1c8b3...
+                    // │ │ │ ├── 2024-11-18T11:25:00Z <- we want all statuses here, so normal subquery, with None as start at document
+                    // │ │ │ │ ├── status
+                    // │ │ │ │ │ ├── 0
+                    // │ │ │ │ │ │ └── 1a4f2...
+                    // │ │ │ │ │ ├── 2
+                    // │ │ │ │ │ │ ├── 3e7a9...
+                    // │ │ │ │ │ │ └── 198b4...
+                    // │ ├── b6d7e9c4a5f2b3d8e1a7c9f4b1e8a3f
+                    // │ │ ├── $updatedAt
+                    // │ │ │ ├── 2024-11-17T13:30:00Z
+                    // │ │ │ │ ├── status
+                    // │ │ │ │ │ ├── 0
+                    // │ │ │ │ │ │ ├── 6d7e2...
+                    // │ │ │ │ │ │ └── 9c7f5...
+                    // │ │ │ │ │ ├── 3
+                    // │ │ │ │ │ │ ├── 3a9b7...
+                    // │ │ │ │ │ │ └── 8e5c4...
+                    // │ │ │ │ │ ├── 4
+                    // │ │ │ │ │ │ ├── 1f7a8...
+                    // │ │ │ │ │ │ └── 2c9b3...
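+                    // In the tree above the conditional subquery is attached to the
+                    // exact key 2024-11-17T12:45:00Z and carries the start after
+                    // document, while every later $updatedAt key gets the ordinary
+                    // subquery built with None, i.e. all statuses and all documents.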
+                    DriveDocumentQuery::recursive_insert_on_query(
+                        &mut non_conditional_query,
+                        left_over,
+                        unique,
+                        None,
+                        left_to_right,
+                        order_by,
+                        platform_version,
+                    )?;
-                            query.set_subquery(non_conditional_query);
-                        } else {
-                            let mut inner_query = Query::new_with_direction(first.ascending);
-                            inner_query.insert_all();
-                            DriveDocumentQuery::recursive_insert_on_query(
-                                Some(&mut inner_query),
-                                left_over,
-                                unique,
-                                starts_at_document,
-                                left_to_right,
-                                order_by,
-                                platform_version,
-                            )?;
-                            query.set_subquery(inner_query);
-                        }
-                        query.set_subquery_key(first.name.as_bytes().to_vec());
-                        Ok(None)
+                    DriveDocumentQuery::recursive_conditional_insert_on_query(
+                        &mut non_conditional_query,
+                        start_at_key,
+                        left_over,
+                        unique,
+                        start_at_document_inner,
+                        left_to_right,
+                        order_by,
+                        platform_version,
+                    )?;
+
+                    query.set_subquery(non_conditional_query);
+                } else {
+                    let mut inner_query = Query::new_with_direction(first.ascending);
+                    inner_query.insert_all();
+                    DriveDocumentQuery::recursive_insert_on_query(
+                        &mut inner_query,
+                        left_over,
+                        unique,
+                        starts_at_document,
+                        left_to_right,
+                        order_by,
+                        platform_version,
+                    )?;
+                    query.set_subquery(inner_query);
+                }
+                query.set_subquery_key(first.name.as_bytes().to_vec());
+                Ok(None)
+            }
+        }
+    }
+
+    #[cfg(any(feature = "server", feature = "verify"))]
+    fn recursive_conditional_insert_on_query(
+        query: &mut Query,
+        conditional_value: Option<Vec<u8>>,
+        left_over_index_properties: &[&IndexProperty],
+        unique: bool,
+        starts_at_document: &StartAtDocument,
+        default_left_to_right: bool,
+        order_by: Option<&IndexMap<String, OrderClause>>,
+        platform_version: &PlatformVersion,
+    ) -> Result<(), Error> {
+        match left_over_index_properties.split_first() {
+            None => {
+                match unique {
+                    true => {
+                        // In the case things are NULL we allow to have multiple values
+                        let inner_query = Self::inner_query_from_starts_at_for_id(
+                            Some(starts_at_document),
+                            true, //for ids we always go left to right
+                        );
+                        query.add_conditional_subquery(
+                            QueryItem::Key(b"".to_vec()),
+                            Some(vec![vec![0]]),
+                            Some(inner_query),
+                        );
+                    }
+                    false => {
+                        let inner_query = Self::inner_query_from_starts_at_for_id(
+                            Some(starts_at_document),
+                            default_left_to_right,
+                        );
+
+                        query.add_conditional_subquery(
+                            QueryItem::Key(conditional_value.unwrap_or_default()),
+                            Some(vec![vec![0]]),
+                            Some(inner_query),
+                        );
+                    }
+                }
+            }
+            Some((first, left_over)) => {
+                let left_to_right = if let Some(order_by) = order_by {
+                    order_by
+                        .get(first.name.as_str())
+                        .map(|order_clause| order_clause.ascending)
+                        .unwrap_or(first.ascending)
+                } else {
+                    first.ascending
+                };
+
+                let StartAtDocument {
+                    document,
+                    document_type,
+                    ..
+ } = starts_at_document; + + let lower_start_at_key = document + .get_raw_for_document_type( + first.name.as_str(), + *document_type, + None, + platform_version, + ) + .ok() + .flatten(); + + // We include it if we are not unique, + // or if we are unique but the value is empty + let non_conditional_included = !unique || lower_start_at_key.is_none(); + + let mut non_conditional_query = Self::inner_query_starts_from_key( + lower_start_at_key.clone(), + left_to_right, + non_conditional_included, + ); + + DriveDocumentQuery::recursive_insert_on_query( + &mut non_conditional_query, + left_over, + unique, + None, + left_to_right, + order_by, + platform_version, + )?; + + DriveDocumentQuery::recursive_conditional_insert_on_query( + &mut non_conditional_query, + lower_start_at_key, + left_over, + unique, + starts_at_document, + left_to_right, + order_by, + platform_version, + )?; + + query.add_conditional_subquery( + QueryItem::Key(conditional_value.unwrap_or_default()), + Some(vec![first.name.as_bytes().to_vec()]), + Some(non_conditional_query), + ); + } } + Ok(()) } #[cfg(any(feature = "server", feature = "verify"))] @@ -1529,8 +1813,7 @@ impl<'a> DriveDocumentQuery<'a> { !(self .internal_clauses .equal_clauses - .get(field.name.as_str()) - .is_some() + .contains_key(field.name.as_str()) || (last_clause.is_some() && last_clause.unwrap().field == field.name) || (subquery_clause.is_some() && subquery_clause.unwrap().field == field.name)) }) @@ -1569,14 +1852,17 @@ impl<'a> DriveDocumentQuery<'a> { let first_index = index.properties.first().ok_or(Error::Drive( DriveError::CorruptedContractIndexes("index must have properties".to_string()), ))?; // Index must have properties - Self::recursive_insert_on_query( - None, + Self::recursive_create_query( left_over_index_properties.as_slice(), index.unique, - &starts_at_document.map(|(document, included)| { - (document, self.document_type, first_index, included) - }), - first_index.ascending, + starts_at_document + .map(|(document, included)| StartAtDocument { + document, + document_type: self.document_type, + included, + }) + .as_ref(), + first_index, None, platform_version, )? @@ -1614,22 +1900,17 @@ impl<'a> DriveDocumentQuery<'a> { match subquery_clause { None => { - // There is a last_clause, but no subquery_clause, we should use the index property of the last clause - // We need to get the terminal indexes unused by clauses. 
- let last_index_property = index - .properties - .iter() - .find(|field| where_clause.field == field.name) - .ok_or(Error::Drive(DriveError::CorruptedContractIndexes( - "index must have last_clause field".to_string(), - )))?; Self::recursive_insert_on_query( - Some(&mut query), + &mut query, left_over_index_properties.as_slice(), index.unique, - &starts_at_document.map(|(document, included)| { - (document, self.document_type, last_index_property, included) - }), + starts_at_document + .map(|(document, included)| StartAtDocument { + document, + document_type: self.document_type, + included, + }) + .as_ref(), left_to_right, Some(&self.order_by), platform_version, @@ -1648,20 +1929,17 @@ impl<'a> DriveDocumentQuery<'a> { order_clause.ascending, platform_version, )?; - let last_index_property = index - .properties - .iter() - .find(|field| subquery_where_clause.field == field.name) - .ok_or(Error::Drive(DriveError::CorruptedContractIndexes( - "index must have subquery_clause field".to_string(), - )))?; Self::recursive_insert_on_query( - Some(&mut subquery), + &mut subquery, left_over_index_properties.as_slice(), index.unique, - &starts_at_document.map(|(document, included)| { - (document, self.document_type, last_index_property, included) - }), + starts_at_document + .map(|(document, included)| StartAtDocument { + document, + document_type: self.document_type, + included, + }) + .as_ref(), left_to_right, Some(&self.order_by), platform_version, diff --git a/packages/rs-drive/src/query/vote_poll_contestant_votes_query.rs b/packages/rs-drive/src/query/vote_poll_contestant_votes_query.rs index f170d6820d..a145237aad 100644 --- a/packages/rs-drive/src/query/vote_poll_contestant_votes_query.rs +++ b/packages/rs-drive/src/query/vote_poll_contestant_votes_query.rs @@ -12,6 +12,7 @@ use crate::fees::op::LowLevelDriveOperation; #[cfg(feature = "server")] use crate::query::GroveError; use crate::query::Query; +use bincode::{Decode, Encode}; #[cfg(feature = "server")] use dpp::block::block_info::BlockInfo; use dpp::identifier::Identifier; @@ -27,7 +28,7 @@ use grovedb::{PathQuery, SizedQuery}; use platform_version::version::PlatformVersion; /// Vote Poll Drive Query struct -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Encode, Decode)] pub struct ContestedDocumentVotePollVotesDriveQuery { /// What vote poll are we asking for? pub vote_poll: ContestedDocumentResourceVotePoll, diff --git a/packages/rs-drive/src/query/vote_poll_vote_state_query.rs b/packages/rs-drive/src/query/vote_poll_vote_state_query.rs index b1fa7413a1..1d2398918d 100644 --- a/packages/rs-drive/src/query/vote_poll_vote_state_query.rs +++ b/packages/rs-drive/src/query/vote_poll_vote_state_query.rs @@ -12,6 +12,7 @@ use crate::error::Error; use crate::fees::op::LowLevelDriveOperation; #[cfg(feature = "server")] use crate::query::GroveError; +use bincode::{Decode, Encode}; use dpp::block::block_info::BlockInfo; use dpp::data_contract::DataContract; use dpp::identifier::Identifier; @@ -36,7 +37,7 @@ use platform_version::version::PlatformVersion; /// /// This enum defines the various types of results that can be returned when querying the drive /// for contested document vote poll information. -#[derive(Debug, PartialEq, Clone, Copy)] +#[derive(Debug, PartialEq, Clone, Copy, Encode, Decode)] pub enum ContestedDocumentVotePollDriveQueryResultType { /// The documents associated with the vote poll are returned in the query result. 
Documents, @@ -90,7 +91,7 @@ impl TryFrom for ContestedDocumentVotePollDriveQueryResultType { } /// Vote Poll Drive Query struct -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Encode, Decode)] pub struct ContestedDocumentVotePollDriveQuery { /// What vote poll are we asking for? pub vote_poll: ContestedDocumentResourceVotePoll, diff --git a/packages/rs-drive/src/query/vote_polls_by_document_type_query.rs b/packages/rs-drive/src/query/vote_polls_by_document_type_query.rs index f85b882c0a..8c23beb792 100644 --- a/packages/rs-drive/src/query/vote_polls_by_document_type_query.rs +++ b/packages/rs-drive/src/query/vote_polls_by_document_type_query.rs @@ -13,6 +13,7 @@ use crate::fees::op::LowLevelDriveOperation; use crate::query::GroveError; use crate::query::Query; use crate::util::object_size_info::DataContractResolvedInfo; +use bincode::{Decode, Encode}; #[cfg(feature = "server")] use dpp::block::block_info::BlockInfo; use dpp::data_contract::accessors::v0::DataContractV0Getters; @@ -32,7 +33,7 @@ use grovedb::{PathQuery, SizedQuery}; use platform_version::version::PlatformVersion; /// Vote Poll Drive Query struct -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Encode, Decode)] pub struct VotePollsByDocumentTypeQuery { /// The contract information associated with the document. pub contract_id: Identifier, diff --git a/packages/rs-drive/src/query/vote_polls_by_end_date_query.rs b/packages/rs-drive/src/query/vote_polls_by_end_date_query.rs index fb34e8a619..fa0c62d5a1 100644 --- a/packages/rs-drive/src/query/vote_polls_by_end_date_query.rs +++ b/packages/rs-drive/src/query/vote_polls_by_end_date_query.rs @@ -9,6 +9,7 @@ use crate::fees::op::LowLevelDriveOperation; use crate::query::GroveError; use crate::query::Query; use crate::util::common::encode::{decode_u64, encode_u64}; +use bincode::{Decode, Encode}; #[cfg(feature = "server")] use dpp::block::block_info::BlockInfo; #[cfg(feature = "server")] @@ -29,7 +30,7 @@ use platform_version::version::PlatformVersion; use std::collections::BTreeMap; /// Vote Poll Drive Query struct -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Encode, Decode)] pub struct VotePollsByEndDateDriveQuery { /// What is the start time we are asking for pub start_time: Option<(TimestampMillis, TimestampIncluded)>, diff --git a/packages/rs-drive/src/query/vote_query.rs b/packages/rs-drive/src/query/vote_query.rs index e6fb7863c5..c49927ae10 100644 --- a/packages/rs-drive/src/query/vote_query.rs +++ b/packages/rs-drive/src/query/vote_query.rs @@ -1,12 +1,13 @@ use crate::drive::votes::paths::vote_contested_resource_identity_votes_tree_path_for_identity_vec; use crate::error::Error; use crate::query::Query; +use bincode::{Decode, Encode}; use dpp::identifier::Identifier; use dpp::voting::vote_polls::VotePoll; use grovedb::{PathQuery, SizedQuery}; /// Vote Drive Query struct -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Encode, Decode)] pub struct IdentityBasedVoteDriveQuery { /// The identity who would have made the vote pub identity_id: Identifier, diff --git a/packages/rs-drive/tests/query_tests.rs b/packages/rs-drive/tests/query_tests.rs index 23d8491885..6bad5144f9 100644 --- a/packages/rs-drive/tests/query_tests.rs +++ b/packages/rs-drive/tests/query_tests.rs @@ -68,15 +68,16 @@ use dpp::document::{DocumentV0Getters, DocumentV0Setters}; use dpp::fee::default_costs::CachedEpochIndexFeeVersions; use dpp::identity::TimestampMillis; use dpp::platform_value; +use 
dpp::platform_value::string_encoding::Encoding;
 #[cfg(feature = "server")]
 use dpp::prelude::DataContract;
 use dpp::tests::json_document::json_document_to_contract;
 #[cfg(feature = "server")]
 use dpp::util::cbor_serializer;
-use once_cell::sync::Lazy;
-
 use dpp::version::fee::FeeVersion;
 use dpp::version::PlatformVersion;
+use once_cell::sync::Lazy;
+use rand::prelude::StdRng;
 
 #[cfg(feature = "server")]
 use drive::drive::contract::test_helpers::add_init_contracts_structure_operations;
@@ -434,12 +435,36 @@ struct Domain {
     subdomain_rules: SubdomainRules,
 }
 
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct Withdrawal {
+    #[serde(rename = "$id")]
+    pub id: Identifier, // Unique identifier for the withdrawal
+
+    #[serde(rename = "$ownerId")]
+    pub owner_id: Identifier, // Identity of the withdrawal owner
+
+    #[serde(rename = "$createdAt")]
+    pub created_at: TimestampMillis,
+
+    #[serde(rename = "$updatedAt")]
+    pub updated_at: TimestampMillis,
+
+    pub transaction_index: Option<u64>, // Optional sequential index of the transaction
+    pub transaction_sign_height: Option<u32>, // Optional Core height on which the transaction was signed
+    pub amount: u64,            // Amount to withdraw (minimum: 1000)
+    pub core_fee_per_byte: u32, // Fee in Duffs/Byte (minimum: 1, max: 4294967295)
+    pub pooling: u8,            // Pooling level (enum: 0, 1, 2)
+    pub output_script: Vec<u8>, // Byte array (size: 23-25)
+    pub status: u8,             // Status (enum: 0 - Pending, 1 - Signed, etc.)
+}
+
 #[cfg(feature = "server")]
 #[test]
 fn test_serialization_and_deserialization() {
     let platform_version = PlatformVersion::latest();
-    let domains = Domain::random_domains_in_parent(20, 100, "dash");
+    let domains = Domain::random_domains_in_parent(20, None, 100, "dash");
     let contract = json_document_to_contract(
         "tests/supporting_files/contract/dpns/dpns-contract.json",
         false,
@@ -566,8 +591,10 @@ fn test_serialization_and_deserialization_with_null_values() {
 #[cfg(feature = "server")]
 impl Domain {
     /// Creates `count` random names as domain names for the given parent domain
+    /// If `total_owners` is None, it will create a new owner id per domain.
     fn random_domains_in_parent(
         count: u32,
+        total_owners: Option<u32>,
         seed: u64,
         normalized_parent_domain_name: &str,
     ) -> Vec<Domain> {
         let first_names = common::text_file_strings(
             "tests/supporting_files/contract/family/first-names.txt",
         );
         let mut vec: Vec<Domain> = Vec::with_capacity(count as usize);
+        let mut rng = StdRng::seed_from_u64(seed);
+
+        let owners = if let Some(total_owners) = total_owners {
+            if total_owners == 0 {
+                return vec![];
+            }
+            (0..total_owners)
+                .map(|_| Identifier::random_with_rng(&mut rng))
+                .collect()
+        } else {
+            vec![]
+        };
 
-        let mut rng = rand::rngs::StdRng::seed_from_u64(seed);
         for _i in 0..count {
             let label = first_names.choose(&mut rng).unwrap();
             let domain = Domain {
                 id: Identifier::random_with_rng(&mut rng),
-                owner_id: Identifier::random_with_rng(&mut rng),
+                owner_id: if total_owners.is_some() {
+                    // Pick a random owner from the owners list
+                    *owners.choose(&mut rng).unwrap()
+                } else {
+                    Identifier::random_with_rng(&mut rng)
+                },
                 label: Some(label.clone()),
                 normalized_label: Some(label.to_lowercase()),
                 normalized_parent_domain_name: normalized_parent_domain_name.to_string(),
@@ -599,6 +642,75 @@
     }
 }
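For orientation, a minimal sketch of how the extended helper can be driven from a test (the counts, seed, and owner cap below are illustrative, not taken from this diff):

    // Sketch: 20 random "dash" subdomains spread across at most 2 owners.
    // Passing None instead of Some(2) gives every domain a fresh owner id.
    let domains = Domain::random_domains_in_parent(20, Some(2), 1337, "dash");
    let distinct_owners: std::collections::HashSet<_> =
        domains.iter().map(|d| d.owner_id).collect();
    assert!(distinct_owners.len() <= 2);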
+#[cfg(feature = "server")]
+impl Withdrawal {
+    /// Generate `count` random withdrawals
+    /// If `total_owners` is provided, assigns withdrawals to random owners from a predefined set.
+    pub fn random_withdrawals(count: u32, total_owners: Option<u32>, seed: u64) -> Vec<Withdrawal> {
+        let mut rng = StdRng::seed_from_u64(seed);
+
+        // Generate a list of random owners if `total_owners` is provided
+        let owners: Vec<Identifier> = if let Some(total) = total_owners {
+            (0..total)
+                .map(|_| Identifier::random_with_rng(&mut rng))
+                .collect()
+        } else {
+            vec![]
+        };
+
+        let mut next_transaction_index = 1; // Start transaction index from 1
+
+        let mut next_timestamp = 1732192259000;
+
+        (0..count)
+            .map(|_| {
+                let owner_id = if !owners.is_empty() {
+                    *owners.choose(&mut rng).unwrap()
+                } else {
+                    Identifier::random_with_rng(&mut rng)
+                };
+
+                // Determine the status randomly
+                let status = if rng.gen_bool(0.5) {
+                    0
+                } else {
+                    rng.gen_range(1..=4)
+                }; // 0 = Pending, 1-4 = other statuses
+
+                // Determine transaction index and sign height based on status
+                let (transaction_index, transaction_sign_height) = if status == 0 {
+                    (None, None) // No transaction index or sign height for Pending status
+                } else {
+                    let index = next_transaction_index;
+                    next_transaction_index += 1; // Increment index for next withdrawal
+                    (Some(index), Some(rng.gen_range(1..=500000))) // Set sign height only if transaction index is set
+                };
+
+                let output_script_length = rng.gen_range(23..=25);
+                let output_script: Vec<u8> =
+                    (0..output_script_length).map(|_| rng.gen()).collect();
+
+                let created_at = next_timestamp;
+
+                next_timestamp += rng.gen_range(0..3) * 2000;
+
+                Withdrawal {
+                    id: Identifier::random_with_rng(&mut rng),
+                    owner_id,
+                    transaction_index,
+                    transaction_sign_height,
+                    amount: rng.gen_range(1000..=1_000_000), // Example range (minimum: 1000)
+                    core_fee_per_byte: 0,                    // Always 0
+                    pooling: 0,                              // Always 0
+                    output_script,
+                    status,
+                    created_at,
+                    updated_at: created_at,
+                }
+            })
+            .collect()
+    }
+}
+
 #[cfg(feature = "server")]
 /// Adds `count` random domain names to the given contract
 pub fn add_domains_to_contract(
@@ -606,10 +718,11 @@
     contract: &DataContract,
     transaction: TransactionArg,
     count: u32,
+    total_owners: Option<u32>,
     seed: u64,
 ) {
     let platform_version = PlatformVersion::latest();
-    let domains = Domain::random_domains_in_parent(count, seed, "dash");
+    let domains = Domain::random_domains_in_parent(count, total_owners, seed, "dash");
     let document_type = contract
         .document_type_for_name("domain")
         .expect("expected to get document type");
@@ -641,9 +754,56 @@
     }
 }
 
+#[cfg(feature = "server")]
+/// Adds `count` random withdrawals to the given contract
+pub fn add_withdrawals_to_contract(
+    drive: &Drive,
+    contract: &DataContract,
+    transaction: TransactionArg,
+    count: u32,
+    total_owners: Option<u32>,
+    seed: u64,
+) {
+    let platform_version = PlatformVersion::latest();
+    let withdrawals = Withdrawal::random_withdrawals(count, total_owners, seed);
+    let document_type = contract
+        .document_type_for_name("withdrawal")
+        .expect("expected to get document type");
+    for withdrawal in withdrawals {
+        let value = platform_value::to_value(withdrawal).expect("expected value");
+        let document =
+            Document::from_platform_value(value, platform_version).expect("expected value");
+
+        let storage_flags = Some(Cow::Owned(StorageFlags::SingleEpoch(0)));
+
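+        // Persist each generated withdrawal through Drive so the withdrawal
+        // query tests below operate on a deterministic, committed data set.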
.expect("document should be inserted"); + } +} + #[cfg(feature = "server")] /// Sets up and inserts random domain name data to the DPNS contract to test queries on. -pub fn setup_dpns_tests_with_batches(count: u32, seed: u64) -> (Drive, DataContract) { +pub fn setup_dpns_tests_with_batches( + count: u32, + total_owners: Option, + seed: u64, +) -> (Drive, DataContract) { let drive = setup_drive(Some(DriveConfig::default())); let db_transaction = drive.grove.start_transaction(); @@ -667,7 +827,61 @@ pub fn setup_dpns_tests_with_batches(count: u32, seed: u64) -> (Drive, DataContr Some(&db_transaction), ); - add_domains_to_contract(&drive, &contract, Some(&db_transaction), count, seed); + add_domains_to_contract( + &drive, + &contract, + Some(&db_transaction), + count, + total_owners, + seed, + ); + drive + .grove + .commit_transaction(db_transaction) + .unwrap() + .expect("transaction should be committed"); + + (drive, contract) +} + +#[cfg(feature = "server")] +/// Sets up and inserts random withdrawal to the Withdrawal contract to test queries on. +pub fn setup_withdrawal_tests( + count: u32, + total_owners: Option, + seed: u64, +) -> (Drive, DataContract) { + let drive = setup_drive(Some(DriveConfig::default())); + + let db_transaction = drive.grove.start_transaction(); + + // Create contracts tree + let mut batch = GroveDbOpBatch::new(); + + add_init_contracts_structure_operations(&mut batch); + + let platform_version = PlatformVersion::latest(); + + drive + .grove_apply_batch(batch, false, Some(&db_transaction), &platform_version.drive) + .expect("expected to create contracts tree successfully"); + + // setup code + let contract = setup_contract( + &drive, + "tests/supporting_files/contract/withdrawals/withdrawals-contract.json", + None, + Some(&db_transaction), + ); + + add_withdrawals_to_contract( + &drive, + &contract, + Some(&db_transaction), + count, + total_owners, + seed, + ); drive .grove .commit_transaction(db_transaction) @@ -738,7 +952,7 @@ pub fn setup_dpns_tests_label_not_required(count: u32, seed: u64) -> (Drive, Dat Some(&db_transaction), ); - add_domains_to_contract(&drive, &contract, Some(&db_transaction), count, seed); + add_domains_to_contract(&drive, &contract, Some(&db_transaction), count, None, seed); drive .grove .commit_transaction(db_transaction) @@ -3078,7 +3292,7 @@ fn test_query_with_cached_contract() { #[cfg(feature = "server")] #[test] fn test_dpns_query_contract_verification() { - let (drive, contract) = setup_dpns_tests_with_batches(10, 11456); + let (drive, contract) = setup_dpns_tests_with_batches(10, None, 11456); let platform_version = PlatformVersion::latest(); @@ -3155,7 +3369,7 @@ fn test_contract_keeps_history_fetch_and_verification() { #[cfg(feature = "server")] #[test] fn test_dpns_query() { - let (drive, contract) = setup_dpns_tests_with_batches(10, 11456); + let (drive, contract) = setup_dpns_tests_with_batches(10, None, 11456); let platform_version = PlatformVersion::latest(); @@ -3707,7 +3921,7 @@ fn test_dpns_insertion_with_aliases() { #[test] fn test_dpns_query_start_at() { // The point of this test is to test the situation where we have a start at a certain value for the DPNS query. 
- let (drive, contract) = setup_dpns_tests_with_batches(10, 11456); + let (drive, contract) = setup_dpns_tests_with_batches(10, None, 11456); let platform_version = PlatformVersion::latest(); @@ -3801,7 +4015,7 @@ fn test_dpns_query_start_at() { #[test] fn test_dpns_query_start_after() { // The point of this test is to test the situation where we have a start at a certain value for the DPNS query. - let (drive, contract) = setup_dpns_tests_with_batches(10, 11456); + let (drive, contract) = setup_dpns_tests_with_batches(10, None, 11456); let platform_version = PlatformVersion::latest(); @@ -3895,7 +4109,7 @@ fn test_dpns_query_start_after() { #[test] fn test_dpns_query_start_at_desc() { // The point of this test is to test the situation where we have a start at a certain value for the DPNS query. - let (drive, contract) = setup_dpns_tests_with_batches(10, 11456); + let (drive, contract) = setup_dpns_tests_with_batches(10, None, 11456); let platform_version = PlatformVersion::latest(); @@ -3989,7 +4203,7 @@ fn test_dpns_query_start_at_desc() { #[test] fn test_dpns_query_start_after_desc() { // The point of this test is to test the situation where we have a start at a certain value for the DPNS query. - let (drive, contract) = setup_dpns_tests_with_batches(10, 11456); + let (drive, contract) = setup_dpns_tests_with_batches(10, None, 11456); let platform_version = PlatformVersion::latest(); @@ -4465,7 +4679,8 @@ fn test_dpns_query_start_after_with_null_id() { .expect("we should be able to deserialize the document"); let normalized_label_value = document .get("normalizedLabel") - .expect("we should be able to get the first name"); + .cloned() + .unwrap_or(Value::Null); if normalized_label_value.is_null() { String::from("") } else { @@ -4804,6 +5019,291 @@ fn test_dpns_query_start_after_with_null_id_desc() { assert_eq!(results, proof_results); } +#[cfg(feature = "server")] +#[test] +fn test_withdrawals_query_by_owner_id() { + // We create 10 withdrawals owned by 2 identities + let (drive, contract) = setup_withdrawal_tests(10, Some(2), 11456); + + let platform_version = PlatformVersion::latest(); + + let db_transaction = drive.grove.start_transaction(); + + let root_hash = drive + .grove + .root_hash(Some(&db_transaction), &platform_version.drive.grove_version) + .unwrap() + .expect("there is always a root hash"); + + let expected_app_hash = vec![ + 144, 177, 24, 41, 104, 174, 220, 135, 164, 0, 240, 215, 42, 60, 249, 142, 150, 169, 135, + 72, 151, 35, 238, 131, 164, 229, 106, 83, 198, 109, 65, 211, + ]; + + assert_eq!(root_hash.as_slice(), expected_app_hash); + + // Document Ids are + // document v0 : id:2kTB6gW4wCCnySj3UFUJQM3aUYBd6qDfLCY74BnWmFKu owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:09 updated_at:2024-11-21 12:31:09 amount:(i64)646767 coreFeePerByte:(i64)0 outputScript:bytes 00952c808390e575c8dd29fc07ccfed7b428e1ec2ffcb23e pooling:(i64)0 status:(i64)1 transactionIndex:(i64)4 transactionSignHeight:(i64)303186 + // document v0 : id:3T4aKmidGKA4ETnWYSedm6ETzrcdkfPL2r3D6eg6CSib owner_id:CH1EHBkN5FUuQ7z8ep1abroLPzzYjagvM5XV2NYR3DEh created_at:2024-11-21 12:31:01 updated_at:2024-11-21 12:31:01 amount:(i64)971045 coreFeePerByte:(i64)0 outputScript:bytes 525dfc160c160a7a52ef3301a7e55fccf41d73857f50a55a4d pooling:(i64)0 status:(i64)1 transactionIndex:(i64)2 transactionSignHeight:(i64)248787 + // document v0 : id:3X2QfUfR8EeVZQAKmEjcue5xDv3CZXrfPTgXkZ5vQo13 owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:11 
updated_at:2024-11-21 12:31:11 amount:(i64)122155 coreFeePerByte:(i64)0 outputScript:bytes f76eb8b953ff41040d906c25a4ae42884bedb41a07fc3a pooling:(i64)0 status:(i64)3 transactionIndex:(i64)7 transactionSignHeight:(i64)310881 + // document v0 : id:5ikeRNwvFekr6ex32B4dLEcCaSsgXXHJBx5rJ2rwuhEV owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:30:59 updated_at:2024-11-21 12:30:59 amount:(i64)725014 coreFeePerByte:(i64)0 outputScript:bytes 51f203a755a7ff25ba8645841f80403ee98134690b2c0dd5e2 pooling:(i64)0 status:(i64)3 transactionIndex:(i64)1 transactionSignHeight:(i64)4072 + // document v0 : id:74giZJn9fNczYRsxxh3wVnktJS1vzTiRWYinKK1rRcyj owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:11 updated_at:2024-11-21 12:31:11 amount:(i64)151943 coreFeePerByte:(i64)0 outputScript:bytes 9db03f4c8a51e4e9855e008aae6121911b4831699c53ed pooling:(i64)0 status:(i64)1 transactionIndex:(i64)5 transactionSignHeight:(i64)343099 + // document v0 : id:8iqDAFxTzHYcmUWtcNnCRoj9Fss4HE1G3GP3HhVAZJhn owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:13 updated_at:2024-11-21 12:31:13 amount:(i64)409642 coreFeePerByte:(i64)0 outputScript:bytes 19fe0a2458a47e1726191f4dc94d11bcfacf821d024043 pooling:(i64)0 status:(i64)4 transactionIndex:(i64)8 transactionSignHeight:(i64)304397 + // document v0 : id:BdH274iP17nhquQVY4KMCAM6nwyPRc8AFJkUT91vxhbc owner_id:CH1EHBkN5FUuQ7z8ep1abroLPzzYjagvM5XV2NYR3DEh created_at:2024-11-21 12:31:03 updated_at:2024-11-21 12:31:03 amount:(i64)81005 coreFeePerByte:(i64)0 outputScript:bytes 2666e87b6cc7ddf2b63e7e52c348818c05e5562efa48f5 pooling:(i64)0 status:(i64)0 + // document v0 : id:CCjaU67Pe79Vt51oXvQ5SkyNiypofNX9DS9PYydN9tpD owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:01 updated_at:2024-11-21 12:31:01 amount:(i64)455074 coreFeePerByte:(i64)0 outputScript:bytes acde2e1652771b50a2c68fd330ee1d4b8e115631ce72375432 pooling:(i64)0 status:(i64)3 transactionIndex:(i64)3 transactionSignHeight:(i64)261103 + // document v0 : id:DxFzXvkb2mNQHmeVknsv3gWsc6rMtLk9AsS5zMpy6hou owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:05 updated_at:2024-11-21 12:31:05 amount:(i64)271303 coreFeePerByte:(i64)0 outputScript:bytes 0b845e8c3a4679f1913172f7fd939cc153f458519de8ed3d pooling:(i64)0 status:(i64)0 + // document v0 : id:FDnvFN7e72LcZEojTWNmJTP7uzok3BtvbKnaa5gjqCpW owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:11 updated_at:2024-11-21 12:31:11 amount:(i64)123433 coreFeePerByte:(i64)0 outputScript:bytes 82712473b2d0fc5663afb1a08006913ccccbf38e091a8cc7 pooling:(i64)0 status:(i64)4 transactionIndex:(i64)6 transactionSignHeight:(i64)319518 + + let query_value = json!({ + "where": [ + ["$ownerId", "==", "A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ"] + ], + "limit": 2 + }); + let where_cbor = cbor_serializer::serializable_value_to_cbor(&query_value, None) + .expect("expected to serialize to cbor"); + let domain_document_type = contract + .document_type_for_name("withdrawal") + .expect("contract should have a domain document type"); + let query = DriveDocumentQuery::from_cbor( + where_cbor.as_slice(), + &contract, + domain_document_type, + &drive.config, + ) + .expect("query should be built"); + let (results, _, _) = query + .execute_raw_results_no_proof(&drive, None, Some(&db_transaction), platform_version) + .expect("proof should be executed"); + let names: Vec = results + .iter() + .map(|result| { + let document = + 
Document::from_bytes(result.as_slice(), domain_document_type, platform_version) + .expect("we should be able to deserialize the document"); + document.id().to_string(Encoding::Base58) + }) + .collect(); + + let a_names = [ + "5ikeRNwvFekr6ex32B4dLEcCaSsgXXHJBx5rJ2rwuhEV".to_string(), + "CCjaU67Pe79Vt51oXvQ5SkyNiypofNX9DS9PYydN9tpD".to_string(), + ]; + + assert_eq!(names, a_names); + + let (proof_root_hash, proof_results, _) = query + .execute_with_proof_only_get_elements(&drive, None, None, platform_version) + .expect("we should be able to a proof"); + assert_eq!(root_hash, proof_root_hash); + assert_eq!(results, proof_results); +} + +#[cfg(feature = "server")] +#[test] +fn test_withdrawals_query_start_after_query_by_owner_id() { + // We create 10 withdrawals owned by 2 identities + let (drive, contract) = setup_withdrawal_tests(10, Some(2), 11456); + + let platform_version = PlatformVersion::latest(); + + let db_transaction = drive.grove.start_transaction(); + + let root_hash = drive + .grove + .root_hash(Some(&db_transaction), &platform_version.drive.grove_version) + .unwrap() + .expect("there is always a root hash"); + + let expected_app_hash = vec![ + 144, 177, 24, 41, 104, 174, 220, 135, 164, 0, 240, 215, 42, 60, 249, 142, 150, 169, 135, + 72, 151, 35, 238, 131, 164, 229, 106, 83, 198, 109, 65, 211, + ]; + + assert_eq!(root_hash.as_slice(), expected_app_hash); + + // Document Ids are + // document v0 : id:2kTB6gW4wCCnySj3UFUJQM3aUYBd6qDfLCY74BnWmFKu owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:09 updated_at:2024-11-21 12:31:09 amount:(i64)646767 coreFeePerByte:(i64)0 outputScript:bytes 00952c808390e575c8dd29fc07ccfed7b428e1ec2ffcb23e pooling:(i64)0 status:(i64)1 transactionIndex:(i64)4 transactionSignHeight:(i64)303186 + // document v0 : id:3T4aKmidGKA4ETnWYSedm6ETzrcdkfPL2r3D6eg6CSib owner_id:CH1EHBkN5FUuQ7z8ep1abroLPzzYjagvM5XV2NYR3DEh created_at:2024-11-21 12:31:01 updated_at:2024-11-21 12:31:01 amount:(i64)971045 coreFeePerByte:(i64)0 outputScript:bytes 525dfc160c160a7a52ef3301a7e55fccf41d73857f50a55a4d pooling:(i64)0 status:(i64)1 transactionIndex:(i64)2 transactionSignHeight:(i64)248787 + // document v0 : id:3X2QfUfR8EeVZQAKmEjcue5xDv3CZXrfPTgXkZ5vQo13 owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:11 updated_at:2024-11-21 12:31:11 amount:(i64)122155 coreFeePerByte:(i64)0 outputScript:bytes f76eb8b953ff41040d906c25a4ae42884bedb41a07fc3a pooling:(i64)0 status:(i64)3 transactionIndex:(i64)7 transactionSignHeight:(i64)310881 + // document v0 : id:5ikeRNwvFekr6ex32B4dLEcCaSsgXXHJBx5rJ2rwuhEV owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:30:59 updated_at:2024-11-21 12:30:59 amount:(i64)725014 coreFeePerByte:(i64)0 outputScript:bytes 51f203a755a7ff25ba8645841f80403ee98134690b2c0dd5e2 pooling:(i64)0 status:(i64)3 transactionIndex:(i64)1 transactionSignHeight:(i64)4072 + // document v0 : id:74giZJn9fNczYRsxxh3wVnktJS1vzTiRWYinKK1rRcyj owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:11 updated_at:2024-11-21 12:31:11 amount:(i64)151943 coreFeePerByte:(i64)0 outputScript:bytes 9db03f4c8a51e4e9855e008aae6121911b4831699c53ed pooling:(i64)0 status:(i64)1 transactionIndex:(i64)5 transactionSignHeight:(i64)343099 + // document v0 : id:8iqDAFxTzHYcmUWtcNnCRoj9Fss4HE1G3GP3HhVAZJhn owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:13 updated_at:2024-11-21 12:31:13 amount:(i64)409642 coreFeePerByte:(i64)0 outputScript:bytes 
19fe0a2458a47e1726191f4dc94d11bcfacf821d024043 pooling:(i64)0 status:(i64)4 transactionIndex:(i64)8 transactionSignHeight:(i64)304397 + // document v0 : id:BdH274iP17nhquQVY4KMCAM6nwyPRc8AFJkUT91vxhbc owner_id:CH1EHBkN5FUuQ7z8ep1abroLPzzYjagvM5XV2NYR3DEh created_at:2024-11-21 12:31:03 updated_at:2024-11-21 12:31:03 amount:(i64)81005 coreFeePerByte:(i64)0 outputScript:bytes 2666e87b6cc7ddf2b63e7e52c348818c05e5562efa48f5 pooling:(i64)0 status:(i64)0 + // document v0 : id:CCjaU67Pe79Vt51oXvQ5SkyNiypofNX9DS9PYydN9tpD owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:01 updated_at:2024-11-21 12:31:01 amount:(i64)455074 coreFeePerByte:(i64)0 outputScript:bytes acde2e1652771b50a2c68fd330ee1d4b8e115631ce72375432 pooling:(i64)0 status:(i64)3 transactionIndex:(i64)3 transactionSignHeight:(i64)261103 + // document v0 : id:DxFzXvkb2mNQHmeVknsv3gWsc6rMtLk9AsS5zMpy6hou owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:05 updated_at:2024-11-21 12:31:05 amount:(i64)271303 coreFeePerByte:(i64)0 outputScript:bytes 0b845e8c3a4679f1913172f7fd939cc153f458519de8ed3d pooling:(i64)0 status:(i64)0 + // document v0 : id:FDnvFN7e72LcZEojTWNmJTP7uzok3BtvbKnaa5gjqCpW owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:11 updated_at:2024-11-21 12:31:11 amount:(i64)123433 coreFeePerByte:(i64)0 outputScript:bytes 82712473b2d0fc5663afb1a08006913ccccbf38e091a8cc7 pooling:(i64)0 status:(i64)4 transactionIndex:(i64)6 transactionSignHeight:(i64)319518 + + let query_value = json!({ + "where": [ + ["$ownerId", "==", "A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ"] + ], + "startAfter": "CCjaU67Pe79Vt51oXvQ5SkyNiypofNX9DS9PYydN9tpD", + "limit": 3, + }); + + // This will use the identity recent index + // { + // "name": "identityRecent", + // "properties": [ + // { + // "$ownerId": "asc" + // }, + // { + // "$updatedAt": "asc" + // }, + // { + // "status": "asc" + // } + // ], + // "unique": false + // }, + + let where_cbor = cbor_serializer::serializable_value_to_cbor(&query_value, None) + .expect("expected to serialize to cbor"); + let domain_document_type = contract + .document_type_for_name("withdrawal") + .expect("contract should have a domain document type"); + let query = DriveDocumentQuery::from_cbor( + where_cbor.as_slice(), + &contract, + domain_document_type, + &drive.config, + ) + .expect("query should be built"); + let (results, _, _) = query + .execute_raw_results_no_proof(&drive, None, Some(&db_transaction), platform_version) + .expect("proof should be executed"); + let names: Vec = results + .iter() + .map(|result| { + let document = + Document::from_bytes(result.as_slice(), domain_document_type, platform_version) + .expect("we should be able to deserialize the document"); + document.id().to_string(Encoding::Base58) + }) + .collect(); + + // We only get back 2 values, even though we put limit 3 because the time with status 0 is an + // empty tree and consumes a limit + let a_names = [ + "DxFzXvkb2mNQHmeVknsv3gWsc6rMtLk9AsS5zMpy6hou".to_string(), + "2kTB6gW4wCCnySj3UFUJQM3aUYBd6qDfLCY74BnWmFKu".to_string(), + ]; + + assert_eq!(names, a_names); + + let (proof_root_hash, proof_results, _) = query + .execute_with_proof_only_get_elements(&drive, None, None, platform_version) + .expect("we should be able to a proof"); + assert_eq!(root_hash, proof_root_hash); + assert_eq!(results, proof_results); +} + +#[cfg(feature = "server")] +#[test] +fn test_withdrawals_query_start_after_query_by_owner_id_desc() { + // We create 10 
withdrawals owned by 2 identities + let (drive, contract) = setup_withdrawal_tests(10, Some(2), 11456); + + let platform_version = PlatformVersion::latest(); + + let db_transaction = drive.grove.start_transaction(); + + let root_hash = drive + .grove + .root_hash(Some(&db_transaction), &platform_version.drive.grove_version) + .unwrap() + .expect("there is always a root hash"); + + let expected_app_hash = vec![ + 144, 177, 24, 41, 104, 174, 220, 135, 164, 0, 240, 215, 42, 60, 249, 142, 150, 169, 135, + 72, 151, 35, 238, 131, 164, 229, 106, 83, 198, 109, 65, 211, + ]; + + assert_eq!(root_hash.as_slice(), expected_app_hash); + + // Document Ids are + // document v0 : id:2kTB6gW4wCCnySj3UFUJQM3aUYBd6qDfLCY74BnWmFKu owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:09 updated_at:2024-11-21 12:31:09 amount:(i64)646767 coreFeePerByte:(i64)0 outputScript:bytes 00952c808390e575c8dd29fc07ccfed7b428e1ec2ffcb23e pooling:(i64)0 status:(i64)1 transactionIndex:(i64)4 transactionSignHeight:(i64)303186 + // document v0 : id:3T4aKmidGKA4ETnWYSedm6ETzrcdkfPL2r3D6eg6CSib owner_id:CH1EHBkN5FUuQ7z8ep1abroLPzzYjagvM5XV2NYR3DEh created_at:2024-11-21 12:31:01 updated_at:2024-11-21 12:31:01 amount:(i64)971045 coreFeePerByte:(i64)0 outputScript:bytes 525dfc160c160a7a52ef3301a7e55fccf41d73857f50a55a4d pooling:(i64)0 status:(i64)1 transactionIndex:(i64)2 transactionSignHeight:(i64)248787 + // document v0 : id:3X2QfUfR8EeVZQAKmEjcue5xDv3CZXrfPTgXkZ5vQo13 owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:11 updated_at:2024-11-21 12:31:11 amount:(i64)122155 coreFeePerByte:(i64)0 outputScript:bytes f76eb8b953ff41040d906c25a4ae42884bedb41a07fc3a pooling:(i64)0 status:(i64)3 transactionIndex:(i64)7 transactionSignHeight:(i64)310881 + // document v0 : id:5ikeRNwvFekr6ex32B4dLEcCaSsgXXHJBx5rJ2rwuhEV owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:30:59 updated_at:2024-11-21 12:30:59 amount:(i64)725014 coreFeePerByte:(i64)0 outputScript:bytes 51f203a755a7ff25ba8645841f80403ee98134690b2c0dd5e2 pooling:(i64)0 status:(i64)3 transactionIndex:(i64)1 transactionSignHeight:(i64)4072 + // document v0 : id:74giZJn9fNczYRsxxh3wVnktJS1vzTiRWYinKK1rRcyj owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:11 updated_at:2024-11-21 12:31:11 amount:(i64)151943 coreFeePerByte:(i64)0 outputScript:bytes 9db03f4c8a51e4e9855e008aae6121911b4831699c53ed pooling:(i64)0 status:(i64)1 transactionIndex:(i64)5 transactionSignHeight:(i64)343099 + // document v0 : id:8iqDAFxTzHYcmUWtcNnCRoj9Fss4HE1G3GP3HhVAZJhn owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:13 updated_at:2024-11-21 12:31:13 amount:(i64)409642 coreFeePerByte:(i64)0 outputScript:bytes 19fe0a2458a47e1726191f4dc94d11bcfacf821d024043 pooling:(i64)0 status:(i64)4 transactionIndex:(i64)8 transactionSignHeight:(i64)304397 + // document v0 : id:BdH274iP17nhquQVY4KMCAM6nwyPRc8AFJkUT91vxhbc owner_id:CH1EHBkN5FUuQ7z8ep1abroLPzzYjagvM5XV2NYR3DEh created_at:2024-11-21 12:31:03 updated_at:2024-11-21 12:31:03 amount:(i64)81005 coreFeePerByte:(i64)0 outputScript:bytes 2666e87b6cc7ddf2b63e7e52c348818c05e5562efa48f5 pooling:(i64)0 status:(i64)0 + // document v0 : id:CCjaU67Pe79Vt51oXvQ5SkyNiypofNX9DS9PYydN9tpD owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:01 updated_at:2024-11-21 12:31:01 amount:(i64)455074 coreFeePerByte:(i64)0 outputScript:bytes acde2e1652771b50a2c68fd330ee1d4b8e115631ce72375432 pooling:(i64)0 
status:(i64)3 transactionIndex:(i64)3 transactionSignHeight:(i64)261103 + // document v0 : id:DxFzXvkb2mNQHmeVknsv3gWsc6rMtLk9AsS5zMpy6hou owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:05 updated_at:2024-11-21 12:31:05 amount:(i64)271303 coreFeePerByte:(i64)0 outputScript:bytes 0b845e8c3a4679f1913172f7fd939cc153f458519de8ed3d pooling:(i64)0 status:(i64)0 + // document v0 : id:FDnvFN7e72LcZEojTWNmJTP7uzok3BtvbKnaa5gjqCpW owner_id:A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ created_at:2024-11-21 12:31:11 updated_at:2024-11-21 12:31:11 amount:(i64)123433 coreFeePerByte:(i64)0 outputScript:bytes 82712473b2d0fc5663afb1a08006913ccccbf38e091a8cc7 pooling:(i64)0 status:(i64)4 transactionIndex:(i64)6 transactionSignHeight:(i64)319518 + + let query_value = json!({ + "where": [ + ["$ownerId", "==", "A8GdKdMT7eDvtjnmMXe1Z3YaTtJzZdxNDRkeLb8goFrZ"] + ], + "startAfter": "2kTB6gW4wCCnySj3UFUJQM3aUYBd6qDfLCY74BnWmFKu", + "limit": 3, + "orderBy": [ + ["$updatedAt", "desc"] + ] + }); + + // This will use the identity recent index + // { + // "name": "identityRecent", + // "properties": [ + // { + // "$ownerId": "asc" + // }, + // { + // "$updatedAt": "asc" + // }, + // { + // "status": "asc" + // } + // ], + // "unique": false + // }, + + let where_cbor = cbor_serializer::serializable_value_to_cbor(&query_value, None) + .expect("expected to serialize to cbor"); + let domain_document_type = contract + .document_type_for_name("withdrawal") + .expect("contract should have a domain document type"); + let query = DriveDocumentQuery::from_cbor( + where_cbor.as_slice(), + &contract, + domain_document_type, + &drive.config, + ) + .expect("query should be built"); + let (results, _, _) = query + .execute_raw_results_no_proof(&drive, None, Some(&db_transaction), platform_version) + .expect("proof should be executed"); + let names: Vec = results + .iter() + .map(|result| { + let document = + Document::from_bytes(result.as_slice(), domain_document_type, platform_version) + .expect("we should be able to deserialize the document"); + document.id().to_string(Encoding::Base58) + }) + .collect(); + + // We only get back 2 values, even though we put limit 3 because the time with status 0 is an + // empty tree and consumes a limit + let a_names = [ + "DxFzXvkb2mNQHmeVknsv3gWsc6rMtLk9AsS5zMpy6hou".to_string(), + "CCjaU67Pe79Vt51oXvQ5SkyNiypofNX9DS9PYydN9tpD".to_string(), + ]; + + assert_eq!(names, a_names); + + let (proof_root_hash, proof_results, _) = query + .execute_with_proof_only_get_elements(&drive, None, None, platform_version) + .expect("we should be able to a proof"); + assert_eq!(root_hash, proof_root_hash); + assert_eq!(results, proof_results); +} + #[cfg(feature = "server")] #[test] fn test_query_a_b_c_d_e_contract() { diff --git a/packages/rs-drive/tests/supporting_files/contract/withdrawals/withdrawals-contract.json b/packages/rs-drive/tests/supporting_files/contract/withdrawals/withdrawals-contract.json new file mode 100644 index 0000000000..5e12831bef --- /dev/null +++ b/packages/rs-drive/tests/supporting_files/contract/withdrawals/withdrawals-contract.json @@ -0,0 +1,141 @@ +{ + "$format_version": "0", + "id": "A6Z7WkPjzp8Qe77Av5PNxY2E8JFCYpSVdJ8tZE94PErh", + "ownerId": "B1XbULsStFtFhJoc6qmMKx8a3nH4YCsotupSWoBiFaKr", + "version": 1, + "documentSchemas": { + "withdrawal": { + "description": "Withdrawal document to track underlying withdrawal transactions. 
Withdrawals should be created with IdentityWithdrawalTransition", + "creationRestrictionMode": 2, + "type": "object", + "indices": [ + { + "name": "identityStatus", + "properties": [ + { + "$ownerId": "asc" + }, + { + "status": "asc" + }, + { + "$createdAt": "asc" + } + ], + "unique": false + }, + { + "name": "identityRecent", + "properties": [ + { + "$ownerId": "asc" + }, + { + "$updatedAt": "asc" + }, + { + "status": "asc" + } + ], + "unique": false + }, + { + "name": "pooling", + "properties": [ + { + "status": "asc" + }, + { + "pooling": "asc" + }, + { + "coreFeePerByte": "asc" + }, + { + "$updatedAt": "asc" + } + ], + "unique": false + }, + { + "name": "transaction", + "properties": [ + { + "status": "asc" + }, + { + "transactionIndex": "asc" + } + ], + "unique": false + } + ], + "properties": { + "transactionIndex": { + "type": "integer", + "description": "Sequential index of asset unlock (withdrawal) transaction. Populated when a withdrawal pooled into withdrawal transaction", + "minimum": 1, + "position": 0 + }, + "transactionSignHeight": { + "type": "integer", + "description": "The Core height on which transaction was signed", + "minimum": 1, + "position": 1 + }, + "amount": { + "type": "integer", + "description": "The amount to be withdrawn", + "minimum": 1000, + "position": 2 + }, + "coreFeePerByte": { + "type": "integer", + "description": "This is the fee that you are willing to spend for this transaction in Duffs/Byte", + "minimum": 1, + "maximum": 4294967295, + "position": 3 + }, + "pooling": { + "type": "integer", + "description": "This indicated the level at which Platform should try to pool this transaction", + "enum": [ + 0, + 1, + 2 + ], + "position": 4 + }, + "outputScript": { + "type": "array", + "byteArray": true, + "minItems": 23, + "maxItems": 25, + "position": 5 + }, + "status": { + "type": "integer", + "enum": [ + 0, + 1, + 2, + 3, + 4 + ], + "description": "0 - Pending, 1 - Signed, 2 - Broadcasted, 3 - Complete, 4 - Expired", + "position": 6 + } + }, + "additionalProperties": false, + "required": [ + "$createdAt", + "$updatedAt", + "amount", + "coreFeePerByte", + "pooling", + "outputScript", + "status" + ] + } + } +} \ No newline at end of file diff --git a/packages/rs-json-schema-compatibility-validator/Cargo.toml b/packages/rs-json-schema-compatibility-validator/Cargo.toml index 94bb898e93..aa4763a256 100644 --- a/packages/rs-json-schema-compatibility-validator/Cargo.toml +++ b/packages/rs-json-schema-compatibility-validator/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "json-schema-compatibility-validator" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true authors = ["Ivan Shumkov "] diff --git a/packages/rs-platform-serialization-derive/Cargo.toml b/packages/rs-platform-serialization-derive/Cargo.toml index c672d9c6f9..1017408832 100644 --- a/packages/rs-platform-serialization-derive/Cargo.toml +++ b/packages/rs-platform-serialization-derive/Cargo.toml @@ -2,7 +2,7 @@ name = "platform-serialization-derive" authors = ["Samuel Westrich "] description = "Bincode serialization and deserialization derivations" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true license = "MIT" diff --git a/packages/rs-platform-serialization/Cargo.toml b/packages/rs-platform-serialization/Cargo.toml index 6a001eb453..8b58f6b878 100644 --- a/packages/rs-platform-serialization/Cargo.toml +++ b/packages/rs-platform-serialization/Cargo.toml @@ -2,7 +2,7 @@ name = "platform-serialization" authors = ["Samuel Westrich "] 
description = "Bincode based serialization and deserialization" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true license = "MIT" diff --git a/packages/rs-platform-value-convertible/Cargo.toml b/packages/rs-platform-value-convertible/Cargo.toml index b67b0183f2..f518e7b341 100644 --- a/packages/rs-platform-value-convertible/Cargo.toml +++ b/packages/rs-platform-value-convertible/Cargo.toml @@ -2,7 +2,7 @@ name = "platform-value-convertible" authors = ["Samuel Westrich "] description = "Convertion to and from platform values" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true license = "MIT" diff --git a/packages/rs-platform-value/Cargo.toml b/packages/rs-platform-value/Cargo.toml index 00c240ce38..bbb11d7b77 100644 --- a/packages/rs-platform-value/Cargo.toml +++ b/packages/rs-platform-value/Cargo.toml @@ -2,7 +2,7 @@ name = "platform-value" authors = ["Samuel Westrich "] description = "A simple value module" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true license = "MIT" diff --git a/packages/rs-platform-version/Cargo.toml b/packages/rs-platform-version/Cargo.toml index 818784b2fa..1ad05c6401 100644 --- a/packages/rs-platform-version/Cargo.toml +++ b/packages/rs-platform-version/Cargo.toml @@ -2,7 +2,7 @@ name = "platform-version" authors = ["Samuel Westrich "] description = "Versioning library for Platform" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true license = "MIT" diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/mod.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/mod.rs index c2044bb829..4bd5ff6269 100644 --- a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/mod.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/mod.rs @@ -1,12 +1,14 @@ pub mod v1; pub mod v2; pub mod v3; +pub mod v4; use versioned_feature_core::{FeatureVersion, OptionalFeatureVersion}; #[derive(Clone, Debug, Default)] pub struct DriveAbciValidationVersions { pub state_transitions: DriveAbciStateTransitionValidationVersions, + pub has_nonce_validation: FeatureVersion, pub process_state_transition: FeatureVersion, pub state_transition_to_execution_event_for_check_tx: FeatureVersion, pub penalties: PenaltyAmounts, diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v1.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v1.rs index a617c54afb..20820165d1 100644 --- a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v1.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v1.rs @@ -131,6 +131,7 @@ pub const DRIVE_ABCI_VALIDATION_VERSIONS_V1: DriveAbciValidationVersions = document_update_price_transition_state_validation: 0, }, }, + has_nonce_validation: 0, process_state_transition: 0, state_transition_to_execution_event_for_check_tx: 0, penalties: PenaltyAmounts { diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v2.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v2.rs index 577fc0c74b..7cd1b59353 100644 --- 
a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v2.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v2.rs @@ -131,6 +131,7 @@ pub const DRIVE_ABCI_VALIDATION_VERSIONS_V2: DriveAbciValidationVersions = document_update_price_transition_state_validation: 0, }, }, + has_nonce_validation: 0, process_state_transition: 0, state_transition_to_execution_event_for_check_tx: 0, penalties: PenaltyAmounts { diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v3.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v3.rs index 76197377e6..42f4068cdd 100644 --- a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v3.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v3.rs @@ -131,6 +131,7 @@ pub const DRIVE_ABCI_VALIDATION_VERSIONS_V3: DriveAbciValidationVersions = document_update_price_transition_state_validation: 0, }, }, + has_nonce_validation: 0, process_state_transition: 0, state_transition_to_execution_event_for_check_tx: 0, penalties: PenaltyAmounts { diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v4.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v4.rs new file mode 100644 index 0000000000..ff70d44107 --- /dev/null +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_validation_versions/v4.rs @@ -0,0 +1,147 @@ +use crate::version::drive_abci_versions::drive_abci_validation_versions::{ + DriveAbciAssetLockValidationVersions, DriveAbciDocumentsStateTransitionValidationVersions, + DriveAbciStateTransitionCommonValidationVersions, DriveAbciStateTransitionValidationVersion, + DriveAbciStateTransitionValidationVersions, DriveAbciValidationConstants, + DriveAbciValidationDataTriggerAndBindingVersions, DriveAbciValidationDataTriggerVersions, + DriveAbciValidationVersions, PenaltyAmounts, +}; + +pub const DRIVE_ABCI_VALIDATION_VERSIONS_V4: DriveAbciValidationVersions = + DriveAbciValidationVersions { + state_transitions: DriveAbciStateTransitionValidationVersions { + common_validation_methods: DriveAbciStateTransitionCommonValidationVersions { + asset_locks: DriveAbciAssetLockValidationVersions { + fetch_asset_lock_transaction_output_sync: 0, + verify_asset_lock_is_not_spent_and_has_enough_balance: 0, + }, + validate_identity_public_key_contract_bounds: 0, + validate_identity_public_key_ids_dont_exist_in_state: 0, + validate_identity_public_key_ids_exist_in_state: 0, + validate_state_transition_identity_signed: 0, + validate_unique_identity_public_key_hashes_in_state: 0, + validate_master_key_uniqueness: 0, + validate_simple_pre_check_balance: 0, + }, + max_asset_lock_usage_attempts: 16, + identity_create_state_transition: DriveAbciStateTransitionValidationVersion { + basic_structure: Some(0), + advanced_structure: Some(0), + identity_signatures: Some(0), + advanced_minimum_balance_pre_check: None, + nonce: None, + state: 0, + transform_into_action: 0, + }, + identity_update_state_transition: DriveAbciStateTransitionValidationVersion { + basic_structure: Some(0), + advanced_structure: Some(0), + identity_signatures: Some(0), + advanced_minimum_balance_pre_check: None, + nonce: Some(0), + state: 0, + transform_into_action: 0, + }, + identity_top_up_state_transition: 
DriveAbciStateTransitionValidationVersion { + basic_structure: Some(0), + advanced_structure: None, + identity_signatures: None, + advanced_minimum_balance_pre_check: None, + nonce: None, + state: 0, + transform_into_action: 0, + }, + identity_credit_withdrawal_state_transition: + DriveAbciStateTransitionValidationVersion { + basic_structure: Some(1), + advanced_structure: None, + identity_signatures: None, + advanced_minimum_balance_pre_check: Some(0), + nonce: Some(0), + state: 0, + transform_into_action: 0, + }, + identity_credit_withdrawal_state_transition_purpose_matches_requirements: 0, + identity_credit_transfer_state_transition: DriveAbciStateTransitionValidationVersion { + basic_structure: Some(0), + advanced_structure: None, + identity_signatures: None, + advanced_minimum_balance_pre_check: Some(0), + nonce: Some(0), + state: 0, + transform_into_action: 0, + }, + masternode_vote_state_transition: DriveAbciStateTransitionValidationVersion { + basic_structure: None, + advanced_structure: Some(0), + identity_signatures: None, + advanced_minimum_balance_pre_check: Some(0), + nonce: Some(0), + state: 0, + transform_into_action: 0, + }, + contract_create_state_transition: DriveAbciStateTransitionValidationVersion { + basic_structure: Some(0), + advanced_structure: None, + identity_signatures: None, + advanced_minimum_balance_pre_check: None, + nonce: Some(0), + state: 0, + transform_into_action: 0, + }, + contract_update_state_transition: DriveAbciStateTransitionValidationVersion { + basic_structure: None, + advanced_structure: None, + identity_signatures: None, + advanced_minimum_balance_pre_check: None, + nonce: Some(0), + state: 0, + transform_into_action: 0, + }, + documents_batch_state_transition: DriveAbciDocumentsStateTransitionValidationVersions { + balance_pre_check: 0, + basic_structure: 0, + advanced_structure: 0, + state: 0, + revision: 0, + transform_into_action: 0, + data_triggers: DriveAbciValidationDataTriggerAndBindingVersions { + bindings: 0, + triggers: DriveAbciValidationDataTriggerVersions { + create_contact_request_data_trigger: 0, + create_domain_data_trigger: 0, + create_identity_data_trigger: 0, + create_feature_flag_data_trigger: 0, + create_masternode_reward_shares_data_trigger: 0, + delete_withdrawal_data_trigger: 0, + reject_data_trigger: 0, + }, + }, + is_allowed: 0, + document_create_transition_structure_validation: 0, + document_delete_transition_structure_validation: 0, + document_replace_transition_structure_validation: 0, + document_transfer_transition_structure_validation: 0, + document_purchase_transition_structure_validation: 0, + document_update_price_transition_structure_validation: 0, + document_create_transition_state_validation: 1, + document_delete_transition_state_validation: 0, + document_replace_transition_state_validation: 0, + document_transfer_transition_state_validation: 0, + document_purchase_transition_state_validation: 0, + document_update_price_transition_state_validation: 0, + }, + }, + has_nonce_validation: 1, + process_state_transition: 0, + state_transition_to_execution_event_for_check_tx: 0, + penalties: PenaltyAmounts { + identity_id_not_correct: 50000000, + unique_key_already_present: 10000000, + validation_of_added_keys_structure_failure: 10000000, + validation_of_added_keys_proof_of_possession_failure: 50000000, + }, + event_constants: DriveAbciValidationConstants { + maximum_vote_polls_to_process: 2, + maximum_contenders_to_consider: 100, + }, + }; diff --git a/packages/rs-platform-version/src/version/mod.rs 
b/packages/rs-platform-version/src/version/mod.rs index 5c9f2d2942..b143a1daf1 100644 --- a/packages/rs-platform-version/src/version/mod.rs +++ b/packages/rs-platform-version/src/version/mod.rs @@ -1,5 +1,5 @@ mod protocol_version; -use crate::version::v5::PROTOCOL_VERSION_5; +use crate::version::v7::PROTOCOL_VERSION_7; pub use protocol_version::*; mod consensus_versions; @@ -17,8 +17,10 @@ pub mod v2; pub mod v3; pub mod v4; pub mod v5; +pub mod v6; +pub mod v7; pub type ProtocolVersion = u32; -pub const LATEST_VERSION: ProtocolVersion = PROTOCOL_VERSION_5; +pub const LATEST_VERSION: ProtocolVersion = PROTOCOL_VERSION_7; pub const INITIAL_PROTOCOL_VERSION: ProtocolVersion = 1; diff --git a/packages/rs-platform-version/src/version/protocol_version.rs b/packages/rs-platform-version/src/version/protocol_version.rs index 67f1b96754..d793384c72 100644 --- a/packages/rs-platform-version/src/version/protocol_version.rs +++ b/packages/rs-platform-version/src/version/protocol_version.rs @@ -20,6 +20,8 @@ use crate::version::v2::PLATFORM_V2; use crate::version::v3::PLATFORM_V3; use crate::version::v4::PLATFORM_V4; use crate::version::v5::PLATFORM_V5; +use crate::version::v6::PLATFORM_V6; +use crate::version::v7::PLATFORM_V7; use crate::version::ProtocolVersion; pub use versioned_feature_core::*; @@ -41,6 +43,8 @@ pub const PLATFORM_VERSIONS: &[PlatformVersion] = &[ PLATFORM_V3, PLATFORM_V4, PLATFORM_V5, + PLATFORM_V6, + PLATFORM_V7, ]; #[cfg(feature = "mock-versions")] @@ -49,7 +53,7 @@ pub static PLATFORM_TEST_VERSIONS: OnceLock> = OnceLock::ne #[cfg(feature = "mock-versions")] const DEFAULT_PLATFORM_TEST_VERSIONS: &[PlatformVersion] = &[TEST_PLATFORM_V2, TEST_PLATFORM_V3]; -pub const LATEST_PLATFORM_VERSION: &PlatformVersion = &PLATFORM_V5; +pub const LATEST_PLATFORM_VERSION: &PlatformVersion = &PLATFORM_V7; pub const DESIRED_PLATFORM_VERSION: &PlatformVersion = LATEST_PLATFORM_VERSION; diff --git a/packages/rs-platform-version/src/version/system_data_contract_versions/mod.rs b/packages/rs-platform-version/src/version/system_data_contract_versions/mod.rs index 53af3b5ce0..cc444b25e9 100644 --- a/packages/rs-platform-version/src/version/system_data_contract_versions/mod.rs +++ b/packages/rs-platform-version/src/version/system_data_contract_versions/mod.rs @@ -9,4 +9,5 @@ pub struct SystemDataContractVersions { pub dashpay: FeatureVersion, pub masternode_reward_shares: FeatureVersion, pub feature_flags: FeatureVersion, + pub wallet: FeatureVersion, } diff --git a/packages/rs-platform-version/src/version/system_data_contract_versions/v1.rs b/packages/rs-platform-version/src/version/system_data_contract_versions/v1.rs index 205d6fda2a..a55db9a296 100644 --- a/packages/rs-platform-version/src/version/system_data_contract_versions/v1.rs +++ b/packages/rs-platform-version/src/version/system_data_contract_versions/v1.rs @@ -7,4 +7,5 @@ pub const SYSTEM_DATA_CONTRACT_VERSIONS_V1: SystemDataContractVersions = dashpay: 1, masternode_reward_shares: 1, feature_flags: 1, + wallet: 1, }; diff --git a/packages/rs-platform-version/src/version/v6.rs b/packages/rs-platform-version/src/version/v6.rs new file mode 100644 index 0000000000..b282d1b752 --- /dev/null +++ b/packages/rs-platform-version/src/version/v6.rs @@ -0,0 +1,63 @@ +use crate::version::consensus_versions::ConsensusVersions; +use crate::version::dpp_versions::dpp_asset_lock_versions::v1::DPP_ASSET_LOCK_VERSIONS_V1; +use crate::version::dpp_versions::dpp_contract_versions::v1::CONTRACT_VERSIONS_V1; +use 
crate::version::dpp_versions::dpp_costs_versions::v1::DPP_COSTS_VERSIONS_V1; +use crate::version::dpp_versions::dpp_document_versions::v1::DOCUMENT_VERSIONS_V1; +use crate::version::dpp_versions::dpp_factory_versions::v1::DPP_FACTORY_VERSIONS_V1; +use crate::version::dpp_versions::dpp_identity_versions::v1::IDENTITY_VERSIONS_V1; +use crate::version::dpp_versions::dpp_method_versions::v1::DPP_METHOD_VERSIONS_V1; +use crate::version::dpp_versions::dpp_state_transition_conversion_versions::v2::STATE_TRANSITION_CONVERSION_VERSIONS_V2; +use crate::version::dpp_versions::dpp_state_transition_method_versions::v1::STATE_TRANSITION_METHOD_VERSIONS_V1; +use crate::version::dpp_versions::dpp_state_transition_serialization_versions::v1::STATE_TRANSITION_SERIALIZATION_VERSIONS_V1; +use crate::version::dpp_versions::dpp_state_transition_versions::v2::STATE_TRANSITION_VERSIONS_V2; +use crate::version::dpp_versions::dpp_validation_versions::v2::DPP_VALIDATION_VERSIONS_V2; +use crate::version::dpp_versions::dpp_voting_versions::v2::VOTING_VERSION_V2; +use crate::version::dpp_versions::DPPVersion; +use crate::version::drive_abci_versions::drive_abci_method_versions::v4::DRIVE_ABCI_METHOD_VERSIONS_V4; +use crate::version::drive_abci_versions::drive_abci_query_versions::v1::DRIVE_ABCI_QUERY_VERSIONS_V1; +use crate::version::drive_abci_versions::drive_abci_structure_versions::v1::DRIVE_ABCI_STRUCTURE_VERSIONS_V1; +use crate::version::drive_abci_versions::drive_abci_validation_versions::v4::DRIVE_ABCI_VALIDATION_VERSIONS_V4; +use crate::version::drive_abci_versions::drive_abci_withdrawal_constants::v2::DRIVE_ABCI_WITHDRAWAL_CONSTANTS_V2; +use crate::version::drive_abci_versions::DriveAbciVersion; +use crate::version::drive_versions::v2::DRIVE_VERSION_V2; +use crate::version::fee::v1::FEE_VERSION1; +use crate::version::protocol_version::PlatformVersion; +use crate::version::system_data_contract_versions::v1::SYSTEM_DATA_CONTRACT_VERSIONS_V1; +use crate::version::system_limits::v1::SYSTEM_LIMITS_V1; +use crate::version::ProtocolVersion; + +pub const PROTOCOL_VERSION_6: ProtocolVersion = 6; + +/// This version adds a fix for nonce validation. 
+pub const PLATFORM_V6: PlatformVersion = PlatformVersion { + protocol_version: PROTOCOL_VERSION_6, + drive: DRIVE_VERSION_V2, + drive_abci: DriveAbciVersion { + structs: DRIVE_ABCI_STRUCTURE_VERSIONS_V1, + methods: DRIVE_ABCI_METHOD_VERSIONS_V4, + validation_and_processing: DRIVE_ABCI_VALIDATION_VERSIONS_V4, // Changed to version 4 + withdrawal_constants: DRIVE_ABCI_WITHDRAWAL_CONSTANTS_V2, + query: DRIVE_ABCI_QUERY_VERSIONS_V1, + }, + dpp: DPPVersion { + costs: DPP_COSTS_VERSIONS_V1, + validation: DPP_VALIDATION_VERSIONS_V2, + state_transition_serialization_versions: STATE_TRANSITION_SERIALIZATION_VERSIONS_V1, + state_transition_conversion_versions: STATE_TRANSITION_CONVERSION_VERSIONS_V2, + state_transition_method_versions: STATE_TRANSITION_METHOD_VERSIONS_V1, + state_transitions: STATE_TRANSITION_VERSIONS_V2, + contract_versions: CONTRACT_VERSIONS_V1, + document_versions: DOCUMENT_VERSIONS_V1, + identity_versions: IDENTITY_VERSIONS_V1, + voting_versions: VOTING_VERSION_V2, + asset_lock_versions: DPP_ASSET_LOCK_VERSIONS_V1, + methods: DPP_METHOD_VERSIONS_V1, + factory_versions: DPP_FACTORY_VERSIONS_V1, + }, + system_data_contracts: SYSTEM_DATA_CONTRACT_VERSIONS_V1, + fee_version: FEE_VERSION1, + system_limits: SYSTEM_LIMITS_V1, + consensus: ConsensusVersions { + tenderdash_consensus_version: 1, + }, +}; diff --git a/packages/rs-platform-version/src/version/v7.rs b/packages/rs-platform-version/src/version/v7.rs new file mode 100644 index 0000000000..ee53268997 --- /dev/null +++ b/packages/rs-platform-version/src/version/v7.rs @@ -0,0 +1,64 @@ +use crate::version::consensus_versions::ConsensusVersions; +use crate::version::dpp_versions::dpp_asset_lock_versions::v1::DPP_ASSET_LOCK_VERSIONS_V1; +use crate::version::dpp_versions::dpp_contract_versions::v1::CONTRACT_VERSIONS_V1; +use crate::version::dpp_versions::dpp_costs_versions::v1::DPP_COSTS_VERSIONS_V1; +use crate::version::dpp_versions::dpp_document_versions::v1::DOCUMENT_VERSIONS_V1; +use crate::version::dpp_versions::dpp_factory_versions::v1::DPP_FACTORY_VERSIONS_V1; +use crate::version::dpp_versions::dpp_identity_versions::v1::IDENTITY_VERSIONS_V1; +use crate::version::dpp_versions::dpp_method_versions::v1::DPP_METHOD_VERSIONS_V1; +use crate::version::dpp_versions::dpp_state_transition_conversion_versions::v2::STATE_TRANSITION_CONVERSION_VERSIONS_V2; +use crate::version::dpp_versions::dpp_state_transition_method_versions::v1::STATE_TRANSITION_METHOD_VERSIONS_V1; +use crate::version::dpp_versions::dpp_state_transition_serialization_versions::v1::STATE_TRANSITION_SERIALIZATION_VERSIONS_V1; +use crate::version::dpp_versions::dpp_state_transition_versions::v2::STATE_TRANSITION_VERSIONS_V2; +use crate::version::dpp_versions::dpp_validation_versions::v2::DPP_VALIDATION_VERSIONS_V2; +use crate::version::dpp_versions::dpp_voting_versions::v2::VOTING_VERSION_V2; +use crate::version::dpp_versions::DPPVersion; +use crate::version::drive_abci_versions::drive_abci_method_versions::v4::DRIVE_ABCI_METHOD_VERSIONS_V4; +use crate::version::drive_abci_versions::drive_abci_query_versions::v1::DRIVE_ABCI_QUERY_VERSIONS_V1; +use crate::version::drive_abci_versions::drive_abci_structure_versions::v1::DRIVE_ABCI_STRUCTURE_VERSIONS_V1; +use crate::version::drive_abci_versions::drive_abci_validation_versions::v4::DRIVE_ABCI_VALIDATION_VERSIONS_V4; +use crate::version::drive_abci_versions::drive_abci_withdrawal_constants::v2::DRIVE_ABCI_WITHDRAWAL_CONSTANTS_V2; +use crate::version::drive_abci_versions::DriveAbciVersion; +use 
crate::version::drive_versions::v2::DRIVE_VERSION_V2; +use crate::version::fee::v1::FEE_VERSION1; +use crate::version::protocol_version::PlatformVersion; +use crate::version::system_data_contract_versions::v1::SYSTEM_DATA_CONTRACT_VERSIONS_V1; +use crate::version::system_limits::v1::SYSTEM_LIMITS_V1; +use crate::version::ProtocolVersion; + +pub const PROTOCOL_VERSION_7: ProtocolVersion = 7; + +/// This version adds token support. +//todo: make changes +pub const PLATFORM_V7: PlatformVersion = PlatformVersion { + protocol_version: PROTOCOL_VERSION_7, + drive: DRIVE_VERSION_V2, + drive_abci: DriveAbciVersion { + structs: DRIVE_ABCI_STRUCTURE_VERSIONS_V1, + methods: DRIVE_ABCI_METHOD_VERSIONS_V4, + validation_and_processing: DRIVE_ABCI_VALIDATION_VERSIONS_V4, + withdrawal_constants: DRIVE_ABCI_WITHDRAWAL_CONSTANTS_V2, + query: DRIVE_ABCI_QUERY_VERSIONS_V1, + }, + dpp: DPPVersion { + costs: DPP_COSTS_VERSIONS_V1, + validation: DPP_VALIDATION_VERSIONS_V2, + state_transition_serialization_versions: STATE_TRANSITION_SERIALIZATION_VERSIONS_V1, + state_transition_conversion_versions: STATE_TRANSITION_CONVERSION_VERSIONS_V2, + state_transition_method_versions: STATE_TRANSITION_METHOD_VERSIONS_V1, + state_transitions: STATE_TRANSITION_VERSIONS_V2, + contract_versions: CONTRACT_VERSIONS_V1, + document_versions: DOCUMENT_VERSIONS_V1, + identity_versions: IDENTITY_VERSIONS_V1, + voting_versions: VOTING_VERSION_V2, + asset_lock_versions: DPP_ASSET_LOCK_VERSIONS_V1, + methods: DPP_METHOD_VERSIONS_V1, + factory_versions: DPP_FACTORY_VERSIONS_V1, + }, + system_data_contracts: SYSTEM_DATA_CONTRACT_VERSIONS_V1, + fee_version: FEE_VERSION1, + system_limits: SYSTEM_LIMITS_V1, + consensus: ConsensusVersions { + tenderdash_consensus_version: 1, + }, +}; diff --git a/packages/rs-platform-versioning/Cargo.toml b/packages/rs-platform-versioning/Cargo.toml index 2fd6d2ffa3..3aa0fb8785 100644 --- a/packages/rs-platform-versioning/Cargo.toml +++ b/packages/rs-platform-versioning/Cargo.toml @@ -2,7 +2,7 @@ name = "platform-versioning" authors = ["Samuel Westrich "] description = "Version derivation" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true license = "MIT" diff --git a/packages/rs-sdk/Cargo.toml b/packages/rs-sdk/Cargo.toml index b9b943726c..cde3ebec10 100644 --- a/packages/rs-sdk/Cargo.toml +++ b/packages/rs-sdk/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dash-sdk" -version = "1.5.1" +version = "1.7.0" edition = "2021" [dependencies] diff --git a/packages/rs-sdk/examples/read_contract.rs b/packages/rs-sdk/examples/read_contract.rs index 7ac2cc333d..75e1e1214d 100644 --- a/packages/rs-sdk/examples/read_contract.rs +++ b/packages/rs-sdk/examples/read_contract.rs @@ -3,7 +3,7 @@ use std::{num::NonZeroUsize, str::FromStr}; use clap::Parser; use dash_sdk::{mock::provider::GrpcContextProvider, platform::Fetch, Sdk, SdkBuilder}; use dpp::prelude::{DataContract, Identifier}; -use rs_dapi_client::AddressList; +use rs_dapi_client::{Address, AddressList}; use zeroize::Zeroizing; #[derive(clap::Parser, Debug)] @@ -80,14 +80,14 @@ fn setup_sdk(config: &Config) -> Sdk { // Let's build the Sdk. // First, we need an URI of some Dash Platform DAPI host to connect to and use as seed. - let uri = http::Uri::from_str(&format!( - "http://{}:{}", + let address = Address::from_str(&format!( + "https://{}:{}", config.server_address, config.platform_port )) .expect("parse uri"); // Now, we create the Sdk with the wallet and context provider. 
- let sdk = SdkBuilder::new(AddressList::from_iter([uri])) + let sdk = SdkBuilder::new(AddressList::from_iter([address])) .build() .expect("cannot build sdk"); diff --git a/packages/rs-sdk/src/core/transaction.rs b/packages/rs-sdk/src/core/transaction.rs index a71a6f664c..39d196b57a 100644 --- a/packages/rs-sdk/src/core/transaction.rs +++ b/packages/rs-sdk/src/core/transaction.rs @@ -57,7 +57,7 @@ impl Sdk { self.execute(core_transactions_stream, RequestSettings::default()) .await .into_inner() - .map_err(|e| Error::DapiClientError(e.to_string())) + .map_err(|e| e.into()) } /// Waits for a response for the asset lock proof diff --git a/packages/rs-sdk/src/error.rs b/packages/rs-sdk/src/error.rs index 2d0ba29a2b..23def69d1a 100644 --- a/packages/rs-sdk/src/error.rs +++ b/packages/rs-sdk/src/error.rs @@ -1,4 +1,5 @@ //! Definitions of errors +use dapi_grpc::tonic::Code; use dpp::consensus::ConsensusError; use dpp::serialization::PlatformDeserializable; use dpp::version::PlatformVersionError; @@ -56,6 +57,10 @@ pub enum Error { /// SDK operation timeout reached error #[error("SDK operation timeout {} secs reached: {1}", .0.as_secs())] TimeoutReached(Duration, String), + + /// Returned when an attempt is made to create an object that already exists in the system + #[error("Object already exists: {0}")] + AlreadyExists(String), /// Generic error // TODO: Use domain specific errors instead of generic ones #[error("SDK error: {0}")] @@ -78,6 +83,7 @@ pub enum Error { impl From for Error { fn from(value: DapiClientError) -> Self { if let DapiClientError::Transport(TransportError::Grpc(status)) = &value { + // If we have some consensus error metadata, we deserialize it and return as ConsensusError if let Some(consensus_error_value) = status .metadata() .get_bin("dash-serialized-consensus-error-bin") @@ -88,11 +94,18 @@ impl From for Error { .map(|consensus_error| { Self::Protocol(ProtocolError::ConsensusError(Box::new(consensus_error))) }) - .unwrap_or_else(Self::Protocol); + .unwrap_or_else(|e| { + tracing::debug!("Failed to deserialize consensus error: {}", e); + Self::Protocol(e) + }); + } + // Otherwise we parse the error code and act accordingly + if status.code() == Code::AlreadyExists { + return Self::AlreadyExists(status.message().to_string()); } } - Self::DapiClientError(format!("{:?}", value)) + Self::DapiClientError(value.to_string()) } } diff --git a/packages/rs-sdk/src/lib.rs b/packages/rs-sdk/src/lib.rs index 1f928ab6db..f965730c83 100644 --- a/packages/rs-sdk/src/lib.rs +++ b/packages/rs-sdk/src/lib.rs @@ -75,6 +75,7 @@ pub use dashcore_rpc; pub use dpp; pub use drive; pub use drive_proof_verifier::types as query_types; +pub use drive_proof_verifier::Error as ProofVerifierError; pub use rs_dapi_client as dapi_client; pub mod sync; diff --git a/packages/rs-sdk/src/platform/fetch.rs b/packages/rs-sdk/src/platform/fetch.rs index 80564fbdf2..7fdf5e1974 100644 --- a/packages/rs-sdk/src/platform/fetch.rs +++ b/packages/rs-sdk/src/platform/fetch.rs @@ -195,7 +195,7 @@ where .dapi_client_settings .override_by(settings.unwrap_or_default()); - retry(settings, fut).await.into_inner() + retry(sdk.address_list(), settings, fut).await.into_inner() } /// Fetch single object from Platform. 
diff --git a/packages/rs-sdk/src/platform/fetch_many.rs b/packages/rs-sdk/src/platform/fetch_many.rs index 4653835557..1fcdb1043a 100644 --- a/packages/rs-sdk/src/platform/fetch_many.rs +++ b/packages/rs-sdk/src/platform/fetch_many.rs @@ -19,7 +19,8 @@ use dapi_grpc::platform::v0::{ GetDataContractsRequest, GetEpochsInfoRequest, GetEvonodesProposedEpochBlocksByIdsRequest, GetEvonodesProposedEpochBlocksByRangeRequest, GetIdentitiesBalancesRequest, GetIdentityKeysRequest, GetPathElementsRequest, GetProtocolVersionUpgradeStateRequest, - GetProtocolVersionUpgradeVoteStatusRequest, GetVotePollsByEndDateRequest, + GetProtocolVersionUpgradeVoteStatusRequest, GetVotePollsByEndDateRequest, Proof, + ResponseMetadata, }; use dashcore_rpc::dashcore::ProTxHash; use dpp::data_contract::DataContract; @@ -145,8 +146,71 @@ where sdk: &Sdk, query: Q, ) -> Result { + Self::fetch_many_with_metadata_and_proof(sdk, query, None) + .await + .map(|(objects, _, _)| objects) + } + + /// Fetch multiple objects from Platform with metadata. + /// + /// Fetch objects from Platform that satisfy the provided [Query]. + /// This method allows you to retrieve the metadata associated with the response. + /// + /// ## Parameters + /// + /// - `sdk`: An instance of [Sdk]. + /// - `query`: A query parameter implementing [`crate::platform::query::Query`] to specify the data to be fetched. + /// - `settings`: An optional `RequestSettings` to give greater flexibility on the request. + /// + /// ## Returns + /// + /// Returns a `Result` containing either: + /// + /// * A tuple `(O, ResponseMetadata)` where `O` is the collection of fetched objects, and `ResponseMetadata` contains metadata about the response. + /// * [`Error`](crate::error::Error) when an error occurs. + /// + /// ## Error Handling + /// + /// Any errors encountered during the execution are returned as [Error] instances. + async fn fetch_many_with_metadata>::Request>>( + sdk: &Sdk, + query: Q, + settings: Option, + ) -> Result<(O, ResponseMetadata), Error> { + Self::fetch_many_with_metadata_and_proof(sdk, query, settings) + .await + .map(|(objects, metadata, _)| (objects, metadata)) + } + + /// Fetch multiple objects from Platform with metadata and underlying proof. + /// + /// Fetch objects from Platform that satisfy the provided [Query]. + /// This method allows you to retrieve the metadata and the underlying proof associated with the response. + /// + /// ## Parameters + /// + /// - `sdk`: An instance of [Sdk]. + /// - `query`: A query parameter implementing [`crate::platform::query::Query`] to specify the data to be fetched. + /// - `settings`: An optional `RequestSettings` to give greater flexibility on the request. + /// + /// ## Returns + /// + /// Returns a `Result` containing either: + /// + /// * A tuple `(O, ResponseMetadata, Proof)` where `O` is the collection of fetched objects, `ResponseMetadata` contains metadata about the response, and `Proof` is the underlying proof. + /// * [`Error`](crate::error::Error) when an error occurs. + /// + /// ## Error Handling + /// + /// Any errors encountered during the execution are returned as [Error] instances. 
+ async fn fetch_many_with_metadata_and_proof>::Request>>( + sdk: &Sdk, + query: Q, + settings: Option, + ) -> Result<(O, ResponseMetadata, Proof), Error> { let request = &query.query(sdk.prove())?; - let closure = |settings: RequestSettings| async move { + + let fut = |settings: RequestSettings| async move { let ExecutionResponse { address, retries, @@ -158,28 +222,37 @@ where .map_err(|e| e.inner_into())?; let object_type = std::any::type_name::().to_string(); - tracing::trace!(request = ?request, response = ?response, ?address, retries, object_type, "fetched object from platform"); + tracing::trace!( + request = ?request, + response = ?response, + ?address, + retries, + object_type, + "fetched objects from platform" + ); - sdk.parse_proof::<>::Request, O>(request.clone(), response) - .await - .map(|o| ExecutionResponse { - inner: o, - retries, - address: address.clone(), - }) - .map_err(|e| ExecutionError { - inner: e, - retries, - address: Some(address), - }) + sdk.parse_proof_with_metadata_and_proof::<>::Request, O>( + request.clone(), + response, + ) + .await + .map_err(|e| ExecutionError { + inner: e, + address: Some(address.clone()), + retries, + }) + .map(|(o, metadata, proof)| ExecutionResponse { + inner: (o.unwrap_or_default(), metadata, proof), + retries, + address: address.clone(), + }) }; - let settings = sdk.dapi_client_settings; + let settings = sdk + .dapi_client_settings + .override_by(settings.unwrap_or_default()); - retry(settings, closure) - .await - .into_inner() - .map(|o| o.unwrap_or_default()) + retry(sdk.address_list(), settings, fut).await.into_inner() } /// Fetch multiple objects from Platform by their identifiers. @@ -254,13 +327,13 @@ impl FetchMany for Document { ) -> Result { let document_query: &DocumentQuery = &query.query(sdk.prove())?; - retry(sdk.dapi_client_settings, |settings| async move { + retry(sdk.address_list(), sdk.dapi_client_settings, |settings| async move { let request = document_query.clone(); let ExecutionResponse { address, retries, - inner: response, + inner: response } = request.execute(sdk, settings).await.map_err(|e| e.inner_into())?; tracing::trace!(request=?document_query, response=?response, ?address, retries, "fetch multiple documents"); diff --git a/packages/rs-sdk/src/platform/fetch_unproved.rs b/packages/rs-sdk/src/platform/fetch_unproved.rs index ac3a682f81..d98d598844 100644 --- a/packages/rs-sdk/src/platform/fetch_unproved.rs +++ b/packages/rs-sdk/src/platform/fetch_unproved.rs @@ -55,7 +55,6 @@ where /// - `settings`: Request settings for the connection to Platform. /// /// ## Returns - /// Returns: /// * `Ok(Some(Self))` when object is found. /// * `Ok(None)` when object is not found. /// * [`Err(Error)`](Error) when an error occurs. 
@@ -107,7 +106,9 @@ where }; let settings = sdk.dapi_client_settings.override_by(settings); - retry(settings, closure).await.into_inner() + retry(sdk.address_list(), settings, closure) + .await + .into_inner() } } diff --git a/packages/rs-sdk/src/platform/transition.rs b/packages/rs-sdk/src/platform/transition.rs index 6bd51a3b2e..c82a494d2d 100644 --- a/packages/rs-sdk/src/platform/transition.rs +++ b/packages/rs-sdk/src/platform/transition.rs @@ -2,7 +2,6 @@ pub mod broadcast; pub(crate) mod broadcast_identity; pub mod broadcast_request; -pub(crate) mod context; pub mod purchase_document; pub mod put_contract; pub mod put_document; @@ -14,8 +13,7 @@ pub mod transfer_document; mod txid; pub mod update_price_of_document; pub mod vote; +pub mod waitable; pub mod withdraw_from_identity; -pub use context::*; - pub use txid::TxId; diff --git a/packages/rs-sdk/src/platform/transition/broadcast.rs b/packages/rs-sdk/src/platform/transition/broadcast.rs index 7e4c6488c1..f7c3f75d32 100644 --- a/packages/rs-sdk/src/platform/transition/broadcast.rs +++ b/packages/rs-sdk/src/platform/transition/broadcast.rs @@ -1,75 +1,154 @@ use super::broadcast_request::BroadcastRequestForStateTransition; +use super::put_settings::PutSettings; use crate::platform::block_info_from_metadata::block_info_from_metadata; +use crate::sync::retry; use crate::{Error, Sdk}; +use dapi_grpc::platform::v0::{Proof, WaitForStateTransitionResultResponse}; use dapi_grpc::platform::VersionedGrpcResponse; use dpp::state_transition::proof_result::StateTransitionProofResult; use dpp::state_transition::StateTransition; use drive::drive::Drive; use drive_proof_verifier::error::ContextProviderError; use drive_proof_verifier::DataContractProvider; -use rs_dapi_client::{DapiRequest, IntoInner, RequestSettings}; +use rs_dapi_client::WrapToExecutionResult; +use rs_dapi_client::{DapiRequest, ExecutionError, InnerInto, IntoInner, RequestSettings}; #[async_trait::async_trait] pub trait BroadcastStateTransition { - async fn broadcast(&self, sdk: &Sdk) -> Result<(), Error>; - async fn broadcast_and_wait( + async fn broadcast(&self, sdk: &Sdk, settings: Option) -> Result<(), Error>; + async fn wait_for_response>( &self, sdk: &Sdk, - time_out_ms: Option, - ) -> Result; + settings: Option, + ) -> Result; + async fn broadcast_and_wait>( + &self, + sdk: &Sdk, + settings: Option, + ) -> Result; } #[async_trait::async_trait] impl BroadcastStateTransition for StateTransition { - async fn broadcast(&self, sdk: &Sdk) -> Result<(), Error> { - let request = self.broadcast_request_for_state_transition()?; + async fn broadcast(&self, sdk: &Sdk, settings: Option) -> Result<(), Error> { + let retry_settings = match settings { + Some(s) => sdk.dapi_client_settings.override_by(s.request_settings), + None => sdk.dapi_client_settings, + }; - request - .execute(sdk, RequestSettings::default()) - .await // TODO: We need better way to handle execution errors - .into_inner()?; + // async fn retry_test_function(settings: RequestSettings) -> ExecutionResult<(), dash_sdk::Error> + let factory = |request_settings: RequestSettings| async move { + let request = + self.broadcast_request_for_state_transition() + .map_err(|e| ExecutionError { + inner: e, + address: None, + retries: 0, + })?; + request + .execute(sdk, request_settings) + .await + .map_err(|e| e.inner_into()) + }; // response is empty for a broadcast, result comes from the stream wait for state transition result - - Ok(()) + retry(sdk.address_list(), retry_settings, factory) + .await + .into_inner() + .map(|_| ()) 
} - - async fn broadcast_and_wait( + async fn wait_for_response>( &self, sdk: &Sdk, - _time_out_ms: Option, - ) -> Result { - let request = self.broadcast_request_for_state_transition()?; - // TODO: Implement retry logic - request - .clone() - .execute(sdk, RequestSettings::default()) - .await - .into_inner()?; + settings: Option, + ) -> Result { + let retry_settings = match settings { + Some(s) => sdk.dapi_client_settings.override_by(s.request_settings), + None => sdk.dapi_client_settings, + }; - let request = self.wait_for_state_transition_result_request()?; + // prepare a factory that will generate closure which executes actual code + let factory = |request_settings: RequestSettings| async move { + let request = self + .wait_for_state_transition_result_request() + .map_err(|e| ExecutionError { + inner: e, + address: None, + retries: 0, + })?; - let response = request - .execute(sdk, RequestSettings::default()) - .await - .into_inner()?; + let response = request.execute(sdk, request_settings).await.inner_into()?; - let block_info = block_info_from_metadata(response.metadata()?)?; - let proof = response.proof_owned()?; - let context_provider = - sdk.context_provider() - .ok_or(Error::from(ContextProviderError::Config( + let grpc_response: &WaitForStateTransitionResultResponse = &response.inner; + let metadata = grpc_response + .metadata() + .wrap_to_execution_result(&response)? + .inner; + let block_info = block_info_from_metadata(metadata) + .wrap_to_execution_result(&response)? + .inner; + let proof: &Proof = (*grpc_response) + .proof() + .wrap_to_execution_result(&response)? + .inner; + + let context_provider = sdk.context_provider().ok_or(ExecutionError { + inner: Error::from(ContextProviderError::Config( "Context provider not initialized".to_string(), - )))?; + )), + address: Some(response.address.clone()), + retries: response.retries, + })?; + + let (_, result) = Drive::verify_state_transition_was_executed_with_proof( + self, + &block_info, + proof.grovedb_proof.as_slice(), + &context_provider.as_contract_lookup_fn(), + sdk.version(), + ) + .wrap_to_execution_result(&response)? + .inner; + + let variant_name = result.to_string(); + T::try_from(result) + .map_err(|_| { + Error::InvalidProvedResponse(format!( + "invalid proved response: cannot convert from {} to {}", + variant_name, + std::any::type_name::(), + )) + }) + .wrap_to_execution_result(&response) + }; - let (_, result) = Drive::verify_state_transition_was_executed_with_proof( - self, - &block_info, - proof.grovedb_proof.as_slice(), - &context_provider.as_contract_lookup_fn(), - sdk.version(), - )?; + let future = retry(sdk.address_list(), retry_settings, factory); + // run the future with or without timeout, depending on the settings + let wait_timeout = settings.and_then(|s| s.wait_timeout); + match wait_timeout { + Some(timeout) => tokio::time::timeout(timeout, future) + .await + .map_err(|e| { + Error::TimeoutReached( + timeout, + format!("Timeout waiting for result of {} (tx id: {}) affecting object {}: {:?}", + self.name(), + self.transaction_id().map(hex::encode).unwrap_or("UNKNOWN".to_string()), + self.unique_identifiers().join(","), + e), + ) + })? 
+ .into_inner(), + None => future.await.into_inner(), + } + } - Ok(result) + async fn broadcast_and_wait>( + &self, + sdk: &Sdk, + settings: Option, + ) -> Result { + self.broadcast(sdk, settings).await?; + self.wait_for_response::(sdk, settings).await } } diff --git a/packages/rs-sdk/src/platform/transition/context.rs b/packages/rs-sdk/src/platform/transition/context.rs deleted file mode 100644 index c2d3f27e82..0000000000 --- a/packages/rs-sdk/src/platform/transition/context.rs +++ /dev/null @@ -1,5 +0,0 @@ -//! Not sure if we need it at all - -pub enum TransitionContext { - Todo, -} diff --git a/packages/rs-sdk/src/platform/transition/purchase_document.rs b/packages/rs-sdk/src/platform/transition/purchase_document.rs index 1ede5c247e..530c1c6b83 100644 --- a/packages/rs-sdk/src/platform/transition/purchase_document.rs +++ b/packages/rs-sdk/src/platform/transition/purchase_document.rs @@ -1,29 +1,21 @@ -use crate::platform::transition::broadcast_request::BroadcastRequestForStateTransition; -use std::sync::Arc; - -use crate::{Error, Sdk}; - -use crate::platform::block_info_from_metadata::block_info_from_metadata; +use super::broadcast::BroadcastStateTransition; +use super::waitable::Waitable; use crate::platform::transition::put_settings::PutSettings; -use dapi_grpc::platform::VersionedGrpcResponse; +use crate::{Error, Sdk}; use dpp::data_contract::document_type::accessors::DocumentTypeV0Getters; use dpp::data_contract::document_type::DocumentType; -use dpp::data_contract::DataContract; -use dpp::document::{Document, DocumentV0Getters}; +use dpp::document::Document; use dpp::fee::Credits; use dpp::identity::signer::Signer; use dpp::identity::IdentityPublicKey; use dpp::prelude::Identifier; use dpp::state_transition::documents_batch_transition::methods::v0::DocumentsBatchTransitionMethodsV0; use dpp::state_transition::documents_batch_transition::DocumentsBatchTransition; -use dpp::state_transition::proof_result::StateTransitionProofResult; use dpp::state_transition::StateTransition; -use drive::drive::Drive; -use rs_dapi_client::{DapiRequest, IntoInner, RequestSettings}; #[async_trait::async_trait] /// A trait for purchasing a document on Platform -pub trait PurchaseDocument { +pub trait PurchaseDocument: Waitable { /// Tries to purchase a document on platform /// Setting settings to `None` sets default connection behavior async fn purchase_document( @@ -37,14 +29,6 @@ pub trait PurchaseDocument { settings: Option, ) -> Result; - /// Waits for the response of a state transition after it has been broadcast - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: StateTransition, - data_contract: Arc, - ) -> Result; - /// Tries to purchase a document on platform and waits for the response async fn purchase_document_and_wait_for_response( &self, @@ -53,8 +37,8 @@ pub trait PurchaseDocument { document_type: DocumentType, purchaser_id: Identifier, identity_public_key: IdentityPublicKey, - data_contract: Arc, signer: &S, + settings: Option, ) -> Result; } @@ -96,60 +80,11 @@ impl PurchaseDocument for Document { None, )?; - let request = transition.broadcast_request_for_state_transition()?; - - request - .clone() - .execute(sdk, settings.request_settings) - .await // TODO: We need better way to handle execution errors - .into_inner()?; - + transition.broadcast(sdk, Some(settings)).await?; // response is empty for a broadcast, result comes from the stream wait for state transition result - Ok(transition) } - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: 
StateTransition, - data_contract: Arc, - ) -> Result { - let request = state_transition.wait_for_state_transition_result_request()?; - // TODO: Implement retry logic - let response = request - .execute(sdk, RequestSettings::default()) - .await - .into_inner()?; - - let block_info = block_info_from_metadata(response.metadata()?)?; - - let proof = response.proof_owned()?; - - let (_, result) = Drive::verify_state_transition_was_executed_with_proof( - &state_transition, - &block_info, - proof.grovedb_proof.as_slice(), - &|_| Ok(Some(data_contract.clone())), - sdk.version(), - )?; - - match result { - StateTransitionProofResult::VerifiedDocuments(mut documents) => { - let document = documents - .remove(self.id_ref()) - .ok_or(Error::InvalidProvedResponse( - "did not prove the sent document".to_string(), - ))? - .ok_or(Error::InvalidProvedResponse( - "expected there to actually be a document".to_string(), - ))?; - Ok(document) - } - _ => Err(Error::DapiClientError("proved a non document".to_string())), - } - } - async fn purchase_document_and_wait_for_response( &self, price: Credits, @@ -157,8 +92,8 @@ impl PurchaseDocument for Document { document_type: DocumentType, purchaser_id: Identifier, identity_public_key: IdentityPublicKey, - data_contract: Arc, signer: &S, + settings: Option, ) -> Result { let state_transition = self .purchase_document( @@ -168,18 +103,10 @@ impl PurchaseDocument for Document { purchaser_id, identity_public_key, signer, - None, + settings, ) .await?; - let document = >::wait_for_response( - self, - sdk, - state_transition, - data_contract, - ) - .await?; - - Ok(document) + Self::wait_for_response(sdk, state_transition, settings).await } } diff --git a/packages/rs-sdk/src/platform/transition/put_contract.rs b/packages/rs-sdk/src/platform/transition/put_contract.rs index a8f07b0b31..9e206f9dd2 100644 --- a/packages/rs-sdk/src/platform/transition/put_contract.rs +++ b/packages/rs-sdk/src/platform/transition/put_contract.rs @@ -1,11 +1,8 @@ -use crate::platform::transition::broadcast_request::BroadcastRequestForStateTransition; use std::collections::BTreeMap; use crate::{Error, Sdk}; -use crate::platform::block_info_from_metadata::block_info_from_metadata; use crate::platform::transition::put_settings::PutSettings; -use dapi_grpc::platform::VersionedGrpcResponse; use dpp::data_contract::accessors::v0::DataContractV0Getters; use dpp::data_contract::DataContract; use dpp::identity::identity_public_key::accessors::v0::IdentityPublicKeyGettersV0; @@ -13,16 +10,14 @@ use dpp::identity::signer::Signer; use dpp::identity::{IdentityPublicKey, PartialIdentity}; use dpp::state_transition::data_contract_create_transition::methods::DataContractCreateTransitionMethodsV0; use dpp::state_transition::data_contract_create_transition::DataContractCreateTransition; -use dpp::state_transition::proof_result::StateTransitionProofResult; use dpp::state_transition::StateTransition; -use drive::drive::Drive; -use drive_proof_verifier::error::ContextProviderError; -use drive_proof_verifier::DataContractProvider; -use rs_dapi_client::{DapiRequest, IntoInner, RequestSettings}; + +use super::broadcast::BroadcastStateTransition; +use super::waitable::Waitable; #[async_trait::async_trait] /// A trait for putting a contract to platform -pub trait PutContract { +pub trait PutContract: Waitable { /// Puts a document on platform /// setting settings to `None` sets default connection behavior async fn put_to_platform( @@ -33,19 +28,13 @@ pub trait PutContract { settings: Option, ) -> Result; - /// Waits for 
the response of a state transition after it has been broadcast - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: StateTransition, - ) -> Result; - /// Puts a contract on platform and waits for the confirmation proof async fn put_to_platform_and_wait_for_response( &self, sdk: &Sdk, identity_public_key: IdentityPublicKey, signer: &S, + settings: Option, ) -> Result; } @@ -81,69 +70,23 @@ impl PutContract for DataContract { None, )?; - let request = transition.broadcast_request_for_state_transition()?; - - request - .clone() - .execute(sdk, settings.unwrap_or_default().request_settings) - .await // TODO: We need better way to handle execution errors - .into_inner()?; - + transition.broadcast(sdk, settings).await?; // response is empty for a broadcast, result comes from the stream wait for state transition result Ok(transition) } - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: StateTransition, - ) -> Result { - let request = state_transition.wait_for_state_transition_result_request()?; - - let response = request - .execute(sdk, RequestSettings::default()) - .await - .into_inner()?; - - let block_info = block_info_from_metadata(response.metadata()?)?; - - let proof = response.proof_owned()?; - let context_provider = - sdk.context_provider() - .ok_or(Error::from(ContextProviderError::Config( - "Context provider not initialized".to_string(), - )))?; - - let (_, result) = Drive::verify_state_transition_was_executed_with_proof( - &state_transition, - &block_info, - proof.grovedb_proof.as_slice(), - &context_provider.as_contract_lookup_fn(), - sdk.version(), - )?; - - //todo verify - - match result { - StateTransitionProofResult::VerifiedDataContract(data_contract) => Ok(data_contract), - _ => Err(Error::DapiClientError("proved a non document".to_string())), - } - } - async fn put_to_platform_and_wait_for_response( &self, sdk: &Sdk, identity_public_key: IdentityPublicKey, signer: &S, + settings: Option, ) -> Result { let state_transition = self - .put_to_platform(sdk, identity_public_key, signer, None) + .put_to_platform(sdk, identity_public_key, signer, settings) .await?; - let data_contract = - >::wait_for_response(self, sdk, state_transition).await?; - - Ok(data_contract) + Self::wait_for_response(sdk, state_transition, settings).await } } diff --git a/packages/rs-sdk/src/platform/transition/put_document.rs b/packages/rs-sdk/src/platform/transition/put_document.rs index 806e640d93..3ef5c5c864 100644 --- a/packages/rs-sdk/src/platform/transition/put_document.rs +++ b/packages/rs-sdk/src/platform/transition/put_document.rs @@ -1,27 +1,19 @@ -use crate::platform::transition::broadcast_request::BroadcastRequestForStateTransition; -use std::sync::Arc; - -use crate::{Error, Sdk}; - -use crate::platform::block_info_from_metadata::block_info_from_metadata; +use super::broadcast::BroadcastStateTransition; +use super::waitable::Waitable; use crate::platform::transition::put_settings::PutSettings; -use dapi_grpc::platform::VersionedGrpcResponse; +use crate::{Error, Sdk}; use dpp::data_contract::document_type::accessors::DocumentTypeV0Getters; use dpp::data_contract::document_type::DocumentType; -use dpp::data_contract::DataContract; use dpp::document::{Document, DocumentV0Getters}; use dpp::identity::signer::Signer; use dpp::identity::IdentityPublicKey; use dpp::state_transition::documents_batch_transition::methods::v0::DocumentsBatchTransitionMethodsV0; use dpp::state_transition::documents_batch_transition::DocumentsBatchTransition; -use 
dpp::state_transition::proof_result::StateTransitionProofResult; use dpp::state_transition::StateTransition; -use drive::drive::Drive; -use rs_dapi_client::{DapiRequest, IntoInner, RequestSettings}; #[async_trait::async_trait] /// A trait for putting a document to platform -pub trait PutDocument { +pub trait PutDocument: Waitable { /// Puts a document on platform /// setting settings to `None` sets default connection behavior async fn put_to_platform( @@ -34,14 +26,6 @@ pub trait PutDocument { settings: Option, ) -> Result; - /// Waits for the response of a state transition after it has been broadcast - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: StateTransition, - data_contract: Arc, - ) -> Result; - /// Puts an identity on platform and waits for the confirmation proof async fn put_to_platform_and_wait_for_response( &self, @@ -49,8 +33,8 @@ pub trait PutDocument { document_type: DocumentType, document_state_transition_entropy: [u8; 32], identity_public_key: IdentityPublicKey, - data_contract: Arc, signer: &S, + settings: Option, ) -> Result; } @@ -90,70 +74,19 @@ impl PutDocument for Document { None, )?; - let request = transition.broadcast_request_for_state_transition()?; - - request - .clone() - .execute(sdk, settings.request_settings) - .await // TODO: We need better way to handle execution errors - .into_inner()?; - // response is empty for a broadcast, result comes from the stream wait for state transition result - + transition.broadcast(sdk, Some(settings)).await?; Ok(transition) } - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: StateTransition, - data_contract: Arc, - ) -> Result { - let request = state_transition.wait_for_state_transition_result_request()?; - // TODO: Implement retry logic - let response = request - .execute(sdk, RequestSettings::default()) - .await - .into_inner()?; - - let block_info = block_info_from_metadata(response.metadata()?)?; - - let proof = response.proof_owned()?; - - let (_, result) = Drive::verify_state_transition_was_executed_with_proof( - &state_transition, - &block_info, - proof.grovedb_proof.as_slice(), - &|_| Ok(Some(data_contract.clone())), - sdk.version(), - )?; - - //todo verify - - match result { - StateTransitionProofResult::VerifiedDocuments(mut documents) => { - let document = documents - .remove(self.id_ref()) - .ok_or(Error::InvalidProvedResponse( - "did not prove the sent document".to_string(), - ))? - .ok_or(Error::InvalidProvedResponse( - "expected there to actually be a document".to_string(), - ))?; - Ok(document) - } - _ => Err(Error::DapiClientError("proved a non document".to_string())), - } - } - async fn put_to_platform_and_wait_for_response( &self, sdk: &Sdk, document_type: DocumentType, document_state_transition_entropy: [u8; 32], identity_public_key: IdentityPublicKey, - data_contract: Arc, signer: &S, + settings: Option, ) -> Result { let state_transition = self .put_to_platform( @@ -162,15 +95,10 @@ impl PutDocument for Document { document_state_transition_entropy, identity_public_key, signer, - None, + settings, ) .await?; - // TODO: Why do we need full type annotation? 
- let document = - >::wait_for_response(self, sdk, state_transition, data_contract) - .await?; - - Ok(document) + Self::wait_for_response(sdk, state_transition, settings).await } } diff --git a/packages/rs-sdk/src/platform/transition/put_identity.rs b/packages/rs-sdk/src/platform/transition/put_identity.rs index 30276a06a0..ce79b52d81 100644 --- a/packages/rs-sdk/src/platform/transition/put_identity.rs +++ b/packages/rs-sdk/src/platform/transition/put_identity.rs @@ -1,43 +1,39 @@ use crate::platform::transition::broadcast_identity::BroadcastRequestForNewIdentity; -use crate::platform::transition::broadcast_request::BroadcastRequestForStateTransition; -use crate::platform::Fetch; use crate::{Error, Sdk}; -use dapi_grpc::platform::VersionedGrpcResponse; -use dapi_grpc::tonic::Code; +use super::broadcast::BroadcastStateTransition; +use super::put_settings::PutSettings; +use super::waitable::Waitable; use dpp::dashcore::PrivateKey; use dpp::identity::signer::Signer; use dpp::prelude::{AssetLockProof, Identity}; -use drive_proof_verifier::error::ContextProviderError; -use drive_proof_verifier::DataContractProvider; +use dpp::state_transition::StateTransition; -use crate::platform::block_info_from_metadata::block_info_from_metadata; -use dpp::state_transition::proof_result::StateTransitionProofResult; -use drive::drive::Drive; -use rs_dapi_client::transport::TransportError; -use rs_dapi_client::{DapiClientError, DapiRequest, IntoInner, RequestSettings}; - -#[async_trait::async_trait] /// A trait for putting an identity to platform -pub trait PutIdentity { - /// Puts an identity on platform +#[async_trait::async_trait] +pub trait PutIdentity: Waitable { + /// Puts an identity on platform. + /// + /// TODO: Discuss if it should not actually consume self, since it is no longer valid (eg. identity id is changed) async fn put_to_platform( &self, sdk: &Sdk, asset_lock_proof: AssetLockProof, asset_lock_proof_private_key: &PrivateKey, signer: &S, - ) -> Result<(), Error>; - /// Puts an identity on platform and waits for the confirmation proof + settings: Option, + ) -> Result; + + /// Puts an identity on platform and waits for the confirmation proof. 
async fn put_to_platform_and_wait_for_response( &self, sdk: &Sdk, asset_lock_proof: AssetLockProof, asset_lock_proof_private_key: &PrivateKey, signer: &S, - ) -> Result; + settings: Option, + ) -> Result; } - #[async_trait::async_trait] impl PutIdentity for Identity { async fn put_to_platform( @@ -46,23 +42,18 @@ impl PutIdentity for Identity { asset_lock_proof: AssetLockProof, asset_lock_proof_private_key: &PrivateKey, signer: &S, - ) -> Result<(), Error> { - let (_, request) = self.broadcast_request_for_new_identity( + settings: Option, + ) -> Result { + let (state_transition, _) = self.broadcast_request_for_new_identity( asset_lock_proof, asset_lock_proof_private_key, signer, sdk.version(), )?; - request - .clone() - .execute(sdk, RequestSettings::default()) - .await // TODO: We need better way to handle execution errors - .into_inner()?; - // response is empty for a broadcast, result comes from the stream wait for state transition result - - Ok(()) + state_transition.broadcast(sdk, settings).await?; + Ok(state_transition) } async fn put_to_platform_and_wait_for_response( @@ -71,68 +62,18 @@ impl PutIdentity for Identity { asset_lock_proof: AssetLockProof, asset_lock_proof_private_key: &PrivateKey, signer: &S, + settings: Option, ) -> Result { - let identity_id = asset_lock_proof.create_identifier()?; - let (state_transition, request) = self.broadcast_request_for_new_identity( - asset_lock_proof, - asset_lock_proof_private_key, - signer, - sdk.version(), - )?; - - let response_result = request - .clone() - .execute(sdk, RequestSettings::default()) - .await - .into_inner(); - - match response_result { - Ok(_) => {} - //todo make this more reliable - Err(DapiClientError::Transport(TransportError::Grpc(te))) - if te.code() == Code::AlreadyExists => - { - tracing::debug!( - ?identity_id, - "attempt to create identity that already exists" - ); - let identity = Identity::fetch(sdk, identity_id).await?; - return identity.ok_or(Error::DapiClientError( - "identity was proved to not exist but was said to exist".to_string(), - )); - } - Err(e) => return Err(e.into()), - } - - let request = state_transition.wait_for_state_transition_result_request()?; - // TODO: Implement retry logic - - let response = request - .execute(sdk, RequestSettings::default()) - .await - .into_inner()?; - - let block_info = block_info_from_metadata(response.metadata()?)?; - let proof = response.proof_owned()?; - let context_provider = - sdk.context_provider() - .ok_or(Error::from(ContextProviderError::Config( - "Context provider not initialized".to_string(), - )))?; - - let (_, result) = Drive::verify_state_transition_was_executed_with_proof( - &state_transition, - &block_info, - proof.grovedb_proof.as_slice(), - &context_provider.as_contract_lookup_fn(), - sdk.version(), - )?; - - //todo verify - - match result { - StateTransitionProofResult::VerifiedIdentity(identity) => Ok(identity), - _ => Err(Error::DapiClientError("proved a non identity".to_string())), - } + let state_transition = self + .put_to_platform( + sdk, + asset_lock_proof, + asset_lock_proof_private_key, + signer, + settings, + ) + .await?; + + Self::wait_for_response(sdk, state_transition, settings).await } } diff --git a/packages/rs-sdk/src/platform/transition/put_settings.rs b/packages/rs-sdk/src/platform/transition/put_settings.rs index 7ddaef7a68..02d60100b3 100644 --- a/packages/rs-sdk/src/platform/transition/put_settings.rs +++ b/packages/rs-sdk/src/platform/transition/put_settings.rs @@ -1,3 +1,5 @@ +use std::time::Duration; + use 
dpp::prelude::UserFeeIncrease; use rs_dapi_client::RequestSettings; @@ -7,4 +9,19 @@ pub struct PutSettings { pub request_settings: RequestSettings, pub identity_nonce_stale_time_s: Option, pub user_fee_increase: Option, + /// Soft limit of total time to wait for state transition to be executed (included in a block). + /// + /// This is an upper limit, and other settings may affect the actual wait time + /// (like DAPI timeouts, [RequestSettings::timeout], [RequestSettings::retries], etc.). + /// If you want to use `wait_timeout`, tune `retries` accordingly. + /// + /// It can be exceeded due to execution of non-cancellable parts of the Sdk. + // TODO: Simplify timeout logic when waiting for response in Sdk, as having 3 different timeouts is confusing. + pub wait_timeout: Option, +} + +impl From for RequestSettings { + fn from(settings: PutSettings) -> Self { + settings.request_settings + } } diff --git a/packages/rs-sdk/src/platform/transition/top_up_identity.rs b/packages/rs-sdk/src/platform/transition/top_up_identity.rs index c43d8a9f19..10998b6ae7 100644 --- a/packages/rs-sdk/src/platform/transition/top_up_identity.rs +++ b/packages/rs-sdk/src/platform/transition/top_up_identity.rs @@ -1,26 +1,22 @@ -use crate::platform::block_info_from_metadata::block_info_from_metadata; -use crate::platform::transition::broadcast_request::BroadcastRequestForStateTransition; +use super::broadcast::BroadcastStateTransition; +use super::put_settings::PutSettings; +use super::waitable::Waitable; use crate::{Error, Sdk}; -use dapi_grpc::platform::VersionedGrpcResponse; use dpp::dashcore::PrivateKey; -use dpp::identity::Identity; +use dpp::identity::{Identity, PartialIdentity}; use dpp::prelude::{AssetLockProof, UserFeeIncrease}; use dpp::state_transition::identity_topup_transition::methods::IdentityTopUpTransitionMethodsV0; use dpp::state_transition::identity_topup_transition::IdentityTopUpTransition; -use dpp::state_transition::proof_result::StateTransitionProofResult; -use drive::drive::Drive; -use drive_proof_verifier::error::ContextProviderError; -use drive_proof_verifier::DataContractProvider; -use rs_dapi_client::{DapiRequest, IntoInner, RequestSettings}; #[async_trait::async_trait] -pub trait TopUpIdentity { +pub trait TopUpIdentity: Waitable { async fn top_up_identity( &self, sdk: &Sdk, asset_lock_proof: AssetLockProof, asset_lock_proof_private_key: &PrivateKey, user_fee_increase: Option, + settings: Option, ) -> Result; } @@ -32,6 +28,7 @@ impl TopUpIdentity for Identity { asset_lock_proof: AssetLockProof, asset_lock_proof_private_key: &PrivateKey, user_fee_increase: Option, + settings: Option, ) -> Result { let state_transition = IdentityTopUpTransition::try_from_identity( self, @@ -41,46 +38,10 @@ impl TopUpIdentity for Identity { sdk.version(), None, )?; + let identity: PartialIdentity = state_transition.broadcast_and_wait(sdk, settings).await?; - let request = state_transition.broadcast_request_for_state_transition()?; - - request - .clone() - .execute(sdk, RequestSettings::default()) - .await // TODO: We need better way to handle execution errors - .into_inner()?; - - let request = state_transition.wait_for_state_transition_result_request()?; - // TODO: Implement retry logic in wait for state transition result - let response = request - .execute(sdk, RequestSettings::default()) - .await - .into_inner()?; - - let block_info = block_info_from_metadata(response.metadata()?)?; - - let proof = response.proof_owned()?; - let context_provider = - sdk.context_provider() - 
.ok_or(Error::from(ContextProviderError::Config( - "Context provider not initialized".to_string(), - )))?; - - let (_, result) = Drive::verify_state_transition_was_executed_with_proof( - &state_transition, - &block_info, - proof.grovedb_proof.as_slice(), - &context_provider.as_contract_lookup_fn(), - sdk.version(), - )?; - - match result { - StateTransitionProofResult::VerifiedPartialIdentity(identity) => { - identity.balance.ok_or(Error::DapiClientError( - "expected an identity balance".to_string(), - )) - } - _ => Err(Error::DapiClientError("proved a non identity".to_string())), - } + identity.balance.ok_or(Error::DapiClientError( + "expected an identity balance".to_string(), + )) } } diff --git a/packages/rs-sdk/src/platform/transition/transfer.rs b/packages/rs-sdk/src/platform/transition/transfer.rs index bf330a1024..7bd7ddd364 100644 --- a/packages/rs-sdk/src/platform/transition/transfer.rs +++ b/packages/rs-sdk/src/platform/transition/transfer.rs @@ -5,18 +5,25 @@ use crate::platform::transition::broadcast::BroadcastStateTransition; use crate::platform::transition::put_settings::PutSettings; use crate::{Error, Sdk}; use dpp::identity::signer::Signer; -use dpp::identity::{Identity, IdentityPublicKey}; +use dpp::identity::{Identity, IdentityPublicKey, PartialIdentity}; use dpp::state_transition::identity_credit_transfer_transition::methods::IdentityCreditTransferTransitionMethodsV0; use dpp::state_transition::identity_credit_transfer_transition::IdentityCreditTransferTransition; -use dpp::state_transition::proof_result::StateTransitionProofResult; + +use super::waitable::Waitable; #[async_trait::async_trait] -pub trait TransferToIdentity { +pub trait TransferToIdentity: Waitable { /// Function to transfer credits from an identity to another identity. Returns the final /// identity balance. /// /// If signing_transfer_key_to_use is not set, we will try to use one in the signer that is /// available for the transfer. + /// + /// This method will resolve once the state transition is executed. + /// + /// ## Returns + /// + /// Final balance of the identity after the transfer. 
async fn transfer_credits( &self, sdk: &Sdk, @@ -53,15 +60,10 @@ impl TransferToIdentity for Identity { None, )?; - let result = state_transition.broadcast_and_wait(sdk, None).await?; + let identity: PartialIdentity = state_transition.broadcast_and_wait(sdk, settings).await?; - match result { - StateTransitionProofResult::VerifiedPartialIdentity(identity) => { - identity.balance.ok_or(Error::DapiClientError( - "expected an identity balance after transfer".to_string(), - )) - } - _ => Err(Error::DapiClientError("proved a non identity".to_string())), - } + identity.balance.ok_or(Error::DapiClientError( + "expected an identity balance after transfer".to_string(), + )) } } diff --git a/packages/rs-sdk/src/platform/transition/transfer_document.rs b/packages/rs-sdk/src/platform/transition/transfer_document.rs index a64c76cb95..2106141ae3 100644 --- a/packages/rs-sdk/src/platform/transition/transfer_document.rs +++ b/packages/rs-sdk/src/platform/transition/transfer_document.rs @@ -1,28 +1,21 @@ +use super::waitable::Waitable; use crate::platform::transition::broadcast_request::BroadcastRequestForStateTransition; -use std::sync::Arc; - -use crate::{Error, Sdk}; - -use crate::platform::block_info_from_metadata::block_info_from_metadata; use crate::platform::transition::put_settings::PutSettings; use crate::platform::Identifier; -use dapi_grpc::platform::VersionedGrpcResponse; +use crate::{Error, Sdk}; use dpp::data_contract::document_type::accessors::DocumentTypeV0Getters; use dpp::data_contract::document_type::DocumentType; -use dpp::data_contract::DataContract; use dpp::document::{Document, DocumentV0Getters}; use dpp::identity::signer::Signer; use dpp::identity::IdentityPublicKey; use dpp::state_transition::documents_batch_transition::methods::v0::DocumentsBatchTransitionMethodsV0; use dpp::state_transition::documents_batch_transition::DocumentsBatchTransition; -use dpp::state_transition::proof_result::StateTransitionProofResult; use dpp::state_transition::StateTransition; -use drive::drive::Drive; -use rs_dapi_client::{DapiRequest, IntoInner, RequestSettings}; +use rs_dapi_client::{DapiRequest, IntoInner}; #[async_trait::async_trait] /// A trait for transferring a document on Platform -pub trait TransferDocument { +pub trait TransferDocument: Waitable { /// Transfers a document on platform /// Setting settings to `None` sets default connection behavior async fn transfer_document_to_identity( @@ -35,14 +28,6 @@ pub trait TransferDocument { settings: Option, ) -> Result; - /// Waits for the response of a state transition after it has been broadcast - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: StateTransition, - data_contract: Arc, - ) -> Result; - /// Transfers a document on platform and waits for the response async fn transfer_document_to_identity_and_wait_for_response( &self, @@ -50,8 +35,8 @@ pub trait TransferDocument { sdk: &Sdk, document_type: DocumentType, identity_public_key: IdentityPublicKey, - data_contract: Arc, signer: &S, + settings: Option, ) -> Result; } @@ -104,55 +89,14 @@ impl TransferDocument for Document { Ok(transition) } - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: StateTransition, - data_contract: Arc, - ) -> Result { - let request = state_transition.wait_for_state_transition_result_request()?; - - let response = request - .execute(sdk, RequestSettings::default()) - .await - .into_inner()?; - - let block_info = block_info_from_metadata(response.metadata()?)?; - - let proof = response.proof_owned()?; - - let (_, result) 
= Drive::verify_state_transition_was_executed_with_proof( - &state_transition, - &block_info, - proof.grovedb_proof.as_slice(), - &|_| Ok(Some(data_contract.clone())), - sdk.version(), - )?; - - match result { - StateTransitionProofResult::VerifiedDocuments(mut documents) => { - let document = documents - .remove(self.id_ref()) - .ok_or(Error::InvalidProvedResponse( - "did not prove the sent document".to_string(), - ))? - .ok_or(Error::InvalidProvedResponse( - "expected there to actually be a document".to_string(), - ))?; - Ok(document) - } - _ => Err(Error::DapiClientError("proved a non document".to_string())), - } - } - async fn transfer_document_to_identity_and_wait_for_response( &self, recipient_id: Identifier, sdk: &Sdk, document_type: DocumentType, identity_public_key: IdentityPublicKey, - data_contract: Arc, signer: &S, + settings: Option, ) -> Result { let state_transition = self .transfer_document_to_identity( @@ -161,18 +105,10 @@ impl TransferDocument for Document { document_type, identity_public_key, signer, - None, + settings, ) .await?; - let document = >::wait_for_response( - self, - sdk, - state_transition, - data_contract, - ) - .await?; - - Ok(document) + Self::wait_for_response(sdk, state_transition, settings).await } } diff --git a/packages/rs-sdk/src/platform/transition/update_price_of_document.rs b/packages/rs-sdk/src/platform/transition/update_price_of_document.rs index 0f331cde5d..99a5642bf9 100644 --- a/packages/rs-sdk/src/platform/transition/update_price_of_document.rs +++ b/packages/rs-sdk/src/platform/transition/update_price_of_document.rs @@ -1,28 +1,21 @@ -use crate::platform::transition::broadcast_request::BroadcastRequestForStateTransition; -use std::sync::Arc; - use crate::{Error, Sdk}; -use crate::platform::block_info_from_metadata::block_info_from_metadata; +use super::broadcast::BroadcastStateTransition; +use super::waitable::Waitable; use crate::platform::transition::put_settings::PutSettings; -use dapi_grpc::platform::VersionedGrpcResponse; use dpp::data_contract::document_type::accessors::DocumentTypeV0Getters; use dpp::data_contract::document_type::DocumentType; -use dpp::data_contract::DataContract; use dpp::document::{Document, DocumentV0Getters}; use dpp::fee::Credits; use dpp::identity::signer::Signer; use dpp::identity::IdentityPublicKey; use dpp::state_transition::documents_batch_transition::methods::v0::DocumentsBatchTransitionMethodsV0; use dpp::state_transition::documents_batch_transition::DocumentsBatchTransition; -use dpp::state_transition::proof_result::StateTransitionProofResult; use dpp::state_transition::StateTransition; -use drive::drive::Drive; -use rs_dapi_client::{DapiRequest, IntoInner, RequestSettings}; #[async_trait::async_trait] /// A trait for updating the price of a document on Platform -pub trait UpdatePriceOfDocument { +pub trait UpdatePriceOfDocument: Waitable { /// Updates the price of a document on platform /// Setting settings to `None` sets default connection behavior async fn update_price_of_document( @@ -35,14 +28,6 @@ pub trait UpdatePriceOfDocument { settings: Option, ) -> Result; - /// Waits for the response of a state transition after it has been broadcast - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: StateTransition, - data_contract: Arc, - ) -> Result; - /// Updates the price of a document on platform and waits for the response async fn update_price_of_document_and_wait_for_response( &self, @@ -50,8 +35,8 @@ pub trait UpdatePriceOfDocument { sdk: &Sdk, document_type: DocumentType, 
identity_public_key: IdentityPublicKey, - data_contract: Arc, signer: &S, + settings: Option, ) -> Result; } @@ -92,81 +77,24 @@ impl UpdatePriceOfDocument for Document { None, )?; - let request = transition.broadcast_request_for_state_transition()?; - - request - .clone() - .execute(sdk, settings.request_settings) - .await // TODO: We need better way to handle execution errors - .into_inner()?; - // response is empty for a broadcast, result comes from the stream wait for state transition result - + transition.broadcast(sdk, Some(settings)).await?; Ok(transition) } - async fn wait_for_response( - &self, - sdk: &Sdk, - state_transition: StateTransition, - data_contract: Arc, - ) -> Result { - let request = state_transition.wait_for_state_transition_result_request()?; - // TODO: Implement retry logic - let response = request - .execute(sdk, RequestSettings::default()) - .await - .into_inner()?; - - let block_info = block_info_from_metadata(response.metadata()?)?; - - let proof = response.proof_owned()?; - - let (_, result) = Drive::verify_state_transition_was_executed_with_proof( - &state_transition, - &block_info, - proof.grovedb_proof.as_slice(), - &|_| Ok(Some(data_contract.clone())), - sdk.version(), - )?; - - match result { - StateTransitionProofResult::VerifiedDocuments(mut documents) => { - let document = documents - .remove(self.id_ref()) - .ok_or(Error::InvalidProvedResponse( - "did not prove the sent document".to_string(), - ))? - .ok_or(Error::InvalidProvedResponse( - "expected there to actually be a document".to_string(), - ))?; - Ok(document) - } - _ => Err(Error::DapiClientError("proved a non document".to_string())), - } - } - async fn update_price_of_document_and_wait_for_response( &self, price: Credits, sdk: &Sdk, document_type: DocumentType, identity_public_key: IdentityPublicKey, - data_contract: Arc, signer: &S, + settings: Option, ) -> Result { let state_transition = self .update_price_of_document(price, sdk, document_type, identity_public_key, signer, None) .await?; - let document = >::wait_for_response( - self, - sdk, - state_transition, - data_contract, - ) - .await?; - - Ok(document) + Self::wait_for_response(sdk, state_transition, settings).await } } diff --git a/packages/rs-sdk/src/platform/transition/vote.rs b/packages/rs-sdk/src/platform/transition/vote.rs index 5666b8b42d..3734e892f2 100644 --- a/packages/rs-sdk/src/platform/transition/vote.rs +++ b/packages/rs-sdk/src/platform/transition/vote.rs @@ -1,10 +1,8 @@ -use crate::platform::block_info_from_metadata::block_info_from_metadata; use crate::platform::query::VoteQuery; use crate::platform::transition::broadcast_request::BroadcastRequestForStateTransition; use crate::platform::transition::put_settings::PutSettings; use crate::platform::Fetch; use crate::{Error, Sdk}; -use dapi_grpc::platform::VersionedGrpcResponse; use dpp::identifier::MasternodeIdentifiers; use dpp::identity::hash::IdentityPublicKeyHashMethodsV0; use dpp::identity::signer::Signer; @@ -12,16 +10,15 @@ use dpp::identity::IdentityPublicKey; use dpp::prelude::Identifier; use dpp::state_transition::masternode_vote_transition::methods::MasternodeVoteTransitionMethodsV0; use dpp::state_transition::masternode_vote_transition::MasternodeVoteTransition; -use dpp::state_transition::proof_result::StateTransitionProofResult; use dpp::voting::votes::resource_vote::accessors::v0::ResourceVoteGettersV0; use dpp::voting::votes::Vote; -use drive::drive::Drive; -use drive_proof_verifier::{error::ContextProviderError, DataContractProvider}; use 
rs_dapi_client::{DapiRequest, IntoInner}; +use super::waitable::Waitable; + #[async_trait::async_trait] /// A trait for putting a vote on platform -pub trait PutVote { +pub trait PutVote: Waitable { /// Puts an identity on platform async fn put_to_platform( &self, @@ -129,37 +126,7 @@ impl PutVote for Vote { } } } - - let request = masternode_vote_transition.wait_for_state_transition_result_request()?; - let response = request - .execute(sdk, settings.request_settings) - .await - .into_inner()?; - - let block_info = block_info_from_metadata(response.metadata()?)?; - let proof = response.proof_owned()?; - let context_provider = - sdk.context_provider() - .ok_or(Error::from(ContextProviderError::Config( - "Context provider not initialized".to_string(), - )))?; - - let (_, result) = Drive::verify_state_transition_was_executed_with_proof( - &masternode_vote_transition, - &block_info, - proof.grovedb_proof.as_slice(), - &context_provider.as_contract_lookup_fn(), - sdk.version(), - )?; - - //todo verify - - match result { - StateTransitionProofResult::VerifiedMasternodeVote(vote) => Ok(vote), - _ => Err(Error::DapiClientError( - "proved something that was not a vote".to_string(), - )), - } + Self::wait_for_response(sdk, masternode_vote_transition, Some(settings)).await } } diff --git a/packages/rs-sdk/src/platform/transition/waitable.rs b/packages/rs-sdk/src/platform/transition/waitable.rs new file mode 100644 index 0000000000..a63acb0949 --- /dev/null +++ b/packages/rs-sdk/src/platform/transition/waitable.rs @@ -0,0 +1,131 @@ +use std::collections::BTreeMap; + +use super::broadcast::BroadcastStateTransition; +use super::put_settings::PutSettings; +use crate::platform::Fetch; +use crate::Error; +use crate::Sdk; +use dpp::document::Document; +use dpp::prelude::{DataContract, Identifier, Identity}; +use dpp::state_transition::identity_create_transition::accessors::IdentityCreateTransitionAccessorsV0; +use dpp::state_transition::StateTransition; +use dpp::state_transition::StateTransitionLike; +use dpp::voting::votes::Vote; +use dpp::ProtocolError; + +/// Waitable trait provides a wait to wait for a response of a state transition after it has been broadcast and +/// receive altered objects. +/// +/// This is simple conveniance trait wrapping the [`BroadcastStateTransition::wait_for_response`] method. 
+#[async_trait::async_trait] +pub trait Waitable: Sized { + async fn wait_for_response( + sdk: &Sdk, + state_transition: StateTransition, + settings: Option, + ) -> Result; +} +#[async_trait::async_trait] +impl Waitable for DataContract { + async fn wait_for_response( + sdk: &Sdk, + state_transition: StateTransition, + settings: Option, + ) -> Result { + state_transition.wait_for_response(sdk, settings).await + } +} + +#[async_trait::async_trait] +impl Waitable for Document { + async fn wait_for_response( + sdk: &Sdk, + state_transition: StateTransition, + settings: Option, + ) -> Result { + let doc_id = if let StateTransition::DocumentsBatch(transition) = &state_transition { + let ids = transition.modified_data_ids(); + if ids.len() != 1 { + return Err(Error::Protocol( + dpp::ProtocolError::InvalidStateTransitionType(format!( + "expected state transition with exactly one document, got {}", + ids.into_iter() + .map(|id| id + .to_string(dpp::platform_value::string_encoding::Encoding::Base58)) + .collect::>() + .join(", ") + )), + )); + } + ids[0] + } else { + return Err(Error::Protocol(ProtocolError::InvalidStateTransitionType( + format!( + "expected state transition to be a DocumentsBatchTransition, got {}", + state_transition.name() + ), + ))); + }; + + let mut documents: BTreeMap> = + state_transition.wait_for_response(sdk, settings).await?; + + let document: Document = documents + .remove(&doc_id) + .ok_or(Error::InvalidProvedResponse( + "did not prove the sent document".to_string(), + ))? + .ok_or(Error::InvalidProvedResponse( + "expected there to actually be a document".to_string(), + ))?; + + Ok(document) + } +} + +#[async_trait::async_trait] +impl Waitable for Identity { + async fn wait_for_response( + sdk: &Sdk, + state_transition: StateTransition, + settings: Option, + ) -> Result { + let result: Result = state_transition.wait_for_response(sdk, settings).await; + + match result { + Ok(identity) => Ok(identity), + // TODO: We need to refactor sdk Error to be able to retrieve gRPC error code and identify conflicts + Err(Error::AlreadyExists(_)) => { + let identity_id = if let StateTransition::IdentityCreate(st) = state_transition { + st.identity_id() + } else { + return Err(Error::Generic(format!( + "expected identity create state transition, got {:?}", + state_transition.name() + ))); + }; + + tracing::debug!( + ?identity_id, + "attempt to create identity that already exists" + ); + let identity = Identity::fetch(sdk, identity_id).await?; + identity.ok_or(Error::DapiClientError( + "identity was proved to not exist but was said to exist".to_string(), + )) + } + Err(e) => Err(e), + } + } +} + +#[async_trait::async_trait] +impl Waitable for Vote { + async fn wait_for_response( + sdk: &Sdk, + state_transition: StateTransition, + settings: Option, + ) -> Result { + state_transition.wait_for_response(sdk, settings).await + } +} diff --git a/packages/rs-sdk/src/platform/transition/withdraw_from_identity.rs b/packages/rs-sdk/src/platform/transition/withdraw_from_identity.rs index 1d72c86e07..4cd59e811b 100644 --- a/packages/rs-sdk/src/platform/transition/withdraw_from_identity.rs +++ b/packages/rs-sdk/src/platform/transition/withdraw_from_identity.rs @@ -62,7 +62,7 @@ impl WithdrawFromIdentity for Identity { None, )?; - let result = state_transition.broadcast_and_wait(sdk, None).await?; + let result = state_transition.broadcast_and_wait(sdk, settings).await?; match result { StateTransitionProofResult::VerifiedPartialIdentity(identity) => { diff --git 
a/packages/rs-sdk/src/platform/types/evonode.rs b/packages/rs-sdk/src/platform/types/evonode.rs index 70bbabee61..2f91e17106 100644 --- a/packages/rs-sdk/src/platform/types/evonode.rs +++ b/packages/rs-sdk/src/platform/types/evonode.rs @@ -25,8 +25,8 @@ use std::fmt::Debug; /// use futures::executor::block_on; /// /// let sdk = Sdk::new_mock(); -/// let uri: http::Uri = "http://127.0.0.1:1".parse().unwrap(); -/// let node = EvoNode::new(uri.into()); +/// let address = "http://127.0.0.1:1".parse().expect("valid address"); +/// let node = EvoNode::new(address); /// let status = block_on(EvoNodeStatus::fetch_unproved(&sdk, node)).unwrap(); /// ``` diff --git a/packages/rs-sdk/src/sdk.rs b/packages/rs-sdk/src/sdk.rs index 3fd570e206..c823df2eae 100644 --- a/packages/rs-sdk/src/sdk.rs +++ b/packages/rs-sdk/src/sdk.rs @@ -50,6 +50,16 @@ pub const DEFAULT_QUORUM_PUBLIC_KEYS_CACHE_SIZE: usize = 100; /// The default identity nonce stale time in seconds pub const DEFAULT_IDENTITY_NONCE_STALE_TIME_S: u64 = 1200; //20 mins +/// The default request settings for the SDK, used when the user does not provide any. +/// +/// Use [SdkBuilder::with_settings] to set custom settings. +const DEFAULT_REQUEST_SETTINGS: RequestSettings = RequestSettings { + retries: Some(3), + timeout: None, + ban_failed_address: None, + connect_timeout: None, +}; + /// a type to represent staleness in seconds pub type StalenessInSeconds = u64; @@ -184,7 +194,7 @@ enum SdkInstance { dapi: Arc>, /// Mock SDK implementation processing mock expectations and responses. mock: Arc>, - + address_list: AddressList, /// Platform version configured for this Sdk version: &'static PlatformVersion, }, @@ -554,19 +564,11 @@ impl Sdk { } /// Return the [DapiClient] address list - pub fn address_list(&self) -> Result { + pub fn address_list(&self) -> &AddressList { match &self.inner { - SdkInstance::Dapi { dapi, version: _ } => { - let address_list_arc = dapi.address_list(); - let address_list_lock = address_list_arc - .read() - .map_err(|e| format!("Failed to read address list: {e}"))?; - Ok(address_list_lock.clone()) - } + SdkInstance::Dapi { dapi, .. } => dapi.address_list(), #[cfg(feature = "mocks")] - SdkInstance::Mock { .. } => { - unimplemented!("mock Sdk does not have address list") - } + SdkInstance::Mock { address_list, .. } => address_list, } } } @@ -705,7 +707,7 @@ pub struct SdkBuilder { /// /// If `None`, a mock client will be created. addresses: Option, - settings: RequestSettings, + settings: Option, network: Network, @@ -755,7 +757,7 @@ impl Default for SdkBuilder { fn default() -> Self { Self { addresses: None, - settings: RequestSettings::default(), + settings: None, network: Network::Dash, core_ip: "".to_string(), core_port: 0, @@ -836,7 +838,7 @@ impl SdkBuilder { /// /// See [`RequestSettings`] for more information. 
pub fn with_settings(mut self, settings: RequestSettings) -> Self { - self.settings = settings; + self.settings = Some(settings); self } @@ -952,17 +954,22 @@ impl SdkBuilder { pub fn build(self) -> Result { PlatformVersion::set_current(self.version); + let dapi_client_settings = match self.settings { + Some(settings) => DEFAULT_REQUEST_SETTINGS.override_by(settings), + None => DEFAULT_REQUEST_SETTINGS, + }; + let sdk= match self.addresses { // non-mock mode Some(addresses) => { - let dapi = DapiClient::new(addresses, self.settings); + let dapi = DapiClient::new(addresses,dapi_client_settings); #[cfg(feature = "mocks")] let dapi = dapi.dump_dir(self.dump_dir.clone()); #[allow(unused_mut)] // needs to be mutable for #[cfg(feature = "mocks")] let mut sdk= Sdk{ network: self.network, - dapi_client_settings: self.settings, + dapi_client_settings, inner:SdkInstance::Dapi { dapi, version:self.version }, proofs:self.proofs, context_provider: ArcSwapOption::new( self.context_provider.map(Arc::new)), @@ -1025,11 +1032,12 @@ impl SdkBuilder { let mock_sdk = Arc::new(Mutex::new(mock_sdk)); let sdk= Sdk { network: self.network, - dapi_client_settings: self.settings, + dapi_client_settings, inner:SdkInstance::Mock { mock:mock_sdk.clone(), dapi, - version:self.version, + address_list: AddressList::new(), + version: self.version, }, dump_dir: self.dump_dir.clone(), proofs:self.proofs, diff --git a/packages/rs-sdk/src/sync.rs b/packages/rs-sdk/src/sync.rs index 38a878e174..5f5d266669 100644 --- a/packages/rs-sdk/src/sync.rs +++ b/packages/rs-sdk/src/sync.rs @@ -6,13 +6,17 @@ use arc_swap::ArcSwap; use drive_proof_verifier::error::ContextProviderError; -use rs_dapi_client::{CanRetry, ExecutionResult, RequestSettings}; +use rs_dapi_client::{ + update_address_ban_status, AddressList, CanRetry, ExecutionResult, RequestSettings, +}; +use std::fmt::Display; use std::{ fmt::Debug, future::Future, sync::{mpsc::SendError, Arc}, }; use tokio::{runtime::TryCurrentError, sync::Mutex}; + #[derive(Debug, thiserror::Error)] pub enum AsyncError { /// Not running inside tokio runtime @@ -110,6 +114,7 @@ async fn worker( /// /// ## Parameters /// +/// - `address_list` - list of addresses to be used for the requests. /// - `settings` - global settings with any request-specific settings overrides applied. /// - `future_factory_fn` - closure that returns a future that should be retried. It should take [`RequestSettings`] as /// an argument and return [`ExecutionResult`]. @@ -138,8 +143,9 @@ async fn worker( /// } /// #[tokio::main] /// async fn main() { +/// let address_list = rs_dapi_client::AddressList::default(); /// let global_settings = RequestSettings::default(); -/// dash_sdk::sync::retry(global_settings, retry_test_function).await.expect_err("should fail"); +/// dash_sdk::sync::retry(&address_list, global_settings, retry_test_function).await.expect_err("should fail"); /// } /// ``` /// @@ -154,13 +160,14 @@ async fn worker( /// /// - [`::backon`] crate that is used by this function. 
pub async fn retry( + address_list: &AddressList, settings: RequestSettings, future_factory_fn: FutureFactoryFn, ) -> ExecutionResult where Fut: Future>, FutureFactoryFn: FnMut(RequestSettings) -> Fut, - E: CanRetry + Debug, + E: CanRetry + Display + Debug, { let max_retries = settings.retries.unwrap_or_default(); @@ -187,21 +194,26 @@ where async move { let settings = closure_settings.load_full().clone(); let mut func = inner_fn.lock().await; - (*func)(*settings).await + let result = (*func)(*settings).await; + + // Ban or unban the address based on the result + update_address_ban_status(address_list, &result, &settings.finalize()); + + result } }; - let result= ::backon::Retryable::retry(closure,backoff_strategy) + let result = ::backon::Retryable::retry(closure, backoff_strategy) .when(|e| { if e.can_retry() { - // requests sent for current execution attempt; + // requests sent for current execution attempt; let requests_sent = e.retries + 1; - // requests sent in all preceeding attempts; user expects `settings.retries +1` + // requests sent in all preceeding attempts; user expects `settings.retries +1` retries += requests_sent; let all_requests_sent = retries; - if all_requests_sent <=max_retries { // we account for for initial request + if all_requests_sent <= max_retries { // we account for initial request tracing::warn!(retry = all_requests_sent, max_retries, error=?e, "retrying request"); let new_settings = RequestSettings { retries: Some(max_retries - all_requests_sent), // limit num of retries for lower layer @@ -231,6 +243,7 @@ where #[cfg(test)] mod test { use super::*; + use derive_more::Display; use http::Uri; use rs_dapi_client::ExecutionError; use std::{ @@ -314,7 +327,7 @@ mod test { } } - #[derive(Debug)] + #[derive(Debug, Display)] enum MockError { Generic, } @@ -342,7 +355,7 @@ mod test { Err(ExecutionError { inner: MockError::Generic, retries, - address: Some(Uri::from_static("http://localhost").into()), + address: Some("http://localhost".parse().expect("valid address")), }) } @@ -352,6 +365,8 @@ mod test { for _ in 0..1 { let counter = Arc::new(AtomicUsize::new(0)); + let address_list = AddressList::default(); + // we retry 5 times, and expect 5 retries + 1 initial request let mut global_settings = RequestSettings::default(); global_settings.retries = Some(expected_requests - 1); @@ -361,7 +376,7 @@ mod test { retry_test_function(s, counter) }; - retry(global_settings, closure) + retry(&address_list, global_settings, closure) .await .expect_err("should fail"); diff --git a/packages/rs-sdk/tests/fetch/config.rs b/packages/rs-sdk/tests/fetch/config.rs index c2f8edbc4e..f55484f5ce 100644 --- a/packages/rs-sdk/tests/fetch/config.rs +++ b/packages/rs-sdk/tests/fetch/config.rs @@ -8,7 +8,7 @@ use dpp::{ dashcore::{hashes::Hash, ProTxHash}, prelude::Identifier, }; -use rs_dapi_client::AddressList; +use rs_dapi_client::{Address, AddressList}; use serde::Deserialize; use std::{path::PathBuf, str::FromStr}; use zeroize::Zeroizing; @@ -131,9 +131,12 @@ impl Config { false => "http", }; - let address: String = format!("{}://{}:{}", scheme, self.platform_host, self.platform_port); + let address: Address = + format!("{}://{}:{}", scheme, self.platform_host, self.platform_port) + .parse() + .expect("valid address"); - AddressList::from_iter(vec![http::Uri::from_str(&address).expect("valid uri")]) + AddressList::from_iter([address]) } /// Create new SDK instance diff --git a/packages/rs-sdk/tests/fetch/contested_resource.rs b/packages/rs-sdk/tests/fetch/contested_resource.rs index 
643396d495..c37d8da39c 100644 --- a/packages/rs-sdk/tests/fetch/contested_resource.rs +++ b/packages/rs-sdk/tests/fetch/contested_resource.rs @@ -232,11 +232,11 @@ async fn contested_resources_limit_PLAN_656() { /// None #[test_case::test_case(|_q| {}, Ok("ContestedResources([ContestedResource(Text(".into()); "unmodified base query is Ok")] #[test_case::test_case(|q| q.start_index_values = vec![Value::Text("".to_string())], Ok("".into()); "index value empty string is Ok")] -#[test_case::test_case(|q| q.document_type_name = "some random non-existing name".to_string(), Err(r#"code: InvalidArgument, message: "document type some random non-existing name not found"#); "non existing document type returns InvalidArgument")] -#[test_case::test_case(|q| q.index_name = "nx index".to_string(), Err(r#"code: InvalidArgument, message: "index with name nx index is not the contested index"#); "non existing index returns InvalidArgument")] -#[test_case::test_case(|q| q.index_name = "dashIdentityId".to_string(), Err(r#"code: InvalidArgument, message: "index with name dashIdentityId is not the contested index"#); "existing non-contested index returns InvalidArgument")] +#[test_case::test_case(|q| q.document_type_name = "some random non-existing name".to_string(), Err(r#"status: InvalidArgument, message: "document type some random non-existing name not found"#); "non existing document type returns InvalidArgument")] +#[test_case::test_case(|q| q.index_name = "nx index".to_string(), Err(r#"status: InvalidArgument, message: "index with name nx index is not the contested index"#); "non existing index returns InvalidArgument")] +#[test_case::test_case(|q| q.index_name = "dashIdentityId".to_string(), Err(r#"status: InvalidArgument, message: "index with name dashIdentityId is not the contested index"#); "existing non-contested index returns InvalidArgument")] // Disabled due to bug PLAN-653 -// #[test_case::test_case(|q| q.start_at_value = Some((Value::Array(vec![]), true)), Err(r#"code: InvalidArgument"#); "start_at_value wrong index type returns InvalidArgument PLAN-653")] +// #[test_case::test_case(|q| q.start_at_value = Some((Value::Array(vec![]), true)), Err(r#"status: InvalidArgument"#); "start_at_value wrong index type returns InvalidArgument PLAN-653")] #[test_case::test_case(|q| q.start_index_values = vec![], Ok(r#"ContestedResources([ContestedResource(Text("dash"))])"#.into()); "start_index_values empty vec returns top-level keys")] #[test_case::test_case(|q| q.start_index_values = vec![Value::Text("".to_string())], Ok(r#"ContestedResources([])"#.into()); "start_index_values empty string returns zero results")] #[test_case::test_case(|q| { @@ -276,8 +276,8 @@ async fn contested_resources_limit_PLAN_656() { q.end_index_values = vec![Value::Text("zzz non existing".to_string())] }, Ok("ContestedResources([])".into()); "Non-existing end_index_values returns error")] #[test_case::test_case(|q| q.end_index_values = vec![Value::Array(vec![0.into(), 1.into()])], Err("incorrect index values error: too many end index values were provided"); "wrong type of end_index_values should return InvalidArgument")] -#[test_case::test_case(|q| q.limit = Some(0), Err(r#"code: InvalidArgument"#); "limit 0 returns InvalidArgument")] -#[test_case::test_case(|q| q.limit = Some(u16::MAX), Err(r#"code: InvalidArgument"#); "limit u16::MAX returns InvalidArgument")] +#[test_case::test_case(|q| q.limit = Some(0), Err(r#"status: InvalidArgument"#); "limit 0 returns InvalidArgument")] +#[test_case::test_case(|q| q.limit = Some(u16::MAX), 
Err(r#"status: InvalidArgument"#); "limit u16::MAX returns InvalidArgument")] // Disabled due to bug PLAN-656 // #[test_case::test_case(|q| { // q.start_index_values = vec![Value::Text("dash".to_string())]; diff --git a/packages/rs-sdk/tests/fetch/contested_resource_vote_state.rs b/packages/rs-sdk/tests/fetch/contested_resource_vote_state.rs index 6c0bd2f7c4..dd21b55aa7 100644 --- a/packages/rs-sdk/tests/fetch/contested_resource_vote_state.rs +++ b/packages/rs-sdk/tests/fetch/contested_resource_vote_state.rs @@ -107,7 +107,7 @@ async fn contested_resource_vote_states_nx_contract() { if let dash_sdk::error::Error::DapiClientError(e) = result { assert!( e.contains( - "Transport(Grpc(Status { code: InvalidArgument, message: \"contract not found error" + "Transport(Grpc(Status { status: InvalidArgument, message: \"contract not found error" ), "we should get contract not found error, got: {:?}", e, @@ -280,9 +280,9 @@ type MutFn = fn(&mut ContestedDocumentVotePollDriveQuery); #[test_case(|q| q.limit = Some(u16::MAX), Err("limit 65535 out of bounds of [1, 100]"); "limit u16::MAX")] #[test_case(|q| q.start_at = Some(([0x11; 32], true)), Ok("Contenders { winner: None, contenders: {Identifier("); "start_at does not exist should return next contenders")] #[test_case(|q| q.start_at = Some(([0xff; 32], true)), Ok("Contenders { winner: None, contenders: {}, abstain_vote_tally: None, lock_vote_tally: None }"); "start_at 0xff;32 should return zero contenders")] -#[test_case(|q| q.vote_poll.document_type_name = "nx doctype".to_string(), Err(r#"code: InvalidArgument, message: "document type nx doctype not found"#); "non existing document type returns InvalidArgument")] -#[test_case(|q| q.vote_poll.index_name = "nx index".to_string(), Err(r#"code: InvalidArgument, message: "index with name nx index is not the contested index"#); "non existing index returns InvalidArgument")] -#[test_case(|q| q.vote_poll.index_name = "dashIdentityId".to_string(), Err(r#"code: InvalidArgument, message: "index with name dashIdentityId is not the contested index"#); "existing non-contested index returns InvalidArgument")] +#[test_case(|q| q.vote_poll.document_type_name = "nx doctype".to_string(), Err(r#"status: InvalidArgument, message: "document type nx doctype not found"#); "non existing document type returns InvalidArgument")] +#[test_case(|q| q.vote_poll.index_name = "nx index".to_string(), Err(r#"status: InvalidArgument, message: "index with name nx index is not the contested index"#); "non existing index returns InvalidArgument")] +#[test_case(|q| q.vote_poll.index_name = "dashIdentityId".to_string(), Err(r#"status: InvalidArgument, message: "index with name dashIdentityId is not the contested index"#); "existing non-contested index returns InvalidArgument")] #[test_case(|q| q.vote_poll.index_values = vec![], Err("query uses index parentNameAndLabel, this index has 2 properties, but the query provided 0 index values instead"); "index_values empty vec returns error")] #[test_case(|q| q.vote_poll.index_values = vec![Value::Text("".to_string())], Err("query uses index parentNameAndLabel, this index has 2 properties, but the query provided 1 index values instead"); "index_values empty string returns error")] #[test_case(|q| q.vote_poll.index_values = vec![Value::Text("dash".to_string())], Err("query uses index parentNameAndLabel, this index has 2 properties, but the query provided 1 index values instead"); "index_values with one value returns error")] diff --git a/packages/rs-sdk/tests/fetch/evonode.rs 
b/packages/rs-sdk/tests/fetch/evonode.rs index 0d35d5be9f..b2521ba864 100644 --- a/packages/rs-sdk/tests/fetch/evonode.rs +++ b/packages/rs-sdk/tests/fetch/evonode.rs @@ -5,6 +5,7 @@ use dash_sdk::platform::{types::evonode::EvoNode, FetchUnproved}; use dpp::dashcore::{hashes::Hash, ProTxHash}; use drive_proof_verifier::types::EvoNodeStatus; use http::Uri; +use rs_dapi_client::Address; use std::time::Duration; /// Given some existing evonode URIs, WHEN we connect to them, THEN we get status. use tokio::time::timeout; @@ -16,9 +17,7 @@ async fn test_evonode_status() { let cfg = Config::new(); let sdk = cfg.setup_api("test_evonode_status").await; - let addresses = cfg.address_list(); - - for address in addresses { + for (address, _status) in cfg.address_list() { let node = EvoNode::new(address.clone()); match timeout( Duration::from_secs(3), @@ -33,8 +32,9 @@ async fn test_evonode_status() { status.chain.latest_block_height > 0, "latest block height must be positive" ); - assert!( - status.node.pro_tx_hash.unwrap_or_default().len() == ProTxHash::LEN, + assert_eq!( + status.node.pro_tx_hash.unwrap_or_default().len(), + ProTxHash::LEN, "latest block hash must be non-empty" ); // Add more specific assertions based on expected status properties @@ -61,11 +61,11 @@ async fn test_evonode_status_refused() { let cfg = Config::new(); let sdk = cfg.setup_api("test_evonode_status_refused").await; - let uri: Uri = "http://127.0.0.1:1".parse().unwrap(); + let address: Address = "http://127.0.0.1:1".parse().expect("valid address"); - let node = EvoNode::new(uri.clone().into()); + let node = EvoNode::new(address.clone()); let result = EvoNodeStatus::fetch_unproved(&sdk, node).await; - tracing::debug!(?result, ?uri, "evonode status"); + tracing::debug!(?result, ?address, "evonode status"); assert!(result.is_err()); } diff --git a/packages/rs-sdk/tests/vectors/test_evonode_status_refused/msg_EvoNode_6db392ff1869b56ecc7de9ace5864123671ed14d3f0c537aa8e878d24e529de5.json b/packages/rs-sdk/tests/vectors/test_evonode_status/msg_EvoNode_dae36baf8dec4f117f97a27099eb28ff908ec0406a4ac48fff5727a9b9a4ee57.json similarity index 58% rename from packages/rs-sdk/tests/vectors/test_evonode_status_refused/msg_EvoNode_6db392ff1869b56ecc7de9ace5864123671ed14d3f0c537aa8e878d24e529de5.json rename to packages/rs-sdk/tests/vectors/test_evonode_status/msg_EvoNode_dae36baf8dec4f117f97a27099eb28ff908ec0406a4ac48fff5727a9b9a4ee57.json index c80da24adb..c2bdd96612 100644 Binary files a/packages/rs-sdk/tests/vectors/test_evonode_status_refused/msg_EvoNode_6db392ff1869b56ecc7de9ace5864123671ed14d3f0c537aa8e878d24e529de5.json and b/packages/rs-sdk/tests/vectors/test_evonode_status/msg_EvoNode_dae36baf8dec4f117f97a27099eb28ff908ec0406a4ac48fff5727a9b9a4ee57.json differ diff --git a/packages/rs-sdk/tests/vectors/test_evonode_status/msg_EvoNode_fbdf15806b1160a9fb482d5663371cdde55f94897dcf9d905573b01fe445fbc9.json b/packages/rs-sdk/tests/vectors/test_evonode_status/msg_EvoNode_fbdf15806b1160a9fb482d5663371cdde55f94897dcf9d905573b01fe445fbc9.json deleted file mode 100644 index e51843cf30..0000000000 Binary files a/packages/rs-sdk/tests/vectors/test_evonode_status/msg_EvoNode_fbdf15806b1160a9fb482d5663371cdde55f94897dcf9d905573b01fe445fbc9.json and /dev/null differ diff --git a/packages/rs-sdk/tests/vectors/test_evonode_status_refused/msg_EvoNode_7a8ca78c81edf0322718e172f59dab90acb35dbe92b5072c67ae42b121a30dae.json 
b/packages/rs-sdk/tests/vectors/test_evonode_status_refused/msg_EvoNode_7a8ca78c81edf0322718e172f59dab90acb35dbe92b5072c67ae42b121a30dae.json new file mode 100644 index 0000000000..a72158ecd4 Binary files /dev/null and b/packages/rs-sdk/tests/vectors/test_evonode_status_refused/msg_EvoNode_7a8ca78c81edf0322718e172f59dab90acb35dbe92b5072c67ae42b121a30dae.json differ diff --git a/packages/simple-signer/Cargo.toml b/packages/simple-signer/Cargo.toml index d8f05cb442..bcaa1f6b13 100644 --- a/packages/simple-signer/Cargo.toml +++ b/packages/simple-signer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "simple-signer" -version = "1.5.1" +version = "1.7.0" edition = "2021" rust-version.workspace = true diff --git a/packages/strategy-tests/Cargo.toml b/packages/strategy-tests/Cargo.toml index ddfadefba0..c445290ecc 100644 --- a/packages/strategy-tests/Cargo.toml +++ b/packages/strategy-tests/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "strategy-tests" -version = "1.5.1" +version = "1.7.0" authors = [ "Samuel Westrich ", "Ivan Shumkov ", diff --git a/packages/strategy-tests/src/lib.rs b/packages/strategy-tests/src/lib.rs index 61395d99f2..efdb702a48 100644 --- a/packages/strategy-tests/src/lib.rs +++ b/packages/strategy-tests/src/lib.rs @@ -44,6 +44,7 @@ use platform_version::TryFromPlatformVersioned; use rand::prelude::StdRng; use rand::seq::{IteratorRandom, SliceRandom}; use rand::Rng; +use transitions::create_identity_credit_transfer_transition; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::ops::RangeInclusive; use bincode::{Decode, Encode}; @@ -146,7 +147,7 @@ pub struct StartIdentities { pub keys_per_identity: u8, pub starting_balances: u64, // starting balance in duffs pub extra_keys: KeyMaps, - pub hard_coded: Vec<(Identity, StateTransition)>, + pub hard_coded: Vec<(Identity, Option)>, } /// Identities to register on the first block of the strategy @@ -1287,38 +1288,65 @@ impl Strategy { } // Generate state transition for identity transfer operation - OperationType::IdentityTransfer if current_identities.len() > 1 => { + OperationType::IdentityTransfer(identity_transfer_info) => { for _ in 0..count { - let identities_count = current_identities.len(); - if identities_count == 0 { - break; - } + // Handle the case where specific sender, recipient, and amount are provided + if let Some(transfer_info) = identity_transfer_info { + let sender = current_identities + .iter() + .find(|identity| identity.id() == transfer_info.from) + .expect( + "Expected to find sender identity in hardcoded start identities", + ); + let recipient = current_identities + .iter() + .find(|identity| identity.id() == transfer_info.to) + .expect( + "Expected to find recipient identity in hardcoded start identities", + ); - // Select a random identity from the current_identities for the sender - let random_index_sender = rng.gen_range(0..identities_count); + let state_transition = create_identity_credit_transfer_transition( + &sender, + &recipient, + identity_nonce_counter, + signer, // This means in the TUI, the loaded identity must always be the sender since we're always signing with it for now + transfer_info.amount, + ); + operations.push(state_transition); + } else if current_identities.len() > 1 { + // Handle the case where no sender, recipient, and amount are provided - // Clone current_identities to a Vec for manipulation - let mut unused_identities: Vec<_> = - current_identities.iter().cloned().collect(); - unused_identities.remove(random_index_sender); // Remove the sender - let 
unused_identities_count = unused_identities.len(); + let identities_count = current_identities.len(); + if identities_count == 0 { + break; + } - // Select a random identity from the remaining ones for the recipient - let random_index_recipient = rng.gen_range(0..unused_identities_count); - let recipient = &unused_identities[random_index_recipient]; + // Select a random identity from the current_identities for the sender + let random_index_sender = rng.gen_range(0..identities_count); - // Use the sender index on the original slice - let sender = &mut current_identities[random_index_sender]; + // Clone current_identities to a Vec for manipulation + let mut unused_identities: Vec<_> = + current_identities.iter().cloned().collect(); + unused_identities.remove(random_index_sender); // Remove the sender + let unused_identities_count = unused_identities.len(); - let state_transition = - crate::transitions::create_identity_credit_transfer_transition( + // Select a random identity from the remaining ones for the recipient + let random_index_recipient = + rng.gen_range(0..unused_identities_count); + let recipient = &unused_identities[random_index_recipient]; + + // Use the sender index on the original slice + let sender = &mut current_identities[random_index_sender]; + + let state_transition = create_identity_credit_transfer_transition( sender, recipient, identity_nonce_counter, signer, 300000, ); - operations.push(state_transition); + operations.push(state_transition); + } } } diff --git a/packages/strategy-tests/src/operations.rs b/packages/strategy-tests/src/operations.rs index 675e996843..d35fc9f503 100644 --- a/packages/strategy-tests/src/operations.rs +++ b/packages/strategy-tests/src/operations.rs @@ -497,6 +497,13 @@ impl VoteAction { pub type AmountRange = RangeInclusive; +#[derive(Clone, Debug, PartialEq, Encode, Decode)] +pub struct IdentityTransferInfo { + pub from: Identifier, + pub to: Identifier, + pub amount: Credits, +} + #[derive(Clone, Debug, PartialEq)] pub enum OperationType { Document(DocumentOp), @@ -505,7 +512,7 @@ pub enum OperationType { IdentityWithdrawal(AmountRange), ContractCreate(RandomDocumentTypeParameters, DocumentTypeCount), ContractUpdate(DataContractUpdateOp), - IdentityTransfer, + IdentityTransfer(Option), ResourceVote(ResourceVoteOp), } @@ -517,7 +524,7 @@ enum OperationTypeInSerializationFormat { IdentityWithdrawal(AmountRange), ContractCreate(RandomDocumentTypeParameters, DocumentTypeCount), ContractUpdate(Vec), - IdentityTransfer, + IdentityTransfer(Option), ResourceVote(ResourceVoteOpSerializable), } @@ -563,7 +570,9 @@ impl PlatformSerializableWithPlatformVersion for OperationType { contract_op_in_serialization_format, ) } - OperationType::IdentityTransfer => OperationTypeInSerializationFormat::IdentityTransfer, + OperationType::IdentityTransfer(identity_transfer_info) => { + OperationTypeInSerializationFormat::IdentityTransfer(identity_transfer_info) + } OperationType::ResourceVote(resource_vote_op) => { let vote_op_in_serialization_format = resource_vote_op.try_into_platform_versioned(platform_version)?; @@ -626,7 +635,9 @@ impl PlatformDeserializableWithPotentialValidationFromVersionedStructure for Ope )?; OperationType::ContractUpdate(update_op) } - OperationTypeInSerializationFormat::IdentityTransfer => OperationType::IdentityTransfer, + OperationTypeInSerializationFormat::IdentityTransfer(identity_transfer_info) => { + OperationType::IdentityTransfer(identity_transfer_info) + } OperationTypeInSerializationFormat::ResourceVote(resource_vote_op) => { 
let vote_op = resource_vote_op.try_into_platform_versioned(platform_version)?; OperationType::ResourceVote(vote_op) diff --git a/packages/strategy-tests/src/transitions.rs b/packages/strategy-tests/src/transitions.rs index 85d03eb333..c77b51e290 100644 --- a/packages/strategy-tests/src/transitions.rs +++ b/packages/strategy-tests/src/transitions.rs @@ -802,7 +802,7 @@ pub fn create_identity_withdrawal_transition_with_output_address( /// - If the sender's identity does not have a suitable authentication key available for signing. /// - If there's an error during the signing process. pub fn create_identity_credit_transfer_transition( - identity: &mut Identity, + identity: &Identity, recipient: &Identity, identity_nonce_counter: &mut BTreeMap, signer: &mut SimpleSigner, diff --git a/packages/wallet-lib/package.json b/packages/wallet-lib/package.json index b044abe3b7..f258d9678d 100644 --- a/packages/wallet-lib/package.json +++ b/packages/wallet-lib/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/wallet-lib", - "version": "8.5.1", + "version": "8.7.0", "description": "Light wallet library for Dash", "main": "src/index.js", "unpkg": "dist/wallet-lib.min.js", diff --git a/packages/wallet-utils-contract/.eslintrc b/packages/wallet-utils-contract/.eslintrc new file mode 100644 index 0000000000..cb6c7636b6 --- /dev/null +++ b/packages/wallet-utils-contract/.eslintrc @@ -0,0 +1,18 @@ +{ + "extends": "airbnb-base", + "rules": { + "no-plusplus": 0, + "eol-last": [ + "error", + "always" + ], + "class-methods-use-this": "off", + "curly": [ + "error", + "all" + ] + }, + "globals": { + "BigInt": true + } +} diff --git a/packages/wallet-utils-contract/.mocharc.yml b/packages/wallet-utils-contract/.mocharc.yml new file mode 100644 index 0000000000..164b941c1b --- /dev/null +++ b/packages/wallet-utils-contract/.mocharc.yml @@ -0,0 +1,2 @@ +require: test/bootstrap.js +recursive: true diff --git a/packages/wallet-utils-contract/Cargo.toml b/packages/wallet-utils-contract/Cargo.toml new file mode 100644 index 0000000000..7e2527e7a2 --- /dev/null +++ b/packages/wallet-utils-contract/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "wallet-utils-contract" +description = "Wallet data contract schema and tools" +version = "1.7.0" +edition = "2021" +rust-version.workspace = true +license = "MIT" + +[dependencies] +thiserror = "1.0.64" +platform-version = { path = "../rs-platform-version" } +serde_json = { version = "1.0" } +platform-value = { path = "../rs-platform-value" } diff --git a/packages/wallet-utils-contract/LICENSE b/packages/wallet-utils-contract/LICENSE new file mode 100644 index 0000000000..3be9583375 --- /dev/null +++ b/packages/wallet-utils-contract/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2019 Dash Core Group, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/packages/wallet-utils-contract/README.md b/packages/wallet-utils-contract/README.md new file mode 100644 index 0000000000..ce6b5a6233 --- /dev/null +++ b/packages/wallet-utils-contract/README.md @@ -0,0 +1,26 @@ +# Wallet Utils Contract + +[![Build Status](https://github.com/dashpay/platform/actions/workflows/release.yml/badge.svg)](https://github.com/dashpay/platform/actions/workflows/release.yml) +[![NPM version](https://img.shields.io/npm/v/@dashevo/wallet-contract.svg?style=flat-square)](https://npmjs.org/package/@dashevo/wallet-contract) + +JSON Contracts for Dash Wallet apps + +## Table of Contents + +- [Install](#install) +- [Contributing](#contributing) +- [License](#license) + +## Install + +```sh +npm install @dashevo/wallet-contract +``` + +## Contributing + +Feel free to dive in! [Open an issue](https://github.com/dashpay/platform/issues/new/choose) or submit PRs. + +## License + +[MIT](LICENSE) © Dash Core Group, Inc. diff --git a/packages/wallet-utils-contract/lib/systemIds.js b/packages/wallet-utils-contract/lib/systemIds.js new file mode 100644 index 0000000000..f1aa5b80c0 --- /dev/null +++ b/packages/wallet-utils-contract/lib/systemIds.js @@ -0,0 +1,4 @@ +module.exports = { + ownerId: '11111111111111111111111111111111', + contractId: '7CSFGeF4WNzgDmx94zwvHkYaG3Dx4XEe5LFsFgJswLbm', +}; diff --git a/packages/wallet-utils-contract/package.json b/packages/wallet-utils-contract/package.json new file mode 100644 index 0000000000..621f0c05ef --- /dev/null +++ b/packages/wallet-utils-contract/package.json @@ -0,0 +1,29 @@ +{ + "name": "@dashevo/wallet-utils-contract", + "version": "1.7.0", + "description": "A contract and helper scripts for Wallet DApp", + "scripts": { + "lint": "eslint .", + "test": "yarn run test:unit", + "test:unit": "mocha 'test/unit/**/*.spec.js'" + }, + "contributors": [ + { + "name": "Eric Britten", + "email": "eric.britten@dash.org", + "url": "https://github.com/hashengineering" + } + ], + "license": "MIT", + "devDependencies": { + "@dashevo/wasm-dpp": "workspace:*", + "chai": "^4.3.10", + "dirty-chai": "^2.0.1", + "eslint": "^8.53.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-plugin-import": "^2.29.0", + "mocha": "^10.2.0", + "sinon": "^17.0.1", + "sinon-chai": "^3.7.0" + } +} diff --git a/packages/wallet-utils-contract/schema/v1/wallet-utils-contract-documents.json b/packages/wallet-utils-contract/schema/v1/wallet-utils-contract-documents.json new file mode 100644 index 0000000000..b1a012e26f --- /dev/null +++ b/packages/wallet-utils-contract/schema/v1/wallet-utils-contract-documents.json @@ -0,0 +1,49 @@ +{ + "txMetadata": { + "type": "object", + "documentsMutable": true, + "canBeDeleted": true, + "indices": [ + { + "name": "updated", + "properties": [ + { + "$ownerId": "asc" + }, + { + "$updatedAt": "asc" + } + ] + } + ], + "properties": { + "keyIndex": { + "type": "integer", + "minimum": 0, + "description": "The derivation index used to create the encryption key.", + "position": 0 + }, + "encryptionKeyIndex": { + "type": "integer", + "minimum": 0, + "description": "The 
secondary index used to derive the encryption key that is used to encrypt and decrypt encryptedData.", + "position": 1 + }, + "encryptedMetadata": { + "type": "array", + "byteArray": true, + "minItems": 32, + "maxItems": 4096, + "description": "encrypted metadata using AES-CBC-256", + "position": 2 + } + }, + "required": [ + "keyIndex", + "encryptionKeyIndex", + "encryptedMetadata", + "$updatedAt" + ], + "additionalProperties": false + } +} diff --git a/packages/wallet-utils-contract/src/error.rs b/packages/wallet-utils-contract/src/error.rs new file mode 100644 index 0000000000..d01bbcc91c --- /dev/null +++ b/packages/wallet-utils-contract/src/error.rs @@ -0,0 +1,17 @@ +use platform_version::version::FeatureVersion; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + /// Platform expected some specific versions + #[error("platform unknown version on {method}, received: {received}")] + UnknownVersionMismatch { + /// method + method: String, + /// the allowed versions for this method + known_versions: Vec, + /// requested core height + received: FeatureVersion, + }, + #[error("schema deserialize error: {0}")] + InvalidSchemaJson(#[from] serde_json::Error), +} diff --git a/packages/wallet-utils-contract/src/lib.rs b/packages/wallet-utils-contract/src/lib.rs new file mode 100644 index 0000000000..70dafcc26f --- /dev/null +++ b/packages/wallet-utils-contract/src/lib.rs @@ -0,0 +1,37 @@ +mod error; +pub mod v1; + +pub use crate::error::Error; +use platform_value::{Identifier, IdentifierBytes32}; +use platform_version::version::PlatformVersion; +use serde_json::Value; + +pub const ID_BYTES: [u8; 32] = [ + 92, 20, 14, 101, 92, 2, 101, 187, 194, 168, 8, 113, 109, 225, 132, 121, 133, 19, 89, 24, 173, + 81, 205, 253, 11, 118, 102, 75, 169, 91, 163, 124, +]; + +pub const OWNER_ID_BYTES: [u8; 32] = [0; 32]; + +pub const ID: Identifier = Identifier(IdentifierBytes32(ID_BYTES)); +pub const OWNER_ID: Identifier = Identifier(IdentifierBytes32(OWNER_ID_BYTES)); +pub fn load_definitions(platform_version: &PlatformVersion) -> Result, Error> { + match platform_version.system_data_contracts.withdrawals { + 1 => Ok(None), + version => Err(Error::UnknownVersionMismatch { + method: "wallet_contract::load_definitions".to_string(), + known_versions: vec![1], + received: version, + }), + } +} +pub fn load_documents_schemas(platform_version: &PlatformVersion) -> Result { + match platform_version.system_data_contracts.withdrawals { + 1 => v1::load_documents_schemas(), + version => Err(Error::UnknownVersionMismatch { + method: "wallet_contract::load_documents_schemas".to_string(), + known_versions: vec![1], + received: version, + }), + } +} diff --git a/packages/wallet-utils-contract/src/v1/mod.rs b/packages/wallet-utils-contract/src/v1/mod.rs new file mode 100644 index 0000000000..c6e5034231 --- /dev/null +++ b/packages/wallet-utils-contract/src/v1/mod.rs @@ -0,0 +1,21 @@ +use crate::Error; +use serde_json::Value; + +pub mod document_types { + pub mod tx_metadata { + pub const NAME: &str = "tx_metadata"; + + pub mod properties { + pub const KEY_INDEX: &str = "keyIndex"; + pub const ENCRYPTION_KEY_INDEX: &str = "encryptionKeyIndex"; + pub const ENCRYPTED_METADATA: &str = "encryptedMetadata"; + } + } +} + +pub fn load_documents_schemas() -> Result { + serde_json::from_str(include_str!( + "../../schema/v1/wallet-utils-contract-documents.json" + )) + .map_err(Error::InvalidSchemaJson) +} diff --git a/packages/wallet-utils-contract/test/.eslintrc b/packages/wallet-utils-contract/test/.eslintrc new file mode 100644 
index 0000000000..720ced7385 --- /dev/null +++ b/packages/wallet-utils-contract/test/.eslintrc @@ -0,0 +1,12 @@ +{ + "env": { + "node": true, + "mocha": true + }, + "rules": { + "import/no-extraneous-dependencies": "off" + }, + "globals": { + "expect": true + } +} diff --git a/packages/wallet-utils-contract/test/bootstrap.js b/packages/wallet-utils-contract/test/bootstrap.js new file mode 100644 index 0000000000..7af04f464d --- /dev/null +++ b/packages/wallet-utils-contract/test/bootstrap.js @@ -0,0 +1,30 @@ +const sinon = require('sinon'); +const sinonChai = require('sinon-chai'); + +const { expect, use } = require('chai'); +const dirtyChai = require('dirty-chai'); + +const { + default: loadWasmDpp, +} = require('@dashevo/wasm-dpp'); + +use(dirtyChai); +use(sinonChai); + +exports.mochaHooks = { + beforeAll: loadWasmDpp, + + beforeEach() { + if (!this.sinon) { + this.sinon = sinon.createSandbox(); + } else { + this.sinon.restore(); + } + }, + + afterEach() { + this.sinon.restore(); + }, +}; + +global.expect = expect; diff --git a/packages/wallet-utils-contract/test/unit/walletContract.spec.js b/packages/wallet-utils-contract/test/unit/walletContract.spec.js new file mode 100644 index 0000000000..5b3efe97af --- /dev/null +++ b/packages/wallet-utils-contract/test/unit/walletContract.spec.js @@ -0,0 +1,187 @@ +const crypto = require('crypto'); + +const { + DashPlatformProtocol, + JsonSchemaError, +} = require('@dashevo/wasm-dpp'); +const generateRandomIdentifier = require('@dashevo/wasm-dpp/lib/test/utils/generateRandomIdentifierAsync'); + +const { expect } = require('chai'); +const walletContractDocumentsSchema = require('../../schema/v1/wallet-utils-contract-documents.json'); + +const expectJsonSchemaError = (validationResult, errorCount = 1) => { + const errors = validationResult.getErrors(); + expect(errors) + .to + .have + .length(errorCount); + + const error = validationResult.getErrors()[0]; + expect(error) + .to + .be + .instanceof(JsonSchemaError); + + return error; +}; + +describe('Wallet Contract', () => { + let dpp; + let dataContract; + let identityId; + + beforeEach(async () => { + dpp = new DashPlatformProtocol( + { generate: () => crypto.randomBytes(32) }, + ); + + identityId = await generateRandomIdentifier(); + + dataContract = dpp.dataContract.create(identityId, BigInt(1), walletContractDocumentsSchema); + }); + + it('should have a valid contract definition', async () => { + expect(() => dpp.dataContract.create(identityId, BigInt(1), walletContractDocumentsSchema)) + .to + .not + .throw(); + }); + + describe('documents', () => { + describe('txMetadata', () => { + let rawTxMetadataDocument; + + beforeEach(() => { + rawTxMetadataDocument = { + keyIndex: 0, + encryptionKeyIndex: 100, + encryptedMetadata: crypto.randomBytes(64), + }; + }); + + describe('keyIndex', () => { + it('should be defined', async () => { + delete rawTxMetadataDocument.keyIndex; + + const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument); + const validationResult = document.validate(dpp.protocolVersion); + const error = expectJsonSchemaError(validationResult); + + expect(error.keyword) + .to + .equal('required'); + expect(error.params.missingProperty) + .to + .equal('keyIndex'); + }); + + it('should be a non-negative integer', async () => { + rawTxMetadataDocument.keyIndex = -1; + const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument); + const validationResult = document.validate(dpp.protocolVersion); + const error = 
diff --git a/packages/wallet-utils-contract/test/unit/walletContract.spec.js b/packages/wallet-utils-contract/test/unit/walletContract.spec.js
new file mode 100644
index 0000000000..5b3efe97af
--- /dev/null
+++ b/packages/wallet-utils-contract/test/unit/walletContract.spec.js
@@ -0,0 +1,187 @@
+const crypto = require('crypto');
+
+const {
+  DashPlatformProtocol,
+  JsonSchemaError,
+} = require('@dashevo/wasm-dpp');
+const generateRandomIdentifier = require('@dashevo/wasm-dpp/lib/test/utils/generateRandomIdentifierAsync');
+
+const { expect } = require('chai');
+const walletContractDocumentsSchema = require('../../schema/v1/wallet-utils-contract-documents.json');
+
+const expectJsonSchemaError = (validationResult, errorCount = 1) => {
+  const errors = validationResult.getErrors();
+  expect(errors)
+    .to
+    .have
+    .length(errorCount);
+
+  const error = validationResult.getErrors()[0];
+  expect(error)
+    .to
+    .be
+    .instanceof(JsonSchemaError);
+
+  return error;
+};
+
+describe('Wallet Contract', () => {
+  let dpp;
+  let dataContract;
+  let identityId;
+
+  beforeEach(async () => {
+    dpp = new DashPlatformProtocol(
+      { generate: () => crypto.randomBytes(32) },
+    );
+
+    identityId = await generateRandomIdentifier();
+
+    dataContract = dpp.dataContract.create(identityId, BigInt(1), walletContractDocumentsSchema);
+  });
+
+  it('should have a valid contract definition', async () => {
+    expect(() => dpp.dataContract.create(identityId, BigInt(1), walletContractDocumentsSchema))
+      .to
+      .not
+      .throw();
+  });
+
+  describe('documents', () => {
+    describe('txMetadata', () => {
+      let rawTxMetadataDocument;
+
+      beforeEach(() => {
+        rawTxMetadataDocument = {
+          keyIndex: 0,
+          encryptionKeyIndex: 100,
+          encryptedMetadata: crypto.randomBytes(64),
+        };
+      });
+
+      describe('keyIndex', () => {
+        it('should be defined', async () => {
+          delete rawTxMetadataDocument.keyIndex;
+
+          const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument);
+          const validationResult = document.validate(dpp.protocolVersion);
+          const error = expectJsonSchemaError(validationResult);
+
+          expect(error.keyword)
+            .to
+            .equal('required');
+          expect(error.params.missingProperty)
+            .to
+            .equal('keyIndex');
+        });
+
+        it('should be a non-negative integer', async () => {
+          rawTxMetadataDocument.keyIndex = -1;
+          const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument);
+          const validationResult = document.validate(dpp.protocolVersion);
+          const error = expectJsonSchemaError(validationResult);
+          expect(error.keyword).to.equal('minimum');
+        });
+      });
+
+      describe('encryptionKeyIndex', () => {
+        it('should be defined', async () => {
+          delete rawTxMetadataDocument.encryptionKeyIndex;
+
+          const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument);
+          const validationResult = document.validate(dpp.protocolVersion);
+          const error = expectJsonSchemaError(validationResult);
+
+          expect(error.keyword)
+            .to
+            .equal('required');
+          expect(error.params.missingProperty)
+            .to
+            .equal('encryptionKeyIndex');
+        });
+
+        it('should be a non-negative integer', async () => {
+          rawTxMetadataDocument.encryptionKeyIndex = -1;
+          const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument);
+          const validationResult = document.validate(dpp.protocolVersion);
+          const error = expectJsonSchemaError(validationResult);
+          expect(error.keyword).to.equal('minimum');
+        });
+      });
+
+      describe('encryptedMetadata', () => {
+        it('should be defined', async () => {
+          delete rawTxMetadataDocument.encryptedMetadata;
+
+          const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument);
+          const validationResult = document.validate(dpp.protocolVersion);
+          const error = expectJsonSchemaError(validationResult);
+
+          expect(error.keyword)
+            .to
+            .equal('required');
+          expect(error.params.missingProperty)
+            .to
+            .equal('encryptedMetadata');
+        });
+
+        it('should be not shorter than 32 bytes', async () => {
+          rawTxMetadataDocument.encryptedMetadata = crypto.randomBytes(31);
+
+          const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument);
+          const validationResult = document.validate(dpp.protocolVersion);
+          const error = expectJsonSchemaError(validationResult);
+
+          expect(error.keyword)
+            .to
+            .equal('minItems');
+          expect(error.instancePath)
+            .to
+            .equal('/encryptedMetadata');
+        });
+
+        it('should be not longer than 4096 bytes', async () => {
+          rawTxMetadataDocument.encryptedMetadata = crypto.randomBytes(4097);
+
+          const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument);
+          const validationResult = document.validate(dpp.protocolVersion);
+          const error = expectJsonSchemaError(validationResult);
+
+          expect(error.keyword)
+            .to
+            .equal('maxItems');
+          expect(error.instancePath)
+            .to
+            .equal('/encryptedMetadata');
+        });
+      });
+
+      it('should not have additional properties', async () => {
+        rawTxMetadataDocument.someOtherProperty = 42;
+
+        const document = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument);
+        const validationResult = document.validate(dpp.protocolVersion);
+        const error = expectJsonSchemaError(validationResult);
+
+        expect(error.keyword)
+          .to
+          .equal('additionalProperties');
+        expect(error.params.additionalProperties)
+          .to
+          .deep
+          .equal(['someOtherProperty']);
+      });
+
+      it('should be valid', async () => {
+        const txMetadata = dpp.document.create(dataContract, identityId, 'txMetadata', rawTxMetadataDocument);
+
+        const result = await txMetadata.validate(dpp.protocolVersion);
+
+        expect(result.isValid())
+          .to
+          .be
+          .true();
+      });
+    });
+  });
+});
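For readers who want to exercise the new contract outside the mocha harness, here is a standalone sketch built only from the wasm-dpp calls the spec above already uses; the schema path and the `loadWasmDpp` call are assumptions based on the files in this diff:

```js
// Standalone sketch (not part of this diff) — same APIs as the spec above.
const crypto = require('crypto');
const { default: loadWasmDpp, DashPlatformProtocol } = require('@dashevo/wasm-dpp');
const generateRandomIdentifier = require('@dashevo/wasm-dpp/lib/test/utils/generateRandomIdentifierAsync');
// Assumed path, mirroring the require() in the spec:
const schema = require('./packages/wallet-utils-contract/schema/v1/wallet-utils-contract-documents.json');

(async () => {
  await loadWasmDpp(); // compile the wasm module before any DPP call

  const dpp = new DashPlatformProtocol({ generate: () => crypto.randomBytes(32) });
  const identityId = await generateRandomIdentifier();
  const dataContract = dpp.dataContract.create(identityId, BigInt(1), schema);

  // A document satisfying the txMetadata constraints tested above:
  // keyIndex/encryptionKeyIndex >= 0, encryptedMetadata between 32 and 4096 bytes.
  const txMetadata = dpp.document.create(dataContract, identityId, 'txMetadata', {
    keyIndex: 0,
    encryptionKeyIndex: 100,
    encryptedMetadata: crypto.randomBytes(64),
  });

  const result = await txMetadata.validate(dpp.protocolVersion);
  console.log('txMetadata valid:', result.isValid()); // expected: true
})();
```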
true authors = ["Anton Suprunchuk "] diff --git a/packages/wasm-dpp/README.md b/packages/wasm-dpp/README.md index cc31d1a824..73b3494c45 100644 --- a/packages/wasm-dpp/README.md +++ b/packages/wasm-dpp/README.md @@ -39,7 +39,7 @@ Library consumers must ignore class names minification for `@dashevo/wasm-dpp` l - Install [Rust](https://www.rust-lang.org/tools/install) v1.73+ - Add wasm32 target: `$ rustup target add wasm32-unknown-unknown` -- Install wasm-bingen-cli: `cargo install wasm-bindgen-cli@0.2.85` +- Install wasm-bingen-cli: `cargo install wasm-bindgen-cli@0.2.86` - *double-check that wasm-bindgen-cli version above matches wasm-bindgen version in Cargo.lock file* - *Depending on system, additional packages may need to be installed as a prerequisite for wasm-bindgen-cli. If anything is missing, installation will error and prompt what packages are missing (i.e. clang, llvm, libssl-dev)* diff --git a/packages/wasm-dpp/package.json b/packages/wasm-dpp/package.json index 36c6cc466c..ca3b0ca918 100644 --- a/packages/wasm-dpp/package.json +++ b/packages/wasm-dpp/package.json @@ -1,6 +1,6 @@ { "name": "@dashevo/wasm-dpp", - "version": "1.5.1", + "version": "1.7.0", "description": "The JavaScript implementation of the Dash Platform Protocol", "main": "dist/index.js", "types": "dist/index.d.ts", diff --git a/packages/wasm-dpp/scripts/build-wasm.sh b/packages/wasm-dpp/scripts/build-wasm.sh index 2b89311437..0c154372ba 100755 --- a/packages/wasm-dpp/scripts/build-wasm.sh +++ b/packages/wasm-dpp/scripts/build-wasm.sh @@ -28,8 +28,8 @@ fi # - packages/wasm-dpp/Cargo.toml # - Dockerfile if ! [[ -x "$(command -v wasm-bindgen)" ]]; then - echo "Wasm-bindgen CLI ${WASM_BINDGEN_VERSION} is not installed. Installing" - cargo install --config net.git-fetch-with-cli=true --profile "${CARGO_BUILD_PROFILE}" -f "wasm-bindgen-cli@0.2.86" + echo "Wasm-bindgen CLI ${WASM_BINDGEN_VERSION} is not installed." 
+  exit 1
 fi
 
 # On a mac, bundled clang won't work - you need to install LLVM manually through brew,
diff --git a/packages/wasm-dpp/src/lib.rs b/packages/wasm-dpp/src/lib.rs
index 6d0aa9c50c..8d815ed6c2 100644
--- a/packages/wasm-dpp/src/lib.rs
+++ b/packages/wasm-dpp/src/lib.rs
@@ -19,6 +19,7 @@ mod identifier;
 mod identity;
 mod metadata;
 // mod state_repository;
+/// State transitions
 pub mod state_transition;
 // mod version;
diff --git a/packages/withdrawals-contract/Cargo.toml b/packages/withdrawals-contract/Cargo.toml
index e9be9eb577..21f474d5e5 100644
--- a/packages/withdrawals-contract/Cargo.toml
+++ b/packages/withdrawals-contract/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "withdrawals-contract"
 description = "Witdrawals data contract schema and tools"
-version = "1.5.1"
+version = "1.7.0"
 edition = "2021"
 rust-version.workspace = true
 license = "MIT"
diff --git a/packages/withdrawals-contract/package.json b/packages/withdrawals-contract/package.json
index ecefb6502b..d49ca9d113 100644
--- a/packages/withdrawals-contract/package.json
+++ b/packages/withdrawals-contract/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@dashevo/withdrawals-contract",
-  "version": "1.5.1",
+  "version": "1.7.0",
   "description": "Data Contract to manipulate and track withdrawals",
   "scripts": {
     "build": "",
diff --git a/scripts/configure_test_suite_network.sh b/scripts/configure_test_suite_network.sh
index 54e6f99349..498e9d2d03 100755
--- a/scripts/configure_test_suite_network.sh
+++ b/scripts/configure_test_suite_network.sh
@@ -66,7 +66,7 @@ else
   CERT_FLAG=""
   ST_EXECUTION_INTERVAL=15000
 fi
-SKIP_SYNC_BEFORE_HEIGHT=$(curl -s $INSIGHT_URL | jq '.height - 200')
+SKIP_SYNC_BEFORE_HEIGHT=4800 # $(curl -s $INSIGHT_URL | jq '.height - 200')
 
 # check variables are not empty
 if [ -z "$FAUCET_ADDRESS" ] || \
diff --git a/yarn.lock b/yarn.lock
index 08923aac7f..0119fa898d 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1935,6 +1935,22 @@ __metadata:
   languageName: unknown
   linkType: soft
 
+"@dashevo/wallet-utils-contract@workspace:packages/wallet-utils-contract":
+  version: 0.0.0-use.local
+  resolution: "@dashevo/wallet-utils-contract@workspace:packages/wallet-utils-contract"
+  dependencies:
+    "@dashevo/wasm-dpp": "workspace:*"
+    chai: "npm:^4.3.10"
+    dirty-chai: "npm:^2.0.1"
+    eslint: "npm:^8.53.0"
+    eslint-config-airbnb-base: "npm:^15.0.0"
+    eslint-plugin-import: "npm:^2.29.0"
+    mocha: "npm:^10.2.0"
+    sinon: "npm:^17.0.1"
+    sinon-chai: "npm:^3.7.0"
+  languageName: unknown
+  linkType: soft
+
 "@dashevo/wasm-dpp@workspace:*, @dashevo/wasm-dpp@workspace:packages/wasm-dpp":
   version: 0.0.0-use.local
   resolution: "@dashevo/wasm-dpp@workspace:packages/wasm-dpp"
@@ -6273,27 +6289,14 @@
   languageName: node
   linkType: hard
 
-"cross-spawn@npm:7.0.3, cross-spawn@npm:^7.0.0, cross-spawn@npm:^7.0.1, cross-spawn@npm:^7.0.2, cross-spawn@npm:^7.0.3":
-  version: 7.0.3
-  resolution: "cross-spawn@npm:7.0.3"
+"cross-spawn@npm:^7.0.5":
+  version: 7.0.5
+  resolution: "cross-spawn@npm:7.0.5"
   dependencies:
     path-key: "npm:^3.1.0"
     shebang-command: "npm:^2.0.0"
     which: "npm:^2.0.1"
-  checksum: e1a13869d2f57d974de0d9ef7acbf69dc6937db20b918525a01dacb5032129bd552d290d886d981e99f1b624cb03657084cc87bd40f115c07ecf376821c729ce
-  languageName: node
-  linkType: hard
-
-"cross-spawn@npm:^6.0.5":
-  version: 6.0.5
-  resolution: "cross-spawn@npm:6.0.5"
-  dependencies:
-    nice-try: "npm:^1.0.4"
-    path-key: "npm:^2.0.1"
-    semver: "npm:^5.5.0"
-    shebang-command: "npm:^1.2.0"
-    which: "npm:^1.2.9"
-  checksum: f07e643b4875f26adffcd7f13bc68d9dff20cf395f8ed6f43a23f3ee24fc3a80a870a32b246fd074e514c8fd7da5f978ac6a7668346eec57aa87bac89c1ed3a1
+  checksum: c95062469d4bdbc1f099454d01c0e77177a3733012d41bf907a71eb8d22d2add43b5adf6a0a14ef4e7feaf804082714d6c262ef4557a1c480b86786c120d18e2
   languageName: node
   linkType: hard
@@ -11383,21 +11386,12 @@
-"nanoid@npm:3.3.1":
-  version: 3.3.1
-  resolution: "nanoid@npm:3.3.1"
+"nanoid@npm:^3.3.8":
+  version: 3.3.8
+  resolution: "nanoid@npm:3.3.8"
   bin:
     nanoid: bin/nanoid.cjs
-  checksum: 306f2cb9e4dcfb94738b09de9dc63839a37db33626f66b24dbcc8f66d4b91784645794a7c4f250d629e4d66f5385164c6748c58ac5b7c95217e9e048590efbe4
-  languageName: node
-  linkType: hard
-
-"nanoid@npm:3.3.3":
-  version: 3.3.3
-  resolution: "nanoid@npm:3.3.3"
-  bin:
-    nanoid: bin/nanoid.cjs
-  checksum: c703ed58a234b68245a8a4826dd25c1453a9017d34fa28bc58e7aa8247de87d854582fa2209d7aee04084cff9ce150be8fd30300abe567dc615d4e8e735f2d99
+  checksum: 2d1766606cf0d6f47b6f0fdab91761bb81609b2e3d367027aff45e6ee7006f660fb7e7781f4a34799fe6734f1268eeed2e37a5fdee809ade0c2d4eb11b0f9c40
   languageName: node
   linkType: hard
@@ -11450,13 +11444,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"nice-try@npm:^1.0.4":
-  version: 1.0.5
-  resolution: "nice-try@npm:1.0.5"
-  checksum: 0b4af3b5bb5d86c289f7a026303d192a7eb4417231fe47245c460baeabae7277bcd8fd9c728fb6bd62c30b3e15cd6620373e2cf33353b095d8b403d3e8a15aff
-  languageName: node
-  linkType: hard
-
 "nise@npm:^5.1.5":
   version: 5.1.5
   resolution: "nise@npm:5.1.5"
@@ -12566,13 +12553,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"path-key@npm:^2.0.1":
-  version: 2.0.1
-  resolution: "path-key@npm:2.0.1"
-  checksum: 6e654864e34386a2a8e6bf72cf664dcabb76574dd54013add770b374384d438aca95f4357bb26935b514a4e4c2c9b19e191f2200b282422a76ee038b9258c5e7
-  languageName: node
-  linkType: hard
-
 "path-key@npm:^3.0.0, path-key@npm:^3.1.0":
   version: 3.1.1
   resolution: "path-key@npm:3.1.1"
@@ -13893,15 +13873,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"shebang-command@npm:^1.2.0":
-  version: 1.2.0
-  resolution: "shebang-command@npm:1.2.0"
-  dependencies:
-    shebang-regex: "npm:^1.0.0"
-  checksum: 9eed1750301e622961ba5d588af2212505e96770ec376a37ab678f965795e995ade7ed44910f5d3d3cb5e10165a1847f52d3348c64e146b8be922f7707958908
-  languageName: node
-  linkType: hard
-
 "shebang-command@npm:^2.0.0":
   version: 2.0.0
   resolution: "shebang-command@npm:2.0.0"
@@ -13911,13 +13882,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"shebang-regex@npm:^1.0.0":
-  version: 1.0.0
-  resolution: "shebang-regex@npm:1.0.0"
-  checksum: 404c5a752cd40f94591dfd9346da40a735a05139dac890ffc229afba610854d8799aaa52f87f7e0c94c5007f2c6af55bdcaeb584b56691926c5eaf41dc8f1372
-  languageName: node
-  linkType: hard
-
 "shebang-regex@npm:^3.0.0":
   version: 3.0.0
   resolution: "shebang-regex@npm:3.0.0"
@@ -16047,7 +16011,7 @@ __metadata:
   languageName: node
   linkType: hard
 
-"which@npm:^1.2.1, which@npm:^1.2.9":
+"which@npm:^1.2.1":
   version: 1.3.1
   resolution: "which@npm:1.3.1"
   dependencies: